* add pytest_nose plugin
* have unittest functions always receive a fresh instance

--HG--
branch : 1.0.x
parent a01e4769cc
commit b552f6eb46
@@ -1,6 +1,10 @@
Changes between 1.0.0 and 1.0.1
=====================================

* added a 'pytest_nose' plugin which handles nose.SkipTest,
  nose-style function/method/generator setup/teardown and
  tries to report functions correctly.

* unicode fixes: capturing and unicode writes to sys.stdout
  (through e.g a print statement) now work within tests,
  they are encoded as "utf8" by default, also terminalwriting
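
A hedged sketch of what the unicode fix above means in practice (the module
name and string are made up for illustration): with capturing active, printing
a non-ascii unicode string inside a test is encoded as "utf8" instead of
raising an error::

    # -*- coding: utf-8 -*-
    # test_unicode_output.py -- hypothetical example of unicode-safe capturing
    def test_unicode_print():
        print u"gr\xfcn"   # a non-ascii unicode string ("grün")
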
@@ -0,0 +1,64 @@

pytest_nose plugin
==================

nose-compatibility plugin: allows running nose test suites natively.

.. contents::
   :local:

This is an experimental plugin for running tests written in the
'nosetests' style with py.test.  nosetests is a popular clone of
py.test and thus shares some philosophy.  This plugin is an attempt
to understand and neutralize the differences.  It allows running
nosetests' own test suite and a number of other test suites without
problems.

Usage
-------------

If you type::

    py.test -p nose

where you would type ``nosetests``, you can run your nose-style tests.
You might also try a run without the nose plugin to see where your test
suite is incompatible with the default py.test.

To avoid the need for specifying a command line option you can set an
environment variable::

    PYTEST_PLUGINS=nose

or create a ``conftest.py`` file in your test directory or below::

    # conftest.py
    pytest_plugins = "nose",

If you find issues or have suggestions you may run::

    py.test -p nose --pastebin=all

to create a URL of a test run session and send it with comments to the issue
tracker or mailing list.

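For orientation, a hedged sketch of the kind of nose-style module such a run
picks up (the file name, helper names and the "connections" example are made
up for illustration)::

    # test_example.py -- hypothetical nose-style tests: per-function
    # setup/teardown is attached as function attributes, as nose allows.
    connections = []

    def setup_func():
        connections.append("db")

    def teardown_func():
        connections.pop()

    def test_uses_connection():
        assert connections == ["db"]

    test_uses_connection.setup = setup_func
    test_uses_connection.teardown = teardown_func
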
Known issues
------------------

- nose-style doctests are not collected and executed correctly;
  fixtures don't work for them either.

Start improving this plugin in 30 seconds
=========================================

Do you find the above documentation or the plugin itself lacking?

1. Download the `pytest_nose.py`_ plugin source code
2. Put it somewhere as ``pytest_nose.py`` into your import path
3. A subsequent ``py.test`` run will use your local version

Further information: extend_ documentation, other plugins_ or contact_.

.. include:: links.txt

@@ -7,7 +7,7 @@ plugins = [
    ('Plugins related to Python test functions and programs',
        'xfail figleaf monkeypatch capture recwarn',),
    ('Plugins for other testing styles and languages',
        'unittest doctest oejskit restdoc'),
        'oejskit unittest nose doctest restdoc'),
    ('Plugins for generic reporting and failure logging',
        'pastebin resultlog terminal',),
    ('internal plugins / core functionality',
@@ -396,6 +396,11 @@ class Directory(FSCollector):
    def _ignore(self, path):
        ignore_paths = self.config.getconftest_pathlist("collect_ignore", path=path)
        return ignore_paths and path in ignore_paths
        # XXX more refined would be:
        if ignore_paths:
            for p in ignore_paths:
                if path == p or path.relto(p):
                    return True

    def consider(self, path):
        if self._ignore(path):
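
The "collect_ignore" list consulted by ``_ignore()`` above comes from a
``conftest.py``; a minimal sketch of how a project might use it (the ignored
names are made up for illustration)::

    # conftest.py -- paths listed here are skipped during collection;
    # string entries are resolved relative to this conftest's directory.
    collect_ignore = ["setup.py", "_build"]
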
@@ -15,10 +15,15 @@ def pytest_funcarg__testdir(request):
    #        testdir.plugins.append(obj.testplugin)
    #        break
    #else:
    basename = request.module.__name__.split(".")[-1]
    if basename.startswith("pytest_"):
    modname = request.module.__name__.split(".")[-1]
    if modname.startswith("pytest_"):
        testdir.plugins.append(vars(request.module))
        testdir.plugins.append(basename)
        testdir.plugins.append(modname)
    #elif modname.startswith("test_pytest"):
    #    pname = modname[5:]
    #    assert pname not in testdir.plugins
    #    testdir.plugins.append(pname)
    #    #testdir.plugins.append(vars(request.module))
    else:
        pass # raise ValueError("need better support code")
    return testdir
@@ -0,0 +1,97 @@
"""nose-compatibility plugin: allows running nose test suites natively.

This is an experimental plugin for running tests written in the
'nosetests' style with py.test.  nosetests is a popular clone of
py.test and thus shares some philosophy.  This plugin is an attempt
to understand and neutralize the differences.  It allows running
nosetests' own test suite and a number of other test suites without
problems.

Usage
-------------

If you type::

    py.test -p nose

where you would type ``nosetests``, you can run your nose-style tests.
You might also try a run without the nose plugin to see where your test
suite is incompatible with the default py.test.

To avoid the need for specifying a command line option you can set an
environment variable::

    PYTEST_PLUGINS=nose

or create a ``conftest.py`` file in your test directory or below::

    # conftest.py
    pytest_plugins = "nose",

If you find issues or have suggestions you may run::

    py.test -p nose --pastebin=all

to create a URL of a test run session and send it with comments to the issue
tracker or mailing list.

Known issues
------------------

- nose-style doctests are not collected and executed correctly;
  fixtures don't work for them either.

"""
import py
import inspect
import sys

def pytest_runtest_makereport(__call__, item, call):
    SkipTest = getattr(sys.modules.get('nose', None), 'SkipTest', None)
    if SkipTest:
        if call.excinfo and call.excinfo.errisinstance(SkipTest):
            # let's substitute the excinfo with a py.test.skip one
            call2 = call.__class__(lambda: py.test.skip(str(call.excinfo.value)), call.when)
            call.excinfo = call2.excinfo

def pytest_report_iteminfo(item):
    # nose 0.11.1 uses decorators for "raises" and other helpers.
    # for reporting progress by filename we fish for the filename
    if isinstance(item, py.test.collect.Function):
        obj = item.obj
        if hasattr(obj, 'compat_co_firstlineno'):
            fn = sys.modules[obj.__module__].__file__
            if fn.endswith(".pyc"):
                fn = fn[:-1]
            #assert 0
            #fn = inspect.getsourcefile(obj) or inspect.getfile(obj)
            lineno = obj.compat_co_firstlineno
            return py.path.local(fn), lineno, obj.__module__

def pytest_runtest_setup(item):
    if isinstance(item, (py.test.collect.Function)):
        if isinstance(item.parent, py.test.collect.Generator):
            gen = item.parent
            if not hasattr(gen, '_nosegensetup'):
                call_optional(gen.obj, 'setup')
                if isinstance(gen.parent, py.test.collect.Instance):
                    call_optional(gen.parent.obj, 'setup')
                gen._nosegensetup = True
        call_optional(item.obj, 'setup')

def pytest_runtest_teardown(item):
    if isinstance(item, py.test.collect.Function):
        call_optional(item.obj, 'teardown')
        #if hasattr(item.parent, '_nosegensetup'):
        #    #call_optional(item._nosegensetup, 'teardown')
        #    del item.parent._nosegensetup

def pytest_make_collect_report(collector):
    if isinstance(collector, py.test.collect.Generator):
        call_optional(collector.obj, 'setup')

def call_optional(obj, name):
    method = getattr(obj, name, None)
    if method:
        method()
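
A hedged sketch of what the SkipTest translation above means in practice; the
module name and message are made up for illustration::

    # test_nose_skip.py -- run with "py.test -p nose"; the raised
    # nose.SkipTest is reported as a py.test skip instead of an error.
    import nose

    def test_requires_backend():
        raise nose.SkipTest("backend not installed")
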
@@ -241,15 +241,10 @@ class TerminalReporter:
        if self.config.option.traceconfig:
            plugins = []
            for plugin in self.config.pluginmanager.comregistry:
                name = getattr(plugin, '__name__', None)
                if name is None:
                    name = plugin.__class__.__name__
                    if name.endswith("Plugin"):
                        name = name[:-6]
                    #if name == "Conftest":
                    # XXX get filename
                    plugins.append(name)
                else:
                    plugins.append(str(plugin))

            plugins = ", ".join(plugins)
            self.write_line("active plugins: %s" %(plugins,))
        for i, testarg in py.builtin.enumerate(self.config.args):
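
A sketch, in the style of this repo's testdir-based tests, of checking that
``--traceconfig`` emits the "active plugins:" line written above; the test
name is made up for illustration::

    def test_traceconfig_lists_plugins(testdir):
        result = testdir.runpytest("--traceconfig")
        result.stdout.fnmatch_lines(["*active plugins:*"])
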
@@ -55,6 +55,9 @@ class UnitTestFunction(py.test.collect.Function):
        if obj is not _dummy:
            self._obj = obj
        self._sort_value = sort_value
        if hasattr(self.parent, 'newinstance'):
            self.parent.newinstance()
            self.obj = self._getobj()

    def runtest(self):
        target = self.obj

@@ -87,7 +90,6 @@ def test_simple_unittest(testdir):
def test_setup(testdir):
    testpath = testdir.makepyfile(test_two="""
        import unittest
        pytest_plugins = "pytest_unittest" # XXX
        class MyTestCase(unittest.TestCase):
            def setUp(self):
                self.foo = 1

@@ -98,6 +100,18 @@ def test_setup(testdir):
    rep = reprec.matchreport("test_setUp")
    assert rep.passed

def test_new_instances(testdir):
    testpath = testdir.makepyfile("""
        import unittest
        class MyTestCase(unittest.TestCase):
            def test_func1(self):
                self.x = 2
            def test_func2(self):
                assert not hasattr(self, 'x')
    """)
    reprec = testdir.inline_run(testpath)
    reprec.assertoutcome(passed=2)

def test_teardown(testdir):
    testpath = testdir.makepyfile(test_three="""
        import unittest
@@ -0,0 +1,87 @@
import py
py.test.importorskip("nose")

def test_nose_setup(testdir):
    p = testdir.makepyfile("""
        l = []

        def test_hello():
            assert l == [1]
        def test_world():
            assert l == [1,2]
        test_hello.setup = lambda: l.append(1)
        test_hello.teardown = lambda: l.append(2)
    """)
    result = testdir.runpytest(p, '-p', 'nose')
    result.stdout.fnmatch_lines([
        "*2 passed*"
    ])

def test_nose_test_generator_fixtures(testdir):
    p = testdir.makepyfile("""
        # taken from nose-0.11.1 unit_tests/test_generator_fixtures.py
        from nose.tools import eq_
        called = []

        def outer_setup():
            called.append('outer_setup')

        def outer_teardown():
            called.append('outer_teardown')

        def inner_setup():
            called.append('inner_setup')

        def inner_teardown():
            called.append('inner_teardown')

        def test_gen():
            called[:] = []
            for i in range(0, 5):
                yield check, i

        def check(i):
            expect = ['outer_setup']
            for x in range(0, i):
                expect.append('inner_setup')
                expect.append('inner_teardown')
            expect.append('inner_setup')
            eq_(called, expect)


        test_gen.setup = outer_setup
        test_gen.teardown = outer_teardown
        check.setup = inner_setup
        check.teardown = inner_teardown

        class TestClass(object):
            def setup(self):
                print "setup called in", self
                self.called = ['setup']

            def teardown(self):
                print "teardown called in", self
                eq_(self.called, ['setup'])
                self.called.append('teardown')

            def test(self):
                print "test called in", self
                for i in range(0, 5):
                    yield self.check, i

            def check(self, i):
                print "check called in", self
                expect = ['setup']
                #for x in range(0, i):
                #    expect.append('setup')
                #    expect.append('teardown')
                #expect.append('setup')
                eq_(self.called, expect)

    """)
    result = testdir.runpytest(p, '-p', 'nose')
    result.stdout.fnmatch_lines([
        "*10 passed*"
    ])