# mypy: allow-untyped-defs
import gc
import sys
from typing import List

from _pytest.config import ExitCode
from _pytest.monkeypatch import MonkeyPatch
from _pytest.pytester import Pytester
import pytest


def test_simple_unittest(pytester: Pytester) -> None:
    testpath = pytester.makepyfile(
        """
        import unittest
        class MyTestCase(unittest.TestCase):
            def testpassing(self):
                self.assertEqual('foo', 'foo')
            def test_failing(self):
                self.assertEqual('foo', 'bar')
        """
    )
    reprec = pytester.inline_run(testpath)
    assert reprec.matchreport("testpassing").passed
    assert reprec.matchreport("test_failing").failed


def test_runTest_method(pytester: Pytester) -> None:
    pytester.makepyfile(
        """
        import unittest
        class MyTestCaseWithRunTest(unittest.TestCase):
            def runTest(self):
                self.assertEqual('foo', 'foo')
        class MyTestCaseWithoutRunTest(unittest.TestCase):
            def runTest(self):
                self.assertEqual('foo', 'foo')
            def test_something(self):
                pass
        """
    )
    result = pytester.runpytest("-v")
    result.stdout.fnmatch_lines(
        """
        *MyTestCaseWithRunTest::runTest*
        *MyTestCaseWithoutRunTest::test_something*
        *2 passed*
        """
    )


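# Issue 53 regression guard: a module-level object whose __getattr__ answers every
# attribute lookup must not be mistaken for a test class during collection.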
def test_isclasscheck_issue53(pytester: Pytester) -> None:
    testpath = pytester.makepyfile(
        """
        import unittest
        class _E(object):
            def __getattr__(self, tag):
                pass
        E = _E()
        """
    )
    result = pytester.runpytest(testpath)
    assert result.ret == ExitCode.NO_TESTS_COLLECTED


def test_setup(pytester: Pytester) -> None:
    testpath = pytester.makepyfile(
        """
        import unittest
        class MyTestCase(unittest.TestCase):
            def setUp(self):
                self.foo = 1
            def setup_method(self, method):
                self.foo2 = 1
            def test_both(self):
                self.assertEqual(1, self.foo)
                assert self.foo2 == 1
            def teardown_method(self, method):
                assert 0, "42"

        """
    )
    reprec = pytester.inline_run("-s", testpath)
    assert reprec.matchreport("test_both", when="call").passed
    rep = reprec.matchreport("test_both", when="teardown")
    assert rep.failed and "42" in str(rep.longrepr)


def test_setUpModule(pytester: Pytester) -> None:
    testpath = pytester.makepyfile(
        """
        values = []

        def setUpModule():
            values.append(1)

        def tearDownModule():
            del values[0]

        def test_hello():
            assert values == [1]

        def test_world():
            assert values == [1]
        """
    )
    result = pytester.runpytest(testpath)
    result.stdout.fnmatch_lines(["*2 passed*"])


def test_setUpModule_failing_no_teardown(pytester: Pytester) -> None:
    testpath = pytester.makepyfile(
        """
        values = []

        def setUpModule():
            0/0

        def tearDownModule():
            values.append(1)

        def test_hello():
            pass
        """
    )
    reprec = pytester.inline_run(testpath)
    reprec.assertoutcome(passed=0, failed=1)
    call = reprec.getcalls("pytest_runtest_setup")[0]
    assert not call.item.module.values


def test_new_instances(pytester: Pytester) -> None:
    testpath = pytester.makepyfile(
        """
        import unittest
        class MyTestCase(unittest.TestCase):
            def test_func1(self):
                self.x = 2
            def test_func2(self):
                assert not hasattr(self, 'x')
        """
    )
    reprec = pytester.inline_run(testpath)
    reprec.assertoutcome(passed=2)


def test_function_item_obj_is_instance(pytester: Pytester) -> None:
    """item.obj should be a bound method on unittest.TestCase function items (#5390)."""
    pytester.makeconftest(
        """
        def pytest_runtest_makereport(item, call):
            if call.when == 'call':
                class_ = item.parent.obj
                assert isinstance(item.obj.__self__, class_)
        """
    )
    pytester.makepyfile(
        """
        import unittest

        class Test(unittest.TestCase):
            def test_foo(self):
                pass
        """
    )
    result = pytester.runpytest_inprocess()
    result.stdout.fnmatch_lines(["* 1 passed in*"])


def test_teardown(pytester: Pytester) -> None:
    testpath = pytester.makepyfile(
        """
        import unittest
        class MyTestCase(unittest.TestCase):
            values = []
            def test_one(self):
                pass
            def tearDown(self):
                self.values.append(None)
        class Second(unittest.TestCase):
            def test_check(self):
                self.assertEqual(MyTestCase.values, [None])
        """
    )
    reprec = pytester.inline_run(testpath)
    passed, skipped, failed = reprec.countoutcomes()
    assert failed == 0, failed
    assert passed == 2
    assert passed + skipped + failed == 2


def test_teardown_issue1649(pytester: Pytester) -> None:
    """
    Are TestCase objects cleaned up? Often unittest TestCase objects set
    attributes that are large and expensive during setUp.

    The TestCase will not be cleaned up if the test fails, because it
    would then exist in the stackframe.
    """
    testpath = pytester.makepyfile(
        """
        import unittest
        class TestCaseObjectsShouldBeCleanedUp(unittest.TestCase):
            def setUp(self):
                self.an_expensive_object = 1
            def test_demo(self):
                pass

        """
    )
    pytester.inline_run("-s", testpath)
    gc.collect()
    for obj in gc.get_objects():
        assert type(obj).__name__ != "TestCaseObjectsShouldBeCleanedUp"


def test_unittest_skip_issue148(pytester: Pytester) -> None:
    testpath = pytester.makepyfile(
        """
        import unittest

        @unittest.skip("hello")
        class MyTestCase(unittest.TestCase):
            @classmethod
            def setUpClass(self):
                xxx
            def test_one(self):
                pass
            @classmethod
            def tearDownClass(self):
                xxx
        """
    )
    reprec = pytester.inline_run(testpath)
    reprec.assertoutcome(skipped=1)


def test_method_and_teardown_failing_reporting(pytester: Pytester) -> None:
    pytester.makepyfile(
        """
        import unittest
        class TC(unittest.TestCase):
            def tearDown(self):
                assert 0, "down1"
            def test_method(self):
                assert False, "down2"
        """
    )
    result = pytester.runpytest("-s")
    assert result.ret == 1
    result.stdout.fnmatch_lines(
        [
            "*tearDown*",
            "*assert 0*",
            "*test_method*",
            "*assert False*",
            "*1 failed*1 error*",
        ]
    )


def test_setup_failure_is_shown(pytester: Pytester) -> None:
    pytester.makepyfile(
        """
        import unittest
        import pytest
        class TC(unittest.TestCase):
            def setUp(self):
                assert 0, "down1"
            def test_method(self):
                print("never42")
                xyz
        """
    )
    result = pytester.runpytest("-s")
    assert result.ret == 1
    result.stdout.fnmatch_lines(["*setUp*", "*assert 0*down1*", "*1 failed*"])
    result.stdout.no_fnmatch_line("*never42*")


def test_setup_setUpClass(pytester: Pytester) -> None:
    testpath = pytester.makepyfile(
        """
        import unittest
        import pytest
        class MyTestCase(unittest.TestCase):
            x = 0
            @classmethod
            def setUpClass(cls):
                cls.x += 1
            def test_func1(self):
                assert self.x == 1
            def test_func2(self):
                assert self.x == 1
            @classmethod
            def tearDownClass(cls):
                cls.x -= 1
        def test_teareddown():
            assert MyTestCase.x == 0
        """
    )
    reprec = pytester.inline_run(testpath)
    reprec.assertoutcome(passed=3)


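# Issue 8394: "--fixtures" alone should not advertise undocumented internal fixtures
# (presumably the one pytest generates for setUpClass); "-v" may still list them.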
def test_fixtures_setup_setUpClass_issue8394(pytester: Pytester) -> None:
    pytester.makepyfile(
        """
        import unittest
        class MyTestCase(unittest.TestCase):
            @classmethod
            def setUpClass(cls):
                pass
            def test_func1(self):
                pass
            @classmethod
            def tearDownClass(cls):
                pass
        """
    )
    result = pytester.runpytest("--fixtures")
    assert result.ret == 0
    result.stdout.no_fnmatch_line("*no docstring available*")

    result = pytester.runpytest("--fixtures", "-v")
    assert result.ret == 0
    result.stdout.fnmatch_lines(["*no docstring available*"])


def test_setup_class(pytester: Pytester) -> None:
    testpath = pytester.makepyfile(
        """
        import unittest
        import pytest
        class MyTestCase(unittest.TestCase):
            x = 0
            def setup_class(cls):
                cls.x += 1
            def test_func1(self):
                assert self.x == 1
            def test_func2(self):
                assert self.x == 1
            def teardown_class(cls):
                cls.x -= 1
        def test_teareddown():
            assert MyTestCase.x == 0
        """
    )
    reprec = pytester.inline_run(testpath)
    reprec.assertoutcome(passed=3)


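# The two parametrized tests below drive TestCase.run() by hand and feed pytest's
# exception info into result.addError / result.addFailure, checking that those calls
# never raise and that incompatible exception representations degrade gracefully.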
@pytest.mark.parametrize("type", ["Error", "Failure"])
|
2020-12-16 12:16:05 +08:00
|
|
|
def test_testcase_adderrorandfailure_defers(pytester: Pytester, type: str) -> None:
|
|
|
|
pytester.makepyfile(
|
2010-11-23 22:42:23 +08:00
|
|
|
f"""
|
|
|
|
from unittest import TestCase
|
|
|
|
import pytest
|
|
|
|
class MyTestCase(TestCase):
|
|
|
|
def run(self, result):
|
|
|
|
excinfo = pytest.raises(ZeroDivisionError, lambda: 0/0)
|
|
|
|
try:
|
|
|
|
result.add{type}(self, excinfo._excinfo)
|
|
|
|
except KeyboardInterrupt:
|
|
|
|
raise
|
|
|
|
except:
|
|
|
|
pytest.fail("add{type} should not raise")
|
|
|
|
def test_hello(self):
|
|
|
|
pass
|
|
|
|
"""
|
2018-05-23 22:48:46 +08:00
|
|
|
)
|
2020-12-16 12:16:05 +08:00
|
|
|
result = pytester.runpytest()
|
2019-10-06 01:18:51 +08:00
|
|
|
result.stdout.no_fnmatch_line("*should not raise*")
|
2010-11-23 22:42:23 +08:00
|
|
|
|
2017-07-17 07:25:09 +08:00
|
|
|
|
2013-11-21 21:40:14 +08:00
|
|
|
@pytest.mark.parametrize("type", ["Error", "Failure"])
|
2020-12-16 12:16:05 +08:00
|
|
|
def test_testcase_custom_exception_info(pytester: Pytester, type: str) -> None:
|
|
|
|
pytester.makepyfile(
|
2010-11-23 22:42:23 +08:00
|
|
|
"""
|
2021-03-13 17:10:34 +08:00
|
|
|
from typing import Generic, TypeVar
|
2010-11-23 22:42:23 +08:00
|
|
|
from unittest import TestCase
|
2021-03-13 17:10:34 +08:00
|
|
|
import pytest, _pytest._code
|
|
|
|
|
2010-11-23 22:42:23 +08:00
|
|
|
class MyTestCase(TestCase):
|
|
|
|
def run(self, result):
|
|
|
|
excinfo = pytest.raises(ZeroDivisionError, lambda: 0/0)
|
2021-03-13 17:10:34 +08:00
|
|
|
# We fake an incompatible exception info.
|
|
|
|
class FakeExceptionInfo(Generic[TypeVar("E")]):
|
|
|
|
def __init__(self, *args, **kwargs):
|
|
|
|
mp.undo()
|
|
|
|
raise TypeError()
|
|
|
|
@classmethod
|
|
|
|
def from_current(cls):
|
|
|
|
return cls()
|
|
|
|
@classmethod
|
|
|
|
def from_exc_info(cls, *args, **kwargs):
|
|
|
|
return cls()
|
|
|
|
mp = pytest.MonkeyPatch()
|
|
|
|
mp.setattr(_pytest._code, 'ExceptionInfo', FakeExceptionInfo)
|
2010-11-23 22:42:23 +08:00
|
|
|
try:
|
|
|
|
excinfo = excinfo._excinfo
|
|
|
|
result.add{type}(self, excinfo)
|
|
|
|
finally:
|
|
|
|
mp.undo()
|
2021-03-13 17:10:34 +08:00
|
|
|
|
2010-11-23 22:42:23 +08:00
|
|
|
def test_hello(self):
|
|
|
|
pass
|
|
|
|
""".format(**locals())
|
2018-05-23 22:48:46 +08:00
|
|
|
)
|
2020-12-16 12:16:05 +08:00
|
|
|
result = pytester.runpytest()
|
2010-11-23 22:42:23 +08:00
|
|
|
result.stdout.fnmatch_lines(
|
|
|
|
[
|
|
|
|
"NOTE: Incompatible Exception Representation*",
|
|
|
|
"*ZeroDivisionError*",
|
|
|
|
"*1 failed*",
|
|
|
|
]
|
|
|
|
)
|
|
|
|
|
2017-07-17 07:25:09 +08:00
|
|
|
|
2020-12-16 12:16:05 +08:00
|
|
|
def test_testcase_totally_incompatible_exception_info(pytester: Pytester) -> None:
|
|
|
|
import _pytest.unittest
|
|
|
|
|
|
|
|
(item,) = pytester.getitems(
|
2010-11-23 22:42:23 +08:00
|
|
|
"""
|
|
|
|
from unittest import TestCase
|
|
|
|
class MyTestCase(TestCase):
|
|
|
|
def test_hello(self):
|
|
|
|
pass
|
|
|
|
"""
|
|
|
|
)
|
2020-12-16 12:16:05 +08:00
|
|
|
assert isinstance(item, _pytest.unittest.TestCaseFunction)
|
|
|
|
item.addError(None, 42) # type: ignore[arg-type]
|
|
|
|
excinfo = item._excinfo
|
|
|
|
assert excinfo is not None
|
|
|
|
assert "ERROR: Unknown Incompatible" in str(excinfo.pop(0).getrepr())
|
2010-11-24 18:48:55 +08:00
|
|
|
|
2017-07-17 07:25:09 +08:00
|
|
|
|
2020-12-16 12:16:05 +08:00
|
|
|
def test_module_level_pytestmark(pytester: Pytester) -> None:
    testpath = pytester.makepyfile(
        """
        import unittest
        import pytest
        pytestmark = pytest.mark.xfail
        class MyTestCase(unittest.TestCase):
            def test_func1(self):
                assert 0
        """
    )
    reprec = pytester.inline_run(testpath, "-s")
    reprec.assertoutcome(skipped=1)


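# twisted.trial integration tests; the whole class is skipped when twisted is not
# installed (see the importorskip in setup_class).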
class TestTrialUnittest:
    def setup_class(cls):
        cls.ut = pytest.importorskip("twisted.trial.unittest")
        # on windows trial uses a socket for a reactor and apparently doesn't close it properly
        # https://twistedmatrix.com/trac/ticket/9227
        cls.ignore_unclosed_socket_warning = ("-W", "always")

    def test_trial_testcase_runtest_not_collected(self, pytester: Pytester) -> None:
        pytester.makepyfile(
            """
            from twisted.trial.unittest import TestCase

            class TC(TestCase):
                def test_hello(self):
                    pass
            """
        )
        reprec = pytester.inline_run(*self.ignore_unclosed_socket_warning)
        reprec.assertoutcome(passed=1)
        pytester.makepyfile(
            """
            from twisted.trial.unittest import TestCase

            class TC(TestCase):
                def runTest(self):
                    pass
            """
        )
        reprec = pytester.inline_run(*self.ignore_unclosed_socket_warning)
        reprec.assertoutcome(passed=1)

    def test_trial_exceptions_with_skips(self, pytester: Pytester) -> None:
        pytester.makepyfile(
            """
            from twisted.trial import unittest
            import pytest
            class TC(unittest.TestCase):
                def test_hello(self):
                    pytest.skip("skip_in_method")
                @pytest.mark.skipif("sys.version_info != 1")
                def test_hello2(self):
                    pass
                @pytest.mark.xfail(reason="iwanto")
                def test_hello3(self):
                    assert 0
                def test_hello4(self):
                    pytest.xfail("i2wanto")
                def test_trial_skip(self):
                    pass
                test_trial_skip.skip = "trialselfskip"

                def test_trial_todo(self):
                    assert 0
                test_trial_todo.todo = "mytodo"

                def test_trial_todo_success(self):
                    pass
                test_trial_todo_success.todo = "mytodo"

            class TC2(unittest.TestCase):
                def setup_class(cls):
                    pytest.skip("skip_in_setup_class")
                def test_method(self):
                    pass
            """
        )
        result = pytester.runpytest("-rxs", *self.ignore_unclosed_socket_warning)
        result.stdout.fnmatch_lines_random(
            [
                "*XFAIL*test_trial_todo*",
                "*trialselfskip*",
                "*skip_in_setup_class*",
                "*iwanto*",
                "*i2wanto*",
                "*sys.version_info*",
                "*skip_in_method*",
                "*1 failed*4 skipped*3 xfailed*",
            ]
        )
        assert result.ret == 1

    def test_trial_error(self, pytester: Pytester) -> None:
        pytester.makepyfile(
            """
            from twisted.trial.unittest import TestCase
            from twisted.internet.defer import Deferred
            from twisted.internet import reactor

            class TC(TestCase):
                def test_one(self):
                    crash

                def test_two(self):
                    def f(_):
                        crash

                    d = Deferred()
                    d.addCallback(f)
                    reactor.callLater(0.3, d.callback, None)
                    return d

                def test_three(self):
                    def f():
                        pass # will never get called
                    reactor.callLater(0.3, f)
                # will crash at teardown

                def test_four(self):
                    def f(_):
                        reactor.callLater(0.3, f)
                        crash

                    d = Deferred()
                    d.addCallback(f)
                    reactor.callLater(0.3, d.callback, None)
                    return d
                # will crash both at test time and at teardown
            """
        )
        result = pytester.runpytest(
            "-vv", "-oconsole_output_style=classic", "-W", "ignore::DeprecationWarning"
        )
        result.stdout.fnmatch_lines(
            [
                "test_trial_error.py::TC::test_four FAILED",
                "test_trial_error.py::TC::test_four ERROR",
                "test_trial_error.py::TC::test_one FAILED",
                "test_trial_error.py::TC::test_three FAILED",
                "test_trial_error.py::TC::test_two FAILED",
                "*ERRORS*",
                "*_ ERROR at teardown of TC.test_four _*",
                "*DelayedCalls*",
                "*= FAILURES =*",
                "*_ TC.test_four _*",
                "*NameError*crash*",
                "*_ TC.test_one _*",
                "*NameError*crash*",
                "*_ TC.test_three _*",
                "*DelayedCalls*",
                "*_ TC.test_two _*",
                "*NameError*crash*",
                "*= 4 failed, 1 error in *",
            ]
        )

    def test_trial_pdb(self, pytester: Pytester) -> None:
        p = pytester.makepyfile(
            """
            from twisted.trial import unittest
            import pytest
            class TC(unittest.TestCase):
                def test_hello(self):
                    assert 0, "hellopdb"
            """
        )
        child = pytester.spawn_pytest(str(p))
        child.expect("hellopdb")
        child.sendeof()

    def test_trial_testcase_skip_property(self, pytester: Pytester) -> None:
        testpath = pytester.makepyfile(
            """
            from twisted.trial import unittest
            class MyTestCase(unittest.TestCase):
                skip = 'dont run'
                def test_func(self):
                    pass
            """
        )
        reprec = pytester.inline_run(testpath, "-s")
        reprec.assertoutcome(skipped=1)

    def test_trial_testfunction_skip_property(self, pytester: Pytester) -> None:
        testpath = pytester.makepyfile(
            """
            from twisted.trial import unittest
            class MyTestCase(unittest.TestCase):
                def test_func(self):
                    pass
                test_func.skip = 'dont run'
            """
        )
        reprec = pytester.inline_run(testpath, "-s")
        reprec.assertoutcome(skipped=1)

    def test_trial_testcase_todo_property(self, pytester: Pytester) -> None:
        testpath = pytester.makepyfile(
            """
            from twisted.trial import unittest
            class MyTestCase(unittest.TestCase):
                todo = 'dont run'
                def test_func(self):
                    assert 0
            """
        )
        reprec = pytester.inline_run(testpath, "-s")
        reprec.assertoutcome(skipped=1)

    def test_trial_testfunction_todo_property(self, pytester: Pytester) -> None:
        testpath = pytester.makepyfile(
            """
            from twisted.trial import unittest
            class MyTestCase(unittest.TestCase):
                def test_func(self):
                    assert 0
                test_func.todo = 'dont run'
            """
        )
        reprec = pytester.inline_run(
            testpath, "-s", *self.ignore_unclosed_socket_warning
        )
        reprec.assertoutcome(skipped=1)


def test_djangolike_testcase(pytester: Pytester) -> None:
    # contributed from Morten Breekevold
    pytester.makepyfile(
        """
        from unittest import TestCase, main

        class DjangoLikeTestCase(TestCase):

            def setUp(self):
                print("setUp()")

            def test_presetup_has_been_run(self):
                print("test_thing()")
                self.assertTrue(hasattr(self, 'was_presetup'))

            def tearDown(self):
                print("tearDown()")

            def __call__(self, result=None):
                try:
                    self._pre_setup()
                except (KeyboardInterrupt, SystemExit):
                    raise
                except Exception:
                    import sys
                    result.addError(self, sys.exc_info())
                    return
                super(DjangoLikeTestCase, self).__call__(result)
                try:
                    self._post_teardown()
                except (KeyboardInterrupt, SystemExit):
                    raise
                except Exception:
                    import sys
                    result.addError(self, sys.exc_info())
                    return

            def _pre_setup(self):
                print("_pre_setup()")
                self.was_presetup = True

            def _post_teardown(self):
                print("_post_teardown()")
        """
    )
    result = pytester.runpytest("-s")
    assert result.ret == 0
    result.stdout.fnmatch_lines(
        [
            "*_pre_setup()*",
            "*setUp()*",
            "*test_thing()*",
            "*tearDown()*",
            "*_post_teardown()*",
        ]
    )


def test_unittest_not_shown_in_traceback(pytester: Pytester) -> None:
    pytester.makepyfile(
        """
        import unittest
        class t(unittest.TestCase):
            def test_hello(self):
                x = 3
                self.assertEqual(x, 4)
        """
    )
    res = pytester.runpytest()
    res.stdout.no_fnmatch_line("*failUnlessEqual*")


def test_unorderable_types(pytester: Pytester) -> None:
    pytester.makepyfile(
        """
        import unittest
        class TestJoinEmpty(unittest.TestCase):
            pass

        def make_test():
            class Test(unittest.TestCase):
                pass
            Test.__name__ = "TestFoo"
            return Test
        TestFoo = make_test()
        """
    )
    result = pytester.runpytest()
    result.stdout.no_fnmatch_line("*TypeError*")
    assert result.ret == ExitCode.NO_TESTS_COLLECTED


def test_unittest_typerror_traceback(pytester: Pytester) -> None:
    pytester.makepyfile(
        """
        import unittest
        class TestJoinEmpty(unittest.TestCase):
            def test_hello(self, arg1):
                pass
        """
    )
    result = pytester.runpytest()
    assert "TypeError" in result.stdout.str()
    assert result.ret == 1


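# unittest.expectedFailure should surface as xfail/failure under pytest; each case
# is also run under plain unittest via runpython to confirm both runners agree.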
@pytest.mark.parametrize("runner", ["pytest", "unittest"])
|
2020-12-16 12:16:05 +08:00
|
|
|
def test_unittest_expected_failure_for_failing_test_is_xfail(
|
|
|
|
pytester: Pytester, runner
|
|
|
|
) -> None:
|
|
|
|
script = pytester.makepyfile(
|
2016-08-18 07:29:26 +08:00
|
|
|
"""
|
2012-03-20 13:53:52 +08:00
|
|
|
import unittest
|
|
|
|
class MyTestCase(unittest.TestCase):
|
|
|
|
@unittest.expectedFailure
|
2016-08-13 06:18:02 +08:00
|
|
|
def test_failing_test_is_xfail(self):
|
|
|
|
assert False
|
2016-08-18 07:29:26 +08:00
|
|
|
if __name__ == '__main__':
|
|
|
|
unittest.main()
|
2016-08-13 06:18:02 +08:00
|
|
|
"""
|
|
|
|
)
|
2016-08-18 07:29:26 +08:00
|
|
|
if runner == "pytest":
|
2020-12-16 12:16:05 +08:00
|
|
|
result = pytester.runpytest("-rxX")
|
2016-08-18 07:29:26 +08:00
|
|
|
result.stdout.fnmatch_lines(
|
|
|
|
["*XFAIL*MyTestCase*test_failing_test_is_xfail*", "*1 xfailed*"]
|
|
|
|
)
|
|
|
|
else:
|
2020-12-16 12:16:05 +08:00
|
|
|
result = pytester.runpython(script)
|
2016-08-18 07:29:26 +08:00
|
|
|
result.stderr.fnmatch_lines(["*1 test in*", "*OK*(expected failures=1)*"])
|
2016-08-13 06:18:02 +08:00
|
|
|
assert result.ret == 0
|
|
|
|
|
2016-08-18 07:29:26 +08:00
|
|
|
|
|
|
|
@pytest.mark.parametrize("runner", ["pytest", "unittest"])
|
2020-12-16 12:16:05 +08:00
|
|
|
def test_unittest_expected_failure_for_passing_test_is_fail(
|
2021-01-14 09:02:26 +08:00
|
|
|
pytester: Pytester,
|
|
|
|
runner: str,
|
2020-12-16 12:16:05 +08:00
|
|
|
) -> None:
|
|
|
|
script = pytester.makepyfile(
|
2016-08-18 07:29:26 +08:00
|
|
|
"""
|
2016-08-13 06:18:02 +08:00
|
|
|
import unittest
|
|
|
|
class MyTestCase(unittest.TestCase):
|
2012-03-20 13:53:52 +08:00
|
|
|
@unittest.expectedFailure
|
2016-08-13 06:18:02 +08:00
|
|
|
def test_passing_test_is_fail(self):
|
|
|
|
assert True
|
2016-08-18 07:29:26 +08:00
|
|
|
if __name__ == '__main__':
|
|
|
|
unittest.main()
|
2012-03-20 13:53:52 +08:00
|
|
|
"""
|
|
|
|
)
|
2018-05-23 22:48:46 +08:00
|
|
|
|
2016-08-18 07:29:26 +08:00
|
|
|
if runner == "pytest":
|
2020-12-16 12:16:05 +08:00
|
|
|
result = pytester.runpytest("-rxX")
|
2016-08-18 07:29:26 +08:00
|
|
|
result.stdout.fnmatch_lines(
|
2021-01-14 09:02:26 +08:00
|
|
|
[
|
|
|
|
"*MyTestCase*test_passing_test_is_fail*",
|
|
|
|
"Unexpected success",
|
|
|
|
"*1 failed*",
|
|
|
|
]
|
2016-08-18 07:29:26 +08:00
|
|
|
)
|
|
|
|
else:
|
2020-12-16 12:16:05 +08:00
|
|
|
result = pytester.runpython(script)
|
2016-08-18 07:29:26 +08:00
|
|
|
result.stderr.fnmatch_lines(["*1 test in*", "*(unexpected successes=1)*"])
|
|
|
|
|
2019-05-28 07:31:52 +08:00
|
|
|
assert result.ret == 1
|
2012-03-20 13:53:52 +08:00
|
|
|
|
|
|
|
|
2020-11-01 04:44:10 +08:00
|
|
|
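# "stmt" swaps a plain return for a yield, so both function-style and generator-style
# autouse fixtures defined on a TestCase subclass are covered.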
@pytest.mark.parametrize("stmt", ["return", "yield"])
|
2020-12-16 12:16:05 +08:00
|
|
|
def test_unittest_setup_interaction(pytester: Pytester, stmt: str) -> None:
|
|
|
|
pytester.makepyfile(
|
2012-09-18 16:54:12 +08:00
|
|
|
f"""
|
|
|
|
import unittest
|
|
|
|
import pytest
|
|
|
|
class MyTestCase(unittest.TestCase):
|
2020-11-01 04:44:10 +08:00
|
|
|
@pytest.fixture(scope="class", autouse=True)
|
2012-09-18 16:54:12 +08:00
|
|
|
def perclass(self, request):
|
|
|
|
request.cls.hello = "world"
|
2015-07-21 06:25:01 +08:00
|
|
|
{stmt}
|
2020-11-01 04:44:10 +08:00
|
|
|
@pytest.fixture(scope="function", autouse=True)
|
2012-09-18 16:54:12 +08:00
|
|
|
def perfunction(self, request):
|
|
|
|
request.instance.funcname = request.function.__name__
|
2015-07-21 06:25:01 +08:00
|
|
|
{stmt}
|
2012-09-18 16:54:12 +08:00
|
|
|
|
|
|
|
def test_method1(self):
|
|
|
|
assert self.funcname == "test_method1"
|
|
|
|
assert self.hello == "world"
|
|
|
|
|
|
|
|
def test_method2(self):
|
|
|
|
assert self.funcname == "test_method2"
|
|
|
|
|
|
|
|
def test_classattr(self):
|
|
|
|
assert self.__class__.hello == "world"
|
2015-07-21 06:25:01 +08:00
|
|
|
"""
|
2018-05-23 22:48:46 +08:00
|
|
|
)
|
2020-12-16 12:16:05 +08:00
|
|
|
result = pytester.runpytest()
|
2019-03-23 18:36:18 +08:00
|
|
|
result.stdout.fnmatch_lines(["*3 passed*"])
|
2013-08-02 05:48:40 +08:00
|
|
|
|
|
|
|
|
2020-12-16 12:16:05 +08:00
|
|
|
def test_non_unittest_no_setupclass_support(pytester: Pytester) -> None:
    testpath = pytester.makepyfile(
        """
        class TestFoo(object):
            x = 0

            @classmethod
            def setUpClass(cls):
                cls.x = 1

            def test_method1(self):
                assert self.x == 0

            @classmethod
            def tearDownClass(cls):
                cls.x = 1

        def test_not_teareddown():
            assert TestFoo.x == 0

        """
    )
    reprec = pytester.inline_run(testpath)
    reprec.assertoutcome(passed=2)


def test_no_teardown_if_setupclass_failed(pytester: Pytester) -> None:
    testpath = pytester.makepyfile(
        """
        import unittest

        class MyTestCase(unittest.TestCase):
            x = 0

            @classmethod
            def setUpClass(cls):
                cls.x = 1
                assert False

            def test_func1(self):
                cls.x = 10

            @classmethod
            def tearDownClass(cls):
                cls.x = 100

        def test_notTornDown():
            assert MyTestCase.x == 1
        """
    )
    reprec = pytester.inline_run(testpath)
    reprec.assertoutcome(passed=1, failed=1)


def test_cleanup_functions(pytester: Pytester) -> None:
    """Ensure functions added with addCleanup are always called after each test ends (#6947)"""
    pytester.makepyfile(
        """
        import unittest

        cleanups = []

        class Test(unittest.TestCase):

            def test_func_1(self):
                self.addCleanup(cleanups.append, "test_func_1")

            def test_func_2(self):
                self.addCleanup(cleanups.append, "test_func_2")
                assert 0

            def test_func_3_check_cleanups(self):
                assert cleanups == ["test_func_1", "test_func_2"]
        """
    )
    result = pytester.runpytest("-v")
    result.stdout.fnmatch_lines(
        [
            "*::test_func_1 PASSED *",
            "*::test_func_2 FAILED *",
            "*::test_func_3_check_cleanups PASSED *",
        ]
    )


def test_issue333_result_clearing(pytester: Pytester) -> None:
    pytester.makeconftest(
        """
        import pytest
        @pytest.hookimpl(wrapper=True)
        def pytest_runtest_call(item):
            yield
            assert 0
        """
    )
    pytester.makepyfile(
        """
        import unittest
        class TestIt(unittest.TestCase):
            def test_func(self):
                0/0
        """
    )

    reprec = pytester.inline_run()
    reprec.assertoutcome(failed=1)


def test_unittest_raise_skip_issue748(pytester: Pytester) -> None:
    pytester.makepyfile(
        test_foo="""
        import unittest

        class MyTestCase(unittest.TestCase):
            def test_one(self):
                raise unittest.SkipTest('skipping due to reasons')
        """
    )
    result = pytester.runpytest("-v", "-rs")
    result.stdout.fnmatch_lines(
        """
        *SKIP*[1]*test_foo.py*skipping due to reasons*
        *1 skipped*
        """
    )


def test_unittest_skip_issue1169(pytester: Pytester) -> None:
    pytester.makepyfile(
        test_foo="""
        import unittest

        class MyTestCase(unittest.TestCase):
            @unittest.skip("skipping due to reasons")
            def test_skip(self):
                self.fail()
        """
    )
    result = pytester.runpytest("-v", "-rs")
    result.stdout.fnmatch_lines(
        """
        *SKIP*[1]*skipping due to reasons*
        *1 skipped*
        """
    )


def test_class_method_containing_test_issue1558(pytester: Pytester) -> None:
    pytester.makepyfile(
        test_foo="""
        import unittest

        class MyTestCase(unittest.TestCase):
            def test_should_run(self):
                pass
            def test_should_not_run(self):
                pass
            test_should_not_run.__test__ = False
        """
    )
    reprec = pytester.inline_run()
    reprec.assertoutcome(passed=1)


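# Parametrized over a plain class and unittest.TestCase so usefixtures markers are
# checked under both collection models (issue 3498).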
@pytest.mark.parametrize("base", ["builtins.object", "unittest.TestCase"])
|
2020-12-16 12:16:05 +08:00
|
|
|
def test_usefixtures_marker_on_unittest(base, pytester: Pytester) -> None:
|
2019-04-27 22:25:37 +08:00
|
|
|
"""#3498"""
|
2018-05-26 05:21:48 +08:00
|
|
|
module = base.rsplit(".", 1)[0]
|
2018-05-24 13:30:38 +08:00
|
|
|
pytest.importorskip(module)
|
2020-12-16 12:16:05 +08:00
|
|
|
pytester.makepyfile(
|
2018-05-26 05:21:48 +08:00
|
|
|
conftest="""
|
2018-05-24 13:30:38 +08:00
|
|
|
import pytest
|
|
|
|
|
|
|
|
@pytest.fixture(scope='function')
|
|
|
|
def fixture1(request, monkeypatch):
|
|
|
|
monkeypatch.setattr(request.instance, 'fixture1', True )
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture(scope='function')
|
|
|
|
def fixture2(request, monkeypatch):
|
|
|
|
monkeypatch.setattr(request.instance, 'fixture2', True )
|
2018-05-24 15:58:36 +08:00
|
|
|
|
|
|
|
def node_and_marks(item):
|
|
|
|
print(item.nodeid)
|
|
|
|
for mark in item.iter_markers():
|
|
|
|
print(" ", mark)
|
|
|
|
|
|
|
|
@pytest.fixture(autouse=True)
|
|
|
|
def my_marks(request):
|
|
|
|
node_and_marks(request.node)
|
|
|
|
|
|
|
|
def pytest_collection_modifyitems(items):
|
|
|
|
for item in items:
|
|
|
|
node_and_marks(item)
|
|
|
|
|
2018-05-26 05:21:48 +08:00
|
|
|
"""
|
|
|
|
)
|
2018-05-24 13:30:38 +08:00
|
|
|
|
2020-12-16 12:16:05 +08:00
|
|
|
pytester.makepyfile(
|
2018-05-26 05:21:48 +08:00
|
|
|
f"""
|
2018-05-24 13:30:38 +08:00
|
|
|
import pytest
|
|
|
|
import {module}
|
|
|
|
|
|
|
|
class Tests({base}):
|
|
|
|
fixture1 = False
|
|
|
|
fixture2 = False
|
|
|
|
|
|
|
|
@pytest.mark.usefixtures("fixture1")
|
|
|
|
def test_one(self):
|
|
|
|
assert self.fixture1
|
|
|
|
assert not self.fixture2
|
|
|
|
|
|
|
|
@pytest.mark.usefixtures("fixture1", "fixture2")
|
|
|
|
def test_two(self):
|
|
|
|
assert self.fixture1
|
|
|
|
assert self.fixture2
|
|
|
|
|
|
|
|
|
2018-05-26 05:21:48 +08:00
|
|
|
"""
|
|
|
|
)
|
2018-05-24 13:30:38 +08:00
|
|
|
|
2020-12-16 12:16:05 +08:00
|
|
|
result = pytester.runpytest("-s")
|
2018-05-24 13:30:38 +08:00
|
|
|
result.assert_outcomes(passed=2)
|
2018-08-09 07:13:21 +08:00
|
|
|
|
|
|
|
|
2020-12-16 12:16:05 +08:00
|
|
|
def test_testcase_handles_init_exceptions(pytester: Pytester) -> None:
    """
    Regression test to make sure exceptions in the __init__ method are bubbled up correctly.
    See https://github.com/pytest-dev/pytest/issues/3788
    """
    pytester.makepyfile(
        """
        from unittest import TestCase
        import pytest
        class MyTestCase(TestCase):
            def __init__(self, *args, **kwargs):
                raise Exception("should raise this exception")
            def test_hello(self):
                pass
        """
    )
    result = pytester.runpytest()
    assert "should raise this exception" in result.stdout.str()
    result.stdout.no_fnmatch_line("*ERROR at teardown of MyTestCase.test_hello*")


def test_error_message_with_parametrized_fixtures(pytester: Pytester) -> None:
    pytester.copy_example("unittest/test_parametrized_fixture_error_message.py")
    result = pytester.runpytest()
    result.stdout.fnmatch_lines(
        [
            "*test_two does not support fixtures*",
            "*TestSomethingElse::test_two",
            "*Function type: TestCaseFunction",
        ]
    )


@pytest.mark.parametrize(
    "test_name, expected_outcome",
    [
        ("test_setup_skip.py", "1 skipped"),
        ("test_setup_skip_class.py", "1 skipped"),
        ("test_setup_skip_module.py", "1 error"),
    ],
)
def test_setup_inheritance_skipping(
    pytester: Pytester, test_name, expected_outcome
) -> None:
    """Issue #4700"""
    pytester.copy_example(f"unittest/{test_name}")
    result = pytester.runpytest()
    result.stdout.fnmatch_lines([f"* {expected_outcome} in *"])


def test_BdbQuit(pytester: Pytester) -> None:
    pytester.makepyfile(
        test_foo="""
        import unittest

        class MyTestCase(unittest.TestCase):
            def test_bdbquit(self):
                import bdb
                raise bdb.BdbQuit()

            def test_should_not_run(self):
                pass
        """
    )
    reprec = pytester.inline_run()
    reprec.assertoutcome(failed=1, passed=1)


def test_exit_outcome(pytester: Pytester) -> None:
    pytester.makepyfile(
        test_foo="""
        import pytest
        import unittest

        class MyTestCase(unittest.TestCase):
            def test_exit_outcome(self):
                pytest.exit("pytest_exit called")

            def test_should_not_run(self):
                pass
        """
    )
    result = pytester.runpytest()
    result.stdout.fnmatch_lines(["*Exit: pytest_exit called*", "*= no tests ran in *"])


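# "--trace" runs each test through pdb.runcall; the fake pdb installed via
# pytestPDB._init_pdb below records those calls instead of entering the debugger.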
def test_trace(pytester: Pytester, monkeypatch: MonkeyPatch) -> None:
    calls = []

    def check_call(*args, **kwargs):
        calls.append((args, kwargs))
        assert args == ("runcall",)

        class _pdb:
            def runcall(*args, **kwargs):
                calls.append((args, kwargs))

        return _pdb

    monkeypatch.setattr("_pytest.debugging.pytestPDB._init_pdb", check_call)

    p1 = pytester.makepyfile(
        """
        import unittest

        class MyTestCase(unittest.TestCase):
            def test(self):
                self.assertEqual('foo', 'foo')
        """
    )
    result = pytester.runpytest("--trace", str(p1))
    assert len(calls) == 2
    assert result.ret == 0


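# The generated test module reports its tearDown calls through an attribute planted
# on the pytest namespace, since it cannot see this test's local variables directly.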
def test_pdb_teardown_called(pytester: Pytester, monkeypatch: MonkeyPatch) -> None:
    """Ensure tearDown() is always called when --pdb is given in the command-line.

    We delay the normal tearDown() calls when --pdb is given, so this ensures we are calling
    tearDown() eventually to avoid memory leaks when using --pdb.
    """
    teardowns: List[str] = []
    monkeypatch.setattr(
        pytest, "test_pdb_teardown_called_teardowns", teardowns, raising=False
    )

    pytester.makepyfile(
        """
        import unittest
        import pytest

        class MyTestCase(unittest.TestCase):

            def tearDown(self):
                pytest.test_pdb_teardown_called_teardowns.append(self.id())

            def test_1(self):
                pass
            def test_2(self):
                pass
        """
    )
    result = pytester.runpytest_inprocess("--pdb")
    result.stdout.fnmatch_lines("* 2 passed in *")
    assert teardowns == [
        "test_pdb_teardown_called.MyTestCase.test_1",
        "test_pdb_teardown_called.MyTestCase.test_2",
    ]


@pytest.mark.parametrize("mark", ["@unittest.skip", "@pytest.mark.skip"])
|
2022-06-26 21:52:01 +08:00
|
|
|
def test_pdb_teardown_skipped_for_functions(
|
2020-12-16 12:16:05 +08:00
|
|
|
pytester: Pytester, monkeypatch: MonkeyPatch, mark: str
|
|
|
|
) -> None:
|
2022-06-26 21:52:01 +08:00
|
|
|
"""
|
|
|
|
With --pdb, setUp and tearDown should not be called for tests skipped
|
|
|
|
via a decorator (#7215).
|
|
|
|
"""
|
2020-10-06 09:13:05 +08:00
|
|
|
tracked: List[str] = []
|
2022-06-26 21:52:01 +08:00
|
|
|
monkeypatch.setattr(pytest, "track_pdb_teardown_skipped", tracked, raising=False)
|
2020-05-31 01:33:22 +08:00
|
|
|
|
2020-12-16 12:16:05 +08:00
|
|
|
pytester.makepyfile(
|
2020-05-31 01:33:22 +08:00
|
|
|
f"""
|
|
|
|
import unittest
|
|
|
|
import pytest
|
|
|
|
|
|
|
|
class MyTestCase(unittest.TestCase):
|
|
|
|
|
|
|
|
def setUp(self):
|
2022-06-26 21:52:01 +08:00
|
|
|
pytest.track_pdb_teardown_skipped.append("setUp:" + self.id())
|
2020-05-31 01:33:22 +08:00
|
|
|
|
|
|
|
def tearDown(self):
|
2022-06-26 21:52:01 +08:00
|
|
|
pytest.track_pdb_teardown_skipped.append("tearDown:" + self.id())
|
2020-05-31 01:33:22 +08:00
|
|
|
|
|
|
|
{mark}("skipped for reasons")
|
|
|
|
def test_1(self):
|
|
|
|
pass
|
|
|
|
|
|
|
|
"""
|
|
|
|
)
|
2020-12-16 12:16:05 +08:00
|
|
|
result = pytester.runpytest_inprocess("--pdb")
|
2020-05-31 01:33:22 +08:00
|
|
|
result.stdout.fnmatch_lines("* 1 skipped in *")
|
|
|
|
assert tracked == []
|
|
|
|
|
|
|
|
|
2022-06-26 21:52:01 +08:00
|
|
|
@pytest.mark.parametrize("mark", ["@unittest.skip", "@pytest.mark.skip"])
|
|
|
|
def test_pdb_teardown_skipped_for_classes(
|
|
|
|
pytester: Pytester, monkeypatch: MonkeyPatch, mark: str
|
|
|
|
) -> None:
|
|
|
|
"""
|
|
|
|
With --pdb, setUp and tearDown should not be called for tests skipped
|
|
|
|
via a decorator on the class (#10060).
|
|
|
|
"""
|
|
|
|
tracked: List[str] = []
|
|
|
|
monkeypatch.setattr(pytest, "track_pdb_teardown_skipped", tracked, raising=False)
|
|
|
|
|
|
|
|
pytester.makepyfile(
|
|
|
|
f"""
|
|
|
|
import unittest
|
|
|
|
import pytest
|
|
|
|
|
|
|
|
{mark}("skipped for reasons")
|
|
|
|
class MyTestCase(unittest.TestCase):
|
|
|
|
|
|
|
|
def setUp(self):
|
|
|
|
pytest.track_pdb_teardown_skipped.append("setUp:" + self.id())
|
|
|
|
|
|
|
|
def tearDown(self):
|
|
|
|
pytest.track_pdb_teardown_skipped.append("tearDown:" + self.id())
|
|
|
|
|
|
|
|
def test_1(self):
|
|
|
|
pass
|
|
|
|
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
result = pytester.runpytest_inprocess("--pdb")
|
|
|
|
result.stdout.fnmatch_lines("* 1 skipped in *")
|
|
|
|
assert tracked == []
|
|
|
|
|
|
|
|
|
2020-12-16 12:16:05 +08:00
|
|
|
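# The async-related cases below run copied example files rather than inline strings;
# the first two are skipped when unittest.async_case or asynctest is unavailable.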
def test_async_support(pytester: Pytester) -> None:
    pytest.importorskip("unittest.async_case")

    pytester.copy_example("unittest/test_unittest_asyncio.py")
    reprec = pytester.inline_run()
    reprec.assertoutcome(failed=1, passed=2)


def test_asynctest_support(pytester: Pytester) -> None:
    """Check asynctest support (#7110)"""
    pytest.importorskip("asynctest")

    pytester.copy_example("unittest/test_unittest_asynctest.py")
    reprec = pytester.inline_run()
    reprec.assertoutcome(failed=1, passed=2)


def test_plain_unittest_does_not_support_async(pytester: Pytester) -> None:
    """Async functions in plain unittest.TestCase subclasses are not supported without plugins.

    This test exists here to avoid introducing this support by accident, leading users
    to expect that it works, rather than doing so intentionally as a feature.

    See https://github.com/pytest-dev/pytest-asyncio/issues/180 for more context.
    """
    pytester.copy_example("unittest/test_unittest_plain_async.py")
    result = pytester.runpytest_subprocess()
    if hasattr(sys, "pypy_version_info"):
        # in PyPy we can't reliably get the warning about the coroutine not being awaited,
        # because it depends on the coroutine being garbage collected; given that
        # we are running in a subprocess, that's difficult to enforce
        expected_lines = ["*1 passed*"]
    else:
        expected_lines = [
            "*RuntimeWarning: coroutine * was never awaited",
            "*1 passed*",
        ]
    result.stdout.fnmatch_lines(expected_lines)


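# The following group checks that cleanups registered with addClassCleanup/addCleanup
# run exactly once, even when setUpClass, setUp, tearDownClass or tearDown fail.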
def test_do_class_cleanups_on_success(pytester: Pytester) -> None:
    testpath = pytester.makepyfile(
        """
        import unittest
        class MyTestCase(unittest.TestCase):
            values = []
            @classmethod
            def setUpClass(cls):
                def cleanup():
                    cls.values.append(1)
                cls.addClassCleanup(cleanup)
            def test_one(self):
                pass
            def test_two(self):
                pass
        def test_cleanup_called_exactly_once():
            assert MyTestCase.values == [1]
        """
    )
    reprec = pytester.inline_run(testpath)
    passed, skipped, failed = reprec.countoutcomes()
    assert failed == 0
    assert passed == 3


def test_do_class_cleanups_on_setupclass_failure(pytester: Pytester) -> None:
    testpath = pytester.makepyfile(
        """
        import unittest
        class MyTestCase(unittest.TestCase):
            values = []
            @classmethod
            def setUpClass(cls):
                def cleanup():
                    cls.values.append(1)
                cls.addClassCleanup(cleanup)
                assert False
            def test_one(self):
                pass
        def test_cleanup_called_exactly_once():
            assert MyTestCase.values == [1]
        """
    )
    reprec = pytester.inline_run(testpath)
    passed, skipped, failed = reprec.countoutcomes()
    assert failed == 1
    assert passed == 1


def test_do_class_cleanups_on_teardownclass_failure(pytester: Pytester) -> None:
    testpath = pytester.makepyfile(
        """
        import unittest
        class MyTestCase(unittest.TestCase):
            values = []
            @classmethod
            def setUpClass(cls):
                def cleanup():
                    cls.values.append(1)
                cls.addClassCleanup(cleanup)
            @classmethod
            def tearDownClass(cls):
                assert False
            def test_one(self):
                pass
            def test_two(self):
                pass
        def test_cleanup_called_exactly_once():
            assert MyTestCase.values == [1]
        """
    )
    reprec = pytester.inline_run(testpath)
    passed, skipped, failed = reprec.countoutcomes()
    assert passed == 3


def test_do_cleanups_on_success(pytester: Pytester) -> None:
    testpath = pytester.makepyfile(
        """
        import unittest
        class MyTestCase(unittest.TestCase):
            values = []
            def setUp(self):
                def cleanup():
                    self.values.append(1)
                self.addCleanup(cleanup)
            def test_one(self):
                pass
            def test_two(self):
                pass
        def test_cleanup_called_the_right_number_of_times():
            assert MyTestCase.values == [1, 1]
        """
    )
    reprec = pytester.inline_run(testpath)
    passed, skipped, failed = reprec.countoutcomes()
    assert failed == 0
    assert passed == 3


def test_do_cleanups_on_setup_failure(pytester: Pytester) -> None:
    testpath = pytester.makepyfile(
        """
        import unittest
        class MyTestCase(unittest.TestCase):
            values = []
            def setUp(self):
                def cleanup():
                    self.values.append(1)
                self.addCleanup(cleanup)
                assert False
            def test_one(self):
                pass
            def test_two(self):
                pass
        def test_cleanup_called_the_right_number_of_times():
            assert MyTestCase.values == [1, 1]
        """
    )
    reprec = pytester.inline_run(testpath)
    passed, skipped, failed = reprec.countoutcomes()
    assert failed == 2
    assert passed == 1


def test_do_cleanups_on_teardown_failure(pytester: Pytester) -> None:
    testpath = pytester.makepyfile(
        """
        import unittest
        class MyTestCase(unittest.TestCase):
            values = []
            def setUp(self):
                def cleanup():
                    self.values.append(1)
                self.addCleanup(cleanup)
            def tearDown(self):
                assert False
            def test_one(self):
                pass
            def test_two(self):
                pass
        def test_cleanup_called_the_right_number_of_times():
            assert MyTestCase.values == [1, 1]
        """
    )
    reprec = pytester.inline_run(testpath)
    passed, skipped, failed = reprec.countoutcomes()
    assert failed == 2
    assert passed == 1


def test_traceback_pruning(pytester: Pytester) -> None:
    """Regression test for #9610 - doesn't crash during traceback pruning."""
    pytester.makepyfile(
        """
        import unittest

        class MyTestCase(unittest.TestCase):
            def __init__(self, test_method):
                unittest.TestCase.__init__(self, test_method)

        class TestIt(MyTestCase):
            @classmethod
            def tearDownClass(cls) -> None:
                assert False

            def test_it(self):
                pass
        """
    )
    reprec = pytester.inline_run()
    passed, skipped, failed = reprec.countoutcomes()
    assert passed == 1
    assert failed == 1
    assert reprec.ret == 1


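# Raising unittest.SkipTest at module import time aborts collection of the file;
# unittest reports a single synthetic skipped test and nothing is collected.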
def test_raising_unittest_skiptest_during_collection(
    pytester: Pytester,
) -> None:
    pytester.makepyfile(
        """
        import unittest

        class TestIt(unittest.TestCase):
            def test_it(self): pass
            def test_it2(self): pass

        raise unittest.SkipTest()

        class TestIt2(unittest.TestCase):
            def test_it(self): pass
            def test_it2(self): pass
        """
    )
    reprec = pytester.inline_run()
    passed, skipped, failed = reprec.countoutcomes()
    assert passed == 0
    # Unittest reports one fake test for a skipped module.
    assert skipped == 1
    assert failed == 0
    assert reprec.ret == ExitCode.NO_TESTS_COLLECTED