Merge upstream/master into features

Bruno Oliveira 2017-11-03 16:37:18 -02:00
parent cb30848e5a
commit f3a119c06a
19 changed files with 215 additions and 59 deletions


@@ -47,6 +47,7 @@ Dave Hunt
 David Díaz-Barquero
 David Mohr
 David Vierra
+Daw-Ran Liou
 Denis Kirisov
 Diego Russo
 Dmitry Dygalo


@@ -591,23 +591,26 @@ class AssertionRewriter(ast.NodeVisitor):
         # docstrings and __future__ imports.
         aliases = [ast.alias(py.builtin.builtins.__name__, "@py_builtins"),
                    ast.alias("_pytest.assertion.rewrite", "@pytest_ar")]
-        expect_docstring = True
+        doc = getattr(mod, "docstring", None)
+        expect_docstring = doc is None
+        if doc is not None and self.is_rewrite_disabled(doc):
+            return
         pos = 0
-        lineno = 0
+        lineno = 1
         for item in mod.body:
             if (expect_docstring and isinstance(item, ast.Expr) and
                     isinstance(item.value, ast.Str)):
                 doc = item.value.s
-                if "PYTEST_DONT_REWRITE" in doc:
-                    # The module has disabled assertion rewriting.
+                if self.is_rewrite_disabled(doc):
                     return
-                lineno += len(doc) - 1
                 expect_docstring = False
             elif (not isinstance(item, ast.ImportFrom) or item.level > 0 or
                   item.module != "__future__"):
                 lineno = item.lineno
                 break
             pos += 1
+        else:
+            lineno = item.lineno
         imports = [ast.Import([alias], lineno=lineno, col_offset=0)
                    for alias in aliases]
         mod.body[pos:pos] = imports
@@ -633,6 +636,9 @@ class AssertionRewriter(ast.NodeVisitor):
                       not isinstance(field, ast.expr)):
                     nodes.append(field)
 
+    def is_rewrite_disabled(self, docstring):
+        return "PYTEST_DONT_REWRITE" in docstring
+
     def variable(self):
         """Get a new variable."""
         # Use a character invalid in python identifiers to avoid clashing.
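
Taken together, the two hunks above factor the opt-out check into ``is_rewrite_disabled`` and apply it both to a docstring attached to the ``Module`` node (Python 3.7 behaviour, via ``getattr(mod, "docstring", None)``) and to a docstring expression found in ``mod.body``. A standalone sketch of the opt-out check, using only the standard library rather than pytest's real code path:

    import ast

    def is_rewrite_disabled(docstring):
        # Same test the rewriter performs: an opt-out marker anywhere in the docstring.
        return "PYTEST_DONT_REWRITE" in docstring

    source = '"""PYTEST_DONT_REWRITE"""\nassert 1 + 1 == 2\n'
    doc = ast.get_docstring(ast.parse(source))  # module docstring, or None
    print(doc is not None and is_rewrite_disabled(doc))  # True: rewriting would be skipped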


@@ -127,7 +127,7 @@ class DoctestItem(pytest.Item):
                 lines = ["%03d %s" % (i + test.lineno + 1, x)
                          for (i, x) in enumerate(lines)]
                 # trim docstring error lines to 10
-                lines = lines[example.lineno - 9:example.lineno + 1]
+                lines = lines[max(example.lineno - 9, 0):example.lineno + 1]
             else:
                 lines = ['EXAMPLE LOCATION UNKNOWN, not showing all tests of that example']
                 indent = '>>>'
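
The ``max(..., 0)`` guard matters because a negative slice start wraps around to the end of the list and silently drops leading context whenever the failing example sits within the first nine docstring lines. A small, pytest-independent illustration:

    lines = ["%03d text" % i for i in range(1, 7)]  # six numbered docstring lines
    example_lineno = 4                              # failing example near the top

    before = lines[example_lineno - 9:example_lineno + 1]         # 4 - 9 == -5 wraps around
    after = lines[max(example_lineno - 9, 0):example_lineno + 1]  # clamped to the start

    print(before)  # ['002 text', '003 text', '004 text', '005 text'] -- first line lost
    print(after)   # ['001 text', '002 text', '003 text', '004 text', '005 text']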


@@ -268,8 +268,9 @@ class MarkGenerator:
             pass
         self._markers = l = set()
         for line in self._config.getini("markers"):
-            beginning = line.split(":", 1)
-            x = beginning[0].split("(", 1)[0]
+            marker, _ = line.split(":", 1)
+            marker = marker.rstrip()
+            x = marker.split("(", 1)[0]
             l.add(x)
         if name not in self._markers:
             raise AttributeError("%r not a registered marker" % (name,))
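
With this change a marker declared in an ini file as ``a1 : description`` is registered as ``a1`` rather than ``a1`` plus a trailing space, which is what the ``--strict`` test further down exercises. Roughly what the new parsing does for one ini line (the line content is illustrative):

    line = "a1 : this is a whitespace marker"
    marker, _ = line.split(":", 1)   # "a1 " plus the description, which is discarded
    marker = marker.rstrip()         # "a1"
    name = marker.split("(", 1)[0]   # also handles declarations like "slow(reason): ..."
    print(repr(name))                # 'a1'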

changelog/1505.doc (new file)
@@ -0,0 +1 @@
+Introduce a dedicated section about conftest.py.

changelog/2658.doc (new file)
@@ -0,0 +1 @@
+Append example for pytest.param in the example/parametrize document.

changelog/2856.bugfix (new file)
@@ -0,0 +1 @@
+Strip whitespace from marker names when reading them from INI config.

changelog/2882.bugfix (new file)
@@ -0,0 +1 @@
+Show full context of doctest source in the pytest output, if the lineno of failed example in the docstring is < 9.


@@ -209,8 +209,8 @@ the ``pytest_assertrepr_compare`` hook.
 .. autofunction:: _pytest.hookspec.pytest_assertrepr_compare
    :noindex:
 
-As an example consider adding the following hook in a conftest.py which
-provides an alternative explanation for ``Foo`` objects::
+As an example consider adding the following hook in a :ref:`conftest.py <conftest.py>`
+file which provides an alternative explanation for ``Foo`` objects::
 
    # content of conftest.py
    from test_foocompare import Foo
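
The hunk is cut off right after the import, so the hook body is not visible here. For orientation, a hook of this shape, assuming ``Foo`` instances carry a ``val`` attribute as in the surrounding documentation example, would look roughly like:

    # content of conftest.py (sketch)
    from test_foocompare import Foo

    def pytest_assertrepr_compare(op, left, right):
        if isinstance(left, Foo) and isinstance(right, Foo) and op == "==":
            return ["Comparing Foo instances:",
                    "   vals: %s != %s" % (left.val, right.val)]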


@@ -485,4 +485,54 @@ of our ``test_func1`` was skipped. A few notes:
   values as well.
 
+Set marks or test ID for individual parametrized test
+------------------------------------------------------
+
+Use ``pytest.param`` to apply marks or set a test ID for an individual
+parametrized test. For example::
+
+    # content of test_pytest_param_example.py
+    import pytest
+    @pytest.mark.parametrize('test_input,expected', [
+        ('3+5', 8),
+        pytest.param('1+7', 8,
+                     marks=pytest.mark.basic),
+        pytest.param('2+4', 6,
+                     marks=pytest.mark.basic,
+                     id='basic_2+4'),
+        pytest.param('6*9', 42,
+                     marks=[pytest.mark.basic, pytest.mark.xfail],
+                     id='basic_6*9'),
+    ])
+    def test_eval(test_input, expected):
+        assert eval(test_input) == expected
+
+In this example, we have 4 parametrized tests. Except for the first test,
+we mark the remaining three parametrized tests with the custom marker ``basic``,
+and for the fourth test we also use the built-in mark ``xfail`` to indicate this
+test is expected to fail. For explicitness, we set test ids for some tests.
+
+Then run ``pytest`` with verbose mode and with only the ``basic`` marker::
+
+    pytest -v -m basic
+    ============================================ test session starts =============================================
+    platform linux -- Python 3.x.y, pytest-3.x.y, py-1.x.y, pluggy-0.x.y
+    rootdir: $REGENDOC_TMPDIR, inifile:
+    collected 4 items
+
+    test_pytest_param_example.py::test_eval[1+7-8] PASSED
+    test_pytest_param_example.py::test_eval[basic_2+4] PASSED
+    test_pytest_param_example.py::test_eval[basic_6*9] xfail
+    ========================================== short test summary info ===========================================
+    XFAIL test_pytest_param_example.py::test_eval[basic_6*9]
+    ============================================= 1 tests deselected =============================================
+
+As a result:
+
+- Four tests were collected.
+- One test was deselected because it doesn't have the ``basic`` mark.
+- Three tests with the ``basic`` mark were selected.
+- The test ``test_eval[1+7-8]`` passed, but the name is autogenerated and confusing.
+- The test ``test_eval[basic_2+4]`` passed.
+- The test ``test_eval[basic_6*9]`` was expected to fail and did fail.


@@ -175,21 +175,23 @@ You can always peek at the collection tree without running tests like this::
 
     ======= no tests ran in 0.12 seconds ========
 
-customizing test collection to find all .py files
----------------------------------------------------------
+.. _customizing-test-collection:
+
+Customizing test collection
+---------------------------
 
 .. regendoc:wipe
 
-You can easily instruct ``pytest`` to discover tests from every python file::
+You can easily instruct ``pytest`` to discover tests from every Python file::
 
     # content of pytest.ini
     [pytest]
     python_files = *.py
 
-However, many projects will have a ``setup.py`` which they don't want to be imported. Moreover, there may files only importable by a specific python version.
-For such cases you can dynamically define files to be ignored by listing
-them in a ``conftest.py`` file::
+However, many projects will have a ``setup.py`` which they don't want to be
+imported. Moreover, there may be files only importable by a specific Python
+version. For such cases you can dynamically define files to be ignored by
+listing them in a ``conftest.py`` file::
 
     # content of conftest.py
     import sys
@@ -198,7 +200,7 @@ them in a ``conftest.py`` file::
     if sys.version_info[0] > 2:
         collect_ignore.append("pkg/module_py2.py")
 
-And then if you have a module file like this::
+and then if you have a module file like this::
 
     # content of pkg/module_py2.py
     def test_only_on_python2():
@@ -207,13 +209,13 @@ And then if you have a module file like this::
         except Exception, e:
             pass
 
-and a setup.py dummy file like this::
+and a ``setup.py`` dummy file like this::
 
     # content of setup.py
     0/0  # will raise exception if imported
 
-then a pytest run on Python2 will find the one test and will leave out the
-setup.py file::
+If you run with a Python 2 interpreter then you will find the one test and will
+leave out the ``setup.py`` file::
 
     #$ pytest --collect-only
     ====== test session starts ======
@@ -225,8 +227,8 @@ setup.py file::
 
     ====== no tests ran in 0.04 seconds ======
 
-If you run with a Python3 interpreter both the one test and the setup.py file
-will be left out::
+If you run with a Python 3 interpreter both the one test and the ``setup.py``
+file will be left out::
 
     $ pytest --collect-only
     ======= test session starts ========
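
Pieced together from the fragments above, the ``conftest.py`` this section describes amounts to roughly the following; the initial ``collect_ignore`` assignment is implied by the surrounding prose rather than shown in the hunk:

    # content of conftest.py
    import sys

    collect_ignore = ["setup.py"]                   # never collect setup.py
    if sys.version_info[0] > 2:
        collect_ignore.append("pkg/module_py2.py")  # Python 2 only syntax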


@@ -127,10 +127,39 @@ It's a prime example of `dependency injection`_ where fixture
 functions take the role of the *injector* and test functions are the
 *consumers* of fixture objects.
 
+.. _`conftest.py`:
+.. _`conftest`:
+
+``conftest.py``: sharing fixture functions
+------------------------------------------
+
+If, while implementing your tests, you realize that you want to use a fixture
+function from multiple test files, you can move it to a ``conftest.py`` file.
+You don't need to import the fixture you want to use in a test; it is
+automatically discovered by pytest. The discovery of fixture functions starts
+at test classes, then test modules, then ``conftest.py`` files and finally
+builtin and third party plugins.
+
+You can also use the ``conftest.py`` file to implement
+:ref:`local per-directory plugins <conftest.py plugins>`.
+
+Sharing test data
+-----------------
+
+If you want to make test data from files available to your tests, a good way
+to do this is to load the data in a fixture for use by your tests.
+This makes use of the automatic caching mechanisms of pytest.
+
+Another good approach is to add the data files to the ``tests`` folder.
+There are also community plugins available to help manage this aspect of
+testing, e.g. `pytest-datadir <https://github.com/gabrielcnr/pytest-datadir>`__
+and `pytest-datafiles <https://pypi.python.org/pypi/pytest-datafiles>`__.
+
 .. _smtpshared:
 
-Scope: Sharing a fixture across tests in a class, module or session
--------------------------------------------------------------------
+Scope: sharing a fixture instance across tests in a class, module or session
+----------------------------------------------------------------------------
 
 .. regendoc:wipe
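
The new section describes fixture discovery from ``conftest.py`` without showing code; a minimal sketch of the pattern, with illustrative file and fixture names:

    # content of conftest.py
    import pytest

    @pytest.fixture
    def shared_data():
        # Available to every test module in this directory and below,
        # without any import on the test side.
        return {"answer": 42}

    # content of test_using_shared_data.py
    def test_answer(shared_data):
        assert shared_data["answer"] == 42
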
@@ -878,17 +907,6 @@ All test methods in this TestClass will use the transaction fixture while
 other test classes or functions in the module will not use it unless
 they also add a ``transact`` reference.
 
-Shifting (visibility of) fixture functions
-----------------------------------------------------
-
-If during implementing your tests you realize that you
-want to use a fixture function from multiple test files you can move it
-to a :ref:`conftest.py <conftest.py>` file or even separately installable
-:ref:`plugins <plugins>` without changing test code. The discovery of
-fixtures functions starts at test classes, then test modules, then
-``conftest.py`` files and finally builtin and third party plugins.
-
 Overriding fixtures on various levels
 -------------------------------------


@@ -91,7 +91,7 @@ environment you can type::
 
 and will get an extended test header which shows activated plugins
 and their names. It will also print local plugins aka
-:ref:`conftest.py <conftest>` files when they are loaded.
+:ref:`conftest.py <conftest.py plugins>` files when they are loaded.
 
 .. _`cmdunregister`:
@@ -152,4 +152,3 @@ in the `pytest repository <https://github.com/pytest-dev/pytest>`_.
     _pytest.terminal
     _pytest.tmpdir
     _pytest.unittest
-


@@ -3,7 +3,7 @@
 .. _skipping:
 
 Skip and xfail: dealing with tests that cannot succeed
-=====================================================================
+======================================================
 
 You can mark test functions that cannot be run on certain platforms
 or that you expect to fail so pytest can deal with them accordingly and
@@ -152,6 +152,16 @@ will be skipped if any of the skip conditions is true.
 
 .. _`whole class- or module level`: mark.html#scoped-marking
 
+Skipping files or directories
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Sometimes you may need to skip an entire file or directory, for example if the
+tests rely on Python version-specific features or contain code that you do not
+wish pytest to run. In this case, you must exclude the files and directories
+from collection. Refer to :ref:`customizing-test-collection` for more
+information.
+
 Skipping on a missing import dependency
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
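
The referenced section boils down to listing the unwanted paths in a ``conftest.py``; the shortest form of the exclusion, with an illustrative file name, is:

    # content of conftest.py
    collect_ignore = ["py2_only_module.py"]  # never collected, so never imported by pytest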


@@ -57,9 +57,7 @@ Plugin discovery order at tool startup
 
 .. _`pytest/plugin`: http://bitbucket.org/pytest-dev/pytest/src/tip/pytest/plugin/
 .. _`conftest.py plugins`:
-.. _`conftest.py`:
 .. _`localplugin`:
-.. _`conftest`:
 .. _`local conftest plugins`:
 
 conftest.py: local per-directory plugins


@@ -65,13 +65,18 @@ class TestAssertionRewrite(object):
     def test_place_initial_imports(self):
         s = """'Doc string'\nother = stuff"""
         m = rewrite(s)
-        assert isinstance(m.body[0], ast.Expr)
-        assert isinstance(m.body[0].value, ast.Str)
-        for imp in m.body[1:3]:
+        # Module docstrings in 3.7 are part of Module node, it's not in the body
+        # so we remove it so the following body items have the same indexes on
+        # all Python versions
+        if sys.version_info < (3, 7):
+            assert isinstance(m.body[0], ast.Expr)
+            assert isinstance(m.body[0].value, ast.Str)
+            del m.body[0]
+        for imp in m.body[0:2]:
             assert isinstance(imp, ast.Import)
             assert imp.lineno == 2
             assert imp.col_offset == 0
-        assert isinstance(m.body[3], ast.Assign)
+        assert isinstance(m.body[2], ast.Assign)
         s = """from __future__ import with_statement\nother_stuff"""
         m = rewrite(s)
         assert isinstance(m.body[0], ast.ImportFrom)
@@ -80,16 +85,29 @@ class TestAssertionRewrite(object):
             assert imp.lineno == 2
             assert imp.col_offset == 0
         assert isinstance(m.body[3], ast.Expr)
-        s = """'doc string'\nfrom __future__ import with_statement\nother"""
+        s = """'doc string'\nfrom __future__ import with_statement"""
         m = rewrite(s)
-        assert isinstance(m.body[0], ast.Expr)
-        assert isinstance(m.body[0].value, ast.Str)
-        assert isinstance(m.body[1], ast.ImportFrom)
-        for imp in m.body[2:4]:
+        if sys.version_info < (3, 7):
+            assert isinstance(m.body[0], ast.Expr)
+            assert isinstance(m.body[0].value, ast.Str)
+            del m.body[0]
+        assert isinstance(m.body[0], ast.ImportFrom)
+        for imp in m.body[1:3]:
+            assert isinstance(imp, ast.Import)
+            assert imp.lineno == 2
+            assert imp.col_offset == 0
+        s = """'doc string'\nfrom __future__ import with_statement\nother"""
+        m = rewrite(s)
+        if sys.version_info < (3, 7):
+            assert isinstance(m.body[0], ast.Expr)
+            assert isinstance(m.body[0].value, ast.Str)
+            del m.body[0]
+        assert isinstance(m.body[0], ast.ImportFrom)
+        for imp in m.body[1:3]:
             assert isinstance(imp, ast.Import)
             assert imp.lineno == 3
             assert imp.col_offset == 0
-        assert isinstance(m.body[4], ast.Expr)
+        assert isinstance(m.body[3], ast.Expr)
         s = """from . import relative\nother_stuff"""
         m = rewrite(s)
         for imp in m.body[0:2]:
@@ -101,10 +119,14 @@ class TestAssertionRewrite(object):
     def test_dont_rewrite(self):
         s = """'PYTEST_DONT_REWRITE'\nassert 14"""
         m = rewrite(s)
-        assert len(m.body) == 2
-        assert isinstance(m.body[0].value, ast.Str)
-        assert isinstance(m.body[1], ast.Assert)
-        assert m.body[1].msg is None
+        if sys.version_info < (3, 7):
+            assert len(m.body) == 2
+            assert isinstance(m.body[0], ast.Expr)
+            assert isinstance(m.body[0].value, ast.Str)
+            del m.body[0]
+        else:
+            assert len(m.body) == 1
+        assert m.body[0].msg is None
 
     def test_name(self):
         def f():


@@ -173,7 +173,7 @@ class TestDoctests(object):
             "*UNEXPECTED*ZeroDivision*",
         ])
 
-    def test_docstring_context_around_error(self, testdir):
+    def test_docstring_partial_context_around_error(self, testdir):
         """Test that we show some context before the actual line of a failing
         doctest.
         """
@@ -199,7 +199,7 @@ class TestDoctests(object):
         ''')
         result = testdir.runpytest('--doctest-modules')
         result.stdout.fnmatch_lines([
-            '*docstring_context_around_error*',
+            '*docstring_partial_context_around_error*',
             '005*text-line-3',
             '006*text-line-4',
             '013*text-line-11',
@@ -213,6 +213,32 @@ class TestDoctests(object):
         assert 'text-line-2' not in result.stdout.str()
         assert 'text-line-after' not in result.stdout.str()
 
+    def test_docstring_full_context_around_error(self, testdir):
+        """Test that we show the whole context before the actual line of a failing
+        doctest, provided that the context is up to 10 lines long.
+        """
+        testdir.makepyfile('''
+            def foo():
+                """
+                text-line-1
+                text-line-2
+
+                >>> 1 + 1
+                3
+                """
+        ''')
+        result = testdir.runpytest('--doctest-modules')
+        result.stdout.fnmatch_lines([
+            '*docstring_full_context_around_error*',
+            '003*text-line-1',
+            '004*text-line-2',
+            '006*>>> 1 + 1',
+            'Expected:',
+            '    3',
+            'Got:',
+            '    2',
+        ])
+
     def test_doctest_linedata_missing(self, testdir):
         testdir.tmpdir.join('hello.py').write(_pytest._code.Source("""
             class Fun(object):


@@ -169,6 +169,23 @@ def test_markers_option(testdir):
     ])
 
 
+def test_ini_markers_whitespace(testdir):
+    testdir.makeini("""
+        [pytest]
+        markers =
+            a1 : this is a whitespace marker
+    """)
+    testdir.makepyfile("""
+        import pytest
+
+        @pytest.mark.a1
+        def test_markers():
+            assert True
+    """)
+    rec = testdir.inline_run("--strict", "-m", "a1")
+    rec.assertoutcome(passed=1)
+
+
 def test_markers_option_with_plugin_in_current_dir(testdir):
     testdir.makeconftest('pytest_plugins = "flip_flop"')
     testdir.makepyfile(flip_flop="""\


@@ -54,8 +54,9 @@ deps =
     mock
     nose
     hypothesis>=3.5.2
+changedir=testing
 commands =
-    pytest -n1 -ra {posargs:testing}
+    pytest -n1 -ra {posargs:.}
 
 [testenv:py36-xdist]
 deps = {[testenv:py27-xdist]deps}
@@ -81,10 +82,11 @@ deps =
     pytest-xdist>=1.13
     hypothesis>=3.5.2
 distribute = true
+changedir=testing
 setenv =
     PYTHONDONTWRITEBYTECODE=1
 commands =
-    pytest -n3 -ra {posargs:testing}
+    pytest -n3 -ra {posargs:.}
 
 [testenv:py27-trial]
 deps = twisted