Merge pull request #1046 from RonnyPfannschmidt/docs
fix documentation regeneration, kill Makefile
commit 7c5d2ea81d

 Makefile | 31

@@ -1,31 +0,0 @@
-# Set of targets useful for development/release process
-PYTHON = python2.7
-PATH := $(PWD)/.env/bin:$(PATH)
-REGENDOC_ARGS := \
-	--normalize "/={8,} (.*) ={8,}/======= \1 ========/" \
-	--normalize "/_{8,} (.*) _{8,}/_______ \1 ________/" \
-	--normalize "/in \d+.\d+ seconds/in 0.12 seconds/" \
-	--normalize "@/tmp/pytest-\d+/@/tmp/pytest-NaN/@"
-
-# prepare virtual python environment
-.env:
-	virtualenv .env -p $(PYTHON)
-
-# install all needed for development
-develop: .env
-	pip install -e . tox -r requirements-docs.txt
-
-# clean the development envrironment
-clean:
-	-rm -rf .env
-
-# generate documentation
-docs: develop
-	find doc/en -name '*.rst' -not -path 'doc/en/_build/*' | xargs .env/bin/regendoc ${REGENDOC_ARGS}
-	cd doc/en; make html
-
-# upload documentation
-upload-docs: develop
-	find doc/en -name '*.rst' -not -path 'doc/en/_build/*' | xargs .env/bin/regendoc ${REGENDOC_ARGS} --update
-	#cd doc/en; make install

@@ -12,6 +12,13 @@ PAPEROPT_a4 = -D latex_paper_size=a4
 PAPEROPT_letter = -D latex_paper_size=letter
 ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
 
+REGENDOC_ARGS := \
+	--normalize "/={8,} (.*) ={8,}/======= \1 ========/" \
+	--normalize "/_{8,} (.*) _{8,}/_______ \1 ________/" \
+	--normalize "/in \d+.\d+ seconds/in 0.12 seconds/" \
+	--normalize "@/tmp/pytest-of-.*/pytest-\d+@PYTEST_TMPDIR@" \
+
+
 
 .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest
 
 
@@ -46,7 +53,7 @@ installall: clean install installpdf
 	@echo "done"
 
 regen:
-	PYTHONDONTWRITEBYTECODE=1 COLUMNS=76 regendoc --update *.rst */*.rst
+	PYTHONDONTWRITEBYTECODE=1 COLUMNS=76 regendoc --update *.rst */*.rst ${REGENDOC_ARGS}
 
 html:
 	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
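
Note: the ``--normalize`` arguments are sed-style substitutions that regendoc
applies to captured output so the regenerated docs stay stable across runs;
this is why every example below now reports ``in 0.12 seconds`` and uses the
short ``======= ... ========`` session headers (the replacements use fewer
than eight ``=``/``_`` characters, so re-running the normalization leaves them
alone), and why per-run temp paths become the stable ``PYTEST_TMPDIR``
placeholder. A minimal Python sketch of the effect, assuming each pattern is
applied as a plain regex substitution (illustration only, not regendoc's
actual code)::

    import re

    # the first three patterns from REGENDOC_ARGS, as (regex, replacement)
    NORMALIZERS = [
        (r"={8,} (.*) ={8,}", r"======= \1 ========"),
        (r"_{8,} (.*) _{8,}", r"_______ \1 ________"),
        (r"in \d+.\d+ seconds", "in 0.12 seconds"),
    ]

    def normalize(line):
        for pattern, repl in NORMALIZERS:
            line = re.sub(pattern, repl, line)
        return line

    print(normalize("=========== test session starts ============"))
    # ======= test session starts ========
    print(normalize("2 failed, 48 passed in 0.04 seconds"))
    # 2 failed, 48 passed in 0.12 seconds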

@@ -26,7 +26,7 @@ you will see the return value of the function call::
 
     $ py.test test_assert1.py
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     rootdir: $REGENDOC_TMPDIR, inifile:
     collected 1 items
 
@@ -146,7 +146,7 @@ if you run this module::
 
     $ py.test test_assert2.py
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     rootdir: $REGENDOC_TMPDIR, inifile:
     collected 1 items
 
@@ -225,7 +225,7 @@ the conftest file::
     E assert Comparing Foo instances:
     E vals: 1 != 2
 
-    test_foocompare.py:8: AssertionError
+    test_foocompare.py:11: AssertionError
     1 failed in 0.12 seconds
 
 .. _assert-details:

@@ -73,6 +73,8 @@ You can ask for available builtin or project-custom
 :ref:`fixtures <fixtures>` by typing::
 
     $ py.test -q --fixtures
+    cache
+        $PYTHON_PREFIX/lib/python3.4/site-packages/_pytest/cacheprovider.py:176: no docstring available
     capsys
         enables capturing of writes to sys.stdout/sys.stderr and makes
         captured output available via ``capsys.readouterr()`` method calls
@@ -81,6 +83,10 @@ You can ask for available builtin or project-custom
         enables capturing of writes to file descriptors 1 and 2 and makes
         captured output available via ``capfd.readouterr()`` method calls
         which return a ``(out, err)`` tuple.
+    record_xml_property
+        Fixture that adds extra xml properties to the tag for the calling test.
+        The fixture is callable with (name, value), with value being automatically
+        xml-encoded.
     monkeypatch
         The returned ``monkeypatch`` funcarg provides these
         helper methods to modify objects, dictionaries or os.environ::
@@ -108,6 +114,8 @@ You can ask for available builtin or project-custom
 
         See http://docs.python.org/library/warnings.html for information
        on warning categories.
+    tmpdir_factory
+        Return a TempdirFactory instance for the test session.
     tmpdir
         return a temporary directory path object
         which is unique to each test function invocation,
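
Note: the ``capsys`` fixture listed above can be exercised like this (a
minimal sketch; the test body is illustrative and not part of this change)::

    def test_print_output(capsys):
        print("hello")
        out, err = capsys.readouterr()  # the captured (out, err) tuple
        assert out == "hello\n"
        assert err == ""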

@@ -44,8 +44,8 @@ If you run this for the first time you will see two failures::
 
     $ py.test -q
     .................F.......F........................
-    ================================= FAILURES =================================
-    _______________________________ test_num[17] _______________________________
+    ======= FAILURES ========
+    _______ test_num[17] ________
 
     i = 17
 
@@ -56,7 +56,7 @@ If you run this for the first time you will see two failures::
     E Failed: bad luck
 
     test_50.py:6: Failed
-    _______________________________ test_num[25] _______________________________
+    _______ test_num[25] ________
 
     i = 25
 
@@ -67,21 +67,21 @@ If you run this for the first time you will see two failures::
     E Failed: bad luck
 
     test_50.py:6: Failed
-    2 failed, 48 passed in 0.04 seconds
+    2 failed, 48 passed in 0.12 seconds
 
 If you then run it with ``--lf``::
 
     $ py.test --lf
-    =========================== test session starts ============================
-    platform linux2 -- Python 2.7.6, pytest-2.7.3.dev428+ng79d22bf.d20150916, py-1.4.30, pluggy-0.3.0
+    ======= test session starts ========
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     run-last-failure: rerun last 2 failures
-    rootdir: /tmp/doc-exec-94, inifile:
+    rootdir: $REGENDOC_TMPDIR, inifile:
     collected 50 items
 
     test_50.py FF
 
-    ================================= FAILURES =================================
-    _______________________________ test_num[17] _______________________________
+    ======= FAILURES ========
+    _______ test_num[17] ________
 
     i = 17
 
@@ -92,7 +92,7 @@ If you then run it with ``--lf``::
     E Failed: bad luck
 
     test_50.py:6: Failed
-    _______________________________ test_num[25] _______________________________
+    _______ test_num[25] ________
 
     i = 25
 
@@ -103,7 +103,7 @@ If you then run it with ``--lf``::
     E Failed: bad luck
 
     test_50.py:6: Failed
-    ================= 2 failed, 48 deselected in 0.01 seconds ==================
+    ======= 2 failed, 48 deselected in 0.12 seconds ========
 
 You have run only the two failing test from the last run, while 48 tests have
 not been run ("deselected").
@@ -113,16 +113,16 @@ previous failures will be executed first (as can be seen from the series
 of ``FF`` and dots)::
 
     $ py.test --ff
-    =========================== test session starts ============================
-    platform linux2 -- Python 2.7.6, pytest-2.7.3.dev428+ng79d22bf.d20150916, py-1.4.30, pluggy-0.3.0
+    ======= test session starts ========
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     run-last-failure: rerun last 2 failures first
-    rootdir: /tmp/doc-exec-94, inifile:
+    rootdir: $REGENDOC_TMPDIR, inifile:
     collected 50 items
 
     test_50.py FF................................................
 
-    ================================= FAILURES =================================
-    _______________________________ test_num[17] _______________________________
+    ======= FAILURES ========
+    _______ test_num[17] ________
 
     i = 17
 
@@ -133,7 +133,7 @@ of ``FF`` and dots)::
     E Failed: bad luck
 
     test_50.py:6: Failed
-    _______________________________ test_num[25] _______________________________
+    _______ test_num[25] ________
 
     i = 25
 
@@ -144,7 +144,7 @@ of ``FF`` and dots)::
     E Failed: bad luck
 
     test_50.py:6: Failed
-    =================== 2 failed, 48 passed in 0.03 seconds ====================
+    ======= 2 failed, 48 passed in 0.12 seconds ========
 
 .. _`config.cache`:
 
@@ -179,8 +179,8 @@ of the sleep::
 
     $ py.test -q
     F
-    ================================= FAILURES =================================
-    ______________________________ test_function _______________________________
+    ======= FAILURES ========
+    _______ test_function ________
 
     mydata = 42
 
@@ -189,15 +189,15 @@ of the sleep::
     E assert 42 == 23
 
     test_caching.py:14: AssertionError
-    1 failed in 5.41 seconds
+    1 failed in 0.12 seconds
 
 If you run it a second time the value will be retrieved from
 the cache and this will be quick::
 
     $ py.test -q
     F
-    ================================= FAILURES =================================
-    ______________________________ test_function _______________________________
+    ======= FAILURES ========
+    _______ test_function ________
 
     mydata = 42
 
@@ -206,7 +206,7 @@ the cache and this will be quick::
     E assert 42 == 23
 
     test_caching.py:14: AssertionError
-    1 failed in 0.01 seconds
+    1 failed in 0.12 seconds
 
 See the `cache-api`_ for more details.
 
@@ -218,15 +218,15 @@ You can always peek at the content of the cache using the
 ``--cache-clear`` command line option::
 
     $ py.test --cache-clear
-    =========================== test session starts ============================
-    platform linux2 -- Python 2.7.6, pytest-2.7.3.dev428+ng79d22bf.d20150916, py-1.4.30, pluggy-0.3.0
-    rootdir: /tmp/doc-exec-94, inifile:
+    ======= test session starts ========
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
+    rootdir: $REGENDOC_TMPDIR, inifile:
     collected 1 items
 
     test_caching.py F
 
-    ================================= FAILURES =================================
-    ______________________________ test_function _______________________________
+    ======= FAILURES ========
+    _______ test_function ________
 
     mydata = 42
 
@@ -235,7 +235,7 @@ You can always peek at the content of the cache using the
     E assert 42 == 23
 
     test_caching.py:14: AssertionError
-    ========================= 1 failed in 5.41 seconds =========================
+    ======= 1 failed in 0.12 seconds ========
 
 Clearing Cache content
 -------------------------------
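
Note: the ``test_caching.py`` runs above rely on a fixture that memoizes an
expensive value via ``config.cache``, roughly like this (a minimal sketch;
``expensive_computation`` is a hypothetical stand-in for the ``time.sleep``
based step in the full example)::

    import pytest

    @pytest.fixture
    def mydata(request):
        val = request.config.cache.get("example/value", None)
        if val is None:
            val = expensive_computation()  # hypothetical slow step
            request.config.cache.set("example/value", val)
        return val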

@@ -64,7 +64,7 @@ of the failing function and hide the other one::
 
     $ py.test
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     rootdir: $REGENDOC_TMPDIR, inifile:
     collected 2 items
 
@@ -78,7 +78,7 @@ of the failing function and hide the other one::
     E assert False
 
     test_module.py:9: AssertionError
-    ---------------------------- Captured stdout setup -----------------------------
+    -------------------------- Captured stdout setup ---------------------------
     setting up <function test_func2 at 0xdeadbeef>
     ======= 1 failed, 1 passed in 0.12 seconds ========
 

@@ -46,7 +46,7 @@ then you can just invoke ``py.test`` without command line options::
 
     $ py.test
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     rootdir: $REGENDOC_TMPDIR, inifile: pytest.ini
     collected 1 items
 

@@ -31,7 +31,8 @@ You can then restrict a test run to only run tests marked with ``webtest``::
 
     $ py.test -v -m webtest
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0 -- $PWD/.env/bin/python2.7
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
+    cachedir: .cache
     rootdir: $REGENDOC_TMPDIR, inifile:
     collecting ... collected 4 items
 
@@ -44,7 +45,8 @@ Or the inverse, running all tests except the webtest ones::
 
     $ py.test -v -m "not webtest"
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0 -- $PWD/.env/bin/python2.7
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
+    cachedir: .cache
     rootdir: $REGENDOC_TMPDIR, inifile:
     collecting ... collected 4 items
 
@@ -64,7 +66,8 @@ tests based on their module, class, method, or function name::
 
     $ py.test -v test_server.py::TestClass::test_method
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0 -- $PWD/.env/bin/python2.7
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
+    cachedir: .cache
     rootdir: $REGENDOC_TMPDIR, inifile:
     collecting ... collected 5 items
 
@@ -76,7 +79,8 @@ You can also select on the class::
 
     $ py.test -v test_server.py::TestClass
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0 -- $PWD/.env/bin/python2.7
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
+    cachedir: .cache
     rootdir: $REGENDOC_TMPDIR, inifile:
     collecting ... collected 4 items
 
@@ -88,7 +92,8 @@ Or select multiple nodes::
 
     $ py.test -v test_server.py::TestClass test_server.py::test_send_http
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0 -- $PWD/.env/bin/python2.7
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
+    cachedir: .cache
     rootdir: $REGENDOC_TMPDIR, inifile:
     collecting ... collected 8 items
 
@@ -125,7 +130,8 @@ select tests based on their names::
 
     $ py.test -v -k http # running with the above defined example module
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0 -- $PWD/.env/bin/python2.7
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
+    cachedir: .cache
     rootdir: $REGENDOC_TMPDIR, inifile:
     collecting ... collected 4 items
 
@@ -138,7 +144,8 @@ And you can also run all tests except the ones that match the keyword::
 
     $ py.test -k "not send_http" -v
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0 -- $PWD/.env/bin/python2.7
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
+    cachedir: .cache
     rootdir: $REGENDOC_TMPDIR, inifile:
     collecting ... collected 4 items
 
@@ -153,7 +160,8 @@ Or to select "http" and "quick" tests::
 
     $ py.test -k "http or quick" -v
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0 -- $PWD/.env/bin/python2.7
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
+    cachedir: .cache
     rootdir: $REGENDOC_TMPDIR, inifile:
     collecting ... collected 4 items
 
@@ -342,7 +350,7 @@ the test needs::
 
     $ py.test -E stage2
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     rootdir: $REGENDOC_TMPDIR, inifile:
     collected 1 items
 
@@ -354,7 +362,7 @@ and here is one that specifies exactly the environment needed::
 
     $ py.test -E stage1
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     rootdir: $REGENDOC_TMPDIR, inifile:
     collected 1 items
 
@@ -473,28 +481,28 @@ then you will see two test skipped and two executed tests as expected::
 
     $ py.test -rs # this option reports skip reasons
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     rootdir: $REGENDOC_TMPDIR, inifile:
     collected 4 items
 
-    test_plat.py s.s.
+    test_plat.py sss.
     ======= short test summary info ========
-    SKIP [2] $REGENDOC_TMPDIR/conftest.py:12: cannot run on platform linux2
+    SKIP [3] $REGENDOC_TMPDIR/conftest.py:12: cannot run on platform linux
 
-    ======= 2 passed, 2 skipped in 0.12 seconds ========
+    ======= 1 passed, 3 skipped in 0.12 seconds ========
 
 Note that if you specify a platform via the marker-command line option like this::
 
     $ py.test -m linux2
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     rootdir: $REGENDOC_TMPDIR, inifile:
     collected 4 items
 
-    test_plat.py .
+    test_plat.py s
 
     ======= 3 tests deselected by "-m 'linux2'" ========
-    ======= 1 passed, 3 deselected in 0.12 seconds ========
+    ======= 1 skipped, 3 deselected in 0.12 seconds ========
 
 then the unmarked-tests will not be run. It is thus a way to restrict the run to the specific tests.
 
@@ -539,7 +547,7 @@ We can now use the ``-m option`` to select one set::
 
     $ py.test -m interface --tb=short
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     rootdir: $REGENDOC_TMPDIR, inifile:
     collected 4 items
 
@@ -561,7 +569,7 @@ or to select both "event" and "interface" tests::
 
     $ py.test -m "interface or event" --tb=short
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     rootdir: $REGENDOC_TMPDIR, inifile:
     collected 4 items
 
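
Note: the ``webtest`` marker selected with ``-m`` above is applied in the
example module roughly like this (a minimal sketch of the documented
pattern)::

    import pytest

    @pytest.mark.webtest
    def test_send_http():
        pass  # perform some webtest test for your app

    def test_something_quick():
        pass

Running ``py.test -v -m webtest`` then collects only the marked test, and
``-m "not webtest"`` collects everything else.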

@@ -27,8 +27,8 @@ now execute the test specification::
 
     nonpython $ py.test test_simple.yml
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
-    rootdir: $PWD/doc/en, inifile: pytest.ini
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
+    rootdir: $REGENDOC_TMPDIR/nonpython, inifile:
     collected 2 items
 
     test_simple.yml .F
@@ -40,6 +40,8 @@ now execute the test specification::
     no further details known at this point.
     ======= 1 failed, 1 passed in 0.12 seconds ========
 
+.. regendoc:wipe
+
 You get one dot for the passing ``sub1: sub1`` check and one failure.
 Obviously in the above ``conftest.py`` you'll want to implement a more
 interesting interpretation of the yaml-values. You can easily write
@@ -57,8 +59,9 @@ consulted when reporting in ``verbose`` mode::
 
     nonpython $ py.test -v
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0 -- $PWD/.env/bin/python2.7
-    rootdir: $PWD/doc/en, inifile: pytest.ini
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
+    cachedir: .cache
+    rootdir: $REGENDOC_TMPDIR/nonpython, inifile:
     collecting ... collected 2 items
 
     test_simple.yml::ok PASSED
@@ -71,15 +74,17 @@ consulted when reporting in ``verbose`` mode::
     no further details known at this point.
     ======= 1 failed, 1 passed in 0.12 seconds ========
 
+.. regendoc:wipe
+
 While developing your custom test collection and execution it's also
 interesting to just look at the collection tree::
 
     nonpython $ py.test --collect-only
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
-    rootdir: $PWD/doc/en, inifile: pytest.ini
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
+    rootdir: $REGENDOC_TMPDIR/nonpython, inifile:
     collected 2 items
-    <YamlFile 'example/nonpython/test_simple.yml'>
+    <YamlFile 'test_simple.yml'>
       <YamlItem 'ok'>
       <YamlItem 'hello'>
 

@@ -127,11 +127,21 @@ objects, they are still using the default pytest representation::
 
     $ py.test test_time.py --collect-only
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     rootdir: $REGENDOC_TMPDIR, inifile:
+    collected 0 items / 1 errors
 
-    ======= in 0.12 seconds ========
-    ERROR: file not found: test_time.py
+    ======= ERRORS ========
+    _______ ERROR collecting test_time.py ________
+    $PYTHON_PREFIX/lib/python3.4/site-packages/_pytest/python.py:581: in _importtestmodule
+        mod = self.fspath.pyimport(ensuresyspath=importmode)
+    $PYTHON_PREFIX/lib/python3.4/site-packages/py/_path/local.py:650: in pyimport
+        __import__(modname)
+    E File "$REGENDOC_TMPDIR/test_time.py", line 5
+    E (datetime(2001, 12, 11), datetime(2001, 12, 12), timedelta(-1)),
+    E ^
+    E SyntaxError: unexpected EOF while parsing
+    ======= 1 error in 0.12 seconds ========
 
 A quick port of "testscenarios"
 ------------------------------------
@@ -171,7 +181,7 @@ this is a fully self-contained example which you can run with::
 
     $ py.test test_scenarios.py
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     rootdir: $REGENDOC_TMPDIR, inifile:
     collected 4 items
 
@@ -184,7 +194,7 @@ If you just collect tests you'll also nicely see 'advanced' and 'basic' as varia
 
     $ py.test --collect-only test_scenarios.py
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     rootdir: $REGENDOC_TMPDIR, inifile:
     collected 4 items
     <Module 'test_scenarios.py'>
@@ -249,7 +259,7 @@ Let's first see how it looks like at collection time::
 
     $ py.test test_backends.py --collect-only
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     rootdir: $REGENDOC_TMPDIR, inifile:
     collected 2 items
     <Module 'test_backends.py'>
@@ -265,7 +275,7 @@ And then when we run the test::
     ======= FAILURES ========
     _______ test_db_initialized[d2] ________
 
-    db = <conftest.DB2 instance at 0xdeadbeef>
+    db = <conftest.DB2 object at 0xdeadbeef>
 
     def test_db_initialized(db):
     # a dummy test
@@ -309,14 +319,14 @@ will be passed to respective fixture function.
 The result of this test will be successful:
 
     $ py.test test_indirect_list.py --collect-only
-    ============================= test session starts ==============================
-    platform linux2 -- Python 2.7.3, pytest-2.8.0.dev4, py-1.4.30, pluggy-0.3.0
-    rootdir: /home/elizabeth/work/pytest, inifile: tox.ini
+    ======= test session starts ========
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
+    rootdir: $REGENDOC_TMPDIR, inifile:
     collected 1 items
-    <Module 'testing/test_argnames.py'>
-      <Function 'test_simple[a-b]'>
+    <Module 'test_indirect_list.py'>
+      <Function 'test_indirect[a-b]'>
 
-    =============================== in 0.02 seconds ===============================
+    ======= in 0.12 seconds ========
 
 .. regendoc:wipe
 
@@ -361,7 +371,7 @@ argument sets to use for each test function. Let's run it::
     ======= FAILURES ========
     _______ TestClass.test_equals[1-2] ________
 
-    self = <test_parametrize.TestClass instance at 0xdeadbeef>, a = 1, b = 2
+    self = <test_parametrize.TestClass object at 0xdeadbeef>, a = 1, b = 2
 
     def test_equals(self, a, b):
     > assert a == b
@@ -389,8 +399,8 @@ Running it results in some skips if we don't have all the python interpreters in
     . $ py.test -rs -q multipython.py
     ssssssssssss...ssssssssssss
     ======= short test summary info ========
-    SKIP [12] $PWD/doc/en/example/multipython.py:22: 'python3.3' not found
-    SKIP [12] $PWD/doc/en/example/multipython.py:22: 'python2.6' not found
+    SKIP [12] $REGENDOC_TMPDIR/CWD/multipython.py:22: 'python3.3' not found
+    SKIP [12] $REGENDOC_TMPDIR/CWD/multipython.py:22: 'python2.6' not found
     3 passed, 24 skipped in 0.12 seconds
 
 Indirect parametrization of optional implementations/imports
@@ -438,7 +448,7 @@ If you run this with reporting for skips enabled::
 
     $ py.test -rs test_module.py
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     rootdir: $REGENDOC_TMPDIR, inifile:
     collected 2 items
 
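
Note: parametrized IDs such as ``TestClass.test_equals[1-2]`` above come from
the documented class-level parametrization, roughly (a minimal sketch)::

    import pytest

    @pytest.mark.parametrize("a,b", [(1, 2), (3, 3)])
    class TestClass:
        def test_equals(self, a, b):
            assert a == b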

@@ -43,7 +43,7 @@ then the test collection looks like this::
 
     $ py.test --collect-only
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     rootdir: $REGENDOC_TMPDIR, inifile: setup.cfg
     collected 2 items
     <Module 'check_myapp.py'>
@@ -89,10 +89,10 @@ You can always peek at the collection tree without running tests like this::
 
     . $ py.test --collect-only pythoncollection.py
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
-    rootdir: $PWD/doc/en, inifile: pytest.ini
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
+    rootdir: $REGENDOC_TMPDIR, inifile: pytest.ini
     collected 3 items
-    <Module 'example/pythoncollection.py'>
+    <Module 'CWD/pythoncollection.py'>
       <Function 'test_function'>
       <Class 'TestClass'>
         <Instance '()'>
@@ -143,11 +143,9 @@ interpreters and will leave out the setup.py file::
 
     $ py.test --collect-only
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     rootdir: $REGENDOC_TMPDIR, inifile: pytest.ini
-    collected 1 items
-    <Module 'pkg/module_py2.py'>
-      <Function 'test_only_on_python2'>
+    collected 0 items
 
     ======= in 0.12 seconds ========
 

@@ -13,8 +13,8 @@ get on the terminal - we are working on that):
 
     assertion $ py.test failure_demo.py
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
-    rootdir: $PWD/doc/en, inifile: pytest.ini
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
+    rootdir: $REGENDOC_TMPDIR/assertion, inifile:
     collected 42 items
 
     failure_demo.py FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF
@@ -41,8 +41,8 @@ get on the terminal - we are working on that):
 
     > assert f() == g()
     E assert 42 == 43
-    E + where 42 = <function f at 0xdeadbeef>()
-    E + and 43 = <function g at 0xdeadbeef>()
+    E + where 42 = <function TestFailing.test_simple.<locals>.f at 0xdeadbeef>()
+    E + and 43 = <function TestFailing.test_simple.<locals>.g at 0xdeadbeef>()
 
     failure_demo.py:28: AssertionError
     _______ TestFailing.test_simple_multiline ________
@@ -55,7 +55,7 @@ get on the terminal - we are working on that):
     > 6*9)
 
     failure_demo.py:33:
-    _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
+    _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
 
     a = 42, b = 54
 
@@ -74,7 +74,7 @@ get on the terminal - we are working on that):
     return 42
     > assert not f()
     E assert not 42
-    E + where 42 = <function f at 0xdeadbeef>()
+    E + where 42 = <function TestFailing.test_not.<locals>.f at 0xdeadbeef>()
 
     failure_demo.py:38: AssertionError
     _______ TestSpecialisedExplanations.test_eq_text ________
@@ -277,7 +277,7 @@ get on the terminal - we are working on that):
     E ? +++
 
     failure_demo.py:90: AssertionError
-    _______ TestSpecialisedExplanations.test_not_in_text_single_long_term ________
+    ______ TestSpecialisedExplanations.test_not_in_text_single_long_term _______
 
     self = <failure_demo.TestSpecialisedExplanations object at 0xdeadbeef>
 
@@ -298,7 +298,7 @@ get on the terminal - we are working on that):
     i = Foo()
     > assert i.b == 2
     E assert 1 == 2
-    E + where 1 = <failure_demo.Foo object at 0xdeadbeef>.b
+    E + where 1 = <failure_demo.test_attribute.<locals>.Foo object at 0xdeadbeef>.b
 
     failure_demo.py:101: AssertionError
     _______ test_attribute_instance ________
@@ -308,8 +308,8 @@ get on the terminal - we are working on that):
     b = 1
     > assert Foo().b == 2
     E assert 1 == 2
-    E + where 1 = <failure_demo.Foo object at 0xdeadbeef>.b
-    E + where <failure_demo.Foo object at 0xdeadbeef> = <class 'failure_demo.Foo'>()
+    E + where 1 = <failure_demo.test_attribute_instance.<locals>.Foo object at 0xdeadbeef>.b
+    E + where <failure_demo.test_attribute_instance.<locals>.Foo object at 0xdeadbeef> = <class 'failure_demo.test_attribute_instance.<locals>.Foo'>()
 
     failure_demo.py:107: AssertionError
     _______ test_attribute_failure ________
@@ -323,9 +323,9 @@ get on the terminal - we are working on that):
     > assert i.b == 2
 
     failure_demo.py:116:
-    _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
+    _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
 
-    self = <failure_demo.Foo object at 0xdeadbeef>
+    self = <failure_demo.test_attribute_failure.<locals>.Foo object at 0xdeadbeef>
 
     def _get_b(self):
     > raise Exception('Failed to get attrib')
@@ -341,30 +341,30 @@ get on the terminal - we are working on that):
     b = 2
     > assert Foo().b == Bar().b
     E assert 1 == 2
-    E + where 1 = <failure_demo.Foo object at 0xdeadbeef>.b
-    E + where <failure_demo.Foo object at 0xdeadbeef> = <class 'failure_demo.Foo'>()
-    E + and 2 = <failure_demo.Bar object at 0xdeadbeef>.b
-    E + where <failure_demo.Bar object at 0xdeadbeef> = <class 'failure_demo.Bar'>()
+    E + where 1 = <failure_demo.test_attribute_multiple.<locals>.Foo object at 0xdeadbeef>.b
+    E + where <failure_demo.test_attribute_multiple.<locals>.Foo object at 0xdeadbeef> = <class 'failure_demo.test_attribute_multiple.<locals>.Foo'>()
+    E + and 2 = <failure_demo.test_attribute_multiple.<locals>.Bar object at 0xdeadbeef>.b
+    E + where <failure_demo.test_attribute_multiple.<locals>.Bar object at 0xdeadbeef> = <class 'failure_demo.test_attribute_multiple.<locals>.Bar'>()
 
     failure_demo.py:124: AssertionError
     _______ TestRaises.test_raises ________
 
-    self = <failure_demo.TestRaises instance at 0xdeadbeef>
+    self = <failure_demo.TestRaises object at 0xdeadbeef>
 
     def test_raises(self):
     s = 'qwe'
     > raises(TypeError, "int(s)")
 
     failure_demo.py:133:
-    _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
+    _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
 
     > int(s)
     E ValueError: invalid literal for int() with base 10: 'qwe'
 
-    <0-codegen $PWD/_pytest/python.py:1091>:1: ValueError
+    <0-codegen $PYTHON_PREFIX/lib/python3.4/site-packages/_pytest/python.py:1205>:1: ValueError
     _______ TestRaises.test_raises_doesnt ________
 
-    self = <failure_demo.TestRaises instance at 0xdeadbeef>
+    self = <failure_demo.TestRaises object at 0xdeadbeef>
 
     def test_raises_doesnt(self):
     > raises(IOError, "int('3')")
@@ -373,7 +373,7 @@ get on the terminal - we are working on that):
     failure_demo.py:136: Failed
     _______ TestRaises.test_raise ________
 
-    self = <failure_demo.TestRaises instance at 0xdeadbeef>
+    self = <failure_demo.TestRaises object at 0xdeadbeef>
 
     def test_raise(self):
     > raise ValueError("demo error")
@@ -382,16 +382,16 @@ get on the terminal - we are working on that):
     failure_demo.py:139: ValueError
     _______ TestRaises.test_tupleerror ________
 
-    self = <failure_demo.TestRaises instance at 0xdeadbeef>
+    self = <failure_demo.TestRaises object at 0xdeadbeef>
 
     def test_tupleerror(self):
     > a,b = [1]
     E ValueError: need more than 1 value to unpack
 
     failure_demo.py:142: ValueError
-    _______ TestRaises.test_reinterpret_fails_with_print_for_the_fun_of_it ________
+    ______ TestRaises.test_reinterpret_fails_with_print_for_the_fun_of_it ______
 
-    self = <failure_demo.TestRaises instance at 0xdeadbeef>
+    self = <failure_demo.TestRaises object at 0xdeadbeef>
 
     def test_reinterpret_fails_with_print_for_the_fun_of_it(self):
     l = [1,2,3]
@@ -400,15 +400,15 @@ get on the terminal - we are working on that):
     E TypeError: 'int' object is not iterable
 
     failure_demo.py:147: TypeError
-    ----------------------------- Captured stdout call -----------------------------
+    --------------------------- Captured stdout call ---------------------------
     l is [1, 2, 3]
     _______ TestRaises.test_some_error ________
 
-    self = <failure_demo.TestRaises instance at 0xdeadbeef>
+    self = <failure_demo.TestRaises object at 0xdeadbeef>
 
     def test_some_error(self):
     > if namenotexi:
-    E NameError: global name 'namenotexi' is not defined
+    E NameError: name 'namenotexi' is not defined
 
     failure_demo.py:150: NameError
     _______ test_dynamic_compile_shows_nicely ________
@@ -423,16 +423,16 @@ get on the terminal - we are working on that):
     > module.foo()
 
     failure_demo.py:165:
-    _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
+    _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
 
     def foo():
     > assert 1 == 0
     E assert 1 == 0
 
-    <2-codegen 'abc-123' $PWD/doc/en/example/assertion/failure_demo.py:162>:2: AssertionError
+    <2-codegen 'abc-123' $REGENDOC_TMPDIR/assertion/failure_demo.py:162>:2: AssertionError
     _______ TestMoreErrors.test_complex_error ________
 
-    self = <failure_demo.TestMoreErrors instance at 0xdeadbeef>
+    self = <failure_demo.TestMoreErrors object at 0xdeadbeef>
 
     def test_complex_error(self):
     def f():
@@ -442,10 +442,10 @@ get on the terminal - we are working on that):
     > somefunc(f(), g())
 
     failure_demo.py:175:
-    _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
+    _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
     failure_demo.py:8: in somefunc
     otherfunc(x,y)
-    _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
+    _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
 
     a = 44, b = 43
 
@@ -456,7 +456,7 @@ get on the terminal - we are working on that):
     failure_demo.py:5: AssertionError
     _______ TestMoreErrors.test_z1_unpack_error ________
 
-    self = <failure_demo.TestMoreErrors instance at 0xdeadbeef>
+    self = <failure_demo.TestMoreErrors object at 0xdeadbeef>
 
     def test_z1_unpack_error(self):
     l = []
@@ -466,7 +466,7 @@ get on the terminal - we are working on that):
     failure_demo.py:179: ValueError
     _______ TestMoreErrors.test_z2_type_error ________
 
-    self = <failure_demo.TestMoreErrors instance at 0xdeadbeef>
+    self = <failure_demo.TestMoreErrors object at 0xdeadbeef>
 
     def test_z2_type_error(self):
     l = 3
@@ -476,7 +476,7 @@ get on the terminal - we are working on that):
     failure_demo.py:183: TypeError
     _______ TestMoreErrors.test_startswith ________
 
-    self = <failure_demo.TestMoreErrors instance at 0xdeadbeef>
+    self = <failure_demo.TestMoreErrors object at 0xdeadbeef>
 
     def test_startswith(self):
     s = "123"
@@ -488,7 +488,7 @@ get on the terminal - we are working on that):
     failure_demo.py:188: AssertionError
     _______ TestMoreErrors.test_startswith_nested ________
 
-    self = <failure_demo.TestMoreErrors instance at 0xdeadbeef>
+    self = <failure_demo.TestMoreErrors object at 0xdeadbeef>
 
     def test_startswith_nested(self):
     def f():
@@ -498,13 +498,13 @@ get on the terminal - we are working on that):
     > assert f().startswith(g())
     E assert <built-in method startswith of str object at 0xdeadbeef>('456')
     E + where <built-in method startswith of str object at 0xdeadbeef> = '123'.startswith
-    E + where '123' = <function f at 0xdeadbeef>()
-    E + and '456' = <function g at 0xdeadbeef>()
+    E + where '123' = <function TestMoreErrors.test_startswith_nested.<locals>.f at 0xdeadbeef>()
+    E + and '456' = <function TestMoreErrors.test_startswith_nested.<locals>.g at 0xdeadbeef>()
 
     failure_demo.py:195: AssertionError
     _______ TestMoreErrors.test_global_func ________
 
-    self = <failure_demo.TestMoreErrors instance at 0xdeadbeef>
+    self = <failure_demo.TestMoreErrors object at 0xdeadbeef>
 
     def test_global_func(self):
     > assert isinstance(globf(42), float)
@@ -514,18 +514,18 @@ get on the terminal - we are working on that):
     failure_demo.py:198: AssertionError
     _______ TestMoreErrors.test_instance ________
 
-    self = <failure_demo.TestMoreErrors instance at 0xdeadbeef>
+    self = <failure_demo.TestMoreErrors object at 0xdeadbeef>
 
     def test_instance(self):
     self.x = 6*7
     > assert self.x != 42
     E assert 42 != 42
-    E + where 42 = <failure_demo.TestMoreErrors instance at 0xdeadbeef>.x
+    E + where 42 = <failure_demo.TestMoreErrors object at 0xdeadbeef>.x
 
     failure_demo.py:202: AssertionError
     _______ TestMoreErrors.test_compare ________
 
-    self = <failure_demo.TestMoreErrors instance at 0xdeadbeef>
+    self = <failure_demo.TestMoreErrors object at 0xdeadbeef>
 
     def test_compare(self):
     > assert globf(10) < 5
@@ -535,7 +535,7 @@ get on the terminal - we are working on that):
     failure_demo.py:205: AssertionError
     _______ TestMoreErrors.test_try_finally ________
 
-    self = <failure_demo.TestMoreErrors instance at 0xdeadbeef>
+    self = <failure_demo.TestMoreErrors object at 0xdeadbeef>
 
     def test_try_finally(self):
     x = 1
@@ -546,7 +546,7 @@ get on the terminal - we are working on that):
     failure_demo.py:210: AssertionError
     _______ TestCustomAssertMsg.test_single_line ________
 
-    self = <failure_demo.TestCustomAssertMsg instance at 0xdeadbeef>
+    self = <failure_demo.TestCustomAssertMsg object at 0xdeadbeef>
 
     def test_single_line(self):
     class A:
@@ -555,12 +555,12 @@ get on the terminal - we are working on that):
     > assert A.a == b, "A.a appears not to be b"
     E AssertionError: A.a appears not to be b
     E assert 1 == 2
-    E + where 1 = <class failure_demo.A at 0xdeadbeef>.a
+    E + where 1 = <class 'failure_demo.TestCustomAssertMsg.test_single_line.<locals>.A'>.a
 
     failure_demo.py:221: AssertionError
     _______ TestCustomAssertMsg.test_multiline ________
 
-    self = <failure_demo.TestCustomAssertMsg instance at 0xdeadbeef>
+    self = <failure_demo.TestCustomAssertMsg object at 0xdeadbeef>
 
     def test_multiline(self):
     class A:
@@ -572,12 +572,12 @@ get on the terminal - we are working on that):
     E or does not appear to be b
     E one of those
     E assert 1 == 2
-    E + where 1 = <class failure_demo.A at 0xdeadbeef>.a
+    E + where 1 = <class 'failure_demo.TestCustomAssertMsg.test_multiline.<locals>.A'>.a
 
     failure_demo.py:227: AssertionError
     _______ TestCustomAssertMsg.test_custom_repr ________
 
-    self = <failure_demo.TestCustomAssertMsg instance at 0xdeadbeef>
+    self = <failure_demo.TestCustomAssertMsg object at 0xdeadbeef>
 
     def test_custom_repr(self):
     class JSON:

@@ -53,7 +53,7 @@ Let's run this without supplying our new option::
     E assert 0
 
     test_sample.py:6: AssertionError
-    ----------------------------- Captured stdout call -----------------------------
+    --------------------------- Captured stdout call ---------------------------
     first
     1 failed in 0.12 seconds
 
@@ -75,7 +75,7 @@ And now with supplying a command line option::
     E assert 0
 
     test_sample.py:6: AssertionError
-    ----------------------------- Captured stdout call -----------------------------
+    --------------------------- Captured stdout call ---------------------------
     second
     1 failed in 0.12 seconds
 
@@ -108,7 +108,7 @@ directory with the above conftest.py::
 
     $ py.test
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     rootdir: $REGENDOC_TMPDIR, inifile:
     collected 0 items
 
@@ -153,7 +153,7 @@ and when running it will see a skipped "slow" test::
 
     $ py.test -rs # "-rs" means report details on the little 's'
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     rootdir: $REGENDOC_TMPDIR, inifile:
     collected 2 items
 
@@ -167,7 +167,7 @@ Or run it including the ``slow`` marked test::
 
     $ py.test --runslow
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     rootdir: $REGENDOC_TMPDIR, inifile:
     collected 2 items
 
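
Note: the ``--runslow`` option and the skipped "slow" tests above come from
the documented ``conftest.py`` pattern, roughly (a minimal sketch)::

    # conftest.py
    import pytest

    def pytest_addoption(parser):
        parser.addoption("--runslow", action="store_true",
                         help="run slow tests")

    def pytest_runtest_setup(item):
        if "slow" in item.keywords and not item.config.getoption("--runslow"):
            pytest.skip("need --runslow option to run")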

@@ -259,7 +259,7 @@ which will add the string to the test header accordingly::
 
     $ py.test
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     project deps: mylib-1.1
     rootdir: $REGENDOC_TMPDIR, inifile:
     collected 0 items
@@ -283,7 +283,8 @@ which will add info only when run with "--v"::
 
     $ py.test -v
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0 -- $PWD/.env/bin/python2.7
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
+    cachedir: .cache
     info1: did you know that ...
     did you?
     rootdir: $REGENDOC_TMPDIR, inifile:
@@ -295,7 +296,7 @@ and nothing when run plainly::
 
     $ py.test
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     rootdir: $REGENDOC_TMPDIR, inifile:
     collected 0 items
 
@@ -328,7 +329,7 @@ Now we can profile which test functions execute the slowest::
 
     $ py.test --durations=3
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     rootdir: $REGENDOC_TMPDIR, inifile:
     collected 3 items
 
@@ -390,7 +391,7 @@ If we run this::
 
     $ py.test -rx
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     rootdir: $REGENDOC_TMPDIR, inifile:
     collected 4 items
 
@@ -399,7 +400,7 @@ If we run this::
     ======= FAILURES ========
     _______ TestUserHandling.test_modification ________
 
-    self = <test_step.TestUserHandling instance at 0xdeadbeef>
+    self = <test_step.TestUserHandling object at 0xdeadbeef>
 
     def test_modification(self):
     > assert 0
@@ -461,7 +462,7 @@ We can run this::
 
     $ py.test
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     rootdir: $REGENDOC_TMPDIR, inifile:
     collected 7 items
 
@@ -475,14 +476,14 @@ We can run this::
     file $REGENDOC_TMPDIR/b/test_error.py, line 1
     def test_root(db): # no db here, will error out
     fixture 'db' not found
-    available fixtures: pytestconfig, recwarn, monkeypatch, capfd, capsys, tmpdir
+    available fixtures: cache, record_xml_property, pytestconfig, tmpdir_factory, monkeypatch, capsys, recwarn, tmpdir, capfd
     use 'py.test --fixtures [testpath]' for help on them.
 
     $REGENDOC_TMPDIR/b/test_error.py:1
     ======= FAILURES ========
     _______ TestUserHandling.test_modification ________
 
-    self = <test_step.TestUserHandling instance at 0xdeadbeef>
+    self = <test_step.TestUserHandling object at 0xdeadbeef>
 
     def test_modification(self):
     > assert 0
@@ -491,21 +492,21 @@ We can run this::
     test_step.py:9: AssertionError
     _______ test_a1 ________
 
-    db = <conftest.DB instance at 0xdeadbeef>
+    db = <conftest.DB object at 0xdeadbeef>
 
     def test_a1(db):
     > assert 0, db # to show value
-    E AssertionError: <conftest.DB instance at 0xdeadbeef>
+    E AssertionError: <conftest.DB object at 0xdeadbeef>
     E assert 0
 
     a/test_db.py:2: AssertionError
     _______ test_a2 ________
 
-    db = <conftest.DB instance at 0xdeadbeef>
+    db = <conftest.DB object at 0xdeadbeef>
 
     def test_a2(db):
     > assert 0, db # to show value
-    E AssertionError: <conftest.DB instance at 0xdeadbeef>
+    E AssertionError: <conftest.DB object at 0xdeadbeef>
     E assert 0
 
     a/test_db2.py:2: AssertionError
@@ -565,7 +566,7 @@ and run them::
 
     $ py.test test_module.py
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     rootdir: $REGENDOC_TMPDIR, inifile:
     collected 2 items
 
@@ -574,7 +575,7 @@ and run them::
     ======= FAILURES ========
     _______ test_fail1 ________
 
-    tmpdir = local('/tmp/pytest-NaN/test_fail10')
+    tmpdir = local('PYTEST_TMPDIR/test_fail10')
 
     def test_fail1(tmpdir):
     > assert 0
@@ -593,7 +594,8 @@ and run them::
 you will have a "failures" file which contains the failing test ids::
 
     $ cat failures
-    cat: failures: No such file or directory
+    test_module.py::test_fail1 (PYTEST_TMPDIR/test_fail10)
+    test_module.py::test_fail2
 
 Making test result information available in fixtures
 -----------------------------------------------------------
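
Note: the ``failures`` file shown above is written by the documented
``conftest.py`` hook, roughly (a minimal sketch of the documented hookwrapper
pattern; see the full example in the docs)::

    # conftest.py
    import os.path
    import pytest

    @pytest.hookimpl(tryfirst=True, hookwrapper=True)
    def pytest_runtest_makereport(item, call):
        # execute all other hooks to obtain the report object
        outcome = yield
        rep = outcome.get_result()
        if rep.when == "call" and rep.failed:
            mode = "a" if os.path.exists("failures") else "w"
            with open("failures", mode) as f:
                f.write(rep.nodeid + "\n")
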
@@ -655,12 +657,12 @@ and run it::
 
     $ py.test -s test_module.py
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     rootdir: $REGENDOC_TMPDIR, inifile:
     collected 3 items
 
-    test_module.py E('setting up a test failed!', 'test_module.py::test_setup_fails')
-    F('executing test failed', 'test_module.py::test_call_fails')
+    test_module.py Esetting up a test failed! test_module.py::test_setup_fails
+    Fexecuting test failed test_module.py::test_call_fails
     F
 
     ======= ERRORS ========
@@ -689,7 +691,7 @@ and run it::
     E assert 0
 
     test_module.py:15: AssertionError
-    ======= 2 failed, 1 warnings, 1 error in 0.12 seconds ========
+    ======= 2 failed, 1 error in 0.12 seconds ========
 
 You'll see that the fixture finalizers could use the precise reporting
 information.

@@ -75,7 +75,7 @@ marked ``smtp`` fixture function. Running the test looks like this::
 
     $ py.test test_smtpsimple.py
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     rootdir: $REGENDOC_TMPDIR, inifile:
     collected 1 items
 
@@ -84,7 +84,7 @@ marked ``smtp`` fixture function. Running the test looks like this::
     ======= FAILURES ========
     _______ test_ehlo ________
 
-    smtp = <smtplib.SMTP instance at 0xdeadbeef>
+    smtp = <smtplib.SMTP object at 0xdeadbeef>
 
     def test_ehlo(smtp):
     response, msg = smtp.ehlo()
@@ -180,7 +180,7 @@ function (in or below the directory where ``conftest.py`` is located)::
     def test_ehlo(smtp):
     response, msg = smtp.ehlo()
     assert response == 250
-    assert "smtp.gmail.com" in str(msg, 'ascii')
+    assert b"smtp.gmail.com" in msg
     assert 0 # for demo purposes
 
     def test_noop(smtp):
@@ -193,7 +193,7 @@ inspect what is going on and can now run the tests::
 
     $ py.test test_module.py
     ======= test session starts ========
-    platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
+    platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
     rootdir: $REGENDOC_TMPDIR, inifile:
     collected 2 items
 
@@ -202,23 +202,23 @@ inspect what is going on and can now run the tests::
     ======= FAILURES ========
     _______ test_ehlo ________
 
-    smtp = <smtplib.SMTP instance at 0xdeadbeef>
+    smtp = <smtplib.SMTP object at 0xdeadbeef>
 
     def test_ehlo(smtp):
-    response = smtp.ehlo()
-    assert response[0] == 250
-    assert "merlinux" in response[1]
+    response, msg = smtp.ehlo()
+    assert response == 250
+    assert b"smtp.gmail.com" in msg
     > assert 0 # for demo purposes
     E assert 0
 
     test_module.py:6: AssertionError
     _______ test_noop ________
 
-    smtp = <smtplib.SMTP instance at 0xdeadbeef>
+    smtp = <smtplib.SMTP object at 0xdeadbeef>
 
     def test_noop(smtp):
-    response = smtp.noop()
-    assert response[0] == 250
+    response, msg = smtp.noop()
+    assert response == 250
     > assert 0 # for demo purposes
     E assert 0
 
@@ -313,7 +313,7 @@ We use the ``request.module`` attribute to optionally obtain an
 again, nothing much has changed::
 
     $ py.test -s -q --tb=no
-    FFteardown smtp
+    FFfinalizing <smtplib.SMTP object at 0xdeadbeef> (smtp.gmail.com)
 
     2 failed in 0.12 seconds
 
@@ -335,7 +335,7 @@ Running it::
     _______ test_showhelo ________
     test_anothersmtp.py:5: in test_showhelo
     assert 0, smtp.helo()
-    E AssertionError: (250, 'hq.merlinux.eu')
+    E AssertionError: (250, b'mail.python.org')
     E assert 0
 
 voila! The ``smtp`` fixture function picked up our mail server name
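
Note: the ``smtp`` fixture driving these examples is defined in a
``conftest.py`` along these lines (a minimal sketch of the documented
module-scoped fixture)::

    # conftest.py
    import smtplib
    import pytest

    @pytest.fixture(scope="module")
    def smtp(request):
        smtp = smtplib.SMTP("smtp.gmail.com")
        def fin():
            print("finalizing %s" % smtp)
            smtp.close()
        request.addfinalizer(fin)
        return smtp
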
@ -381,49 +381,49 @@ So let's just do another run::
$ py.test -q test_module.py
FFFF
======= FAILURES ========
_______ test_ehlo[merlinux.eu] ________
_______ test_ehlo[smtp.gmail.com] ________
smtp = <smtplib.SMTP instance at 0xdeadbeef>
smtp = <smtplib.SMTP object at 0xdeadbeef>
def test_ehlo(smtp):
response = smtp.ehlo()
assert response[0] == 250
assert "merlinux" in response[1]
response, msg = smtp.ehlo()
assert response == 250
assert b"smtp.gmail.com" in msg
> assert 0 # for demo purposes
E assert 0
test_module.py:6: AssertionError
_______ test_noop[merlinux.eu] ________
_______ test_noop[smtp.gmail.com] ________
smtp = <smtplib.SMTP instance at 0xdeadbeef>
smtp = <smtplib.SMTP object at 0xdeadbeef>
def test_noop(smtp):
response = smtp.noop()
assert response[0] == 250
response, msg = smtp.noop()
assert response == 250
> assert 0 # for demo purposes
E assert 0
test_module.py:11: AssertionError
_______ test_ehlo[mail.python.org] ________
smtp = <smtplib.SMTP instance at 0xdeadbeef>
smtp = <smtplib.SMTP object at 0xdeadbeef>
def test_ehlo(smtp):
response = smtp.ehlo()
assert response[0] == 250
> assert "merlinux" in response[1]
E assert 'merlinux' in 'mail.python.org\nSIZE 51200000\nETRN\nSTARTTLS\nENHANCEDSTATUSCODES\n8BITMIME\nDSN\nSMTPUTF8'
response, msg = smtp.ehlo()
assert response == 250
> assert b"smtp.gmail.com" in msg
E assert b'smtp.gmail.com' in b'mail.python.org\nSIZE 51200000\nETRN\nSTARTTLS\nENHANCEDSTATUSCODES\n8BITMIME\nDSN\nSMTPUTF8'
test_module.py:5: AssertionError
---------------------------- Captured stdout setup -----------------------------
finalizing <smtplib.SMTP instance at 0xdeadbeef>
-------------------------- Captured stdout setup ---------------------------
finalizing <smtplib.SMTP object at 0xdeadbeef>
_______ test_noop[mail.python.org] ________
smtp = <smtplib.SMTP instance at 0xdeadbeef>
smtp = <smtplib.SMTP object at 0xdeadbeef>
def test_noop(smtp):
response = smtp.noop()
assert response[0] == 250
response, msg = smtp.noop()
assert response == 250
> assert 0 # for demo purposes
E assert 0
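
The two server names in the test IDs come from fixture parametrization; a hedged sketch of the parametrized fixture, whose finalizer print matches the captured stdout above (the exact body is an assumption)::

    # conftest.py -- assumed parametrized fixture; each param yields its own
    # module-scoped connection and its own set of test IDs
    import smtplib
    import pytest

    @pytest.fixture(scope="module",
                    params=["smtp.gmail.com", "mail.python.org"])
    def smtp(request):
        smtp = smtplib.SMTP(request.param)

        def fin():
            print("finalizing %s" % smtp)
            smtp.close()

        request.addfinalizer(fin)
        return smtp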
@ -480,15 +480,20 @@ Running the above tests results in the following test IDs being used::
$ py.test --collect-only
======= test session starts ========
platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
rootdir: $REGENDOC_TMPDIR, inifile:
collected 6 items
collected 10 items
<Module 'test_anothersmtp.py'>
<Function 'test_showhelo[merlinux.eu]'>
<Function 'test_showhelo[smtp.gmail.com]'>
<Function 'test_showhelo[mail.python.org]'>
<Module 'test_ids.py'>
<Function 'test_a[spam]'>
<Function 'test_a[ham]'>
<Function 'test_b[eggs]'>
<Function 'test_b[1]'>
<Module 'test_module.py'>
<Function 'test_ehlo[merlinux.eu]'>
<Function 'test_noop[merlinux.eu]'>
<Function 'test_ehlo[smtp.gmail.com]'>
<Function 'test_noop[smtp.gmail.com]'>
<Function 'test_ehlo[mail.python.org]'>
<Function 'test_noop[mail.python.org]'>
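
The ``test_a[spam]``/``test_b[eggs]`` IDs in the collection above come from the ``ids`` keyword; a sketch of what ``test_ids.py`` presumably contains, with values chosen to be consistent with the IDs shown::

    # test_ids.py -- sketch behind the [spam]/[ham] and [eggs]/[1] IDs;
    # an id function returning None falls back to the generated ID
    import pytest

    @pytest.fixture(params=[0, 1], ids=["spam", "ham"])
    def a(request):
        return request.param

    def test_a(a):
        pass

    def idfn(fixture_value):
        if fixture_value == 0:
            return "eggs"
        return None  # use the automatically generated ID

    @pytest.fixture(params=[0, 1], ids=idfn)
    def b(request):
        return request.param

    def test_b(b):
        pass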
@ -526,11 +531,12 @@ Here we declare an ``app`` fixture which receives the previously defined
$ py.test -v test_appsetup.py
======= test session starts ========
platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0 -- $PWD/.env/bin/python2.7
platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
cachedir: .cache
rootdir: $REGENDOC_TMPDIR, inifile:
collecting ... collected 2 items
test_appsetup.py::test_smtp_exists[merlinux.eu] PASSED
test_appsetup.py::test_smtp_exists[smtp.gmail.com] PASSED
test_appsetup.py::test_smtp_exists[mail.python.org] PASSED
======= 2 passed in 0.12 seconds ========
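
For reference, the ``app`` fixture under test here stacks on top of the parametrized ``smtp`` fixture; a sketch consistent with the test IDs above (class and fixture names are assumptions)::

    # test_appsetup.py -- app receives the parametrized smtp fixture, so
    # each smtp param yields one app instance and one test run
    import pytest

    class App:
        def __init__(self, smtp):
            self.smtp = smtp

    @pytest.fixture(scope="module")
    def app(smtp):
        return App(smtp)

    def test_smtp_exists(app):
        assert app.smtp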
@ -591,27 +597,28 @@ Let's run the tests in verbose mode while looking at the print-output::
$ py.test -v -s test_module.py
======= test session starts ========
platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0 -- $PWD/.env/bin/python2.7
platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
cachedir: .cache
rootdir: $REGENDOC_TMPDIR, inifile:
collecting ... collected 8 items
test_module.py::test_0[1] (' test0', 1)
test_module.py::test_0[1] test0 1
PASSED
test_module.py::test_0[2] (' test0', 2)
test_module.py::test_0[2] test0 2
PASSED
test_module.py::test_1[mod1] ('create', 'mod1')
(' test1', 'mod1')
test_module.py::test_1[mod1] create mod1
test1 mod1
PASSED
test_module.py::test_2[1-mod1] (' test2', 1, 'mod1')
test_module.py::test_2[1-mod1] test2 1 mod1
PASSED
test_module.py::test_2[2-mod1] (' test2', 2, 'mod1')
test_module.py::test_2[2-mod1] test2 2 mod1
PASSED
test_module.py::test_1[mod2] ('create', 'mod2')
(' test1', 'mod2')
test_module.py::test_1[mod2] create mod2
test1 mod2
PASSED
test_module.py::test_2[1-mod2] (' test2', 1, 'mod2')
test_module.py::test_2[1-mod2] test2 1 mod2
PASSED
test_module.py::test_2[2-mod2] (' test2', 2, 'mod2')
test_module.py::test_2[2-mod2] test2 2 mod2
PASSED
======= 8 passed in 0.12 seconds ========
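
A sketch of the module behind this output, reconstructed from the printed lines (module-scoped ``modarg`` is set up once per parameter, function-scoped ``otherarg`` once per test; the finalizer is an assumption)::

    # test_module.py -- sketch; the print calls match the -s output above
    import pytest

    @pytest.fixture(scope="module", params=["mod1", "mod2"])
    def modarg(request):
        param = request.param
        print("create", param)

        def fin():
            print("fin", param)

        request.addfinalizer(fin)
        return param

    @pytest.fixture(scope="function", params=[1, 2])
    def otherarg(request):
        return request.param

    def test_0(otherarg):
        print("  test0", otherarg)

    def test_1(modarg):
        print("  test1", modarg)

    def test_2(otherarg, modarg):
        print("  test2", otherarg, modarg)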
@ -27,7 +27,7 @@ Installation options::
To check that you have installed the correct version::
$ py.test --version
This is pytest version 2.8.0.dev4, imported from $PWD/pytest.pyc
This is pytest version 2.8.1.dev1, imported from $PYTHON_PREFIX/lib/python3.4/site-packages/pytest.py
If you get an error, check out :ref:`installation issues`.
@ -49,7 +49,7 @@ That's it. You can execute the test function now::
$ py.test
======= test session starts ========
platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
rootdir: $REGENDOC_TMPDIR, inifile:
collected 1 items
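
The one item collected here is presumably the canonical first example from the getting-started page; it is not visible in this diff, so the following is an assumption::

    # test_sample.py -- assumed content; the diff only shows the run output
    def func(x):
        return x + 1

    def test_answer():
        assert func(3) == 5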
@ -128,7 +128,7 @@ run the module by passing its filename::
======= FAILURES ========
_______ TestClass.test_two ________
self = <test_class.TestClass instance at 0xdeadbeef>
self = <test_class.TestClass object at 0xdeadbeef>
def test_two(self):
x = "hello"
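
A sketch of the class-based example behind this traceback; the diff shows only the first two lines of ``test_two``, so the failing assertion (and ``test_one``) are assumptions::

    # test_class.py -- sketch; test_two fails because str has no .check
    class TestClass:
        def test_one(self):
            x = "this"
            assert "h" in x

        def test_two(self):
            x = "hello"
            assert hasattr(x, "check")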
@ -164,7 +164,7 @@ before performing the test function call. Let's just run it::
======= FAILURES ========
_______ test_needsfiles ________
tmpdir = local('/tmp/pytest-NaN/test_needsfiles0')
tmpdir = local('PYTEST_TMPDIR/test_needsfiles0')
def test_needsfiles(tmpdir):
print (tmpdir)
@ -172,8 +172,8 @@ before performing the test function call. Let's just run it::
E assert 0
test_tmpdir.py:3: AssertionError
----------------------------- Captured stdout call -----------------------------
/tmp/pytest-NaN/test_needsfiles0
--------------------------- Captured stdout call ---------------------------
PYTEST_TMPDIR/test_needsfiles0
1 failed in 0.12 seconds
Before the test runs, a unique-per-test-invocation temporary directory
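
The failing test itself is fully visible in the traceback; as a runnable unit it is just::

    # test_tmpdir.py -- matches the traceback above; tmpdir is the builtin
    # fixture providing a fresh per-test temporary directory
    def test_needsfiles(tmpdir):
        print(tmpdir)
        assert 0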
@ -55,7 +55,7 @@ them in turn::
$ py.test
======= test session starts ========
platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
rootdir: $REGENDOC_TMPDIR, inifile:
collected 3 items
@ -103,7 +103,7 @@ Let's run this::
$ py.test
======= test session starts ========
platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
rootdir: $REGENDOC_TMPDIR, inifile:
collected 3 items
@ -201,7 +201,7 @@ listlist::
$ py.test -q -rs test_strings.py
s
======= short test summary info ========
SKIP [1] $PWD/_pytest/python.py:1201: got empty parameter set, function test_valid_string at $REGENDOC_TMPDIR/test_strings.py:1
SKIP [1] $PYTHON_PREFIX/lib/python3.4/site-packages/_pytest/python.py:1322: got empty parameter set, function test_valid_string at $REGENDOC_TMPDIR/test_strings.py:1
1 skipped in 0.12 seconds
For further examples, you might want to look at :ref:`more
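
The SKIP entry is what an empty parameter set produces; a minimal sketch that triggers the same message (in the real doc the list is built from a command-line option, so the empty literal here is an assumption)::

    # test_strings.py -- an empty parameter set makes pytest skip the test
    import pytest

    @pytest.mark.parametrize("stringinput", [])
    def test_valid_string(stringinput):
        assert stringinput.isalpha()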
@ -165,8 +165,8 @@ Running it with the report-on-xfail option gives this output::
example $ py.test -rx xfail_demo.py
======= test session starts ========
platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
rootdir: $PWD/doc/en, inifile: pytest.ini
platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
rootdir: $REGENDOC_TMPDIR/example, inifile:
collected 7 items
xfail_demo.py xxxxxxx
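
Each ``x`` in ``xxxxxxx`` is one xfailing test; the smallest representative from ``xfail_demo.py`` looks roughly like this (one of seven variants, the rest omitted)::

    # xfail_demo.py (excerpt) -- a test expected to fail
    import pytest

    @pytest.mark.xfail
    def test_hello():
        assert 0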
@ -29,7 +29,7 @@ Running this would result in a passed test except for the last
$ py.test test_tmpdir.py
======= test session starts ========
platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
rootdir: $REGENDOC_TMPDIR, inifile:
collected 1 items
@ -38,7 +38,7 @@ Running this would result in a passed test except for the last
======= FAILURES ========
_______ test_create_file ________
tmpdir = local('/tmp/pytest-NaN/test_create_file0')
tmpdir = local('PYTEST_TMPDIR/test_create_file0')
def test_create_file(tmpdir):
p = tmpdir.mkdir("sub").join("hello.txt")
@ -51,7 +51,6 @@ Running this would result in a passed test except for the last
test_tmpdir.py:7: AssertionError
======= 1 failed in 0.12 seconds ========
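
The full example behind this traceback is short; a sketch, with the middle assertions reconstructed from the surrounding doc and the final ``assert 0`` present only so the report shows the tmpdir path::

    # test_tmpdir.py -- sketch of the complete test
    def test_create_file(tmpdir):
        p = tmpdir.mkdir("sub").join("hello.txt")
        p.write("content")
        assert p.read() == "content"
        assert len(tmpdir.listdir()) == 1
        assert 0  # for demo purposes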
The 'tmpdir_factory' fixture
----------------------------
@ -88,7 +88,7 @@ the ``self.db`` values in the traceback::
$ py.test test_unittest_db.py
======= test session starts ========
platform linux2 -- Python 2.7.9, pytest-2.8.0.dev4, py-1.4.28, pluggy-0.3.0
platform linux -- Python 3.4.2, pytest-2.8.1.dev1, py-1.4.30, pluggy-0.3.1
rootdir: $REGENDOC_TMPDIR, inifile:
collected 2 items
@ -102,7 +102,7 @@ the ``self.db`` values in the traceback::
def test_method1(self):
assert hasattr(self, "db")
> assert 0, self.db # fail for demo purposes
E AssertionError: <conftest.DummyDB instance at 0xdeadbeef>
E AssertionError: <conftest.db_class.<locals>.DummyDB object at 0xdeadbeef>
E assert 0
test_unittest_db.py:9: AssertionError
@ -112,7 +112,7 @@ the ``self.db`` values in the traceback::
def test_method2(self):
> assert 0, self.db # fail for demo purposes
E AssertionError: <conftest.DummyDB instance at 0xdeadbeef>
E AssertionError: <conftest.db_class.<locals>.DummyDB object at 0xdeadbeef>
E assert 0
test_unittest_db.py:12: AssertionError
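
The ``conftest.db_class.<locals>.DummyDB`` repr in the new traceback pins down the fixture shape: a class-scoped fixture defining ``DummyDB`` locally and attaching an instance to the unittest class. A sketch, with the usage side assumed from the method bodies above::

    # conftest.py -- class-scoped fixture injecting a db onto the test class
    import pytest

    @pytest.fixture(scope="class")
    def db_class(request):
        class DummyDB:
            pass
        request.cls.db = DummyDB()

    # test_unittest_db.py -- assumed usage
    import unittest
    import pytest

    @pytest.mark.usefixtures("db_class")
    class MyTest(unittest.TestCase):
        def test_method1(self):
            assert hasattr(self, "db")
            assert 0, self.db  # fail for demo purposes

        def test_method2(self):
            assert 0, self.db  # fail for demo purposes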
tox.ini
@ -105,12 +105,15 @@ commands= py.test -rfsxX {posargs}
[testenv:regen]
changedir=doc/en
basepython = python3.4
deps=sphinx
    PyYAML
whitelist_externals=rm
    regendoc>=0.6.1
whitelist_externals=
    rm
    make
commands=
    rm -rf /tmp/doc-exec*
    #pip install pytest==2.3.4
    make regen

[testenv:jython]
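
With this environment in place, regenerating the documentation should reduce to the standard tox invocation (not shown in this diff, but implied by the env name)::

    $ tox -e regen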