Merge commit '56156bb119194014129ac08c4a2c370f0b893104' into merge-master

Ronny Pfannschmidt 2016-05-13 17:55:02 +02:00
commit 01d2ff804b
6 changed files with 80 additions and 39 deletions

View File

@@ -73,6 +73,7 @@ Omar Kohl
Pieter Mulder
Piotr Banaszkiewicz
Punyashloka Biswal
Quentin Pradet
Ralf Schmitt
Raphael Pierzina
Roman Bolshakov

View File

@@ -77,7 +77,9 @@
*
*
* Fix ``pytest.mark.skip`` mark when used in strict mode.
  Thanks `@pquentin`_ for the PR and `@RonnyPfannschmidt`_ for
  showing how to fix the bug.

* Minor improvements and fixes to the documentation.
  Thanks `@omarkohl`_ for the PR.
@@ -237,6 +239,7 @@
.. _@rabbbit: https://github.com/rabbbit
.. _@hackebrot: https://github.com/hackebrot
.. _@omarkohl: https://github.com/omarkohl
.. _@pquentin: https://github.com/pquentin
2.8.7
=====

View File

@@ -30,6 +30,11 @@ def pytest_configure(config):
        nop.Exception = XFailed
        setattr(pytest, "xfail", nop)
    config.addinivalue_line("markers",
        "skip(reason=None): skip the given test function with an optional reason. "
        "Example: skip(reason=\"no way of currently testing this\") skips the "
        "test."
    )
    config.addinivalue_line("markers",
        "skipif(condition): skip the given test function if eval(condition) "
        "results in a True value. Evaluation happens within the "
@@ -38,13 +43,13 @@ def pytest_configure(config):
"http://pytest.org/latest/skipping.html"
)
config.addinivalue_line("markers",
"xfail(condition, reason=None, run=True, raises=None): mark the the test function "
"as an expected failure if eval(condition) has a True value. "
"Optionally specify a reason for better reporting and run=False if "
"you don't even want to execute the test function. If only specific "
"exception(s) are expected, you can list them in raises, and if the test fails "
"in other ways, it will be reported as a true failure. "
"See http://pytest.org/latest/skipping.html"
"xfail(condition, reason=None, run=True, raises=None, strict=False): "
"mark the the test function as an expected failure if eval(condition) "
"has a True value. Optionally specify a reason for better reporting "
"and run=False if you don't even want to execute the test function. "
"If only specific exception(s) are expected, you can list them in "
"raises, and if the test fails in other ways, it will be reported as "
"a true failure. See http://pytest.org/latest/skipping.html"
)
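
As a usage sketch of the ``strict`` parameter documented in the marker help
text above (the test name, reason, and body below are illustrative, not taken
from this commit)::

    import pytest

    @pytest.mark.xfail(strict=True, reason="demonstrates strict xfail")
    def test_strict_xfail():
        # With strict=True an unexpected pass (XPASS) is reported as a real
        # failure instead of being recorded as xpassed.
        assert False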

View File

@@ -158,13 +158,22 @@ it in your setuptools-invocation:
            'name_of_plugin = myproject.pluginmodule',
        ]
    },

    # custom PyPI classifier for pytest plugins
    classifiers=[
        "Framework :: Pytest",
    ],
)
If a package is installed this way, ``pytest`` will load
``myproject.pluginmodule`` as a plugin which can define
`well specified hooks`_.
.. note::

    Make sure to include ``Framework :: Pytest`` in your list of
    `PyPI classifiers <http://python-packaging-user-guide.readthedocs.org/en/latest/distributing/#classifiers>`_
    to make it easy for users to find your plugin.
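
Put together, a minimal ``setup.py`` along these lines might look like the
sketch below (the project and module names are placeholders)::

    from setuptools import setup

    setup(
        name="myproject",
        packages=["myproject"],
        # register the plugin module under the pytest11 entry point group
        entry_points={
            "pytest11": [
                "name_of_plugin = myproject.pluginmodule",
            ],
        },
        # custom PyPI classifier for pytest plugins
        classifiers=[
            "Framework :: Pytest",
        ],
    )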
Requiring/Loading plugins in a test module or conftest file

View File

@@ -2,30 +2,34 @@ import json
import py
import textwrap
issues_url = "http://bitbucket.org/api/1.0/repositories/pytest-dev/pytest/issues"
issues_url = "https://api.github.com/repos/pytest-dev/pytest/issues"
import requests
def get_issues():
    chunksize = 50
    start = 0
    issues = []
    url = issues_url
    while 1:
        post_data = {"accountname": "pytest-dev",
                     "repo_slug": "pytest",
                     "start": start,
                     "limit": chunksize}
        print ("getting from", start)
        r = requests.get(issues_url, params=post_data)
        get_data = {"state": "all"}
        r = requests.get(url, params=get_data)
        data = r.json()
        issues.extend(data["issues"])
        if start + chunksize >= data["count"]:
        if r.status_code == 403:
            # API request limit exceeded
            print(data['message'])
            exit(1)
        issues.extend(data)
        # Look for next page
        links = requests.utils.parse_header_links(r.headers['Link'])
        another_page = False
        for link in links:
            if link['rel'] == 'next':
                url = link['url']
                another_page = True
        if not another_page:
            return issues
        start += chunksize
kind2num = "bug enhancement task proposal".split()
status2num = "new open resolved duplicate invalid wontfix".split()
def main(args):
    cachefile = py.path.local(args.cache)
@@ -35,33 +39,38 @@ def main(args):
    else:
        issues = json.loads(cachefile.read())
    open_issues = [x for x in issues
                   if x["status"] in ("new", "open")]
    open_issues = [x for x in issues if x["state"] == "open"]
    def kind_and_id(x):
        kind = x["metadata"]["kind"]
        return kind2num.index(kind), len(issues)-int(x["local_id"])
    open_issues.sort(key=kind_and_id)
    open_issues.sort(key=lambda x: x["number"])
    report(open_issues)

def _get_kind(issue):
    labels = [l['name'] for l in issue['labels']]
    for key in ('bug', 'enhancement', 'proposal'):
        if key in labels:
            return key
    return 'issue'


def report(issues):
    for issue in issues:
        metadata = issue["metadata"]
        priority = issue["priority"]
        title = issue["title"]
        content = issue["content"]
        kind = metadata["kind"]
        status = issue["status"]
        id = issue["local_id"]
        link = "https://bitbucket.org/pytest-dev/pytest/issue/%s/" % id
        body = issue["body"]
        kind = _get_kind(issue)
        status = issue["state"]
        number = issue["number"]
        link = "https://github.com/pytest-dev/pytest/issues/%s/" % number
        print("----")
        print(status, kind, link)
        print(title)
        #print()
        #lines = content.split("\n")
        #lines = body.split("\n")
        #print ("\n".join(lines[:3]))
        #if len(lines) > 3 or len(content) > 240:
        #if len(lines) > 3 or len(body) > 240:
        # print ("...")
    print("\n\nFound %s open issues" % len(issues))


if __name__ == "__main__":
    import argparse
@@ -72,3 +81,4 @@ if __name__ == "__main__":
help="cache file")
args = parser.parse_args()
main(args)
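
For context, the pagination in ``get_issues`` follows GitHub's ``Link``
response header. A small standalone sketch of what
``requests.utils.parse_header_links`` returns (the header value below is made
up for the example)::

    from requests.utils import parse_header_links

    # An illustrative GitHub "Link" header value with more pages available:
    value = ('<https://api.github.com/repos/pytest-dev/pytest/issues?page=2>; rel="next", '
             '<https://api.github.com/repos/pytest-dev/pytest/issues?page=9>; rel="last"')

    for link in parse_header_links(value):
        # Each entry is a dict such as {'url': '...?page=2', 'rel': 'next'}
        if link["rel"] == "next":
            print("next page:", link["url"])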

View File

@@ -539,6 +539,19 @@ class TestSkip:
"*1 passed*2 skipped*",
])
def test_strict_and_skip(self, testdir):
testdir.makepyfile("""
import pytest
@pytest.mark.skip
def test_hello():
pass
""")
result = testdir.runpytest("-rs --strict")
result.stdout.fnmatch_lines([
"*unconditional skip*",
"*1 skipped*",
])
class TestSkipif:
def test_skipif_conditional(self, testdir):
item = testdir.getitem("""
@@ -812,7 +825,7 @@ def test_default_markers(testdir):
    result = testdir.runpytest("--markers")
    result.stdout.fnmatch_lines([
        "*skipif(*condition)*skip*",
        "*xfail(*condition, reason=None, run=True, raises=None)*expected failure*",
        "*xfail(*condition, reason=None, run=True, raises=None, strict=False)*expected failure*",
    ])
def test_xfail_test_setup_exception(testdir):