2013-11-19 04:31:32 +08:00
|
|
|
import json
|
|
|
|
import py
|
|
|
|
import textwrap
|
|
|
|
|
2016-03-29 01:51:26 +08:00
|
|
|
# GitHub REST API endpoint listing issues for the pytest repository.
issues_url = "https://api.github.com/repos/pytest-dev/pytest/issues"
|
2013-11-19 04:31:32 +08:00
|
|
|
|
|
|
|
import requests
|
|
|
|
|
2016-03-29 01:51:26 +08:00
|
|
|
|
2013-11-19 04:31:32 +08:00
|
|
|
def get_issues():
    """Fetch every issue (open and closed) from the pytest GitHub repo.

    Follows the paginated API by walking the ``Link`` response header and
    returns the accumulated list of issue dicts.  Exits the process when
    the API rate limit is hit (HTTP 403).
    """
    issues = []
    url = issues_url
    while True:
        # "state": "all" asks for closed issues as well as open ones.
        r = requests.get(url, params={"state": "all"})
        data = r.json()
        if r.status_code == 403:
            # API request limit exceeded; GitHub's message says when to retry.
            print(data["message"])
            raise SystemExit(1)
        issues.extend(data)
        # Look for a next page.  A single-page result may carry no Link
        # header at all, so fall back to "" (parse_header_links("") == []).
        links = requests.utils.parse_header_links(r.headers.get("Link", ""))
        another_page = False
        for link in links:
            if link["rel"] == "next":
                url = link["url"]
                another_page = True
        if not another_page:
            return issues
|
2013-11-19 04:31:32 +08:00
|
|
|
|
|
|
|
|
|
|
|
def main(args):
    """Load the issue list (from cache or the live API) and print a report.

    ``args.cache``   -- path of the JSON cache file.
    ``args.refresh`` -- when true, ignore any existing cache and re-fetch.
    """
    # pathlib is stdlib and covers everything the third-party ``py`` path
    # object was used for here (exists / read / write).
    from pathlib import Path

    cachefile = Path(args.cache)
    if not cachefile.exists() or args.refresh:
        issues = get_issues()
        cachefile.write_text(json.dumps(issues))
    else:
        issues = json.loads(cachefile.read_text())

    # Report only open issues, ordered by issue number.
    open_issues = [issue for issue in issues if issue["state"] == "open"]
    open_issues.sort(key=lambda issue: issue["number"])
    report(open_issues)
|
|
|
|
|
2016-03-29 01:51:26 +08:00
|
|
|
|
|
|
|
def _get_kind(issue):
|
|
|
|
labels = [l['name'] for l in issue['labels']]
|
|
|
|
for key in ('bug', 'enhancement', 'proposal'):
|
|
|
|
if key in labels:
|
|
|
|
return key
|
|
|
|
return 'issue'
|
|
|
|
|
|
|
|
|
2013-11-19 04:31:32 +08:00
|
|
|
def report(issues):
    """Print a short per-issue summary, then a total count.

    Each issue prints as a ``----`` separator, a "state kind link" line,
    and the title.  (A block of commented-out body-preview code and the
    unused ``body`` local were removed.)
    """
    for issue in issues:
        title = issue["title"]
        kind = _get_kind(issue)
        status = issue["state"]
        number = issue["number"]
        link = "https://github.com/pytest-dev/pytest/issues/%s/" % number
        print("----")
        print(status, kind, link)
        print(title)
    # Summary once, after the loop -- printing it per issue would emit a
    # running total after every entry.
    print("\n\nFound %s open issues" % len(issues))
|
|
|
|
|
2013-11-19 04:31:32 +08:00
|
|
|
|
|
|
|
if __name__ == "__main__":
    import argparse

    # NOTE(review): the old prog string said "bitbucket", but the script has
    # queried the GitHub API (see issues_url) since the pytest-dev migration.
    parser = argparse.ArgumentParser("process GitHub issues")
    parser.add_argument(
        "--refresh",
        action="store_true",
        help="invalidate cache, refresh issues",
    )
    parser.add_argument(
        "--cache",
        action="store",
        default="issues.json",
        help="cache file",
    )
    args = parser.parse_args()
    main(args)
|
2016-03-29 01:51:26 +08:00
|
|
|
|