"""Print a report of the open issues in the pytest GitHub repository.

Issues are fetched from the GitHub issues API and cached locally as JSON, so
repeated runs do not hit the API again (see the --refresh and --cache options).
"""
import json
from pathlib import Path

import requests

issues_url = "https://api.github.com/repos/pytest-dev/pytest/issues"


def get_issues():
    """Fetch all issues (open and closed), following the API pagination."""
    issues = []
    url = issues_url
    while True:
        get_data = {"state": "all"}
        r = requests.get(url, params=get_data)
        data = r.json()
        if r.status_code == 403:
            # API request limit exceeded
            print(data["message"])
            exit(1)
        issues.extend(data)

        # Look for the next page in the Link response header.
        links = requests.utils.parse_header_links(r.headers["Link"])
        another_page = False
        for link in links:
            if link["rel"] == "next":
                url = link["url"]
                another_page = True
        if not another_page:
            return issues
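

# Note (not part of the original script): unauthenticated GitHub API requests
# have a low rate limit, which is what trips the 403 branch above. A minimal
# sketch of an authenticated request, assuming a personal access token exported
# as GITHUB_TOKEN (both the variable name and the approach are illustrative
# assumptions, not something this script does):
#
#   import os
#   token = os.environ.get("GITHUB_TOKEN")
#   headers = {"Authorization": "token " + token} if token else {}
#   r = requests.get(url, params=get_data, headers=headers)

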
def main(args):
    cachefile = Path(args.cache)
    if not cachefile.exists() or args.refresh:
        issues = get_issues()
        cachefile.write_text(json.dumps(issues), "utf-8")
    else:
        issues = json.loads(cachefile.read_text("utf-8"))

    open_issues = [x for x in issues if x["state"] == "open"]
    open_issues.sort(key=lambda x: x["number"])
    report(open_issues)


def _get_kind(issue):
    """Return the issue kind: 'bug', 'enhancement', 'proposal', or 'issue' if no label matches."""
    labels = [label["name"] for label in issue["labels"]]
    for key in ("bug", "enhancement", "proposal"):
        if key in labels:
            return key
    return "issue"


def report(issues):
    for issue in issues:
        title = issue["title"]
        # body = issue["body"]
        kind = _get_kind(issue)
        status = issue["state"]
        number = issue["number"]
        link = "https://github.com/pytest-dev/pytest/issues/%s/" % number
        print("----")
        print(status, kind, link)
        print(title)
        # print()
        # lines = body.split("\n")
        # print("\n".join(lines[:3]))
        # if len(lines) > 3 or len(body) > 240:
        #     print("...")
    print("\n\nFound %s open issues" % len(issues))


if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser(description="process GitHub issues")
    parser.add_argument(
        "--refresh", action="store_true", help="invalidate cache, refresh issues"
    )
    parser.add_argument(
        "--cache", action="store", default="issues.json", help="cache file"
    )
    args = parser.parse_args()
    main(args)
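

# Example invocations, assuming this script is saved as get_issues.py (the
# filename itself is an assumption for illustration):
#
#   python get_issues.py                  # use the issues.json cache, fetching it if missing
#   python get_issues.py --refresh        # ignore the cache and re-fetch from GitHub
#   python get_issues.py --cache my.json  # read/write an alternative cache file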