#!/usr/bin/env python3
#
# should_build.py
#
# Helper for GitHub Actions, intended to be run in the cross-compilation
# environment
#
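# Example usage (sketch only; the actual project names live in packages.toml
# and are not shown here):
#
#   ./should_build.py <project> [--config packages.toml] [--url INDEX_URL | -f index.html]
#
# On exit the script emits a GitHub Actions output named "should_build" that
# is "true" when no already-published wheel matches the configured version
# and the current interpreter's tags.
#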

import argparse
from html.parser import HTMLParser
import posixpath
import subprocess
import sys
import typing
from urllib.parse import unquote_plus
from urllib.request import urlopen

from packaging.tags import parse_tag, sys_tags
import toml

# https://www.mschweighauser.com/fast-url-parsing-with-python/
class LinkFinder(HTMLParser):
    @classmethod
    def extract_links(cls, content: str) -> typing.List[str]:
        parser = cls()
        parser.feed(content)
        return parser.links

    def __init__(self):
        super().__init__()
        self.links = []

    def handle_starttag(self, tag, attrs):
        if tag == "a":
            for attr in attrs:
                if "href" in attr[0]:
                    self.links.append(attr[1])
                    return
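

# Illustrative example (hypothetical filename, not taken from any real index):
# feeding a PEP 503 style index page such as
#
#   <a href="someproj-1.2.3-cp39-cp39-linux_x86_64.whl">someproj</a>
#
# to LinkFinder.extract_links() yields
# ["someproj-1.2.3-cp39-cp39-linux_x86_64.whl"].
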
def find_wheel_url(project: str, version: str, content: str) -> bool:
    project = project.replace("-", "_")
    tags = set(sys_tags())

    links = LinkFinder.extract_links(content)

    not_matched = []
    found = False

    for link in links:
        link = posixpath.basename(unquote_plus(link))
        if not link.endswith(".whl"):
            continue

        # Wheel filenames follow PEP 427: name-version-<tags>.whl, so strip
        # ".whl" and split off the first two components
        wproject, wversion, wtags = link[:-4].split("-", 2)
        if wproject.lower() != project.lower():
            continue

        # Add to list so we can print it at the end if nothing matches
        not_matched.append(link)

        if wversion != version:
            continue

        for wtag in parse_tag(wtags):
            if wtag in tags:
                print("Found matching wheel", link)
                return True

    if not found:
        print("Did not find matching wheels in:")
        for link in not_matched:
            print("-", link)

    return False
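

# Note: packaging.tags.parse_tag() expands a compressed tag set such as
# "cp39-cp39-manylinux_2_35_x86_64" into individual Tag objects, and
# sys_tags() yields every tag the running interpreter supports, so the
# membership test in find_wheel_url() amounts to "could pip install this
# wheel in the current (cross-compilation) environment?".
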
def get_find_links() -> typing.Optional[str]:
    """
    Retrieves the --find-links setting from the pip configuration, if set
    """
    content = subprocess.check_output(
        [sys.executable, "-m", "pip", "--disable-pip-version-check", "config", "list"],
        encoding="utf-8",
    )
    for line in content.splitlines():
        s = line.split("global.find-links=", 1)
        if len(s) == 2:
            return s[1].strip("'")
    return None
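

# For reference, the matching line of "pip config list" output looks roughly
# like this (URL is illustrative only):
#
#   global.find-links='https://example.com/wheels/'
#
# which is why the surrounding single quotes are stripped above.
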
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("project")
    parser.add_argument("--config", default="packages.toml")

    group = parser.add_mutually_exclusive_group()
    group.add_argument("--url", default=None)
    group.add_argument("-f", "--file", default=None)

    args = parser.parse_args()

    with open(args.config) as fp:
        cfg = toml.load(fp)

    try:
        version = cfg["packages"][args.project]["version"]
    except KeyError:
        parser.error(f"{args.project} not found in {args.config}")

    if args.file:
        with open(args.file) as fp:
            content = fp.read()
    else:
        url = args.url
        if not url:
            url = get_find_links()
        if not url:
            parser.error("URL to parse must be specified!")

        print("Checking", url)
        with urlopen(url) as f:
            content = f.read().decode("utf-8")

    found = find_wheel_url(args.project, version, content)

    # Sets the output variable for use in GitHub Actions (this is the legacy
    # ::set-output workflow command syntax)
    print(f"::set-output name=should_build::{str(not found).lower()}")