#!/usr/bin/env python3
# This file is part of Cockpit.
#
# Copyright (C) 2017 Red Hat, Inc.
#
# Cockpit is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# Cockpit is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Cockpit; If not, see <http://www.gnu.org/licenses/>.

import argparse
import json
import shlex
import sys

sys.dont_write_bytecode = True

from task import distributed_queue, github, labels_of_pull, redhat_network

# AMQP publishing is optional; fall back to direct output when pika is missing
no_amqp = False
try:
    import pika
except ImportError:
    no_amqp = True

# Task scripts that the bots know how to run
NAMES = [
    "example-task",
    "po-refresh",
    "image-refresh",
    "npm-update",
    "naughty-prune",
    "learn-tests",
    "tests-data",
    "flakes-refresh",
]

# RHEL tasks have to be done inside the Red Hat network
REDHAT_TASKS = [
    "rhel",
    "redhat",
]

# Windows tasks have to be done by a human
WINDOWS_TASKS = [
    "windows",
]

# Kubernetes-bound tasks can only run in the pod's own namespace
KUBERNETES_TASKS = [
    ".svc.cluster.local",
]
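
# For illustration (hypothetical names): a task command mentioning
# "db.ci.svc.cluster.local" is only emitted when this scanner itself runs in
# the "ci" namespace, because main() below requires
# namespace + ".svc.cluster.local" to appear in the command.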


def main():
    parser = argparse.ArgumentParser(description="Scan issues for tasks")
    parser.add_argument("-v", "--human-readable", "--verbose", action="store_true", default=False,
                        dest="verbose", help="Print verbose information")
    parser.add_argument('--amqp', default=None,
                        help='The host:port of the AMQP server to publish to')
    parser.add_argument('--issues-data', default=None,
                        help='issue or pull request event GitHub JSON data to evaluate')
    opts = parser.parse_args()

    if opts.amqp and no_amqp:
        parser.error("AMQP host:port specified, but the pika module is not available")

    # Figure out if we're running in a Kubernetes pod; the service account
    # namespace file only exists inside one
    try:
        with open("/var/run/secrets/kubernetes.io/serviceaccount/namespace", "r") as f:
            namespace = f.read().strip()
    except IOError:
        namespace = None

    for result in scan(opts.issues_data, opts.verbose):
        if opts.amqp:
            with distributed_queue.DistributedQueue(opts.amqp, queues=['rhel', 'public']) as q:
                queue_task(q.channel, result)
            continue
        elif not redhat_network() and contains_any(result, REDHAT_TASKS):
            sys.stderr.write("issue-scan: skipping (outside redhat): {0}\n".format(result))
            continue
        elif contains_any(result, WINDOWS_TASKS):
            sys.stderr.write("issue-scan: skipping (windows task): {0}\n".format(result))
            continue
        elif contains_any(result, KUBERNETES_TASKS):
            if not namespace:
                sys.stderr.write("issue-scan: skipping (not in kubernetes): {0}\n".format(result))
                continue
            url = namespace + KUBERNETES_TASKS[0]
            if url not in result:
                sys.stderr.write("issue-scan: skipping (not same namespace): {0}\n".format(result))
                continue
        sys.stdout.write(result + "\n")

    return 0
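
# Illustrative invocations (the host:port value is hypothetical):
#   ./issue-scan                          # write runnable task commands to stdout
#   ./issue-scan --amqp localhost:5671    # publish each task to the 'rhel' or 'public' queue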


def contains_any(string, matches):
    return any(match in string for match in matches)


# Map all checkable work items to tasks
def tasks_for_issues(issues_data):
    results = []
    issues = []

    if issues_data:
        event = json.loads(issues_data)
        repo = event["repository"]["full_name"]
        issue = event.get("issue") or event.get("pull_request")
        labels = labels_of_pull(issue)
        if 'bot' in labels:
            issues.append(issue)
        api = github.GitHub(repo=repo)
    else:
        api = github.GitHub()
        issues = api.issues(state="open")
    whitelist = api.whitelist()

    for issue in issues:
        if issue["title"].strip().startswith("WIP"):
            continue

        login = issue.get("user", {}).get("login")
        if login not in whitelist:
            continue

        # We only consider the first unchecked item per issue.
        #
        # The bots assume the list needs to be done in order: if the first
        # unchecked item is not something the bots can do, the issue is
        # ignored (below, in output_task). See the sketch after this
        # function for the data this consumes.
        checklist = github.Checklist(issue["body"])
        for item, checked in checklist.items.items():
            if not checked:
                results.append((item, issue, api.repo))
                break

    return results
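
# Sketch of what the loop above consumes (assuming github.Checklist parses
# GitHub task lists into an item -> checked mapping): for an issue body of
#
#   - [x] npm-update
#   - [ ] learn-tests
#
# checklist.items would be {"npm-update": True, "learn-tests": False}, and
# only ("learn-tests", issue, repo) gets appended.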


def output_task(command, issue, repo, verbose):
    name, _unused, context = command.partition(" ")
    if name not in NAMES:
        return None
    number = issue.get("number", None)
    if number is None:
        return None

    context = context.strip()
    checkout = "PRIORITY={priority:04d} "
    if repo == "cockpit-project/bots":
        # when working on bots itself, run straight from the project root
        cmd = "./{name} --verbose --issue='{issue}' {context}"
    else:
        # for external projects nothing checks out the bots/ subdirectory for
        # them, so do it here
        cmd = "git clone .. bots && bots/{name} --verbose --issue='{issue}' {context}"

    # `--issues-data` can also receive pull_request events; those objects have
    # no "pull_request" key, but do have "commits"
    if "pull_request" in issue or "commits" in issue:
        checkout += "./make-checkout --verbose --repo {repo} pull/{issue}/head && "
    else:
        checkout += "./make-checkout --verbose --repo {repo} master && "

    if verbose:
        return "issue-{issue} {name} {context} {priority}".format(
            issue=int(number),
            priority=distributed_queue.MAX_PRIORITY,
            name=name,
            context=context
        )
    else:
        if context:
            context = shlex.quote(context)
        return (checkout + "cd make-checkout-workdir && " + cmd + " ; cd ..").format(
            issue=int(number),
            priority=distributed_queue.MAX_PRIORITY,
            name=name,
            context=context,
            repo=repo,
        )
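
# Putting the pieces together (illustrative; the zero-padded priority value
# comes from distributed_queue.MAX_PRIORITY): issue #123 in
# cockpit-project/bots whose first unchecked item is "image-refresh fedora-31"
# yields roughly
#
#   PRIORITY=000N ./make-checkout --verbose --repo cockpit-project/bots master &&
#       cd make-checkout-workdir && ./image-refresh --verbose --issue='123' fedora-31 ; cd ..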


def queue_task(channel, result):
    body = {
        "command": result,
        "type": "issue",
    }
    queue = 'rhel' if contains_any(result, REDHAT_TASKS) else 'public'
    channel.basic_publish('', queue, json.dumps(body),
                          properties=pika.BasicProperties(priority=distributed_queue.MAX_PRIORITY))
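
# The published message body is plain JSON, e.g.
#   {"command": "PRIORITY=... ./make-checkout ...", "type": "issue"}
# routed to the 'rhel' queue for Red Hat-only tasks and 'public' otherwise.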


# Default scan behavior, run for each task
def scan(issues_data, verbose):
    results = []

    # Go through each candidate work item
    for (command, issue, repo) in tasks_for_issues(issues_data):
        result = output_task(command, issue, repo, verbose)
        if result is not None:
            results.append(result)

    return results


if __name__ == '__main__':
    sys.exit(main())