forked from cockpit-project/bots
-
Notifications
You must be signed in to change notification settings - Fork 0
/
issue-scan
executable file
·189 lines (154 loc) · 5.94 KB
/
issue-scan
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
#!/usr/bin/env python3
# This file is part of Cockpit.
#
# Copyright (C) 2017 Red Hat, Inc.
#
# Cockpit is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# Cockpit is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Cockpit; If not, see <http://www.gnu.org/licenses/>.
import argparse
import json
import logging
import shlex
import sys
from lib import ALLOWLIST, testmap
from task import distributed_queue, github, labels_of_pull
# Task names this scanner recognizes in issue checklists; checklist
# items whose first word is not in this list are ignored (see output_task).
NAMES = [
    "image-refresh",
]

# RHEL tasks have to be done inside Red Hat network
REDHAT_TASKS = [
    "rhel",
    "redhat"
]

# Don't litter __pycache__ directories when running as a script.
# NOTE(review): set after the imports above, so it only affects imports
# performed later — confirm this ordering is intentional.
sys.dont_write_bytecode = True

logging.basicConfig(level=logging.INFO)

# AMQP publishing is optional: when pika is missing, remember that fact
# so main() can reject --amqp with a clear error instead of crashing.
no_amqp = False
try:
    import pika
except ImportError:
    no_amqp = True
def main():
    """Scan GitHub issues for bot tasks and emit them.

    Every task command produced by scan() is either published to an
    AMQP queue (when --amqp is given) or written to stdout, one command
    per line.

    Returns:
        0 on success (used as the process exit code).
    """
    parser = argparse.ArgumentParser(description="Scan issues for tasks")
    parser.add_argument("-v", "--human-readable", "--verbose", action="store_true", default=False,
                        dest="verbose", help="Print verbose information")
    parser.add_argument('--amqp', default=None,
                        help='The host:port of the AMQP server to publish to (format host:port)')
    parser.add_argument('--issues-data', default=None,
                        help='issues or pull request event GitHub JSON data to evaluate')
    parser.add_argument('--repo', default=None,
                        help='Repository to scan and checkout.')
    opts = parser.parse_args()

    if opts.amqp and no_amqp:
        # The optional dependency is the pika AMQP client (see the
        # guarded import at module level), so name it accurately.
        parser.error("AMQP host:port specified but the pika module is not available")

    results = scan(opts.issues_data, opts.verbose, opts.repo)
    if opts.amqp:
        # Open the AMQP connection once for all results; the original
        # reconnected inside the loop, once per task.
        with distributed_queue.DistributedQueue(opts.amqp, queues=['rhel', 'public']) as q:
            for result in results:
                queue_task(q.channel, result)
    else:
        for result in results:
            sys.stdout.write(result + "\n")
    return 0
def contains_any(string, matches):
    """Return True if any element of *matches* occurs as a substring of *string*."""
    return any(needle in string for needle in matches)
# Map all checkable work items to fixtures
def tasks_for_issues(issues_data, opts_repo):
    """Collect (item, issue, repo) work tuples from GitHub issues.

    When *issues_data* (a GitHub issues/pull_request event as a JSON
    string) is given, only that event's issue is considered, and only
    when it carries the 'bot' label.  Otherwise all open issues of
    *opts_repo* are scanned.

    Issues whose title starts with "WIP", or whose author is not in
    ALLOWLIST, are skipped.  Only the first unchecked checklist item of
    each issue is returned: the bots treat the checklist as ordered, so
    if the first open item is not something they can do, the issue is
    ignored later (in output_task).
    """
    results = []
    issues = []
    if issues_data:
        event = json.loads(issues_data)
        repo = event["repository"]["full_name"]
        # Issue events carry "issue"; pull_request events carry "pull_request".
        issue = event.get("issue") or event.get("pull_request")
        labels = labels_of_pull(issue)
        if 'bot' in labels:
            issues.append(issue)
        api = github.GitHub(repo=repo)
    else:
        api = github.GitHub(repo=opts_repo)
        issues = api.issues(state="open")

    for issue in issues:
        if issue["title"].strip().startswith("WIP"):
            continue

        # A missing user or login yields None (the original defaulted to
        # the nonsensical sentinel {}); either way it fails the
        # ALLOWLIST membership check below.
        login = issue.get("user", {}).get("login")
        if login not in ALLOWLIST:
            continue

        # Only the first unchecked item per issue is considered.
        checklist = github.Checklist(issue["body"])
        for item, checked in checklist.items.items():
            if not checked:
                results.append((item, issue, api.repo))
                break

    return results
def output_task(command, issue, repo, verbose):
    """Turn one checklist item into a runnable shell command string.

    *command* is the checklist item text: its first word must be one of
    NAMES, the remainder is the task context.  Returns the shell command
    (or a short human-readable line when *verbose* is true), or None
    when the item is not a known task or the issue has no number.
    """
    name, unused, context = command.partition(" ")
    if name not in NAMES:
        return None
    number = issue.get("number", None)
    if number is None:
        return None

    context = context.strip()

    # The PRIORITY prefix and the {...} placeholders are filled in by
    # the single .format() call at the end of this function.
    checkout = "PRIORITY={priority:04d} "
    if repo == "cockpit-project/bots":
        # when working on bots run from project root
        cmd = "./{name} --verbose --issue='{issue}' {context}"
    else:
        # for external projects, nothing checks out bots/ subdir for them, so do it here
        cmd = "git clone .. bots && bots/{name} --verbose --issue='{issue}' {context}"

    # `--issues-data` should also be able to receive pull_request events, in that
    # case pull_request won't be present in the object, but commits will be
    if "pull_request" in issue or "commits" in issue:
        checkout += "./make-checkout --verbose --repo {repo} pull/{issue}/head && "
    else:
        checkout += "./make-checkout --verbose --repo {repo} {repo_default_branch} && "

    if verbose:
        # Human-readable summary only; not an executable command line.
        return f"issue-{int(number)} {name} {context} {distributed_queue.MAX_PRIORITY}"
    else:
        if context:
            # Quote the free-form context so it survives the shell.
            context = shlex.quote(context)
        return (checkout + "cd make-checkout-workdir && " + cmd + " ; cd ..").format(
            issue=int(number),
            priority=distributed_queue.MAX_PRIORITY,
            name=name,
            context=context,
            repo=repo,
            repo_default_branch=testmap.get_default_branch(repo),
        )
def queue_task(channel, result):
    """Publish one task command to the appropriate AMQP queue.

    Commands mentioning Red Hat-internal keywords go to the 'rhel'
    queue; everything else goes to 'public'.
    """
    target = 'rhel' if contains_any(result, REDHAT_TASKS) else 'public'
    payload = json.dumps({
        "command": result,
        "type": "issue",
    })
    properties = pika.BasicProperties(priority=distributed_queue.MAX_PRIORITY)
    channel.basic_publish('', target, payload, properties=properties)
    logging.info("Published issue task with command: '%s'", result)
# Default scan behavior run for each task
def scan(issues_data, verbose, opts_repo):
    """Return the list of runnable task commands found in GitHub issues.

    Walks every work item produced by tasks_for_issues() through
    output_task() and keeps the non-None results.

    Note: the original declared ``global issues`` here, referring to a
    global that is never assigned in this function and does not exist at
    module level — dead code, removed.
    """
    results = []
    for command, issue, repo in tasks_for_issues(issues_data, opts_repo):
        result = output_task(command, issue, repo, verbose)
        if result is not None:
            results.append(result)
    return results
# Run the scanner when executed as a script; main()'s return value
# becomes the process exit code.
if __name__ == '__main__':
    sys.exit(main())