-
Notifications
You must be signed in to change notification settings - Fork 38
Expand file tree
/
Copy pathissue-scan
More file actions
executable file
·199 lines (160 loc) · 6.52 KB
/
issue-scan
File metadata and controls
executable file
·199 lines (160 loc) · 6.52 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
#!/usr/bin/env python3
# This file is part of Cockpit.
#
# Copyright (C) 2017 Red Hat, Inc.
#
# Cockpit is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# Cockpit is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Cockpit; If not, see <http://www.gnu.org/licenses/>.
import argparse
import json
import logging
import shlex
import sys
import time
from collections.abc import Collection
# We don't need pika when running in a GitHub workflow; the whole AMQP code path will be obsolete once we
# permanently switch to GitHub Actions
try:
import pika
from lib import distributed_queue
except ImportError:
pika = None
distributed_queue = None # type: ignore[assignment]
from lib import ALLOWLIST, github, testmap
from lib.aio.jsonutil import JsonError, JsonObject, get_dict, get_dictv, get_int, get_nested, get_str
from lib.jobqueue import QueueEntry
# RHEL tasks have to be done inside Red Hat network
# Substrings matched against a job's context (see queue_task) to route it
# to the internal 'rhel' queue instead of the 'public' one.
REDHAT_TASKS = [
    "rhel",
    "redhat"
]

# Module-level logging setup: INFO and above for the whole script.
logging.basicConfig(level=logging.INFO)
def main() -> int:
    """Parse CLI options, scan the given event, then print or queue the result.

    Returns a process exit code: 0 on success (including "nothing to do"),
    1 on malformed event JSON or invalid event structure.
    """
    parser = argparse.ArgumentParser(description="Scan issues for image-refresh tasks")
    parser.add_argument("-v", "--human-readable", "--verbose", action="store_true", default=False,
                        help="Print human readable output")
    parser.add_argument('--amqp', default=None,
                        help='The host:port of the AMQP server to publish to (format host:port)')
    # Exactly one event source must be supplied: inline JSON or a file path.
    source_group = parser.add_mutually_exclusive_group(required=True)
    source_group.add_argument('--issues-data',
                              help='Event JSON data as string')
    source_group.add_argument('--event-data-file',
                              help='Path to event JSON file (e.g., $GITHUB_EVENT_PATH)')
    opts = parser.parse_args()

    try:
        event: JsonObject
        if opts.event_data_file:
            with open(opts.event_data_file) as fp:
                event = json.load(fp)
        else:
            event = json.loads(opts.issues_data)
        entry = scan(event)
    except json.JSONDecodeError as exc:
        sys.stderr.write(f"issue-scan: invalid event JSON: {exc}\n")
        return 1
    except JsonError as exc:
        sys.stderr.write(f"issue-scan: invalid event data: {exc}\n")
        return 1

    # No actionable task found in the event: nothing to print or queue.
    if entry is None:
        return 0

    if opts.amqp:
        # AMQP publishing requires the optional pika import to have succeeded.
        assert pika is not None, "pika module is required for --amqp"
        assert distributed_queue is not None, "distributed_queue module is required for --amqp"
        with distributed_queue.DistributedQueue(opts.amqp, queues=['rhel', 'public']) as dq:
            queue_task(dq, entry)
    elif opts.human_readable:
        print(entry['human'])
    else:
        print(json.dumps(entry))
    return 0
def contains_any(string: str, matches: Collection[str]) -> bool:
    """Return True if any element of *matches* occurs as a substring of *string*."""
    return any(needle in string for needle in matches)
def task_for_issue(event: JsonObject) -> tuple[str, JsonObject, str] | None:
    """Extract the first unchecked task from an issue/PR event.

    Returns a (task text, issue object, repo full name) tuple, or None when
    the event carries nothing the bots should act on.
    """
    with get_nested(event, "repository") as repository:
        repo = get_str(repository, "full_name")

    # The payload may be either an issue event or a pull_request event.
    issue = get_dict(event, "issue", None) or get_dict(event, "pull_request", None)
    if issue is None:
        return None

    # Only issues explicitly labelled 'bot' are meant for us.
    if not any(get_str(label, 'name') == 'bot' for label in get_dictv(issue, 'labels')):
        return None

    # Skip anything marked work-in-progress.
    if get_str(issue, "title").strip().startswith("WIP"):
        return None

    # Only issues filed by allowlisted authors may trigger work.
    author = get_str(get_dict(issue, "user", {}), "login", None)
    if author not in ALLOWLIST:
        return None

    # We only consider the first unchecked item per issue
    #
    # The bots think the list needs to be done in order.
    # If the first item in the checklist is not something
    # the bots can do, then the bots will ignore this issue
    # (below in output_task)
    checklist = github.Checklist(get_str(issue, "body", None))
    for text, done in checklist.items.items():
        if not done:
            return (text, issue, repo)
    return None
def output_task(command: str, issue: JsonObject, repo: str) -> QueueEntry | None:
    """Turn an 'image-refresh <context>' checklist item into a queue entry.

    Returns None for any command other than image-refresh, for repos other
    than */bots, or when the issue has no number.
    """
    name, _, remainder = command.partition(" ")
    if name != "image-refresh" or not repo.endswith("/bots"):
        return None

    number = get_int(issue, "number", None)
    if number is None:
        return None
    number = int(number)

    context = shlex.quote(remainder.strip())
    api = github.GitHub(repo=repo)

    # `--issues-data` should also be able to receive pull_request events, in that
    # case pull_request won't be present in the object, but commits will be
    if "pull_request" in issue or "commits" in issue:
        # PR event: refresh against the PR head commit.
        ref = f"pull/{number}/head"
        sha = api.get(f"git/ref/{ref}")["object"]["sha"]
        pull = number
    else:
        # Plain issue: refresh against the repo's default branch head.
        ref = testmap.get_default_branch(repo)
        sha = api.get(f"git/ref/heads/{ref}")["object"]["sha"]
        pull = None

    stamp = time.strftime('%Y%m%d-%H%M%S')
    slug = f'{name}-{context}-{sha[:8]}-{stamp}'
    return {
        'job': {
            'repo': repo,
            'sha': sha,
            'pull': pull,
            'slug': slug,
            'context': f'{name}/{context}',
            'command': (f'./{name}', '--verbose', f'--issue={number}', context),
            'secrets': ('github-token', 'image-upload'),
        },
        'human': f"issue-{number} {name} {context} {ref}",
    }
def queue_task(dq: 'distributed_queue.DistributedQueue', body: QueueEntry) -> None:
    """Publish *body* as a maximum-priority task on the appropriate AMQP queue."""
    assert pika is not None
    assert distributed_queue is not None
    # Red Hat internal contexts must run inside the Red Hat network.
    if contains_any(body['job']['context'], REDHAT_TASKS):
        queue = 'rhel'
    else:
        queue = 'public'
    dq.channel.basic_publish('', queue, json.dumps(body),
                             properties=pika.BasicProperties(priority=distributed_queue.MAX_PRIORITY))
    logging.info("Published issue task: %r", body)
def scan(event: JsonObject) -> QueueEntry | None:
    """Scan an issue/PR event for a task and return a job entry if found."""
    found = task_for_issue(event)
    if found is not None:
        command, issue, repo = found
        return output_task(command, issue, repo)
    return None
# Script entry point: exit with main()'s return code.
if __name__ == '__main__':
    sys.exit(main())