cockpit/bots/issue-scan

#!/usr/bin/env python3
# This file is part of Cockpit.
#
# Copyright (C) 2017 Red Hat, Inc.
#
# Cockpit is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# Cockpit is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Cockpit; If not, see <http://www.gnu.org/licenses/>.
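# Task commands that issue-scan knows how to turn into bot invocations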
NAMES = [
"example-task",
"po-refresh",
"image-refresh",
"npm-update",
"naughty-prune",
"learn-tests",
"tests-data",
"flakes-refresh",
]
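# Tasks that need /dev/kvm access (they build or boot VM images)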
KVM_TASKS = [
"image-refresh"
]
# RHEL tasks have to be done inside Red Hat network
REDHAT_TASKS = [
"rhel",
"redhat"
]
# Windows tasks have to be done by a human
WINDOWS_TASKS = [
"windows"
]
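# Tasks whose context points at a service inside a Kubernetes cluster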
KUBERNETES_TASKS = [
".svc.cluster.local"
]
# Credentials for working on above contexts
REDHAT_CREDS = "~/.rhel/login"
import argparse
import pipes
import os
import sys
import json
sys.dont_write_bytecode = True
from task import github, redhat_network, distributed_queue, labels_of_pull
BOTS = os.path.normpath(os.path.dirname(__file__))
BASE = os.path.normpath(os.path.join(BOTS, ".."))
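# pika is optional; without it the --amqp option cannot be used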
no_amqp = False
try:
import pika
except ImportError:
no_amqp = True
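# Entry point: scan for actionable tasks and either print the resulting
# commands or publish them to an AMQP queue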
def main():
parser = argparse.ArgumentParser(description="Scan issues for tasks")
parser.add_argument("-v", "--human-readable", "--verbose", action="store_true", default=False,
dest="verbose", help="Print verbose information")
parser.add_argument('--amqp', default=None,
help='The host:port of the AMQP server to publish to (format host:port)')
parser.add_argument('--issues-data', default=None,
help='issues or pull request event GitHub JSON data to evaluate')
opts = parser.parse_args()
if opts.amqp and no_amqp:
parser.error("AMQP host:port specified but the 'pika' module is not available")
kvm = os.access("/dev/kvm", os.R_OK | os.W_OK)
if not kvm:
sys.stderr.write("issue-scan: No /dev/kvm access, not creating images here\n")
# Figure out whether we're running inside Kubernetes; the service account
# namespace file only exists in a cluster
try:
with open("/var/run/secrets/kubernetes.io/serviceaccount/namespace", "r") as f:
namespace = f.read().strip()
except IOError:
namespace = None
for result in scan(opts.issues_data, opts.verbose):
if opts.amqp:
with distributed_queue.DistributedQueue(opts.amqp, queues=['rhel', 'public']) as q:
queue_task(q.channel, result)
continue
if not kvm and contains_any(result, KVM_TASKS):
sys.stderr.write("issue-scan: skipping (no kvm): {0}\n".format(result))
continue
elif not redhat_network() and contains_any(result, REDHAT_TASKS):
sys.stderr.write("issue-scan: skipping (outside redhat): {0}\n".format(result))
continue
elif contains_any(result, WINDOWS_TASKS):
sys.stderr.write("issue-scan: skipping (windows task): {0}\n".format(result))
continue
elif contains_any(result, KUBERNETES_TASKS):
if not namespace:
sys.stderr.write("issue-scan: skipping (not in kubernetes): {0}\n".format(result))
continue
url = namespace + KUBERNETES_TASKS[0]
if url not in result:
sys.stderr.write("issue-scan: skipping (not same namespace): {0}\n".format(result))
continue
sys.stdout.write(result + "\n")
return 0
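# Return True if any of the given substrings occurs in string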
def contains_any(string, matches):
for match in matches:
if match in string:
return True
return False
# Collect the first unchecked checklist item of each eligible open issue
def tasks_for_issues(issues_data):
results = [ ]
issues = [ ]
if issues_data:
event = json.loads(issues_data)
repo = event["repository"]["full_name"]
issue = event.get("issue") or event.get("pull_request")
labels = labels_of_pull(issue)
if 'bot' in labels:
issues.append(issue)
api = github.GitHub(repo=repo)
else:
api = github.GitHub()
issues = api.issues(state="open")
whitelist = api.whitelist()
for issue in issues:
if issue["title"].strip().startswith("WIP"):
continue
login = issue.get("user", { }).get("login", { })
if login not in whitelist:
continue
#
# We only consider the first unchecked item per issue
#
# The bots think the list needs to be done in order.
# If the first item in the checklist is not something
# the bots can do, then the bots will ignore this issue
# (below in output_task)
#
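# For example, an issue body with a checklist like
#   - [x] image-refresh fedora-30
#   - [ ] image-refresh rhel-8
#   - [ ] image-refresh debian-stable
# would only produce the "image-refresh rhel-8" item on this scan
# (the image names are illustrative).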
checklist = github.Checklist(issue["body"])
for item, checked in checklist.items.items():
if not checked:
results.append((item, issue, api.repo))
break
return results
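# Translate one checklist item into a shell command (or, with --verbose,
# a short human-readable summary line); returns None for items the bots
# do not know how to handle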
def output_task(command, issue, repo, verbose):
name, unused, context = command.partition(" ")
if name not in NAMES:
return None
number = issue.get("number", None)
if number is None:
return None
context = context.strip()
checkout = "PRIORITY={priority:04d} "
cmd = "bots/{name} --verbose --issue='{issue}' {context}"
# `--issues-data` may also carry pull_request events; those objects have no
# "pull_request" key, but they do have "commits"
if "pull_request" in issue or "commits" in issue:
checkout += "bots/make-checkout --verbose --repo {repo} pull/{issue}/head && "
else:
checkout += "bots/make-checkout --verbose --repo {repo} master && "
if verbose:
return "issue-{issue} {name} {context} {priority}".format(
issue=int(number),
priority=distributed_queue.MAX_PRIORITY,
name=name,
context=context
)
else:
if context:
context = pipes.quote(context)
return (checkout + "cd bots/make-checkout-workdir && " + cmd + " ; cd ../..").format(
issue=int(number),
priority=distributed_queue.MAX_PRIORITY,
name=name,
context=context,
repo=repo,
)
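# Publish a scanned command to AMQP: Red Hat internal tasks go to the
# 'rhel' queue, everything else to 'public'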
def queue_task(channel, result):
body = {
"command": result,
"type": "issue",
}
queue = 'rhel' if contains_any(result, REDHAT_TASKS) else 'public'
channel.basic_publish('', queue, json.dumps(body), properties=pika.BasicProperties(priority=distributed_queue.MAX_PRIORITY))
# Scan the given event data (or all open issues) and return runnable commands
def scan(issues_data, verbose):
results = [ ]
# Turn each actionable checklist item into a command
for (command, issue, repo) in tasks_for_issues(issues_data):
result = output_task(command, issue, repo, verbose)
if result is not None:
results.append(result)
return results
if __name__ == '__main__':
sys.exit(main())