2020-11-28 19:17:48 +01:00
|
|
|
import json
|
2020-05-23 21:33:32 +02:00
|
|
|
import os.path
|
2020-03-24 15:26:15 +01:00
|
|
|
import requests
|
2020-07-13 19:29:20 +02:00
|
|
|
import yaml
|
2020-03-31 21:08:00 +02:00
|
|
|
from abc import ABC
|
2020-03-31 22:09:33 +02:00
|
|
|
from flask import url_for
|
2022-04-11 12:16:21 +02:00
|
|
|
from markupsafe import Markup, escape
|
2020-12-03 14:33:47 +01:00
|
|
|
from srht.api import ensure_webhooks, encrypt_request_authorization, get_results
|
2020-08-25 20:02:07 +02:00
|
|
|
from srht.markdown import markdown, sanitize
|
2022-02-04 15:41:37 +01:00
|
|
|
from srht.config import get_origin, cfg
|
2020-03-24 15:26:15 +01:00
|
|
|
|
2024-02-07 20:40:33 +01:00
|
|
|
# Web origins of the peer sr.ht services this hub talks to. Each is None when
# the corresponding service is not configured for this installation.
_gitsrht = get_origin("git.sr.ht", default=None)
# The API origin may differ from the web origin (e.g. an internal address);
# fall back to the web origin when no api-origin is configured.
_gitsrht_api = cfg("git.sr.ht", "api-origin", default=None) or _gitsrht
_hgsrht = get_origin("hg.sr.ht", default=None)
_hgsrht_api = cfg("hg.sr.ht", "api-origin", default=None) or _hgsrht
_listsrht = get_origin("lists.sr.ht", default=None)
_listsrht_api = cfg("lists.sr.ht", "api-origin", default=None) or _listsrht
_todosrht = get_origin("todo.sr.ht", default=None)
_todosrht_api = cfg("todo.sr.ht", "api-origin", default=None) or _todosrht
_buildsrht = get_origin("builds.sr.ht", default=None)
_buildsrht_api = cfg("builds.sr.ht", "api-origin", default=None) or _buildsrht

# hub.sr.ht's own origin; required (no default), used to build webhook URLs.
origin = get_origin("hub.sr.ht")

# Candidate README filenames, probed in order of preference.
readme_names = ["README.md", "README.markdown", "README"]
|
|
|
|
|
|
|
|
def format_readme(content, filename="", link_prefix=None):
    """Render raw README *content* to safe markup.

    Files with a Markdown extension are rendered through the sr.ht
    markdown pipeline (relative links rewritten with *link_prefix*);
    any other non-empty content is shown escaped inside a <pre> block;
    empty content yields empty markup.
    """
    markdown_exts = ['.md', '.markdown']
    # Only the extension matters for format detection; the basename is unused.
    _, ext = os.path.splitext(filename)
    if ext in markdown_exts:
        html = markdown(content,
            link_prefix=link_prefix)
    elif content:
        html = f"<pre>{escape(content)}</pre>"
    else:
        html = ""
    return Markup(html)
|
|
|
|
|
2020-08-25 20:02:07 +02:00
|
|
|
def try_html_readme(session, prefix, user, repo_name):
    """Fetch a pre-rendered HTML README override from a service's REST API.

    Returns sanitized markup on success, None when the service has no
    override for this repo (404), and raises on any other response.
    """
    url = f"{prefix}/api/repos/{repo_name}/readme"
    resp = session.get(url, headers=encrypt_request_authorization(user))
    if resp.status_code == 404:
        return None
    if resp.status_code != 200:
        raise Exception(resp.text)
    return Markup(sanitize(resp.text))
|
|
|
|
|
2020-03-31 21:08:00 +02:00
|
|
|
class SrhtService(ABC):
    """Base class for clients of peer sr.ht services.

    Holds a shared requests session and provides authorized JSON
    POST/PUT helpers with optional validation-error collection.
    """

    def __init__(self):
        self.session = requests.Session()

    def _request(self, method, user, valid, url, payload):
        """Issue an authorized JSON request and decode the response.

        *method* is a bound session method (post/put). On a 400
        response, upstream validation errors are recorded on *valid*
        (when given) and None is returned; any other non-2xx response
        raises. Returns the decoded JSON body on success.
        """
        r = method(url,
                headers=encrypt_request_authorization(user),
                json=payload)
        if r.status_code == 400:
            if valid:
                for error in r.json()["errors"]:
                    valid.error(error["reason"], field=error.get("field"))
            return None
        elif r.status_code not in [200, 201]:
            raise Exception(r.text)
        return r.json()

    def post(self, user, valid, url, payload):
        """POST *payload* as JSON to *url* on behalf of *user*."""
        return self._request(self.session.post, user, valid, url, payload)

    def put(self, user, valid, url, payload):
        """PUT *payload* as JSON to *url* on behalf of *user*."""
        return self._request(self.session.put, user, valid, url, payload)
|
|
|
|
|
2020-03-31 21:08:00 +02:00
|
|
|
class GitService(SrhtService):
    """Client for git.sr.ht: repositories, READMEs, build manifests,
    commit logs, and webhook management."""

    def get_repos(self, user):
        """Return all of *user*'s repositories (paginated REST endpoint)."""
        return get_results(f"{_gitsrht}/api/repos", user)

    def get_repo(self, user, repo_name):
        """Fetch a single repository by name; raises on any error response."""
        r = self.session.get(f"{_gitsrht}/api/repos/{repo_name}",
                headers=encrypt_request_authorization(user))
        if r.status_code != 200:
            raise Exception(r.text)
        return r.json()

    def get_readme(self, user, repo_name, repo_url):
        """Render the repository README as safe markup.

        Preference order: an HTML override stored by git.sr.ht, then
        README.md / README.markdown rendered as Markdown, then a plain
        README shown escaped in <pre>. Returns None when the repository
        has no README at all. Raises when git.sr.ht reports no such
        repository.
        """
        readme_query = """
        query Readme($username: String!, $repoName: String!) {
            user(username: $username) {
                repository(name: $repoName) {
                    html: readme
                    md: path(path: "README.md") { ...textData }
                    markdown: path(path: "README.markdown") { ...textData }
                    plaintext: path(path: "README") { ...textData }
                }
            }
        }

        fragment textData on TreeEntry {
            object {
                ... on TextBlob {
                    text
                }
            }
        }
        """
        r = self.post(user, None, f"{_gitsrht_api}/query", {
            "query": readme_query,
            "variables": {
                "username": user.username,
                "repoName": repo_name,
            },
        })
        if not r["data"]["user"]["repository"]:
            raise Exception("git.sr.ht returned no repository: " +
                json.dumps(r, indent=1))
        repo = r["data"]["user"]["repository"]

        # An explicit HTML README override takes precedence.
        content = repo["html"]
        if content:
            return Markup(sanitize(content))

        content = repo["md"] or repo["markdown"]
        if content:
            blob_prefix = repo_url + "/blob/HEAD/"
            rendered_prefix = repo_url + "/tree/HEAD/"
            html = markdown(content["object"]["text"],
                    link_prefix=[rendered_prefix, blob_prefix])
            return Markup(html)

        content = repo["plaintext"]
        if content:
            content = content["object"]["text"]
            return Markup(f"<pre>{escape(content)}</pre>")

        return None

    def get_manifests(self, user, repo_name):
        """Collect build manifests from the repository.

        Returns {filename: manifest text} from .builds/, falling back
        to a single .build.yml; None when the repo has neither.
        """
        manifests_query = """
        query Manifests($username: String!, $repoName: String!) {
            user(username: $username) {
                repository(name: $repoName) {
                    multiple: path(path:".builds") {
                        object {
                            ... on Tree {
                                entries {
                                    results {
                                        name
                                        object { ... on TextBlob { text } }
                                    }
                                }
                            }
                        }
                    },
                    single: path(path:".build.yml") {
                        object {
                            ... on TextBlob { text }
                        }
                    }
                }
            }
        }
        """
        r = self.post(user, None, f"{_gitsrht_api}/query", {
            "query": manifests_query,
            "variables": {
                "username": user.username,
                "repoName": repo_name,
            },
        })
        if not r["data"]["user"]["repository"]:
            raise Exception(f"git.sr.ht did not find repo {repo_name} (requesting on behalf of {user.username})\n" +
                json.dumps(r, indent=1))
        manifests = dict()
        repository = r["data"]["user"]["repository"]
        if repository["multiple"]:
            for ent in repository["multiple"]["object"]["entries"]["results"]:
                # Non-blob entries (e.g. subdirectories) have no text.
                if not ent["object"]:
                    continue
                manifests[ent["name"]] = ent["object"]["text"]
        elif repository["single"]:
            manifests[".build.yml"] = repository["single"]["object"]["text"]
        else:
            return None
        return manifests

    def log(self, user, repo, old, new):
        """Return commits reachable from *new*, stopping before *old*.

        NOTE(review): only the first page of log results is examined,
        so a push larger than one page is truncated.
        """
        query = """
        query Log($username: String!, $repo: String!, $from: String!) {
            user(username: $username) {
                repository(name: $repo) {
                    log(from: $from) {
                        results {
                            id
                            message
                            author {
                                name
                            }
                        }
                    }
                }
            }
        }
        """
        r = self.post(user, None, f"{_gitsrht_api}/query", {
            "query": query,
            "variables": {
                "username": repo.owner.username,
                "repo": repo.name,
                "from": new,
            }
        })
        commits = []
        for c in r["data"]["user"]["repository"]["log"]["results"]:
            if c["id"] == old:
                break
            commits.append(c)
        return commits

    def create_repo(self, user, valid, visibility):
        """Create a repository from validated form input.

        Returns the new repo dict (visibility lowercased), or None with
        errors recorded on *valid*.
        """
        query = """
        mutation CreateRepo(
                $name: String!,
                $description: String,
                $visibility: Visibility!) {
            createRepository(name: $name,
                    description: $description,
                    visibility: $visibility) {
                id, name, description, visibility
            }
        }
        """
        name = valid.require("name")
        description = valid.require("description")
        if not valid.ok:
            return None
        r = self.post(user, None, f"{_gitsrht_api}/query", {
            "query": query,
            "variables": {
                "name": name,
                "visibility": visibility.value.upper(),
                "description": description,
            }
        })
        if not r["data"] or not r["data"]["createRepository"]:
            for error in r["errors"]:
                valid.error(error["message"])
            return None
        repo = r["data"]["createRepository"]
        # Normalize the GraphQL enum back to lowercase for callers.
        repo["visibility"] = repo["visibility"].lower()
        return repo

    def delete_repo(self, user, repo_id):
        """Delete a repository by numeric id."""
        query = """
        mutation DeleteRepo($id: Int!) {
            deleteRepository(id: $id) { id }
        }
        """
        self.post(user, None, f"{_gitsrht_api}/query", {
            "query": query,
            "variables": {
                "id": repo_id,
            },
        })

    def ensure_user_webhooks(self, user):
        """Subscribe this hub to *user*'s repo create/delete events."""
        config = {
            origin + url_for("webhooks.git_user", user_id=user.id):
                ["repo:update", "repo:delete"],
        }
        ensure_webhooks(user, f"{_gitsrht}/api/user/webhooks", config)

    def unensure_user_webhooks(self, user):
        """Best-effort removal of this hub's user-level webhooks."""
        config = { }
        try:
            ensure_webhooks(user, f"{_gitsrht}/api/user/webhooks", config)
        except Exception:
            pass # nbd, upstream was probably deleted

    def ensure_repo_webhooks(self, repo):
        """Subscribe this hub to *repo*'s post-update events."""
        config = {
            origin + url_for("webhooks.git_repo", repo_id=repo.id):
                ["repo:post-update"],
        }
        owner = repo.owner
        url = f"{_gitsrht}/api/{owner.canonical_name}/repos/{repo.name}/webhooks"
        ensure_webhooks(owner, url, config)

    def unensure_repo_webhooks(self, repo):
        """Best-effort removal of this hub's repo-level webhooks."""
        config = { }
        owner = repo.owner
        url = f"{_gitsrht}/api/{owner.canonical_name}/repos/{repo.name}/webhooks"
        try:
            ensure_webhooks(owner, url, config)
        except Exception:
            pass # nbd, upstream was presumably deleted
|
2020-04-23 17:32:15 +02:00
|
|
|
|
2020-04-06 19:21:13 +02:00
|
|
|
class HgService(SrhtService):
    """Client for hg.sr.ht: repositories, READMEs, and user webhooks."""

    def get_repos(self, user):
        """Return all of *user*'s repositories (paginated REST endpoint)."""
        return get_results(f"{_hgsrht}/api/repos", user)

    def get_repo(self, user, repo_name):
        """Fetch a single repository by name; raises on any error response."""
        r = self.session.get(f"{_hgsrht}/api/repos/{repo_name}",
                headers=encrypt_request_authorization(user))
        if r.status_code != 200:
            raise Exception(r.text)
        return r.json()

    def get_readme(self, user, repo_name, repo_url):
        """Render the repository README as safe markup.

        Prefers an HTML override served by hg.sr.ht, then probes the
        candidate README filenames in order; falls back to empty markup.
        """
        # TODO: Cache?
        override = try_html_readme(self.session, _hgsrht, user, repo_name)
        if override is not None:
            return override
        blob_prefix = repo_url + "/raw/"
        rendered_prefix = repo_url + "/browse/"
        for readme_name in readme_names:
            r = self.session.get(f"{_hgsrht}/api/repos/{repo_name}/raw/{readme_name}",
                    headers=encrypt_request_authorization(user))
            if r.status_code == 404:
                continue
            elif r.status_code != 200:
                raise Exception(r.text)
            return format_readme(r.text, readme_name, link_prefix=[rendered_prefix, blob_prefix])
        return format_readme("")

    def create_repo(self, user, valid, visibility):
        """Create a repository from validated form input; None on invalid."""
        name = valid.require("name")
        description = valid.require("description")
        if not valid.ok:
            return None
        return self.post(user, valid, f"{_hgsrht}/api/repos", {
            "name": name,
            "description": description,
            "visibility": visibility.value,
        })

    def delete_repo(self, user, repo_name):
        """Delete a repository by name; a 404 is treated as success."""
        r = self.session.delete(f"{_hgsrht}/api/repos/{repo_name}",
                headers=encrypt_request_authorization(user))
        if r.status_code != 204 and r.status_code != 404:
            raise Exception(r.text)

    def ensure_user_webhooks(self, user):
        """Subscribe this hub to *user*'s repo create/delete events."""
        config = {
            origin + url_for("webhooks.hg_user", user_id=user.id):
                ["repo:update", "repo:delete"],
        }
        ensure_webhooks(user, f"{_hgsrht}/api/user/webhooks", config)

    def unensure_user_webhooks(self, user):
        """Best-effort removal of this hub's user-level webhooks."""
        config = { }
        try:
            ensure_webhooks(user, f"{_hgsrht}/api/user/webhooks", config)
        except Exception:
            pass # nbd, upstream was presumably deleted
|
2020-04-23 17:32:15 +02:00
|
|
|
|
2020-03-31 21:08:00 +02:00
|
|
|
class ListService(SrhtService):
    """Client for lists.sr.ht: mailing lists, webhooks, and patchset tools."""

    def get_lists(self, user):
        """Return all of *user*'s mailing lists (paginated REST endpoint)."""
        return get_results(f"{_listsrht}/api/lists", user)

    def get_list(self, user, list_name):
        """Fetch a single mailing list by name; raises on any error."""
        r = self.session.get(f"{_listsrht}/api/lists/{list_name}",
                headers=encrypt_request_authorization(user))
        if r.status_code != 200:
            raise Exception(r.json())
        return r.json()

    def ensure_mailing_list_webhooks(self, mailing_list):
        """Subscribe this hub to list updates, posts, and patchsets."""
        config = {
            origin + url_for("webhooks.mailing_list", list_id=mailing_list.id):
                ["list:update", "list:delete", "post:received", "patchset:received"],
        }
        owner = mailing_list.owner
        url = f"{_listsrht}/api/user/{owner.canonical_name}/lists/{mailing_list.name}/webhooks"
        ensure_webhooks(owner, url, config)

    def unensure_mailing_list_webhooks(self, mailing_list):
        """Best-effort removal of this hub's list-level webhooks."""
        config = { }
        owner = mailing_list.owner
        url = f"{_listsrht}/api/user/{owner.canonical_name}/lists/{mailing_list.name}/webhooks"
        try:
            ensure_webhooks(owner, url, config)
        except Exception:
            pass # nbd, upstream was presumably deleted

    def create_list(self, user, valid):
        """Create a mailing list from validated form input; None on invalid."""
        name = valid.require("name")
        description = valid.optional("description")
        if not valid.ok:
            return None
        return self.post(user, valid, f"{_listsrht}/api/lists", {
            "name": name,
            "description": description,
        })

    def delete_list(self, user, list_name):
        """Delete a mailing list by name; a 404 is treated as success."""
        r = self.session.delete(f"{_listsrht}/api/lists/{list_name}",
                headers=encrypt_request_authorization(user))
        if r.status_code != 204 and r.status_code != 404:
            raise Exception(r.text)

    def patchset_create_tool(self, user, patchset_id, icon, details):
        """Attach a status tool to a patchset; returns its id or None."""
        query = """
        mutation CreateTool($id: Int!, $details: String!, $icon: ToolIcon!) {
            createTool(patchsetID: $id, details: $details, icon: $icon) {
                id
            }
        }
        """
        r = self.post(user, None, f"{_listsrht_api}/query", {
            "query": query,
            "variables": {
                "id": patchset_id,
                "icon": icon,
                "details": details,
            },
        })
        if not r["data"] or not r["data"]["createTool"]:
            return None
        return r["data"]["createTool"]["id"]

    def patchset_update_tool(self, user, tool_id, icon, details):
        """Update an existing patchset tool; returns its id or None."""
        query = """
        mutation UpdateTool($id: Int!, $details: String!, $icon: ToolIcon!) {
            updateTool(id: $id, details: $details, icon: $icon) {
                id
            }
        }
        """
        r = self.post(user, None, f"{_listsrht_api}/query", {
            "query": query,
            "variables": {
                "id": tool_id,
                "icon": icon,
                "details": details,
            },
        })
        if not r["data"] or not r["data"]["updateTool"]:
            return None
        return r["data"]["updateTool"]["id"]
|
2020-07-13 22:14:59 +02:00
|
|
|
|
2020-04-02 00:51:14 +02:00
|
|
|
class TodoService(SrhtService):
    """Client for todo.sr.ht: trackers, tickets, and webhook management."""

    def get_trackers(self, user):
        """Return all of *user*'s trackers (paginated REST endpoint)."""
        return get_results(f"{_todosrht}/api/trackers", user)

    def get_tracker(self, user, tracker_name):
        """Fetch a single tracker by name; raises on any error."""
        r = self.session.get(f"{_todosrht}/api/trackers/{tracker_name}",
                headers=encrypt_request_authorization(user))
        if r.status_code != 200:
            raise Exception(r.json())
        return r.json()

    def create_tracker(self, user, valid, visibility):
        """Create a tracker from validated form input; None on invalid."""
        name = valid.require("name")
        description = valid.optional("description")
        if not valid.ok:
            return None
        return self.post(user, valid, f"{_todosrht}/api/trackers", {
            "name": name,
            "description": description,
            "visibility": visibility.value.upper(),
        })

    def delete_tracker(self, user, tracker_name):
        """Delete a tracker by name; a 404 is treated as success."""
        r = self.session.delete(f"{_todosrht}/api/trackers/{tracker_name}",
                headers=encrypt_request_authorization(user))
        if r.status_code != 204 and r.status_code != 404:
            raise Exception(r.text)

    def ensure_user_webhooks(self, user):
        """Subscribe this hub to *user*'s tracker create/delete events."""
        config = {
            origin + url_for("webhooks.todo_user", user_id=user.id):
                ["tracker:update", "tracker:delete"]
        }
        url = f"{_todosrht}/api/user/webhooks"
        ensure_webhooks(user, url, config)

    def unensure_user_webhooks(self, user):
        """Best-effort removal of this hub's user-level webhooks."""
        config = { }
        url = f"{_todosrht}/api/user/webhooks"
        try:
            ensure_webhooks(user, url, config)
        except Exception:
            pass # nbd, upstream was presumably deleted

    def ensure_tracker_webhooks(self, tracker):
        """Subscribe this hub to new tickets on *tracker*."""
        config = {
            origin + url_for("webhooks.todo_tracker", tracker_id=tracker.id):
                ["ticket:create"]
        }
        owner = tracker.owner
        url = f"{_todosrht}/api/user/{owner.canonical_name}/trackers/{tracker.name}/webhooks"
        ensure_webhooks(owner, url, config)

    def unensure_tracker_webhooks(self, tracker):
        """Best-effort removal of this hub's tracker-level webhooks."""
        config = { }
        owner = tracker.owner
        url = f"{_todosrht}/api/user/{owner.canonical_name}/trackers/{tracker.name}/webhooks"
        try:
            ensure_webhooks(owner, url, config)
        except Exception:
            pass # nbd, upstream was presumably deleted

    def ensure_ticket_webhooks(self, tracker, ticket_id):
        """Subscribe this hub to events on a single ticket."""
        config = {
            origin + url_for("webhooks.todo_ticket", tracker_id=tracker.id):
                ["event:create"]
        }
        owner = tracker.owner
        url = f"{_todosrht}/api/user/{owner.canonical_name}/trackers/{tracker.name}/tickets/{ticket_id}/webhooks"
        ensure_webhooks(owner, url, config)

    def unensure_ticket_webhooks(self, tracker, ticket_id):
        """Best-effort removal of this hub's ticket-level webhooks."""
        config = { }
        owner = tracker.owner
        url = f"{_todosrht}/api/user/{owner.canonical_name}/trackers/{tracker.name}/tickets/{ticket_id}/webhooks"
        try:
            ensure_webhooks(owner, url, config)
        except Exception:
            pass # nbd, upstream was presumably deleted

    def get_ticket_comments(self, user, owner, tracker, ticket):
        """Return the comment texts on a ticket, oldest event first.

        *owner* is a canonical name (e.g. "~user"); the leading sigil is
        stripped for the GraphQL username.
        """
        query = """
        query TicketComments($username: String!, $tracker: String!, $ticket: Int!) {
            user(username: $username) {
                tracker(name: $tracker) {
                    ticket(id: $ticket) {
                        events {
                            results {
                                changes {
                                    ... on Comment {
                                        text
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
        """
        r = self.post(user, None, f"{_todosrht_api}/query", {
            "query": query,
            "variables": {
                "username": owner[1:],
                "tracker": tracker,
                "ticket": ticket,
            }
        })
        comments = []
        for e in r["data"]["user"]["tracker"]["ticket"]["events"]["results"]:
            for c in e["changes"]:
                # Only Comment changes carry a "text" field.
                if "text" in c:
                    comments.append(c["text"])
        return comments

    def update_ticket(self, user, owner, tracker, ticket, comment, resolution=None):
        """Comment on a ticket; if *resolution* is given, also resolve it."""
        url = f"{_todosrht}/api/user/{owner}/trackers/{tracker}/tickets/{ticket}"
        payload = {"comment": comment}
        if resolution is not None:
            payload["resolution"] = resolution
            payload["status"] = "resolved"
        self.put(user, None, url, payload)
|
|
|
|
|
2020-07-13 19:29:20 +02:00
|
|
|
class BuildService(SrhtService):
    """Client for builds.sr.ht: submits build jobs and job groups."""

    def submit_build(self, user, manifest, note, tags, execute=True, valid=None, visibility=None):
        """Submit one build job on behalf of *user*.

        The manifest object is serialized to YAML for the API. Returns
        the GraphQL "submit" payload (a dict with the new job's id).
        """
        query = """
        mutation SubmitBuild(
            $manifest: String!,
            $note: String,
            $tags: [String!],
            $secrets: Boolean,
            $execute: Boolean,
            $visibility: Visibility,
        ) {
            submit(
                manifest: $manifest,
                note: $note,
                tags: $tags,
                secrets: $secrets,
                execute: $execute,
                visibility: $visibility,
            ) {
                id
            }
        }
        """
        variables = {
            "manifest": yaml.dump(manifest.to_dict(), default_flow_style=False),
            "tags": tags,
            "note": note,
            "secrets": False,
            "execute": execute,
            "visibility": visibility.value if visibility else None,
        }
        resp = self.post(user, valid, f"{_buildsrht_api}/query", {
            "query": query,
            "variables": variables,
        })
        return resp["data"]["submit"]

    def create_group(self, user, job_ids, note, triggers, valid=None):
        """Group previously-submitted jobs and start them together."""
        payload = {
            "jobs": job_ids,
            "note": note,
            "execute": True,
            "triggers": triggers,
        }
        return self.post(user, valid, f"{_buildsrht}/api/job-group", payload)
|
|
|
|
|
2020-03-24 15:26:15 +01:00
|
|
|
# Singleton service clients, shared across the application.
git = GitService()
hg = HgService()
lists = ListService()
todo = TodoService()
builds = BuildService()
|