From 652abdff639a4eab7c8ab29a2db5a6389e7ac4b2 Mon Sep 17 00:00:00 2001 From: simon Date: Fri, 20 May 2022 11:41:44 +0700 Subject: [PATCH] implement build server --- .gitignore | 2 +- builder/monitor.py | 129 ++++++++++++ builder/requirements.txt | 2 + deploy.sh | 7 +- docker-compose_production.yml | 53 +---- docker-compose_testing.yml | 9 + env/drone.sample.env | 9 - env/tubearchivist.sample.env | 3 + install.sh | 14 ++ tubearchivist/postgres/indexsetup.sql | 10 + tubearchivist/web/requirements.txt | 1 + tubearchivist/web/src/webhook_base.py | 33 +++ tubearchivist/web/src/webhook_docker.py | 128 +++++++++--- tubearchivist/web/src/webhook_github.py | 261 ++++++++++++++++++++++-- tubearchivist/web/views.py | 31 +-- 15 files changed, 574 insertions(+), 118 deletions(-) create mode 100644 builder/monitor.py create mode 100644 builder/requirements.txt delete mode 100644 env/drone.sample.env create mode 100644 install.sh create mode 100644 tubearchivist/web/src/webhook_base.py diff --git a/.gitignore b/.gitignore index a05f57c..e0e28d3 100644 --- a/.gitignore +++ b/.gitignore @@ -6,9 +6,9 @@ config.json postgres.env tubearchivist.env umami.env -drone.env # example hooks docker-hook.json github-hook.json +github-push-hook.json roadmap-hook.json diff --git a/builder/monitor.py b/builder/monitor.py new file mode 100644 index 0000000..1a9b436 --- /dev/null +++ b/builder/monitor.py @@ -0,0 +1,129 @@ +"""monitor redis for tasks to execute""" + +import json +import subprocess +import os + +import redis + + +class RedisBase: + """connection base for redis""" + + REDIS_HOST = "localhost" + REDIS_PORT = 6379 + NAME_SPACE = "ta:" + TASK_KEY = NAME_SPACE + "task:buildx" + + def __init__(self): + self.conn = redis.Redis(host=self.REDIS_HOST, port=self.REDIS_PORT) + + +class Monitor(RedisBase): + """look for messages""" + + def get_tasks(self): + """get task list""" + response = self.conn.execute_command("JSON.GET", self.TASK_KEY) + tasks = json.loads(response.decode()) + return tasks + + def bootstrap(self): + """create custom builder""" + print("validate builder") + command = ["docker", "buildx", "inspect"] + output = subprocess.run(command, check=True, capture_output=True) + inspect = output.stdout.decode() + config = {} + lines = [i for i in inspect.split("\n") if i] + for line in lines: + key, value = line.split(":", maxsplit=1) + if value: + config[key.strip()] = value.strip() + + if not config["Name"].startswith("tubearchivist"): + print("create tubearchivist builder") + self._create_builder() + else: + print("tubearchivist builder already created") + + @staticmethod + def _create_builder(): + """create buildx builder""" + base = ["docker", "buildx"] + subprocess.run( + base + ["create", "--name", "tubearchivist"], check=True + ) + subprocess.run(base + ["use", "tubearchivist"], check=True) + subprocess.run(base + ["inspect", "--bootstrap"], check=True) + + def watch(self): + """watch for messages""" + print("waiting for tasks") + watcher = self.conn.pubsub() + watcher.subscribe(self.TASK_KEY) + for i in watcher.listen(): + if i["type"] == "message": + task = i["data"].decode() + print(task) + Builder(task).run() + + +class Builder(RedisBase): + """execute task""" + + CLONE_BASE = "clone" + + def __init__(self, task): + super().__init__() + self.task = task + self.task_detail = False + + def run(self): + """run all steps""" + self.get_task() + self.clone() + self.build() + self.remove_task() + + def get_task(self): + """get what to execute""" + print("get task from redis") + response = 
self.conn.execute_command("JSON.GET", self.TASK_KEY) + response_json = json.loads(response.decode()) + self.task_detail = response_json["tasks"][self.task] + + def clone(self): + """clone repo to destination""" + print("clone repo") + clone = ["git", "clone", self.task_detail["clone"]] + pull = ["git", "pull", self.task_detail["clone"]] + os.chdir("clone") + try: + subprocess.run(clone, check=True) + except subprocess.CalledProcessError: + print("git pull instead") + os.chdir(self.task) + subprocess.run(pull, check=True) + os.chdir("../") + os.chdir("../") + + def build(self): + """build the container""" + build_command = ["docker", "buildx"] + self.task_detail["build"] + build_command.append(os.path.join(self.CLONE_BASE, self.task)) + subprocess.run(build_command, check=True) + + def remove_task(self): + """remove task from redis queue""" + print("remove task from redis") + self.conn.json().delete(self.TASK_KEY, f".tasks.{self.task}") + + +if __name__ == "__main__": + handler = Monitor() + handler.bootstrap() + try: + handler.watch() + except KeyboardInterrupt: + print(" [X] cancle watch") diff --git a/builder/requirements.txt b/builder/requirements.txt new file mode 100644 index 0000000..77f3a72 --- /dev/null +++ b/builder/requirements.txt @@ -0,0 +1,2 @@ +redis +ipython \ No newline at end of file diff --git a/deploy.sh b/deploy.sh index 7126250..0653ae1 100755 --- a/deploy.sh +++ b/deploy.sh @@ -9,8 +9,9 @@ function rebuild_test { rsync -a --progress --delete docker-compose_testing.yml $test_host:docker/docker-compose.yml rsync -a --progress --delete tubearchivist $test_host:docker rsync -a --progress --delete env $test_host:docker - - ssh "$test_host" 'docker-compose -f docker/docker-compose.yml up -d --build' + rsync -a --progress --delete builder/ $test_host:builder + ssh "$test_host" "mkdir -p builder/clone" + ssh "$test_host" 'docker compose -f docker/docker-compose.yml up -d --build' } function docker_publish { @@ -19,6 +20,8 @@ function docker_publish { rsync -a --progress --delete docker-compose_production.yml $public_host:docker/docker-compose.yml rsync -a --progress --delete tubearchivist $public_host:docker rsync -a --progress --delete env $public_host:docker + rsync -a --progress --delete builder/ $public_host:builder + ssh "$public_host" "mkdir -p builder/clone" ssh "$public_host" 'docker compose -f docker/docker-compose.yml build tubearchivist' ssh "$public_host" 'docker compose -f docker/docker-compose.yml up -d' diff --git a/docker-compose_production.yml b/docker-compose_production.yml index 0a03446..d8c4435 100644 --- a/docker-compose_production.yml +++ b/docker-compose_production.yml @@ -16,7 +16,6 @@ services: - front - tubearchivist_network - umami_network - - drone_network nginx-proxy-acme: image: nginxproxy/acme-companion container_name: nginx-proxy-acme @@ -67,6 +66,16 @@ services: - "5432" networks: - tubearchivist_network + redis: + image: redislabs/rejson:latest + container_name: redis + restart: always + ports: + - "127.0.0.1:6379:6379" + volumes: + - ./volume/redis:/data + networks: + - tubearchivist_network # umami stats umami: image: ghcr.io/mikecao/umami:postgresql-latest @@ -94,46 +103,6 @@ services: restart: always networks: - umami_network - # drone build server - drone: - image: drone/drone:2 - container_name: drone - expose: - - "80" - env_file: - - ./env/drone.env - environment: - - VIRTUAL_HOST=www.drone.tubearchivist.com,drone.tubearchivist.com - - LETSENCRYPT_HOST=www.drone.tubearchivist.com,drone.tubearchivist.com - volumes: - - 
./volume/drone/server:/data - restart: always - networks: - - drone_network - drone-runner-amd64: - image: drone/drone-runner-docker:1.8.1-linux-amd64 - container_name: drone-runner-amd64 - expose: - - "3000" - env_file: - - ./env/drone.env - volumes: - - /var/run/docker.sock:/var/run/docker.sock - restart: always - networks: - - drone_network - drone-runner-arm64: - image: drone/drone-runner-docker:1.8.1-linux-arm64 - container_name: drone-runner-arm64 - expose: - - "3001" - env_file: - - ./env/drone.env - volumes: - - /var/run/docker.sock:/var/run/docker.sock - restart: always - networks: - - drone_network networks: front: @@ -142,5 +111,3 @@ networks: driver: bridge umami_network: driver: bridge - drone_network: - driver: bridge diff --git a/docker-compose_testing.yml b/docker-compose_testing.yml index 3c8fd1f..962f77b 100644 --- a/docker-compose_testing.yml +++ b/docker-compose_testing.yml @@ -29,3 +29,12 @@ services: - ./env/postgres.env expose: - "5432" + # redis job monitor + redis: + image: redislabs/rejson:latest + container_name: redis + restart: always + ports: + - "6379:6379" + volumes: + - ./volume/redis:/data diff --git a/env/drone.sample.env b/env/drone.sample.env deleted file mode 100644 index 59a430c..0000000 --- a/env/drone.sample.env +++ /dev/null @@ -1,9 +0,0 @@ -DRONE_GITHUB_CLIENT_ID=aaaaaaaaaaaa -DRONE_GITHUB_CLIENT_SECRET=bbbbbbbbbbbbbbbbb -DRONE_RPC_SECRET=ccccccccccccc -DRONE_SERVER_HOST=www.drone.tubearchivist.com -DRONE_SERVER_PROTO=https -DRONE_RUNNER_CAPACITY=1 -DRONE_RUNNER_NAME=tubearchivist -DRONE_RPC_PROTO=https -DRONE_RPC_HOST=www.drone.tubearchivist.com diff --git a/env/tubearchivist.sample.env b/env/tubearchivist.sample.env index 017515e..84c49ac 100644 --- a/env/tubearchivist.sample.env +++ b/env/tubearchivist.sample.env @@ -1,3 +1,6 @@ REDDIT_HOOK_URL=https://discordapp.com/api/webhooks/000000000000000000/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa ROADMAP_HOOK_URL=https://discordapp.com/api/webhooks/000000000000000000/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa DOCKER_UNSTABLE_HOOK_URL=https://discordapp.com/api/webhooks/000000000000000000/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +NOTIFICATION_TEST_HOOK_URL=https://discord.com/api/webhooks/000000000000000000/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +GH_HOOK_SECRET=xxxxxxxxxxxxxxxxxxxxxxxx +DOCKER_HOOK_SECRET=yyyyyyyyyyyyyyyyyyyyyyyy \ No newline at end of file diff --git a/install.sh b/install.sh new file mode 100644 index 0000000..cfb98f9 --- /dev/null +++ b/install.sh @@ -0,0 +1,14 @@ +#!/bin/bash +# additional server install script + +# setup multiarch qemu +sudo apt-get install qemu binfmt-support qemu-user-static +docker run --rm --privileged multiarch/qemu-user-static --reset -p yes +docker run --rm -t arm64v8/ubuntu uname -m + +# pip dependencies +sudo apt install pip +pip install -r builder/requirements.txt + +## +exit 0 diff --git a/tubearchivist/postgres/indexsetup.sql b/tubearchivist/postgres/indexsetup.sql index 34705e6..4c6c3f5 100644 --- a/tubearchivist/postgres/indexsetup.sql +++ b/tubearchivist/postgres/indexsetup.sql @@ -22,3 +22,13 @@ CREATE TABLE ta_release ( breaking_changes BOOLEAN NOT NULL, release_notes TEXT NOT NULL ); + +-- create roadmap history table +CREATE TABLE ta_roadmap ( + id SERIAL NOT NULL PRIMARY KEY, + time_stamp INT NOT NULL, + time_stamp_human VARCHAR(20) NOT NULL, + last_id VARCHAR(20) NOT NULL, + implemented TEXT NOT NULL, + pending TEXT NOT NULL +); diff 
--git a/tubearchivist/web/requirements.txt b/tubearchivist/web/requirements.txt index ba03a9d..6b5f9e4 100644 --- a/tubearchivist/web/requirements.txt +++ b/tubearchivist/web/requirements.txt @@ -2,5 +2,6 @@ flask==2.1.2 ipython==8.3.0 markdown==3.3.7 psycopg2==2.9.3 +redis==4.3.1 requests==2.27.1 uWSGI==2.0.20 diff --git a/tubearchivist/web/src/webhook_base.py b/tubearchivist/web/src/webhook_base.py new file mode 100644 index 0000000..f916b36 --- /dev/null +++ b/tubearchivist/web/src/webhook_base.py @@ -0,0 +1,33 @@ +"""base class to handle webhook config""" + +from os import environ + + +class WebhookBase: + """shared config""" + + # map key is gh_repo name + HOOK_MAP = { + "drone-test": { + "gh_user": "tubearchivist", + "gh_repo": "drone-test", + "docker_user": "bbilly1", + "docker_repo": "drone-test", + "unstable_keyword": "#build", + "build_unstable": [ + "build", "--platform", "linux/amd64,linux/arm64", + "-t", "bbilly1/drone-test:unstable", "--push" + ], + "build_release": [ + "build", "--platform", "linux/amd64,linux/arm64", + "-t", "bbilly1/drone-test", + "-t", "bbilly1/drone-test:unstable", + "-t", "bbilly1/drone-test:$VERSION", "--push" + ], + "discord_unstable_hook": environ.get("NOTIFICATION_TEST_HOOK_URL"), + "discord_release_hook": environ.get("NOTIFICATION_TEST_HOOK_URL"), + } + } + ROADMAP_HOOK_URL = environ.get("ROADMAP_HOOK_URL") + GH_HOOK_SECRET = environ.get("GH_HOOK_SECRET") + DOCKER_HOOK_SECRET = environ.get("DOCKER_HOOK_SECRET") diff --git a/tubearchivist/web/src/webhook_docker.py b/tubearchivist/web/src/webhook_docker.py index df14e15..7a2b0b0 100644 --- a/tubearchivist/web/src/webhook_docker.py +++ b/tubearchivist/web/src/webhook_docker.py @@ -1,59 +1,119 @@ """parse and forward docker webhooks""" -from os import environ import requests +from src.webhook_base import WebhookBase -class DockerHook: + +class DockerHook(WebhookBase): """parse docker webhook and forward to discord""" - HOOK_URL = environ.get("DOCKER_UNSTABLE_HOOK_URL") - COMMITS_URL = "https://api.github.com/repos/bbilly1/tubearchivist/commits" + def __init__(self, request): + self.request = request + self.name = False + self.hook = False + self.repo_conf = False + self.tag = False - def __init__(self, docker_hook): - self.docker_hook = docker_hook - self.docker_hook_details = self.docker_hook_parser() - self.commit_url = False - self.first_line_message = False + def validate(self): + """validate hook origin""" + received = self.request.args.get("secret") + if not received: + return False - def docker_hook_parser(self): - """parse data from docker""" + return received == self.DOCKER_HOOK_SECRET - docker_hook_details = { - "release_tag": self.docker_hook["push_data"]["tag"], - "repo_url": self.docker_hook["repository"]["repo_url"], - "repo_name": self.docker_hook["repository"]["repo_name"] - } + def process(self): + """process the hook data""" - return docker_hook_details + parsed = self._parse_hook() + if not parsed: + return False - def get_latest_commit(self): - """get latest commit url from master""" - response = requests.get(f"{self.COMMITS_URL}/master").json() - self.commit_url = response["html_url"] - self.first_line_message = response["commit"]["message"].split("\n")[0] + if self.tag == "unstable": + response = self._send_unstable_hook() + else: + response = self._send_release_hook() - def forward_message(self): + return response + + def _send_unstable_hook(self): + """send notification for unstable build""" + + commit_url, first_line_message = self._get_last_commit() + if not 
first_line_message.endswith(self.repo_conf["unstable_keyword"]): + message = {"success": False} + print(message, "build message not found in commit") + return message + + url = self.repo_conf["discord_unstable_hook"] + message_data = self._build_unstable_message(commit_url) + response = self._forward_message(message_data, url) + return response + + def _parse_hook(self): + """parse hook json""" + self.hook = self.request.json + self.tag = self.hook["push_data"]["tag"] + if not self.tag or self.tag == "latest": + return False + + self.name = self.hook["repository"]["name"] + if self.name not in self.HOOK_MAP: + print(f"repo {self.name} not registered") + return False + + self.repo_conf = self.HOOK_MAP[self.name] + + return True + + def _get_last_commit(self): + """get last commit from git repo""" + user = self.repo_conf.get("gh_user") + repo = self.repo_conf.get("gh_repo") + url = f"https://api.github.com/repos/{user}/{repo}/commits/master" + response = requests.get(url).json() + commit_url = response["html_url"] + first_line_message = response["commit"]["message"].split("\n")[0] + + return commit_url, first_line_message + + @staticmethod + def _forward_message(message_data, url): """forward message to discrod""" - data = self.build_message() - response = requests.post(self.HOOK_URL, json=data) + response = requests.post(url, json=message_data) if not response.ok: print(response.json()) return {"success": False} return {"success": True} - def build_message(self): + def _build_unstable_message(self, commit_url): """build message for discord hook""" - release_tag = self.docker_hook_details["release_tag"] - repo_url = self.docker_hook_details["repo_url"] + repo_url = self.hook["repository"]["repo_url"] message = ( - f"There is a new **{release_tag}** build " + - f"published to [docker]({repo_url}). Built from:\n" + - self.commit_url) - - data = { + f"There is a new **{self.tag}** build published to " + + f"[docker]({repo_url}). 
Built from:\n{commit_url}" + ) + message_data = { "content": message } - return data + return message_data + + def _send_release_hook(self): + """send new release notification""" + user = self.repo_conf.get("gh_user") + repo = self.repo_conf.get("gh_repo") + release_url = ( + f"https://github.com/{user}/{repo}/" + + f"releases/tag/{self.tag}" + ) + message_data = { + "content": release_url + } + + url = self.repo_conf["discord_release_hook"] + response = self._forward_message(message_data, url) + + return response diff --git a/tubearchivist/web/src/webhook_github.py b/tubearchivist/web/src/webhook_github.py index 79656cf..1307b96 100644 --- a/tubearchivist/web/src/webhook_github.py +++ b/tubearchivist/web/src/webhook_github.py @@ -1,12 +1,107 @@ """handle release functionality""" +import base64 +import json from datetime import datetime -from os import environ +from hashlib import sha256 +from hmac import HMAC, compare_digest import requests +import redis from src.db import DatabaseConnect +from src.webhook_base import WebhookBase +class GithubHook(WebhookBase): + """process hooks from github""" + + def __init__(self, request): + self.request = request + self.hook = False + self.repo = False + self.repo_conf = False + + def validate(self): + """make sure hook is legit""" + sig = self.request.headers.get("X-Hub-Signature-256") + if not sig: + return False + + received = sig.split("sha256=")[-1].strip() + print(f"received: {received}") + secret = self.GH_HOOK_SECRET.encode() + msg = self.request.data + expected = HMAC(key=secret, msg=msg, digestmod=sha256).hexdigest() + print(f"expected: {expected}") + return compare_digest(received, expected) + + def create_hook_task(self): + """check what task is required""" + self.hook = self.request.json + self.repo = self.hook["repository"]["name"] + + if self.repo not in self.HOOK_MAP: + print(f"repo {self.repo} not registered") + return False + + self.repo_conf = self.HOOK_MAP[self.repo] + if "ref" in self.hook: + # is a commit hook + self.process_commit_hook() + + if "release" in self.hook: + # is a release hook + self.process_release_hook() + + return False + + def process_commit_hook(self): + """process commit hook after validation""" + on_master = self.check_branch() + if not on_master: + print("commit not on master") + return + + self._check_roadmap() + + build_message = self.check_commit_message() + if not build_message: + print("build keyword not found in commit message") + return + + self.repo = self.hook["repository"]["name"] + TaskHandler(self.repo_conf).create_task("build_unstable") + + def check_branch(self): + """check if commit on master branch""" + master_branch = self.hook["repository"]["master_branch"] + ref = self.hook["ref"] + + return ref.endswith(master_branch) + + def check_commit_message(self): + """check if keyword in commit message is there""" + message = self.hook["head_commit"]["message"] + return message.endswith(self.repo_conf["unstable_keyword"]) + + def _check_roadmap(self): + """check if roadmap update needed""" + modified = [i["modified"] for i in self.hook["commits"]] + for i in modified: + if "README.md" in i: + print("README updated, check roadmap") + RoadmapHook(self.repo_conf, self.ROADMAP_HOOK_URL).update() + break + + def process_release_hook(self): + """build and process for new release""" + if self.hook["action"] != "released": + return + + tag_name = self.hook["release"]["tag_name"] + task = TaskHandler(self.repo_conf, tag_name=tag_name) + task.create_task("build_release") + class GithubBackup: """backup 
release and notes""" @@ -108,24 +203,53 @@ class GithubBackup: class RoadmapHook: """update roadmap""" - README = "https://raw.githubusercontent.com/bbilly1/tubearchivist/master/README.md" - HOOK_URL = environ.get("ROADMAP_HOOK_URL") - - def __init__(self): + def __init__(self, repo_conf, hook_url): + self.repo_conf = repo_conf + self.hook_url = hook_url self.roadmap_raw = False self.implemented = False self.pending = False def update(self): """update message""" - self.get_roadmap() + pending_old, implemented_old, message_id = self.get_last_roadmap() + self.get_new_roadmap() self.parse_roadmap() - self.send_message() + if pending_old == self.pending and implemented_old == self.implemented: + print("roadmap did not change") + return - def get_roadmap(self): + if message_id: + self.delete_webhook(message_id) + + last_id = self.send_message() + self.update_roadmap(last_id) + + @staticmethod + def get_last_roadmap(): + """get last entry in db to comapre agains""" + query = "SELECT * FROM ta_roadmap ORDER BY time_stamp DESC LIMIT 1;" + handler = DatabaseConnect() + rows = handler.db_execute(query) + handler.db_close() + + try: + pending = [i.get("pending") for i in rows][0] + implemented = [i.get("implemented") for i in rows][0] + last_id = [i.get("last_id") for i in rows][0] + except IndexError: + pending, implemented, last_id = False, False, False + + return pending, implemented, last_id + + def get_new_roadmap(self): """get current roadmap""" - response = requests.get(self.README) - paragraphs = [i.strip() for i in response.text.split("##")] + user = self.repo_conf.get("gh_user") + repo = self.repo_conf.get("gh_repo") + url = f"https://api.github.com/repos/{user}/{repo}/contents/README.md" + response = requests.get(url).json() + content = base64.b64decode(response["content"]).decode() + paragraphs = [i.strip() for i in content.split("##")] for paragraph in paragraphs: if paragraph.startswith("Roadmap"): roadmap_raw = paragraph @@ -137,10 +261,22 @@ class RoadmapHook: def parse_roadmap(self): """extract relevant information""" - _, pending, implemented = self.roadmap_raw.split("\n\n") - implemented = implemented.lstrip("Implemented:\n") - self.implemented = implemented.replace("[X] ", "") - self.pending = pending.replace("[ ]", "") + pending_items = [] + implemented_items = [] + for line in self.roadmap_raw.split("\n"): + if line.startswith("- [ ] "): + pending_items.append(line.replace("[ ] ", "")) + if line.startswith("- [X] "): + implemented_items.append(line.replace("[X] ", "")) + + self.pending = "\n".join(pending_items) + self.implemented = "\n".join(implemented_items) + + def delete_webhook(self, message_id): + """delete old message""" + url = f"{self.hook_url}/messages/{message_id}" + response = requests.delete(url) + print(response) def send_message(self): """build message dict""" @@ -155,6 +291,101 @@ class RoadmapHook: "color": 10555 }] } - response = requests.post(self.HOOK_URL, json=data) + response = requests.post(f"{self.hook_url}?wait=true", json=data) print(response) print(response.text) + + return response.json()["id"] + + def update_roadmap(self, last_id): + """update new roadmap in db""" + ingest_line = { + "time_stamp": int(datetime.now().strftime("%s")), + "time_stamp_human": datetime.now().strftime("%Y-%m-%d"), + "last_id": last_id, + "implemented": self.implemented, + "pending": self.pending, + } + keys = ingest_line.keys() + values = tuple(ingest_line.values()) + keys_str = ", ".join(keys) + valid = ", ".join(["%s" for i in keys]) + query = ( + f"INSERT INTO 
ta_roadmap ({keys_str}) VALUES ({valid});", values + ) + handler = DatabaseConnect() + _ = handler.db_execute(query) + handler.db_close() + + +class RedisBase: + """connection base for redis""" + + REDIS_HOST = "redis" + REDIS_PORT = 6379 + NAME_SPACE = "ta:" + + def __init__(self): + self.conn = redis.Redis(host=self.REDIS_HOST, port=self.REDIS_PORT) + + +class TaskHandler(RedisBase): + """handle buildx task queue""" + + def __init__(self, repo_conf, tag_name=False): + super().__init__() + self.key = self.NAME_SPACE + "task:buildx" + self.repo_conf = repo_conf + self.tag_name = tag_name + + def create_task(self, task_name): + """create task""" + self.create_queue() + self.set_task(task_name) + self.set_pub() + + def create_queue(self): + """set initial json object for queue""" + if self.conn.execute_command(f"EXISTS {self.key}"): + print(f"{self.key} already exists") + return + + message = { + "created": int(datetime.now().strftime("%s")), + "tasks": {} + } + self.conn.execute_command( + "JSON.SET", self.key, ".", json.dumps(message) + ) + + def set_task(self, task_name): + """publish new task to queue""" + + user = self.repo_conf.get("gh_user") + repo = self.repo_conf.get("gh_repo") + build_command = self.build_command(task_name) + task = { + "timestamp": int(datetime.now().strftime("%s")), + "clone": f"https://github.com/{user}/{repo}.git", + "name": self.repo_conf.get("gh_repo"), + "build": build_command, + } + + self.conn.json().set(self.key, f".tasks.{repo}", task) + + def build_command(self, task_name): + """return build command""" + if not self.tag_name: + return self.repo_conf.get(task_name) + + command = self.repo_conf.get(task_name) + for idx, command_part in enumerate(command): + if "$VERSION" in command_part: + subed = command_part.replace("$VERSION", self.tag_name) + command[idx] = subed + + return command + + def set_pub(self): + """set message to pub""" + self.conn.publish(self.key, self.repo_conf.get("gh_repo")) diff --git a/tubearchivist/web/views.py b/tubearchivist/web/views.py index efc07b7..f617405 100644 --- a/tubearchivist/web/views.py +++ b/tubearchivist/web/views.py @@ -2,7 +2,7 @@ from flask import Flask, render_template, jsonify, request from src.webhook_docker import DockerHook -from src.webhook_github import GithubBackup +from src.webhook_github import GithubBackup, GithubHook import markdown app = Flask(__name__) @@ -28,20 +28,16 @@ def release(release_id): @app.route("/api/webhook/docker/", methods=['POST']) def webhook_docker(): """parse docker webhook data""" + handler = DockerHook(request) + valid = handler.validate() + + print(f"valid: {valid}") + if not valid: + return "Forbidden", 403 + print(request.json) - hook = DockerHook(request.json) - if hook.docker_hook_details.get("release_tag") != "unstable": - message = {"success": False} - print(message, "not unstable build") - return jsonify(message) + message = handler.process() - hook.get_latest_commit() - if not hook.first_line_message.endswith("#build"): - message = {"success": False} - print(message, "not build message in commit") - return jsonify(message) - - message = hook.forward_message() print(message, "hook sent to discord") return jsonify(message) @@ -49,6 +45,13 @@ def webhook_docker(): @app.route("/api/webhook/github/", methods=['POST']) def webhook_github(): """prase webhooks from github""" + handler = GithubHook(request) + valid = handler.validate() + print(f"valid: {valid}") + if not valid: + return "Forbidden", 403 + print(request.json) - message = {"success": False} + 
handler.create_hook_task() + message = {"success": True} return jsonify(message)
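
Not part of the patch, just a reviewer note: a quick way to exercise the new
GithubHook.validate() check without a real GitHub delivery is to sign a test
payload the same way GitHub does (HMAC-SHA256 over the raw request body, hex
digest sent as "X-Hub-Signature-256: sha256=<digest>"). The sketch below is a
rough smoke test under these assumptions: the flask app is reachable on
localhost:8080 (adjust host and port to your compose setup), GH_HOOK_SECRET is
set to the placeholder value from env/tubearchivist.sample.env, and the payload
carries only the keys the new commit-hook path reads. The commit message
deliberately lacks the "#build" keyword, so no buildx task is queued in redis.

"""smoke test the /api/webhook/github/ endpoint with a signed dummy payload"""

import hmac
import json
from hashlib import sha256

import requests

SECRET = b"xxxxxxxxxxxxxxxxxxxxxxxx"  # must match GH_HOOK_SECRET in tubearchivist.env
URL = "http://localhost:8080/api/webhook/github/"  # assumed address of the web container

# minimal commit hook payload: only the fields the new GithubHook code touches
payload = {
    "repository": {"name": "drone-test", "master_branch": "master"},
    "ref": "refs/heads/master",
    "commits": [{"modified": ["some_file.py"]}],
    "head_commit": {"message": "test commit without the build keyword"},
}
body = json.dumps(payload).encode()

# github signs the raw body with HMAC-SHA256 and prefixes the hex digest with "sha256="
signature = hmac.new(SECRET, body, sha256).hexdigest()
headers = {
    "X-Hub-Signature-256": f"sha256={signature}",
    "Content-Type": "application/json",
}

response = requests.post(URL, data=body, headers=headers)
print(response.status_code, response.text)  # expect 200 and {"success": true}

Tampering with the body or the secret should get a 403 back. The docker
endpoint is simpler to poke at: it only compares the ?secret= query parameter
against DOCKER_HOOK_SECRET before parsing the hub payload.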