archive versioncheck incr to pg

simon 2023-01-18 11:47:25 +07:00
parent 6bcf487eda
commit eef3581381
4 changed files with 95 additions and 14 deletions


@@ -18,6 +18,14 @@ FROM '/dockerstats.csv'
DELIMITER ','
CSV HEADER;

-- create ta_version_stats table
CREATE TABLE ta_version_stats (
    id SERIAL NOT NULL PRIMARY KEY,
    ping_date DATE NOT NULL,
    ping_count INT NOT NULL,
    latest_version VARCHAR(10) NOT NULL
);

-- create release history table
CREATE TABLE ta_release (
    id SERIAL NOT NULL PRIMARY KEY,

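For a quick sanity check once the archive task has run, the new ta_version_stats table can be read back directly; the sketch below assumes psycopg2 and placeholder connection settings, neither of which is part of this repo.

# Sketch only: inspect archived version check counters in Postgres.
# psycopg2 and the connection parameters below are assumptions.
import psycopg2

conn = psycopg2.connect(host="localhost", dbname="ta", user="ta", password="secret")
with conn.cursor() as cur:
    cur.execute(
        "SELECT ping_date, ping_count, latest_version "
        "FROM ta_version_stats ORDER BY ping_date DESC LIMIT 7;"
    )
    for row in cur.fetchall():
        print(row)
conn.close()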

@@ -86,16 +86,3 @@ class TaskHandler(RedisBase):
    def set_pub(self):
        """set message to pub"""
        self.conn.publish(self.key, self.repo_conf.get("gh_repo"))


class VersionCheckCounter(RedisBase):
    """count requests to version check API endpoint"""

    def __init__(self):
        super().__init__()
        self.timestamp = datetime.now().strftime("%Y%m%d")
        self.key = f"{self.NAME_SPACE}versioncounter:{self.timestamp}"

    def increase(self):
        """increase counter by one"""
        self.conn.execute_command("INCR", self.key)


@@ -0,0 +1,78 @@
"""increment and archive version check requests"""
from datetime import datetime
from src.db import DatabaseConnect
from src.ta_redis import RedisBase
from src.webhook_github import GithubBackup
class VersionCheckCounter(RedisBase):
"""count requests to version check API endpoint"""
KEY_BASE = f"{RedisBase.NAME_SPACE}versioncounter"
TABLE = "ta_version_stats"
def __init__(self):
super().__init__()
self.timestamp = datetime.now().strftime("%Y%m%d")
self.key = f"{self.KEY_BASE}:{self.timestamp}"
self.query = False
def increase(self):
"""increase counter by one"""
self.conn.execute_command("INCR", self.key)
def archive(self):
"""archive past counters to pg"""
counters = self.conn.execute_command("KEYS", f"{self.KEY_BASE}:*")
archive_keys = [i.decode() for i in counters if i.decode() != self.key]
archive_keys.sort()
if not archive_keys:
print("no new version keys to archive")
return
for archive_key in archive_keys:
self._build_query(archive_key)
self._insert_line()
self._delete_key(archive_key)
def _build_query(self, archive_key):
"""store single archive key in pg"""
stats = {
"ping_date": archive_key.lstrip(self.KEY_BASE),
"ping_count": self._get_count(archive_key),
"latest_version": self._get_latest_version(),
}
keys = stats.keys()
values = tuple(stats.values())
keys_str = ", ".join(keys)
valid = ", ".join(["%s" for i in keys])
self.query = (
f"INSERT INTO {self.TABLE} ({keys_str}) VALUES ({valid});", values
)
def _get_count(self, archive_key):
"""get count from redis"""
result = self.conn.execute_command("GET", archive_key)
return int(result.decode())
def _get_latest_version(self):
"""get semantic release of latest"""
latest = GithubBackup("latest").get_tag().get("release_version")
return latest
def _insert_line(self):
"""add line to postgres"""
handler = DatabaseConnect()
handler.db_execute(self.query)
handler.db_close()
def _delete_key(self, archive_key):
"""delete archived key from redis"""
self.conn.execute_command("DEL", archive_key)
def run_version_check_archive():
"""daily task to store version check stats"""
VersionCheckCounter().archive()

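A minimal usage sketch of the new module, assuming Redis and Postgres are reachable through the project's RedisBase and DatabaseConnect helpers:

# Sketch only: exercise the counter and the archive task by hand.
from src.versioncheck import VersionCheckCounter, run_version_check_archive

counter = VersionCheckCounter()
counter.increase()           # bump today's Redis counter, one INCR per request
run_version_check_archive()  # move any past-day counters into ta_version_stats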

@@ -5,7 +5,7 @@ from os import environ
from apscheduler.schedulers.background import BackgroundScheduler
from flask import Flask, render_template, jsonify, request, redirect
from src.api_docker import run_docker_backup
-from src.ta_redis import VersionCheckCounter
+from src.versioncheck import VersionCheckCounter, run_version_check_archive
from src.webhook_docker import DockerHook
from src.webhook_github import GithubBackup, GithubHook
import markdown
@@ -21,6 +21,14 @@ scheduler.add_job(
    minute="0",
    name="docker_backup",
)
scheduler.add_job(
    run_version_check_archive,
    trigger="cron",
    day="*",
    hour="1",
    minute="0",
    name="version_backup",
)
scheduler.start()
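The diff imports VersionCheckCounter into app.py but does not show the endpoint that calls increase(); a hypothetical wiring sketch, with the route name and response shape assumed purely for illustration:

# Hypothetical endpoint wiring, not part of this commit: the route and response
# are assumptions, only increase() and GithubBackup come from the diff above.
@app.route("/api/version/")
def version_check():
    VersionCheckCounter().increase()
    latest = GithubBackup("latest").get_tag().get("release_version")
    return jsonify({"release_version": latest})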