Mirror of https://github.com/tubearchivist/tubearchivist.git
Synced 2024-12-23 10:20:13 +00:00

commit 285e2042ae (parent e4b7f8ce38)
[API] add backup endpoints
@@ -96,6 +96,16 @@ urlpatterns = [
         views.SnapshotApiView.as_view(),
         name="api-snapshot",
     ),
+    path(
+        "backup/",
+        views.BackupApiListView.as_view(),
+        name="api-backup-list",
+    ),
+    path(
+        "backup/<str:filename>/",
+        views.BackupApiView.as_view(),
+        name="api-backup",
+    ),
     path(
         "task-name/",
         views.TaskListView.as_view(),
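Because the new routes are named, templates and tests can resolve them with Django's reverse() instead of hard-coding paths. A minimal sketch, assuming the app's URLconf is mounted under /api/ as the view docstrings suggest; the example filename is illustrative only:

from django.urls import reverse

# route names taken from the diff above
list_url = reverse("api-backup-list")  # -> "/api/backup/"
detail_url = reverse(
    "api-backup", kwargs={"filename": "ta_backup-20240101-auto.zip"}
)  # -> "/api/backup/ta_backup-20240101-auto.zip/"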
@@ -8,6 +8,7 @@ from home.src.download.subscriptions import (
     PlaylistSubscription,
 )
 from home.src.download.yt_dlp_base import CookieHandler
+from home.src.es.backup import ElasticBackup
 from home.src.es.connect import ElasticWrap
 from home.src.es.snapshot import ElasticSnapshot
 from home.src.frontend.searching import SearchForm
@@ -27,6 +28,7 @@ from home.tasks import (
     check_reindex,
     download_pending,
     extrac_dl,
+    run_restore_backup,
     subscribe_to,
 )
 from rest_framework import permissions
@@ -764,6 +766,76 @@ class SnapshotApiView(ApiBaseView):
         return Response(response)
+
+
+class BackupApiListView(ApiBaseView):
+    """resolves to /api/backup/
+    GET: returns list of available zip backups
+    POST: take zip backup now
+    """
+
+    permission_classes = [AdminOnly]
+    task_name = "run_backup"
+
+    @staticmethod
+    def get(request):
+        """handle get request"""
+        # pylint: disable=unused-argument
+        backup_files = ElasticBackup().get_all_backup_files()
+        return Response(backup_files)
+
+    def post(self, request):
+        """handle post request"""
+        # pylint: disable=unused-argument
+        message = TaskCommand().start(self.task_name)
+
+        return Response({"message": message})
+
+
+class BackupApiView(ApiBaseView):
+    """resolves to /api/backup/<filename>/
+    GET: return a single backup
+    POST: restore backup
+    DELETE: delete backup
+    """
+
+    permission_classes = [AdminOnly]
+    task_name = "restore_backup"
+
+    @staticmethod
+    def get(request, filename):
+        """get single backup"""
+        # pylint: disable=unused-argument
+        backup_file = ElasticBackup().build_backup_file_data(filename)
+        if not backup_file:
+            message = {"message": "file not found"}
+            return Response(message, status=404)
+
+        return Response(backup_file)
+
+    def post(self, request, filename):
+        """restore backup file"""
+        # pylint: disable=unused-argument
+        task = run_restore_backup.delay(filename)
+        message = {
+            "message": "backup restore task started",
+            "filename": filename,
+            "task_id": task.id,
+        }
+
+        return Response({"message": message})
+
+    @staticmethod
+    def delete(request, filename):
+        """delete backup file"""
+        # pylint: disable=unused-argument
+
+        backup_file = ElasticBackup().delete_file(filename)
+        if not backup_file:
+            message = {"message": "file not found"}
+            return Response(message, status=404)
+
+        message = {"message": f"file {filename} deleted"}
+        return Response(message)
+
+
 class TaskListView(ApiBaseView):
     """resolves to /api/task-name/
     GET: return a list of all stored task results
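Taken together, the two views map onto a plain HTTP client. A minimal sketch using the requests library; the base URL and the token auth header are assumptions, not part of this diff:

import requests

BASE = "http://localhost:8000/api"  # assumed host/port
HEADERS = {"Authorization": "Token abc123"}  # assumed auth scheme

# list available zip backups, then queue a new one
backups = requests.get(f"{BASE}/backup/", headers=HEADERS).json()
requests.post(f"{BASE}/backup/", headers=HEADERS)

if backups:
    filename = backups[0]["filename"]
    # inspect, restore, or delete a single backup file
    detail = requests.get(f"{BASE}/backup/{filename}/", headers=HEADERS).json()
    requests.post(f"{BASE}/backup/{filename}/", headers=HEADERS)  # restore
    requests.delete(f"{BASE}/backup/{filename}/", headers=HEADERS)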
@@ -20,10 +20,11 @@ class ElasticBackup:
     """dump index to nd-json files for later bulk import"""

     INDEX_SPLIT = ["comment"]
+    CACHE_DIR = EnvironmentSettings.CACHE_DIR
+    BACKUP_DIR = os.path.join(CACHE_DIR, "backup")

     def __init__(self, reason=False, task=False):
         self.config = AppConfig().config
-        self.cache_dir = EnvironmentSettings.CACHE_DIR
         self.timestamp = datetime.now().strftime("%Y%m%d")
         self.index_config = get_mapping()
         self.reason = reason
@@ -79,14 +80,13 @@ class ElasticBackup:
     def zip_it(self):
         """pack it up into single zip file"""
         file_name = f"ta_backup-{self.timestamp}-{self.reason}.zip"
-        folder = os.path.join(self.cache_dir, "backup")

         to_backup = []
-        for file in os.listdir(folder):
+        for file in os.listdir(self.BACKUP_DIR):
             if file.endswith(".json"):
-                to_backup.append(os.path.join(folder, file))
+                to_backup.append(os.path.join(self.BACKUP_DIR, file))

-        backup_file = os.path.join(folder, file_name)
+        backup_file = os.path.join(self.BACKUP_DIR, file_name)

         comp = zipfile.ZIP_DEFLATED
         with zipfile.ZipFile(backup_file, "w", compression=comp) as zip_f:
@@ -99,7 +99,7 @@ class ElasticBackup:

     def post_bulk_restore(self, file_name):
         """send bulk to es"""
-        file_path = os.path.join(self.cache_dir, file_name)
+        file_path = os.path.join(self.CACHE_DIR, file_name)
         with open(file_path, "r", encoding="utf-8") as f:
             data = f.read()

@@ -110,9 +110,7 @@ class ElasticBackup:

     def get_all_backup_files(self):
         """build all available backup files for view"""
-        backup_dir = os.path.join(self.cache_dir, "backup")
-        backup_files = os.listdir(backup_dir)
-        all_backup_files = ignore_filelist(backup_files)
+        all_backup_files = ignore_filelist(os.listdir(self.BACKUP_DIR))
         all_available_backups = [
             i
             for i in all_backup_files
@@ -121,8 +119,19 @@ class ElasticBackup:
         all_available_backups.sort(reverse=True)

         backup_dicts = []
-        for backup_file in all_available_backups:
-            file_split = backup_file.split("-")
+        for filename in all_available_backups:
+            data = self.build_backup_file_data(filename)
+            backup_dicts.append(data)
+
+        return backup_dicts
+
+    def build_backup_file_data(self, filename):
+        """build metadata of single backup file"""
+        file_path = os.path.join(self.BACKUP_DIR, filename)
+        if not os.path.exists(file_path):
+            return False
+
+        file_split = filename.split("-")
         if len(file_split) == 2:
             timestamp = file_split[1].strip(".zip")
             reason = False
@@ -130,14 +139,15 @@ class ElasticBackup:
             timestamp = file_split[1]
             reason = file_split[2].strip(".zip")

-        to_add = {
-            "filename": backup_file,
+        data = {
+            "filename": filename,
+            "file_path": file_path,
+            "file_size": os.path.getsize(file_path),
             "timestamp": timestamp,
             "reason": reason,
         }
-        backup_dicts.append(to_add)

-        return backup_dicts
+        return data

     def restore(self, filename):
         """
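The metadata builder relies on the naming convention zip_it establishes. A quick illustration of the two filename shapes it has to handle (the dates and reason are made up). Note that str.strip(".zip") strips a set of characters rather than a suffix; it works here only because timestamps and reasons don't end in '.', 'z', 'i', or 'p':

# reason given: ta_backup-<YYYYMMDD>-<reason>.zip -> 3 parts
print("ta_backup-20240101-auto.zip".split("-"))
# ['ta_backup', '20240101', 'auto.zip']

# no reason: ta_backup-<YYYYMMDD>.zip -> 2 parts, reason stays False
print("ta_backup-20240101.zip".split("-"))
# ['ta_backup', '20240101.zip']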
@@ -149,22 +159,19 @@ class ElasticBackup:

     def _unpack_zip_backup(self, filename):
         """extract backup zip and return filelist"""
-        backup_dir = os.path.join(self.cache_dir, "backup")
-        file_path = os.path.join(backup_dir, filename)
+        file_path = os.path.join(self.BACKUP_DIR, filename)

         with zipfile.ZipFile(file_path, "r") as z:
             zip_content = z.namelist()
-            z.extractall(backup_dir)
+            z.extractall(self.BACKUP_DIR)

         return zip_content

     def _restore_json_files(self, zip_content):
         """go through the unpacked files and restore"""
-        backup_dir = os.path.join(self.cache_dir, "backup")
-
         for idx, json_f in enumerate(zip_content):
             self._notify_restore(idx, json_f, len(zip_content))
-            file_name = os.path.join(backup_dir, json_f)
+            file_name = os.path.join(self.BACKUP_DIR, json_f)

             if not json_f.startswith("es_") or not json_f.endswith(".json"):
                 os.remove(file_name)
@@ -201,14 +208,22 @@ class ElasticBackup:
             print("no backup files to rotate")
             return

-        backup_dir = os.path.join(self.cache_dir, "backup")
-
         all_to_delete = auto[rotate:]
         for to_delete in all_to_delete:
-            file_path = os.path.join(backup_dir, to_delete["filename"])
+            self.delete_file(to_delete["filename"])
+
+    def delete_file(self, filename):
+        """delete backup file"""
+        file_path = os.path.join(self.BACKUP_DIR, filename)
+        if not os.path.exists(file_path):
+            print(f"backup file not found: {filename}")
+            return False
+
         print(f"remove old backup file: {file_path}")
         os.remove(file_path)
+
+        return file_path


 class BackupCallback:
     """handle backup ndjson writer as callback for IndexPaginate"""
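The rotation logic keeps the newest `rotate` automatic backups and removes the rest: since the file list was sorted newest-first above, the slice auto[rotate:] is exactly the tail to delete. A toy illustration with made-up filenames:

auto = [
    {"filename": "ta_backup-20240103-auto.zip"},  # newest first
    {"filename": "ta_backup-20240102-auto.zip"},
    {"filename": "ta_backup-20240101-auto.zip"},
]
rotate = 2
print([i["filename"] for i in auto[rotate:]])
# ['ta_backup-20240101-auto.zip'] -> would be deleted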
@@ -294,7 +294,7 @@ def run_restore_backup(self, filename):
     if manager.is_pending(self):
         print(f"[task][{self.name}] restore is already running")
         self.send_progress("Restore is already running.")
-        return
+        return None

     manager.init(self)
     self.send_progress(["Reset your Index"])
@@ -302,6 +302,8 @@ def run_restore_backup(self, filename):
     ElasticBackup(task=self).restore(filename)
     print("index restore finished")

+    return f"backup restore completed: {filename}"


 @shared_task(bind=True, name="rescan_filesystem", base=BaseTask)
 def rescan_filesystem(self):
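Returning a string instead of falling through with None means the restore task now reports a result. A minimal sketch of how a caller might observe it; assumes a Celery result backend is configured, and the filename is illustrative:

from home.tasks import run_restore_backup

task = run_restore_backup.delay("ta_backup-20240101-auto.zip")
print(task.id)  # same id the BackupApiView POST response reports
# with a result backend, the return value is retrievable:
# task.get(timeout=600)
# -> "backup restore completed: ta_backup-20240101-auto.zip"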
@@ -283,7 +283,7 @@ function reEmbed() {
 }

 function dbBackup() {
-  let apiEndpoint = '/api/task-name/run_backup/';
+  let apiEndpoint = '/api/backup/';
   apiRequest(apiEndpoint, 'POST');
   // clear button
   let message = document.createElement('p');
@@ -299,8 +299,8 @@ function dbBackup() {

 function dbRestore(button) {
   let fileName = button.getAttribute('data-id');
-  let payload = JSON.stringify({ 'db-restore': fileName });
-  sendPost(payload);
+  let apiEndpoint = `/api/backup/${fileName}/`;
+  apiRequest(apiEndpoint, 'POST');
   // clear backup row
   let message = document.createElement('p');
   message.innerText = 'restoring from backup';