"""
Functionality:
- initiate celery app
- collect tasks
"""
|
import os
|
|
|
|
|
|
|
|
from celery import Celery, shared_task
|
|
|
|
from home.src.config import AppConfig
|
2021-09-18 13:02:54 +00:00
|
|
|
from home.src.download import ChannelSubscription, PendingList, VideoDownloader
|
2021-09-28 03:33:00 +00:00
|
|
|
from home.src.helper import RedisArchivist, RedisQueue
|
2021-09-20 13:26:28 +00:00
|
|
|
from home.src.index_management import backup_all_indexes, restore_from_backup
|
2021-10-08 07:56:07 +00:00
|
|
|
from home.src.reindex import (
|
|
|
|
ManualImport,
|
|
|
|
reindex_old_documents,
|
2021-10-08 08:10:44 +00:00
|
|
|
scan_filesystem,
|
2021-10-08 07:56:07 +00:00
|
|
|
)
|
2021-10-11 06:33:28 +00:00
|
|
|
from home.src.thumbnails import ThumbManager
|
2021-09-05 17:10:14 +00:00
|
|
|
|
|
|
|
# application wide config, loaded once at import time
CONFIG = AppConfig().config

# redis connection details come from the environment,
# falling back to the default redis port when unset
REDIS_HOST = os.environ.get("REDIS_HOST")
REDIS_PORT = os.environ.get("REDIS_PORT") or 6379

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "home.settings")

# celery app wired to redis as broker, configured from django settings
app = Celery("tasks", broker=f"redis://{REDIS_HOST}:{REDIS_PORT}")
app.config_from_object("django.conf:settings", namespace="CELERY")
app.autodiscover_tasks()
|
|
|
|
|
|
|
|
|
|
|
|
@shared_task
def update_subscribed():
    """look for missing videos and add to pending"""
    missing = ChannelSubscription().find_missing()
    if missing:
        PendingList().add_to_pending(missing)

    # check if reindex is needed
    check_reindex.delay()
|
2021-09-05 17:10:14 +00:00
|
|
|
|
|
|
|
|
|
|
|
@shared_task
def download_pending():
    """download latest pending videos"""
    acquired = False
    # redis lock guarantees a single downloader at a time
    dl_lock = RedisArchivist().get_lock("downloading")

    try:
        acquired = dl_lock.acquire(blocking=False)
        if not acquired:
            print("Did not acquire download lock.")
        else:
            downloader = VideoDownloader()
            downloader.add_pending()
            downloader.run_queue()
    finally:
        if acquired:
            dl_lock.release()
|
2021-09-05 17:10:14 +00:00
|
|
|
|
|
|
|
|
|
|
|
@shared_task
def download_single(youtube_id):
    """start download single video now"""
    queue = RedisQueue("dl_queue")
    queue.add_priority(youtube_id)
    print("Added to queue with priority: " + youtube_id)

    # kick off the queue worker unless one is already running
    acquired = False
    dl_lock = RedisArchivist().get_lock("downloading")

    try:
        acquired = dl_lock.acquire(blocking=False)
        if not acquired:
            print("Download queue already running.")
        else:
            VideoDownloader().run_queue()
    finally:
        # only release when nothing else is waiting in the queue,
        # otherwise the running worker keeps the lock
        if acquired and not queue.get_next():
            dl_lock.release()
|
2021-09-05 17:10:14 +00:00
|
|
|
|
|
|
|
|
|
|
|
@shared_task
def extrac_dl(youtube_ids):
    """parse list passed and add to pending"""
    handler = PendingList()
    to_add = handler.parse_url_list(youtube_ids)
    added = handler.add_to_pending(to_add)
    # fetch thumbnails for everything that made it into pending
    ThumbManager().download_missing(added)
|
2021-09-11 10:55:44 +00:00
|
|
|
|
|
|
|
|
|
|
|
@shared_task
def check_reindex():
    """task entry point to refresh outdated documents in the index"""
    reindex_old_documents()
|
2021-09-13 15:17:36 +00:00
|
|
|
|
|
|
|
|
|
|
|
@shared_task
def run_manual_import():
    """called from settings page, to go through import folder

    Guarded by the "manual_import" redis lock so only one
    import run can be active at a time.
    """
    print("starting media file import")
    have_lock = False
    my_lock = RedisArchivist().get_lock("manual_import")

    try:
        have_lock = my_lock.acquire(blocking=False)
        if have_lock:
            import_handler = ManualImport()
            # only process when the scan identified importable files
            if import_handler.identified:
                import_handler.process_import()
        else:
            # fixed typo in message: was "lock form import"
            print("Did not acquire lock for import.")
    finally:
        if have_lock:
            my_lock.release()
|
2021-09-16 11:16:09 +00:00
|
|
|
|
2021-09-18 13:02:54 +00:00
|
|
|
|
2021-09-16 11:16:09 +00:00
|
|
|
@shared_task
def run_backup():
    """called from settings page, dump backup to zip file"""
    backup_all_indexes()
    print("backup finished")
|
2021-09-20 13:26:28 +00:00
|
|
|
|
|
|
|
|
|
|
|
@shared_task
def run_restore_backup():
    """called from settings page, restore indexes from backup zip file"""
    # docstring previously claimed this dumps a backup (copy-paste
    # from run_backup); this task restores, it does not create one
    restore_from_backup()
    print("index restore finished")
|
2021-09-24 16:37:26 +00:00
|
|
|
|
|
|
|
|
|
|
|
def kill_dl(task_id):
    """kill download worker task by ID"""
    # terminate the celery worker task and drop all queue state
    app.control.revoke(task_id, terminate=True)
    _ = RedisArchivist().del_message("dl_queue_id")
    RedisQueue("dl_queue").clear()

    # clear cache
    cache_dir = os.path.join(CONFIG["application"]["cache_dir"], "download")
    for cached in os.listdir(cache_dir):
        os.remove(os.path.join(cache_dir, cached))

    # notify
    mess_dict = {
        "status": "downloading",
        "level": "error",
        "title": "Brutally killing download queue",
        "message": "",
    }
    RedisArchivist().set_message("progress:download", mess_dict)
|
2021-10-08 07:56:07 +00:00
|
|
|
|
|
|
|
|
|
|
|
@shared_task
def rescan_filesystem():
    """check the media folder for mismatches"""
    scan_filesystem()