"""
Functionality:
- initiate celery app
- collect tasks
- user config changes won't get applied here
because tasks are initiated at application start
2021-09-05 17:10:14 +00:00
"""
import os
from celery import Celery, shared_task
from home.apps import StartupCheck
from home.src.download.queue import PendingList
from home.src.download.subscriptions import (
ChannelSubscription,
PlaylistSubscription,
)
from home.src.download.thumbnails import ThumbManager, validate_thumbnails
from home.src.download.yt_dlp_handler import VideoDownloader
from home.src.es.index_setup import backup_all_indexes, restore_from_backup
from home.src.index.channel import YoutubeChannel
from home.src.index.filesystem import (
ManualImport,
reindex_old_documents,
scan_filesystem,
)
from home.src.ta.config import AppConfig, ScheduleBuilder
from home.src.ta.helper import UrlListParser
from home.src.ta.ta_redis import RedisArchivist, RedisQueue
# user config is loaded once at import time; workers must restart to pick
# up config changes (see module docstring)
CONFIG = AppConfig().config

REDIS_HOST = os.environ.get("REDIS_HOST")
REDIS_PORT = os.environ.get("REDIS_PORT") or 6379

# make Django settings available to the celery workers
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings")
app = Celery("tasks", broker=f"redis://{REDIS_HOST}:{REDIS_PORT}")
app.config_from_object("django.conf:settings", namespace="ta:")
app.autodiscover_tasks()
app.conf.timezone = os.environ.get("TZ") or "UTC"
@shared_task(name="update_subscribed")
def update_subscribed():
    """look for missing videos and add to pending"""
    message = {
        "status": "message:rescan",
        "level": "info",
        "title": "Rescanning channels and playlists.",
        "message": "Looking for new videos.",
    }
    RedisArchivist().set_message("message:rescan", message, expire=True)
    rescan_lock = RedisArchivist().get_lock("rescan")
    lock_acquired = False
    try:
        lock_acquired = rescan_lock.acquire(blocking=False)
        if not lock_acquired:
            # another rescan is already in progress, nothing to do
            print("Did not acquire rescan lock.")
            return

        missing_channels = ChannelSubscription().find_missing()
        missing_playlists = PlaylistSubscription().find_missing()
        all_missing = missing_channels + missing_playlists
        if all_missing:
            youtube_ids = [{"type": "video", "url": i} for i in all_missing]
            pending_handler = PendingList(youtube_ids=youtube_ids)
            pending_handler.parse_url_list()
            pending_handler.add_to_pending()
    finally:
        if lock_acquired:
            rescan_lock.release()


@shared_task(name="download_pending")
2021-09-05 17:10:14 +00:00
def download_pending():
2021-09-21 09:25:22 +00:00
"""download latest pending videos"""
have_lock = False
my_lock = RedisArchivist().get_lock("downloading")
try:
have_lock = my_lock.acquire(blocking=False)
if have_lock:
downloader = VideoDownloader()
downloader.add_pending()
downloader.run_queue()
else:
print("Did not acquire download lock.")
finally:
if have_lock:
my_lock.release()
2021-09-05 17:10:14 +00:00
@shared_task
def download_single(youtube_id):
    """start download single video now"""
    queue = RedisQueue()
    queue.add_priority(youtube_id)
    print("Added to queue with priority: " + youtube_id)
    # start queue if needed
    dl_lock = RedisArchivist().get_lock("downloading")
    lock_acquired = False
    try:
        lock_acquired = dl_lock.acquire(blocking=False)
        if not lock_acquired:
            # an active worker will pick up the prioritized item
            print("Download queue already running.")
        else:
            key = "message:download"
            mess_dict = {
                "status": key,
                "level": "info",
                "title": "Download single video",
                "message": "processing",
            }
            RedisArchivist().set_message(key, mess_dict, expire=True)
            VideoDownloader().run_queue()
    finally:
        # release if only single run
        if lock_acquired and not queue.get_next():
            dl_lock.release()


@shared_task
def extrac_dl(youtube_ids):
    """parse list passed and add to pending"""
    # NOTE: task keeps its historic misspelled name so existing callers work
    handler = PendingList(youtube_ids=youtube_ids)
    handler.parse_url_list()
    handler.add_to_pending()


@shared_task(name="check_reindex")
def check_reindex():
    """scheduled task: refresh outdated documents in the index"""
    reindex_old_documents()


@shared_task
def run_manual_import():
    """called from settings page, to go through import folder"""
    print("starting media file import")
    have_lock = False
    my_lock = RedisArchivist().get_lock("manual_import")
    try:
        have_lock = my_lock.acquire(blocking=False)
        if have_lock:
            import_handler = ManualImport()
            # only process when the scan identified importable files
            if import_handler.identified:
                all_videos_added = import_handler.process_import()
                ThumbManager().download_vid(all_videos_added)
        else:
            # fixed message typo: was "lock form import"
            print("Did not acquire lock for import.")
    finally:
        if have_lock:
            my_lock.release()


@shared_task(name="run_backup")
def run_backup(reason="auto"):
    """dump a backup of all indexes to zip file, called from settings page
    or on schedule; reason defaults to "auto" for scheduled runs"""
    backup_all_indexes(reason)
    print("backup finished")
@shared_task
def run_restore_backup(filename):
    """called from settings page, restore indexes from a backup zip file"""
    # docstring fixed: previously copy-pasted from run_backup ("dump backup")
    restore_from_backup(filename)
    print("index restore finished")


def kill_dl(task_id):
    """kill download worker task by ID"""
    if task_id:
        app.control.revoke(task_id, terminate=True)

    _ = RedisArchivist().del_message("dl_queue_id")
    RedisQueue().clear()

    # drop any partially downloaded files from the cache
    download_cache = os.path.join(CONFIG["application"]["cache_dir"], "download")
    for file_name in os.listdir(download_cache):
        os.remove(os.path.join(download_cache, file_name))

    # let the frontend know the queue was canceled
    mess_dict = {
        "status": "message:download",
        "level": "error",
        "title": "Canceling download process",
        "message": "Canceling download queue now.",
    }
    RedisArchivist().set_message("message:download", mess_dict, expire=True)


@shared_task
def rescan_filesystem():
    """sync the index with the media folder, then validate thumbnails"""
    scan_filesystem()
    validate_thumbnails()


@shared_task(name="thumbnail_check")
def thumbnail_check():
    """validate thumbnails"""
    # scheduled task: delegate entirely to the thumbnail validator
    validate_thumbnails()
@shared_task
def re_sync_thumbs():
    """sync thumbnails to mediafiles"""
    thumb_handler = ThumbManager()
    thumb_list = thumb_handler.get_thumb_list()
    thumb_handler.write_all_thumbs(thumb_list)
@shared_task
def subscribe_to(url_str):
    """take a list of urls to subscribe to"""
    to_subscribe_list = UrlListParser(url_str).process_list()
    total = len(to_subscribe_list)
    # enumerate fixes the progress counter: the old manual counter was not
    # incremented when a playlist item hit `continue`, undercounting progress
    for counter, item in enumerate(to_subscribe_list, start=1):
        to_sub_id = item["url"]
        if item["type"] == "playlist":
            # playlists are handled by their own subscription class,
            # no channel notification is sent for them
            new_thumbs = PlaylistSubscription().process_url_str([item])
            if new_thumbs:
                ThumbManager().download_playlist(new_thumbs)
            continue

        if item["type"] == "video":
            # resolve the video to its parent channel
            vid_details = PendingList().get_youtube_details(to_sub_id)
            channel_id_sub = vid_details["channel_id"]
        elif item["type"] == "channel":
            channel_id_sub = to_sub_id
        else:
            raise ValueError("failed to subscribe to: " + to_sub_id)

        ChannelSubscription().change_subscribe(
            channel_id_sub, channel_subscribed=True
        )
        # notify for channels
        key = "message:subchannel"
        message = {
            "status": key,
            "level": "info",
            "title": "Subscribing to Channels",
            "message": f"Processing {counter} of {total}",
        }
        RedisArchivist().set_message(key, message=message, expire=True)
@shared_task
def index_channel_playlists(channel_id):
    """add all playlists of channel to index"""
    channel = YoutubeChannel(channel_id)
    # let the frontend know the scan started
    key = "message:playlistscan"
    notification = {
        "status": key,
        "level": "info",
        "title": "Looking for playlists",
        "message": f'Scanning channel "{channel.youtube_id}" in progress',
    }
    RedisArchivist().set_message(key, notification, expire=True)
    channel.index_channel_playlists()


# build the celery beat schedule from the user's stored config
try:
    app.conf.beat_schedule = ScheduleBuilder().build_schedule()
except KeyError:
    # update path from v0.0.8 to v0.0.9 to load new defaults
    StartupCheck().sync_redis_state()
    app.conf.beat_schedule = ScheduleBuilder().build_schedule()