Improved Cookie handling, #build

Changed:
- Cookie: Use IO Stream from Redis directly for cookies
- Cookie: Don't load invalid cookie
- API: Import cookie with PUT request
- Redis: Don't expire Redis messages by default
- Redis: Connections expire automatically
This commit is contained in:
simon 2022-06-21 08:14:01 +07:00
commit 49d33a2765
No known key found for this signature in database
GPG Key ID: 2C15AA5E89985DD4
19 changed files with 241 additions and 97 deletions

View File

@ -1,8 +1,11 @@
![Tube Archivist](assets/tube-archivist-banner.jpg?raw=true "Tube Archivist Banner") ![Tube Archivist](assets/tube-archivist-banner.jpg?raw=true "Tube Archivist Banner")
<center><h1>Your self hosted YouTube media server</h1></center> <h1 align="center">Your self hosted YouTube media server</h1>
<div align="center">
Tube Archivist has a new home: https://github.com/tubearchivist/tubearchivist <a href="https://github.com/bbilly1/tilefy" target="_blank"><img src="https://tiles.tilefy.me/t/tubearchivist-docker.png" alt="tubearchivist-docker" title="Tube Archivist Docker Pulls" height="50" width="200"/></a>
<a href="https://github.com/bbilly1/tilefy" target="_blank"><img src="https://tiles.tilefy.me/t/tubearchivist-github-star.png" alt="tubearchivist-github-star" title="Tube Archivist GitHub Stars" height="50" width="200"/></a>
<a href="https://github.com/bbilly1/tilefy" target="_blank"><img src="https://tiles.tilefy.me/t/tubearchivist-github-forks.png" alt="tubearchivist-github-forks" title="Tube Archivist GitHub Forks" height="50" width="200"/></a>
</div>
## Table of contents: ## Table of contents:
* [Wiki](https://github.com/tubearchivist/tubearchivist/wiki) with [FAQ](https://github.com/tubearchivist/tubearchivist/wiki/FAQ) * [Wiki](https://github.com/tubearchivist/tubearchivist/wiki) with [FAQ](https://github.com/tubearchivist/tubearchivist/wiki/FAQ)

View File

@ -1,6 +1,9 @@
# TubeArchivist API # TubeArchivist API
Documentation of available API endpoints. Documentation of available API endpoints.
**Note: This is very early alpha and will change!**
Note:
- This is in a very early stage and will change!
- Check the commit history to see if a documented feature is already in your release
## Authentication ## Authentication
API token will get automatically created, accessible on the settings page. Token needs to be passed as an authorization header with every request. Additionally session based authentication is enabled too: When you are logged into your TubeArchivist instance, you'll have access to the api in the browser for testing. API token will get automatically created, accessible on the settings page. Token needs to be passed as an authorization header with every request. Additionally session based authentication is enabled too: When you are logged into your TubeArchivist instance, you'll have access to the api in the browser for testing.
@ -216,7 +219,7 @@ Returns:
} }
``` ```
Start a background task Start a background task
POST /api/task/ POST /api/task/
```json ```json
{ {
@ -245,3 +248,25 @@ Send empty post request to validate cookie.
"cookie_validated": true "cookie_validated": true
} }
``` ```
PUT /api/cookie/
Send put request containing the cookie as a string:
```json
{
"cookie": "your-cookie-as-string"
}
```
Imports and validates cookie, returns on success:
```json
{
"cookie_import": "done",
"cookie_validated": true
}
```
Or returns status code 400 on failure:
```json
{
"cookie_import": "fail",
"cookie_validated": false
}
```

View File

@ -50,5 +50,5 @@ class TaskHandler:
print("download pending") print("download pending")
running = download_pending.delay() running = download_pending.delay()
print("set task id: " + running.id) print("set task id: " + running.id)
RedisArchivist().set_message("dl_queue_id", running.id, expire=False) RedisArchivist().set_message("dl_queue_id", running.id)
return {"success": True} return {"success": True}

View File

@ -131,7 +131,7 @@ class VideoProgressView(ApiBaseView):
position = request.data.get("position", 0) position = request.data.get("position", 0)
key = f"{request.user.id}:progress:{video_id}" key = f"{request.user.id}:progress:{video_id}"
message = {"position": position, "youtube_id": video_id} message = {"position": position, "youtube_id": video_id}
RedisArchivist().set_message(key, message, expire=False) RedisArchivist().set_message(key, message)
self.response = request.data self.response = request.data
return Response(self.response) return Response(self.response)
@ -459,7 +459,7 @@ class TaskApiView(ApiBaseView):
@staticmethod @staticmethod
def get(request): def get(request):
"""handle get request""" """handle get request"""
# pylint: disable=unused-argument
response = {"rescan": False, "downloading": False} response = {"rescan": False, "downloading": False}
for key in response.keys(): for key in response.keys():
response[key] = RedisArchivist().is_locked(key) response[key] = RedisArchivist().is_locked(key)
@ -480,6 +480,7 @@ class CookieView(ApiBaseView):
"""resolves to /api/cookie/ """resolves to /api/cookie/
GET: check if cookie is enabled GET: check if cookie is enabled
POST: verify validity of cookie POST: verify validity of cookie
PUT: import cookie
""" """
@staticmethod @staticmethod
@ -499,3 +500,27 @@ class CookieView(ApiBaseView):
validated = CookieHandler(config).validate() validated = CookieHandler(config).validate()
return Response({"cookie_validated": validated}) return Response({"cookie_validated": validated})
@staticmethod
def put(request):
"""handle put request"""
# pylint: disable=unused-argument
config = AppConfig().config
cookie = request.data.get("cookie")
if not cookie:
message = "missing cookie key in request data"
print(message)
return Response({"message": message}, status=400)
print(f"cookie preview:\n\n{cookie[:300]}")
handler = CookieHandler(config)
handler.set_cookie(cookie)
validated = handler.validate()
if not validated:
handler.revoke()
message = {"cookie_import": "fail", "cookie_validated": validated}
print(f"cookie: {message}")
return Response({"message": message}, status=400)
message = {"cookie_import": "done", "cookie_validated": validated}
return Response(message)

View File

@ -28,6 +28,7 @@ class StartupCheck:
self.release_lock() self.release_lock()
index_check() index_check()
self.sync_redis_state() self.sync_redis_state()
self.set_redis_conf()
self.make_folders() self.make_folders()
self.set_has_run() self.set_has_run()
@ -45,6 +46,10 @@ class StartupCheck:
print("sync redis") print("sync redis")
self.config_handler.load_new_defaults() self.config_handler.load_new_defaults()
def set_redis_conf(self):
"""set conf values for redis"""
self.redis_con.conn.config_set("timeout", 3600)
def make_folders(self): def make_folders(self):
"""make needed cache folders here so docker doesn't mess it up""" """make needed cache folders here so docker doesn't mess it up"""
folders = [ folders = [

View File

@ -150,7 +150,7 @@ class PendingList(PendingIndex):
"title": "Adding to download queue.", "title": "Adding to download queue.",
"message": "Extracting lists", "message": "Extracting lists",
} }
RedisArchivist().set_message("message:add", mess_dict) RedisArchivist().set_message("message:add", mess_dict, expire=True)
self._process_entry(entry) self._process_entry(entry)
def _process_entry(self, entry): def _process_entry(self, entry):
@ -229,10 +229,11 @@ class PendingList(PendingIndex):
"message": "Progress: " + progress, "message": "Progress: " + progress,
} }
if idx + 1 == len(self.missing_videos): if idx + 1 == len(self.missing_videos):
RedisArchivist().set_message("message:add", mess_dict, expire=4) expire = 4
else: else:
RedisArchivist().set_message("message:add", mess_dict) expire = True
RedisArchivist().set_message("message:add", mess_dict, expire=expire)
if idx + 1 % 25 == 0: if idx + 1 % 25 == 0:
print("adding to queue progress: " + progress) print("adding to queue progress: " + progress)

View File

@ -76,11 +76,13 @@ class ChannelSubscription:
"message": f"Progress: {idx + 1}/{len(all_channels)}", "message": f"Progress: {idx + 1}/{len(all_channels)}",
} }
if idx + 1 == len(all_channels): if idx + 1 == len(all_channels):
RedisArchivist().set_message( expire = 4
"message:rescan", message=message, expire=4
)
else: else:
RedisArchivist().set_message("message:rescan", message=message) expire = True
RedisArchivist().set_message(
"message:rescan", message=message, expire=expire
)
return missing_videos return missing_videos
@ -152,7 +154,7 @@ class PlaylistSubscription:
"message": f"Processing {idx + 1} of {len(new_playlists)}", "message": f"Processing {idx + 1} of {len(new_playlists)}",
} }
RedisArchivist().set_message( RedisArchivist().set_message(
"message:subplaylist", message=message "message:subplaylist", message=message, expire=True
) )
return new_thumbs return new_thumbs
@ -206,7 +208,9 @@ class PlaylistSubscription:
"title": "Scanning playlists: Looking for new videos.", "title": "Scanning playlists: Looking for new videos.",
"message": f"Progress: {idx + 1}/{len(all_playlists)}", "message": f"Progress: {idx + 1}/{len(all_playlists)}",
} }
RedisArchivist().set_message("message:rescan", message=message) RedisArchivist().set_message(
"message:rescan", message=message, expire=True
)
for video in all_missing: for video in all_missing:
youtube_id = video["youtube_id"] youtube_id = video["youtube_id"]

View File

@ -193,11 +193,13 @@ class ThumbManager:
"message": "Downloading Thumbnails, Progress: " + progress, "message": "Downloading Thumbnails, Progress: " + progress,
} }
if idx + 1 == len(missing_thumbs): if idx + 1 == len(missing_thumbs):
RedisArchivist().set_message( expire = 4
"message:add", mess_dict, expire=4
)
else: else:
RedisArchivist().set_message("message:add", mess_dict) expire = True
RedisArchivist().set_message(
"message:add", mess_dict, expire=expire
)
if idx + 1 % 25 == 0: if idx + 1 % 25 == 0:
print("thumbnail progress: " + progress) print("thumbnail progress: " + progress)
@ -226,7 +228,8 @@ class ThumbManager:
"title": "Processing Channels", "title": "Processing Channels",
"message": "Downloading Channel Art.", "message": "Downloading Channel Art.",
} }
RedisArchivist().set_message("message:download", mess_dict) key = "message:download"
RedisArchivist().set_message(key, mess_dict, expire=True)
def download_playlist(self, missing_playlists): def download_playlist(self, missing_playlists):
"""download needed artwork for playlists""" """download needed artwork for playlists"""
@ -243,7 +246,8 @@ class ThumbManager:
"title": "Processing Playlists", "title": "Processing Playlists",
"message": "Downloading Playlist Art.", "message": "Downloading Playlist Art.",
} }
RedisArchivist().set_message("message:download", mess_dict) key = "message:download"
RedisArchivist().set_message(key, mess_dict, expire=True)
def get_base64_blur(self, youtube_id): def get_base64_blur(self, youtube_id):
"""return base64 encoded placeholder""" """return base64 encoded placeholder"""

View File

@ -5,6 +5,7 @@ functionality:
""" """
import os import os
from http import cookiejar
from io import StringIO from io import StringIO
import yt_dlp import yt_dlp
@ -54,9 +55,12 @@ class YtWrap:
"""make extract request""" """make extract request"""
try: try:
response = yt_dlp.YoutubeDL(self.obs).extract_info(url) response = yt_dlp.YoutubeDL(self.obs).extract_info(url)
except cookiejar.LoadError:
print("cookie file is invalid")
return False
except (yt_dlp.utils.ExtractorError, yt_dlp.utils.DownloadError): except (yt_dlp.utils.ExtractorError, yt_dlp.utils.DownloadError):
print(f"{url}: failed to get info from youtube") print(f"{url}: failed to get info from youtube")
response = False return False
return response return response
@ -81,22 +85,53 @@ class CookieHandler:
with open(import_path, encoding="utf-8") as cookie_file: with open(import_path, encoding="utf-8") as cookie_file:
cookie = cookie_file.read() cookie = cookie_file.read()
RedisArchivist().set_message("cookie", cookie, expire=False) self.set_cookie(cookie)
os.remove(import_path) os.remove(import_path)
print("cookie: import successful") print("cookie: import successful")
def set_cookie(self, cookie):
"""set cookie str and activate in cofig"""
RedisArchivist().set_message("cookie", cookie)
path = ".downloads.cookie_import"
RedisArchivist().set_message("config", True, path=path)
self.config["downloads"]["cookie_import"] = True
print("cookie: activated and stored in Redis")
@staticmethod @staticmethod
def revoke(): def revoke():
"""revoke cookie""" """revoke cookie"""
RedisArchivist().del_message("cookie") RedisArchivist().del_message("cookie")
RedisArchivist().set_message(
"config", False, path=".downloads.cookie_import"
)
print("cookie: revoked") print("cookie: revoked")
def validate(self): def validate(self):
"""validate cookie using the liked videos playlist""" """validate cookie using the liked videos playlist"""
print("validating cookie")
obs_request = { obs_request = {
"skip_download": True, "skip_download": True,
"extract_flat": True, "extract_flat": True,
} }
response = YtWrap(obs_request, self.config).extract("LL") validator = YtWrap(obs_request, self.config)
response = validator.extract("LL")
# update in redis to avoid expiring
modified = validator.obs["cookiefile"].getvalue()
if modified:
RedisArchivist().set_message("cookie", modified)
if not response:
mess_dict = {
"status": "message:download",
"level": "error",
"title": "Cookie validation failed, exiting...",
"message": "",
}
RedisArchivist().set_message(
"message:download", mess_dict, expire=4
)
print("cookie validation failed, exiting...")
return bool(response) return bool(response)

View File

@ -12,7 +12,7 @@ from datetime import datetime
from home.src.download.queue import PendingList from home.src.download.queue import PendingList
from home.src.download.subscriptions import PlaylistSubscription from home.src.download.subscriptions import PlaylistSubscription
from home.src.download.yt_dlp_base import YtWrap from home.src.download.yt_dlp_base import CookieHandler, YtWrap
from home.src.es.connect import ElasticWrap, IndexPaginate from home.src.es.connect import ElasticWrap, IndexPaginate
from home.src.index.channel import YoutubeChannel from home.src.index.channel import YoutubeChannel
from home.src.index.playlist import YoutubePlaylist from home.src.index.playlist import YoutubePlaylist
@ -125,18 +125,19 @@ class DownloadPostProcess:
+ f"{id_c + 1}/{len(self.download.channels)}" + f"{id_c + 1}/{len(self.download.channels)}"
) )
message = f"Progress: {id_p + 1}/{len(all_channel_playlist)}" message = f"Progress: {id_p + 1}/{len(all_channel_playlist)}"
key = "message:download"
mess_dict = { mess_dict = {
"status": "message:download", "status": key,
"level": "info", "level": "info",
"title": title, "title": title,
"message": message, "message": message,
} }
if id_p + 1 == len(all_channel_playlist): if id_p + 1 == len(all_channel_playlist):
RedisArchivist().set_message( expire = 4
"message:download", mess_dict, expire=4
)
else: else:
RedisArchivist().set_message("message:download", mess_dict) expire = True
RedisArchivist().set_message(key, mess_dict, expire=expire)
class VideoDownloader: class VideoDownloader:
@ -145,6 +146,8 @@ class VideoDownloader:
if not initiated with list, take from queue if not initiated with list, take from queue
""" """
MSG = "message:download"
def __init__(self, youtube_id_list=False): def __init__(self, youtube_id_list=False):
self.obs = False self.obs = False
self.video_overwrites = False self.video_overwrites = False
@ -155,10 +158,7 @@ class VideoDownloader:
def run_queue(self): def run_queue(self):
"""setup download queue in redis loop until no more items""" """setup download queue in redis loop until no more items"""
pending = PendingList() self._setup_queue()
pending.get_download()
pending.get_channels()
self.video_overwrites = pending.video_overwrites
queue = RedisQueue() queue = RedisQueue()
@ -180,37 +180,48 @@ class VideoDownloader:
) )
self.channels.add(vid_dict["channel"]["channel_id"]) self.channels.add(vid_dict["channel"]["channel_id"])
mess_dict = { mess_dict = {
"status": "message:download", "status": self.MSG,
"level": "info", "level": "info",
"title": "Moving....", "title": "Moving....",
"message": "Moving downloaded file to storage folder", "message": "Moving downloaded file to storage folder",
} }
RedisArchivist().set_message("message:download", mess_dict, False) RedisArchivist().set_message(self.MSG, mess_dict)
self.move_to_archive(vid_dict) self.move_to_archive(vid_dict)
mess_dict = { mess_dict = {
"status": "message:download", "status": self.MSG,
"level": "info", "level": "info",
"title": "Completed", "title": "Completed",
"message": "", "message": "",
} }
RedisArchivist().set_message("message:download", mess_dict, 10) RedisArchivist().set_message(self.MSG, mess_dict, expire=10)
self._delete_from_pending(youtube_id) self._delete_from_pending(youtube_id)
# post processing # post processing
self._add_subscribed_channels() self._add_subscribed_channels()
DownloadPostProcess(self).run() DownloadPostProcess(self).run()
@staticmethod def _setup_queue(self):
def add_pending(): """setup required and validate"""
if self.config["downloads"]["cookie_import"]:
valid = CookieHandler(self.config).validate()
if not valid:
return
pending = PendingList()
pending.get_download()
pending.get_channels()
self.video_overwrites = pending.video_overwrites
def add_pending(self):
"""add pending videos to download queue""" """add pending videos to download queue"""
mess_dict = { mess_dict = {
"status": "message:download", "status": self.MSG,
"level": "info", "level": "info",
"title": "Looking for videos to download", "title": "Looking for videos to download",
"message": "Scanning your download queue.", "message": "Scanning your download queue.",
} }
RedisArchivist().set_message("message:download", mess_dict) RedisArchivist().set_message(self.MSG, mess_dict, expire=True)
pending = PendingList() pending = PendingList()
pending.get_download() pending.get_download()
to_add = [i["youtube_id"] for i in pending.all_pending] to_add = [i["youtube_id"] for i in pending.all_pending]
@ -218,18 +229,17 @@ class VideoDownloader:
# there is nothing pending # there is nothing pending
print("download queue is empty") print("download queue is empty")
mess_dict = { mess_dict = {
"status": "message:download", "status": self.MSG,
"level": "error", "level": "error",
"title": "Download queue is empty", "title": "Download queue is empty",
"message": "Add some videos to the queue first.", "message": "Add some videos to the queue first.",
} }
RedisArchivist().set_message("message:download", mess_dict) RedisArchivist().set_message(self.MSG, mess_dict, expire=True)
return return
RedisQueue().add_list(to_add) RedisQueue().add_list(to_add)
@staticmethod def _progress_hook(self, response):
def _progress_hook(response):
"""process the progress_hooks from yt_dlp""" """process the progress_hooks from yt_dlp"""
# title # title
path = os.path.split(response["filename"])[-1][12:] path = os.path.split(response["filename"])[-1][12:]
@ -246,12 +256,12 @@ class VideoDownloader:
except KeyError: except KeyError:
message = "processing" message = "processing"
mess_dict = { mess_dict = {
"status": "message:download", "status": self.MSG,
"level": "info", "level": "info",
"title": title, "title": title,
"message": message, "message": message,
} }
RedisArchivist().set_message("message:download", mess_dict) RedisArchivist().set_message(self.MSG, mess_dict, expire=True)
def _build_obs(self): def _build_obs(self):
"""collection to build all obs passed to yt-dlp""" """collection to build all obs passed to yt-dlp"""

View File

@ -98,7 +98,7 @@ class PostData:
origin, new_view = self.exec_val.split(":") origin, new_view = self.exec_val.split(":")
key = f"{self.current_user}:view:{origin}" key = f"{self.current_user}:view:{origin}"
print(f"change view: {key} to {new_view}") print(f"change view: {key} to {new_view}")
RedisArchivist().set_message(key, {"status": new_view}, expire=False) RedisArchivist().set_message(key, {"status": new_view})
return {"success": True} return {"success": True}
def _change_grid(self): def _change_grid(self):
@ -109,7 +109,7 @@ class PostData:
key = f"{self.current_user}:grid_items" key = f"{self.current_user}:grid_items"
print(f"change grid items: {grid_items}") print(f"change grid items: {grid_items}")
RedisArchivist().set_message(key, {"status": grid_items}, expire=False) RedisArchivist().set_message(key, {"status": grid_items})
return {"success": True} return {"success": True}
@staticmethod @staticmethod
@ -135,7 +135,7 @@ class PostData:
running = download_pending.delay() running = download_pending.delay()
task_id = running.id task_id = running.id
print(f"{task_id}: set task id") print(f"{task_id}: set task id")
RedisArchivist().set_message("dl_queue_id", task_id, expire=False) RedisArchivist().set_message("dl_queue_id", task_id)
return {"success": True} return {"success": True}
def _queue_handler(self): def _queue_handler(self):
@ -187,11 +187,11 @@ class PostData:
sort_order = {"status": self.exec_val} sort_order = {"status": self.exec_val}
if self.exec_val in ["asc", "desc"]: if self.exec_val in ["asc", "desc"]:
RedisArchivist().set_message( RedisArchivist().set_message(
f"{self.current_user}:sort_order", sort_order, expire=False f"{self.current_user}:sort_order", sort_order
) )
else: else:
RedisArchivist().set_message( RedisArchivist().set_message(
f"{self.current_user}:sort_by", sort_order, expire=False f"{self.current_user}:sort_by", sort_order
) )
return {"success": True} return {"success": True}
@ -200,7 +200,7 @@ class PostData:
key = f"{self.current_user}:hide_watched" key = f"{self.current_user}:hide_watched"
message = {"status": bool(int(self.exec_val))} message = {"status": bool(int(self.exec_val))}
print(f"toggle {key}: {message}") print(f"toggle {key}: {message}")
RedisArchivist().set_message(key, message, expire=False) RedisArchivist().set_message(key, message)
return {"success": True} return {"success": True}
def _show_subed_only(self): def _show_subed_only(self):
@ -208,7 +208,7 @@ class PostData:
key = f"{self.current_user}:show_subed_only" key = f"{self.current_user}:show_subed_only"
message = {"status": bool(int(self.exec_val))} message = {"status": bool(int(self.exec_val))}
print(f"toggle {key}: {message}") print(f"toggle {key}: {message}")
RedisArchivist().set_message(key, message, expire=False) RedisArchivist().set_message(key, message)
return {"success": True} return {"success": True}
def _dlnow(self): def _dlnow(self):
@ -218,7 +218,7 @@ class PostData:
running = download_single.delay(youtube_id=youtube_id) running = download_single.delay(youtube_id=youtube_id)
task_id = running.id task_id = running.id
print("set task id: " + task_id) print("set task id: " + task_id)
RedisArchivist().set_message("dl_queue_id", task_id, expire=False) RedisArchivist().set_message("dl_queue_id", task_id)
return {"success": True} return {"success": True}
def _show_ignored_only(self): def _show_ignored_only(self):
@ -227,7 +227,7 @@ class PostData:
key = f"{self.current_user}:show_ignored_only" key = f"{self.current_user}:show_ignored_only"
value = {"status": show_value} value = {"status": show_value}
print(f"Filter download view ignored only: {show_value}") print(f"Filter download view ignored only: {show_value}")
RedisArchivist().set_message(key, value, expire=False) RedisArchivist().set_message(key, value)
return {"success": True} return {"success": True}
def _forget_ignore(self): def _forget_ignore(self):

View File

@ -153,6 +153,7 @@ class YoutubeChannel(YouTubeItem):
es_path = False es_path = False
index_name = "ta_channel" index_name = "ta_channel"
yt_base = "https://www.youtube.com/channel/" yt_base = "https://www.youtube.com/channel/"
msg = "message:playlistscan"
def __init__(self, youtube_id): def __init__(self, youtube_id):
super().__init__(youtube_id) super().__init__(youtube_id)
@ -252,12 +253,12 @@ class YoutubeChannel(YouTubeItem):
self.get_from_es() self.get_from_es()
channel_name = self.json_data["channel_name"] channel_name = self.json_data["channel_name"]
mess_dict = { mess_dict = {
"status": "message:playlistscan", "status": self.msg,
"level": "info", "level": "info",
"title": "Looking for playlists", "title": "Looking for playlists",
"message": f"{channel_name}: Scanning channel in progress", "message": f"{channel_name}: Scanning channel in progress",
} }
RedisArchivist().set_message("message:playlistscan", mess_dict) RedisArchivist().set_message(self.msg, mess_dict, expire=True)
self.get_all_playlists() self.get_all_playlists()
if not self.all_playlists: if not self.all_playlists:
print(f"{self.youtube_id}: no playlists found.") print(f"{self.youtube_id}: no playlists found.")
@ -272,12 +273,12 @@ class YoutubeChannel(YouTubeItem):
"""send notification""" """send notification"""
channel_name = self.json_data["channel_name"] channel_name = self.json_data["channel_name"]
mess_dict = { mess_dict = {
"status": "message:playlistscan", "status": self.msg,
"level": "info", "level": "info",
"title": f"{channel_name}: Scanning channel for playlists", "title": f"{channel_name}: Scanning channel for playlists",
"message": f"Progress: {idx + 1}/{len(self.all_playlists)}", "message": f"Progress: {idx + 1}/{len(self.all_playlists)}",
} }
RedisArchivist().set_message("message:playlistscan", mess_dict) RedisArchivist().set_message(self.msg, mess_dict, expire=True)
print("add playlist: " + playlist[1]) print("add playlist: " + playlist[1])
@staticmethod @staticmethod

View File

@ -310,4 +310,4 @@ def reindex_old_documents():
handler = Reindex() handler = Reindex()
handler.check_outdated() handler.check_outdated()
handler.reindex() handler.reindex()
RedisArchivist().set_message("last_reindex", handler.now, expire=False) RedisArchivist().set_message("last_reindex", handler.now)

View File

@ -12,6 +12,7 @@ from time import sleep
from home.src.download.queue import PendingList from home.src.download.queue import PendingList
from home.src.download.thumbnails import ThumbManager from home.src.download.thumbnails import ThumbManager
from home.src.download.yt_dlp_base import CookieHandler
from home.src.download.yt_dlp_handler import VideoDownloader from home.src.download.yt_dlp_handler import VideoDownloader
from home.src.es.connect import ElasticWrap from home.src.es.connect import ElasticWrap
from home.src.index.channel import YoutubeChannel from home.src.index.channel import YoutubeChannel
@ -40,6 +41,13 @@ class Reindex:
self.all_channel_ids = False self.all_channel_ids = False
self.all_playlist_ids = False self.all_playlist_ids = False
def check_cookie(self):
"""validate cookie if enabled"""
if self.config["downloads"]["cookie_import"]:
valid = CookieHandler(self.config).validate()
if not valid:
return
def _get_daily(self): def _get_daily(self):
"""get daily refresh values""" """get daily refresh values"""
total_videos = self._get_total_hits("ta_video") total_videos = self._get_total_hits("ta_video")

View File

@ -339,7 +339,7 @@ class SponsorBlock:
sb_id = RedisArchivist().get_message(key) sb_id = RedisArchivist().get_message(key)
if not sb_id["status"]: if not sb_id["status"]:
sb_id = {"status": randomizor(32)} sb_id = {"status": randomizor(32)}
RedisArchivist().set_message(key, sb_id, expire=False) RedisArchivist().set_message(key, sb_id)
return sb_id return sb_id

View File

@ -99,7 +99,7 @@ class AppConfig:
self.config[config_dict][config_value] = to_write self.config[config_dict][config_value] = to_write
updated.append((config_value, to_write)) updated.append((config_value, to_write))
RedisArchivist().set_message("config", self.config, expire=False) RedisArchivist().set_message("config", self.config)
return updated return updated
@staticmethod @staticmethod
@ -111,7 +111,7 @@ class AppConfig:
message = {"status": value} message = {"status": value}
redis_key = f"{user_id}:{key}" redis_key = f"{user_id}:{key}"
RedisArchivist().set_message(redis_key, message, expire=False) RedisArchivist().set_message(redis_key, message)
def get_colors(self): def get_colors(self):
"""overwrite config if user has set custom values""" """overwrite config if user has set custom values"""
@ -151,7 +151,7 @@ class AppConfig:
needs_update = True needs_update = True
if needs_update: if needs_update:
RedisArchivist().set_message("config", redis_config, expire=False) RedisArchivist().set_message("config", redis_config)
class ScheduleBuilder: class ScheduleBuilder:
@ -165,6 +165,7 @@ class ScheduleBuilder:
"run_backup": "0 18 0", "run_backup": "0 18 0",
} }
CONFIG = ["check_reindex_days", "run_backup_rotate"] CONFIG = ["check_reindex_days", "run_backup_rotate"]
MSG = "message:setting"
def __init__(self): def __init__(self):
self.config = AppConfig().config self.config = AppConfig().config
@ -180,25 +181,27 @@ class ScheduleBuilder:
except ValueError: except ValueError:
print(f"failed: {key} {value}") print(f"failed: {key} {value}")
mess_dict = { mess_dict = {
"status": "message:setting", "status": self.MSG,
"level": "error", "level": "error",
"title": "Scheduler update failed.", "title": "Scheduler update failed.",
"message": "Invalid schedule input", "message": "Invalid schedule input",
} }
RedisArchivist().set_message("message:setting", mess_dict) RedisArchivist().set_message(
self.MSG, mess_dict, expire=True
)
return return
redis_config["scheduler"][key] = to_write redis_config["scheduler"][key] = to_write
if key in self.CONFIG and value: if key in self.CONFIG and value:
redis_config["scheduler"][key] = int(value) redis_config["scheduler"][key] = int(value)
RedisArchivist().set_message("config", redis_config, expire=False) RedisArchivist().set_message("config", redis_config)
mess_dict = { mess_dict = {
"status": "message:setting", "status": self.MSG,
"level": "info", "level": "info",
"title": "Scheduler changed.", "title": "Scheduler changed.",
"message": "Please restart container for changes to take effect", "message": "Please restart container for changes to take effect",
} }
RedisArchivist().set_message("message:setting", mess_dict) RedisArchivist().set_message(self.MSG, mess_dict, expire=True)
def value_builder(self, key, value): def value_builder(self, key, value):
"""validate single cron form entry and return cron dict""" """validate single cron form entry and return cron dict"""

View File

@ -34,10 +34,10 @@ class RedisArchivist(RedisBase):
"setting", "setting",
] ]
def set_message(self, key, message, expire=True): def set_message(self, key, message, path=".", expire=False):
"""write new message to redis""" """write new message to redis"""
self.conn.execute_command( self.conn.execute_command(
"JSON.SET", self.NAME_SPACE + key, ".", json.dumps(message) "JSON.SET", self.NAME_SPACE + key, path, json.dumps(message)
) )
if expire: if expire:

View File

@ -48,7 +48,7 @@ def update_subscribed():
"title": "Rescanning channels and playlists.", "title": "Rescanning channels and playlists.",
"message": "Looking for new videos.", "message": "Looking for new videos.",
} }
RedisArchivist().set_message("message:rescan", message) RedisArchivist().set_message("message:rescan", message, expire=True)
have_lock = False have_lock = False
my_lock = RedisArchivist().get_lock("rescan") my_lock = RedisArchivist().get_lock("rescan")
@ -108,13 +108,14 @@ def download_single(youtube_id):
try: try:
have_lock = my_lock.acquire(blocking=False) have_lock = my_lock.acquire(blocking=False)
if have_lock: if have_lock:
key = "message:download"
mess_dict = { mess_dict = {
"status": "message:download", "status": key,
"level": "info", "level": "info",
"title": "Download single video", "title": "Download single video",
"message": "processing", "message": "processing",
} }
RedisArchivist().set_message("message:download", mess_dict) RedisArchivist().set_message(key, mess_dict, expire=True)
VideoDownloader().run_queue() VideoDownloader().run_queue()
else: else:
print("Download queue already running.") print("Download queue already running.")
@ -196,7 +197,7 @@ def kill_dl(task_id):
"title": "Canceling download process", "title": "Canceling download process",
"message": "Canceling download queue now.", "message": "Canceling download queue now.",
} }
RedisArchivist().set_message("message:download", mess_dict) RedisArchivist().set_message("message:download", mess_dict, expire=True)
@shared_task @shared_task
@ -245,13 +246,14 @@ def subscribe_to(url_str):
channel_id_sub, channel_subscribed=True channel_id_sub, channel_subscribed=True
) )
# notify for channels # notify for channels
key = "message:subchannel"
message = { message = {
"status": "message:subchannel", "status": key,
"level": "info", "level": "info",
"title": "Subscribing to Channels", "title": "Subscribing to Channels",
"message": f"Processing {counter} of {len(to_subscribe_list)}", "message": f"Processing {counter} of {len(to_subscribe_list)}",
} }
RedisArchivist().set_message("message:subchannel", message=message) RedisArchivist().set_message(key, message=message, expire=True)
counter = counter + 1 counter = counter + 1
@ -260,13 +262,14 @@ def index_channel_playlists(channel_id):
"""add all playlists of channel to index""" """add all playlists of channel to index"""
channel = YoutubeChannel(channel_id) channel = YoutubeChannel(channel_id)
# notify # notify
key = "message:playlistscan"
mess_dict = { mess_dict = {
"status": "message:playlistscan", "status": key,
"level": "info", "level": "info",
"title": "Looking for playlists", "title": "Looking for playlists",
"message": f'Scanning channel "{channel.youtube_id}" in progress', "message": f'Scanning channel "{channel.youtube_id}" in progress',
} }
RedisArchivist().set_message("message:playlistscan", mess_dict) RedisArchivist().set_message(key, mess_dict, expire=True)
channel.index_channel_playlists() channel.index_channel_playlists()

View File

@ -394,14 +394,15 @@ class DownloadView(ArchivistResultsView):
youtube_ids = UrlListParser(url_str).process_list() youtube_ids = UrlListParser(url_str).process_list()
except ValueError: except ValueError:
# failed to process # failed to process
key = "message:add"
print(f"failed to parse: {url_str}") print(f"failed to parse: {url_str}")
mess_dict = { mess_dict = {
"status": "message:add", "status": key,
"level": "error", "level": "error",
"title": "Failed to extract links.", "title": "Failed to extract links.",
"message": "Not a video, channel or playlist ID or URL", "message": "Not a video, channel or playlist ID or URL",
} }
RedisArchivist().set_message("message:add", mess_dict) RedisArchivist().set_message(key, mess_dict, expire=True)
return redirect("downloads") return redirect("downloads")
print(youtube_ids) print(youtube_ids)
@ -512,13 +513,14 @@ class ChannelView(ArchivistResultsView):
"""handle http post requests""" """handle http post requests"""
subscribe_form = SubscribeToChannelForm(data=request.POST) subscribe_form = SubscribeToChannelForm(data=request.POST)
if subscribe_form.is_valid(): if subscribe_form.is_valid():
key = "message:subchannel"
message = { message = {
"status": "message:subchannel", "status": key,
"level": "info", "level": "info",
"title": "Subscribing to Channels", "title": "Subscribing to Channels",
"message": "Parsing form data", "message": "Parsing form data",
} }
RedisArchivist().set_message("message:subchannel", message=message) RedisArchivist().set_message(key, message=message, expire=True)
url_str = request.POST.get("subscribe") url_str = request.POST.get("subscribe")
print(url_str) print(url_str)
subscribe_to.delay(url_str) subscribe_to.delay(url_str)
@ -659,15 +661,14 @@ class PlaylistView(ArchivistResultsView):
if subscribe_form.is_valid(): if subscribe_form.is_valid():
url_str = request.POST.get("subscribe") url_str = request.POST.get("subscribe")
print(url_str) print(url_str)
key = "message:subplaylist"
message = { message = {
"status": "message:subplaylist", "status": key,
"level": "info", "level": "info",
"title": "Subscribing to Playlists", "title": "Subscribing to Playlists",
"message": "Parsing form data", "message": "Parsing form data",
} }
RedisArchivist().set_message( RedisArchivist().set_message(key, message=message, expire=True)
"message:subplaylist", message=message
)
subscribe_to.delay(url_str) subscribe_to.delay(url_str)
sleep(1) sleep(1)
@ -829,18 +830,34 @@ class SettingsView(View):
sleep(1) sleep(1)
return redirect("settings", permanent=True) return redirect("settings", permanent=True)
@staticmethod def post_process_updated(self, updated, config):
def post_process_updated(updated, config):
"""apply changes for config""" """apply changes for config"""
if not updated: if not updated:
return return
for config_value, updated_value in updated: for config_value, updated_value in updated:
if config_value == "cookie_import": if config_value == "cookie_import":
if updated_value: self.process_cookie(config, updated_value)
CookieHandler(config).import_cookie()
else: @staticmethod
CookieHandler(config).revoke() def process_cookie(config, updated_value):
"""import and validate cookie"""
handler = CookieHandler(config)
if updated_value:
handler.import_cookie()
valid = handler.validate()
if not valid:
handler.revoke()
key = "message:setting"
message = {
"status": key,
"level": "error",
"title": "Cookie import failed",
"message": "",
}
RedisArchivist().set_message(key, message=message, expire=True)
else:
handler.revoke()
def progress(request): def progress(request):