Compare commits


No commits in common. "master" and "v0.4.7" have entirely different histories.

61 changed files with 1147 additions and 2415 deletions
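The stat line above can be reproduced locally; a minimal sketch, assuming a clone that has both refs fetched:

git fetch origin master
git fetch origin tag v0.4.7
# --stat summarizes changed files, additions and deletions,
# and works even when the two refs share no common history
git diff --stat master v0.4.7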

View File

@ -1,23 +0,0 @@
name: Frontend Migration
description: Tracking our new React-based frontend
title: "[Frontend Migration]: "
labels: ["react migration"]
body:
- type: dropdown
id: domain
attributes:
label: Domain
options:
- Frontend
- Backend
- Combined
validations:
required: true
- type: textarea
id: description
attributes:
label: Description
placeholder: Organizing our React frontend migration
validations:
required: true

View File

@ -1,22 +1,16 @@
name: lint_js
on:
push:
paths:
- '**/*.js'
pull_request:
paths:
- '**/*.js'
on: [pull_request, push]
jobs:
check:
name: lint_js
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
- uses: actions/checkout@v2
- uses: actions/setup-node@v3
with:
node-version: '22'
node-version: '16'
- run: npm ci
- run: npm run lint
- run: npm run format -- --check

View File

@ -1,42 +1,14 @@
name: lint_python
on:
push:
paths:
- '**/*.py'
pull_request:
paths:
- '**/*.py'
on: [pull_request, push]
jobs:
lint_python:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Install system dependencies
run: |
sudo apt-get update
sudo apt-get install -y gcc libldap2-dev libsasl2-dev libssl-dev
- name: Set up Python
uses: actions/setup-python@v5
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.11'
- name: Cache pip
uses: actions/cache@v4
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
restore-keys: |
${{ runner.os }}-pip-
- name: Install python dependencies
run: |
python -m pip install --upgrade pip
pip install -r tubearchivist/requirements-dev.txt
- name: Run Linter
run: ./deploy.sh validate
python-version: '3.10'
- run: pip install --upgrade pip wheel
- run: pip install bandit black codespell flake8 flake8-bugbear
flake8-comprehensions isort requests
- run: ./deploy.sh validate

View File

@ -1,43 +0,0 @@
name: python_unit_tests
on:
push:
paths:
- '**/*.py'
pull_request:
paths:
- '**/*.py'
jobs:
unit-tests:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Install system dependencies
run: |
sudo apt-get update
sudo apt-get install -y gcc libldap2-dev libsasl2-dev libssl-dev
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.11'
- name: Cache pip
uses: actions/cache@v4
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
restore-keys: |
${{ runner.os }}-pip-
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -r tubearchivist/requirements-dev.txt
- name: Run unit tests
run: pytest tubearchivist

View File

@ -1,10 +1,8 @@
# Contributing to Tube Archivist
## Contributing to Tube Archivist
Welcome, and thanks for showing interest in improving Tube Archivist!
## Table of Contents
- [Next Steps](#next-steps)
- [Beta Testing](#beta-testing)
- [How to open an issue](#how-to-open-an-issue)
- [Bug Report](#bug-report)
- [Feature Request](#feature-request)
@ -16,31 +14,8 @@ Welcome, and thanks for showing interest in improving Tube Archivist!
- [Development Environment](#development-environment)
---
## Next Steps
Going forward, this project will focus on developing a new modern frontend.
- For the time being, don't open any new PRs that aren't aimed at the new frontend.
- New feature requests likely won't get accepted during this process.
- Depending on the severity, bug reports may or may not get fixed during this time.
- When in doubt, reach out.
Join us on [Discord](https://tubearchivist.com/discord) if you want to help with that process.
## Beta Testing
Be the first to help test new features and improvements and provide feedback! There are regular `:unstable` builds for easy access. That's for the tinkerers and the brave. Ideally use a testing environment first, before you are the first to install it on your main system ahead of a release.
There is always something that can get missed during development. Look at the commit messages tagged with `#build`; these are the unstable builds and give a quick overview of what has changed.
- Test the features mentioned, play around, try to break it.
- Test the update path by installing the `:latest` release first, then upgrade to `:unstable` to check for any errors.
- Test the unstable build on a fresh install.
Then provide feedback, whether you find a problem or not. Reach out on [Discord](https://tubearchivist.com/discord) in the `#beta-testing` channel with your findings.
This will help with a smooth update for the regular release. Plus you get to test things out early!
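A minimal sketch of the update path test above, using plain Docker Compose; the service name is an assumption, adapt it to your own setup:

# run the current release first and verify everything works
# (image tag in the compose file set to :latest)
docker compose pull && docker compose up -d
# then switch the image tag to :unstable and repeat
docker compose pull && docker compose up -d
# watch the logs for startup or migration errors
docker compose logs -f tubearchivist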
## How to open an issue
Please read this carefully before opening any [issue](https://github.com/tubearchivist/tubearchivist/issues) on GitHub. Make sure you read [Next Steps](#next-steps) above.
Please read this carefully before opening any [issue](https://github.com/tubearchivist/tubearchivist/issues) on GitHub.
**Do**:
- Do provide details and context; this matters a lot and makes it easier for people to help.
@ -91,8 +66,6 @@ IMPORTANT: When receiving help, contribute back to the community by improving th
## How to make a Pull Request
Make sure you read [Next Steps](#next-steps) above.
Thank you for contributing and helping improve this project. The focus for the foreseeable future is on improving and building on existing functionality, *not* on adding and expanding the application.
This is a quick checklist to help streamline the process:

View File

@ -1,9 +1,10 @@
# multi stage to build tube archivist
# build python wheel, download and extract ffmpeg, copy into final image
# first stage to build python wheel, copy into final image
# First stage to build python wheel
FROM python:3.11.8-slim-bookworm AS builder
FROM python:3.11.3-slim-bullseye AS builder
ARG TARGETPLATFORM
RUN apt-get update && apt-get install -y --no-install-recommends \
build-essential gcc libldap2-dev libsasl2-dev libssl-dev git
@ -12,17 +13,10 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
COPY ./tubearchivist/requirements.txt /requirements.txt
RUN pip install --user -r requirements.txt
# build ffmpeg
FROM python:3.11.8-slim-bookworm as ffmpeg-builder
# build final image
FROM python:3.11.3-slim-bullseye as tubearchivist
ARG TARGETPLATFORM
COPY docker_assets/ffmpeg_download.py ffmpeg_download.py
RUN python ffmpeg_download.py $TARGETPLATFORM
# build final image
FROM python:3.11.8-slim-bookworm as tubearchivist
ARG INSTALL_DEBUG
ENV PYTHONUNBUFFERED 1
@ -31,21 +25,36 @@ ENV PYTHONUNBUFFERED 1
COPY --from=builder /root/.local /root/.local
ENV PATH=/root/.local/bin:$PATH
# copy ffmpeg
COPY --from=ffmpeg-builder ./ffmpeg/ffmpeg /usr/bin/ffmpeg
COPY --from=ffmpeg-builder ./ffprobe/ffprobe /usr/bin/ffprobe
# install distro packages needed
RUN apt-get clean && apt-get -y update && apt-get -y install --no-install-recommends \
nginx \
atomicparsley \
curl && rm -rf /var/lib/apt/lists/*
curl \
xz-utils && rm -rf /var/lib/apt/lists/*
# install patched ffmpeg build, default to linux64
RUN if [ "$TARGETPLATFORM" = "linux/arm64" ] ; then \
curl -s https://api.github.com/repos/yt-dlp/FFmpeg-Builds/releases/latest \
| grep browser_download_url \
| grep ".*master.*linuxarm64.*tar.xz" \
| cut -d '"' -f 4 \
| xargs curl -L --output ffmpeg.tar.xz ; \
else \
curl -s https://api.github.com/repos/yt-dlp/FFmpeg-Builds/releases/latest \
| grep browser_download_url \
| grep ".*master.*linux64.*tar.xz" \
| cut -d '"' -f 4 \
| xargs curl -L --output ffmpeg.tar.xz ; \
fi && \
tar -xf ffmpeg.tar.xz --strip-components=2 --no-anchored -C /usr/bin/ "ffmpeg" && \
tar -xf ffmpeg.tar.xz --strip-components=2 --no-anchored -C /usr/bin/ "ffprobe" && \
rm ffmpeg.tar.xz
# install debug tools for testing environment
RUN if [ "$INSTALL_DEBUG" ] ; then \
apt-get -y update && apt-get -y install --no-install-recommends \
vim htop bmon net-tools iputils-ping procps \
&& pip install --user ipython pytest pytest-django \
&& pip install --user ipython \
; fi
# make folders

View File

@ -34,8 +34,8 @@ Once your YouTube video collection grows, it becomes hard to search and find a s
- [Discord](https://www.tubearchivist.com/discord): Connect with us on our Discord server.
- [r/TubeArchivist](https://www.reddit.com/r/TubeArchivist/): Join our Subreddit.
- [Browser Extension](https://github.com/tubearchivist/browser-extension): Tube Archivist Companion, for [Firefox](https://addons.mozilla.org/addon/tubearchivist-companion/) and [Chrome](https://chrome.google.com/webstore/detail/tubearchivist-companion/jjnkmicfnfojkkgobdfeieblocadmcie)
- [Jellyfin Plugin](https://github.com/tubearchivist/tubearchivist-jf-plugin): Add your videos to Jellyfin
- [Plex Plugin](https://github.com/tubearchivist/tubearchivist-plex): Add your videos to Plex
- [Jellyfin Integration](https://github.com/tubearchivist/tubearchivist-jf): Add your videos to Jellyfin.
- [Tube Archivist Metrics](https://github.com/tubearchivist/tubearchivist-metrics): Create statistics in Prometheus/OpenMetrics format.
## Installing
For minimal system requirements, the Tube Archivist stack needs around 2GB of available memory for a small testing setup and around 4GB of available memory for a mid- to large-sized installation. At minimum a dual core CPU with 4 threads; a quad core or better is recommended.
@ -151,7 +151,7 @@ We have come far, nonetheless we are not short of ideas on how to improve and ex
- [ ] User roles
- [ ] Audio download
- [ ] Podcast mode to serve channel as mp3
- [ ] Random and repeat controls ([#108](https://github.com/tubearchivist/tubearchivist/issues/108), [#220](https://github.com/tubearchivist/tubearchivist/issues/220))
- [ ] User created playlists, random and repeat controls ([#108](https://github.com/tubearchivist/tubearchivist/issues/108), [#220](https://github.com/tubearchivist/tubearchivist/issues/220))
- [ ] Auto play or play next link ([#226](https://github.com/tubearchivist/tubearchivist/issues/226))
- [ ] Multi language support
- [ ] Show total video downloaded vs total videos available in channel
@ -162,7 +162,6 @@ We have come far, nonetheless we are not short of ideas on how to improve and ex
- [ ] Configure shorts, streams and video sizes per channel
Implemented:
- [X] User created playlists [2024-04-10]
- [X] Add statistics of index [2023-09-03]
- [X] Implement [Apprise](https://github.com/caronc/apprise) for notifications [2023-08-05]
- [X] Download video comments [2022-11-30]

View File

@ -50,7 +50,6 @@ function sync_test {
--exclude ".gitignore" \
--exclude "**/cache" \
--exclude "**/__pycache__/" \
--exclude "**/.pytest_cache/" \
--exclude ".venv" \
--exclude "db.sqlite3" \
--exclude ".mypy_cache" \

View File

@ -1,71 +0,0 @@
"""
ffmpeg link builder
copied into the build step in the Dockerfile
"""
import json
import os
import sys
import tarfile
import urllib.request
from enum import Enum
API_URL = "https://api.github.com/repos/yt-dlp/FFmpeg-Builds/releases/latest"
BINARIES = ["ffmpeg", "ffprobe"]
class PlatformFilter(Enum):
"""options"""
ARM64 = "linuxarm64"
AMD64 = "linux64"
def get_assets():
"""get all available assets from latest build"""
with urllib.request.urlopen(API_URL) as f:
all_links = json.loads(f.read().decode("utf-8"))
return all_links
def pick_url(all_links, platform):
"""pick url for platform"""
filter_by = PlatformFilter[platform.split("/")[1].upper()].value
options = [i for i in all_links["assets"] if filter_by in i["name"]]
if not options:
raise ValueError(f"no valid asset found for filter {filter_by}")
url_pick = options[0]["browser_download_url"]
return url_pick
def download_extract(url):
"""download and extract binaries"""
print("download file")
filename, _ = urllib.request.urlretrieve(url)
print("extract file")
with tarfile.open(filename, "r:xz") as tar:
for member in tar.getmembers():
member.name = os.path.basename(member.name)
if member.name in BINARIES:
print(f"extract {member.name}")
tar.extract(member, member.name)
def main():
"""entry point"""
args = sys.argv
if len(args) == 1:
platform = "linux/amd64"
else:
platform = args[1]
all_links = get_assets()
url = pick_url(all_links, platform)
download_extract(url)
if __name__ == "__main__":
main()
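As `main()` above shows, the platform argument is optional and defaults to `linux/amd64`; a usage sketch outside the Dockerfile:

# downloads the latest yt-dlp FFmpeg build for arm64 and extracts
# the two binaries to ./ffmpeg/ffmpeg and ./ffprobe/ffprobe
python ffmpeg_download.py linux/arm64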

View File

@ -17,7 +17,7 @@ python manage.py ta_startup
# start all tasks
nginx &
celery -A home.celery worker --loglevel=INFO --max-tasks-per-child 10 &
celery -A home.tasks worker --loglevel=INFO --max-tasks-per-child 10 &
celery -A home beat --loglevel=INFO \
--scheduler django_celery_beat.schedulers:DatabaseScheduler &
-s "${BEAT_SCHEDULE_PATH:-${cachedir}/celerybeat-schedule}" &
uwsgi --ini uwsgi.ini

View File

@ -7,7 +7,7 @@ Functionality:
import urllib.parse
from home.src.download.thumbnails import ThumbManager
from home.src.ta.helper import date_parser, get_duration_str
from home.src.ta.helper import date_praser, get_duration_str
from home.src.ta.settings import EnvironmentSettings
@ -67,7 +67,7 @@ class SearchProcess:
"""run on single channel"""
channel_id = channel_dict["channel_id"]
art_base = f"/cache/channels/{channel_id}"
date_str = date_parser(channel_dict["channel_last_refresh"])
date_str = date_praser(channel_dict["channel_last_refresh"])
channel_dict.update(
{
"channel_last_refresh": date_str,
@ -83,8 +83,8 @@ class SearchProcess:
"""run on single video dict"""
video_id = video_dict["youtube_id"]
media_url = urllib.parse.quote(video_dict["media_url"])
vid_last_refresh = date_parser(video_dict["vid_last_refresh"])
published = date_parser(video_dict["published"])
vid_last_refresh = date_praser(video_dict["vid_last_refresh"])
published = date_praser(video_dict["published"])
vid_thumb_url = ThumbManager(video_id).vid_thumb_path()
channel = self._process_channel(video_dict["channel"])
@ -109,7 +109,7 @@ class SearchProcess:
def _process_playlist(playlist_dict):
"""run on single playlist dict"""
playlist_id = playlist_dict["playlist_id"]
playlist_last_refresh = date_parser(
playlist_last_refresh = date_praser(
playlist_dict["playlist_last_refresh"]
)
playlist_dict.update(
@ -125,7 +125,7 @@ class SearchProcess:
"""run on single download item"""
video_id = download_dict["youtube_id"]
vid_thumb_url = ThumbManager(video_id).vid_thumb_path()
published = date_parser(download_dict["published"])
published = date_praser(download_dict["published"])
download_dict.update(
{

View File

@ -121,16 +121,6 @@ urlpatterns = [
views.TaskIDView.as_view(),
name="api-task-id",
),
path(
"schedule/",
views.ScheduleView.as_view(),
name="api-schedule",
),
path(
"schedule/notification/",
views.ScheduleNotification.as_view(),
name="api-schedule-notification",
),
path(
"config/user/",
views.UserConfigView.as_view(),

View File

@ -10,7 +10,6 @@ from api.src.aggs import (
WatchProgress,
)
from api.src.search_processor import SearchProcess
from home.models import CustomPeriodicTask
from home.src.download.queue import PendingInteract
from home.src.download.subscriptions import (
ChannelSubscription,
@ -28,14 +27,13 @@ from home.src.index.playlist import YoutubePlaylist
from home.src.index.reindex import ReindexProgress
from home.src.index.video import SponsorBlock, YoutubeVideo
from home.src.ta.config import AppConfig, ReleaseVersion
from home.src.ta.notify import Notifications, get_all_notifications
from home.src.ta.settings import EnvironmentSettings
from home.src.ta.ta_redis import RedisArchivist
from home.src.ta.task_config import TASK_CONFIG
from home.src.ta.task_manager import TaskCommand, TaskManager
from home.src.ta.urlparser import Parser
from home.src.ta.users import UserConfig
from home.tasks import (
BaseTask,
check_reindex,
download_pending,
extrac_dl,
@ -594,7 +592,7 @@ class DownloadApiView(ApiBaseView):
"""
search_base = "ta_download/_doc/"
valid_status = ["pending", "ignore", "ignore-force", "priority"]
valid_status = ["pending", "ignore", "priority"]
permission_classes = [AdminOnly]
def get(self, request, video_id):
@ -611,11 +609,6 @@ class DownloadApiView(ApiBaseView):
print(message)
return Response({"message": message}, status=400)
if item_status == "ignore-force":
extrac_dl.delay(video_id, status="ignore")
message = f"{video_id}: set status to ignore"
return Response(request.data)
_, status_code = PendingInteract(video_id).get_item()
if status_code == 404:
message = f"{video_id}: item not found {status_code}"
@ -688,7 +681,14 @@ class DownloadApiListView(ApiBaseView):
pending = [i["youtube_id"] for i in to_add if i["status"] == "pending"]
url_str = " ".join(pending)
extrac_dl.delay(url_str, auto_start=auto_start)
try:
youtube_ids = Parser(url_str).parse()
except ValueError:
message = f"failed to parse: {url_str}"
print(message)
return Response({"message": message}, status=400)
extrac_dl.delay(youtube_ids, auto_start=auto_start)
return Response(data)
@ -911,7 +911,7 @@ class TaskNameListView(ApiBaseView):
def get(self, request, task_name):
"""handle get request"""
# pylint: disable=unused-argument
if task_name not in TASK_CONFIG:
if task_name not in BaseTask.TASK_CONFIG:
message = {"message": "invalid task name"}
return Response(message, status=404)
@ -926,12 +926,12 @@ class TaskNameListView(ApiBaseView):
400 if task can't be started here without argument
"""
# pylint: disable=unused-argument
task_config = TASK_CONFIG.get(task_name)
task_config = BaseTask.TASK_CONFIG.get(task_name)
if not task_config:
message = {"message": "invalid task name"}
return Response(message, status=404)
if not task_config.get("api_start"):
if not task_config.get("api-start"):
message = {"message": "can not start task through this endpoint"}
return Response(message, status=400)
@ -970,16 +970,16 @@ class TaskIDView(ApiBaseView):
message = {"message": "task id not found"}
return Response(message, status=404)
task_conf = TASK_CONFIG.get(task_result.get("name"))
task_conf = BaseTask.TASK_CONFIG.get(task_result.get("name"))
if command == "stop":
if not task_conf.get("api_stop"):
if not task_conf.get("api-stop"):
message = {"message": "task can not be stopped"}
return Response(message, status=400)
message_key = self._build_message_key(task_conf, task_id)
TaskCommand().stop(task_id, message_key)
if command == "kill":
if not task_conf.get("api_stop"):
if not task_conf.get("api-stop"):
message = {"message": "task can not be killed"}
return Response(message, status=400)
@ -992,56 +992,6 @@ class TaskIDView(ApiBaseView):
return f"message:{task_conf.get('group')}:{task_id.split('-')[0]}"
class ScheduleView(ApiBaseView):
"""resolves to /api/schedule/
DEL: delete schedule for task
"""
permission_classes = [AdminOnly]
def delete(self, request):
"""delete schedule by task_name query"""
task_name = request.data.get("task_name")
try:
task = CustomPeriodicTask.objects.get(name=task_name)
except CustomPeriodicTask.DoesNotExist:
message = {"message": "task_name not found"}
return Response(message, status=404)
_ = task.delete()
return Response({"success": True})
class ScheduleNotification(ApiBaseView):
"""resolves to /api/schedule/notification/
GET: get all schedule notifications
DEL: delete notification
"""
def get(self, request):
"""handle get request"""
return Response(get_all_notifications())
def delete(self, request):
"""handle delete"""
task_name = request.data.get("task_name")
url = request.data.get("url")
if not TASK_CONFIG.get(task_name):
message = {"message": "task_name not found"}
return Response(message, status=404)
if url:
response, status_code = Notifications(task_name).remove_url(url)
else:
response, status_code = Notifications(task_name).remove_task()
return Response({"response": response, "status_code": status_code})
class RefreshView(ApiBaseView):
"""resolves to /api/refresh/
GET: get refresh progress

View File

@ -1,44 +0,0 @@
"""
channel fix for update from v0.4.7 to v0.4.8
reindex channels with 0 subscriber count
python manage.py ta_fix_channels
"""
from django.core.management.base import BaseCommand
from home.src.es.connect import IndexPaginate
from home.tasks import check_reindex
class Command(BaseCommand):
"""fix comment link"""
def handle(self, *args, **options):
"""run command"""
self.stdout.write("reindex failed channels")
channels = self._get_channels()
if not channels:
self.stdout.write("did not find any failed channels")
return
self.stdout.write(f"add {len(channels)} channels(s) to queue")
to_reindex = {"channel": [i["channel_id"] for i in channels]}
check_reindex.delay(data=to_reindex)
self.stdout.write(self.style.SUCCESS(" ✓ task queued\n"))
def _get_channels(self):
"""get failed channels"""
self.stdout.write("search for failed channels")
es_query = {
"query": {
"bool": {
"must": [
{"term": {"channel_subs": {"value": 0}}},
{"term": {"channel_active": {"value": True}}},
]
},
},
"_source": ["channel_id"],
}
channels = IndexPaginate("ta_channel", es_query).get_results()
return channels

View File

@ -1,76 +0,0 @@
"""
comment link fix for update from v0.4.7 to v0.4.8
scan your videos and comments to fix comment_count field
python manage.py ta_fix_comment_link
"""
from django.core.management.base import BaseCommand, CommandError
from home.src.es.connect import ElasticWrap, IndexPaginate
class Command(BaseCommand):
"""fix comment link"""
def handle(self, *args, **options):
"""run command"""
self.stdout.write("run comment link fix")
expected_count = self._get_comment_indexed()
all_videos = self._get_videos()
self.stdout.write(f"checking {len(all_videos)} video(s)")
videos_updated = []
for video in all_videos:
video_id = video["youtube_id"]
comment_count = expected_count.get(video_id)
if not comment_count:
continue
data = {"doc": {"comment_count": comment_count}}
path = f"ta_video/_update/{video_id}"
response, status_code = ElasticWrap(path).post(data=data)
if status_code != 200:
message = (
"failed to add comment count to video"
+ f"response code: {status_code}"
+ response
)
raise CommandError(message)
videos_updated.append(video_id)
self.stdout.write(f"fixed {len(videos_updated)} video(s)")
self.stdout.write(self.style.SUCCESS(" ✓ task completed\n"))
def _get_comment_indexed(self):
"""get comment count by index"""
self.stdout.write("get comments")
src = "params['_source']['comment_comments'].length"
data = {
"script_fields": {
"comments_length": {
"script": {"source": src, "lang": "painless"}
}
}
}
all_comments = IndexPaginate(
"ta_comment", data=data, keep_source=True
).get_results()
expected_count = {
i["_id"]: i["fields"]["comments_length"][0] for i in all_comments
}
return expected_count
def _get_videos(self):
"""get videos without comment_count"""
self.stdout.write("get videos")
data = {
"query": {
"bool": {"must_not": [{"exists": {"field": "comment_count"}}]}
}
}
all_videos = IndexPaginate("ta_video", data).get_results()
return all_videos

View File

@ -5,24 +5,18 @@ Functionality:
"""
import os
from random import randint
from time import sleep
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django_celery_beat.models import CrontabSchedule
from home.models import CustomPeriodicTask
from home.src.es.connect import ElasticWrap
from home.src.es.index_setup import ElasitIndexWrap
from home.src.es.snapshot import ElasticSnapshot
from home.src.ta.config import AppConfig, ReleaseVersion
from home.src.ta.config_schedule import ScheduleBuilder
from home.src.ta.helper import clear_dl_cache
from home.src.ta.notify import Notifications
from home.src.ta.settings import EnvironmentSettings
from home.src.ta.ta_redis import RedisArchivist
from home.src.ta.task_config import TASK_CONFIG
from home.src.ta.task_manager import TaskManager
from home.src.ta.users import UserConfig
TOPIC = """
@ -46,12 +40,12 @@ class Command(BaseCommand):
self._clear_redis_keys()
self._clear_tasks()
self._clear_dl_cache()
self._mig_clear_failed_versioncheck()
self._version_check()
self._mig_index_setup()
self._mig_snapshot_check()
self._mig_schedule_store()
self._mig_move_users_to_es()
self._mig_custom_playlist()
self._create_default_schedules()
def _sync_redis_state(self):
"""make sure redis gets new config.json values"""
@ -157,134 +151,102 @@ class Command(BaseCommand):
self.stdout.write("[MIGRATION] setup snapshots")
ElasticSnapshot().setup()
def _mig_schedule_store(self):
"""
update from 0.4.7 to 0.4.8
migrate schedule task store to CustomPeriodicTask
"""
self.stdout.write("[MIGRATION] migrate schedule store")
config = AppConfig().config
current_schedules = config.get("scheduler")
if not current_schedules:
def _mig_clear_failed_versioncheck(self):
"""hotfix for v0.4.5, clearing faulty versioncheck"""
ReleaseVersion().clear_fail()
def _mig_move_users_to_es(self): # noqa: C901
"""migration: update from 0.4.1 to 0.4.2 move user config to ES"""
self.stdout.write("[MIGRATION] move user configuration to ES")
redis = RedisArchivist()
# 1: Find all users in Redis
users = {i.split(":")[0] for i in redis.list_keys("[0-9]*:")}
if not users:
self.stdout.write(" no users needed migrating to ES")
return
# 2: Write all Redis user settings to ES
# 3: Remove user settings from Redis
try:
for user in users:
new_conf = UserConfig(user)
stylesheet_key = f"{user}:color"
stylesheet = redis.get_message(stylesheet_key).get("status")
if stylesheet:
new_conf.set_value("stylesheet", stylesheet)
redis.del_message(stylesheet_key)
sort_by_key = f"{user}:sort_by"
sort_by = redis.get_message(sort_by_key).get("status")
if sort_by:
new_conf.set_value("sort_by", sort_by)
redis.del_message(sort_by_key)
page_size_key = f"{user}:page_size"
page_size = redis.get_message(page_size_key).get("status")
if page_size:
new_conf.set_value("page_size", page_size)
redis.del_message(page_size_key)
sort_order_key = f"{user}:sort_order"
sort_order = redis.get_message(sort_order_key).get("status")
if sort_order:
new_conf.set_value("sort_order", sort_order)
redis.del_message(sort_order_key)
grid_items_key = f"{user}:grid_items"
grid_items = redis.get_message(grid_items_key).get("status")
if grid_items:
new_conf.set_value("grid_items", grid_items)
redis.del_message(grid_items_key)
hide_watch_key = f"{user}:hide_watched"
hide_watch = redis.get_message(hide_watch_key).get("status")
if hide_watch:
new_conf.set_value("hide_watched", hide_watch)
redis.del_message(hide_watch_key)
ignore_only_key = f"{user}:show_ignored_only"
ignore_only = redis.get_message(ignore_only_key).get("status")
if ignore_only:
new_conf.set_value("show_ignored_only", ignore_only)
redis.del_message(ignore_only_key)
subed_only_key = f"{user}:show_subed_only"
subed_only = redis.get_message(subed_only_key).get("status")
if subed_only:
new_conf.set_value("show_subed_only", subed_only)
redis.del_message(subed_only_key)
for view in ["channel", "playlist", "home", "downloads"]:
view_key = f"{user}:view:{view}"
view_style = redis.get_message(view_key).get("status")
if view_style:
new_conf.set_value(f"view_style_{view}", view_style)
redis.del_message(view_key)
self.stdout.write(
self.style.SUCCESS(
f" ✓ Settings for user '{user}' migrated to ES"
)
)
except Exception as err:
message = " 🗙 user migration to ES failed"
self.stdout.write(self.style.ERROR(message))
self.stdout.write(self.style.ERROR(err))
sleep(60)
raise CommandError(message) from err
else:
self.stdout.write(
self.style.SUCCESS(" no schedules to migrate")
self.style.SUCCESS(
" ✓ Settings for all users migrated to ES"
)
)
return
self._mig_update_subscribed(current_schedules)
self._mig_download_pending(current_schedules)
self._mig_check_reindex(current_schedules)
self._mig_thumbnail_check(current_schedules)
self._mig_run_backup(current_schedules)
self._mig_version_check()
del config["scheduler"]
RedisArchivist().set_message("config", config, save=True)
def _mig_update_subscribed(self, current_schedules):
"""create update_subscribed schedule"""
task_name = "update_subscribed"
update_subscribed_schedule = current_schedules.get(task_name)
if update_subscribed_schedule:
self._create_task(task_name, update_subscribed_schedule)
self._create_notifications(task_name, current_schedules)
def _mig_download_pending(self, current_schedules):
"""create download_pending schedule"""
task_name = "download_pending"
download_pending_schedule = current_schedules.get(task_name)
if download_pending_schedule:
self._create_task(task_name, download_pending_schedule)
self._create_notifications(task_name, current_schedules)
def _mig_check_reindex(self, current_schedules):
"""create check_reindex schedule"""
task_name = "check_reindex"
check_reindex_schedule = current_schedules.get(task_name)
if check_reindex_schedule:
task_config = {}
days = current_schedules.get("check_reindex_days")
if days:
task_config.update({"days": days})
self._create_task(
task_name,
check_reindex_schedule,
task_config=task_config,
)
self._create_notifications(task_name, current_schedules)
def _mig_thumbnail_check(self, current_schedules):
"""create thumbnail_check schedule"""
thumbnail_check_schedule = current_schedules.get("thumbnail_check")
if thumbnail_check_schedule:
self._create_task("thumbnail_check", thumbnail_check_schedule)
def _mig_run_backup(self, current_schedules):
"""create run_backup schedule"""
run_backup_schedule = current_schedules.get("run_backup")
if run_backup_schedule:
task_config = False
rotate = current_schedules.get("run_backup_rotate")
if rotate:
task_config = {"rotate": rotate}
self._create_task(
"run_backup", run_backup_schedule, task_config=task_config
)
def _mig_version_check(self):
"""create version_check schedule"""
version_check_schedule = {
"minute": randint(0, 59),
"hour": randint(0, 23),
"day_of_week": "*",
}
self._create_task("version_check", version_check_schedule)
def _create_task(self, task_name, schedule, task_config=False):
"""create task"""
description = TASK_CONFIG[task_name].get("title")
schedule, _ = CrontabSchedule.objects.get_or_create(**schedule)
schedule.timezone = settings.TIME_ZONE
schedule.save()
task, _ = CustomPeriodicTask.objects.get_or_create(
crontab=schedule,
name=task_name,
description=description,
task=task_name,
)
if task_config:
task.task_config = task_config
task.save()
self.stdout.write(
self.style.SUCCESS(f" ✓ new task created: '{task}'")
)
def _create_notifications(self, task_name, current_schedules):
"""migrate notifications of task"""
notifications = current_schedules.get(f"{task_name}_notify")
if not notifications:
return
urls = [i.strip() for i in notifications.split()]
if not urls:
return
self.stdout.write(
self.style.SUCCESS(f" ✓ migrate notifications: '{urls}'")
)
handler = Notifications(task_name)
for url in urls:
handler.add_url(url)
def _mig_custom_playlist(self):
"""add playlist_type for migration from v0.4.6 to v0.4.7"""
"""migration for custom playlist"""
self.stdout.write("[MIGRATION] custom playlist")
data = {
"query": {
@ -315,54 +277,3 @@ class Command(BaseCommand):
self.stdout.write(response)
sleep(60)
raise CommandError(message)
def _create_default_schedules(self) -> None:
"""
create default schedules for new installations
needs to be called after _mig_schedule_store
"""
self.stdout.write("[7] create initial schedules")
init_has_run = CustomPeriodicTask.objects.filter(
name="version_check"
).exists()
if init_has_run:
self.stdout.write(
self.style.SUCCESS(
" schedule init already done, skipping..."
)
)
return
builder = ScheduleBuilder()
check_reindex = builder.get_set_task(
"check_reindex", schedule=builder.SCHEDULES["check_reindex"]
)
check_reindex.task_config.update({"days": 90})
check_reindex.save()
self.stdout.write(
self.style.SUCCESS(
f" ✓ created new default schedule: {check_reindex}"
)
)
thumbnail_check = builder.get_set_task(
"thumbnail_check", schedule=builder.SCHEDULES["thumbnail_check"]
)
self.stdout.write(
self.style.SUCCESS(
f" ✓ created new default schedule: {thumbnail_check}"
)
)
daily_random = f"{randint(0, 59)} {randint(0, 23)} *"
version_check = builder.get_set_task(
"version_check", schedule=daily_random
)
self.stdout.write(
self.style.SUCCESS(
f" ✓ created new default schedule: {version_check}"
)
)
self.stdout.write(
self.style.SUCCESS(" ✓ all default schedules created")
)

View File

@ -33,14 +33,11 @@ SECRET_KEY = PW_HASH.hexdigest()
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = bool(environ.get("DJANGO_DEBUG"))
ALLOWED_HOSTS, CSRF_TRUSTED_ORIGINS = ta_host_parser(
environ.get("TA_HOST", "localhost")
)
ALLOWED_HOSTS, CSRF_TRUSTED_ORIGINS = ta_host_parser(environ["TA_HOST"])
# Application definition
INSTALLED_APPS = [
"django_celery_beat",
"home.apps.HomeConfig",
"django.contrib.admin",
"django.contrib.auth",
@ -243,11 +240,7 @@ USE_TZ = True
STATIC_URL = "/static/"
STATICFILES_DIRS = (str(BASE_DIR.joinpath("static")),)
STATIC_ROOT = str(BASE_DIR.joinpath("staticfiles"))
STORAGES = {
"staticfiles": {
"BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage",
},
}
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
@ -276,4 +269,4 @@ CORS_ALLOW_HEADERS = list(default_headers) + [
# TA application settings
TA_UPSTREAM = "https://github.com/tubearchivist/tubearchivist"
TA_VERSION = "v0.4.8"
TA_VERSION = "v0.4.7"

View File

@ -1,7 +1,5 @@
"""start celery app"""
""" handle celery startup """
from __future__ import absolute_import, unicode_literals
from home.celery import app as celery_app
from .tasks import app as celery_app
__all__ = ("celery_app",)

View File

@ -2,7 +2,6 @@
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django_celery_beat import models as BeatModels
from .models import Account
@ -35,12 +34,3 @@ class HomeAdmin(BaseUserAdmin):
admin.site.register(Account, HomeAdmin)
admin.site.unregister(
[
BeatModels.ClockedSchedule,
BeatModels.CrontabSchedule,
BeatModels.IntervalSchedule,
BeatModels.PeriodicTask,
BeatModels.SolarSchedule,
]
)

View File

@ -1,22 +0,0 @@
"""initiate celery"""
import os
from celery import Celery
from home.src.ta.settings import EnvironmentSettings
REDIS_HOST = EnvironmentSettings.REDIS_HOST
REDIS_PORT = EnvironmentSettings.REDIS_PORT
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings")
app = Celery(
"tasks",
broker=f"redis://{REDIS_HOST}:{REDIS_PORT}",
backend=f"redis://{REDIS_HOST}:{REDIS_PORT}",
result_extended=True,
)
app.config_from_object(
"django.conf:settings", namespace=EnvironmentSettings.REDIS_NAME_SPACE
)
app.autodiscover_tasks()
app.conf.timezone = EnvironmentSettings.TZ

View File

@ -26,5 +26,18 @@
},
"application": {
"enable_snapshot": true
},
"scheduler": {
"update_subscribed": false,
"update_subscribed_notify": false,
"download_pending": false,
"download_pending_notify": false,
"check_reindex": {"minute": "0", "hour": "12", "day_of_week": "*"},
"check_reindex_notify": false,
"check_reindex_days": 90,
"thumbnail_check": {"minute": "0", "hour": "17", "day_of_week": "*"},
"run_backup": false,
"run_backup_rotate": 5,
"version_check": "rand-d"
}
}
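For orientation: the scheduler defaults above use crontab-style dicts, `false` disables a task, and `rand-d` stands for a randomized daily schedule (compare the `randint` calls in the startup migration further up). A sketch of the equivalent crontab expressions, our reading rather than anything from the source:

# {"minute": "0", "hour": "12", "day_of_week": "*"}  ->  0 12 * * *  (check_reindex, daily at noon)
# {"minute": "0", "hour": "17", "day_of_week": "*"}  ->  0 17 * * *  (thumbnail_check, daily at 17:00)
# "rand-d"  ->  minute and hour randomized once for version_check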

View File

@ -1,23 +0,0 @@
# Generated by Django 4.2.7 on 2023-12-05 13:47
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('django_celery_beat', '0018_improve_crontab_helptext'),
('home', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='CustomPeriodicTask',
fields=[
('periodictask_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='django_celery_beat.periodictask')),
('task_config', models.JSONField(default=dict)),
],
bases=('django_celery_beat.periodictask',),
),
]

View File

@ -6,7 +6,6 @@ from django.contrib.auth.models import (
PermissionsMixin,
)
from django.db import models
from django_celery_beat.models import PeriodicTask
class AccountManager(BaseUserManager):
@ -53,9 +52,3 @@ class Account(AbstractBaseUser, PermissionsMixin):
USERNAME_FIELD = "name"
REQUIRED_FIELDS = ["password"]
class CustomPeriodicTask(PeriodicTask):
"""add custom metadata to to task"""
task_config = models.JSONField(default=dict)

View File

@ -7,7 +7,10 @@ Functionality:
import json
from datetime import datetime
from home.src.download.subscriptions import ChannelSubscription
from home.src.download.subscriptions import (
ChannelSubscription,
PlaylistSubscription,
)
from home.src.download.thumbnails import ThumbManager
from home.src.download.yt_dlp_base import YtWrap
from home.src.es.connect import ElasticWrap, IndexPaginate
@ -193,6 +196,7 @@ class PendingList(PendingIndex):
self._parse_channel(entry["url"], vid_type)
elif entry["type"] == "playlist":
self._parse_playlist(entry["url"])
PlaylistSubscription().process_url_str([entry], subscribed=False)
else:
raise ValueError(f"invalid url_type: {entry}")
@ -223,18 +227,15 @@ class PendingList(PendingIndex):
def _parse_playlist(self, url):
"""add all videos of playlist to list"""
playlist = YoutubePlaylist(url)
is_active = playlist.update_playlist()
if not is_active:
playlist.build_json()
if not playlist.json_data:
message = f"{playlist.youtube_id}: failed to extract metadata"
print(message)
raise ValueError(message)
entries = playlist.json_data["playlist_entries"]
to_add = [i["youtube_id"] for i in entries if not i["downloaded"]]
if not to_add:
return
for video_id in to_add:
video_results = playlist.json_data.get("playlist_entries")
youtube_ids = [i["youtube_id"] for i in video_results]
for video_id in youtube_ids:
# match vid_type later
self._add_video(video_id, VideoTypeEnum.UNKNOWN)
@ -244,7 +245,6 @@ class PendingList(PendingIndex):
bulk_list = []
total = len(self.missing_videos)
videos_added = []
for idx, (youtube_id, vid_type) in enumerate(self.missing_videos):
if self.task and self.task.is_stopped():
break
@ -268,7 +268,6 @@ class PendingList(PendingIndex):
url = video_details["vid_thumb_url"]
ThumbManager(youtube_id).download_video_thumb(url)
videos_added.append(youtube_id)
if len(bulk_list) >= 20:
self._ingest_bulk(bulk_list)
@ -276,8 +275,6 @@ class PendingList(PendingIndex):
self._ingest_bulk(bulk_list)
return videos_added
def _ingest_bulk(self, bulk_list):
"""add items to queue in bulk"""
if not bulk_list:

View File

@ -4,15 +4,14 @@ Functionality:
- handle playlist subscriptions
"""
from home.src.download import queue # partial import
from home.src.download.thumbnails import ThumbManager
from home.src.download.yt_dlp_base import YtWrap
from home.src.es.connect import IndexPaginate
from home.src.index.channel import YoutubeChannel
from home.src.index.playlist import YoutubePlaylist
from home.src.index.video import YoutubeVideo
from home.src.index.video_constants import VideoTypeEnum
from home.src.ta.config import AppConfig
from home.src.ta.helper import is_missing
from home.src.ta.urlparser import Parser
@ -106,6 +105,10 @@ class ChannelSubscription:
if not all_channels:
return False
pending = queue.PendingList()
pending.get_download()
pending.get_indexed()
missing_videos = []
total = len(all_channels)
@ -115,22 +118,22 @@ class ChannelSubscription:
last_videos = self.get_last_youtube_videos(channel_id)
if last_videos:
ids_to_add = is_missing([i[0] for i in last_videos])
for video_id, _, vid_type in last_videos:
if video_id in ids_to_add:
if video_id not in pending.to_skip:
missing_videos.append((video_id, vid_type))
if not self.task:
continue
if self.task.is_stopped():
self.task.send_progress(["Received Stop signal."])
break
if self.task:
if self.task.is_stopped():
self.task.send_progress(["Received Stop signal."])
break
self.task.send_progress(
message_lines=[f"Scanning Channel {idx + 1}/{total}"],
progress=(idx + 1) / total,
)
self.task.send_progress(
message_lines=[f"Scanning Channel {idx + 1}/{total}"],
progress=(idx + 1) / total,
)
return missing_videos
@ -171,6 +174,10 @@ class PlaylistSubscription:
def process_url_str(self, new_playlists, subscribed=True):
"""process playlist subscribe form url_str"""
data = {"query": {"match_all": {}}, "_source": ["youtube_id"]}
all_indexed = IndexPaginate("ta_video", data).get_results()
all_youtube_ids = [i["youtube_id"] for i in all_indexed]
for idx, playlist in enumerate(new_playlists):
playlist_id = playlist["url"]
if not playlist["type"] == "playlist":
@ -178,6 +185,7 @@ class PlaylistSubscription:
continue
playlist_h = YoutubePlaylist(playlist_id)
playlist_h.all_youtube_ids = all_youtube_ids
playlist_h.build_json()
if not playlist_h.json_data:
message = f"{playlist_h.youtube_id}: failed to extract data"
@ -215,15 +223,27 @@ class PlaylistSubscription:
playlist.json_data["playlist_subscribed"] = subscribe_status
playlist.upload_to_es()
@staticmethod
def get_to_ignore():
"""get all youtube_ids already downloaded or ignored"""
pending = queue.PendingList()
pending.get_download()
pending.get_indexed()
return pending.to_skip
def find_missing(self):
"""find videos in subscribed playlists not downloaded yet"""
all_playlists = [i["playlist_id"] for i in self.get_playlists()]
if not all_playlists:
return False
to_ignore = self.get_to_ignore()
missing_videos = []
total = len(all_playlists)
for idx, playlist_id in enumerate(all_playlists):
size_limit = self.config["subscriptions"]["channel_size"]
playlist = YoutubePlaylist(playlist_id)
is_active = playlist.update_playlist()
if not is_active:
@ -231,29 +251,27 @@ class PlaylistSubscription:
continue
playlist_entries = playlist.json_data["playlist_entries"]
size_limit = self.config["subscriptions"]["channel_size"]
if size_limit:
del playlist_entries[size_limit:]
to_check = [
i["youtube_id"]
for i in playlist_entries
if i["downloaded"] is False
]
needs_downloading = is_missing(to_check)
missing_videos.extend(needs_downloading)
all_missing = [i for i in playlist_entries if not i["downloaded"]]
for video in all_missing:
youtube_id = video["youtube_id"]
if youtube_id not in to_ignore:
missing_videos.append(youtube_id)
if not self.task:
continue
if self.task.is_stopped():
self.task.send_progress(["Received Stop signal."])
break
self.task.send_progress(
message_lines=[f"Scanning Playlists {idx + 1}/{total}"],
progress=(idx + 1) / total,
)
if self.task:
self.task.send_progress(
message_lines=[f"Scanning Playlists {idx + 1}/{total}"],
progress=(idx + 1) / total,
)
if self.task.is_stopped():
self.task.send_progress(["Received Stop signal."])
break
return missing_videos
@ -341,10 +359,8 @@ class SubscriptionHandler:
if item["type"] == "video":
# extract channel id from video
video = YoutubeVideo(item["url"])
video.get_from_youtube()
video.process_youtube_meta()
channel_id = video.channel_id
vid = queue.PendingList().get_youtube_details(item["url"])
channel_id = vid["channel_id"]
elif item["type"] == "channel":
channel_id = item["url"]
else:

View File

@ -11,7 +11,6 @@ from time import sleep
import requests
from home.src.es.connect import ElasticWrap, IndexPaginate
from home.src.ta.helper import is_missing
from home.src.ta.settings import EnvironmentSettings
from mutagen.mp4 import MP4, MP4Cover
from PIL import Image, ImageFile, ImageFilter, UnidentifiedImageError
@ -327,7 +326,7 @@ class ThumbValidator:
},
]
def __init__(self, task=False):
def __init__(self, task):
self.task = task
def validate(self):
@ -347,89 +346,6 @@ class ThumbValidator:
)
_ = paginate.get_results()
def clean_up(self):
"""clean up all thumbs"""
self._clean_up_vids()
self._clean_up_channels()
self._clean_up_playlists()
def _clean_up_vids(self):
"""clean unneeded vid thumbs"""
video_dir = os.path.join(EnvironmentSettings.CACHE_DIR, "videos")
video_folders = os.listdir(video_dir)
for video_folder in video_folders:
folder_path = os.path.join(video_dir, video_folder)
thumbs_is = {i.split(".")[0] for i in os.listdir(folder_path)}
thumbs_should = self._get_vid_thumbs_should(video_folder)
to_delete = thumbs_is - thumbs_should
for thumb in to_delete:
delete_path = os.path.join(folder_path, f"{thumb}.jpg")
os.remove(delete_path)
if to_delete:
message = (
f"[thumbs][video][{video_folder}] "
+ f"delete {len(to_delete)} unused thumbnails"
)
print(message)
if self.task:
self.task.send_progress([message])
@staticmethod
def _get_vid_thumbs_should(video_folder: str) -> set[str]:
"""get indexed"""
should_list = [
{"prefix": {"youtube_id": {"value": video_folder.lower()}}},
{"prefix": {"youtube_id": {"value": video_folder.upper()}}},
]
data = {
"query": {"bool": {"should": should_list}},
"_source": ["youtube_id"],
}
result = IndexPaginate("ta_video,ta_download", data).get_results()
thumbs_should = {i["youtube_id"] for i in result}
return thumbs_should
def _clean_up_channels(self):
"""clean unneeded channel thumbs"""
channel_dir = os.path.join(EnvironmentSettings.CACHE_DIR, "channels")
channel_art = os.listdir(channel_dir)
thumbs_is = {"_".join(i.split("_")[:-1]) for i in channel_art}
to_delete = is_missing(list(thumbs_is), "ta_channel", "channel_id")
for channel_thumb in channel_art:
if channel_thumb[:24] in to_delete:
delete_path = os.path.join(channel_dir, channel_thumb)
os.remove(delete_path)
if to_delete:
message = (
"[thumbs][channel] "
+ f"delete {len(to_delete)} unused channel art"
)
print(message)
if self.task:
self.task.send_progress([message])
def _clean_up_playlists(self):
"""clean up unneeded playlist thumbs"""
playlist_dir = os.path.join(EnvironmentSettings.CACHE_DIR, "playlists")
playlist_art = os.listdir(playlist_dir)
thumbs_is = {i.split(".")[0] for i in playlist_art}
to_delete = is_missing(list(thumbs_is), "ta_playlist", "playlist_id")
for playlist_id in to_delete:
delete_path = os.path.join(playlist_dir, f"{playlist_id}.jpg")
os.remove(delete_path)
if to_delete:
message = (
"[thumbs][playlist] "
+ f"delete {len(to_delete)} unused playlist art"
)
print(message)
if self.task:
self.task.send_progress([message])
@staticmethod
def _get_total(index_name):
"""get total documents in index"""

View File

@ -20,69 +20,182 @@ from home.src.index.playlist import YoutubePlaylist
from home.src.index.video import YoutubeVideo, index_new_video
from home.src.index.video_constants import VideoTypeEnum
from home.src.ta.config import AppConfig
from home.src.ta.helper import get_channel_overwrites, ignore_filelist
from home.src.ta.helper import ignore_filelist
from home.src.ta.settings import EnvironmentSettings
from home.src.ta.ta_redis import RedisQueue
class DownloaderBase:
"""base class for shared config"""
class DownloadPostProcess:
"""handle task to run after download queue finishes"""
CACHE_DIR = EnvironmentSettings.CACHE_DIR
MEDIA_DIR = EnvironmentSettings.MEDIA_DIR
CHANNEL_QUEUE = "download:channel"
PLAYLIST_QUEUE = "download:playlist:full"
PLAYLIST_QUICK = "download:playlist:quick"
VIDEO_QUEUE = "download:video"
def __init__(self, download):
self.download = download
self.now = int(datetime.now().timestamp())
self.pending = False
def __init__(self, task):
def run(self):
"""run all functions"""
self.pending = PendingList()
self.pending.get_download()
self.pending.get_channels()
self.pending.get_indexed()
self.auto_delete_all()
self.auto_delete_overwrites()
self.validate_playlists()
self.get_comments()
def auto_delete_all(self):
"""handle auto delete"""
autodelete_days = self.download.config["downloads"]["autodelete_days"]
if not autodelete_days:
return
print(f"auto delete older than {autodelete_days} days")
now_lte = str(self.now - autodelete_days * 24 * 60 * 60)
data = {
"query": {"range": {"player.watched_date": {"lte": now_lte}}},
"sort": [{"player.watched_date": {"order": "asc"}}],
}
self._auto_delete_watched(data)
def auto_delete_overwrites(self):
"""handle per channel auto delete from overwrites"""
for channel_id, value in self.pending.channel_overwrites.items():
if "autodelete_days" in value:
autodelete_days = value.get("autodelete_days")
print(f"{channel_id}: delete older than {autodelete_days}d")
now_lte = str(self.now - autodelete_days * 24 * 60 * 60)
must_list = [
{"range": {"player.watched_date": {"lte": now_lte}}},
{"term": {"channel.channel_id": {"value": channel_id}}},
]
data = {
"query": {"bool": {"must": must_list}},
"sort": [{"player.watched_date": {"order": "desc"}}],
}
self._auto_delete_watched(data)
@staticmethod
def _auto_delete_watched(data):
"""delete watched videos after x days"""
to_delete = IndexPaginate("ta_video", data).get_results()
if not to_delete:
return
for video in to_delete:
youtube_id = video["youtube_id"]
print(f"{youtube_id}: auto delete video")
YoutubeVideo(youtube_id).delete_media_file()
print("add deleted to ignore list")
vids = [{"type": "video", "url": i["youtube_id"]} for i in to_delete]
pending = PendingList(youtube_ids=vids)
pending.parse_url_list()
pending.add_to_pending(status="ignore")
def validate_playlists(self):
"""look for playlist needing to update"""
for id_c, channel_id in enumerate(self.download.channels):
channel = YoutubeChannel(channel_id, task=self.download.task)
overwrites = self.pending.channel_overwrites.get(channel_id, False)
if overwrites and overwrites.get("index_playlists"):
# validate from remote
channel.index_channel_playlists()
continue
# validate from local
playlists = channel.get_indexed_playlists(active_only=True)
all_channel_playlist = [i["playlist_id"] for i in playlists]
self._validate_channel_playlist(all_channel_playlist, id_c)
def _validate_channel_playlist(self, all_channel_playlist, id_c):
"""scan channel for playlist needing update"""
all_youtube_ids = [i["youtube_id"] for i in self.pending.all_videos]
for id_p, playlist_id in enumerate(all_channel_playlist):
playlist = YoutubePlaylist(playlist_id)
playlist.all_youtube_ids = all_youtube_ids
playlist.build_json(scrape=True)
if not playlist.json_data:
playlist.deactivate()
continue
playlist.add_vids_to_playlist()
playlist.upload_to_es()
self._notify_playlist_progress(all_channel_playlist, id_c, id_p)
def _notify_playlist_progress(self, all_channel_playlist, id_c, id_p):
"""notify to UI"""
if not self.download.task:
return
total_channel = len(self.download.channels)
total_playlist = len(all_channel_playlist)
message = [
f"Post Processing Channels: {id_c}/{total_channel}",
f"Validate Playlists {id_p + 1}/{total_playlist}",
]
progress = (id_c + 1) / total_channel
self.download.task.send_progress(message, progress=progress)
def get_comments(self):
"""get comments from youtube"""
CommentList(self.download.videos, task=self.download.task).index()
class VideoDownloader:
"""
handle the video download functionality
if not initiated with a list, take from the queue
"""
def __init__(self, youtube_id_list=False, task=False):
self.obs = False
self.video_overwrites = False
self.youtube_id_list = youtube_id_list
self.task = task
self.config = AppConfig().config
self.channel_overwrites = get_channel_overwrites()
self.now = int(datetime.now().timestamp())
class VideoDownloader(DownloaderBase):
"""handle the video download functionality"""
def __init__(self, task=False):
super().__init__(task)
self.obs = False
self.cache_dir = EnvironmentSettings.CACHE_DIR
self.media_dir = EnvironmentSettings.MEDIA_DIR
self._build_obs()
self.channels = set()
self.videos = set()
def run_queue(self, auto_only=False) -> int:
def run_queue(self, auto_only=False):
"""setup download queue in redis loop until no more items"""
downloaded = 0
self._get_overwrites()
while True:
video_data = self._get_next(auto_only)
if self.task.is_stopped() or not video_data:
self._reset_auto()
break
youtube_id = video_data["youtube_id"]
channel_id = video_data["channel_id"]
youtube_id = video_data.get("youtube_id")
print(f"{youtube_id}: Downloading video")
self._notify(video_data, "Validate download format")
success = self._dl_single_vid(youtube_id, channel_id)
success = self._dl_single_vid(youtube_id)
if not success:
continue
self._notify(video_data, "Add video metadata to index", progress=1)
video_type = VideoTypeEnum(video_data["vid_type"])
vid_dict = index_new_video(youtube_id, video_type=video_type)
RedisQueue(self.CHANNEL_QUEUE).add(channel_id)
RedisQueue(self.VIDEO_QUEUE).add(youtube_id)
vid_dict = index_new_video(
youtube_id,
video_overwrites=self.video_overwrites,
video_type=VideoTypeEnum(video_data["vid_type"]),
)
self.channels.add(vid_dict["channel"]["channel_id"])
self.videos.add(vid_dict["youtube_id"])
self._notify(video_data, "Move downloaded file to archive")
self.move_to_archive(vid_dict)
self._delete_from_pending(youtube_id)
downloaded += 1
# post processing
DownloadPostProcess(self.task).run()
self._add_subscribed_channels()
DownloadPostProcess(self).run()
return downloaded
return self.videos
def _notify(self, video_data, message, progress=False):
"""send progress notification to task"""
@ -117,6 +230,13 @@ class VideoDownloader(DownloaderBase):
return response["hits"]["hits"][0]["_source"]
def _get_overwrites(self):
"""get channel overwrites"""
pending = PendingList()
pending.get_download()
pending.get_channels()
self.video_overwrites = pending.video_overwrites
def _progress_hook(self, response):
"""process the progress_hooks from yt_dlp"""
progress = False
@ -147,7 +267,7 @@ class VideoDownloader(DownloaderBase):
"""initial obs"""
self.obs = {
"merge_output_format": "mp4",
"outtmpl": (self.CACHE_DIR + "/download/%(id)s.mp4"),
"outtmpl": (self.cache_dir + "/download/%(id)s.mp4"),
"progress_hooks": [self._progress_hook],
"noprogress": True,
"continuedl": True,
@ -207,17 +327,22 @@ class VideoDownloader(DownloaderBase):
self.obs["postprocessors"] = postprocessors
def _set_overwrites(self, obs: dict, channel_id: str) -> None:
"""add overwrites to obs"""
overwrites = self.channel_overwrites.get(channel_id)
if overwrites and overwrites.get("download_format"):
obs["format"] = overwrites.get("download_format")
def get_format_overwrites(self, youtube_id):
"""get overwrites from single video"""
overwrites = self.video_overwrites.get(youtube_id, False)
if overwrites:
return overwrites.get("download_format", False)
def _dl_single_vid(self, youtube_id: str, channel_id: str) -> bool:
return False
def _dl_single_vid(self, youtube_id):
"""download single video"""
obs = self.obs.copy()
self._set_overwrites(obs, channel_id)
dl_cache = os.path.join(self.CACHE_DIR, "download")
format_overwrite = self.get_format_overwrites(youtube_id)
if format_overwrite:
obs["format"] = format_overwrite
dl_cache = self.cache_dir + "/download/"
# check if already in cache to continue from there
all_cached = ignore_filelist(os.listdir(dl_cache))
@ -251,7 +376,7 @@ class VideoDownloader(DownloaderBase):
host_gid = EnvironmentSettings.HOST_GID
# make folder
folder = os.path.join(
self.MEDIA_DIR, vid_dict["channel"]["channel_id"]
self.media_dir, vid_dict["channel"]["channel_id"]
)
if not os.path.exists(folder):
os.makedirs(folder)
@ -259,8 +384,8 @@ class VideoDownloader(DownloaderBase):
os.chown(folder, host_uid, host_gid)
# move media file
media_file = vid_dict["youtube_id"] + ".mp4"
old_path = os.path.join(self.CACHE_DIR, "download", media_file)
new_path = os.path.join(self.MEDIA_DIR, vid_dict["media_url"])
old_path = os.path.join(self.cache_dir, "download", media_file)
new_path = os.path.join(self.media_dir, vid_dict["media_url"])
# move media file and fix permission
shutil.move(old_path, new_path, copy_function=shutil.copyfile)
if host_uid and host_gid:
@ -272,6 +397,18 @@ class VideoDownloader(DownloaderBase):
path = f"ta_download/_doc/{youtube_id}?refresh=true"
_, _ = ElasticWrap(path).delete()
def _add_subscribed_channels(self):
"""add all channels subscribed to refresh"""
all_subscribed = PlaylistSubscription().get_playlists()
if not all_subscribed:
return
channel_ids = [i["playlist_channel_id"] for i in all_subscribed]
for channel_id in channel_ids:
self.channels.add(channel_id)
return
def _reset_auto(self):
"""reset autostart to defaults after queue stop"""
path = "ta_download/_update_by_query"
@ -286,169 +423,3 @@ class VideoDownloader(DownloaderBase):
updated = response.get("updated")
if updated:
print(f"[download] reset auto start on {updated} videos.")
class DownloadPostProcess(DownloaderBase):
"""handle task to run after download queue finishes"""
def run(self):
"""run all functions"""
self.auto_delete_all()
self.auto_delete_overwrites()
self.refresh_playlist()
self.match_videos()
self.get_comments()
def auto_delete_all(self):
"""handle auto delete"""
autodelete_days = self.config["downloads"]["autodelete_days"]
if not autodelete_days:
return
print(f"auto delete older than {autodelete_days} days")
now_lte = str(self.now - autodelete_days * 24 * 60 * 60)
data = {
"query": {"range": {"player.watched_date": {"lte": now_lte}}},
"sort": [{"player.watched_date": {"order": "asc"}}],
}
self._auto_delete_watched(data)
def auto_delete_overwrites(self):
"""handle per channel auto delete from overwrites"""
for channel_id, value in self.channel_overwrites.items():
if "autodelete_days" in value:
autodelete_days = value.get("autodelete_days")
print(f"{channel_id}: delete older than {autodelete_days}d")
now_lte = str(self.now - autodelete_days * 24 * 60 * 60)
must_list = [
{"range": {"player.watched_date": {"lte": now_lte}}},
{"term": {"channel.channel_id": {"value": channel_id}}},
]
data = {
"query": {"bool": {"must": must_list}},
"sort": [{"player.watched_date": {"order": "desc"}}],
}
self._auto_delete_watched(data)
@staticmethod
def _auto_delete_watched(data):
"""delete watched videos after x days"""
to_delete = IndexPaginate("ta_video", data).get_results()
if not to_delete:
return
for video in to_delete:
youtube_id = video["youtube_id"]
print(f"{youtube_id}: auto delete video")
YoutubeVideo(youtube_id).delete_media_file()
print("add deleted to ignore list")
vids = [{"type": "video", "url": i["youtube_id"]} for i in to_delete]
pending = PendingList(youtube_ids=vids)
pending.parse_url_list()
_ = pending.add_to_pending(status="ignore")
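A worked example of the query `auto_delete_all` assembles for a 30-day setting; `player.watched_date` holds epoch seconds, so the cutoff is simply now minus 30 days:

```python
import json
from datetime import datetime

autodelete_days = 30
now = int(datetime.now().timestamp())
now_lte = str(now - autodelete_days * 24 * 60 * 60)
data = {
    "query": {"range": {"player.watched_date": {"lte": now_lte}}},
    "sort": [{"player.watched_date": {"order": "asc"}}],
}
print(json.dumps(data, indent=2))
```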
def refresh_playlist(self) -> None:
"""match videos with playlists"""
self.add_playlists_to_refresh()
queue = RedisQueue(self.PLAYLIST_QUEUE)
while True:
total = queue.max_score()
playlist_id, idx = queue.get_next()
if not playlist_id or not idx or not total:
break
playlist = YoutubePlaylist(playlist_id)
playlist.update_playlist(skip_on_empty=True)
if not self.task:
continue
channel_name = playlist.json_data["playlist_channel"]
playlist_title = playlist.json_data["playlist_name"]
message = [
f"Post Processing Playlists for: {channel_name}",
f"{playlist_title} [{idx}/{total}]",
]
progress = idx / total
self.task.send_progress(message, progress=progress)
def add_playlists_to_refresh(self) -> None:
"""add playlists to refresh"""
if self.task:
message = ["Post Processing Playlists", "Scanning for Playlists"]
self.task.send_progress(message)
self._add_playlist_sub()
self._add_channel_playlists()
self._add_video_playlists()
def _add_playlist_sub(self):
"""add subscribed playlists to refresh"""
subs = PlaylistSubscription().get_playlists()
to_add = [i["playlist_id"] for i in subs]
RedisQueue(self.PLAYLIST_QUEUE).add_list(to_add)
def _add_channel_playlists(self):
"""add playlists from channels to refresh"""
queue = RedisQueue(self.CHANNEL_QUEUE)
while True:
channel_id, _ = queue.get_next()
if not channel_id:
break
channel = YoutubeChannel(channel_id)
channel.get_from_es()
overwrites = channel.get_overwrites()
if "index_playlists" in overwrites:
channel.get_all_playlists()
to_add = [i[0] for i in channel.all_playlists]
RedisQueue(self.PLAYLIST_QUEUE).add_list(to_add)
def _add_video_playlists(self):
"""add other playlists for quick sync"""
all_playlists = RedisQueue(self.PLAYLIST_QUEUE).get_all()
must_not = [{"terms": {"playlist_id": all_playlists}}]
video_ids = RedisQueue(self.VIDEO_QUEUE).get_all()
must = [{"terms": {"playlist_entries.youtube_id": video_ids}}]
data = {
"query": {"bool": {"must_not": must_not, "must": must}},
"_source": ["playlist_id"],
}
playlists = IndexPaginate("ta_playlist", data).get_results()
to_add = [i["playlist_id"] for i in playlists]
RedisQueue(self.PLAYLIST_QUICK).add_list(to_add)
def match_videos(self) -> None:
"""scan rest of indexed playlists to match videos"""
queue = RedisQueue(self.PLAYLIST_QUICK)
while True:
total = queue.max_score()
playlist_id, idx = queue.get_next()
if not playlist_id or not idx or not total:
break
playlist = YoutubePlaylist(playlist_id)
playlist.get_from_es()
playlist.add_vids_to_playlist()
playlist.remove_vids_from_playlist()
if not self.task:
continue
message = [
"Post Processing Playlists.",
f"Validate Playlists: - {idx}/{total}",
]
progress = idx / total
self.task.send_progress(message, progress=progress)
def get_comments(self):
"""get comments from youtube"""
video_queue = RedisQueue(self.VIDEO_QUEUE)
comment_list = CommentList(task=self.task)
comment_list.add(video_ids=video_queue.get_all())
video_queue.clear()
comment_list.index()
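The `refresh_playlist` and `match_videos` loops above lean on the sorted-set queue semantics: each member's score is its enqueue position, so `max_score()` doubles as the total for the progress math. A standalone sketch of that drain pattern using redis-py directly; it assumes a local Redis, and the queue name and members are placeholders:

```python
import redis

conn = redis.Redis(decode_responses=True)
key = "demo:playlist"  # hypothetical queue name
conn.delete(key)
conn.zadd(key, {f"playlist_{i}": i + 1 for i in range(3)})

# highest score in the set is the total number of enqueued items
total = int(conn.zrange(key, -1, -1, withscores=True)[0][1])
while True:
    result = conn.zpopmin(key)
    if not result:
        break
    playlist_id, idx = result[0][0], int(result[0][1])
    print(f"processing {playlist_id} [{idx}/{total}] ({idx / total:.0%})")
```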

View File

@ -10,7 +10,6 @@ import os
import zipfile
from datetime import datetime
from home.models import CustomPeriodicTask
from home.src.es.connect import ElasticWrap, IndexPaginate
from home.src.ta.config import AppConfig
from home.src.ta.helper import get_mapping, ignore_filelist
@ -198,12 +197,7 @@ class ElasticBackup:
def rotate_backup(self):
"""delete old backups if needed"""
try:
task = CustomPeriodicTask.objects.get(name="run_backup")
except CustomPeriodicTask.DoesNotExist:
return
rotate = task.task_config.get("rotate")
rotate = self.config["scheduler"]["run_backup_rotate"]
if not rotate:
return

View File

@ -159,6 +159,50 @@ class ApplicationSettingsForm(forms.Form):
)
class SchedulerSettingsForm(forms.Form):
"""handle scheduler settings"""
HELP_TEXT = "Add Apprise notification URLs, one per line"
update_subscribed = forms.CharField(required=False)
update_subscribed_notify = forms.CharField(
label=False,
widget=forms.Textarea(
attrs={
"rows": 2,
"placeholder": HELP_TEXT,
}
),
required=False,
)
download_pending = forms.CharField(required=False)
download_pending_notify = forms.CharField(
label=False,
widget=forms.Textarea(
attrs={
"rows": 2,
"placeholder": HELP_TEXT,
}
),
required=False,
)
check_reindex = forms.CharField(required=False)
check_reindex_notify = forms.CharField(
label=False,
widget=forms.Textarea(
attrs={
"rows": 2,
"placeholder": HELP_TEXT,
}
),
required=False,
)
check_reindex_days = forms.IntegerField(required=False)
thumbnail_check = forms.CharField(required=False)
run_backup = forms.CharField(required=False)
run_backup_rotate = forms.IntegerField(required=False)
class MultiSearchForm(forms.Form):
"""multi search form for /search/"""

View File

@ -1,101 +0,0 @@
"""
Functionality:
- handle schedule forms
- implement form validation
"""
from celery.schedules import crontab
from django import forms
from home.src.ta.task_config import TASK_CONFIG
class CrontabValidator:
"""validate crontab"""
@staticmethod
def validate_fields(cron_fields):
"""expect 3 cron fields"""
if not len(cron_fields) == 3:
raise forms.ValidationError("expected three cron schedule fields")
@staticmethod
def validate_minute(minute_field):
"""expect minute int"""
try:
minute_value = int(minute_field)
if not 0 <= minute_value <= 59:
raise forms.ValidationError(
"Invalid value for minutes. Must be between 0 and 59."
)
except ValueError as err:
raise forms.ValidationError(
"Invalid value for minutes. Must be an integer."
) from err
@staticmethod
def validate_cron_tab(minute, hour, day_of_week):
"""check if crontab can be created"""
try:
crontab(minute=minute, hour=hour, day_of_week=day_of_week)
except ValueError as err:
raise forms.ValidationError(f"invalid crontab: {err}") from err
def validate(self, cron_expression):
"""create crontab schedule"""
if cron_expression == "auto":
return
cron_fields = cron_expression.split()
self.validate_fields(cron_fields)
minute, hour, day_of_week = cron_fields
self.validate_minute(minute)
self.validate_cron_tab(minute, hour, day_of_week)
def validate_cron(cron_expression):
"""callable for field"""
CrontabValidator().validate(cron_expression)
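A small usage sketch for the validator, assuming the definitions above are in scope; `30 8 */2` passes all three checks, while `*/5 * *` trips the integer-minute rule:

```python
from django import forms

for expression in ("auto", "30 8 */2", "*/5 * *"):
    try:
        validate_cron(expression)
        print(f"{expression!r}: ok")
    except forms.ValidationError as err:
        print(f"{expression!r}: {err.messages}")
```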
class SchedulerSettingsForm(forms.Form):
"""handle scheduler settings"""
update_subscribed = forms.CharField(
required=False, validators=[validate_cron]
)
download_pending = forms.CharField(
required=False, validators=[validate_cron]
)
check_reindex = forms.CharField(required=False, validators=[validate_cron])
check_reindex_days = forms.IntegerField(required=False)
thumbnail_check = forms.CharField(
required=False, validators=[validate_cron]
)
run_backup = forms.CharField(required=False, validators=[validate_cron])
run_backup_rotate = forms.IntegerField(required=False)
class NotificationSettingsForm(forms.Form):
"""add notification URL"""
SUPPORTED_TASKS = [
"update_subscribed",
"extract_download",
"download_pending",
"check_reindex",
]
TASK_LIST = [(i, TASK_CONFIG[i]["title"]) for i in SUPPORTED_TASKS]
TASK_CHOICES = [("", "-- select task --")]
TASK_CHOICES.extend(TASK_LIST)
PLACEHOLDER = "Apprise notification URL"
task = forms.ChoiceField(
widget=forms.Select, choices=TASK_CHOICES, required=False
)
notification_url = forms.CharField(
required=False,
widget=forms.TextInput(attrs={"placeholder": PLACEHOLDER}),
)

View File

@ -6,62 +6,34 @@ functionality:
import json
import os
import re
from datetime import datetime
import requests
from home.src.download import queue # partial import
from home.src.download.thumbnails import ThumbManager
from home.src.download.yt_dlp_base import YtWrap
from home.src.es.connect import ElasticWrap, IndexPaginate
from home.src.index.generic import YouTubeItem
from home.src.index.playlist import YoutubePlaylist
from home.src.ta.helper import requests_headers
from home.src.ta.settings import EnvironmentSettings
def banner_extractor(channel_id: str) -> dict[str, str] | None:
"""workaround for new channel renderer, upstream #9893"""
url = f"https://www.youtube.com/channel/{channel_id}?hl=en"
cookies = {"SOCS": "CAI"}
response = requests.get(
url, cookies=cookies, headers=requests_headers(), timeout=30
)
if not response.ok:
return None
matched_urls = re.findall(
r'"(https://yt3.googleusercontent.com/[^"]+=w(\d{3,4})-fcrop64[^"]*)"',
response.text,
)
if not matched_urls:
return None
sorted_urls = sorted(matched_urls, key=lambda x: int(x[1]), reverse=True)
banner = sorted_urls[0][0]
channel_art_fallback = {
"channel_banner_url": banner,
"channel_tvart_url": banner.split("-fcrop64")[0],
}
return channel_art_fallback
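Usage sketch for the fallback extractor; the channel ID is a placeholder and the function performs a live HTTP request, so `None` is a normal outcome whenever YouTube's markup shifts:

```python
art = banner_extractor("UC0000000000000000000000")  # placeholder channel ID
if art:
    print(art["channel_banner_url"])
    print(art["channel_tvart_url"])
else:
    print("no banner found, keeping existing art")
```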
class YoutubeChannel(YouTubeItem):
"""represents a single youtube channel"""
es_path = False
index_name = "ta_channel"
yt_base = "https://www.youtube.com/channel/"
yt_obs = {
"playlist_items": "1,0",
"skip_download": True,
}
yt_obs = {"playlist_items": "0,0"}
def __init__(self, youtube_id, task=False):
super().__init__(youtube_id)
self.all_playlists = False
self.task = task
def build_yt_url(self):
"""overwrite base to use channel about page"""
return f"{self.yt_base}{self.youtube_id}/about"
def build_json(self, upload=False, fallback=False):
"""get from es or from youtube"""
self.get_from_es()
@ -81,13 +53,14 @@ class YoutubeChannel(YouTubeItem):
def process_youtube_meta(self):
"""extract relevant fields"""
self.youtube_meta["thumbnails"].reverse()
channel_subs = self.youtube_meta.get("channel_follower_count") or 0
self.json_data = {
"channel_active": True,
"channel_description": self.youtube_meta.get("description", False),
"channel_id": self.youtube_id,
"channel_last_refresh": int(datetime.now().timestamp()),
"channel_name": self.youtube_meta["uploader"],
"channel_subs": self._extract_follower_count(),
"channel_subs": channel_subs,
"channel_subscribed": False,
"channel_tags": self._parse_tags(self.youtube_meta.get("tags")),
"channel_banner_url": self._get_banner_art(),
@ -95,34 +68,6 @@ class YoutubeChannel(YouTubeItem):
"channel_tvart_url": self._get_tv_art(),
"channel_views": self.youtube_meta.get("view_count") or 0,
}
self._inject_fallback()
def _inject_fallback(self):
"""fallback channel art work, workaround for upstream #9893"""
if self.json_data["channel_banner_url"]:
return
print(f"{self.youtube_id}: attempt art fallback extraction")
fallback = banner_extractor(self.youtube_id)
if fallback:
print(f"{self.youtube_id}: fallback succeeded: {fallback}")
self.json_data.update(fallback)
def _extract_follower_count(self) -> int:
"""workaround for upstream #9893, extract subs from first video"""
subs = self.youtube_meta.get("channel_follower_count")
if subs is not None:
return subs
entries = self.youtube_meta.get("entries", [])
if entries:
first_entry = entries[0]
if isinstance(first_entry, dict):
subs_entry = first_entry.get("channel_follower_count")
if subs_entry is not None:
return subs_entry
return 0
def _parse_tags(self, tags):
"""parse channel tags"""
@ -267,7 +212,9 @@ class YoutubeChannel(YouTubeItem):
"""delete all indexed playlist from es"""
all_playlists = self.get_indexed_playlists()
for playlist in all_playlists:
YoutubePlaylist(playlist["playlist_id"]).delete_metadata()
playlist_id = playlist["playlist_id"]
playlist = YoutubePlaylist(playlist_id)
YoutubePlaylist(playlist_id).delete_metadata()
def delete_channel(self):
"""delete channel and all videos"""
@ -306,12 +253,13 @@ class YoutubeChannel(YouTubeItem):
print(f"{self.youtube_id}: no playlists found.")
return
all_youtube_ids = self.get_all_video_ids()
total = len(self.all_playlists)
for idx, playlist in enumerate(self.all_playlists):
if self.task:
self._notify_single_playlist(idx, total)
self._index_single_playlist(playlist)
self._index_single_playlist(playlist, all_youtube_ids)
print("add playlist: " + playlist[1])
def _notify_single_playlist(self, idx, total):
@ -324,10 +272,32 @@ class YoutubeChannel(YouTubeItem):
self.task.send_progress(message, progress=(idx + 1) / total)
@staticmethod
def _index_single_playlist(playlist):
def _index_single_playlist(playlist, all_youtube_ids):
"""add single playlist if needed"""
playlist = YoutubePlaylist(playlist[0])
playlist.update_playlist(skip_on_empty=True)
playlist.all_youtube_ids = all_youtube_ids
playlist.build_json()
if not playlist.json_data:
return
entries = playlist.json_data["playlist_entries"]
downloaded = [i for i in entries if i["downloaded"]]
if not downloaded:
return
playlist.upload_to_es()
playlist.add_vids_to_playlist()
playlist.get_playlist_art()
@staticmethod
def get_all_video_ids():
"""match all playlists with videos"""
handler = queue.PendingList()
handler.get_download()
handler.get_indexed()
all_youtube_ids = [i["youtube_id"] for i in handler.all_videos]
return all_youtube_ids
def get_channel_videos(self):
"""get all videos from channel"""
@ -364,9 +334,9 @@ class YoutubeChannel(YouTubeItem):
all_playlists = IndexPaginate("ta_playlist", data).get_results()
return all_playlists
def get_overwrites(self) -> dict:
def get_overwrites(self):
"""get all per channel overwrites"""
return self.json_data.get("channel_overwrites", {})
return self.json_data.get("channel_overwrites", False)
def set_overwrites(self, overwrites):
"""set per channel overwrites"""

View File

@ -10,7 +10,6 @@ from datetime import datetime
from home.src.download.yt_dlp_base import YtWrap
from home.src.es.connect import ElasticWrap
from home.src.ta.config import AppConfig
from home.src.ta.ta_redis import RedisQueue
class Comments:
@ -190,30 +189,20 @@ class Comments:
class CommentList:
"""interact with comments in group"""
COMMENT_QUEUE = "index:comment"
def __init__(self, task=False):
def __init__(self, video_ids, task=False):
self.video_ids = video_ids
self.task = task
self.config = AppConfig().config
def add(self, video_ids: list[str]) -> None:
"""add list of videos to get comments, if enabled in config"""
def index(self):
"""index comments for list, init with task object to notify"""
if not self.config["downloads"].get("comment_max"):
return
RedisQueue(self.COMMENT_QUEUE).add_list(video_ids)
def index(self):
"""run comment index"""
queue = RedisQueue(self.COMMENT_QUEUE)
while True:
total = queue.max_score()
youtube_id, idx = queue.get_next()
if not youtube_id or not idx or not total:
break
total_videos = len(self.video_ids)
for idx, youtube_id in enumerate(self.video_ids):
if self.task:
self.notify(idx, total)
self.notify(idx, total_videos)
comment = Comments(youtube_id, config=self.config)
comment.build_json()
@ -222,6 +211,6 @@ class CommentList:
def notify(self, idx, total_videos):
"""send notification on task"""
message = [f"Add comments for new videos {idx}/{total_videos}"]
progress = idx / total_videos
message = [f"Add comments for new videos {idx + 1}/{total_videos}"]
progress = (idx + 1) / total_videos
self.task.send_progress(message, progress=progress)
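This hunk is an off-by-one fix in disguise: with `enumerate`, `idx` is 0-based, so the 1-based variant is needed for the progress bar to ever reach 100%. A two-line demonstration:

```python
total = 4
print([idx / total for idx in range(total)])        # [0.0, 0.25, 0.5, 0.75]
print([(idx + 1) / total for idx in range(total)])  # [0.25, 0.5, 0.75, 1.0]
```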

View File

@ -89,9 +89,7 @@ class Scanner:
)
index_new_video(youtube_id)
comment_list = CommentList(task=self.task)
comment_list.add(video_ids=list(self.to_index))
comment_list.index()
CommentList(self.to_index, task=self.task).index()
def url_fix(self) -> None:
"""

View File

@ -17,7 +17,7 @@ class YouTubeItem:
es_path = False
index_name = ""
yt_base = ""
yt_obs: dict[str, bool | str] = {
yt_obs = {
"skip_download": True,
"noplaylist": True,
}

View File

@ -147,9 +147,7 @@ class ImportFolderScanner:
ManualImport(current_video, self.CONFIG).run()
video_ids = [i["video_id"] for i in self.to_import]
comment_list = CommentList(task=self.task)
comment_list.add(video_ids=video_ids)
comment_list.index()
CommentList(video_ids, task=self.task).index()
def _notify(self, idx, current_video):
"""send notification back to task"""

View File

@ -8,8 +8,7 @@ import json
from datetime import datetime
from home.src.download.thumbnails import ThumbManager
from home.src.es.connect import ElasticWrap, IndexPaginate
from home.src.index import channel
from home.src.es.connect import ElasticWrap
from home.src.index.generic import YouTubeItem
from home.src.index.video import YoutubeVideo
@ -29,6 +28,7 @@ class YoutubePlaylist(YouTubeItem):
super().__init__(youtube_id)
self.all_members = False
self.nav = False
self.all_youtube_ids = []
def build_json(self, scrape=False):
"""collection to create json_data"""
@ -45,9 +45,7 @@ class YoutubePlaylist(YouTubeItem):
return
self.process_youtube_meta()
self._ensure_channel()
ids_found = self.get_local_vids()
self.get_entries(ids_found)
self.get_entries()
self.json_data["playlist_entries"] = self.all_members
self.json_data["playlist_subscribed"] = subscribed
@ -71,37 +69,25 @@ class YoutubePlaylist(YouTubeItem):
"playlist_type": "regular",
}
def _ensure_channel(self):
"""make sure channel is indexed"""
channel_id = self.json_data["playlist_channel_id"]
channel_handler = channel.YoutubeChannel(channel_id)
channel_handler.build_json(upload=True)
def get_local_vids(self) -> list[str]:
"""get local video ids from youtube entries"""
entries = self.youtube_meta["entries"]
data = {
"query": {"terms": {"youtube_id": [i["id"] for i in entries]}},
"_source": ["youtube_id"],
}
indexed_vids = IndexPaginate("ta_video", data).get_results()
ids_found = [i["youtube_id"] for i in indexed_vids]
return ids_found
def get_entries(self, ids_found) -> None:
"""get all videos in playlist, match downloaded with ids_found"""
def get_entries(self, playlistend=False):
"""get all videos in playlist"""
if playlistend:
# implement playlist end
print(playlistend)
all_members = []
for idx, entry in enumerate(self.youtube_meta["entries"]):
if self.all_youtube_ids:
downloaded = entry["id"] in self.all_youtube_ids
else:
downloaded = False
if not entry["channel"]:
continue
to_append = {
"youtube_id": entry["id"],
"title": entry["title"],
"uploader": entry["channel"],
"idx": idx,
"downloaded": entry["id"] in ids_found,
"downloaded": downloaded,
}
all_members.append(to_append)
@ -142,50 +128,17 @@ class YoutubePlaylist(YouTubeItem):
ElasticWrap("_bulk").post(query_str, ndjson=True)
def remove_vids_from_playlist(self):
"""remove playlist ids from videos if needed"""
needed = [i["youtube_id"] for i in self.json_data["playlist_entries"]]
data = {
"query": {"match": {"playlist": self.youtube_id}},
"_source": ["youtube_id"],
}
result = IndexPaginate("ta_video", data).get_results()
to_remove = [
i["youtube_id"] for i in result if i["youtube_id"] not in needed
]
s = "ctx._source.playlist.removeAll(Collections.singleton(params.rm))"
for video_id in to_remove:
query = {
"script": {
"source": s,
"lang": "painless",
"params": {"rm": self.youtube_id},
},
"query": {"match": {"youtube_id": video_id}},
}
path = "ta_video/_update_by_query"
_, status_code = ElasticWrap(path).post(query)
if status_code == 200:
print(f"{self.youtube_id}: removed {video_id} from playlist")
def update_playlist(self, skip_on_empty=False):
def update_playlist(self):
"""update metadata for playlist with data from YouTube"""
self.build_json(scrape=True)
self.get_from_es()
subscribed = self.json_data["playlist_subscribed"]
self.get_from_youtube()
if not self.json_data:
# return false to deactivate
return False
if skip_on_empty:
has_item_downloaded = any(
i["downloaded"] for i in self.json_data["playlist_entries"]
)
if not has_item_downloaded:
return True
self.json_data["playlist_subscribed"] = subscribed
self.upload_to_es()
self.add_vids_to_playlist()
self.remove_vids_from_playlist()
self.get_playlist_art()
return True
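Caller-side sketch for the new `skip_on_empty` flag, assuming an already indexed playlist; a `False` return means the playlist could not be scraped at all, which is distinct from the early `True` return when nothing in it has been downloaded yet:

```python
playlist = YoutubePlaylist("PL00000000000000000000000000000000")  # placeholder ID
is_active = playlist.update_playlist(skip_on_empty=True)
if not is_active:
    playlist.deactivate()  # assumption: mirrors how the reindex task retires dead playlists
```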
def build_nav(self, youtube_id):

View File

@ -8,9 +8,8 @@ import json
import os
from datetime import datetime
from time import sleep
from typing import Callable, TypedDict
from home.models import CustomPeriodicTask
from home.src.download.queue import PendingList
from home.src.download.subscriptions import ChannelSubscription
from home.src.download.thumbnails import ThumbManager
from home.src.download.yt_dlp_base import CookieHandler
@ -24,19 +23,10 @@ from home.src.ta.settings import EnvironmentSettings
from home.src.ta.ta_redis import RedisQueue
class ReindexConfigType(TypedDict):
"""represents config type"""
index_name: str
queue_name: str
active_key: str
refresh_key: str
class ReindexBase:
"""base config class for reindex task"""
REINDEX_CONFIG: dict[str, ReindexConfigType] = {
REINDEX_CONFIG = {
"video": {
"index_name": "ta_video",
"queue_name": "reindex:ta_video",
@ -63,36 +53,25 @@ class ReindexBase:
def __init__(self):
self.config = AppConfig().config
self.now = int(datetime.now().timestamp())
self.total = None
def populate(self, all_ids, reindex_config: ReindexConfigType):
def populate(self, all_ids, reindex_config):
"""add all to reindex ids to redis queue"""
if not all_ids:
return
RedisQueue(queue_name=reindex_config["queue_name"]).add_list(all_ids)
self.total = None
class ReindexPopulate(ReindexBase):
"""add outdated and recent documents to reindex queue"""
INTERVAL_DEFAIULT: int = 90
def __init__(self):
super().__init__()
self.interval = self.INTERVAL_DEFAIULT
self.interval = self.config["scheduler"]["check_reindex_days"]
def get_interval(self) -> None:
"""get reindex days interval from task"""
try:
task = CustomPeriodicTask.objects.get(name="check_reindex")
except CustomPeriodicTask.DoesNotExist:
return
task_config = task.task_config
if task_config.get("days"):
self.interval = task_config.get("days")
def add_recent(self) -> None:
def add_recent(self):
"""add recent videos to refresh"""
gte = datetime.fromtimestamp(self.now - self.DAYS3).date().isoformat()
must_list = [
@ -110,10 +89,10 @@ class ReindexPopulate(ReindexBase):
return
all_ids = [i["_source"]["youtube_id"] for i in hits]
reindex_config: ReindexConfigType = self.REINDEX_CONFIG["video"]
reindex_config = self.REINDEX_CONFIG.get("video")
self.populate(all_ids, reindex_config)
def add_outdated(self) -> None:
def add_outdated(self):
"""add outdated documents"""
for reindex_config in self.REINDEX_CONFIG.values():
total_hits = self._get_total_hits(reindex_config)
@ -122,7 +101,7 @@ class ReindexPopulate(ReindexBase):
self.populate(all_ids, reindex_config)
@staticmethod
def _get_total_hits(reindex_config: ReindexConfigType) -> int:
def _get_total_hits(reindex_config):
"""get total hits from index"""
index_name = reindex_config["index_name"]
active_key = reindex_config["active_key"]
@ -134,7 +113,7 @@ class ReindexPopulate(ReindexBase):
return len(total)
def _get_daily_should(self, total_hits: int) -> int:
def _get_daily_should(self, total_hits):
"""calc how many should reindex daily"""
daily_should = int((total_hits // self.interval + 1) * self.MULTIPLY)
if daily_should >= 10000:
@ -142,9 +121,7 @@ class ReindexPopulate(ReindexBase):
return daily_should
def _get_outdated_ids(
self, reindex_config: ReindexConfigType, daily_should: int
) -> list[str]:
def _get_outdated_ids(self, reindex_config, daily_should):
"""get outdated from index_name"""
index_name = reindex_config["index_name"]
refresh_key = reindex_config["refresh_key"]
@ -181,7 +158,7 @@ class ReindexManual(ReindexBase):
self.extract_videos = extract_videos
self.data = False
def extract_data(self, data) -> None:
def extract_data(self, data):
"""process data"""
self.data = data
for key, values in self.data.items():
@ -192,9 +169,7 @@ class ReindexManual(ReindexBase):
self.process_index(reindex_config, values)
def process_index(
self, index_config: ReindexConfigType, values: list[str]
) -> None:
def process_index(self, index_config, values):
"""process values per index"""
index_name = index_config["index_name"]
if index_name == "ta_video":
@ -204,35 +179,32 @@ class ReindexManual(ReindexBase):
elif index_name == "ta_playlist":
self._add_playlists(values)
def _add_videos(self, values: list[str]) -> None:
def _add_videos(self, values):
"""add list of videos to reindex queue"""
if not values:
return
queue_name = self.REINDEX_CONFIG["video"]["queue_name"]
RedisQueue(queue_name).add_list(values)
RedisQueue("reindex:ta_video").add_list(values)
def _add_channels(self, values: list[str]) -> None:
def _add_channels(self, values):
"""add list of channels to reindex queue"""
queue_name = self.REINDEX_CONFIG["channel"]["queue_name"]
RedisQueue(queue_name).add_list(values)
RedisQueue("reindex:ta_channel").add_list(values)
if self.extract_videos:
for channel_id in values:
all_videos = self._get_channel_videos(channel_id)
self._add_videos(all_videos)
def _add_playlists(self, values: list[str]) -> None:
def _add_playlists(self, values):
"""add list of playlists to reindex queue"""
queue_name = self.REINDEX_CONFIG["playlist"]["queue_name"]
RedisQueue(queue_name).add_list(values)
RedisQueue("reindex:ta_playlist").add_list(values)
if self.extract_videos:
for playlist_id in values:
all_videos = self._get_playlist_videos(playlist_id)
self._add_videos(all_videos)
def _get_channel_videos(self, channel_id: str) -> list[str]:
def _get_channel_videos(self, channel_id):
"""get all videos from channel"""
data = {
"query": {"term": {"channel.channel_id": {"value": channel_id}}},
@ -241,7 +213,7 @@ class ReindexManual(ReindexBase):
all_results = IndexPaginate("ta_video", data).get_results()
return [i["youtube_id"] for i in all_results]
def _get_playlist_videos(self, playlist_id: str) -> list[str]:
def _get_playlist_videos(self, playlist_id):
"""get all videos from playlist"""
data = {
"query": {"term": {"playlist.keyword": {"value": playlist_id}}},
@ -257,13 +229,14 @@ class Reindex(ReindexBase):
def __init__(self, task=False):
super().__init__()
self.task = task
self.all_indexed_ids = False
self.processed = {
"videos": 0,
"channels": 0,
"playlists": 0,
}
def reindex_all(self) -> None:
def reindex_all(self):
"""reindex all in queue"""
if not self.cookie_is_valid():
print("[reindex] cookie invalid, exiting...")
@ -273,26 +246,26 @@ class Reindex(ReindexBase):
if not RedisQueue(index_config["queue_name"]).length():
continue
self.reindex_type(name, index_config)
self.total = RedisQueue(index_config["queue_name"]).length()
while True:
has_next = self.reindex_index(name, index_config)
if not has_next:
break
def reindex_type(self, name: str, index_config: ReindexConfigType) -> None:
def reindex_index(self, name, index_config):
"""reindex all of a single index"""
reindex = self._get_reindex_map(index_config["index_name"])
queue = RedisQueue(index_config["queue_name"])
while True:
total = queue.max_score()
youtube_id, idx = queue.get_next()
if not youtube_id or not idx or not total:
break
reindex = self.get_reindex_map(index_config["index_name"])
youtube_id = RedisQueue(index_config["queue_name"]).get_next()
if youtube_id:
if self.task:
self._notify(name, total, idx)
self._notify(name, index_config)
reindex(youtube_id)
sleep_interval = self.config["downloads"].get("sleep_interval", 0)
sleep(sleep_interval)
def _get_reindex_map(self, index_name: str) -> Callable:
return bool(youtube_id)
def get_reindex_map(self, index_name):
"""return def to run for index"""
def_map = {
"ta_video": self._reindex_single_video,
@ -300,15 +273,20 @@ class Reindex(ReindexBase):
"ta_playlist": self._reindex_single_playlist,
}
return def_map[index_name]
return def_map.get(index_name)
def _notify(self, name: str, total: int, idx: int) -> None:
def _notify(self, name, index_config):
"""send notification back to task"""
message = [f"Reindexing {name.title()}s {idx}/{total}"]
progress = idx / total
if self.total is None:
self.total = RedisQueue(index_config["queue_name"]).length()
remaining = RedisQueue(index_config["queue_name"]).length()
idx = self.total - remaining
message = [f"Reindexing {name.title()}s {idx}/{self.total}"]
progress = idx / self.total
self.task.send_progress(message, progress=progress)
def _reindex_single_video(self, youtube_id: str) -> None:
def _reindex_single_video(self, youtube_id):
"""refresh data for single video"""
video = YoutubeVideo(youtube_id)
@ -347,7 +325,9 @@ class Reindex(ReindexBase):
Comments(youtube_id, config=self.config).reindex_comments()
self.processed["videos"] += 1
def _reindex_single_channel(self, channel_id: str) -> None:
return
def _reindex_single_channel(self, channel_id):
"""refresh channel data and sync to videos"""
# read current state
channel = YoutubeChannel(channel_id)
@ -375,12 +355,12 @@ class Reindex(ReindexBase):
channel.json_data["channel_overwrites"] = overwrites
channel.upload_to_es()
channel.sync_to_videos()
ChannelFullScan(channel_id).scan()
self.processed["channels"] += 1
def _reindex_single_playlist(self, playlist_id: str) -> None:
def _reindex_single_playlist(self, playlist_id):
"""refresh playlist data"""
self._get_all_videos()
playlist = YoutubePlaylist(playlist_id)
playlist.get_from_es()
if (
@ -389,14 +369,29 @@ class Reindex(ReindexBase):
):
return
is_active = playlist.update_playlist()
if not is_active:
subscribed = playlist.json_data["playlist_subscribed"]
playlist.all_youtube_ids = self.all_indexed_ids
playlist.build_json(scrape=True)
if not playlist.json_data:
playlist.deactivate()
return
playlist.json_data["playlist_subscribed"] = subscribed
playlist.upload_to_es()
self.processed["playlists"] += 1
return
def cookie_is_valid(self) -> bool:
def _get_all_videos(self):
"""add all videos for playlist index validation"""
if self.all_indexed_ids:
return
handler = PendingList()
handler.get_download()
handler.get_indexed()
self.all_indexed_ids = [i["youtube_id"] for i in handler.all_videos]
def cookie_is_valid(self):
"""return true if cookie is enabled and valid"""
if not self.config["downloads"]["cookie_import"]:
# is not activated, continue reindex
@ -405,7 +400,7 @@ class Reindex(ReindexBase):
valid = CookieHandler(self.config).validate()
return valid
def build_message(self) -> str:
def build_message(self):
"""build progress message"""
message = ""
for key, value in self.processed.items():
@ -435,7 +430,7 @@ class ReindexProgress(ReindexBase):
self.request_type = request_type
self.request_id = request_id
def get_progress(self) -> dict:
def get_progress(self):
"""get progress from task"""
queue_name, request_type = self._get_queue_name()
total = self._get_total_in_queue(queue_name)
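The `ReindexConfigType` TypedDict introduced in this file mostly buys static checking for the per-index config dicts. A minimal sketch of the typed config in isolation; the `active_key` and `refresh_key` values here are guesses based on the field names, not confirmed by this diff:

```python
from typing import TypedDict

class ReindexConfigType(TypedDict):
    """represents config type"""
    index_name: str
    queue_name: str
    active_key: str
    refresh_key: str

video_config: ReindexConfigType = {
    "index_name": "ta_video",
    "queue_name": "reindex:ta_video",
    "active_key": "active",              # assumed value
    "refresh_key": "vid_last_refresh",   # assumed value
}
print(video_config["queue_name"])
```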

View File

@ -125,9 +125,15 @@ class YoutubeVideo(YouTubeItem, YoutubeSubtitle):
index_name = "ta_video"
yt_base = "https://www.youtube.com/watch?v="
def __init__(self, youtube_id, video_type=VideoTypeEnum.VIDEOS):
def __init__(
self,
youtube_id,
video_overwrites=False,
video_type=VideoTypeEnum.VIDEOS,
):
super().__init__(youtube_id)
self.channel_id = False
self.video_overwrites = video_overwrites
self.video_type = video_type
self.offline_import = False
@ -141,7 +147,7 @@ class YoutubeVideo(YouTubeItem, YoutubeSubtitle):
self.youtube_meta = youtube_meta_overwrite
self.offline_import = True
self.process_youtube_meta()
self._process_youtube_meta()
self._add_channel()
self._add_stats()
self.add_file_path()
@ -159,16 +165,17 @@ class YoutubeVideo(YouTubeItem, YoutubeSubtitle):
"""check if need to run sponsor block"""
integrate = self.config["downloads"]["integrate_sponsorblock"]
if overwrite := self.json_data["channel"].get("channel_overwrites"):
if not overwrite:
if self.video_overwrites:
single_overwrite = self.video_overwrites.get(self.youtube_id)
if not single_overwrite:
return integrate
if "integrate_sponsorblock" in overwrite:
return overwrite.get("integrate_sponsorblock")
if "integrate_sponsorblock" in single_overwrite:
return single_overwrite.get("integrate_sponsorblock")
return integrate
def process_youtube_meta(self):
def _process_youtube_meta(self):
"""extract relevant fields from youtube"""
self._validate_id()
# extract
@ -392,9 +399,13 @@ class YoutubeVideo(YouTubeItem, YoutubeSubtitle):
_, _ = ElasticWrap(path).post(data=data)
def index_new_video(youtube_id, video_type=VideoTypeEnum.VIDEOS):
def index_new_video(
youtube_id, video_overwrites=False, video_type=VideoTypeEnum.VIDEOS
):
"""combined classes to create new video in index"""
video = YoutubeVideo(youtube_id, video_type=video_type)
video = YoutubeVideo(
youtube_id, video_overwrites=video_overwrites, video_type=video_type
)
video.build_json()
if not video.json_data:
raise ValueError("failed to get metadata for " + youtube_id)

View File

@ -5,10 +5,12 @@ Functionality:
"""
import json
import re
from random import randint
from time import sleep
import requests
from celery.schedules import crontab
from django.conf import settings
from home.src.ta.ta_redis import RedisArchivist
@ -72,6 +74,15 @@ class AppConfig:
RedisArchivist().set_message("config", self.config, save=True)
return updated
@staticmethod
def _build_rand_daily():
"""build random daily schedule per installation"""
return {
"minute": randint(0, 59),
"hour": randint(0, 23),
"day_of_week": "*",
}
def load_new_defaults(self):
"""check config.json for missing defaults"""
default_config = self.get_config_file()
@ -80,6 +91,7 @@ class AppConfig:
# check for customizations
if not redis_config:
config = self.get_config()
config["scheduler"]["version_check"] = self._build_rand_daily()
RedisArchivist().set_message("config", config)
return False
@ -94,7 +106,13 @@ class AppConfig:
# missing nested values
for sub_key, sub_value in value.items():
if sub_key not in redis_config[key].keys():
if (
sub_key not in redis_config[key].keys()
or sub_value == "rand-d"
):
if sub_value == "rand-d":
sub_value = self._build_rand_daily()
redis_config[key].update({sub_key: sub_value})
needs_update = True
@ -104,6 +122,147 @@ class AppConfig:
return needs_update
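Sketch of the `rand-d` placeholder handling added above: a config default of `"rand-d"` is swapped for a per-installation random daily schedule, so not every instance runs the version check at the same minute. `build_rand_daily` stands in for `AppConfig._build_rand_daily`:

```python
from random import randint

def build_rand_daily() -> dict:
    """stand-in for AppConfig._build_rand_daily"""
    return {"minute": randint(0, 59), "hour": randint(0, 23), "day_of_week": "*"}

defaults = {"scheduler": {"version_check": "rand-d"}}
for key, value in defaults["scheduler"].items():
    if value == "rand-d":
        defaults["scheduler"][key] = build_rand_daily()
print(defaults)
```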
class ScheduleBuilder:
"""build schedule dicts for beat"""
SCHEDULES = {
"update_subscribed": "0 8 *",
"download_pending": "0 16 *",
"check_reindex": "0 12 *",
"thumbnail_check": "0 17 *",
"run_backup": "0 18 0",
"version_check": "0 11 *",
}
CONFIG = ["check_reindex_days", "run_backup_rotate"]
NOTIFY = [
"update_subscribed_notify",
"download_pending_notify",
"check_reindex_notify",
]
MSG = "message:setting"
def __init__(self):
self.config = AppConfig().config
def update_schedule_conf(self, form_post):
"""process form post"""
print("processing form, restart container for changes to take effect")
redis_config = self.config
for key, value in form_post.items():
if key in self.SCHEDULES and value:
try:
to_write = self.value_builder(key, value)
except ValueError:
print(f"failed: {key} {value}")
mess_dict = {
"group": "setting:schedule",
"level": "error",
"title": "Scheduler update failed.",
"messages": ["Invalid schedule input"],
"id": "0000",
}
RedisArchivist().set_message(
self.MSG, mess_dict, expire=True
)
return
redis_config["scheduler"][key] = to_write
if key in self.CONFIG and value:
redis_config["scheduler"][key] = int(value)
if key in self.NOTIFY and value:
if value == "0":
to_write = False
else:
to_write = value
redis_config["scheduler"][key] = to_write
RedisArchivist().set_message("config", redis_config, save=True)
mess_dict = {
"group": "setting:schedule",
"level": "info",
"title": "Scheduler changed.",
"messages": ["Restart container for changes to take effect"],
"id": "0000",
}
RedisArchivist().set_message(self.MSG, mess_dict, expire=True)
def value_builder(self, key, value):
"""validate single cron form entry and return cron dict"""
print(f"change schedule for {key} to {value}")
if value == "0":
# deactivate this schedule
return False
if re.search(r"[\d]{1,2}\/[\d]{1,2}", value):
# number/number cron format will fail in celery
print("number/number schedule formatting not supported")
raise ValueError
keys = ["minute", "hour", "day_of_week"]
if value == "auto":
# set to sensible default
values = self.SCHEDULES[key].split()
else:
values = value.split()
if len(keys) != len(values):
print(f"failed to parse {value} for {key}")
raise ValueError("invalid input")
to_write = dict(zip(keys, values))
self._validate_cron(to_write)
return to_write
@staticmethod
def _validate_cron(to_write):
"""validate all fields, raise value error for impossible schedule"""
all_hours = list(re.split(r"\D+", to_write["hour"]))
for hour in all_hours:
if hour.isdigit() and int(hour) > 23:
print("hour can not be greater than 23")
raise ValueError("invalid input")
all_days = list(re.split(r"\D+", to_write["day_of_week"]))
for day in all_days:
if day.isdigit() and int(day) > 6:
print("day can not be greater than 6")
raise ValueError("invalid input")
if not to_write["minute"].isdigit():
print("too frequent: only number in minutes are supported")
raise ValueError("invalid input")
if int(to_write["minute"]) > 59:
print("minutes can not be greater than 59")
raise ValueError("invalid input")
def build_schedule(self):
"""build schedule dict as expected by app.conf.beat_schedule"""
AppConfig().load_new_defaults()
self.config = AppConfig().config
schedule_dict = {}
for schedule_item in self.SCHEDULES:
item_conf = self.config["scheduler"][schedule_item]
if not item_conf:
continue
schedule_dict.update(
{
f"schedule_{schedule_item}": {
"task": schedule_item,
"schedule": crontab(
minute=item_conf["minute"],
hour=item_conf["hour"],
day_of_week=item_conf["day_of_week"],
),
}
}
)
return schedule_dict
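For reference, a single entry as `build_schedule` emits it for `app.conf.beat_schedule`, given the default `update_subscribed` config of minute `0`, hour `8`:

```python
from celery.schedules import crontab

schedule_dict = {
    "schedule_update_subscribed": {
        "task": "update_subscribed",
        "schedule": crontab(minute="0", hour="8", day_of_week="*"),
    }
}
print(schedule_dict)
```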
class ReleaseVersion:
"""compare local version with remote version"""
@ -181,3 +340,12 @@ class ReleaseVersion:
return {}
return message
def clear_fail(self) -> None:
"""clear key, catch previous error in v0.4.5"""
message = self.get_update()
if not message:
return
if isinstance(message.get("version"), list):
RedisArchivist().del_message(self.NEW_KEY)

View File

@ -1,89 +0,0 @@
"""
Functionality:
- Handle scheduler config update
"""
from django_celery_beat.models import CrontabSchedule
from home.models import CustomPeriodicTask
from home.src.ta.config import AppConfig
from home.src.ta.settings import EnvironmentSettings
from home.src.ta.task_config import TASK_CONFIG
class ScheduleBuilder:
"""build schedule dicts for beat"""
SCHEDULES = {
"update_subscribed": "0 8 *",
"download_pending": "0 16 *",
"check_reindex": "0 12 *",
"thumbnail_check": "0 17 *",
"run_backup": "0 18 0",
"version_check": "0 11 *",
}
CONFIG = {
"check_reindex_days": "check_reindex",
"run_backup_rotate": "run_backup",
"update_subscribed_notify": "update_subscribed",
"download_pending_notify": "download_pending",
"check_reindex_notify": "check_reindex",
}
MSG = "message:setting"
def __init__(self):
self.config = AppConfig().config
def update_schedule_conf(self, form_post):
"""process form post, schedules need to be validated before"""
for key, value in form_post.items():
if not value:
continue
if key in self.SCHEDULES:
if value == "auto":
value = self.SCHEDULES.get(key)
_ = self.get_set_task(key, value)
continue
if key in self.CONFIG:
self.set_config(key, value)
def get_set_task(self, task_name, schedule=False):
"""get task"""
try:
task = CustomPeriodicTask.objects.get(name=task_name)
except CustomPeriodicTask.DoesNotExist:
description = TASK_CONFIG[task_name].get("title")
task = CustomPeriodicTask(
name=task_name,
task=task_name,
description=description,
)
if schedule:
task_crontab = self.get_set_cron_tab(schedule)
task.crontab = task_crontab
task.save()
return task
@staticmethod
def get_set_cron_tab(schedule):
"""needs to be validated before"""
kwargs = dict(zip(["minute", "hour", "day_of_week"], schedule.split()))
kwargs.update({"timezone": EnvironmentSettings.TZ})
crontab, _ = CrontabSchedule.objects.get_or_create(**kwargs)
return crontab
def set_config(self, key, value):
"""set task_config"""
task_name = self.CONFIG.get(key)
if not task_name:
raise ValueError("invalid config key")
task = CustomPeriodicTask.objects.get(name=task_name)
config_key = key.split(f"{task_name}_")[-1]
task.task_config.update({config_key: value})
task.save()
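Usage sketch for the rewritten builder; this requires a configured Django environment with the models migrated, and the cron string must already have passed form validation:

```python
builder = ScheduleBuilder()
task = builder.get_set_task("check_reindex", schedule="0 12 *")
builder.set_config("check_reindex_days", 90)
print(task.name, task.crontab)
```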

View File

@ -9,11 +9,9 @@ import random
import string
import subprocess
from datetime import datetime
from typing import Any
from urllib.parse import urlparse
import requests
from home.src.es.connect import IndexPaginate
from home.src.ta.settings import EnvironmentSettings
@ -93,14 +91,12 @@ def requests_headers() -> dict[str, str]:
return {"User-Agent": template}
def date_parser(timestamp: int | str) -> str:
def date_praser(timestamp: int | str) -> str:
"""return formatted date string"""
if isinstance(timestamp, int):
date_obj = datetime.fromtimestamp(timestamp)
elif isinstance(timestamp, str):
date_obj = datetime.strptime(timestamp, "%Y-%m-%d")
else:
raise TypeError(f"invalid timestamp: {timestamp}")
return date_obj.date().isoformat()
@ -140,9 +136,8 @@ def get_mapping() -> dict:
def is_shorts(youtube_id: str) -> bool:
"""check if youtube_id is a shorts video, bot not it it's not a shorts"""
shorts_url = f"https://www.youtube.com/shorts/{youtube_id}"
cookies = {"SOCS": "CAI"}
response = requests.head(
shorts_url, cookies=cookies, headers=requests_headers(), timeout=10
shorts_url, headers=requests_headers(), timeout=10
)
return response.status_code == 200
@ -186,8 +181,6 @@ def get_duration_str(seconds: int) -> str:
unit_count, seconds = divmod(seconds, unit_seconds)
duration_parts.append(f"{unit_count:02}{unit_label}")
duration_parts[0] = duration_parts[0].lstrip("0")
return " ".join(duration_parts)
@ -229,37 +222,3 @@ def check_stylesheet(stylesheet: str):
return stylesheet
return "dark.css"
def is_missing(
to_check: str | list[str],
index_name: str = "ta_video,ta_download",
on_key: str = "youtube_id",
) -> list[str]:
"""id or list of ids that are missing from index_name"""
if isinstance(to_check, str):
to_check = [to_check]
data = {
"query": {"terms": {on_key: to_check}},
"_source": [on_key],
}
result = IndexPaginate(index_name, data=data).get_results()
existing_ids = [i[on_key] for i in result]
dl = [i for i in to_check if i not in existing_ids]
return dl
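Usage sketch for `is_missing`, assuming a reachable Elasticsearch behind `IndexPaginate`; the video IDs are placeholders:

```python
to_check = ["aaaaaaaaaaa", "bbbbbbbbbbb"]  # placeholder 11-char video IDs
missing = is_missing(to_check)
print(f"{len(missing)} of {len(to_check)} not yet indexed or queued")
```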
def get_channel_overwrites() -> dict[str, dict[str, Any]]:
"""get overwrites indexed my channel_id"""
data = {
"query": {
"bool": {"must": [{"exists": {"field": "channel_overwrites"}}]}
},
"_source": ["channel_id", "channel_overwrites"],
}
result = IndexPaginate("ta_channel", data).get_results()
overwrites = {i["channel_id"]: i["channel_overwrites"] for i in result}
return overwrites

View File

@ -1,141 +1,55 @@
"""send notifications using apprise"""
import apprise
from home.src.es.connect import ElasticWrap
from home.src.ta.task_config import TASK_CONFIG
from home.src.ta.config import AppConfig
from home.src.ta.task_manager import TaskManager
class Notifications:
"""store notifications in ES"""
"""notification handler"""
GET_PATH = "ta_config/_doc/notify"
UPDATE_PATH = "ta_config/_update/notify/"
def __init__(self, name: str, task_id: str, task_title: str):
self.name: str = name
self.task_id: str = task_id
self.task_title: str = task_title
def __init__(self, task_name: str):
self.task_name = task_name
def send(self, task_id: str, task_title: str) -> None:
def send(self) -> None:
"""send notifications"""
apobj = apprise.Apprise()
urls: list[str] = self.get_urls()
if not urls:
hooks: str | None = self.get_url()
if not hooks:
return
title, body = self._build_message(task_id, task_title)
hook_list: list[str] = self.parse_hooks(hooks=hooks)
title, body = self.build_message()
if not body:
return
for url in urls:
apobj.add(url)
for hook in hook_list:
apobj.add(hook)
apobj.notify(body=body, title=title)
def _build_message(
self, task_id: str, task_title: str
) -> tuple[str, str | None]:
def get_url(self) -> str | None:
"""get apprise urls for task"""
config = AppConfig().config
hooks: str = config["scheduler"].get(f"{self.name}_notify")
return hooks
def parse_hooks(self, hooks: str) -> list[str]:
"""create list of hooks"""
hook_list: list[str] = [i.strip() for i in hooks.split()]
return hook_list
def build_message(self) -> tuple[str, str | None]:
"""build message to send notification"""
task = TaskManager().get_task(task_id)
task = TaskManager().get_task(self.task_id)
status = task.get("status")
title: str = f"[TA] {task_title} process ended with {status}"
title: str = f"[TA] {self.task_title} process ended with {status}"
body: str | None = task.get("result")
return title, body
def get_urls(self) -> list[str]:
"""get stored urls for task"""
response, code = ElasticWrap(self.GET_PATH).get(print_error=False)
if not code == 200:
return []
urls = response["_source"].get(self.task_name, [])
return urls
def add_url(self, url: str) -> None:
"""add url to task notification"""
source = (
"if (!ctx._source.containsKey(params.task_name)) "
+ "{ctx._source[params.task_name] = [params.url]} "
+ "else if (!ctx._source[params.task_name].contains(params.url)) "
+ "{ctx._source[params.task_name].add(params.url)} "
+ "else {ctx.op = 'none'}"
)
data = {
"script": {
"source": source,
"lang": "painless",
"params": {"url": url, "task_name": self.task_name},
},
"upsert": {self.task_name: [url]},
}
_, _ = ElasticWrap(self.UPDATE_PATH).post(data)
def remove_url(self, url: str) -> tuple[dict, int]:
"""remove url from task"""
source = (
"if (ctx._source.containsKey(params.task_name) "
+ "&& ctx._source[params.task_name].contains(params.url)) "
+ "{ctx._source[params.task_name]."
+ "remove(ctx._source[params.task_name].indexOf(params.url))}"
)
data = {
"script": {
"source": source,
"lang": "painless",
"params": {"url": url, "task_name": self.task_name},
}
}
response, status_code = ElasticWrap(self.UPDATE_PATH).post(data)
if not self.get_urls():
_, _ = self.remove_task()
return response, status_code
def remove_task(self) -> tuple[dict, int]:
"""remove all notifications from task"""
source = (
"if (ctx._source.containsKey(params.task_name)) "
+ "{ctx._source.remove(params.task_name)}"
)
data = {
"script": {
"source": source,
"lang": "painless",
"params": {"task_name": self.task_name},
}
}
response, status_code = ElasticWrap(self.UPDATE_PATH).post(data)
return response, status_code
def get_all_notifications() -> dict[str, list[str]]:
"""get all notifications stored"""
path = "ta_config/_doc/notify"
response, status_code = ElasticWrap(path).get(print_error=False)
if not status_code == 200:
return {}
notifications: dict = {}
source = response.get("_source")
if not source:
return notifications
for task_id, urls in source.items():
notifications.update(
{
task_id: {
"urls": urls,
"title": TASK_CONFIG[task_id]["title"],
}
}
)
return notifications
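Usage sketch for the ES-backed store, the side of this diff that keeps notification URLs in `ta_config` rather than in the scheduler config; the Apprise URL is a placeholder:

```python
notify = Notifications("download_pending")
notify.add_url("json://example.com/hook")  # placeholder Apprise URL
print(get_all_notifications())
notify.remove_url("json://example.com/hook")
```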

View File

@ -104,17 +104,6 @@ class RedisQueue(RedisBase):
dynamically interact with queues in redis using sorted set
- low score number is first in queue
- add new items with high score number
queue names in use:
download:channel channels during download
download:playlist:full playlists during dl for full refresh
download:playlist:quick playlists during dl for quick refresh
download:video videos during downloads
index:comment videos needing comment indexing
reindex:ta_video reindex videos
reindex:ta_channel reindex channels
reindex:ta_playlist reindex playlists
"""
def __init__(self, queue_name: str):
@ -138,48 +127,18 @@ class RedisQueue(RedisBase):
return False
def add(self, to_add: str) -> None:
"""add single item to queue"""
if not to_add:
return
next_score = self._get_next_score()
self.conn.zadd(self.key, {to_add: next_score})
def add_list(self, to_add: list) -> None:
"""add list to queue"""
if not to_add:
return
next_score = self._get_next_score()
mapping = {i[1]: next_score + i[0] for i in enumerate(to_add)}
mapping = {i: "+inf" for i in to_add}
self.conn.zadd(self.key, mapping)
def max_score(self) -> int | None:
"""get max score"""
last = self.conn.zrange(self.key, -1, -1, withscores=True)
if not last:
return None
return int(last[0][1])
def _get_next_score(self) -> float:
"""get next score in queue to append"""
last = self.conn.zrange(self.key, -1, -1, withscores=True)
if not last:
return 1.0
return last[0][1] + 1
def get_next(self) -> tuple[str | None, int | None]:
def get_next(self) -> str | bool:
"""return next element in the queue, if available"""
result = self.conn.zpopmin(self.key)
if not result:
return None, None
return False
item, idx = result[0][0], int(result[0][1])
return item, idx
return result[0][0]
def clear(self) -> None:
"""delete list from redis"""

View File

@ -1,125 +0,0 @@
"""
Functionality:
- Static Task config values
- Type definitions
- separate to avoid circular imports
"""
from typing import TypedDict
class TaskItemConfig(TypedDict):
"""describes a task item config"""
title: str
group: str
api_start: bool
api_stop: bool
UPDATE_SUBSCRIBED: TaskItemConfig = {
"title": "Rescan your Subscriptions",
"group": "download:scan",
"api_start": True,
"api_stop": True,
}
DOWNLOAD_PENDING: TaskItemConfig = {
"title": "Downloading",
"group": "download:run",
"api_start": True,
"api_stop": True,
}
EXTRACT_DOWNLOAD: TaskItemConfig = {
"title": "Add to download queue",
"group": "download:add",
"api_start": False,
"api_stop": True,
}
CHECK_REINDEX: TaskItemConfig = {
"title": "Reindex Documents",
"group": "reindex:run",
"api_start": False,
"api_stop": False,
}
MANUAL_IMPORT: TaskItemConfig = {
"title": "Manual video import",
"group": "setting:import",
"api_start": True,
"api_stop": False,
}
RUN_BACKUP: TaskItemConfig = {
"title": "Index Backup",
"group": "setting:backup",
"api_start": True,
"api_stop": False,
}
RESTORE_BACKUP: TaskItemConfig = {
"title": "Restore Backup",
"group": "setting:restore",
"api_start": False,
"api_stop": False,
}
RESCAN_FILESYSTEM: TaskItemConfig = {
"title": "Rescan your Filesystem",
"group": "setting:filesystemscan",
"api_start": True,
"api_stop": False,
}
THUMBNAIL_CHECK: TaskItemConfig = {
"title": "Check your Thumbnails",
"group": "setting:thumbnailcheck",
"api_start": True,
"api_stop": False,
}
RESYNC_THUMBS: TaskItemConfig = {
"title": "Sync Thumbnails to Media Files",
"group": "setting:thumbnailsync",
"api_start": True,
"api_stop": False,
}
INDEX_PLAYLISTS: TaskItemConfig = {
"title": "Index Channel Playlist",
"group": "channel:indexplaylist",
"api_start": False,
"api_stop": False,
}
SUBSCRIBE_TO: TaskItemConfig = {
"title": "Add Subscription",
"group": "subscription:add",
"api_start": False,
"api_stop": False,
}
VERSION_CHECK: TaskItemConfig = {
"title": "Look for new Version",
"group": "",
"api_start": False,
"api_stop": False,
}
TASK_CONFIG: dict[str, TaskItemConfig] = {
"update_subscribed": UPDATE_SUBSCRIBED,
"download_pending": DOWNLOAD_PENDING,
"extract_download": EXTRACT_DOWNLOAD,
"check_reindex": CHECK_REINDEX,
"manual_import": MANUAL_IMPORT,
"run_backup": RUN_BACKUP,
"restore_backup": RESTORE_BACKUP,
"rescan_filesystem": RESCAN_FILESYSTEM,
"thumbnail_check": THUMBNAIL_CHECK,
"resync_thumbs": RESYNC_THUMBS,
"index_playlists": INDEX_PLAYLISTS,
"subscribe_to": SUBSCRIBE_TO,
"version_check": VERSION_CHECK,
}
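A small sketch of how a consumer might gate API actions on the flags above; the `can_start` helper is illustrative, not part of the module:

```python
def can_start(task_name: str) -> bool:
    """illustrative helper: may this task be started via the API?"""
    config = TASK_CONFIG.get(task_name)
    return bool(config and config["api_start"])

print(can_start("download_pending"))  # True
print(can_start("check_reindex"))     # False
```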

View File

@ -4,9 +4,8 @@ functionality:
- handle threads and locks
"""
from home.celery import app as celery_app
from home import tasks as ta_tasks
from home.src.ta.ta_redis import RedisArchivist, TaskRedis
from home.src.ta.task_config import TASK_CONFIG
class TaskManager:
@ -87,7 +86,7 @@ class TaskCommand:
def start(self, task_name):
"""start task by task_name, only pass task that don't take args"""
task = celery_app.tasks.get(task_name).delay()
task = ta_tasks.app.tasks.get(task_name).delay()
message = {
"task_id": task.id,
"status": task.status,
@ -105,7 +104,7 @@ class TaskCommand:
handler = TaskRedis()
task = handler.get_single(task_id)
if not task["name"] in TASK_CONFIG:
if not task["name"] in ta_tasks.BaseTask.TASK_CONFIG:
raise ValueError
handler.set_command(task_id, "STOP")
@ -114,4 +113,4 @@ class TaskCommand:
def kill(self, task_id):
"""send kill signal to task_id"""
print(f"[task][{task_id}]: received KILL signal.")
celery_app.control.revoke(task_id, terminate=True)
ta_tasks.app.control.revoke(task_id, terminate=True)

View File

@ -42,10 +42,6 @@ class Parser:
youtube_id = parsed.path.strip("/")
return self._validate_expected(youtube_id, "video")
if "youtube.com" not in parsed.netloc:
message = f"invalid domain: {parsed.netloc}"
raise ValueError(message)
query_parsed = parse_qs(parsed.query)
if "v" in query_parsed:
# video from v query str

View File

@ -1,12 +1,14 @@
"""
Functionality:
- initiate celery app
- collect tasks
- handle task callbacks
- handle task notifications
- handle task locking
- user config changes won't get applied here
because tasks are initiated at application start
"""
from celery import Task, shared_task
import os
from celery import Celery, Task, shared_task
from home.src.download.queue import PendingList
from home.src.download.subscriptions import (
SubscriptionHandler,
@ -20,19 +22,97 @@ from home.src.index.channel import YoutubeChannel
from home.src.index.filesystem import Scanner
from home.src.index.manual import ImportFolderScanner
from home.src.index.reindex import Reindex, ReindexManual, ReindexPopulate
from home.src.ta.config import ReleaseVersion
from home.src.ta.config import AppConfig, ReleaseVersion, ScheduleBuilder
from home.src.ta.notify import Notifications
from home.src.ta.settings import EnvironmentSettings
from home.src.ta.ta_redis import RedisArchivist
from home.src.ta.task_config import TASK_CONFIG
from home.src.ta.task_manager import TaskManager
from home.src.ta.urlparser import Parser
CONFIG = AppConfig().config
REDIS_HOST = EnvironmentSettings.REDIS_HOST
REDIS_PORT = EnvironmentSettings.REDIS_PORT
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings")
app = Celery(
"tasks",
broker=f"redis://{REDIS_HOST}:{REDIS_PORT}",
backend=f"redis://{REDIS_HOST}:{REDIS_PORT}",
result_extended=True,
)
app.config_from_object(
"django.conf:settings", namespace=EnvironmentSettings.REDIS_NAME_SPACE
)
app.autodiscover_tasks()
app.conf.timezone = EnvironmentSettings.TZ
class BaseTask(Task):
"""base class to inherit each class from"""
# pylint: disable=abstract-method
TASK_CONFIG = {
"update_subscribed": {
"title": "Rescan your Subscriptions",
"group": "download:scan",
"api-start": True,
"api-stop": True,
},
"download_pending": {
"title": "Downloading",
"group": "download:run",
"api-start": True,
"api-stop": True,
},
"extract_download": {
"title": "Add to download queue",
"group": "download:add",
"api-stop": True,
},
"check_reindex": {
"title": "Reindex Documents",
"group": "reindex:run",
},
"manual_import": {
"title": "Manual video import",
"group": "setting:import",
"api-start": True,
},
"run_backup": {
"title": "Index Backup",
"group": "setting:backup",
"api-start": True,
},
"restore_backup": {
"title": "Restore Backup",
"group": "setting:restore",
},
"rescan_filesystem": {
"title": "Rescan your Filesystem",
"group": "setting:filesystemscan",
"api-start": True,
},
"thumbnail_check": {
"title": "Check your Thumbnails",
"group": "setting:thumbnailcheck",
"api-start": True,
},
"resync_thumbs": {
"title": "Sync Thumbnails to Media Files",
"group": "setting:thumbnailsync",
"api-start": True,
},
"index_playlists": {
"title": "Index Channel Playlist",
"group": "channel:indexplaylist",
},
"subscribe_to": {
"title": "Add Subscription",
"group": "subscription:add",
},
}
def on_failure(self, exc, task_id, args, kwargs, einfo):
"""callback for task failure"""
print(f"{task_id} Failed callback")
@ -57,8 +137,8 @@ class BaseTask(Task):
def after_return(self, status, retval, task_id, args, kwargs, einfo):
"""callback after task returns"""
print(f"{task_id} return callback")
task_title = TASK_CONFIG.get(self.name).get("title")
Notifications(self.name).send(task_id, task_title)
task_title = self.TASK_CONFIG.get(self.name).get("title")
Notifications(self.name, task_id, task_title).send()
def send_progress(self, message_lines, progress=False, title=False):
"""send progress message"""
@ -77,7 +157,7 @@ class BaseTask(Task):
def _build_message(self, level="info"):
"""build message dict"""
task_id = self.request.id
message = TASK_CONFIG.get(self.name).copy()
message = self.TASK_CONFIG.get(self.name).copy()
message.update({"level": level, "id": task_id})
task_result = TaskManager().get_task(task_id)
if task_result:
@ -128,13 +208,13 @@ def download_pending(self, auto_only=False):
videos_downloaded = downloader.run_queue(auto_only=auto_only)
if videos_downloaded:
return f"downloaded {videos_downloaded} video(s)."
return f"downloaded {len(videos_downloaded)} videos."
return None
@shared_task(name="extract_download", bind=True, base=BaseTask)
def extrac_dl(self, youtube_ids, auto_start=False, status="pending"):
def extrac_dl(self, youtube_ids, auto_start=False):
"""parse list passed and add to pending"""
TaskManager().init(self)
if isinstance(youtube_ids, str):
@ -144,18 +224,11 @@ def extrac_dl(self, youtube_ids, auto_start=False, status="pending"):
pending_handler = PendingList(youtube_ids=to_add, task=self)
pending_handler.parse_url_list()
videos_added = pending_handler.add_to_pending(
status=status, auto_start=auto_start
)
pending_handler.add_to_pending(auto_start=auto_start)
if auto_start:
download_pending.delay(auto_only=True)
if videos_added:
return f"added {len(videos_added)} Videos to Queue"
return None
@shared_task(bind=True, name="check_reindex", base=BaseTask)
def check_reindex(self, data=False, extract_videos=False):
@ -178,7 +251,6 @@ def check_reindex(self, data=False, extract_videos=False):
populate = ReindexPopulate()
print(f"[task][{self.name}] reindex outdated documents")
self.send_progress("Add recent documents to the reindex Queue.")
populate.get_interval()
populate.add_recent()
self.send_progress("Add outdated documents to the reindex Queue.")
populate.add_outdated()
@ -259,9 +331,7 @@ def thumbnail_check(self):
return
manager.init(self)
thumbnail = ThumbValidator(task=self)
thumbnail.validate()
thumbnail.clean_up()
ThumbValidator(task=self).validate()
@shared_task(bind=True, name="resync_thumbs", base=BaseTask)
@ -297,3 +367,7 @@ def index_channel_playlists(self, channel_id):
def version_check():
"""check for new updates"""
ReleaseVersion().check()
# start schedule here
app.conf.beat_schedule = ScheduleBuilder().build_schedule()
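# For reference, build_schedule() is assumed to return a standard Celery
# beat mapping, roughly of this shape (names and times illustrative):
#   {"update_subscribed": {
#       "task": "update_subscribed",
#       "schedule": crontab(minute="0", hour="4"),
#   }}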

View File

@ -60,10 +60,7 @@
<a href="{% url 'settings' %}">
<img src="{% static 'img/icon-gear.svg' %}" alt="gear-icon" title="Settings">
</a>
<form id="logout-form" action="{% url 'logout' %}" method="post" style="display:none;">
{% csrf_token %}
</form>
<a href="#" onclick="document.getElementById('logout-form').submit();">
<a href="{% url 'logout' %}">
<img class="alert-hover" src="{% static 'img/icon-exit.svg' %}" alt="exit-icon" title="Logout">
</a>
</div>

View File

@ -53,14 +53,13 @@
</a>
</div>
<div class="playlist-desc {{ view_style }}">
<a href="{% url 'channel_id' playlist.playlist_channel_id %}"><h3>{{ playlist.playlist_channel }}</h3></a>
<a href="{% url 'playlist_id' playlist.playlist_id %}"><h2>{{ playlist.playlist_name }}</h2></a>
<p>Last refreshed: {{ playlist.playlist_last_refresh }}</p>
{% if request.user|has_group:"admin" or request.user.is_staff %}
{% if playlist.playlist_subscribed %}
<button class="unsubscribe" type="button" data-type="playlist" data-subscribe="" data-id="{{ playlist.playlist_id }}" onclick="subscribeStatus(this)" title="Unsubscribe from {{ playlist.playlist_name }}">Unsubscribe</button>
{% else %}
<button type="button" data-type="playlist" data-subscribe="true" data-id="{{ playlist.playlist_id }}" onclick="subscribeStatus(this)" title="Subscribe to {{ playlist.playlist_name }}">Subscribe</button>
{% endif %}
{% if playlist.playlist_subscribed and request.user|has_group:"admin" or request.user.is_staff %}
<button class="unsubscribe" type="button" data-type="playlist" data-subscribe="" data-id="{{ playlist.playlist_id }}" onclick="subscribeStatus(this)" title="Unsubscribe from {{ playlist.playlist_name }}">Unsubscribe</button>
{% else %}
<button type="button" data-type="playlist" data-subscribe="true" data-id="{{ playlist.playlist_id }}" onclick="subscribeStatus(this)" title="Subscribe to {{ playlist.playlist_name }}">Subscribe</button>
{% endif %}
</div>
</div>

View File

@ -10,9 +10,11 @@
<li><span class="settings-current">0 15 *</span>: Run task every day at 15:00 in the afternoon.</li>
<li><span class="settings-current">30 8 */2</span>: Run task every second day of the week (Sun, Tue, Thu, Sat) at 08:30 in the morning.</li>
<li><span class="settings-current">auto</span>: Sensible default.</li>
<li><span class="settings-current">0</span>: (zero), deactivate that task.</li>
</ul>
<p>Note:</p>
<ul>
<li>Changes in the scheduler settings require a container restart to take effect.</li>
<li>Avoid unnecessarily frequent schedules so you don't get blocked by YouTube. For that reason, the scheduler doesn't support schedules that trigger more than once per hour.</li>
</ul>
</div>
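A minimal sketch of validating such a three-field "minute hour day_of_week" expression, assuming the scheduler rejects sub-hourly schedules as the note above states; the function and the "auto" default are illustrative, not the app's actual implementation:

def parse_schedule(expression: str):
    """return a crontab-style dict, None to deactivate, or raise ValueError"""
    if expression == "0":
        return None  # zero deactivates the task
    if expression == "auto":
        expression = "0 4 *"  # assumed sensible default
    minute, hour, day_of_week = expression.split()
    if "*" in minute:
        # a wildcard minute field would trigger more than once per hour
        raise ValueError("schedule triggers more than once per hour")
    return {"minute": minute, "hour": hour, "day_of_week": day_of_week}

parse_schedule("30 8 */2")  # {'minute': '30', 'hour': '8', 'day_of_week': '*/2'}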
@ -22,47 +24,68 @@
<div class="settings-group">
<h2>Rescan Subscriptions</h2>
<div class="settings-item">
<p>Become a sponsor and join <a href="https://members.tubearchivist.com/" target="_blank">members.tubearchivist.com</a> to get access to <span class="settings-current">real time</span> notifications for new videos uploaded by your favorite channels.</p>
<p>Current rescan schedule: <span class="settings-current">
{% if update_subscribed %}
{{ update_subscribed.crontab.minute }} {{ update_subscribed.crontab.hour }} {{ update_subscribed.crontab.day_of_week }}
<button data-schedule="update_subscribed" onclick="deleteSchedule(this)" class="danger-button">Delete</button>
{% if config.scheduler.update_subscribed %}
{% for key, value in config.scheduler.update_subscribed.items %}
{{ value }}
{% endfor %}
{% else %}
False
{% endif %}
</span></p>
<p>Become a sponsor and join <a href="https://members.tubearchivist.com/" target="_blank">members.tubearchivist.com</a> to get access to <span class="settings-current">real time</span> notifications for new videos uploaded by your favorite channels.</p>
<p>Periodically rescan your subscriptions:</p>
{% for error in scheduler_form.update_subscribed.errors %}
<p class="danger-zone">{{ error }}</p>
{% endfor %}
{{ scheduler_form.update_subscribed }}
</div>
<div class="settings-item">
<p>Send notification on task completion:</p>
{% if config.scheduler.update_subscribed_notify %}
<p><button type="button" onclick="textReveal(this)" id="text-reveal-button">Show</button> stored notification links</p>
<div id="text-reveal" class="description-text">
<p>{{ config.scheduler.update_subscribed_notify|linebreaks }}</p>
</div>
{% else %}
<p>Current notification urls: <span class="settings-current">{{ config.scheduler.update_subscribed_notify }}</span></p>
{% endif %}
{{ scheduler_form.update_subscribed_notify }}
</div>
</div>
<div class="settings-group">
<h2>Start Download</h2>
<div class="settings-item">
<p>Current Download schedule: <span class="settings-current">
{% if download_pending %}
{{ download_pending.crontab.minute }} {{ download_pending.crontab.hour }} {{ download_pending.crontab.day_of_week }}
<button data-schedule="download_pending" onclick="deleteSchedule(this)" class="danger-button">Delete</button>
{% if config.scheduler.download_pending %}
{% for key, value in config.scheduler.download_pending.items %}
{{ value }}
{% endfor %}
{% else %}
False
{% endif %}
</span></p>
<p>Automatic video download schedule:</p>
{% for error in scheduler_form.download_pending.errors %}
<p class="danger-zone">{{ error }}</p>
{% endfor %}
{{ scheduler_form.download_pending }}
</div>
<div class="settings-item">
<p>Send notification on task completion:</p>
{% if config.scheduler.download_pending_notify %}
<p><button type="button" onclick="textReveal(this)" id="text-reveal-button">Show</button> stored notification links</p>
<div id="text-reveal" class="description-text">
<p>{{ config.scheduler.download_pending_notify|linebreaks }}</p>
</div>
{% else %}
<p>Current notification urls: <span class="settings-current">{{ config.scheduler.download_pending_notify }}</span></p>
{% endif %}
{{ scheduler_form.download_pending_notify }}
</div>
</div>
<div class="settings-group">
<h2>Refresh Metadata</h2>
<div class="settings-item">
<p>Current Metadata refresh schedule: <span class="settings-current">
{% if check_reindex %}
{{ check_reindex.crontab.minute }} {{ check_reindex.crontab.hour }} {{ check_reindex.crontab.day_of_week }}
<button data-schedule="check_reindex" onclick="deleteSchedule(this)" class="danger-button">Delete</button>
{% if config.scheduler.check_reindex %}
{% for key, value in config.scheduler.check_reindex.items %}
{{ value }}
{% endfor %}
{% else %}
False
{% endif %}
@ -71,29 +94,36 @@
{{ scheduler_form.check_reindex }}
</div>
<div class="settings-item">
<p>Current refresh for metadata older than x days: <span class="settings-current">{{ check_reindex.task_config.days }}</span></p>
<p>Current refresh for metadata older than x days: <span class="settings-current">{{ config.scheduler.check_reindex_days }}</span></p>
<p>Refresh older than x days, recommended 90:</p>
{% for error in scheduler_form.check_reindex_days.errors %}
<p class="danger-zone">{{ error }}</p>
{% endfor %}
{{ scheduler_form.check_reindex_days }}
</div>
<div class="settings-item">
<p>Send notification on task completion:</p>
{% if config.scheduler.check_reindex_notify %}
<p><button type="button" onclick="textReveal(this)" id="text-reveal-button">Show</button> stored notification links</p>
<div id="text-reveal" class="description-text">
<p>{{ config.scheduler.check_reindex_notify|linebreaks }}</p>
</div>
{% else %}
<p>Current notification urls: <span class="settings-current">{{ config.scheduler.check_reindex_notify }}</span></p>
{% endif %}
{{ scheduler_form.check_reindex_notify }}
</div>
</div>
<div class="settings-group">
<h2>Thumbnail Check</h2>
<div class="settings-item">
<p>Current thumbnail check schedule: <span class="settings-current">
{% if thumbnail_check %}
{{ thumbnail_check.crontab.minute }} {{ thumbnail_check.crontab.hour }} {{ thumbnail_check.crontab.day_of_week }}
<button data-schedule="thumbnail_check" onclick="deleteSchedule(this)" class="danger-button">Delete</button>
{% if config.scheduler.thumbnail_check %}
{% for key, value in config.scheduler.thumbnail_check.items %}
{{ value }}
{% endfor %}
{% else %}
False
{% endif %}
</span></p>
<p>Periodically check and cleanup thumbnails:</p>
{% for error in scheduler_form.thumbnail_check.errors %}
<p class="danger-zone">{{ error }}</p>
{% endfor %}
{{ scheduler_form.thumbnail_check }}
</div>
</div>
@ -102,51 +132,23 @@
<div class="settings-item">
<p><i>Zip file backups are very slow for large archives and consistency is not guaranteed; use snapshots instead. Make sure no other tasks are running when creating a Zip file backup.</i></p>
<p>Current index backup schedule: <span class="settings-current">
{% if run_backup %}
{{ run_backup.crontab.minute }} {{ run_backup.crontab.hour }} {{ run_backup.crontab.day_of_week }}
<button data-schedule="run_backup" onclick="deleteSchedule(this)" class="danger-button">Delete</button>
{% if config.scheduler.run_backup %}
{% for key, value in config.scheduler.run_backup.items %}
{{ value }}
{% endfor %}
{% else %}
False
{% endif %}
</span></p>
<p>Automatically backup metadata to a zip file:</p>
{% for error in scheduler_form.run_backup.errors %}
<p class="danger-zone">{{ error }}</p>
{% endfor %}
{{ scheduler_form.run_backup }}
</div>
<div class="settings-item">
<p>Current backup files to keep: <span class="settings-current">{{ run_backup.task_config.rotate }}</span></p>
<p>Current backup files to keep: <span class="settings-current">{{ config.scheduler.run_backup_rotate }}</span></p>
<p>Max auto backups to keep:</p>
{{ scheduler_form.run_backup_rotate }}
</div>
</div>
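The rotate setting is a simple retention count. A hedged sketch of the idea, with directory and file naming as pure assumptions rather than the app's actual layout:

from pathlib import Path

def rotate_backups(backup_dir: str, keep: int) -> None:
    """keep the newest <keep> zip backups, delete the rest"""
    # timestamped names are assumed to sort chronologically
    backups = sorted(Path(backup_dir).glob("ta_backup-*.zip"), reverse=True)
    for outdated in backups[keep:]:
        outdated.unlink()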
<div class="settings-group">
<h2>Add Notification URL</h2>
<div class="settings-item">
{% if notifications %}
<p><button type="button" onclick="textReveal(this)" id="text-reveal-button">Show</button> stored notification links</p>
<div id="text-reveal" class="description-text">
{% for task, notifications in notifications.items %}
<h3>{{ notifications.title }}</h3>
{% for url in notifications.urls %}
<p>
<button type="button" class="danger-button" data-url="{{ url }}" data-task="{{ task }}" onclick="deleteNotificationUrl(this)"> Delete</button>
<span> {{ url }}</span>
</p>
{% endfor %}
{% endfor %}
</div>
{% else %}
<p>No notifications stored</p>
{% endif %}
</div>
<div class="settings-item">
<p><i>Send notification on completed tasks with the help of the <a href="https://github.com/caronc/apprise" target="_blank">Apprise</a> library.</i></p>
{{ notification_form.task }}
{{ notification_form.notification_url }}
</div>
</div>
<button type="submit" name="scheduler-settings">Update Scheduler Settings</button>
</form>
{% endblock settings_content %}
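For context, sending one of these notifications with the Apprise library referenced above looks roughly like this; the URL is a placeholder and the title/body strings are assumptions:

import apprise

notifier = apprise.Apprise()
notifier.add("discord://webhook_id/webhook_token")  # any Apprise-supported URL
notifier.notify(title="Downloading", body="downloaded 3 video(s).")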

View File

@ -92,10 +92,7 @@
{% if request.user|has_group:"admin" or request.user.is_staff %}
<button onclick="deleteConfirm()" id="delete-item">Delete Video</button>
<div class="delete-confirm" id="delete-button">
<span>Are you sure? </span>
<button class="danger-button" onclick="deleteVideo(this)" data-id="{{ video.youtube_id }}" data-redirect = "{{ video.channel.channel_id }}">Delete</button>
<button class="danger-button" onclick="deleteVideo(this)" data-id="{{ video.youtube_id }}" data-ignore data-redirect = "{{ video.channel.channel_id }}">Delete and ignore</button>
<button onclick="cancelDelete()">Cancel</button>
<span>Are you sure? </span><button class="danger-button" onclick="deleteVideo(this)" data-id="{{ video.youtube_id }}" data-redirect = "{{ video.channel.channel_id }}">Delete</button> <button onclick="cancelDelete()">Cancel</button>
</div>
{% endif %}
<button id="{{ video.youtube_id }}-button" data-id="{{ video.youtube_id }}" data-context="video" onclick="showAddToPlaylistMenu(this)">Add To Playlist</button>

View File

@ -1,11 +0,0 @@
"""test configs"""
import os
import pytest
@pytest.fixture(scope="session", autouse=True)
def change_test_dir(request):
"""change directory to project folder"""
os.chdir(request.config.rootdir / "tubearchivist")

View File

@ -1,113 +0,0 @@
"""tests for helper functions"""
import pytest
from home.src.ta.helper import (
date_parser,
get_duration_str,
get_mapping,
is_shorts,
randomizor,
time_parser,
)
def test_randomizor_with_positive_length():
"""test randomizer"""
length = 10
result = randomizor(length)
assert len(result) == length
assert result.isalnum()
def test_date_parser_with_int():
"""unix timestamp"""
timestamp = 1621539600
expected_date = "2021-05-20"
assert date_parser(timestamp) == expected_date
def test_date_parser_with_str():
"""iso timestamp"""
date_str = "2021-05-21"
expected_date = "2021-05-21"
assert date_parser(date_str) == expected_date
def test_date_parser_with_invalid_input():
"""invalid type"""
invalid_input = [1621539600]
with pytest.raises(TypeError):
date_parser(invalid_input)
def test_date_parser_with_invalid_string_format():
"""invalid date string"""
invalid_date_str = "21/05/2021"
with pytest.raises(ValueError):
date_parser(invalid_date_str)
def test_time_parser_with_numeric_string():
"""as number"""
timestamp = "100"
expected_seconds = 100
assert time_parser(timestamp) == expected_seconds
def test_time_parser_with_hh_mm_ss_format():
"""to seconds"""
timestamp = "01:00:00"
expected_seconds = 3600.0
assert time_parser(timestamp) == expected_seconds
def test_time_parser_with_empty_string():
"""handle empty"""
timestamp = ""
assert time_parser(timestamp) is False
def test_time_parser_with_invalid_format():
"""not enough to unpack"""
timestamp = "01:00"
with pytest.raises(ValueError):
time_parser(timestamp)
def test_time_parser_with_non_numeric_input():
"""non numeric"""
timestamp = "1a:00:00"
with pytest.raises(ValueError):
time_parser(timestamp)
def test_get_mapping():
"""test mappint"""
index_config = get_mapping()
assert isinstance(index_config, list)
assert all(isinstance(i, dict) for i in index_config)
def test_is_shorts():
"""is shorts id"""
youtube_id = "YG3-Pw3rixU"
assert is_shorts(youtube_id)
def test_is_not_shorts():
"""is not shorts id"""
youtube_id = "Ogr9kbypSNg"
assert is_shorts(youtube_id) is False
def test_get_duration_str():
"""only seconds"""
assert get_duration_str(None) == "NA"
assert get_duration_str(5) == "5s"
assert get_duration_str(10) == "10s"
assert get_duration_str(500) == "8m 20s"
assert get_duration_str(1000) == "16m 40s"
assert get_duration_str(5000) == "1h 23m 20s"
assert get_duration_str(500000) == "5d 18h 53m 20s"
assert get_duration_str(5000000) == "57d 20h 53m 20s"
assert get_duration_str(50000000) == "1y 213d 16h 53m 20s"

View File

@ -1,144 +0,0 @@
"""tests for url parser"""
import pytest
from home.src.ta.urlparser import Parser
# video id parsing
VIDEO_URL_IN = [
"7DKv5H5Frt0",
"https://www.youtube.com/watch?v=7DKv5H5Frt0",
"https://www.youtube.com/watch?v=7DKv5H5Frt0&t=113&feature=shared",
"https://www.youtube.com/watch?v=7DKv5H5Frt0&list=PL96C35uN7xGJu6skU4TBYrIWxggkZBrF5&index=1&pp=iAQB" # noqa: E501
"https://youtu.be/7DKv5H5Frt0",
]
VIDEO_OUT = [{"type": "video", "url": "7DKv5H5Frt0", "vid_type": "unknown"}]
VIDEO_TEST_CASES = [(i, VIDEO_OUT) for i in VIDEO_URL_IN]
# shorts id parsing
SHORTS_URL_IN = [
"https://www.youtube.com/shorts/YG3-Pw3rixU",
"https://youtube.com/shorts/YG3-Pw3rixU?feature=shared",
]
SHORTS_OUT = [{"type": "video", "url": "YG3-Pw3rixU", "vid_type": "shorts"}]
SHORTS_TEST_CASES = [(i, SHORTS_OUT) for i in SHORTS_URL_IN]
# channel id parsing
CHANNEL_URL_IN = [
"UCBa659QWEk1AI4Tg--mrJ2A",
"@TomScottGo",
"https://www.youtube.com/channel/UCBa659QWEk1AI4Tg--mrJ2A",
"https://www.youtube.com/@TomScottGo",
]
CHANNEL_OUT = [
{
"type": "channel",
"url": "UCBa659QWEk1AI4Tg--mrJ2A",
"vid_type": "unknown",
}
]
CHANNEL_TEST_CASES = [(i, CHANNEL_OUT) for i in CHANNEL_URL_IN]
# channel vid type parsing
CHANNEL_VID_TYPES = [
(
"https://www.youtube.com/@IBRACORP/videos",
[
{
"type": "channel",
"url": "UC7aW7chIafJG6ECYAd3N5uQ",
"vid_type": "videos",
}
],
),
(
"https://www.youtube.com/@IBRACORP/shorts",
[
{
"type": "channel",
"url": "UC7aW7chIafJG6ECYAd3N5uQ",
"vid_type": "shorts",
}
],
),
(
"https://www.youtube.com/@IBRACORP/streams",
[
{
"type": "channel",
"url": "UC7aW7chIafJG6ECYAd3N5uQ",
"vid_type": "streams",
}
],
),
]
# playlist id parsing
PLAYLIST_URL_IN = [
"PL96C35uN7xGJu6skU4TBYrIWxggkZBrF5",
"https://www.youtube.com/playlist?list=PL96C35uN7xGJu6skU4TBYrIWxggkZBrF5",
]
PLAYLIST_OUT = [
{
"type": "playlist",
"url": "PL96C35uN7xGJu6skU4TBYrIWxggkZBrF5",
"vid_type": "unknown",
}
]
PLAYLIST_TEST_CASES = [(i, PLAYLIST_OUT) for i in PLAYLIST_URL_IN]
# personal playlists
EXPECTED_WL = [{"type": "playlist", "url": "WL", "vid_type": "unknown"}]
EXPECTED_LL = [{"type": "playlist", "url": "LL", "vid_type": "unknown"}]
PERSONAL_PLAYLISTS_TEST_CASES = [
("WL", EXPECTED_WL),
("https://www.youtube.com/playlist?list=WL", EXPECTED_WL),
("LL", EXPECTED_LL),
("https://www.youtube.com/playlist?list=LL", EXPECTED_LL),
]
# collect tests expected to pass
PASSING_TESTS = []
PASSING_TESTS.extend(VIDEO_TEST_CASES)
PASSING_TESTS.extend(SHORTS_TEST_CASES)
PASSING_TESTS.extend(CHANNEL_TEST_CASES)
PASSING_TESTS.extend(CHANNEL_VID_TYPES)
PASSING_TESTS.extend(PLAYLIST_TEST_CASES)
PASSING_TESTS.extend(PERSONAL_PLAYLISTS_TEST_CASES)
@pytest.mark.parametrize("url_str, expected_result", PASSING_TESTS)
def test_passing_parse(url_str, expected_result):
"""test parser"""
parser = Parser(url_str)
parsed = parser.parse()
assert parsed == expected_result
INVALID_IDS_ERRORS = [
"aaaaa",
"https://www.youtube.com/playlist?list=AAAA",
"https://www.youtube.com/channel/UC9-y-6csu5WGm29I7Jiwpn",
"https://www.youtube.com/watch?v=CK3_zarXkw",
]
@pytest.mark.parametrize("invalid_value", INVALID_IDS_ERRORS)
def test_invalid_ids(invalid_value):
"""test for invalid IDs"""
with pytest.raises(ValueError, match="not a valid id_str"):
parser = Parser(invalid_value)
parser.parse()
INVALID_DOMAINS = [
"https://vimeo.com/32001208",
"https://peertube.tv/w/8RiJE2j2nw569FVgPNjDt7",
]
@pytest.mark.parametrize("invalid_value", INVALID_DOMAINS)
def test_invalid_domains(invalid_value):
"""raise error on none YT domains"""
parser = Parser(invalid_value)
with pytest.raises(ValueError, match="invalid domain"):
parser.parse()

View File

@ -0,0 +1,42 @@
"""All test classes"""
from django.test import TestCase
class URLTests(TestCase):
"""test if all expected URL are there"""
def test_home_view(self):
"""check homepage"""
response = self.client.get("/")
self.assertEqual(response.status_code, 200)
def test_about_view(self):
"""check about page"""
response = self.client.get("/about/")
self.assertEqual(response.status_code, 200)
def test_downloads_view(self):
"""check downloads page"""
response = self.client.get("/downloads/")
self.assertEqual(response.status_code, 200)
def test_channel_view(self):
"""check channel page"""
response = self.client.get("/channel/")
self.assertEqual(response.status_code, 200)
def test_settings_view(self):
"""check settings page"""
response = self.client.get("/settings/")
self.assertEqual(response.status_code, 200)
def test_progress_view(self):
"""check ajax progress endpoint"""
response = self.client.get("/downloads/progress/")
self.assertEqual(response.status_code, 200)
def test_process_view(self):
"""check process ajax endpoint"""
response = self.client.get("/process/")
self.assertEqual(response.status_code, 200)

View File

@ -19,7 +19,6 @@ from django.http import Http404
from django.shortcuts import redirect, render
from django.utils.decorators import method_decorator
from django.views import View
from home.models import CustomPeriodicTask
from home.src.download.queue import PendingInteract
from home.src.download.yt_dlp_base import CookieHandler
from home.src.es.backup import ElasticBackup
@ -32,23 +31,18 @@ from home.src.frontend.forms import (
CreatePlaylistForm,
CustomAuthForm,
MultiSearchForm,
SchedulerSettingsForm,
SubscribeToChannelForm,
SubscribeToPlaylistForm,
UserSettingsForm,
)
from home.src.frontend.forms_schedule import (
NotificationSettingsForm,
SchedulerSettingsForm,
)
from home.src.index.channel import channel_overwrites
from home.src.index.generic import Pagination
from home.src.index.playlist import YoutubePlaylist
from home.src.index.reindex import ReindexProgress
from home.src.index.video_constants import VideoTypeEnum
from home.src.ta.config import AppConfig, ReleaseVersion
from home.src.ta.config_schedule import ScheduleBuilder
from home.src.ta.config import AppConfig, ReleaseVersion, ScheduleBuilder
from home.src.ta.helper import check_stylesheet, time_parser
from home.src.ta.notify import Notifications, get_all_notifications
from home.src.ta.settings import EnvironmentSettings
from home.src.ta.ta_redis import RedisArchivist
from home.src.ta.users import UserConfig
@ -431,8 +425,6 @@ class ChannelIdBaseView(ArchivistResultsView):
path = f"ta_channel/_doc/{channel_id}"
response, _ = ElasticWrap(path).get()
channel_info = SearchProcess(response).process()
if not channel_info:
raise Http404
return channel_info
@ -726,9 +718,6 @@ class PlaylistIdView(ArchivistResultsView):
"""handle get request"""
self.initiate_vars(request)
playlist_info, channel_info = self._get_info(playlist_id)
if not playlist_info:
raise Http404
playlist_name = playlist_info["playlist_name"]
self._update_view_data(playlist_id, playlist_info)
self.find_results()
@ -879,8 +868,6 @@ class VideoView(MinView):
config_handler = AppConfig()
response, _ = ElasticWrap(f"ta_video/_doc/{video_id}").get()
video_data = SearchProcess(response).process()
if not video_data:
raise Http404
try:
rating = video_data["stats"]["average_rating"]
@ -1120,65 +1107,29 @@ class SettingsSchedulingView(MinView):
def get(self, request):
"""read and display current settings"""
context = self.get_context(request, SchedulerSettingsForm())
context = self.get_min_context(request)
context.update(
{
"title": "Scheduling Settings",
"config": AppConfig().config,
"scheduler_form": SchedulerSettingsForm(),
}
)
return render(request, "home/settings_scheduling.html", context)
def post(self, request):
"""handle form post to update settings"""
scheduler_form = SchedulerSettingsForm(request.POST)
notification_form = NotificationSettingsForm(request.POST)
if notification_form.is_valid():
notification_form_post = notification_form.cleaned_data
print(notification_form_post)
if any(notification_form_post.values()):
task_name = notification_form_post.get("task")
url = notification_form_post.get("notification_url")
Notifications(task_name).add_url(url)
if scheduler_form.is_valid():
scheduler_form_post = scheduler_form.cleaned_data
if any(scheduler_form_post.values()):
print(scheduler_form_post)
ScheduleBuilder().update_schedule_conf(scheduler_form_post)
else:
self.fail_message()
context = self.get_context(request, scheduler_form)
return render(request, "home/settings_scheduling.html", context)
sleep(1)
return redirect("settings_scheduling", permanent=True)
def get_context(self, request, scheduler_form):
"""get context"""
context = self.get_min_context(request)
all_tasks = CustomPeriodicTask.objects.all()
context.update(
{
"title": "Scheduling Settings",
"scheduler_form": scheduler_form,
"notification_form": NotificationSettingsForm(),
"notifications": get_all_notifications(),
}
)
for task in all_tasks:
context.update({task.name: task})
return context
@staticmethod
def fail_message():
"""send failure message"""
mess_dict = {
"group": "setting:schedule",
"level": "error",
"title": "Scheduler update failed.",
"messages": ["Invalid schedule input"],
"id": "0000",
}
RedisArchivist().set_message("message:setting", mess_dict, expire=True)
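# The frontend is then assumed to poll this key and render the dict above,
# e.g. (illustrative):
#   RedisArchivist().get_message("message:setting")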
@method_decorator(user_passes_test(check_admin), name="dispatch")
class SettingsActionsView(MinView):

View File

@ -1,10 +0,0 @@
-r requirements.txt
black
codespell
flake8
isort
pylint
pylint-django
pytest
pytest-django
types-requests

View File

@ -1,14 +1,13 @@
apprise==1.8.0
celery==5.4.0
Django==5.0.6
apprise==1.7.5
celery==5.3.6
Django==5.0.4
django-auth-ldap==4.8.0
django-celery-beat==2.6.0
django-cors-headers==4.3.1
djangorestframework==3.15.1
Pillow==10.3.0
redis==5.0.4
requests==2.32.1
redis==5.0.0
requests==2.31.0
ryd-client==0.0.6
uWSGI==2.0.25.1
uWSGI==2.0.24
whitenoise==6.6.0
yt-dlp @ git+https://github.com/yt-dlp/yt-dlp@8e15177b4113c355989881e4e030f695a9b59c3a
yt-dlp==2024.4.9

View File

@ -12,7 +12,7 @@ checkMessages();
// start to look for messages
function checkMessages() {
let notifications = document.getElementById('notifications');
if (notifications && notifications.childNodes.length === 0) {
if (notifications && notifications.childNodes.length === 0 ) {
let dataOrigin = notifications.getAttribute('data');
getMessages(dataOrigin);
}
@ -55,7 +55,7 @@ function buildMessage(responseData, dataOrigin) {
}
clearNotifications(responseData);
if (currentNotifications > 0 && messages.length === 0) {
location.replace(location.href);
location.reload();
}
return messages;
}
@ -79,7 +79,7 @@ function updateMessageBox(messageData) {
children[1].innerHTML = messageData.messages.join('<br>');
if (
!messageBox.querySelector('#stop-icon') &&
messageData['api_stop'] &&
messageData['api-stop'] &&
messageData.command !== 'STOP'
) {
children[2].appendChild(buildStopIcon(messageData.id));

View File

@ -203,28 +203,18 @@ function showAddToPlaylistMenu(input1) {
dataId = input1.getAttribute('data-id');
buttonId = input1.getAttribute('id');
playlists = getCustomPlaylists();
//hide the invoking button
input1.style.visibility = 'hidden';
input1.style.visibility = "hidden";
//show the form
form_code =
'<div class="video-popup-menu"><img src="/static/img/icon-close.svg" class="video-popup-menu-close-button" title="Close menu" onclick="removeDotMenu(this, \'' +
buttonId +
'\')"/><h3>Add video to...</h3>';
for (let i = 0; i < playlists.length; i++) {
let obj = playlists[i];
form_code +=
'<p onclick="addToCustomPlaylist(this, \'' +
dataId +
"','" +
obj.playlist_id +
'\')"><img class="p-button" src="/static/img/icon-unseen.svg"/>' +
obj.playlist_name +
'</p>';
form_code = '<div class="video-popup-menu"><img src="/static/img/icon-close.svg" class="video-popup-menu-close-button" title="Close menu" onclick="removeDotMenu(this, \''+buttonId+'\')"/><h3>Add video to...</h3>';
for(let i = 0; i < playlists.length; i++) {
let obj = playlists[i];
form_code += '<p onclick="addToCustomPlaylist(this, \''+dataId+'\',\''+obj.playlist_id+'\')"><img class="p-button" src="/static/img/icon-unseen.svg"/>'+obj.playlist_name+'</p>';
}
form_code += '<p><a href="/playlist">Create playlist</a></p></div>';
input1.parentNode.parentNode.innerHTML += form_code;
}
@ -232,17 +222,18 @@ function showAddToPlaylistMenu(input1) {
//handles user action of adding a video to a custom playlist
function addToCustomPlaylist(input, video_id, playlist_id) {
let apiEndpoint = '/api/playlist/' + playlist_id + '/';
let data = { action: 'create', video_id: video_id };
let data = { "action": "create", "video_id": video_id };
apiRequest(apiEndpoint, 'POST', data);
//mark the item added in the ui
input.firstChild.src = '/static/img/icon-seen.svg';
input.firstChild.src='/static/img/icon-seen.svg';
}
function removeDotMenu(input1, button_id) {
//show the menu button
document.getElementById(button_id).style.visibility = 'visible';
//show the menu button
document.getElementById(button_id).style.visibility = "visible";
//remove the form
input1.parentNode.remove();
}
@ -252,67 +243,20 @@ function showCustomPlaylistMenu(input1, playlist_id, current_page, last_page) {
let dataId, form_code, buttonId;
dataId = input1.getAttribute('data-id');
buttonId = input1.getAttribute('id');
//hide the invoking button
input1.style.visibility = 'hidden';
input1.style.visibility = "hidden";
//show the form
form_code =
'<div class="video-popup-menu"><img src="/static/img/icon-close.svg" class="video-popup-menu-close-button" title="Close menu" onclick="removeDotMenu(this, \'' +
buttonId +
'\')"/><h3>Move Video</h3>';
form_code +=
'<img class="move-video-button" data-id="' +
dataId +
'" data-context="top" onclick="moveCustomPlaylistVideo(this,\'' +
playlist_id +
"'," +
current_page +
',' +
last_page +
')" src="/static/img/icon-arrow-top.svg" title="Move to top"/>';
form_code +=
'<img class="move-video-button" data-id="' +
dataId +
'" data-context="up" onclick="moveCustomPlaylistVideo(this,\'' +
playlist_id +
"'," +
current_page +
',' +
last_page +
')" src="/static/img/icon-arrow-up.svg" title="Move up"/>';
form_code +=
'<img class="move-video-button" data-id="' +
dataId +
'" data-context="down" onclick="moveCustomPlaylistVideo(this,\'' +
playlist_id +
"'," +
current_page +
',' +
last_page +
')" src="/static/img/icon-arrow-down.svg" title="Move down"/>';
form_code +=
'<img class="move-video-button" data-id="' +
dataId +
'" data-context="bottom" onclick="moveCustomPlaylistVideo(this,\'' +
playlist_id +
"'," +
current_page +
',' +
last_page +
')" src="/static/img/icon-arrow-bottom.svg" title="Move to bottom"/>';
form_code +=
'<img class="move-video-button" data-id="' +
dataId +
'" data-context="remove" onclick="moveCustomPlaylistVideo(this,\'' +
playlist_id +
"'," +
current_page +
',' +
last_page +
')" src="/static/img/icon-remove.svg" title="Remove from playlist"/>';
form_code = '<div class="video-popup-menu"><img src="/static/img/icon-close.svg" class="video-popup-menu-close-button" title="Close menu" onclick="removeDotMenu(this, \''+buttonId+'\')"/><h3>Move Video</h3>';
form_code += '<img class="move-video-button" data-id="'+dataId+'" data-context="top" onclick="moveCustomPlaylistVideo(this,\''+playlist_id+'\','+current_page+','+last_page+')" src="/static/img/icon-arrow-top.svg" title="Move to top"/>';
form_code += '<img class="move-video-button" data-id="'+dataId+'" data-context="up" onclick="moveCustomPlaylistVideo(this,\''+playlist_id+'\','+current_page+','+last_page+')" src="/static/img/icon-arrow-up.svg" title="Move up"/>';
form_code += '<img class="move-video-button" data-id="'+dataId+'" data-context="down" onclick="moveCustomPlaylistVideo(this,\''+playlist_id+'\','+current_page+','+last_page+')" src="/static/img/icon-arrow-down.svg" title="Move down"/>';
form_code += '<img class="move-video-button" data-id="'+dataId+'" data-context="bottom" onclick="moveCustomPlaylistVideo(this,\''+playlist_id+'\','+current_page+','+last_page+')" src="/static/img/icon-arrow-bottom.svg" title="Move to bottom"/>';
form_code += '<img class="move-video-button" data-id="'+dataId+'" data-context="remove" onclick="moveCustomPlaylistVideo(this,\''+playlist_id+'\','+current_page+','+last_page+')" src="/static/img/icon-remove.svg" title="Remove from playlist"/>';
form_code += '</div>';
input1.parentNode.parentNode.innerHTML += form_code;
}
@ -322,46 +266,65 @@ function moveCustomPlaylistVideo(input1, playlist_id, current_page, last_page) {
let dataId, dataContext;
dataId = input1.getAttribute('data-id');
dataContext = input1.getAttribute('data-context');
let apiEndpoint = '/api/playlist/' + playlist_id + '/';
let data = { action: dataContext, video_id: dataId };
let data = { "action": dataContext, "video_id": dataId };
apiRequest(apiEndpoint, 'POST', data);
let itemDom = input1.parentElement.parentElement.parentElement;
let listDom = itemDom.parentElement;
if (dataContext === 'up') {
let sibling = itemDom.previousElementSibling;
if (sibling !== null) {
sibling.before(itemDom);
} else if (current_page > 1) {
itemDom.remove();
}
} else if (dataContext === 'down') {
let sibling = itemDom.nextElementSibling;
if (sibling !== null) {
sibling.after(itemDom);
} else if (current_page !== last_page) {
itemDom.remove();
}
} else if (dataContext === 'top') {
let sibling = listDom.firstElementChild;
if (sibling !== null) {
sibling.before(itemDom);
}
if (current_page > 1) {
itemDom.remove();
}
} else if (dataContext === 'bottom') {
let sibling = listDom.lastElementChild;
if (sibling !== null) {
sibling.after(itemDom);
}
if (current_page !== last_page) {
itemDom.remove();
}
} else if (dataContext === 'remove') {
itemDom.remove();
if (dataContext === "up")
{
let sibling = itemDom.previousElementSibling;
if (sibling !== null)
{
sibling.before(itemDom);
}
else if (current_page > 1)
{
itemDom.remove();
}
}
else if (dataContext === "down")
{
let sibling = itemDom.nextElementSibling;
if (sibling !== null)
{
sibling.after(itemDom);
}
else if (current_page !== last_page)
{
itemDom.remove();
}
}
else if (dataContext === "top")
{
let sibling = listDom.firstElementChild;
if (sibling !== null)
{
sibling.before(itemDom);
}
if (current_page > 1)
{
itemDom.remove();
}
}
else if (dataContext === "bottom")
{
let sibling = listDom.lastElementChild;
if (sibling !== null)
{
sibling.after(itemDom);
}
if (current_page !== last_page)
{
itemDom.remove();
}
}
else if (dataContext === "remove")
{
itemDom.remove();
}
}
@ -531,28 +494,6 @@ function createSnapshot() {
document.getElementById('createButton').replaceWith(message);
}
function deleteNotificationUrl(button) {
console.log('delete notification url');
let apiEndpoint = '/api/schedule/notification/';
let data = {
task_name: button.dataset.task,
url: button.dataset.url,
};
apiRequest(apiEndpoint, 'DELETE', data);
button.parentElement.remove();
}
function deleteSchedule(button) {
console.log('delete schedule');
let apiEndpoint = '/api/schedule/';
let data = { task_name: button.dataset.schedule };
apiRequest(apiEndpoint, 'DELETE', data);
let message = document.createElement('span');
message.innerText = 'False';
message.classList.add('settings-current');
button.parentElement.replaceWith(message);
}
// delete from file system
function deleteConfirm() {
let to_show = document.getElementById('delete-button');
@ -562,16 +503,9 @@ function deleteConfirm() {
function deleteVideo(button) {
let to_delete = button.getAttribute('data-id');
let to_ignore = button.getAttribute('data-ignore');
let to_redirect = button.getAttribute('data-redirect');
let apiDeleteEndpoint = '/api/video/' + to_delete + '/';
apiRequest(apiDeleteEndpoint, 'DELETE');
if (to_ignore !== null) {
let apiIgnoreEndpoint = '/api/download/' + to_delete + '/';
apiRequest(apiIgnoreEndpoint, 'POST', { status: 'ignore-force' });
}
let apiEndpoint = '/api/video/' + to_delete + '/';
apiRequest(apiEndpoint, 'DELETE');
setTimeout(function () {
let redirect = '/channel/' + to_redirect;
window.location.replace(redirect);
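Equivalently, the two API calls behind the Delete and "Delete and ignore" buttons can be issued directly; a hedged sketch with Python requests, base URL and token being placeholders:

import requests

BASE = "http://localhost:8000"
HEADERS = {"Authorization": "Token <api-token>"}
video_id = "7DKv5H5Frt0"

# plain delete, same as apiRequest(apiDeleteEndpoint, 'DELETE') above
requests.delete(f"{BASE}/api/video/{video_id}/", headers=HEADERS, timeout=10)

# "Delete and ignore" additionally flags the ID so it is not re-downloaded
requests.post(
    f"{BASE}/api/download/{video_id}/",
    json={"status": "ignore-force"},
    headers=HEADERS,
    timeout=10,
)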
@ -1588,6 +1522,7 @@ document.addEventListener('readystatechange', textExpandButtonVisibilityUpdate);
window.addEventListener('resize', textExpandButtonVisibilityUpdate);
function showForm(id) {
let id2 = id === undefined ? 'hidden-form' : id;
let formElement = document.getElementById(id2);
let displayStyle = formElement.style.display;