diff --git a/tubearchivist/config/management/commands/ta_startup.py b/tubearchivist/config/management/commands/ta_startup.py
index c65b39e..4ff55d2 100644
--- a/tubearchivist/config/management/commands/ta_startup.py
+++ b/tubearchivist/config/management/commands/ta_startup.py
@@ -11,6 +11,7 @@ from django.core.management.base import BaseCommand, CommandError
 from home.src.es.connect import ElasticWrap, IndexPaginate
 from home.src.es.index_setup import ElasitIndexWrap
 from home.src.es.snapshot import ElasticSnapshot
+from home.src.index.filesystem import Filesystem
 from home.src.index.video_streams import MediaStreamExtractor
 from home.src.ta.config import AppConfig, ReleaseVersion
 from home.src.ta.helper import clear_dl_cache
@@ -162,6 +163,8 @@ class Command(BaseCommand):
             self.stdout.write(" no videos need updating")
             return
 
+        self.stdout.write(" start filesystem rescan")
+        Filesystem().process()
         total = len(all_missing)
         for idx, missing in enumerate(all_missing):
             media_url = missing["media_url"]
@@ -201,17 +204,17 @@ class Command(BaseCommand):
         response, status_code = ElasticWrap(path).post(data=data)
         if status_code == 200:
             updated = response.get("updated", 0)
-            if not updated:
+            if updated:
+                self.stdout.write(
+                    self.style.SUCCESS(
+                        f" ✓ {updated} videos updated in ta_download"
+                    )
+                )
+            else:
                 self.stdout.write(
                     " no videos needed updating in ta_download"
                 )
-                return
-
-            self.stdout.write(
-                self.style.SUCCESS(
-                    f" ✓ {updated} videos updated in ta_download"
-                )
-            )
+            return
 
         message = " 🗙 ta_download auto_start update failed"
         self.stdout.write(self.style.ERROR(message))
diff --git a/tubearchivist/home/src/frontend/searching.py b/tubearchivist/home/src/frontend/searching.py
index 3e7894b..9977f36 100644
--- a/tubearchivist/home/src/frontend/searching.py
+++ b/tubearchivist/home/src/frontend/searching.py
@@ -391,6 +391,7 @@ class QueryBuilder:
                     "channel_name._2gram^2",
                     "channel_name._3gram^2",
                     "channel_name.search_as_you_type^2",
+                    "channel_tags",
                 ],
             }
         }
diff --git a/tubearchivist/home/src/index/channel.py b/tubearchivist/home/src/index/channel.py
index eef5cdb..7ceeccb 100644
--- a/tubearchivist/home/src/index/channel.py
+++ b/tubearchivist/home/src/index/channel.py
@@ -47,13 +47,13 @@ class YoutubeChannel(YouTubeItem):
         if not self.youtube_meta and fallback:
             self._video_fallback(fallback)
         else:
-            self._process_youtube_meta()
+            self.process_youtube_meta()
             self.get_channel_art()
 
         if upload:
             self.upload_to_es()
 
-    def _process_youtube_meta(self):
+    def process_youtube_meta(self):
         """extract relevant fields"""
         self.youtube_meta["thumbnails"].reverse()
         channel_subs = self.youtube_meta.get("channel_follower_count") or 0
diff --git a/tubearchivist/home/src/index/filesystem.py b/tubearchivist/home/src/index/filesystem.py
index c4001af..75f4724 100644
--- a/tubearchivist/home/src/index/filesystem.py
+++ b/tubearchivist/home/src/index/filesystem.py
@@ -127,7 +127,8 @@ class Filesystem(ScannerBase):
 
     def process(self):
         """entry point"""
-        self.task.send_progress(["Scanning your archive and index."])
+        if self.task:
+            self.task.send_progress(["Scanning your archive and index."])
         self.scan()
         self.rename_files()
         self.send_mismatch_bulk()
@@ -140,7 +141,8 @@ class Filesystem(ScannerBase):
             return
 
         total = len(self.to_rename)
-        self.task.send_progress([f"Rename {total} media files."])
+        if self.task:
+            self.task.send_progress([f"Rename {total} media files."])
         for bad_filename in self.to_rename:
             channel, filename, expected_filename = bad_filename
             print(f"renaming [{filename}] to [{expected_filename}]")
@@ -154,7 +156,8 @@ class Filesystem(ScannerBase):
             return
 
         total = len(self.mismatch)
-        self.task.send_progress([f"Fix media urls for {total} files"])
+        if self.task:
+            self.task.send_progress([f"Fix media urls for {total} files"])
         bulk_list = []
         for video_mismatch in self.mismatch:
             youtube_id, media_url = video_mismatch
@@ -174,7 +177,8 @@ class Filesystem(ScannerBase):
             return
 
         total = len(self.to_delete)
-        self.task.send_progress([f"Clean up {total} items from index."])
+        if self.task:
+            self.task.send_progress([f"Clean up {total} items from index."])
         for indexed in self.to_delete:
             youtube_id = indexed[0]
             print(f"deleting {youtube_id} from index")
diff --git a/tubearchivist/home/src/index/reindex.py b/tubearchivist/home/src/index/reindex.py
index b5ae83c..7473c89 100644
--- a/tubearchivist/home/src/index/reindex.py
+++ b/tubearchivist/home/src/index/reindex.py
@@ -331,23 +331,29 @@ class Reindex(ReindexBase):
     @staticmethod
     def _reindex_single_channel(channel_id):
         """refresh channel data and sync to videos"""
+        # read current state
         channel = YoutubeChannel(channel_id)
         channel.get_from_es()
-        subscribed = channel.json_data["channel_subscribed"]
-        overwrites = channel.json_data.get("channel_overwrites", False)
+        es_meta = channel.json_data.copy()
+
+        # get new
         channel.get_from_youtube()
-        if not channel.json_data:
+        if not channel.youtube_meta:
             channel.deactivate()
             channel.get_from_es()
             channel.sync_to_videos()
             return
 
-        channel.json_data["channel_subscribed"] = subscribed
+        channel.process_youtube_meta()
+        channel.get_channel_art()
+
+        # add back
+        channel.json_data["channel_subscribed"] = es_meta["channel_subscribed"]
+        overwrites = es_meta.get("channel_overwrites")
         if overwrites:
             channel.json_data["channel_overwrites"] = overwrites
-        channel.upload_to_es()
-        channel.sync_to_videos()
 
+        channel.upload_to_es()
         ChannelFullScan(channel_id).scan()
 
     def _reindex_single_playlist(self, playlist_id):
diff --git a/tubearchivist/home/templates/home/downloads.html b/tubearchivist/home/templates/home/downloads.html
index 59f4f03..93eace6 100644
--- a/tubearchivist/home/templates/home/downloads.html
+++ b/tubearchivist/home/templates/home/downloads.html
@@ -81,6 +81,9 @@
                         queued
                     {% endif %}
                     {{ video.source.vid_type }}
+                    {% if video.source.auto_start %}
+                        auto
+                    {% endif %}
diff --git a/tubearchivist/home/views.py b/tubearchivist/home/views.py
index 142c6bc..ef880a7 100644
--- a/tubearchivist/home/views.py
+++ b/tubearchivist/home/views.py
@@ -413,7 +413,10 @@ class DownloadView(ArchivistResultsView):
         self.data.update(
             {
                 "query": {"bool": {"must": must_list}},
-                "sort": [{"timestamp": {"order": "asc"}}],
+                "sort": [
+                    {"auto_start": {"order": "desc"}},
+                    {"timestamp": {"order": "asc"}},
+                ],
             }
         )