implement auto_start indexing

simon 2023-04-16 18:30:39 +07:00
parent e385331f6c
commit 3a091ac287
GPG Key ID: 2C15AA5E89985DD4
6 changed files with 52 additions and 8 deletions

View File

@@ -44,6 +44,7 @@ class Command(BaseCommand):
        self._mig_snapshot_check()
        self._mig_set_vid_type()
        self._mig_set_streams()
        self._mig_set_autostart()

    def _sync_redis_state(self):
        """make sure redis gets new config.json values"""
@@ -236,3 +237,34 @@ class Command(BaseCommand):
            if idx % 100 == 0:
                self.stdout.write(f" progress {idx}/{total}")

    def _mig_set_autostart(self):
        """migration: update from 0.3.5 to 0.3.6 set auto_start to false"""
        self.stdout.write("[MIGRATION] set default download auto_start")
        data = {
            "query": {
                "bool": {"must_not": [{"exists": {"field": "auto_start"}}]}
            },
            "script": {"source": "ctx._source['auto_start'] = false"},
        }
        path = "ta_download/_update_by_query"
        response, status_code = ElasticWrap(path).post(data=data)
        if status_code == 200:
            updated = response.get("updated", 0)
            if not updated:
                self.stdout.write(
                    " no videos needed updating in ta_download"
                )

            self.stdout.write(
                self.style.SUCCESS(
                    f"{updated} videos updated in ta_download"
                )
            )
            return

        message = " 🗙 ta_download auto_start update failed"
        self.stdout.write(self.style.ERROR(message))
        self.stdout.write(response)
        sleep(60)
        raise CommandError(message)
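The migration above is a plain Elasticsearch _update_by_query call: it selects ta_download documents that do not yet have an auto_start field and backfills them with false through a painless script. For reference only, a rough standalone sketch of the same request sent with the requests library, where the host URL and credentials are assumptions and not part of this commit (ElasticWrap normally supplies the connection details):

    import requests

    # assumed connection details, not taken from this commit
    ES_URL = "http://localhost:9200"
    ES_AUTH = ("elastic", "changeme")

    data = {
        "query": {
            "bool": {"must_not": [{"exists": {"field": "auto_start"}}]}
        },
        "script": {"source": "ctx._source['auto_start'] = false"},
    }

    # update every queued download that does not carry the flag yet
    response = requests.post(
        f"{ES_URL}/ta_download/_update_by_query",
        json=data,
        auth=ES_AUTH,
        timeout=120,
    )
    print(response.json().get("updated", 0), "documents updated")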

View File

@@ -12,11 +12,11 @@
        "grid_items": 3
    },
    "subscriptions": {
        "auto_search": false,
        "auto_download": false,
        "channel_size": 50,
        "live_channel_size": 50,
        "shorts_channel_size": 50
        "shorts_channel_size": 50,
        "auto_start": false
    },
    "downloads": {
        "limit_count": false,

View File

@@ -236,7 +236,7 @@ class PendingList(PendingIndex):
            # match vid_type later
            self._add_video(video_id, VideoTypeEnum.UNKNOWN)

    def add_to_pending(self, status="pending"):
    def add_to_pending(self, status="pending", auto_start=False):
        """add missing videos to pending list"""
        self.get_channels()
        bulk_list = []
@@ -252,7 +252,13 @@ class PendingList(PendingIndex):
            if not video_details:
                continue

            video_details["status"] = status
            video_details.update(
                {
                    "status": status,
                    "auto_start": auto_start,
                }
            )
            action = {"create": {"_id": youtube_id, "_index": "ta_download"}}
            bulk_list.append(json.dumps(action))
            bulk_list.append(json.dumps(video_details))
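Every document queued through add_to_pending now carries an explicit auto_start flag next to its status. Each queue entry becomes an action line plus a document line in an Elasticsearch bulk request; a minimal sketch of one such pair, with placeholder values that are illustrative rather than taken from this commit:

    import json

    youtube_id = "dQw4w9WgXcQ"  # placeholder video id
    video_details = {"title": "example title", "vid_type": "videos"}  # illustrative fields
    video_details.update({"status": "pending", "auto_start": True})

    action = {"create": {"_id": youtube_id, "_index": "ta_download"}}
    bulk_list = [json.dumps(action), json.dumps(video_details)]

    # newline-delimited body as expected by the Elasticsearch _bulk endpoint
    payload = "\n".join(bulk_list) + "\n"
    print(payload)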

View File

@@ -284,6 +284,7 @@ class SubscriptionScanner:
    def __init__(self, task=False):
        self.task = task
        self.missing_videos = False
        self.auto_start = AppConfig().config["subscriptions"].get("auto_start")

    def scan(self):
        """scan channels and playlists"""

View File

@@ -357,6 +357,9 @@
            },
            "vid_type": {
                "type": "keyword"
            },
            "auto_start": {
                "type": "boolean"
            }
        },
        "expected_set": {

View File

@@ -171,10 +171,12 @@ def update_subscribed(self):
        return

    manager.init(self)
    missing_videos = SubscriptionScanner(task=self).scan()
    handler = SubscriptionScanner(task=self)
    missing_videos = handler.scan()
    auto_start = handler.auto_start
    if missing_videos:
        print(missing_videos)
        extrac_dl.delay(missing_videos)
        extrac_dl.delay(missing_videos, auto_start=auto_start)


@shared_task(name="download_pending", bind=True, base=BaseTask)
@@ -194,12 +196,12 @@ def download_pending(self, from_queue=True):
@shared_task(name="extract_download", bind=True, base=BaseTask)
def extrac_dl(self, youtube_ids):
def extrac_dl(self, youtube_ids, auto_start=False):
    """parse list passed and add to pending"""
    TaskManager().init(self)
    pending_handler = PendingList(youtube_ids=youtube_ids, task=self)
    pending_handler.parse_url_list()
    pending_handler.add_to_pending()
    pending_handler.add_to_pending(auto_start=auto_start)


@shared_task(bind=True, name="check_reindex", base=BaseTask)
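Taken together, the flag travels from config.json to the queue: SubscriptionScanner reads subscriptions.auto_start, update_subscribed hands it to the extrac_dl task, and PendingList.add_to_pending stamps it onto every new ta_download document. A small usage sketch for triggering that last step by hand, assuming the tasks module shown here is importable as home.tasks (the import path is not visible in this diff) and a Celery worker is running:

    # assumed import path, the diff does not show the file name
    from home.tasks import extrac_dl

    # queue one placeholder video id and mark it to start downloading right away
    extrac_dl.delay(["dQw4w9WgXcQ"], auto_start=True)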