implement auto_start indexing

parent e385331f6c
commit 3a091ac287
@@ -44,6 +44,7 @@ class Command(BaseCommand):
         self._mig_snapshot_check()
         self._mig_set_vid_type()
         self._mig_set_streams()
+        self._mig_set_autostart()

     def _sync_redis_state(self):
         """make sure redis gets new config.json values"""
@@ -236,3 +237,34 @@ class Command(BaseCommand):
             if idx % 100 == 0:
                 self.stdout.write(f" progress {idx}/{total}")

+    def _mig_set_autostart(self):
+        """migration: update from 0.3.5 to 0.3.6 set auto_start to false"""
+        self.stdout.write("[MIGRATION] set default download auto_start")
+        data = {
+            "query": {
+                "bool": {"must_not": [{"exists": {"field": "auto_start"}}]}
+            },
+            "script": {"source": "ctx._source['auto_start'] = false"},
+        }
+        path = "ta_download/_update_by_query"
+        response, status_code = ElasticWrap(path).post(data=data)
+        if status_code == 200:
+            updated = response.get("updated", 0)
+            if not updated:
+                self.stdout.write(
+                    " no videos needed updating in ta_download"
+                )
+
+            self.stdout.write(
+                self.style.SUCCESS(
+                    f" ✓ {updated} videos updated in ta_download"
+                )
+            )
+            return
+
+        message = " 🗙 ta_download auto_start update failed"
+        self.stdout.write(self.style.ERROR(message))
+        self.stdout.write(response)
+        sleep(60)
+        raise CommandError(message)
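Note (not part of the commit): the migration above is a plain Elasticsearch
_update_by_query that backfills auto_start = false on every ta_download document
that does not yet carry the field. A minimal sketch for checking how many
documents are still missing it, assuming a locally reachable Elasticsearch;
host and credentials are placeholders:

import requests

ES_URL = "http://localhost:9200"   # assumption: adjust to your ES host
AUTH = ("elastic", "changeme")     # assumption: adjust to your credentials

# same filter the migration uses: documents without an auto_start field
query = {"query": {"bool": {"must_not": [{"exists": {"field": "auto_start"}}]}}}

response = requests.get(
    f"{ES_URL}/ta_download/_count", json=query, auth=AUTH, timeout=10
)
print(response.json().get("count", 0), "documents still missing auto_start")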
@@ -12,11 +12,11 @@
         "grid_items": 3
     },
     "subscriptions": {
-        "auto_search": false,
         "auto_download": false,
         "channel_size": 50,
         "live_channel_size": 50,
-        "shorts_channel_size": 50
+        "shorts_channel_size": 50,
+        "auto_start": false
     },
     "downloads": {
         "limit_count": false,
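Note (not part of the commit): the shipped defaults drop the auto_search key
and gain subscriptions.auto_start, defaulting to false. A quick sanity check,
assuming the defaults file is readable from the working directory:

import json

with open("config.json") as f:   # assumption: path to the defaults file
    defaults = json.load(f)

subscriptions = defaults["subscriptions"]
assert "auto_search" not in subscriptions
assert subscriptions["auto_start"] is False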
@@ -236,7 +236,7 @@ class PendingList(PendingIndex):
                 # match vid_type later
                 self._add_video(video_id, VideoTypeEnum.UNKNOWN)

-    def add_to_pending(self, status="pending"):
+    def add_to_pending(self, status="pending", auto_start=False):
         """add missing videos to pending list"""
         self.get_channels()
         bulk_list = []
@@ -252,7 +252,13 @@ class PendingList(PendingIndex):
             if not video_details:
                 continue

-            video_details["status"] = status
+            video_details.update(
+                {
+                    "status": status,
+                    "auto_start": auto_start,
+                }
+            )
+
             action = {"create": {"_id": youtube_id, "_index": "ta_download"}}
             bulk_list.append(json.dumps(action))
             bulk_list.append(json.dumps(video_details))
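Note (not part of the commit): each queued video is still written as a
create-action/document pair for the bulk API; the only change is that the
document now carries auto_start next to status. An illustrative pair, where
every field other than status and auto_start is made up:

import json

youtube_id = "dQw4w9WgXcQ"                                   # example id
video_details = {"title": "example", "vid_type": "videos"}   # illustrative fields

video_details.update(
    {
        "status": "pending",
        "auto_start": False,
    }
)

action = {"create": {"_id": youtube_id, "_index": "ta_download"}}
bulk_list = [json.dumps(action), json.dumps(video_details)]
print("\n".join(bulk_list))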
@@ -284,6 +284,7 @@ class SubscriptionScanner:
     def __init__(self, task=False):
         self.task = task
         self.missing_videos = False
+        self.auto_start = AppConfig().config["subscriptions"].get("auto_start")

     def scan(self):
         """scan channels and playlists"""
@@ -357,6 +357,9 @@
             },
             "vid_type": {
                 "type": "keyword"
+            },
+            "auto_start": {
+                "type": "boolean"
             }
         },
         "expected_set": {
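Note (not part of the commit): with auto_start mapped as a boolean, queued
items carrying the flag can be selected with an ordinary term query. Which
component consumes the field is outside this diff; the sketch below only shows
the query shape, with the same placeholder host and credentials as above:

import requests

ES_URL = "http://localhost:9200"   # assumption
AUTH = ("elastic", "changeme")     # assumption

query = {
    "query": {
        "bool": {
            "must": [
                {"term": {"status": {"value": "pending"}}},
                {"term": {"auto_start": {"value": True}}},
            ]
        }
    }
}

response = requests.get(
    f"{ES_URL}/ta_download/_search", json=query, auth=AUTH, timeout=10
)
print(response.json()["hits"]["total"])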
@@ -171,10 +171,12 @@ def update_subscribed(self):
         return

     manager.init(self)
-    missing_videos = SubscriptionScanner(task=self).scan()
+    handler = SubscriptionScanner(task=self)
+    missing_videos = handler.scan()
+    auto_start = handler.auto_start
     if missing_videos:
         print(missing_videos)
-        extrac_dl.delay(missing_videos)
+        extrac_dl.delay(missing_videos, auto_start=auto_start)


 @shared_task(name="download_pending", bind=True, base=BaseTask)
@@ -194,12 +196,12 @@ def download_pending(self, from_queue=True):


 @shared_task(name="extract_download", bind=True, base=BaseTask)
-def extrac_dl(self, youtube_ids):
+def extrac_dl(self, youtube_ids, auto_start=False):
     """parse list passed and add to pending"""
     TaskManager().init(self)
     pending_handler = PendingList(youtube_ids=youtube_ids, task=self)
     pending_handler.parse_url_list()
-    pending_handler.add_to_pending()
+    pending_handler.add_to_pending(auto_start=auto_start)


 @shared_task(bind=True, name="check_reindex", base=BaseTask)
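Note (not part of the commit): the flag now flows from config["subscriptions"]["auto_start"]
through SubscriptionScanner.auto_start into extrac_dl and on to
PendingList.add_to_pending. A minimal usage sketch for queueing ids manually;
the module path is an assumption, the task signature matches the diff above:

from home.tasks import extrac_dl   # assumption: module path of the task

# queue two example video ids with the auto_start flag set
extrac_dl.delay(["dQw4w9WgXcQ", "9bZkp7q19f0"], auto_start=True)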