extend cookie functionality, #build
changes:
- integrate watch later and liked videos playlist
- merges sponsorblock state issue #236
- lots of bug fixes
commit 6b3ff009da
@ -32,6 +32,27 @@ Additional settings passed to yt-dlp.
- **Source Settings**: User-created subtitles are provided by the uploader and are usually the video script. Auto-generated subtitles come from YouTube; quality varies, particularly for auto-translated tracks.
- **Index Settings**: Enabling subtitle indexing adds the subtitle lines to Elasticsearch and makes them searchable. This increases the index size and is not recommended on low-end hardware; see the sketch below for how indexed lines can be queried.
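If indexing is enabled, the subtitle lines can be queried straight from Elasticsearch. A minimal sketch, assuming an index called `ta_subtitle` with `subtitle_line`, `youtube_id` and `subtitle_start` fields (these names are assumptions, not confirmed by this commit):

```python
import requests

# Minimal sketch: full-text search over indexed subtitle lines.
# The index name "ta_subtitle" and the field names below are assumptions;
# adjust to your setup and add auth if your Elasticsearch requires it.
es_url = "http://localhost:9200"
query = {
    "query": {"match": {"subtitle_line": "search term"}},
    "_source": ["youtube_id", "subtitle_start"],
    "size": 10,
}
response = requests.get(f"{es_url}/ta_subtitle/_search", json=query, timeout=10)
for hit in response.json()["hits"]["hits"]:
    print(hit["_score"], hit["_source"])
```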

## Cookie

Importing your YouTube cookie into Tube Archivist allows yt-dlp to bypass age restrictions and gives access to private videos as well as your *watch later* and *liked videos* playlists.

### Security concerns

Cookies store your session and contain the access token to your Google account. Treat that file with the utmost care, as you would any other password or credential. *Tube Archivist* stores your cookie in Redis and only exports the file internally within the container when it is needed.
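The task changes further down in this commit show how that works: the cookie file is only made available while a yt-dlp job runs and is hidden again afterwards. A minimal sketch of that pattern (`run_with_cookie` and `job` are hypothetical helpers for illustration):

```python
from home.src.download.yt_cookie import CookieHandler

def run_with_cookie(config, job):
    """hypothetical helper: expose the cookie file only for the duration of a job"""
    if config["downloads"]["cookie_import"]:
        CookieHandler().use()  # make the stored cookie available as a file
    try:
        job()  # e.g. a yt-dlp download or reindex run
    finally:
        if config["downloads"]["cookie_import"]:
            CookieHandler().hide()  # hide the cookie file again
```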

### Export your cookie

- Install the **Cookies.txt** addon for [Chrome](https://chrome.google.com/webstore/detail/get-cookiestxt/bgaddhkoddajcdgocldbbfleckgcbcid) or [Firefox](https://addons.mozilla.org/firefox/addon/cookies-txt).
- Visit YouTube and log in with whichever YouTube account you wish to use to generate the cookies.
- Click on the extension icon in the toolbar; it will drop down showing the active cookies for YouTube.
- Click *Export* to export the cookies; the default filename is *cookies.google.txt*.

### Import your cookie

Place the file *cookies.google.txt* into the */cache/import* folder of Tube Archivist and enable the cookie import. Once you click *Update Application Configurations* to save your changes, your cookie will be imported and stored internally.
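A minimal sketch of dropping the exported file into place from the host (the host-side path is an assumption and depends on how you mapped the */cache* volume):

```python
import shutil

# Copy the exported cookie into the Tube Archivist import folder.
# "volumes/tubearchivist/cache" is an assumed host path for the /cache volume.
shutil.copy("cookies.google.txt", "volumes/tubearchivist/cache/import/")
```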

### Use your cookie

Once imported, in addition to the advantages above, your [Watch Later](https://www.youtube.com/playlist?list=WL) and [Liked Videos](https://www.youtube.com/playlist?list=LL) become regular playlists you can download and subscribe to like any other [playlist](Playlists).
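As a quick check outside of Tube Archivist, the exported cookie also lets yt-dlp list those private playlists directly. A minimal sketch, assuming the *cookies.google.txt* file from the export step above:

```python
from yt_dlp import YoutubeDL

# List the private "Watch Later" playlist using the exported cookie.
opts = {
    "cookiefile": "cookies.google.txt",  # file exported in the step above
    "extract_flat": True,  # list entries only, don't resolve every video
    "skip_download": True,
}
with YoutubeDL(opts) as ydl:
    info = ydl.extract_info("https://www.youtube.com/playlist?list=WL", download=False)

for entry in info["entries"]:
    print(entry["id"], entry.get("title"))
```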

### Limitation

There is only one cookie per Tube Archivist instance; it is shared between all users.

## Integrations

All third-party integrations of Tube Archivist will **always** be *opt in*.

- **API**: Your access token for the Tube Archivist API; see the sketch below for an example request.
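A minimal sketch of calling the API with that token (the base URL, endpoint path and `Token` header scheme are assumptions here; check the API documentation for the exact routes):

```python
import requests

# Hypothetical example request against the Tube Archivist API.
ta_url = "http://localhost:8000"  # assumed base URL of your instance
token = "xxxxxxxxxxxx"  # access token from the settings page

headers = {"Authorization": f"Token {token}"}
response = requests.get(f"{ta_url}/api/video/", headers=headers, timeout=10)
print(response.status_code, response.json())
```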
@ -103,7 +103,7 @@ class PendingInteract:
def delete_item(self):
"""delete single item from pending"""
path = f"ta_download/_doc/{self.video_id}"
_, _ = ElasticWrap(path).delete()
_, _ = ElasticWrap(path).delete(refresh=True)

def delete_by_status(self):
"""delete all matching item by status"""
@ -128,6 +128,7 @@ class PendingList(PendingIndex):
"noplaylist": True,
"writethumbnail": True,
"simulate": True,
"socket_timeout": 3,
}

def __init__(self, youtube_ids=False):
@ -168,7 +168,7 @@ class PlaylistSubscription:
def channel_validate(channel_id):
"""make sure channel of playlist is there"""
channel = YoutubeChannel(channel_id)
channel.build_json()
channel.build_json(upload=True)

@staticmethod
def change_subscribe(playlist_id, subscribe_status):
@ -296,6 +296,7 @@ class ThumbManager:
def get_thumb_list(self):
"""get list of mediafiles and matching thumbnails"""
pending = queue.PendingList()
pending.get_download()
pending.get_indexed()

video_list = []
@ -41,6 +41,7 @@ class DownloadPostProcess:
self.auto_delete_all()
self.auto_delete_overwrites()
self.validate_playlists()
self.clear_cookie()

def auto_delete_all(self):
"""handle auto delete"""
@ -140,6 +141,11 @@ class DownloadPostProcess:
else:
RedisArchivist().set_message("message:download", mess_dict)

def clear_cookie(self):
"""hide cookie file"""
if self.download.config["downloads"]["cookie_import"]:
CookieHandler().hide()


class VideoDownloader:
"""
@ -281,6 +287,7 @@ class VideoDownloader:
"writethumbnail": False,
"noplaylist": True,
"check_formats": "selected",
"socket_timeout": 3,
}

def _build_obs_user(self):
@ -75,8 +75,10 @@ class ElasticWrap:
return response.json(), response.status_code

def delete(self, data=False):
def delete(self, data=False, refresh=False):
"""delete document from es"""
if refresh:
self.url = f"{self.url}/?refresh=true"
if data:
response = requests.delete(self.url, json=data, auth=self.auth)
else:
@ -19,7 +19,6 @@ from home.src.ta.ta_redis import RedisArchivist, RedisQueue
from home.tasks import (
download_pending,
download_single,
extrac_dl,
index_channel_playlists,
kill_dl,
re_sync_thumbs,
@ -111,7 +110,7 @@ class PostData:
def _ignore(self):
"""ignore from download queue"""
video_id = self.exec_val
print(f"ignore video {video_id}")
print(f"{video_id}: ignore video from download queue")
PendingInteract(video_id=video_id, status="ignore").update_status()
# also clear from redis queue
RedisQueue().clear_item(video_id)
@ -123,7 +122,7 @@ class PostData:
print("download pending")
running = download_pending.delay()
task_id = running.id
print("set task id: " + task_id)
print(f"{task_id}: set task id")
RedisArchivist().set_message("dl_queue_id", task_id, expire=False)
return {"success": True}
@ -146,7 +145,7 @@ class PostData:
def _unsubscribe(self):
"""unsubscribe from channels or playlists"""
id_unsub = self.exec_val
print("unsubscribe from " + id_unsub)
print(f"{id_unsub}: unsubscribe")
to_unsub_list = UrlListParser(id_unsub).process_list()
for to_unsub in to_unsub_list:
unsub_type = to_unsub["type"]
@ -167,7 +166,7 @@ class PostData:
def _subscribe(self):
"""subscribe to channel or playlist, called from js buttons"""
id_sub = self.exec_val
print("subscribe to " + id_sub)
print(f"{id_sub}: subscribe")
subscribe_to.delay(id_sub)
return {"success": True}
@ -203,7 +202,7 @@ class PostData:
def _dlnow(self):
"""start downloading single vid now"""
youtube_id = self.exec_val
print("downloading: " + youtube_id)
print(f"{youtube_id}: downloading now")
running = download_single.delay(youtube_id=youtube_id)
task_id = running.id
print("set task id: " + task_id)
@ -222,17 +221,15 @@ class PostData:
def _forget_ignore(self):
"""delete from ta_download index"""
video_id = self.exec_val
print(f"forgetting from download index: {video_id}")
print(f"{video_id}: forget from download")
PendingInteract(video_id=video_id).delete_item()
return {"success": True}

def _add_single(self):
"""add single youtube_id to download queue"""
video_id = self.exec_val
print(f"add vid to dl queue: {video_id}")
PendingInteract(video_id=video_id).delete_item()
video_ids = UrlListParser(video_id).process_list()
extrac_dl.delay(video_ids)
print(f"{video_id}: add single vid to download queue")
PendingInteract(video_id=video_id, status="pending").update_status()
return {"success": True}

def _delete_queue(self):
@ -12,6 +12,7 @@ import shutil
import subprocess

from home.src.download.queue import PendingList
from home.src.download.yt_cookie import CookieHandler
from home.src.download.yt_dlp_handler import VideoDownloader
from home.src.es.connect import ElasticWrap
from home.src.index.reindex import Reindex
@ -308,6 +309,12 @@ def scan_filesystem():
def reindex_old_documents():
"""daily refresh of old documents"""
handler = Reindex()
handler.check_outdated()
handler.reindex()
RedisArchivist().set_message("last_reindex", handler.now, expire=False)
if handler.config["downloads"]["cookie_import"]:
CookieHandler().use()
try:
handler.check_outdated()
handler.reindex()
RedisArchivist().set_message("last_reindex", handler.now, expire=False)
finally:
if handler.config["downloads"]["cookie_import"]:
CookieHandler().hide()
@ -23,6 +23,7 @@ class YouTubeItem:
"default_search": "ytsearch",
"skip_download": True,
"check_formats": "selected",
"socket_timeout": 3,
"noplaylist": True,
}
@ -85,7 +86,7 @@ class YouTubeItem:
def del_in_es(self):
"""delete item from elastic search"""
print(f"{self.youtube_id}: delete from es")
_, _ = ElasticWrap(self.es_path).delete()
_, _ = ElasticWrap(self.es_path).delete(refresh=True)


class Pagination:
@ -173,7 +173,7 @@ class UrlListParser:
id_type = "video"
elif str_len == 24:
id_type = "channel"
elif str_len in [34, 18]:
elif str_len in [34, 18] or id_str in ["LL", "WL"]:
id_type = "playlist"
else:
# unable to parse
@ -350,7 +350,10 @@ function createPlayer(button) {
`;
}
}
} else {
sponsorBlock = null;
}

var videoProgress = getVideoProgress(videoId).position;
var videoName = videoData.data.title;