"""all API views"""

from api.src.search_processor import SearchProcess
from api.src.task_processor import TaskHandler
from home.src.download.queue import PendingInteract
from home.src.download.yt_dlp_base import CookieHandler
from home.src.es.connect import ElasticWrap
from home.src.index.generic import Pagination
from home.src.index.video import SponsorBlock
from home.src.ta.config import AppConfig
from home.src.ta.helper import UrlListParser
from home.src.ta.ta_redis import RedisArchivist, RedisQueue
from home.tasks import extrac_dl, subscribe_to
from rest_framework.authentication import (
    SessionAuthentication,
    TokenAuthentication,
)
from rest_framework.authtoken.models import Token
from rest_framework.authtoken.views import ObtainAuthToken
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView


class ApiBaseView(APIView):
    """base view to inherit from"""

    authentication_classes = [SessionAuthentication, TokenAuthentication]
    permission_classes = [IsAuthenticated]
    search_base = False
    data = False

    def __init__(self):
        super().__init__()
        self.response = {"data": False, "config": AppConfig().config}
        self.data = {"query": {"match_all": {}}}
        self.status_code = False
        self.context = False
        self.pagination_handler = False

    def get_document(self, document_id):
        """get single document from es"""
        path = f"{self.search_base}{document_id}"
        print(path)
        response, status_code = ElasticWrap(path).get()
        try:
            self.response["data"] = SearchProcess(response).process()
        except KeyError:
            print(f"item not found: {document_id}")
            self.response["data"] = False
        self.status_code = status_code

    def initiate_pagination(self, request):
        """set initial pagination values"""
        user_id = request.user.id
        page_get = int(request.GET.get("page", 0))
        self.pagination_handler = Pagination(page_get, user_id)
        self.data.update(
            {
                "size": self.pagination_handler.pagination["page_size"],
                "from": self.pagination_handler.pagination["page_from"],
            }
        )

    def get_document_list(self, request):
        """get a list of results"""
        print(self.search_base)
        self.initiate_pagination(request)
        es_handler = ElasticWrap(self.search_base)
        response, status_code = es_handler.get(data=self.data)
        self.response["data"] = SearchProcess(response).process()
        if self.response["data"]:
            self.status_code = status_code
        else:
            self.status_code = 404

        self.pagination_handler.validate(response["hits"]["total"]["value"])
        self.response["paginate"] = self.pagination_handler.pagination


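# The view classes below share a common flow from ApiBaseView: set
# `search_base` to an Elasticsearch path, optionally extend `self.data` with
# a query and sort, then call `get_document()` or `get_document_list()` and
# return `self.response`. List endpoints paginate via the `?page=<n>` query
# parameter handled in `initiate_pagination()`.

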
class VideoApiView(ApiBaseView):
    """resolves to /api/video/<video_id>/
    GET: returns metadata dict of video
    """

    search_base = "ta_video/_doc/"

    def get(self, request, video_id):
        # pylint: disable=unused-argument
        """get request"""
        self.get_document(video_id)
        return Response(self.response, status=self.status_code)


class VideoApiListView(ApiBaseView):
    """resolves to /api/video/
    GET: returns list of videos
    """

    search_base = "ta_video/_search/"

    def get(self, request):
        """get request"""
        self.data.update({"sort": [{"published": {"order": "desc"}}]})
        self.get_document_list(request)

        return Response(self.response)


class VideoProgressView(ApiBaseView):
    """resolves to /api/video/<video_id>/progress/
    handle progress status for video
    """

    def get(self, request, video_id):
        """get progress for a single video"""
        user_id = request.user.id
        key = f"{user_id}:progress:{video_id}"
        video_progress = RedisArchivist().get_message(key)
        position = video_progress.get("position", 0)

        self.response = {
            "youtube_id": video_id,
            "user_id": user_id,
            "position": position,
        }
        return Response(self.response)

    def post(self, request, video_id):
        """set progress position in redis"""
        position = request.data.get("position", 0)
        key = f"{request.user.id}:progress:{video_id}"
        message = {"position": position, "youtube_id": video_id}
        RedisArchivist().set_message(key, message)
        self.response = request.data

        return Response(self.response)

    def delete(self, request, video_id):
        """delete progress position"""
        key = f"{request.user.id}:progress:{video_id}"
        RedisArchivist().del_message(key)
        self.response = {"progress-reset": video_id}

        return Response(self.response)


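# Example progress payload (sketch): a client POSTs the current playback
# position to /api/video/<video_id>/progress/, e.g. {"position": 120}; the
# value is stored in redis under the key "<user_id>:progress:<video_id>" and
# missing values fall back to 0.

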
class VideoSponsorView(ApiBaseView):
    """resolves to /api/video/<video_id>/sponsor/
    handle sponsor block integration
    """

    search_base = "ta_video/_doc/"

    def get(self, request, video_id):
        """get sponsor info"""
        # pylint: disable=unused-argument

        self.get_document(video_id)
        sponsorblock = self.response["data"].get("sponsorblock")

        return Response(sponsorblock)

    def post(self, request, video_id):
        """post verification and timestamps"""
        if "segment" in request.data:
            response, status_code = self._create_segment(request, video_id)
        elif "vote" in request.data:
            response, status_code = self._vote_on_segment(request)
        else:
            # neither segment nor vote in payload, nothing to hand off
            response, status_code = {"message": "invalid payload"}, 400

        return Response(response, status=status_code)

    @staticmethod
    def _create_segment(request, video_id):
        """create segment in API"""
        start_time = request.data["segment"]["startTime"]
        end_time = request.data["segment"]["endTime"]
        response, status_code = SponsorBlock(request.user.id).post_timestamps(
            video_id, start_time, end_time
        )

        return response, status_code

    @staticmethod
    def _vote_on_segment(request):
        """validate on existing segment"""
        user_id = request.user.id
        uuid = request.data["vote"]["uuid"]
        vote = request.data["vote"]["yourVote"]
        response, status_code = SponsorBlock(user_id).vote_on_segment(
            uuid, vote
        )

        return response, status_code


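# Expected VideoSponsorView.post payloads (sketch, keys taken from the
# handlers above): {"segment": {"startTime": 5.2, "endTime": 10.7}} to submit
# a new segment, or {"vote": {"uuid": "<segment-uuid>", "yourVote": 1}} to
# vote on an existing one.

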
class ChannelApiView(ApiBaseView):
    """resolves to /api/channel/<channel_id>/
    GET: returns metadata dict of channel
    """

    search_base = "ta_channel/_doc/"

    def get(self, request, channel_id):
        # pylint: disable=unused-argument
        """get request"""
        self.get_document(channel_id)
        return Response(self.response, status=self.status_code)


class ChannelApiListView(ApiBaseView):
    """resolves to /api/channel/
    GET: returns list of channels
    POST: edit a list of channels
    """

    search_base = "ta_channel/_search/"

    def get(self, request):
        """get request"""
        self.data.update(
            {"sort": [{"channel_name.keyword": {"order": "asc"}}]}
        )
        self.get_document_list(request)

        return Response(self.response)

    @staticmethod
    def post(request):
        """subscribe to list of channels"""
        data = request.data
        try:
            to_add = data["data"]
        except KeyError:
            message = "missing expected data key"
            print(message)
            return Response({"message": message}, status=400)

        pending = [i["channel_id"] for i in to_add if i["channel_subscribed"]]
        url_str = " ".join(pending)
        subscribe_to.delay(url_str)

        return Response(data)


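# Expected ChannelApiListView.post payload (sketch, matching the keys read
# above): {"data": [{"channel_id": "UC...", "channel_subscribed": true}]};
# subscribed channel ids are handed off to the subscribe_to task.

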
class ChannelApiVideoView(ApiBaseView):
    """resolves to /api/channel/<channel_id>/video
    GET: returns a list of videos of channel
    """

    search_base = "ta_video/_search/"

    def get(self, request, channel_id):
        """handle get request"""
        self.data.update(
            {
                "query": {
                    "term": {"channel.channel_id": {"value": channel_id}}
                },
                "sort": [{"published": {"order": "desc"}}],
            }
        )
        self.get_document_list(request)

        return Response(self.response, status=self.status_code)


class PlaylistApiListView(ApiBaseView):
    """resolves to /api/playlist/
    GET: returns list of indexed playlists
    """

    search_base = "ta_playlist/_search/"

    def get(self, request):
        """handle get request"""
        self.data.update(
            {"sort": [{"playlist_name.keyword": {"order": "asc"}}]}
        )
        self.get_document_list(request)
        return Response(self.response)


class PlaylistApiView(ApiBaseView):
    """resolves to /api/playlist/<playlist_id>/
    GET: returns metadata dict of playlist
    """

    search_base = "ta_playlist/_doc/"

    def get(self, request, playlist_id):
        # pylint: disable=unused-argument
        """get request"""
        self.get_document(playlist_id)
        return Response(self.response, status=self.status_code)


class PlaylistApiVideoView(ApiBaseView):
    """resolves to /api/playlist/<playlist_id>/video
    GET: returns list of videos in playlist
    """

    search_base = "ta_video/_search/"

    def get(self, request, playlist_id):
        """handle get request"""
        self.data["query"] = {
            "term": {"playlist.keyword": {"value": playlist_id}}
        }
        self.data.update({"sort": [{"published": {"order": "desc"}}]})

        self.get_document_list(request)
        return Response(self.response, status=self.status_code)


class DownloadApiView(ApiBaseView):
    """resolves to /api/download/<video_id>/
    GET: returns metadata dict of an item in the download queue
    POST: update status of item to pending or ignore
    DELETE: forget from download queue
    """

    search_base = "ta_download/_doc/"
    valid_status = ["pending", "ignore"]

    def get(self, request, video_id):
        # pylint: disable=unused-argument
        """get request"""
        self.get_document(video_id)
        return Response(self.response, status=self.status_code)

    def post(self, request, video_id):
        """post to video to change status"""
        item_status = request.data["status"]
        if item_status not in self.valid_status:
            message = f"{video_id}: invalid status {item_status}"
            print(message)
            return Response({"message": message}, status=400)

        print(f"{video_id}: change status to {item_status}")
        PendingInteract(video_id=video_id, status=item_status).update_status()
        RedisQueue().clear_item(video_id)

        return Response(request.data)

    @staticmethod
    def delete(request, video_id):
        # pylint: disable=unused-argument
        """delete single video from queue"""
        print(f"{video_id}: delete from queue")
        PendingInteract(video_id=video_id).delete_item()

        return Response({"success": True})


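# Expected DownloadApiView.post payload (sketch, see valid_status above):
# {"status": "pending"} to re-queue the item or {"status": "ignore"} to
# skip it.

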
class DownloadApiListView(ApiBaseView):
    """resolves to /api/download/
    GET: returns latest videos in the download queue
    POST: add a list of videos to download queue
    DELETE: remove items based on query filter
    """

    search_base = "ta_download/_search/"
    valid_filter = ["pending", "ignore"]

    def get(self, request):
        """get request"""
        query_filter = request.GET.get("filter", False)
        self.data.update({"sort": [{"timestamp": {"order": "asc"}}]})
        if query_filter:
            if query_filter not in self.valid_filter:
                message = f"invalid url query filter: {query_filter}"
                print(message)
                return Response({"message": message}, status=400)

            self.data["query"] = {"term": {"status": {"value": query_filter}}}

        self.get_document_list(request)
        return Response(self.response)

    @staticmethod
    def post(request):
        """add list of videos to download queue"""
        print(f"request meta data: {request.META}")
        data = request.data
        try:
            to_add = data["data"]
        except KeyError:
            message = "missing expected data key"
            print(message)
            return Response({"message": message}, status=400)

        pending = [i["youtube_id"] for i in to_add if i["status"] == "pending"]
        url_str = " ".join(pending)
        try:
            youtube_ids = UrlListParser(url_str).process_list()
        except ValueError:
            message = f"failed to parse: {url_str}"
            print(message)
            return Response({"message": message}, status=400)

        extrac_dl.delay(youtube_ids)

        return Response(data)

    def delete(self, request):
        """delete download queue"""
        query_filter = request.GET.get("filter", False)
        if query_filter not in self.valid_filter:
            message = f"invalid url query filter: {query_filter}"
            print(message)
            return Response({"message": message}, status=400)

        message = f"delete queue by status: {query_filter}"
        print(message)
        PendingInteract(status=query_filter).delete_by_status()

        return Response({"message": message})


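# Sketch of DownloadApiListView usage: GET and DELETE accept ?filter=pending
# or ?filter=ignore; POST expects
# {"data": [{"youtube_id": "<id or URL>", "status": "pending"}]} and passes
# the parsed ids to the extrac_dl task.

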
class PingView(ApiBaseView):
    """resolves to /api/ping/
    GET: test your connection
    """

    @staticmethod
    def get(request):
        """get pong"""
        data = {"response": "pong", "user": request.user.id}
        return Response(data)


class LoginApiView(ObtainAuthToken):
    """resolves to /api/login/
    POST: return token and user_id after successful login
    """

    def post(self, request, *args, **kwargs):
        """post data"""
        # pylint: disable=no-member
        serializer = self.serializer_class(
            data=request.data, context={"request": request}
        )
        serializer.is_valid(raise_exception=True)
        user = serializer.validated_data["user"]
        token, _ = Token.objects.get_or_create(user=user)

        print(f"returning token for user with id {user.pk}")

        return Response({"token": token.key, "user_id": user.pk})


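# Login usage sketch (assumes the default DRF ObtainAuthToken serializer):
# POST {"username": "<name>", "password": "<pass>"} to /api/login/, then send
# the returned key as "Authorization: Token <token>" on later requests.

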
class TaskApiView(ApiBaseView):
    """resolves to /api/task/
    GET: check if ongoing background task
    POST: start a new background task
    """

    @staticmethod
    def get(request):
        """handle get request"""
        # pylint: disable=unused-argument
        response = {"rescan": False, "downloading": False}
        for key in response.keys():
            response[key] = RedisArchivist().is_locked(key)

        return Response(response)

    def post(self, request):
        """handle post request"""
        data = request.data
        print(data)
        response = TaskHandler(data).run_task()

        return Response(response)


class CookieView(ApiBaseView):
    """resolves to /api/cookie/
    GET: check if cookie is enabled
    POST: verify validity of cookie
    PUT: import cookie
    """

    @staticmethod
    def get(request):
        """handle get request"""
        # pylint: disable=unused-argument
        config = AppConfig().config
        valid = RedisArchivist().get_message("cookie:valid")
        response = {"cookie_enabled": config["downloads"]["cookie_import"]}
        response.update(valid)

        return Response(response)

    @staticmethod
    def post(request):
        """handle post request"""
        # pylint: disable=unused-argument
        config = AppConfig().config
        validated = CookieHandler(config).validate()

        return Response({"cookie_validated": validated})

    @staticmethod
    def put(request):
        """handle put request"""
        # pylint: disable=unused-argument
        config = AppConfig().config
        cookie = request.data.get("cookie")
        if not cookie:
            message = "missing cookie key in request data"
            print(message)
            return Response({"message": message}, status=400)

        print(f"cookie preview:\n\n{cookie[:300]}")
        handler = CookieHandler(config)
        handler.set_cookie(cookie)
        validated = handler.validate()
        if not validated:
            handler.revoke()
            message = {"cookie_import": "fail", "cookie_validated": validated}
            print(f"cookie: {message}")
            return Response({"message": message}, status=400)

        message = {"cookie_import": "done", "cookie_validated": validated}
        return Response(message)
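

# Cookie import sketch: PUT {"cookie": "<contents of the exported cookie
# file>"} to /api/cookie/; the cookie is stored, validated, and revoked again
# if validation fails.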