mirror of https://github.com/tubearchivist/tubearchivist-frontend.git
synced 2024-11-22 11:50:14 +00:00
reinvent SearchProcess class for API endpoints
This commit is contained in:
parent cdc617c382
commit 80af255f25
0    tubearchivist/api/src/__init__.py    Normal file
87   tubearchivist/api/src/search_processor.py    Normal file
tubearchivist/api/src/search_processor.py
@@ -0,0 +1,87 @@
"""
Functionality:
- processing search results for frontend
- this is duplicated code from home.src.frontend.searching.SearchHandler
"""

import urllib.parse

from home.src.download.thumbnails import ThumbManager
from home.src.ta.helper import date_praser


class SearchProcess:
    """process search results"""

    def __init__(self, response):
        self.response = response
        self.processed = False

    def process(self):
        """detect type and process"""
        if "_source" in self.response.keys():
            # single document lookup
            self.processed = self._process_result(self.response)

        elif "hits" in self.response.keys():
            # multiple hits from a search query
            self.processed = []
            all_sources = self.response["hits"]["hits"]
            for result in all_sources:
                self.processed.append(self._process_result(result))

        return self.processed

    def _process_result(self, result):
        """detect which type of data to process"""
        index = result["_index"]
        document_id = result["_id"]
        processed = False
        if index == "ta_video":
            # process single video
            print(f"{document_id}: processing video")
            processed = self._process_video(result["_source"])
        if index == "ta_channel":
            # process single channel
            print(f"{document_id}: processing channel")
            processed = self._process_channel(result["_source"])

        return processed

    @staticmethod
    def _process_channel(channel_dict):
        """run on single channel"""
        channel_id = channel_dict["channel_id"]
        art_base = f"/cache/channels/{channel_id}"
        date_str = date_praser(channel_dict["channel_last_refresh"])
        channel_dict.update(
            {
                "channel_last_refresh": date_str,
                "channel_banner_url": f"{art_base}_banner.jpg",
                "channel_thumb_url": f"{art_base}_thumb.jpg",
                "channel_tvart_url": False,
            }
        )

        return dict(sorted(channel_dict.items()))

    def _process_video(self, video_dict):
        """run on single video dict"""
        video_id = video_dict["youtube_id"]
        media_url = urllib.parse.quote(video_dict["media_url"])
        vid_last_refresh = date_praser(video_dict["vid_last_refresh"])
        published = date_praser(video_dict["published"])
        vid_thumb_url = ThumbManager().vid_thumb_path(video_id)
        channel = self._process_channel(video_dict["channel"])

        video_dict.update(
            {
                "channel": channel,
                "media_url": media_url,
                "vid_last_refresh": vid_last_refresh,
                "published": published,
                "vid_thumb_url": vid_thumb_url,
            }
        )

        return dict(sorted(video_dict.items()))
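SearchProcess accepts either of the two Elasticsearch response shapes the API deals with: a single-document lookup (top-level "_source") or a search result ("hits"). A minimal usage sketch, assuming it runs inside the TubeArchivist backend where the api.src and home.src imports resolve; the document fields below are placeholders, not real index data:

from api.src.search_processor import SearchProcess

# single-document response, as returned by a _doc lookup (placeholder data)
single = {
    "_index": "ta_channel",
    "_id": "UC-placeholder",
    "_source": {
        "channel_id": "UC-placeholder",
        "channel_last_refresh": 1640995200,  # placeholder refresh date
        "channel_name": "example channel",
    },
}
print(SearchProcess(single).process())  # one processed channel dict

# search response, as returned by a _search query
multiple = {"hits": {"hits": [single]}}
print(SearchProcess(multiple).process())  # list of processed dicts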
@@ -1,5 +1,6 @@
 """all API views"""
 
+from api.src.search_processor import SearchProcess
 from home.src.download.thumbnails import ThumbManager
 from home.src.es.connect import ElasticWrap
 from home.src.ta.config import AppConfig
@@ -36,7 +37,7 @@ class ApiBaseView(APIView):
         print(path)
         response, status_code = ElasticWrap(path).get()
         try:
-            self.response["data"] = response["_source"]
+            self.response["data"] = SearchProcess(response).process()
         except KeyError:
             print(f"item not found: {document_id}")
             self.response["data"] = False
@@ -69,8 +70,7 @@ class ApiBaseView(APIView):
         """get a list of results"""
         print(self.search_base)
         response, status_code = ElasticWrap(self.search_base).get(data=data)
-        all_hits = response["hits"]["hits"]
-        self.response["data"] = [i["_source"] for i in all_hits]
+        self.response["data"] = SearchProcess(response).process()
         self.status_code = status_code
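With this change, both the single-document and the list lookups in ApiBaseView return frontend-ready data through the same code path instead of raw "_source" dicts. A hedged sketch of the combined flow, e.g. from a Django shell inside the backend; the index paths and query body are illustrative assumptions:

from api.src.search_processor import SearchProcess
from home.src.es.connect import ElasticWrap

# list-style lookup, mirroring the "get a list of results" path
data = {"query": {"match_all": {}}}
response, _ = ElasticWrap("ta_video/_search").get(data=data)
videos = SearchProcess(response).process()  # list of processed video dicts

# single-document lookup, mirroring the _doc path
response, _ = ElasticWrap("ta_video/_doc/placeholder-id").get()
video = SearchProcess(response).process()  # processed dict, or False if not handled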