From ffac57ad4d7687261fec3e6a6a00c2a90cd7a4e3 Mon Sep 17 00:00:00 2001
From: jxxghp
Date: Mon, 20 May 2024 16:55:36 +0800
Subject: [PATCH] Support YemaPT
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 app/chain/site.py               |  25 ++++++
 app/modules/indexer/__init__.py |   7 ++
 app/modules/indexer/mtorrent.py |   2 +-
 app/modules/indexer/yema.py     | 132 ++++++++++++++++++++++++++++++++
 app/utils/http.py               |   4 +
 5 files changed, 169 insertions(+), 1 deletion(-)
 create mode 100644 app/modules/indexer/yema.py

diff --git a/app/chain/site.py b/app/chain/site.py
index 1578c549..ef579c51 100644
--- a/app/chain/site.py
+++ b/app/chain/site.py
@@ -55,6 +55,7 @@ class SiteChain(ChainBase):
             "ptlsp.com": self.__indexphp_test,
             "1ptba.com": self.__indexphp_test,
             "star-space.net": self.__indexphp_test,
+            "yemapt.org": self.__yema_test,
         }
 
     def is_special_site(self, domain: str) -> bool:
@@ -133,6 +134,30 @@ class SiteChain(ChainBase):
                 return True, f"连接成功,但更新状态失败"
         return False, "鉴权已过期或无效"
 
+    @staticmethod
+    def __yema_test(site: Site) -> Tuple[bool, str]:
+        """
+        Check whether the site is logged in: yemapt
+        """
+        user_agent = site.ua or settings.USER_AGENT
+        url = f"{site.url}api/consumer/fetchSelfDetail"
+        headers = {
+            "User-Agent": user_agent,
+            "Content-Type": "application/json",
+            "Accept": "application/json, text/plain, */*",
+        }
+        res = RequestUtils(
+            headers=headers,
+            cookies=site.cookie,
+            proxies=settings.PROXY if site.proxy else None,
+            timeout=site.timeout or 15
+        ).get_res(url=url)
+        if res and res.status_code == 200:
+            user_info = res.json()
+            if user_info and user_info.get("success"):
+                return True, "连接成功"
+        return False, "Cookie已过期"
+
     def __indexphp_test(self, site: Site) -> Tuple[bool, str]:
         """
         判断站点是否已经登陆:ptlsp/1ptba
diff --git a/app/modules/indexer/__init__.py b/app/modules/indexer/__init__.py
index 28f6aa5a..5ea82f24 100644
--- a/app/modules/indexer/__init__.py
+++ b/app/modules/indexer/__init__.py
@@ -13,6 +13,7 @@ from app.modules.indexer.mtorrent import MTorrentSpider
 from app.modules.indexer.spider import TorrentSpider
 from app.modules.indexer.tnode import TNodeSpider
 from app.modules.indexer.torrentleech import TorrentLeech
+from app.modules.indexer.yema import YemaSpider
 from app.schemas.types import MediaType
 from app.utils.string import StringUtils
 
@@ -111,6 +112,12 @@ class IndexerModule(_ModuleBase):
                     mtype=mtype,
                     page=page
                 )
+            elif site.get('parser') == "Yema":
+                error_flag, result = YemaSpider(site).search(
+                    keyword=search_word,
+                    mtype=mtype,
+                    page=page
+                )
             else:
                 error_flag, result = self.__spider_search(
                     search_word=search_word,
diff --git a/app/modules/indexer/mtorrent.py b/app/modules/indexer/mtorrent.py
index 150a38d0..4d2a2a30 100644
--- a/app/modules/indexer/mtorrent.py
+++ b/app/modules/indexer/mtorrent.py
@@ -15,7 +15,7 @@ from app.utils.string import StringUtils
 
 class MTorrentSpider:
     """
-    mTorrent API,需要缓存ApiKey
+    mTorrent API
     """
     _indexerid = None
     _domain = None
diff --git a/app/modules/indexer/yema.py b/app/modules/indexer/yema.py
new file mode 100644
index 00000000..70aa6231
--- /dev/null
+++ b/app/modules/indexer/yema.py
@@ -0,0 +1,132 @@
+from typing import Tuple, List
+
+from ruamel.yaml import CommentedMap
+
+from app.core.config import settings
+from app.db.systemconfig_oper import SystemConfigOper
+from app.log import logger
+from app.schemas import MediaType
+from app.utils.http import RequestUtils
+from app.utils.string import StringUtils
+
+
+class YemaSpider:
+    """
+    YemaPT API
+    """
+    _indexerid = None
+    _domain = None
+    _name = ""
+    _proxy = None
+    _cookie = None
+    _ua = None
+    _size = 40
+    _searchurl = "%sapi/torrent/fetchCategoryOpenTorrentList"
+    _downloadurl = "%sapi/torrent/download?id=%s"
+    _pageurl = "%s#/torrent/detail/%s/"
+    _timeout = 15
+
+    # Category IDs
+    _movie_category = 4
+    _tv_category = 5
+
+    def __init__(self, indexer: CommentedMap):
+        self.systemconfig = SystemConfigOper()
+        if indexer:
+            self._indexerid = indexer.get('id')
+            self._domain = indexer.get('domain')
+            self._searchurl = self._searchurl % self._domain
+            self._name = indexer.get('name')
+            if indexer.get('proxy'):
+                self._proxy = settings.PROXY
+            self._cookie = indexer.get('cookie')
+            self._ua = indexer.get('ua')
+            self._timeout = indexer.get('timeout') or 15
+
+    def search(self, keyword: str, mtype: MediaType = None, page: int = 0) -> Tuple[bool, List[dict]]:
+        """
+        Search
+        """
+        if not mtype:
+            categoryId = self._movie_category
+        elif mtype == MediaType.TV:
+            categoryId = self._tv_category
+        else:
+            categoryId = self._movie_category
+        params = {
+            "categoryId": categoryId,
+            "pageParam": {
+                "current": page + 1,
+                "pageSize": self._size,
+                "total": self._size
+            },
+            "sorter": {}
+        }
+        if keyword:
+            params.update({
+                "keyword": keyword,
+            })
+        res = RequestUtils(
+            headers={
+                "Content-Type": "application/json",
+                "User-Agent": f"{self._ua}",
+                "Accept": "application/json, text/plain, */*"
+            },
+            cookies=self._cookie,
+            proxies=self._proxy,
+            referer=f"{self._domain}",
+            timeout=self._timeout
+        ).post_res(url=self._searchurl, json=params)
+        torrents = []
+        if res and res.status_code == 200:
+            results = res.json().get('data', []) or []
+            for result in results:
+                category_value = result.get('categoryId')
+                if category_value == self._tv_category:
+                    category = MediaType.TV.value
+                elif category_value == self._movie_category:
+                    category = MediaType.MOVIE.value
+                else:
+                    category = MediaType.UNKNOWN.value
+                torrent = {
+                    'title': result.get('showName'),
+                    'description': result.get('shortDesc'),
+                    'enclosure': self.__get_download_url(result.get('id')),
+                    'pubdate': StringUtils.unify_datetime_str(result.get('gmtCreate')),
+                    'size': result.get('fileSize'),
+                    'seeders': result.get('seedNum'),
+                    'peers': result.get('leechNum'),
+                    'grabs': result.get('completedNum'),
+                    'downloadvolumefactor': self.__get_downloadvolumefactor(result.get('downloadPromotion')),
+                    'uploadvolumefactor': 1.0,
+                    'freedate': StringUtils.unify_datetime_str(result.get('downloadPromotionEndTime')),
+                    'page_url': self._pageurl % (self._domain, result.get('id')),
+                    'labels': [],
+                    'category': category
+                }
+                torrents.append(torrent)
+        elif res is not None:
+            logger.warn(f"{self._name} 搜索失败,错误码:{res.status_code}")
+            return True, []
+        else:
+            logger.warn(f"{self._name} 搜索失败,无法连接 {self._domain}")
+            return True, []
+        return False, torrents
+
+    @staticmethod
+    def __get_downloadvolumefactor(discount: str) -> float:
+        """
+        Get the download volume factor
+        """
+        discount_dict = {
+            "free": 0
+        }
+        if discount:
+            return discount_dict.get(discount, 1)
+        return 1
+
+    def __get_download_url(self, torrent_id: str) -> str:
+        """
+        Get the download URL
+        """
+        return self._downloadurl % (self._domain, torrent_id)
diff --git a/app/utils/http.py b/app/utils/http.py
index 2c649531..05f2cc53 100644
--- a/app/utils/http.py
+++ b/app/utils/http.py
@@ -58,6 +58,7 @@ class RequestUtils:
                                     verify=False,
                                     headers=self._headers,
                                     proxies=self._proxies,
+                                    cookies=self._cookies,
                                     timeout=self._timeout,
                                     json=json,
                                     stream=False)
@@ -67,6 +68,7 @@ class RequestUtils:
                                verify=False,
                                headers=self._headers,
                                proxies=self._proxies,
+                               cookies=self._cookies,
                               timeout=self._timeout,
                               json=json,
                               stream=False)
@@ -80,6 +82,7 @@ class RequestUtils:
                                   verify=False,
                                   headers=self._headers,
                                   proxies=self._proxies,
+                                  cookies=self._cookies,
                                   timeout=self._timeout,
                                   params=params)
             else:
@@ -87,6 +90,7 @@ class RequestUtils:
                              verify=False,
                              headers=self._headers,
                              proxies=self._proxies,
+                             cookies=self._cookies,
                              timeout=self._timeout,
                              params=params)
             return str(r.content, 'utf-8')
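
For reference, a minimal standalone sketch of the two YemaPT API calls this patch wires up: the login check (SiteChain.__yema_test) and the category search (YemaSpider.search). The endpoint paths, payload fields, and response fields are taken from the diff above; the base URL, cookie, and User-Agent values are placeholders, and plain requests calls stand in for the project's RequestUtils wrapper.

import requests

BASE_URL = "https://yemapt.org/"      # placeholder; the real value comes from the site/indexer config
COOKIES = {"session": "..."}          # placeholder login cookie (name and value are illustrative)
HEADERS = {
    "User-Agent": "Mozilla/5.0",      # placeholder UA
    "Content-Type": "application/json",
    "Accept": "application/json, text/plain, */*",
}


def check_login() -> bool:
    # Same call as SiteChain.__yema_test: GET fetchSelfDetail and check the "success" flag.
    res = requests.get(f"{BASE_URL}api/consumer/fetchSelfDetail",
                       headers=HEADERS, cookies=COOKIES, timeout=15)
    return res.status_code == 200 and bool(res.json().get("success"))


def search(keyword: str, category_id: int = 4, page: int = 0) -> list:
    # Same call as YemaSpider.search: POST fetchCategoryOpenTorrentList.
    # category_id 4 = movies, 5 = TV, mirroring the spider's constants.
    payload = {
        "categoryId": category_id,
        "pageParam": {"current": page + 1, "pageSize": 40, "total": 40},
        "sorter": {},
    }
    if keyword:
        payload["keyword"] = keyword
    res = requests.post(f"{BASE_URL}api/torrent/fetchCategoryOpenTorrentList",
                        headers=HEADERS, cookies=COOKIES, json=payload, timeout=15)
    if res.status_code != 200:
        return []
    return res.json().get("data", []) or []


if __name__ == "__main__":
    if check_login():
        for t in search("Dune"):
            print(t.get("showName"), t.get("fileSize"), t.get("seedNum"))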