Allow each site to set its own request timeout
@@ -27,6 +27,7 @@ class MTorrentSpider:
     _searchurl = "%sapi/torrent/search"
     _downloadurl = "%sapi/torrent/genDlToken"
     _pageurl = "%sdetail/%s"
+    _timeout = 15

     # Movie categories
     _movie_category = ['401', '419', '420', '421', '439', '405', '404']
@@ -62,6 +63,7 @@ class MTorrentSpider:
         self._ua = indexer.get('ua')
         self._apikey = indexer.get('apikey')
         self._token = indexer.get('token')
+        self._timeout = indexer.get('timeout') or 15

     def search(self, keyword: str, mtype: MediaType = None, page: int = 0) -> Tuple[bool, List[dict]]:
         """
@@ -92,7 +94,7 @@ class MTorrentSpider:
             },
             proxies=self._proxy,
             referer=f"{self._domain}browse",
-            timeout=15
+            timeout=self._timeout
         ).post_res(url=self._searchurl, json=params)
         torrents = []
         if res and res.status_code == 200:
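The change above follows the pattern used throughout this commit: each spider gains a class-level 15-second default, overrides it in __init__ from the indexer configuration, and then passes self._timeout to the request call instead of a hard-coded 15. A minimal standalone sketch of that pattern, assuming a plain dict for the indexer config (the real spiders receive a ruamel.yaml CommentedMap and issue requests through the project's RequestUtils wrapper):

# Minimal sketch of the per-site timeout pattern, assuming a plain dict config.
# The real spiders receive a CommentedMap and go through RequestUtils.
class TimeoutAwareSpider:
    _timeout = 15  # class-level default, mirroring the new spider attribute

    def __init__(self, indexer: dict):
        # a falsy or missing 'timeout' entry falls back to the 15-second default
        self._timeout = indexer.get('timeout') or 15

    def request_kwargs(self) -> dict:
        # the search calls now pass self._timeout instead of a literal 15
        return {"timeout": self._timeout}


print(TimeoutAwareSpider({'timeout': 60}).request_kwargs())  # {'timeout': 60}
print(TimeoutAwareSpider({}).request_kwargs())               # {'timeout': 15}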
@@ -63,8 +63,8 @@ class TorrentSpider:
     torrents_info: dict = {}
     # Torrent list
     torrents_info_array: list = []
-    # Search timeout, default: 30 seconds
-    _timeout = 30
+    # Search timeout, default: 15 seconds
+    _timeout = 15

     def __init__(self,
                  indexer: CommentedMap,
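The TorrentSpider hunk only lowers the class default from 30 to 15 seconds. The `indexer.get('timeout') or 15` expression used by the other spiders treats any falsy value (missing key, None, 0) the same as an absent setting; a quick sketch of that behaviour:

# Behaviour of the `indexer.get('timeout') or 15` fallback:
# any falsy value resolves to the 15-second default.
for cfg in ({}, {'timeout': None}, {'timeout': 0}, {'timeout': 45}):
    print(cfg, '->', cfg.get('timeout') or 15)
# prints 15, 15, 15 and 45 respectively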
@@ -18,6 +18,7 @@ class TNodeSpider:
     _ua = None
     _token = None
     _size = 100
+    _timeout = 15
     _searchurl = "%sapi/torrent/advancedSearch"
     _downloadurl = "%sapi/torrent/download/%s"
     _pageurl = "%storrent/info/%s"
@@ -32,6 +33,7 @@ class TNodeSpider:
         self._proxy = settings.PROXY
         self._cookie = indexer.get('cookie')
         self._ua = indexer.get('ua')
+        self._timeout = indexer.get('timeout') or 15
         self.init_config()

     def init_config(self):
@@ -43,7 +45,7 @@ class TNodeSpider:
         res = RequestUtils(ua=self._ua,
                            cookies=self._cookie,
                            proxies=self._proxy,
-                           timeout=15).get_res(url=self._domain)
+                           timeout=self._timeout).get_res(url=self._domain)
         if res and res.status_code == 200:
             csrf_token = re.search(r'<meta name="x-csrf-token" content="(.+?)">', res.text)
             if csrf_token:
@@ -77,7 +79,7 @@ class TNodeSpider:
             },
             cookies=self._cookie,
             proxies=self._proxy,
-            timeout=15
+            timeout=self._timeout
         ).post_res(url=self._searchurl, json=params)
         torrents = []
         if res and res.status_code == 200:
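TNodeSpider now applies the configured timeout in two places: the init_config() GET that scrapes the x-csrf-token meta tag, and the advancedSearch POST. A standalone sketch of the token fetch with a per-site timeout, using the requests library in place of the project's RequestUtils wrapper (function name and header handling are illustrative, not the project's code):

# Standalone sketch of the CSRF-token fetch with a configurable timeout.
# Uses `requests` instead of RequestUtils; names here are illustrative only.
import re
from typing import Optional

import requests


def fetch_csrf_token(domain: str, ua: str, cookie: str, timeout: int = 15) -> Optional[str]:
    res = requests.get(domain,
                       headers={"User-Agent": ua, "Cookie": cookie},
                       timeout=timeout)  # per-site value instead of a hard-coded 15
    if res.status_code == 200:
        match = re.search(r'<meta name="x-csrf-token" content="(.+?)">', res.text)
        if match:
            return match.group(1)
    return None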
@@ -17,11 +17,13 @@ class TorrentLeech:
     _browseurl = "%storrents/browse/list/page/2%s"
     _downloadurl = "%sdownload/%s/%s"
     _pageurl = "%storrent/%s"
+    _timeout = 15

     def __init__(self, indexer: CommentedMap):
         self._indexer = indexer
         if indexer.get('proxy'):
             self._proxy = settings.PROXY
+        self._timeout = indexer.get('timeout') or 15

     def search(self, keyword: str, page: int = 0) -> Tuple[bool, List[dict]]:

@@ -40,7 +42,7 @@ class TorrentLeech:
             },
             cookies=self._indexer.get('cookie'),
             proxies=self._proxy,
-            timeout=15
+            timeout=self._timeout
         ).get_res(url)
         torrents = []
         if res and res.status_code == 200:
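Since every spider reads the value from the indexer mapping passed to its constructor, a site entry can now override the 15-second default individually. A short usage example with made-up indexer values (the keys mirror the ones read in the diffs above; the concrete values are hypothetical):

# Hypothetical indexer mappings; keys mirror those read in the diffs above,
# concrete values are made up for illustration.
slow_site = {'ua': 'Mozilla/5.0', 'cookie': 'uid=1; pass=abc', 'timeout': 60}
default_site = {'ua': 'Mozilla/5.0', 'cookie': 'uid=2; pass=def'}  # no 'timeout' key

for indexer in (slow_site, default_site):
    timeout = indexer.get('timeout') or 15  # same fallback as the spiders
    print(timeout)  # 60, then 15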