commit 856f931307 (parent 726c06a963)
jxxghp 2023-06-08 07:46:31 +08:00
4 changed files with 16 additions and 10 deletions

View File

@@ -79,6 +79,7 @@ class SubscribeChain(_ChainBase):
         subscribes = self.subscribes.list(state)
         # Iterate over subscriptions
         for subscribe in subscribes:
+            logger.info(f'Start searching for subscription, title: {subscribe.name} ...')
             # If the state is N, update it to R
             if subscribe.state == 'N':
                 self.subscribes.update(subscribe.id, {'state': 'R'})
@@ -121,10 +122,18 @@ class SubscribeChain(_ChainBase):
         indexers = self.siteshelper.get_indexers()
         # Iterate over the sites' cached resources
         for indexer in indexers:
+            logger.info(f'Start refreshing site resources, site: {indexer.get("name")} ...')
             domain = StringUtils.get_url_domain(indexer.get("domain"))
             torrents: List[TorrentInfo] = self.run_module("refresh_torrents", sites=[indexer])
             if torrents:
                 self._torrents_cache[domain] = []
+                # Filter the torrents
+                result: List[TorrentInfo] = self.run_module("filter_torrents", torrent_list=torrents)
+                if result is not None:
+                    torrents = result
+                if not torrents:
+                    logger.warn(f'{indexer.get("name")} has no resources matching the filter rules')
+                    continue
                 for torrent in torrents:
                     # Identify
                     meta = MetaInfo(torrent.title, torrent.description)
@@ -147,6 +156,7 @@ class SubscribeChain(_ChainBase):
         subscribes = self.subscribes.list('R')
         # Iterate over subscriptions
         for subscribe in subscribes:
+            logger.info(f'Start matching subscription, title: {subscribe.name} ...')
             # Build metadata
             meta = MetaInfo(subscribe.name)
             meta.year = subscribe.year
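
The second hunk above treats None from "filter_torrents" as "no filter module handled the request" and an empty list as "everything was filtered out". Below is a minimal standalone sketch of that convention; apply_filter and filter_fn are hypothetical names standing in for the project's module runner, not its actual API.

from typing import Callable, List, Optional

def apply_filter(torrents: List[str],
                 filter_fn: Optional[Callable[[str], bool]] = None) -> Optional[List[str]]:
    # Stand-in for run_module("filter_torrents", ...): return None when no
    # filter is configured, otherwise the (possibly empty) filtered list.
    if filter_fn is None:
        return None
    return [t for t in torrents if filter_fn(t)]

torrents = ["Movie.2023.1080p", "Movie.2023.720p"]
result = apply_filter(torrents, filter_fn=lambda t: "2160p" in t)
if result is not None:
    torrents = result  # a filter ran, trust its output
if not torrents:
    print("no resources matching the filter rules")  # mirrors the logger.warn + continue above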

View File

@@ -12,7 +12,7 @@ console_handler = logging.StreamHandler()
 console_handler.setLevel(logging.DEBUG)
 # Create the file output handler
-file_handler = RotatingFileHandler(filename=settings.LOG_PATH / 'nasbot.log',
+file_handler = RotatingFileHandler(filename=settings.LOG_PATH / 'moviepilot.log',
                                    mode='w',
                                    maxBytes=5 * 1024 * 1024,
                                    backupCount=3,
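
For context on the rename above, here is a minimal standalone sketch of the same rotating-log setup; the logger name and file path are placeholders, not the project's settings.LOG_PATH.

import logging
from logging.handlers import RotatingFileHandler

logger = logging.getLogger("moviepilot")  # placeholder logger name
logger.setLevel(logging.DEBUG)

# Console output handler
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.DEBUG)

# File output handler: rotate at 5 MB, keep 3 backups (path is a placeholder)
file_handler = RotatingFileHandler(filename='moviepilot.log',
                                   mode='w',
                                   maxBytes=5 * 1024 * 1024,
                                   backupCount=3,
                                   encoding='utf-8')

logger.addHandler(console_handler)
logger.addHandler(file_handler)
logger.info("logging configured")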

View File

@@ -100,21 +100,17 @@ class IndexerModule(_ModuleBase):
                 mtype=mediainfo.type
             )
         except Exception as err:
-            error_flag = True
-            print(str(err))
+            logger.error(f"{site.get('name')} search error: {err}")
         # Time spent on the index search
         seconds = round((datetime.now() - start_time).seconds, 1)
-        if error_flag:
-            logger.error(f"{site.get('name')} search error occurred, took {seconds}")
-        else:
-            logger.info(f"{site.get('name')} search finished, took {seconds}")
         # Return the results
         if len(result_array) == 0:
-            logger.warn(f"{site.get('name')} no data found")
+            logger.warn(f"{site.get('name')} no data found, took {seconds}")
             return []
         else:
-            logger.warn(f"{site.get('name')} returned data: {len(result_array)}")
+            logger.warn(f"{site.get('name')} search finished, took {seconds} seconds, returned data: {len(result_array)}")
             # Merge in the site info and return as TorrentInfo
             return [TorrentInfo(site=site.get("id"),
                                 site_name=site.get("name"),
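
The hunk above drops the error_flag bookkeeping in favour of logging inside the except block and folding the elapsed time into the result log lines. A rough standalone sketch of that pattern follows; timed_search and search_fn are hypothetical names, not the module's API.

import logging
from datetime import datetime
from typing import Callable, List

logger = logging.getLogger(__name__)

def timed_search(search_fn: Callable[[], List], site_name: str) -> List:
    # Log errors where they happen, then report elapsed time with the result count
    start_time = datetime.now()
    results: List = []
    try:
        results = search_fn()
    except Exception as err:
        logger.error(f"{site_name} search error: {err}")
    seconds = round((datetime.now() - start_time).seconds, 1)
    if not results:
        logger.warning(f"{site_name} no data found, took {seconds} seconds")
        return []
    logger.warning(f"{site_name} search finished, took {seconds} seconds, returned data: {len(results)}")
    return results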

View File

@@ -113,7 +113,7 @@ class TorrentSpider(feapder.AirSpider):
         if self.domain and not str(self.domain).endswith("/"):
             self.domain = self.domain + "/"
         if indexer.get('ua'):
-            self.ua = indexer.get('ua')
+            self.ua = indexer.get('ua') or settings.USER_AGENT
         else:
             self.ua = settings.USER_AGENT
         if indexer.get('proxy'):
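
The last hunk adds a fallback to the configured User-Agent even when the indexer carries a 'ua' key, so an empty value cannot slip through. A tiny illustration of the dict-get-with-default pattern; DEFAULT_UA stands in for settings.USER_AGENT.

# DEFAULT_UA stands in for settings.USER_AGENT
DEFAULT_UA = "Mozilla/5.0"

def resolve_ua(indexer: dict) -> str:
    # Fall back to the default when 'ua' is missing, None, or an empty string
    return indexer.get('ua') or DEFAULT_UA

print(resolve_ua({}))                   # -> Mozilla/5.0
print(resolve_ua({'ua': ''}))           # -> Mozilla/5.0
print(resolve_ua({'ua': 'MyBot/1.0'}))  # -> MyBot/1.0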