This commit is contained in:
jxxghp 2023-09-13 16:12:57 +08:00
parent 3fc267bcfa
commit 7f95bab0d5
2 changed files with 39 additions and 38 deletions

View File

@@ -377,15 +377,38 @@ class SubscribeChain(ChainBase):
""" """
订阅刷新 订阅刷新
""" """
# 触发刷新站点资源,从缓存中匹配订阅
sites = self.get_subscribed_sites()
if sites is None:
return
self.match(
self.torrentschain.refresh(sites=sites)
)
def get_subscribed_sites(self) -> Optional[List[int]]:
    """
    Collect the site ids referenced by all active subscriptions, so the
    caller can refresh only the sites that are actually needed.

    :return: [] means "refresh all configured sites" (at least one
             subscription has no site selection of its own);
             None means there are no active subscriptions at all.
    """
    # Query all subscriptions in state 'R' (running)
    subscribes = self.subscribeoper.list('R')
    if not subscribes:
        # No subscriptions -> caller should skip the refresh entirely
        return None
    ret_sites = []
    for subscribe in subscribes:
        # A subscription without an explicit site selection means
        # "all sites": short-circuit with [] so everything is refreshed.
        if not subscribe.sites:
            return []
        # subscribe.sites is a JSON-encoded list of site ids.
        # NOTE(review): ids may decode as ints here while TorrentsChain.refresh
        # compares str(indexer id) against this list - confirm the types match.
        sub_sites = json.loads(subscribe.sites)
        if sub_sites:
            ret_sites.extend(sub_sites)
    # De-duplicate across subscriptions
    if ret_sites:
        ret_sites = list(set(ret_sites))
    return ret_sites
def match(self, torrents: Dict[str, List[Context]]): def match(self, torrents: Dict[str, List[Context]]):
""" """

View File

@@ -118,27 +118,29 @@ class TorrentsChain(ChainBase, metaclass=Singleton):
return ret_torrents return ret_torrents
def refresh(self, stype: str = None, subscribes: list = None) -> Dict[str, List[Context]]: def refresh(self, stype: str = None, sites: List[int] = None) -> Dict[str, List[Context]]:
""" """
刷新站点最新资源识别并缓存起来 刷新站点最新资源识别并缓存起来
:param stype: 强制指定缓存类型spider:爬虫缓存rss:rss缓存 :param stype: 强制指定缓存类型spider:爬虫缓存rss:rss缓存
:param subscribes: 订阅 :param sites: 强制指定站点ID列表为空则读取设置的订阅站点
""" """
# 刷新类型 # 刷新类型
if not stype: if not stype:
stype = settings.SUBSCRIBE_MODE stype = settings.SUBSCRIBE_MODE
# 刷新站点
if not sites:
sites = [str(sid) for sid in (self.systemconfig.get(SystemConfigKey.RssSites) or [])]
# 读取缓存 # 读取缓存
torrents_cache = self.get_torrents() torrents_cache = self.get_torrents()
# 所有站点索引 # 所有站点索引
indexers = self.siteshelper.get_indexers() indexers = self.siteshelper.get_indexers()
# 查询订阅站点
config_indexers = self.__get_rss_sites(subscribes)
# 遍历站点缓存资源 # 遍历站点缓存资源
for indexer in indexers: for indexer in indexers:
# 未开启的站点不搜索 # 未开启的站点不刷新
if config_indexers and str(indexer.get("id")) not in config_indexers: if sites and str(indexer.get("id")) not in sites:
continue continue
domain = StringUtils.get_url_domain(indexer.get("domain")) domain = StringUtils.get_url_domain(indexer.get("domain"))
if stype == "spider": if stype == "spider":
@@ -188,40 +190,16 @@ class TorrentsChain(ChainBase, metaclass=Singleton):
del torrents del torrents
else: else:
logger.info(f'{indexer.get("name")} 没有获取到种子') logger.info(f'{indexer.get("name")} 没有获取到种子')
# 保存缓存到本地 # 保存缓存到本地
if stype == "spider": if stype == "spider":
self.save_cache(torrents_cache, self._spider_file) self.save_cache(torrents_cache, self._spider_file)
else: else:
self.save_cache(torrents_cache, self._rss_file) self.save_cache(torrents_cache, self._rss_file)
# 返回 # 返回
return torrents_cache return torrents_cache
def __get_rss_sites(self, subscribes: list = None):
    """
    Work out which RSS site ids should be refreshed for the given
    subscriptions, so unrelated sites are skipped.

    :param subscribes: subscription records; each record's ``sites`` field
                       is a list of site ids, possibly serialized as a string
    :return: list of unique site ids; falls back to the configured RSS sites
             when no subscription narrows the selection
    """
    import ast  # local import: safe literal parsing replaces eval() below

    config_indexers = []
    if subscribes:
        for subscribe in subscribes:
            # A subscription with no site selection means "all RSS sites":
            # take the configured RSS site list and stop narrowing.
            if not subscribe.sites:
                config_indexers = [str(sid) for sid in self.systemconfig.get(SystemConfigKey.RssSites) or []]
                break
            rss_sites = subscribe.sites
            if isinstance(subscribe.sites, str):
                # was eval(): literal_eval only accepts Python literals and
                # never executes code embedded in the stored string
                rss_sites = ast.literal_eval(subscribe.sites)
            for site in rss_sites:
                config_indexers.append(site)
        # De-duplicate across subscriptions
        config_indexers = list(set(config_indexers))
    if not config_indexers:
        # Nothing narrowed the selection -> use the configured RSS sites
        config_indexers = [str(sid) for sid in self.systemconfig.get(SystemConfigKey.RssSites) or []]
    return config_indexers
def __renew_rss_url(self, domain: str, site: dict): def __renew_rss_url(self, domain: str, site: dict):
""" """
保留原配置生成新的rss地址 保留原配置生成新的rss地址