fix: optimize scheduled service scheduling
app/scheduler.py
@@ -1,10 +1,12 @@
 import logging
 from datetime import datetime, timedelta
+from typing import List

 import pytz
 from apscheduler.executors.pool import ThreadPoolExecutor
 from apscheduler.schedulers.background import BackgroundScheduler

+from app import schemas
 from app.chain import ChainBase
 from app.chain.cookiecloud import CookieCloudChain
 from app.chain.mediaserver import MediaServerChain
@@ -40,65 +42,153 @@ class Scheduler(metaclass=Singleton):
     def __init__(self):
         # Database connection
         self._db = SessionFactory()
+        # Running state of each scheduled job
+        self._jobs = {
+            "cookiecloud": {
+                "func": CookieCloudChain(self._db).process,
+                "running": False,
+            },
+            "mediaserver_sync": {
+                "func": MediaServerChain(self._db).sync,
+                "running": False,
+            },
+            "subscribe_tmdb": {
+                "func": SubscribeChain(self._db).check,
+                "running": False,
+            },
+            "subscribe_search": {
+                "func": SubscribeChain(self._db).search,
+                "running": False,
+            },
+            "subscribe_refresh": {
+                "func": SubscribeChain(self._db).refresh,
+                "running": False,
+            },
+            "transfer": {
+                "func": TransferChain(self._db).process,
+                "running": False,
+            }
+        }
+
         # Do not start scheduled jobs in debug mode
         if settings.DEV:
             return

         # Scheduled CookieCloud sync
         if settings.COOKIECLOUD_INTERVAL:
-            self._scheduler.add_job(CookieCloudChain(self._db).process,
-                                    "interval",
-                                    minutes=settings.COOKIECLOUD_INTERVAL,
-                                    next_run_time=datetime.now(pytz.timezone(settings.TZ)) + timedelta(minutes=1),
-                                    id="cookiecloud",
-                                    name="同步CookieCloud站点")
+            self._scheduler.add_job(
+                self.start,
+                "interval",
+                id="cookiecloud",
+                name="同步CookieCloud站点",
+                minutes=settings.COOKIECLOUD_INTERVAL,
+                next_run_time=datetime.now(pytz.timezone(settings.TZ)) + timedelta(minutes=1),
+                kwargs={
+                    'job_id': 'cookiecloud'
+                }
+            )

         # Media server sync
         if settings.MEDIASERVER_SYNC_INTERVAL:
-            self._scheduler.add_job(MediaServerChain(self._db).sync, "interval",
-                                    hours=settings.MEDIASERVER_SYNC_INTERVAL,
-                                    next_run_time=datetime.now(pytz.timezone(settings.TZ)) + timedelta(minutes=5),
-                                    id="mediaserver_sync",
-                                    name="同步媒体服务器")
+            self._scheduler.add_job(
+                self.start,
+                "interval",
+                id="mediaserver_sync",
+                name="同步媒体服务器",
+                hours=settings.MEDIASERVER_SYNC_INTERVAL,
+                next_run_time=datetime.now(pytz.timezone(settings.TZ)) + timedelta(minutes=5),
+                kwargs={
+                    'job_id': 'mediaserver_sync'
+                }
+            )

         # Search for newly added subscriptions (checked every 5 minutes)
-        self._scheduler.add_job(SubscribeChain(self._db).search, "interval",
-                                minutes=5, kwargs={'state': 'N'})
+        self._scheduler.add_job(
+            self.start,
+            "interval",
+            minutes=5,
+            kwargs={
+                'job_id': 'subscribe_search',
+                'state': 'N'
+            }
+        )

         # Refresh subscription TMDB metadata (every 6 hours)
-        self._scheduler.add_job(SubscribeChain(self._db).check, "interval", hours=6,
-                                id="subscribe_tmdb", name="订阅元数据更新")
+        self._scheduler.add_job(
+            self.start,
+            "interval",
+            id="subscribe_tmdb",
+            name="订阅元数据更新",
+            hours=6,
+            kwargs={
+                'job_id': 'subscribe_tmdb'
+            }
+        )

         # Search subscriptions every 24 hours
         if settings.SUBSCRIBE_SEARCH:
-            self._scheduler.add_job(SubscribeChain(self._db).search, "interval",
-                                    hours=24, kwargs={'state': 'R'},
-                                    id="subscribe_search", name="订阅搜索")
+            self._scheduler.add_job(
+                self.start,
+                "interval",
+                id="subscribe_search",
+                name="订阅搜索",
+                hours=24,
+                kwargs={
+                    'job_id': 'subscribe_search',
+                    'state': 'R'
+                }
+            )

         if settings.SUBSCRIBE_MODE == "spider":
             # Spider mode: refresh torrents from site index pages at random times
             triggers = TimerUtils.random_scheduler(num_executions=30)
             for trigger in triggers:
-                self._scheduler.add_job(SubscribeChain(self._db).refresh, "cron",
-                                        hour=trigger.hour, minute=trigger.minute,
-                                        id=f"subscribe_refresh|{trigger.hour}:{trigger.minute}",
-                                        name="订阅刷新")
+                self._scheduler.add_job(
+                    self.start,
+                    "cron",
+                    id=f"subscribe_refresh|{trigger.hour}:{trigger.minute}",
+                    name="订阅刷新",
+                    hour=trigger.hour,
+                    minute=trigger.minute,
+                    kwargs={
+                        'job_id': 'subscribe_refresh'
+                    })
         else:
             # RSS subscription mode
             if not settings.SUBSCRIBE_RSS_INTERVAL:
                 settings.SUBSCRIBE_RSS_INTERVAL = 30
             elif settings.SUBSCRIBE_RSS_INTERVAL < 5:
                 settings.SUBSCRIBE_RSS_INTERVAL = 5
-            self._scheduler.add_job(SubscribeChain(self._db).refresh, "interval",
-                                    minutes=settings.SUBSCRIBE_RSS_INTERVAL,
-                                    id="subscribe_refresh", name="订阅刷新")
+            self._scheduler.add_job(
+                self.start,
+                "interval",
+                id="subscribe_refresh",
+                name="RSS订阅刷新",
+                minutes=settings.SUBSCRIBE_RSS_INTERVAL,
+                kwargs={
+                    'job_id': 'subscribe_refresh'
+                }
+            )

         # Downloader file transfer (every 5 minutes)
         if settings.DOWNLOADER_MONITOR:
-            self._scheduler.add_job(TransferChain(self._db).process, "interval", minutes=5,
-                                    id="transfer", name="下载文件整理")
+            self._scheduler.add_job(
+                self.start,
+                "interval",
+                id="transfer",
+                name="下载文件整理",
+                minutes=5,
+                kwargs={
+                    'job_id': 'transfer'
+                }
+            )

         # Common scheduled service
-        self._scheduler.add_job(SchedulerChain(self._db).scheduler_job, "interval", minutes=10)
+        self._scheduler.add_job(
+            SchedulerChain(self._db).scheduler_job,
+            "interval",
+            minutes=10
+        )

         # Log the scheduled jobs
         logger.debug(self._scheduler.print_jobs())
@@ -106,11 +196,53 @@ class Scheduler(metaclass=Singleton):
         # Start the scheduler
         self._scheduler.start()

-    def list(self):
+    def start(self, job_id: str, *args, **kwargs):
+        """
+        Run a scheduled job
+        """
+        # Look up the registered job by its job_id
+        job = self._jobs.get(job_id)
+        if not job:
+            return
+        if job.get("running"):
+            logger.warning(f"定时任务 {job_id} 正在运行 ...")
+            return
+        self._jobs[job_id]["running"] = True
+        try:
+            job["func"](*args, **kwargs)
+        except Exception as e:
+            logger.error(f"定时任务 {job_id} 执行失败:{e}")
+        self._jobs[job_id]["running"] = False
+
+    def list(self) -> List[schemas.ScheduleInfo]:
         """
         All current jobs
         """
-        return self._scheduler.get_jobs()
+        # Scheduled jobs to return
+        schedulers = []
+        # Deduplicate by job name
+        added = []
+        jobs = self._scheduler.get_jobs()
+        # Sort by next run time
+        jobs.sort(key=lambda x: x.next_run_time)
+        for job in jobs:
+            if job.name not in added:
+                added.append(job.name)
+            else:
+                continue
+            if not self._jobs.get(job.id):
+                continue
+            # Job status
+            status = "正在运行" if self._jobs[job.id].get("running") else "等待"
+            # Next run time
+            next_run = TimerUtils.time_difference(job.next_run_time)
+            schedulers.append(schemas.ScheduleInfo(
+                id=job.id,
+                name=job.name,
+                status=status,
+                next_run=next_run
+            ))
+        return schedulers

     def stop(self):
         """