Merge remote-tracking branch 'origin/main'

This commit is contained in:
thsrite 2023-07-11 09:39:45 +08:00
commit dbd410d400
24 changed files with 486 additions and 45 deletions

View File

@ -1,7 +1,7 @@
from fastapi import APIRouter
from app.api.endpoints import login, user, site, message, webhook, subscribe, \
media, douban, search, plugin, tmdb, history, system, download
media, douban, search, plugin, tmdb, history, system, download, dashboard
api_router = APIRouter()
api_router.include_router(login.router, tags=["login"])
@ -18,3 +18,4 @@ api_router.include_router(history.router, prefix="/history", tags=["history"])
api_router.include_router(system.router, prefix="/system", tags=["system"])
api_router.include_router(plugin.router, prefix="/plugin", tags=["plugin"])
api_router.include_router(download.router, prefix="/download", tags=["download"])
api_router.include_router(dashboard.router, prefix="/dashboard", tags=["dashboard"])

View File

@ -0,0 +1,115 @@
from pathlib import Path
from typing import Any, List
from fastapi import APIRouter, Depends
from requests import Session
from app import schemas
from app.chain.dashboard import DashboardChain
from app.core.config import settings
from app.core.security import verify_token
from app.db import get_db
from app.db.models.transferhistory import TransferHistory
from app.scheduler import Scheduler
from app.utils.string import StringUtils
from app.utils.system import SystemUtils
from app.utils.timer import TimerUtils
router = APIRouter()
@router.get("/statistic", summary="媒体数量统计", response_model=schemas.Statistic)
def statistic(_: schemas.TokenPayload = Depends(verify_token)) -> Any:
"""
查询媒体数量统计信息
"""
media_statistic = DashboardChain().media_statistic()
return schemas.Statistic(
movie_count=media_statistic.movie_count,
tv_count=media_statistic.tv_count,
episode_count=media_statistic.episode_count,
user_count=media_statistic.user_count
)
@router.get("/storage", summary="存储空间", response_model=schemas.Storage)
def storage(_: schemas.TokenPayload = Depends(verify_token)) -> Any:
"""
查询存储空间信息
"""
if settings.LIBRARY_PATH:
total_storage, free_storage = SystemUtils.space_usage(Path(settings.LIBRARY_PATH))
else:
total_storage, free_storage = 0, 0
return schemas.Storage(
total_storage=total_storage,
used_storage=total_storage - free_storage
)
@router.get("/processes", summary="进程信息", response_model=List[schemas.ProcessInfo])
def processes(_: schemas.TokenPayload = Depends(verify_token)) -> Any:
"""
查询进程信息
"""
return SystemUtils.processes()
@router.get("/downloader", summary="下载器信息", response_model=schemas.DownloaderInfo)
def downloader(_: schemas.TokenPayload = Depends(verify_token)) -> Any:
"""
查询下载器信息
"""
transfer_info = DashboardChain().downloader_info()
free_space = SystemUtils.free_space(Path(settings.DOWNLOAD_PATH))
return schemas.DownloaderInfo(
download_speed=transfer_info.download_speed,
upload_speed=transfer_info.upload_speed,
download_size=transfer_info.download_size,
upload_size=transfer_info.upload_size,
free_space=free_space
)
@router.get("/schedule", summary="后台服务", response_model=List[schemas.ScheduleInfo])
def schedule(_: schemas.TokenPayload = Depends(verify_token)) -> Any:
"""
查询后台服务信息
"""
# 返回计时任务
schedulers = []
# 去重
added = []
jobs = Scheduler().list()
# 按照下次运行时间排序
jobs.sort(key=lambda x: x.next_run_time)
for job in jobs:
if job.name not in added:
added.append(job.name)
else:
continue
if not StringUtils.is_chinese(job.name):
continue
next_run = TimerUtils.time_difference(job.next_run_time)
if not next_run:
status = "已停止"
else:
status = "等待" if job.pending else "运行中"
schedulers.append(schemas.ScheduleInfo(
id=job.id,
name=job.name,
status=status,
next_run=next_run
))
return schedulers
@router.get("/transfer", summary="文件整理统计", response_model=List[int])
def transfer(days: int = 7, db: Session = Depends(get_db),
_: schemas.TokenPayload = Depends(verify_token)) -> Any:
"""
查询文件整理统计信息
"""
transfer_stat = TransferHistory.statistic(db, days)
return [stat[1] for stat in transfer_stat]

19
app/chain/dashboard.py Normal file
View File

@ -0,0 +1,19 @@
from app import schemas
from app.chain import ChainBase
class DashboardChain(ChainBase):
    """
    Processing chain that collects the various dashboard statistics.
    """

    def media_statistic(self) -> schemas.Statistic:
        """
        Media-library counts, gathered via the ``media_statistic`` module hook.
        """
        return self.run_module("media_statistic")

    def downloader_info(self) -> schemas.DownloaderInfo:
        """
        Downloader transfer info, gathered via the ``downloader_info`` module hook.
        """
        return self.run_module("downloader_info")

View File

@ -32,7 +32,7 @@ class SearchChain(ChainBase):
self.systemconfig = SystemConfigOper()
self.torrenthelper = TorrentHelper()
def search_by_tmdbid(self, tmdbid: int, mtype: MediaType = None) -> Optional[List[Context]]:
def search_by_tmdbid(self, tmdbid: int, mtype: MediaType = None) -> List[Context]:
"""
根据TMDB ID搜索资源精确匹配但不过滤本地存在的资源
:param tmdbid: TMDB ID
@ -41,11 +41,11 @@ class SearchChain(ChainBase):
mediainfo = self.recognize_media(tmdbid=tmdbid, mtype=mtype)
if not mediainfo:
logger.error(f'{tmdbid} 媒体信息识别失败!')
return None
return []
results = self.process(mediainfo=mediainfo)
# 保存结果
self.systemconfig.set(SystemConfigKey.SearchResults,
pickle.dumps(results or []))
bytes_results = pickle.dumps(results)
self.systemconfig.set(SystemConfigKey.SearchResults, bytes_results)
return results
def search_by_title(self, title: str) -> List[TorrentInfo]:
@ -55,14 +55,14 @@ class SearchChain(ChainBase):
"""
logger.info(f'开始搜索资源,关键词:{title} ...')
# 搜索
return self.__search_all_sites(keyword=title)
return self.__search_all_sites(keyword=title) or []
def last_search_results(self) -> List[Context]:
"""
获取上次搜索结果
"""
results = self.systemconfig.get(SystemConfigKey.SearchResults)
if not results or not ObjectUtils.is_obj(results):
if not results:
return []
return pickle.loads(results)
@ -84,7 +84,7 @@ class SearchChain(ChainBase):
def process(self, mediainfo: MediaInfo,
keyword: str = None,
no_exists: Dict[int, Dict[int, NotExistMediaInfo]] = None) -> Optional[List[Context]]:
no_exists: Dict[int, Dict[int, NotExistMediaInfo]] = None) -> List[Context]:
"""
根据媒体信息搜索种子资源精确匹配应用过滤规则同时根据no_exists过滤本地已存在的资源
:param mediainfo: 媒体信息
@ -206,10 +206,10 @@ class SearchChain(ChainBase):
# 未开启的站点不搜索
indexer_sites = []
# 配置的索引站点
config_indexers = self.systemconfig.get(SystemConfigKey.IndexerSites) or []
config_indexers = [str(sid) for sid in self.systemconfig.get(SystemConfigKey.IndexerSites) or []]
for indexer in self.siteshelper.get_indexers():
# 检查站点索引开关
if not config_indexers or indexer.get("id") in config_indexers:
if not config_indexers or str(indexer.get("id")) in config_indexers:
# 站点流控
state, msg = self.siteshelper.check(indexer.get("domain"))
if not state:

View File

@ -272,11 +272,11 @@ class SubscribeChain(ChainBase):
# 所有站点索引
indexers = self.siteshelper.get_indexers()
# 配置的索引站点
config_indexers = self.systemconfig.get(SystemConfigKey.IndexerSites) or []
config_indexers = [str(sid) for sid in self.systemconfig.get(SystemConfigKey.IndexerSites) or []]
# 遍历站点缓存资源
for indexer in indexers:
# 未开启的站点不搜索
if config_indexers and indexer.get("id") not in config_indexers:
if config_indexers and str(indexer.get("id")) not in config_indexers:
continue
logger.info(f'开始刷新站点资源,站点:{indexer.get("name")} ...')
domain = StringUtils.get_url_domain(indexer.get("domain"))

View File

@ -134,7 +134,7 @@ class Settings(BaseSettings):
# CookieCloud端对端加密密码
COOKIECLOUD_PASSWORD: str = None
# CookieCloud同步间隔分钟
COOKIECLOUD_INTERVAL: int = 3600 * 24
COOKIECLOUD_INTERVAL: int = 60 * 24
# CookieCloud对应的浏览器UA
USER_AGENT = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/113.0.0.0 Safari/537.36 Edg/113.0.1774.57"
# 媒体库目录

View File

@ -55,9 +55,6 @@ class TorrentInfo:
# 种子优先级
pri_order: int = 0
def __getattr__(self, attribute):
return None
def __setattr__(self, name: str, value: Any):
self.__dict__[name] = value
@ -175,9 +172,6 @@ class MediaInfo:
if self.douban_info:
self.set_douban_info(self.douban_info)
def __getattr__(self, attribute):
return None
def __setattr__(self, name: str, value: Any):
self.__dict__[name] = value
@ -518,9 +512,6 @@ class Context:
# 种子信息
torrent_info: TorrentInfo = None
def __getattr__(self, attribute):
return None
def __setattr__(self, name: str, value: Any):
self.__dict__[name] = value

View File

@ -1,6 +1,6 @@
import time
from sqlalchemy import Column, Integer, String, Sequence, Boolean
from sqlalchemy import Column, Integer, String, Sequence, Boolean, func
from sqlalchemy.orm import Session
from app.db.models import Base
@ -58,3 +58,15 @@ class TransferHistory(Base):
@staticmethod
def get_by_hash(db: Session, download_hash: str):
return db.query(TransferHistory).filter(TransferHistory.download_hash == download_hash).first()
@staticmethod
def statistic(db: Session, days: int = 7):
    """
    Count transfer-history rows per day over the last ``days`` days.

    Returns (date-string, count) tuples grouped on the first ten characters
    of the ``date`` column (the YYYY-MM-DD part).
    """
    cutoff = time.strftime("%Y-%m-%d %H:%M:%S",
                           time.localtime(time.time() - 86400 * days))
    day_rows = db.query(
        func.substr(TransferHistory.date, 1, 10).label('date'),
        TransferHistory.id.label('id')
    ).filter(TransferHistory.date >= cutoff).subquery()
    return db.query(day_rows.c.date,
                    func.count(day_rows.c.id)).group_by(day_rows.c.date).all()

View File

@ -29,3 +29,10 @@ class TransferHistoryOper(DbOper):
"date": time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
})
return TransferHistory(**kwargs).create(self._db)
def statistic(self, days: int = 7):
    """
    Per-day transfer-history counts for the last ``days`` days
    (delegates to ``TransferHistory.statistic``).
    """
    return TransferHistory.statistic(self._db, days)

View File

@ -1,6 +1,7 @@
from pathlib import Path
from typing import Optional, Tuple, Union, Any
from app import schemas
from app.core.context import MediaInfo
from app.log import logger
from app.modules import _ModuleBase
@ -83,3 +84,16 @@ class EmbyModule(_ModuleBase):
)
]
return self.emby.refresh_library_by_items(items)
def media_statistic(self) -> schemas.Statistic:
    """
    Media counts reported by the Emby server, plus its user count.
    """
    counts = self.emby.get_medias_count()
    users = self.emby.get_user_count()
    return schemas.Statistic(
        movie_count=counts.get("MovieCount") or 0,
        tv_count=counts.get("SeriesCount") or 0,
        episode_count=counts.get("EpisodeCount") or 0,
        user_count=users or 0
    )

View File

@ -2,6 +2,7 @@ import json
from pathlib import Path
from typing import Optional, Tuple, Union, Any
from app import schemas
from app.core.context import MediaInfo
from app.log import logger
from app.modules import _ModuleBase
@ -75,3 +76,16 @@ class JellyfinModule(_ModuleBase):
:return: 成功或失败
"""
return self.jellyfin.refresh_root_library()
def media_statistic(self) -> schemas.Statistic:
    """
    Media counts reported by the Jellyfin server, plus its user count.
    """
    counts = self.jellyfin.get_medias_count()
    users = self.jellyfin.get_user_count()
    return schemas.Statistic(
        movie_count=counts.get("MovieCount") or 0,
        tv_count=counts.get("SeriesCount") or 0,
        episode_count=counts.get("EpisodeCount") or 0,
        user_count=users or 0
    )

View File

@ -1,6 +1,7 @@
from pathlib import Path
from typing import Optional, Tuple, Union, Any
from app import schemas
from app.core.context import MediaInfo
from app.log import logger
from app.modules import _ModuleBase
@ -73,3 +74,15 @@ class PlexModule(_ModuleBase):
)
]
return self.plex.refresh_library_by_items(items)
def media_statistic(self) -> schemas.Statistic:
    """
    Media counts reported by the Plex server.

    No user count is queried here — it is reported as a single user.
    """
    counts = self.plex.get_medias_count()
    return schemas.Statistic(
        movie_count=counts.get("MovieCount") or 0,
        tv_count=counts.get("SeriesCount") or 0,
        episode_count=counts.get("EpisodeCount") or 0,
        user_count=1
    )

View File

@ -1,14 +1,15 @@
from pathlib import Path
from typing import Set, Tuple, Optional, Union, List
from app import schemas
from app.core.config import settings
from app.core.metainfo import MetaInfo
from app.log import logger
from app.modules import _ModuleBase
from app.modules.qbittorrent.qbittorrent import Qbittorrent
from app.schemas import TransferInfo, TransferTorrent, DownloadingTorrent
from app.utils.string import StringUtils
from app.schemas.types import TorrentStatus
from app.utils.string import StringUtils
class QbittorrentModule(_ModuleBase):
@ -184,3 +185,16 @@ class QbittorrentModule(_ModuleBase):
:return: bool
"""
return self.qbittorrent.start_torrents(ids=hashs)
def downloader_info(self) -> schemas.DownloaderInfo:
    """
    Downloader statistics from qBittorrent's realtime transfer info.
    """
    # Query realtime transfer info through the qBittorrent API.
    info = self.qbittorrent.transfer_info()
    # transfer_info() returns None when the API call fails — report the
    # schema's zeroed defaults instead of raising AttributeError on .get().
    if info is None:
        return schemas.DownloaderInfo()
    return schemas.DownloaderInfo(
        download_speed=info.get("dl_info_speed"),
        upload_speed=info.get("up_info_speed"),
        download_size=info.get("dl_info_data"),
        upload_size=info.get("up_info_data")
    )

View File

@ -4,6 +4,7 @@ from typing import Optional, Union, Tuple, List
import qbittorrentapi
from qbittorrentapi import TorrentFilesList, TorrentDictionary
from qbittorrentapi.client import Client
from qbittorrentapi.transfer import TransferInfoDictionary
from app.core.config import settings
from app.log import logger
@ -285,3 +286,13 @@ class Qbittorrent(metaclass=Singleton):
except Exception as err:
logger.error(f"设置种子文件状态出错:{err}")
return False
def transfer_info(self) -> Optional[TransferInfoDictionary]:
    """
    Global transfer info from qBittorrent; None when the API call fails.
    """
    try:
        result = self.qbc.transfer_info()
    except Exception as err:
        logger.error(f"获取传输信息出错:{err}")
        return None
    return result

View File

@ -1,6 +1,7 @@
from pathlib import Path
from typing import Set, Tuple, Optional, Union, List
from app import schemas
from app.core.config import settings
from app.core.metainfo import MetaInfo
from app.log import logger
@ -168,3 +169,15 @@ class TransmissionModule(_ModuleBase):
:return: bool
"""
return self.transmission.start_torrents(ids=hashs)
def downloader_info(self) -> schemas.DownloaderInfo:
    """
    Downloader statistics from Transmission's session stats.
    """
    info = self.transmission.transfer_info()
    # transfer_info() returns None when the RPC call fails — report the
    # schema's zeroed defaults instead of raising AttributeError.
    if info is None:
        return schemas.DownloaderInfo()
    return schemas.DownloaderInfo(
        download_speed=info.download_speed,
        upload_speed=info.upload_speed,
        download_size=info.current_stats.downloaded_bytes,
        upload_size=info.current_stats.uploaded_bytes
    )

View File

@ -2,6 +2,7 @@ from typing import Optional, Union, Tuple, List
import transmission_rpc
from transmission_rpc import Client, Torrent, File
from transmission_rpc.session import SessionStats
from app.core.config import settings
from app.log import logger
@ -214,3 +215,13 @@ class Transmission(metaclass=Singleton):
except Exception as err:
logger.error(f"设置下载文件状态出错:{err}")
return False
def transfer_info(self) -> Optional[SessionStats]:
    """
    Session statistics from Transmission; None when the RPC call fails.
    """
    try:
        stats = self.trc.session_stats()
    except Exception as err:
        logger.error(f"获取传输信息出错:{err}")
        return None
    return stats

View File

@ -45,24 +45,28 @@ class Scheduler(metaclass=Singleton):
self._scheduler.add_job(CookieCloudChain().process,
"interval",
minutes=settings.COOKIECLOUD_INTERVAL,
next_run_time=datetime.now(pytz.timezone(settings.TZ)) + timedelta(minutes=1))
next_run_time=datetime.now(pytz.timezone(settings.TZ)) + timedelta(minutes=1),
name="同步CookieCloud站点")
# 新增订阅时搜索5分钟检查一次
self._scheduler.add_job(SubscribeChain().search, "interval", minutes=5, kwargs={'state': 'N'})
self._scheduler.add_job(SubscribeChain().search, "interval",
minutes=5, kwargs={'state': 'N'})
# 订阅状态每隔12小时刷新一次
self._scheduler.add_job(SubscribeChain().search, "interval", hours=12, kwargs={'state': 'R'})
# 订阅状态每隔12小时搜索一次
self._scheduler.add_job(SubscribeChain().search, "interval",
hours=12, kwargs={'state': 'R'}, name="订阅搜索")
# 站点首页种子定时刷新缓存并匹配订阅
triggers = TimerUtils.random_scheduler(num_executions=20)
for trigger in triggers:
self._scheduler.add_job(SubscribeChain().refresh, "cron", hour=trigger.hour, minute=trigger.minute)
self._scheduler.add_job(SubscribeChain().refresh, "cron",
hour=trigger.hour, minute=trigger.minute, name="订阅刷新")
# 豆瓣同步每30分钟
self._scheduler.add_job(DoubanChain().sync, "interval", minutes=30)
self._scheduler.add_job(DoubanChain().sync, "interval", minutes=30, name="同步豆瓣想看")
# 下载器文件转移每5分钟
self._scheduler.add_job(TransferChain().process, "interval", minutes=5)
self._scheduler.add_job(TransferChain().process, "interval", minutes=5, name="下载文件整理")
# 公共定时服务
self._scheduler.add_job(SchedulerChain().scheduler_job, "interval", minutes=10)
@ -73,6 +77,12 @@ class Scheduler(metaclass=Singleton):
# 启动定时服务
self._scheduler.start()
def list(self):
    """
    Return every job currently registered with the scheduler.
    """
    return self._scheduler.get_jobs()
def stop(self):
"""
关闭定时服务

View File

@ -7,3 +7,4 @@ from .context import *
from .servarr import *
from .plugin import *
from .history import *
from .dashboard import *

62
app/schemas/dashboard.py Normal file
View File

@ -0,0 +1,62 @@
from typing import Optional
from pydantic import BaseModel
class Statistic(BaseModel):
    """Aggregate media-library counts for the dashboard."""
    # Number of movies
    movie_count: Optional[int] = 0
    # Number of TV series
    tv_count: Optional[int] = 0
    # Number of episodes
    episode_count: Optional[int] = 0
    # Number of media-server users
    user_count: Optional[int] = 0
class Storage(BaseModel):
    """Library storage totals, in bytes."""
    # Total storage space
    total_storage: Optional[float] = 0
    # Used storage space
    used_storage: Optional[float] = 0
class ProcessInfo(BaseModel):
    """A single OS process as shown on the dashboard."""
    # Process ID
    pid: Optional[int] = 0
    # Process name
    name: Optional[str] = None
    # Process status
    status: Optional[str] = None
    # CPU usage
    cpu: Optional[float] = 0.0
    # Memory usage in MB
    memory: Optional[float] = 0.0
    # Process creation timestamp
    create_time: Optional[float] = 0.0
    # Run time in seconds
    run_time: Optional[float] = 0.0
class DownloaderInfo(BaseModel):
    """Downloader transfer statistics."""
    # Current download speed
    download_speed: Optional[float] = 0.0
    # Current upload speed
    upload_speed: Optional[float] = 0.0
    # Total downloaded amount
    download_size: Optional[float] = 0.0
    # Total uploaded amount
    upload_size: Optional[float] = 0.0
    # Free space of the download directory
    free_space: Optional[float] = 0.0
class ScheduleInfo(BaseModel):
    """A background scheduler job."""
    # Job ID
    id: Optional[str] = None
    # Job name
    name: Optional[str] = None
    # Job status (running / waiting / stopped)
    status: Optional[str] = None
    # Human-readable time until the next run
    next_run: Optional[str] = None

View File

@ -7,10 +7,18 @@ class ObjectUtils:
@staticmethod
def is_obj(obj: Any):
    """
    Heuristically decide whether *obj* is (or serializes to) a container:
    True for list/dict, False for scalar primitives, otherwise judged by
    whether its string form starts with '{' or '['.
    """
    if isinstance(obj, (list, dict)):
        return True
    if isinstance(obj, (str, int, float, bool, bytes)):
        return False
    return str(obj).startswith(("{", "["))
@staticmethod
def arguments(func: Callable) -> int:

View File

@ -1,9 +1,12 @@
import datetime
import os
import platform
import re
import shutil
from pathlib import Path
from typing import List
from typing import List, Union, Tuple
import psutil
from app import schemas
class SystemUtils:
@ -39,7 +42,7 @@ class SystemUtils:
return True if platform.system() == 'Darwin' else False
@staticmethod
def copy(src: Path, dest: Path):
def copy(src: Path, dest: Path) -> Tuple[int, str]:
"""
复制
"""
@ -51,7 +54,7 @@ class SystemUtils:
return -1, str(err)
@staticmethod
def move(src: Path, dest: Path):
def move(src: Path, dest: Path) -> Tuple[int, str]:
"""
移动
"""
@ -63,7 +66,7 @@ class SystemUtils:
return -1, str(err)
@staticmethod
def link(src: Path, dest: Path):
def link(src: Path, dest: Path) -> Tuple[int, str]:
"""
硬链接
"""
@ -75,7 +78,7 @@ class SystemUtils:
return -1, str(err)
@staticmethod
def softlink(src: Path, dest: Path):
def softlink(src: Path, dest: Path) -> Tuple[int, str]:
"""
软链接
"""
@ -105,7 +108,7 @@ class SystemUtils:
return files
@staticmethod
def get_directory_size(path: Path):
def get_directory_size(path: Path) -> float:
"""
计算目录的大小
@ -125,3 +128,74 @@ class SystemUtils:
total_size += path.stat().st_size
return total_size
@staticmethod
def space_usage(dir_list: Union[Path, List[Path]]) -> Tuple[float, float]:
    """
    Total and free space, in bytes, across one or more directories,
    counting each underlying disk only once.

    :return: (total_space, total_free_space)
    """
    if not dir_list:
        return 0.0, 0.0
    paths = dir_list if isinstance(dir_list, list) else [dir_list]
    seen_disks = set()
    total = 0.0
    free = 0.0
    for path in paths:
        if not path or not path.exists():
            continue
        # Identify the disk: drive letter on Windows, device id elsewhere.
        disk_id = path.drive if os.name == "nt" else os.stat(path).st_dev
        # Count each disk's capacity only once, even when several
        # directories live on it.
        if disk_id in seen_disks:
            continue
        seen_disks.add(disk_id)
        total += SystemUtils.total_space(path)
        free += SystemUtils.free_space(path)
    return total, free
@staticmethod
def free_space(path: Path) -> float:
    """
    Free space in bytes at *path*; 0.0 when the path does not exist.
    """
    if not os.path.exists(path):
        return 0.0
    usage = psutil.disk_usage(str(path))
    return usage.free
@staticmethod
def total_space(path: Path) -> float:
    """
    Total space in bytes of the disk holding *path*; 0.0 when the path
    does not exist.
    """
    if not os.path.exists(path):
        return 0.0
    usage = psutil.disk_usage(str(path))
    return usage.total
@staticmethod
def processes() -> List[schemas.ProcessInfo]:
    """
    Snapshot of all running processes as ProcessInfo records.

    Zombie processes are skipped; processes that vanish or deny access
    while being inspected are ignored.
    """
    result = []
    now = datetime.datetime.now()
    for proc in psutil.process_iter(['pid', 'name', 'create_time', 'memory_info', 'status']):
        try:
            if proc.status() == psutil.STATUS_ZOMBIE:
                continue
            # create_time() / memory_info() are plain psutil.Process methods;
            # the original getattr(..., None)() would call None (TypeError)
            # if the attribute were ever missing.
            run_time = now - datetime.datetime.fromtimestamp(int(proc.create_time()))
            mem_mb = round(proc.memory_info().rss / (1024 * 1024), 1)
            result.append(schemas.ProcessInfo(
                pid=proc.pid,
                name=proc.name(),
                # total_seconds() (not .seconds) so processes older than a
                # day report correctly — .seconds discards whole days.
                run_time=int(run_time.total_seconds()),
                memory=mem_mb
            ))
        except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
            pass
    return result

View File

@ -37,3 +37,32 @@ class TimerUtils:
trigger.append(random_trigger)
return trigger
from datetime import datetime, timedelta
@staticmethod
def time_difference(input_datetime: datetime) -> str:
"""
判断输入时间与当前的时间差如果输入时间大于当前时间则返回时间差否则返回空字符串
"""
if not input_datetime:
return ""
current_datetime = datetime.datetime.now(datetime.timezone.utc).astimezone()
time_difference = input_datetime - current_datetime
if time_difference.total_seconds() < 0:
return ""
days = time_difference.days
hours, remainder = divmod(time_difference.seconds, 3600)
minutes, _ = divmod(remainder, 60)
time_difference_string = ""
if days > 0:
time_difference_string += f"{days}"
if hours > 0:
time_difference_string += f"{hours}小时"
if minutes > 0:
time_difference_string += f"{minutes}分钟"
return time_difference_string

View File

@ -27,6 +27,7 @@ http {
location / {
# 主目录
expires off;
add_header Cache-Control "no-cache, no-store, must-revalidate";
root /app/public;
@ -34,6 +35,7 @@ http {
}
location /assets {
# 静态资源
expires 7d;
add_header Cache-Control "public";
}
@ -53,6 +55,28 @@ http {
proxy_pass http://backend_api;
}
location ~ ^/api/v1/system/(message|progress/) {
# SSE MIME类型设置
default_type text/event-stream;
# 禁用缓存
add_header Cache-Control no-cache;
add_header X-Accel-Buffering no;
proxy_buffering off;
proxy_cache off;
# 代理设置
proxy_pass http://backend_api;
proxy_http_version 1.1;
proxy_set_header Connection "";
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
# 超时设置
proxy_read_timeout 3600s;
}
location /api {
# 后端API
proxy_pass http://backend_api;
@ -67,13 +91,10 @@ http {
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Host $http_host;
proxy_set_header X-Nginx-Proxy true;
}
error_page 500 502 503 504 /50x.html;
location = /50x.html {
root html;
# 超时设置
proxy_read_timeout 600s;
}
}
upstream backend_api {

View File

@ -42,4 +42,5 @@ chardet~=4.0.0
starlette~=0.27.0
PyVirtualDisplay~=3.0
Cython~=0.29.35
tvdb_api~=3.1
tvdb_api~=3.1
psutil==5.9.4