Merge pull request #2410 from jxxghp/main

fix bugs
This commit is contained in:
jxxghp 2024-06-24 12:47:39 +08:00 committed by GitHub
commit ae36f5100a
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
34 changed files with 875 additions and 60 deletions

View File

@ -1,6 +1,11 @@
name: MoviePilot Builder
on:
workflow_dispatch:
push:
branches:
- main
paths:
- version.py
jobs:
Docker-build:

View File

@ -73,7 +73,7 @@ def list_aliyun(fileitem: schemas.FileItem,
if sort == "time":
sort = "updated_at"
if fileitem.type == "file":
fileitem = AliyunHelper().detail(fileitem.fileid, path=path)
fileitem = AliyunHelper().detail(drive_id=fileitem.drive_id, file_id=fileitem.fileid, path=path)
if fileitem:
return [fileitem]
return []
@ -115,13 +115,14 @@ def delete_aliyun(fileitem: schemas.FileItem,
@router.get("/download", summary="下载文件(阿里云盘)")
def download_aliyun(fileid: str,
drive_id: str = None,
_: schemas.TokenPayload = Depends(verify_uri_token)) -> Any:
"""
下载文件或目录
"""
if not fileid:
return schemas.Response(success=False)
url = AliyunHelper().download(fileid)
url = AliyunHelper().download(drive_id=drive_id, file_id=fileid)
if url:
# 重定向
return Response(status_code=302, headers={"Location": url})
@ -138,7 +139,7 @@ def rename_aliyun(fileitem: schemas.FileItem,
"""
if not fileitem.fileid or not new_name:
return schemas.Response(success=False)
result = AliyunHelper().rename(fileitem.fileid, new_name)
result = AliyunHelper().rename(drive_id=fileitem.drive_id, file_id=fileitem.fileid, name=new_name)
if result:
if recursive:
transferchain = TransferChain()
@ -184,13 +185,13 @@ def rename_aliyun(fileitem: schemas.FileItem,
@router.get("/image", summary="读取图片(阿里云盘)", response_model=schemas.Response)
def image_aliyun(fileid: str, _: schemas.TokenPayload = Depends(verify_uri_token)) -> Any:
def image_aliyun(fileid: str, drive_id: str = None, _: schemas.TokenPayload = Depends(verify_uri_token)) -> Any:
"""
读取图片
"""
if not fileid:
return schemas.Response(success=False)
url = AliyunHelper().download(fileid)
url = AliyunHelper().download(drive_id=drive_id, file_id=fileid)
if url:
# 重定向
return Response(status_code=302, headers={"Location": url})
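Both endpoints now accept an optional drive_id and forward it to AliyunHelper. A minimal client sketch, assuming the router is mounted under /api/v1/aliyun and that verify_uri_token reads a token query parameter (both are assumptions, not shown in this diff):
import requests  # hypothetical client call; host, mount path and token are placeholders
resp = requests.get(
    "http://localhost:3001/api/v1/aliyun/download",
    params={
        "fileid": "<aliyun file_id>",
        "drive_id": "<aliyun drive_id>",  # new optional parameter added by this commit
        "token": "<uri token>",
    },
    allow_redirects=False,  # the endpoint answers with a 302 pointing at the real download URL
)
print(resp.status_code, resp.headers.get("Location"))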

View File

@ -110,7 +110,7 @@ def scrape(fileitem: schemas.FileItem,
# 识别媒体信息
scrape_path = Path(fileitem.path)
meta = MetaInfoPath(scrape_path)
mediainfo = chain.recognize_media(meta)
mediainfo = chain.recognize_by_meta(meta)
if not mediainfo:
return schemas.Response(success=False, message="刮削失败,无法识别媒体信息")
if storage == "local":

View File

@ -142,7 +142,7 @@ class ChainBase(metaclass=ABCMeta):
bangumiid: int = None,
cache: bool = True) -> Optional[MediaInfo]:
"""
识别媒体信息
识别媒体信息,不含Fanart图片
:param meta: 识别的元数据
:param mtype: 识别的媒体类型,与tmdbid配套
:param tmdbid: tmdbid

View File

@ -216,6 +216,13 @@ class DownloadChain(ChainBase):
_media = context.media_info
_meta = context.meta_info
# 补充完整的media数据
if not _media.genre_ids:
new_media = self.recognize_media(mtype=_media.type, tmdbid=_media.tmdb_id,
doubanid=_media.douban_id, bangumiid=_media.bangumi_id)
if new_media:
_media = new_media
# 实际下载的集数
download_episodes = StringUtils.format_ep(list(episodes)) if episodes else None
_folder_name = ""
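The new guard re-resolves the media record when the cached download context is missing genre data before the notification is built; a standalone sketch of the same pattern (function and field names here are illustrative, not the project's API):
def ensure_complete_media(media, recognize):
    # Re-query the recognizer only when the cached object lacks the fields we need.
    if not media.genre_ids:
        refreshed = recognize(mtype=media.type, tmdbid=media.tmdb_id,
                              doubanid=media.douban_id, bangumiid=media.bangumi_id)
        if refreshed:
            return refreshed
    return media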

View File

@ -334,7 +334,7 @@ class MediaChain(ChainBase, metaclass=Singleton):
return None
def manual_scrape(self, storage: str, fileitem: schemas.FileItem,
meta: MetaBase, mediainfo: MediaInfo, init_folder: bool = True):
meta: MetaBase = None, mediainfo: MediaInfo = None, init_folder: bool = True):
"""
手动刮削媒体信息
"""
@ -399,7 +399,12 @@ class MediaChain(ChainBase, metaclass=Singleton):
if fileitem.type == "file" \
and (not filepath.suffix or filepath.suffix.lower() not in settings.RMT_MEDIAEXT):
return
if not meta:
meta = MetaInfoPath(filepath)
if not mediainfo:
mediainfo = self.recognize_by_meta(meta)
if not mediainfo:
logger.warn(f"{filepath} 无法识别文件媒体信息!")
return
logger.info(f"开始刮削:{filepath} ...")
if mediainfo.type == MediaType.MOVIE:

View File

@ -1072,6 +1072,9 @@ class SubscribeChain(ChainBase):
total = no_exist_season.total_episode
# 原开始集数
start = no_exist_season.start_episode
# 整季缺失
if not episode_list:
episode_list = list(range(start, total + 1))
# 更新剧集列表
episodes = list(set(episode_list).difference(set(downloaded_episodes)))
# 更新集合
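When episode_list is empty the subscription is treated as missing the whole season before the downloaded episodes are subtracted; a small worked example of the resulting set arithmetic (values are illustrative):
start, total = 1, 12
downloaded_episodes = [1, 2, 3]
episode_list = []                                 # nothing recorded yet => whole season missing
if not episode_list:
    episode_list = list(range(start, total + 1))  # [1, 2, ..., 12]
episodes = sorted(set(episode_list).difference(downloaded_episodes))
print(episodes)                                   # [4, 5, ..., 12]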

View File

@ -86,13 +86,16 @@ class TransferChain(ChainBase):
mediainfo = self.recognize_media(mtype=mtype,
tmdbid=downloadhis.tmdbid,
doubanid=downloadhis.doubanid)
if mediainfo:
# 补充图片
self.obtain_images(mediainfo)
else:
# 非MoviePilot下载的任务按文件识别
mediainfo = None
# 执行转移
self.do_transfer(storage="local", path=torrent.path,
mediainfo=mediainfo, download_hash=torrent.hash)
self.__do_transfer(storage="local", path=torrent.path,
mediainfo=mediainfo, download_hash=torrent.hash)
# 设置下载任务状态
self.transfer_completed(hashs=torrent.hash, path=torrent.path)
@ -100,13 +103,13 @@ class TransferChain(ChainBase):
logger.info("下载器文件转移执行完成")
return True
def do_transfer(self, storage: str, path: Path, drive_id: str = None, fileid: str = None, filetype: str = None,
meta: MetaBase = None, mediainfo: MediaInfo = None,
download_hash: str = None,
target: Path = None, transfer_type: str = None,
season: int = None, epformat: EpisodeFormat = None,
min_filesize: int = 0, scrape: bool = None,
force: bool = False) -> Tuple[bool, str]:
def __do_transfer(self, storage: str, path: Path, drive_id: str = None, fileid: str = None, filetype: str = None,
meta: MetaBase = None, mediainfo: MediaInfo = None,
download_hash: str = None,
target: Path = None, transfer_type: str = None,
season: int = None, epformat: EpisodeFormat = None,
min_filesize: int = 0, scrape: bool = None,
force: bool = False) -> Tuple[bool, str]:
"""
执行一个复杂目录的转移操作
:param storage: 存储器
@ -152,7 +155,7 @@ class TransferChain(ChainBase):
download_hash=download_hash, force=force)
else:
# 网盘整理
result = self.__transfer_remote(storage=storage,
result = self.__transfer_online(storage=storage,
fileitem=schemas.FileItem(
path=str(path) + ("/" if filetype == "dir" else ""),
type=filetype,
@ -162,7 +165,21 @@ class TransferChain(ChainBase):
),
meta=meta,
mediainfo=mediainfo)
if result and result[0] and scrape:
# 刮削元数据
self.progress.update(value=0,
text=f"正在刮削 {path} ...",
key=ProgressKey.FileTransfer)
self.mediachain.manual_scrape(storage=storage,
fileitem=schemas.FileItem(
path=str(path) + ("/" if filetype == "dir" else ""),
type=filetype,
drive_id=drive_id,
fileid=fileid,
name=path.name
),
meta=meta,
mediainfo=mediainfo)
# 结束进度
self.progress.end(ProgressKey.FileTransfer)
return result
@ -467,7 +484,7 @@ class TransferChain(ChainBase):
key=ProgressKey.FileTransfer)
return True, "\n".join(err_msgs)
def __transfer_remote(self, storage: str, fileitem: schemas.FileItem,
def __transfer_online(self, storage: str, fileitem: schemas.FileItem,
meta: MetaBase, mediainfo: MediaInfo) -> Tuple[bool, str]:
"""
整理一个远程目录
@ -484,12 +501,12 @@ class TransferChain(ChainBase):
return U115Helper().list(parent_file_id=_fileid, path=_path)
return []
def __rename_file(_storage: str, _fileid: str, _name: str) -> bool:
def __rename_file(_storage: str, _drive_id: str, _fileid: str, _name: str) -> bool:
"""
重命名文件
"""
if _storage == "aliyun":
return AliyunHelper().rename(file_id=_fileid, name=_name)
return AliyunHelper().rename(drive_id=_drive_id, file_id=_fileid, name=_name)
elif _storage == "u115":
return U115Helper().rename(file_id=_fileid, name=_name)
return False
@ -535,7 +552,7 @@ class TransferChain(ChainBase):
# 文件元数据
meta = MetaInfoPath(Path(fileitem.path))
if not mediainfo:
mediainfo = self.recognize_media(meta=meta)
mediainfo = self.mediachain.recognize_by_meta(meta)
if not mediainfo:
logger.warn(f"{fileitem.name} 未识别到媒体信息")
return False, f"{fileitem.name} 未识别到媒体信息"
@ -557,7 +574,7 @@ class TransferChain(ChainBase):
if fileitem.type == "file":
# 重命名文件
logger.info(f"正在整理 {fileitem.name} => {file_name} ...")
if not __rename_file(storage, fileitem.fileid, file_name):
if not __rename_file(_storage=storage, _drive_id=fileitem.drive_id, _fileid=fileitem.fileid, _name=file_name):
logger.error(f"{fileitem.name} 重命名失败")
return False, f"{fileitem.name} 重命名失败"
logger.info(f"{fileitem.path} 整理完成")
@ -567,7 +584,8 @@ class TransferChain(ChainBase):
# 电影目录
# 重命名当前目录
logger.info(f"正在重命名 {fileitem.path} => {folder_name} ...")
if not __rename_file(_storage=storage, _fileid=fileitem.fileid, _name=folder_name):
if not __rename_file(_storage=storage, _drive_id=fileitem.drive_id,
_fileid=fileitem.fileid, _name=folder_name):
logger.error(f"{fileitem.path} 重命名失败")
return False, f"{fileitem.path} 重命名失败"
logger.info(f"{fileitem.path} 重命名完成")
@ -589,12 +607,12 @@ class TransferChain(ChainBase):
if not file_meta.name:
# 过滤掉无效文件
continue
file_media = self.recognize_media(meta=file_meta)
file_media = self.mediachain.recognize_by_meta(file_meta)
if not file_media:
logger.warn(f"{file.name} 未识别到媒体信息")
continue
# 整理这个文件或目录
self.__transfer_remote(storage=storage, fileitem=file, meta=file_meta, mediainfo=file_media)
self.__transfer_online(storage=storage, fileitem=file, meta=file_meta, mediainfo=file_media)
else:
# 电视剧目录
# 判断当前目录类型
@ -602,14 +620,16 @@ class TransferChain(ChainBase):
if folder_meta.begin_season and not folder_meta.name:
# 季目录
logger.info(f"正在重命名 {fileitem.path} => {season_name} ...")
if not __rename_file(_storage=storage, _fileid=fileitem.fileid, _name=season_name):
if not __rename_file(_storage=storage, _drive_id=fileitem.drive_id,
_fileid=fileitem.fileid, _name=season_name):
logger.error(f"{fileitem.path} 重命名失败")
return False, f"{fileitem.path} 重命名失败"
logger.info(f"{fileitem.path} 重命名完成")
elif folder_meta.name:
# 根目录,重命名当前目录
logger.info(f"正在重命名 {fileitem.path} => {folder_name} ...")
if not __rename_file(_storage=storage, _fileid=fileitem.fileid, _name=folder_name):
if not __rename_file(_storage=storage, _drive_id=fileitem.drive_id,
_fileid=fileitem.fileid, _name=folder_name):
logger.error(f"{fileitem.path} 重命名失败")
return False, f"{fileitem.path} 重命名失败"
logger.info(f"{fileitem.path} 重命名完成")
@ -661,12 +681,12 @@ class TransferChain(ChainBase):
continue
# 重新识别文件或目录
file_meta = MetaInfoPath(Path(file.path))
file_media = self.recognize_media(meta=file_meta)
file_media = self.mediachain.recognize_by_meta(file_meta)
if not file_media:
logger.warn(f"{file.name} 未识别到媒体信息")
continue
# 整理这个文件或目录
self.__transfer_remote(storage=storage, fileitem=file, meta=file_meta, mediainfo=file_media)
self.__transfer_online(storage=storage, fileitem=file, meta=file_meta, mediainfo=file_media)
logger.info(f"{fileitem.path} 整理完成")
self.progress.update(value=0,
@ -747,16 +767,16 @@ class TransferChain(ChainBase):
if not type_str or type_str not in [MediaType.MOVIE.value, MediaType.TV.value]:
args_error()
return
state, errmsg = self.re_transfer(logid=int(logid),
mtype=MediaType(type_str),
mediaid=media_id)
state, errmsg = self.__re_transfer(logid=int(logid),
mtype=MediaType(type_str),
mediaid=media_id)
if not state:
self.post_message(Notification(channel=channel, title="手动整理失败",
text=errmsg, userid=userid, link=settings.MP_DOMAIN('#/history')))
return
def re_transfer(self, logid: int, mtype: MediaType = None,
mediaid: str = None) -> Tuple[bool, str]:
def __re_transfer(self, logid: int, mtype: MediaType = None,
mediaid: str = None) -> Tuple[bool, str]:
"""
根据历史记录重新识别转移,只支持简单条件
:param logid: 历史记录ID
@ -791,11 +811,11 @@ class TransferChain(ChainBase):
self.delete_files(Path(history.dest))
# 强制转移
state, errmsg = self.do_transfer(storage="local",
path=src_path,
mediainfo=mediainfo,
download_hash=history.download_hash,
force=True)
state, errmsg = self.__do_transfer(storage="local",
path=src_path,
mediainfo=mediainfo,
download_hash=history.download_hash,
force=True)
if not state:
return False, errmsg
@ -843,13 +863,16 @@ class TransferChain(ChainBase):
mediainfo: MediaInfo = self.mediachain.recognize_media(tmdbid=tmdbid, doubanid=doubanid, mtype=mtype)
if not mediainfo:
return False, f"媒体信息识别失败tmdbid{tmdbid}doubanid{doubanid}type: {mtype.value}"
else:
# 更新媒体图片
self.obtain_images(mediainfo=mediainfo)
# 开始进度
self.progress.start(ProgressKey.FileTransfer)
self.progress.update(value=0,
text=f"开始转移 {in_path} ...",
key=ProgressKey.FileTransfer)
# 开始转移
state, errmsg = self.do_transfer(
state, errmsg = self.__do_transfer(
storage=storage,
path=in_path,
drive_id=drive_id,
@ -872,17 +895,18 @@ class TransferChain(ChainBase):
return True, ""
else:
# 没有输入TMDBID时按文件识别
state, errmsg = self.do_transfer(storage=storage,
path=in_path,
drive_id=drive_id,
fileid=fileid,
filetype=filetype,
target=target,
transfer_type=transfer_type,
season=season,
epformat=epformat,
min_filesize=min_filesize,
force=force)
state, errmsg = self.__do_transfer(storage=storage,
path=in_path,
drive_id=drive_id,
fileid=fileid,
filetype=filetype,
target=target,
transfer_type=transfer_type,
season=season,
epformat=epformat,
min_filesize=min_filesize,
scrape=scrape,
force=force)
return state, errmsg
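Renaming do_transfer and re_transfer to __do_transfer and __re_transfer makes them name-mangled, so code outside TransferChain can no longer call them by their old public names and must go through manual_transfer instead; a minimal illustration of the Python behaviour (generic class, not the project's):
class Chain:
    def __do_transfer(self):
        return "ok"
    def run(self):
        return self.__do_transfer()   # internal calls are unaffected
c = Chain()
print(c.run())                        # ok
print(hasattr(c, "do_transfer"))      # False: the old public name no longer exists
print(c._Chain__do_transfer())        # mangled name still reachable, but private by convention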
def send_transfer_message(self, meta: MetaBase, mediainfo: MediaInfo,

View File

@ -241,6 +241,29 @@ class Settings(BaseSettings):
# 服务器地址,对应 https://github.com/jxxghp/MoviePilot-Server 项目
MP_SERVER_HOST: str = "https://movie-pilot.org"
# 【已弃用】刮削入库的媒体文件
SCRAP_METADATA: bool = True
# 【已弃用】下载保存目录,容器内映射路径需要一致
DOWNLOAD_PATH: Optional[str] = None
# 【已弃用】电影下载保存目录,容器内映射路径需要一致
DOWNLOAD_MOVIE_PATH: Optional[str] = None
# 【已弃用】电视剧下载保存目录,容器内映射路径需要一致
DOWNLOAD_TV_PATH: Optional[str] = None
# 【已弃用】动漫下载保存目录,容器内映射路径需要一致
DOWNLOAD_ANIME_PATH: Optional[str] = None
# 【已弃用】下载目录二级分类
DOWNLOAD_CATEGORY: bool = False
# 【已弃用】媒体库目录,多个目录使用,分隔
LIBRARY_PATH: Optional[str] = None
# 【已弃用】电影媒体库目录名
LIBRARY_MOVIE_NAME: str = "电影"
# 【已弃用】电视剧媒体库目录名
LIBRARY_TV_NAME: str = "电视剧"
# 【已弃用】动漫媒体库目录名,不设置时使用电视剧目录
LIBRARY_ANIME_NAME: Optional[str] = None
# 【已弃用】二级分类
LIBRARY_CATEGORY: bool = True
@validator("SUBSCRIBE_RSS_INTERVAL",
"COOKIECLOUD_INTERVAL",
"MEDIASERVER_SYNC_INTERVAL",

View File

@ -89,6 +89,11 @@ class TransferHistory(Base):
def get_by_src(db: Session, src: str):
return db.query(TransferHistory).filter(TransferHistory.src == src).first()
@staticmethod
@db_query
def get_by_dest(db: Session, dest: str):
return db.query(TransferHistory).filter(TransferHistory.dest == dest).first()
@staticmethod
@db_query
def list_by_hash(db: Session, download_hash: str):

View File

@ -36,6 +36,13 @@ class TransferHistoryOper(DbOper):
"""
return TransferHistory.get_by_src(self._db, src)
def get_by_dest(self, dest: str) -> TransferHistory:
"""
按转移路径查询转移记录
:param dest: 数据key
"""
return TransferHistory.get_by_dest(self._db, dest)
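A short usage sketch of the new query; only get_by_dest itself comes from this commit, the call site and path below are hypothetical:
oper = TransferHistoryOper()
history = oper.get_by_dest("/media/电影/Some Movie (2024)/Some Movie (2024).mkv")
if history:
    print(history.src)  # the source path this destination was transferred from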
def list_by_hash(self, download_hash: str) -> List[TransferHistory]:
"""
按种子hash查询转移记录

View File

@ -456,7 +456,7 @@ class AliyunHelper:
self.__handle_error(res, "删除文件")
return False
def detail(self, file_id: str, path: str = "/") -> Optional[schemas.FileItem]:
def detail(self, drive_id: str, file_id: str, path: str = "/") -> Optional[schemas.FileItem]:
"""
获取文件详情
"""
@ -465,7 +465,7 @@ class AliyunHelper:
return None
headers = self.__get_headers(params)
res = RequestUtils(headers=headers, timeout=10).post_res(self.file_detail_url, json={
"drive_id": params.get("resourceDriveId"),
"drive_id": drive_id,
"file_id": file_id
})
if res:
@ -486,7 +486,7 @@ class AliyunHelper:
self.__handle_error(res, "获取文件详情")
return None
def rename(self, file_id: str, name: str) -> bool:
def rename(self, drive_id: str, file_id: str, name: str) -> bool:
"""
重命名文件
"""
@ -495,7 +495,7 @@ class AliyunHelper:
return False
headers = self.__get_headers(params)
res = RequestUtils(headers=headers, timeout=10).post_res(self.rename_file_url, json={
"drive_id": params.get("resourceDriveId"),
"drive_id": drive_id,
"file_id": file_id,
"name": name,
"check_name_mode": "refuse"
@ -506,7 +506,7 @@ class AliyunHelper:
self.__handle_error(res, "重命名文件")
return False
def download(self, file_id: str) -> Optional[str]:
def download(self, drive_id: str, file_id: str) -> Optional[str]:
"""
获取下载链接
"""
@ -515,7 +515,7 @@ class AliyunHelper:
return None
headers = self.__get_headers(params)
res = RequestUtils(headers=headers, timeout=10).post_res(self.download_url, json={
"drive_id": params.get("resourceDriveId"),
"drive_id": drive_id,
"file_id": file_id
})
if res:
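detail, rename and download now require the caller to pass the drive_id explicitly instead of falling back to the resourceDriveId stored in the session parameters; a hedged sketch of the new call shapes (identifiers are placeholders):
helper = AliyunHelper()
drive_id, file_id = "<drive_id>", "<file_id>"
item = helper.detail(drive_id=drive_id, file_id=file_id, path="/电影")
if item:
    helper.rename(drive_id=drive_id, file_id=file_id, name="New Name.mkv")
    url = helper.download(drive_id=drive_id, file_id=file_id)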

View File

@ -0,0 +1,30 @@
"""1.0.11
Revision ID: 06abf3e7090b
Revises: d633ca6cd572
Create Date: 2023-10-27 12:22:56.213376
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '06abf3e7090b'
down_revision = 'd633ca6cd572'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
with op.batch_alter_table("downloadhistory") as batch_op:
batch_op.add_column(sa.Column('username', sa.String, nullable=True))
except Exception as e:
pass
# ### end Alembic commands ###
def downgrade() -> None:
pass

View File

@ -0,0 +1,30 @@
"""1.0.13
Revision ID: 127a25fdf0e8
Revises: d71e624f0208
Create Date: 2024-02-24 03:11:32.005540
"""
import contextlib
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '127a25fdf0e8'
down_revision = 'd71e624f0208'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with contextlib.suppress(Exception):
with op.batch_alter_table("subscribe") as batch_op:
batch_op.add_column(sa.Column('search_imdbid', sa.Integer, nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
pass

View File

@ -0,0 +1,31 @@
"""1.0.1
Revision ID: 14f1813ae8e3
Revises: 9f4edd55c2d4
Create Date: 2023-07-27 12:34:57.839443
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '14f1813ae8e3'
down_revision = '9f4edd55c2d4'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
with op.batch_alter_table("subscribe") as batch_op:
batch_op.add_column(sa.Column('best_version', sa.Integer, nullable=True))
batch_op.add_column(sa.Column('current_priority', sa.Integer, nullable=True))
except Exception as e:
pass
# ### end Alembic commands ###
def downgrade() -> None:
pass

View File

@ -0,0 +1,30 @@
"""1.0.4
Revision ID: 1e169250e949
Revises: 52ab4930be04
Create Date: 2023-09-01 09:56:33.907661
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '1e169250e949'
down_revision = '52ab4930be04'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
with op.batch_alter_table("subscribe") as batch_op:
batch_op.add_column(sa.Column('date', sa.String, nullable=True))
except Exception as e:
pass
# ### end Alembic commands ###
def downgrade() -> None:
pass

View File

@ -0,0 +1,38 @@
"""1.0.6
Revision ID: 232dfa044617
Revises: e734c7fe6056
Create Date: 2023-09-19 21:34:41.994617
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = '232dfa044617'
down_revision = 'e734c7fe6056'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
# 搜索优先级
op.execute("delete from systemconfig where key = 'SearchFilterRules';")
op.execute(
"insert into systemconfig(key, value) VALUES('SearchFilterRules', (select value from systemconfig where key= 'FilterRules'));")
# 订阅优先级
op.execute("delete from systemconfig where key = 'SubscribeFilterRules';")
op.execute(
"insert into systemconfig(key, value) VALUES('SubscribeFilterRules', (select value from systemconfig where key= 'FilterRules'));")
# 洗版优先级
op.execute("delete from systemconfig where key = 'BestVersionFilterRules';")
op.execute(
"insert into systemconfig(key, value) VALUES('BestVersionFilterRules', (select value from systemconfig where key= 'FilterRules2'));")
# 删除旧的优先级规则
op.execute("delete from systemconfig where key = 'FilterRules';")
op.execute("delete from systemconfig where key = 'FilterRules2';")
# ### end Alembic commands ###
def downgrade() -> None:
pass

View File

@ -0,0 +1,27 @@
"""1.0.7
Revision ID: 30329639c12b
Revises: 232dfa044617
Create Date: 2023-09-23 08:25:59.776488
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = '30329639c12b'
down_revision = '232dfa044617'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.execute("delete from systemconfig where key = 'DefaultFilterRules';")
op.execute(
"insert into systemconfig(key, value) VALUES('DefaultFilterRules', (select value from systemconfig where key= 'DefaultIncludeExcludeFilter'));")
op.execute("delete from systemconfig where key = 'DefaultIncludeExcludeFilter';")
# ### end Alembic commands ###
def downgrade() -> None:
pass

View File

@ -0,0 +1,28 @@
"""1_0_3
Revision ID: 52ab4930be04
Revises: ec5fb51fc300
Create Date: 2023-08-28 13:21:45.152012
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = '52ab4930be04'
down_revision = 'ec5fb51fc300'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.execute("delete from systemconfig where key = 'RssSites';")
op.execute("insert into systemconfig(key, value) VALUES('RssSites', (select value from systemconfig where key= 'IndexerSites'));")
op.execute("delete from systemconfig where key = 'SearchResults';")
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###

View File

@ -0,0 +1,34 @@
"""1.0.15
Revision ID: 5813aaa7cb3a
Revises: f94cd1217fd7
Create Date: 2024-03-17 09:04:51.785716
"""
import contextlib
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '5813aaa7cb3a'
down_revision = 'f94cd1217fd7'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with contextlib.suppress(Exception):
with op.batch_alter_table("message") as batch_op:
batch_op.add_column(sa.Column('note', sa.String, nullable=True))
try:
op.create_index('ix_message_reg_time', 'message', ['reg_time'], unique=False)
except Exception as err:
pass
# ### end Alembic commands ###
def downgrade() -> None:
pass

View File

@ -0,0 +1,31 @@
"""1.0.18
Revision ID: 735c01e0453d
Revises: 9cb3993e340e
Create Date: 2024-04-29 19:40:38.375072
"""
import contextlib
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '735c01e0453d'
down_revision = '9cb3993e340e'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with contextlib.suppress(Exception):
with op.batch_alter_table("site") as batch_op:
batch_op.add_column(sa.Column('apikey', sa.VARCHAR))
batch_op.add_column(sa.Column('token', sa.VARCHAR))
# ### end Alembic commands ###
def downgrade() -> None:
pass

View File

@ -0,0 +1,30 @@
"""1_0_17
Revision ID: 9cb3993e340e
Revises: d146dea51516
Create Date: 2024-03-28 14:36:35.588392
"""
import contextlib
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '9cb3993e340e'
down_revision = 'd146dea51516'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with contextlib.suppress(Exception):
with op.batch_alter_table("user") as batch_op:
batch_op.add_column(sa.Column('is_otp', sa.BOOLEAN, server_default='0'))
batch_op.add_column(sa.Column('otp_secret', sa.VARCHAR))
# ### end Alembic commands ###
def downgrade() -> None:
pass

View File

@ -0,0 +1,32 @@
"""1.0.0
Revision ID: 9f4edd55c2d4
Revises:
Create Date: 2023-07-13 12:27:26.402317
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '9f4edd55c2d4'
down_revision = None
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
with op.batch_alter_table("subscribe") as batch_op:
batch_op.add_column(sa.Column('sites', sa.Text, nullable=True))
except Exception as e:
pass
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###

View File

@ -0,0 +1,119 @@
"""1.0.20
Revision ID: a40261701909
Revises: ae9d8ed8df97
Create Date: 2024-05-22 19:16:21.374806
"""
import json
from pathlib import Path
from alembic import op
import sqlalchemy as sa
from app.core.config import Settings
# revision identifiers, used by Alembic.
revision = 'a40261701909'
down_revision = 'ae9d8ed8df97'
branch_labels = None
depends_on = None
def upgrade() -> None:
"""
升级目录配置
"""
# 实例化配置
_settings = Settings(
_env_file=Settings().CONFIG_PATH / "app.env",
_env_file_encoding="utf-8"
)
# 下载目录配置升级
download_dirs = []
if _settings.DOWNLOAD_MOVIE_PATH:
download_dirs.append({
"type": "download",
"name": "电影目录",
"path": _settings.DOWNLOAD_MOVIE_PATH,
"media_type": "电影",
"category": "",
"auto_category": True if _settings.DOWNLOAD_CATEGORY else False,
"priority": 1
})
if _settings.DOWNLOAD_TV_PATH:
download_dirs.append({
"type": "download",
"name": "电视剧目录",
"path": _settings.DOWNLOAD_TV_PATH,
"media_type": "电视剧",
"category": "",
"auto_category": True if _settings.DOWNLOAD_CATEGORY else False,
"priority": 2
})
if _settings.DOWNLOAD_PATH:
download_dirs.append({
"type": "download",
"name": "下载目录",
"path": _settings.DOWNLOAD_PATH,
"media_type": "",
"category": "",
"auto_category": True if _settings.DOWNLOAD_CATEGORY else False,
"priority": 4
})
# 插入数据库,报错的话则更新
if download_dirs:
download_dirs_value = json.dumps(download_dirs)
try:
op.execute(f"INSERT INTO systemconfig (key, value) VALUES ('DownloadDirectories', '{download_dirs_value}');")
except Exception as e:
op.execute(f"UPDATE systemconfig SET value = '{download_dirs_value}' WHERE key = 'DownloadDirectories';")
# 媒体库目录配置升级
library_dirs = []
if _settings.LIBRARY_PATH:
for library_path in _settings.LIBRARY_PATH.split(","):
if _settings.LIBRARY_MOVIE_NAME:
library_dirs.append({
"type": "library",
"name": "电影目录",
"path": str(Path(library_path) / _settings.LIBRARY_MOVIE_NAME),
"media_type": "电影",
"category": "",
"auto_category": True if _settings.LIBRARY_CATEGORY else False,
"scrape": True if _settings.SCRAP_METADATA else False,
"priority": 1
})
if _settings.LIBRARY_TV_NAME:
library_dirs.append({
"type": "library",
"name": "电视剧目录",
"path": str(Path(library_path) / _settings.LIBRARY_TV_NAME),
"media_type": "电视剧",
"category": "",
"auto_category": True if _settings.LIBRARY_CATEGORY else False,
"scrape": True if _settings.SCRAP_METADATA else False,
"priority": 2
})
library_dirs.append({
"type": "library",
"name": "媒体库目录",
"path": library_path,
"media_type": "",
"category": "",
"auto_category": True if _settings.LIBRARY_CATEGORY else False,
"scrape": True if _settings.SCRAP_METADATA else False,
"priority": 4
})
# 插入数据库,报错的话则更新
if library_dirs:
library_dirs_value = json.dumps(library_dirs)
try:
op.execute(f"INSERT INTO systemconfig (key, value) VALUES ('LibraryDirectories', '{library_dirs_value}');")
except Exception as e:
op.execute(f"UPDATE systemconfig SET value = '{library_dirs_value}' WHERE key = 'LibraryDirectories';")
def downgrade() -> None:
pass
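For reference, with DOWNLOAD_MOVIE_PATH=/downloads/movie and DOWNLOAD_CATEGORY unset, the upgrade builds a download_dirs list like the one below and stores it JSON-encoded under the DownloadDirectories key (a sketch derived from the code above; values are illustrative):
[
    {
        "type": "download",
        "name": "电影目录",
        "path": "/downloads/movie",
        "media_type": "电影",
        "category": "",
        "auto_category": False,
        "priority": 1
    }
]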

View File

@ -0,0 +1,30 @@
"""1_0_9
Revision ID: a521fbc28b18
Revises: b2f011d3a8b7
Create Date: 2023-09-28 13:37:16.479360
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'a521fbc28b18'
down_revision = 'b2f011d3a8b7'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
with op.batch_alter_table("downloadhistory") as batch_op:
batch_op.add_column(sa.Column('date', sa.String, nullable=True))
batch_op.add_column(sa.Column('channel', sa.String, nullable=True))
except Exception as e:
pass
# ### end Alembic commands ###
def downgrade() -> None:
pass

View File

@ -0,0 +1,30 @@
"""1.0.19
Revision ID: ae9d8ed8df97
Revises: 735c01e0453d
Create Date: 2024-05-16 14:21:46.108359
"""
import contextlib
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'ae9d8ed8df97'
down_revision = '735c01e0453d'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with contextlib.suppress(Exception):
with op.batch_alter_table("site") as batch_op:
batch_op.add_column(sa.Column('timeout', sa.INTEGER))
# ### end Alembic commands ###
def downgrade() -> None:
pass

View File

@ -0,0 +1,29 @@
"""1_0_8
Revision ID: b2f011d3a8b7
Revises: 30329639c12b
Create Date: 2023-09-28 10:15:58.410003
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'b2f011d3a8b7'
down_revision = '30329639c12b'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
with op.batch_alter_table("downloadhistory") as batch_op:
batch_op.add_column(sa.Column('userid', sa.String, nullable=True))
except Exception as e:
pass
# ### end Alembic commands ###
def downgrade() -> None:
pass

View File

@ -0,0 +1,34 @@
"""1.0.16
Revision ID: d146dea51516
Revises: 5813aaa7cb3a
Create Date: 2024-03-18 18:13:38.099531
"""
import contextlib
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd146dea51516'
down_revision = '5813aaa7cb3a'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with contextlib.suppress(Exception):
with op.batch_alter_table("subscribe") as batch_op:
batch_op.add_column(sa.Column('bangumiid', sa.Integer, nullable=True))
try:
op.create_index('ix_subscribe_bangumiid', 'subscribe', ['bangumiid'], unique=False)
except Exception as err:
pass
# ### end Alembic commands ###
def downgrade() -> None:
pass

View File

@ -0,0 +1,32 @@
"""1.0.10
Revision ID: d633ca6cd572
Revises: a521fbc28b18
Create Date: 2023-10-12 08:54:49.728638
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd633ca6cd572'
down_revision = 'a521fbc28b18'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
with op.batch_alter_table("subscribe") as batch_op:
batch_op.add_column(sa.Column('quality', sa.String, nullable=True))
batch_op.add_column(sa.Column('resolution', sa.String, nullable=True))
batch_op.add_column(sa.Column('effect', sa.String, nullable=True))
except Exception as e:
pass
# ### end Alembic commands ###
def downgrade() -> None:
pass

View File

@ -0,0 +1,30 @@
"""1_0_12
Revision ID: d71e624f0208
Revises: 06abf3e7090b
Create Date: 2023-12-12 13:26:34.039497
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd71e624f0208'
down_revision = '06abf3e7090b'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
with op.batch_alter_table("subscribe") as batch_op:
batch_op.add_column(sa.Column('save_path', sa.String, nullable=True))
except Exception as e:
pass
# ### end Alembic commands ###
def downgrade() -> None:
pass

View File

@ -0,0 +1,27 @@
"""1.0.5
Revision ID: e734c7fe6056
Revises: 1e169250e949
Create Date: 2023-09-07 18:19:41.250957
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = 'e734c7fe6056'
down_revision = '1e169250e949'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
op.create_index('ix_transferhistory_tmdbid', 'transferhistory', ['tmdbid'], unique=False)
except Exception as err:
pass
# ### end Alembic commands ###
def downgrade() -> None:
pass

View File

@ -0,0 +1,32 @@
"""1.0.2
Revision ID: ec5fb51fc300
Revises: 14f1813ae8e3
Create Date: 2023-08-12 17:55:06.509548
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'ec5fb51fc300'
down_revision = '14f1813ae8e3'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
try:
with op.batch_alter_table("transferhistory") as batch_op:
batch_op.add_column(sa.Column('files', sa.String, nullable=True))
with op.batch_alter_table("rss") as batch_op:
batch_op.add_column(sa.Column('filter', sa.Integer, nullable=True))
except Exception as e:
pass
# ### end Alembic commands ###
def downgrade() -> None:
pass

View File

@ -0,0 +1,31 @@
"""1_0_14
Revision ID: f94cd1217fd7
Revises: 127a25fdf0e8
Create Date: 2024-03-06 19:19:33.053186
"""
import contextlib
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'f94cd1217fd7'
down_revision = '127a25fdf0e8'
branch_labels = None
depends_on = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
with contextlib.suppress(Exception):
with op.batch_alter_table("subscribe") as batch_op:
batch_op.add_column(sa.Column('manual_total_episode', sa.Integer, nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
pass

View File

@ -1 +1 @@
APP_VERSION = 'v2.0.0-alpha'
APP_VERSION = 'v1.9.8-beta'