fix api
parent e0a251b339
commit 37985eba25

@@ -116,14 +116,12 @@ def scrape(fileitem: schemas.FileItem,
     if storage == "local":
         if not scrape_path.exists():
             return schemas.Response(success=False, message="刮削路径不存在")
-        # 刮削本地
-        chain.scrape_metadata(path=scrape_path, mediainfo=mediainfo, transfer_type=settings.TRANSFER_TYPE)
     else:
         if not fileitem.fileid:
             return schemas.Response(success=False, message="刮削文件ID无效")
-        # 刮削在线
-        chain.scrape_metadata_online(storage=storage, fileitem=fileitem, meta=meta, mediainfo=mediainfo)
-    return schemas.Response(success=True, message="刮削完成")
+    # 手动刮削
+    chain.manual_scrape(storage=storage, fileitem=fileitem, meta=meta, mediainfo=mediainfo)
+    return schemas.Response(success=True, message=f"{fileitem.path} 刮削完成")
 
 
 @router.get("/category", summary="查询自动分类配置", response_model=dict)
@@ -2,7 +2,7 @@ import copy
 import time
 from pathlib import Path
 from threading import Lock
-from typing import Optional, List, Tuple
+from typing import Optional, List, Tuple, Union
 
 from app import schemas
 from app.chain import ChainBase
@@ -18,6 +18,7 @@ from app.schemas.types import EventType, MediaType
 from app.utils.http import RequestUtils
 from app.utils.singleton import Singleton
 from app.utils.string import StringUtils
+from app.utils.system import SystemUtils
 
 recognize_lock = Lock()
 
@@ -31,7 +32,7 @@ class MediaChain(ChainBase, metaclass=Singleton):
     # 临时识别结果 {title, name, year, season, episode}
     recognize_temp: Optional[dict] = None
 
-    def meta_nfo(self, meta: MetaBase, mediainfo: MediaInfo,
+    def metadata_nfo(self, meta: MetaBase, mediainfo: MediaInfo,
                  season: int = None, episode: int = None) -> Optional[str]:
         """
         获取NFO文件内容文本
@@ -40,7 +41,7 @@ class MediaChain(ChainBase, metaclass=Singleton):
         :param season: 季号
         :param episode: 集号
         """
-        return self.run_module("meta_nfo", meta=meta, mediainfo=mediainfo, season=season, episode=episode)
+        return self.run_module("metadata_nfo", meta=meta, mediainfo=mediainfo, season=season, episode=episode)
 
     def recognize_by_meta(self, metainfo: MetaBase) -> Optional[MediaInfo]:
         """
@@ -332,42 +333,66 @@ class MediaChain(ChainBase, metaclass=Singleton):
             )
         return None
 
-    def scrape_metadata_online(self, storage: str, fileitem: schemas.FileItem,
+    def manual_scrape(self, storage: str, fileitem: schemas.FileItem,
                       meta: MetaBase, mediainfo: MediaInfo, init_folder: bool = True):
         """
-        远程刮削媒体信息(网盘等)
+        手动刮削媒体信息
         """
 
         def __list_files(_storage: str, _fileid: str, _path: str = None, _drive_id: str = None):
+            """
+            列出下级文件
+            """
             if _storage == "aliyun":
                 return AliyunHelper().list(drive_id=_drive_id, parent_file_id=_fileid, path=_path)
-            if _storage == "u115":
+            elif _storage == "u115":
                 return U115Helper().list(parent_file_id=_fileid, path=_path)
-            return []
+            else:
+                items = SystemUtils.list_sub_all(Path(_path))
+                return [schemas.FileItem(
+                    type="file" if item.is_file() else "dir",
+                    path=str(item),
+                    name=item.name,
+                    basename=item.stem,
+                    extension=item.suffix[1:],
+                    size=item.stat().st_size,
+                    modify_time=item.stat().st_mtime
+                ) for item in items]
 
-        def __upload_file(_storage: str, _fileid: str, _path: Path):
+        def __save_file(_storage: str, _drive_id: str, _fileid: str, _path: Path, _content: Union[bytes, str]):
+            """
+            保存或上传文件
+            """
+            if _storage != "local":
+                # 写入到临时目录
+                temp_path = settings.TEMP_PATH / _path.name
+                temp_path.write_bytes(_content)
+                # 上传文件
+                logger.info(f"正在上传 {_path.name} ...")
                 if _storage == "aliyun":
-                return AliyunHelper().upload(parent_file_id=_fileid, file_path=_path)
-            if _storage == "u115":
-                return U115Helper().upload(parent_file_id=_fileid, file_path=_path)
+                    AliyunHelper().upload(drive_id=_drive_id, parent_file_id=_fileid, file_path=temp_path)
+                elif _storage == "u115":
+                    U115Helper().upload(parent_file_id=_fileid, file_path=temp_path)
+                logger.info(f"{_path.name} 上传完成")
+            else:
+                # 保存到本地
+                logger.info(f"正在保存 {_path.name} ...")
+                _path.write_bytes(_content)
+                logger.info(f"{_path} 已保存")
 
-        def __save_image(u: str, f: Path):
+        def __save_image(_url: str) -> Optional[bytes]:
             """
             下载图片并保存
             """
             try:
-                logger.info(f"正在下载{f.stem}图片:{u} ...")
-                r = RequestUtils(proxies=settings.PROXY).get_res(url=u)
+                logger.info(f"正在下载图片:{_url} ...")
+                r = RequestUtils(proxies=settings.PROXY).get_res(url=_url)
                 if r:
-                    f.write_bytes(r.content)
+                    return r.content
                 else:
-                    logger.info(f"{f.stem}图片下载失败,请检查网络连通性!")
+                    logger.info(f"{_url} 图片下载失败,请检查网络连通性!")
             except Exception as err:
-                logger.error(f"{f.stem}图片下载失败:{str(err)}!")
+                logger.error(f"{_url} 图片下载失败:{str(err)}!")
 
-        if storage not in ["aliyun", "u115"]:
-            logger.warn(f"不支持的存储类型:{storage}")
-            return
-
         # 当前文件路径
         filepath = Path(fileitem.path)
@@ -380,27 +405,24 @@ class MediaChain(ChainBase, metaclass=Singleton):
             if fileitem.type == "file":
                 # 电影文件
                 logger.info(f"正在生成电影nfo:{mediainfo.title_year} - {filepath.name}")
-                movie_nfo = self.meta_nfo(meta=meta, mediainfo=mediainfo)
+                movie_nfo = self.metadata_nfo(meta=meta, mediainfo=mediainfo)
                 if not movie_nfo:
                     logger.warn(f"{filepath.name} nfo文件生成失败!")
                     return
-                # 写入到临时目录
-                nfo_path = settings.TEMP_PATH / f"{filepath.stem}.nfo"
-                nfo_path.write_bytes(movie_nfo)
-                # 上传NFO文件
-                logger.info(f"上传NFO文件:{nfo_path.name} ...")
-                __upload_file(storage, fileitem.parent_fileid, nfo_path)
-                logger.info(f"{nfo_path.name} 上传成功")
+                # 保存或上传nfo文件
+                __save_file(_storage=storage, _drive_id=fileitem.drive_id, _fileid=fileitem.parent_fileid,
+                            _path=filepath.with_suffix(".nfo"), _content=movie_nfo)
             else:
                 # 电影目录
                 files = __list_files(_storage=storage, _fileid=fileitem.fileid,
                                      _drive_id=fileitem.drive_id, _path=fileitem.path)
                 for file in files:
-                    self.scrape_metadata_online(storage=storage, fileitem=file,
+                    self.manual_scrape(storage=storage, fileitem=file,
                                        meta=meta, mediainfo=mediainfo,
                                        init_folder=False)
-                # 生成图片文件和上传
+                # 生成目录内图片文件
                 if init_folder:
+                    # 图片
                     for attr_name, attr_value in vars(mediainfo).items():
                         if attr_value \
                                 and attr_name.endswith("_path") \
@@ -408,13 +430,12 @@ class MediaChain(ChainBase, metaclass=Singleton):
                                 and isinstance(attr_value, str) \
                                 and attr_value.startswith("http"):
                             image_name = attr_name.replace("_path", "") + Path(attr_value).suffix
+                            image_path = filepath / image_name
+                            # 下载图片
+                            content = __save_image(_url=attr_value)
                             # 写入nfo到根目录
-                            image_path = settings.TEMP_PATH / image_name
-                            __save_image(attr_value, image_path)
-                            # 上传图片文件到当前目录
-                            logger.info(f"上传图片文件:{image_path.name} ...")
-                            __upload_file(storage, fileitem.fileid, image_path)
-                            logger.info(f"{image_path.name} 上传成功")
+                            __save_file(_storage=storage, _drive_id=fileitem.drive_id, _fileid=fileitem.fileid,
+                                        _path=image_path, _content=content)
         else:
             # 电视剧
             if fileitem.type == "file":
@@ -428,41 +449,35 @@ class MediaChain(ChainBase, metaclass=Singleton):
                     logger.warn(f"{filepath.name} 无法识别文件媒体信息!")
                     return
                 # 获取集的nfo文件
-                episode_nfo = self.meta_nfo(meta=file_meta, mediainfo=file_mediainfo,
+                episode_nfo = self.metadata_nfo(meta=file_meta, mediainfo=file_mediainfo,
                                                 season=file_meta.begin_season, episode=file_meta.begin_episode)
                 if not episode_nfo:
                     logger.warn(f"{filepath.name} nfo生成失败!")
                     return
-                # 写入到临时目录
-                nfo_path = settings.TEMP_PATH / f"{filepath.stem}.nfo"
-                nfo_path.write_bytes(episode_nfo)
-                # 上传NFO文件,到文件当前目录下
-                logger.info(f"上传NFO文件:{nfo_path.name} ...")
-                __upload_file(storage, fileitem.parent_fileid, nfo_path)
-                logger.info(f"{nfo_path.name} 上传成功")
+                # 保存或上传nfo文件
+                __save_file(_storage=storage, _drive_id=fileitem.drive_id, _fileid=fileitem.parent_fileid,
+                            _path=filepath.with_suffix(".nfo"), _content=episode_nfo)
             elif meta.begin_season:
                 # 当前为季的目录,处理目录内的文件
                 files = __list_files(_storage=storage, _fileid=fileitem.fileid,
                                      _drive_id=fileitem.drive_id, _path=fileitem.path)
                 for file in files:
-                    self.scrape_metadata_online(storage=storage, fileitem=file,
+                    self.manual_scrape(storage=storage, fileitem=file,
                                        meta=meta, mediainfo=mediainfo,
                                        init_folder=False)
                 # 生成季的nfo和图片
                 if init_folder:
                     # 季nfo
-                    season_nfo = self.meta_nfo(meta=meta, mediainfo=mediainfo, season=meta.begin_season)
+                    season_nfo = self.metadata_nfo(meta=meta, mediainfo=mediainfo, season=meta.begin_season)
                     if not season_nfo:
                         logger.warn(f"无法生成电视剧季nfo文件:{meta.name}")
                         return
                     # 写入nfo到根目录
-                    nfo_path = settings.TEMP_PATH / "season.nfo"
-                    nfo_path.write_bytes(season_nfo)
-                    # 上传NFO文件
-                    logger.info(f"上传NFO文件:{nfo_path.name} ...")
-                    __upload_file(storage, fileitem.fileid, nfo_path)
-                    logger.info(f"{nfo_path.name} 上传成功")
+                    nfo_path = filepath / "season.nfo"
+                    __save_file(_storage=storage, _drive_id=fileitem.drive_id, _fileid=fileitem.fileid,
+                                _path=nfo_path, _content=season_nfo)
                     # TMDB季poster图片
+                    if settings.SCRAP_SOURCE == "themoviedb":
                         sea_seq = str(meta.begin_season).rjust(2, '0')
                         # 查询季剧详情
                         seasoninfo = self.tmdb_info(tmdbid=mediainfo.tmdb_id, mtype=MediaType.TV,
@@ -472,14 +487,13 @@ class MediaChain(ChainBase, metaclass=Singleton):
                             return
                         if seasoninfo.get("poster_path"):
                             # 下载图片
-                            ext = Path(seasoninfo.get('poster_path')).suffix
-                            url = f"https://{settings.TMDB_IMAGE_DOMAIN}/t/p/original{seasoninfo.get('poster_path')}"
-                            image_path = filepath.parent.with_name(f"season{sea_seq}-poster{ext}")
-                            __save_image(url, image_path)
-                            # 上传图片文件到当前目录
-                            logger.info(f"上传图片文件:{image_path.name} ...")
-                            __upload_file(storage, fileitem.fileid, image_path)
-                            logger.info(f"{image_path.name} 上传成功")
+                            content = __save_image(f"https://{settings.TMDB_IMAGE_DOMAIN}/t/p/original"
+                                                   f"{seasoninfo.get('poster_path')}")
+                            image_path = filepath.with_name(f"season{sea_seq}"
+                                                            f"-poster{Path(seasoninfo.get('poster_path')).suffix}")
+                            # 保存图片文件到当前目录
+                            __save_file(_storage=storage, _drive_id=fileitem.drive_id, _fileid=fileitem.fileid,
+                                        _path=image_path, _content=content)
                     # 季的其它图片
                     for attr_name, attr_value in vars(mediainfo).items():
                         if attr_value \
@@ -490,32 +504,28 @@ class MediaChain(ChainBase, metaclass=Singleton):
                                 and attr_value.startswith("http"):
                             image_name = attr_name.replace("_path", "") + Path(attr_value).suffix
                             image_path = filepath.parent.with_name(image_name)
-                            __save_image(attr_value, image_path)
-                            # 上传图片文件到当前目录
-                            logger.info(f"上传图片文件:{image_path.name} ...")
-                            __upload_file(storage, fileitem.fileid, image_path)
-                            logger.info(f"{image_path.name} 上传成功")
+                            content = __save_image(attr_value)
+                            # 保存图片文件到当前目录
+                            __save_file(_storage=storage, _drive_id=fileitem.drive_id, _fileid=fileitem.fileid,
+                                        _path=image_path, _content=content)
             else:
                 # 当前为根目录,处理目录内的文件
                 files = __list_files(_storage=storage, _fileid=fileitem.fileid,
                                      _drive_id=fileitem.drive_id, _path=fileitem.path)
                 for file in files:
-                    self.scrape_metadata_online(storage=storage, fileitem=file,
+                    self.manual_scrape(storage=storage, fileitem=file,
                                        meta=meta, mediainfo=mediainfo,
                                        init_folder=False)
                 # 生成根目录的nfo和图片
                 if init_folder:
-                    tv_nfo = self.meta_nfo(meta=meta, mediainfo=mediainfo)
+                    tv_nfo = self.metadata_nfo(meta=meta, mediainfo=mediainfo)
                     if not tv_nfo:
                         logger.warn(f"无法生成电视剧nfo文件:{meta.name}")
                         return
                     # 写入nfo到根目录
-                    nfo_path = settings.TEMP_PATH / "tvshow.nfo"
-                    nfo_path.write_bytes(tv_nfo)
-                    # 上传NFO文件
-                    logger.info(f"上传NFO文件:{nfo_path.name} ...")
-                    __upload_file(storage, fileitem.fileid, nfo_path)
-                    logger.info(f"{nfo_path.name} 上传成功")
+                    nfo_path = filepath / "tvshow.nfo"
+                    __save_file(_storage=storage, _drive_id=fileitem.drive_id, _fileid=fileitem.fileid,
+                                _path=nfo_path, _content=tv_nfo)
                     # 生成根目录图片
                     for attr_name, attr_value in vars(mediainfo).items():
                         if attr_name \
@@ -526,10 +536,9 @@ class MediaChain(ChainBase, metaclass=Singleton):
                                 and attr_value.startswith("http"):
                             image_name = attr_name.replace("_path", "") + Path(attr_value).suffix
                             image_path = filepath.parent.with_name(image_name)
-                            __save_image(attr_value, image_path)
-                            # 上传图片文件到当前目录
-                            logger.info(f"上传图片文件:{image_path.name} ...")
-                            __upload_file(storage, fileitem.fileid, image_path)
-                            logger.info(f"{image_path.name} 上传成功")
+                            content = __save_image(attr_value)
+                            # 保存图片文件到当前目录
+                            __save_file(_storage=storage, _drive_id=fileitem.drive_id, _fileid=fileitem.fileid,
+                                        _path=image_path, _content=content)
 
         logger.info(f"{filepath.name} 刮削完成")
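For orientation: the MediaChain changes above fold the old write-to-temp-then-upload steps into a single __save_file helper that either writes locally or stages the bytes and hands them to a cloud uploader. A minimal standalone sketch of that dispatch shape, assuming a generic uploader callable rather than the project's AliyunHelper/U115Helper:

from pathlib import Path
from tempfile import TemporaryDirectory
from typing import Callable


def save_or_upload(storage: str, target: Path, content: bytes,
                   uploader: Callable[[Path], None]) -> None:
    # Mirror of the __save_file idea: local storage writes in place,
    # anything else is staged in a temp file and passed to the uploader.
    if storage != "local":
        with TemporaryDirectory() as tmp:
            staged = Path(tmp) / target.name
            staged.write_bytes(content)   # stage the payload
            uploader(staged)              # e.g. a cloud helper's upload()
    else:
        target.write_bytes(content)       # write next to the media file


# Usage sketch with a no-op uploader standing in for the real cloud helpers
save_or_upload("local", Path("/tmp/movie.nfo"), b"<movie/>", uploader=lambda p: None)
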
@@ -546,7 +546,7 @@ class AliyunHelper:
             self.__handle_error(res, "移动文件")
             return False
 
-    def upload(self, parent_file_id: str, file_path: Path) -> Optional[dict]:
+    def upload(self, drive_id: str, parent_file_id: str, file_path: Path) -> Optional[schemas.FileItem]:
         """
         上传文件,并标记完成
         """
@@ -555,7 +555,7 @@ class AliyunHelper:
             return None
         headers = self.__get_headers(params)
         res = RequestUtils(headers=headers, timeout=10).post_res(self.create_file_url, json={
-            "drive_id": params.get("resourceDriveId"),
+            "drive_id": drive_id,
             "parent_file_id": parent_file_id,
             "name": file_path.name,
             "type": "file",
@@ -566,7 +566,6 @@ class AliyunHelper:
             return None
         # 获取上传参数
         result = res.json()
-        drive_id = result.get("drive_id")
         file_id = result.get("file_id")
         upload_id = result.get("upload_id")
         part_info_list = result.get("part_info_list")
@@ -587,10 +586,15 @@ class AliyunHelper:
             if not res:
                 self.__handle_error(res, "标记上传状态")
                 return None
-            return {
-                "drive_id": drive_id,
-                "file_id": file_id
-            }
+            result = res.json()
+            return schemas.FileItem(
+                fileid=result.get("file_id"),
+                drive_id=result.get("drive_id"),
+                parent_fileid=result.get("parent_file_id"),
+                type="file",
+                name=result.get("name"),
+                path=f"{file_path.parent}/{result.get('name')}",
+            )
         else:
             logger.warn("上传文件失败:无法获取上传地址!")
             return None
@@ -233,14 +233,14 @@ class U115Helper(metaclass=Singleton):
             logger.error(f"移动115文件失败:{str(e)}")
             return False
 
-    def upload(self, parent_file_id: str, file_path: Path) -> Optional[dict]:
+    def upload(self, parent_file_id: str, file_path: Path) -> Optional[schemas.FileItem]:
         """
         上传文件
         """
         if not self.__init_cloud():
             return None
         try:
-            ticket = self.cloud.storage().request_upload(dir_id=parent_file_id, file_path=file_path)
+            ticket = self.cloud.storage().request_upload(dir_id=parent_file_id, file_path=str(file_path))
             if ticket is None:
                 logger.warn(f"115请求上传出错")
                 return None
@@ -256,13 +256,23 @@ class U115Helper(metaclass=Singleton):
             )
             por = bucket.put_object_from_file(
                 key=ticket.object_key,
-                filename=file_path,
+                filename=str(file_path),
                 headers=ticket.headers,
             )
             result = por.resp.response.json()
             if result:
-                logger.info(f"115上传文件成功:{result}")
-                return result
+                fileitem = result.get('data')
+                logger.info(f"115上传文件成功:{fileitem}")
+                return schemas.FileItem(
+                    fileid=fileitem.get('file_id'),
+                    parent_fileid=parent_file_id,
+                    type="file",
+                    name=fileitem.get('file_name'),
+                    path=f"{file_path / fileitem.get('file_name')}",
+                    size=fileitem.get('file_size'),
+                    extension=Path(fileitem.get('file_name')).suffix[1:],
+                    pickcode=fileitem.get('pickcode')
+                )
             else:
                 logger.warn(f"115上传文件失败:{por.resp.response.text}")
                 return None
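Note that both AliyunHelper.upload and U115Helper.upload now return a typed schemas.FileItem instead of a plain dict, so callers read attributes and treat None as failure. A rough sketch of what that buys a caller, using a simplified stand-in dataclass rather than the project's actual schema:

from dataclasses import dataclass
from typing import Optional


@dataclass
class FileItem:  # simplified stand-in for schemas.FileItem
    fileid: Optional[str] = None
    parent_fileid: Optional[str] = None
    type: Optional[str] = None
    name: Optional[str] = None
    path: Optional[str] = None


def report_upload(item: Optional[FileItem]) -> None:
    # Attribute access instead of dict.get(); a failed upload is simply None.
    if item is None:
        print("upload failed")
    else:
        print(f"uploaded {item.name} -> {item.path} (id={item.fileid})")


report_upload(FileItem(fileid="abc123", name="movie.nfo", path="/Movies/movie.nfo"))
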
@@ -765,7 +765,7 @@ class DoubanModule(_ModuleBase):
                 logger.error(f"刮削文件 {file} 失败,原因:{str(e)}")
         logger.info(f"{path} 刮削完成")
 
-    def meta_nfo(self, mediainfo: MediaInfo, season: int = None, **kwargs) -> Optional[str]:
+    def metadata_nfo(self, mediainfo: MediaInfo, season: int = None, **kwargs) -> Optional[str]:
         """
         获取NFO文件内容文本
         :param mediainfo: 媒体信息
@@ -336,7 +336,7 @@ class TheMovieDbModule(_ModuleBase):
                              force_img=force_img)
         logger.info(f"{path} 刮削完成")
 
-    def meta_nfo(self, meta: MetaBase, mediainfo: MediaInfo,
+    def metadata_nfo(self, meta: MetaBase, mediainfo: MediaInfo,
                  season: int = None, episode: int = None) -> Optional[str]:
         """
         获取NFO文件内容文本
@@ -247,6 +247,7 @@ class TmdbScraper:
         :param file_path: 电影文件路径
         """
         # 开始生成XML
+        if file_path:
             logger.info(f"正在生成电影NFO文件:{file_path.name}")
             doc = minidom.Document()
             root = DomUtils.add_node(doc, doc, "movie")
@@ -293,6 +293,25 @@ class SystemUtils:
 
         return dirs
 
+    @staticmethod
+    def list_sub_all(directory: Path) -> List[Path]:
+        """
+        列出当前目录下的所有子目录和文件(不递归)
+        """
+        if not directory.exists():
+            return []
+
+        if directory.is_file():
+            return []
+
+        items = []
+
+        # 遍历目录
+        for path in directory.iterdir():
+            items.append(path)
+
+        return items
+
     @staticmethod
     def get_directory_size(path: Path) -> float:
         """
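The new SystemUtils.list_sub_all above is essentially a guarded, non-recursive Path.iterdir(); an equivalent standalone sketch (illustrative only, not the project's code):

from pathlib import Path
from typing import List


def list_sub_all(directory: Path) -> List[Path]:
    # Non-recursive listing of a directory's files and sub-directories.
    if not directory.exists() or directory.is_file():
        return []
    return list(directory.iterdir())


# Example: entries directly under the current working directory
print(list_sub_all(Path(".")))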