thsrite 2023-10-09 11:41:59 +08:00
parent 05bbfbbd54
commit 15fd312765


@@ -741,7 +741,11 @@ class MediaSyncDel(_PluginBase):
             return
         # Iterate over the deleted items
+        last_del_time = None
         for del_media in del_medias:
+            # Deletion time
+            del_time = del_media.get("time")
+            last_del_time = del_time
             # Media type: Movie|Series|Season|Episode
             media_type = del_media.get("type")
             # Media name, e.g. 蜀山战纪
@@ -881,7 +885,7 @@ class MediaSyncDel(_PluginBase):
         # Save history
         self.save_data("history", history)
-        self.save_data("last_time", datetime.datetime.now())
+        self.save_data("last_time", last_del_time or datetime.datetime.now())

     def handle_torrent(self, src: str, torrent_hash: str):
         """
@@ -1047,17 +1051,24 @@ class MediaSyncDel(_PluginBase):
     @staticmethod
     def parse_emby_log(last_time):
-        log_url = "[HOST]System/Logs/embyserver.txt?api_key=[APIKEY]"
+        """
+        Fetch the list of emby log files and parse each of them
+        """
+
+        def __parse_log(file_name: str, del_list: list):
+            """
+            Parse a single emby log file
+            """
+            log_url = f"[HOST]System/Logs/{file_name}?api_key=[APIKEY]"
             log_res = Emby().get_data(log_url)
             if not log_res or log_res.status_code != 200:
                 logger.error("Failed to fetch the emby log, please check the server configuration")
-                return []
+                return del_list
             # Parse deleted media entries from the log with a regex
             pattern = r'(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{3}) Info App: Removing item from database, Type: (\w+), Name: (.*), Path: (.*), Id: (\d+)'
             matches = re.findall(pattern, log_res.text)
-            del_medias = []
             # Loop over the matched media entries
             for match in matches:
                 mtime = match[0]
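For reference, a minimal sketch of what the pattern above extracts from a single deletion entry. The sample line is fabricated to fit the regex; the exact wording in a real embyserver.txt can differ between Emby versions.

    import re

    sample = ('2023-10-09 11:30:00.123 Info App: Removing item from database, '
              'Type: Episode, Name: 蜀山战纪 S01E01, '
              'Path: /media/tv/蜀山战纪/Season 1/S01E01.mkv, Id: 12345')

    pattern = (r'(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{3}) Info App: Removing item '
               r'from database, Type: (\w+), Name: (.*), Path: (.*), Id: (\d+)')

    for mtime, mtype, name, path, item_id in re.findall(pattern, sample):
        # -> 2023-10-09 11:30:00.123 / Episode / 蜀山战纪 S01E01 / the path / 12345
        print(mtime, mtype, name, path, item_id)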
@@ -1113,24 +1124,54 @@ class MediaSyncDel(_PluginBase):
                     "episode": episode,
                 }
                 logger.debug(f"Parsed deleted media: {json.dumps(media)}")
-                del_medias.append(media)
+                del_list.append(media)
+            return del_list
+
+        log_files = []
+        try:
+            # Fetch the list of emby log files
+            log_list_url = "[HOST]System/Logs/Query?Limit=3&api_key=[APIKEY]"
+            log_list_res = Emby().get_data(log_list_url)
+            if log_list_res and log_list_res.status_code == 200:
+                log_files_dict = json.loads(log_list_res.text)
+                for item in log_files_dict.get("Items"):
+                    if str(item.get('Name')).startswith("embyserver"):
+                        log_files.append(str(item.get('Name')))
+        except Exception as e:
+            print(str(e))
+
+        if not log_files:
+            log_files.append("embyserver.txt")
+
+        del_medias = []
+        log_files.reverse()
+        for log_file in log_files:
+            del_medias = __parse_log(log_file, del_medias)
         return del_medias

     @staticmethod
     def parse_jellyfin_log(last_time: datetime):
-        # Sort by date added, descending
-        log_url = "[HOST]System/Logs/Log?name=log_%s.log&api_key=[APIKEY]" % datetime.date.today().strftime("%Y%m%d")
+        """
+        Fetch the list of jellyfin log files and parse each of them
+        """
+
+        def __parse_log(file_name: str, del_list: list):
+            """
+            Parse a single jellyfin log file
+            """
+            log_url = f"[HOST]System/Logs/Log?name={file_name}&api_key=[APIKEY]"
             log_res = Jellyfin().get_data(log_url)
             if not log_res or log_res.status_code != 200:
                 logger.error("Failed to fetch the jellyfin log, please check the server configuration")
-                return []
+                return del_list
             # Parse deleted media entries from the log with a regex
             pattern = r'\[(.*?)\].*?Removing item, Type: "(.*?)", Name: "(.*?)", Path: "(.*?)"'
             matches = re.findall(pattern, log_res.text)
-            del_medias = []
             # Loop over the matched media entries
             for match in matches:
                 mtime = match[0]
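As with the emby parser, a minimal sketch of what the jellyfin pattern above pulls out of one entry. The sample line is fabricated to fit the regex; real log_YYYYMMDD.log lines usually carry more fields and can differ between Jellyfin versions.

    import re

    sample = ('[2023-10-09 11:30:00.123 +08:00] [INF] LibraryManager: Removing item, '
              'Type: "Episode", Name: "蜀山战纪 S01E01", '
              'Path: "/media/tv/蜀山战纪/Season 1/S01E01.mkv"')

    pattern = r'\[(.*?)\].*?Removing item, Type: "(.*?)", Name: "(.*?)", Path: "(.*?)"'

    for mtime, mtype, name, path in re.findall(pattern, sample):
        # -> 2023-10-09 11:30:00.123 +08:00 / Episode / 蜀山战纪 S01E01 / the path
        print(mtime, mtype, name, path)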
@@ -1186,7 +1227,31 @@ class MediaSyncDel(_PluginBase):
                     "episode": episode,
                 }
                 logger.debug(f"Parsed deleted media: {json.dumps(media)}")
-                del_medias.append(media)
+                del_list.append(media)
+            return del_list
+
+        log_files = []
+        try:
+            # Fetch the list of jellyfin log files
+            log_list_url = "[HOST]System/Logs?api_key=[APIKEY]"
+            log_list_res = Jellyfin().get_data(log_list_url)
+            if log_list_res and log_list_res.status_code == 200:
+                log_files_dict = json.loads(log_list_res.text)
+                for item in log_files_dict:
+                    if str(item.get('Name')).startswith("log_"):
+                        log_files.append(str(item.get('Name')))
+        except Exception as e:
+            print(str(e))
+
+        if not log_files:
+            log_files.append("log_%s.log" % datetime.date.today().strftime("%Y%m%d"))
+
+        del_medias = []
+        log_files.reverse()
+        for log_file in log_files:
+            del_medias = __parse_log(log_file, del_medias)
         return del_medias
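Both parsers now share the same shape: query the server for candidate log file names, fall back to the single default file, then fold every file through the inner parser while threading one accumulator list. The reverse() call appears intended to parse the oldest file first, assuming the server lists the newest first, so the final last_del_time comes from the most recent entries. A standalone sketch of that fold with a stubbed parser (parse_one and the file names are illustrative, not plugin API):

    def parse_one(file_name: str, del_list: list) -> list:
        # stand-in for __parse_log: append whatever entries this file yields
        del_list.append({"file": file_name})
        return del_list

    log_files = ["embyserver-2.txt", "embyserver.txt"]  # assumed newest-first order
    log_files.reverse()                                 # oldest file gets parsed first
    del_medias = []
    for log_file in log_files:
        del_medias = parse_one(log_file, del_medias)
    print(del_medias)  # accumulated entries from all files, in parse order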