mirror of
https://github.com/d0zingcat/MoviePilot-Plugins.git
synced 2026-05-13 15:09:12 +00:00
Merge branch 'main' of https://github.com/Seed680/MoviePilot-Plugins-main
This commit is contained in:
BIN
icons/IMDb_IOS-OSX_App.png
Normal file
BIN
icons/IMDb_IOS-OSX_App.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 18 KiB |
BIN
icons/Mihomo_Meta_A.png
Executable file
BIN
icons/Mihomo_Meta_A.png
Executable file
Binary file not shown.
|
After Width: | Height: | Size: 58 KiB |
2026
package.json
2026
package.json
File diff suppressed because it is too large
Load Diff
870
package.v2.json
870
package.v2.json
@@ -1,423 +1,453 @@
|
||||
{
|
||||
"SiteStatistic": {
|
||||
"name": "站点数据统计",
|
||||
"description": "站点统计数据图表。",
|
||||
"labels": "站点,仪表板",
|
||||
"version": "1.6",
|
||||
"icon": "statistic.png",
|
||||
"author": "lightolly,jxxghp",
|
||||
"level": 2,
|
||||
"history": {
|
||||
"v1.6": "优化了站点数据获取失败时的回退逻辑",
|
||||
"v1.5": "修复了发送增量通知失败等一些问题",
|
||||
"v1.4.1": "支持数据刷新时发送消息通知",
|
||||
"v1.3": "远程刷新命令移植到主程序",
|
||||
"v1.2": "继续修复增量数据统计问题",
|
||||
"v1.1": "修复增量数据统计问题",
|
||||
"v1.0": "MoviePilot V2 版本站点数据统计插件"
|
||||
}
|
||||
},
|
||||
"BrushFlow": {
|
||||
"name": "站点刷流",
|
||||
"description": "自动托管刷流,将会提高对应站点的访问频率。",
|
||||
"labels": "刷流,仪表板",
|
||||
"version": "4.3.1",
|
||||
"icon": "brush.jpg",
|
||||
"author": "jxxghp,InfinityPacer",
|
||||
"level": 2,
|
||||
"history": {
|
||||
"v4.3.1": "修复了一些细节问题",
|
||||
"v4.3": "支持带宽采样并计算平均值,以优化刷流效率",
|
||||
"v4.2": "优化执行周期输入,需要MoviePilot v2.2.1+",
|
||||
"v4.1": "支持通过CRON表达式配置开启时间,固定10分钟为执行周期",
|
||||
"v4.0": "站点独立配置项支持配置NexusPHP 站点自动跳过下载提示页",
|
||||
"v3.9": "MoviePilot V2 版本站点刷流插件"
|
||||
}
|
||||
},
|
||||
"AutoSignIn": {
|
||||
"name": "站点自动签到",
|
||||
"description": "自动模拟登录、签到站点。",
|
||||
"labels": "站点",
|
||||
"version": "2.6",
|
||||
"icon": "signin.png",
|
||||
"author": "thsrite",
|
||||
"level": 2,
|
||||
"history": {
|
||||
"v2.6": "感谢madrays佬提供的UI!",
|
||||
"v2.5.4": "增加保号风险提示",
|
||||
"v2.5.3": "优化执行周期输入,需要MoviePilot v2.2.1+",
|
||||
"v2.5.2": "修复HDArea签到",
|
||||
"v2.5.1": "修复空签到失败问题",
|
||||
"v2.5": "MoviePilot V2 版本站点自动签到插件"
|
||||
}
|
||||
},
|
||||
"DownloadSiteTag": {
|
||||
"name": "下载任务分类与标签",
|
||||
"description": "自动给下载任务分类与打站点标签、剧集名称标签",
|
||||
"labels": "下载管理",
|
||||
"version": "2.2",
|
||||
"icon": "Youtube-dl_B.png",
|
||||
"author": "叮叮当",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v2.2": "MoviePilot V2 版本下载任务分类与标签插件"
|
||||
}
|
||||
},
|
||||
"MediaServerRefresh": {
|
||||
"name": "媒体库服务器刷新",
|
||||
"description": "入库后自动刷新Emby/Jellyfin/Plex服务器海报墙。",
|
||||
"labels": "媒体库",
|
||||
"version": "1.3.2",
|
||||
"icon": "refresh2.png",
|
||||
"author": "jxxghp",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v1.3.2": "适配飞牛媒体库",
|
||||
"v1.3.1": "修复兼容性问题",
|
||||
"v1.3": "MoviePilot V2 版本媒体库服务器刷新插件"
|
||||
}
|
||||
},
|
||||
"MediaServerMsg": {
|
||||
"name": "媒体库服务器通知",
|
||||
"description": "发送Emby/Jellyfin/Plex服务器的播放、入库等通知消息。",
|
||||
"labels": "消息通知,媒体库",
|
||||
"version": "1.6",
|
||||
"icon": "mediaplay.png",
|
||||
"author": "jxxghp",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v1.6": "查询剧集图片兼容没有季集信息的情况",
|
||||
"v1.5": "支持独立控制媒体服务器通知",
|
||||
"v1.4": "MoviePilot V2 版本媒体库服务器通知插件"
|
||||
}
|
||||
},
|
||||
"ChatGPT": {
|
||||
"name": "ChatGPT",
|
||||
"description": "消息交互支持与ChatGPT对话。",
|
||||
"labels": "消息通知,识别",
|
||||
"version": "2.1.6",
|
||||
"icon": "Chatgpt_A.png",
|
||||
"author": "jxxghp",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v2.1.6": "支持自定义辅助识别提示词",
|
||||
"v2.1.5": "兼容一些模型返回json数据信息用markdown语法包裹的情况",
|
||||
"v2.1.4": "不处理http链接",
|
||||
"v2.1.3": "修复通知异常",
|
||||
"v2.1.2": "支持传入多个api key",
|
||||
"v2.1.1": "兼容/v1后仍有路径的接口",
|
||||
"v2.1.0": "优化辅助识别提示词",
|
||||
"v2.0.1": "修复辅助识别",
|
||||
"v2.0": "适配MoviePilot V2 版本,采用链式事件机制"
|
||||
}
|
||||
},
|
||||
"TorrentTransfer": {
|
||||
"name": "自动转移做种",
|
||||
"description": "定期转移下载器中的做种任务到另一个下载器。",
|
||||
"labels": "做种",
|
||||
"version": "1.10.2",
|
||||
"icon": "seed.png",
|
||||
"author": "jxxghp",
|
||||
"level": 2,
|
||||
"history": {
|
||||
"v1.10.2": "增加保留原标签和原分类的选项",
|
||||
"v1.10.1": "优化“立即运行一次”按钮位置",
|
||||
"v1.10": "支持跳过校验(仅支持 qBittorrent)",
|
||||
"v1.9": "优化执行周期输入,需要MoviePilot v2.2.1+",
|
||||
"v1.8": "支持qbittorrent 5",
|
||||
"v1.7": "MoviePilot V2 版本自动转移做种插件",
|
||||
"v1.7.1": "修复兼容性问题"
|
||||
}
|
||||
},
|
||||
"RssSubscribe": {
|
||||
"name": "自定义订阅",
|
||||
"description": "定时刷新RSS报文,识别内容后添加订阅或直接下载。",
|
||||
"labels": "订阅",
|
||||
"version": "2.1",
|
||||
"icon": "rss.png",
|
||||
"author": "jxxghp",
|
||||
"level": 2,
|
||||
"history": {
|
||||
"v2.1": "优化执行周期输入,需要MoviePilot v2.2.1+",
|
||||
"v2.0": "兼容MoviePilot V2 版本"
|
||||
}
|
||||
},
|
||||
"FFmpegThumb": {
|
||||
"name": "FFmpeg缩略图",
|
||||
"description": "TheMovieDb没有背景图片时使用FFmpeg截取视频文件缩略图",
|
||||
"labels": "刮削",
|
||||
"version": "2.1",
|
||||
"icon": "ffmpeg.png",
|
||||
"author": "jxxghp",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v2.1": "优化执行周期输入,需要MoviePilot v2.2.1+",
|
||||
"v2.0": "兼容MoviePilot V2 版本"
|
||||
}
|
||||
},
|
||||
"LibraryScraper": {
|
||||
"name": "媒体库刮削",
|
||||
"description": "定时对媒体库进行刮削,补齐缺失元数据和图片。",
|
||||
"labels": "刮削",
|
||||
"version": "2.1.1",
|
||||
"icon": "scraper.png",
|
||||
"author": "jxxghp",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v2.1.1": "调整目录计算方法,以支持更多重命名格式",
|
||||
"v2.1": "优化执行周期输入,需要MoviePilot v2.2.1+",
|
||||
"v2.0": "兼容MoviePilot V2 版本",
|
||||
"v1.5": "修复未获取fanart图片的问题",
|
||||
"v1.4.1": "修复nfo文件读取失败时任务中断问题"
|
||||
}
|
||||
},
|
||||
"PersonMeta": {
|
||||
"name": "演职人员刮削",
|
||||
"description": "刮削演职人员图片以及中文名称。",
|
||||
"labels": "媒体库,刮削",
|
||||
"version": "2.1",
|
||||
"icon": "actor.png",
|
||||
"author": "jxxghp",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v2.1": "优化执行周期输入,需要MoviePilot v2.2.1+",
|
||||
"v2.0": "兼容MoviePilot V2 版本",
|
||||
"v1.4": "人物图片调整为优先从TMDB获取,避免douban图片CDN加载过慢的问题",
|
||||
"v1.3": "修复v1.8.5版本后刮削报错问题"
|
||||
}
|
||||
},
|
||||
"SpeedLimiter": {
|
||||
"name": "播放限速",
|
||||
"description": "外网播放媒体库视频时,自动对下载器进行限速。",
|
||||
"labels": "网络",
|
||||
"version": "2.1",
|
||||
"icon": "Librespeed_A.png",
|
||||
"author": "Shurelol",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v2.1": "修复表单参数",
|
||||
"v2.0": "兼容MoviePilot V2 版本",
|
||||
"v1.2": "增加不限速路径配置,以应对网盘直链播放的情况"
|
||||
}
|
||||
},
|
||||
"AutoClean": {
|
||||
"name": "定时清理媒体库",
|
||||
"description": "定时清理用户下载的种子、源文件、媒体库文件。",
|
||||
"labels": "媒体库",
|
||||
"version": "2.1",
|
||||
"icon": "clean.png",
|
||||
"author": "thsrite",
|
||||
"level": 2,
|
||||
"history": {
|
||||
"v2.1": "优化执行周期输入,需要MoviePilot v2.2.1+",
|
||||
"v2.0": "兼容MoviePilot V2 版本"
|
||||
}
|
||||
},
|
||||
"TorrentRemover": {
|
||||
"name": "自动删种",
|
||||
"description": "自动删除下载器中的下载任务。",
|
||||
"labels": "做种",
|
||||
"version": "2.2",
|
||||
"icon": "delete.jpg",
|
||||
"author": "jxxghp",
|
||||
"level": 2,
|
||||
"history": {
|
||||
"v2.2": "优化执行周期输入,需要MoviePilot v2.2.1+",
|
||||
"v2.1.1": "修复兼容MoviePilot V2 版本",
|
||||
"v2.0": "兼容MoviePilot V2 版本"
|
||||
}
|
||||
},
|
||||
"IYUUAutoSeed": {
|
||||
"name": "IYUU自动辅种",
|
||||
"description": "基于IYUU官方Api实现自动辅种。",
|
||||
"labels": "做种,IYUU",
|
||||
"version": "2.14",
|
||||
"icon": "IYUU.png",
|
||||
"author": "jxxghp,CKun",
|
||||
"level": 2,
|
||||
"history": {
|
||||
"v2.14": "修复馒头不能辅种的问题",
|
||||
"v2.13": "开启跳过校验后需手动开启自动开始",
|
||||
"v2.12": "增加qb下载器分类复用配置",
|
||||
"v2.11": "修复qb跳过校验不自动开始的问题",
|
||||
"v2.10": "Revert 辅种结束后,一起开始所有辅种后暂停的种子(排除了出错的种子)",
|
||||
"v2.9": "修复开启跳过校验后,Tr下载器不自动开始的问题",
|
||||
"v2.8": "为配置主辅分离时,不走辅种下载器检查",
|
||||
"v2.7": "增加主辅分离配置,单独指定辅种下载器",
|
||||
"v2.6": "优化执行周期输入,需要MoviePilot v2.2.1+",
|
||||
"v2.5": "修复qb辅种结束后自动开始暂停的种子",
|
||||
"v2.4": "辅种结束后,一起开始所有辅种后暂停的种子(排除了出错的种子)",
|
||||
"v2.3": "支持qbittorrent 5",
|
||||
"v2.2": "修复种子校验服务未生效",
|
||||
"v2.1": "调整IYUU最新域名",
|
||||
"v2.0": "兼容MoviePilot V2 版本"
|
||||
}
|
||||
},
|
||||
"CrossSeed": {
|
||||
"name": "青蛙辅种助手",
|
||||
"description": "参考ReseedPuppy和IYUU辅种插件实现自动辅种,支持站点:青蛙、AGSVPT、麒麟、UBits、聆音、憨憨等。",
|
||||
"labels": "做种",
|
||||
"version": "3.0.1",
|
||||
"icon": "qingwa.png",
|
||||
"author": "233@qingwa",
|
||||
"level": 2,
|
||||
"history": {
|
||||
"v3.0.1": "遗漏了一个私有属性",
|
||||
"v3.0": "兼容MoviePilot V2 版本"
|
||||
}
|
||||
},
|
||||
"QbCommand": {
|
||||
"name": "QB远程操作",
|
||||
"description": "通过定时任务或交互命令远程操作QB暂停/开始/限速等。",
|
||||
"labels": "下载管理,Qbittorrent",
|
||||
"version": "2.1",
|
||||
"icon": "Qbittorrent_A.png",
|
||||
"author": "DzAvril",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v2.1": "支持qbittorrent 5",
|
||||
"v2.0": "适配MoviePilot V2 版本"
|
||||
}
|
||||
},
|
||||
"HistoryToV2": {
|
||||
"name": "历史记录迁移",
|
||||
"description": "将MoviePilot V1版本的整理历史记录迁移至V2版本。",
|
||||
"labels": "整理,历史记录",
|
||||
"version": "1.1",
|
||||
"icon": "Moviepilot_A.png",
|
||||
"author": "jxxghp",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v1.1": "修复启动提示信息"
|
||||
}
|
||||
},
|
||||
"SyncCookieCloud": {
|
||||
"name": "同步CookieCloud",
|
||||
"description": "同步MoviePilot站点Cookie到本地CookieCloud。",
|
||||
"labels": "站点",
|
||||
"version": "2.2",
|
||||
"icon": "Cookiecloud_A.png",
|
||||
"author": "thsrite",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v2.2": "优化执行周期输入,需要MoviePilot v2.2.1+",
|
||||
"v2.1": "兼容MoviePilot V2"
|
||||
}
|
||||
},
|
||||
"ChineseSubFinder": {
|
||||
"name": "ChineseSubFinder",
|
||||
"description": "整理入库时通知ChineseSubFinder下载字幕。",
|
||||
"labels": "字幕",
|
||||
"version": "2.0",
|
||||
"icon": "chinesesubfinder.png",
|
||||
"author": "jxxghp",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v2.0": "兼容MoviePilot V2"
|
||||
}
|
||||
},
|
||||
"CleanInvalidSeed": {
|
||||
"name": "清理QB无效做种",
|
||||
"description": "清理已经被站点删除的种子及对应源文件,仅支持QB",
|
||||
"labels": "Qbittorrent",
|
||||
"version": "2.0",
|
||||
"icon": "clean_a.png",
|
||||
"author": "DzAvril",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v2.0": "适配 MoviePilot V2"
|
||||
}
|
||||
},
|
||||
"PlayletCategory": {
|
||||
"name": "短剧自动分类",
|
||||
"description": "网络短剧自动整理到独立的分类目录。",
|
||||
"labels": "文件整理",
|
||||
"version": "2.1",
|
||||
"icon": "Amule_A.png",
|
||||
"author": "jxxghp,longqiuyu",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v2.1": "兼容MoviePilot V2",
|
||||
"v2.0": "适配新的目录结构变化,短剧分类名称调整为配置目录路径,升级后需要重新调整设置后才能使用。"
|
||||
}
|
||||
},
|
||||
"MoviePilotUpdateNotify": {
|
||||
"name": "MoviePilot更新推送",
|
||||
"description": "MoviePilot推送release更新通知、自动重启。",
|
||||
"labels": "消息通知,自动更新",
|
||||
"version": "2.1",
|
||||
"icon": "Moviepilot_A.png",
|
||||
"author": "thsrite",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v2.1": "优化执行周期输入,需要MoviePilot v2.2.1+",
|
||||
"v2.0": "兼容MoviePilot V2"
|
||||
}
|
||||
},
|
||||
"DoubanRank": {
|
||||
"name": "豆瓣榜单订阅",
|
||||
"description": "监控豆瓣热门榜单,自动添加订阅。",
|
||||
"labels": "订阅",
|
||||
"version": "2.0.0",
|
||||
"icon": "movie.jpg",
|
||||
"author": "jxxghp",
|
||||
"level": 2,
|
||||
"history": {
|
||||
"v2.0.0": "优化cron表达式输入"
|
||||
}
|
||||
},
|
||||
"DoubanSync": {
|
||||
"name": "豆瓣想看",
|
||||
"description": "同步豆瓣想看数据,自动添加订阅。",
|
||||
"labels": "订阅",
|
||||
"version": "2.1.0",
|
||||
"icon": "douban.png",
|
||||
"author": "jxxghp,dwhmofly",
|
||||
"level": 2,
|
||||
"history": {
|
||||
"v2.1.0": "新增配置项-搜索下载,开启后会优先搜索站点资源进行下载,下载不到才会添加订阅",
|
||||
"v2.0.1": "支持将豆瓣ID转换为MoviePilot中已有用户(在用户个人信息中绑定豆瓣ID),需要MoviePilot v2.2.6+",
|
||||
"v2.0.0": "优化cron表达式输入"
|
||||
}
|
||||
},
|
||||
"TvdbDiscover": {
|
||||
"name": "TheTVDB探索",
|
||||
"description": "让探索支持TheTVDB的数据浏览。",
|
||||
"labels": "探索",
|
||||
"version": "1.1",
|
||||
"icon": "TheTVDB_A.png",
|
||||
"author": "jxxghp",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v1.1": "需要MoviePilot v2.2.7-1+ 版本,否则无法显示图片"
|
||||
}
|
||||
},
|
||||
"SubscribeClear": {
|
||||
"name": "订阅种子清理",
|
||||
"description": "删除指定下载信息。",
|
||||
"labels": "下载管理",
|
||||
"version": "1.0",
|
||||
"icon": "Moviepilot_A.jpg",
|
||||
"author": "k0ala",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v1.0": "支持清理QB中已下载的订阅文件"
|
||||
}
|
||||
},
|
||||
"ToBypassTrackers": {
|
||||
"name": "绕过Trackers",
|
||||
"description": "提供tracker服务器IP地址列表,帮助IPv6连接绕过OpenClash",
|
||||
"labels": "工具",
|
||||
"version": "1.4",
|
||||
"icon": "Clash_A.png",
|
||||
"author": "wumode",
|
||||
"level": 2,
|
||||
"history": {
|
||||
"v1.0": "支持自定义Trackers",
|
||||
"v1.1": "更新列表后发送通知",
|
||||
"v1.2": "修复Trackers加载错误",
|
||||
"v1.3": "新增一些Trackers",
|
||||
"v1.4": "异步查询DNS"
|
||||
}
|
||||
"SiteStatistic": {
|
||||
"name": "站点数据统计",
|
||||
"description": "站点统计数据图表。",
|
||||
"labels": "站点,仪表板",
|
||||
"version": "1.7.1",
|
||||
"icon": "statistic.png",
|
||||
"author": "lightolly,jxxghp",
|
||||
"level": 2,
|
||||
"history": {
|
||||
"v1.7.1": "优化内存占用",
|
||||
"v1.6": "优化了站点数据获取失败时的回退逻辑",
|
||||
"v1.5": "修复了发送增量通知失败等一些问题",
|
||||
"v1.4.1": "支持数据刷新时发送消息通知",
|
||||
"v1.3": "远程刷新命令移植到主程序",
|
||||
"v1.2": "继续修复增量数据统计问题",
|
||||
"v1.1": "修复增量数据统计问题",
|
||||
"v1.0": "MoviePilot V2 版本站点数据统计插件"
|
||||
}
|
||||
},
|
||||
"BrushFlow": {
|
||||
"name": "站点刷流",
|
||||
"description": "自动托管刷流,将会提高对应站点的访问频率。",
|
||||
"labels": "刷流,仪表板",
|
||||
"version": "4.3.3",
|
||||
"icon": "brush.jpg",
|
||||
"author": "jxxghp,InfinityPacer",
|
||||
"level": 2,
|
||||
"history": {
|
||||
"v4.3.2": "增加'删除促销结束的未完成下载'功能",
|
||||
"v4.3.1": "修复了一些细节问题",
|
||||
"v4.3": "支持带宽采样并计算平均值,以优化刷流效率",
|
||||
"v4.2": "优化执行周期输入,需要MoviePilot v2.2.1+",
|
||||
"v4.1": "支持通过CRON表达式配置开启时间,固定10分钟为执行周期",
|
||||
"v4.0": "站点独立配置项支持配置NexusPHP 站点自动跳过下载提示页",
|
||||
"v3.9": "MoviePilot V2 版本站点刷流插件"
|
||||
}
|
||||
},
|
||||
"AutoSignIn": {
|
||||
"name": "站点自动签到",
|
||||
"description": "自动模拟登录、签到站点。",
|
||||
"labels": "站点",
|
||||
"version": "2.6",
|
||||
"icon": "signin.png",
|
||||
"author": "thsrite",
|
||||
"level": 2,
|
||||
"history": {
|
||||
"v2.6": "感谢madrays佬提供的UI!",
|
||||
"v2.5.4": "增加保号风险提示",
|
||||
"v2.5.3": "优化执行周期输入,需要MoviePilot v2.2.1+",
|
||||
"v2.5.2": "修复HDArea签到",
|
||||
"v2.5.1": "修复空签到失败问题",
|
||||
"v2.5": "MoviePilot V2 版本站点自动签到插件"
|
||||
}
|
||||
},
|
||||
"DownloadSiteTag": {
|
||||
"name": "下载任务分类与标签",
|
||||
"description": "自动给下载任务分类与打站点标签、剧集名称标签",
|
||||
"labels": "下载管理",
|
||||
"version": "2.2",
|
||||
"icon": "Youtube-dl_B.png",
|
||||
"author": "叮叮当",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v2.2": "MoviePilot V2 版本下载任务分类与标签插件"
|
||||
}
|
||||
},
|
||||
"MediaServerRefresh": {
|
||||
"name": "媒体库服务器刷新",
|
||||
"description": "入库后自动刷新Emby/Jellyfin/Plex服务器海报墙。",
|
||||
"labels": "媒体库",
|
||||
"version": "1.3.2",
|
||||
"icon": "refresh2.png",
|
||||
"author": "jxxghp",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v1.3.2": "适配飞牛媒体库",
|
||||
"v1.3.1": "修复兼容性问题",
|
||||
"v1.3": "MoviePilot V2 版本媒体库服务器刷新插件"
|
||||
}
|
||||
},
|
||||
"MediaServerMsg": {
|
||||
"name": "媒体库服务器通知",
|
||||
"description": "发送Emby/Jellyfin/Plex服务器的播放、入库等通知消息。",
|
||||
"labels": "消息通知,媒体库",
|
||||
"version": "1.5",
|
||||
"icon": "mediaplay.png",
|
||||
"author": "jxxghp",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v1.5": "支持独立控制媒体服务器通知",
|
||||
"v1.4": "MoviePilot V2 版本媒体库服务器通知插件"
|
||||
}
|
||||
},
|
||||
"ChatGPT": {
|
||||
"name": "ChatGPT",
|
||||
"description": "消息交互支持与ChatGPT对话。",
|
||||
"labels": "消息通知,识别",
|
||||
"version": "2.1.6",
|
||||
"icon": "Chatgpt_A.png",
|
||||
"author": "jxxghp",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v2.1.6": "支持自定义辅助识别提示词",
|
||||
"v2.1.5": "兼容一些模型返回json数据信息用markdown语法包裹的情况",
|
||||
"v2.1.4": "不处理http链接",
|
||||
"v2.1.3": "修复通知异常",
|
||||
"v2.1.2": "支持传入多个api key",
|
||||
"v2.1.1": "兼容/v1后仍有路径的接口",
|
||||
"v2.1.0": "优化辅助识别提示词",
|
||||
"v2.0.1": "修复辅助识别",
|
||||
"v2.0": "适配MoviePilot V2 版本,采用链式事件机制"
|
||||
}
|
||||
},
|
||||
"TorrentTransfer": {
|
||||
"name": "自动转移做种",
|
||||
"description": "定期转移下载器中的做种任务到另一个下载器。",
|
||||
"labels": "做种",
|
||||
"version": "1.10.2",
|
||||
"icon": "seed.png",
|
||||
"author": "jxxghp",
|
||||
"level": 2,
|
||||
"history": {
|
||||
"v1.10.2": "增加保留原标签和原分类的选项",
|
||||
"v1.10.1": "优化“立即运行一次”按钮位置",
|
||||
"v1.10": "支持跳过校验(仅支持 qBittorrent)",
|
||||
"v1.9": "优化执行周期输入,需要MoviePilot v2.2.1+",
|
||||
"v1.8": "支持qbittorrent 5",
|
||||
"v1.7": "MoviePilot V2 版本自动转移做种插件",
|
||||
"v1.7.1": "修复兼容性问题"
|
||||
}
|
||||
},
|
||||
"RssSubscribe": {
|
||||
"name": "自定义订阅",
|
||||
"description": "定时刷新RSS报文,识别内容后添加订阅或直接下载。",
|
||||
"labels": "订阅",
|
||||
"version": "2.1",
|
||||
"icon": "rss.png",
|
||||
"author": "jxxghp",
|
||||
"level": 2,
|
||||
"history": {
|
||||
"v2.1": "优化执行周期输入,需要MoviePilot v2.2.1+",
|
||||
"v2.0": "兼容MoviePilot V2 版本"
|
||||
}
|
||||
},
|
||||
"FFmpegThumb": {
|
||||
"name": "FFmpeg缩略图",
|
||||
"description": "TheMovieDb没有背景图片时使用FFmpeg截取视频文件缩略图",
|
||||
"labels": "刮削",
|
||||
"version": "2.1",
|
||||
"icon": "ffmpeg.png",
|
||||
"author": "jxxghp",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v2.1": "优化执行周期输入,需要MoviePilot v2.2.1+",
|
||||
"v2.0": "兼容MoviePilot V2 版本"
|
||||
}
|
||||
},
|
||||
"LibraryScraper": {
|
||||
"name": "媒体库刮削",
|
||||
"description": "定时对媒体库进行刮削,补齐缺失元数据和图片。",
|
||||
"labels": "刮削",
|
||||
"version": "2.1.1",
|
||||
"icon": "scraper.png",
|
||||
"author": "jxxghp",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v2.1.1": "调整目录计算方法,以支持更多重命名格式",
|
||||
"v2.1": "优化执行周期输入,需要MoviePilot v2.2.1+",
|
||||
"v2.0": "兼容MoviePilot V2 版本",
|
||||
"v1.5": "修复未获取fanart图片的问题",
|
||||
"v1.4.1": "修复nfo文件读取失败时任务中断问题"
|
||||
}
|
||||
},
|
||||
"PersonMeta": {
|
||||
"name": "演职人员刮削",
|
||||
"description": "刮削演职人员图片以及中文名称。",
|
||||
"labels": "媒体库,刮削",
|
||||
"version": "2.1",
|
||||
"icon": "actor.png",
|
||||
"author": "jxxghp",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v2.1": "优化执行周期输入,需要MoviePilot v2.2.1+",
|
||||
"v2.0": "兼容MoviePilot V2 版本",
|
||||
"v1.4": "人物图片调整为优先从TMDB获取,避免douban图片CDN加载过慢的问题",
|
||||
"v1.3": "修复v1.8.5版本后刮削报错问题"
|
||||
}
|
||||
},
|
||||
"SpeedLimiter": {
|
||||
"name": "播放限速",
|
||||
"description": "外网播放媒体库视频时,自动对下载器进行限速。",
|
||||
"labels": "网络",
|
||||
"version": "2.1",
|
||||
"icon": "Librespeed_A.png",
|
||||
"author": "Shurelol",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v2.1": "修复表单参数",
|
||||
"v2.0": "兼容MoviePilot V2 版本",
|
||||
"v1.2": "增加不限速路径配置,以应对网盘直链播放的情况"
|
||||
}
|
||||
},
|
||||
"AutoClean": {
|
||||
"name": "定时清理媒体库",
|
||||
"description": "定时清理用户下载的种子、源文件、媒体库文件。",
|
||||
"labels": "媒体库",
|
||||
"version": "2.1",
|
||||
"icon": "clean.png",
|
||||
"author": "thsrite",
|
||||
"level": 2,
|
||||
"history": {
|
||||
"v2.1": "优化执行周期输入,需要MoviePilot v2.2.1+",
|
||||
"v2.0": "兼容MoviePilot V2 版本"
|
||||
}
|
||||
},
|
||||
"TorrentRemover": {
|
||||
"name": "自动删种",
|
||||
"description": "自动删除下载器中的下载任务。",
|
||||
"labels": "做种",
|
||||
"version": "2.2",
|
||||
"icon": "delete.jpg",
|
||||
"author": "jxxghp",
|
||||
"level": 2,
|
||||
"history": {
|
||||
"v2.2": "优化执行周期输入,需要MoviePilot v2.2.1+",
|
||||
"v2.1.1": "修复兼容MoviePilot V2 版本",
|
||||
"v2.0": "兼容MoviePilot V2 版本"
|
||||
}
|
||||
},
|
||||
"IYUUAutoSeed": {
|
||||
"name": "IYUU自动辅种",
|
||||
"description": "基于IYUU官方Api实现自动辅种。",
|
||||
"labels": "做种,IYUU",
|
||||
"version": "2.14",
|
||||
"icon": "IYUU.png",
|
||||
"author": "jxxghp,CKun",
|
||||
"level": 2,
|
||||
"history": {
|
||||
"v2.14": "修复馒头不能辅种的问题",
|
||||
"v2.13": "开启跳过校验后需手动开启自动开始",
|
||||
"v2.12": "增加qb下载器分类复用配置",
|
||||
"v2.11": "修复qb跳过校验不自动开始的问题",
|
||||
"v2.10": "Revert 辅种结束后,一起开始所有辅种后暂停的种子(排除了出错的种子)",
|
||||
"v2.9": "修复开启跳过校验后,Tr下载器不自动开始的问题",
|
||||
"v2.8": "为配置主辅分离时,不走辅种下载器检查",
|
||||
"v2.7": "增加主辅分离配置,单独指定辅种下载器",
|
||||
"v2.6": "优化执行周期输入,需要MoviePilot v2.2.1+",
|
||||
"v2.5": "修复qb辅种结束后自动开始暂停的种子",
|
||||
"v2.4": "辅种结束后,一起开始所有辅种后暂停的种子(排除了出错的种子)",
|
||||
"v2.3": "支持qbittorrent 5",
|
||||
"v2.2": "修复种子校验服务未生效",
|
||||
"v2.1": "调整IYUU最新域名",
|
||||
"v2.0": "兼容MoviePilot V2 版本"
|
||||
}
|
||||
},
|
||||
"CrossSeed": {
|
||||
"name": "青蛙辅种助手",
|
||||
"description": "参考ReseedPuppy和IYUU辅种插件实现自动辅种,支持站点:青蛙、AGSVPT、麒麟、UBits、聆音、憨憨等。",
|
||||
"labels": "做种",
|
||||
"version": "3.0.1",
|
||||
"icon": "qingwa.png",
|
||||
"author": "233@qingwa",
|
||||
"level": 2,
|
||||
"history": {
|
||||
"v3.0.1": "遗漏了一个私有属性",
|
||||
"v3.0": "兼容MoviePilot V2 版本"
|
||||
}
|
||||
},
|
||||
"QbCommand": {
|
||||
"name": "QB远程操作",
|
||||
"description": "通过定时任务或交互命令远程操作QB暂停/开始/限速等。",
|
||||
"labels": "下载管理,Qbittorrent",
|
||||
"version": "2.1",
|
||||
"icon": "Qbittorrent_A.png",
|
||||
"author": "DzAvril",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v2.1": "支持qbittorrent 5",
|
||||
"v2.0": "适配MoviePilot V2 版本"
|
||||
}
|
||||
},
|
||||
"HistoryToV2": {
|
||||
"name": "历史记录迁移",
|
||||
"description": "将MoviePilot V1版本的整理历史记录迁移至V2版本。",
|
||||
"labels": "整理,历史记录",
|
||||
"version": "1.1",
|
||||
"icon": "Moviepilot_A.png",
|
||||
"author": "jxxghp",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v1.1": "修复启动提示信息"
|
||||
}
|
||||
},
|
||||
"SyncCookieCloud": {
|
||||
"name": "同步CookieCloud",
|
||||
"description": "同步MoviePilot站点Cookie到本地CookieCloud。",
|
||||
"labels": "站点",
|
||||
"version": "2.2",
|
||||
"icon": "Cookiecloud_A.png",
|
||||
"author": "thsrite",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v2.2": "优化执行周期输入,需要MoviePilot v2.2.1+",
|
||||
"v2.1": "兼容MoviePilot V2"
|
||||
}
|
||||
},
|
||||
"ChineseSubFinder": {
|
||||
"name": "ChineseSubFinder",
|
||||
"description": "整理入库时通知ChineseSubFinder下载字幕。",
|
||||
"labels": "字幕",
|
||||
"version": "2.0",
|
||||
"icon": "chinesesubfinder.png",
|
||||
"author": "jxxghp",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v2.0": "兼容MoviePilot V2"
|
||||
}
|
||||
},
|
||||
"CleanInvalidSeed": {
|
||||
"name": "清理QB无效做种",
|
||||
"description": "清理已经被站点删除的种子及对应源文件,仅支持QB",
|
||||
"labels": "Qbittorrent",
|
||||
"version": "2.0",
|
||||
"icon": "clean_a.png",
|
||||
"author": "DzAvril",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v2.0": "适配 MoviePilot V2"
|
||||
}
|
||||
},
|
||||
"PlayletCategory": {
|
||||
"name": "短剧自动分类",
|
||||
"description": "网络短剧自动整理到独立的分类目录。",
|
||||
"labels": "文件整理",
|
||||
"version": "2.1",
|
||||
"icon": "Amule_A.png",
|
||||
"author": "jxxghp,longqiuyu",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v2.1": "兼容MoviePilot V2",
|
||||
"v2.0": "适配新的目录结构变化,短剧分类名称调整为配置目录路径,升级后需要重新调整设置后才能使用。"
|
||||
}
|
||||
},
|
||||
"MoviePilotUpdateNotify": {
|
||||
"name": "MoviePilot更新推送",
|
||||
"description": "MoviePilot推送release更新通知、自动重启。",
|
||||
"labels": "消息通知,自动更新",
|
||||
"version": "2.2",
|
||||
"icon": "Moviepilot_A.png",
|
||||
"author": "thsrite",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v2.2": "支持 MoviePilot v2.5.0+",
|
||||
"v2.1": "优化执行周期输入,需要MoviePilot v2.2.1+",
|
||||
"v2.0": "兼容MoviePilot V2"
|
||||
}
|
||||
},
|
||||
"DoubanRank": {
|
||||
"name": "豆瓣榜单订阅",
|
||||
"description": "监控豆瓣热门榜单,自动添加订阅。",
|
||||
"labels": "订阅",
|
||||
"version": "2.0.0",
|
||||
"icon": "movie.jpg",
|
||||
"author": "jxxghp",
|
||||
"level": 2,
|
||||
"history": {
|
||||
"v2.0.0": "优化cron表达式输入"
|
||||
}
|
||||
},
|
||||
"DoubanSync": {
|
||||
"name": "豆瓣想看",
|
||||
"description": "同步豆瓣想看数据,自动添加订阅。",
|
||||
"labels": "订阅",
|
||||
"version": "2.1.0",
|
||||
"icon": "douban.png",
|
||||
"author": "jxxghp,dwhmofly",
|
||||
"level": 2,
|
||||
"history": {
|
||||
"v2.1.0": "新增配置项-搜索下载,开启后会优先搜索站点资源进行下载,下载不到才会添加订阅",
|
||||
"v2.0.1": "支持将豆瓣ID转换为MoviePilot中已有用户(在用户个人信息中绑定豆瓣ID),需要MoviePilot v2.2.6+",
|
||||
"v2.0.0": "优化cron表达式输入"
|
||||
}
|
||||
},
|
||||
"TvdbDiscover": {
|
||||
"name": "TheTVDB探索",
|
||||
"description": "让探索支持TheTVDB的数据浏览。",
|
||||
"labels": "探索",
|
||||
"version": "1.1",
|
||||
"icon": "TheTVDB_A.png",
|
||||
"author": "jxxghp",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v1.1": "需要MoviePilot v2.2.7-1+ 版本,否则无法显示图片"
|
||||
}
|
||||
},
|
||||
"SubscribeClear": {
|
||||
"name": "订阅种子清理",
|
||||
"description": "删除指定下载信息。",
|
||||
"labels": "下载管理",
|
||||
"version": "1.0",
|
||||
"icon": "Moviepilot_A.jpg",
|
||||
"author": "k0ala",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v1.0": "支持清理QB中已下载的订阅文件"
|
||||
}
|
||||
},
|
||||
"ToBypassTrackers": {
|
||||
"name": "绕过Trackers",
|
||||
"description": "提供tracker服务器IP地址列表,帮助IPv6连接绕过OpenClash",
|
||||
"labels": "工具",
|
||||
"version": "1.4",
|
||||
"icon": "Clash_A.png",
|
||||
"author": "wumode",
|
||||
"level": 2,
|
||||
"history": {
|
||||
"v1.0": "支持自定义Trackers",
|
||||
"v1.1": "更新列表后发送通知",
|
||||
"v1.2": "修复Trackers加载错误",
|
||||
"v1.3": "新增一些Trackers",
|
||||
"v1.4": "异步查询DNS"
|
||||
}
|
||||
},
|
||||
"ImdbSource": {
|
||||
"name": "IMDb源",
|
||||
"description": "让探索支持IMDb数据源。",
|
||||
"labels": "探索",
|
||||
"version": "1.3.1",
|
||||
"icon": "IMDb_IOS-OSX_App.png",
|
||||
"author": "wumode",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v1.3.1": "修复按日期排序错误",
|
||||
"v1.3": "优化网络连接",
|
||||
"v1.2": "推荐热门纪录片",
|
||||
"v1.1": "推荐支持IMDB数据源; 优化海报尺寸,减少卡顿",
|
||||
"v1.0": "探索支持IMDb数据源"
|
||||
}
|
||||
},
|
||||
"ClashRuleProvider": {
|
||||
"name": "Clash Rule Provider",
|
||||
"description": "随时为Clash添加一些额外的规则。",
|
||||
"labels": "工具",
|
||||
"version": "0.1.0",
|
||||
"icon": "Mihomo_Meta_A.png",
|
||||
"author": "wumode",
|
||||
"level": 1,
|
||||
"history": {
|
||||
"v0.1.0": "新增ClashRuleProvider"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -49,8 +49,6 @@ class AutoClean(_PluginBase):
|
||||
_cleantype = None
|
||||
_cleandate = None
|
||||
_cleanuser = None
|
||||
_downloadhis = None
|
||||
_transferhis = None
|
||||
|
||||
# 定时器
|
||||
_scheduler: Optional[BackgroundScheduler] = None
|
||||
@@ -70,9 +68,6 @@ class AutoClean(_PluginBase):
|
||||
|
||||
# 加载模块
|
||||
if self._enabled:
|
||||
self._downloadhis = DownloadHistoryOper()
|
||||
self._transferhis = TransferHistoryOper()
|
||||
|
||||
if self._onlyonce:
|
||||
# 定时服务
|
||||
self._scheduler = BackgroundScheduler(timezone=settings.TZ)
|
||||
@@ -115,9 +110,10 @@ class AutoClean(_PluginBase):
|
||||
return
|
||||
|
||||
# 查询用户清理日期之前的下载历史,不填默认清理全部用户的下载
|
||||
_downloadhis = DownloadHistoryOper()
|
||||
if not self._cleanuser:
|
||||
clean_date = self.__get_clean_date()
|
||||
downloadhis_list = self._downloadhis.list_by_user_date(date=clean_date)
|
||||
downloadhis_list = _downloadhis.list_by_user_date(date=clean_date)
|
||||
logger.info(f'获取到日期 {clean_date} 之前的下载历史 {len(downloadhis_list)} 条')
|
||||
self.__clean_history(date=clean_date, clean_type=self._cleantype, downloadhis_list=downloadhis_list)
|
||||
|
||||
@@ -130,8 +126,8 @@ class AutoClean(_PluginBase):
|
||||
# 1.3.7版本及之前处理多位用户
|
||||
if str(self._cleanuser).count(','):
|
||||
for username in str(self._cleanuser).split(","):
|
||||
downloadhis_list = self._downloadhis.list_by_user_date(date=clean_date,
|
||||
username=username)
|
||||
downloadhis_list = _downloadhis.list_by_user_date(date=clean_date,
|
||||
username=username)
|
||||
logger.info(
|
||||
f'获取到用户 {username} 日期 {clean_date} 之前的下载历史 {len(downloadhis_list)} 条')
|
||||
self.__clean_history(date=clean_date, clean_type=self._cleantype, downloadhis_list=downloadhis_list)
|
||||
@@ -152,8 +148,8 @@ class AutoClean(_PluginBase):
|
||||
# 转strftime
|
||||
clean_date = self.__get_clean_date(clean_date)
|
||||
logger.info(f'{username} 使用 {clean_type} 清理方式,清理 {clean_date} 之前的下载历史')
|
||||
downloadhis_list = self._downloadhis.list_by_user_date(date=clean_date,
|
||||
username=username)
|
||||
downloadhis_list = _downloadhis.list_by_user_date(date=clean_date,
|
||||
username=username)
|
||||
logger.info(
|
||||
f'获取到用户 {username} 日期 {clean_date} 之前的下载历史 {len(downloadhis_list)} 条')
|
||||
self.__clean_history(date=clean_date, clean_type=clean_type,
|
||||
@@ -168,6 +164,7 @@ class AutoClean(_PluginBase):
|
||||
return
|
||||
|
||||
# 读取历史记录
|
||||
_transferhis = TransferHistoryOper()
|
||||
pulgin_history = self.get_data('history') or []
|
||||
|
||||
# 创建一个字典来保存分组结果
|
||||
@@ -197,7 +194,7 @@ class AutoClean(_PluginBase):
|
||||
logger.debug(f'下载历史 {downloadhis.id} {downloadhis.title} 未获取到download_hash,跳过处理')
|
||||
continue
|
||||
# 根据hash获取转移记录
|
||||
transferhis_list = self._transferhis.list_by_hash(download_hash=downloadhis.download_hash)
|
||||
transferhis_list = _transferhis.list_by_hash(download_hash=downloadhis.download_hash)
|
||||
if not transferhis_list:
|
||||
logger.warn(f"下载历史 {downloadhis.download_hash} 未查询到转移记录,跳过处理")
|
||||
continue
|
||||
@@ -208,7 +205,7 @@ class AutoClean(_PluginBase):
|
||||
dest_fileitem = schemas.FileItem(**history.dest_fileitem)
|
||||
StorageChain().delete_file(dest_fileitem)
|
||||
# 删除记录
|
||||
self._transferhis.delete(history.id)
|
||||
_transferhis.delete(history.id)
|
||||
# 删除源文件
|
||||
if clean_type in ["src", "all"]:
|
||||
src_fileitem = schemas.FileItem(**history.src_fileitem)
|
||||
|
||||
@@ -12,9 +12,8 @@ from apscheduler.triggers.cron import CronTrigger
|
||||
from ruamel.yaml import CommentedMap
|
||||
|
||||
from app import schemas
|
||||
from app.chain.site import SiteChain
|
||||
from app.core.config import settings
|
||||
from app.core.event import EventManager, eventmanager, Event
|
||||
from app.core.event import eventmanager, Event
|
||||
from app.db.site_oper import SiteOper
|
||||
from app.helper.browser import PlaywrightHelper
|
||||
from app.helper.cloudflare import under_challenge
|
||||
@@ -49,12 +48,6 @@ class AutoSignIn(_PluginBase):
|
||||
# 可使用的用户级别
|
||||
auth_level = 2
|
||||
|
||||
# 私有属性
|
||||
sites: SitesHelper = None
|
||||
siteoper: SiteOper = None
|
||||
sitechain: SiteChain = None
|
||||
# 事件管理器
|
||||
event: EventManager = None
|
||||
# 定时器
|
||||
_scheduler: Optional[BackgroundScheduler] = None
|
||||
# 加载的模块
|
||||
@@ -75,10 +68,6 @@ class AutoSignIn(_PluginBase):
|
||||
_auto_cf: int = 0
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self.sites = SitesHelper()
|
||||
self.siteoper = SiteOper()
|
||||
self.event = EventManager()
|
||||
self.sitechain = SiteChain()
|
||||
|
||||
# 停止现有任务
|
||||
self.stop_service()
|
||||
@@ -97,8 +86,8 @@ class AutoSignIn(_PluginBase):
|
||||
self._clean = config.get("clean")
|
||||
|
||||
# 过滤掉已删除的站点
|
||||
all_sites = [site.id for site in self.siteoper.list_order_by_pri()] + [site.get("id") for site in
|
||||
self.__custom_sites()]
|
||||
all_sites = [site.id for site in SiteOper().list_order_by_pri()] + [site.get("id") for site in
|
||||
self.__custom_sites()]
|
||||
self._sign_sites = [site_id for site_id in all_sites if site_id in self._sign_sites]
|
||||
self._login_sites = [site_id for site_id in all_sites if site_id in self._login_sites]
|
||||
# 保存配置
|
||||
@@ -272,7 +261,7 @@ class AutoSignIn(_PluginBase):
|
||||
customSites = self.__custom_sites()
|
||||
|
||||
site_options = ([{"title": site.name, "value": site.id}
|
||||
for site in self.siteoper.list_order_by_pri()]
|
||||
for site in SiteOper().list_order_by_pri()]
|
||||
+ [{"title": site.get("name"), "value": site.get("id")}
|
||||
for site in customSites])
|
||||
return [
|
||||
@@ -565,7 +554,7 @@ class AutoSignIn(_PluginBase):
|
||||
sites_info = {} # 记录站点信息
|
||||
|
||||
# 获取站点信息
|
||||
site_indexers = self.sites.get_indexers()
|
||||
site_indexers = SitesHelper().get_indexers()
|
||||
for site in site_indexers:
|
||||
if not site.get("public"):
|
||||
sites_info[site.get("id")] = site.get("name")
|
||||
@@ -734,8 +723,8 @@ class AutoSignIn(_PluginBase):
|
||||
# 按日期排序,最新的在前面
|
||||
try:
|
||||
records.sort(key=lambda x: x.get("day_obj", datetime.now().date()), reverse=True)
|
||||
except:
|
||||
pass # 排序失败时跳过
|
||||
except Exception as e:
|
||||
logger.debug(f"排序失败: {str(e)}")
|
||||
|
||||
# 获取最新的状态作为站点概要
|
||||
latest_status = records[0].get("status", "未知状态")
|
||||
@@ -770,8 +759,8 @@ class AutoSignIn(_PluginBase):
|
||||
# 按日期排序,最新的在前面
|
||||
try:
|
||||
records.sort(key=lambda x: x.get("day_obj", datetime.now().date()), reverse=True)
|
||||
except:
|
||||
pass # 排序失败时跳过
|
||||
except Exception as e:
|
||||
logger.debug(f"排序失败: {str(e)}")
|
||||
|
||||
# 获取最新的状态作为站点概要
|
||||
latest_status = records[0].get("status", "未知状态")
|
||||
@@ -1142,7 +1131,8 @@ class AutoSignIn(_PluginBase):
|
||||
}
|
||||
]
|
||||
|
||||
def _create_expansion_panel(self, site_name, records, status_color, status_icon, latest_status):
|
||||
@staticmethod
|
||||
def _create_expansion_panel(site_name, records, status_color, status_icon, latest_status):
|
||||
"""创建站点折叠面板"""
|
||||
# 生成站点图标(使用站点名的首字母)
|
||||
site_initial = site_name[0].upper() if site_name else "?"
|
||||
@@ -1322,7 +1312,7 @@ class AutoSignIn(_PluginBase):
|
||||
today_history = self.get_data(key=type_str + "-" + today)
|
||||
|
||||
# 查询所有站点
|
||||
all_sites = [site for site in self.sites.get_indexers() if not site.get("public")] + self.__custom_sites()
|
||||
all_sites = [site for site in SitesHelper().get_indexers() if not site.get("public")] + self.__custom_sites()
|
||||
# 过滤掉没有选中的站点
|
||||
if do_sites:
|
||||
do_sites = [site for site in all_sites if site.get("id") in do_sites]
|
||||
@@ -1402,7 +1392,8 @@ class AutoSignIn(_PluginBase):
|
||||
# 失败|错误
|
||||
failed_msg = []
|
||||
|
||||
sites = {site.get('name'): site.get("id") for site in self.sites.get_indexers() if not site.get("public")}
|
||||
sites = {site.get('name'): site.get("id") for site in SitesHelper().get_indexers() if
|
||||
not site.get("public")}
|
||||
for s in status:
|
||||
site_name = s[0]
|
||||
site_id = None
|
||||
@@ -1501,7 +1492,7 @@ class AutoSignIn(_PluginBase):
|
||||
if apikey != settings.API_TOKEN:
|
||||
return schemas.Response(success=False, message="API密钥错误")
|
||||
domain = StringUtils.get_url_domain(url)
|
||||
site_info = self.sites.get_indexer(domain)
|
||||
site_info = SitesHelper().get_indexer(domain)
|
||||
if not site_info:
|
||||
return schemas.Response(
|
||||
success=True,
|
||||
@@ -1533,9 +1524,9 @@ class AutoSignIn(_PluginBase):
|
||||
seconds = (datetime.now() - start_time).seconds
|
||||
domain = StringUtils.get_url_domain(site_info.get('url'))
|
||||
if state:
|
||||
self.siteoper.success(domain=domain, seconds=seconds)
|
||||
SiteOper().success(domain=domain, seconds=seconds)
|
||||
else:
|
||||
self.siteoper.fail(domain)
|
||||
SiteOper().fail(domain)
|
||||
return site_info.get("name"), message
|
||||
|
||||
@staticmethod
|
||||
@@ -1635,9 +1626,9 @@ class AutoSignIn(_PluginBase):
|
||||
seconds = (datetime.now() - start_time).seconds
|
||||
domain = StringUtils.get_url_domain(site_info.get('url'))
|
||||
if state:
|
||||
self.siteoper.success(domain=domain, seconds=seconds)
|
||||
SiteOper().success(domain=domain, seconds=seconds)
|
||||
else:
|
||||
self.siteoper.fail(domain)
|
||||
SiteOper().fail(domain)
|
||||
return site_info.get("name"), message
|
||||
|
||||
@staticmethod
|
||||
|
||||
@@ -54,6 +54,7 @@ class BrushConfig:
|
||||
self.exclude = config.get("exclude")
|
||||
self.size = config.get("size")
|
||||
self.seeder = config.get("seeder")
|
||||
self.timezone_offset = (self.__parse_number(config.get("timezone_offset", "+0")) or 0) * 60 # 转换到分钟
|
||||
self.pubtime = config.get("pubtime")
|
||||
self.seed_time = self.__parse_number(config.get("seed_time"))
|
||||
self.hr_seed_time = self.__parse_number(config.get("hr_seed_time"))
|
||||
@@ -72,6 +73,7 @@ class BrushConfig:
|
||||
self.except_subscribe = config.get("except_subscribe", True)
|
||||
self.brush_sequential = config.get("brush_sequential", False)
|
||||
self.proxy_delete = config.get("proxy_delete", False)
|
||||
self.del_no_free = config.get("del_no_free", False) if self.freeleech in ["free", "2xfree"] else False
|
||||
self.active_time_range = config.get("active_time_range")
|
||||
self.cron = config.get("cron")
|
||||
self.qb_category = config.get("qb_category")
|
||||
@@ -107,6 +109,7 @@ class BrushConfig:
|
||||
"exclude",
|
||||
"size",
|
||||
"seeder",
|
||||
"timezone_offset",
|
||||
"pubtime",
|
||||
"seed_time",
|
||||
"hr_seed_time",
|
||||
@@ -119,7 +122,8 @@ class BrushConfig:
|
||||
"proxy_delete",
|
||||
"qb_category",
|
||||
"site_hr_active",
|
||||
"site_skip_tips"
|
||||
"site_skip_tips",
|
||||
"del_no_free"
|
||||
# 当新增支持字段时,仅在此处添加字段名
|
||||
}
|
||||
try:
|
||||
@@ -173,6 +177,8 @@ class BrushConfig:
|
||||
"exclude": "",
|
||||
"size": "10-500",
|
||||
"seeder": "1",
|
||||
// 用户本地时区与站点时区的时间偏移,单位为小时。例如:主机时区是UTC+8,站点时区是UTC,应配置为+8;主机时区是UTC,站点时区是UTC+8,应配置为-8
|
||||
"timezone_offset": "+0",
|
||||
"pubtime": "5-120",
|
||||
"seed_time": 120,
|
||||
"hr_seed_time": 144,
|
||||
@@ -183,6 +189,8 @@ class BrushConfig:
|
||||
"seed_inactivetime": "",
|
||||
"save_path": "/downloads/site1",
|
||||
"proxy_delete": false,
|
||||
// 是否删除促销超时的未完成下载,仅当freeleech配置为free或2xfree时有效
|
||||
"del_no_free": false,
|
||||
"qb_category": "刷流",
|
||||
"site_hr_active": true,
|
||||
"site_skip_tips": true
|
||||
@@ -251,7 +259,7 @@ class BrushFlow(_PluginBase):
|
||||
# 插件图标
|
||||
plugin_icon = "brush.jpg"
|
||||
# 插件版本
|
||||
plugin_version = "4.3.1"
|
||||
plugin_version = "4.3.3"
|
||||
# 插件作者
|
||||
plugin_author = "jxxghp,InfinityPacer"
|
||||
# 作者主页
|
||||
@@ -263,12 +271,6 @@ class BrushFlow(_PluginBase):
|
||||
# 可使用的用户级别
|
||||
auth_level = 2
|
||||
|
||||
# 私有属性
|
||||
sites_helper = None
|
||||
site_oper = None
|
||||
torrents_chain = None
|
||||
subscribe_oper = None
|
||||
downloader_helper = None
|
||||
# 刷流配置
|
||||
_brush_config = None
|
||||
# Brush任务是否启动
|
||||
@@ -288,11 +290,7 @@ class BrushFlow(_PluginBase):
|
||||
# endregion
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self.sites_helper = SitesHelper()
|
||||
self.site_oper = SiteOper()
|
||||
self.torrents_chain = TorrentsChain()
|
||||
self.subscribe_oper = SubscribeOper()
|
||||
self.downloader_helper = DownloaderHelper()
|
||||
|
||||
self._task_brush_enable = False
|
||||
|
||||
if not config:
|
||||
@@ -314,7 +312,7 @@ class BrushFlow(_PluginBase):
|
||||
|
||||
# 这里先过滤掉已删除的站点并保存,特别注意的是,这里保留了界面选择站点时的顺序,以便后续站点随机刷流或顺序刷流
|
||||
if brush_config.brushsites:
|
||||
site_id_to_public_status = {site.get("id"): site.get("public") for site in self.sites_helper.get_indexers()}
|
||||
site_id_to_public_status = {site.get("id"): site.get("public") for site in SitesHelper().get_indexers()}
|
||||
brush_config.brushsites = [
|
||||
site_id for site_id in brush_config.brushsites
|
||||
if site_id in site_id_to_public_status and not site_id_to_public_status[site_id]
|
||||
@@ -386,7 +384,7 @@ class BrushFlow(_PluginBase):
|
||||
服务信息
|
||||
"""
|
||||
brush_config = self.__get_brush_config()
|
||||
service = self.downloader_helper.get_service(name=brush_config.downloader)
|
||||
service = DownloaderHelper().get_service(name=brush_config.downloader)
|
||||
if not service:
|
||||
self.__log_and_notify_error("站点刷流任务出错,获取下载器实例失败,请检查配置")
|
||||
return None
|
||||
@@ -811,10 +809,10 @@ class BrushFlow(_PluginBase):
|
||||
|
||||
# 站点选项
|
||||
site_options = [{"title": site.get("name"), "value": site.get("id")}
|
||||
for site in self.sites_helper.get_indexers()]
|
||||
for site in SitesHelper().get_indexers()]
|
||||
# 下载器选项
|
||||
downloader_options = [{"title": config.name, "value": config.name}
|
||||
for config in self.downloader_helper.get_configs().values()]
|
||||
for config in DownloaderHelper().get_configs().values()]
|
||||
return [
|
||||
{
|
||||
'component': 'VForm',
|
||||
@@ -1621,6 +1619,27 @@ class BrushFlow(_PluginBase):
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 4
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'del_no_free',
|
||||
'label': '删除促销过期的未完成下载',
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1794,6 +1813,7 @@ class BrushFlow(_PluginBase):
|
||||
"except_subscribe": True,
|
||||
"brush_sequential": False,
|
||||
"proxy_delete": False,
|
||||
"del_no_free": False,
|
||||
"freeleech": "free",
|
||||
"hr": "yes",
|
||||
"enable_site_config": False,
|
||||
@@ -1941,7 +1961,7 @@ class BrushFlow(_PluginBase):
|
||||
# 获取所有站点的信息,并过滤掉不存在的站点
|
||||
site_infos = []
|
||||
for siteid in brush_config.brushsites:
|
||||
siteinfo = self.site_oper.get(siteid)
|
||||
siteinfo = SiteOper().get(siteid)
|
||||
if siteinfo:
|
||||
site_infos.append(siteinfo)
|
||||
|
||||
@@ -1976,13 +1996,13 @@ class BrushFlow(_PluginBase):
|
||||
"""
|
||||
针对站点进行刷流
|
||||
"""
|
||||
siteinfo = self.site_oper.get(siteid)
|
||||
siteinfo = SiteOper().get(siteid)
|
||||
if not siteinfo:
|
||||
logger.warning(f"站点不存在:{siteid}")
|
||||
return True
|
||||
|
||||
logger.info(f"开始获取站点 {siteinfo.name} 的新种子 ...")
|
||||
torrents = self.torrents_chain.browse(domain=siteinfo.domain)
|
||||
torrents = TorrentsChain().browse(domain=siteinfo.domain)
|
||||
if not torrents:
|
||||
logger.info(f"站点 {siteinfo.name} 没有获取到种子")
|
||||
return True
|
||||
@@ -2232,8 +2252,13 @@ class BrushFlow(_PluginBase):
|
||||
if not (seeders_range[0] <= torrent.seeders <= seeders_range[1]):
|
||||
return False, f"做种人数 {torrent.seeders},不在指定范围内"
|
||||
|
||||
# 发布时间
|
||||
pubdate_minutes = self.__get_pubminutes(torrent.pubdate)
|
||||
# 发布时间:用户时间 - 站点时间 - 时区偏移
|
||||
# e.g.1: 用户UTC+8,站点UTC,timezone_offset应为+8,种子在UTC 0:00/UTC+8 8:00发布;
|
||||
# 9:17 - 0:00 - 8:00 = 1:17;1小时17分为正确的发布时间与当前的时间差
|
||||
# e.g.2: 用户UTC,站点UTC+8,timezone_offset应为-8,种子在UTC 0:00/UTC+8 8:00发布:
|
||||
# 1:17 - 8:00 - (-8:00) = 1:17;1小时17分为正确的发布时间与当前的时间差
|
||||
# timezone_offset为后加功能,默认为0,方便后续更多与时间相关的功能开发,之前在单独站点配置中使用pubtime计算过时区偏移的用户也不受影响
|
||||
pubdate_minutes = self.__get_pubminutes(torrent.pubdate) - brush_config.timezone_offset
|
||||
# 已支持独立站点配置,取消单独适配站点时区逻辑,可通过配置项「pubtime」自行适配
|
||||
# pubdate_minutes = self.__adjust_site_pubminutes(pubdate_minutes, torrent)
|
||||
if brush_config.pubtime:
|
||||
@@ -2241,11 +2266,11 @@ class BrushFlow(_PluginBase):
|
||||
if len(pubtimes) == 1:
|
||||
# 单个值:选择发布时间小于等于该值的种子
|
||||
if pubdate_minutes > pubtimes[0]:
|
||||
return False, f"发布时间 {torrent.pubdate},{pubdate_minutes:.0f} 分钟前,不符合条件"
|
||||
return False, f"发布时间(站点时区){torrent.pubdate},当前配置时区偏移 {brush_config.timezone_offset} 小时,{pubdate_minutes:.0f} 分钟前,不符合条件"
|
||||
else:
|
||||
# 范围值:选择发布时间在范围内的种子
|
||||
if not (pubtimes[0] <= pubdate_minutes <= pubtimes[1]):
|
||||
return False, f"发布时间 {torrent.pubdate},{pubdate_minutes:.0f} 分钟前,不在指定范围内"
|
||||
return False, f"发布时间(站点时区){torrent.pubdate},当前配置时区偏移 {brush_config.timezone_offset} 小时,{pubdate_minutes:.0f} 分钟前,不在指定范围内"
|
||||
|
||||
return True, None
|
||||
|
||||
@@ -2348,7 +2373,7 @@ class BrushFlow(_PluginBase):
|
||||
|
||||
if need_delete_hashes:
|
||||
# 如果是QB,则重新汇报Tracker
|
||||
if self.downloader_helper.is_downloader("qbittorrent", service=self.service_info):
|
||||
if DownloaderHelper().is_downloader("qbittorrent", service=self.service_info):
|
||||
self.__qb_torrents_reannounce(torrent_hashes=need_delete_hashes)
|
||||
# 删除种子
|
||||
if downloader.delete_torrents(ids=need_delete_hashes, delete_file=True):
|
||||
@@ -2390,7 +2415,7 @@ class BrushFlow(_PluginBase):
|
||||
seeding_torrents_dict: Dict[str, Any]):
|
||||
brush_config = self.__get_brush_config()
|
||||
|
||||
if not self.downloader_helper.is_downloader("qbittorrent", service=self.service_info):
|
||||
if not DownloaderHelper().is_downloader("qbittorrent", service=self.service_info):
|
||||
logger.info("同步种子刷流标签记录目前仅支持qbittorrent")
|
||||
return
|
||||
|
||||
@@ -2501,6 +2526,25 @@ class BrushFlow(_PluginBase):
|
||||
return True, f"H&R种子,分享率 {torrent_info.get('ratio'):.2f},大于 {brush_config.seed_ratio}"
|
||||
return False, "H&R种子,未能满足设置的H&R删除条件"
|
||||
|
||||
while brush_config.del_no_free and torrent_info.get("downloaded") < torrent_info.get("total_size"):
|
||||
if not torrent_task.get("freedate", None):
|
||||
logger.warning(f"配置了‘删除促销过期的未完成下载’,但未获取到该种子的促销截止时间,跳过。")
|
||||
break
|
||||
try:
|
||||
now = datetime.now()
|
||||
freedate_origin = torrent_task.get("freedate")
|
||||
freedate = freedate_origin.replace("T", " ").replace("Z", "")
|
||||
freedate = datetime.strptime(freedate, "%Y-%m-%d %H:%M:%S")
|
||||
delta_minutes = (((freedate - now).total_seconds() + 60) // 60) - brush_config.timezone_offset
|
||||
logger.debug(
|
||||
f"促销截止(站点时间): {freedate_origin}, 时区偏移: {brush_config.timezone_offset}, 用户当前时间: {now.strftime('%Y-%m-%d %H:%M:%S')}, 时间差: {delta_minutes}分")
|
||||
if delta_minutes <= 0:
|
||||
return True, "促销过期"
|
||||
except Exception as e:
|
||||
logger.warning(f"处理‘删除促销过期的未完成下载’时报错,继续判断其他删除条件。")
|
||||
logger.debug(f"error: {e}")
|
||||
break
|
||||
|
||||
# 处理其他场景,1. 不是H&R种子;2. 是H&R种子但没有特定条件配置
|
||||
reason = reason if not hit_and_run else "H&R种子(未设置H&R条件),未能满足设置的删除条件"
|
||||
if brush_config.seed_time and torrent_info.get("seeding_time") >= float(brush_config.seed_time) * 3600:
|
||||
@@ -2523,18 +2567,39 @@ class BrushFlow(_PluginBase):
|
||||
|
||||
return True, reason if not hit_and_run else "H&R种子(未设置H&R条件)," + reason
|
||||
|
||||
def __evaluate_proxy_pre_conditions_for_delete(self, site_name: str, torrent_info: dict) -> Tuple[bool, str]:
|
||||
def __evaluate_proxy_pre_conditions_for_delete(self, site_name: str,
|
||||
torrent_info: dict, torrent_task: dict) -> Tuple[bool, str]:
|
||||
"""
|
||||
评估动态删除前置条件并返回是否应删除种子及其原因
|
||||
"""
|
||||
brush_config = self.__get_brush_config(sitename=site_name)
|
||||
|
||||
should_delete = False
|
||||
reason = "未能满足动态删除设置的前置删除条件"
|
||||
|
||||
while brush_config.del_no_free and torrent_info.get("downloaded") < torrent_info.get("total_size"):
|
||||
if not torrent_task.get("freedate", None):
|
||||
logger.warning(f"配置了‘删除促销过期的未完成下载’,但未获取到该种子的促销截止时间,跳过。")
|
||||
break
|
||||
try:
|
||||
now = datetime.now()
|
||||
freedate_origin = torrent_task.get("freedate")
|
||||
freedate = freedate_origin.replace("T", " ").replace("Z", "")
|
||||
freedate = datetime.strptime(freedate, "%Y-%m-%d %H:%M:%S")
|
||||
delta_minutes = (((freedate - now).total_seconds() + 60) // 60) - brush_config.timezone_offset
|
||||
logger.debug(
|
||||
f"促销截止(站点时间): {freedate_origin}, 时区偏移: {brush_config.timezone_offset}, 用户当前时间: {now.strftime('%Y-%m-%d %H:%M:%S')}, 时间差: {delta_minutes}分")
|
||||
if delta_minutes <= 0:
|
||||
return True, f"促销已过期"
|
||||
except Exception as e:
|
||||
logger.warning(f"处理‘删除促销过期的未完成下载’时报错,继续判断其他删除条件。")
|
||||
logger.debug(f"error: {e}")
|
||||
break
|
||||
|
||||
if brush_config.download_time and torrent_info.get("downloaded") < torrent_info.get(
|
||||
"total_size") and torrent_info.get("dltime") >= float(brush_config.download_time) * 3600:
|
||||
reason = f"下载耗时 {torrent_info.get('dltime') / 3600:.1f} 小时,大于 {brush_config.download_time} 小时"
|
||||
else:
|
||||
elif not should_delete:
|
||||
return False, reason
|
||||
|
||||
return True, reason
|
||||
@@ -2599,7 +2664,8 @@ class BrushFlow(_PluginBase):
|
||||
|
||||
# 删除种子的具体实现可能会根据实际情况略有不同
|
||||
should_delete, reason = self.__evaluate_proxy_pre_conditions_for_delete(site_name=site_name,
|
||||
torrent_info=torrent_info)
|
||||
torrent_info=torrent_info,
|
||||
torrent_task=torrent_task)
|
||||
if should_delete:
|
||||
delete_hashes.append(torrent_hash)
|
||||
self.__send_delete_message(site_name=site_name, torrent_title=torrent_title, torrent_desc=torrent_desc,
|
||||
@@ -2957,6 +3023,7 @@ class BrushFlow(_PluginBase):
|
||||
"exclude": brush_config.exclude,
|
||||
"size": brush_config.size,
|
||||
"seeder": brush_config.seeder,
|
||||
"timezone_offset": brush_config.timezone_offset,
|
||||
"pubtime": brush_config.pubtime,
|
||||
"seed_time": brush_config.seed_time,
|
||||
"hr_seed_time": brush_config.hr_seed_time,
|
||||
@@ -2980,6 +3047,7 @@ class BrushFlow(_PluginBase):
|
||||
"qb_category": brush_config.qb_category,
|
||||
"enable_site_config": brush_config.enable_site_config,
|
||||
"site_config": brush_config.site_config,
|
||||
"del_no_free": brush_config.del_no_free,
|
||||
"_tabs": self._tabs
|
||||
}
|
||||
|
||||
@@ -3041,7 +3109,8 @@ class BrushFlow(_PluginBase):
|
||||
return data
|
||||
return None
|
||||
|
||||
def __reset_download_url(self, torrent_url, site_id) -> str:
|
||||
@staticmethod
|
||||
def __reset_download_url(torrent_url, site_id) -> str:
|
||||
"""
|
||||
处理下载地址
|
||||
"""
|
||||
@@ -3050,7 +3119,7 @@ class BrushFlow(_PluginBase):
|
||||
if not torrent_url or torrent_url.startswith("magnet"):
|
||||
return torrent_url
|
||||
|
||||
indexers = self.sites_helper.get_indexers()
|
||||
indexers = SitesHelper().get_indexers()
|
||||
if not indexers:
|
||||
return torrent_url
|
||||
|
||||
@@ -3115,7 +3184,8 @@ class BrushFlow(_PluginBase):
|
||||
if not downloader:
|
||||
return None
|
||||
|
||||
if self.downloader_helper.is_downloader("qbittorrent", service=self.service_info):
|
||||
downloader_helper = DownloaderHelper()
|
||||
if downloader_helper.is_downloader("qbittorrent", service=self.service_info):
|
||||
# 限速值转为bytes
|
||||
up_speed = up_speed * 1024 if up_speed else None
|
||||
down_speed = down_speed * 1024 if down_speed else None
|
||||
@@ -3149,7 +3219,7 @@ class BrushFlow(_PluginBase):
|
||||
return torrent_hash
|
||||
return None
|
||||
|
||||
elif self.downloader_helper.is_downloader("transmission", service=self.service_info):
|
||||
elif downloader_helper.is_downloader("transmission", service=self.service_info):
|
||||
# 如果开启代理下载以及种子地址不是磁力地址,则请求种子到内存再传入下载器
|
||||
if not torrent_content.startswith("magnet"):
|
||||
response = RequestUtils(cookies=cookies,
|
||||
@@ -3197,7 +3267,7 @@ class BrushFlow(_PluginBase):
|
||||
获取种子hash
|
||||
"""
|
||||
try:
|
||||
return torrent.get("hash") if self.downloader_helper.is_downloader("qbittorrent", service=self.service_info) \
|
||||
return torrent.get("hash") if DownloaderHelper().is_downloader("qbittorrent", service=self.service_info) \
|
||||
else torrent.hashString
|
||||
except Exception as e:
|
||||
print(str(e))
|
||||
@@ -3214,8 +3284,8 @@ class BrushFlow(_PluginBase):
|
||||
all_hashes = []
|
||||
for torrent in torrents:
|
||||
# 根据下载器类型获取Hash值
|
||||
hash_value = torrent.get("hash") if self.downloader_helper.is_downloader("qbittorrent",
|
||||
service=self.service_info) \
|
||||
hash_value = torrent.get("hash") if DownloaderHelper().is_downloader("qbittorrent",
|
||||
service=self.service_info) \
|
||||
else torrent.hashString
|
||||
if hash_value:
|
||||
all_hashes.append(hash_value)
|
||||
@@ -3230,8 +3300,8 @@ class BrushFlow(_PluginBase):
|
||||
"""
|
||||
try:
|
||||
return [str(tag).strip() for tag in torrent.get("tags").split(',')] \
|
||||
if self.downloader_helper.is_downloader("qbittorrent",
|
||||
service=self.service_info) else torrent.labels or []
|
||||
if DownloaderHelper().is_downloader("qbittorrent",
|
||||
service=self.service_info) else torrent.labels or []
|
||||
except Exception as e:
|
||||
print(str(e))
|
||||
return []
|
||||
@@ -3242,7 +3312,7 @@ class BrushFlow(_PluginBase):
|
||||
"""
|
||||
date_now = int(time.time())
|
||||
# QB
|
||||
if self.downloader_helper.is_downloader("qbittorrent", service=self.service_info):
|
||||
if DownloaderHelper().is_downloader("qbittorrent", service=self.service_info):
|
||||
"""
|
||||
{
|
||||
"added_on": 1693359031,
|
||||
@@ -3649,7 +3719,7 @@ class BrushFlow(_PluginBase):
|
||||
if not self._subscribe_infos:
|
||||
self._subscribe_infos = {}
|
||||
|
||||
subscribes = self.subscribe_oper.list()
|
||||
subscribes = SubscribeOper().list()
|
||||
if subscribes:
|
||||
# 遍历订阅
|
||||
for subscribe in subscribes:
|
||||
@@ -3862,7 +3932,8 @@ class BrushFlow(_PluginBase):
|
||||
# 情况2: 时间段跨越午夜
|
||||
return now >= start_time or now <= end_time
|
||||
|
||||
def __get_site_by_torrent(self, torrent: Any) -> Tuple[int, str]:
|
||||
@staticmethod
|
||||
def __get_site_by_torrent(torrent: Any) -> Tuple[int, str]:
|
||||
"""
|
||||
根据tracker获取站点信息
|
||||
"""
|
||||
@@ -3905,7 +3976,7 @@ class BrushFlow(_PluginBase):
|
||||
# 使用StringUtils工具类获取tracker的域名
|
||||
domain = StringUtils.get_url_domain(tracker)
|
||||
|
||||
site_info = self.sites_helper.get_indexer(domain)
|
||||
site_info = SitesHelper().get_indexer(domain)
|
||||
if site_info:
|
||||
return site_info.get("id"), site_info.get("name")
|
||||
|
||||
|
||||
@@ -337,7 +337,8 @@ class ChatGPT(_PluginBase):
|
||||
def get_page(self) -> List[dict]:
|
||||
pass
|
||||
|
||||
def is_api_error(self, response):
|
||||
@staticmethod
|
||||
def is_api_error(response):
|
||||
"""
|
||||
判断响应是否表示API错误
|
||||
:param response: API响应
|
||||
@@ -486,4 +487,4 @@ class ChatGPT(_PluginBase):
|
||||
"""
|
||||
退出插件
|
||||
"""
|
||||
pass
|
||||
pass
|
||||
|
||||
624
plugins.v2/clashruleprovider/__init__.py
Normal file
624
plugins.v2/clashruleprovider/__init__.py
Normal file
@@ -0,0 +1,624 @@
|
||||
import hashlib
|
||||
import re
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Any, Optional, List, Dict, Tuple, Union
|
||||
|
||||
import pytz
|
||||
import yaml
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
from apscheduler.triggers.cron import CronTrigger
|
||||
from fastapi import Body, Response
|
||||
|
||||
from app.core.config import settings
|
||||
from app.core.event import eventmanager
|
||||
from app.log import logger
|
||||
from app.plugins import _PluginBase
|
||||
from app.plugins.clashruleprovider.clash_rule_parser import Action, RuleType, ClashRule, MatchRule, LogicRule
|
||||
from app.plugins.clashruleprovider.clash_rule_parser import ClashRuleParser
|
||||
from app.schemas.types import EventType
|
||||
from app.utils.http import RequestUtils
|
||||
|
||||
|
||||
class ClashRuleProvider(_PluginBase):
|
||||
# 插件名称
|
||||
plugin_name = "Clash Rule Provider"
|
||||
# 插件描述
|
||||
plugin_desc = "随时为Clash添加一些额外的规则。"
|
||||
# 插件图标
|
||||
plugin_icon = ("https://raw.githubusercontent.com/wumode/MoviePilot-Plugins/"
|
||||
"refs/heads/imdbsource_assets/icons/Mihomo_Meta_A.png")
|
||||
# 插件版本
|
||||
plugin_version = "0.1.0"
|
||||
# 插件作者
|
||||
plugin_author = "wumode"
|
||||
# 作者主页
|
||||
author_url = "https://github.com/wumode"
|
||||
# 插件配置项ID前缀
|
||||
plugin_config_prefix = "clashruleprovider_"
|
||||
# 加载顺序
|
||||
plugin_order = 99
|
||||
# 可使用的用户级别
|
||||
auth_level = 1
|
||||
|
||||
# 插件配置
|
||||
# 启用插件
|
||||
_enabled = False
|
||||
_proxy = False
|
||||
_notify = False
|
||||
# 订阅链接
|
||||
_sub_links = []
|
||||
# Clash 面板 URL
|
||||
_clash_dashboard_url = None
|
||||
# Clash 面板密钥
|
||||
_clash_dashboard_secret = None
|
||||
# MoviePilot URL
|
||||
_movie_pilot_url = None
|
||||
_cron = ''
|
||||
_timeout = 10
|
||||
_retry_times = 3
|
||||
_filter_keywords = []
|
||||
_auto_update_subscriptions = True
|
||||
_ruleset_prefix = '📂<-'
|
||||
|
||||
# 插件数据
|
||||
_clash_config = None
|
||||
_top_rules: List[str] = []
|
||||
_ruleset_rules: List[str] = []
|
||||
_rule_provider: Dict[str, Any] = {}
|
||||
_subscription_info = {}
|
||||
_ruleset_names: Dict[str, str] = {}
|
||||
|
||||
# protected variables
|
||||
_clash_rule_parser = None
|
||||
_ruleset_rule_parser = None
|
||||
_custom_rule_sets = None
|
||||
_scheduler: Optional[BackgroundScheduler] = None
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self._clash_config = self.get_data("clash_config")
|
||||
self._ruleset_rules = self.get_data("ruleset_rules")
|
||||
self._top_rules = self.get_data("top_rules")
|
||||
self._subscription_info = self.get_data("subscription_info") or \
|
||||
{"download": 0, "upload": 0, "total": 0, "expire": 0, "last_update": 0}
|
||||
self._rule_provider = self.get_data("rule_provider") or {}
|
||||
self._ruleset_names = self.get_data("ruleset_names") or {}
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
self._proxy = config.get("proxy")
|
||||
self._notify = config.get("notify"),
|
||||
self._sub_links = config.get("sub_links")
|
||||
self._clash_dashboard_url = config.get("clash_dashboard_url")
|
||||
self._clash_dashboard_secret = config.get("clash_dashboard_secret")
|
||||
self._movie_pilot_url = config.get("movie_pilot_url")
|
||||
if self._movie_pilot_url[-1] == '/':
|
||||
self._movie_pilot_url = self._movie_pilot_url[:-1]
|
||||
self._cron = config.get("cron_string")
|
||||
self._timeout = config.get("timeout")
|
||||
self._retry_times = config.get("retry_times")
|
||||
self._filter_keywords = config.get("filter_keywords")
|
||||
self._ruleset_prefix = config.get("ruleset_prefix", "Custom_")
|
||||
self._auto_update_subscriptions = config.get("auto_update_subscriptions")
|
||||
self._clash_rule_parser = ClashRuleParser()
|
||||
self._ruleset_rule_parser = ClashRuleParser()
|
||||
if self._enabled:
|
||||
self.__parse_config()
|
||||
self._scheduler = BackgroundScheduler(timezone=settings.TZ)
|
||||
self._scheduler.start()
|
||||
|
||||
def get_state(self) -> bool:
|
||||
return self._enabled
|
||||
|
||||
@staticmethod
|
||||
def get_command() -> List[Dict[str, Any]]:
|
||||
pass
|
||||
|
||||
def get_api(self) -> List[Dict[str, Any]]:
|
||||
return [
|
||||
{
|
||||
"path": "/connectivity",
|
||||
"endpoint": self.test_connectivity,
|
||||
"methods": ["POST"],
|
||||
"auth": "bear",
|
||||
"summary": "测试连接",
|
||||
"description": "测试连接"
|
||||
},
|
||||
{
|
||||
"path": "/clash_outbound",
|
||||
"endpoint": self.get_clash_outbound,
|
||||
"methods": ["GET"],
|
||||
"auth": "bear",
|
||||
"summary": "clash outbound",
|
||||
"description": "clash outbound"
|
||||
},
|
||||
{
|
||||
"path": "/status",
|
||||
"endpoint": self.get_status,
|
||||
"methods": ["GET"],
|
||||
"auth": "bear",
|
||||
"summary": "stated",
|
||||
"description": "state"
|
||||
},
|
||||
{
|
||||
"path": "/rules",
|
||||
"endpoint": self.get_rules,
|
||||
"methods": ["GET"],
|
||||
"auth": "bear",
|
||||
"summary": "clash rules",
|
||||
"description": "clash rules"
|
||||
},
|
||||
{
|
||||
"path": "/rules",
|
||||
"endpoint": self.update_rules,
|
||||
"methods": ["PUT"],
|
||||
"auth": "bear",
|
||||
"summary": "clash rules",
|
||||
"description": "clash rules"
|
||||
},
|
||||
{
|
||||
"path": "/reorder-rules",
|
||||
"endpoint": self.reorder_rules,
|
||||
"methods": ["PUT"],
|
||||
"auth": "bear",
|
||||
"summary": "clash rules",
|
||||
"description": "clash rules"
|
||||
},
|
||||
{
|
||||
"path": "/rule",
|
||||
"endpoint": self.update_rule,
|
||||
"methods": ["PUT"],
|
||||
"auth": "bear",
|
||||
"summary": "clash rules",
|
||||
"description": "clash rules"
|
||||
},
|
||||
{
|
||||
"path": "/rule",
|
||||
"endpoint": self.add_rule,
|
||||
"methods": ["POSt"],
|
||||
"auth": "bear",
|
||||
"summary": "clash rules",
|
||||
"description": "clash rules"
|
||||
},
|
||||
{
|
||||
"path": "/rule",
|
||||
"endpoint": self.delete_rule,
|
||||
"methods": ["DELETE"],
|
||||
"auth": "bear",
|
||||
"summary": "clash rules",
|
||||
"description": "clash rules"
|
||||
},
|
||||
{
|
||||
"path": "/subscription",
|
||||
"endpoint": self.get_subscription,
|
||||
"methods": ["GET"],
|
||||
"auth": "bear",
|
||||
"summary": "clash rules",
|
||||
"description": "clash rules"
|
||||
},
|
||||
{
|
||||
"path": "/subscription",
|
||||
"endpoint": self.update_subscription,
|
||||
"methods": ["PUT"],
|
||||
"auth": "bear",
|
||||
"summary": "update clash rules",
|
||||
"description": "update clash rules"
|
||||
},
|
||||
{
|
||||
"path": "/rule_providers",
|
||||
"endpoint": self.get_rule_providers,
|
||||
"methods": ["GET"],
|
||||
"auth": "bear",
|
||||
"summary": "update rule providers",
|
||||
"description": "update rule providers"
|
||||
},
|
||||
{
|
||||
"path": "/ruleset",
|
||||
"endpoint": self.get_ruleset,
|
||||
"methods": ["GET"],
|
||||
"summary": "update rule providers",
|
||||
"description": "update rule providers"
|
||||
},
|
||||
{
|
||||
"path": "/config",
|
||||
"endpoint": self.get_clash_config,
|
||||
"methods": ["GET"],
|
||||
"summary": "update rule providers",
|
||||
"description": "update rule providers"
|
||||
}
|
||||
]
|
||||
|
||||
def get_render_mode(self) -> Tuple[str, str]:
|
||||
"""
|
||||
获取插件渲染模式
|
||||
:return: 1、渲染模式,支持:vue/vuetify,默认vuetify
|
||||
:return: 2、组件路径,默认 dist/assets
|
||||
"""
|
||||
return "vue", "dist/assets"
|
||||
|
||||
def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
|
||||
"""
|
||||
拼装插件配置页面,需要返回两块数据:1、页面配置;2、数据结构
|
||||
"""
|
||||
return [], {}
|
||||
|
||||
def get_page(self) -> List[dict]:
|
||||
return []
|
||||
|
||||
def stop_service(self):
|
||||
"""
|
||||
退出插件
|
||||
"""
|
||||
pass
|
||||
|
||||
def get_service(self) -> List[Dict[str, Any]]:
|
||||
if self.get_state() and self._auto_update_subscriptions:
|
||||
return [{
|
||||
"id": "ClashRuleProvider",
|
||||
"name": "Clash Rule Provider 服务",
|
||||
"trigger": CronTrigger.from_crontab(self._cron),
|
||||
"func": self.update_subscription_service,
|
||||
"kwargs": {}
|
||||
}]
|
||||
return []
|
||||
|
||||
def __update_config(self):
|
||||
# 保存配置
|
||||
self.update_config(
|
||||
{
|
||||
"enabled": self._enabled,
|
||||
"cron": self._cron,
|
||||
"proxy": self._proxy,
|
||||
"notify": self._notify,
|
||||
"sub_links": self._sub_links,
|
||||
"clash_dashboard_url": self._clash_dashboard_url,
|
||||
"clash_dashboard_secret": self._clash_dashboard_secret,
|
||||
"movie_pilot_url": self._movie_pilot_url,
|
||||
"retry_times": self._retry_times,
|
||||
"timeout": self._timeout,
|
||||
})
|
||||
|
||||
def __save_data(self):
|
||||
self.__insert_ruleset()
|
||||
self._top_rules = self._clash_rule_parser.to_string()
|
||||
self._ruleset_rules = self._ruleset_rule_parser.to_string()
|
||||
self.save_data('clash_config', self._clash_config)
|
||||
self.save_data('ruleset_rules', self._ruleset_rules)
|
||||
self.save_data('top_rules', self._top_rules)
|
||||
self.save_data('subscription_info', self._subscription_info)
|
||||
self.save_data('ruleset_names', self._ruleset_names)
|
||||
self.save_data('rule_provider', self._rule_provider)
|
||||
|
||||
def __parse_config(self):
|
||||
if not self._top_rules:
|
||||
return
|
||||
self._clash_rule_parser.parse_rules_from_list(self._top_rules)
|
||||
if not self._ruleset_rules:
|
||||
return
|
||||
self._ruleset_rule_parser.parse_rules_from_list(self._ruleset_rules)
|
||||
|
||||
def test_connectivity(self, params: Dict[str, Any]) -> Dict[str, Any]:
|
||||
if not self._enabled:
|
||||
return {"success": False, "message": ""}
|
||||
if not params.get('clash_dashboard_url') or not params.get('clash_dashboard_secret') \
|
||||
or not params.get('sub_link'):
|
||||
return {"success": False, "message": "missing params"}
|
||||
clash_version_url = f"{params.get('clash_dashboard_url')}/version"
|
||||
ret = RequestUtils(accept_type="application/json",
|
||||
headers={"authorization": f"Bearer {params.get('clash_dashboard_secret')}"}
|
||||
).get(clash_version_url)
|
||||
if not ret:
|
||||
return {"success": False, "message": "无法连接到Clash"}
|
||||
ret = RequestUtils(accept_type="text/html",
|
||||
proxies=settings.PROXY if self._proxy else None
|
||||
).get(params.get('sub_link'))
|
||||
if not ret:
|
||||
return {"success": False, "message": f"Unable to get {params.get('sub_link')}"}
|
||||
return {"success": True, "message": "测试连接成功"}
|
||||
|
||||
def get_ruleset(self, name):
|
||||
if not self._ruleset_names.get(name):
|
||||
return None
|
||||
name = self._ruleset_names.get(name)
|
||||
rules = self.__get_ruleset(name)
|
||||
# if rules or ruleset in self._rule_provider:
|
||||
# self._rule_provider[ruleset] = rules
|
||||
res = yaml.dump({"payload": rules}, allow_unicode=True)
|
||||
return Response(content=res, media_type="text/yaml")
|
||||
|
||||
def get_clash_outbound(self):
|
||||
outbound = self.clash_outbound(self._clash_config)
|
||||
return {"success": True, "message": None, "data": {"outbound": outbound}}
|
||||
|
||||
def get_status(self):
|
||||
rule_size = len(self._clash_config.get("rules", [])) if self._clash_config else 0
|
||||
return {"success": True, "message": "",
|
||||
"data": {"state": self._enabled,
|
||||
"ruleset_prefix": self._ruleset_prefix,
|
||||
"clash": {"rule_size": rule_size},
|
||||
"subscription_info": self._subscription_info,
|
||||
"sub_url": f"{self._movie_pilot_url}/api/v1/plugin/ClashRuleProvider/config?"
|
||||
f"apikey={settings.API_TOKEN}"}}
|
||||
|
||||
def get_clash_config(self):
    """Render the merged Clash config as YAML with subscription usage headers."""
    config = self.clash_config()
    if not config:
        return {"success": False, "message": ""}
    info = self._subscription_info
    # Standard 'Subscription-Userinfo' header so clients can show quota usage.
    headers = {
        'Subscription-Userinfo': (
            f'upload={info["upload"]}; '
            f'download={info["download"]}; '
            f'total={info["total"]}; '
            f'expire={info["expire"]}'
        )
    }
    yaml_text = yaml.dump(config, allow_unicode=True)
    return Response(headers=headers, content=yaml_text, media_type="text/yaml")
|
||||
|
||||
def get_rules(self, rule_type: str) -> Dict[str, Any]:
    """Return rules as dicts; 'ruleset' selects the ruleset parser, anything else the main one."""
    parser = self._ruleset_rule_parser if rule_type == 'ruleset' else self._clash_rule_parser
    return {"success": True, "message": None, "data": {"rules": parser.to_dict()}}
|
||||
|
||||
def delete_rule(self, params: dict = Body(...)):
    """Delete a rule by priority; ruleset deletions also schedule a Clash refresh."""
    if not self._enabled:
        return {"success": False, "message": ""}
    priority = params.get('priority')
    if params.get('type') == 'ruleset':
        removed = self.delete_rule_by_priority(priority, self._ruleset_rule_parser)
        if removed:
            action_str = removed.action.value if isinstance(removed.action, Action) else removed.action
            self.__add_notification_job(f"{self._ruleset_prefix}{action_str}")
    else:
        removed = self.delete_rule_by_priority(priority, self._clash_rule_parser)
    return {"success": removed, "message": None}
|
||||
|
||||
def reorder_rules(self, params: Dict[str, Any]):
    """Move a rule from one priority slot to another in the selected parser."""
    if not self._enabled:
        return {"success": False, "message": ""}
    moved = params.get('moved_priority')
    target = params.get('target_priority')
    try:
        if params.get('type') == 'ruleset':
            self.__reorder_rules(self._ruleset_rule_parser, moved, target)
            self.__add_notification_job(f"{self._ruleset_prefix}{params.get('rule_data').get('action')}")
        else:
            self.__reorder_rules(self._clash_rule_parser, moved, target)
    except Exception as e:
        return {"success": False, "message": str(e)}
    return {"success": True, "message": None}
|
||||
|
||||
def update_rules(self, params: Dict[str, Any]):
    """Replace the full rule list of the selected parser."""
    if not self._enabled:
        return {"success": False, "message": ""}
    parser = self._ruleset_rule_parser if params.get('type') == 'ruleset' else self._clash_rule_parser
    self.__update_rules(params.get('rules'), parser)
    return {"success": True, "message": None}
|
||||
|
||||
def update_rule(self, params: Dict[str, Any]) -> Dict[str, Any]:
    """Update an existing rule identified by its priority."""
    if not self._enabled:
        return {"success": False, "message": ""}
    rule_data = params.get('rule_data')
    if params.get('type') == 'ruleset':
        res = self.update_rule_by_priority(rule_data, self._ruleset_rule_parser)
        if res:
            # Ruleset contents changed: ask Clash to re-pull the provider.
            self.__add_notification_job(f"{self._ruleset_prefix}{rule_data.get('action')}")
    else:
        res = self.update_rule_by_priority(rule_data, self._clash_rule_parser)
    return {"success": bool(res), "message": None}
|
||||
|
||||
def add_rule(self, params: Dict[str, Any]) -> Dict[str, Any]:
    """Insert a new rule at the requested priority."""
    if not self._enabled:
        return {"success": False, "message": ""}
    rule_data = params.get('rule_data')
    if params.get('type') == 'ruleset':
        res = self.add_rule_by_priority(rule_data, self._ruleset_rule_parser)
        if res:
            # Ruleset contents changed: ask Clash to re-pull the provider.
            self.__add_notification_job(f"{self._ruleset_prefix}{rule_data.get('action')}")
    else:
        res = self.add_rule_by_priority(rule_data, self._clash_rule_parser)
    return {"success": bool(res), "message": None}
|
||||
|
||||
def get_subscription(self):
    """Return the first configured subscription link, or None when unset."""
    if not self._sub_links:
        return None
    return {"success": True, "message": None, "data": {"url": self._sub_links[0]}}
|
||||
|
||||
def update_subscription(self, params: Dict[str, Any]):
    """Refresh the Clash config from the stored subscription link.

    The request must carry a ``url`` parameter, but the refresh itself always
    re-downloads the first stored link (``self._sub_links[0]``) — the passed
    value is only validated for presence.
    """
    if not self._enabled:
        return {"success": False, "message": ""}
    url = params.get('url')
    if not url:
        return {"success": False, "message": "missing params"}
    res = self.update_subscription_service()
    if not res:
        # Bug fix: a failed refresh previously reported "success": True.
        return {"success": False, "message": f"订阅链接 {self._sub_links[0]} 更新失败"}
    return {"success": True, "message": "订阅更新成功"}
|
||||
|
||||
def get_rule_providers(self):
    """API endpoint: list rule providers that were not generated by this plugin."""
    return {"success": True, "message": None, "data": self.rule_providers()}
|
||||
|
||||
@staticmethod
|
||||
def clash_outbound(clash_config: Dict[str, Any]) -> Optional[List]:
|
||||
if not clash_config:
|
||||
return []
|
||||
outbound = [{'name': proxy_group.get("name")} for proxy_group in clash_config.get("proxy-groups")]
|
||||
outbound.extend([{'name': proxy.get("name")} for proxy in clash_config.get("proxies")])
|
||||
return outbound
|
||||
|
||||
def rule_providers(self) -> Optional[Dict[str, Any]]:
    """Return rule-providers from the config, excluding the ones this plugin generates.

    Bug fixes:
    - iterate ``.items()`` — the old loop unpacked bare keys and raised ValueError;
    - ``str.startswith`` was misspelled ``startwith``, raising AttributeError.
    """
    if not self._clash_config:
        return None
    providers: Dict[str, Any] = {}
    for key, value in self._clash_config.get("rule-providers", {}).items():
        # Skip providers generated by this plugin (stored under ./CRP/).
        if value.get("path", '').startswith("./CRP/"):
            continue
        providers[key] = value
    return providers
|
||||
|
||||
def __update_rules(self, rules: List[Dict[str, Any]], rule_parser: ClashRuleParser):
    """Rebuild the parser's rule list from dict payloads, then persist."""
    rule_parser.rules = []
    for entry in rules:
        parsed = ClashRuleParser.parse_rule_dict(entry)
        rule_parser.insert_rule_at_priority(parsed, entry.get("priority"))
    self.__save_data()
|
||||
|
||||
def __reorder_rules(self, rule_parser: ClashRuleParser, moved_priority, target_priority):
    """Delegate the move to the parser, then persist the new ordering."""
    rule_parser.reorder_rules(moved_priority, target_priority)
    self.__save_data()
|
||||
|
||||
def __get_ruleset(self, ruleset: str) -> List[str]:
    """List the condition strings of ruleset rules whose action matches the name suffix.

    The ruleset name must start with the configured prefix; the remainder is
    interpreted as a built-in Action when possible, otherwise as a custom
    proxy-group name.
    """
    if not ruleset.startswith(self._ruleset_prefix):
        return []
    action = ruleset[len(self._ruleset_prefix):]
    try:
        final_action = Action(action.upper())
    except ValueError:
        final_action = action  # custom proxy group name
    matched = self._ruleset_rule_parser.filter_rules_by_action(final_action)
    return [rule.condition_string() for rule in matched]
|
||||
|
||||
def __insert_ruleset(self):
    """Mirror every distinct ruleset action into the main rule list as a RULE-SET entry."""
    seen_actions = []
    for rule in self._ruleset_rule_parser.rules:
        action_str = rule.action.value if isinstance(rule.action, Action) else rule.action
        if action_str not in seen_actions:
            seen_actions.append(action_str)
    # Drop previously inserted RULE-SET rules before re-inserting, so stale
    # actions do not accumulate.
    self._clash_rule_parser.remove_rules(
        lambda r: r.rule_type == RuleType.RULE_SET and r.payload.startswith(self._ruleset_prefix))
    for action_str in seen_actions:
        new_rule = ClashRuleParser.parse_rule_line(
            f"RULE-SET,{self._ruleset_prefix}{action_str},{action_str}")
        if not self._clash_rule_parser.has_rule(new_rule):
            self._clash_rule_parser.insert_rule_at_priority(new_rule, 0)
|
||||
|
||||
def update_rule_by_priority(self, rule: Dict[str, Any], rule_parser: ClashRuleParser) -> bool:
    """Replace the rule at the given priority; returns False on invalid input."""
    priority = rule.get("priority")
    if not isinstance(priority, int):
        return False
    parsed = ClashRuleParser.parse_rule_dict(rule)
    if not parsed:
        return False
    updated = rule_parser.update_rule_at_priority(parsed, priority)
    self.__save_data()
    return updated
|
||||
|
||||
def add_rule_by_priority(self, rule: Dict[str, Any], rule_parser: ClashRuleParser) -> bool:
    """Insert a new rule at the given priority; returns True on success.

    Consistency fix: parse via ``ClashRuleParser.parse_rule_dict`` (a
    staticmethod), matching ``update_rule_by_priority``, instead of routing the
    call through the unrelated ``self._clash_rule_parser`` instance.
    """
    priority = rule.get("priority")
    if not isinstance(priority, int):
        return False
    try:
        parsed = ClashRuleParser.parse_rule_dict(rule)
    except ValueError:
        logger.warn(f"无效的输入规则: {rule}")
        return False
    if not parsed:
        return False
    rule_parser.insert_rule_at_priority(parsed, priority)
    self.__save_data()
    return True
|
||||
|
||||
def delete_rule_by_priority(self, priority: int, rule_parser: ClashRuleParser
                            ) -> Optional[Union[ClashRule, LogicRule, MatchRule]]:
    """Remove and return the rule at the given priority; None for bad input or no match."""
    if not isinstance(priority, int):
        return None
    removed = rule_parser.remove_rule_at_priority(priority)
    self.__save_data()
    return removed
|
||||
|
||||
@eventmanager.register(EventType.PluginAction)
def update_subscription_service(self) -> bool:
    """Download the first subscription link, filter its nodes and persist the config.

    Also captures quota data from the 'Subscription-Userinfo' response header
    (upload/download/total/expire) when the provider sends it.

    :return: True on success, False on download or parse failure.
    """
    if not self._sub_links:
        return False
    url = self._sub_links[0]
    ret = RequestUtils(accept_type="text/html",
                       proxies=settings.PROXY if self._proxy else None
                       ).get_res(url)
    if not ret:
        return False
    try:
        # NOTE(review): FullLoader on remote YAML — the subscription source
        # must be trusted; consider yaml.safe_load if the schema allows it.
        rs = yaml.load(ret.content, Loader=yaml.FullLoader)
        self._clash_config = self.__remove_nodes_by_keywords(rs)
    except Exception as e:
        logger.error(f"解析配置出错: {e}")
        return False
    if 'Subscription-Userinfo' in ret.headers:
        matches = re.findall(r'(\w+)=(\d+)', ret.headers['Subscription-Userinfo'])
        variables = {key: int(value) for key, value in matches}
        # Robustness fix: tolerate providers that omit some of the fields
        # (the old code raised KeyError on a partial header); keep the
        # previous value when a field is missing.
        for field in ('download', 'upload', 'total', 'expire'):
            if field in variables:
                self._subscription_info[field] = variables[field]
        self._subscription_info["last_update"] = int(time.time())
        self.save_data('subscription_info', self._subscription_info)
    self.save_data('clash_config', self._clash_config)
    return True
|
||||
|
||||
def notify_clash(self, ruleset: str):
    """Ask the Clash core to re-pull a rule provider via the dashboard API."""
    endpoint = f'{self._clash_dashboard_url}/providers/rules/{ruleset}'
    RequestUtils(content_type="application/json",
                 headers={"authorization": f"Bearer {self._clash_dashboard_secret}"}
                 ).put(endpoint)
|
||||
|
||||
def __add_notification_job(self, ruleset: str):
    """Schedule a one-shot, debounced Clash refresh for a managed rule provider."""
    if ruleset not in self._rule_provider:
        return
    # 30s delay lets the new config be persisted before Clash re-pulls;
    # replace_existing collapses bursts of edits into a single notification.
    self._scheduler.add_job(
        self.notify_clash, "date",
        run_date=datetime.now(tz=pytz.timezone(settings.TZ)) + timedelta(seconds=30),
        args=[ruleset],
        id='CRP-notify-clash',
        replace_existing=True
    )
|
||||
|
||||
def __remove_nodes_by_keywords(self, clash_config: Dict[str, Any]) -> Dict[str, Any]:
    """Strip proxies whose names contain any filter keyword; prune emptied groups.

    Mutates and returns ``clash_config``. When filtering would remove every
    node, the original proxy list is kept and a warning is logged.
    """
    removed_names = []
    kept_proxies = []
    for proxy in clash_config.get("proxies", []):
        name = proxy.get("name", '')
        if any(keyword in name for keyword in self._filter_keywords):
            removed_names.append(proxy.get("name"))
        else:
            kept_proxies.append(proxy)
    if kept_proxies:
        clash_config["proxies"] = kept_proxies
    else:
        # Filtering removed everything: skip it to keep the config usable.
        logger.warn(f"关键词过滤后无可用节点,跳过过滤")
        removed_names = []
    for group in clash_config.get("proxy-groups", []):
        group['proxies'] = [p for p in group.get('proxies') if p not in removed_names]
    # Drop proxy groups that are left with no members.
    clash_config["proxy-groups"] = [g for g in clash_config.get("proxy-groups", []) if g.get("proxies")]
    return clash_config
|
||||
|
||||
def clash_config(self) -> Optional[Dict[str, Any]]:
    """Assemble the final Clash config: subscription base + managed rules + providers.

    Returns None when no subscription config has been loaded yet.

    Bug fixes:
    - the list of rules filtered against existing outbounds was built but then
      discarded (the unfiltered ``self._top_rules`` was prepended instead, so
      the outbound check had no effect);
    - ``self._ruleset_names`` was pruned while iterating ``.items()``, which
      raises "dictionary changed size during iteration";
    - 'rule-providers' of the shallow-copied config is now copied before being
      updated, so the cached ``self._clash_config`` is no longer mutated.
    """
    if not self._clash_config:
        return
    self.__insert_ruleset()
    self._top_rules = self._clash_rule_parser.to_string()
    clash_config = self._clash_config.copy()
    # Hoisted: the set of valid outbound names is loop-invariant.
    outbound_names = [x.get("name", '') for x in self.clash_outbound(clash_config)]
    top_rules = []
    for rule in self._clash_rule_parser.rules:
        # Skip rules whose custom outbound no longer exists in the config.
        if not isinstance(rule.action, Action) and rule.action not in outbound_names:
            logger.warn(f"出站 {rule.action} 不存在, 绕过 {rule.raw_rule}")
            continue
        top_rules.append(rule.raw_rule)
    clash_config["rules"] = top_rules + clash_config.get("rules", [])
    self._rule_provider = {}
    for r in self._clash_rule_parser.rules:
        if r.rule_type == RuleType.RULE_SET and r.payload.startswith(self._ruleset_prefix):
            action_str = r.action.value if isinstance(r.action, Action) else r.action
            # Stable short name derived from the action, used as the provider path.
            path_name = hashlib.sha256(action_str.encode('utf-8')).hexdigest()[:10]
            self._ruleset_names[path_name] = r.payload
            sub_url = (f"{self._movie_pilot_url}/api/v1/plugin/ClashRuleProvider/ruleset?"
                       f"name={path_name}&apikey={settings.API_TOKEN}")
            self._rule_provider[r.payload] = {"behavior": "classical",
                                              "format": "yaml",
                                              "interval": 3600,
                                              "path": f"./CRP/{path_name}.yaml",
                                              "type": "http",
                                              "url": sub_url}
    providers = dict(clash_config.get("rule-providers") or {})
    providers.update(self._rule_provider)
    clash_config['rule-providers'] = providers
    # Prune stale name mappings; iterate over a snapshot since we delete.
    for key, payload in list(self._ruleset_names.items()):
        if payload not in clash_config['rule-providers']:
            del self._ruleset_names[key]
    self.save_data('ruleset_names', self._ruleset_names)
    self.save_data('rule_provider', self._rule_provider)
    return clash_config
|
||||
488
plugins.v2/clashruleprovider/clash_rule_parser.py
Normal file
488
plugins.v2/clashruleprovider/clash_rule_parser.py
Normal file
@@ -0,0 +1,488 @@
|
||||
import re
from typing import List, Dict, Any, Optional, Union, Callable
from dataclasses import dataclass
from enum import Enum


class RuleType(Enum):
    """Enumeration of all supported Clash rule types."""
    # Domain based
    DOMAIN = "DOMAIN"
    DOMAIN_SUFFIX = "DOMAIN-SUFFIX"
    DOMAIN_KEYWORD = "DOMAIN-KEYWORD"
    DOMAIN_REGEX = "DOMAIN-REGEX"
    GEOSITE = "GEOSITE"

    # Destination IP based
    IP_CIDR = "IP-CIDR"
    IP_CIDR6 = "IP-CIDR6"
    IP_SUFFIX = "IP-SUFFIX"
    IP_ASN = "IP-ASN"
    GEOIP = "GEOIP"

    # Source IP based
    SRC_GEOIP = "SRC-GEOIP"
    SRC_IP_ASN = "SRC-IP-ASN"
    SRC_IP_CIDR = "SRC-IP-CIDR"
    SRC_IP_SUFFIX = "SRC-IP-SUFFIX"

    # Ports
    DST_PORT = "DST-PORT"
    SRC_PORT = "SRC-PORT"

    # Inbound
    IN_PORT = "IN-PORT"
    IN_TYPE = "IN-TYPE"
    IN_USER = "IN-USER"
    IN_NAME = "IN-NAME"

    # Process
    PROCESS_PATH = "PROCESS-PATH"
    PROCESS_PATH_REGEX = "PROCESS-PATH-REGEX"
    PROCESS_NAME = "PROCESS-NAME"
    PROCESS_NAME_REGEX = "PROCESS-NAME-REGEX"

    # Misc
    UID = "UID"
    NETWORK = "NETWORK"
    DSCP = "DSCP"

    # Composite / meta
    RULE_SET = "RULE-SET"
    AND = "AND"
    OR = "OR"
    NOT = "NOT"
    SUB_RULE = "SUB-RULE"

    # Final catch-all
    MATCH = "MATCH"


class Action(Enum):
    """Built-in rule actions; custom proxy-group targets stay plain strings."""
    DIRECT = "DIRECT"
    REJECT = "REJECT"
    REJECT_DROP = "REJECT-DROP"
    PASS = "PASS"
    COMPATIBLE = "COMPATIBLE"


@dataclass
class ClashRule:
    """A parsed single-condition Clash routing rule."""
    rule_type: RuleType
    payload: str
    action: Union[Action, str]  # Action enum or custom proxy group name
    additional_params: Optional[List[str]] = None
    raw_rule: str = ""
    priority: int = 0

    def __post_init__(self):
        # Normalize to a list so callers can always iterate.
        if self.additional_params is None:
            self.additional_params = []

    def condition_string(self) -> str:
        """Return the '<TYPE>,<payload>' fragment, without the action."""
        return f"{self.rule_type.value},{self.payload}"


@dataclass
class LogicRule:
    """A logic rule (AND, OR, NOT) combining sub-conditions."""
    logic_type: RuleType
    conditions: List[Union[ClashRule, 'LogicRule']]
    action: Union[Action, str]
    raw_rule: str = ""
    priority: int = 0

    @property
    def rule_type(self) -> RuleType:
        """Alias for ``logic_type`` so all rule classes expose ``rule_type``.

        Bug fix: code such as ``ClashRuleParser.has_rule`` compares
        ``rule.rule_type`` uniformly and previously raised AttributeError when
        a LogicRule was present in the rule list.
        """
        return self.logic_type

    def condition_string(self) -> str:
        """Return '<LOGIC>,((cond),(cond),...)' without the action."""
        conditions_str = ','.join(f"({c.condition_string()})" for c in self.conditions)
        return f"{self.logic_type.value},({conditions_str})"


@dataclass
class MatchRule:
    """The final catch-all MATCH rule."""
    action: Union[Action, str]
    raw_rule: str = ""
    priority: int = 0
    rule_type: RuleType = RuleType.MATCH

    @staticmethod
    def condition_string() -> str:
        return "MATCH"


class ClashRuleParser:
    """Parser and ordered container for Clash routing rules."""

    def __init__(self):
        # Kept sorted by ascending ``priority`` (0 = evaluated first).
        self.rules: List[Union[ClashRule, LogicRule, MatchRule]] = []

    @staticmethod
    def parse_rule_line(line: str) -> Optional[Union[ClashRule, LogicRule, MatchRule]]:
        """Parse a single rule line; returns None when the line is invalid."""
        line = line.strip()
        try:
            # Logic rules (AND, OR, NOT) and MATCH have dedicated grammars.
            if line.startswith(('AND,', 'OR,', 'NOT,')):
                return ClashRuleParser._parse_logic_rule(line)
            elif line.startswith('MATCH'):
                return ClashRuleParser._parse_match_rule(line)
            return ClashRuleParser._parse_regular_rule(line)
        except Exception as e:
            print(f"Error parsing rule '{line}': {e}")
            return None

    @staticmethod
    def parse_rule_dict(clash_rule: Dict[str, Any]) -> Optional[Union[ClashRule, LogicRule, MatchRule]]:
        """Build a rule from its dictionary form (inverse of ``to_dict`` entries)."""
        if not clash_rule:
            return None
        if clash_rule.get("type") in ('AND', 'OR', 'NOT'):
            conditions = clash_rule.get("conditions")
            if not conditions:
                return None
            # Re-serialize the conditions and reuse the line parser.
            conditions_str = ''.join(
                f'({condition.get("type")},{condition.get("payload")})' for condition in conditions)
            raw_rule = f"{clash_rule.get('type')},({conditions_str}),{clash_rule.get('action')}"
            return ClashRuleParser._parse_logic_rule(raw_rule)
        elif clash_rule.get("type") == 'MATCH':
            raw_rule = f"{clash_rule.get('type')},{clash_rule.get('action')}"
            return ClashRuleParser._parse_match_rule(raw_rule)
        else:
            raw_rule = f"{clash_rule.get('type')},{clash_rule.get('payload')},{clash_rule.get('action')}"
            return ClashRuleParser._parse_regular_rule(raw_rule)

    @staticmethod
    def _parse_match_rule(line: str) -> MatchRule:
        """Parse 'MATCH,<action>'. Raises ValueError on malformed input."""
        parts = line.split(',')
        if len(parts) < 2:
            raise ValueError(f"Invalid rule format: {line}")
        action = parts[1]
        try:
            final_action = Action(action.upper())
        except ValueError:
            final_action = action  # custom proxy group name
        return MatchRule(action=final_action, raw_rule=line)

    @staticmethod
    def _parse_regular_rule(line: str) -> ClashRule:
        """Parse a regular (non-logic) rule 'TYPE,payload,action[,extra...]'."""
        parts = line.split(',')
        if len(parts) < 3:
            raise ValueError(f"Invalid rule format: {line}")
        rule_type_str = parts[0].upper()
        payload = parts[1]
        action = parts[2]
        if not payload or not rule_type_str:
            raise ValueError(f"Invalid rule format: {line}")
        additional_params = parts[3:] if len(parts) > 3 else []
        try:
            rule_type = RuleType(rule_type_str)
        except ValueError:
            raise ValueError(f"Unknown rule type: {rule_type_str}")
        # Action enum when built-in, otherwise a custom proxy group name.
        try:
            final_action = Action(action.upper())
        except ValueError:
            final_action = action
        return ClashRule(
            rule_type=rule_type,
            payload=payload,
            action=final_action,
            additional_params=additional_params,
            raw_rule=line
        )

    @staticmethod
    def _parse_logic_rule(line: str) -> LogicRule:
        """Parse a logic rule 'AND|OR|NOT,((cond),(cond),...),action'."""
        logic_rule_match = re.match(r'^(AND|OR|NOT),\((.+)\),([^,]+)$', line)
        if not logic_rule_match:
            raise ValueError(f"Cannot extract action from logic rule: {line}")
        logic_type = RuleType(logic_rule_match.group(1).upper())
        action = logic_rule_match.group(3)
        try:
            final_action = Action(action.upper())
        except ValueError:
            final_action = action
        conditions = ClashRuleParser._parse_logic_conditions(logic_rule_match.group(2))
        return LogicRule(
            logic_type=logic_type,
            conditions=conditions,
            action=final_action,
            raw_rule=line
        )

    @staticmethod
    def _parse_logic_conditions(conditions_str: str) -> List[ClashRule]:
        """Parse flat conditions such as '(DOMAIN,baidu.com),(NETWORK,UDP)'.

        NOTE: a basic, non-recursive parser — nested logic rules inside the
        condition list are not supported.
        """
        conditions = []
        condition_pattern = r'\(([^,]+),([^)]+)\)'
        for rule_type_str, payload in re.findall(condition_pattern, conditions_str):
            try:
                rule_type = RuleType(rule_type_str.upper())
            except ValueError:
                print(f"Unknown rule type in logic condition: {rule_type_str}")
                continue
            conditions.append(ClashRule(
                rule_type=rule_type,
                payload=payload,
                action="",  # logic conditions carry no action of their own
                raw_rule=f"{rule_type_str},{payload}"
            ))
        return conditions

    def parse_rules(self, rules_text: str) -> List[Union[ClashRule, LogicRule, MatchRule]]:
        """Parse newline-separated rules; position in the text becomes priority."""
        self.rules = []
        priority = 0
        for line in rules_text.strip().split('\n'):
            rule = self.parse_rule_line(line)
            if rule:
                rule.priority = priority
                self.rules.append(rule)
                priority += 1
        return self.rules

    def parse_rules_from_list(self, rules_list: List[str]) -> List[Union[ClashRule, LogicRule, MatchRule]]:
        """Parse a list of rule strings; list position becomes priority.

        Invalid lines are skipped, which may leave gaps in the priorities —
        same behavior as the text variant above.
        """
        self.rules = []
        for priority, rule_str in enumerate(rules_list):
            rule = self.parse_rule_line(rule_str)
            if rule:
                rule.priority = priority
                self.rules.append(rule)
        return self.rules

    @staticmethod
    def validate_rule(rule: ClashRule) -> bool:
        """Shallow payload validation for a parsed rule."""
        try:
            if rule.rule_type in [RuleType.IP_CIDR, RuleType.IP_CIDR6]:
                # CIDR payloads must contain a prefix length.
                return '/' in rule.payload
            elif rule.rule_type == RuleType.DST_PORT or rule.rule_type == RuleType.SRC_PORT:
                # Single port or 'start-end' range.
                return rule.payload.isdigit() or '-' in rule.payload
            elif rule.rule_type == RuleType.NETWORK:
                return rule.payload.lower() in ['tcp', 'udp']
            elif rule.rule_type == RuleType.DOMAIN_REGEX or rule.rule_type == RuleType.PROCESS_PATH_REGEX:
                re.compile(rule.payload)  # raises on an invalid pattern
                return True
            return True
        except Exception as e:
            print(f"Invalid rule '{rule.raw_rule}': {e}")
            return False

    def to_string(self) -> List[str]:
        """Return the raw text of every rule, in priority order."""
        return [rule.raw_rule for rule in self.rules]

    def to_dict(self) -> List[Dict[str, Any]]:
        """Convert parsed rules to their dictionary form."""
        result = []
        for rule in self.rules:
            action_value = rule.action.value if isinstance(rule.action, Action) else rule.action
            if isinstance(rule, ClashRule):
                result.append({
                    'type': rule.rule_type.value,
                    'payload': rule.payload,
                    'action': action_value,
                    'additional_params': rule.additional_params,
                    'priority': rule.priority,
                    'raw': rule.raw_rule
                })
            elif isinstance(rule, LogicRule):
                conditions_dict = [
                    {'type': condition.rule_type.value, 'payload': condition.payload}
                    for condition in rule.conditions if isinstance(condition, ClashRule)
                ]
                result.append({
                    'type': rule.logic_type.value,
                    'conditions': conditions_dict,
                    'action': action_value,
                    'priority': rule.priority,
                    'raw': rule.raw_rule
                })
            elif isinstance(rule, MatchRule):
                result.append({
                    'type': 'MATCH',
                    'action': action_value,
                    'priority': rule.priority,
                    'raw': rule.raw_rule
                })
        return result

    def get_rules_by_priority(self) -> List[Union[ClashRule, LogicRule, MatchRule]]:
        """Return rules sorted by ascending priority value (priority 0 first)."""
        return sorted(self.rules, key=lambda rule: rule.priority)

    def append_rule(self, rule: Union[ClashRule, LogicRule, MatchRule]) -> None:
        """Append a rule after the current lowest-priority (largest value) rule."""
        max_priority = max(r.priority for r in self.rules) if self.rules else 0
        rule.priority = max_priority + 1
        self.rules.append(rule)
        self.rules.sort(key=lambda r: r.priority)

    def insert_rule_at_priority(self, rule: Union[ClashRule, LogicRule, MatchRule], priority: int):
        """Insert a rule at a priority position, shifting later rules down."""
        for existing_rule in self.rules:
            if existing_rule.priority >= priority:
                existing_rule.priority += 1
        rule.priority = priority
        self.rules.append(rule)
        self.rules.sort(key=lambda r: r.priority)

    def update_rule_at_priority(self, clash_rule: Union[ClashRule, LogicRule, MatchRule],
                                priority: int) -> bool:
        """Replace the rule holding ``priority``; False when no rule has it."""
        for index, existing_rule in enumerate(self.rules):
            if existing_rule.priority == priority:
                self.rules[index] = clash_rule
                self.rules[index].priority = priority
                return True
        return False

    def remove_rule_at_priority(self, priority: int) -> Optional[Union[ClashRule, LogicRule, MatchRule]]:
        """Remove and return the rule at ``priority``; compact later priorities."""
        rule_to_remove = None
        for rule in self.rules:
            if rule.priority == priority:
                rule_to_remove = rule
                break
        if rule_to_remove is None:
            return None
        self.rules.remove(rule_to_remove)
        for rule in self.rules:
            if rule.priority > priority:
                rule.priority -= 1
        return rule_to_remove

    def remove_rules(self, condition: Callable[[Union[ClashRule, LogicRule, MatchRule]], bool]):
        """Remove every rule matching ``condition``, compacting priorities."""
        i = 0
        while i < len(self.rules):
            if condition(self.rules[i]):
                removed_priority = self.rules[i].priority
                for rule in self.rules:
                    if rule.priority > removed_priority:
                        rule.priority -= 1
                del self.rules[i]
            else:
                i += 1

    def move_rule_priority(self, from_priority: int, to_priority: int) -> bool:
        """Move a rule from one priority position to another.

        NOTE(review): implemented as remove + insert, so the removal first
        shifts later priorities; for moves toward the end the effective target
        slot may differ by one from ``to_priority``. Behavior kept as-is.
        """
        rule_to_move = None
        for rule in self.rules:
            if rule.priority == from_priority:
                rule_to_move = rule
                break
        if not rule_to_move:
            return False
        self.remove_rule_at_priority(from_priority)
        self.insert_rule_at_priority(rule_to_move, to_priority)
        return True

    def filter_rules_by_type(self, rule_type: RuleType) -> List[ClashRule]:
        """Return the ClashRule entries of the given type (logic/match excluded)."""
        return [rule for rule in self.rules
                if isinstance(rule, ClashRule) and rule.rule_type == rule_type]

    def filter_rules_by_action(self, action: Union[Action, str]) -> List[Union[ClashRule, LogicRule, MatchRule]]:
        """Return every rule routed to ``action``."""
        return [rule for rule in self.rules if rule.action == action]

    def has_rule(self, clash_rule: Union[ClashRule, LogicRule, MatchRule]) -> bool:
        """Check for a duplicate rule (same type, condition and action).

        Bug fix: the old implementation accessed ``rule.rule_type`` and
        ``rule.payload`` unconditionally and raised AttributeError when the
        list contained a LogicRule or MatchRule; conditions are now compared
        via ``condition_string`` which all rule classes provide.
        """
        for rule in self.rules:
            if rule.rule_type != clash_rule.rule_type or rule.action != clash_rule.action:
                continue
            if rule.condition_string() == clash_rule.condition_string():
                return True
        return False

    def reorder_rules(
            self,
            moved_rule_priority: int,
            target_priority: int,
    ):
        """Reorder rules.

        :param moved_rule_priority: 被移动规则的原始优先级
        :param target_priority: 目标位置的优先级
        :raises StopIteration: when no rule holds ``moved_rule_priority``.
        """
        moved_index = next(i for i, r in enumerate(self.rules) if r.priority == moved_rule_priority)
        target_index = next(
            (i for i, r in enumerate(self.rules) if r.priority == target_priority),
            len(self.rules)
        )
        moved_rule = self.rules[moved_index]
        moved_rule.priority = target_priority
        if moved_index < target_index:
            # Moving down: rules between the old and new slot shift up by one.
            for i in range(moved_index + 1, target_index + 1):
                self.rules[i].priority -= 1
        elif moved_index > target_index:
            # Moving up: rules between the new and old slot shift down by one.
            for i in range(target_index, moved_index):
                self.rules[i].priority += 1
        self.rules.sort(key=lambda x: x.priority)
|
||||
828
plugins.v2/clashruleprovider/dist/assets/__federation_expose_Config-C3BpNVeC.js
vendored
Normal file
828
plugins.v2/clashruleprovider/dist/assets/__federation_expose_Config-C3BpNVeC.js
vendored
Normal file
@@ -0,0 +1,828 @@
|
||||
import { importShared } from './__federation_fn_import-JrT3xvdd.js';
|
||||
import { _ as _export_sfc } from './_plugin-vue_export-helper-pcqpp-6-.js';
|
||||
|
||||
const {createTextVNode:_createTextVNode,resolveComponent:_resolveComponent,withCtx:_withCtx,createVNode:_createVNode,toDisplayString:_toDisplayString,openBlock:_openBlock,createBlock:_createBlock,createCommentVNode:_createCommentVNode,createElementVNode:_createElementVNode,mergeProps:_mergeProps,withModifiers:_withModifiers,createElementBlock:_createElementBlock} = await importShared('vue');
|
||||
|
||||
|
||||
const _hoisted_1 = { class: "plugin-config" };
|
||||
const _hoisted_2 = { class: "d-flex align-center" };
|
||||
const _hoisted_3 = { class: "font-weight-medium" };
|
||||
const _hoisted_4 = { class: "text-body-2" };
|
||||
|
||||
const {ref,reactive,onMounted,computed} = await importShared('vue');
|
||||
|
||||
|
||||
// Props
|
||||
|
||||
const _sfc_main = {
|
||||
__name: 'Config',
|
||||
props: {
|
||||
initialConfig: {
|
||||
type: Object,
|
||||
default: () => ({}),
|
||||
},
|
||||
api: {
|
||||
type: Object,
|
||||
default: () => {
|
||||
},
|
||||
},
|
||||
},
|
||||
emits: ['save', 'close'],
|
||||
setup(__props, { emit: __emit }) {
|
||||
|
||||
const props = __props;
|
||||
|
||||
// 状态变量
|
||||
const form = ref(null);
|
||||
const isFormValid = ref(true);
|
||||
const error = ref(null);
|
||||
const saving = ref(false);
|
||||
const testing = ref(false);
|
||||
const showClashSecret = ref(false);
|
||||
const selectedCronOption = ref('6hours');
|
||||
|
||||
// Test result state
|
||||
const testResult = reactive({
|
||||
show: false,
|
||||
success: false,
|
||||
title: '',
|
||||
message: ''
|
||||
});
|
||||
|
||||
// Cron 选项
|
||||
const cronOptions = [
|
||||
{text: '每5分钟', value: '5min', cron: '*/5 * * * *'},
|
||||
{text: '每15分钟', value: '15min', cron: '*/15 * * * *'},
|
||||
{text: '每30分钟', value: '30min', cron: '*/30 * * * *'},
|
||||
{text: '每小时', value: '1hour', cron: '0 * * * *'},
|
||||
{text: '每2小时', value: '2hours', cron: '0 */2 * * *'},
|
||||
{text: '每6小时', value: '6hours', cron: '0 */6 * * *'},
|
||||
{text: '每12小时', value: '12hours', cron: '0 */12 * * *'},
|
||||
{text: '每天', value: '1day', cron: '0 0 * * *'},
|
||||
{text: '自定义', value: 'custom', cron: ''},
|
||||
];
|
||||
|
||||
// Default configuration; also the target state for resetForm().
const defaultConfig = {
  enabled: false,                  // master switch for the plugin
  sub_links: [],                   // Clash subscription URLs
  filter_keywords: ["公益性", "高延迟", "域名", "官网", "重启", "过期时间", "系统代理"], // node-name filter terms
  clash_dashboard_url: '',         // Clash dashboard base URL
  clash_dashboard_secret: '',      // Clash API secret
  movie_pilot_url: '',             // MoviePilot base URL
  cron_string: '0 */6 * * *',      // schedule; matches the '6hours' preset
  timeout: 10,                     // request timeout (seconds)
  retry_times: 3,                  // retries on failure
  proxy: false,                    // route requests through the system proxy
  notify: false,                   // send a notification after each run
  auto_update_subscriptions: true, // periodically refresh subscriptions
  ruleset_prefix: '📂<-',          // prefix applied to generated rule sets
};
|
||||
|
||||
// 响应式配置对象
|
||||
const config = reactive({...defaultConfig});
|
||||
|
||||
// 自定义事件
|
||||
const emit = __emit;
|
||||
|
||||
// Initialization: seed the reactive config from the parent-supplied
// initialConfig, then derive which cron preset (if any) matches the
// stored cron string.
onMounted(() => {
  if (props.initialConfig) {
    // Copy only keys that the local config object already defines,
    // so unknown/stale keys from storage are ignored.
    Object.keys(props.initialConfig).forEach(key => {
      if (key in config) {
        config[key] = props.initialConfig[key];
      }
    });

    // Select the preset matching the saved cron string,
    // or fall back to the free-form "custom" option.
    const cronOption = cronOptions.find(option => option.cron === config.cron_string);
    if (cronOption) {
      selectedCronOption.value = cronOption.value;
    } else {
      selectedCronOption.value = 'custom';
    }
  }
});
|
||||
|
||||
// 验证函数
|
||||
// Return true when `url` can be parsed by the WHATWG URL constructor.
function isValidUrl(url) {
  let parsed = null;
  try {
    parsed = new URL(url);
  } catch {
    // new URL() throws on malformed / relative input — treated as invalid.
  }
  return parsed !== null;
}
|
||||
|
||||
// Vuetify rule for the subscription-link combobox: at least one entry is
// required and every entry must parse as a URL. Returns `true` when valid,
// otherwise an error-message string.
function validateSubLinks(links) {
  if (!links || links.length === 0) {
    return '至少需要一个订阅链接'
  }

  for (let i = 0; i < links.length; i += 1) {
    if (!isValidUrl(links[i])) {
      return `无效的订阅链接: ${links[i]}`
    }
  }
  return true
}
|
||||
|
||||
// Vuetify rule for the custom cron field: must be non-empty and contain
// exactly five whitespace-separated fields (minute hour day month weekday).
// Returns `true` when valid, otherwise an error-message string.
function validateCronExpression(cronStr) {
  if (!cronStr) return '请输入Cron表达式'

  const fieldCount = cronStr.trim().split(/\s+/).length;
  return fieldCount === 5
    ? true
    : 'Cron表达式应包含5个部分 (分 时 日 月 周)';
}
|
||||
|
||||
// 更新cron字符串
|
||||
// Write the selected preset's cron expression into config.cron_string.
// The "custom" preset has an empty cron and intentionally leaves the
// current value untouched so the user can edit it manually.
function updateCronString(optionValue) {
  for (const option of cronOptions) {
    if (option.value !== optionValue) {
      continue;
    }
    if (option.cron) {
      config.cron_string = option.cron;
    }
    return;
  }
}
|
||||
|
||||
// 测试连接
|
||||
// Test connectivity to the Clash dashboard and the first subscription link.
// Validates the required fields first, then calls the plugin backend's
// connectivity endpoint and surfaces the outcome through `testResult`.
async function testConnection() {
  testing.value = true;
  error.value = null;
  testResult.show = false;

  try {
    // Validate required parameters before hitting the backend.
    if (!config.clash_dashboard_url) {
      throw new Error('请先配置 Clash 面板 URL')
    }
    if (!config.clash_dashboard_secret) {
      throw new Error('请先配置 Clash 面板密钥')
    }
    if (!config.sub_links || config.sub_links.length === 0) {
      throw new Error('请先配置至少一个订阅链接')
    }
    if (!config.movie_pilot_url || config.movie_pilot_url.length === 0) {
      // Fix: original message read '请先MoviePilot链接' (missing「配置」).
      throw new Error('请先配置MoviePilot链接')
    }
    // Build the API payload; only the first subscription link is probed.
    const testParams = {
      clash_dashboard_url: config.clash_dashboard_url,
      clash_dashboard_secret: config.clash_dashboard_secret,
      sub_link: config.sub_links[0] // use the first subscription link for the test
    };

    // Call the backend connectivity-test endpoint.
    const result = await props.api.post('/plugin/ClashRuleProvider/connectivity', testParams);

    // Show success or propagate the backend's failure message.
    if (result.success) {
      testResult.success = true;
      testResult.title = '连接测试成功!';
      testResult.message = 'Clash面板和订阅链接连接正常,配置验证通过';
      testResult.show = true;

      // Auto-hide the success alert after 5 seconds.
      setTimeout(() => {
        testResult.show = false;
      }, 5000);
    } else {
      throw new Error(result.message || '连接测试失败,请检查配置')
    }

  } catch (err) {
    console.error('连接测试失败:', err);
    testResult.success = false;
    testResult.title = '连接测试失败';
    testResult.message = err.message;
    testResult.show = true;
  } finally {
    testing.value = false;
  }
}
|
||||
|
||||
// 保存配置
|
||||
// Validate the form, then emit the current config to the host app via the
// `save` event; the host is responsible for persisting it.
async function saveConfig() {
  if (!isFormValid.value) {
    error.value = '请修正表单中的错误';
    return
  }

  saving.value = true;
  error.value = null;

  try {
    // Deliberate 1s delay so the saving spinner is visible to the user.
    await new Promise(resolve => setTimeout(resolve, 1000));
    // Emit a shallow copy so the listener receives a plain snapshot,
    // not the live reactive object.
    emit('save', {...config});
  } catch (err) {
    console.error('保存配置失败:', err);
    error.value = err.message || '保存配置失败';
  } finally {
    saving.value = false;
  }
}
|
||||
|
||||
// Display helper: reduce a URL to its hostname, dropping a leading "www.".
// Falls back to the raw input when it is not a parseable URL.
function extractDomain(url) {
  let host;
  try {
    host = new URL(url).hostname;
  } catch {
    return url // not a URL — show the original text unchanged
  }
  return host.replace(/^www\./, '');
}
|
||||
|
||||
// 重置表单
|
||||
// Restore every config field to its default and clear validation state.
function resetForm() {
  Object.keys(defaultConfig).forEach(key => {
    config[key] = defaultConfig[key];
  });
  // '6hours' corresponds to defaultConfig.cron_string ('0 */6 * * *').
  selectedCronOption.value = '6hours';

  if (form.value) {
    form.value.resetValidation();
  }
}
|
||||
|
||||
// 关闭组件
|
||||
// Emit `close` so the host app dismisses this config view.
function notifyClose() {
  emit('close');
}
|
||||
|
||||
// 通知主应用切换到Page页面
|
||||
// Emit `switch` so the host app navigates to the plugin's Page view.
// NOTE(review): 'switch' is not declared in this component's `emits`
// option (only 'save' and 'close' are) — confirm whether it should be added.
function notifySwitch() {
  emit('switch');
}
|
||||
|
||||
return (_ctx, _cache) => {
|
||||
const _component_v_card_title = _resolveComponent("v-card-title");
|
||||
const _component_v_icon = _resolveComponent("v-icon");
|
||||
const _component_v_btn = _resolveComponent("v-btn");
|
||||
const _component_v_card_item = _resolveComponent("v-card-item");
|
||||
const _component_v_alert = _resolveComponent("v-alert");
|
||||
const _component_v_switch = _resolveComponent("v-switch");
|
||||
const _component_v_col = _resolveComponent("v-col");
|
||||
const _component_v_row = _resolveComponent("v-row");
|
||||
const _component_v_chip = _resolveComponent("v-chip");
|
||||
const _component_v_combobox = _resolveComponent("v-combobox");
|
||||
const _component_v_text_field = _resolveComponent("v-text-field");
|
||||
const _component_v_select = _resolveComponent("v-select");
|
||||
const _component_v_expansion_panel_title = _resolveComponent("v-expansion-panel-title");
|
||||
const _component_v_expansion_panel_text = _resolveComponent("v-expansion-panel-text");
|
||||
const _component_v_expansion_panel = _resolveComponent("v-expansion-panel");
|
||||
const _component_v_expansion_panels = _resolveComponent("v-expansion-panels");
|
||||
const _component_v_form = _resolveComponent("v-form");
|
||||
const _component_v_card_text = _resolveComponent("v-card-text");
|
||||
const _component_v_spacer = _resolveComponent("v-spacer");
|
||||
const _component_v_card_actions = _resolveComponent("v-card-actions");
|
||||
const _component_v_card = _resolveComponent("v-card");
|
||||
|
||||
return (_openBlock(), _createElementBlock("div", _hoisted_1, [
|
||||
_createVNode(_component_v_card, null, {
|
||||
default: _withCtx(() => [
|
||||
_createVNode(_component_v_card_item, null, {
|
||||
append: _withCtx(() => [
|
||||
_createVNode(_component_v_btn, {
|
||||
icon: "",
|
||||
color: "primary",
|
||||
variant: "text",
|
||||
onClick: notifyClose
|
||||
}, {
|
||||
default: _withCtx(() => [
|
||||
_createVNode(_component_v_icon, { left: "" }, {
|
||||
default: _withCtx(() => _cache[18] || (_cache[18] = [
|
||||
_createTextVNode("mdi-close")
|
||||
])),
|
||||
_: 1
|
||||
})
|
||||
]),
|
||||
_: 1
|
||||
})
|
||||
]),
|
||||
default: _withCtx(() => [
|
||||
_createVNode(_component_v_card_title, null, {
|
||||
default: _withCtx(() => _cache[17] || (_cache[17] = [
|
||||
_createTextVNode("Clash Rule Provider 插件配置")
|
||||
])),
|
||||
_: 1
|
||||
})
|
||||
]),
|
||||
_: 1
|
||||
}),
|
||||
_createVNode(_component_v_card_text, { class: "overflow-y-auto" }, {
|
||||
default: _withCtx(() => [
|
||||
(error.value)
|
||||
? (_openBlock(), _createBlock(_component_v_alert, {
|
||||
key: 0,
|
||||
type: "error",
|
||||
class: "mb-4"
|
||||
}, {
|
||||
default: _withCtx(() => [
|
||||
_createTextVNode(_toDisplayString(error.value), 1)
|
||||
]),
|
||||
_: 1
|
||||
}))
|
||||
: _createCommentVNode("", true),
|
||||
_createVNode(_component_v_form, {
|
||||
ref_key: "form",
|
||||
ref: form,
|
||||
modelValue: isFormValid.value,
|
||||
"onUpdate:modelValue": _cache[15] || (_cache[15] = $event => ((isFormValid).value = $event)),
|
||||
onSubmit: _withModifiers(saveConfig, ["prevent"])
|
||||
}, {
|
||||
default: _withCtx(() => [
|
||||
_cache[28] || (_cache[28] = _createElementVNode("div", { class: "text-subtitle-1 font-weight-bold mt-4 mb-2" }, "基本设置", -1)),
|
||||
_createVNode(_component_v_row, null, {
|
||||
default: _withCtx(() => [
|
||||
_createVNode(_component_v_col, {
|
||||
cols: "12",
|
||||
md: "4"
|
||||
}, {
|
||||
default: _withCtx(() => [
|
||||
_createVNode(_component_v_switch, {
|
||||
modelValue: config.enabled,
|
||||
"onUpdate:modelValue": _cache[0] || (_cache[0] = $event => ((config.enabled) = $event)),
|
||||
label: "启用插件",
|
||||
color: "primary",
|
||||
inset: "",
|
||||
hint: "启用后插件将开始监控和同步",
|
||||
"persistent-hint": ""
|
||||
}, null, 8, ["modelValue"])
|
||||
]),
|
||||
_: 1
|
||||
}),
|
||||
_createVNode(_component_v_col, {
|
||||
cols: "12",
|
||||
md: "4"
|
||||
}, {
|
||||
default: _withCtx(() => [
|
||||
_createVNode(_component_v_switch, {
|
||||
modelValue: config.proxy,
|
||||
"onUpdate:modelValue": _cache[1] || (_cache[1] = $event => ((config.proxy) = $event)),
|
||||
label: "启用代理",
|
||||
color: "primary",
|
||||
inset: "",
|
||||
hint: "是否使用系统代理进行网络请求",
|
||||
"persistent-hint": ""
|
||||
}, null, 8, ["modelValue"])
|
||||
]),
|
||||
_: 1
|
||||
}),
|
||||
_createVNode(_component_v_col, {
|
||||
cols: "12",
|
||||
md: "4"
|
||||
}, {
|
||||
default: _withCtx(() => [
|
||||
_createVNode(_component_v_switch, {
|
||||
modelValue: config.notify,
|
||||
"onUpdate:modelValue": _cache[2] || (_cache[2] = $event => ((config.notify) = $event)),
|
||||
label: "启用通知",
|
||||
color: "primary",
|
||||
inset: "",
|
||||
hint: "执行完成后发送通知消息",
|
||||
"persistent-hint": ""
|
||||
}, null, 8, ["modelValue"])
|
||||
]),
|
||||
_: 1
|
||||
})
|
||||
]),
|
||||
_: 1
|
||||
}),
|
||||
_cache[29] || (_cache[29] = _createElementVNode("div", { class: "text-subtitle-1 font-weight-bold mt-4 mb-2" }, "订阅配置", -1)),
|
||||
_createVNode(_component_v_row, null, {
|
||||
default: _withCtx(() => [
|
||||
_createVNode(_component_v_col, { cols: "12" }, {
|
||||
default: _withCtx(() => [
|
||||
_createVNode(_component_v_combobox, {
|
||||
modelValue: config.sub_links,
|
||||
"onUpdate:modelValue": _cache[3] || (_cache[3] = $event => ((config.sub_links) = $event)),
|
||||
label: "订阅链接",
|
||||
variant: "outlined",
|
||||
multiple: "",
|
||||
chips: "",
|
||||
"closable-chips": "",
|
||||
hint: "添加一个Clash订阅链接",
|
||||
"persistent-hint": "",
|
||||
rules: [validateSubLinks]
|
||||
}, {
|
||||
chip: _withCtx(({ props, item }) => [
|
||||
_createVNode(_component_v_chip, _mergeProps(props, {
|
||||
closable: "",
|
||||
size: "small"
|
||||
}), {
|
||||
default: _withCtx(() => [
|
||||
_createTextVNode(_toDisplayString(extractDomain(item.value)), 1)
|
||||
]),
|
||||
_: 2
|
||||
}, 1040)
|
||||
]),
|
||||
_: 1
|
||||
}, 8, ["modelValue", "rules"])
|
||||
]),
|
||||
_: 1
|
||||
}),
|
||||
_createVNode(_component_v_col, { cols: "12" }, {
|
||||
default: _withCtx(() => [
|
||||
_createVNode(_component_v_combobox, {
|
||||
modelValue: config.filter_keywords,
|
||||
"onUpdate:modelValue": _cache[4] || (_cache[4] = $event => ((config.filter_keywords) = $event)),
|
||||
label: "节点过滤关键词",
|
||||
variant: "outlined",
|
||||
multiple: "",
|
||||
chips: "",
|
||||
"closable-chips": "",
|
||||
hint: "添加用于过滤节点的关键词",
|
||||
"persistent-hint": ""
|
||||
}, {
|
||||
chip: _withCtx(({ props, item }) => [
|
||||
_createVNode(_component_v_chip, _mergeProps(props, {
|
||||
closable: "",
|
||||
size: "small",
|
||||
color: "info"
|
||||
}), {
|
||||
default: _withCtx(() => [
|
||||
_createTextVNode(_toDisplayString(item.value), 1)
|
||||
]),
|
||||
_: 2
|
||||
}, 1040)
|
||||
]),
|
||||
_: 1
|
||||
}, 8, ["modelValue"])
|
||||
]),
|
||||
_: 1
|
||||
})
|
||||
]),
|
||||
_: 1
|
||||
}),
|
||||
_cache[30] || (_cache[30] = _createElementVNode("div", { class: "text-subtitle-1 font-weight-bold mt-4 mb-2" }, "Clash 面板设置", -1)),
|
||||
_createVNode(_component_v_row, null, {
|
||||
default: _withCtx(() => [
|
||||
_createVNode(_component_v_col, { cols: "12" }, {
|
||||
default: _withCtx(() => [
|
||||
_createVNode(_component_v_text_field, {
|
||||
modelValue: config.clash_dashboard_url,
|
||||
"onUpdate:modelValue": _cache[5] || (_cache[5] = $event => ((config.clash_dashboard_url) = $event)),
|
||||
label: "Clash 面板 URL",
|
||||
variant: "outlined",
|
||||
placeholder: "http://localhost:9090",
|
||||
hint: "Clash 控制面板的访问地址",
|
||||
"persistent-hint": "",
|
||||
rules: [v => !v || isValidUrl(v) || '请输入有效的URL地址']
|
||||
}, {
|
||||
"prepend-inner": _withCtx(() => [
|
||||
_createVNode(_component_v_icon, { color: "primary" }, {
|
||||
default: _withCtx(() => _cache[19] || (_cache[19] = [
|
||||
_createTextVNode("mdi-web")
|
||||
])),
|
||||
_: 1
|
||||
})
|
||||
]),
|
||||
_: 1
|
||||
}, 8, ["modelValue", "rules"])
|
||||
]),
|
||||
_: 1
|
||||
}),
|
||||
_createVNode(_component_v_col, { cols: "12" }, {
|
||||
default: _withCtx(() => [
|
||||
_createVNode(_component_v_text_field, {
|
||||
modelValue: config.clash_dashboard_secret,
|
||||
"onUpdate:modelValue": _cache[6] || (_cache[6] = $event => ((config.clash_dashboard_secret) = $event)),
|
||||
label: "Clash 面板密钥",
|
||||
variant: "outlined",
|
||||
placeholder: "your-clash-secret",
|
||||
hint: "用于访问Clash API的密钥",
|
||||
"persistent-hint": "",
|
||||
"append-inner-icon": showClashSecret.value ? 'mdi-eye-off' : 'mdi-eye',
|
||||
type: showClashSecret.value ? 'text' : 'password',
|
||||
"onClick:appendInner": _cache[7] || (_cache[7] = $event => (showClashSecret.value = !showClashSecret.value))
|
||||
}, {
|
||||
"prepend-inner": _withCtx(() => [
|
||||
_createVNode(_component_v_icon, { color: "warning" }, {
|
||||
default: _withCtx(() => _cache[20] || (_cache[20] = [
|
||||
_createTextVNode("mdi-key")
|
||||
])),
|
||||
_: 1
|
||||
})
|
||||
]),
|
||||
_: 1
|
||||
}, 8, ["modelValue", "append-inner-icon", "type"])
|
||||
]),
|
||||
_: 1
|
||||
})
|
||||
]),
|
||||
_: 1
|
||||
}),
|
||||
_cache[31] || (_cache[31] = _createElementVNode("div", { class: "text-subtitle-1 font-weight-bold mt-4 mb-2" }, "MoviePilot 设置", -1)),
|
||||
_createVNode(_component_v_row, null, {
|
||||
default: _withCtx(() => [
|
||||
_createVNode(_component_v_col, { cols: "12" }, {
|
||||
default: _withCtx(() => [
|
||||
_createVNode(_component_v_text_field, {
|
||||
modelValue: config.movie_pilot_url,
|
||||
"onUpdate:modelValue": _cache[8] || (_cache[8] = $event => ((config.movie_pilot_url) = $event)),
|
||||
label: "MoviePilot URL",
|
||||
variant: "outlined",
|
||||
placeholder: "http://localhost:3001",
|
||||
hint: "MoviePilot 服务的访问地址",
|
||||
"persistent-hint": "",
|
||||
rules: [v => !!v || 'MoviePilot URL不能为空', v => isValidUrl(v) || '请输入有效的URL地址']
|
||||
}, {
|
||||
"prepend-inner": _withCtx(() => [
|
||||
_createVNode(_component_v_icon, { color: "success" }, {
|
||||
default: _withCtx(() => _cache[21] || (_cache[21] = [
|
||||
_createTextVNode("mdi-movie")
|
||||
])),
|
||||
_: 1
|
||||
})
|
||||
]),
|
||||
_: 1
|
||||
}, 8, ["modelValue", "rules"])
|
||||
]),
|
||||
_: 1
|
||||
})
|
||||
]),
|
||||
_: 1
|
||||
}),
|
||||
_cache[32] || (_cache[32] = _createElementVNode("div", { class: "text-subtitle-1 font-weight-bold mt-4 mb-2" }, "执行设置", -1)),
|
||||
_createVNode(_component_v_row, null, {
|
||||
default: _withCtx(() => [
|
||||
_createVNode(_component_v_col, { cols: "12" }, {
|
||||
default: _withCtx(() => [
|
||||
_createVNode(_component_v_select, {
|
||||
modelValue: selectedCronOption.value,
|
||||
"onUpdate:modelValue": [
|
||||
_cache[9] || (_cache[9] = $event => ((selectedCronOption).value = $event)),
|
||||
updateCronString
|
||||
],
|
||||
label: "执行周期",
|
||||
items: cronOptions,
|
||||
variant: "outlined",
|
||||
"item-title": "text",
|
||||
"item-value": "value",
|
||||
hint: "选择插件执行的时间间隔",
|
||||
"persistent-hint": ""
|
||||
}, null, 8, ["modelValue"])
|
||||
]),
|
||||
_: 1
|
||||
}),
|
||||
(selectedCronOption.value === 'custom')
|
||||
? (_openBlock(), _createBlock(_component_v_col, {
|
||||
key: 0,
|
||||
cols: "12"
|
||||
}, {
|
||||
default: _withCtx(() => [
|
||||
_createVNode(_component_v_text_field, {
|
||||
modelValue: config.cron_string,
|
||||
"onUpdate:modelValue": _cache[10] || (_cache[10] = $event => ((config.cron_string) = $event)),
|
||||
label: "自定义 Cron 表达式",
|
||||
variant: "outlined",
|
||||
placeholder: "0 */6 * * *",
|
||||
hint: "使用标准Cron表达式格式 (分 时 日 月 周)",
|
||||
"persistent-hint": "",
|
||||
rules: [validateCronExpression]
|
||||
}, {
|
||||
"prepend-inner": _withCtx(() => [
|
||||
_createVNode(_component_v_icon, { color: "info" }, {
|
||||
default: _withCtx(() => _cache[22] || (_cache[22] = [
|
||||
_createTextVNode("mdi-clock-outline")
|
||||
])),
|
||||
_: 1
|
||||
})
|
||||
]),
|
||||
_: 1
|
||||
}, 8, ["modelValue", "rules"])
|
||||
]),
|
||||
_: 1
|
||||
}))
|
||||
: _createCommentVNode("", true),
|
||||
_createVNode(_component_v_col, {
|
||||
cols: "12",
|
||||
md: "6"
|
||||
}, {
|
||||
default: _withCtx(() => [
|
||||
_createVNode(_component_v_text_field, {
|
||||
modelValue: config.timeout,
|
||||
"onUpdate:modelValue": _cache[11] || (_cache[11] = $event => ((config.timeout) = $event)),
|
||||
modelModifiers: { number: true },
|
||||
label: "超时时间 (秒)",
|
||||
variant: "outlined",
|
||||
type: "number",
|
||||
min: "1",
|
||||
max: "300",
|
||||
hint: "请求的超时时间",
|
||||
"persistent-hint": "",
|
||||
rules: [v => v > 0 || '超时时间必须大于0']
|
||||
}, {
|
||||
"prepend-inner": _withCtx(() => [
|
||||
_createVNode(_component_v_icon, { color: "warning" }, {
|
||||
default: _withCtx(() => _cache[23] || (_cache[23] = [
|
||||
_createTextVNode("mdi-timer")
|
||||
])),
|
||||
_: 1
|
||||
})
|
||||
]),
|
||||
_: 1
|
||||
}, 8, ["modelValue", "rules"])
|
||||
]),
|
||||
_: 1
|
||||
}),
|
||||
_createVNode(_component_v_col, {
|
||||
cols: "12",
|
||||
md: "6"
|
||||
}, {
|
||||
default: _withCtx(() => [
|
||||
_createVNode(_component_v_text_field, {
|
||||
modelValue: config.retry_times,
|
||||
"onUpdate:modelValue": _cache[12] || (_cache[12] = $event => ((config.retry_times) = $event)),
|
||||
modelModifiers: { number: true },
|
||||
label: "重试次数",
|
||||
variant: "outlined",
|
||||
type: "number",
|
||||
min: "0",
|
||||
max: "10",
|
||||
hint: "失败时的重试次数",
|
||||
"persistent-hint": "",
|
||||
rules: [v => v >= 0 || '重试次数不能为负数']
|
||||
}, {
|
||||
"prepend-inner": _withCtx(() => [
|
||||
_createVNode(_component_v_icon, { color: "info" }, {
|
||||
default: _withCtx(() => _cache[24] || (_cache[24] = [
|
||||
_createTextVNode("mdi-refresh")
|
||||
])),
|
||||
_: 1
|
||||
})
|
||||
]),
|
||||
_: 1
|
||||
}, 8, ["modelValue", "rules"])
|
||||
]),
|
||||
_: 1
|
||||
})
|
||||
]),
|
||||
_: 1
|
||||
}),
|
||||
_createVNode(_component_v_expansion_panels, {
|
||||
variant: "accordion",
|
||||
class: "mt-4"
|
||||
}, {
|
||||
default: _withCtx(() => [
|
||||
_createVNode(_component_v_expansion_panel, null, {
|
||||
default: _withCtx(() => [
|
||||
_createVNode(_component_v_expansion_panel_title, null, {
|
||||
default: _withCtx(() => [
|
||||
_createVNode(_component_v_icon, { class: "mr-2" }, {
|
||||
default: _withCtx(() => _cache[25] || (_cache[25] = [
|
||||
_createTextVNode("mdi-cog")
|
||||
])),
|
||||
_: 1
|
||||
}),
|
||||
_cache[26] || (_cache[26] = _createTextVNode(" 高级选项 "))
|
||||
]),
|
||||
_: 1
|
||||
}),
|
||||
_createVNode(_component_v_expansion_panel_text, null, {
|
||||
default: _withCtx(() => [
|
||||
_createVNode(_component_v_row, null, {
|
||||
default: _withCtx(() => [
|
||||
_createVNode(_component_v_col, { cols: "12" }, {
|
||||
default: _withCtx(() => [
|
||||
_createVNode(_component_v_switch, {
|
||||
modelValue: config.auto_update_subscriptions,
|
||||
"onUpdate:modelValue": _cache[13] || (_cache[13] = $event => ((config.auto_update_subscriptions) = $event)),
|
||||
label: "自动更新订阅",
|
||||
color: "primary",
|
||||
inset: "",
|
||||
hint: "定期自动更新Clash订阅配置"
|
||||
}, null, 8, ["modelValue"])
|
||||
]),
|
||||
_: 1
|
||||
}),
|
||||
_createVNode(_component_v_col, { cols: "12" }, {
|
||||
default: _withCtx(() => [
|
||||
_createVNode(_component_v_text_field, {
|
||||
modelValue: config.ruleset_prefix,
|
||||
"onUpdate:modelValue": _cache[14] || (_cache[14] = $event => ((config.ruleset_prefix) = $event)),
|
||||
label: "规则集前缀",
|
||||
variant: "outlined",
|
||||
placeholder: "📂<-",
|
||||
hint: "为生成的规则集添加前缀",
|
||||
"persistent-hint": ""
|
||||
}, {
|
||||
"prepend-inner": _withCtx(() => [
|
||||
_createVNode(_component_v_icon, { color: "info" }, {
|
||||
default: _withCtx(() => _cache[27] || (_cache[27] = [
|
||||
_createTextVNode("mdi-prefix")
|
||||
])),
|
||||
_: 1
|
||||
})
|
||||
]),
|
||||
_: 1
|
||||
}, 8, ["modelValue"])
|
||||
]),
|
||||
_: 1
|
||||
})
|
||||
]),
|
||||
_: 1
|
||||
})
|
||||
]),
|
||||
_: 1
|
||||
})
|
||||
]),
|
||||
_: 1
|
||||
})
|
||||
]),
|
||||
_: 1
|
||||
})
|
||||
]),
|
||||
_: 1
|
||||
}, 8, ["modelValue"])
|
||||
]),
|
||||
_: 1
|
||||
}),
|
||||
_createVNode(_component_v_card_actions, null, {
|
||||
default: _withCtx(() => [
|
||||
_createVNode(_component_v_btn, {
|
||||
color: "primary",
|
||||
onClick: notifySwitch
|
||||
}, {
|
||||
default: _withCtx(() => [
|
||||
_createVNode(_component_v_icon, { left: "" }, {
|
||||
default: _withCtx(() => _cache[33] || (_cache[33] = [
|
||||
_createTextVNode("mdi-view-dashboard-edit")
|
||||
])),
|
||||
_: 1
|
||||
}),
|
||||
_cache[34] || (_cache[34] = _createTextVNode(" 规则 "))
|
||||
]),
|
||||
_: 1
|
||||
}),
|
||||
_createVNode(_component_v_btn, {
|
||||
color: "secondary",
|
||||
onClick: resetForm
|
||||
}, {
|
||||
default: _withCtx(() => _cache[35] || (_cache[35] = [
|
||||
_createTextVNode("重置")
|
||||
])),
|
||||
_: 1
|
||||
}),
|
||||
_createVNode(_component_v_btn, {
|
||||
color: "info",
|
||||
onClick: testConnection,
|
||||
loading: testing.value
|
||||
}, {
|
||||
default: _withCtx(() => _cache[36] || (_cache[36] = [
|
||||
_createTextVNode("测试连接")
|
||||
])),
|
||||
_: 1
|
||||
}, 8, ["loading"]),
|
||||
_createVNode(_component_v_spacer),
|
||||
_createVNode(_component_v_btn, {
|
||||
color: "primary",
|
||||
disabled: !isFormValid.value,
|
||||
onClick: saveConfig,
|
||||
loading: saving.value
|
||||
}, {
|
||||
default: _withCtx(() => _cache[37] || (_cache[37] = [
|
||||
_createTextVNode(" 保存配置 ")
|
||||
])),
|
||||
_: 1
|
||||
}, 8, ["disabled", "loading"])
|
||||
]),
|
||||
_: 1
|
||||
}),
|
||||
(testResult.show)
|
||||
? (_openBlock(), _createBlock(_component_v_alert, {
|
||||
key: 0,
|
||||
type: testResult.success ? 'success' : 'error',
|
||||
variant: "tonal",
|
||||
closable: "",
|
||||
class: "ma-4 mt-0",
|
||||
"onClick:close": _cache[16] || (_cache[16] = $event => (testResult.show = false))
|
||||
}, {
|
||||
default: _withCtx(() => [
|
||||
_createElementVNode("div", _hoisted_2, [
|
||||
_createVNode(_component_v_icon, { class: "mr-2" }, {
|
||||
default: _withCtx(() => [
|
||||
_createTextVNode(_toDisplayString(testResult.success ? 'mdi-check-circle' : 'mdi-alert-circle'), 1)
|
||||
]),
|
||||
_: 1
|
||||
}),
|
||||
_createElementVNode("div", null, [
|
||||
_createElementVNode("div", _hoisted_3, _toDisplayString(testResult.title), 1),
|
||||
_createElementVNode("div", _hoisted_4, _toDisplayString(testResult.message), 1)
|
||||
])
|
||||
])
|
||||
]),
|
||||
_: 1
|
||||
}, 8, ["type"]))
|
||||
: _createCommentVNode("", true)
|
||||
]),
|
||||
_: 1
|
||||
})
|
||||
]))
|
||||
}
|
||||
}
|
||||
|
||||
};
|
||||
const ConfigComponent = /*#__PURE__*/_export_sfc(_sfc_main, [['__scopeId',"data-v-0e64dae0"]]);
|
||||
|
||||
export { ConfigComponent as default };
|
||||
5
plugins.v2/clashruleprovider/dist/assets/__federation_expose_Config-DXzIavcD.css
vendored
Normal file
5
plugins.v2/clashruleprovider/dist/assets/__federation_expose_Config-DXzIavcD.css
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
|
||||
/* Constrain the config view's width and center it horizontally. */
.plugin-config[data-v-0e64dae0] {
  max-width: 800px;
  margin: 0 auto;
}
|
||||
40514
plugins.v2/clashruleprovider/dist/assets/__federation_expose_Dashboard-BkyO-3pr.js
vendored
Normal file
40514
plugins.v2/clashruleprovider/dist/assets/__federation_expose_Dashboard-BkyO-3pr.js
vendored
Normal file
File diff suppressed because one or more lines are too long
48
plugins.v2/clashruleprovider/dist/assets/__federation_expose_Page-Bl7XNZ7k.css
vendored
Normal file
48
plugins.v2/clashruleprovider/dist/assets/__federation_expose_Page-Bl7XNZ7k.css
vendored
Normal file
@@ -0,0 +1,48 @@
|
||||
|
||||
/* Constrain the page width and center it horizontally. */
.plugin-page[data-v-d5e502a5] {
  max-width: 1200px;
  margin: 0 auto;
}

/* Equal spacing between cards; adapts to mobile. */
.d-flex.flex-wrap[data-v-d5e502a5] {
  gap: 16px;
}
/* Long URLs wrap instead of overflowing their container. */
.url-display[data-v-d5e502a5] {
  word-break: break-all;
  padding: 8px;
  background: rgba(0, 0, 0, 0.05);
  border-radius: 4px;
}

/* Stack the cards vertically on narrow (mobile) screens. */
@media (max-width: 768px) {
  .d-flex.flex-wrap[data-v-d5e502a5] {
    flex-direction: column;
  }
}

/* Add visual distinction between sections */
.ruleset-section[data-v-d5e502a5] {
  border: 1px solid #e0e0e0;
  border-radius: 4px;
  padding: 16px;
  background-color: #f5f5f5;
}
.top-section[data-v-d5e502a5] {
  border: 1px solid #e0e0e0;
  border-radius: 4px;
  padding: 16px;
  background-color: #f9f9f9;
}

/* Optional: Add different border colors to further distinguish */
.ruleset-section[data-v-d5e502a5] {
  border-left: 4px solid #2196F3; /* Blue accent */
}
.top-section[data-v-d5e502a5] {
  border-left: 4px solid #4CAF50; /* Green accent */
}
/* Grab handle for drag-and-drop reordering. */
.drag-handle[data-v-d5e502a5] {
  cursor: move;
}
|
||||
1118
plugins.v2/clashruleprovider/dist/assets/__federation_expose_Page-DlQgf7u6.js
vendored
Normal file
1118
plugins.v2/clashruleprovider/dist/assets/__federation_expose_Page-DlQgf7u6.js
vendored
Normal file
File diff suppressed because it is too large
Load Diff
418
plugins.v2/clashruleprovider/dist/assets/__federation_fn_import-JrT3xvdd.js
vendored
Normal file
418
plugins.v2/clashruleprovider/dist/assets/__federation_fn_import-JrT3xvdd.js
vendored
Normal file
@@ -0,0 +1,418 @@
|
||||
// --- semver grammar fragments (regex source strings) ---
// These compose into the full range grammar used by the parse* helpers below.

// Build metadata suffix: "+build.meta".
const buildIdentifier = "[0-9A-Za-z-]+";
const build = `(?:\\+(${buildIdentifier}(?:\\.${buildIdentifier})*))`;
// Numeric identifiers: the strict form forbids leading zeros, the loose form allows them.
const numericIdentifier = "0|[1-9]\\d*";
const numericIdentifierLoose = "[0-9]+";
const nonNumericIdentifier = "\\d*[a-zA-Z-][a-zA-Z0-9-]*";
// Pre-release suffix: "-alpha.1" (the loose variant tolerates a missing dash).
const preReleaseIdentifierLoose = `(?:${numericIdentifierLoose}|${nonNumericIdentifier})`;
const preReleaseLoose = `(?:-?(${preReleaseIdentifierLoose}(?:\\.${preReleaseIdentifierLoose})*))`;
const preReleaseIdentifier = `(?:${numericIdentifier}|${nonNumericIdentifier})`;
const preRelease = `(?:-(${preReleaseIdentifier}(?:\\.${preReleaseIdentifier})*))`;
// "x-range" version component: a number or one of the wildcards x / X / *.
const xRangeIdentifier = `${numericIdentifier}|x|X|\\*`;
const xRangePlain = `[v=\\s]*(${xRangeIdentifier})(?:\\.(${xRangeIdentifier})(?:\\.(${xRangeIdentifier})(?:${preRelease})?${build}?)?)?`;
// Hyphen range: "1.2.3 - 2.3.4".
const hyphenRange = `^\\s*(${xRangePlain})\\s+-\\s+(${xRangePlain})\\s*$`;
const mainVersionLoose = `(${numericIdentifierLoose})\\.(${numericIdentifierLoose})\\.(${numericIdentifierLoose})`;
const loosePlain = `[v=\\s]*${mainVersionLoose}${preReleaseLoose}?${build}?`;
// Comparison operator: <, >, <=, >=, = — possibly empty.
const gtlt = "((?:<|>)?=?)";
// Whitespace-normalization patterns for operators, tilde, and caret.
const comparatorTrim = `(\\s*)${gtlt}\\s*(${loosePlain}|${xRangePlain})`;
const loneTilde = "(?:~>?)";
const tildeTrim = `(\\s*)${loneTilde}\\s+`;
const loneCaret = "(?:\\^)";
const caretTrim = `(\\s*)${loneCaret}\\s+`;
// A bare "*" comparator (optionally preceded by an operator).
const star = "(<|>)?=?\\s*\\*";
const caret = `^${loneCaret}${xRangePlain}$`;
// Strict "major.minor.patch" core and the full strict version form.
const mainVersion = `(${numericIdentifier})\\.(${numericIdentifier})\\.(${numericIdentifier})`;
const fullPlain = `v?${mainVersion}${preRelease}?${build}?`;
const tilde = `^${loneTilde}${xRangePlain}$`;
const xRange = `^${gtlt}\\s*${xRangePlain}$`;
// A single comparator; the alternation also accepts the empty string.
const comparator = `^${gtlt}\\s*(${fullPlain})$|^$`;
// ">=0.0.0" — NOTE(review): the dots are unescaped, so any character is
// accepted between the zeros; presumably inherited from upstream — confirm.
const gte0 = "^\\s*>=\\s*0.0.0\\s*$";
|
||||
// Compile a pattern source string into a fresh RegExp instance.
function parseRegex(source) {
  const compiled = new RegExp(source);
  return compiled;
}
|
||||
// A version component is a wildcard when it is empty/missing, "x"/"X", or "*".
function isXVersion(version) {
  if (!version) {
    return true;
  }
  const lowered = version.toLowerCase();
  return lowered === "x" || version === "*";
}
|
||||
// Left-to-right function composition: pipe(f, g)(x) === g(f(x)).
function pipe(...fns) {
  return (input) => {
    let value = input;
    for (const fn of fns) {
      value = fn(value);
    }
    return value;
  };
}
|
||||
// Match a single comparator string (e.g. ">=1.2.3") against the comparator
// grammar. Returns the RegExp match array, or null when the string is not a
// valid comparator (the grammar also accepts the empty string).
function extractComparator(comparatorString) {
  return comparatorString.match(parseRegex(comparator));
}
|
||||
// Join version components into "major.minor.patch", appending
// "-preRelease" only when a pre-release part is present (truthy).
function combineVersion(major, minor, patch, preRelease2) {
  const core = [major, minor, patch].join(".");
  return preRelease2 ? `${core}-${preRelease2}` : core;
}
|
||||
// Convert a hyphen range "A - B" into bound comparators ">=A <=B".
// Wildcard (x/X/*) components widen the corresponding bound.
function parseHyphen(range) {
  return range.replace(
    parseRegex(hyphenRange),
    (_range, from, fromMajor, fromMinor, fromPatch, _fromPreRelease, _fromBuild, to, toMajor, toMinor, toPatch, toPreRelease) => {
      // Lower bound: missing components default to zero;
      // a fully-wild "from" imposes no lower bound at all.
      if (isXVersion(fromMajor)) {
        from = "";
      } else if (isXVersion(fromMinor)) {
        from = `>=${fromMajor}.0.0`;
      } else if (isXVersion(fromPatch)) {
        from = `>=${fromMajor}.${fromMinor}.0`;
      } else {
        from = `>=${from}`;
      }
      // Upper bound: wildcards become an exclusive "< next version" bound
      // ("-0" excludes pre-releases of the boundary version).
      if (isXVersion(toMajor)) {
        to = "";
      } else if (isXVersion(toMinor)) {
        to = `<${+toMajor + 1}.0.0-0`;
      } else if (isXVersion(toPatch)) {
        to = `<${toMajor}.${+toMinor + 1}.0-0`;
      } else if (toPreRelease) {
        to = `<=${toMajor}.${toMinor}.${toPatch}-${toPreRelease}`;
      } else {
        to = `<=${to}`;
      }
      return `${from} ${to}`.trim();
    }
  );
}
|
||||
// Remove whitespace between an operator and its version, e.g.
// "> 1.2.3" -> ">1.2.3" (rejoins the captured pieces without inner spacing).
function parseComparatorTrim(range) {
  return range.replace(parseRegex(comparatorTrim), "$1$2$3");
}
|
||||
// Normalize "~ 1.2.3" to "~1.2.3" (strip whitespace after the tilde).
function parseTildeTrim(range) {
  return range.replace(parseRegex(tildeTrim), "$1~");
}
|
||||
// Normalize "^ 1.2.3" to "^1.2.3" (strip whitespace after the caret).
function parseCaretTrim(range) {
  return range.replace(parseRegex(caretTrim), "$1^");
}
|
||||
// Expand caret ranges ("^1.2.3") in a space-separated range string into
// explicit ">=lower <upper-0" bounds. The caret keeps the left-most
// non-zero component fixed; wildcard (x/X/*) and pre-release components
// have their own widening rules.
function parseCarets(range) {
  return range.trim().split(/\s+/).map((rangeVersion) => {
    return rangeVersion.replace(
      parseRegex(caret),
      (_, major, minor, patch, preRelease2) => {
        if (isXVersion(major)) {
          // "^*" — matches everything.
          return "";
        } else if (isXVersion(minor)) {
          // "^1" -> >=1.0.0 <2.0.0-0
          return `>=${major}.0.0 <${+major + 1}.0.0-0`;
        } else if (isXVersion(patch)) {
          if (major === "0") {
            // "^0.2" -> >=0.2.0 <0.3.0-0 (major 0: minor is the pinned component)
            return `>=${major}.${minor}.0 <${major}.${+minor + 1}.0-0`;
          } else {
            // "^1.2" -> >=1.2.0 <2.0.0-0
            return `>=${major}.${minor}.0 <${+major + 1}.0.0-0`;
          }
        } else if (preRelease2) {
          if (major === "0") {
            if (minor === "0") {
              // "^0.0.3-beta" -> >=0.0.3-beta <0.0.4-0
              return `>=${major}.${minor}.${patch}-${preRelease2} <${major}.${minor}.${+patch + 1}-0`;
            } else {
              // "^0.2.3-beta" -> >=0.2.3-beta <0.3.0-0
              return `>=${major}.${minor}.${patch}-${preRelease2} <${major}.${+minor + 1}.0-0`;
            }
          } else {
            // "^1.2.3-beta" -> >=1.2.3-beta <2.0.0-0
            return `>=${major}.${minor}.${patch}-${preRelease2} <${+major + 1}.0.0-0`;
          }
        } else {
          if (major === "0") {
            if (minor === "0") {
              // "^0.0.3" -> >=0.0.3 <0.0.4-0
              return `>=${major}.${minor}.${patch} <${major}.${minor}.${+patch + 1}-0`;
            } else {
              // "^0.2.3" -> >=0.2.3 <0.3.0-0
              return `>=${major}.${minor}.${patch} <${major}.${+minor + 1}.0-0`;
            }
          }
          // "^1.2.3" -> >=1.2.3 <2.0.0-0
          return `>=${major}.${minor}.${patch} <${+major + 1}.0.0-0`;
        }
      }
    );
  }).join(" ");
}
|
||||
// Expand tilde ranges ("~1.2.3") into explicit bounds: a tilde allows
// patch-level changes (or minor-level changes when only the major is given).
function parseTildes(range) {
  return range.trim().split(/\s+/).map((rangeVersion) => {
    return rangeVersion.replace(
      parseRegex(tilde),
      (_, major, minor, patch, preRelease2) => {
        if (isXVersion(major)) {
          // "~*" — matches everything.
          return "";
        } else if (isXVersion(minor)) {
          // "~1" -> >=1.0.0 <2.0.0-0
          return `>=${major}.0.0 <${+major + 1}.0.0-0`;
        } else if (isXVersion(patch)) {
          // "~1.2" -> >=1.2.0 <1.3.0-0
          return `>=${major}.${minor}.0 <${major}.${+minor + 1}.0-0`;
        } else if (preRelease2) {
          // "~1.2.3-beta" -> >=1.2.3-beta <1.3.0-0
          return `>=${major}.${minor}.${patch}-${preRelease2} <${major}.${+minor + 1}.0-0`;
        }
        // "~1.2.3" -> >=1.2.3 <1.3.0-0
        return `>=${major}.${minor}.${patch} <${major}.${+minor + 1}.0-0`;
      }
    );
  }).join(" ");
}
|
||||
// Expand x-ranges (1.x, 1.2.*, >=1.x, …) into concrete comparators.
// The replace callback receives the comparator operator (gtlt2) and the
// four version captures; "x"/"X"/"*" in any position marks that position
// and everything after it as a wildcard.
function parseXRanges(range) {
  return range.split(/\s+/).map((rangeVersion) => {
    return rangeVersion.trim().replace(
      parseRegex(xRange),
      (ret, gtlt2, major, minor, patch, preRelease2) => {
        // Wildcards cascade: an x major implies x minor and x patch.
        const isXMajor = isXVersion(major);
        const isXMinor = isXMajor || isXVersion(minor);
        const isXPatch = isXMinor || isXVersion(patch);
        // "=1.x" behaves like a bare "1.x".
        if (gtlt2 === "=" && isXPatch) {
          gtlt2 = "";
        }
        // Pre-release info is meaningless on a wildcard comparator.
        preRelease2 = "";
        if (isXMajor) {
          if (gtlt2 === ">" || gtlt2 === "<") {
            // ">*" / "<*" can never match any version.
            return "<0.0.0-0";
          } else {
            // Bare "*" matches everything.
            return "*";
          }
        } else if (gtlt2 && isXPatch) {
          // Operator applied to a partial version: zero-fill the wildcard
          // positions, then adjust operator/bounds so the comparison is
          // exclusive or inclusive as intended.
          if (isXMinor) {
            minor = 0;
          }
          patch = 0;
          if (gtlt2 === ">") {
            // ">1" -> ">=2.0.0"; ">1.2" -> ">=1.3.0".
            gtlt2 = ">=";
            if (isXMinor) {
              major = +major + 1;
              minor = 0;
              patch = 0;
            } else {
              minor = +minor + 1;
              patch = 0;
            }
          } else if (gtlt2 === "<=") {
            // "<=1" -> "<2.0.0"; "<=1.2" -> "<1.3.0".
            gtlt2 = "<";
            if (isXMinor) {
              major = +major + 1;
            } else {
              minor = +minor + 1;
            }
          }
          if (gtlt2 === "<") {
            // "-0" keeps pre-releases of the excluded bound out of range.
            preRelease2 = "-0";
          }
          return `${gtlt2 + major}.${minor}.${patch}${preRelease2}`;
        } else if (isXMinor) {
          // "1.x" -> ">=1.0.0 <2.0.0-0".
          return `>=${major}.0.0${preRelease2} <${+major + 1}.0.0-0`;
        } else if (isXPatch) {
          // "1.2.x" -> ">=1.2.0 <1.3.0-0".
          return `>=${major}.${minor}.0${preRelease2} <${major}.${+minor + 1}.0-0`;
        }
        // No wildcard at all: leave the comparator untouched.
        return ret;
      }
    );
  }).join(" ");
}
|
||||
// Normalize a bare "*" (match-anything) range to the empty string.
function parseStar(range) {
  const trimmed = range.trim();
  return trimmed.replace(parseRegex(star), "");
}
|
||||
// Strip a redundant ">=0.0.0"-style prefix from a comparator string.
function parseGTE0(comparatorString) {
  const trimmed = comparatorString.trim();
  return trimmed.replace(parseRegex(gte0), "");
}
|
||||
// Order two version atoms (major/minor/patch numbers or pre-release parts).
// Atoms that coerce to a truthy number are compared numerically; anything
// else (including "0", since +"0" is falsy) keeps its original value and is
// compared with the language's native ordering. Returns 1 / 0 / -1.
function compareAtom(rangeAtom, versionAtom) {
  const left = +rangeAtom || rangeAtom;
  const right = +versionAtom || versionAtom;
  if (left > right) return 1;
  return left === right ? 0 : -1;
}
|
||||
// Compare the pre-release portions of two parsed versions following
// semver precedence: a version WITHOUT a pre-release ranks above one
// with it, and pre-release identifier lists are compared element-wise.
// Returns 1 / 0 / -1 (range vs version).
function comparePreRelease(rangeAtom, versionAtom) {
  const { preRelease: rangePreRelease } = rangeAtom;
  const { preRelease: versionPreRelease } = versionAtom;
  // Only the version carries a pre-release -> range ranks higher.
  if (rangePreRelease === void 0 && !!versionPreRelease) {
    return 1;
  }
  // Only the range carries a pre-release -> range ranks lower.
  if (!!rangePreRelease && versionPreRelease === void 0) {
    return -1;
  }
  if (rangePreRelease === void 0 && versionPreRelease === void 0) {
    return 0;
  }
  // Element-wise walk. The loop deliberately runs one step past the end
  // (i <= n) so that length differences surface via the undefined-element
  // checks below instead of silently ending the loop.
  for (let i = 0, n = rangePreRelease.length; i <= n; i++) {
    const rangeElement = rangePreRelease[i];
    const versionElement = versionPreRelease[i];
    if (rangeElement === versionElement) {
      continue;
    }
    if (rangeElement === void 0 && versionElement === void 0) {
      return 0;
    }
    // A missing/falsy element on the range side ranks the range higher;
    // on the version side, lower (mirrors upstream vite-plugin-federation).
    if (!rangeElement) {
      return 1;
    }
    if (!versionElement) {
      return -1;
    }
    // First differing identifier decides the ordering.
    return compareAtom(rangeElement, versionElement);
  }
  return 0;
}
|
||||
// Order two parsed versions: by major, then minor, then patch, and
// finally by pre-release precedence. Returns 1 / 0 / -1.
function compareVersion(rangeAtom, versionAtom) {
  const byMajor = compareAtom(rangeAtom.major, versionAtom.major);
  if (byMajor) return byMajor;
  const byMinor = compareAtom(rangeAtom.minor, versionAtom.minor);
  if (byMinor) return byMinor;
  const byPatch = compareAtom(rangeAtom.patch, versionAtom.patch);
  if (byPatch) return byPatch;
  return comparePreRelease(rangeAtom, versionAtom);
}
|
||||
// Two comparator atoms are equal when their canonical version strings match.
function eq(rangeAtom, versionAtom) {
  const { version: a } = rangeAtom;
  const { version: b } = versionAtom;
  return a === b;
}
|
||||
// Decide whether versionAtom satisfies a single comparator atom.
// compareVersion(range, version) < 0 means the range bound is LOWER than
// the version, i.e. the version is above the bound.
function compare(rangeAtom, versionAtom) {
  const op = rangeAtom.operator;
  if (op === undefined) {
    // A comparator without any operator matches everything.
    return true;
  }
  if (op === "" || op === "=") {
    return eq(rangeAtom, versionAtom);
  }
  if (op === ">") {
    return compareVersion(rangeAtom, versionAtom) < 0;
  }
  if (op === ">=") {
    return eq(rangeAtom, versionAtom) || compareVersion(rangeAtom, versionAtom) < 0;
  }
  if (op === "<") {
    return compareVersion(rangeAtom, versionAtom) > 0;
  }
  if (op === "<=") {
    return eq(rangeAtom, versionAtom) || compareVersion(rangeAtom, versionAtom) > 0;
  }
  // Unknown operator: never matches.
  return false;
}
|
||||
// Normalize one comparator token: expand carets, then tildes, then
// x-ranges, and finally drop bare stars.
function parseComparatorString(range) {
  const normalize = pipe(parseCarets, parseTildes, parseXRanges, parseStar);
  return normalize(range);
}
|
||||
// Pre-process a raw range string: expand hyphen ranges, trim whitespace
// around comparators/tildes/carets, then collapse whitespace runs so the
// result is a single-space-separated comparator list.
function parseRange(range) {
  const preprocess = pipe(parseHyphen, parseComparatorTrim, parseTildeTrim, parseCaretTrim);
  const cleaned = preprocess(range.trim());
  return cleaned.split(/\s+/).join(" ");
}
|
||||
// Return true when `version` satisfies the semver `range` expression.
// The range is normalized (hyphen/caret/tilde/x-range expansion) into a
// whitespace-separated list of primitive comparators that must ALL accept
// the version. A falsy version never satisfies anything.
function satisfy(version, range) {
  if (!version) {
    return false;
  }
  const parsedRange = parseRange(range);
  // Expand every normalized token into primitive comparators.
  const parsedComparator = parsedRange.split(" ").map((rangeVersion) => parseComparatorString(rangeVersion)).join(" ");
  // Strip redundant ">=0.0.0"-style prefixes from each comparator.
  const comparators = parsedComparator.split(/\s+/).map((comparator2) => parseGTE0(comparator2));
  const extractedVersion = extractComparator(version);
  if (!extractedVersion) {
    // `version` is not a parseable semver string.
    return false;
  }
  // Regex capture layout: [full, operator, _, major, minor, patch, preRelease].
  // (versionOperator is unused for a plain version but kept for layout.)
  const [
    ,
    versionOperator,
    ,
    versionMajor,
    versionMinor,
    versionPatch,
    versionPreRelease
  ] = extractedVersion;
  const versionAtom = {
    // Canonical "x.y.z[-pre]" string used by equality checks.
    version: combineVersion(
      versionMajor,
      versionMinor,
      versionPatch,
      versionPreRelease
    ),
    major: versionMajor,
    minor: versionMinor,
    patch: versionPatch,
    // Pre-release identifiers are compared element-wise, so pre-split on ".".
    preRelease: versionPreRelease == null ? void 0 : versionPreRelease.split(".")
  };
  for (const comparator2 of comparators) {
    const extractedComparator = extractComparator(comparator2);
    if (!extractedComparator) {
      // Malformed comparator -> the range cannot be satisfied.
      return false;
    }
    const [
      ,
      rangeOperator,
      ,
      rangeMajor,
      rangeMinor,
      rangePatch,
      rangePreRelease
    ] = extractedComparator;
    const rangeAtom = {
      operator: rangeOperator,
      version: combineVersion(
        rangeMajor,
        rangeMinor,
        rangePatch,
        rangePreRelease
      ),
      major: rangeMajor,
      minor: rangeMinor,
      patch: rangePatch,
      preRelease: rangePreRelease == null ? void 0 : rangePreRelease.split(".")
    };
    // All comparators must match (logical AND over the list).
    if (!compare(rangeAtom, versionAtom)) {
      return false;
    }
  }
  return true;
}
|
||||
|
||||
// Shared-module configuration injected at build time (empty here: this
// bundle declares no shared dependencies of its own).
// eslint-disable-next-line no-undef
const moduleMap = {};
// Cache of already-resolved shared modules, keyed by module name.
const moduleCache = Object.create(null);
|
||||
// Resolve a shared module by name: serve the cached copy when present,
// otherwise try the federation runtime share scope first and fall back
// to the locally bundled copy.
async function importShared(name, shareScope = 'default') {
  const cached = moduleCache[name];
  if (cached) {
    return cached;
  }
  const fromRuntime = await getSharedFromRuntime(name, shareScope);
  return fromRuntime || getSharedFromLocal(name);
}
|
||||
// Resolve a shared module from the federation runtime share registry
// (globalThis.__federation_shared__). When this consumer declares a
// requiredVersion for the module, only a provider version satisfying it
// is used; otherwise the first provided version wins. Returns the
// flattened module, or undefined when no usable provider exists.
async function getSharedFromRuntime(name, shareScope) {
  let module = null;
  if (globalThis?.__federation_shared__?.[shareScope]?.[name]) {
    // Map of provided version string -> { get, ... } for this module.
    const versionObj = globalThis.__federation_shared__[shareScope][name];
    const requiredVersion = moduleMap[name]?.requiredVersion;
    const hasRequiredVersion = !!requiredVersion;
    if (hasRequiredVersion) {
      // Pick the first provided version that satisfies the requirement.
      const versionKey = Object.keys(versionObj).find((version) =>
        satisfy(version, requiredVersion)
      );
      if (versionKey) {
        const versionValue = versionObj[versionKey];
        module = await (await versionValue.get())();
      } else {
        // FIX: the original message interpolated `versionKey` (always
        // undefined on this branch) and an escaped \${...} placeholder
        // that printed as literal source text. Report the versions the
        // provider actually offers and the real required version.
        console.log(
          `provider support ${name}(${Object.keys(versionObj).join(', ')}) is not satisfied requiredVersion(${requiredVersion})`
        );
      }
    } else {
      // No version requirement configured: take the first provided version.
      const versionKey = Object.keys(versionObj)[0];
      const versionValue = versionObj[versionKey];
      module = await (await versionValue.get())();
    }
  }
  if (module) {
    return flattenModule(module, name)
  }
}
|
||||
// Load the consumer's own bundled copy of a shared module. Only possible
// when the consumer config declared import=true for this module; otherwise
// an error is logged and undefined is returned.
async function getSharedFromLocal(name) {
  const entry = moduleMap[name];
  if (!entry?.import) {
    console.error(
      `consumer config import=false,so cant use callback shared module`
    );
    return;
  }
  const factory = await entry.get();
  const module = await factory();
  return flattenModule(module, name);
}
|
||||
// Normalize an imported module namespace before caching it.
// use a shared module which export default a function will getting error
// 'TypeError: xxx is not a function' — so when the default export is a
// function, return the function itself with the named exports copied onto
// it; otherwise merge the default object's properties with the namespace.
function flattenModule(module, name) {
  if (typeof module.default === 'function') {
    for (const key of Object.keys(module)) {
      if (key !== 'default') {
        module.default[key] = module[key];
      }
    }
    moduleCache[name] = module.default;
    return module.default;
  }
  if (module.default) {
    module = Object.assign({}, module.default, module);
  }
  moduleCache[name] = module;
  return module;
}
|
||||
|
||||
export { importShared, getSharedFromLocal as importSharedLocal, getSharedFromRuntime as importSharedRuntime };
|
||||
9
plugins.v2/clashruleprovider/dist/assets/_plugin-vue_export-helper-pcqpp-6-.js
vendored
Normal file
9
plugins.v2/clashruleprovider/dist/assets/_plugin-vue_export-helper-pcqpp-6-.js
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
// Attach compiler-generated properties (render fn, scoped-style id, …) to
// a single-file-component definition. Options-API components keep their
// options object on __vccOpts; plain definitions are patched directly.
const _export_sfc = (sfc, props) => {
  const target = sfc.__vccOpts || sfc;
  props.forEach(([key, val]) => {
    target[key] = val;
  });
  return target;
};
|
||||
|
||||
export { _export_sfc as _ };
|
||||
87
plugins.v2/clashruleprovider/dist/assets/remoteEntry.js
vendored
Normal file
87
plugins.v2/clashruleprovider/dist/assets/remoteEntry.js
vendored
Normal file
@@ -0,0 +1,87 @@
|
||||
// In-flight dynamic imports, keyed by chunk specifier (dedupe cache).
const currentImports = {};
// Export names that indicate a chunk only re-exports its default export.
const exportSet = new Set(['Module', '__esModule', 'default', '_export_sfc']);
// Remote exposes: maps an exposed path to a loader that injects the
// chunk's CSS and resolves a factory for the chunk's module.
let moduleMap = {
"./Page":()=>{
dynamicLoadingCss(["__federation_expose_Page-Bl7XNZ7k.css"], false, './Page');
return __federation_import('./__federation_expose_Page-DlQgf7u6.js').then(module =>Object.keys(module).every(item => exportSet.has(item)) ? () => module.default : () => module)},
"./Config":()=>{
dynamicLoadingCss(["__federation_expose_Config-DXzIavcD.css"], false, './Config');
return __federation_import('./__federation_expose_Config-C3BpNVeC.js').then(module =>Object.keys(module).every(item => exportSet.has(item)) ? () => module.default : () => module)},
"./Dashboard":()=>{
dynamicLoadingCss([], false, './Dashboard');
return __federation_import('./__federation_expose_Dashboard-BkyO-3pr.js').then(module =>Object.keys(module).every(item => exportSet.has(item)) ? () => module.default : () => module)},};
// Stylesheet hrefs already appended to document.head (insert-once guard).
const seen = {};
|
||||
// Inject the CSS files belonging to an exposed module. The href for each
// css path is resolved against this remoteEntry.js URL and the build base;
// depending on `dontAppendStylesToHead` the hrefs are either appended to
// document.head or parked on a window-scoped array for the host to apply.
const dynamicLoadingCss = (cssFilePaths, dontAppendStylesToHead, exposeItemName) => {
  const metaUrl = import.meta.url;
  if (typeof metaUrl === 'undefined') {
    console.warn('The remote style takes effect only when the build.target option in the vite.config.ts file is higher than that of "es2020".');
    return;
  }

  // Directory this remoteEntry.js was loaded from.
  const curUrl = metaUrl.substring(0, metaUrl.lastIndexOf('remoteEntry.js'));
  const base = '/';
  // NOTE(review): stray expression statement emitted by the bundler; no effect.
  'assets';

  cssFilePaths.forEach(cssPath => {
    let href = '';
    const baseUrl = base || curUrl;
    if (baseUrl) {
      // Slash-normalizing helpers for joining URL segments.
      const trimmer = {
        trailing: (path) => (path.endsWith('/') ? path.slice(0, -1) : path),
        leading: (path) => (path.startsWith('/') ? path.slice(1) : path)
      };
      const isAbsoluteUrl = (url) => url.startsWith('http') || url.startsWith('//');

      const cleanBaseUrl = trimmer.trailing(baseUrl);
      const cleanCssPath = trimmer.leading(cssPath);
      const cleanCurUrl = trimmer.trailing(curUrl);

      if (isAbsoluteUrl(baseUrl)) {
        href = [cleanBaseUrl, cleanCssPath].filter(Boolean).join('/');
      } else {
        // Relative base: anchor the css path on the remote-entry URL,
        // avoiding a doubled base segment when curUrl already contains it.
        if (cleanCurUrl.includes(cleanBaseUrl)) {
          href = [cleanCurUrl, cleanCssPath].filter(Boolean).join('/');
        } else {
          href = [cleanCurUrl + cleanBaseUrl, cleanCssPath].filter(Boolean).join('/');
        }
      }
    } else {
      href = cssPath;
    }

    if (dontAppendStylesToHead) {
      // Host application will pick these hrefs up and insert them itself.
      const key = 'css__ClashRuleProvider__' + exposeItemName;
      window[key] = window[key] || [];
      window[key].push(href);
      return;
    }

    // Append each stylesheet to the document at most once.
    if (href in seen) return;
    seen[href] = true;

    const element = document.createElement('link');
    element.rel = 'stylesheet';
    element.href = href;
    document.head.appendChild(element);
  });
};
|
||||
// Dynamically import a chunk at most once; later calls reuse the
// in-flight or settled promise from the dedupe cache.
async function __federation_import(name) {
  let pending = currentImports[name];
  if (pending == null) {
    pending = import(name);
    currentImports[name] = pending;
  }
  return pending;
}

// Look up and invoke a remote module's loader by its exposed path.
const get = (module) => {
  const loader = moduleMap[module];
  if (!loader) {
    throw new Error('Can not find remote module ' + module);
  }
  return loader();
};
|
||||
// Populate the global federation share registry from a host-provided
// share scope. Entries are stored as scope -> module name -> version,
// merging with (never clobbering) what is already registered.
const init = (shareScope) => {
  const root = globalThis.__federation_shared__ = globalThis.__federation_shared__ || {};
  for (const [key, value] of Object.entries(shareScope)) {
    for (const [versionKey, versionValue] of Object.entries(value)) {
      const scope = versionValue.scope || 'default';
      const scoped = root[scope] = root[scope] || {};
      const byName = scoped[key] = scoped[key] || {};
      byName[versionKey] = versionValue;
    }
  }
};
|
||||
|
||||
export { dynamicLoadingCss, get, init };
|
||||
@@ -1,24 +1,22 @@
|
||||
import glob
|
||||
import os
|
||||
import shutil
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
from typing import Any, List, Dict, Tuple, Optional
|
||||
|
||||
import pytz
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
from apscheduler.triggers.cron import CronTrigger
|
||||
from app.utils.string import StringUtils
|
||||
from app.schemas.types import EventType
|
||||
from app.schemas import ServiceInfo
|
||||
from app.core.event import eventmanager, Event
|
||||
|
||||
from app.core.config import settings
|
||||
from app.plugins import _PluginBase
|
||||
from typing import Any, List, Dict, Tuple, Optional
|
||||
from app.log import logger
|
||||
from app.schemas import NotificationType
|
||||
from app.core.event import eventmanager, Event
|
||||
from app.helper.downloader import DownloaderHelper
|
||||
from app.log import logger
|
||||
from app.plugins import _PluginBase
|
||||
from app.schemas import NotificationType
|
||||
from app.schemas import ServiceInfo
|
||||
from app.schemas.types import EventType
|
||||
from app.utils.string import StringUtils
|
||||
|
||||
|
||||
class CleanInvalidSeed(_PluginBase):
|
||||
# 插件名称
|
||||
@@ -56,6 +54,7 @@ class CleanInvalidSeed(_PluginBase):
|
||||
_exclude_categories = ""
|
||||
_exclude_labels = ""
|
||||
_more_logs = False
|
||||
_downloaders = []
|
||||
# 定时器
|
||||
_scheduler: Optional[BackgroundScheduler] = None
|
||||
_error_msg = [
|
||||
@@ -67,7 +66,7 @@ class CleanInvalidSeed(_PluginBase):
|
||||
_custom_error_msg = ""
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self.downloader_helper = DownloaderHelper()
|
||||
|
||||
# 停止现有任务
|
||||
self.stop_service()
|
||||
|
||||
@@ -97,8 +96,7 @@ class CleanInvalidSeed(_PluginBase):
|
||||
self._scheduler.add_job(
|
||||
func=self.clean_invalid_seed,
|
||||
trigger="date",
|
||||
run_date=datetime.now(tz=pytz.timezone(settings.TZ))
|
||||
+ timedelta(seconds=3),
|
||||
run_date=datetime.now(tz=pytz.timezone(settings.TZ)) + timedelta(seconds=3),
|
||||
name="清理无效种子",
|
||||
)
|
||||
# 关闭一次性开关
|
||||
@@ -137,7 +135,7 @@ class CleanInvalidSeed(_PluginBase):
|
||||
)
|
||||
|
||||
@property
|
||||
def service_info(self) -> Optional[ServiceInfo]:
|
||||
def service_info(self) -> Optional[Dict[str, ServiceInfo]]:
|
||||
"""
|
||||
服务信息
|
||||
"""
|
||||
@@ -145,7 +143,7 @@ class CleanInvalidSeed(_PluginBase):
|
||||
logger.warning("尚未配置下载器,请检查配置")
|
||||
return None
|
||||
|
||||
services = self.downloader_helper.get_services(name_filters=self._downloaders)
|
||||
services = DownloaderHelper().get_services(name_filters=self._downloaders)
|
||||
|
||||
if not services:
|
||||
logger.warning("获取下载器实例失败,请检查配置")
|
||||
@@ -166,11 +164,12 @@ class CleanInvalidSeed(_PluginBase):
|
||||
|
||||
return active_services
|
||||
|
||||
def check_is_qb(self, service_info) -> bool:
|
||||
@staticmethod
|
||||
def check_is_qb(service_info) -> bool:
|
||||
"""
|
||||
检查下载器类型是否为 qbittorrent 或 transmission
|
||||
"""
|
||||
if self.downloader_helper.is_downloader(service_type="qbittorrent", service=service_info):
|
||||
if DownloaderHelper().is_downloader(service_type="qbittorrent", service=service_info):
|
||||
return True
|
||||
|
||||
return False
|
||||
@@ -225,11 +224,11 @@ class CleanInvalidSeed(_PluginBase):
|
||||
event_data = event.event_data
|
||||
if event_data:
|
||||
if not (
|
||||
event_data.get("action") == "detect_invalid_torrents"
|
||||
or event_data.get("action") == "delete_invalid_torrents"
|
||||
or event_data.get("action") == "detect_invalid_files"
|
||||
or event_data.get("action") == "delete_invalid_files"
|
||||
or event_data.get("action") == "toggle_notify_all"
|
||||
event_data.get("action") == "detect_invalid_torrents"
|
||||
or event_data.get("action") == "delete_invalid_torrents"
|
||||
or event_data.get("action") == "detect_invalid_files"
|
||||
or event_data.get("action") == "delete_invalid_files"
|
||||
or event_data.get("action") == "toggle_notify_all"
|
||||
):
|
||||
return
|
||||
self.post_message(
|
||||
@@ -344,7 +343,7 @@ class CleanInvalidSeed(_PluginBase):
|
||||
downloader_name = service.name
|
||||
downloader_obj = service.instance
|
||||
if not downloader_obj:
|
||||
logger.error(f"{self.LOG_TAG} 获取下载器失败 {downloader_name}")
|
||||
logger.error(f"获取下载器失败 {downloader_name}")
|
||||
continue
|
||||
logger.info(f"开始清理 {downloader_name} 无效做种...")
|
||||
all_torrents = self.get_all_torrents(service)
|
||||
@@ -376,13 +375,14 @@ class CleanInvalidSeed(_PluginBase):
|
||||
is_tracker_working = True
|
||||
|
||||
if not (
|
||||
(tracker.get("status") == 4) and (tracker.get("msg") in error_msgs)
|
||||
(tracker.get("status") == 4) and (tracker.get("msg") in error_msgs)
|
||||
):
|
||||
is_invalid = False
|
||||
working_tracker_set.add(tracker_domian)
|
||||
|
||||
if self._more_logs:
|
||||
logger.info(f"处理 [{torrent.name}] tracker [{tracker_domian}]: 分类: [{torrent.category}], 标签: [{torrent.tags}], 状态: [{tracker.get('status')}], msg: [{tracker.get('msg')}], is_invalid: [{is_invalid}], is_working: [{is_tracker_working}]")
|
||||
logger.info(
|
||||
f"处理 [{torrent.name}] tracker [{tracker_domian}]: 分类: [{torrent.category}], 标签: [{torrent.tags}], 状态: [{tracker.get('status')}], msg: [{tracker.get('msg')}], is_invalid: [{is_invalid}], is_working: [{is_tracker_working}]")
|
||||
if is_invalid:
|
||||
temp_invalid_torrents.append(torrent)
|
||||
elif not is_tracker_working:
|
||||
@@ -433,25 +433,30 @@ class CleanInvalidSeed(_PluginBase):
|
||||
if not is_excluded:
|
||||
if self._label_only:
|
||||
# 仅标记
|
||||
downloader_obj.set_torrents_tag(ids=torrent.get("hash"), tags=[self._label if self._label != "" else "无效做种"])
|
||||
downloader_obj.set_torrents_tag(ids=torrent.get("hash"), tags=[
|
||||
self._label if self._label != "" else "无效做种"])
|
||||
else:
|
||||
# 只删除种子不删除文件,以防其它站点辅种
|
||||
downloader_obj.delete_torrents(False, torrent.get("hash"))
|
||||
# 标记已处理种子信息
|
||||
deleted_torrent_tuple_list.append(
|
||||
(
|
||||
torrent.name,
|
||||
torrent.category,
|
||||
torrent.tags,
|
||||
torrent.size,
|
||||
tracker_domian,
|
||||
tracker.msg,
|
||||
)
|
||||
(
|
||||
torrent.name,
|
||||
torrent.category,
|
||||
torrent.tags,
|
||||
torrent.size,
|
||||
tracker_domian,
|
||||
tracker.msg,
|
||||
)
|
||||
)
|
||||
break
|
||||
invalid_msg = f"检测到{len(invalid_torrent_tuple_list)}个失效做种\n"
|
||||
tracker_not_working_msg = f"检测到{len(tracker_not_working_torrents)}个tracker未工作做种,请检查种子状态\n"
|
||||
|
||||
exclude_categories_msg = ""
|
||||
exclude_labels_msg = ""
|
||||
deleted_msg = ""
|
||||
|
||||
if self._label_only or self._delete_invalid_torrents:
|
||||
if self._label_only:
|
||||
deleted_msg = f"标记了{len(deleted_torrent_tuple_list)}个失效种子\n"
|
||||
@@ -513,34 +518,34 @@ class CleanInvalidSeed(_PluginBase):
|
||||
logger.info(exclude_labels_msg)
|
||||
# 通知
|
||||
if self._notify:
|
||||
invalid_msg = invalid_msg.replace("_", "\_")
|
||||
invalid_msg = invalid_msg.replace("_", "\\_")
|
||||
self.post_message(
|
||||
mtype=NotificationType.SiteMessage,
|
||||
title=f"【清理无效做种】",
|
||||
text=invalid_msg,
|
||||
)
|
||||
if self._notify_all:
|
||||
tracker_not_working_msg = tracker_not_working_msg.replace("_", "\_")
|
||||
tracker_not_working_msg = tracker_not_working_msg.replace("_", "\\_")
|
||||
self.post_message(
|
||||
mtype=NotificationType.SiteMessage,
|
||||
title=f"【清理无效做种】",
|
||||
text=tracker_not_working_msg,
|
||||
)
|
||||
if self._label_only or self._delete_invalid_torrents:
|
||||
deleted_msg = deleted_msg.replace("_", "\_")
|
||||
deleted_msg = deleted_msg.replace("_", "\\_")
|
||||
self.post_message(
|
||||
mtype=NotificationType.SiteMessage,
|
||||
title=f"【清理无效做种】",
|
||||
text=deleted_msg,
|
||||
)
|
||||
if self._notify_all:
|
||||
exclude_categories_msg = exclude_categories_msg.replace("_", "\_")
|
||||
exclude_categories_msg = exclude_categories_msg.replace("_", "\\_")
|
||||
self.post_message(
|
||||
mtype=NotificationType.SiteMessage,
|
||||
title=f"【清理无效做种】",
|
||||
text=exclude_categories_msg,
|
||||
)
|
||||
exclude_labels_msg = exclude_labels_msg.replace("_", "\_")
|
||||
exclude_labels_msg = exclude_labels_msg.replace("_", "\\_")
|
||||
self.post_message(
|
||||
mtype=NotificationType.SiteMessage,
|
||||
title=f"【清理无效做种】",
|
||||
@@ -559,7 +564,7 @@ class CleanInvalidSeed(_PluginBase):
|
||||
downloader_name = service.name
|
||||
downloader_obj = service.instance
|
||||
if not downloader_obj:
|
||||
logger.error(f"{self.LOG_TAG} 获取下载器失败 {downloader_name}")
|
||||
logger.error(f"获取下载器失败 {downloader_name}")
|
||||
continue
|
||||
all_torrents += self.get_all_torrents(service)
|
||||
|
||||
@@ -638,7 +643,7 @@ class CleanInvalidSeed(_PluginBase):
|
||||
message += f"***已删除无效源文件,释放{StringUtils.str_filesize(total_size)}空间!***\n"
|
||||
logger.info(message)
|
||||
if self._notify:
|
||||
message = message.replace("_", "\_")
|
||||
message = message.replace("_", "\\_")
|
||||
self.post_message(
|
||||
mtype=NotificationType.SiteMessage,
|
||||
title=f"【清理无效做种】",
|
||||
@@ -646,7 +651,8 @@ class CleanInvalidSeed(_PluginBase):
|
||||
)
|
||||
logger.info("检测无效源文件任务结束")
|
||||
|
||||
def get_size(self, path: Path):
|
||||
@staticmethod
|
||||
def get_size(path: Path):
|
||||
total_size = 0
|
||||
if path.is_file():
|
||||
return path.stat().st_size
|
||||
@@ -801,7 +807,7 @@ class CleanInvalidSeed(_PluginBase):
|
||||
'model': 'downloaders',
|
||||
'label': '请选择下载器',
|
||||
'items': [{"title": config.name, "value": config.name}
|
||||
for config in self.downloader_helper.get_configs().values()]
|
||||
for config in DownloaderHelper().get_configs().values()]
|
||||
}
|
||||
}
|
||||
]
|
||||
@@ -813,7 +819,7 @@ class CleanInvalidSeed(_PluginBase):
|
||||
"content": [
|
||||
{
|
||||
"component": "VCol",
|
||||
"props": { "cols": 12, "md": 6 },
|
||||
"props": {"cols": 12, "md": 6},
|
||||
"content": [
|
||||
{
|
||||
"component": "VTextField",
|
||||
@@ -826,7 +832,7 @@ class CleanInvalidSeed(_PluginBase):
|
||||
},
|
||||
{
|
||||
"component": "VCol",
|
||||
"props": { "cols": 12, "md": 6 },
|
||||
"props": {"cols": 12, "md": 6},
|
||||
"content": [
|
||||
{
|
||||
"component": "VTextField",
|
||||
|
||||
@@ -194,10 +194,6 @@ class CrossSeed(_PluginBase):
|
||||
# 私有属性
|
||||
_scheduler = None
|
||||
cross_helper = None
|
||||
sites = None
|
||||
siteoper = None
|
||||
torrent = None
|
||||
downloader_helper = None
|
||||
# 开关
|
||||
_enabled = False
|
||||
_cron = None
|
||||
@@ -233,10 +229,7 @@ class CrossSeed(_PluginBase):
|
||||
cached = 0
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self.sites = SitesHelper()
|
||||
self.siteoper = SiteOper()
|
||||
self.torrent = TorrentHelper()
|
||||
self.downloader_helper = DownloaderHelper()
|
||||
|
||||
# 读取配置
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
@@ -257,7 +250,7 @@ class CrossSeed(_PluginBase):
|
||||
self._success_caches = [] if self._clearcache else config.get("success_caches") or []
|
||||
|
||||
# 过滤掉已删除的站点
|
||||
inner_site_list = self.siteoper.list_order_by_pri()
|
||||
inner_site_list = SiteOper().list_order_by_pri()
|
||||
all_sites = [(site.id, site.name) for site in inner_site_list] + [
|
||||
(site.get("id"), site.get("name")) for site in self.__custom_sites()
|
||||
]
|
||||
@@ -363,7 +356,7 @@ class CrossSeed(_PluginBase):
|
||||
logger.warning("尚未配置下载器,请检查配置")
|
||||
return None
|
||||
|
||||
services = self.downloader_helper.get_services(name_filters=self._downloaders)
|
||||
services = DownloaderHelper().get_services(name_filters=self._downloaders)
|
||||
if not services:
|
||||
logger.warning("获取下载器实例失败,请检查配置")
|
||||
return None
|
||||
@@ -445,7 +438,7 @@ class CrossSeed(_PluginBase):
|
||||
|
||||
# 站点的可选项
|
||||
site_options = ([{"title": site.name, "value": site.id}
|
||||
for site in self.siteoper.list_order_by_pri()]
|
||||
for site in SiteOper().list_order_by_pri()]
|
||||
+ [{"title": site.get("name"), "value": site.get("id")}
|
||||
for site in customSites])
|
||||
# 测试版本,只支持青蛙
|
||||
@@ -557,7 +550,7 @@ class CrossSeed(_PluginBase):
|
||||
'model': 'downloaders',
|
||||
'label': '辅种下载器',
|
||||
'items': [{"title": config.name, "value": config.name}
|
||||
for config in self.downloader_helper.get_configs().values()]
|
||||
for config in DownloaderHelper().get_configs().values()]
|
||||
}
|
||||
}
|
||||
]
|
||||
@@ -852,7 +845,7 @@ class CrossSeed(_PluginBase):
|
||||
if not torrent_info.site_name:
|
||||
# 尝试通过域名获取站点信息
|
||||
tracker_domain = StringUtils.get_url_domain(tracker)
|
||||
site_info = self.sites.get_indexer(tracker_domain)
|
||||
site_info = SitesHelper().get_indexer(tracker_domain)
|
||||
if site_info:
|
||||
torrent_info.site_name = site_info.get("name")
|
||||
|
||||
@@ -983,7 +976,7 @@ class CrossSeed(_PluginBase):
|
||||
chunk_size = 100
|
||||
for site_config in self._site_cs_infos:
|
||||
# 检查站点是否已经停用
|
||||
db_site = self.siteoper.get(site_config.id)
|
||||
db_site = SiteOper().get(site_config.id)
|
||||
if db_site and not db_site.is_active:
|
||||
logger.info(f"站点{site_config.name}已停用,跳过辅种")
|
||||
continue
|
||||
@@ -1045,7 +1038,8 @@ class CrossSeed(_PluginBase):
|
||||
|
||||
logger.info(f"下载器 {service.name} 辅种完成")
|
||||
|
||||
def __download(self, service: ServiceInfo, content: Union[bytes, str],
|
||||
@staticmethod
|
||||
def __download(service: ServiceInfo, content: Union[bytes, str],
|
||||
save_path: str) -> Optional[str]:
|
||||
"""
|
||||
添加下载任务
|
||||
@@ -1055,9 +1049,9 @@ class CrossSeed(_PluginBase):
|
||||
tag = StringUtils.generate_random_str(10)
|
||||
|
||||
state = service.instance.add_torrent(content=content,
|
||||
download_dir=save_path,
|
||||
is_paused=True,
|
||||
tag=["已整理", "辅种", tag])
|
||||
download_dir=save_path,
|
||||
is_paused=True,
|
||||
tag=["已整理", "辅种", tag])
|
||||
if not state:
|
||||
return None
|
||||
else:
|
||||
@@ -1070,9 +1064,9 @@ class CrossSeed(_PluginBase):
|
||||
elif service.type == "transmission":
|
||||
# 添加任务
|
||||
torrent = service.instance.add_torrent(content=content,
|
||||
download_dir=save_path,
|
||||
is_paused=True,
|
||||
labels=["已整理", "辅种"])
|
||||
download_dir=save_path,
|
||||
is_paused=True,
|
||||
labels=["已整理", "辅种"])
|
||||
if not torrent:
|
||||
return None
|
||||
else:
|
||||
@@ -1099,7 +1093,7 @@ class CrossSeed(_PluginBase):
|
||||
torrent_url = site_config.get_torrent_url(tor.torrent_id)
|
||||
|
||||
# 下载种子文件
|
||||
_, content, _, _, error_msg = self.torrent.download_torrent(
|
||||
_, content, _, _, error_msg = TorrentHelper().download_torrent(
|
||||
url=torrent_url,
|
||||
cookie=site_config.cookie,
|
||||
ua=site_config.ua or settings.USER_AGENT,
|
||||
|
||||
@@ -45,9 +45,6 @@ class DoubanRank(_PluginBase):
|
||||
# 退出事件
|
||||
_event = Event()
|
||||
# 私有属性
|
||||
downloadchain: DownloadChain = None
|
||||
subscribechain: SubscribeChain = None
|
||||
mediachain: MediaChain = None
|
||||
_scheduler = None
|
||||
_douban_address = {
|
||||
'movie-ustop': 'https://rsshub.app/douban/movie/ustop',
|
||||
@@ -70,9 +67,6 @@ class DoubanRank(_PluginBase):
|
||||
_proxy = False
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self.downloadchain = DownloadChain()
|
||||
self.subscribechain = SubscribeChain()
|
||||
self.mediachain = MediaChain()
|
||||
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
@@ -574,9 +568,10 @@ class DoubanRank(_PluginBase):
|
||||
if douban_id:
|
||||
# 识别豆瓣信息
|
||||
if settings.RECOGNIZE_SOURCE == "themoviedb":
|
||||
tmdbinfo = self.mediachain.get_tmdbinfo_by_doubanid(doubanid=douban_id, mtype=meta.type)
|
||||
tmdbinfo = MediaChain().get_tmdbinfo_by_doubanid(doubanid=douban_id, mtype=meta.type)
|
||||
if not tmdbinfo:
|
||||
logger.warn(f'未能通过豆瓣ID {douban_id} 获取到TMDB信息,标题:{title},豆瓣ID:{douban_id}')
|
||||
logger.warn(
|
||||
f'未能通过豆瓣ID {douban_id} 获取到TMDB信息,标题:{title},豆瓣ID:{douban_id}')
|
||||
continue
|
||||
mediainfo = self.chain.recognize_media(meta=meta, tmdbid=tmdbinfo.get("id"))
|
||||
if not mediainfo:
|
||||
@@ -598,22 +593,23 @@ class DoubanRank(_PluginBase):
|
||||
logger.info(f'{mediainfo.title_year} 评分不符合要求')
|
||||
continue
|
||||
# 查询缺失的媒体信息
|
||||
exist_flag, _ = self.downloadchain.get_no_exists_info(meta=meta, mediainfo=mediainfo)
|
||||
exist_flag, _ = DownloadChain().get_no_exists_info(meta=meta, mediainfo=mediainfo)
|
||||
if exist_flag:
|
||||
logger.info(f'{mediainfo.title_year} 媒体库中已存在')
|
||||
continue
|
||||
# 判断用户是否已经添加订阅
|
||||
if self.subscribechain.exists(mediainfo=mediainfo, meta=meta):
|
||||
subscribechain = SubscribeChain()
|
||||
if subscribechain.exists(mediainfo=mediainfo, meta=meta):
|
||||
logger.info(f'{mediainfo.title_year} 订阅已存在')
|
||||
continue
|
||||
# 添加订阅
|
||||
self.subscribechain.add(title=mediainfo.title,
|
||||
year=mediainfo.year,
|
||||
mtype=mediainfo.type,
|
||||
tmdbid=mediainfo.tmdb_id,
|
||||
season=meta.begin_season,
|
||||
exist_ok=True,
|
||||
username="豆瓣榜单")
|
||||
subscribechain.add(title=mediainfo.title,
|
||||
year=mediainfo.year,
|
||||
mtype=mediainfo.type,
|
||||
tmdbid=mediainfo.tmdb_id,
|
||||
season=meta.begin_season,
|
||||
exist_ok=True,
|
||||
username="豆瓣榜单")
|
||||
# 存储历史记录
|
||||
history.append({
|
||||
"title": title,
|
||||
|
||||
@@ -9,6 +9,7 @@ from apscheduler.triggers.cron import CronTrigger
|
||||
|
||||
from app import schemas
|
||||
from app.chain.media import MediaChain
|
||||
from app.db.subscribe_oper import SubscribeOper
|
||||
from app.db.user_oper import UserOper
|
||||
from app.schemas.types import MediaType, EventType, SystemConfigKey
|
||||
|
||||
@@ -50,12 +51,6 @@ class DoubanSync(_PluginBase):
|
||||
_interests_url: str = "https://www.douban.com/feed/people/%s/interests"
|
||||
_scheduler: Optional[BackgroundScheduler] = None
|
||||
_cache_path: Optional[Path] = None
|
||||
rsshelper = None
|
||||
downloadchain = None
|
||||
searchchain = None
|
||||
subscribechain = None
|
||||
mediachain = None
|
||||
useroper = None
|
||||
|
||||
# 配置属性
|
||||
_enabled: bool = False
|
||||
@@ -69,12 +64,6 @@ class DoubanSync(_PluginBase):
|
||||
_search_download = False
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self.rsshelper = RssHelper()
|
||||
self.downloadchain = DownloadChain()
|
||||
self.searchchain = SearchChain()
|
||||
self.subscribechain = SubscribeChain()
|
||||
self.mediachain = MediaChain()
|
||||
self.useroper = UserOper()
|
||||
|
||||
# 停止现有任务
|
||||
self.stop_service()
|
||||
@@ -337,7 +326,7 @@ class DoubanSync(_PluginBase):
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
@@ -546,12 +535,13 @@ class DoubanSync(_PluginBase):
|
||||
except Exception as e:
|
||||
logger.error("退出插件失败:%s" % str(e))
|
||||
|
||||
def __get_username_by_douban(self, user_id: str) -> Optional[str]:
|
||||
@staticmethod
|
||||
def __get_username_by_douban(user_id: str) -> Optional[str]:
|
||||
"""
|
||||
根据豆瓣ID获取用户名
|
||||
"""
|
||||
try:
|
||||
return self.useroper.get_name(douban_userid=user_id)
|
||||
return UserOper().get_name(douban_userid=user_id)
|
||||
except Exception as err:
|
||||
logger.warn(f'{err}, 需要 MoviePilot v2.2.6+ 版本')
|
||||
return None
|
||||
@@ -579,23 +569,28 @@ class DoubanSync(_PluginBase):
|
||||
logger.info(f"开始同步用户 {user_id} 的豆瓣想看数据 ...")
|
||||
url = self._interests_url % user_id
|
||||
if version == "v2":
|
||||
results = self.rsshelper.parse(url, headers={
|
||||
results = RssHelper().parse(url, headers={
|
||||
"User-Agent": settings.USER_AGENT
|
||||
})
|
||||
else:
|
||||
results = self.rsshelper.parse(url)
|
||||
results = RssHelper().parse(url)
|
||||
if not results:
|
||||
logger.warn(f"未获取到用户 {user_id} 豆瓣RSS数据:{url}")
|
||||
continue
|
||||
else:
|
||||
logger.info(f"获取到用户 {user_id} 豆瓣RSS数据:{len(results)}")
|
||||
# 解析数据
|
||||
mediachain = MediaChain()
|
||||
downloadchain = DownloadChain()
|
||||
subscribechain = SubscribeChain()
|
||||
searchchain = SearchChain()
|
||||
subscribeoper = SubscribeOper()
|
||||
for result in results:
|
||||
try:
|
||||
dtype = result.get("title", "")[:2]
|
||||
title = result.get("title", "")[2:]
|
||||
# 增加豆瓣昵称,数据来源自app.helper.rss.py
|
||||
nickname = result.get("nickname","")
|
||||
nickname = result.get("nickname", "")
|
||||
if nickname:
|
||||
nickname = f"[{nickname}]"
|
||||
if dtype not in ["想看"]:
|
||||
@@ -620,7 +615,7 @@ class DoubanSync(_PluginBase):
|
||||
douban_info = self.chain.douban_info(doubanid=douban_id)
|
||||
meta.type = MediaType.MOVIE if douban_info.get("type") == "movie" else MediaType.TV
|
||||
if settings.RECOGNIZE_SOURCE == "themoviedb":
|
||||
tmdbinfo = self.mediachain.get_tmdbinfo_by_doubanid(doubanid=douban_id, mtype=meta.type)
|
||||
tmdbinfo = mediachain.get_tmdbinfo_by_doubanid(doubanid=douban_id, mtype=meta.type)
|
||||
if not tmdbinfo:
|
||||
logger.warn(f'未能通过豆瓣ID {douban_id} 获取到TMDB信息,标题:{title},豆瓣ID:{douban_id}')
|
||||
continue
|
||||
@@ -634,7 +629,7 @@ class DoubanSync(_PluginBase):
|
||||
logger.warn(f'豆瓣ID {douban_id} 未识别到媒体信息')
|
||||
continue
|
||||
# 查询缺失的媒体信息
|
||||
exist_flag, no_exists = self.downloadchain.get_no_exists_info(meta=meta, mediainfo=mediainfo)
|
||||
exist_flag, no_exists = downloadchain.get_no_exists_info(meta=meta, mediainfo=mediainfo)
|
||||
if exist_flag:
|
||||
logger.info(f'{mediainfo.title_year} 媒体库中已存在')
|
||||
action = "exist"
|
||||
@@ -643,9 +638,10 @@ class DoubanSync(_PluginBase):
|
||||
real_name = self.__get_username_by_douban(user_id)
|
||||
if self._search_download:
|
||||
# 先搜索资源
|
||||
logger.info(f'媒体库中不存在或不完整,开启搜索下载,开始搜索 {mediainfo.title_year} 的资源...')
|
||||
# 按订阅优先级规则组搜索过滤,站点为设置的订阅站点
|
||||
filter_results = self.searchchain.process(
|
||||
logger.info(
|
||||
f'媒体库中不存在或不完整,开启搜索下载,开始搜索 {mediainfo.title_year} 的资源...')
|
||||
# 按订阅优先级规则组搜索过滤,站点为设置的订阅站点
|
||||
filter_results = searchchain.process(
|
||||
mediainfo=mediainfo,
|
||||
no_exists=no_exists,
|
||||
sites=self.systemconfig.get(SystemConfigKey.RssSites),
|
||||
@@ -656,7 +652,7 @@ class DoubanSync(_PluginBase):
|
||||
action = "download"
|
||||
if mediainfo.type == MediaType.MOVIE:
|
||||
# 电影类型调用单次下载
|
||||
download_id = self.downloadchain.download_single(
|
||||
download_id = downloadchain.download_single(
|
||||
context=filter_results[0],
|
||||
username=real_name or f"豆瓣{nickname}想看"
|
||||
)
|
||||
@@ -666,7 +662,7 @@ class DoubanSync(_PluginBase):
|
||||
action = "subscribe"
|
||||
else:
|
||||
# 电视剧类型调用批量下载
|
||||
downloaded_list, no_exists = self.downloadchain.batch_download(
|
||||
downloaded_list, no_exists = downloadchain.batch_download(
|
||||
contexts=filter_results,
|
||||
no_exists=no_exists,
|
||||
username=real_name or f"豆瓣{nickname}想看"
|
||||
@@ -678,13 +674,13 @@ class DoubanSync(_PluginBase):
|
||||
|
||||
# 更新订阅信息
|
||||
logger.info(f'根据缺失剧集更新订阅信息 {mediainfo.title_year} ...')
|
||||
subscribe = self.subscribechain.subscribeoper.get(sub_id)
|
||||
subscribe = subscribeoper.get(sub_id)
|
||||
if subscribe:
|
||||
self.subscribechain.finish_subscribe_or_not(subscribe=subscribe,
|
||||
meta=meta,
|
||||
mediainfo=mediainfo,
|
||||
downloads=downloaded_list,
|
||||
lefts=no_exists)
|
||||
subscribechain.finish_subscribe_or_not(subscribe=subscribe,
|
||||
meta=meta,
|
||||
mediainfo=mediainfo,
|
||||
downloads=downloaded_list,
|
||||
lefts=no_exists)
|
||||
|
||||
else:
|
||||
logger.info(f'未找到符合条件资源,添加订阅 {mediainfo.title_year} ...')
|
||||
@@ -714,8 +710,9 @@ class DoubanSync(_PluginBase):
|
||||
# 缓存只清理一次
|
||||
self._clearflag = False
|
||||
|
||||
def add_subscribe(self, mediainfo, meta, nickname, real_name):
|
||||
return self.subscribechain.add(
|
||||
@staticmethod
|
||||
def add_subscribe(mediainfo, meta, nickname, real_name):
|
||||
return SubscribeChain().add(
|
||||
title=mediainfo.title,
|
||||
year=mediainfo.year,
|
||||
mtype=mediainfo.type,
|
||||
|
||||
@@ -45,9 +45,6 @@ class DownloadSiteTag(_PluginBase):
|
||||
# 退出事件
|
||||
_event = threading.Event()
|
||||
# 私有属性
|
||||
downloadhistory_oper = None
|
||||
sites_helper = None
|
||||
downloader_helper = None
|
||||
_scheduler = None
|
||||
_enabled = False
|
||||
_onlyonce = False
|
||||
@@ -64,9 +61,6 @@ class DownloadSiteTag(_PluginBase):
|
||||
_downloaders = None
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self.downloadhistory_oper = DownloadHistoryOper()
|
||||
self.downloader_helper = DownloaderHelper()
|
||||
self.sites_helper = SitesHelper()
|
||||
# 读取配置
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
@@ -113,7 +107,7 @@ class DownloadSiteTag(_PluginBase):
|
||||
logger.warning("尚未配置下载器,请检查配置")
|
||||
return None
|
||||
|
||||
services = self.downloader_helper.get_services(name_filters=self._downloaders)
|
||||
services = DownloaderHelper().get_services(name_filters=self._downloaders)
|
||||
if not services:
|
||||
logger.warning("获取下载器实例失败,请检查配置")
|
||||
return None
|
||||
@@ -205,7 +199,7 @@ class DownloadSiteTag(_PluginBase):
|
||||
# 记录处理的种子, 供辅种(无下载历史)使用
|
||||
dispose_history = {}
|
||||
# 所有站点索引
|
||||
indexers = [indexer.get("name") for indexer in self.sites_helper.get_indexers()]
|
||||
indexers = [indexer.get("name") for indexer in SitesHelper().get_indexers()]
|
||||
# JackettIndexers索引器支持多个站点, 如果不存在历史记录, 则通过tracker会再次附加其他站点名称
|
||||
indexers.append("JackettIndexers")
|
||||
indexers = set(indexers)
|
||||
@@ -230,6 +224,8 @@ class DownloadSiteTag(_PluginBase):
|
||||
# 按添加时间进行排序, 时间靠前的按大小和名称加入处理历史, 判定为原始种子, 其他为辅种
|
||||
torrents = self._torrents_sort(torrents=torrents, dl_type=service.type)
|
||||
logger.info(f"{self.LOG_TAG}下载器 {downloader} 分析种子信息中 ...")
|
||||
downloadhis = DownloadHistoryOper()
|
||||
siteshelper = SitesHelper()
|
||||
for torrent in torrents:
|
||||
try:
|
||||
if self._event.is_set():
|
||||
@@ -246,7 +242,7 @@ class DownloadSiteTag(_PluginBase):
|
||||
torrent_tags = self._get_label(torrent=torrent, dl_type=service.type)
|
||||
torrent_cat = self._get_category(torrent=torrent, dl_type=service.type)
|
||||
# 提取种子hash对应的下载历史
|
||||
history: DownloadHistory = self.downloadhistory_oper.get_by_hash(_hash)
|
||||
history: DownloadHistory = downloadhis.get_by_hash(_hash)
|
||||
if not history:
|
||||
# 如果找到已处理种子的历史, 表明当前种子是辅种, 否则创建一个空DownloadHistory
|
||||
if _key and _key in dispose_history:
|
||||
@@ -273,7 +269,7 @@ class DownloadSiteTag(_PluginBase):
|
||||
break
|
||||
else:
|
||||
domain = StringUtils.get_url_domain(tracker)
|
||||
site_info = self.sites_helper.get_indexer(domain)
|
||||
site_info = siteshelper.get_indexer(domain)
|
||||
if site_info:
|
||||
history.torrent_site = site_info.get("name")
|
||||
break
|
||||
@@ -652,7 +648,7 @@ class DownloadSiteTag(_PluginBase):
|
||||
'model': 'downloaders',
|
||||
'label': '下载器',
|
||||
'items': [{"title": config.name, "value": config.name}
|
||||
for config in self.downloader_helper.get_configs().values()]
|
||||
for config in DownloaderHelper().get_configs().values()]
|
||||
}
|
||||
}
|
||||
]
|
||||
@@ -856,4 +852,4 @@ class DownloadSiteTag(_PluginBase):
|
||||
self._event.clear()
|
||||
self._scheduler = None
|
||||
except Exception as e:
|
||||
print(str(e))
|
||||
print(str(e))
|
||||
|
||||
@@ -30,7 +30,6 @@ class HistoryToV2(_PluginBase):
|
||||
auth_level = 1
|
||||
|
||||
# 私有属性
|
||||
historyoper = None
|
||||
_enabled = False
|
||||
_host = None
|
||||
_username = None
|
||||
|
||||
1211
plugins.v2/imdbsource/__init__.py
Normal file
1211
plugins.v2/imdbsource/__init__.py
Normal file
File diff suppressed because it is too large
Load Diff
785
plugins.v2/imdbsource/imdb_helper.py
Normal file
785
plugins.v2/imdbsource/imdb_helper.py
Normal file
@@ -0,0 +1,785 @@
|
||||
import re
|
||||
from typing import Optional, Any, Dict, List, Tuple
|
||||
from io import StringIO
|
||||
from collections import OrderedDict
|
||||
from dataclasses import dataclass
|
||||
|
||||
import graphene
|
||||
import requests
|
||||
from requests_html import HTMLSession
|
||||
import ijson
|
||||
import json
|
||||
import base64
|
||||
|
||||
from app.log import logger
|
||||
from app.utils.http import RequestUtils
|
||||
from app.utils.string import StringUtils
|
||||
from app.utils.common import retry
|
||||
from app.schemas.types import MediaType
|
||||
from app.core.cache import cached
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class SearchParams:
|
||||
title_types: Optional[Tuple[str, ...]] = None
|
||||
genres: Optional[Tuple[str, ...]] = None
|
||||
sort_by: str = 'POPULARITY'
|
||||
sort_order: str = 'ASC'
|
||||
rating_min: Optional[float] = None
|
||||
rating_max: Optional[float] = None
|
||||
countries: Optional[Tuple[str, ...]] = None
|
||||
languages: Optional[Tuple[str, ...]] = None
|
||||
release_date_end: Optional[str] = None
|
||||
release_date_start: Optional[str] = None
|
||||
award_constraint: Optional[Tuple[str, ...]] = None
|
||||
ranked: Optional[Tuple[str, ...]] = None
|
||||
interests: Optional[Tuple[str, ...]] = None
|
||||
|
||||
|
||||
class SearchState:
|
||||
def __init__(self, pageinfo: dict, total: int):
|
||||
self.pageinfo = pageinfo
|
||||
self.total = total
|
||||
|
||||
|
||||
class ImdbHelper:
|
||||
_query_by_id = """query queryWithVariables($id: ID!) {
|
||||
title(id: $id) {
|
||||
id
|
||||
type
|
||||
is_adult
|
||||
primary_title
|
||||
original_title
|
||||
start_year
|
||||
end_year
|
||||
runtime_minutes
|
||||
plot
|
||||
rating {
|
||||
aggregate_rating
|
||||
votes_count
|
||||
}
|
||||
genres
|
||||
posters {
|
||||
url
|
||||
width
|
||||
height
|
||||
}
|
||||
certificates {
|
||||
country {
|
||||
code
|
||||
name
|
||||
}
|
||||
rating
|
||||
}
|
||||
spoken_languages {
|
||||
code
|
||||
name
|
||||
}
|
||||
origin_countries {
|
||||
code
|
||||
name
|
||||
}
|
||||
critic_review {
|
||||
score
|
||||
review_count
|
||||
}
|
||||
directors: credits(first: 5, categories: ["director"]) {
|
||||
name {
|
||||
id
|
||||
display_name
|
||||
avatars {
|
||||
url
|
||||
width
|
||||
height
|
||||
}
|
||||
}
|
||||
}
|
||||
writers: credits(first: 5, categories: ["writer"]) {
|
||||
name {
|
||||
id
|
||||
display_name
|
||||
avatars {
|
||||
url
|
||||
width
|
||||
height
|
||||
}
|
||||
}
|
||||
}
|
||||
casts: credits(first: 5, categories: ["actor", "actress"]) {
|
||||
name {
|
||||
id
|
||||
display_name
|
||||
avatars {
|
||||
url
|
||||
width
|
||||
height
|
||||
}
|
||||
}
|
||||
characters
|
||||
}
|
||||
}
|
||||
}"""
|
||||
_endpoint = "https://graph.imdbapi.dev/v1"
|
||||
_search_endpoint = "https://v3.sg.media-imdb.com/suggestion/x/%s.json?includeVideos=0"
|
||||
_official_endpoint = "https://caching.graphql.imdb.com/"
|
||||
_hash_update_url = ("https://raw.githubusercontent.com/wumode/MoviePilot-Plugins/"
|
||||
"refs/heads/imdbsource_assets/plugins.v2/imdbsource/imdb_hash.json")
|
||||
_qid_map = {
|
||||
MediaType.TV: ["tvSeries", "tvMiniSeries", "tvShort", "tvEpisode"],
|
||||
MediaType.MOVIE: ["movie"]
|
||||
}
|
||||
|
||||
_imdb_headers = {
|
||||
"Accept": "application/json, text/plain, */*",
|
||||
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome"
|
||||
"/84.0.4147.105 Safari/537.36",
|
||||
"Referer": "https://www.imdb.com/",
|
||||
}
|
||||
all_title_types = ["tvSeries", "tvMiniSeries", "movie", "tvMovie", "musicVideo", "tvShort", "short",
|
||||
"tvEpisode", "tvSpecial", "videoGame"]
|
||||
interest_id = {
|
||||
"Anime": "in0000027",
|
||||
"Superhero": "in0000008",
|
||||
"Sitcom": "in0000044",
|
||||
"Coming-of-Age": "in0000073",
|
||||
"Slasher Horror": "in0000115",
|
||||
"Raunchy Comedy": "in0000041",
|
||||
"Documentary": "in0000060"
|
||||
}
|
||||
|
||||
def __init__(self, proxies=None):
|
||||
self._proxies = proxies
|
||||
self._session = HTMLSession()
|
||||
self._req_utils = RequestUtils(headers=self._imdb_headers, session=self._session, timeout=10, proxies=proxies)
|
||||
self._imdb_req = RequestUtils(accept_type="application/json",
|
||||
content_type="application/json",
|
||||
headers=self._imdb_headers,
|
||||
timeout=10,
|
||||
proxies=proxies,
|
||||
session=requests.Session())
|
||||
self._imdb_api_hash = {"AdvancedTitleSearch": None, "TitleAkasPaginated": None}
|
||||
self._search_states = OrderedDict()
|
||||
self._max_states = 30
|
||||
|
||||
def imdbid(self, imdbid: str) -> Optional[Dict]:
|
||||
params = {"operationName": "queryWithVariables", "query": self._query_by_id, "variables": {"id": imdbid}}
|
||||
ret = RequestUtils(
|
||||
accept_type="application/json", content_type="application/json"
|
||||
).post_res(f"{self._endpoint}", json=params)
|
||||
if not ret:
|
||||
return None
|
||||
data = ret.json()
|
||||
if "errors" in data:
|
||||
logger.error(f"Imdb query ({imdbid}) errors {data.get('errors')}")
|
||||
logger.error(f"{params}")
|
||||
return None
|
||||
info = data.get("data").get("title", None)
|
||||
return info
|
||||
|
||||
@cached(maxsize=1000, ttl=3600)
|
||||
def __episodes_by_season(self, imdbid: str, build_id: str, season: str) -> Optional[Dict]:
|
||||
if not build_id or not season:
|
||||
return None
|
||||
prefix = "pageProps.contentData.section"
|
||||
url = (f"https://www.imdb.com/_next/data/{build_id}"
|
||||
f"/en-US/title/{imdbid}/episodes.json?season={season}&ref_=ttep&tconst={imdbid}")
|
||||
response = self._req_utils.get_res(url)
|
||||
if not response or response.status_code != 200:
|
||||
return
|
||||
json_content = response.text
|
||||
try:
|
||||
section = next(ijson.items(json_content, prefix))
|
||||
except StopIteration:
|
||||
logger.warn(f"No data found at prefix: {prefix}")
|
||||
return None
|
||||
except (ijson.JSONError, ValueError) as e:
|
||||
logger.warn(f"JSON parsing error: {e}")
|
||||
return None
|
||||
except TypeError as e:
|
||||
logger.warn(f"Invalid input type: {e}")
|
||||
return None
|
||||
return section
|
||||
|
||||
@cached(maxsize=1000, ttl=3600)
|
||||
def __episodes(self, imdbid: str) -> Optional[Dict]:
|
||||
prefix = "props.pageProps.contentData.section"
|
||||
url = f"https://www.imdb.com/title/{imdbid}/episodes/"
|
||||
|
||||
response = self._req_utils.get_res(url)
|
||||
if not response or response.status_code != 200:
|
||||
return
|
||||
script_content = response.html.xpath('//script[@id="__NEXT_DATA__"]/text()')
|
||||
if len(script_content) == 0:
|
||||
return None
|
||||
json_content = script_content[0]
|
||||
# 直接定位到目标路径提取 items
|
||||
try:
|
||||
section = next(ijson.items(json_content, prefix))
|
||||
except StopIteration:
|
||||
logger.warn(f"No data found at prefix: {prefix}")
|
||||
return None
|
||||
except (ijson.JSONError, ValueError) as e:
|
||||
logger.warn(f"JSON parsing error: {e}")
|
||||
return None
|
||||
except TypeError as e:
|
||||
logger.warn(f"Invalid input type: {e}")
|
||||
return None
|
||||
total_seasons = []
|
||||
for s in section.get("seasons"):
|
||||
if s.get("value") and s.get("value") not in total_seasons:
|
||||
total_seasons.append(s.get("value"))
|
||||
build_id = next(ijson.items(json_content, 'buildId'))
|
||||
current_season = section.get('currentSeason') or '1'
|
||||
total_seasons.remove(current_season)
|
||||
for season in total_seasons:
|
||||
section_next = self.__episodes_by_season(imdbid, build_id=build_id, season=season)
|
||||
if section_next:
|
||||
section["episodes"]["items"].extend(section_next.get("episodes", {}).get("items", []))
|
||||
section["episodes"]["total"] += section_next.get("episodes", {}).get("total", 0)
|
||||
return section
|
||||
|
||||
@retry(Exception, logger=logger)
|
||||
@cached(maxsize=32, ttl=1800)
|
||||
def __request(self, params: Dict, sha256) -> Optional[Dict]:
|
||||
params["extensions"] = {"persistedQuery": {"sha256Hash": sha256, "version": 1}}
|
||||
ret = self._imdb_req.post_res(f"{self._official_endpoint}", json=params, raise_exception=True)
|
||||
if not ret:
|
||||
return None
|
||||
data = ret.json()
|
||||
if "errors" in data:
|
||||
logger.error(f"Imdb query errors")
|
||||
return None
|
||||
return data.get("data")
|
||||
|
||||
@cached(maxsize=1, ttl=30 * 24 * 3600)
|
||||
def __get_hash(self) -> Optional[dict]:
|
||||
"""
|
||||
根据IMDb hash使用
|
||||
"""
|
||||
headers = {
|
||||
"Accept": "text/html",
|
||||
}
|
||||
res = RequestUtils(headers=headers).get_res(
|
||||
self._hash_update_url,
|
||||
proxies=self._proxies
|
||||
)
|
||||
if not res:
|
||||
logger.error("获取IMDb hash")
|
||||
return None
|
||||
return res.json()
|
||||
|
||||
def __update_hash(self):
|
||||
imdb_hash = self.__get_hash()
|
||||
if imdb_hash:
|
||||
self._imdb_api_hash["AdvancedTitleSearch"] = imdb_hash.get("AdvancedTitleSearch")
|
||||
self._imdb_api_hash["TitleAkasPaginated"] = imdb_hash.get("TitleAkasPaginated")
|
||||
|
||||
@staticmethod
|
||||
def __award_to_constraint(award: str) -> Optional[Dict]:
|
||||
pattern = r'^(ev\d+)(?:-(best\w+))?-(Winning|Nominated)$'
|
||||
match = re.match(pattern, award)
|
||||
constraint = {}
|
||||
if match:
|
||||
ev_id = match.group(1) # 第一部分:evXXXXXXXX
|
||||
best = match.group(2) # 第二部分:bestXX(可选)
|
||||
status = match.group(3) # 第三部分:Winning/Nominated
|
||||
constraint["eventId"] = ev_id
|
||||
if status == "Winning":
|
||||
constraint["winnerFilter"] = "WINNER_ONLY"
|
||||
if best:
|
||||
constraint["searchAwardCategoryId"] = best
|
||||
return constraint
|
||||
else:
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def __ranked_list_to_constraint(ranked: str) -> Optional[Dict]:
|
||||
"""
|
||||
"TOP_RATED_MOVIES-100": "IMDb Top 100",
|
||||
"TOP_RATED_MOVIES-250": "IMDb Top 250",
|
||||
"TOP_RATED_MOVIES-1000": "IMDb Top 1000",
|
||||
"LOWEST_RATED_MOVIES-100": "IMDb Bottom 100",
|
||||
"LOWEST_RATED_MOVIES-250": "IMDb Bottom 250",
|
||||
"LOWEST_RATED_MOVIES-1000": "IMDb Bottom 1000"
|
||||
"""
|
||||
pattern = r'^(TOP_RATED_MOVIES|LOWEST_RATED_MOVIES)-(\d+)$'
|
||||
match = re.match(pattern, ranked)
|
||||
if match:
|
||||
ranked_title_list_type = match.group(1)
|
||||
rank_range = int(match.group(2))
|
||||
constraint = {"rankRange": {"max": rank_range}, "rankedTitleListType": ranked_title_list_type}
|
||||
return constraint
|
||||
return None
|
||||
|
||||
def advanced_title_search(self,
|
||||
first_page: bool = True,
|
||||
title_types: Optional[Tuple[str, ...]] = None,
|
||||
genres: Optional[Tuple[str, ...]] = None,
|
||||
sort_by: str = 'POPULARITY',
|
||||
sort_order: str = 'ASC',
|
||||
rating_min: Optional[float] = None,
|
||||
rating_max: Optional[float] = None,
|
||||
countries: Optional[Tuple[str, ...]] = None,
|
||||
languages: Optional[Tuple[str, ...]] = None,
|
||||
release_date_end: Optional[str] = None,
|
||||
release_date_start: Optional[str] = None,
|
||||
award_constraint: Optional[Tuple[str, ...]] = None,
|
||||
ranked: Optional[Tuple[str, ...]] = None,
|
||||
interests: Optional[Tuple[str, ...]] = None):
|
||||
# 创建参数对象
|
||||
params = SearchParams(
|
||||
title_types=title_types,
|
||||
genres=genres,
|
||||
sort_by=sort_by,
|
||||
sort_order=sort_order,
|
||||
rating_min=rating_min,
|
||||
rating_max=rating_max,
|
||||
countries=countries,
|
||||
languages=languages,
|
||||
release_date_end=release_date_end,
|
||||
release_date_start=release_date_start,
|
||||
award_constraint=award_constraint,
|
||||
ranked=ranked,
|
||||
interests=interests
|
||||
)
|
||||
sha256 = 'be358d7b41add9fd174461f4c8c673dfee5e2a88744e2d5dc037362a96e2b4e4'
|
||||
self.__update_hash()
|
||||
if self._imdb_api_hash.get("AdvancedTitleSearch"):
|
||||
sha256 = self._imdb_api_hash["AdvancedTitleSearch"]
|
||||
# 获取或创建搜索状态
|
||||
last_cursor = None
|
||||
if not first_page and params in self._search_states:
|
||||
search_state: SearchState = self._search_states.pop(params) # 移除并获取
|
||||
self._search_states[params] = search_state
|
||||
# 不是第一页且已有状态 - 使用上次的结果
|
||||
if not search_state.pageinfo.get("hasNextPage"):
|
||||
return {'pageInfo': {'endCursor': None, 'hasNextPage': False, 'hasPreviousPage': True,
|
||||
'startCursor': None},
|
||||
'edges': [], 'total': search_state.total, 'genres': [], 'keywords': [],
|
||||
'titleTypes': [], 'jobCategories': []}
|
||||
if search_state.pageinfo.get('endCursor'):
|
||||
last_cursor = search_state.pageinfo.get('endCursor')
|
||||
# 这里实现基于上次结果的逻辑
|
||||
else:
|
||||
# 重新搜索
|
||||
first_page = True
|
||||
else:
|
||||
first_page = True
|
||||
result = self.__advanced_title_search(params, sha256, first_page, last_cursor)
|
||||
if result:
|
||||
page_info = result.get("pageInfo", {})
|
||||
total = result.get("total", 0)
|
||||
search_state = SearchState(page_info, total)
|
||||
self._search_states[params] = search_state
|
||||
if len(self._search_states) > self._max_states:
|
||||
self._search_states.popitem(last=False) # 移除最旧的条目
|
||||
return result
|
||||
|
||||
def __advanced_title_search(self,
|
||||
params: SearchParams,
|
||||
sha256: str,
|
||||
first_page: bool = True,
|
||||
last_cursor: Optional[str] = None,
|
||||
) -> Optional[Dict]:
|
||||
|
||||
variables = {"first": 50,
|
||||
"locale": "en-US",
|
||||
"sortBy": params.sort_by,
|
||||
"sortOrder": params.sort_order,
|
||||
}
|
||||
if params.title_types:
|
||||
title_type_ids = []
|
||||
for title_type in params.title_types:
|
||||
if title_type in self.all_title_types:
|
||||
title_type_ids.append(title_type)
|
||||
if len(title_type_ids):
|
||||
variables["titleTypeConstraint"] = {"anyTitleTypeIds": params.title_types,
|
||||
"excludeTitleTypeIds": []}
|
||||
if params.genres:
|
||||
variables["genreConstraint"] = {"allGenreIds": params.genres, "excludeGenreIds": []}
|
||||
if params.countries:
|
||||
variables["originCountryConstraint"] = {"allCountries": params.countries}
|
||||
if params.languages:
|
||||
variables["languageConstraint"] = {"anyPrimaryLanguages": params.languages}
|
||||
if params.rating_min or params.rating_max:
|
||||
rating_min = params.rating_min if params.rating_min else 1
|
||||
rating_min = max(rating_min, 1)
|
||||
rating_max = params.rating_max if params.rating_max else 10
|
||||
rating_max = min(rating_max, 10)
|
||||
variables["userRatingsConstraint"] = {"aggregateRatingRange": {"max": rating_max, "min": rating_min}}
|
||||
if params.release_date_start or params.release_date_end:
|
||||
release_dict = {}
|
||||
if params.release_date_start:
|
||||
release_dict["start"] = params.release_date_start
|
||||
if params.release_date_end:
|
||||
release_dict["end"] = params.release_date_end
|
||||
variables["releaseDateConstraint"] = {"releaseDateRange": release_dict}
|
||||
if params.award_constraint:
|
||||
constraints = []
|
||||
for award in params.award_constraint:
|
||||
c = self.__award_to_constraint(award)
|
||||
if c:
|
||||
constraints.append(c)
|
||||
variables["awardConstraint"] = {"allEventNominations": constraints}
|
||||
if params.ranked:
|
||||
constraints = []
|
||||
for r in params.ranked:
|
||||
c = self.__ranked_list_to_constraint(r)
|
||||
if c:
|
||||
constraints.append(c)
|
||||
variables["rankedTitleListConstraint"] = {"allRankedTitleLists": constraints,
|
||||
"excludeRankedTitleLists": []}
|
||||
if params.interests:
|
||||
constraints = []
|
||||
for interest in params.interests:
|
||||
in_id = self.interest_id.get(interest)
|
||||
if in_id:
|
||||
constraints.append(in_id)
|
||||
variables["interestConstraint"] = {"allInterestIds": constraints, "excludeInterestIds": []}
|
||||
if not first_page and last_cursor:
|
||||
variables["after"] = last_cursor
|
||||
|
||||
params = {"operationName": "AdvancedTitleSearch",
|
||||
"variables": variables}
|
||||
data = self.__request(params, sha256)
|
||||
if not data:
|
||||
return None
|
||||
return data.get("advancedTitleSearch")
|
||||
|
||||
def __known_as(self, imdbid: str,
|
||||
sha256='48d4f7bfa73230fb550147bd4704d8050080e65fe2ad576da6276cac2330e446') -> Optional[List]:
|
||||
"""
|
||||
获取电影和电视别名
|
||||
:param imdbid: IMBd id
|
||||
:return: 别名列表
|
||||
"""
|
||||
self.__update_hash()
|
||||
if self._imdb_api_hash.get("TitleAkasPaginated"):
|
||||
sha256 = self._imdb_api_hash["TitleAkasPaginated"]
|
||||
params = {"operationName": "TitleAkasPaginated",
|
||||
"variables": {"const": imdbid, "first": 50, "locale": "en-US", "originalTitleText": False}}
|
||||
data = self.__request(params=params, sha256=sha256)
|
||||
if not data:
|
||||
return None
|
||||
if not data.get("data", {}).get("title", {}).get("akas", {}).get("total"):
|
||||
return None
|
||||
akas = []
|
||||
for edge in data["data"]["title"]["akas"]["edges"]:
|
||||
title = edge.get("node", {}).get("displayableProperty", {}).get("value", {}).get("plainText")
|
||||
if not title:
|
||||
continue
|
||||
country = edge.get("node", {}).get("country", {})
|
||||
language = edge.get("node", {}).get("language", {})
|
||||
akas.append({"title": title, "country": country, "language": language})
|
||||
return akas
|
||||
|
||||
def __search_on_imdb(self, term, mtype, release_year=None):
|
||||
params = f"{term}"
|
||||
if release_year is not None:
|
||||
params += f" {release_year}"
|
||||
ret = RequestUtils(
|
||||
accept_type="application/json",
|
||||
).get_res(f"{self._search_endpoint % params}")
|
||||
if not ret:
|
||||
return None
|
||||
data = ret.json()
|
||||
if "d" not in data:
|
||||
return None
|
||||
result = [d for d in data["d"] if d.get("qid") in self._qid_map.get(mtype)]
|
||||
return result
|
||||
|
||||
def search_tvs(self, title: str, year: str = None) -> List[dict]:
|
||||
if not title:
|
||||
return []
|
||||
if year:
|
||||
tvs = self.__search_on_imdb(title, MediaType.TV, year) or []
|
||||
else:
|
||||
tvs = self.__search_on_imdb(title, MediaType.TV, ) or []
|
||||
ret_infos = []
|
||||
for tv in tvs:
|
||||
# if title in tv.get("l"):
|
||||
# if self.__compare_names(title, [tv.get("l")]):
|
||||
# tv['media_type'] = MediaType.TV
|
||||
ret_infos.append(tv)
|
||||
return ret_infos
|
||||
|
||||
def search_movies(self, title: str, year: str = None) -> List[dict]:
|
||||
if not title:
|
||||
return []
|
||||
if year:
|
||||
movies = self.__search_on_imdb(title, MediaType.MOVIE, year) or []
|
||||
else:
|
||||
movies = self.__search_on_imdb(title, MediaType.MOVIE) or []
|
||||
ret_infos = []
|
||||
for movie in movies:
|
||||
# if title in movie.get("l"):
|
||||
# if self.__compare_names(title, [movie.get("l")]):
|
||||
# movie['media_type'] = MediaType.MOVIE
|
||||
ret_infos.append(movie)
|
||||
return ret_infos
|
||||
|
||||
@staticmethod
|
||||
def __compare_names(file_name: str, tmdb_names: list) -> bool:
|
||||
"""
|
||||
比较文件名是否匹配,忽略大小写和特殊字符
|
||||
:param file_name: 识别的文件名或者种子名
|
||||
:param tmdb_names: TMDB返回的译名
|
||||
:return: True or False
|
||||
"""
|
||||
if not file_name or not tmdb_names:
|
||||
return False
|
||||
if not isinstance(tmdb_names, list):
|
||||
tmdb_names = [tmdb_names]
|
||||
file_name = StringUtils.clear(file_name).upper()
|
||||
for tmdb_name in tmdb_names:
|
||||
tmdb_name = StringUtils.clear(tmdb_name).strip().upper()
|
||||
if file_name == tmdb_name:
|
||||
return True
|
||||
return False
|
||||
|
||||
def __search_movie_by_name(self, name: str, year: str) -> Optional[dict]:
|
||||
"""
|
||||
根据名称查询电影IMDB匹配
|
||||
:param name: 识别的文件名或种子名
|
||||
:param year: 电影上映日期
|
||||
:return: 匹配的媒体信息
|
||||
"""
|
||||
movies = self.search_movies(name, year=year)
|
||||
if (movies is None) or (len(movies) == 0):
|
||||
logger.debug(f"{name} 未找到相关电影信息!")
|
||||
return {}
|
||||
movies = sorted(
|
||||
movies,
|
||||
key=lambda x: str(x.get("y") or '0000'),
|
||||
reverse=True
|
||||
)
|
||||
for movie in movies:
|
||||
movie_year = f"{movie.get('y')}"
|
||||
if year and movie_year != year:
|
||||
# 年份不匹配
|
||||
continue
|
||||
# 匹配标题、原标题
|
||||
movie_info = self.imdbid(movie.get("id"))
|
||||
if not movie_info:
|
||||
continue
|
||||
if self.__compare_names(name, [movie_info.get("primary_title")]):
|
||||
return movie_info
|
||||
if movie_info.get("original_title") and self.__compare_names(name, [movie_info.get("original_title")]):
|
||||
return movie_info
|
||||
akas = self.__known_as(movie.get("id"))
|
||||
if not akas:
|
||||
continue
|
||||
akas_names = [item.get("title") for item in akas]
|
||||
if self.__compare_names(name, akas_names):
|
||||
return movie_info
|
||||
return {}
|
||||
|
||||
def __search_tv_by_name(self, name: str, year: str) -> Optional[dict]:
|
||||
"""
|
||||
根据名称查询电视剧IMDB匹配
|
||||
:param name: 识别的文件名或者种子名
|
||||
:param year: 电视剧的首播年份
|
||||
:return: 匹配的媒体信息
|
||||
"""
|
||||
tvs = self.search_tvs(name, year=year)
|
||||
if (tvs is None) or (len(tvs) == 0):
|
||||
logger.debug(f"{name} 未找到相关电影信息!")
|
||||
return {}
|
||||
tvs = sorted(
|
||||
tvs,
|
||||
key=lambda x: str(x.get("y") or '0000'),
|
||||
reverse=True
|
||||
)
|
||||
for tv in tvs:
|
||||
tv_year = f"{tv.get('y')}"
|
||||
if year and tv_year != year:
|
||||
# 年份不匹配
|
||||
continue
|
||||
# 匹配标题、原标题
|
||||
tv_info = self.imdbid(tv.get("id"))
|
||||
if not tv_info:
|
||||
continue
|
||||
if self.__compare_names(name, [tv_info.get("primary_title")]):
|
||||
return tv_info
|
||||
if tv_info.get("original_title") and self.__compare_names(name, [tv_info.get("original_title")]):
|
||||
return tv_info
|
||||
akas = self.__known_as(tv.get("id"))
|
||||
if not akas:
|
||||
continue
|
||||
akas_names = [item.get("title") for item in akas]
|
||||
if self.__compare_names(name, akas_names):
|
||||
return tv_info
|
||||
return {}
|
||||
|
||||
def __search_tv_by_season(self, name: str, season_year: str, season_number: int) -> Optional[dict]:
|
||||
"""
|
||||
根据电视剧的名称和季的年份及序号匹配IMDB
|
||||
:param name: 识别的文件名或者种子名
|
||||
:param season_year: 季的年份
|
||||
:param season_number: 季序号
|
||||
:return: 匹配的媒体信息
|
||||
"""
|
||||
|
||||
def __season_match(_tv_info: dict, _season_year: str) -> bool:
|
||||
tv_extra_info = self.__episodes(_tv_info.get("id"))
|
||||
if not tv_extra_info:
|
||||
return False
|
||||
release_year = []
|
||||
for item in tv_extra_info["episodes"]["items"]:
|
||||
if item.get("season") == season_number:
|
||||
release_year.append(item.get("releaseDate").get("year") or item.get("releaseYear"))
|
||||
first_release_year = min(release_year) if release_year else tv_extra_info["currentYear"]
|
||||
if first_release_year == _season_year:
|
||||
_tv_info["seasons"] = tv_extra_info["seasons"]
|
||||
_tv_info["episodes"] = tv_extra_info["episodes"]
|
||||
return True
|
||||
|
||||
tvs = self.search_tvs(title=name)
|
||||
if (tvs is None) or (len(tvs) == 0):
|
||||
logger.debug("%s 未找到季%s相关信息!" % (name, season_number))
|
||||
return {}
|
||||
tvs = sorted(
|
||||
tvs,
|
||||
key=lambda x: str(x.get('y') or '0000'),
|
||||
reverse=True
|
||||
)
|
||||
for tv in tvs:
|
||||
tv_info = self.imdbid(tv.get("id"))
|
||||
if not tv_info:
|
||||
continue
|
||||
tv_year = f"{tv.get('y')}" if tv.get('y') else None
|
||||
if (self.__compare_names(name, [tv_info.get('primary_title')])
|
||||
or (tv_info.get('original_title') and self.__compare_names(name, [tv_info.get('original_title')]))) \
|
||||
and (tv_year == str(season_year)):
|
||||
return tv_info
|
||||
akas = self.__known_as(tv.get("id"))
|
||||
if not akas:
|
||||
continue
|
||||
akas_names = [item.get("title") for item in akas]
|
||||
if not self.__compare_names(name, akas_names):
|
||||
continue
|
||||
if __season_match(_tv_info=tv_info, _season_year=season_year):
|
||||
return tv_info
|
||||
|
||||
def get_info(self,
|
||||
mtype: MediaType,
|
||||
imdbid: str) -> dict:
|
||||
"""
|
||||
给定IMDB号,查询一条媒体信息
|
||||
:param mtype: 类型:电影、电视剧,为空时都查(此时用不上年份)
|
||||
:param imdbid: IMDB的ID
|
||||
"""
|
||||
# 查询TMDB详情
|
||||
if mtype == MediaType.MOVIE:
|
||||
imdb_info = self.imdbid(imdbid)
|
||||
if imdb_info:
|
||||
imdb_info['media_type'] = MediaType.MOVIE
|
||||
elif mtype == MediaType.TV:
|
||||
imdb_info = self.imdbid(imdbid)
|
||||
if imdb_info:
|
||||
imdb_info['media_type'] = MediaType.TV
|
||||
tv_extra_info = self.__episodes(imdbid)
|
||||
imdb_info["seasons"] = tv_extra_info["seasons"]
|
||||
imdb_info["episodes"] = tv_extra_info["episodes"]
|
||||
else:
|
||||
imdb_info = None
|
||||
logger.warn(f"IMDb id:{imdbid} 未查询到媒体信息")
|
||||
return imdb_info
|
||||
|
||||
def match_multi(self, name: str) -> Optional[dict]:
|
||||
"""
|
||||
根据名称同时查询电影和电视剧,没有类型也没有年份时使用
|
||||
:param name: 识别的文件名或种子名
|
||||
:return: 匹配的媒体信息
|
||||
"""
|
||||
|
||||
multis = self.search_tvs(name) + self.search_movies(name)
|
||||
ret_info = {}
|
||||
if len(multis) == 0:
|
||||
logger.debug(f"{name} 未找到相关媒体息!")
|
||||
return {}
|
||||
else:
|
||||
multis = sorted(
|
||||
multis,
|
||||
key=lambda x: ("1" if x.get("media_type") == MediaType.MOVIE else "0") + str(x.get('y') or '0000'),
|
||||
reverse=True
|
||||
)
|
||||
media_t = MediaType.UNKNOWN
|
||||
for multi in multis:
|
||||
media_info = self.imdbid(multi.get("id"))
|
||||
if not media_info:
|
||||
continue
|
||||
if multi.get("media_type") == MediaType.MOVIE:
|
||||
if self.__compare_names(name, media_info.get('primary_title')) \
|
||||
or self.__compare_names(name, multi.get('primary_title')):
|
||||
ret_info = media_info
|
||||
media_t = MediaType.MOVIE
|
||||
break
|
||||
elif multi.get("media_type") == MediaType.TV:
|
||||
if self.__compare_names(name, media_info.get('primary_title')) \
|
||||
or self.__compare_names(name, multi.get('primary_title')):
|
||||
ret_info = media_info
|
||||
media_t = MediaType.TV
|
||||
break
|
||||
if ret_info and not isinstance(ret_info.get("media_type"), MediaType):
|
||||
ret_info['media_type'] = media_t
|
||||
return ret_info
|
||||
|
||||
def match(self, name: str,
|
||||
mtype: MediaType,
|
||||
year: Optional[str] = None,
|
||||
season_year: Optional[str] = None,
|
||||
season_number: Optional[int] = None,
|
||||
group_seasons: Optional[List[dict]] = None) -> Optional[dict]:
|
||||
"""
|
||||
搜索imdb中的媒体信息,匹配返回一条尽可能正确的信息
|
||||
:param name: 检索的名称
|
||||
:param mtype: 类型:电影、电视剧
|
||||
:param year: 年份,如要是季集需要是首播年份(first_air_date)
|
||||
:param season_year: 当前季集年份
|
||||
:param season_number: 季集,整数
|
||||
:param group_seasons: 集数组信息
|
||||
:return: TMDB的INFO,同时会将mtype赋值到media_type中
|
||||
"""
|
||||
if not name:
|
||||
return None
|
||||
info = {}
|
||||
if mtype != MediaType.TV:
|
||||
year_range = [year]
|
||||
if year:
|
||||
year_range.append(str(int(year) + 1))
|
||||
year_range.append(str(int(year) - 1))
|
||||
for year in year_range:
|
||||
logger.debug(
|
||||
f"正在识别{mtype.value}:{name}, 年份={year} ...")
|
||||
info = self.__search_movie_by_name(name, year)
|
||||
if info:
|
||||
info['media_type'] = MediaType.MOVIE
|
||||
break
|
||||
else:
|
||||
# 有当前季和当前季集年份,使用精确匹配
|
||||
if season_year and season_number:
|
||||
logger.debug(
|
||||
f"正在识别{mtype.value}:{name}, 季集={season_number}, 季集年份={season_year} ...")
|
||||
info = self.__search_tv_by_season(name,
|
||||
season_year,
|
||||
season_number)
|
||||
if not info:
|
||||
year_range = [year]
|
||||
if year:
|
||||
year_range.append(str(int(year) + 1))
|
||||
year_range.append(str(int(year) - 1))
|
||||
for year in year_range:
|
||||
logger.debug(
|
||||
f"正在识别{mtype.value}:{name}, 年份={year} ...")
|
||||
info = self.__search_tv_by_name(name, year)
|
||||
if info:
|
||||
break
|
||||
if info:
|
||||
info['media_type'] = MediaType.TV
|
||||
if not info.get("seasons"):
|
||||
tv_extra_info = self.__episodes(info.get('id'))
|
||||
if tv_extra_info:
|
||||
info["seasons"] = tv_extra_info["seasons"]
|
||||
info["episodes"] = tv_extra_info["episodes"]
|
||||
return info
|
||||
3
plugins.v2/imdbsource/requirements.txt
Normal file
3
plugins.v2/imdbsource/requirements.txt
Normal file
@@ -0,0 +1,3 @@
|
||||
graphene~=3.4.3
|
||||
ijson~=3.4.0
|
||||
requests-html~=0.10.0
|
||||
@@ -48,10 +48,6 @@ class IYUUAutoSeed(_PluginBase):
|
||||
# 私有属性
|
||||
_scheduler = None
|
||||
iyuu_helper = None
|
||||
downloader_helper = None
|
||||
sites_helper = None
|
||||
site_oper = None
|
||||
torrent_helper = None
|
||||
# 开关
|
||||
_enabled = False
|
||||
_cron = None
|
||||
@@ -72,6 +68,7 @@ class IYUUAutoSeed(_PluginBase):
|
||||
_addhosttotag = False
|
||||
_size = None
|
||||
_clearcache = False
|
||||
_auto_start = False
|
||||
# 退出事件
|
||||
_event = Event()
|
||||
# 种子链接xpaths
|
||||
@@ -99,10 +96,7 @@ class IYUUAutoSeed(_PluginBase):
|
||||
cached = 0
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self.sites_helper = SitesHelper()
|
||||
self.site_oper = SiteOper()
|
||||
self.torrent_helper = TorrentHelper()
|
||||
self.downloader_helper = DownloaderHelper()
|
||||
|
||||
# 读取配置
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
@@ -128,8 +122,8 @@ class IYUUAutoSeed(_PluginBase):
|
||||
self._success_caches = [] if self._clearcache else config.get("success_caches") or []
|
||||
|
||||
# 过滤掉已删除的站点
|
||||
all_sites = [site.id for site in self.site_oper.list_order_by_pri()] + [site.get("id") for site in
|
||||
self.__custom_sites()]
|
||||
all_sites = [site.id for site in SiteOper().list_order_by_pri()] + [site.get("id") for site in
|
||||
self.__custom_sites()]
|
||||
self._sites = [site_id for site_id in all_sites if site_id in self._sites]
|
||||
self.__update_config()
|
||||
|
||||
@@ -171,7 +165,7 @@ class IYUUAutoSeed(_PluginBase):
|
||||
logger.warning("尚未配置下载器,请检查配置")
|
||||
return None
|
||||
|
||||
services = self.downloader_helper.get_services(name_filters=self._downloaders)
|
||||
services = DownloaderHelper().get_services(name_filters=self._downloaders)
|
||||
if not services:
|
||||
logger.warning("获取下载器实例失败,请检查配置")
|
||||
return None
|
||||
@@ -198,7 +192,7 @@ class IYUUAutoSeed(_PluginBase):
|
||||
logger.debug("尚未配置主辅分离下载器,辅种不分离")
|
||||
return None
|
||||
|
||||
service = self.downloader_helper.get_service(name=self._auto_downloader)
|
||||
service = DownloaderHelper().get_service(name=self._auto_downloader)
|
||||
if not service:
|
||||
logger.warning("获取主辅分离下载器实例失败,请检查配置")
|
||||
return None
|
||||
@@ -248,7 +242,7 @@ class IYUUAutoSeed(_PluginBase):
|
||||
|
||||
# 站点的可选项
|
||||
site_options = ([{"title": site.name, "value": site.id}
|
||||
for site in self.site_oper.list_order_by_pri()]
|
||||
for site in SiteOper().list_order_by_pri()]
|
||||
+ [{"title": site.get("name"), "value": site.get("id")}
|
||||
for site in customSites])
|
||||
return [
|
||||
@@ -381,7 +375,7 @@ class IYUUAutoSeed(_PluginBase):
|
||||
'model': 'downloaders',
|
||||
'label': '下载器',
|
||||
'items': [{"title": config.name, "value": config.name}
|
||||
for config in self.downloader_helper.get_configs().values()]
|
||||
for config in DownloaderHelper().get_configs().values()]
|
||||
}
|
||||
}
|
||||
]
|
||||
@@ -401,7 +395,7 @@ class IYUUAutoSeed(_PluginBase):
|
||||
'model': 'auto_downloader',
|
||||
'label': '主辅分离',
|
||||
'items': [{"title": config.name, "value": config.name}
|
||||
for config in self.downloader_helper.get_configs().values()]
|
||||
for config in DownloaderHelper().get_configs().values()]
|
||||
}
|
||||
}
|
||||
]
|
||||
@@ -1048,7 +1042,8 @@ class IYUUAutoSeed(_PluginBase):
|
||||
# 查询站点
|
||||
site_domain = StringUtils.get_url_domain(site_url)
|
||||
# 站点信息
|
||||
site_info = self.sites_helper.get_indexer(site_domain)
|
||||
sites_helper = SitesHelper()
|
||||
site_info = sites_helper.get_indexer(site_domain)
|
||||
if not site_info or not site_info.get('url'):
|
||||
logger.debug(f"没有维护种子对应的站点:{site_url}")
|
||||
return False
|
||||
@@ -1064,7 +1059,7 @@ class IYUUAutoSeed(_PluginBase):
|
||||
self.exist += 1
|
||||
return False
|
||||
# 站点流控
|
||||
check, checkmsg = self.sites_helper.check(site_domain)
|
||||
check, checkmsg = sites_helper.check(site_domain)
|
||||
if check:
|
||||
logger.warn(checkmsg)
|
||||
self.fail += 1
|
||||
@@ -1086,7 +1081,7 @@ class IYUUAutoSeed(_PluginBase):
|
||||
else:
|
||||
torrent_url += "?https=1"
|
||||
# 下载种子文件
|
||||
_, content, _, _, error_msg = self.torrent_helper.download_torrent(
|
||||
_, content, _, _, error_msg = TorrentHelper().download_torrent(
|
||||
url=torrent_url,
|
||||
cookie=site_info.get("cookie"),
|
||||
ua=site_info.get("ua") or settings.USER_AGENT,
|
||||
|
||||
@@ -40,8 +40,6 @@ class LibraryScraper(_PluginBase):
|
||||
user_level = 1
|
||||
|
||||
# 私有属性
|
||||
transferhis = None
|
||||
mediachain = None
|
||||
_scheduler = None
|
||||
_scraper = None
|
||||
# 限速开关
|
||||
@@ -55,7 +53,7 @@ class LibraryScraper(_PluginBase):
|
||||
_event = Event()
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self.mediachain = MediaChain()
|
||||
|
||||
# 读取配置
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
@@ -70,7 +68,6 @@ class LibraryScraper(_PluginBase):
|
||||
|
||||
# 启动定时任务 & 立即运行一次
|
||||
if self._enabled or self._onlyonce:
|
||||
self.transferhis = TransferHistoryOper()
|
||||
|
||||
if self._onlyonce:
|
||||
logger.info(f"媒体库刮削服务,立即运行一次")
|
||||
@@ -401,14 +398,14 @@ class LibraryScraper(_PluginBase):
|
||||
|
||||
# 如果未开启新增已入库媒体是否跟随TMDB信息变化则根据tmdbid查询之前的title
|
||||
if not settings.SCRAP_FOLLOW_TMDB:
|
||||
transfer_history = self.transferhis.get_by_type_tmdbid(tmdbid=mediainfo.tmdb_id,
|
||||
mtype=mediainfo.type.value)
|
||||
transfer_history = TransferHistoryOper().get_by_type_tmdbid(tmdbid=mediainfo.tmdb_id,
|
||||
mtype=mediainfo.type.value)
|
||||
if transfer_history:
|
||||
mediainfo.title = transfer_history.title
|
||||
# 获取图片
|
||||
self.chain.obtain_images(mediainfo)
|
||||
# 刮削
|
||||
self.mediachain.scrape_metadata(
|
||||
MediaChain().scrape_metadata(
|
||||
fileitem=schemas.FileItem(
|
||||
storage="local",
|
||||
type="dir",
|
||||
|
||||
@@ -31,7 +31,6 @@ class MediaServerMsg(_PluginBase):
|
||||
auth_level = 1
|
||||
|
||||
# 私有属性
|
||||
mediaserver_helper = None
|
||||
_enabled = False
|
||||
_add_play_link = False
|
||||
_mediaservers = None
|
||||
@@ -59,7 +58,7 @@ class MediaServerMsg(_PluginBase):
|
||||
}
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self.mediaserver_helper = MediaServerHelper()
|
||||
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
self._types = config.get("types") or []
|
||||
@@ -74,7 +73,7 @@ class MediaServerMsg(_PluginBase):
|
||||
logger.warning("尚未配置媒体服务器,请检查配置")
|
||||
return None
|
||||
|
||||
services = self.mediaserver_helper.get_services(type_filter=type_filter, name_filters=self._mediaservers)
|
||||
services = MediaServerHelper().get_services(type_filter=type_filter, name_filters=self._mediaservers)
|
||||
if not services:
|
||||
logger.warning("获取媒体服务器实例失败,请检查配置")
|
||||
return None
|
||||
@@ -181,7 +180,7 @@ class MediaServerMsg(_PluginBase):
|
||||
'model': 'mediaservers',
|
||||
'label': '媒体服务器',
|
||||
'items': [{"title": config.name, "value": config.name}
|
||||
for config in self.mediaserver_helper.get_configs().values()]
|
||||
for config in MediaServerHelper().get_configs().values()]
|
||||
}
|
||||
}
|
||||
]
|
||||
@@ -342,7 +341,7 @@ class MediaServerMsg(_PluginBase):
|
||||
if service:
|
||||
play_link = service.instance.get_play_url(event_info.item_id)
|
||||
elif event_info.channel:
|
||||
services = self.mediaserver_helper.get_services(type_filter=event_info.channel)
|
||||
services = MediaServerHelper().get_services(type_filter=event_info.channel)
|
||||
for service in services.values():
|
||||
play_link = service.instance.get_play_url(event_info.item_id)
|
||||
if play_link:
|
||||
|
||||
@@ -32,13 +32,12 @@ class MediaServerRefresh(_PluginBase):
|
||||
auth_level = 1
|
||||
|
||||
# 私有属性
|
||||
mediaserver_helper = None
|
||||
_enabled = False
|
||||
_delay = 0
|
||||
_mediaservers = None
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self.mediaserver_helper = MediaServerHelper()
|
||||
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
self._delay = config.get("delay") or 0
|
||||
@@ -53,7 +52,7 @@ class MediaServerRefresh(_PluginBase):
|
||||
logger.warning("尚未配置媒体服务器,请检查配置")
|
||||
return None
|
||||
|
||||
services = self.mediaserver_helper.get_services(name_filters=self._mediaservers)
|
||||
services = MediaServerHelper().get_services(name_filters=self._mediaservers)
|
||||
if not services:
|
||||
logger.warning("获取媒体服务器实例失败,请检查配置")
|
||||
return None
|
||||
@@ -128,7 +127,7 @@ class MediaServerRefresh(_PluginBase):
|
||||
'model': 'mediaservers',
|
||||
'label': '媒体服务器',
|
||||
'items': [{"title": config.name, "value": config.name}
|
||||
for config in self.mediaserver_helper.get_configs().values()]
|
||||
for config in MediaServerHelper().get_configs().values()]
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import datetime
|
||||
import re
|
||||
from typing import Any, List, Dict, Tuple, Optional
|
||||
|
||||
import pytz
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
@@ -7,12 +8,11 @@ from apscheduler.triggers.cron import CronTrigger
|
||||
|
||||
from app.chain.system import SystemChain
|
||||
from app.core.config import settings
|
||||
from app.plugins import _PluginBase
|
||||
from typing import Any, List, Dict, Tuple, Optional
|
||||
from app.helper.system import SystemHelper
|
||||
from app.log import logger
|
||||
from app.plugins import _PluginBase
|
||||
from app.schemas import NotificationType
|
||||
from app.utils.http import RequestUtils
|
||||
from app.utils.system import SystemUtils
|
||||
|
||||
|
||||
class MoviePilotUpdateNotify(_PluginBase):
|
||||
@@ -23,7 +23,7 @@ class MoviePilotUpdateNotify(_PluginBase):
|
||||
# 插件图标
|
||||
plugin_icon = "Moviepilot_A.png"
|
||||
# 插件版本
|
||||
plugin_version = "2.1"
|
||||
plugin_version = "2.2"
|
||||
# 插件作者
|
||||
plugin_author = "thsrite"
|
||||
# 作者主页
|
||||
@@ -70,7 +70,7 @@ class MoviePilotUpdateNotify(_PluginBase):
|
||||
# 自动重启
|
||||
if (server_update or front_update) and self._restart:
|
||||
logger.info("开始执行自动重启…")
|
||||
SystemUtils.restart()
|
||||
SystemHelper.restart()
|
||||
|
||||
def __check_server_update(self):
|
||||
"""
|
||||
@@ -165,7 +165,7 @@ class MoviePilotUpdateNotify(_PluginBase):
|
||||
logger.error("无法获取版本信息,请检查网络连接或GitHub API请求。")
|
||||
return None
|
||||
|
||||
def __get_backend_latest(self) -> Tuple[str, str, str]:
|
||||
def __get_backend_latest(self) -> Tuple[Optional[str], Optional[str], Optional[str]]:
|
||||
"""
|
||||
获取最新版本
|
||||
"""
|
||||
|
||||
@@ -55,9 +55,6 @@ class PersonMeta(_PluginBase):
|
||||
|
||||
# 私有属性
|
||||
_scheduler = None
|
||||
tmdbchain = None
|
||||
mschain = None
|
||||
mediaserver_helper = None
|
||||
_enabled = False
|
||||
_onlyonce = False
|
||||
_cron = None
|
||||
@@ -67,9 +64,7 @@ class PersonMeta(_PluginBase):
|
||||
_mediaservers = []
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self.tmdbchain = TmdbChain()
|
||||
self.mschain = MediaServerChain()
|
||||
self.mediaserver_helper = MediaServerHelper()
|
||||
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
self._onlyonce = config.get("onlyonce")
|
||||
@@ -266,7 +261,7 @@ class PersonMeta(_PluginBase):
|
||||
'model': 'mediaservers',
|
||||
'label': '媒体服务器',
|
||||
'items': [{"title": config.name, "value": config.name}
|
||||
for config in self.mediaserver_helper.get_configs().values()]
|
||||
for config in MediaServerHelper().get_configs().values()]
|
||||
}
|
||||
}
|
||||
]
|
||||
@@ -316,7 +311,7 @@ class PersonMeta(_PluginBase):
|
||||
logger.warning("尚未配置媒体服务器,请检查配置")
|
||||
return None
|
||||
|
||||
services = self.mediaserver_helper.get_services(type_filter=type_filter, name_filters=self._mediaservers)
|
||||
services = MediaServerHelper().get_services(type_filter=type_filter, name_filters=self._mediaservers)
|
||||
if not services:
|
||||
logger.warning("获取媒体服务器实例失败,请检查配置")
|
||||
return None
|
||||
@@ -355,7 +350,7 @@ class PersonMeta(_PluginBase):
|
||||
logger.warn(f"{mediainfo.title_year} 在媒体库中不存在")
|
||||
return
|
||||
# 查询条目详情
|
||||
iteminfo = self.mschain.iteminfo(server=existsinfo.server, item_id=existsinfo.itemid)
|
||||
iteminfo = MediaServerChain().iteminfo(server=existsinfo.server, item_id=existsinfo.itemid)
|
||||
if not iteminfo:
|
||||
logger.warn(f"{mediainfo.title_year} 条目详情获取失败")
|
||||
return
|
||||
@@ -371,12 +366,13 @@ class PersonMeta(_PluginBase):
|
||||
service_infos = self.service_infos()
|
||||
if not service_infos:
|
||||
return
|
||||
mediaserverchain = MediaServerChain()
|
||||
for server, service in service_infos.items():
|
||||
# 扫描所有媒体库
|
||||
logger.info(f"开始刮削服务器 {server} 的演员信息 ...")
|
||||
for library in self.mschain.librarys(server):
|
||||
for library in mediaserverchain.librarys(server):
|
||||
logger.info(f"开始刮削媒体库 {library.name} 的演员信息 ...")
|
||||
for item in self.mschain.items(server, library.id):
|
||||
for item in mediaserverchain.items(server, library.id):
|
||||
if not item:
|
||||
continue
|
||||
if not item.item_id:
|
||||
@@ -577,7 +573,7 @@ class PersonMeta(_PluginBase):
|
||||
# 从TMDB信息中更新人物信息
|
||||
person_tmdbid, person_imdbid = __get_peopleid(personinfo)
|
||||
if person_tmdbid:
|
||||
person_detail = self.tmdbchain.person_detail(int(person_tmdbid))
|
||||
person_detail = TmdbChain().person_detail(int(person_tmdbid))
|
||||
if person_detail:
|
||||
cn_name = self.__get_chinese_name(person_detail)
|
||||
# 图片优先从TMDB获取
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import random
|
||||
import time
|
||||
import shutil
|
||||
import subprocess
|
||||
import threading
|
||||
import time
|
||||
from pathlib import Path
|
||||
from typing import Any, List, Dict, Tuple
|
||||
|
||||
@@ -12,7 +12,6 @@ from app.core.event import eventmanager, Event
|
||||
from app.log import logger
|
||||
from app.plugins import _PluginBase
|
||||
from app.schemas import TransferInfo
|
||||
from app.schemas.file import FileItem
|
||||
from app.schemas.types import EventType, MediaType, NotificationType
|
||||
from app.utils.system import SystemUtils
|
||||
|
||||
@@ -320,9 +319,7 @@ class PlayletCategory(_PluginBase):
|
||||
try:
|
||||
# 相对路径
|
||||
relative_path = file.relative_to(target_path)
|
||||
logger.debug(f"relative_path:{to_path}")
|
||||
to_path = new_path / relative_path
|
||||
logger.debug(f"to_path:{to_path}")
|
||||
shutil.move(file, to_path)
|
||||
except Exception as e:
|
||||
logger.error(f"移动文件失败:{e}")
|
||||
|
||||
@@ -41,8 +41,6 @@ class QbCommand(_PluginBase):
|
||||
auth_level = 1
|
||||
|
||||
# 私有属性
|
||||
_sites = None
|
||||
_siteoper = None
|
||||
_qb = None
|
||||
_enabled: bool = False
|
||||
_notify: bool = False
|
||||
@@ -62,10 +60,10 @@ class QbCommand(_PluginBase):
|
||||
_multi_level_root_domain = ["edu.cn", "com.cn", "net.cn", "org.cn"]
|
||||
_scheduler = None
|
||||
_exclude_dirs = ""
|
||||
_downloaders = []
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self._sites = SitesHelper()
|
||||
self._siteoper = SiteOper()
|
||||
self.downloader_helper = DownloaderHelper()
|
||||
|
||||
# 停止现有任务
|
||||
self.stop_service()
|
||||
# 读取配置
|
||||
@@ -87,7 +85,7 @@ class QbCommand(_PluginBase):
|
||||
self._op_site_ids = config.get("op_site_ids") or []
|
||||
self._downloaders = config.get("downloaders")
|
||||
# 查询所有站点
|
||||
all_sites = [site for site in self._sites.get_indexers() if not site.get("public")] + self.__custom_sites()
|
||||
all_sites = [site for site in SitesHelper().get_indexers() if not site.get("public")] + self.__custom_sites()
|
||||
# 过滤掉没有选中的站点
|
||||
self._op_sites = [site for site in all_sites if site.get("id") in self._op_site_ids]
|
||||
self._exclude_dirs = config.get("exclude_dirs") or ""
|
||||
@@ -101,8 +99,7 @@ class QbCommand(_PluginBase):
|
||||
self._scheduler.add_job(
|
||||
self.pause_torrent,
|
||||
"date",
|
||||
run_date=datetime.now(tz=pytz.timezone(settings.TZ))
|
||||
+ timedelta(seconds=3),
|
||||
run_date=datetime.now(tz=pytz.timezone(settings.TZ)) + timedelta(seconds=3),
|
||||
)
|
||||
elif self._only_resume_once:
|
||||
self._scheduler = BackgroundScheduler(timezone=settings.TZ)
|
||||
@@ -110,8 +107,7 @@ class QbCommand(_PluginBase):
|
||||
self._scheduler.add_job(
|
||||
self.resume_torrent,
|
||||
"date",
|
||||
run_date=datetime.now(tz=pytz.timezone(settings.TZ))
|
||||
+ timedelta(seconds=3),
|
||||
run_date=datetime.now(tz=pytz.timezone(settings.TZ)) + timedelta(seconds=3),
|
||||
)
|
||||
|
||||
self._only_resume_once = False
|
||||
@@ -136,9 +132,9 @@ class QbCommand(_PluginBase):
|
||||
self._scheduler.start()
|
||||
|
||||
if (
|
||||
self._only_pause_upload
|
||||
or self._only_pause_download
|
||||
or self._only_pause_checking
|
||||
self._only_pause_upload
|
||||
or self._only_pause_download
|
||||
or self._only_pause_checking
|
||||
):
|
||||
if self._only_pause_upload:
|
||||
self._scheduler = BackgroundScheduler(timezone=settings.TZ)
|
||||
@@ -146,8 +142,7 @@ class QbCommand(_PluginBase):
|
||||
self._scheduler.add_job(
|
||||
self.pause_torrent,
|
||||
"date",
|
||||
run_date=datetime.now(tz=pytz.timezone(settings.TZ))
|
||||
+ timedelta(seconds=3),
|
||||
run_date=datetime.now(tz=pytz.timezone(settings.TZ)) + timedelta(seconds=3),
|
||||
kwargs={
|
||||
'type': self.TorrentType.UPLOADING
|
||||
}
|
||||
@@ -158,8 +153,7 @@ class QbCommand(_PluginBase):
|
||||
self._scheduler.add_job(
|
||||
self.pause_torrent,
|
||||
"date",
|
||||
run_date=datetime.now(tz=pytz.timezone(settings.TZ))
|
||||
+ timedelta(seconds=3),
|
||||
run_date=datetime.now(tz=pytz.timezone(settings.TZ)) + timedelta(seconds=3),
|
||||
kwargs={
|
||||
'type': self.TorrentType.DOWNLOADING
|
||||
}
|
||||
@@ -170,8 +164,7 @@ class QbCommand(_PluginBase):
|
||||
self._scheduler.add_job(
|
||||
self.pause_torrent,
|
||||
"date",
|
||||
run_date=datetime.now(tz=pytz.timezone(settings.TZ))
|
||||
+ timedelta(seconds=3),
|
||||
run_date=datetime.now(tz=pytz.timezone(settings.TZ)) + timedelta(seconds=3),
|
||||
kwargs={
|
||||
'type': self.TorrentType.CHECKING
|
||||
}
|
||||
@@ -201,7 +194,7 @@ class QbCommand(_PluginBase):
|
||||
self.set_limit(self._upload_limit, self._download_limit)
|
||||
|
||||
@property
|
||||
def service_info(self) -> Optional[ServiceInfo]:
|
||||
def service_info(self) -> Optional[Dict[str, ServiceInfo]]:
|
||||
"""
|
||||
服务信息
|
||||
"""
|
||||
@@ -209,7 +202,7 @@ class QbCommand(_PluginBase):
|
||||
logger.warning("尚未配置下载器,请检查配置")
|
||||
return None
|
||||
|
||||
services = self.downloader_helper.get_services(name_filters=self._downloaders)
|
||||
services = DownloaderHelper().get_services(name_filters=self._downloaders)
|
||||
|
||||
if not services:
|
||||
logger.warning("获取下载器实例失败,请检查配置")
|
||||
@@ -230,15 +223,17 @@ class QbCommand(_PluginBase):
|
||||
|
||||
return active_services
|
||||
|
||||
def check_is_qb(self, service_info) -> bool:
|
||||
@staticmethod
|
||||
def check_is_qb(service_info) -> bool:
|
||||
"""
|
||||
检查下载器类型是否为 qbittorrent 或 transmission
|
||||
"""
|
||||
if self.downloader_helper.is_downloader(service_type="qbittorrent", service=service_info):
|
||||
if DownloaderHelper().is_downloader(service_type="qbittorrent", service=service_info):
|
||||
return True
|
||||
elif self.downloader_helper.is_downloader(service_type="transmission", service=service_info):
|
||||
elif DownloaderHelper().is_downloader(service_type="transmission", service=service_info):
|
||||
return False
|
||||
return False
|
||||
|
||||
def get_state(self) -> bool:
|
||||
return self._enabled
|
||||
|
||||
@@ -409,9 +404,9 @@ class QbCommand(_PluginBase):
|
||||
if torrent.state_enum.is_uploading and not torrent.state_enum.is_paused:
|
||||
uploading_torrents.append(torrent.get("hash"))
|
||||
elif (
|
||||
torrent.state_enum.is_downloading
|
||||
and not torrent.state_enum.is_paused
|
||||
and not torrent.state_enum.is_checking
|
||||
torrent.state_enum.is_downloading
|
||||
and not torrent.state_enum.is_paused
|
||||
and not torrent.state_enum.is_checking
|
||||
):
|
||||
downloading_torrents.append(torrent.get("hash"))
|
||||
elif torrent.state_enum.is_checking:
|
||||
@@ -476,7 +471,7 @@ class QbCommand(_PluginBase):
|
||||
downloader_name = service.name
|
||||
downloader_obj = service.instance
|
||||
if not downloader_obj:
|
||||
logger.error(f"{self.LOG_TAG} 获取下载器失败 {downloader_name}")
|
||||
logger.error(f"获取下载器失败 {downloader_name}")
|
||||
continue
|
||||
all_torrents = self.get_all_torrents(service)
|
||||
hash_downloading, hash_uploading, hash_paused, hash_checking, hash_error = (
|
||||
@@ -498,12 +493,12 @@ class QbCommand(_PluginBase):
|
||||
mtype=NotificationType.SiteMessage,
|
||||
title=f"【下载器{downloader_name}暂停任务启动】",
|
||||
text=f"种子总数: {len(all_torrents)} \n"
|
||||
f"做种数量: {len(hash_uploading)}\n"
|
||||
f"下载数量: {len(hash_downloading)}\n"
|
||||
f"检查数量: {len(hash_checking)}\n"
|
||||
f"暂停数量: {len(hash_paused)}\n"
|
||||
f"错误数量: {len(hash_error)}\n"
|
||||
f"暂停操作中请稍等...\n",
|
||||
f"做种数量: {len(hash_uploading)}\n"
|
||||
f"下载数量: {len(hash_downloading)}\n"
|
||||
f"检查数量: {len(hash_checking)}\n"
|
||||
f"暂停数量: {len(hash_paused)}\n"
|
||||
f"错误数量: {len(hash_error)}\n"
|
||||
f"暂停操作中请稍等...\n",
|
||||
)
|
||||
pause_torrents = self.filter_pause_torrents(all_torrents)
|
||||
hash_downloading, hash_uploading, hash_paused, hash_checking, hash_error = (
|
||||
@@ -551,11 +546,11 @@ class QbCommand(_PluginBase):
|
||||
mtype=NotificationType.SiteMessage,
|
||||
title=f"【下载器{downloader_name}暂停任务完成】",
|
||||
text=f"种子总数: {len(all_torrents)} \n"
|
||||
f"做种数量: {len(hash_uploading)}\n"
|
||||
f"下载数量: {len(hash_downloading)}\n"
|
||||
f"检查数量: {len(hash_checking)}\n"
|
||||
f"暂停数量: {len(hash_paused)}\n"
|
||||
f"错误数量: {len(hash_error)}\n",
|
||||
f"做种数量: {len(hash_uploading)}\n"
|
||||
f"下载数量: {len(hash_downloading)}\n"
|
||||
f"检查数量: {len(hash_checking)}\n"
|
||||
f"暂停数量: {len(hash_paused)}\n"
|
||||
f"错误数量: {len(hash_error)}\n",
|
||||
)
|
||||
|
||||
def __is_excluded(self, file_path) -> bool:
|
||||
@@ -566,6 +561,7 @@ class QbCommand(_PluginBase):
|
||||
if exclude_dir and exclude_dir in str(file_path):
|
||||
return True
|
||||
return False
|
||||
|
||||
def filter_pause_torrents(self, all_torrents):
|
||||
torrents = []
|
||||
for torrent in all_torrents:
|
||||
@@ -592,7 +588,7 @@ class QbCommand(_PluginBase):
|
||||
downloader_name = service.name
|
||||
downloader_obj = service.instance
|
||||
if not downloader_obj:
|
||||
logger.error(f"{self.LOG_TAG} 获取下载器失败 {downloader_name}")
|
||||
logger.error(f"获取下载器失败 {downloader_name}")
|
||||
continue
|
||||
all_torrents = self.get_all_torrents(service)
|
||||
hash_downloading, hash_uploading, hash_paused, hash_checking, hash_error = (
|
||||
@@ -613,12 +609,12 @@ class QbCommand(_PluginBase):
|
||||
mtype=NotificationType.SiteMessage,
|
||||
title=f"【下载器{downloader_name}开始任务启动】",
|
||||
text=f"种子总数: {len(all_torrents)} \n"
|
||||
f"做种数量: {len(hash_uploading)}\n"
|
||||
f"下载数量: {len(hash_downloading)}\n"
|
||||
f"检查数量: {len(hash_checking)}\n"
|
||||
f"暂停数量: {len(hash_paused)}\n"
|
||||
f"错误数量: {len(hash_error)}\n"
|
||||
f"开始操作中请稍等...\n",
|
||||
f"做种数量: {len(hash_uploading)}\n"
|
||||
f"下载数量: {len(hash_downloading)}\n"
|
||||
f"检查数量: {len(hash_checking)}\n"
|
||||
f"暂停数量: {len(hash_paused)}\n"
|
||||
f"错误数量: {len(hash_error)}\n"
|
||||
f"开始操作中请稍等...\n",
|
||||
)
|
||||
|
||||
resume_torrents = self.filter_resume_torrents(all_torrents)
|
||||
@@ -655,11 +651,11 @@ class QbCommand(_PluginBase):
|
||||
mtype=NotificationType.SiteMessage,
|
||||
title=f"【下载器{downloader_name}开始任务完成】",
|
||||
text=f"种子总数: {len(all_torrents)} \n"
|
||||
f"做种数量: {len(hash_uploading)}\n"
|
||||
f"下载数量: {len(hash_downloading)}\n"
|
||||
f"检查数量: {len(hash_checking)}\n"
|
||||
f"暂停数量: {len(hash_paused)}\n"
|
||||
f"错误数量: {len(hash_error)}\n",
|
||||
f"做种数量: {len(hash_uploading)}\n"
|
||||
f"下载数量: {len(hash_downloading)}\n"
|
||||
f"检查数量: {len(hash_checking)}\n"
|
||||
f"暂停数量: {len(hash_paused)}\n"
|
||||
f"错误数量: {len(hash_error)}\n",
|
||||
)
|
||||
|
||||
def filter_resume_torrents(self, all_torrents):
|
||||
@@ -714,7 +710,7 @@ class QbCommand(_PluginBase):
|
||||
downloader_name = service.name
|
||||
downloader_obj = service.instance
|
||||
if not downloader_obj:
|
||||
logger.error(f"{self.LOG_TAG} 获取下载器失败 {downloader_name}")
|
||||
logger.error(f"获取下载器失败 {downloader_name}")
|
||||
continue
|
||||
all_torrents = self.get_all_torrents(service)
|
||||
hash_downloading, hash_uploading, hash_paused, hash_checking, hash_error = (
|
||||
@@ -734,11 +730,11 @@ class QbCommand(_PluginBase):
|
||||
mtype=NotificationType.SiteMessage,
|
||||
title=f"【下载器{downloader_name}任务状态】",
|
||||
text=f"种子总数: {len(all_torrents)} \n"
|
||||
f"做种数量: {len(hash_uploading)}\n"
|
||||
f"下载数量: {len(hash_downloading)}\n"
|
||||
f"检查数量: {len(hash_checking)}\n"
|
||||
f"暂停数量: {len(hash_paused)}\n"
|
||||
f"错误数量: {len(hash_error)}\n"
|
||||
f"做种数量: {len(hash_uploading)}\n"
|
||||
f"下载数量: {len(hash_downloading)}\n"
|
||||
f"检查数量: {len(hash_checking)}\n"
|
||||
f"暂停数量: {len(hash_paused)}\n"
|
||||
f"错误数量: {len(hash_error)}\n"
|
||||
)
|
||||
|
||||
@eventmanager.register(EventType.PluginAction)
|
||||
@@ -766,10 +762,10 @@ class QbCommand(_PluginBase):
|
||||
return True
|
||||
|
||||
if (
|
||||
not upload_limit
|
||||
or not upload_limit.isdigit()
|
||||
or not download_limit
|
||||
or not download_limit.isdigit()
|
||||
not upload_limit
|
||||
or not upload_limit.isdigit()
|
||||
or not download_limit
|
||||
or not download_limit.isdigit()
|
||||
):
|
||||
self.post_message(
|
||||
mtype=NotificationType.SiteMessage,
|
||||
@@ -783,7 +779,7 @@ class QbCommand(_PluginBase):
|
||||
downloader_name = service.name
|
||||
downloader_obj = service.instance
|
||||
if not downloader_obj:
|
||||
logger.error(f"{self.LOG_TAG} 获取下载器失败 {downloader_name}")
|
||||
logger.error(f"获取下载器失败 {downloader_name}")
|
||||
continue
|
||||
flag = flag and downloader_obj.set_speed_limit(
|
||||
download_limit=int(download_limit), upload_limit=int(upload_limit)
|
||||
@@ -806,7 +802,7 @@ class QbCommand(_PluginBase):
|
||||
downloader_name = service.name
|
||||
downloader_obj = service.instance
|
||||
if not downloader_obj:
|
||||
logger.error(f"{self.LOG_TAG} 获取下载器失败 {downloader_name}")
|
||||
logger.error(f"获取下载器失败 {downloader_name}")
|
||||
continue
|
||||
download_limit_current_val, _ = downloader_obj.get_speed_limit()
|
||||
flag = flag and downloader_obj.set_speed_limit(
|
||||
@@ -831,7 +827,7 @@ class QbCommand(_PluginBase):
|
||||
downloader_name = service.name
|
||||
downloader_obj = service.instance
|
||||
if not downloader_obj:
|
||||
logger.error(f"{self.LOG_TAG} 获取下载器失败 {downloader_name}")
|
||||
logger.error(f"获取下载器失败 {downloader_name}")
|
||||
continue
|
||||
_, upload_limit_current_val = downloader_obj.get_speed_limit()
|
||||
flag = flag and downloader_obj.set_speed_limit(
|
||||
@@ -856,7 +852,7 @@ class QbCommand(_PluginBase):
|
||||
elif flag is None and self._enabled and self._enable_upload_limit:
|
||||
flag = self.set_upload_limit(upload_limit)
|
||||
|
||||
if flag == True:
|
||||
if flag is True:
|
||||
logger.info(f"设置QB限速成功")
|
||||
if self._notify:
|
||||
if upload_limit == 0:
|
||||
@@ -872,7 +868,7 @@ class QbCommand(_PluginBase):
|
||||
title=f"【QB远程操作】",
|
||||
text=text,
|
||||
)
|
||||
elif flag == False:
|
||||
elif flag is False:
|
||||
logger.error(f"QB设置限速失败")
|
||||
if self._notify:
|
||||
self.post_message(
|
||||
@@ -881,7 +877,8 @@ class QbCommand(_PluginBase):
|
||||
text=f"设置QB限速失败",
|
||||
)
|
||||
|
||||
def get_torrent_tracker(self, torrent):
|
||||
@staticmethod
|
||||
def get_torrent_tracker(torrent):
|
||||
"""
|
||||
qb解析 tracker
|
||||
:return: tracker url
|
||||
@@ -937,11 +934,11 @@ class QbCommand(_PluginBase):
|
||||
customSites = self.__custom_sites()
|
||||
|
||||
site_options = [
|
||||
{"title": site.name, "value": site.id}
|
||||
for site in self._siteoper.list_order_by_pri()
|
||||
] + [
|
||||
{"title": site.get("name"), "value": site.get("id")} for site in customSites
|
||||
]
|
||||
{"title": site.name, "value": site.id}
|
||||
for site in SiteOper().list_order_by_pri()
|
||||
] + [
|
||||
{"title": site.get("name"), "value": site.get("id")} for site in customSites
|
||||
]
|
||||
return [
|
||||
{
|
||||
"component": "VForm",
|
||||
@@ -1021,7 +1018,7 @@ class QbCommand(_PluginBase):
|
||||
'model': 'downloaders',
|
||||
'label': '下载器',
|
||||
'items': [{"title": config.name, "value": config.name}
|
||||
for config in self.downloader_helper.get_configs().values()]
|
||||
for config in DownloaderHelper().get_configs().values()]
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -11,7 +11,6 @@ from apscheduler.triggers.cron import CronTrigger
|
||||
|
||||
from app import schemas
|
||||
from app.chain.download import DownloadChain
|
||||
from app.chain.search import SearchChain
|
||||
from app.chain.subscribe import SubscribeChain
|
||||
from app.core.config import settings
|
||||
from app.core.context import MediaInfo, TorrentInfo, Context
|
||||
@@ -48,10 +47,6 @@ class RssSubscribe(_PluginBase):
|
||||
# 私有变量
|
||||
_scheduler: Optional[BackgroundScheduler] = None
|
||||
_cache_path: Optional[Path] = None
|
||||
rsshelper = None
|
||||
downloadchain = None
|
||||
searchchain = None
|
||||
subscribechain = None
|
||||
|
||||
# 配置属性
|
||||
_enabled: bool = False
|
||||
@@ -70,10 +65,6 @@ class RssSubscribe(_PluginBase):
|
||||
_size_range: str = ""
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self.rsshelper = RssHelper()
|
||||
self.downloadchain = DownloadChain()
|
||||
self.searchchain = SearchChain()
|
||||
self.subscribechain = SubscribeChain()
|
||||
|
||||
# 停止现有任务
|
||||
self.stop_service()
|
||||
@@ -618,12 +609,14 @@ class RssSubscribe(_PluginBase):
|
||||
history = []
|
||||
else:
|
||||
history: List[dict] = self.get_data('history') or []
|
||||
downloadchain = DownloadChain()
|
||||
subscribechain = SubscribeChain()
|
||||
for url in self._address.split("\n"):
|
||||
# 处理每一个RSS链接
|
||||
if not url:
|
||||
continue
|
||||
logger.info(f"开始刷新RSS:{url} ...")
|
||||
results = self.rsshelper.parse(url, proxy=self._proxy)
|
||||
results = RssHelper().parse(url, proxy=self._proxy)
|
||||
if not results:
|
||||
logger.error(f"未获取到RSS数据:{url}")
|
||||
return
|
||||
@@ -704,7 +697,7 @@ class RssSubscribe(_PluginBase):
|
||||
# 下载或订阅
|
||||
if self._action == "download":
|
||||
# 添加下载
|
||||
result = self.downloadchain.download_single(
|
||||
result = downloadchain.download_single(
|
||||
context=Context(
|
||||
meta_info=meta,
|
||||
media_info=mediainfo,
|
||||
@@ -718,18 +711,18 @@ class RssSubscribe(_PluginBase):
|
||||
continue
|
||||
else:
|
||||
# 检查是否在订阅中
|
||||
subflag = self.subscribechain.exists(mediainfo=mediainfo, meta=meta)
|
||||
subflag = subscribechain.exists(mediainfo=mediainfo, meta=meta)
|
||||
if subflag:
|
||||
logger.info(f'{mediainfo.title_year} {meta.season} 正在订阅中')
|
||||
continue
|
||||
# 添加订阅
|
||||
self.subscribechain.add(title=mediainfo.title,
|
||||
year=mediainfo.year,
|
||||
mtype=mediainfo.type,
|
||||
tmdbid=mediainfo.tmdb_id,
|
||||
season=meta.begin_season,
|
||||
exist_ok=True,
|
||||
username="RSS订阅")
|
||||
subscribechain.add(title=mediainfo.title,
|
||||
year=mediainfo.year,
|
||||
mtype=mediainfo.type,
|
||||
tmdbid=mediainfo.tmdb_id,
|
||||
season=meta.begin_season,
|
||||
exist_ok=True,
|
||||
username="RSS订阅")
|
||||
# 存储历史记录
|
||||
history.append({
|
||||
"title": f"{mediainfo.title} {meta.season}",
|
||||
@@ -772,4 +765,4 @@ class RssSubscribe(_PluginBase):
|
||||
"""
|
||||
检查字符串是否表示单个数字或数字范围(如'5', '5.5', '5-10' 或 '5.5-10.2')
|
||||
"""
|
||||
return bool(re.match(r"^\d+(\.\d+)?(-\d+(\.\d+)?)?$", value))
|
||||
return bool(re.match(r"^\d+(\.\d+)?(-\d+(\.\d+)?)?$", value))
|
||||
|
||||
@@ -1,11 +1,10 @@
|
||||
import gc
|
||||
import warnings
|
||||
from collections import defaultdict
|
||||
from datetime import datetime, timedelta
|
||||
from threading import Lock
|
||||
from typing import Optional, Any, List, Dict, Tuple
|
||||
|
||||
import pytz
|
||||
from app.helper.sites import SitesHelper
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
|
||||
from app import schemas
|
||||
@@ -14,6 +13,7 @@ from app.core.config import settings
|
||||
from app.core.event import eventmanager, Event
|
||||
from app.db.models.siteuserdata import SiteUserData
|
||||
from app.db.site_oper import SiteOper
|
||||
from app.helper.sites import SitesHelper
|
||||
from app.log import logger
|
||||
from app.plugins import _PluginBase
|
||||
from app.schemas.types import EventType, NotificationType
|
||||
@@ -32,7 +32,7 @@ class SiteStatistic(_PluginBase):
|
||||
# 插件图标
|
||||
plugin_icon = "statistic.png"
|
||||
# 插件版本
|
||||
plugin_version = "1.6"
|
||||
plugin_version = "1.7.1"
|
||||
# 插件作者
|
||||
plugin_author = "lightolly,jxxghp"
|
||||
# 作者主页
|
||||
@@ -45,9 +45,6 @@ class SiteStatistic(_PluginBase):
|
||||
auth_level = 2
|
||||
|
||||
# 配置属性
|
||||
siteoper = None
|
||||
siteshelper = None
|
||||
sitechain = None
|
||||
_enabled: bool = False
|
||||
_onlyonce: bool = False
|
||||
_dashboard_type: str = "today"
|
||||
@@ -55,9 +52,6 @@ class SiteStatistic(_PluginBase):
|
||||
_scheduler = None
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self.siteoper = SiteOper()
|
||||
self.siteshelper = SitesHelper()
|
||||
self.sitechain = SiteChain()
|
||||
|
||||
# 停止现有任务
|
||||
self.stop_service()
|
||||
@@ -72,7 +66,7 @@ class SiteStatistic(_PluginBase):
|
||||
if self._onlyonce:
|
||||
config["onlyonce"] = False
|
||||
self._scheduler = BackgroundScheduler(timezone=settings.TZ)
|
||||
self._scheduler.add_job(self.sitechain.refresh_userdatas, "date",
|
||||
self._scheduler.add_job(SiteChain().refresh_userdatas, "date",
|
||||
run_date=datetime.now(tz=pytz.timezone(settings.TZ)) + timedelta(seconds=3),
|
||||
name="站点数据统计服务")
|
||||
self._scheduler.print_jobs()
|
||||
@@ -263,68 +257,55 @@ class SiteStatistic(_PluginBase):
|
||||
self.post_message(mtype=NotificationType.SiteMessage,
|
||||
title="站点数据统计", text="\n".join(sorted_messages))
|
||||
|
||||
def __get_data(self) -> Tuple[str, List[SiteUserData], List[SiteUserData]]:
|
||||
@staticmethod
|
||||
def __get_data() -> Tuple[str, List[SiteUserData], List[SiteUserData]]:
|
||||
"""
|
||||
获取最近一次统计的日期、最近一次统计的站点数据、上一次的站点数据
|
||||
如果上一次某个站点数据缺失,则 fallback 到该站点之前最近有数据的日期
|
||||
"""
|
||||
# 获取所有原始数据
|
||||
raw_data_list: List[SiteUserData] = self.siteoper.get_userdata()
|
||||
if not raw_data_list:
|
||||
# 优化:只获取最近的站点数据,而不是所有历史数据
|
||||
latest_data: List[SiteUserData] = SiteOper().get_userdata_latest()
|
||||
if not latest_data:
|
||||
return "", [], []
|
||||
|
||||
# 每个日期、每个站点只保留最后一条数据
|
||||
data_list = list({f"{data.updated_day}_{data.name}": data for data in raw_data_list}.values())
|
||||
# 获取最新日期(用于显示)
|
||||
latest_day = max(data.updated_day for data in latest_data)
|
||||
|
||||
# 按上传量降序排序
|
||||
latest_data.sort(key=lambda x: x.upload or 0, reverse=True)
|
||||
|
||||
# 按日期倒序排序
|
||||
data_list.sort(key=lambda x: x.updated_day, reverse=True)
|
||||
|
||||
# 按日期分组数据
|
||||
data_by_day = defaultdict(list)
|
||||
for data in data_list:
|
||||
data_by_day[data.updated_day].append(data)
|
||||
|
||||
# 获取最近一次统计的日期
|
||||
latest_day = data_list[0].updated_day
|
||||
|
||||
# 筛选最近一次统计的数据(可能为空)
|
||||
latest_data = [data for data in data_list if data.updated_day == latest_day]
|
||||
# 最近一次统计按上传量降序排序
|
||||
latest_data.sort(key=lambda x: x.upload, reverse=True)
|
||||
|
||||
# 获取所有日期倒序排序后的列表
|
||||
sorted_dates = sorted(data_by_day.keys(), reverse=True)
|
||||
|
||||
# 计算前一天的日期字符串(相对于最近一次日期)
|
||||
previous_day_str = (datetime.strptime(latest_day, "%Y-%m-%d") - timedelta(days=1)).strftime("%Y-%m-%d")
|
||||
# 获取前一天的站点数据
|
||||
previous_day_sites = data_by_day.get(previous_day_str, [])
|
||||
# 构建前一天站点到数据的映射
|
||||
previous_by_site = {data.name: data for data in previous_day_sites}
|
||||
|
||||
# 准备查找早于前一天的日期列表,用于 fallback
|
||||
fallback_dates = [d for d in sorted_dates if d < previous_day_str]
|
||||
|
||||
# 按站点细化进行上一次数据的 fallback 处理
|
||||
# 为每个站点查找对应的前一天数据
|
||||
previous_data = []
|
||||
for current_site in latest_data:
|
||||
site_name = current_site.name
|
||||
# 优先尝试获取前一天的同一站点数据
|
||||
current_day = current_site.updated_day
|
||||
|
||||
# 计算该站点的前一天日期
|
||||
previous_day_str = (datetime.strptime(current_day, "%Y-%m-%d") - timedelta(days=1)).strftime("%Y-%m-%d")
|
||||
|
||||
# 获取前一天的数据
|
||||
previous_data_list = SiteOper().get_userdata_by_date(previous_day_str)
|
||||
previous_by_site = {data.name: data for data in previous_data_list}
|
||||
site_prev = previous_by_site.get(site_name)
|
||||
|
||||
# 如果前一天没有该站点的数据,则进行逐日回退查找
|
||||
if site_prev is None or site_prev.err_msg:
|
||||
for d in fallback_dates:
|
||||
# 在每个候选日期中查找对应站点数据
|
||||
candidate = next((x for x in data_by_day[d] if x.name == site_name), None)
|
||||
if candidate:
|
||||
|
||||
# 如果前一天没有该站点数据,尝试查找更早的数据
|
||||
if not site_prev or site_prev.err_msg:
|
||||
# 最多回溯7天,避免查询过多历史数据
|
||||
for i in range(2, 8):
|
||||
fallback_date = (datetime.strptime(current_day, "%Y-%m-%d") - timedelta(days=i)).strftime("%Y-%m-%d")
|
||||
fallback_data_list = SiteOper().get_userdata_by_date(fallback_date)
|
||||
fallback_by_site = {data.name: data for data in fallback_data_list}
|
||||
candidate = fallback_by_site.get(site_name)
|
||||
if candidate and not candidate.err_msg:
|
||||
site_prev = candidate
|
||||
break
|
||||
|
||||
# 如果找到了上一次的数据,加入结果列表
|
||||
if site_prev:
|
||||
previous_data.append(site_prev)
|
||||
|
||||
# 清理垃圾
|
||||
gc.collect()
|
||||
|
||||
return latest_day, latest_data, previous_data
|
||||
|
||||
@staticmethod
|
||||
@@ -851,152 +832,80 @@ class SiteStatistic(_PluginBase):
|
||||
dashboard='all'
|
||||
)
|
||||
|
||||
# 站点数据明细
|
||||
site_trs = [
|
||||
{
|
||||
'component': 'tr',
|
||||
'props': {
|
||||
'class': 'text-sm'
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'td',
|
||||
'props': {
|
||||
'class': 'whitespace-nowrap break-keep text-high-emphasis'
|
||||
},
|
||||
'text': data.name
|
||||
},
|
||||
{
|
||||
'component': 'td',
|
||||
'text': data.username
|
||||
},
|
||||
{
|
||||
'component': 'td',
|
||||
'text': data.user_level
|
||||
},
|
||||
{
|
||||
'component': 'td',
|
||||
'props': {
|
||||
'class': 'text-success'
|
||||
},
|
||||
'text': StringUtils.str_filesize(data.upload)
|
||||
},
|
||||
{
|
||||
'component': 'td',
|
||||
'props': {
|
||||
'class': 'text-error'
|
||||
},
|
||||
'text': StringUtils.str_filesize(data.download)
|
||||
},
|
||||
{
|
||||
'component': 'td',
|
||||
'text': data.ratio
|
||||
},
|
||||
{
|
||||
'component': 'td',
|
||||
'text': format_bonus(data.bonus or 0)
|
||||
},
|
||||
{
|
||||
'component': 'td',
|
||||
'text': data.seeding
|
||||
},
|
||||
{
|
||||
'component': 'td',
|
||||
'text': StringUtils.str_filesize(data.seeding_size)
|
||||
}
|
||||
]
|
||||
} for data in stattistic_data
|
||||
# 优化:使用更轻量级的方式构建站点数据明细,避免创建过多嵌套对象
|
||||
# 先准备表头
|
||||
table_headers = [
|
||||
{'text': '站点', 'class': 'text-start ps-4'},
|
||||
{'text': '用户名', 'class': 'text-start ps-4'},
|
||||
{'text': '用户等级', 'class': 'text-start ps-4'},
|
||||
{'text': '上传量', 'class': 'text-start ps-4'},
|
||||
{'text': '下载量', 'class': 'text-start ps-4'},
|
||||
{'text': '分享率', 'class': 'text-start ps-4'},
|
||||
{'text': '魔力值', 'class': 'text-start ps-4'},
|
||||
{'text': '做种数', 'class': 'text-start ps-4'},
|
||||
{'text': '做种体积', 'class': 'text-start ps-4'}
|
||||
]
|
||||
|
||||
# 构建表头行
|
||||
header_row = {
|
||||
'component': 'thead',
|
||||
'content': [
|
||||
{
|
||||
'component': 'th',
|
||||
'props': {'class': header['class']},
|
||||
'text': header['text']
|
||||
} for header in table_headers
|
||||
]
|
||||
}
|
||||
|
||||
# 构建数据行,避免在列表推导式中创建复杂嵌套
|
||||
table_rows = []
|
||||
for data in stattistic_data:
|
||||
# 预先计算所有需要的值
|
||||
row_data = [
|
||||
{'text': data.name, 'class': 'whitespace-nowrap break-keep text-high-emphasis'},
|
||||
{'text': data.username, 'class': ''},
|
||||
{'text': data.user_level, 'class': ''},
|
||||
{'text': StringUtils.str_filesize(data.upload), 'class': 'text-success'},
|
||||
{'text': StringUtils.str_filesize(data.download), 'class': 'text-error'},
|
||||
{'text': data.ratio, 'class': ''},
|
||||
{'text': format_bonus(data.bonus or 0), 'class': ''},
|
||||
{'text': data.seeding, 'class': ''},
|
||||
{'text': StringUtils.str_filesize(data.seeding_size), 'class': ''}
|
||||
]
|
||||
|
||||
# 构建单行配置
|
||||
row_content = []
|
||||
for cell_data in row_data:
|
||||
cell = {'component': 'td', 'text': cell_data['text']}
|
||||
if cell_data['class']:
|
||||
cell['props'] = {'class': cell_data['class']}
|
||||
row_content.append(cell)
|
||||
|
||||
table_rows.append({
|
||||
'component': 'tr',
|
||||
'props': {'class': 'text-sm'},
|
||||
'content': row_content
|
||||
})
|
||||
|
||||
# 拼装页面
|
||||
return [
|
||||
page = [
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': site_totals + [
|
||||
# 各站点数据明细
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'props': {'cols': 12},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTable',
|
||||
'props': {
|
||||
'hover': True
|
||||
},
|
||||
'props': {'hover': True},
|
||||
'content': [
|
||||
{
|
||||
'component': 'thead',
|
||||
'content': [
|
||||
{
|
||||
'component': 'th',
|
||||
'props': {
|
||||
'class': 'text-start ps-4'
|
||||
},
|
||||
'text': '站点'
|
||||
},
|
||||
{
|
||||
'component': 'th',
|
||||
'props': {
|
||||
'class': 'text-start ps-4'
|
||||
},
|
||||
'text': '用户名'
|
||||
},
|
||||
{
|
||||
'component': 'th',
|
||||
'props': {
|
||||
'class': 'text-start ps-4'
|
||||
},
|
||||
'text': '用户等级'
|
||||
},
|
||||
{
|
||||
'component': 'th',
|
||||
'props': {
|
||||
'class': 'text-start ps-4'
|
||||
},
|
||||
'text': '上传量'
|
||||
},
|
||||
{
|
||||
'component': 'th',
|
||||
'props': {
|
||||
'class': 'text-start ps-4'
|
||||
},
|
||||
'text': '下载量'
|
||||
},
|
||||
{
|
||||
'component': 'th',
|
||||
'props': {
|
||||
'class': 'text-start ps-4'
|
||||
},
|
||||
'text': '分享率'
|
||||
},
|
||||
{
|
||||
'component': 'th',
|
||||
'props': {
|
||||
'class': 'text-start ps-4'
|
||||
},
|
||||
'text': '魔力值'
|
||||
},
|
||||
{
|
||||
'component': 'th',
|
||||
'props': {
|
||||
'class': 'text-start ps-4'
|
||||
},
|
||||
'text': '做种数'
|
||||
},
|
||||
{
|
||||
'component': 'th',
|
||||
'props': {
|
||||
'class': 'text-start ps-4'
|
||||
},
|
||||
'text': '做种体积'
|
||||
}
|
||||
]
|
||||
},
|
||||
header_row,
|
||||
{
|
||||
'component': 'tbody',
|
||||
'content': site_trs
|
||||
'content': table_rows
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1006,16 +915,22 @@ class SiteStatistic(_PluginBase):
|
||||
}
|
||||
]
|
||||
|
||||
# 清理垃圾
|
||||
gc.collect()
|
||||
|
||||
return page
|
||||
|
||||
def stop_service(self):
|
||||
pass
|
||||
|
||||
def refresh_by_domain(self, domain: str, apikey: str) -> schemas.Response:
|
||||
@staticmethod
|
||||
def refresh_by_domain(domain: str, apikey: str) -> schemas.Response:
|
||||
"""
|
||||
刷新一个站点数据,可由API调用
|
||||
"""
|
||||
if apikey != settings.API_TOKEN:
|
||||
return schemas.Response(success=False, message="API密钥错误")
|
||||
site_info = self.siteshelper.get_indexer(domain)
|
||||
site_info = SitesHelper().get_indexer(domain)
|
||||
if site_info:
|
||||
site_data = SiteChain().refresh_userdata(site=site_info)
|
||||
if site_data:
|
||||
|
||||
@@ -32,8 +32,6 @@ class SpeedLimiter(_PluginBase):
|
||||
auth_level = 1
|
||||
|
||||
# 私有属性
|
||||
downloader_helper = None
|
||||
mediaserver_helper = None
|
||||
_scheduler = None
|
||||
_enabled: bool = False
|
||||
_notify: bool = False
|
||||
@@ -54,8 +52,7 @@ class SpeedLimiter(_PluginBase):
|
||||
_exclude_path = ""
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self.downloader_helper = DownloaderHelper()
|
||||
self.mediaserver_helper = MediaServerHelper()
|
||||
|
||||
# 读取配置
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
@@ -183,7 +180,7 @@ class SpeedLimiter(_PluginBase):
|
||||
'model': 'downloader',
|
||||
'label': '下载器',
|
||||
'items': [{"title": config.name, "value": config.name}
|
||||
for config in self.downloader_helper.get_configs().values()]
|
||||
for config in DownloaderHelper().get_configs().values()]
|
||||
}
|
||||
}
|
||||
]
|
||||
@@ -402,7 +399,7 @@ class SpeedLimiter(_PluginBase):
|
||||
logger.warning("尚未配置下载器,请检查配置")
|
||||
return None
|
||||
|
||||
services = self.downloader_helper.get_services(name_filters=self._downloader)
|
||||
services = DownloaderHelper().get_services(name_filters=self._downloader)
|
||||
if not services:
|
||||
logger.warning("获取下载器实例失败,请检查配置")
|
||||
return None
|
||||
@@ -442,7 +439,7 @@ class SpeedLimiter(_PluginBase):
|
||||
return
|
||||
# 当前播放的总比特率
|
||||
total_bit_rate = 0
|
||||
media_servers = self.mediaserver_helper.get_services()
|
||||
media_servers = MediaServerHelper().get_services()
|
||||
if not media_servers:
|
||||
return
|
||||
# 查询所有媒体服务器状态
|
||||
|
||||
@@ -33,10 +33,9 @@ class SubscribeClear(_PluginBase):
|
||||
# 私有属性
|
||||
_titles = []
|
||||
_episodes = []
|
||||
downloader_helper = None
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self.downloader_helper = DownloaderHelper()
|
||||
|
||||
if config:
|
||||
self._titles = config.get("titles") or []
|
||||
self._episodes = config.get("episodes") or []
|
||||
@@ -48,11 +47,10 @@ class SubscribeClear(_PluginBase):
|
||||
|
||||
def clear_history(self, titles: List[str], episodes: List[str]):
|
||||
logger.info(f"清除下载历史记录:{titles} {episodes}")
|
||||
data = self.get_data()
|
||||
down_oper = DownloadHistoryOper()
|
||||
downloader_history ={}
|
||||
data = self.get_download_data()
|
||||
downloader_history = {}
|
||||
for d in data:
|
||||
if d.title in titles or d.id in episodes:
|
||||
if d.title in titles or d.id in episodes:
|
||||
tmp = downloader_history.get(d.downloader)
|
||||
if not tmp:
|
||||
tmp = []
|
||||
@@ -70,7 +68,7 @@ class SubscribeClear(_PluginBase):
|
||||
history_torrents = {}
|
||||
for t in torrents:
|
||||
logger.info(f"种子信息: {t}")
|
||||
history_torrents[t.hash]=t
|
||||
history_torrents[t.hash] = t
|
||||
for h in history:
|
||||
# 判断当前历史记录的hash是否在未找到的hash列表中
|
||||
if h.download_hash not in history_torrents.keys():
|
||||
@@ -79,43 +77,39 @@ class SubscribeClear(_PluginBase):
|
||||
else:
|
||||
# 从下载器删除种子
|
||||
self.delete_download_history(h, history_torrents[h.download_hash])
|
||||
|
||||
|
||||
|
||||
|
||||
def delete_data(self, history: DownloadHistory):
|
||||
@staticmethod
|
||||
def delete_data(history: DownloadHistory):
|
||||
"""
|
||||
从订阅记录中删除该信息
|
||||
"""
|
||||
try:
|
||||
down_oper = DownloadHistoryOper()
|
||||
down_oper.delete_history(history.id)
|
||||
logger.info(f"删除下载历史记录:{history.id} {history.title} {history.seasons} {history.episodes} {history.download_hash}")
|
||||
logger.info(
|
||||
f"删除下载历史记录:{history.id} {history.title} {history.seasons} {history.episodes} {history.download_hash}")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"删除下载历史记录失败:{str(e)}")
|
||||
return False
|
||||
|
||||
|
||||
|
||||
def delete_download_history(self,history: DownloadHistory, torrent: Any):
|
||||
def delete_download_history(self, history: DownloadHistory, torrent: Any):
|
||||
downloader_name = history.downloader
|
||||
downloader_obj = self.__get_downloader(downloader_name)
|
||||
logger.info(f"删除种子信息:{history.id} {history.title} {history.seasons} {history.episodes} {history.download_hash}")
|
||||
logger.info(
|
||||
f"删除种子信息:{history.id} {history.title} {history.seasons} {history.episodes} {history.download_hash}")
|
||||
hashs = [history.download_hash]
|
||||
# 处理辅种
|
||||
torrents, error = downloader_obj.get_torrents()
|
||||
if error :
|
||||
if error:
|
||||
logger.error(f"获取辅种信息失败: {error}")
|
||||
else:
|
||||
for t in torrents:
|
||||
if t.name == torrent.name and t.size == torrent.size:
|
||||
hashs.append(t.hash)
|
||||
downloader_obj.delete_torrents(delete_file=True,ids=hashs)
|
||||
downloader_obj.delete_torrents(delete_file=True, ids=hashs)
|
||||
self.delete_data(history)
|
||||
|
||||
|
||||
|
||||
def get_state(self) -> bool:
|
||||
return True
|
||||
|
||||
@@ -141,17 +135,17 @@ class SubscribeClear(_PluginBase):
|
||||
|
||||
def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
|
||||
# 获取下载历史数据
|
||||
histories = self.get_data()
|
||||
|
||||
histories = self.get_download_data()
|
||||
|
||||
# 构造标题和剧集列表
|
||||
titles = []
|
||||
episode_options = []
|
||||
|
||||
|
||||
for history in histories:
|
||||
# 标题列表
|
||||
if history.title not in titles:
|
||||
titles.append(history.title)
|
||||
|
||||
|
||||
# 剧集列表
|
||||
episode_str = history.title
|
||||
if history.seasons:
|
||||
@@ -160,7 +154,6 @@ class SubscribeClear(_PluginBase):
|
||||
episode_str += f" {history.episodes}"
|
||||
episode_options.append({"title": episode_str, "value": history.id})
|
||||
|
||||
|
||||
# 将列表转换为选择框选项格式
|
||||
title_options = [{"title": t, "value": t} for t in titles]
|
||||
|
||||
@@ -189,9 +182,9 @@ class SubscribeClear(_PluginBase):
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
episode_select = {
|
||||
'component': 'VRow',
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
@@ -220,15 +213,16 @@ class SubscribeClear(_PluginBase):
|
||||
'content': [
|
||||
title_select,
|
||||
episode_select
|
||||
]
|
||||
]
|
||||
}
|
||||
], {
|
||||
"titles": [],
|
||||
"episodes": []
|
||||
}
|
||||
|
||||
def get_data(self) -> List[DownloadHistory]:
|
||||
down_oper = DownloadHistoryOper()
|
||||
@staticmethod
|
||||
def get_download_data() -> List[DownloadHistory]:
|
||||
down_oper = DownloadHistoryOper()
|
||||
downs = []
|
||||
page = 1
|
||||
while True:
|
||||
@@ -241,7 +235,7 @@ class SubscribeClear(_PluginBase):
|
||||
|
||||
def get_page(self) -> List[dict]:
|
||||
items = []
|
||||
for down in self.get_data():
|
||||
for down in self.get_download_data():
|
||||
items.append({
|
||||
'component': 'tr',
|
||||
'content': [
|
||||
@@ -255,7 +249,7 @@ class SubscribeClear(_PluginBase):
|
||||
},
|
||||
{
|
||||
'component': 'td',
|
||||
'text':down.seasons + " " + down.episodes
|
||||
'text': down.seasons + " " + down.episodes
|
||||
},
|
||||
{
|
||||
'component': 'td',
|
||||
@@ -300,7 +294,7 @@ class SubscribeClear(_PluginBase):
|
||||
},
|
||||
'text': '名称'
|
||||
},
|
||||
{
|
||||
{
|
||||
'component': 'th',
|
||||
'props': {
|
||||
'class': 'text-start ps-4'
|
||||
@@ -330,14 +324,6 @@ class SubscribeClear(_PluginBase):
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
@staticmethod
|
||||
def get_api(self) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
注册API
|
||||
"""
|
||||
pass
|
||||
|
||||
def stop_service(self):
|
||||
"""
|
||||
退出插件
|
||||
@@ -349,7 +335,7 @@ class SubscribeClear(_PluginBase):
|
||||
"""
|
||||
服务信息
|
||||
"""
|
||||
services = self.downloader_helper.get_services(type_filter="qbittorrent")
|
||||
services = DownloaderHelper().get_services(type_filter="qbittorrent")
|
||||
if not services:
|
||||
logger.warning("获取下载器实例失败,请检查配置")
|
||||
return None
|
||||
|
||||
@@ -40,11 +40,9 @@ class SyncCookieCloud(_PluginBase):
|
||||
_enabled: bool = False
|
||||
_onlyonce: bool = False
|
||||
_cron: str = ""
|
||||
siteoper = None
|
||||
_scheduler: Optional[BackgroundScheduler] = None
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self.siteoper = SiteOper()
|
||||
|
||||
# 停止现有任务
|
||||
self.stop_service()
|
||||
@@ -92,7 +90,7 @@ class SyncCookieCloud(_PluginBase):
|
||||
同步站点cookie到cookiecloud
|
||||
"""
|
||||
# 获取所有站点
|
||||
sites = self.siteoper.list_order_by_pri()
|
||||
sites = SiteOper().list_order_by_pri()
|
||||
if not sites:
|
||||
return
|
||||
|
||||
|
||||
@@ -1,26 +1,24 @@
|
||||
from typing import Any, List, Dict, Tuple, Optional
|
||||
from datetime import datetime, timedelta
|
||||
import ipaddress
|
||||
import socket
|
||||
import base64
|
||||
import json
|
||||
import asyncio
|
||||
import base64
|
||||
import ipaddress
|
||||
import json
|
||||
import socket
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Any, List, Dict, Tuple, Optional
|
||||
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
from fastapi import Response
|
||||
from apscheduler.triggers.cron import CronTrigger
|
||||
import pytz
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
from apscheduler.triggers.cron import CronTrigger
|
||||
from fastapi import Response
|
||||
|
||||
from app.chain.site import SiteChain
|
||||
from app.core.config import settings
|
||||
from app.core.event import EventManager, eventmanager
|
||||
from app.core.event import eventmanager
|
||||
from app.db.site_oper import SiteOper
|
||||
from app.helper.sites import SitesHelper
|
||||
from app.log import logger
|
||||
from app.plugins import _PluginBase
|
||||
from app.utils.http import RequestUtils
|
||||
from app.schemas.types import EventType, NotificationType
|
||||
from app.plugins.tobypasstrackers.dns_helper import DnsHelper
|
||||
from app.schemas.types import EventType, NotificationType
|
||||
from app.utils.http import RequestUtils
|
||||
|
||||
|
||||
class ToBypassTrackers(_PluginBase):
|
||||
@@ -43,13 +41,6 @@ class ToBypassTrackers(_PluginBase):
|
||||
# 可使用的用户级别
|
||||
auth_level = 2
|
||||
|
||||
# 私有属性
|
||||
sites: SitesHelper = None
|
||||
site_chain: SiteChain = None
|
||||
siteoper: SiteOper = None
|
||||
|
||||
# 事件管理器
|
||||
event: EventManager = None
|
||||
# 定时器
|
||||
_scheduler: Optional[BackgroundScheduler] = None
|
||||
# 开关
|
||||
@@ -67,13 +58,12 @@ class ToBypassTrackers(_PluginBase):
|
||||
_dns_input: str = ""
|
||||
ipv6_txt: str = ""
|
||||
ipv4_txt: str = ""
|
||||
trackers: Dict[str, List[str]] = {}
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self.sites = SitesHelper()
|
||||
# self.event = EventManager()
|
||||
self.site_chain = SiteChain()
|
||||
|
||||
self.stop_service()
|
||||
self.siteoper = SiteOper()
|
||||
|
||||
self.trackers = {}
|
||||
self.ipv6_txt = self.get_data("ipv6_txt") if self.get_data("ipv6_txt") else ""
|
||||
self.ipv4_txt = self.get_data("ipv4_txt") if self.get_data("ipv4_txt") else ""
|
||||
@@ -98,7 +88,7 @@ class ToBypassTrackers(_PluginBase):
|
||||
self._china_ipv6_route = config.get("china_ipv6_route")
|
||||
self._china_ip_route = config.get("china_ip_route")
|
||||
# 过滤掉已删除的站点
|
||||
all_sites = [site.id for site in self.siteoper.list_order_by_pri()]
|
||||
all_sites = [site.id for site in SiteOper().list_order_by_pri()]
|
||||
self._bypassed_sites = [site_id for site_id in all_sites if site_id in self._bypassed_sites]
|
||||
self.__update_config()
|
||||
if self._enabled or self._onlyonce:
|
||||
@@ -160,8 +150,7 @@ class ToBypassTrackers(_PluginBase):
|
||||
|
||||
def get_form(self) -> Tuple[List[dict], Dict[str, Any]]:
|
||||
site_options = ([{"title": site.name, "value": site.id}
|
||||
for site in self.siteoper.list_order_by_pri()]
|
||||
)
|
||||
for site in SiteOper().list_order_by_pri()])
|
||||
return [
|
||||
{
|
||||
'component': 'VForm',
|
||||
@@ -629,7 +618,7 @@ class ToBypassTrackers(_PluginBase):
|
||||
chnroute_lists = res.text[:-1].split('\n')
|
||||
for ipr in chnroute_lists:
|
||||
ip_list.append(ipr)
|
||||
do_sites = {site.domain: site.name for site in self.siteoper.list_order_by_pri() if
|
||||
do_sites = {site.domain: site.name for site in SiteOper().list_order_by_pri() if
|
||||
site.id in self._bypassed_sites}
|
||||
domain_name_map = {}
|
||||
for site in do_sites:
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import re
|
||||
from typing import Optional, List, Callable
|
||||
|
||||
import aioquic
|
||||
import dns.asyncresolver
|
||||
import dns.resolver
|
||||
|
||||
@@ -74,7 +73,6 @@ class DnsHelper:
|
||||
使用 UDP 异步方式解析域名
|
||||
|
||||
:param domain: 域名
|
||||
:param port: DNS服务器端口(默认53)
|
||||
:param dns_type: 记录类型,如 A、AAAA
|
||||
:return: IP地址列表 或 None
|
||||
"""
|
||||
|
||||
@@ -39,7 +39,6 @@ class TorrentRemover(_PluginBase):
|
||||
auth_level = 2
|
||||
|
||||
# 私有属性
|
||||
downloader_helper = None
|
||||
_event = threading.Event()
|
||||
_scheduler = None
|
||||
_enabled = False
|
||||
@@ -63,7 +62,7 @@ class TorrentRemover(_PluginBase):
|
||||
_torrentcategorys = None
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self.downloader_helper = DownloaderHelper()
|
||||
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
self._onlyonce = config.get("onlyonce")
|
||||
@@ -257,7 +256,7 @@ class TorrentRemover(_PluginBase):
|
||||
'model': 'downloaders',
|
||||
'label': '下载器',
|
||||
'items': [{"title": config.name, "value": config.name}
|
||||
for config in self.downloader_helper.get_configs().values()]
|
||||
for config in DownloaderHelper().get_configs().values()]
|
||||
}
|
||||
}
|
||||
]
|
||||
@@ -593,7 +592,7 @@ class TorrentRemover(_PluginBase):
|
||||
logger.warning("尚未配置下载器,请检查配置")
|
||||
return None
|
||||
|
||||
services = self.downloader_helper.get_services(name_filters=self._downloaders)
|
||||
services = DownloaderHelper().get_services(name_filters=self._downloaders)
|
||||
if not services:
|
||||
logger.warning("获取下载器实例失败,请检查配置")
|
||||
return None
|
||||
|
||||
@@ -12,7 +12,6 @@ from qbittorrentapi import TorrentDictionary
|
||||
|
||||
from app.core.config import settings
|
||||
from app.helper.downloader import DownloaderHelper
|
||||
from app.helper.torrent import TorrentHelper
|
||||
from app.log import logger
|
||||
from app.modules.qbittorrent import Qbittorrent
|
||||
from app.modules.transmission import Transmission
|
||||
@@ -43,8 +42,7 @@ class TorrentTransfer(_PluginBase):
|
||||
|
||||
# 私有属性
|
||||
_scheduler = None
|
||||
torrent_helper = None
|
||||
downloader_helper = None
|
||||
|
||||
# 开关
|
||||
_enabled = False
|
||||
_cron = None
|
||||
@@ -76,8 +74,7 @@ class TorrentTransfer(_PluginBase):
|
||||
_torrent_tags = []
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self.torrent_helper = TorrentHelper()
|
||||
self.downloader_helper = DownloaderHelper()
|
||||
|
||||
# 读取配置
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
@@ -136,7 +133,8 @@ class TorrentTransfer(_PluginBase):
|
||||
self._scheduler.print_jobs()
|
||||
self._scheduler.start()
|
||||
|
||||
def service_info(self, name: str) -> Optional[ServiceInfo]:
|
||||
@staticmethod
|
||||
def service_info(name: str) -> Optional[ServiceInfo]:
|
||||
"""
|
||||
服务信息
|
||||
"""
|
||||
@@ -144,7 +142,7 @@ class TorrentTransfer(_PluginBase):
|
||||
logger.warning("尚未配置下载器,请检查配置")
|
||||
return None
|
||||
|
||||
service = self.downloader_helper.get_service(name)
|
||||
service = DownloaderHelper().get_service(name)
|
||||
if not service or not service.instance:
|
||||
logger.warning(f"获取下载器 {name} 实例失败,请检查配置")
|
||||
return None
|
||||
@@ -197,7 +195,7 @@ class TorrentTransfer(_PluginBase):
|
||||
拼装插件配置页面,需要返回两块数据:1、页面配置;2、数据结构
|
||||
"""
|
||||
downloader_options = [{"title": config.name, "value": config.name}
|
||||
for config in self.downloader_helper.get_configs().values()]
|
||||
for config in DownloaderHelper().get_configs().values()]
|
||||
return [
|
||||
{
|
||||
'component': 'VForm',
|
||||
@@ -622,7 +620,8 @@ class TorrentTransfer(_PluginBase):
|
||||
return
|
||||
downloader = service.instance
|
||||
from_service = self.service_info(self._fromdownloader)
|
||||
if self.downloader_helper.is_downloader("qbittorrent", service=service):
|
||||
downloader_helper = DownloaderHelper()
|
||||
if downloader_helper.is_downloader("qbittorrent", service=service):
|
||||
# 生成随机Tag
|
||||
tag = StringUtils.generate_random_str(10)
|
||||
if self._remainoldtag:
|
||||
@@ -651,7 +650,7 @@ class TorrentTransfer(_PluginBase):
|
||||
logger.error(f"{downloader} 下载任务添加成功,但获取任务信息失败!")
|
||||
return None
|
||||
return torrent_hash
|
||||
elif self.downloader_helper.is_downloader("transmission", service=service):
|
||||
elif downloader_helper.is_downloader("transmission", service=service):
|
||||
# 添加任务
|
||||
if self._remainoldtag:
|
||||
# 获取种子标签
|
||||
@@ -780,6 +779,7 @@ class TorrentTransfer(_PluginBase):
|
||||
# 删除重复数
|
||||
del_dup = 0
|
||||
|
||||
downloader_helper = DownloaderHelper()
|
||||
for torrent_item in trans_torrents:
|
||||
# 检查种子文件是否存在
|
||||
torrent_file = Path(self._fromtorrentpath) / f"{torrent_item.get('hash')}.torrent"
|
||||
@@ -795,7 +795,7 @@ class TorrentTransfer(_PluginBase):
|
||||
# 删除重复的源种子,不能删除文件!
|
||||
if self._deleteduplicate:
|
||||
logger.info(f"删除重复的源下载器任务(不含文件):{torrent_item.get('hash')} ...")
|
||||
to_downloader.delete_torrents(delete_file=False, ids=[torrent_item.get('hash')])
|
||||
from_downloader.delete_torrents(delete_file=False, ids=[torrent_item.get('hash')])
|
||||
del_dup += 1
|
||||
else:
|
||||
logger.info(f"{torrent_item.get('hash')} 已在目的下载器中,跳过 ...")
|
||||
@@ -814,7 +814,7 @@ class TorrentTransfer(_PluginBase):
|
||||
continue
|
||||
|
||||
# 如果源下载器是QB检查是否有Tracker,没有的话额外获取
|
||||
if self.downloader_helper.is_downloader("qbittorrent", service=from_service):
|
||||
if downloader_helper.is_downloader("qbittorrent", service=from_service):
|
||||
# 读取种子内容、解析种子文件
|
||||
content = torrent_file.read_bytes()
|
||||
if not content:
|
||||
@@ -878,7 +878,7 @@ class TorrentTransfer(_PluginBase):
|
||||
logger.info(f"成功添加转移做种任务,种子文件:{torrent_file}")
|
||||
|
||||
# TR会自动校验,QB需要手动校验
|
||||
if self.downloader_helper.is_downloader("qbittorrent", service=to_service):
|
||||
if downloader_helper.is_downloader("qbittorrent", service=to_service):
|
||||
if self._skipverify:
|
||||
if self._autostart:
|
||||
logger.info(f"{download_id} 跳过校验,开启自动开始,注意观察种子的完整性")
|
||||
|
||||
@@ -13,76 +13,117 @@
|
||||
- 支持批量翻译以提高效率
|
||||
- 支持使用滑动窗口配置上下文提高翻译连贯性
|
||||
- 支持多种字幕提取语言偏好设置
|
||||
- 支持监听媒体入库事件自动执行字幕生成
|
||||
- 支持手动触发字幕生成任务
|
||||
- 支持任务队列机制,确保并发安全
|
||||
- 支持任务状态列表展示(等待中 / 进行中 / 已完成 / 失败)
|
||||
|
||||
## 配置说明
|
||||
|
||||
### 基础配置
|
||||
|
||||
| 配置项 | 说明 | 默认值 |
|
||||
|--------|------|--------|
|
||||
| 立即运行一次 | 保存配置后是否立即执行一次任务 | 否 |
|
||||
| 本地字幕提取策略 | 设置字幕提取的优先级策略 | 优先原音字幕 |
|
||||
| 翻译为中文 | 是否在需要时使用大模型将字幕翻译成中文 | 是 |
|
||||
| 发送通知 | 是否发送任务执行通知 | 否 |
|
||||
| 配置项 | 说明 | 默认值 |
|
||||
|----------|------------------------|--------|
|
||||
| 启用插件 | 是否启用插件 | 否 |
|
||||
| 清除历史记录 | 清除已完成的任务记录(完成、跳过或失败) | 否 |
|
||||
| 媒体入库自动执行 | 监听到媒体入库事件后自动执行字幕生成 | 是 |
|
||||
| 手动执行一次 | 保存配置后立即执行一次任务 | 否 |
|
||||
| 发送通知 | 是否发送任务执行通知 | 否 |
|
||||
| 文件大小(MB) | 最小处理的视频文件大小,小于该值的文件不处理 | 10 |
|
||||
| 字幕源语言偏好 | 设置字幕提取的优先级策略 | 优先原音字幕 |
|
||||
| 翻译为中文 | 是否使用大模型将字幕翻译成中文 | 是 |
|
||||
|
||||
### ASR配置
|
||||
### ASR配置(语音识别)
|
||||
|
||||
| 配置项 | 说明 | 默认值 |
|
||||
|--------|------|--------|
|
||||
| 允许从音轨提取字幕 | 是否允许从视频音轨中提取字幕 | 是 |
|
||||
| ASR引擎 | 语音识别引擎 | faster-whisper |
|
||||
| 模型 | 使用的模型大小 | base |
|
||||
| 使用代理下载模型 | 是否使用代理下载模型 | 是 |
|
||||
| 配置项 | 说明 | 默认值 |
|
||||
|---------------------|------------------|------|
|
||||
| 允许从音轨提取字幕 | 是否允许从视频音轨中提取字幕 | 是 |
|
||||
| faster-whisper 模型选择 | 使用的 Whisper 模型大小 | base |
|
||||
| 使用代理下载模型 | 是否使用代理下载模型 | 是 |
|
||||
|
||||
### 翻译配置
|
||||
### 翻译接口配置
|
||||
|
||||
| 配置项 | 说明 | 默认值 |
|
||||
|--------|------|--------|
|
||||
| 启用批量翻译 | 是否启用批量翻译以提高效率 | 是 |
|
||||
| 每批翻译行数 | 每批处理的字幕行数 | 20 |
|
||||
| 上下文窗口大小 | 翻译时考虑的上下文行数 | 5 |
|
||||
| llm请求重试次数 | 翻译失败时的重试次数 | 3 |
|
||||
> 可选使用 ChatGPT 插件配置 或 自定义 OpenAI 接口参数
|
||||
|
||||
### 其他配置
|
||||
| 配置项 | 说明 | 默认值 |
|
||||
|------------------|--------------------------------------|-------------------------|
|
||||
| 复用ChatGPT插件配置 | 是否直接使用系统中已配置的 ChatGPT 插件参数 | 否 |
|
||||
| 使用代理服务器 | 是否通过 MP 配置的代理访问 OpenAI 接口 | 否 |
|
||||
| 兼容模式 | 是否启用兼容模式(绕过 `/v1` 路径拼接) | 否 |
|
||||
| OpenAI API URL | 自定义 OpenAI 接口地址 | https://api.openai.com |
|
||||
| API 密钥 | OpenAI 的 API Key | 无 |
|
||||
| 自定义模型 | 使用的 LLM 模型名称(如 gpt-3.5-turbo) | gpt-3.5-turbo |
|
||||
|
||||
| 配置项 | 说明 | 默认值 |
|
||||
|--------|------|--------|
|
||||
| 媒体路径 | 要处理的媒体文件或文件夹绝对路径,每行一个 | 空 |
|
||||
| 文件大小(MB) | 最小处理文件大小 | 10 |
|
||||
### 翻译参数配置
|
||||
|
||||
| 配置项 | 说明 | 默认值 |
|
||||
|---------------|------------------------------------|-----|
|
||||
| 启用批量翻译 | 是否启用批量翻译以提高效率 | 是 |
|
||||
| 每批翻译行数 | 每批处理的字幕行数 | 10 |
|
||||
| 上下文窗口大小 | 翻译时考虑的上下文行数 | 5 |
|
||||
| LLM请求重试次数 | 翻译失败时的重试次数 | 3 |
|
||||
| 翻译英文时合并整句 | 对英文字幕先合并单词再翻译,提升翻译质量 | 否 |
|
||||
|
||||
### 手动运行配置
|
||||
|
||||
| 配置项 | 说明 | 默认值 |
|
||||
|------|-----------------------|-----|
|
||||
| 媒体路径 | 要处理的媒体文件或文件夹绝对路径,每行一个 | 空 |
|
||||
|
||||
## 字幕提取策略说明
|
||||
|
||||
字幕提取优先级:外挂字幕 > 内嵌字幕 > 音轨识别
|
||||
|
||||
字幕提取策略的选择主要取决于视频源语言和大模型的翻译能力。对于包含多语言字幕的非英语视频,建议根据以下原则选择策略:
|
||||
|
||||
1. 仅英文字幕
|
||||
- 仅使用英文字幕作为翻译源
|
||||
- 当视频无英文字幕时,使用ASR提取
|
||||
- 适用于大模型仅支持中英互译的场景
|
||||
- 仅使用英文字幕作为翻译源
|
||||
- 当视频无英文字幕时,使用ASR提取
|
||||
- 适用于大模型仅支持中英互译的场景
|
||||
|
||||
2. 优先英文字幕
|
||||
- 优先使用英文字幕作为翻译源
|
||||
- 无英文字幕时,使用其他语言字幕
|
||||
- 当所有字幕都不存在时,使用ASR提取
|
||||
- 适用于大模型在英译中任务上表现更好的场景
|
||||
- 优先使用英文字幕作为翻译源
|
||||
- 无英文字幕时,使用其他语言字幕
|
||||
- 当所有字幕都不存在时,使用ASR提取
|
||||
- 适用于大模型在英译中任务上表现更好的场景
|
||||
|
||||
3. 优先原音字幕
|
||||
- 优先使用视频原始语言的字幕
|
||||
- 无原音字幕时,使用英文字幕
|
||||
- 当所有字幕都不存在时,使用ASR提取
|
||||
- 适用于大模型支持多语言翻译且翻译质量较好的场景
|
||||
- 优先使用视频原始语言的字幕
|
||||
- 无原音字幕时,使用英文字幕
|
||||
- 当所有字幕都不存在时,使用ASR提取
|
||||
- 适用于大模型支持多语言翻译且翻译质量较好的场景
|
||||
|
||||
## 翻译方式说明
|
||||
|
||||
插件支持两种方式调用大模型进行翻译:
|
||||
|
||||
1. **复用 ChatGPT 插件配置**
|
||||
- 开启“复用ChatGPT插件配置”后,自动使用系统中维护的 ChatGPT 插件参数
|
||||
- 包括 API Key、API URL、是否使用代理等
|
||||
- 适合已有 ChatGPT 插件的用户快速部署
|
||||
|
||||
2. **自定义 OpenAI 接口参数**
|
||||
- 关闭“复用ChatGPT插件配置”后,可独立配置:
|
||||
- API 地址(支持反代)
|
||||
- API Key
|
||||
- 使用的模型
|
||||
- 是否使用代理
|
||||
- 是否启用兼容模式(避免 `/v1` 路径冲突)
|
||||
|
||||
|
||||
---
|
||||
|
||||
## 注意事项
|
||||
|
||||
1. 翻译功能依赖OpenAI插件配置,使用前请确保已正确配置
|
||||
2. 首次使用音轨识别功能时,会自动从HuggingFace下载模型。开启"使用代理下载模型"选项会使用MP配置的代理。
|
||||
3. 媒体路径支持单个文件或文件夹的绝对路径。选择文件夹时会递归处理其中的所有视频文件,外挂字幕将从媒体文件同级目录中查找
|
||||
4. 批量翻译通过一次处理多行字幕来减少API调用次数,提高效率。如果翻译结果与原文行数不匹配,系统会自动降级为逐行翻译
|
||||
5. 上下文窗口大小和批量翻译行数需要根据大模型的推理能力来调整。当模型能力不足时,过大的批量或上下文窗口可能会影响翻译质量
|
||||
1. 翻译功能依赖大模型配置,使用前请确保已正确配置 OpenAI Key 或 ChatGPT 插件。
|
||||
2. 首次使用音轨识别功能时,会自动从 HuggingFace 下载模型。开启"使用代理下载模型"选项会使用 MP 配置的代理。
|
||||
3. 媒体路径支持单个文件或文件夹的绝对路径。选择文件夹时会递归处理其中的所有视频文件,外挂字幕将从媒体文件同级目录中查找。
|
||||
4. 批量翻译通过一次处理多行字幕来减少 API 调用次数,提高效率。如果翻译结果与原文行数不匹配,系统会自动降级为逐行翻译。
|
||||
5. 上下文窗口大小和批量翻译行数需要根据大模型的推理能力来调整。当模型能力不足时,过大的批量或上下文窗口可能会影响翻译质量。
|
||||
6. 翻译后的中文字幕会打上“机翻”标签。
|
||||
7. 插件运行时会启动一个后台线程用于消费任务队列,插件关闭时会清空队列并终止当前任务。
|
||||
|
||||
## todo
|
||||
- 监听媒体入库事件自动调用字幕生成
|
||||
- 任务完成后调用媒体库刷新
|
||||
- 历史任务管理与展示
|
||||
|
||||
- 工作流/API接口
|
||||
- 任务完成后调用媒体库刷新
|
||||
File diff suppressed because it is too large
Load Diff
@@ -12,10 +12,11 @@ class OpenAi:
|
||||
_api_url: str = None
|
||||
_model: str = "gpt-3.5-turbo"
|
||||
|
||||
def __init__(self, api_key: str = None, api_url: str = None, proxy: dict = None, model: str = None):
|
||||
def __init__(self, api_key: str = None, api_url: str = None, proxy: dict = None, model: str = None,
|
||||
compatible: bool = False):
|
||||
self._api_key = api_key
|
||||
self._api_url = api_url
|
||||
openai.api_base = self._api_url + "/v1"
|
||||
openai.api_base = self._api_url if compatible else self._api_url + "/v1"
|
||||
openai.api_key = self._api_key
|
||||
if proxy and proxy.get("https"):
|
||||
openai.proxy = proxy.get("https")
|
||||
@@ -37,7 +37,7 @@ class BangumiColl(_PluginBase):
|
||||
# 插件图标
|
||||
plugin_icon = "bangumi_b.png"
|
||||
# 插件版本
|
||||
plugin_version = "1.5.6"
|
||||
plugin_version = "1.5.7"
|
||||
# 插件作者
|
||||
plugin_author = "Attente"
|
||||
# 作者主页
|
||||
@@ -67,12 +67,6 @@ class BangumiColl(_PluginBase):
|
||||
_group_select_order: list = []
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self.downloadchain = DownloadChain()
|
||||
self.siteoper = SiteOper()
|
||||
self.subscribechain = SubscribeChain()
|
||||
self.subscribehelper = SubscribeHelper()
|
||||
self.subscribeoper = SubscribeOper()
|
||||
self.tmdbapi = TmdbApi()
|
||||
|
||||
# 停止现有任务
|
||||
self.stop_service()
|
||||
@@ -86,21 +80,21 @@ class BangumiColl(_PluginBase):
|
||||
if config:
|
||||
# 遍历配置中的键并设置相应的属性
|
||||
for key in (
|
||||
"enabled",
|
||||
"total_change",
|
||||
"cron",
|
||||
"notify",
|
||||
"onlyonce",
|
||||
"uid",
|
||||
"collection_type",
|
||||
"save_path",
|
||||
"sites",
|
||||
"match_groups",
|
||||
"group_select_order",
|
||||
"enabled",
|
||||
"total_change",
|
||||
"cron",
|
||||
"notify",
|
||||
"onlyonce",
|
||||
"uid",
|
||||
"collection_type",
|
||||
"save_path",
|
||||
"sites",
|
||||
"match_groups",
|
||||
"group_select_order",
|
||||
):
|
||||
setattr(self, f"_{key}", config.get(key, getattr(self, f"_{key}")))
|
||||
# 获得所有站点
|
||||
site_ids = {site.id for site in self.siteoper.list_order_by_pri()}
|
||||
site_ids = {site.id for site in SiteOper().list_order_by_pri()}
|
||||
# 过滤已删除的站点
|
||||
self._sites = [site_id for site_id in self._sites if site_id in site_ids]
|
||||
# 更新配置
|
||||
@@ -113,8 +107,7 @@ class BangumiColl(_PluginBase):
|
||||
self._scheduler.add_job(
|
||||
func=self.bangumi_coll,
|
||||
trigger='date',
|
||||
run_date=datetime.datetime.now(tz=pytz.timezone(settings.TZ))
|
||||
+ datetime.timedelta(seconds=3),
|
||||
run_date=datetime.datetime.now(tz=pytz.timezone(settings.TZ)) + datetime.timedelta(seconds=3),
|
||||
)
|
||||
self._scheduler.start()
|
||||
|
||||
@@ -141,14 +134,335 @@ class BangumiColl(_PluginBase):
|
||||
)
|
||||
|
||||
def get_form(self):
|
||||
from .page_components import form
|
||||
|
||||
# 列出所有站点
|
||||
sites_options = [
|
||||
{"title": site.name, "value": site.id}
|
||||
for site in self.siteoper.list_order_by_pri()
|
||||
for site in SiteOper().list_order_by_pri()
|
||||
]
|
||||
return form(sites_options, self._is_v2)
|
||||
return [
|
||||
{
|
||||
'component': 'VForm',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {'cols': 12, 'md': 3},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'enabled',
|
||||
'label': '启用插件',
|
||||
},
|
||||
}
|
||||
],
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {'cols': 12, 'md': 3},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'notify',
|
||||
'label': '自动取消订阅并通知',
|
||||
},
|
||||
}
|
||||
],
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {'cols': 12, 'md': 3},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'total_change',
|
||||
'label': '固定总集数',
|
||||
},
|
||||
}
|
||||
],
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {'cols': 12, 'md': 3},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'onlyonce',
|
||||
'label': '立即运行一次',
|
||||
},
|
||||
}
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {'cols': 8, 'md': 4},
|
||||
'content': [
|
||||
{
|
||||
# 'component': 'VTextField', # 组件替换为VCronField
|
||||
'component': 'VCronField',
|
||||
'props': {
|
||||
'model': 'cron',
|
||||
'label': '执行周期',
|
||||
'placeholder': '5位cron表达式,留空自动',
|
||||
},
|
||||
}
|
||||
],
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {'cols': 8, 'md': 4},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'uid',
|
||||
'label': 'UID/用户名',
|
||||
'placeholder': '设置了用户名填写用户名,否则填写UID',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {'cols': 8, 'md': 4},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSelect',
|
||||
'props': {
|
||||
'model': 'collection_type',
|
||||
'label': '收藏类型',
|
||||
'chips': True,
|
||||
'multiple': True,
|
||||
'items': [
|
||||
{'title': '在看', 'value': 3},
|
||||
{'title': '想看', 'value': 1},
|
||||
],
|
||||
},
|
||||
}
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VAlert',
|
||||
'props': {
|
||||
'type': 'success',
|
||||
'variant': 'tonal',
|
||||
'text': True
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'div',
|
||||
'props': {
|
||||
'innerHTML': '提示:<strong>剧集组优先级</strong>越靠前优先级越高。'}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {'cols': 8, 'md': 4},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'match_groups',
|
||||
'disabled': not self._is_v2,
|
||||
'label': '剧集组填充(实验性)',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {'cols': 8},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSelect',
|
||||
'props': {
|
||||
'model': 'group_select_order',
|
||||
'label': '剧集组优先级',
|
||||
'disabled': not self._is_v2,
|
||||
'chips': True,
|
||||
'multiple': True,
|
||||
'clearable': True,
|
||||
'items': [
|
||||
{"title": "初始播出日期", "value": 1},
|
||||
{"title": "绝对", "value": 2},
|
||||
{"title": "DVD", "value": 3},
|
||||
{"title": "数字", "value": 4},
|
||||
{"title": "故事线", "value": 5},
|
||||
{"title": "制片", "value": 6},
|
||||
{"title": "电视", "value": 7},
|
||||
],
|
||||
},
|
||||
}
|
||||
]
|
||||
},
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {'cols': 12, 'md': 6},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'save_path',
|
||||
'label': '保存目录',
|
||||
'placeholder': '留空自动',
|
||||
},
|
||||
}
|
||||
],
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {'cols': 12, 'md': 6},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSelect',
|
||||
'props': {
|
||||
'model': 'sites',
|
||||
'label': '选择站点',
|
||||
'chips': True,
|
||||
'multiple': True,
|
||||
'clearable': True,
|
||||
'items': sites_options,
|
||||
},
|
||||
}
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VAlert',
|
||||
'props': {
|
||||
'type': 'info',
|
||||
'variant': 'tonal',
|
||||
'text': True
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'div',
|
||||
'props': {
|
||||
'innerHTML': '注意: 该插件仅会将<strong>公开</strong>的收藏添加到<strong>订阅</strong>。'}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VAlert',
|
||||
'props': {
|
||||
'type': 'info',
|
||||
'variant': 'tonal',
|
||||
'text': True
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'div',
|
||||
'props': {
|
||||
'innerHTML': '注意: 开启<strong>自动取消订阅并通知</strong>后,已添加的订阅在下一次执行时若不在已选择的<strong>收藏类型</strong>中,将会被取消订阅。'}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VAlert',
|
||||
'props': {
|
||||
'type': 'info',
|
||||
'variant': 'tonal',
|
||||
'text': True
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'div',
|
||||
'props': {
|
||||
'innerHTML': '注意: 开启<strong>固定总集数</strong>后,从<a href="https://bangumi.github.io/api/#/%E7%AB%A0%E8%8A%82/getEpisodes" target="_blank"><u>Bangumi API</u></a>获取到总集数将不会因<strong>订阅元数据更新</strong>改变。'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
], {
|
||||
"enabled": False,
|
||||
"total_change": False,
|
||||
"notify": False,
|
||||
"onlyonce": False,
|
||||
"cron": "",
|
||||
"uid": "",
|
||||
"collection_type": [3],
|
||||
"save_path": "",
|
||||
"sites": [],
|
||||
"match_groups": False,
|
||||
"group_select_order": [],
|
||||
}
|
||||
|
||||
def get_service(self) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
@@ -194,11 +508,11 @@ class BangumiColl(_PluginBase):
|
||||
def get_command(self):
|
||||
return [
|
||||
{
|
||||
"cmd": "/bangumi_coll",
|
||||
"event": EventType.PluginAction,
|
||||
"desc": "Bangumi收藏订阅",
|
||||
"category": "",
|
||||
"data": {"action": "bangumi_coll"}
|
||||
"cmd": "/bangumi_coll",
|
||||
"event": EventType.PluginAction,
|
||||
"desc": "Bangumi收藏订阅",
|
||||
"category": "",
|
||||
"data": {"action": "bangumi_coll"}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -218,21 +532,21 @@ class BangumiColl(_PluginBase):
|
||||
return
|
||||
|
||||
self.post_message(channel=event_data.get("channel"),
|
||||
title=f"开始添加用户: {self._uid} 的收藏 ...",
|
||||
userid=event_data.get("user"))
|
||||
title=f"开始添加用户: {self._uid} 的收藏 ...",
|
||||
userid=event_data.get("user"))
|
||||
# 运行任务
|
||||
msg = self.bangumi_coll()
|
||||
|
||||
self.post_message(channel=event_data.get("channel"),
|
||||
title="添加完成" if not msg else msg,
|
||||
userid=event_data.get("user"))
|
||||
title="添加完成" if not msg else msg,
|
||||
userid=event_data.get("user"))
|
||||
|
||||
def bangumi_coll(self) -> str:
|
||||
"""订阅Bangumi用户收藏"""
|
||||
if not self._uid:
|
||||
logger.error("未设置UID")
|
||||
return "未设置UID"
|
||||
|
||||
msg = ""
|
||||
try:
|
||||
res = self.get_bgm_res(addr="UserCollections", id=self._uid)
|
||||
items = self.parse_collection_items(res)
|
||||
@@ -263,9 +577,8 @@ class BangumiColl(_PluginBase):
|
||||
"tags": [tag.get('name') for tag in item['subject'].get('tags', [{}])]
|
||||
}
|
||||
for item in data
|
||||
if item.get("type") in self._collection_type and item['subject'].get('date')\
|
||||
# 只添加未来30天内放送的条目
|
||||
and self.is_date_in_range(item['subject'].get('date'), threshold_days=30)[0]
|
||||
if item.get("type") in self._collection_type and item['subject'].get('date') and self.is_date_in_range(item['subject'].get('date'), threshold_days=30)[0]
|
||||
}
|
||||
|
||||
def manage_subscriptions(self, items: Dict[int, Dict[str, Any]]):
|
||||
@@ -273,7 +586,7 @@ class BangumiColl(_PluginBase):
|
||||
# 查询订阅
|
||||
db_sub = {
|
||||
i.bangumiid: i.id
|
||||
for i in self.subscribechain.subscribeoper.list()
|
||||
for i in SubscribeOper().list()
|
||||
if i.bangumiid
|
||||
}
|
||||
# bangumi 条目
|
||||
@@ -308,6 +621,7 @@ class BangumiColl(_PluginBase):
|
||||
"""添加订阅"""
|
||||
|
||||
fail_items = {}
|
||||
subscribeoper = SubscribeOper()
|
||||
for subid, item in items.items():
|
||||
if item.get("name_cn"):
|
||||
meta = MetaInfo(item.get("name_cn"))
|
||||
@@ -332,9 +646,9 @@ class BangumiColl(_PluginBase):
|
||||
meta.en_name = meta.title
|
||||
|
||||
if (mediainfo := self.chain.recognize_media(
|
||||
meta=meta,
|
||||
mtype=mtype,
|
||||
cache=False
|
||||
meta=meta,
|
||||
mtype=mtype,
|
||||
cache=False
|
||||
)) or any(
|
||||
getattr(meta, attr) == meta.org_string
|
||||
for attr in ('cn_name', 'en_name')
|
||||
@@ -349,9 +663,10 @@ class BangumiColl(_PluginBase):
|
||||
mediainfo.bangumi_id = subid
|
||||
# 根据发行日期判断是不是续作
|
||||
if mediainfo.type == MediaType.TV \
|
||||
and not self.is_date_in_range(sub_air_date, mediainfo.release_date)[0]:
|
||||
and not self.is_date_in_range(sub_air_date, mediainfo.release_date)[0]:
|
||||
# 识别剧集组标志
|
||||
group_flag: bool = True
|
||||
season_info = []
|
||||
if "OVA" in item.get("tags"):
|
||||
# 季0 处理
|
||||
if tmdb_info := self.chain.tmdb_info(mediainfo.tmdb_id, mediainfo.type, 0):
|
||||
@@ -359,12 +674,13 @@ class BangumiColl(_PluginBase):
|
||||
if self.is_date_in_range(sub_air_date, info.get("air_date"), 2)[0]:
|
||||
mediainfo.season = 0
|
||||
meta.begin_episode = info.get("episode_number")
|
||||
else: # 信息不完整, 跳过条目
|
||||
else: # 信息不完整, 跳过条目
|
||||
continue
|
||||
|
||||
else:
|
||||
# 过滤信息不完整和第0季
|
||||
season_info = [info for info in mediainfo.season_info if info.get("season_number") and info.get("air_date") and info.get("episode_count")]
|
||||
season_info = [info for info in mediainfo.season_info if
|
||||
info.get("season_number") and info.get("air_date") and info.get("episode_count")]
|
||||
# 获取 bangumi 信息
|
||||
meta = self.get_eps(meta, subid)
|
||||
# 先通过season_info处理三季及以上的情况, tmdb存在第二季也不能保证不会被合并
|
||||
@@ -399,16 +715,16 @@ class BangumiColl(_PluginBase):
|
||||
season_list = []
|
||||
for info in mediainfo.season_info:
|
||||
if info.get("season_number") == 0:
|
||||
season_list.append((len(season_info)+1, len(mediainfo.seasons[1])+info.get("episode_count")))
|
||||
season_list.append(
|
||||
(len(season_info) + 1, len(mediainfo.seasons[1]) + info.get("episode_count")))
|
||||
season_list.append((len(season_info), len(mediainfo.seasons[1])))
|
||||
# 预匹配剧集组
|
||||
candidate_groups = (
|
||||
group for group in mediainfo.episode_groups
|
||||
if any(
|
||||
group.get("group_count") == s[0] and
|
||||
group.get("episode_count") == s[1]
|
||||
for s in season_list
|
||||
)
|
||||
if any(group.get("group_count") == s[0] and
|
||||
group.get("episode_count") == s[1]
|
||||
for s in season_list
|
||||
)
|
||||
)
|
||||
|
||||
for group in candidate_groups:
|
||||
@@ -419,9 +735,10 @@ class BangumiColl(_PluginBase):
|
||||
else:
|
||||
mediainfo = self._match_group(air_date, meta, mediainfo)
|
||||
# 非续作
|
||||
elif mediainfo.type == MediaType.TV: mediainfo.season = 1
|
||||
elif mediainfo.type == MediaType.TV:
|
||||
mediainfo.season = 1
|
||||
# 检查本地媒体
|
||||
exist_flag, no_exists = self.downloadchain.get_no_exists_info(meta=meta, mediainfo=mediainfo)
|
||||
exist_flag, no_exists = DownloadChain().get_no_exists_info(meta=meta, mediainfo=mediainfo)
|
||||
if exist_flag:
|
||||
# 添加到排除
|
||||
self.update_data(key="exclude", value=subid)
|
||||
@@ -432,19 +749,19 @@ class BangumiColl(_PluginBase):
|
||||
self.update_data(key="exclude", value=subid)
|
||||
logger.info(f'{mediainfo.title_year} 媒体库中已存在 第 {mediainfo.season} 季')
|
||||
continue
|
||||
sid = self.subscribeoper.list_by_tmdbid(
|
||||
sid = subscribeoper.list_by_tmdbid(
|
||||
mediainfo.tmdb_id, mediainfo.season
|
||||
)
|
||||
if sid:
|
||||
logger.info(f"{mediainfo.title_year} 正在订阅中")
|
||||
if len(sid) == 1:
|
||||
self.subscribeoper.update(
|
||||
subscribeoper.update(
|
||||
sid=sid[0].id, payload={"bangumiid": subid}
|
||||
)
|
||||
logger.info(f"{mediainfo.title_year} Bangumi条目id更新成功")
|
||||
continue
|
||||
# 添加订阅
|
||||
sid, msg = self.subscribechain.add(**self.prepare_add_args(meta, mediainfo))
|
||||
sid, msg = SubscribeChain().add(**self.prepare_add_args(meta, mediainfo))
|
||||
if not sid:
|
||||
fail_items[subid] = f"{item.get('name_cn') or item.get('name')} {msg}"
|
||||
|
||||
@@ -454,6 +771,7 @@ class BangumiColl(_PluginBase):
|
||||
"""
|
||||
将tmdb多季合并的季信息进行拆分
|
||||
"""
|
||||
season_data = {}
|
||||
if tmdb_info := self.chain.tmdb_info(mediainfo.tmdb_id, mediainfo.type, season):
|
||||
season = 1
|
||||
air_date = tmdb_info.get("air_date")
|
||||
@@ -505,8 +823,8 @@ class BangumiColl(_PluginBase):
|
||||
episode_count = group.get("episode_count", 0)
|
||||
|
||||
if (
|
||||
group_count >= total_season
|
||||
and episode_count >= begin_ep
|
||||
group_count >= total_season
|
||||
and episode_count >= begin_ep
|
||||
):
|
||||
logger.info(
|
||||
f"{mediainfo.title_year} 正在匹配 剧集组: "
|
||||
@@ -514,8 +832,8 @@ class BangumiColl(_PluginBase):
|
||||
f"共 {group_count} 季 {episode_count} 集")
|
||||
|
||||
if season_num := self.get_group_season(
|
||||
group.get("id"), air_date, mediainfo
|
||||
):
|
||||
group.get("id"), air_date, mediainfo
|
||||
):
|
||||
mediainfo.episode_group = group.get("id")
|
||||
mediainfo.season = season_num
|
||||
return mediainfo
|
||||
@@ -529,7 +847,7 @@ class BangumiColl(_PluginBase):
|
||||
:param mediainfo: MediaInfo
|
||||
:return: 季号
|
||||
"""
|
||||
if group_seasons := self.tmdbapi.get_tv_group_seasons(group_id):
|
||||
if group_seasons := TmdbApi().get_tv_group_seasons(group_id):
|
||||
for group_season in group_seasons:
|
||||
if self.is_date_in_range(air_date, group_season.get("episodes")[0].get("air_date"))[0]:
|
||||
logger.info(f"{mediainfo.title_year} 剧集组: {group_id} 第{group_season.get('order')}季 ")
|
||||
@@ -564,9 +882,9 @@ class BangumiColl(_PluginBase):
|
||||
|
||||
total_episode = len(mediainfo.seasons.get(mediainfo.season or 1) or [])
|
||||
if (
|
||||
meta.begin_season
|
||||
and mediainfo.season != meta.begin_season
|
||||
or total_episode != meta.total_episode
|
||||
meta.begin_season
|
||||
and mediainfo.season != meta.begin_season
|
||||
or total_episode != meta.total_episode
|
||||
):
|
||||
meta = self.get_eps(meta, mediainfo.bangumi_id)
|
||||
total_ep: int = meta.end_episode if meta.end_episode else total_episode
|
||||
@@ -597,7 +915,7 @@ class BangumiColl(_PluginBase):
|
||||
"""更新媒体季信息"""
|
||||
best_info = None
|
||||
min_days = float('inf')
|
||||
|
||||
|
||||
for info in season_info:
|
||||
result, days = self.is_date_in_range(air_date, info.get("air_date"))
|
||||
if result:
|
||||
@@ -628,11 +946,12 @@ class BangumiColl(_PluginBase):
|
||||
# 移除订阅
|
||||
def delete_subscribe(self, del_items: dict[int, int]):
|
||||
"""删除订阅"""
|
||||
subscribeoper = SubscribeOper()
|
||||
for subscribe_id in del_items.keys():
|
||||
try:
|
||||
if subscribe := self.subscribeoper.get(subscribe_id):
|
||||
self.subscribeoper.delete(subscribe_id)
|
||||
self.subscribehelper.sub_done_async(
|
||||
if subscribe := subscribeoper.get(subscribe_id):
|
||||
subscribeoper.delete(subscribe_id)
|
||||
SubscribeHelper().sub_done_async(
|
||||
{"tmdbid": subscribe.tmdbid, "doubanid": subscribe.doubanid}
|
||||
)
|
||||
self.post_message(
|
||||
@@ -711,4 +1030,3 @@ class BangumiColl(_PluginBase):
|
||||
except Exception as e:
|
||||
logger.error(f"获取订阅历史失败: {str(e)}")
|
||||
return set()
|
||||
|
||||
|
||||
@@ -1,355 +0,0 @@
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
|
||||
def form(sites_options: list[dict], is_v2: bool = True) -> list:
|
||||
return [
|
||||
{
|
||||
'component': 'VForm',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {'cols': 12, 'md': 3},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'enabled',
|
||||
'label': '启用插件',
|
||||
},
|
||||
}
|
||||
],
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {'cols': 12, 'md': 3},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'notify',
|
||||
'label': '自动取消订阅并通知',
|
||||
},
|
||||
}
|
||||
],
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {'cols': 12, 'md': 3},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'total_change',
|
||||
'label': '不更新元数据',
|
||||
},
|
||||
}
|
||||
],
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {'cols': 12, 'md': 3},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'onlyonce',
|
||||
'label': '立即运行一次',
|
||||
},
|
||||
}
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {'cols': 8, 'md': 4},
|
||||
'content': [
|
||||
{
|
||||
# 'component': 'VTextField', # 组件替换为VCronField
|
||||
'component': 'VCronField',
|
||||
'props': {
|
||||
'model': 'cron',
|
||||
'label': '执行周期',
|
||||
'placeholder': '5位cron表达式,留空自动',
|
||||
},
|
||||
}
|
||||
],
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {'cols': 8, 'md': 4},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'uid',
|
||||
'label': 'UID/用户名',
|
||||
'placeholder': '设置了用户名填写用户名,否则填写UID',
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {'cols': 8, 'md': 4},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSelect',
|
||||
'props': {
|
||||
'model': 'collection_type',
|
||||
'label': '收藏类型',
|
||||
'chips': True,
|
||||
'multiple': True,
|
||||
'items': [
|
||||
{'title': '在看', 'value': 3},
|
||||
{'title': '想看', 'value': 1},
|
||||
],
|
||||
},
|
||||
}
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VAlert',
|
||||
'props': {
|
||||
'type': 'info',
|
||||
'variant': 'tonal',
|
||||
},
|
||||
'content': parse_html(
|
||||
'<p>提示: <strong>剧集组优先级</strong>越靠前优先级越高。</p>'
|
||||
),
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {'cols': 8, 'md': 4},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSwitch',
|
||||
'props': {
|
||||
'model': 'match_groups',
|
||||
'disabled': not is_v2,
|
||||
'label': '剧集组填充(实验性)',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {'cols': 8},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSelect',
|
||||
'props': {
|
||||
'model': 'group_select_order',
|
||||
'label': '剧集组优先级',
|
||||
'disabled': not is_v2,
|
||||
'chips': True,
|
||||
'multiple': True,
|
||||
'clearable': True,
|
||||
'items': [
|
||||
{"title": "初始播出日期", "value": 1},
|
||||
{"title": "绝对", "value": 2},
|
||||
{"title": "DVD", "value": 3},
|
||||
{"title": "数字", "value": 4},
|
||||
{"title": "故事线", "value": 5},
|
||||
{"title": "制片", "value": 6},
|
||||
{"title": "电视", "value": 7},
|
||||
],
|
||||
},
|
||||
}
|
||||
]
|
||||
},
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {'cols': 12, 'md': 6},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'save_path',
|
||||
'label': '保存目录',
|
||||
'placeholder': '留空自动',
|
||||
},
|
||||
}
|
||||
],
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {'cols': 12, 'md': 6},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VSelect',
|
||||
'props': {
|
||||
'model': 'sites',
|
||||
'label': '选择站点',
|
||||
'chips': True,
|
||||
'multiple': True,
|
||||
'clearable': True,
|
||||
'items': sites_options,
|
||||
},
|
||||
}
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VAlert',
|
||||
'props': {
|
||||
'type': 'info',
|
||||
'variant': 'tonal',
|
||||
},
|
||||
'content': parse_html(
|
||||
'<p>注意: 该插件仅会将<strong>公开</strong>的收藏添加到<strong>订阅</strong>。</p>'
|
||||
),
|
||||
}
|
||||
],
|
||||
}
|
||||
],
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VAlert',
|
||||
'props': {
|
||||
'type': 'info',
|
||||
'variant': 'tonal',
|
||||
},
|
||||
'content': parse_html(
|
||||
'<p>注意: 开启<strong>自动取消订阅并通知</strong>后,已添加的订阅在下一次执行时若不在已选择的<strong>收藏类型</strong>中,将会被取消订阅。</p>'
|
||||
),
|
||||
}
|
||||
],
|
||||
}
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VAlert',
|
||||
'props': {
|
||||
'type': 'info',
|
||||
'variant': 'tonal',
|
||||
},
|
||||
'content': parse_html(
|
||||
'<p>注意: 开启<strong>不更新元数据</strong>后,从<a href="https://bangumi.github.io/api/#/%E7%AB%A0%E8%8A%82/getEpisodes" target="_blank"><u>Bangumi API</u></a>获取到总集数将不会因<strong>订阅元数据更新</strong>改变。</p>'
|
||||
),
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
], {
|
||||
"enabled": False,
|
||||
"total_change": False,
|
||||
"notify": False,
|
||||
"onlyonce": False,
|
||||
"cron": "",
|
||||
"uid": "",
|
||||
"collection_type": [3],
|
||||
"save_path": "",
|
||||
"sites": [],
|
||||
"match_groups": False,
|
||||
"group_select_order": [],
|
||||
}
|
||||
|
||||
|
||||
def parse_html(html_string: str) -> list:
|
||||
soup = BeautifulSoup(html_string, 'html.parser')
|
||||
result: list = []
|
||||
|
||||
# 定义需要直接转为文本的标签
|
||||
inline_text_tags = {'strong', 'u', 'em', 'b', 'i'}
|
||||
|
||||
def process_element(element: BeautifulSoup):
|
||||
# 处理纯文本节点
|
||||
if element.name is None:
|
||||
text = element.strip()
|
||||
return text if text else ""
|
||||
|
||||
# 处理HTML标签
|
||||
component = element.name
|
||||
props = {attr: element[attr] for attr in element.attrs}
|
||||
content = []
|
||||
|
||||
# 递归处理子元素
|
||||
for child in element.children:
|
||||
child_content = process_element(child)
|
||||
if isinstance(child_content, str):
|
||||
content.append({'component': 'span', 'text': child_content})
|
||||
elif child_content: # 只有在child_content不为空时添加
|
||||
content.append(child_content)
|
||||
|
||||
# 构建标签对象
|
||||
tag_data = {
|
||||
'component': component,
|
||||
'props': props,
|
||||
'content': content if component not in inline_text_tags else [],
|
||||
}
|
||||
|
||||
if content and component in inline_text_tags:
|
||||
tag_data['text'] = ' '.join(
|
||||
item['text'] for item in content if 'text' in item
|
||||
)
|
||||
|
||||
return tag_data
|
||||
|
||||
# 遍历所有子元素
|
||||
for element in soup.children:
|
||||
element_content = process_element(element)
|
||||
if element_content: # 只增加非空内容
|
||||
result.append(element_content)
|
||||
|
||||
return result
|
||||
@@ -39,7 +39,7 @@ class ContractCheck(_PluginBase):
|
||||
# 插件图标
|
||||
plugin_icon = "contract.png"
|
||||
# 插件版本
|
||||
plugin_version = "1.4"
|
||||
plugin_version = "1.4.1"
|
||||
# 插件作者
|
||||
plugin_author = "DzAvril"
|
||||
# 作者主页
|
||||
@@ -53,13 +53,13 @@ class ContractCheck(_PluginBase):
|
||||
|
||||
class ContractInfo:
|
||||
def __init__(
|
||||
self,
|
||||
site_name: str = "",
|
||||
official: bool = False,
|
||||
size: int = 0,
|
||||
num: int = 0,
|
||||
duration: int = 0,
|
||||
date: datetime = datetime.now(),
|
||||
self,
|
||||
site_name: str = "",
|
||||
official: bool = False,
|
||||
size: int = 0,
|
||||
num: int = 0,
|
||||
duration: int = 0,
|
||||
date: datetime = datetime.now(),
|
||||
):
|
||||
self.site_name: str = site_name
|
||||
self.official: bool = official
|
||||
@@ -69,8 +69,6 @@ class ContractCheck(_PluginBase):
|
||||
self.date: datetime = date
|
||||
|
||||
# 私有属性
|
||||
sites = None
|
||||
siteoper = None
|
||||
statistic_sites: list = []
|
||||
contract_infos: list[ContractInfo] = []
|
||||
_scheduler: Optional[BackgroundScheduler] = None
|
||||
@@ -87,8 +85,7 @@ class ContractCheck(_PluginBase):
|
||||
_dashboard_type: str = "brief"
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self.sites = SitesHelper()
|
||||
self.siteoper = SiteOper()
|
||||
|
||||
# 停止现有任务
|
||||
self.stop_service()
|
||||
# 配置
|
||||
@@ -123,8 +120,7 @@ class ContractCheck(_PluginBase):
|
||||
self._scheduler.add_job(
|
||||
self.refresh_all_site_data,
|
||||
"date",
|
||||
run_date=datetime.now(tz=pytz.timezone(settings.TZ))
|
||||
+ timedelta(seconds=3),
|
||||
run_date=datetime.now(tz=pytz.timezone(settings.TZ)) + timedelta(seconds=3),
|
||||
)
|
||||
# 关闭一次性开关
|
||||
self._onlyonce = False
|
||||
@@ -162,7 +158,7 @@ class ContractCheck(_PluginBase):
|
||||
self.statistic_sites.append(site_id)
|
||||
|
||||
def _get_site_id(self, name):
|
||||
all_sites = [site for site in self.siteoper.list_order_by_pri()] + [
|
||||
all_sites = [site for site in SiteOper().list_order_by_pri()] + [
|
||||
site for site in self.__custom_sites()
|
||||
]
|
||||
for site in all_sites:
|
||||
@@ -505,9 +501,7 @@ class ContractCheck(_PluginBase):
|
||||
}
|
||||
]
|
||||
|
||||
def get_dashboard(
|
||||
self,
|
||||
) -> Optional[Tuple[Dict[str, Any], Dict[str, Any], List[dict]]]:
|
||||
def get_dashboard(self, **kwargs) -> Optional[Tuple[Dict[str, Any], Dict[str, Any], List[dict]]]:
|
||||
"""
|
||||
获取插件仪表盘页面,需要返回:1、仪表板col配置字典;2、全局配置(自动刷新等);3、仪表板页面元素配置json(含数据)
|
||||
1、col配置参考:
|
||||
@@ -783,7 +777,7 @@ class ContractCheck(_PluginBase):
|
||||
i = html_text.find("window.location")
|
||||
if i == -1:
|
||||
return None
|
||||
tmp_url = url + html_text[i : html_text.find(";")].replace(
|
||||
tmp_url = url + html_text[i: html_text.find(";")].replace(
|
||||
'"', ""
|
||||
).replace("+", "").replace(" ", "").replace(
|
||||
"window.location=", ""
|
||||
@@ -793,8 +787,8 @@ class ContractCheck(_PluginBase):
|
||||
).get_res(url=tmp_url)
|
||||
if res and res.status_code == 200:
|
||||
if (
|
||||
"charset=utf-8" in res.text
|
||||
or "charset=UTF-8" in res.text
|
||||
"charset=utf-8" in res.text
|
||||
or "charset=UTF-8" in res.text
|
||||
):
|
||||
res.encoding = "UTF-8"
|
||||
else:
|
||||
@@ -819,7 +813,7 @@ class ContractCheck(_PluginBase):
|
||||
).get_res(url=url + "/index.php")
|
||||
if res and res.status_code == 200:
|
||||
if re.search(
|
||||
r"charset=\"?utf-8\"?", res.text, re.IGNORECASE
|
||||
r"charset=\"?utf-8\"?", res.text, re.IGNORECASE
|
||||
):
|
||||
res.encoding = "utf-8"
|
||||
else:
|
||||
@@ -975,7 +969,7 @@ class ContractCheck(_PluginBase):
|
||||
"""
|
||||
多线程刷新站点下载上传量,默认间隔6小时
|
||||
"""
|
||||
if not self.sites.get_indexers():
|
||||
if not SitesHelper().get_indexers():
|
||||
return
|
||||
|
||||
logger.info("开始刷新站点数据 ...")
|
||||
@@ -983,8 +977,8 @@ class ContractCheck(_PluginBase):
|
||||
with lock:
|
||||
|
||||
all_sites = [
|
||||
site for site in self.sites.get_indexers() if not site.get("public")
|
||||
] + self.__custom_sites()
|
||||
site for site in SitesHelper().get_indexers() if not site.get("public")
|
||||
] + self.__custom_sites()
|
||||
# 没有指定站点,默认使用全部站点
|
||||
if not self.statistic_sites:
|
||||
refresh_sites = all_sites
|
||||
|
||||
@@ -70,6 +70,7 @@ class ISiteUserInfo(metaclass=ABCMeta):
|
||||
"听听歌": ["TTG", "WiKi", "DoA", "NGB", "ARiN"],
|
||||
"馒头": ["MTeam", "MTeamTV"],
|
||||
"朋友": ["FRDS"],
|
||||
"猪猪": ["PigoHD","PigoWeb","PigoNF"]
|
||||
}
|
||||
|
||||
# 错误信息
|
||||
|
||||
@@ -28,12 +28,11 @@ class CustomIndexer(_PluginBase):
|
||||
auth_level = 2
|
||||
|
||||
# 私有属性
|
||||
siteshelper = None
|
||||
_enabled = False
|
||||
_confstr = ""
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self.siteshelper = SitesHelper()
|
||||
|
||||
# 读取配置
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
@@ -49,7 +48,7 @@ class CustomIndexer(_PluginBase):
|
||||
if not domain or not jsonstr:
|
||||
continue
|
||||
jsonstr = base64.b64decode(jsonstr).decode('utf-8')
|
||||
self.siteshelper.add_indexer(domain, json.loads(jsonstr))
|
||||
SitesHelper().add_indexer(domain, json.loads(jsonstr))
|
||||
except Exception as err:
|
||||
logger.error(f"自定义索引站点配置错误:{err}")
|
||||
self.systemmessage.put(f"自定义索引站点配置错误:{err}", title="自定义索引站点")
|
||||
|
||||
@@ -37,10 +37,8 @@ class DownloadingMsg(_PluginBase):
|
||||
_seconds = None
|
||||
_type = None
|
||||
_adminuser = None
|
||||
_downloadhis = None
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self._downloadhis = DownloadHistoryOper()
|
||||
# 停止现有任务
|
||||
self.stop_service()
|
||||
|
||||
@@ -71,8 +69,9 @@ class DownloadingMsg(_PluginBase):
|
||||
if self._type == "user" or self._type == "both":
|
||||
user_torrents = {}
|
||||
# 根据正在下载种子hash获取下载历史
|
||||
_downloadhis = DownloadHistoryOper()
|
||||
for torrent in torrents:
|
||||
downloadhis = self._downloadhis.get_by_hash(download_hash=torrent.hash)
|
||||
downloadhis = _downloadhis.get_by_hash(download_hash=torrent.hash)
|
||||
if not downloadhis:
|
||||
logger.warn(f"种子 {torrent.hash} 未获取到MoviePilot下载历史,无法推送下载进度")
|
||||
continue
|
||||
@@ -115,13 +114,14 @@ class DownloadingMsg(_PluginBase):
|
||||
messages = []
|
||||
index = 1
|
||||
channel_value = None
|
||||
_downloadhis = DownloadHistoryOper()
|
||||
for torrent in torrents:
|
||||
year = None
|
||||
name = None
|
||||
se = None
|
||||
ep = None
|
||||
# 先查询下载记录,没有再识别
|
||||
downloadhis = self._downloadhis.get_by_hash(download_hash=torrent.hash)
|
||||
downloadhis = _downloadhis.get_by_hash(download_hash=torrent.hash)
|
||||
if downloadhis:
|
||||
name = downloadhis.title
|
||||
year = downloadhis.year
|
||||
|
||||
@@ -23,6 +23,7 @@ from app.utils.common import retry
|
||||
from app.utils.http import RequestUtils
|
||||
from app.db.models import PluginData
|
||||
|
||||
|
||||
class ExistMediaInfo(BaseModel):
|
||||
# 季, 集
|
||||
groupep: Optional[Dict[int, list]] = {}
|
||||
@@ -522,7 +523,7 @@ class EpisodeGroupMeta(_PluginBase):
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
return [
|
||||
{
|
||||
'component': 'VRow',
|
||||
@@ -628,7 +629,7 @@ class EpisodeGroupMeta(_PluginBase):
|
||||
time.sleep(int(self._delay))
|
||||
# 开始处理
|
||||
if self.start_rt(mediainfo=mediainfo, episode_groups=episode_groups):
|
||||
# 处理完成时, 属于自动匹配的, 发送通知
|
||||
# 处理完成时, 属于自动匹配的, 发送通知
|
||||
if self._notify and mediainfo_dict:
|
||||
self.post_message(
|
||||
mtype=schemas.NotificationType.Manual,
|
||||
@@ -673,8 +674,10 @@ class EpisodeGroupMeta(_PluginBase):
|
||||
self.log_warn(f"{mediainfo.title_year} 在媒体库 {server} 中没有数据")
|
||||
continue
|
||||
else:
|
||||
self.log_info(f"{mediainfo.title_year} 在媒体库 {existsinfo.server} 中找到了这些季集:{existsinfo.groupep}")
|
||||
_bool = self.__start_rt_mediaserver(mediainfo=mediainfo, existsinfo=existsinfo, episode_groups=episode_groups, group_id=group_id)
|
||||
self.log_info(
|
||||
f"{mediainfo.title_year} 在媒体库 {existsinfo.server} 中找到了这些季集:{existsinfo.groupep}")
|
||||
_bool = self.__start_rt_mediaserver(mediainfo=mediainfo, existsinfo=existsinfo,
|
||||
episode_groups=episode_groups, group_id=group_id)
|
||||
relust_bool = relust_bool or _bool
|
||||
else:
|
||||
# v2版本 遍历所有媒体服务器的方式
|
||||
@@ -696,8 +699,11 @@ class EpisodeGroupMeta(_PluginBase):
|
||||
self.log_warn(f"{mediainfo.title_year} 在 ({info.type}){name} 媒体服务器中没有数据")
|
||||
continue
|
||||
else:
|
||||
self.log_info(f"{mediainfo.title_year} 在媒体库 ({existsinfo.server_type}){existsinfo.server} 中找到了这些季集:{existsinfo.groupep}")
|
||||
_bool = self.__start_rt_mediaserver(mediainfo=mediainfo, existsinfo=existsinfo, episode_groups=episode_groups, group_id=group_id, mediaserver_instance=info.instance)
|
||||
self.log_info(
|
||||
f"{mediainfo.title_year} 在媒体库 ({existsinfo.server_type}){existsinfo.server} 中找到了这些季集:{existsinfo.groupep}")
|
||||
_bool = self.__start_rt_mediaserver(mediainfo=mediainfo, existsinfo=existsinfo,
|
||||
episode_groups=episode_groups, group_id=group_id,
|
||||
mediaserver_instance=info.instance)
|
||||
relust_bool = relust_bool or _bool
|
||||
return relust_bool
|
||||
|
||||
@@ -762,7 +768,8 @@ class EpisodeGroupMeta(_PluginBase):
|
||||
ep_num = ep[_index]
|
||||
for _id in _ids:
|
||||
# 获取媒体服务器媒体项
|
||||
iteminfo = self.get_iteminfo(server_type=existsinfo.server_type, itemid=_id, mediaserver_instance=mediaserver_instance)
|
||||
iteminfo = self.get_iteminfo(server_type=existsinfo.server_type, itemid=_id,
|
||||
mediaserver_instance=mediaserver_instance)
|
||||
if not iteminfo:
|
||||
self.log_info(f"未找到媒体项 - itemid: {_id}, 第 {order} 季, 第 {ep_num} 集")
|
||||
continue
|
||||
@@ -771,7 +778,8 @@ class EpisodeGroupMeta(_PluginBase):
|
||||
if iteminfo.get("LockData") or (
|
||||
"Name" in iteminfo.get("LockedFields", [])
|
||||
and "Overview" in iteminfo.get("LockedFields", [])):
|
||||
self.log_warn(f"已锁定媒体项 - itemid: {_id}, 第 {order} 季, 第 {ep_num} 集, 如果需要刮削请打开设置中的“锁定的剧集也刮削”选项")
|
||||
self.log_warn(
|
||||
f"已锁定媒体项 - itemid: {_id}, 第 {order} 季, 第 {ep_num} 集, 如果需要刮削请打开设置中的“锁定的剧集也刮削”选项")
|
||||
continue
|
||||
# 替换项目数据
|
||||
episode = episodes[ep_num - 1]
|
||||
@@ -789,7 +797,8 @@ class EpisodeGroupMeta(_PluginBase):
|
||||
self.__append_to_list(new_dict["LockedFields"], "Name")
|
||||
self.__append_to_list(new_dict["LockedFields"], "Overview")
|
||||
# 更新数据
|
||||
self.set_iteminfo(server_type=existsinfo.server_type, itemid=_id, iteminfo=new_dict, mediaserver_instance=mediaserver_instance)
|
||||
self.set_iteminfo(server_type=existsinfo.server_type, itemid=_id, iteminfo=new_dict,
|
||||
mediaserver_instance=mediaserver_instance)
|
||||
# still_path 图片
|
||||
if episode.get("still_path"):
|
||||
self.set_item_image(server_type=existsinfo.server_type, itemid=_id,
|
||||
@@ -812,7 +821,8 @@ class EpisodeGroupMeta(_PluginBase):
|
||||
if item not in list:
|
||||
list.append(item)
|
||||
|
||||
def __media_exists(self, mediainfo: schemas.MediaInfo, server: str, server_type: str, mediaserver_instance: Any = None) -> ExistMediaInfo:
|
||||
def __media_exists(self, mediainfo: schemas.MediaInfo, server: str, server_type: str,
|
||||
mediaserver_instance: Any = None) -> ExistMediaInfo:
|
||||
"""
|
||||
根据媒体信息,返回是否存在于指定媒体服务器中,剧集列表与剧集ID列表
|
||||
:param mediainfo: 媒体信息
|
||||
@@ -825,14 +835,14 @@ class EpisodeGroupMeta(_PluginBase):
|
||||
try:
|
||||
instance = mediaserver_instance or self.emby
|
||||
res = instance.get_data(("[HOST]emby/Items?"
|
||||
"IncludeItemTypes=Series"
|
||||
"&Fields=ProductionYear"
|
||||
"&StartIndex=0"
|
||||
"&Recursive=true"
|
||||
"&SearchTerm=%s"
|
||||
"&Limit=10"
|
||||
"&IncludeSearchTypes=false"
|
||||
"&api_key=[APIKEY]") % mediainfo.title)
|
||||
"IncludeItemTypes=Series"
|
||||
"&Fields=ProductionYear"
|
||||
"&StartIndex=0"
|
||||
"&Recursive=true"
|
||||
"&SearchTerm=%s"
|
||||
"&Limit=10"
|
||||
"&IncludeSearchTypes=false"
|
||||
"&api_key=[APIKEY]") % mediainfo.title)
|
||||
res_items = res.json().get("Items")
|
||||
if res_items:
|
||||
for res_item in res_items:
|
||||
@@ -893,9 +903,9 @@ class EpisodeGroupMeta(_PluginBase):
|
||||
try:
|
||||
instance = mediaserver_instance or self.jellyfin
|
||||
res = instance.get_data(url=f"[HOST]Users/[USER]/Items?api_key=[APIKEY]"
|
||||
f"&searchTerm={mediainfo.title}"
|
||||
f"&IncludeItemTypes=Series"
|
||||
f"&Limit=10&Recursive=true")
|
||||
f"&searchTerm={mediainfo.title}"
|
||||
f"&IncludeItemTypes=Series"
|
||||
f"&Limit=10&Recursive=true")
|
||||
res_items = res.json().get("Items")
|
||||
if res_items:
|
||||
for res_item in res_items:
|
||||
@@ -958,14 +968,14 @@ class EpisodeGroupMeta(_PluginBase):
|
||||
return None
|
||||
# 根据标题和年份模糊搜索,该结果不够准确
|
||||
videos = _plex.library.search(title=mediainfo.title,
|
||||
year=mediainfo.year,
|
||||
libtype="show")
|
||||
year=mediainfo.year,
|
||||
libtype="show")
|
||||
if (not videos
|
||||
and mediainfo.original_title
|
||||
and str(mediainfo.original_title) != str(mediainfo.title)):
|
||||
videos = _plex.library.search(title=mediainfo.original_title,
|
||||
year=mediainfo.year,
|
||||
libtype="show")
|
||||
year=mediainfo.year,
|
||||
libtype="show")
|
||||
if not videos:
|
||||
return None
|
||||
if isinstance(videos, list):
|
||||
@@ -1323,7 +1333,7 @@ class EpisodeGroupMeta(_PluginBase):
|
||||
self.jellyfin = Jellyfin()
|
||||
return None
|
||||
|
||||
services = self.mediaserver_helper.get_services(type_filter=type_filter)#, name_filters=self._mediaservers)
|
||||
services = self.mediaserver_helper.get_services(type_filter=type_filter) #, name_filters=self._mediaservers)
|
||||
if not services:
|
||||
self.log_warn("获取媒体服务器实例失败,请检查配置")
|
||||
return None
|
||||
|
||||
@@ -9,7 +9,6 @@ from apscheduler.schedulers.background import BackgroundScheduler
|
||||
from apscheduler.triggers.cron import CronTrigger
|
||||
|
||||
from app.chain.download import DownloadChain
|
||||
from app.chain.search import SearchChain
|
||||
from app.chain.subscribe import SubscribeChain
|
||||
from app.core.config import settings
|
||||
from app.core.event import Event
|
||||
@@ -49,9 +48,6 @@ class NeoDBSync(_PluginBase):
|
||||
|
||||
_scheduler: Optional[BackgroundScheduler] = None
|
||||
_cache_path: Optional[Path] = None
|
||||
downloadchain = None
|
||||
searchchain = None
|
||||
subscribechain = None
|
||||
|
||||
# 配置属性
|
||||
_enabled: bool = False
|
||||
@@ -64,9 +60,6 @@ class NeoDBSync(_PluginBase):
|
||||
_tokens: str = ""
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self.downloadchain = DownloadChain()
|
||||
self.searchchain = SearchChain()
|
||||
self.subscribechain = SubscribeChain()
|
||||
|
||||
# 停止现有任务
|
||||
self.stop_service()
|
||||
@@ -510,6 +503,8 @@ class NeoDBSync(_PluginBase):
|
||||
logger.info(f"用户 {username} 没有想看数据")
|
||||
continue
|
||||
# 遍历该用户的所有想看条目
|
||||
downloadchain = DownloadChain()
|
||||
subscribechain = SubscribeChain()
|
||||
for result in results:
|
||||
try:
|
||||
# Take the url as the unique identifier. For example: /movie/2fEdnxYWozPayayizQmk5M
|
||||
@@ -539,19 +534,19 @@ class NeoDBSync(_PluginBase):
|
||||
logger.warn(f'未识别到媒体信息,标题:{title}')
|
||||
continue
|
||||
# 查询缺失的媒体信息
|
||||
exist_flag, no_exists = self.downloadchain.get_no_exists_info(meta=meta, mediainfo=mediainfo)
|
||||
exist_flag, no_exists = downloadchain.get_no_exists_info(meta=meta, mediainfo=mediainfo)
|
||||
if exist_flag:
|
||||
logger.info(f'{mediainfo.title_year} 媒体库中已存在')
|
||||
else:
|
||||
# 添加订阅
|
||||
logger.info(f'{mediainfo.title_year} 媒体库中不存在或不完整,添加订阅 ...')
|
||||
self.subscribechain.add(title=mediainfo.title,
|
||||
year=mediainfo.year,
|
||||
mtype=mediainfo.type,
|
||||
tmdbid=mediainfo.tmdb_id,
|
||||
season=meta.begin_season,
|
||||
exist_ok=True,
|
||||
username="NeoDB 想看")
|
||||
subscribechain.add(title=mediainfo.title,
|
||||
year=mediainfo.year,
|
||||
mtype=mediainfo.type,
|
||||
tmdbid=mediainfo.tmdb_id,
|
||||
season=meta.begin_season,
|
||||
exist_ok=True,
|
||||
username="NeoDB 想看")
|
||||
action = "subscribe"
|
||||
# 存储历史记录
|
||||
history.append({
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import base64
|
||||
import json
|
||||
import requests
|
||||
|
||||
@@ -15,10 +16,20 @@ class NtfyClient:
|
||||
headers = {
|
||||
"Title": title.encode(encoding='utf-8'),
|
||||
"Markdown": "true" if format_as_markdown else "false",
|
||||
"Icon": "https://movie-pilot.org/images/logo.png",
|
||||
}
|
||||
|
||||
if self._token:
|
||||
headers["Authorization"] = "Bearer " + self._token
|
||||
elif self._user and self._password:
|
||||
authStr = self._user + ":" + self._password
|
||||
headers["Authorization"] = "Basic " + base64.b64encode(authStr.encode('utf-8')).decode('utf-8')
|
||||
|
||||
if self._actions:
|
||||
headers["Actions"] = self._actions.encode('utf-8')
|
||||
|
||||
response = json.loads(
|
||||
requests.post(url=self.url, data=message.encode(encoding='utf-8'), headers=headers, auth=self._auth).text
|
||||
requests.post(url=self.url, data=message.encode(encoding='utf-8'), headers=headers).text
|
||||
)
|
||||
return response
|
||||
|
||||
@@ -28,11 +39,16 @@ class NtfyClient:
|
||||
server: str = "https://ntfy.sh",
|
||||
user: str = "",
|
||||
password: str = "",
|
||||
token: str = "",
|
||||
actions: str = "",
|
||||
):
|
||||
self._server = server
|
||||
self._topic = topic
|
||||
self.__set_url(server, topic)
|
||||
self._auth = (user, password)
|
||||
self._user = user
|
||||
self._password = password
|
||||
self._token = token
|
||||
self._actions = actions
|
||||
|
||||
def __set_url(self, server, topic):
|
||||
self.url = server.strip("/") + "/" + topic
|
||||
@@ -46,7 +62,7 @@ class NtfyMsg(_PluginBase):
|
||||
# 插件图标
|
||||
plugin_icon = "Ntfy_A.png"
|
||||
# 插件版本
|
||||
plugin_version = "1.0"
|
||||
plugin_version = "1.1"
|
||||
# 插件作者
|
||||
plugin_author = "lethargicScribe"
|
||||
# 作者主页
|
||||
@@ -64,6 +80,8 @@ class NtfyMsg(_PluginBase):
|
||||
_topic = None
|
||||
_user = None
|
||||
_password = None
|
||||
_token = None
|
||||
_actions = None
|
||||
_msgtypes = []
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
@@ -74,6 +92,8 @@ class NtfyMsg(_PluginBase):
|
||||
self._topic = config.get("topic")
|
||||
self._user = config.get("user")
|
||||
self._password = config.get("password")
|
||||
self._token = config.get("token")
|
||||
self._actions = config.get("actions")
|
||||
|
||||
def get_state(self) -> bool:
|
||||
return self._enabled and (True if self._server and self._topic else False)
|
||||
@@ -194,6 +214,45 @@ class NtfyMsg(_PluginBase):
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'token',
|
||||
'label': '访问令牌',
|
||||
'placeholder': 'ntfytoken',
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
'md': 6
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VTextField',
|
||||
'props': {
|
||||
'model': 'actions',
|
||||
'label': '用户动作',
|
||||
'placeholder': 'ntfyactions',
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
@@ -217,6 +276,48 @@ class NtfyMsg(_PluginBase):
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VAlert',
|
||||
'props': {
|
||||
'type': 'info',
|
||||
'variant': 'tonal',
|
||||
'text': '用户或Token创建参考:https://docs.ntfy.sh/config/#access-control'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
'component': 'VRow',
|
||||
'content': [
|
||||
{
|
||||
'component': 'VCol',
|
||||
'props': {
|
||||
'cols': 12,
|
||||
},
|
||||
'content': [
|
||||
{
|
||||
'component': 'VAlert',
|
||||
'props': {
|
||||
'type': 'info',
|
||||
'variant': 'tonal',
|
||||
'text': '用户动作创建参考:https://docs.ntfy.sh/publish/?h=action#using-a-header'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
]
|
||||
}
|
||||
], {
|
||||
@@ -226,6 +327,8 @@ class NtfyMsg(_PluginBase):
|
||||
'topic': 'MoviePilot',
|
||||
'user': '',
|
||||
'password': '',
|
||||
'token': '',
|
||||
'actions': '',
|
||||
}
|
||||
|
||||
def get_page(self) -> List[dict]:
|
||||
@@ -266,7 +369,11 @@ class NtfyMsg(_PluginBase):
|
||||
try:
|
||||
if not self._server or not self._topic:
|
||||
return False, "参数未配置"
|
||||
ntfy = NtfyClient(server=self._server, topic=self._topic, user=self._user, password=self._password)
|
||||
ntfy = NtfyClient(
|
||||
server=self._server, topic=self._topic,
|
||||
user=self._user, password=self._password,
|
||||
token=self._token, actions=self._actions
|
||||
)
|
||||
ntfy.send(title=title, message=text, format_as_markdown=True)
|
||||
|
||||
except Exception as msg_e:
|
||||
|
||||
@@ -139,14 +139,12 @@ class RemoveLink(_PluginBase):
|
||||
_delete_scrap_infos = False
|
||||
_delete_torrents = False
|
||||
_delete_history = False
|
||||
_transferhistory = None
|
||||
_observer = []
|
||||
# 监控目录的文件列表
|
||||
state_set: Dict[str, int] = {}
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
logger.info(f"Hello, RemoveLink! config {config}")
|
||||
self._transferhistory = TransferHistoryOper()
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
self._notify = config.get("notify")
|
||||
@@ -491,10 +489,11 @@ class RemoveLink(_PluginBase):
|
||||
if not self._delete_history:
|
||||
return
|
||||
# 查找历史记录
|
||||
transfer_history = self._transferhistory.get_by_src(path)
|
||||
_transferhistory = TransferHistoryOper()
|
||||
transfer_history = _transferhistory.get_by_src(path)
|
||||
if transfer_history:
|
||||
# 删除历史记录
|
||||
self._transferhistory.delete(transfer_history.id)
|
||||
_transferhistory.delete(transfer_history.id)
|
||||
logger.info(f"删除历史记录:{transfer_history.id}")
|
||||
|
||||
def delete_empty_folders(self, path):
|
||||
|
||||
@@ -29,9 +29,6 @@ class SiteRefresh(_PluginBase):
|
||||
# 可使用的用户级别
|
||||
auth_level = 2
|
||||
|
||||
# 私有属性
|
||||
siteoper: SiteOper = None
|
||||
|
||||
# 配置属性
|
||||
_enabled: bool = False
|
||||
_notify: bool = False
|
||||
@@ -42,7 +39,7 @@ class SiteRefresh(_PluginBase):
|
||||
_siteconf: list = []
|
||||
|
||||
def init_plugin(self, config: dict = None):
|
||||
self.siteoper = SiteOper()
|
||||
|
||||
# 配置
|
||||
if config:
|
||||
self._enabled = config.get("enabled")
|
||||
@@ -70,7 +67,7 @@ class SiteRefresh(_PluginBase):
|
||||
logger.error(f"未获取到site_id")
|
||||
return
|
||||
|
||||
site = self.siteoper.get(site_id)
|
||||
site = SiteOper().get(site_id)
|
||||
if not site:
|
||||
logger.error(f"未获取到site_id {site_id} 对应的站点数据")
|
||||
return
|
||||
|
||||
@@ -1,16 +1,16 @@
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
from typing import Any, List, Dict, Tuple
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import pytz
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
|
||||
from app.chain.tmdb import TmdbChain
|
||||
from app.core.config import settings
|
||||
from app.helper.wallpaper import WallpaperHelper
|
||||
from app.log import logger
|
||||
from app.plugins import _PluginBase
|
||||
from app.utils.http import RequestUtils
|
||||
from app.utils.web import WebUtils
|
||||
|
||||
|
||||
class TmdbWallpaper(_PluginBase):
|
||||
@@ -21,7 +21,7 @@ class TmdbWallpaper(_PluginBase):
|
||||
# 插件图标
|
||||
plugin_icon = "Macos_Sierra.png"
|
||||
# 插件版本
|
||||
plugin_version = "1.2"
|
||||
plugin_version = "1.4"
|
||||
# 插件作者
|
||||
plugin_author = "jxxghp"
|
||||
# 作者主页
|
||||
@@ -237,13 +237,19 @@ class TmdbWallpaper(_PluginBase):
|
||||
|
||||
if not self._savepath:
|
||||
return
|
||||
if settings.WALLPAPER == "tmdb":
|
||||
urls = TmdbChain().get_trending_wallpapers() or []
|
||||
for url in urls:
|
||||
urls = WallpaperHelper().get_wallpapers(10) or []
|
||||
for url in urls:
|
||||
if settings.WALLPAPER == "tmdb":
|
||||
filename = url.split("/")[-1]
|
||||
__save_file(url, filename)
|
||||
else:
|
||||
url = WebUtils.get_bing_wallpaper()
|
||||
if url:
|
||||
filename = f"{datetime.now().strftime('%Y%m%d')}.jpg"
|
||||
__save_file(url, filename)
|
||||
elif settings.WALLPAPER == "bing":
|
||||
# https://cn.bing.com/th?id=OHR.EchinaceaButterfly_EN-US8404044892_1920x1080.jpg&rf=LaDigue_1920x1080.jpg&pid=hp
|
||||
# 解析url参数,获取id的值
|
||||
url_params = urlparse(url)
|
||||
filename = url_params.query.split("id=")[-1]
|
||||
else:
|
||||
# 其他壁纸类型,直接使用url的文件名
|
||||
filename = url.split("/")[-1]
|
||||
# 没有后缀的文件名,添加.jpg后缀
|
||||
if not filename.endswith(".jpg"):
|
||||
filename += ".jpg"
|
||||
__save_file(url, filename)
|
||||
|
||||
@@ -19,7 +19,7 @@ class TrackerEditor(_PluginBase):
|
||||
# 插件图标
|
||||
plugin_icon = "trackereditor_A.png"
|
||||
# 插件版本
|
||||
plugin_version = "1.8"
|
||||
plugin_version = "1.9"
|
||||
# 插件作者
|
||||
plugin_author = "honue"
|
||||
# 作者主页
|
||||
@@ -91,7 +91,7 @@ class TrackerEditor(_PluginBase):
|
||||
new_url = tracker.url.replace(target_domain, tracker_dict[target_domain])
|
||||
logger.info(f"{original_url[:30]}... 替换为 {new_url[:30]}...")
|
||||
torrent.edit_tracker(orig_url=original_url, new_url=new_url)
|
||||
torrent_update_cnt += 1
|
||||
torrent_update_cnt += 1
|
||||
|
||||
elif self._downloader_type == "transmission":
|
||||
self._downloader = Transmission(self._host, self._port, self._username, self._password)
|
||||
|
||||
Reference in New Issue
Block a user