Merge pull request #809 from wumode/clashruleprovider

update ClashRuleProvider and ImdbSource
This commit is contained in:
jxxghp
2025-06-17 16:11:55 +08:00
committed by GitHub
14 changed files with 9351 additions and 41769 deletions

View File

@@ -430,11 +430,12 @@
"name": "IMDb源",
"description": "让探索支持IMDb数据源。",
"labels": "探索",
"version": "1.3.1",
"version": "1.3.2",
"icon": "IMDb_IOS-OSX_App.png",
"author": "wumode",
"level": 1,
"history": {
"v1.3.2": "更新 API query hash",
"v1.3.1": "修复按日期排序错误",
"v1.3": "优化网络连接",
"v1.2": "推荐热门纪录片",
@@ -446,11 +447,12 @@
"name": "Clash Rule Provider",
"description": "随时为Clash添加一些额外的规则。",
"labels": "工具",
"version": "0.1.0",
"version": "1.0.0",
"icon": "Mihomo_Meta_A.png",
"author": "wumode",
"level": 1,
"history": {
"v1.0.0": "支持: 规则分页; 导入规则; 代理组; 附加出站代理; 按区域分组",
"v0.1.0": "新增ClashRuleProvider"
}
},

View File

@@ -1,23 +1,27 @@
import hashlib
import json
import re
import time
from datetime import datetime, timedelta
from typing import Any, Optional, List, Dict, Tuple, Union
import pytz
import time
import yaml
import hashlib
from fastapi import Body, Response
from datetime import datetime, timedelta
import pytz
import copy
import math
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.cron import CronTrigger
from fastapi import Body, Response
from app import schemas
from app.core.config import settings
from app.core.event import eventmanager
from app.log import logger
from app.plugins import _PluginBase
from app.plugins.clashruleprovider.clash_rule_parser import Action, RuleType, ClashRule, MatchRule, LogicRule
from app.plugins.clashruleprovider.clash_rule_parser import ClashRuleParser
from app.schemas.types import EventType
from app.schemas.types import EventType, NotificationType
from app.utils.http import RequestUtils
from app.plugins.clashruleprovider.clash_rule_parser import ClashRuleParser
from app.plugins.clashruleprovider.clash_rule_parser import Action, RuleType, ClashRule, MatchRule, LogicRule
from app.plugins.clashruleprovider.clash_rule_parser import ProxyGroupValidator
class ClashRuleProvider(_PluginBase):
@@ -26,10 +30,9 @@ class ClashRuleProvider(_PluginBase):
# 插件描述
plugin_desc = "随时为Clash添加一些额外的规则。"
# 插件图标
plugin_icon = ("https://raw.githubusercontent.com/wumode/MoviePilot-Plugins/"
"refs/heads/imdbsource_assets/icons/Mihomo_Meta_A.png")
plugin_icon = "Mihomo_Meta_A.png"
# 插件版本
plugin_version = "0.1.0"
plugin_version = "1.0.0"
# 插件作者
plugin_author = "wumode"
# 作者主页
@@ -53,13 +56,14 @@ class ClashRuleProvider(_PluginBase):
# Clash 面板密钥
_clash_dashboard_secret = None
# MoviePilot URL
_movie_pilot_url = None
_movie_pilot_url = ''
_cron = ''
_timeout = 10
_retry_times = 3
_filter_keywords = []
_auto_update_subscriptions = True
_ruleset_prefix = '📂<-'
_group_by_region = False
# 插件数据
_clash_config = None
@@ -68,17 +72,23 @@ class ClashRuleProvider(_PluginBase):
_rule_provider: Dict[str, Any] = {}
_subscription_info = {}
_ruleset_names: Dict[str, str] = {}
_proxy_groups = []
_extra_proxies = []
# protected variables
_clash_rule_parser = None
_ruleset_rule_parser = None
_custom_rule_sets = None
_scheduler: Optional[BackgroundScheduler] = None
_countries: Optional[List[Dict[str, str]]] = None
_proxy_groups_by_region: List[Dict[str, Any]] = []
def init_plugin(self, config: dict = None):
self._clash_config = self.get_data("clash_config")
self._ruleset_rules = self.get_data("ruleset_rules")
self._top_rules = self.get_data("top_rules")
self._proxy_groups = self.get_data("proxy_groups") or []
self._extra_proxies = self.get_data("extra_proxies") or []
self._subscription_info = self.get_data("subscription_info") or \
{"download": 0, "upload": 0, "total": 0, "expire": 0, "last_update": 0}
self._rule_provider = self.get_data("rule_provider") or {}
@@ -91,7 +101,7 @@ class ClashRuleProvider(_PluginBase):
self._clash_dashboard_url = config.get("clash_dashboard_url")
self._clash_dashboard_secret = config.get("clash_dashboard_secret")
self._movie_pilot_url = config.get("movie_pilot_url")
if self._movie_pilot_url[-1] == '/':
if self._movie_pilot_url and self._movie_pilot_url[-1] == '/':
self._movie_pilot_url = self._movie_pilot_url[:-1]
self._cron = config.get("cron_string")
self._timeout = config.get("timeout")
@@ -99,9 +109,15 @@ class ClashRuleProvider(_PluginBase):
self._filter_keywords = config.get("filter_keywords")
self._ruleset_prefix = config.get("ruleset_prefix", "Custom_")
self._auto_update_subscriptions = config.get("auto_update_subscriptions")
self._group_by_region = config.get("group_by_region")
self._clash_rule_parser = ClashRuleParser()
self._ruleset_rule_parser = ClashRuleParser()
if self._enabled:
if self._group_by_region:
self._countries = ClashRuleProvider.__load_countries(
f"{settings.ROOT_PATH}/app/plugins/clashruleprovider/countries.json")
self._proxy_groups_by_region = ClashRuleProvider.__group_by_region(self._countries,
self._clash_config.get('proxies'))
self.__parse_config()
self._scheduler = BackgroundScheduler(timezone=settings.TZ)
self._scheduler.start()
@@ -124,7 +140,7 @@ class ClashRuleProvider(_PluginBase):
"description": "测试连接"
},
{
"path": "/clash_outbound",
"path": "/clash-outbound",
"endpoint": self.get_clash_outbound,
"methods": ["GET"],
"auth": "bear",
@@ -204,13 +220,61 @@ class ClashRuleProvider(_PluginBase):
"description": "update clash rules"
},
{
"path": "/rule_providers",
"path": "/rule-providers",
"endpoint": self.get_rule_providers,
"methods": ["GET"],
"auth": "bear",
"summary": "update rule providers",
"description": "update rule providers"
},
{
"path": "/extra-proxies",
"endpoint": self.get_extra_proxies,
"methods": ["GET"],
"auth": "bear",
"summary": "extra proxies",
"description": "extra proxies"
},
{
"path": "/extra-proxies",
"endpoint": self.delete_extra_proxy,
"methods": ["DELETE"],
"auth": "bear",
"summary": "delete a extra proxy",
"description": "delete a extra proxy"
},
{
"path": "/extra-proxies",
"endpoint": self.add_extra_proxies,
"methods": ["POST"],
"auth": "bear",
"summary": "add extra proxies",
"description": "add extra proxies"
},
{
"path": "/proxy-groups",
"endpoint": self.get_proxy_groups,
"methods": ["GET"],
"auth": "bear",
"summary": "proxy groups",
"description": "proxy groups"
},
{
"path": "/proxy-group",
"endpoint": self.delete_proxy_group,
"methods": ["DELETE"],
"auth": "bear",
"summary": "delete a proxy group",
"description": "delete a proxy group"
},
{
"path": "/proxy-group",
"endpoint": self.add_proxy_group,
"methods": ["POST"],
"auth": "bear",
"summary": "add a proxy group",
"description": "add a proxy group"
},
{
"path": "/ruleset",
"endpoint": self.get_ruleset,
@@ -218,6 +282,14 @@ class ClashRuleProvider(_PluginBase):
"summary": "update rule providers",
"description": "update rule providers"
},
{
"path": "/import",
"endpoint": self.import_rules,
"methods": ["POST"],
"auth": "bear",
"summary": "import top rules",
"description": "import top rules"
},
{
"path": "/config",
"endpoint": self.get_clash_config,
@@ -254,7 +326,7 @@ class ClashRuleProvider(_PluginBase):
if self.get_state() and self._auto_update_subscriptions:
return [{
"id": "ClashRuleProvider",
"name": "Clash Rule Provider 服务",
"name": "定时更新订阅",
"trigger": CronTrigger.from_crontab(self._cron),
"func": self.update_subscription_service,
"kwargs": {}
@@ -287,47 +359,47 @@ class ClashRuleProvider(_PluginBase):
self.save_data('subscription_info', self._subscription_info)
self.save_data('ruleset_names', self._ruleset_names)
self.save_data('rule_provider', self._rule_provider)
self.save_data('proxy_groups', self._proxy_groups)
self.save_data('extra_proxies', self._extra_proxies)
def __parse_config(self):
if not self._top_rules:
return
if self._top_rules is None:
self._top_rules = []
if self._ruleset_rules is None:
self._ruleset_rules = []
self._clash_rule_parser.parse_rules_from_list(self._top_rules)
if not self._ruleset_rules:
return
self._ruleset_rule_parser.parse_rules_from_list(self._ruleset_rules)
def test_connectivity(self, params: Dict[str, Any]) -> Dict[str, Any]:
def test_connectivity(self, params: Dict[str, Any]) -> schemas.Response:
if not self._enabled:
return {"success": False, "message": ""}
return schemas.Response(success=False, message="")
if not params.get('clash_dashboard_url') or not params.get('clash_dashboard_secret') \
or not params.get('sub_link'):
return {"success": False, "message": "missing params"}
return schemas.Response(success=True, message="missing params")
clash_version_url = f"{params.get('clash_dashboard_url')}/version"
ret = RequestUtils(accept_type="application/json",
headers={"authorization": f"Bearer {params.get('clash_dashboard_secret')}"}
).get(clash_version_url)
if not ret:
return {"success": False, "message": "无法连接到Clash"}
return schemas.Response(success=False, message="无法连接到Clash")
ret = RequestUtils(accept_type="text/html",
proxies=settings.PROXY if self._proxy else None
).get(params.get('sub_link'))
if not ret:
return {"success": False, "message": f"Unable to get {params.get('sub_link')}"}
return {"success": True, "message": "测试连接成功"}
return schemas.Response(success=False, message=f"Unable to get {params.get('sub_link')}")
return schemas.Response(success=True, message="测试连接成功")
def get_ruleset(self, name):
    """Serve one rule-set as a Clash rule-provider YAML payload.

    :param name: externally visible rule-set name; mapped back to the
        internal name via ``self._ruleset_names``.
    :return: a FastAPI ``Response`` with ``text/yaml`` content, or ``None``
        when the name is unknown.
    """
    if not self._ruleset_names.get(name):
        return None
    # Map the public name to the internal rule-set name before the lookup.
    name = self._ruleset_names.get(name)
    rules = self.__get_ruleset(name)
    # Clash rule-providers expect a top-level "payload" list.
    res = yaml.dump({"payload": rules}, allow_unicode=True)
    return Response(content=res, media_type="text/yaml")
def get_clash_outbound(self):
def get_clash_outbound(self) -> schemas.Response:
outbound = self.clash_outbound(self._clash_config)
return {"success": True, "message": None, "data": {"outbound": outbound}}
return schemas.Response(success=True, message="", data={"outbound": outbound})
def get_status(self):
rule_size = len(self._clash_config.get("rules", [])) if self._clash_config else 0
@@ -342,7 +414,7 @@ class ClashRuleProvider(_PluginBase):
def get_clash_config(self):
config = self.clash_config()
if not config:
return {"success": False, "message": ""}
return {'success': False, "message": ''}
res = yaml.dump(config, allow_unicode=True)
headers = {'Subscription-Userinfo': f'upload={self._subscription_info["upload"]}; '
f'download={self._subscription_info["download"]}; '
@@ -350,22 +422,35 @@ class ClashRuleProvider(_PluginBase):
f'expire={self._subscription_info["expire"]}'}
return Response(headers=headers, content=res, media_type="text/yaml")
def get_rules(self, rule_type: str) -> Dict[str, Any]:
def get_rules(self, rule_type: str) -> schemas.Response:
if rule_type == 'ruleset':
return {"success": True, "message": None, "data": {"rules": self._ruleset_rule_parser.to_dict()}}
return {"success": True, "message": None, "data": {"rules": self._clash_rule_parser.to_dict()}}
return schemas.Response(success=True, message='', data={'rules': self._ruleset_rule_parser.to_dict()})
return schemas.Response(success=True, message='', data={'rules': self._clash_rule_parser.to_dict()})
def delete_rule(self, params: dict = Body(...)):
def delete_rule(self, params: dict = Body(...)) -> schemas.Response:
if not self._enabled:
return {"success": False, "message": ""}
return schemas.Response(success=False, message='')
if params.get('type') == 'ruleset':
res = self.delete_rule_by_priority(params.get('priority'), self._ruleset_rule_parser)
if res:
self.__add_notification_job(
f"{self._ruleset_prefix}{res.action.value if isinstance(res.action, Action) else res.action}")
else:
res = self.delete_rule_by_priority(params.get('priority'), self._clash_rule_parser)
return {"success": res, "message": None}
self.delete_rule_by_priority(params.get('priority'), self._clash_rule_parser)
return schemas.Response(success=True, message='')
def import_rules(self, params: Dict[str, Any]) -> schemas.Response:
    """Import rules posted by the frontend and append them to the top rules.

    :param params: request body; ``type`` selects the format (only ``'YAML'``
        is parsed) and ``payload`` carries the raw text.
    :return: success response; failure with a message on YAML errors.
    """
    if not self._enabled:
        return schemas.Response(success=False, message='')
    rules: List[str] = []
    if params.get('type') == 'YAML':
        try:
            # .get(...) or '' — the original indexed params["payload"] and
            # raised KeyError when the field was omitted.
            imported_rules = yaml.safe_load(params.get("payload") or '')
        except yaml.YAMLError as err:
            return schemas.Response(success=False, message=f'YAML error: {err}')
        # Guard non-mapping payloads (plain list / scalar / empty document):
        # calling .get on them raised AttributeError before.
        if not isinstance(imported_rules, dict):
            return schemas.Response(success=False, message='Invalid YAML: expected a mapping with a "rules" key')
        rules = imported_rules.get("rules", []) or []
    self.append_top_rules(rules)
    return schemas.Response(success=True)
def reorder_rules(self, params: Dict[str, Any]):
if not self._enabled:
@@ -413,31 +498,105 @@ class ClashRuleProvider(_PluginBase):
res = self.add_rule_by_priority(params.get('rule_data'), self._clash_rule_parser)
return {"success": bool(res), "message": None}
def get_subscription(self):
def get_subscription(self) -> schemas.Response:
if not self._sub_links:
return None
return {"success": True, "message": None, "data": {"url": self._sub_links[0]}}
return schemas.Response(success=False, message=f"Invalid subscription links: {self._sub_links}")
return schemas.Response(success=True, data={"url": self._sub_links[0]})
def update_subscription(self, params: Dict[str, Any]):
if not self._enabled:
return {"success": False, "message": ""}
return schemas.Response(success=False, message="")
url = params.get('url')
if not url:
return {"success": False, "message": "missing params"}
res = self.update_subscription_service()
return schemas.Response(success=False, message="missing params")
res = self.__update_subscription()
if not res:
return {"success": True, "message": f"订阅链接 {self._sub_links[0]} 更新失败"}
return {"success": True, "message": "订阅更新成功"}
return schemas.Response(success=False, message=f"订阅链接 {self._sub_links[0]} 更新失败")
return schemas.Response(success=True, message='订阅更新成功')
def get_rule_providers(self):
return {"success": True, "message": None, "data": self.rule_providers()}
def get_rule_providers(self) -> schemas.Response:
return schemas.Response(success=True, data=self.rule_providers())
@staticmethod
def clash_outbound(clash_config: Dict[str, Any]) -> Optional[List]:
def get_proxy_groups(self) -> schemas.Response:
    """Return the custom proxy groups stored by this plugin."""
    payload = {'proxy_groups': self._proxy_groups}
    return schemas.Response(success=True, data=payload)
def get_extra_proxies(self) -> schemas.Response:
    """Return the user-added extra proxy nodes."""
    payload = {'extra_proxies': self._extra_proxies}
    return schemas.Response(success=True, data=payload)
def add_extra_proxies(self, params: Dict[str, Any]):
    """Parse proxies from a posted YAML document and add the valid new ones.

    Skips proxies whose name is missing or already taken by any outbound,
    and proxies missing a required field. Persists the result either way.

    :param params: request body with ``type`` ('YAML') and ``payload``.
    :return: success response; failure with a message on YAML errors.
    """
    if not self._enabled:
        return schemas.Response(success=False, message='')
    extra_proxies: List = []
    if params.get('type') == 'YAML':
        try:
            imported_proxies = yaml.safe_load(params.get("payload") or '')
        except yaml.YAMLError as err:
            return schemas.Response(success=False, message=f'YAML error: {err}')
        # Non-mapping documents previously crashed on .get with AttributeError.
        if isinstance(imported_proxies, dict):
            extra_proxies = imported_proxies.get("proxies", []) or []
    # Collect existing outbound names once (the original rebuilt the whole
    # outbound list per proxy — O(n^2)); keep the set updated as proxies are
    # accepted so duplicates within the same import batch are still rejected.
    existing_names = {x.get('name') for x in self.clash_outbound(self._clash_config)}
    required_fields = {'name', 'type', 'server', 'port'}
    for proxy in extra_proxies:
        name = proxy.get('name')
        if not name or name in existing_names:
            # Use the local `name`: proxy['name'] raised KeyError when the key was absent.
            logger.warning(f"The proxy name {name} already exists. Skipping...")
            continue
        if not required_fields.issubset(proxy.keys()):
            missing = required_fields - proxy.keys()
            logger.error(f"Required field is missing: {missing}")
            continue
        self._extra_proxies.append(proxy)
        existing_names.add(name)
    self.save_data('extra_proxies', self._extra_proxies)
    return schemas.Response(success=True)
def delete_extra_proxy(self, params: dict = Body(...)) -> schemas.Response:
    """Remove an extra proxy node by its name and persist the list."""
    if not self._enabled:
        return schemas.Response(success=False, message='')
    target = params.get('name')
    remaining = []
    for proxy in self._extra_proxies:
        if proxy.get('name') != target:
            remaining.append(proxy)
    self._extra_proxies = remaining
    self.save_data('extra_proxies', self._extra_proxies)
    return schemas.Response(success=True, message='')
def add_proxy_group(self, params: Dict[str, Any]) -> schemas.Response:
    """Validate and store one custom proxy group.

    Empty-string and null fields are stripped before saving so the
    generated YAML stays clean.

    :param params: request body with a ``proxy_group`` mapping.
    :return: success response, or failure with a diagnostic message.
    """
    if not self._enabled:
        return schemas.Response(success=False, message='')
    if params.get('proxy_group') is None:
        return schemas.Response(success=False, message="Missing params")
    item = params['proxy_group']
    name = item.get('name')
    if not name:
        # Previously a missing name was misreported as "already exists".
        return schemas.Response(success=False, message="Missing proxy group name")
    if any(x.get('name') == name for x in self._proxy_groups):
        return schemas.Response(success=False, message=f"The proxy group name {name} already exists")
    try:
        ProxyGroupValidator.parse_obj(item)
    except Exception as e:
        error_message = f"Failed to parse proxy group: Invalid data={item}, error={repr(e)}"
        logger.error(error_message)
        return schemas.Response(success=False, message=error_message)
    # Drop nulls and empty strings (isinstance instead of `type(v) is str`).
    new_item = {k: v for k, v in item.items()
                if v is not None and not (isinstance(v, str) and not v)}
    self._proxy_groups.append(new_item)
    self.save_data('proxy_groups', self._proxy_groups)
    return schemas.Response(success=True)
def delete_proxy_group(self, params: dict = Body(...)) -> schemas.Response:
    """Remove a custom proxy group by name and persist the list."""
    if not self._enabled:
        return schemas.Response(success=False, message='')
    target = params.get('name')
    self._proxy_groups = list(filter(lambda g: g.get('name') != target, self._proxy_groups))
    self.save_data('proxy_groups', self._proxy_groups)
    return schemas.Response(success=True, message='')
def clash_outbound(self, clash_config: Dict[str, Any]) -> Optional[List]:
if not clash_config:
return []
outbound = [{'name': proxy_group.get("name")} for proxy_group in clash_config.get("proxy-groups")]
outbound.extend([{'name': proxy.get("name")} for proxy in clash_config.get("proxies")])
if self._group_by_region:
outbound.extend([{'name': proxy_group.get("name")} for proxy_group in self._proxy_groups_by_region])
outbound.extend([{'name': proxy.get("name")} for proxy in self._extra_proxies])
outbound.extend([{'name': proxy_group.get("name")} for proxy_group in self._proxy_groups])
return outbound
def rule_providers(self) -> Optional[Dict[str, Any]]:
@@ -490,6 +649,17 @@ class ClashRuleProvider(_PluginBase):
if not self._clash_rule_parser.has_rule(clash_rule):
self._clash_rule_parser.insert_rule_at_priority(clash_rule, 0)
def append_top_rules(self, rules: List[str]) -> None:
    """Parse raw rule lines, append every valid one to the top rules, then persist."""
    parsed_rules = []
    for line in rules:
        parsed = ClashRuleParser.parse_rule_line(line)
        if parsed:
            parsed_rules.append(parsed)
    self._clash_rule_parser.append_rules(parsed_rules)
    self.__save_data()
def update_rule_by_priority(self, rule: Dict[str, Any], rule_parser: ClashRuleParser) -> bool:
if not isinstance(rule.get("priority"), int):
return False
@@ -522,8 +692,40 @@ class ClashRuleProvider(_PluginBase):
self.__save_data()
return res
@eventmanager.register(EventType.PluginAction)
def update_subscription_service(self) -> bool:
@staticmethod
def format_bytes(bytes):
    """Render a byte count as a human-readable string, e.g. ``1536`` -> ``'1.50 KB'``.

    :param bytes: byte count; values <= 0 render as ``'0 B'`` (the original
        only special-cased 0, so negatives hit a math domain error).
    :return: formatted string with two decimals and a binary-unit suffix.
    """
    if bytes <= 0:
        return '0 B'
    k = 1024
    sizes = ['B', 'KB', 'MB', 'GB', 'TB']
    # Clamp the unit index: values >= 1024**5 previously raised IndexError.
    i = min(int(math.log(bytes, k)), len(sizes) - 1)
    return f"{bytes / math.pow(k, i):.2f} {sizes[i]}"
@staticmethod
def format_expire_time(timestamp):
    """Describe a unix expiry timestamp as whole days remaining (Chinese text)."""
    remaining_days = (timestamp - int(time.time())) // 86400
    if remaining_days > 0:
        return f"{remaining_days}天后过期"
    return "已过期"
def update_subscription_service(self):
    """Refresh the subscription and, when enabled, notify the user with traffic stats."""
    success = self.__update_subscription()
    if not success:
        message = "订阅更新失败"
    else:
        info = self._subscription_info
        used = info['download'] + info['upload']
        remaining = info['total'] - used
        message = (f"订阅更新成功\n"
                   f"已用流量: {ClashRuleProvider.format_bytes(used)}\n"
                   f"剩余流量: {ClashRuleProvider.format_bytes(remaining)}\n"
                   f"总量: {ClashRuleProvider.format_bytes(info['total'])}\n"
                   f"过期时间: {ClashRuleProvider.format_expire_time(info['expire'])}")
    if self._notify:
        self.post_message(title=f"{self.plugin_name}",
                          mtype=NotificationType.Plugin,
                          text=f"{message}")
def __update_subscription(self) -> bool:
if not self._sub_links:
return False
url = self._sub_links[0]
@@ -556,6 +758,50 @@ class ClashRuleProvider(_PluginBase):
headers={"authorization": f"Bearer {self._clash_dashboard_secret}"}
).put(url)
@staticmethod
def __load_countries(file_path: str) -> List:
    """Load the country metadata list from a JSON file.

    :param file_path: path to the bundled countries.json.
    :return: the parsed list, or [] on any read/parse error.
    """
    try:
        # Context manager so the descriptor is always closed — the original
        # json.load(open(...)) leaked the file handle.
        with open(file_path, encoding='utf-8') as f:
            return json.load(f)
    except Exception as e:
        logger.error(f"插件加载错误:{e}")
        return []
@staticmethod
def __group_by_region(countries: List, proxies) -> List[Dict[str, Any]]:
    """Group proxy nodes into per-continent 'select' proxy groups.

    Also builds an ``AsiaExceptChina`` group from the Asian nodes minus
    mainland-China/Hong Kong nodes.

    :param countries: country metadata from countries.json.
    :param proxies: proxy node mappings; only ``name`` is read.
    :return: list of non-empty proxy-group mappings.
    """
    continents_nodes = {'Asia': [], 'Europe': [], 'SouthAmerica': [], 'NorthAmerica': [], 'Africa': [],
                        'Oceania': [], 'AsiaExceptChina': []}
    for proxy_node in proxies:
        continent = ClashRuleProvider.__continent_name_from_node(countries, proxy_node['name'])
        if not continent:
            continue
        continents_nodes[continent].append(proxy_node['name'])
    # Fill the AsiaExceptChina bucket BEFORE building groups so it obeys the
    # same skip-empty rule: the original appended it unconditionally after the
    # loop, emitting a group with an empty `proxies` list, which Clash rejects.
    for node_name in continents_nodes['Asia']:
        if any(keyword in node_name for keyword in ('中国', '香港', 'CN')):
            continue
        continents_nodes['AsiaExceptChina'].append(node_name)
    proxy_groups = []
    for group_name, node_names in continents_nodes.items():
        if node_names:
            proxy_groups.append({'name': group_name, 'type': 'select', 'proxies': node_names})
    return proxy_groups
@staticmethod
def __continent_name_from_node(countries: List[Dict[str, str]], node_name: str) -> Optional[str]:
    """Infer the continent group name for a proxy node from its display name.

    Matches the node name against each country's Chinese or English name and
    maps that country's continent (Chinese) to its English group name.

    :return: the English continent name, or None when nothing matches or the
        continent is not in the map.
    """
    continents_names = {'欧洲': 'Europe',
                        '亚洲': 'Asia',
                        '大洋洲': 'Oceania',
                        '非洲': 'Africa',
                        '北美洲': 'NorthAmerica',
                        '南美洲': 'SouthAmerica'}
    node_name_lower = node_name.lower()  # hoisted out of the loop
    for country in countries:
        chinese = country.get('chinese')
        english = country.get('english')
        if (chinese and chinese in node_name) or (english and english.lower() in node_name_lower):
            # .get(): a continent outside the map (e.g. 南极洲/Antarctica) or a
            # malformed entry no longer raises KeyError.
            return continents_names.get(country.get('continent'))
    return None
def __add_notification_job(self, ruleset: str):
if ruleset in self._rule_provider:
self._scheduler.add_job(self.notify_clash, "date",
@@ -581,20 +827,43 @@ class ClashRuleProvider(_PluginBase):
removed_proxies = []
for proxy_group in clash_config.get("proxy-groups", []):
proxy_group['proxies'] = [x for x in proxy_group.get('proxies') if x not in removed_proxies]
clash_config["proxy-groups"] = [x for x in clash_config.get("proxy-groups", []) if x.get("proxies")]
# clash_config["proxy-groups"] = [x for x in clash_config.get("proxy-groups", []) if x.get("proxies")]
return clash_config
def clash_config(self) -> Optional[Dict[str, Any]]:
if not self._clash_config:
return
return None
self.__insert_ruleset()
self._top_rules = self._clash_rule_parser.to_string()
clash_config = self._clash_config.copy()
clash_config = copy.deepcopy(self._clash_config)
# 添加代理组
proxy_groups = copy.deepcopy(self._proxy_groups)
if proxy_groups:
if clash_config.get("proxy-groups"):
clash_config['proxy-groups'].extend(proxy_groups)
else:
clash_config['proxy-groups'] = proxy_groups
# 添加额外节点
if clash_config.get('proxies'):
clash_config['proxies'].extend(self._extra_proxies)
else:
clash_config['proxies'] = copy.deepcopy(self._extra_proxies)
# 添加按大洲代理组
if self._group_by_region:
if self._proxy_groups_by_region:
if clash_config.get("proxy-groups"):
clash_config['proxy-groups'].extend(self._proxy_groups_by_region)
else:
clash_config['proxy-groups'] = copy.deepcopy(self._proxy_groups_by_region)
top_rules = []
for rule in self._clash_rule_parser.rules:
if (not isinstance(rule.action, Action) and
not len([x for x in self.clash_outbound(clash_config) if rule.action == x.get("name", '')])):
logger.warn(f"出站 {rule.action} 不存在, {rule.raw_rule}")
logger.warn(f"出站 {rule.action} 不存在, {rule.raw_rule}")
continue
top_rules.append(rule.raw_rule)
clash_config["rules"] = self._top_rules + clash_config.get("rules", [])
@@ -616,9 +885,15 @@ class ClashRuleProvider(_PluginBase):
clash_config['rule-providers'].update(self._rule_provider)
else:
clash_config['rule-providers'] = self._rule_provider
key_to_delete = []
for key, item in self._ruleset_names.items():
if item not in clash_config['rule-providers']:
del self._ruleset_names[key]
key_to_delete.append(key)
for key in key_to_delete:
del self._ruleset_names[key]
if not clash_config.get("rule-providers"):
del clash_config["rule-providers"]
self.save_data('ruleset_names', self._ruleset_names)
self.save_data('rule_provider', self._rule_provider)
return clash_config

View File

@@ -1,8 +1,96 @@
import re
from typing import List, Dict, Any, Optional, Union, Callable
from typing import List, Dict, Any, Optional, Union, Callable, Literal
from dataclasses import dataclass
from enum import Enum
from pydantic import BaseModel, Field, validator
class ProxyGroupBase(BaseModel):
    """Common fields shared by every Clash (mihomo) proxy-group type.

    Concrete group models subclass this and pin their ``type`` literal; the
    discriminated union of those subclasses is validated elsewhere.
    """
    # Required field
    name: str = Field(..., description="The name of the proxy group.")

    # Proxy and provider references
    proxies: Optional[List[str]] = Field(None, description="References to outbound proxies or other proxy groups.")
    use: Optional[List[str]] = Field(None, description="References to proxy provider sets.")

    # Health check fields
    url: Optional[str] = Field(None, description="Health check test address.")
    interval: Optional[int] = Field(None, description="Health check interval in seconds.")
    lazy: bool = Field(True, description="If not selected, no health checks are performed.")
    timeout: Optional[int] = Field(5000, description="Health check timeout in milliseconds.")
    max_failed_times: Optional[int] = Field(5, description="Maximum number of failures before a forced health check.")
    expected_status: Optional[str] = Field(None, description="Expected HTTP response status code for health checks.")

    # Network and routing fields
    disable_udp: Optional[bool] = Field(False, description="Disables UDP for this proxy group.")
    interface_name: Optional[str] = Field(None, description="DEPRECATED. Specifies the outbound interface.")
    routing_mark: Optional[int] = Field(None, description="DEPRECATED. The routing mark for outbound connections.")

    # Dynamic proxy inclusion
    include_all: Optional[bool] = Field(False, description="Includes all outbound proxies and proxy sets.")
    include_all_proxies: Optional[bool] = Field(False, description="Includes all outbound proxies.")
    include_all_providers: Optional[bool] = Field(False, description="Includes all proxy provider sets.")

    # Filtering
    filter: Optional[str] = Field(None, description="Regex to filter nodes from providers.")
    exclude_filter: Optional[str] = Field(None, description="Regex to exclude nodes.")
    exclude_type: Optional[str] = Field(None, description="Exclude nodes by adapter type, separated by '|'.")

    # UI fields
    hidden: Optional[bool] = Field(False, description="Hides the proxy group in the API.")
    icon: Optional[str] = Field(None, description="Icon string for the proxy group, for UI use.")

    @validator('expected_status')
    def validate_expected_status(cls, v: Optional[str]) -> Optional[str]:
        """Validate ``expected-status``: ``*``, one code, a range, or a '/'-separated list."""
        if v is None or v == '*':
            return v
        # Overall shape: three-digit codes joined by '-' (range) or '/' (alternatives).
        pattern = re.compile(r'^\d{3}([-/]\d{3})*$')
        if not pattern.match(v):
            raise ValueError("Invalid format for expected-status.")
        parts = re.split(r'[/]', v)
        for part in parts:
            if '-' in part:
                # Range: both bounds must be valid HTTP statuses and ordered.
                start, end = part.split('-')
                if not (start.isdigit() and end.isdigit() and 100 <= int(start) < 600 and 100 <= int(end) < 600 and int(start) <= int(end)):
                    raise ValueError(f"Invalid status code range: {part}")
            elif not (part.isdigit() and 100 <= int(part) < 600):
                raise ValueError(f"Invalid status code: {part}")
        return v
class SelectGroup(ProxyGroupBase):
    """Manually selected proxy group (``type: select``)."""
    type: Literal['select']
class RelayGroup(ProxyGroupBase):
    """Chained relay proxy group (``type: relay``)."""
    type: Literal['relay']
class FallbackGroup(ProxyGroupBase):
    """Fallback proxy group (``type: fallback``)."""
    type: Literal['fallback']
class UrlTestGroup(ProxyGroupBase):
    """Latency-tested proxy group (``type: url-test``)."""
    type: Literal['url-test']
    tolerance: Optional[int] = Field(None, description="proxies switch tolerance, measured in milliseconds (ms).")
class LoadBalanceGroup(ProxyGroupBase):
    """Load-balancing proxy group (``type: load-balance``)."""
    type: Literal['load-balance']
    strategy: Optional[Literal['round-robin', 'consistent-hashing', 'sticky-sessions']] = Field(
        'round-robin',
        description="Load balancing strategy."
    )


# --- Discriminated Union ---
# Union of every concrete group model, discriminated by the `type` literal.
ProxyGroupUnion = Union[SelectGroup, RelayGroup, FallbackGroup, UrlTestGroup, LoadBalanceGroup]
class ProxyGroupValidator(BaseModel):
    """Pydantic V1 validator for a single proxy-group mapping.

    Uses a ``__root__`` field to validate the discriminated union, since
    pydantic v1 cannot parse a bare union at the top level.
    """
    __root__: ProxyGroupUnion
class RuleType(Enum):
"""Enumeration of all supported Clash rule types"""
@@ -282,8 +370,7 @@ class ClashRuleParser:
return self.rules
@staticmethod
def validate_rule(rule: ClashRule) -> bool:
def validate_rule(self, rule: ClashRule) -> bool:
"""Validate a parsed rule"""
try:
# Basic validation based on rule type
@@ -306,8 +393,7 @@ class ClashRuleParser:
return True
except Exception as e:
print(f"Invalid rule '{rule.raw_rule}': {e}")
except Exception:
return False
def to_string(self) -> List[str]:
@@ -370,6 +456,15 @@ class ClashRuleParser:
# Re-sort rules to maintain order
self.rules.sort(key=lambda r: r.priority)
def append_rules(self, rules: List[Union[ClashRule, LogicRule, MatchRule]]) -> None:
    """Append rules after the current highest priority, keeping the list sorted."""
    next_priority = (max(existing.priority for existing in self.rules) if self.rules else 0) + 1
    for offset, rule in enumerate(rules):
        rule.priority = next_priority + offset
        self.rules.append(rule)
    self.rules.sort(key=lambda r: r.priority)
def insert_rule_at_priority(self, rule: Union[ClashRule, LogicRule, MatchRule], priority: int):
"""Insert a rule at a specific priority position, adjusting other rules"""
# Adjust priorities of existing rules

File diff suppressed because it is too large. (Load Diff)

View File

@@ -77,6 +77,7 @@ const defaultConfig = {
notify: false,
auto_update_subscriptions: true,
ruleset_prefix: '📂<-',
group_by_region: false,
};
// 响应式配置对象
@@ -290,7 +291,7 @@ return (_ctx, _cache) => {
}, {
default: _withCtx(() => [
_createVNode(_component_v_icon, { left: "" }, {
default: _withCtx(() => _cache[18] || (_cache[18] = [
default: _withCtx(() => _cache[19] || (_cache[19] = [
_createTextVNode("mdi-close")
])),
_: 1
@@ -301,7 +302,7 @@ return (_ctx, _cache) => {
]),
default: _withCtx(() => [
_createVNode(_component_v_card_title, null, {
default: _withCtx(() => _cache[17] || (_cache[17] = [
default: _withCtx(() => _cache[18] || (_cache[18] = [
_createTextVNode("Clash Rule Provider 插件配置")
])),
_: 1
@@ -327,11 +328,11 @@ return (_ctx, _cache) => {
ref_key: "form",
ref: form,
modelValue: isFormValid.value,
"onUpdate:modelValue": _cache[15] || (_cache[15] = $event => ((isFormValid).value = $event)),
"onUpdate:modelValue": _cache[16] || (_cache[16] = $event => ((isFormValid).value = $event)),
onSubmit: _withModifiers(saveConfig, ["prevent"])
}, {
default: _withCtx(() => [
_cache[28] || (_cache[28] = _createElementVNode("div", { class: "text-subtitle-1 font-weight-bold mt-4 mb-2" }, "基本设置", -1)),
_cache[29] || (_cache[29] = _createElementVNode("div", { class: "text-subtitle-1 font-weight-bold mt-4 mb-2" }, "基本设置", -1)),
_createVNode(_component_v_row, null, {
default: _withCtx(() => [
_createVNode(_component_v_col, {
@@ -388,7 +389,7 @@ return (_ctx, _cache) => {
]),
_: 1
}),
_cache[29] || (_cache[29] = _createElementVNode("div", { class: "text-subtitle-1 font-weight-bold mt-4 mb-2" }, "订阅配置", -1)),
_cache[30] || (_cache[30] = _createElementVNode("div", { class: "text-subtitle-1 font-weight-bold mt-4 mb-2" }, "订阅配置", -1)),
_createVNode(_component_v_row, null, {
default: _withCtx(() => [
_createVNode(_component_v_col, { cols: "12" }, {
@@ -454,7 +455,7 @@ return (_ctx, _cache) => {
]),
_: 1
}),
_cache[30] || (_cache[30] = _createElementVNode("div", { class: "text-subtitle-1 font-weight-bold mt-4 mb-2" }, "Clash 面板设置", -1)),
_cache[31] || (_cache[31] = _createElementVNode("div", { class: "text-subtitle-1 font-weight-bold mt-4 mb-2" }, "Clash 面板设置", -1)),
_createVNode(_component_v_row, null, {
default: _withCtx(() => [
_createVNode(_component_v_col, { cols: "12" }, {
@@ -471,7 +472,7 @@ return (_ctx, _cache) => {
}, {
"prepend-inner": _withCtx(() => [
_createVNode(_component_v_icon, { color: "primary" }, {
default: _withCtx(() => _cache[19] || (_cache[19] = [
default: _withCtx(() => _cache[20] || (_cache[20] = [
_createTextVNode("mdi-web")
])),
_: 1
@@ -498,7 +499,7 @@ return (_ctx, _cache) => {
}, {
"prepend-inner": _withCtx(() => [
_createVNode(_component_v_icon, { color: "warning" }, {
default: _withCtx(() => _cache[20] || (_cache[20] = [
default: _withCtx(() => _cache[21] || (_cache[21] = [
_createTextVNode("mdi-key")
])),
_: 1
@@ -512,7 +513,7 @@ return (_ctx, _cache) => {
]),
_: 1
}),
_cache[31] || (_cache[31] = _createElementVNode("div", { class: "text-subtitle-1 font-weight-bold mt-4 mb-2" }, "MoviePilot 设置", -1)),
_cache[32] || (_cache[32] = _createElementVNode("div", { class: "text-subtitle-1 font-weight-bold mt-4 mb-2" }, "MoviePilot 设置", -1)),
_createVNode(_component_v_row, null, {
default: _withCtx(() => [
_createVNode(_component_v_col, { cols: "12" }, {
@@ -529,7 +530,7 @@ return (_ctx, _cache) => {
}, {
"prepend-inner": _withCtx(() => [
_createVNode(_component_v_icon, { color: "success" }, {
default: _withCtx(() => _cache[21] || (_cache[21] = [
default: _withCtx(() => _cache[22] || (_cache[22] = [
_createTextVNode("mdi-movie")
])),
_: 1
@@ -543,7 +544,7 @@ return (_ctx, _cache) => {
]),
_: 1
}),
_cache[32] || (_cache[32] = _createElementVNode("div", { class: "text-subtitle-1 font-weight-bold mt-4 mb-2" }, "执行设置", -1)),
_cache[33] || (_cache[33] = _createElementVNode("div", { class: "text-subtitle-1 font-weight-bold mt-4 mb-2" }, "执行设置", -1)),
_createVNode(_component_v_row, null, {
default: _withCtx(() => [
_createVNode(_component_v_col, { cols: "12" }, {
@@ -583,7 +584,7 @@ return (_ctx, _cache) => {
}, {
"prepend-inner": _withCtx(() => [
_createVNode(_component_v_icon, { color: "info" }, {
default: _withCtx(() => _cache[22] || (_cache[22] = [
default: _withCtx(() => _cache[23] || (_cache[23] = [
_createTextVNode("mdi-clock-outline")
])),
_: 1
@@ -615,7 +616,7 @@ return (_ctx, _cache) => {
}, {
"prepend-inner": _withCtx(() => [
_createVNode(_component_v_icon, { color: "warning" }, {
default: _withCtx(() => _cache[23] || (_cache[23] = [
default: _withCtx(() => _cache[24] || (_cache[24] = [
_createTextVNode("mdi-timer")
])),
_: 1
@@ -646,7 +647,7 @@ return (_ctx, _cache) => {
}, {
"prepend-inner": _withCtx(() => [
_createVNode(_component_v_icon, { color: "info" }, {
default: _withCtx(() => _cache[24] || (_cache[24] = [
default: _withCtx(() => _cache[25] || (_cache[25] = [
_createTextVNode("mdi-refresh")
])),
_: 1
@@ -670,12 +671,12 @@ return (_ctx, _cache) => {
_createVNode(_component_v_expansion_panel_title, null, {
default: _withCtx(() => [
_createVNode(_component_v_icon, { class: "mr-2" }, {
default: _withCtx(() => _cache[25] || (_cache[25] = [
default: _withCtx(() => _cache[26] || (_cache[26] = [
_createTextVNode("mdi-cog")
])),
_: 1
}),
_cache[26] || (_cache[26] = _createTextVNode(" 高级选项 "))
_cache[27] || (_cache[27] = _createTextVNode(" 高级选项 "))
]),
_: 1
}),
@@ -683,7 +684,10 @@ return (_ctx, _cache) => {
default: _withCtx(() => [
_createVNode(_component_v_row, null, {
default: _withCtx(() => [
_createVNode(_component_v_col, { cols: "12" }, {
_createVNode(_component_v_col, {
cols: "12",
md: "6"
}, {
default: _withCtx(() => [
_createVNode(_component_v_switch, {
modelValue: config.auto_update_subscriptions,
@@ -691,7 +695,25 @@ return (_ctx, _cache) => {
label: "自动更新订阅",
color: "primary",
inset: "",
hint: "定期自动更新Clash订阅配置"
hint: "定期自动更新Clash订阅配置",
"persistent-hint": ""
}, null, 8, ["modelValue"])
]),
_: 1
}),
_createVNode(_component_v_col, {
cols: "12",
md: "6"
}, {
default: _withCtx(() => [
_createVNode(_component_v_switch, {
modelValue: config.group_by_region,
"onUpdate:modelValue": _cache[14] || (_cache[14] = $event => ((config.group_by_region) = $event)),
label: "按大洲分组节点",
color: "primary",
inset: "",
hint: "启用后根据名称,将节点添加到代理组",
"persistent-hint": ""
}, null, 8, ["modelValue"])
]),
_: 1
@@ -700,7 +722,7 @@ return (_ctx, _cache) => {
default: _withCtx(() => [
_createVNode(_component_v_text_field, {
modelValue: config.ruleset_prefix,
"onUpdate:modelValue": _cache[14] || (_cache[14] = $event => ((config.ruleset_prefix) = $event)),
"onUpdate:modelValue": _cache[15] || (_cache[15] = $event => ((config.ruleset_prefix) = $event)),
label: "规则集前缀",
variant: "outlined",
placeholder: "📂<-",
@@ -709,7 +731,7 @@ return (_ctx, _cache) => {
}, {
"prepend-inner": _withCtx(() => [
_createVNode(_component_v_icon, { color: "info" }, {
default: _withCtx(() => _cache[27] || (_cache[27] = [
default: _withCtx(() => _cache[28] || (_cache[28] = [
_createTextVNode("mdi-prefix")
])),
_: 1
@@ -746,12 +768,12 @@ return (_ctx, _cache) => {
}, {
default: _withCtx(() => [
_createVNode(_component_v_icon, { left: "" }, {
default: _withCtx(() => _cache[33] || (_cache[33] = [
default: _withCtx(() => _cache[34] || (_cache[34] = [
_createTextVNode("mdi-view-dashboard-edit")
])),
_: 1
}),
_cache[34] || (_cache[34] = _createTextVNode(" 规则 "))
_cache[35] || (_cache[35] = _createTextVNode(" 规则 "))
]),
_: 1
}),
@@ -759,7 +781,7 @@ return (_ctx, _cache) => {
color: "secondary",
onClick: resetForm
}, {
default: _withCtx(() => _cache[35] || (_cache[35] = [
default: _withCtx(() => _cache[36] || (_cache[36] = [
_createTextVNode("重置")
])),
_: 1
@@ -769,7 +791,7 @@ return (_ctx, _cache) => {
onClick: testConnection,
loading: testing.value
}, {
default: _withCtx(() => _cache[36] || (_cache[36] = [
default: _withCtx(() => _cache[37] || (_cache[37] = [
_createTextVNode("测试连接")
])),
_: 1
@@ -781,7 +803,7 @@ return (_ctx, _cache) => {
onClick: saveConfig,
loading: saving.value
}, {
default: _withCtx(() => _cache[37] || (_cache[37] = [
default: _withCtx(() => _cache[38] || (_cache[38] = [
_createTextVNode(" 保存配置 ")
])),
_: 1
@@ -796,7 +818,7 @@ return (_ctx, _cache) => {
variant: "tonal",
closable: "",
class: "ma-4 mt-0",
"onClick:close": _cache[16] || (_cache[16] = $event => (testResult.show = false))
"onClick:close": _cache[17] || (_cache[17] = $event => (testResult.show = false))
}, {
default: _withCtx(() => [
_createElementVNode("div", _hoisted_2, [
@@ -823,6 +845,6 @@ return (_ctx, _cache) => {
}
};
const ConfigComponent = /*#__PURE__*/_export_sfc(_sfc_main, [['__scopeId',"data-v-0e64dae0"]]);
const ConfigComponent = /*#__PURE__*/_export_sfc(_sfc_main, [['__scopeId',"data-v-e9acef13"]]);
export { ConfigComponent as default };

View File

@@ -1,5 +1,5 @@
.plugin-config[data-v-0e64dae0] {
.plugin-config[data-v-e9acef13] {
max-width: 800px;
margin: 0 auto;
}

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,15 @@
import { importShared } from './__federation_fn_import-JrT3xvdd.js';
import { _ as _export_sfc } from './_plugin-vue_export-helper-pcqpp-6-.js';
const _sfc_main = {};
const {openBlock:_openBlock,createElementBlock:_createElementBlock} = await importShared('vue');
const _hoisted_1 = { class: "dashboard-widget" };
function _sfc_render(_ctx, _cache) {
return (_openBlock(), _createElementBlock("div", _hoisted_1))
}
const DashboardComponent = /*#__PURE__*/_export_sfc(_sfc_main, [['render',_sfc_render]]);
export { DashboardComponent as default };

View File

@@ -1,14 +1,14 @@
.plugin-page[data-v-d5e502a5] {
.plugin-page[data-v-d5896cff] {
max-width: 1200px;
margin: 0 auto;
}
/* 使卡片等宽并适应移动端 */
.d-flex.flex-wrap[data-v-d5e502a5] {
.d-flex.flex-wrap[data-v-d5896cff] {
gap: 16px;
}
.url-display[data-v-d5e502a5] {
.url-display[data-v-d5896cff] {
word-break: break-all;
padding: 8px;
background: rgba(0, 0, 0, 0.05);
@@ -17,19 +17,19 @@
/* 移动端堆叠布局 */
@media (max-width: 768px) {
.d-flex.flex-wrap[data-v-d5e502a5] {
.d-flex.flex-wrap[data-v-d5896cff] {
flex-direction: column;
}
}
/* Add visual distinction between sections */
.ruleset-section[data-v-d5e502a5] {
.ruleset-section[data-v-d5896cff] {
border: 1px solid #e0e0e0;
border-radius: 4px;
padding: 16px;
background-color: #f5f5f5;
}
.top-section[data-v-d5e502a5] {
.top-section[data-v-d5896cff] {
border: 1px solid #e0e0e0;
border-radius: 4px;
padding: 16px;
@@ -37,12 +37,12 @@
}
/* Optional: Add different border colors to further distinguish */
.ruleset-section[data-v-d5e502a5] {
.ruleset-section[data-v-d5896cff] {
border-left: 4px solid #2196F3; /* Blue accent */
}
.top-section[data-v-d5e502a5] {
.top-section[data-v-d5896cff] {
border-left: 4px solid #4CAF50; /* Green accent */
}
.drag-handle[data-v-d5e502a5] {
.drag-handle[data-v-d5896cff] {
cursor: move;
}

File diff suppressed because it is too large Load Diff

View File

@@ -2,14 +2,14 @@ const currentImports = {};
const exportSet = new Set(['Module', '__esModule', 'default', '_export_sfc']);
let moduleMap = {
"./Page":()=>{
dynamicLoadingCss(["__federation_expose_Page-Bl7XNZ7k.css"], false, './Page');
return __federation_import('./__federation_expose_Page-DlQgf7u6.js').then(module =>Object.keys(module).every(item => exportSet.has(item)) ? () => module.default : () => module)},
dynamicLoadingCss(["__federation_expose_Page-BiV11X52.css"], false, './Page');
return __federation_import('./__federation_expose_Page-DSmFC_QV.js').then(module =>Object.keys(module).every(item => exportSet.has(item)) ? () => module.default : () => module)},
"./Config":()=>{
dynamicLoadingCss(["__federation_expose_Config-DXzIavcD.css"], false, './Config');
return __federation_import('./__federation_expose_Config-C3BpNVeC.js').then(module =>Object.keys(module).every(item => exportSet.has(item)) ? () => module.default : () => module)},
dynamicLoadingCss(["__federation_expose_Config-C_eVGIzn.css"], false, './Config');
return __federation_import('./__federation_expose_Config-BK6LRC9E.js').then(module =>Object.keys(module).every(item => exportSet.has(item)) ? () => module.default : () => module)},
"./Dashboard":()=>{
dynamicLoadingCss([], false, './Dashboard');
return __federation_import('./__federation_expose_Dashboard-BkyO-3pr.js').then(module =>Object.keys(module).every(item => exportSet.has(item)) ? () => module.default : () => module)},};
return __federation_import('./__federation_expose_Dashboard-DKtydfsT.js').then(module =>Object.keys(module).every(item => exportSet.has(item)) ? () => module.default : () => module)},};
const seen = {};
const dynamicLoadingCss = (cssFilePaths, dontAppendStylesToHead, exposeItemName) => {
const metaUrl = import.meta.url;

View File

@@ -1,13 +1,18 @@
from datetime import datetime
import re
import json
from typing import Optional, Any, List, Dict, Tuple
from datetime import datetime
from app import schemas
from app.core.config import settings
from app.core.event import eventmanager, Event
from app.log import logger
from app.plugins import _PluginBase
from app.plugins.imdbsource.imdb_helper import ImdbHelper
from app.schemas import DiscoverSourceEventData, MediaRecognizeConvertEventData, RecommendSourceEventData
from app.schemas.types import ChainEventType, MediaType
from app.core.meta import MetaBase
from app.core.context import MediaInfo
from app.plugins.imdbsource.imdb_helper import ImdbHelper
from app import schemas
from app.utils.http import RequestUtils
@@ -17,10 +22,9 @@ class ImdbSource(_PluginBase):
# 插件描述
plugin_desc = "让探索和推荐支持IMDb数据源。"
# 插件图标
plugin_icon = ("https://raw.githubusercontent.com/jxxghp/"
"MoviePilot-Plugins/refs/heads/main/icons/IMDb_IOS-OSX_App.png")
plugin_icon = "IMDb_IOS-OSX_App.png"
# 插件版本
plugin_version = "1.3.1"
plugin_version = "1.3.2"
# 插件作者
plugin_author = "wumode"
# 作者主页
@@ -123,8 +127,135 @@ class ImdbSource(_PluginBase):
"id2": self.xxx2,
}
"""
# return {"recognize_media": (self.recognize_media, ModuleExecutionType.Hijack)}
pass
@staticmethod
# @MediaInfo.source_processor("imdb")
def process_imdb_info(mediainfo: MediaInfo, info: dict):
"""处理 IMDB 信息"""
mediainfo.source_info["imdb"] = info
if isinstance(info.get('media_type'), MediaType):
mediainfo.type = info.get('media_type')
elif info.get('media_type'):
mediainfo.type = MediaType.MOVIE if info.get("type") == "movie" else MediaType.TV
mediainfo.title = info.get("title")
mediainfo.release_date = info.get('release_date')
if info.get("id"):
mediainfo.source_id["imdb"] = info.get("id")
mediainfo.imdb_id = info.get('id')
if not mediainfo.source_id:
return
mediainfo.vote_average = round(float(info.get("rating").get("aggregate_rating")), 1) if info.get("rating") else 0
mediainfo.overview = info.get('plot')
mediainfo.genre_ids = info.get('genre') or []
# 风格
if not mediainfo.genres:
mediainfo.genres = [{"id": genre, "name": genre} for genre in info.get("genres") or []]
if info.get('spoken_languages', []):
mediainfo.original_language = info.get('spoken_languages', [])[0].get("name")
mediainfo.en_title = info.get('primary_title')
mediainfo.title = info.get('primary_title')
mediainfo.original_title = info.get('original_title')
# mediainfo.release_date = info.get('start_year')
mediainfo.year = info.get('start_year')
if info.get('posters', []):
mediainfo.poster_path = info.get("posters", [])[0].get("url")
directors = []
if info.get('directors', []):
for dn in info.get('directors', []):
director = dn.get("name")
if not director:
continue
d_ = {"name": director.get("display_name"), "id": director.get("id"), "avatars": director.get("avatars")}
directors.append(d_)
if info.get('writers', []):
for wn in info.get('writers', []):
writer = wn.get("name")
d_ = {"name": writer.get("display_name"), "id": writer.get("id"), "avatars": writer.get("avatars")}
directors.append(d_)
mediainfo.directors = directors
actors = []
if info.get('casts', []):
for cast in info.get('casts', []):
cn = cast.get("name", {})
character_name = cast.get("characters")[0] if cast.get("characters") else ''
d_ = {"name": cn.get("display_name"), "id": cn.get("id"),
"avatars": cn.get("avatars"), "character": character_name}
actors.append(d_)
def recognize_media(self, meta: MetaBase = None,
mtype: MediaType = None,
imdbid: Optional[str] = None,
episode_group: Optional[str] = None,
cache: Optional[bool] = True,
**kwargs) -> Optional[MediaInfo]:
logger.warn(f"IMDb Source: {MetaBase.title}")
if not self._imdb_helper:
return None
if not imdbid and not meta:
return None
if not meta:
# 未提供元数据时直接使用imdbid查询不使用缓存
cache_info = {}
elif not meta.name:
logger.warn("识别媒体信息时未提供元数据名称")
return None
cache_info = {}
if not cache_info or not cache:
info = None
if imdbid:
info = self._imdb_helper.get_info(mtype=mtype, imdbid=imdbid)
if not info and meta:
info = {}
names = list(dict.fromkeys([k for k in [meta.cn_name, meta.en_name] if k]))
for name in names:
if meta.begin_season:
logger.info(f"正在识别 {name}{meta.begin_season}季 ...")
else:
logger.info(f"正在识别 {name} ...")
if meta.type == MediaType.UNKNOWN and not meta.year:
info = self._imdb_helper.match_multi(name)
else:
if meta.type == MediaType.TV:
# 确定是电视
info = self._imdb_helper.match(name=name,
year=meta.year,
mtype=meta.type,season_year=meta.year,
season_number=meta.begin_season)
if not info:
# 去掉年份再查一次
info = self._imdb_helper.match(name=name, mtype=meta.type)
else:
# 有年份先按电影查
info = self._imdb_helper.match(name=name, year=meta.year, mtype=MediaType.MOVIE)
# 没有再按电视剧查
if not info:
info = self._imdb_helper.match(name=name,
year=meta.year,
mtype=MediaType.TV)
if not info:
# 去掉年份和类型再查一次
info = self._imdb_helper.match_multi(name=name)
if info:
break
else:
info = None
if info:
# mediainfo = MediaInfo(source_info={"imdb": info})
mediainfo = MediaInfo()
if meta:
logger.info(f"{meta.name} IMDB识别结果{mediainfo.type.value} "
f"{mediainfo.title_year} "
f"{mediainfo.imdb_id}")
else:
logger.info(f"{imdbid} IMDB识别结果{mediainfo.type.value} "
f"{mediainfo.title_year}")
return mediainfo
logger.info(f"{meta.name if meta else imdbid} 未匹配到IMDB媒体信息")
return None
@staticmethod
def __movie_to_media(movie_info: dict) -> schemas.MediaInfo:
title = ""
@@ -150,7 +281,7 @@ class ImdbSource(_PluginBase):
return schemas.MediaInfo(
type="电影",
title=title,
year=release_year,
year=f'{release_year}',
title_year=f"{title} ({release_year})",
mediaid_prefix="imdb",
media_id=str(movie_info.get("id")),
@@ -191,7 +322,7 @@ class ImdbSource(_PluginBase):
return schemas.MediaInfo(
type="电视剧",
title=title,
year=release_year,
year=f'{release_year}',
title_year=f"{title} ({release_year})",
mediaid_prefix="imdb",
media_id=str(series_info.get("id")),

View File

@@ -1,15 +1,11 @@
import re
from typing import Optional, Any, Dict, List, Tuple
from io import StringIO
from collections import OrderedDict
from dataclasses import dataclass
import graphene
import requests
from requests_html import HTMLSession
import ijson
import json
import base64
from app.log import logger
from app.utils.http import RequestUtils
@@ -149,8 +145,7 @@ class ImdbHelper:
def __init__(self, proxies=None):
self._proxies = proxies
self._session = HTMLSession()
self._req_utils = RequestUtils(headers=self._imdb_headers, session=self._session, timeout=10, proxies=proxies)
self._req_utils = RequestUtils(headers=self._imdb_headers, session=HTMLSession(), timeout=10, proxies=proxies)
self._imdb_req = RequestUtils(accept_type="application/json",
content_type="application/json",
headers=self._imdb_headers,
@@ -158,6 +153,7 @@ class ImdbHelper:
proxies=proxies,
session=requests.Session())
self._imdb_api_hash = {"AdvancedTitleSearch": None, "TitleAkasPaginated": None}
self.hash_status = {"AdvancedTitleSearch": False, "TitleAkasPaginated": False}
self._search_states = OrderedDict()
self._max_states = 30
@@ -185,7 +181,7 @@ class ImdbHelper:
f"/en-US/title/{imdbid}/episodes.json?season={season}&ref_=ttep&tconst={imdbid}")
response = self._req_utils.get_res(url)
if not response or response.status_code != 200:
return
return None
json_content = response.text
try:
section = next(ijson.items(json_content, prefix))
@@ -247,11 +243,14 @@ class ImdbHelper:
return None
data = ret.json()
if "errors" in data:
logger.error(f"Imdb query errors")
return None
error = data.get("errors")[0] if data.get("errors") else {}
if error and error.get("message") == 'PersistedQueryNotFound':
logger.warn(f"PersistedQuery hash has expired, trying to update...")
self.__get_hash.cache_clear()
return {'error': error}
return data.get("data")
@cached(maxsize=1, ttl=30 * 24 * 3600)
@cached(maxsize=1, ttl=6 * 3600)
def __get_hash(self) -> Optional[dict]:
"""
根据IMDb hash使用
@@ -264,11 +263,13 @@ class ImdbHelper:
proxies=self._proxies
)
if not res:
logger.error("获取IMDb hash")
logger.error("Error getting hash")
return None
return res.json()
def __update_hash(self):
def __update_hash(self, force: bool = False) -> None:
if force:
self.__get_hash.cache_clear()
imdb_hash = self.__get_hash()
if imdb_hash:
self._imdb_api_hash["AdvancedTitleSearch"] = imdb_hash.get("AdvancedTitleSearch")
@@ -325,7 +326,8 @@ class ImdbHelper:
release_date_start: Optional[str] = None,
award_constraint: Optional[Tuple[str, ...]] = None,
ranked: Optional[Tuple[str, ...]] = None,
interests: Optional[Tuple[str, ...]] = None):
interests: Optional[Tuple[str, ...]] = None
)->Optional[Dict]:
# 创建参数对象
params = SearchParams(
title_types=title_types,
@@ -342,7 +344,7 @@ class ImdbHelper:
ranked=ranked,
interests=interests
)
sha256 = 'be358d7b41add9fd174461f4c8c673dfee5e2a88744e2d5dc037362a96e2b4e4'
sha256 = '81b46290a78cc1e8b3d713e6a43c191c55b4dccf3e1945d6b46668945846d832'
self.__update_hash()
if self._imdb_api_hash.get("AdvancedTitleSearch"):
sha256 = self._imdb_api_hash["AdvancedTitleSearch"]
@@ -359,7 +361,7 @@ class ImdbHelper:
'titleTypes': [], 'jobCategories': []}
if search_state.pageinfo.get('endCursor'):
last_cursor = search_state.pageinfo.get('endCursor')
# 这里实现基于上次结果的逻辑
# 实现基于上次结果的逻辑
else:
# 重新搜索
first_page = True
@@ -382,11 +384,12 @@ class ImdbHelper:
last_cursor: Optional[str] = None,
) -> Optional[Dict]:
variables = {"first": 50,
variables: Dict[str, Any] = {"first": 50,
"locale": "en-US",
"sortBy": params.sort_by,
"sortOrder": params.sort_order,
}
operation_name = 'AdvancedTitleSearch'
if params.title_types:
title_type_ids = []
for title_type in params.title_types:
@@ -439,12 +442,20 @@ class ImdbHelper:
if not first_page and last_cursor:
variables["after"] = last_cursor
params = {"operationName": "AdvancedTitleSearch",
params = {"operationName": operation_name,
"variables": variables}
data = self.__request(params, sha256)
if not data:
return None
return data.get("advancedTitleSearch")
if 'error' in data:
error = data['error']
if error:
logger.error(f"Error querying {operation_name}: {error.get('message')}")
if error.get('message') == 'PersistedQueryNotFound':
self.hash_status[operation_name] = False
return None
self.hash_status[operation_name] = True
return data.get('advancedTitleSearch')
def __known_as(self, imdbid: str,
sha256='48d4f7bfa73230fb550147bd4704d8050080e65fe2ad576da6276cac2330e446') -> Optional[List]:
@@ -453,14 +464,23 @@ class ImdbHelper:
:param imdbid: IMBd id
:return: 别名列表
"""
operation_name = "TitleAkasPaginated"
self.__update_hash()
if self._imdb_api_hash.get("TitleAkasPaginated"):
sha256 = self._imdb_api_hash["TitleAkasPaginated"]
params = {"operationName": "TitleAkasPaginated",
if self._imdb_api_hash.get(operation_name):
sha256 = self._imdb_api_hash[operation_name]
params = {"operationName": operation_name,
"variables": {"const": imdbid, "first": 50, "locale": "en-US", "originalTitleText": False}}
data = self.__request(params=params, sha256=sha256)
if not data:
return None
if 'error' in data:
error = data['error']
if error:
logger.error(f"Error querying {operation_name} API: {error.get('message')}")
if error.get('message') == 'PersistedQueryNotFound':
self.hash_status[operation_name] = False
return None
self.hash_status[operation_name] = True
if not data.get("data", {}).get("title", {}).get("akas", {}).get("total"):
return None
akas = []
@@ -633,6 +653,7 @@ class ImdbHelper:
_tv_info["seasons"] = tv_extra_info["seasons"]
_tv_info["episodes"] = tv_extra_info["episodes"]
return True
return False
tvs = self.search_tvs(title=name)
if (tvs is None) or (len(tvs) == 0):
@@ -660,6 +681,7 @@ class ImdbHelper:
continue
if __season_match(_tv_info=tv_info, _season_year=season_year):
return tv_info
return None
def get_info(self,
mtype: MediaType,