fix(ClashRuleProvider): 修复分享链接解析错误

This commit is contained in:
wumode
2025-07-08 21:16:19 +08:00
parent 618c889206
commit f9c60a0683
6 changed files with 209 additions and 185 deletions

View File

@@ -457,11 +457,12 @@
"name": "Clash Rule Provider",
"description": "随时为Clash添加一些额外的规则。",
"labels": "工具",
"version": "1.2.6",
"version": "1.3.0",
"icon": "Mihomo_Meta_A.png",
"author": "wumode",
"level": 1,
"history": {
"v1.3.0": "修复分享链接解析错误",
"v1.2.6": "修复代理组修改丢失问题",
"v1.2.4": "支持geo规则补全; 代理组编辑",
"v1.2.3": "修复规则集名称错误",

View File

@@ -984,7 +984,8 @@ class ClashRuleProvider(_PluginBase):
continue
clash_rules.append(clash_rule)
self._clash_rule_parser.append_rules(clash_rules)
self.__save_data()
self._top_rules = self._clash_rule_parser.to_list()
self.save_data('top_rules', self._top_rules)
return
def update_rule_by_priority(self, rule: Dict[str, Any], priority: int, rule_parser: ClashRuleParser) -> bool:
@@ -992,7 +993,7 @@ class ClashRuleProvider(_PluginBase):
return False
clash_rule = ClashRuleParser.parse_rule_dict(rule)
if not clash_rule:
logger.error(f"Failed to update rule at priority {priority}. Invalid clash rule: {rule}")
logger.error(f"Failed to update rule at priority {priority}. Invalid clash rule: {rule!r}")
return False
res = rule_parser.update_rule_at_priority(clash_rule, priority)
self.__save_data()
@@ -1279,7 +1280,7 @@ class ClashRuleProvider(_PluginBase):
"""
for item in from_list:
if any(p.get('name') == item.get('name', '') for p in to_list):
logger.warn(f"Item named {item.get('name')} already exists. Skipping...")
logger.warn(f"Item named {item.get('name')!r} already exists. Skipping...")
continue
to_list.append(item)
return to_list
@@ -1310,7 +1311,7 @@ class ClashRuleProvider(_PluginBase):
for proxy in self.all_proxies() :
if any(p.get('name') == proxy.get('name', '') for p in proxies):
logger.warn(f"Proxy named {proxy.get('name')} already exists. Skipping...")
logger.warn(f"Proxy named {proxy.get('name')!r} already exists. Skipping...")
continue
proxies.append(proxy)
if proxies:
@@ -1363,16 +1364,16 @@ class ClashRuleProvider(_PluginBase):
if rule.payload in self._acl4ssr_providers:
clash_config['rule-providers'][rule.payload] = self._acl4ssr_providers[rule.payload]
if rule.payload not in clash_config.get('rule-providers', {}):
logger.warn(f"规则集合 {rule.payload} 不存在, 跳过 {rule.raw_rule}")
logger.warn(f"规则集合 {rule.payload!r} 不存在, 跳过 {rule.raw_rule!r}")
continue
top_rules.append(rule.raw_rule)
for raw_rule in clash_config.get("rules", []):
rule = ClashRuleParser.parse_rule_line(raw_rule)
if not rule:
logger.warn(f"无效的规则 {raw_rule}, 跳过")
logger.warn(f"无效的规则 {raw_rule!r}, 跳过")
continue
if not isinstance(rule.action, Action) and rule.action not in outbound_names:
logger.warn(f"出站 {rule.action} 不存在, 跳过 {rule.raw_rule}")
logger.warn(f"出站 {rule.action!r} 不存在, 跳过 {rule.raw_rule!r}")
continue
top_rules.append(rule.raw_rule)
clash_config["rules"] = top_rules

View File

@@ -658,6 +658,8 @@ class Converter:
https://github.com/MetaCubeX/mihomo/blob/Alpha/common/convert/converter.go
https://github.com/SubConv/SubConv/blob/main/modules/convert/converter.py
"""
user_agent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome'
@staticmethod
def decode_base64(data):
# 添加适配不同 padding 的容错机制
@@ -693,7 +695,7 @@ class Converter:
raise ValueError(f"invalid truth value {val!r}")
@staticmethod
def convert_v2ray(v2ray_link: Union[list, bytes]) -> List[Dict[str, Any]]:
def convert_v2ray(v2ray_link: Union[list, bytes], skip_exception: bool = True) -> List[Dict[str, Any]]:
if isinstance(v2ray_link, bytes):
decoded = Converter.decode_base64(v2ray_link).decode("utf-8")
lines = decoded.strip().splitlines()
@@ -713,176 +715,182 @@ class Converter:
scheme = scheme.lower()
if scheme == "vmess":
vmess_data = Converter.try_decode_base64_json(body)
if not vmess_data:
continue
try:
vmess_data = Converter.try_decode_base64_json(body)
name = Converter.unique_name(names, vmess_data.get("ps", "vmess"))
net = str(vmess_data.get("net", "")).lower()
fake_type = str(vmess_data.get("type", "")).lower()
tls_mode = str(vmess_data.get("tls", "")).lower()
cipher = vmess_data.get("scy", "auto") or "auto"
alter_id = vmess_data.get("aid", 0)
name = Converter.unique_name(names, vmess_data.get("ps", "vmess"))
net = str(vmess_data.get("net", "")).lower()
fake_type = str(vmess_data.get("type", "")).lower()
tls_mode = str(vmess_data.get("tls", "")).lower()
cipher = vmess_data.get("scy", "auto") or "auto"
alter_id = vmess_data.get("aid", 0)
# 调整 network 类型
if fake_type == "http":
net = "http"
elif net == "http":
net = "h2"
proxy = {
"name": name,
"type": "vmess",
"server": vmess_data.get("add"),
"port": vmess_data.get("port"),
"uuid": vmess_data.get("id"),
"alterId": alter_id,
"cipher": cipher,
"tls": tls_mode.endswith("tls") or tls_mode == "reality",
"udp": True,
"xudp": True,
"skip-cert-verify": False,
"network": net
}
# TLS、Reality 扩展
if proxy["tls"]:
proxy["client-fingerprint"] = vmess_data.get("fp", "chrome") or "chrome"
alpn = vmess_data.get("alpn")
if alpn:
proxy["alpn"] = alpn.split(",") if isinstance(alpn, str) else alpn
sni = vmess_data.get("sni")
if sni:
proxy["servername"] = sni
if tls_mode == "reality":
proxy["reality-opts"] = {
"public-key": vmess_data.get("pbk", ""),
"short-id": vmess_data.get("sid", "")
}
path = vmess_data.get("path", "/")
host = vmess_data.get("host")
# 不同 network 的扩展字段处理
if net == "tcp":
# 调整 network 类型
if fake_type == "http":
net = "http"
elif net == "http":
net = "h2"
proxy = {
"name": name,
"type": "vmess",
"server": vmess_data.get("add"),
"port": vmess_data.get("port"),
"uuid": vmess_data.get("id"),
"alterId": alter_id,
"cipher": cipher,
"tls": tls_mode.endswith("tls") or tls_mode == "reality",
"udp": True,
"xudp": True,
"skip-cert-verify": False,
"network": net
}
# TLS Reality 扩展
if proxy["tls"]:
proxy["client-fingerprint"] = vmess_data.get("fp", "chrome") or "chrome"
alpn = vmess_data.get("alpn")
if alpn:
proxy["alpn"] = alpn.split(",") if isinstance(alpn, str) else alpn
sni = vmess_data.get("sni")
if sni:
proxy["servername"] = sni
if tls_mode == "reality":
proxy["reality-opts"] = {
"public-key": vmess_data.get("pbk", ""),
"short-id": vmess_data.get("sid", "")
}
path = vmess_data.get("path", "/")
host = vmess_data.get("host")
# 不同 network 的扩展字段处理
if net == "tcp":
if fake_type == "http":
proxy["http-opts"] = {
"path": path,
"headers": {"Host": host} if host else {}
}
elif net == "http":
proxy["network"] = "http"
proxy["http-opts"] = {
"path": path,
"headers": {"Host": host} if host else {}
}
elif net == "http":
proxy["network"] = "http"
proxy["http-opts"] = {
"path": path,
"headers": {"Host": host} if host else {}
}
elif net == "h2":
proxy["h2-opts"] = {
"path": path,
"host": [host] if host else []
}
elif net == "h2":
proxy["h2-opts"] = {
"path": path,
"host": [host] if host else []
}
elif net == "ws":
ws_headers = {"Host": host} if host else {}
ws_headers["User-Agent"] = "Mozilla/5.0 (Windows NT 10.0; Win64; x64)" # 可选伪装
ws_opts = {
"path": path,
"headers": ws_headers
}
# 补充 early-data 配置
early_data = vmess_data.get("ed")
if early_data:
try:
ws_opts["max-early-data"] = int(early_data)
except ValueError:
pass
early_data_header = vmess_data.get("edh")
if early_data_header:
ws_opts["early-data-header-name"] = early_data_header
proxy["ws-opts"] = ws_opts
elif net == "ws":
ws_headers = {"Host": host} if host else {}
ws_headers["User-Agent"] = Converter.user_agent
ws_opts = {
"path": path,
"headers": ws_headers
}
# 补充 early-data 配置
early_data = vmess_data.get("ed")
if early_data:
try:
ws_opts["max-early-data"] = int(early_data)
except ValueError:
pass
early_data_header = vmess_data.get("edh")
if early_data_header:
ws_opts["early-data-header-name"] = early_data_header
proxy["ws-opts"] = ws_opts
elif net == "grpc":
proxy["grpc-opts"] = {
"grpc-service-name": path
}
proxies.append(proxy)
except Exception as e:
if not skip_exception:
raise ValueError(f"VMESS parse error: {e}") from e
elif net == "grpc":
proxy["grpc-opts"] = {
"grpc-service-name": path
}
proxies.append(proxy)
elif scheme == "vless":
try:
parsed = urlparse(line)
query = parse_qs(parsed.query)
query = dict(parse_qsl(parsed.query))
uuid = parsed.username or ""
server = parsed.hostname or ""
port = parsed.port or 443
tls_mode = query.get("security", [""])[0].lower()
tls_mode = query.get("security", "").lower()
tls = tls_mode == "tls" or tls_mode == "reality"
sni = query.get("sni", [""])[0]
flow = query.get("flow", [""])[0]
network = query.get("type", [""])[0]
path = query.get("path", [""])[0]
host = query.get("host", [""])[0]
sni = query.get("sni", "")
flow = query.get("flow", "")
network = query.get("type", "tcp")
path = query.get("path", "")
host = query.get("host", "")
name = Converter.unique_name(names, unquote(parsed.fragment or f"{server}:{port}"))
proxy = {
proxy: Dict[str, Any] = {
"name": name,
"type": "vless",
"server": server,
"port": str(port),
"port": port,
"uuid": uuid,
"tls": tls,
"udp": True
}
if sni:
proxy["sni"] = sni
proxy["servername"] = sni
if flow:
proxy["flow"] = flow
if tls:
proxy["skip-cert-verify"] = Converter.strtobool(query.get("allowInsecure", "0"))
if network:
proxy["network"] = network
if network in ["ws", "httpupgrade"]:
headers = {}
headers = {"User-Agent": Converter.user_agent}
if host:
headers["Host"] = host
ws_opts:Dict[str, Any] = { "path": path, "headers": headers }
try:
parsed_path = urlparse(path)
q = parse_qs(parsed_path.query)
q = dict(parse_qsl(parsed_path.query))
if "ed" in q:
med = int(q["ed"][0])
ws_opts["max-early-data"] = med
ws_opts["early-data-header-name"] = q.get("eh", ["Sec-WebSocket-Protocol"])[0]
q.pop("ed", None)
new_query = urlencode(q, doseq=True)
parsed = parsed._replace(query=new_query)
path = urlunparse(parsed)
elif "eh" in q:
ws_opts["early-data-header-name"] = q["eh"][0]
ws_opts["path"] = path
med = int(q["ed"])
if network == "ws":
ws_opts["max-early-data"] = med
ws_opts["early-data-header-name"] = q.get("eh", "Sec-WebSocket-Protocol")
elif network == "httpupgrade":
ws_opts["v2ray-http-upgrade-fast-open"] = True
if "eh" in q and q["eh"]:
ws_opts["early-data-header-name"] = q["eh"]
except Exception:
pass
if network == "httpupgrade":
ws_opts["v2ray-http-upgrade-fast-open"] = True
proxy["ws-opts"] = ws_opts
elif network == "grpc":
proxy["grpc-opts"] = {
"grpc-service-name": path
"grpc-service-name": query.get("serviceName", "")
}
if tls_mode == "reality":
proxy["reality-opts"] = {
"public-key": query.get("pbk", [""])[0]
"public-key": query.get("pbk", "")
}
if query.get("sid"):
proxy["reality-opts"]["short-id"] = query.get("sid", [""])[0]
proxy["client-fingerprint"] = query.get("fp", ["chrome"])[0]
alpn = query.get("alpn", [""])[0]
proxy["reality-opts"]["short-id"] = query.get("sid", "")
proxy["client-fingerprint"] = query.get("fp", "chrome")
alpn = query.get("alpn", "")
if alpn:
proxy["alpn"] = alpn.split(",")
if tls_mode.endswith("tls"):
proxy["client-fingerprint"] = query.get("fp", "chrome")
alpn = query.get("alpn", "")
if alpn:
proxy["alpn"] = alpn.split(",")
proxies.append(proxy)
except Exception as e:
raise ValueError(f"VLESS parse error: {e}") from e
if not skip_exception:
raise ValueError(f"VLESS parse error: {e}") from e
elif scheme == "trojan":
try:
@@ -919,7 +927,7 @@ class Converter:
trojan["network"] = network
if network == "ws":
headers = {"User-Agent": "clash"} # 或 RandUserAgent()
headers = {"User-Agent": Converter.user_agent}
trojan["ws-opts"] = {
"path": query.get("path", "/"),
"headers": headers
@@ -936,7 +944,9 @@ class Converter:
proxies.append(trojan)
except Exception as e:
raise ValueError(f"Error parsing trojan:// link: {e}") from e
if not skip_exception:
raise ValueError(f"Trojan parse error: {e}") from e
elif scheme == "hysteria":
try:
parsed = urlparse(line)
@@ -976,7 +986,9 @@ class Converter:
proxies.append(hysteria)
except Exception as e:
raise ValueError(f"Hysteria parse error: {e}") from e
if not skip_exception:
raise ValueError(f"Hysteria parse error: {e}") from e
elif scheme in ("socks", "socks5", "socks5h"):
try:
parsed = urlparse(line)
@@ -990,14 +1002,16 @@ class Converter:
"name": name,
"type": "socks5",
"server": server,
"port": str(port),
"port": port,
"username": username,
"password": password,
"udp": True
}
proxies.append(proxy)
except Exception as e:
raise ValueError(f"SOCKS5 parse error: {e}") from e
if not skip_exception:
raise ValueError(f"SOCKS5 parse error: {e}") from e
elif scheme == "ss":
try:
parsed = urlparse(line)
@@ -1052,7 +1066,9 @@ class Converter:
}
proxies.append(proxy)
except Exception as e:
raise ValueError(f"SS parse error: {e}") from e
if not skip_exception:
raise ValueError(f"SS parse error: {e}") from e
elif scheme == "ssr":
try:
decoded = Converter.decode_base64(body).decode()
@@ -1087,7 +1103,9 @@ class Converter:
proxies.append(proxy)
except Exception as e:
raise ValueError(f"SSR parse error: {e}") from e
if not skip_exception:
raise ValueError(f"SSR parse error: {e}") from e
elif scheme == "tuic":
try:
parsed = urlparse(line)
@@ -1103,7 +1121,7 @@ class Converter:
"name": name,
"type": "tuic",
"server": server,
"port": str(port),
"port": port,
"udp": True
}
@@ -1126,7 +1144,9 @@ class Converter:
proxies.append(proxy)
except Exception as e:
raise ValueError(f"TUIC parse error: {e}") from e
if not skip_exception:
raise ValueError(f"TUIC parse error: {e}") from e
elif scheme == "anytls":
try:
parsed = urlparse(line)
@@ -1145,7 +1165,7 @@ class Converter:
"name": name,
"type": "anytls",
"server": server,
"port": str(port),
"port": port,
"username": username,
"password": password,
"sni": sni,
@@ -1156,40 +1176,42 @@ class Converter:
proxies.append(proxy)
except Exception as e:
raise ValueError(f"AnyTLS parse error: {e}") from e
if not skip_exception:
raise ValueError(f"AnyTLS parse error: {e}") from e
elif scheme in ("hysteria2", "hy2"):
try:
parsed = urlparse(line)
query = parse_qs(parsed.query)
query = dict(parse_qsl(parsed.query))
password = parsed.username or ""
server = parsed.hostname
port = parsed.port or 443
name = Converter.unique_name(names, unquote(parsed.fragment or f"{server}:{port}"))
proxy = {
"name": name,
"type": "hysteria2",
"server": server,
"port": str(port),
"port": port,
"password": password,
"obfs": query.get("obfs", [""])[0],
"obfs-password": query.get("obfs-password", [""])[0],
"sni": query.get("sni", [""])[0],
"skip-cert-verify": query.get("insecure", ["false"])[0] == "true",
"down": query.get("down", [""])[0],
"up": query.get("up", [""])[0],
"fingerprint": query.get("pinSHA256", [""])[0]
"obfs": query.get("obfs", ""),
"obfs-password": query.get("obfs-password", ""),
"sni": query.get("sni", ""),
"skip-cert-verify": Converter.strtobool(query.get("insecure", "false")),
"down": query.get("down", ""),
"up": query.get("up", ""),
}
if "pinSHA256" in query:
proxy["fingerprint"] = query.get("pinSHA256", "")
if "alpn" in query:
proxy["alpn"] = query["alpn"][0].split(",")
proxy["alpn"] = query["alpn"].split(",")
proxies.append(proxy)
except Exception as e:
raise ValueError(f"Hysteria2 parse error: {e}") from e
if not skip_exception:
raise ValueError(f"Hysteria2 parse error: {e}") from e
if not proxies:
raise ValueError("convert v2ray subscribe error: format invalid")
if not skip_exception:
raise ValueError("convert v2ray subscribe error: format invalid")
return proxies

View File

@@ -1,13 +1,13 @@
.plugin-page[data-v-4a22a0ac] {
.plugin-page[data-v-0e274fe6] {
margin: 0 auto;
}
/* 使卡片等宽并适应移动端 */
.d-flex.flex-wrap[data-v-4a22a0ac] {
.d-flex.flex-wrap[data-v-0e274fe6] {
gap: 16px;
}
.url-display[data-v-4a22a0ac] {
.url-display[data-v-0e274fe6] {
word-break: break-all;
padding: 8px;
background: rgba(0, 0, 0, 0.05);
@@ -16,19 +16,19 @@
/* 移动端堆叠布局 */
@media (max-width: 768px) {
.d-flex.flex-wrap[data-v-4a22a0ac] {
.d-flex.flex-wrap[data-v-0e274fe6] {
flex-direction: column;
}
}
/* Add visual distinction between sections */
.ruleset-section[data-v-4a22a0ac] {
.ruleset-section[data-v-0e274fe6] {
border: 1px solid #e0e0e0;
border-radius: 4px;
padding: 16px;
background-color: #f5f5f5;
}
.top-section[data-v-4a22a0ac] {
.top-section[data-v-0e274fe6] {
border: 1px solid #e0e0e0;
border-radius: 4px;
padding: 16px;
@@ -36,15 +36,15 @@
}
/* Optional: Add different border colors to further distinguish */
.ruleset-section[data-v-4a22a0ac] {
.ruleset-section[data-v-0e274fe6] {
border-left: 4px solid #2196F3; /* Blue accent */
}
.top-section[data-v-4a22a0ac] {
.top-section[data-v-0e274fe6] {
border-left: 4px solid #4CAF50; /* Green accent */
}
.drag-handle[data-v-4a22a0ac] {
.drag-handle[data-v-0e274fe6] {
cursor: move;
}
.gap-2[data-v-4a22a0ac] {
.gap-2[data-v-0e274fe6] {
gap: 8px;
}

View File

@@ -5855,24 +5855,6 @@ return (_ctx, _cache) => {
_createElementVNode("td", null, _toDisplayString(item.type), 1),
_createElementVNode("td", null, _toDisplayString(item.source), 1),
_createElementVNode("td", null, [
_createVNode(_component_v_btn, {
icon: "",
size: "small",
color: "primary",
variant: "text",
onClick: $event => (editProxyGroup(item.name)),
disabled: !isManual(item.source)
}, {
default: _withCtx(() => [
_createVNode(_component_v_icon, null, {
default: _withCtx(() => _cache[107] || (_cache[107] = [
_createTextVNode("mdi-pencil")
])),
_: 1
})
]),
_: 2
}, 1032, ["onClick", "disabled"]),
_createVNode(_component_v_btn, {
icon: "",
size: "small",
@@ -5882,7 +5864,7 @@ return (_ctx, _cache) => {
}, {
default: _withCtx(() => [
_createVNode(_component_v_icon, null, {
default: _withCtx(() => _cache[108] || (_cache[108] = [
default: _withCtx(() => _cache[107] || (_cache[107] = [
_createTextVNode("mdi-code-json")
])),
_: 1
@@ -5890,6 +5872,24 @@ return (_ctx, _cache) => {
]),
_: 2
}, 1032, ["onClick"]),
_createVNode(_component_v_btn, {
icon: "",
size: "small",
color: "primary",
variant: "text",
onClick: $event => (editProxyGroup(item.name)),
disabled: !isManual(item.source)
}, {
default: _withCtx(() => [
_createVNode(_component_v_icon, null, {
default: _withCtx(() => _cache[108] || (_cache[108] = [
_createTextVNode("mdi-pencil")
])),
_: 1
})
]),
_: 2
}, 1032, ["onClick", "disabled"]),
_createVNode(_component_v_btn, {
icon: "",
size: "small",
@@ -7618,6 +7618,6 @@ return (_ctx, _cache) => {
}
};
const PageComponent = /*#__PURE__*/_export_sfc(_sfc_main, [['__scopeId',"data-v-4a22a0ac"]]);
const PageComponent = /*#__PURE__*/_export_sfc(_sfc_main, [['__scopeId',"data-v-0e274fe6"]]);
export { PageComponent as default };

View File

@@ -2,8 +2,8 @@ const currentImports = {};
const exportSet = new Set(['Module', '__esModule', 'default', '_export_sfc']);
let moduleMap = {
"./Page":()=>{
dynamicLoadingCss(["__federation_expose_Page-DKIzYtCl.css"], false, './Page');
return __federation_import('./__federation_expose_Page-BOIuCekN.js').then(module =>Object.keys(module).every(item => exportSet.has(item)) ? () => module.default : () => module)},
dynamicLoadingCss(["__federation_expose_Page-DEA-0_Sq.css"], false, './Page');
return __federation_import('./__federation_expose_Page-y2yjbals.js').then(module =>Object.keys(module).every(item => exportSet.has(item)) ? () => module.default : () => module)},
"./Config":()=>{
dynamicLoadingCss(["__federation_expose_Config-s4NWY2Hy.css"], false, './Config');
return __federation_import('./__federation_expose_Config-HFXrSiMi.js').then(module =>Object.keys(module).every(item => exportSet.has(item)) ? () => module.default : () => module)},