update 2024-12-03 20:41:15
parent fc63828564, commit b79ab83065
@@ -55,21 +55,17 @@ void init_not_http_cache(const int interval) {
 }
 
 bool cache_contains(struct addr_port target) {
-    pthread_rwlock_rdlock(&cacheLock);
+    pthread_rwlock_wrlock(&cacheLock);
 
     struct cache *s;
     HASH_FIND(hh, not_http_dst_cache, &target, sizeof(struct addr_port), s);
 
-    pthread_rwlock_unlock(&cacheLock);
-
     if (s != NULL) {
-        pthread_rwlock_wrlock(&cacheLock);
         s->last_time = time(NULL);
-        pthread_rwlock_unlock(&cacheLock);
-        return true;
     }
 
-    return false;
+    pthread_rwlock_unlock(&cacheLock);
+
+    return s != NULL;
 }
 
 void cache_add(struct addr_port addr_port) {
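
Read on its own, this hunk swaps the read lock for a write lock and keeps the last_time refresh inside the same critical section; the old code released the lock between the lookup and the update, leaving a window in which another thread could evict the entry it was about to touch. A minimal compilable sketch of how cache_contains plausibly reads after the change (the struct layouts and the uthash include are illustrative stand-ins, not the project's actual headers):

#include <pthread.h>
#include <stdbool.h>
#include <stdint.h>
#include <time.h>
#include "uthash.h"   /* HASH_FIND; assumed to be the hash table the project already uses */

/* Illustrative stand-ins for the project's real definitions. */
struct addr_port {
    uint32_t addr;
    uint16_t port;
};

struct cache {
    struct addr_port target;   /* hash key */
    time_t last_time;
    UT_hash_handle hh;
};

static struct cache *not_http_dst_cache = NULL;
static pthread_rwlock_t cacheLock = PTHREAD_RWLOCK_INITIALIZER;

bool cache_contains(struct addr_port target) {
    /* One exclusive lock for the whole lookup-and-touch: the entry cannot be
       evicted between HASH_FIND and the last_time update. */
    pthread_rwlock_wrlock(&cacheLock);

    struct cache *s;
    HASH_FIND(hh, not_http_dst_cache, &target, sizeof(struct addr_port), s);
    if (s != NULL) {
        s->last_time = time(NULL);
    }

    pthread_rwlock_unlock(&cacheLock);
    return s != NULL;
}

A write lock on every lookup is the price for making the hit path's last_time update atomic with the find.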
@@ -10,6 +10,7 @@
 #endif
 
 #include <assert.h>
+#include <linux/if_ether.h>
 #include <libnetfilter_queue/libnetfilter_queue_ipv4.h>
 #include <libnetfilter_queue/libnetfilter_queue_ipv6.h>
 #include <libnetfilter_queue/libnetfilter_queue_tcp.h>
@@ -84,7 +85,7 @@ static void send_verdict(const struct nf_queue *queue, const struct nf_packet *p
         syslog(LOG_ERR, "failed to put nfqueue header");
         goto end;
     }
-    nfq_nlmsg_verdict_put(nlh, pkt->packet_id, NF_ACCEPT);
+    nfq_nlmsg_verdict_put(nlh, (int)pkt->packet_id, NF_ACCEPT);
 
     if (mark.should_set) {
         struct nlattr *nest = mnl_attr_nest_start_check(nlh, SEND_BUF_LEN, NFQA_CT);
@@ -202,32 +203,19 @@ static bool ipv6_set_transport_header(struct pkt_buff *pkt_buff) {
     return true;
 }
 
-static int set_transport_header(struct pkt_buff *pkt_buff, const int ip_type) {
-    if (ip_type == IPV4) {
-        if (ipv4_set_transport_header(pkt_buff)) {
-            count_ipv4_packet();
-            return IPV4;
-        }
-        return IP_UNK;
-    }
-    if (ip_type == IPV6) {
-        if (ipv6_set_transport_header(pkt_buff)) {
-            count_ipv6_packet();
-            return IPV6;
-        }
-        return IP_UNK;
+int get_pkt_ip_version(const struct nf_packet *pkt) {
+    if (pkt->has_conntrack) {
+        return pkt->orig.ip_version;
     }
 
-    // unknown ip type
-    if (ipv4_set_transport_header(pkt_buff)) {
-        count_ipv4_packet();
-        return IPV4;
-    }
-    if (ipv6_set_transport_header(pkt_buff)) {
-        count_ipv6_packet();
-        return IPV6;
-    }
-    return IP_UNK;
+    switch (pkt->hw_protocol) {
+        case ETH_P_IP:
+            return IPV4;
+        case ETH_P_IPV6:
+            return IPV6;
+        default:
+            return IP_UNK;
+    }
 }
 
 void handle_packet(const struct nf_queue *queue, const struct nf_packet *pkt) {
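
The replacement helper no longer needs the packet buffer at all: it trusts the conntrack-reported IP version when one is present and otherwise falls back to the link-layer ethertype carried in the nfqueue metadata, which is also why <linux/if_ether.h> is added above. A sketch of the resulting function with the surrounding types reduced to illustrative stand-ins (the real nf_packet layout and the IPV4/IPV6/IP_UNK constants live in the project's own headers):

#include <stdbool.h>
#include <stdint.h>
#include <linux/if_ether.h>   /* ETH_P_IP, ETH_P_IPV6 */

/* Illustrative stand-ins for the project's real definitions. */
enum { IP_UNK = 0, IPV4 = 4, IPV6 = 6 };

struct nf_packet {
    bool has_conntrack;
    struct { int ip_version; } orig;   /* filled from conntrack attributes */
    uint16_t hw_protocol;              /* link-layer ethertype reported with the queued packet */
};

int get_pkt_ip_version(const struct nf_packet *pkt) {
    /* Prefer the IP version that conntrack already determined. */
    if (pkt->has_conntrack) {
        return pkt->orig.ip_version;
    }
    /* Otherwise classify by the ethertype of the queued packet. */
    switch (pkt->hw_protocol) {
        case ETH_P_IP:
            return IPV4;
        case ETH_P_IPV6:
            return IPV6;
        default:
            return IP_UNK;
    }
}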
@@ -244,18 +232,22 @@ void handle_packet(const struct nf_queue *queue, const struct nf_packet *pkt) {
     }
 
     struct pkt_buff *pkt_buff = pktb_alloc(AF_INET, pkt->payload, pkt->payload_len, 0);
-    assert(pkt_buff != NULL);
-
-    int type;
-    if (pkt->has_conntrack) {
-        type = pkt->orig.ip_version;
-        set_transport_header(pkt_buff, type);
-    } else {
-        type = set_transport_header(pkt_buff, IP_UNK);
-        if (type == IP_UNK) {
-            syslog(LOG_ERR, "Failed to set transport header");
-            goto end;
-        }
+    if (pkt_buff == NULL) {
+        syslog(LOG_ERR, "Failed to allocate packet buffer");
+        goto end;
+    }
+
+    int type = get_pkt_ip_version(pkt);
+    if (type == IP_UNK) {
+        // will this happen?
+        send_verdict(queue, pkt, get_next_mark(pkt, false), NULL);
+        syslog(LOG_WARNING, "Received unknown ip packet %x. You may set wrong firewall rules.", pkt->hw_protocol);
+    }
+
+    if (type == IPV4) {
+        assert(ipv4_set_transport_header(pkt_buff));
+    } else if (type == IPV6) {
+        assert(ipv6_set_transport_header(pkt_buff));
     }
 
     const __auto_type tcp_hdr = nfq_tcp_get_hdr(pkt_buff);
@@ -347,7 +339,9 @@ void handle_packet(const struct nf_queue *queue, const struct nf_packet *pkt) {
 
 end:
     free(pkt->payload);
-    pktb_free(pkt_buff);
+    if (pkt_buff != NULL) {
+        pktb_free(pkt_buff);
+    }
 
     try_print_statistics();
 }
@@ -10,7 +10,7 @@ static long long tcp_packet_count = 0;
 
 static long long ipv4_packet_count = 0;
 static long long ipv6_packet_count = 0;
-static long long last_report_count = 4;
+static long long last_report_count = 1;
 
 static time_t start_t;
 
@@ -29,7 +29,7 @@ void count_ipv4_packet() { ipv4_packet_count++; }
 
 void count_ipv6_packet() { ipv6_packet_count++; }
 
-static char time_string_buffer[100];
+static char time_string_buffer[512];
 
 char *fill_time_string(const double sec) {
     const int s = (int)sec;
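
The report buffer grows from 100 to 512 bytes. The hunk does not show the formatting code itself; a common way to stay safe at either size is to bound the write with snprintf against sizeof(time_string_buffer). A self-contained sketch along those lines (the exact output format below is invented for illustration and is not the project's real format):

#include <stdio.h>

static char time_string_buffer[512];

char *fill_time_string(const double sec) {
    const int s = (int)sec;
    /* snprintf is bounded by the buffer size, so even unexpected inputs
       cannot overflow the (now larger) static buffer. */
    snprintf(time_string_buffer, sizeof(time_string_buffer),
             "%dd %02dh %02dm %02ds",
             s / 86400, (s % 86400) / 3600, (s % 3600) / 60, s % 60);
    return time_string_buffer;
}

int main(void) {
    printf("%s\n", fill_time_string(93784.5)); /* prints "1d 02h 03m 04s" */
    return 0;
}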
@@ -72,6 +72,7 @@ void main_loop(struct nf_queue *queue) {
                 break;
             }
         } else {
+            should_exit = true;
             break;
         }
     }
@@ -7,6 +7,7 @@ if not api.finded_com("xray") then
 end
 
 local appname = api.appname
+local jsonc = api.jsonc
 local uci = api.uci
 
 local type_name = "Xray"
@@ -393,7 +394,7 @@ o:depends({ [option_name("tls")] = true, [option_name("utls")] = true })
 o:depends({ [option_name("tls")] = true, [option_name("reality")] = true })
 
 o = s:option(ListValue, option_name("transport"), translate("Transport"))
-o:value("raw", "RAW")
+o:value("raw", "RAW (TCP)")
 o:value("mkcp", "mKCP")
 o:value("ws", "WebSocket")
 o:value("h2", "HTTP/2")
@@ -401,7 +402,7 @@ o:value("ds", "DomainSocket")
 o:value("quic", "QUIC")
 o:value("grpc", "gRPC")
 o:value("httpupgrade", "HttpUpgrade")
-o:value("xhttp", "XHTTP")
+o:value("xhttp", "XHTTP (SplitHTTP)")
 o:depends({ [option_name("protocol")] = "vmess" })
 o:depends({ [option_name("protocol")] = "vless" })
 o:depends({ [option_name("protocol")] = "socks" })
@@ -494,6 +495,10 @@ o = s:option(Value, option_name("ws_path"), translate("WebSocket Path"))
 o.placeholder = "/"
 o:depends({ [option_name("transport")] = "ws" })
 
+o = s:option(Value, option_name("ws_heartbeatPeriod"), translate("HeartbeatPeriod(second)"))
+o.datatype = "integer"
+o:depends({ [option_name("transport")] = "ws" })
+
 -- [[ HTTP/2部分 ]]--
 o = s:option(Value, option_name("h2_host"), translate("HTTP/2 Host"))
 o:depends({ [option_name("transport")] = "h2" })
@@ -568,6 +573,14 @@ o.placeholder = "/"
 o:depends({ [option_name("transport")] = "httpupgrade" })
 
 -- [[ XHTTP部分 ]]--
+o = s:option(ListValue, option_name("xhttp_mode"), "XHTTP " .. translate("Mode"))
+o:depends({ [option_name("transport")] = "xhttp" })
+o.default = "auto"
+o:value("auto")
+o:value("packet-up")
+o:value("stream-up")
+o:value("stream-one")
+
 o = s:option(Value, option_name("xhttp_host"), translate("XHTTP Host"))
 o:depends({ [option_name("transport")] = "xhttp" })
 
@@ -575,104 +588,34 @@ o = s:option(Value, option_name("xhttp_path"), translate("XHTTP Path"))
 o.placeholder = "/"
 o:depends({ [option_name("transport")] = "xhttp" })
 
--- XHTTP XMUX
-o = s:option(Flag, option_name("xhttp_xmux"), "XMUX", translate("Enable XHTTP XMUX. It's not recommended to enable Mux.Cool at the same time."))
+o = s:option(TextValue, option_name("xhttp_extra"), translate("XHTTP Extra"), translate("An <a target='_blank' href='https://xtls.github.io/config/transports/splithttp.html#extra'>XHTTP extra object</a> in raw json"))
 o:depends({ [option_name("transport")] = "xhttp" })
-
-o = s:option(Value, option_name("maxConcurrency"), translate("XMUX Max Concurrency"))
-o:depends({ [option_name("xhttp_xmux")] = true })
-
-o = s:option(Value, option_name("maxConnections"), translate("XMUX Max Connections"))
-o:depends({ [option_name("xhttp_xmux")] = true })
-
-o = s:option(Value, option_name("cMaxReuseTimes"), translate("XMUX Connection Max Reuse Times"))
-o:depends({ [option_name("xhttp_xmux")] = true })
-
-o = s:option(Value, option_name("cMaxLifetimeMs"), translate("XMUX Connection Max Lifetime (ms)"))
-o:depends({ [option_name("xhttp_xmux")] = true })
-
--- XHTTP 下行
-o = s:option(Flag, option_name("xhttp_download"), string.format('<a style="color:red">%s</a>', translate("XHTTP download splitting")))
-o:depends({ [option_name("transport")] = "xhttp" })
-
-o = s:option(Value, option_name("xhttp_download_address"), string.format('<a style="color:red">%s</a>', translate("Address")))
-o:depends({ [option_name("xhttp_download")] = true })
-
-o = s:option(Value, option_name("xhttp_download_port"), string.format('<a style="color:red">%s</a>', translate("Port")))
-o:depends({ [option_name("xhttp_download")] = true })
-
-o = s:option(Value, option_name("xhttp_download_host"), string.format('<a style="color:red">%s</a>', "XHTTP Host"))
-o:depends({ [option_name("xhttp_download")] = true })
-
-o = s:option(Value, option_name("xhttp_download_path"), string.format('<a style="color:red">%s</a>', "XHTTP Path"), translate("Must be the same as upload path."))
-o.placeholder = "/"
-o:depends({ [option_name("xhttp_download")] = true })
-
-o = s:option(Flag, option_name("xhttp_download_tls"), string.format('<a style="color:red">%s</a>', "TLS"))
-o:depends({ [option_name("xhttp_download")] = true })
-o.default = 0
-
-o = s:option(Flag, option_name("xhttp_download_reality"), string.format('<a style="color:red">%s</a>', "REALITY"))
-o.default = 0
-o:depends({ [option_name("xhttp_download_tls")] = true })
-
-o = s:option(ListValue, option_name("xhttp_download_alpn"), string.format('<a style="color:red">%s</a>', "alpn"))
-o.default = "default"
-o:value("default", translate("Default"))
-o:value("h3")
-o:value("h2")
-o:value("h3,h2")
-o:value("http/1.1")
-o:value("h2,http/1.1")
-o:value("h3,h2,http/1.1")
-o:depends({ [option_name("xhttp_download_tls")] = true, [option_name("xhttp_download_reality")] = false })
-
-o = s:option(Value, option_name("xhttp_download_tls_serverName"), string.format('<a style="color:red">%s</a>', translate("Domain")))
-o:depends({ [option_name("xhttp_download_tls")] = true })
-
-o = s:option(Value, option_name("xhttp_download_reality_publicKey"), string.format('<a style="color:red">%s</a>', translate("Public Key")))
-o:depends({ [option_name("xhttp_download_tls")] = true, [option_name("xhttp_download_reality")] = true })
-
-o = s:option(Value, option_name("xhttp_download_reality_shortId"), string.format('<a style="color:red">%s</a>', translate("Short Id")))
-o:depends({ [option_name("xhttp_download_tls")] = true, [option_name("xhttp_download_reality")] = true })
-
-o = s:option(Value, option_name("xhttp_download_reality_spiderX"), string.format('<a style="color:red">%s</a>', "Spider X"))
-o.placeholder = "/"
-o:depends({ [option_name("xhttp_download_tls")] = true, [option_name("xhttp_download_reality")] = true })
-
-o = s:option(Flag, option_name("xhttp_download_utls"), string.format('<a style="color:red">%s</a>', "uTLS"))
-o.default = "0"
-o:depends({ [option_name("xhttp_download_tls")] = true, [option_name("xhttp_download_reality")] = false })
-
-o = s:option(ListValue, option_name("xhttp_download_fingerprint"), string.format('<a style="color:red">%s</a>', translate("Finger Print")))
-o:value("chrome")
-o:value("firefox")
-o:value("edge")
-o:value("safari")
-o:value("360")
-o:value("qq")
-o:value("ios")
-o:value("android")
-o:value("random")
-o:value("randomized")
-o.default = "chrome"
-o:depends({ [option_name("xhttp_download_tls")] = true, [option_name("xhttp_download_utls")] = true })
-o:depends({ [option_name("xhttp_download_tls")] = true, [option_name("xhttp_download_reality")] = true })
-
-o = s:option(Flag, option_name("xhttp_download_xmux"), string.format('<a style="color:red">%s</a>', "XMUX"), translate("Enable XHTTP XMUX. It's not recommended to enable Mux.Cool at the same time."))
-o:depends({ [option_name("xhttp_download")] = true })
-
-o = s:option(Value, option_name("download_maxConcurrency"), string.format('<a style="color:red">%s</a>', translate("XMUX Max Concurrency")))
-o:depends({ [option_name("xhttp_download_xmux")] = true })
-
-o = s:option(Value, option_name("download_maxConnections"), string.format('<a style="color:red">%s</a>', translate("XMUX Max Connections")))
-o:depends({ [option_name("xhttp_download_xmux")] = true })
-
-o = s:option(Value, option_name("download_cMaxReuseTimes"), string.format('<a style="color:red">%s</a>', translate("XMUX Connection Max Reuse Times")))
-o:depends({ [option_name("xhttp_download_xmux")] = true })
-
-o = s:option(Value, option_name("download_cMaxLifetimeMs"), string.format('<a style="color:red">%s</a>', translate("XMUX Connection Max Lifetime (ms)")))
-o:depends({ [option_name("xhttp_download_xmux")] = true })
+o.rows = 15
+o.wrap = "off"
+o.custom_write = function(self, section, value)
+	m:set(section, self.option:sub(1 + #option_prefix), value)
+	local success, data = pcall(jsonc.parse, value)
+	if success and data then
+		local address = (data.extra and data.extra.downloadSettings and data.extra.downloadSettings.address)
+			or (data.downloadSettings and data.downloadSettings.address)
+		if address and address ~= "" then
+			m:set(section, "download_address", address)
+		else
+			m:del(section, "download_address")
+		end
+	else
+		m:del(section, "download_address")
+	end
+end
+o.validate = function(self, value)
+	value = value:gsub("\r\n", "\n"):gsub("^[ \t]*\n", ""):gsub("\n[ \t]*$", ""):gsub("\n[ \t]*\n", "\n")
+	if value:sub(-1) == "\n" then
+		value = value:sub(1, -2)
+	end
+	return value
+end
 
 -- [[ Mux.Cool ]]--
 o = s:option(Flag, option_name("mux"), "Mux", translate("Enable Mux.Cool"))
@@ -1069,6 +1069,10 @@ function luci_types(id, m, s, type_name, option_prefix)
 		end
 		s.fields[key].write = function(self, section, value)
 			if s.fields["type"]:formvalue(id) == type_name then
+				-- 添加自定义 custom_write 属性,如果有自定义的 custom_write 函数,则使用自定义的 write 逻辑
+				if self.custom_write then
+					self:custom_write(section, value)
+				else
 				if self.rewrite_option then
 					m:set(section, self.rewrite_option, value)
 				else
@@ -1078,6 +1082,7 @@ function luci_types(id, m, s, type_name, option_prefix)
 				end
 			end
+			end
 		end
 		s.fields[key].remove = function(self, section)
 			if s.fields["type"]:formvalue(id) == type_name then
 				if self.rewrite_option and rewrite_option_table[self.rewrite_option] == 1 then
@@ -110,15 +110,6 @@ function gen_outbound(flag, node, tag, proxy_table)
 			end
 		end
 
-		if node.type == "Xray" and node.transport == "xhttp" then
-			if node.xhttp_download_tls and node.xhttp_download_tls == "1" then
-				node.xhttp_download_stream_security = "tls"
-				if node.xhttp_download_reality and node.xhttp_download_reality == "1" then
-					node.xhttp_download_stream_security = "reality"
-				end
-			end
-		end
-
 		if node.protocol == "wireguard" and node.wireguard_reserved then
 			local bytes = {}
 			if not node.wireguard_reserved:match("[^%d,]+") then
@@ -195,7 +186,8 @@ function gen_outbound(flag, node, tag, proxy_table)
 					headers = (node.ws_host ~= nil) and
 						{Host = node.ws_host} or nil,
 					maxEarlyData = tonumber(node.ws_maxEarlyData) or nil,
-					earlyDataHeaderName = (node.ws_earlyDataHeaderName) and node.ws_earlyDataHeaderName or nil
+					earlyDataHeaderName = (node.ws_earlyDataHeaderName) and node.ws_earlyDataHeaderName or nil,
+					heartbeatPeriod = tonumber(node.ws_heartbeatPeriod) or nil
 				} or nil,
 				httpSettings = (node.transport == "h2") and {
 					path = node.h2_path or "/",
@@ -223,34 +215,18 @@ function gen_outbound(flag, node, tag, proxy_table)
 					host = node.httpupgrade_host
 				} or nil,
 				xhttpSettings = (node.transport == "xhttp" or node.transport == "splithttp") and {
 					mode = node.xhttp_mode or "auto",
 					path = node.xhttp_path or node.splithttp_path or "/",
 					host = node.xhttp_host or node.splithttp_host,
-					downloadSettings = (node.xhttp_download == "1") and {
-						address = node.xhttp_download_address,
-						port = tonumber(node.xhttp_download_port),
-						network = "xhttp",
-						xhttpSettings = {
-							path = node.xhttp_download_path,
-							host = node.xhttp_download_host,
-						},
-						security = node.xhttp_download_stream_security,
-						tlsSettings = (node.xhttp_download_stream_security == "tls") and {
-							serverName = node.xhttp_download_tls_serverName,
-							allowInsecure = false,
-							fingerprint = (node.xhttp_download_utls == "1" and
-								node.xhttp_download_fingerprint and
-								node.xhttp_download_fingerprint ~= "") and node.xhttp_download_fingerprint or nil
-						} or nil,
-						realitySettings = (node.xhttp_download_stream_security == "reality") and {
-							serverName = node.xhttp_download_tls_serverName,
-							publicKey = node.xhttp_download_reality_publicKey,
-							shortId = node.xhttp_download_reality_shortId or "",
-							spiderX = node.xhttp_download_reality_spiderX or "/",
-							fingerprint = (
-								node.xhttp_download_fingerprint and
-								node.xhttp_download_fingerprint ~= "") and node.xhttp_download_fingerprint or nil
-						} or nil,
-					} or nil
+					-- 如果包含 "extra" 节,取 "extra" 内的内容,否则直接赋值给 extra
+					extra = node.xhttp_extra and (function()
+						local success, parsed = pcall(jsonc.parse, node.xhttp_extra)
+						if success then
+							return parsed.extra or parsed
+						else
+							return nil
+						end
+					end)() or nil
 				} or nil,
 			} or nil,
 			settings = {
@@ -317,40 +293,6 @@ function gen_outbound(flag, node, tag, proxy_table)
 			end
 		end
 
-		local alpn_download = {}
-		if node.xhttp_download_alpn and node.xhttp_download_alpn ~= "default" then
-			string.gsub(node.xhttp_download_alpn, '[^' .. "," .. ']+', function(w)
-				table.insert(alpn_download, w)
-			end)
-		end
-		if alpn_download and #alpn_download > 0 then
-			if result.streamSettings.xhttpSettings.downloadSettings.tlsSettings then
-				result.streamSettings.xhttpSettings.downloadSettings.tlsSettings.alpn = alpn_download
-			end
-		end
-
-		local xmux = {}
-		if (node.xhttp_xmux == "1") then
-			xmux.maxConcurrency = node.maxConcurrency and (string.find(node.maxConcurrency, "-") and node.maxConcurrency or tonumber(node.maxConcurrency)) or 0
-			xmux.maxConnections = node.maxConnections and (string.find(node.maxConnections, "-") and node.maxConnections or tonumber(node.maxConnections)) or 0
-			xmux.cMaxReuseTimes = node.cMaxReuseTimes and (string.find(node.cMaxReuseTimes, "-") and node.cMaxReuseTimes or tonumber(node.cMaxReuseTimes)) or 0
-			xmux.cMaxLifetimeMs = node.cMaxLifetimeMs and (string.find(node.cMaxLifetimeMs, "-") and node.cMaxLifetimeMs or tonumber(node.cMaxLifetimeMs)) or 0
-			if result.streamSettings.xhttpSettings then
-				result.streamSettings.xhttpSettings.xmux = xmux
-			end
-		end
-
-		local xmux_download = {}
-		if (node.xhttp_download_xmux == "1") then
-			xmux_download.maxConcurrency = node.download_maxConcurrency and (string.find(node.download_maxConcurrency, "-") and node.download_maxConcurrency or tonumber(node.download_maxConcurrency)) or 0
-			xmux_download.maxConnections = node.download_maxConnections and (string.find(node.download_maxConnections, "-") and node.download_maxConnections or tonumber(node.download_maxConnections)) or 0
-			xmux_download.cMaxReuseTimes = node.download_cMaxReuseTimes and (string.find(node.download_cMaxReuseTimes, "-") and node.download_cMaxReuseTimes or tonumber(node.download_cMaxReuseTimes)) or 0
-			xmux_download.cMaxLifetimeMs = node.download_cMaxLifetimeMs and (string.find(node.download_cMaxLifetimeMs, "-") and node.download_cMaxLifetimeMs or tonumber(node.download_cMaxLifetimeMs)) or 0
-			if result.streamSettings.xhttpSettings.downloadSettings.xhttpSettings then
-				result.streamSettings.xhttpSettings.downloadSettings.xhttpSettings.xmux = xmux_download
-			end
-		end
-
 	end
 	return result
 end
@@ -367,6 +367,8 @@ local api = require "luci.passwall2.api"
 				} else if (v_transport === "xhttp") {
 					params += opt.query("host", dom_prefix + "xhttp_host");
 					params += opt.query("path", dom_prefix + "xhttp_path");
+					params += opt.query("mode", dom_prefix + "xhttp_mode");
+					params += opt.query("extra", dom_prefix + "xhttp_extra");
 				}
 				params += "&type=" + v_transport;
 
@@ -1154,6 +1156,8 @@ local api = require "luci.passwall2.api"
 			} else if (queryParam.type === "xhttp" || queryParam.type === "splithttp") {
 				opt.set(dom_prefix + 'xhttp_host', queryParam.host || "");
 				opt.set(dom_prefix + 'xhttp_path', queryParam.path || "");
+				opt.set(dom_prefix + 'xhttp_mode', queryParam.mode || "auto");
+				opt.set(dom_prefix + 'xhttp_extra', queryParam.extra || "");
 			}
 
 			if (m.hash) {
@@ -1381,26 +1381,8 @@ msgstr "客户端文件不适合当前设备。"
 msgid "Can't move new file to path: %s"
 msgstr "无法移动新文件到:%s"
 
-msgid "XHTTP download splitting"
-msgstr "XHTTP 下行分离"
-
-msgid "Must be the same as upload path."
-msgstr "必须与上行 path 相同。"
-
-msgid "Enable XHTTP XMUX. It's not recommended to enable Mux.Cool at the same time."
-msgstr "启用 XHTTP XMUX。不建议与 Mux.Cool 同时启用。"
-
-msgid "XMUX Max Concurrency"
-msgstr "XMUX 连接最大复用流数"
-
-msgid "XMUX Max Connections"
-msgstr "XMUX 最大连接数"
-
-msgid "XMUX Connection Max Reuse Times"
-msgstr "XMUX 连接最多复用次数"
-
-msgid "XMUX Connection Max Lifetime (ms)"
-msgstr "XMUX 连接最大存活时间(ms)"
+msgid "An <a target='_blank' href='https://xtls.github.io/config/transports/splithttp.html#extra'>XHTTP extra object</a> in raw json"
+msgstr "一个 json 格式的 <a target='_blank' href='https://xtls.github.io/config/transports/splithttp.html#extra'>XHTTP extra object</a>"
 
 msgid "Enable Mux.Cool"
 msgstr "启用 Mux.Cool"
@@ -1480,6 +1462,9 @@ msgstr "端口跳跃时间 "
 msgid "Additional ports for hysteria hop"
 msgstr "端口跳跃额外端口"
 
+msgid "HeartbeatPeriod(second)"
+msgstr "心跳周期(单位:秒)"
+
 msgid "Remove resource files"
 msgstr "删除资源文件"
 
@@ -100,7 +100,7 @@ add() {
 }
 
 	#始终用国内DNS解析节点域名
-	servers=$(uci show "${CONFIG}" | grep ".address=" | cut -d "'" -f 2)
+	servers=$(uci show "${CONFIG}" | grep -E "(.address=|.download_address=)" | cut -d "'" -f 2)
 	hosts_foreach "servers" host_from_url | grep '[a-zA-Z]$' | sort -u | grep -v "engage.cloudflareclient.com" | gen_items settype="${set_type}" setnames="${setflag_4}passwall2_vpslist,${setflag_6}passwall2_vpslist6" dnss="${LOCAL_DNS:-${DEFAULT_DNS}}" outf="${TMP_DNSMASQ_PATH}/10-vpslist_host.conf" ipsetoutf="${TMP_DNSMASQ_PATH}/ipset.conf"
 	echolog " - [$?]节点列表中的域名(vpslist):${DEFAULT_DNS:-默认}"
 
@@ -458,6 +458,8 @@ filter_haproxy() {
 filter_vpsip() {
 	uci show $CONFIG | grep ".address=" | cut -d "'" -f 2 | grep -E "([0-9]{1,3}[\.]){3}[0-9]{1,3}" | sed -e "/^$/d" | sed -e "s/^/add $IPSET_VPSLIST &/g" | awk '{print $0} END{print "COMMIT"}' | ipset -! -R
 	uci show $CONFIG | grep ".address=" | cut -d "'" -f 2 | grep -E "([A-Fa-f0-9]{1,4}::?){1,7}[A-Fa-f0-9]{1,4}" | sed -e "/^$/d" | sed -e "s/^/add $IPSET_VPSLIST6 &/g" | awk '{print $0} END{print "COMMIT"}' | ipset -! -R
+	uci show $CONFIG | grep ".download_address=" | cut -d "'" -f 2 | grep -E "([0-9]{1,3}[\.]){3}[0-9]{1,3}" | sed -e "/^$/d" | sed -e "s/^/add $IPSET_VPSLIST &/g" | awk '{print $0} END{print "COMMIT"}' | ipset -! -R
+	uci show $CONFIG | grep ".download_address=" | cut -d "'" -f 2 | grep -E "([A-Fa-f0-9]{1,4}::?){1,7}[A-Fa-f0-9]{1,4}" | sed -e "/^$/d" | sed -e "s/^/add $IPSET_VPSLIST6 &/g" | awk '{print $0} END{print "COMMIT"}' | ipset -! -R
 	echolog "加入所有节点到ipset[$IPSET_VPSLIST]直连完成"
 }
 
@@ -517,6 +517,8 @@ filter_vps_addr() {
 filter_vpsip() {
 	insert_nftset $NFTSET_VPSLIST "-1" $(uci show $CONFIG | grep ".address=" | cut -d "'" -f 2 | grep -E "([0-9]{1,3}[\.]){3}[0-9]{1,3}" | sed -e "/^$/d")
 	insert_nftset $NFTSET_VPSLIST6 "-1" $(uci show $CONFIG | grep ".address=" | cut -d "'" -f 2 | grep -E "([A-Fa-f0-9]{1,4}::?){1,7}[A-Fa-f0-9]{1,4}" | sed -e "/^$/d")
+	insert_nftset $NFTSET_VPSLIST "-1" $(uci show $CONFIG | grep ".download_address=" | cut -d "'" -f 2 | grep -E "([0-9]{1,3}[\.]){3}[0-9]{1,3}" | sed -e "/^$/d")
+	insert_nftset $NFTSET_VPSLIST6 "-1" $(uci show $CONFIG | grep ".download_address=" | cut -d "'" -f 2 | grep -E "([A-Fa-f0-9]{1,4}::?){1,7}[A-Fa-f0-9]{1,4}" | sed -e "/^$/d")
 	echolog "加入所有节点到nftset[$NFTSET_VPSLIST]直连完成"
 }
 
@@ -686,6 +688,7 @@ add_firewall_rule() {
 	filter_haproxy > /dev/null 2>&1 &
 	# Prevent some conditions
 	filter_vps_addr $(config_n_get $NODE address) > /dev/null 2>&1 &
+	filter_vps_addr $(config_n_get $NODE download_address) > /dev/null 2>&1 &
 
 	accept_icmp=$(config_t_get global_forwarding accept_icmp 0)
 	accept_icmpv6=$(config_t_get global_forwarding accept_icmpv6 0)
@@ -488,6 +488,16 @@ local function processData(szType, content, add_mode, add_from)
 		if info.net == 'xhttp' or info.net == 'splithttp' then
 			result.xhttp_host = info.host
 			result.xhttp_path = info.path
+			result.xhttp_mode = params.mode or "auto"
+			result.xhttp_extra = params.extra
+			local success, Data = pcall(jsonParse, params.extra)
+			if success and Data then
+				local address = (Data.extra and Data.extra.downloadSettings and Data.extra.downloadSettings.address)
+					or (Data.downloadSettings and Data.downloadSettings.address)
+				result.download_address = address and address ~= "" and address or nil
+			else
+				result.download_address = nil
+			end
 		end
 		if not info.security then result.security = "auto" end
 		if info.tls == "tls" or info.tls == "1" then