Mirror of https://github.com/bolucat/Archive.git (synced 2025-10-05 16:18:04 +08:00)

Commit: Update On Fri Sep 6 20:35:08 CEST 2024
.github/update.log (vendored, 1 change)
@@ -755,3 +755,4 @@ Update On Mon Sep 2 20:34:00 CEST 2024
 Update On Tue Sep 3 20:32:43 CEST 2024
 Update On Wed Sep 4 20:31:01 CEST 2024
 Update On Thu Sep 5 20:35:23 CEST 2024
+Update On Fri Sep 6 20:34:58 CEST 2024
clash-nyanpasu/backend/Cargo.lock (generated, 62 changes)
@@ -1284,6 +1284,7 @@ dependencies = [
  "parking_lot",
  "percent-encoding",
  "port_scanner",
+ "pretty_assertions",
  "rand 0.8.5",
  "redb",
  "regex",
@@ -1963,6 +1964,12 @@ dependencies = [
  "syn 2.0.77",
 ]
 
+[[package]]
+name = "diff"
+version = "0.1.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8"
+
 [[package]]
 name = "digest"
 version = "0.9.0"
@@ -5125,9 +5132,9 @@ checksum = "caff54706df99d2a78a5a4e3455ff45448d81ef1bb63c22cd14052ca0e993a3f"
 
 [[package]]
 name = "oxc_allocator"
-version = "0.26.0"
+version = "0.27.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "27b70e00e3c62b26ccefc5c5942b365091ebc398b4493625d34d54fabe9106cb"
+checksum = "8f922944b51ca85c0acf47c37726a1e9475e5dd9f36c2ea89d1057f5c68f91ff"
 dependencies = [
  "allocator-api2",
  "bumpalo",
@@ -5135,23 +5142,24 @@ dependencies = [
 
 [[package]]
 name = "oxc_ast"
-version = "0.26.0"
+version = "0.27.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "65eda6aacccb2922fe506bff183930df82bf7a7217ef76480f82c35813a37a91"
+checksum = "4385ef64890edde1135e5431fbe397cdc8f38bf7341d7e23429b5de09dd03897"
 dependencies = [
  "bitflags 2.6.0",
  "num-bigint",
  "oxc_allocator",
  "oxc_ast_macros",
+ "oxc_regular_expression",
  "oxc_span",
  "oxc_syntax",
 ]
 
 [[package]]
 name = "oxc_ast_macros"
-version = "0.26.0"
+version = "0.27.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "590d563fddfb8f75bcc2e8c34a6fb6d96bcce30e716b82ccb46c0a8600bc5cd2"
+checksum = "807868208f9a594a88f6714dae60bc8bed4ccb87a5d3fd33ed946f6fc8a216de"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -5160,9 +5168,9 @@ dependencies = [
 
 [[package]]
 name = "oxc_diagnostics"
-version = "0.26.0"
+version = "0.27.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "42b23332f6e1781ec5d17c8c76756564331acf45bbed9a80cc07797ab6b29d24"
+checksum = "bb283f8d9f7926c5ec4db85a65908ad72a3783110cc14771dbdec5ac81ad5d79"
 dependencies = [
  "miette",
  "owo-colors",
@@ -5172,15 +5180,15 @@ dependencies = [
 
 [[package]]
 name = "oxc_index"
-version = "0.26.0"
+version = "0.27.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "582f984479fb15e680df6f42499e0c5d1aee8f7e38fd3716851a882357c4fce0"
+checksum = "b15c56c7fe9c3d99df968c5d0b3129eb373228c09915e22fc91d24e80c262e0d"
 
 [[package]]
 name = "oxc_parser"
-version = "0.26.0"
+version = "0.27.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7ecc8a633c442f6f828159e9bb326927ee8b13bbcf871c1bdd45223cea5d901d"
+checksum = "e1ac96c09e7d0a33f25bfac70632eb3d8196e52b8e276ba5661c71da36f3d1ee"
 dependencies = [
  "assert-unchecked",
  "bitflags 2.6.0",
@@ -5199,11 +5207,12 @@ dependencies = [
 
 [[package]]
 name = "oxc_regular_expression"
-version = "0.26.0"
+version = "0.27.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d74222e38ec5fa7b4065b899282ece8689fb804ce0daeb0a2e064b3215bebbc4"
+checksum = "0d98c72fa996ba40322be6bd6c00e427036d9ffacbafad9347f08401e3553266"
 dependencies = [
  "oxc_allocator",
+ "oxc_ast_macros",
  "oxc_diagnostics",
  "oxc_span",
  "phf 0.11.2",
@@ -5213,9 +5222,9 @@ dependencies = [
 
 [[package]]
 name = "oxc_span"
-version = "0.26.0"
+version = "0.27.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "60d6f98552a864d9c68173842bdffbd30e4ca0746778c308ebd839e70a1571f5"
+checksum = "984cf0c05a0da6c557d7c7a600120841910d382008b92ac0ff44af400ba79e29"
 dependencies = [
  "compact_str",
  "miette",
@@ -5225,10 +5234,11 @@ dependencies = [
 
 [[package]]
 name = "oxc_syntax"
-version = "0.26.0"
+version = "0.27.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3ce885718dd2744dd4e6500e765ea7fe4281d17ac0ec23c499a2ad747dbe1699"
+checksum = "0343ef487214dbf9f296e155caeaab0d20a5da577ebd54a80f4fa4ce6a462f5a"
 dependencies = [
+ "assert-unchecked",
  "bitflags 2.6.0",
  "dashmap 6.0.1",
  "nonmax",
@@ -5680,6 +5690,16 @@ version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
 
+[[package]]
+name = "pretty_assertions"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "af7cee1a6c8a5b9208b3cb1061f10c0cb689087b3d8ce85fb9d2dd7a29b6ba66"
+dependencies = [
+ "diff",
+ "yansi",
+]
+
 [[package]]
 name = "prettyplease"
 version = "0.2.22"
@@ -9612,6 +9632,12 @@ version = "0.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b9cc00251562a284751c9973bace760d86c0276c471b4be569fe6b068ee97a56"
 
+[[package]]
+name = "yansi"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec"
+
 [[package]]
 name = "yoke"
 version = "0.7.4"
@@ -23,6 +23,7 @@ nyanpasu-ipc = { git = "https://github.com/LibNyanpasu/nyanpasu-service.git", fe
 ] }
 nyanpasu-utils = { git = "https://github.com/LibNyanpasu/nyanpasu-utils.git" }
 boa_utils = { path = "../boa_utils" } # should be removed when boa support console customize
+pretty_assertions = "1.4.0"
 which = "6"
 anyhow = "1.0"
 dirs = "5.0.1"
@@ -123,11 +124,11 @@ os_pipe = "1.2.0"
 whoami = "1.5.1"
 atomic_enum = "0.3.0"
 boa_engine.workspace = true
-oxc_parser = "0.26"
-oxc_allocator = "0.26"
-oxc_span = "0.26"
-oxc_ast = "0.26"
-oxc_syntax = "0.26"
+oxc_parser = "0.27"
+oxc_allocator = "0.27"
+oxc_span = "0.27"
+oxc_ast = "0.27"
+oxc_syntax = "0.27"
 mlua = { version = "0.9", features = [
     "lua54",
     "async",
@@ -1,5 +1,6 @@
 use super::{runner::ProcessOutput, Logs, LogsExt};
 use mlua::LuaSerdeExt;
+use serde::de::DeserializeOwned;
 use serde_yaml::{Mapping, Value};
 use tracing_attributes::instrument;
 
@@ -58,6 +59,161 @@ fn merge_sequence(target: &mut Value, to_merge: &Value, append: bool) {
     }
 }
 
+fn run_expr<T: DeserializeOwned>(logs: &mut Logs, item: &Value, expr: &str) -> Option<T> {
+    let lua_runtime = match super::script::create_lua_context() {
+        Ok(lua) => lua,
+        Err(e) => {
+            logs.error(e.to_string());
+            return None;
+        }
+    };
+    let item = match lua_runtime.to_value(item) {
+        Ok(v) => v,
+        Err(e) => {
+            logs.error(format!("failed to convert item to lua value: {:#?}", e));
+            return None;
+        }
+    };
+
+    if let Err(e) = lua_runtime.globals().set("item", item) {
+        logs.error(e.to_string());
+        return None;
+    }
+    let res = lua_runtime.load(expr).eval::<mlua::Value>();
+    match res {
+        Ok(v) => {
+            if let Ok(v) = lua_runtime.from_value(v) {
+                Some(v)
+            } else {
+                logs.error("failed to convert lua value to serde value");
+                None
+            }
+        }
+        Err(e) => {
+            logs.error(format!("failed to run expr: {:#?}", e));
+            None
+        }
+    }
+}
+
+fn do_filter(logs: &mut Logs, config: &mut Value, field_str: &str, filter: &Value) {
+    let field = match find_field(config, field_str) {
+        Some(field) if !field.is_sequence() => {
+            logs.warn(format!("field is not sequence: {:#?}", field_str));
+            return;
+        }
+        Some(field) => field,
+        None => {
+            logs.warn(format!("field not found: {:#?}", field_str));
+            return;
+        }
+    };
+    match filter {
+        Value::Sequence(filters) => {
+            for filter in filters {
+                do_filter(logs, config, field_str, filter);
+            }
+        }
+        Value::String(filter) => {
+            let list = field.as_sequence_mut().unwrap();
+            list.retain(|item| run_expr(logs, item, filter).unwrap_or(false));
+        }
+        Value::Mapping(filter)
+            if filter.get("when").is_some_and(|v| v.is_string())
+                && filter.get("expr").is_some_and(|v| v.is_string()) =>
+        {
+            let when = filter.get("when").unwrap().as_str().unwrap();
+            let expr = filter.get("expr").unwrap().as_str().unwrap();
+            let list = field.as_sequence_mut().unwrap();
+            list.iter_mut().for_each(|item| {
+                let r#match = run_expr(logs, item, when);
+                if r#match.unwrap_or(false) {
+                    let res: Option<Value> = run_expr(logs, item, expr);
+                    if let Some(res) = res {
+                        *item = res;
+                    }
+                }
+            });
+        }
+        Value::Mapping(filter)
+            if filter.get("when").is_some_and(|v| v.is_string())
+                && filter.contains_key("override") =>
+        {
+            let when = filter.get("when").unwrap().as_str().unwrap();
+            let r#override = filter.get("override").unwrap();
+            let list = field.as_sequence_mut().unwrap();
+            list.iter_mut().for_each(|item| {
+                let r#match = run_expr(logs, item, when);
+                if r#match.unwrap_or(false) {
+                    *item = r#override.clone();
+                }
+            });
+        }
+        Value::Mapping(filter)
+            if filter.get("when").is_some_and(|v| v.is_string())
+                && filter.get("merge").is_some_and(|v| v.is_mapping()) =>
+        {
+            let when = filter.get("when").unwrap().as_str().unwrap();
+            let merge = filter.get("merge").unwrap().as_mapping().unwrap();
+            let list = field.as_sequence_mut().unwrap();
+            list.iter_mut().for_each(|item| {
+                let r#match = run_expr(logs, item, when);
+                if r#match.unwrap_or(false) {
+                    for (key, value) in merge.iter() {
+                        override_recursive(item.as_mapping_mut().unwrap(), key, value.clone());
+                    }
+                }
+            });
+        }
+
+        Value::Mapping(filter)
+            if filter.get("when").is_some_and(|v| v.is_string())
+                && filter.get("remove").is_some_and(|v| v.is_sequence()) =>
+        {
+            let when = filter.get("when").unwrap().as_str().unwrap();
+            let remove = filter.get("remove").unwrap().as_sequence().unwrap();
+            let list = field.as_sequence_mut().unwrap();
+            list.iter_mut().for_each(|item| {
+                let r#match = run_expr(logs, item, when);
+                if r#match.unwrap_or(false) {
+                    remove.iter().for_each(|key| {
+                        if key.is_string() && item.is_mapping() {
+                            let key_str = key.as_str().unwrap();
+                            // split the dotted key_str and keep the last segment aside
+                            let mut keys = key_str.split('.').collect::<Vec<_>>();
+                            let last_key = if keys.len() > 1 {
+                                keys.pop().unwrap()
+                            } else {
+                                key_str
+                            };
+                            let key_str = keys.join(".");
+                            if let Some(field) = find_field(item, &key_str) {
+                                field.as_mapping_mut().unwrap().remove(last_key);
+                            }
+                        } else {
+                            match item {
+                                Value::Sequence(list) if key.is_i64() => {
+                                    let index = key.as_i64().unwrap() as usize;
+                                    if index < list.len() {
+                                        list.remove(index);
+                                    }
+                                }
+                                _ => {
+                                    logs.warn(format!("invalid key: {:#?}", key));
+                                }
+                            }
+                        }
+                    });
+                }
+            });
+        }
+
+        _ => {
+            logs.warn(format!("invalid filter: {:#?}", filter));
+        }
+    }
+}
+
 #[instrument(skip(merge, config))]
 pub fn use_merge(merge: Mapping, mut config: Mapping) -> ProcessOutput {
     tracing::trace!("original config: {:#?}", config);
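For reference, the match arms added above accept either a bare Lua expression (an item is kept only when the expression evaluates to true) or a list of when-guarded actions (expr, override, merge, remove). A minimal sketch of a merge profile exercising each form — the field names, proxy types, and values below are illustrative, not taken from this commit:

filter__proxies:
  - when: |
      type(item) == 'table' and item.type == 'ss'
    expr: |
      item.name = item.name .. ' [ss]'
      return item
  - when: |
      item == 'LEGACY'
    override: REPLACED
  - when: |
      type(item) == 'table' and item.type == 'hysteria2'
    merge:
      skip-cert-verify: true
  - when: |
      type(item) == 'table'
    remove:
      - obfs-password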
@@ -124,41 +280,7 @@ pub fn use_merge(merge: Mapping, mut config: Mapping) -> ProcessOutput {
             }
             key_str if key_str.starts_with("filter__") => {
                 let key_str = key_str.replace("filter__", "");
-                if !value.is_string() {
-                    logs.warn(format!("filter value is not string: {:#?}", key_str));
-                    continue;
-                }
-                let field = find_field(&mut map, &key_str);
-                match field {
-                    Some(field) => {
-                        if !field.is_sequence() {
-                            logs.warn(format!("field is not sequence: {:#?}", key_str));
-                            continue;
-                        }
-                        let filter = value.as_str().unwrap_or_default();
-                        let lua = match super::script::create_lua_context() {
-                            Ok(lua) => lua,
-                            Err(e) => {
-                                logs.error(e.to_string());
-                                continue;
-                            }
-                        };
-
-                        let list = field.as_sequence_mut().unwrap();
-                        // apply filter to each item
-                        list.retain(|item| {
-                            let item = lua.to_value(item).unwrap();
-                            if let Err(e) = lua.globals().set("item", item) {
-                                logs.error(e.to_string());
-                                return false;
-                            }
-                            lua.load(filter).eval::<bool>().unwrap_or(false)
-                        });
-                    }
-                    None => {
-                        logs.warn(format!("field not found: {:#?}", key_str));
-                    }
-                }
+                do_filter(&mut logs, &mut map, &key_str, value);
                 continue;
             }
             _ => {
@@ -172,6 +294,7 @@ pub fn use_merge(merge: Mapping, mut config: Mapping) -> ProcessOutput {
 }
 
 mod tests {
+    use pretty_assertions::{assert_eq, assert_ne};
     #[test]
     fn test_find_field() {
         let config = r"
@@ -327,10 +450,10 @@ mod tests {
     }
 
     #[test]
-    fn test_filter() {
+    fn test_filter_string() {
         let merge = r"
 filter__proxies: |
-  item.type == 'ss' or item.type == 'hysteria2'
+  type(item) == 'table' and (item.type == 'ss' or item.type == 'hysteria2')
 filter__wow: |
   item == 'wow'
 ";
@@ -423,6 +546,156 @@ mod tests {
         assert_eq!(result.unwrap(), expected);
     }
 
+    #[test]
+    fn test_filter_when_and_expr() {
+        let merge = r"
+filter__proxies:
+  - when: |
+      type(item) == 'table' and (item.type == 'ss' or item.type == 'hysteria2')
+    expr: |
+      item
+filter__proxy-groups:
+  - when: |
+      item.name == 'Spotify'
+    expr: |
+      item.icon = 'https://raw.githubusercontent.com/Koolson/Qure/master/IconSet/Color/Spotify.png'
+      return item
+";
+        let config = r#"proxy-groups:
+  - name: Spotify
+    type: select
+    proxies:
+      - Proxies
+      - DIRECT
+      - HK
+      - JP
+      - SG
+      - TW
+      - US
+  - name: Steam
+    type: select
+    proxies:
+      - Proxies
+      - DIRECT
+      - HK
+      - JP
+      - SG
+      - TW
+      - US
+  - name: Telegram
+    type: select
+    proxies:
+      - Proxies
+      - HK
+      - JP
+      - SG
+      - TW
+      - US"#;
+        let expected = r#"proxy-groups:
+  - name: Spotify
+    icon: https://raw.githubusercontent.com/Koolson/Qure/master/IconSet/Color/Spotify.png
+    type: select
+    proxies:
+      - Proxies
+      - DIRECT
+      - HK
+      - JP
+      - SG
+      - TW
+      - US
+  - name: Steam
+    type: select
+    proxies:
+      - Proxies
+      - DIRECT
+      - HK
+      - JP
+      - SG
+      - TW
+      - US
+  - name: Telegram
+    type: select
+    proxies:
+      - Proxies
+      - HK
+      - JP
+      - SG
+      - TW
+      - US"#;
+        let merge = serde_yaml::from_str::<super::Mapping>(merge).unwrap();
+        let config = serde_yaml::from_str::<super::Mapping>(config).unwrap();
+        let (result, logs) = super::use_merge(merge, config);
+        eprintln!("{:#?}\n\n{:#?}", logs, result);
+        assert_eq!(logs.len(), 1);
+        let expected = serde_yaml::from_str::<super::Mapping>(expected).unwrap();
+        assert_eq!(result.unwrap(), expected);
+    }
+
+    #[test]
+    fn test_filter_when_and_override() {
+        let merge = r"
+filter__proxies:
+  - when: |
+      type(item) == 'table' and (item.type == 'ss' or item.type == 'hysteria2')
+    override: OVERRIDDEN
+";
+        let config = r#"
+proxies:
+  - 123
+  - 555
+  - name: "hysteria2"
+    type: hysteria2
+    server: server.com
+    port: 443
+    ports: 443-8443
+    password: yourpassword
+    up: "30 Mbps"
+    down: "200 Mbps"
+    obfs: salamander # empty by default; setting it enables obfs, currently only salamander is supported
+    obfs-password: yourpassword
+
+    sni: server.com
+    skip-cert-verify: false
+    fingerprint: xxxx
+    alpn:
+      - h3
+    ca: "./my.ca"
+    ca-str: "xyz"
+  - name: "hysteria2"
+    type: ss
+    server: server.com
+    port: 443
+    ports: 443-8443
+    password: yourpassword
+    up: "30 Mbps"
+    down: "200 Mbps"
+    obfs: salamander # empty by default; setting it enables obfs, currently only salamander is supported
+    obfs-password: yourpassword
+
+    sni: server.com
+    skip-cert-verify: false
+    fingerprint: xxxx
+    alpn:
+      - h3
+    ca: "./my.ca"
+    ca-str: "xyz"
+"#;
+        let expected = r#"
+proxies:
+  - 123
+  - 555
+  - OVERRIDDEN
+  - OVERRIDDEN
+"#;
+        let merge = serde_yaml::from_str::<super::Mapping>(merge).unwrap();
+        let config = serde_yaml::from_str::<super::Mapping>(config).unwrap();
+        let (result, logs) = super::use_merge(merge, config);
+        eprintln!("{:#?}\n\n{:#?}", logs, result);
+        assert_eq!(logs.len(), 0);
+        let expected = serde_yaml::from_str::<super::Mapping>(expected).unwrap();
+        assert_eq!(result.unwrap(), expected);
+    }
+
     #[test]
     fn test_override_recursive() {
         let merge = r"
@@ -182,13 +182,14 @@ impl Runner for JSRunner {
         let boa_runner = wrap_result!(BoaRunner::try_new(), take_logs(logs));
         wrap_result!(boa_runner.setup_console(logger), take_logs(logs));
         let config = wrap_result!(
-            simd_json::serde::to_string_pretty(&mapping)
+            simd_json::serde::to_string(&mapping)
                 .map_err(|e| { std::io::Error::new(std::io::ErrorKind::InvalidData, e) }),
             take_logs(logs)
         );
+        let config = simd_json::to_string(&config).unwrap(); // escape the string
         let execute_module = format!(
             r#"import process from "./{hash}.mjs";
-            let config = JSON.parse(`{config}`);
+            let config = JSON.parse({config});
             export let result = JSON.stringify(await process(config));
             "#
         );
@@ -26,7 +26,7 @@
     "allotment": "1.20.2",
     "country-code-emoji": "2.3.0",
     "dayjs": "1.11.13",
-    "framer-motion": "12.0.0-alpha.0",
+    "framer-motion": "12.0.0-alpha.1",
     "i18next": "23.14.0",
     "jotai": "2.9.3",
    "material-react-table": "2.13.3",
@@ -49,12 +49,15 @@
     "@csstools/normalize.css": "12.1.1",
     "@emotion/babel-plugin": "11.12.0",
     "@emotion/react": "11.13.3",
-    "@iconify/json": "2.2.244",
+    "@iconify/json": "2.2.245",
     "@types/react": "18.3.5",
     "@types/react-dom": "18.3.0",
     "@vitejs/plugin-react": "4.3.1",
     "@vitejs/plugin-react-swc": "3.7.0",
     "clsx": "2.1.1",
+    "meta-json-schema": "github:libnyanpasu/meta-json-schema#main",
+    "monaco-yaml": "5.2.2",
+    "nanoid": "5.0.7",
     "sass": "1.78.0",
     "shiki": "1.16.2",
     "tailwindcss-textshadow": "2.1.3",
@@ -5,6 +5,7 @@ import { classNames } from "@/utils";
 import { useNyanpasu } from "@nyanpasu/interface";
 import styles from "./animated-logo.module.scss";
 
+// @ts-expect-error framer-motion types is wrong
 const Logo = motion(LogoSvg);
 
 const transition = {
@@ -45,21 +46,21 @@ const motionVariants: { [name: string]: Variants } = {
 export default function AnimatedLogo({
   className,
   style,
-  disbaleMotion,
+  disableMotion,
 }: {
   className?: string;
   style?: CSSProperties;
-  disbaleMotion?: boolean;
+  disableMotion?: boolean;
 }) {
   const { nyanpasuConfig } = useNyanpasu();
 
-  const disbale = disbaleMotion ?? nyanpasuConfig?.lighten_animation_effects;
+  const disable = disableMotion ?? nyanpasuConfig?.lighten_animation_effects;
 
   return (
     <AnimatePresence initial={false}>
       <Logo
         className={classNames(styles.LogoSchema, className)}
-        variants={motionVariants[disbale ? "none" : "default"]}
+        variants={motionVariants[disable ? "none" : "default"]}
         style={style}
         drag
         dragConstraints={{ left: 0, right: 0, top: 0, bottom: 0 }}
@@ -15,7 +15,6 @@ import {
   useForm,
 } from "react-hook-form-mui";
 import { useTranslation } from "react-i18next";
-import { classNames } from "@/utils";
 import { Divider, InputAdornment } from "@mui/material";
 import { Profile, useClash } from "@nyanpasu/interface";
 import { BaseDialog } from "@nyanpasu/ui";
@@ -300,6 +299,8 @@ export const ProfileDialog = ({
             <ProfileMonacoView
               className="w-full"
               ref={profileMonacoViewRef}
+              readonly={isRemote}
+              schemaType="clash"
               open={open}
               value={editor.value}
               language={editor.language}
@@ -1,6 +1,8 @@
 import { useUpdateEffect } from "ahooks";
 import { useAtomValue } from "jotai";
+import { nanoid } from "nanoid";
 import { forwardRef, useEffect, useImperativeHandle, useRef } from "react";
+import { OS } from "@/consts";
 import { monaco } from "@/services/monaco";
 import { themeMode } from "@/store";
 
@@ -9,6 +11,8 @@ export interface ProfileMonacoViewProps {
   value?: string;
   language?: string;
   className?: string;
+  readonly?: boolean;
+  schemaType?: "clash" | "merge";
 }
 
 export interface ProfileMonacoViewRef {
@@ -16,33 +20,56 @@ export interface ProfileMonacoViewRef {
 }
 
 export const ProfileMonacoView = forwardRef(function ProfileMonacoView(
-  { open, value, language, className }: ProfileMonacoViewProps,
+  {
+    open,
+    value,
+    language,
+    readonly = false,
+    schemaType,
+    className,
+  }: ProfileMonacoViewProps,
   ref,
 ) {
   const mode = useAtomValue(themeMode);
 
   const monacoRef = useRef<HTMLDivElement>(null);
 
-  const monacoeditorRef = useRef<typeof monaco | null>(null);
+  const monacoEditorRef = useRef<typeof monaco | null>(null);
 
   const instanceRef = useRef<monaco.editor.IStandaloneCodeEditor | null>(null);
 
   useEffect(() => {
     const run = async () => {
       const { monaco } = await import("@/services/monaco");
-      monacoeditorRef.current = monaco;
+      monacoEditorRef.current = monaco;
 
       if (!monacoRef.current) {
         return;
       }
 
       instanceRef.current = monaco.editor.create(monacoRef.current, {
-        value,
-        language,
+        readOnly: readonly,
+        renderValidationDecorations: "on",
         theme: mode === "light" ? "vs" : "vs-dark",
+        tabSize: language === "yaml" ? 2 : 4,
         minimap: { enabled: false },
         automaticLayout: true,
+        fontLigatures: true,
+        smoothScrolling: true,
+        fontFamily: `'Cascadia Code NF', 'Cascadia Code', Fira Code, JetBrains Mono, Roboto Mono, "Source Code Pro", Consolas, Menlo, Monaco, monospace, "Courier New", "Apple Color Emoji"${
+          OS === "windows" ? ", twemoji mozilla" : ""
+        }`,
+        quickSuggestions: {
+          strings: true,
+          comments: true,
+          other: true,
+        },
       });
+      const uri = monaco.Uri.parse(
+        `${nanoid()}.${!!schemaType ? `${schemaType}.` : ""}.${language}`,
+      );
+      const model = monaco.editor.createModel(value || "", language, uri);
+      instanceRef.current.setModel(model);
     };
     if (open) {
       run().catch(console.error);
@@ -50,7 +77,7 @@ export const ProfileMonacoView = forwardRef(function ProfileMonacoView(
     return () => {
       instanceRef.current?.dispose();
     };
-  }, [language, mode, open, value]);
+  }, [language, mode, open, readonly, schemaType, value]);
 
   useImperativeHandle(ref, () => ({
     getValue: () => instanceRef.current?.getValue(),
@@ -63,7 +90,7 @@ export const ProfileMonacoView = forwardRef(function ProfileMonacoView(
       return;
     }
 
-    monacoeditorRef.current?.editor.setModelLanguage(model, language);
+    monacoEditorRef.current?.editor.setModelLanguage(model, language);
   }, [language]);
 
   useUpdateEffect(() => {
@@ -222,6 +222,9 @@ export const ScriptDialog = ({
             open={openMonaco}
             value={editor.value}
             language={editor.language}
+            schemaType={
+              editor.rawType === Profile.Type.Merge ? "merge" : undefined
+            }
           />
         </div>
       </BaseDialog>
@@ -1,27 +1,31 @@
 import { useAsyncEffect } from "ahooks";
-import { useAtom } from "jotai";
+import { useAtom, useSetAtom } from "jotai";
 import { useState } from "react";
 import useSWR from "swr";
+import { OS } from "@/consts";
 import { serviceManualPromptDialogAtom } from "@/store/service";
 import { getShikiSingleton } from "@/utils/shiki";
 import { getServiceInstallPrompt } from "@nyanpasu/interface";
 import { BaseDialog, BaseDialogProps } from "@nyanpasu/ui";
 
-export type ServerManualPromptDialogProps = Omit<BaseDialogProps, "title">;
+export type ServerManualPromptDialogProps = Omit<BaseDialogProps, "title"> & {
+  operation: "uninstall" | "install" | "start" | "stop" | null;
+};
 
 // TODO: maybe support more commands prompt?
 export default function ServerManualPromptDialog({
   open,
   onClose,
+  operation,
   ...props
 }: ServerManualPromptDialogProps) {
-  const { data: serviceInstallPrompt } = useSWR(
-    "/service_install_prompt",
+  const { data: serviceInstallPrompt, error } = useSWR(
+    operation === "install" ? "/service_install_prompt" : null,
     getServiceInstallPrompt,
   );
   const [codes, setCodes] = useState<string | null>(null);
   useAsyncEffect(async () => {
-    if (serviceInstallPrompt) {
+    if (operation === "install" && serviceInstallPrompt) {
       const shiki = await getShikiSingleton();
       const code = await shiki.codeToHtml(serviceInstallPrompt, {
         lang: "shell",
@@ -31,17 +35,36 @@ export default function ServerManualPromptDialog({
         },
       });
       setCodes(code);
+    } else if (!!operation) {
+      const shiki = await getShikiSingleton();
+      const code = await shiki.codeToHtml(
+        `${OS !== "windows" ? "sudo " : ""}./nyanpasu-service ${operation}`,
+        {
+          lang: "shell",
+          themes: {
+            dark: "nord",
+            light: "min-light",
+          },
+        },
+      );
+      setCodes(code);
     }
-  }, [serviceInstallPrompt]);
+  }, [serviceInstallPrompt, operation, setCodes]);
 
   return (
-    <BaseDialog title="Server Manual" open={open} onClose={onClose} {...props}>
+    <BaseDialog
+      title="Service Manual Tips"
+      open={open}
+      onClose={onClose}
+      {...props}
+    >
       <div className="grid gap-3">
         <p>
           Unable to install service automatically. Please open a PowerShell(as
           administrator) in Windows or a terminal emulator in macOS, Linux and
           run the following commands:
         </p>
+        {error && <p className="text-red-500">{error.message}</p>}
         {!!codes && (
           <div
             dangerouslySetInnerHTML={{
@@ -55,16 +78,21 @@ export default function ServerManualPromptDialog({
 }
 
 export function ServerManualPromptDialogWrapper() {
-  const [open, setOpen] = useAtom(serviceManualPromptDialogAtom);
+  const [prompt, setPrompt] = useAtom(serviceManualPromptDialogAtom);
   return (
-    <ServerManualPromptDialog open={open} onClose={() => setOpen(false)} />
+    <ServerManualPromptDialog
+      open={!!prompt}
+      onClose={() => setPrompt(null)}
+      operation={prompt}
+    />
   );
 }
 
 export function useServerManualPromptDialog() {
-  const [, setOpen] = useAtom(serviceManualPromptDialogAtom);
+  const setPrompt = useSetAtom(serviceManualPromptDialogAtom);
   return {
-    show: () => setOpen(true),
-    close: () => setOpen(false),
+    show: (prompt: "install" | "uninstall" | "stop" | "start") =>
+      setPrompt(prompt),
+    close: () => setPrompt(null),
   };
 }
@@ -67,19 +67,20 @@ export const SettingSystemService = () => {
         }
         await restartSidecar();
       } catch (e) {
-        const errorMessage =
+        const errorMessage = `${
           getServiceStatus.data === "not_installed"
             ? "Install failed"
-            : "Uninstall failed";
+            : "Uninstall failed"
+        }: ${formatError(e)}`;
 
         message(errorMessage, {
           type: "error",
           title: t("Error"),
         });
         // If install failed show a prompt to user to install the service manually
-        if (getServiceStatus.data === "not_installed") {
-          promptDialog.show();
-        }
+        promptDialog.show(
+          getServiceStatus.data === "not_installed" ? "install" : "uninstall",
+        );
       }
     });
   });
@@ -111,6 +112,10 @@ export const SettingSystemService = () => {
           type: "error",
           title: t("Error"),
         });
+        // If start failed show a prompt to user to start the service manually
+        promptDialog.show(
+          getServiceStatus.data === "running" ? "stop" : "start",
+        );
       }
     });
   });
@@ -1,3 +1,6 @@
+import nyanpasuMergeSchema from "meta-json-schema/schemas/clash-nyanpasu-merge-json-schema.json";
+import clashMetaSchema from "meta-json-schema/schemas/meta-json-schema.json";
+import { configureMonacoYaml } from "monaco-yaml";
 // features
 // langs
 import "monaco-editor/esm/vs/basic-languages/javascript/javascript.contribution.js";
@@ -14,4 +17,21 @@ monaco.languages.typescript.javascriptDefaults.setCompilerOptions({
   allowJs: true,
 });
 
+configureMonacoYaml(monaco, {
+  validate: true,
+  enableSchemaRequest: true,
+  schemas: [
+    {
+      fileMatch: ["**/*.clash.yaml"],
+      // @ts-expect-error monaco-yaml parse issue
+      schema: clashMetaSchema,
+    },
+    {
+      fileMatch: ["**/*.merge.yaml"],
+      // @ts-expect-error monaco-yaml parse issue
+      schema: nyanpasuMergeSchema,
+    },
+  ],
+});
+
 export { monaco };
@@ -1,3 +1,5 @@
 import { atom } from "jotai";
 
-export const serviceManualPromptDialogAtom = atom<boolean>(false);
+export const serviceManualPromptDialogAtom = atom<
+  "install" | "uninstall" | "start" | "stop" | null
+>(null);
@@ -68,7 +68,15 @@ export default defineConfig(({ command }) => {
       }),
      generouted(),
       sassDts({ esmExport: true }),
-      monaco({ languageWorkers: ["editorWorkerService", "typescript"] }),
+      monaco({
+        languageWorkers: ["editorWorkerService", "typescript"],
+        customWorkers: [
+          {
+            label: "yaml",
+            entry: "monaco-yaml/yaml.worker",
+          },
+        ],
+      }),
       isDev && devtools(),
     ],
     resolve: {
@@ -28,7 +28,7 @@
     "@vitejs/plugin-react": "4.3.1",
     "ahooks": "3.8.1",
     "d3": "7.9.0",
-    "framer-motion": "12.0.0-alpha.0",
+    "framer-motion": "12.0.0-alpha.1",
     "react": "18.3.1",
     "react-error-boundary": "4.0.13",
     "react-i18next": "15.0.1",
@@ -3,7 +3,7 @@
   "latest": {
     "mihomo": "v1.18.8",
     "mihomo_alpha": "alpha-faaa90f",
-    "clash_rs": "v0.3.0",
+    "clash_rs": "v0.3.1",
     "clash_premium": "2023-09-05-gdcc8d87"
   },
   "arch_template": {
@@ -36,5 +36,5 @@
       "darwin-x64": "clash-darwin-amd64-n{}.gz"
     }
   },
-  "updated_at": "2024-09-03T22:20:35.876Z"
+  "updated_at": "2024-09-05T22:20:27.332Z"
 }
clash-nyanpasu/pnpm-lock.yaml (generated, 1184 changes)
File diff suppressed because it is too large.
@@ -39,7 +39,8 @@ type Cmgr interface {
     Start(ctx context.Context, errCH chan error)
 
     // Metrics related
-    QueryNodeMetrics(ctx context.Context, req *ms.QueryNodeMetricsReq) (*ms.QueryNodeMetricsResp, error)
+    QueryNodeMetrics(ctx context.Context, req *ms.QueryNodeMetricsReq, refresh bool) (*ms.QueryNodeMetricsResp, error)
+    QueryRuleMetrics(ctx context.Context, req *ms.QueryRuleMetricsReq, refresh bool) (*ms.QueryRuleMetricsResp, error)
 }
 
 type cmgrImpl struct {
@@ -201,20 +202,30 @@ func (cm *cmgrImpl) Start(ctx context.Context, errCH chan error) {
     }
 }
 
-func (cm *cmgrImpl) QueryNodeMetrics(ctx context.Context, req *ms.QueryNodeMetricsReq) (*ms.QueryNodeMetricsResp, error) {
-    num := -1 // default to return all metrics
-    if req.Latest {
-        m, err := cm.mr.ReadOnce(ctx)
+func (cm *cmgrImpl) QueryNodeMetrics(ctx context.Context, req *ms.QueryNodeMetricsReq, refresh bool) (*ms.QueryNodeMetricsResp, error) {
+    if refresh {
+        nm, _, err := cm.mr.ReadOnce(ctx)
         if err != nil {
             return nil, err
         }
-        if err := cm.ms.AddNodeMetric(m); err != nil {
+        if err := cm.ms.AddNodeMetric(ctx, nm); err != nil {
             return nil, err
         }
-        num = 1
     }
-
-    startTime := time.Unix(req.StartTimestamp, 0)
-    endTime := time.Unix(req.EndTimestamp, 0)
-    return cm.ms.QueryNodeMetric(startTime, endTime, num)
+    return cm.ms.QueryNodeMetric(ctx, req)
+}
+
+func (cm *cmgrImpl) QueryRuleMetrics(ctx context.Context, req *ms.QueryRuleMetricsReq, refresh bool) (*ms.QueryRuleMetricsResp, error) {
+    if refresh {
+        _, rm, err := cm.mr.ReadOnce(ctx)
+        if err != nil {
+            return nil, err
+        }
+        for _, m := range rm {
+            if err := cm.ms.AddRuleMetric(ctx, m); err != nil {
+                return nil, err
+            }
+        }
+    }
+    return cm.ms.QueryRuleMetric(ctx, req)
 }
echo/internal/cmgr/ms/handler.go (new file, 163 lines)
@@ -0,0 +1,163 @@
+package ms
+
+import (
+    "context"
+
+    "github.com/Ehco1996/ehco/pkg/metric_reader"
+)
+
+type NodeMetrics struct {
+    Timestamp int64 `json:"timestamp"`
+
+    CPUUsage    float64 `json:"cpu_usage"`
+    MemoryUsage float64 `json:"memory_usage"`
+    DiskUsage   float64 `json:"disk_usage"`
+    NetworkIn   float64 `json:"network_in"`  // bytes per second
+    NetworkOut  float64 `json:"network_out"` // bytes per second
+}
+
+type QueryNodeMetricsReq struct {
+    StartTimestamp int64
+    EndTimestamp   int64
+    Num            int64
+}
+
+type QueryNodeMetricsResp struct {
+    TOTAL int           `json:"total"`
+    Data  []NodeMetrics `json:"data"`
+}
+
+type RuleMetricsData struct {
+    Timestamp int64  `json:"timestamp"`
+    Label     string `json:"label"`
+    Remote    string `json:"remote"`
+    PingLatency             int64 `json:"ping_latency"`
+    TCPConnectionCount      int64 `json:"tcp_connection_count"`
+    TCPHandshakeDuration    int64 `json:"tcp_handshake_duration"`
+    TCPNetworkTransmitBytes int64 `json:"tcp_network_transmit_bytes"`
+    UDPConnectionCount      int64 `json:"udp_connection_count"`
+    UDPHandshakeDuration    int64 `json:"udp_handshake_duration"`
+    UDPNetworkTransmitBytes int64 `json:"udp_network_transmit_bytes"`
+}
+
+type QueryRuleMetricsReq struct {
+    RuleLabel string
+    Remote    string
+
+    StartTimestamp int64
+    EndTimestamp   int64
+    Num            int64
+}
+
+type QueryRuleMetricsResp struct {
+    TOTAL int               `json:"total"`
+    Data  []RuleMetricsData `json:"data"`
+}
+
+func (ms *MetricsStore) AddNodeMetric(ctx context.Context, m *metric_reader.NodeMetrics) error {
+    _, err := ms.db.ExecContext(ctx, `
+    INSERT OR REPLACE INTO node_metrics (timestamp, cpu_usage, memory_usage, disk_usage, network_in, network_out)
+    VALUES (?, ?, ?, ?, ?, ?)
+    `, m.SyncTime.Unix(), m.CpuUsagePercent, m.MemoryUsagePercent, m.DiskUsagePercent, m.NetworkReceiveBytesRate, m.NetworkTransmitBytesRate)
+    return err
+}
+
+func (ms *MetricsStore) AddRuleMetric(ctx context.Context, rm *metric_reader.RuleMetrics) error {
+    tx, err := ms.db.BeginTx(ctx, nil)
+    if err != nil {
+        return err
+    }
+    defer tx.Rollback() //nolint:errcheck
+
+    stmt, err := tx.PrepareContext(ctx, `
+    INSERT OR REPLACE INTO rule_metrics
+    (timestamp, label, remote, ping_latency,
+    tcp_connection_count, tcp_handshake_duration, tcp_network_transmit_bytes,
+    udp_connection_count, udp_handshake_duration, udp_network_transmit_bytes)
+    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+    `)
+    if err != nil {
+        return err
+    }
+    defer stmt.Close() //nolint:errcheck
+
+    for remote, pingMetric := range rm.PingMetrics {
+        _, err := stmt.ExecContext(ctx, rm.SyncTime.Unix(), rm.Label, remote, pingMetric.Latency,
+            rm.TCPConnectionCount[remote], rm.TCPHandShakeDuration[remote], rm.TCPNetworkTransmitBytes[remote],
+            rm.UDPConnectionCount[remote], rm.UDPHandShakeDuration[remote], rm.UDPNetworkTransmitBytes[remote])
+        if err != nil {
+            return err
+        }
+    }
+
+    return tx.Commit()
+}
+
+func (ms *MetricsStore) QueryNodeMetric(ctx context.Context, req *QueryNodeMetricsReq) (*QueryNodeMetricsResp, error) {
+    rows, err := ms.db.QueryContext(ctx, `
+    SELECT timestamp, cpu_usage, memory_usage, disk_usage, network_in, network_out
+    FROM node_metrics
+    WHERE timestamp >= ? AND timestamp <= ?
+    ORDER BY timestamp DESC
+    LIMIT ?
+    `, req.StartTimestamp, req.EndTimestamp, req.Num)
+    if err != nil {
+        return nil, err
+    }
+    defer rows.Close() //nolint:errcheck
+
+    var resp QueryNodeMetricsResp
+    for rows.Next() {
+        var m NodeMetrics
+        if err := rows.Scan(&m.Timestamp, &m.CPUUsage, &m.MemoryUsage, &m.DiskUsage, &m.NetworkIn, &m.NetworkOut); err != nil {
+            return nil, err
+        }
+        resp.Data = append(resp.Data, m)
+    }
+    resp.TOTAL = len(resp.Data)
+    return &resp, nil
+}
+
+func (ms *MetricsStore) QueryRuleMetric(ctx context.Context, req *QueryRuleMetricsReq) (*QueryRuleMetricsResp, error) {
+    query := `
+    SELECT timestamp, label, remote, ping_latency,
+    tcp_connection_count, tcp_handshake_duration, tcp_network_transmit_bytes,
+    udp_connection_count, udp_handshake_duration, udp_network_transmit_bytes
+    FROM rule_metrics
+    WHERE timestamp >= ? AND timestamp <= ?
+    `
+    args := []interface{}{req.StartTimestamp, req.EndTimestamp}
+
+    if req.RuleLabel != "" {
+        query += " AND label = ?"
+        args = append(args, req.RuleLabel)
+    }
+    if req.Remote != "" {
+        query += " AND remote = ?"
+        args = append(args, req.Remote)
+    }
+
+    query += `
+    ORDER BY timestamp DESC
+    LIMIT ?
+    `
+    args = append(args, req.Num)
+
+    rows, err := ms.db.Query(query, args...)
+    if err != nil {
+        return nil, err
+    }
+    defer rows.Close() //nolint:errcheck
+    var resp QueryRuleMetricsResp
+    for rows.Next() {
+        var m RuleMetricsData
+        if err := rows.Scan(&m.Timestamp, &m.Label, &m.Remote, &m.PingLatency,
+            &m.TCPConnectionCount, &m.TCPHandshakeDuration, &m.TCPNetworkTransmitBytes,
+            &m.UDPConnectionCount, &m.UDPHandshakeDuration, &m.UDPNetworkTransmitBytes); err != nil {
+            return nil, err
+        }
+        resp.Data = append(resp.Data, m)
+    }
+    resp.TOTAL = len(resp.Data)
+    return &resp, nil
+}
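A hedged usage sketch of the new query path as seen from outside the ms package — the store variable, rule label, and time window below are assumptions for illustration, not part of this commit:

    now := time.Now()
    resp, err := store.QueryRuleMetric(ctx, &ms.QueryRuleMetricsReq{
        RuleLabel:      "relay-hk",                 // illustrative label
        StartTimestamp: now.Add(-time.Hour).Unix(), // last hour
        EndTimestamp:   now.Unix(),
        Num:            100, // cap on returned rows
    })
    if err != nil {
        return err
    }
    for _, m := range resp.Data {
        fmt.Printf("%s -> %s ping_latency=%d\n", m.Label, m.Remote, m.PingLatency)
    }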
@@ -8,31 +8,8 @@ import (
 
     "go.uber.org/zap"
     _ "modernc.org/sqlite"
-
-    "github.com/Ehco1996/ehco/pkg/metric_reader"
 )
 
-type NodeMetrics struct {
-    Timestamp int64 `json:"timestamp"`
-
-    CPUUsage    float64 `json:"cpu_usage"`
-    MemoryUsage float64 `json:"memory_usage"`
-    DiskUsage   float64 `json:"disk_usage"`
-    NetworkIn   float64 `json:"network_in"`
-    NetworkOut  float64 `json:"network_out"`
-}
-
-type QueryNodeMetricsReq struct {
-    StartTimestamp int64 `json:"start_ts"`
-    EndTimestamp   int64 `json:"end_ts"`
-
-    Latest bool `json:"latest"` // whether to refresh the cache and get the latest data
-}
-type QueryNodeMetricsResp struct {
-    TOTAL int           `json:"total"`
-    Data  []NodeMetrics `json:"data"`
-}
-
 type MetricsStore struct {
     db     *sql.DB
     dbPath string
@@ -65,12 +42,34 @@ func NewMetricsStore(dbPath string) (*MetricsStore, error) {
     if err := ms.initDB(); err != nil {
         return nil, err
     }
+    if err := ms.cleanOldData(); err != nil {
+        return nil, err
+    }
     return ms, nil
 }
 
+func (ms *MetricsStore) cleanOldData() error {
+    thirtyDaysAgo := time.Now().AddDate(0, 0, -30).Unix()
+
+    // clean up the node_metrics table
+    _, err := ms.db.Exec("DELETE FROM node_metrics WHERE timestamp < ?", thirtyDaysAgo)
+    if err != nil {
+        return err
+    }
+
+    // clean up the rule_metrics table
+    _, err = ms.db.Exec("DELETE FROM rule_metrics WHERE timestamp < ?", thirtyDaysAgo)
+    if err != nil {
+        return err
+    }
+
+    ms.l.Infof("Cleaned data older than 30 days")
+    return nil
+}
+
 func (ms *MetricsStore) initDB() error {
     // init NodeMetrics table
-    _, err := ms.db.Exec(`
+    if _, err := ms.db.Exec(`
     CREATE TABLE IF NOT EXISTS node_metrics (
         timestamp INTEGER,
         cpu_usage REAL,
@@ -80,39 +79,27 @@ func (ms *MetricsStore) initDB() error {
 		network_out REAL,
 		PRIMARY KEY (timestamp)
 	)
-	`)
+	`); err != nil {
 		return err
-}
-
-func (ms *MetricsStore) AddNodeMetric(m *metric_reader.NodeMetrics) error {
-	_, err := ms.db.Exec(`
-	INSERT OR REPLACE INTO node_metrics (timestamp, cpu_usage, memory_usage, disk_usage, network_in, network_out)
-	VALUES (?, ?, ?, ?, ?, ?)
-	`, m.SyncTime.Unix(), m.CpuUsagePercent, m.MemoryUsagePercent, m.DiskUsagePercent, m.NetworkReceiveBytesRate, m.NetworkTransmitBytesRate)
-	return err
-}
-
-func (ms *MetricsStore) QueryNodeMetric(startTime, endTime time.Time, num int) (*QueryNodeMetricsResp, error) {
-	rows, err := ms.db.Query(`
-	SELECT timestamp, cpu_usage, memory_usage, disk_usage, network_in, network_out
-	FROM node_metrics
-	WHERE timestamp >= ? AND timestamp <= ?
-	ORDER BY timestamp DESC
-	LIMIT ?
-	`, startTime.Unix(), endTime.Unix(), num)
-	if err != nil {
-		return nil, err
 	}
-	defer rows.Close() //nolint:errcheck
-
-	var resp QueryNodeMetricsResp
-	for rows.Next() {
-		var m NodeMetrics
-		if err := rows.Scan(&m.Timestamp, &m.CPUUsage, &m.MemoryUsage, &m.DiskUsage, &m.NetworkIn, &m.NetworkOut); err != nil {
-			return nil, err
-		}
-		resp.Data = append(resp.Data, m)
+
+	// init rule_metrics
+	if _, err := ms.db.Exec(`
+	CREATE TABLE IF NOT EXISTS rule_metrics (
+		timestamp INTEGER,
+		label TEXT,
+		remote TEXT,
+		ping_latency INTEGER,
+		tcp_connection_count INTEGER,
+		tcp_handshake_duration INTEGER,
+		tcp_network_transmit_bytes INTEGER,
+		udp_connection_count INTEGER,
+		udp_handshake_duration INTEGER,
+		udp_network_transmit_bytes INTEGER,
+		PRIMARY KEY (timestamp, label, remote)
+	)
+	`); err != nil {
+		return err
 	}
-	resp.TOTAL = len(resp.Data)
-	return &resp, nil
+	return nil
 }
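For reference, a minimal sketch of writing one row into the rule_metrics table created above, using database/sql directly. The column list comes from the CREATE TABLE statement in the hunk; the helper name and the choice to zero-fill the counter columns are illustrative assumptions, not the project's actual code.

// Sketch: persist a single rule metric sample (helper is hypothetical).
func addRuleMetricRow(db *sql.DB, ts int64, label, remote string, pingLatencyMs int64) error {
	_, err := db.Exec(`
	INSERT OR REPLACE INTO rule_metrics (timestamp, label, remote, ping_latency,
		tcp_connection_count, tcp_handshake_duration, tcp_network_transmit_bytes,
		udp_connection_count, udp_handshake_duration, udp_network_transmit_bytes)
	VALUES (?, ?, ?, ?, 0, 0, 0, 0, 0, 0)`,
		ts, label, remote, pingLatencyMs)
	return err
}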
@@ -45,14 +45,19 @@ func (cm *cmgrImpl) syncOnce(ctx context.Context) error {
 	}

 	if cm.cfg.NeedMetrics() {
-		metrics, err := cm.mr.ReadOnce(ctx)
+		nm, rmm, err := cm.mr.ReadOnce(ctx)
 		if err != nil {
 			cm.l.Errorf("read metrics failed: %v", err)
 		} else {
-			req.Node = *metrics
-			if err := cm.ms.AddNodeMetric(metrics); err != nil {
+			req.Node = *nm
+			if err := cm.ms.AddNodeMetric(ctx, nm); err != nil {
 				cm.l.Errorf("add metrics to store failed: %v", err)
 			}
+			for _, rm := range rmm {
+				if err := cm.ms.AddRuleMetric(ctx, rm); err != nil {
+					cm.l.Errorf("add rule metrics to store failed: %v", err)
+				}
+			}
 		}
 	}
@@ -209,14 +209,12 @@ func (c *innerConn) recordStats(n int, isRead bool) {
 		return
 	}
 	if isRead {
-		metrics.NetWorkTransmitBytes.WithLabelValues(
-			c.rc.remote.Label, metrics.METRIC_CONN_TYPE_TCP, metrics.METRIC_CONN_FLOW_READ,
-		).Add(float64(n))
+		labels := []string{c.rc.RelayLabel, c.rc.ConnType, metrics.METRIC_FLOW_READ, c.rc.remote.Address}
+		metrics.NetWorkTransmitBytes.WithLabelValues(labels...).Add(float64(n))
 		c.rc.Stats.Record(0, int64(n))
 	} else {
-		metrics.NetWorkTransmitBytes.WithLabelValues(
-			c.rc.remote.Label, metrics.METRIC_CONN_TYPE_TCP, metrics.METRIC_CONN_FLOW_WRITE,
-		).Add(float64(n))
+		labels := []string{c.rc.RelayLabel, c.rc.ConnType, metrics.METRIC_FLOW_WRITE, c.rc.remote.Address}
+		metrics.NetWorkTransmitBytes.WithLabelValues(labels...).Add(float64(n))
 		c.rc.Stats.Record(int64(n), 0)
 	}
 }
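With the flattened label set shown above, every byte counted on a relayed connection carries four labels in a fixed order: label, conn_type, flow, remote. A minimal, self-contained sketch of the same pattern with the prometheus client; the namespace and the concrete label values are made up for illustration.

package main

import "github.com/prometheus/client_golang/prometheus"

// Counter vector mirroring the label order used in the hunk above.
var transmitBytes = prometheus.NewCounterVec(prometheus.CounterOpts{
	Namespace: "ehco", // assumed value of METRIC_NS
	Subsystem: "traffic",
	Name:      "network_transmit_bytes",
}, []string{"label", "conn_type", "flow", "remote"})

func main() {
	prometheus.MustRegister(transmitBytes)
	// The argument order must match the label slice above.
	transmitBytes.WithLabelValues("relay-hk", "tcp", "read", "1.2.3.4:443").Add(1024)
}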
@@ -236,7 +234,7 @@ func (c *innerConn) Read(p []byte) (n int, err error) {
 		if netErr, ok := err.(net.Error); ok && netErr.Timeout() {
 			since := time.Since(c.lastActive)
 			if since > c.rc.Options.IdleTimeout {
-				c.l.Debugf("Read idle, close remote: %s", c.rc.remote.Label)
+				c.l.Debugf("Read idle, close remote: %s", c.rc.remote.Address)
 				return 0, ErrIdleTimeout
 			}
 			continue
@@ -24,7 +24,7 @@ func TestInnerConn_ReadWrite(t *testing.T) {
 	serverConn.SetDeadline(time.Now().Add(1 * time.Second))
 	defer clientConn.Close()
 	defer serverConn.Close()
-	rc := relayConnImpl{Stats: &Stats{}, remote: &lb.Node{Label: "client"}, Options: &testOptions}
+	rc := relayConnImpl{Stats: &Stats{}, remote: &lb.Node{}, Options: &testOptions}
 	innerC := newInnerConn(clientConn, &rc)
 	errChan := make(chan error, 1)
 	go func() {
@@ -100,7 +100,7 @@ func TestCopyTCPConn(t *testing.T) {
 	assert.NoError(t, err)
 	defer remoteConn.Close()
 	testOptions := conf.Options{IdleTimeout: time.Second, ReadTimeout: time.Second}
-	rc := relayConnImpl{Stats: &Stats{}, remote: &lb.Node{Label: "client"}, Options: &testOptions}
+	rc := relayConnImpl{Stats: &Stats{}, remote: &lb.Node{}, Options: &testOptions}
 	c1 := newInnerConn(clientConn, &rc)
 	c2 := newInnerConn(remoteConn, &rc)
@@ -161,7 +161,7 @@ func TestCopyUDPConn(t *testing.T) {
 	defer remoteConn.Close()

 	testOptions := conf.Options{IdleTimeout: time.Second, ReadTimeout: time.Second}
-	rc := relayConnImpl{Stats: &Stats{}, remote: &lb.Node{Label: "client"}, Options: &testOptions}
+	rc := relayConnImpl{Stats: &Stats{}, remote: &lb.Node{}, Options: &testOptions}
 	c1 := newInnerConn(clientConn, &rc)
 	c2 := newInnerConn(remoteConn, &rc)
@@ -1,6 +1,8 @@
 package lb

 import (
+	"net/url"
+	"strings"
 	"time"

 	"go.uber.org/atomic"
@@ -8,21 +10,38 @@ import (

 type Node struct {
 	Address           string
-	Label             string
 	HandShakeDuration time.Duration
 }

 func (n *Node) Clone() *Node {
 	return &Node{
 		Address:           n.Address,
-		Label:             n.Label,
 		HandShakeDuration: n.HandShakeDuration,
 	}
 }

+func extractHost(input string) (string, error) {
+	// Check if the input string has a scheme, if not, add "http://"
+	if !strings.Contains(input, "://") {
+		input = "http://" + input
+	}
+	// Parse the URL
+	u, err := url.Parse(input)
+	if err != nil {
+		return "", err
+	}
+	return u.Hostname(), nil
+}
+
+// NOTE for (https/ws/wss)://xxx.com -> xxx.com
+func (n *Node) GetAddrHost() (string, error) {
+	return extractHost(n.Address)
+}
+
 // RoundRobin is an interface for representing round-robin balancing.
 type RoundRobin interface {
 	Next() *Node
+	GetAll() []*Node
 }

 type roundrobin struct {

@@ -42,3 +61,7 @@ func (r *roundrobin) Next() *Node {
 	next := r.nodeList[(int(n)-1)%r.len]
 	return next
 }
+
+func (r *roundrobin) GetAll() []*Node {
+	return r.nodeList
+}
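The extractHost helper moved into the lb package above leans on net/url: a scheme is prepended when missing so url.Parse can pull the hostname out of either a full URL or a bare host:port. A small standalone check of that behaviour; the example inputs are illustrative only.

package main

import (
	"fmt"
	"net/url"
	"strings"
)

// Same logic as the lb helper shown in the hunk above.
func extractHost(input string) (string, error) {
	if !strings.Contains(input, "://") {
		input = "http://" + input
	}
	u, err := url.Parse(input)
	if err != nil {
		return "", err
	}
	return u.Hostname(), nil
}

func main() {
	for _, in := range []string{"wss://example.com:443/ws", "10.0.0.1:9001"} {
		host, _ := extractHost(in)
		fmt.Println(in, "->", host) // example.com, then 10.0.0.1
	}
}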
@@ -13,53 +13,43 @@ const (
 	METRIC_SUBSYSTEM_TRAFFIC = "traffic"
 	METRIC_SUBSYSTEM_PING    = "ping"

-	METRIC_LABEL_REMOTE = "remote"
-
-	METRIC_LABEL_CONN_FLOW = "flow"
-	METRIC_CONN_FLOW_WRITE = "write"
-	METRIC_CONN_FLOW_READ  = "read"
-
-	METRIC_LABEL_CONN_TYPE = "type"
-	METRIC_CONN_TYPE_TCP   = "tcp"
-	METRIC_CONN_TYPE_UDP   = "udp"
+	METRIC_CONN_TYPE_TCP = "tcp"
+	METRIC_CONN_TYPE_UDP = "udp"
+	METRIC_FLOW_READ     = "read"
+	METRIC_FLOW_WRITE    = "write"

 	EhcoAliveStateInit    = 0
 	EhcoAliveStateRunning = 1
 )

+var (
+	Hostname, _ = os.Hostname()
+	ConstLabels = map[string]string{
+		"ehco_runner_hostname": Hostname,
+	}
+
+	// 1ms ~ 5s (1ms to 437ms)
+	msBuckets = prometheus.ExponentialBuckets(1, 1.5, 16)
+)
+
 // ping metrics
 var (
-	pingLabelNames = []string{"ip", "host", "label"}
-	pingBuckets    = prometheus.ExponentialBuckets(0.001, 2, 12) // 1ms ~ 4s
-	pingInterval   = time.Second * 30
-
-	PingResponseDurationSeconds = prometheus.NewHistogramVec(
+	pingInterval = time.Second * 30
+	PingResponseDurationMilliseconds = prometheus.NewHistogramVec(
 		prometheus.HistogramOpts{
 			Namespace:   METRIC_NS,
 			Subsystem:   METRIC_SUBSYSTEM_PING,
-			Name:        "response_duration_seconds",
+			Name:        "response_duration_milliseconds",
 			Help:        "A histogram of latencies for ping responses.",
-			Buckets:     pingBuckets,
+			Buckets:     msBuckets,
 			ConstLabels: ConstLabels,
 		},
-		pingLabelNames,
-	)
-
-	PingRequestTotal = prometheus.NewDesc(
-		prometheus.BuildFQName(METRIC_NS, METRIC_SUBSYSTEM_PING, "requests_total"),
-		"Number of ping requests sent",
-		pingLabelNames,
-		ConstLabels,
+		[]string{"label", "remote", "ip"},
 	)
 )

 // traffic metrics
 var (
-	Hostname, _ = os.Hostname()
-
-	ConstLabels = map[string]string{
-		"ehco_runner_hostname": Hostname,
-	}
-
 	EhcoAlive = prometheus.NewGauge(prometheus.GaugeOpts{
 		Namespace:   METRIC_NS,
 		Subsystem:   "",
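The shared msBuckets declared above come from prometheus.ExponentialBuckets(1, 1.5, 16), so the upper bounds run 1, 1.5, 2.25, ... up to 1 * 1.5^15, which is roughly 437.9, matching the "1ms to 437ms" comment even though the line also mentions 5s. A tiny check of that arithmetic:

package main

import (
	"fmt"

	"github.com/prometheus/client_golang/prometheus"
)

func main() {
	// Prints 16 bucket upper bounds: 1, 1.5, 2.25, ... ~437.89 (interpreted as milliseconds).
	fmt.Println(prometheus.ExponentialBuckets(1, 1.5, 16))
}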
@@ -74,7 +64,15 @@ var (
 		Name:        "current_connection_count",
 		Help:        "当前链接数",
 		ConstLabels: ConstLabels,
-	}, []string{METRIC_LABEL_REMOTE, METRIC_LABEL_CONN_TYPE})
+	}, []string{"label", "conn_type", "remote"})
+
+	HandShakeDurationMilliseconds = prometheus.NewHistogramVec(prometheus.HistogramOpts{
+		Subsystem:   METRIC_SUBSYSTEM_TRAFFIC,
+		Namespace:   METRIC_NS,
+		Name:        "handshake_duration_milliseconds",
+		Help:        "握手时间ms",
+		ConstLabels: ConstLabels,
+	}, []string{"label", "conn_type", "remote"})

 	NetWorkTransmitBytes = prometheus.NewCounterVec(prometheus.CounterOpts{
 		Namespace:   METRIC_NS,
@@ -82,15 +80,7 @@ var (
 		Name:        "network_transmit_bytes",
 		Help:        "传输流量总量bytes",
 		ConstLabels: ConstLabels,
-	}, []string{METRIC_LABEL_REMOTE, METRIC_LABEL_CONN_TYPE, METRIC_LABEL_CONN_FLOW})
-
-	HandShakeDuration = prometheus.NewHistogramVec(prometheus.HistogramOpts{
-		Subsystem:   METRIC_SUBSYSTEM_TRAFFIC,
-		Namespace:   METRIC_NS,
-		Name:        "handshake_duration",
-		Help:        "握手时间ms",
-		ConstLabels: ConstLabels,
-	}, []string{METRIC_LABEL_REMOTE})
+	}, []string{"label", "conn_type", "flow", "remote"})
 )

 func RegisterEhcoMetrics(cfg *config.Config) error {
@@ -98,15 +88,14 @@ func RegisterEhcoMetrics(cfg *config.Config) error {
 	prometheus.MustRegister(EhcoAlive)
 	prometheus.MustRegister(CurConnectionCount)
 	prometheus.MustRegister(NetWorkTransmitBytes)
-	prometheus.MustRegister(HandShakeDuration)
+	prometheus.MustRegister(HandShakeDurationMilliseconds)

 	EhcoAlive.Set(EhcoAliveStateInit)

 	// ping
 	if cfg.EnablePing {
 		pg := NewPingGroup(cfg)
-		prometheus.MustRegister(PingResponseDurationSeconds)
-		prometheus.MustRegister(pg)
+		prometheus.MustRegister(PingResponseDurationMilliseconds)
 		go pg.Run()
 	}
 	return nil
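RegisterEhcoMetrics above now registers the millisecond histograms directly and no longer registers the ping group as a custom collector (the PingRequestTotal descriptor was removed earlier in this diff). A minimal sketch of exposing whatever has been registered over HTTP with promhttp; the listen address is an assumption for the sketch, not taken from the project.

package main

import (
	"net/http"

	"github.com/prometheus/client_golang/prometheus/promhttp"
)

func main() {
	// Serves everything registered on the default prometheus registry.
	http.Handle("/metrics", promhttp.Handler())
	_ = http.ListenAndServe(":9090", nil) // assumed address
}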
@@ -1,20 +1,16 @@
 package metrics

 import (
-	"fmt"
 	"math"
-	"net/url"
 	"runtime"
-	"strings"
 	"time"

 	"github.com/Ehco1996/ehco/internal/config"
 	"github.com/go-ping/ping"
-	"github.com/prometheus/client_golang/prometheus"
 	"go.uber.org/zap"
 )

-func (pg *PingGroup) newPinger(addr string) (*ping.Pinger, error) {
+func (pg *PingGroup) newPinger(ruleLabel string, remote string, addr string) (*ping.Pinger, error) {
 	pinger := ping.New(addr)
 	if err := pinger.Resolve(); err != nil {
 		pg.logger.Error("failed to resolve pinger", zap.String("addr", addr), zap.Error(err))
@@ -26,6 +22,13 @@ func (pg *PingGroup) newPinger(addr string) (*ping.Pinger, error) {
 	if runtime.GOOS != "darwin" {
 		pinger.SetPrivileged(true)
 	}
+	pinger.OnRecv = func(pkt *ping.Packet) {
+		ip := pkt.IPAddr.String()
+		PingResponseDurationMilliseconds.WithLabelValues(
+			ruleLabel, remote, ip).Observe(float64(pkt.Rtt.Milliseconds()))
+		pg.logger.Sugar().Infof("%d bytes from %s icmp_seq=%d time=%v ttl=%v",
+			pkt.Nbytes, pkt.Addr, pkt.Seq, pkt.Rtt, pkt.Ttl)
+	}
 	return pinger, nil
 }
@@ -34,89 +37,29 @@ type PingGroup struct {

 	// k: addr
 	Pingers map[string]*ping.Pinger
-
-	// k: addr v:relay rule label joined by ","
-	PingerLabels map[string]string
 }

-func extractHost(input string) (string, error) {
-	// Check if the input string has a scheme, if not, add "http://"
-	if !strings.Contains(input, "://") {
-		input = "http://" + input
-	}
-	// Parse the URL
-	u, err := url.Parse(input)
-	if err != nil {
-		return "", err
-	}
-	return u.Hostname(), nil
-}
-
 func NewPingGroup(cfg *config.Config) *PingGroup {
-	logger := zap.L().Named("pinger")
-
 	pg := &PingGroup{
-		logger:       logger,
-		Pingers:      make(map[string]*ping.Pinger),
-		PingerLabels: map[string]string{},
+		logger:  zap.L().Named("pinger"),
+		Pingers: make(map[string]*ping.Pinger),
 	}

-	// parse addr from rule
 	for _, relayCfg := range cfg.RelayConfigs {
-		// NOTE for (https/ws/wss)://xxx.com -> xxx.com
-		for _, remote := range relayCfg.Remotes {
-			addr, err := extractHost(remote)
+		for _, remote := range relayCfg.GetAllRemotes() {
+			addr, err := remote.GetAddrHost()
 			if err != nil {
 				pg.logger.Error("try parse host error", zap.Error(err))
 			}
-			if _, ok := pg.Pingers[addr]; ok {
-				// append rule label when remote host is same
-				pg.PingerLabels[addr] += fmt.Sprintf(",%s", relayCfg.Label)
-				continue
-			}
-			if pinger, err := pg.newPinger(addr); err != nil {
+			if pinger, err := pg.newPinger(relayCfg.Label, remote.Address, addr); err != nil {
 				pg.logger.Error("new pinger meet error", zap.Error(err))
 			} else {
-				pg.Pingers[pinger.Addr()] = pinger
-				pg.PingerLabels[addr] = relayCfg.Label
+				pg.Pingers[addr] = pinger
 			}
 		}
 	}
-
-	// update metrics
-	for addr, pinger := range pg.Pingers {
-		pinger.OnRecv = func(pkt *ping.Packet) {
-			PingResponseDurationSeconds.WithLabelValues(
-				pkt.IPAddr.String(), pkt.Addr, pg.PingerLabels[addr]).Observe(pkt.Rtt.Seconds())
-			pg.logger.Sugar().Infof("%d bytes from %s icmp_seq=%d time=%v ttl=%v",
-				pkt.Nbytes, pkt.Addr, pkt.Seq, pkt.Rtt, pkt.Ttl)
-		}
-		pinger.OnDuplicateRecv = func(pkt *ping.Packet) {
-			pg.logger.Sugar().Infof("%d bytes from %s icmp_seq=%d time=%v ttl=%v (DUP!)",
-				pkt.Nbytes, pkt.IPAddr, pkt.Seq, pkt.Rtt, pkt.Ttl)
-		}
-	}
 	return pg
 }

-func (pg *PingGroup) Describe(ch chan<- *prometheus.Desc) {
-	ch <- PingRequestTotal
-}
-
-func (pg *PingGroup) Collect(ch chan<- prometheus.Metric) {
-	for addr, pinger := range pg.Pingers {
-		stats := pinger.Statistics()
-		ch <- prometheus.MustNewConstMetric(
-			PingRequestTotal,
-			prometheus.CounterValue,
-			float64(stats.PacketsSent),
-			stats.IPAddr.String(),
-			stats.Addr,
-			pg.PingerLabels[addr],
-		)
-	}
-}
-
 func (pg *PingGroup) Run() {
 	if len(pg.Pingers) <= 0 {
 		return
@@ -179,14 +179,16 @@ func (r *Config) DefaultLabel() string {
 func (r *Config) ToRemotesLB() lb.RoundRobin {
 	tcpNodeList := make([]*lb.Node, len(r.Remotes))
 	for idx, addr := range r.Remotes {
-		tcpNodeList[idx] = &lb.Node{
-			Address: addr,
-			Label:   fmt.Sprintf("%s-%s", r.Label, addr),
-		}
+		tcpNodeList[idx] = &lb.Node{Address: addr}
 	}
 	return lb.NewRoundRobin(tcpNodeList)
 }

+func (r *Config) GetAllRemotes() []*lb.Node {
+	lb := r.ToRemotesLB()
+	return lb.GetAll()
+}
+
 func (r *Config) GetLoggerName() string {
 	return fmt.Sprintf("%s(%s<->%s)", r.Label, r.ListenType, r.TransportType)
 }
@@ -44,8 +44,9 @@ func newBaseRelayServer(cfg *conf.Config, cmgr cmgr.Cmgr) (*BaseRelayServer, err
 }

 func (b *BaseRelayServer) RelayTCPConn(ctx context.Context, c net.Conn, remote *lb.Node) error {
-	metrics.CurConnectionCount.WithLabelValues(remote.Label, metrics.METRIC_CONN_TYPE_TCP).Inc()
-	defer metrics.CurConnectionCount.WithLabelValues(remote.Label, metrics.METRIC_CONN_TYPE_TCP).Dec()
+	labels := []string{b.cfg.Label, metrics.METRIC_CONN_TYPE_TCP, remote.Address}
+	metrics.CurConnectionCount.WithLabelValues(labels...).Inc()
+	defer metrics.CurConnectionCount.WithLabelValues(labels...).Dec()
+
 	if err := b.checkConnectionLimit(); err != nil {
 		return err

@@ -68,8 +69,9 @@ func (b *BaseRelayServer) RelayTCPConn(ctx context.Context, c net.Conn, remote *
 }

 func (b *BaseRelayServer) RelayUDPConn(ctx context.Context, c net.Conn, remote *lb.Node) error {
-	metrics.CurConnectionCount.WithLabelValues(remote.Label, metrics.METRIC_CONN_TYPE_UDP).Inc()
-	defer metrics.CurConnectionCount.WithLabelValues(remote.Label, metrics.METRIC_CONN_TYPE_UDP).Dec()
+	labels := []string{b.cfg.Label, metrics.METRIC_CONN_TYPE_UDP, remote.Address}
+	metrics.CurConnectionCount.WithLabelValues(labels...).Inc()
+	defer metrics.CurConnectionCount.WithLabelValues(labels...).Dec()
+
 	rc, err := b.relayer.HandShake(ctx, remote, false)
 	if err != nil {
@@ -47,7 +47,12 @@ func (raw *RawClient) HandShake(ctx context.Context, remote *lb.Node, isTCP bool
 		return nil, err
 	}
 	latency := time.Since(t1)
-	metrics.HandShakeDuration.WithLabelValues(remote.Label).Observe(float64(latency.Milliseconds()))
+	connType := metrics.METRIC_CONN_TYPE_TCP
+	if !isTCP {
+		connType = metrics.METRIC_CONN_TYPE_UDP
+	}
+	labels := []string{raw.cfg.Label, connType, remote.Address}
+	metrics.HandShakeDurationMilliseconds.WithLabelValues(labels...).Observe(float64(latency.Milliseconds()))
 	remote.HandShakeDuration = latency
 	return rc, nil
 }

@@ -67,7 +67,12 @@ func (s *WsClient) HandShake(ctx context.Context, remote *lb.Node, isTCP bool) (
 		return nil, err
 	}
 	latency := time.Since(t1)
-	metrics.HandShakeDuration.WithLabelValues(remote.Label).Observe(float64(latency.Milliseconds()))
+	connType := metrics.METRIC_CONN_TYPE_TCP
+	if !isTCP {
+		connType = metrics.METRIC_CONN_TYPE_UDP
+	}
+	labels := []string{s.cfg.Label, connType, remote.Address}
+	metrics.HandShakeDurationMilliseconds.WithLabelValues(labels...).Observe(float64(latency.Milliseconds()))
 	remote.HandShakeDuration = latency
 	c := conn.NewWSConn(wsc, false)
 	return c, nil
@@ -97,7 +102,7 @@ func (s *WsServer) handleRequest(w http.ResponseWriter, req *http.Request) {

 	var remote *lb.Node
 	if addr := req.URL.Query().Get(conf.WS_QUERY_REMOTE_ADDR); addr != "" {
-		remote = &lb.Node{Address: addr, Label: addr}
+		remote = &lb.Node{Address: addr}
 	} else {
 		remote = s.remotes.Next()
 	}
echo/internal/web/handler_api.go (new file, 134 lines)
@@ -0,0 +1,134 @@
+package web
+
+import (
+	"encoding/json"
+	"fmt"
+	"net/http"
+	"strconv"
+	"time"
+
+	"github.com/Ehco1996/ehco/internal/cmgr/ms"
+	"github.com/labstack/echo/v4"
+)
+
+const (
+	defaultTimeRange = 60 // seconds
+	errInvalidParam  = "invalid parameter: %s"
+)
+
+type queryParams struct {
+	startTS int64
+	endTS   int64
+	refresh bool
+}
+
+func parseQueryParams(c echo.Context) (*queryParams, error) {
+	now := time.Now().Unix()
+	params := &queryParams{
+		startTS: now - defaultTimeRange,
+		endTS:   now,
+		refresh: false,
+	}
+
+	if start, err := parseTimestamp(c.QueryParam("start_ts")); err == nil {
+		params.startTS = start
+	}
+
+	if end, err := parseTimestamp(c.QueryParam("end_ts")); err == nil {
+		params.endTS = end
+	}
+
+	if refresh, err := strconv.ParseBool(c.QueryParam("latest")); err == nil {
+		params.refresh = refresh
+	}
+
+	if params.startTS >= params.endTS {
+		return nil, fmt.Errorf(errInvalidParam, "time range")
+	}
+
+	return params, nil
+}
+
+func parseTimestamp(s string) (int64, error) {
+	if s == "" {
+		return 0, fmt.Errorf("empty timestamp")
+	}
+	return strconv.ParseInt(s, 10, 64)
+}
+
+func (s *Server) GetNodeMetrics(c echo.Context) error {
+	params, err := parseQueryParams(c)
+	if err != nil {
+		return echo.NewHTTPError(http.StatusBadRequest, err.Error())
+	}
+	req := &ms.QueryNodeMetricsReq{StartTimestamp: params.startTS, EndTimestamp: params.endTS, Num: -1}
+	if params.refresh {
+		req.Num = 1
+	}
+	metrics, err := s.connMgr.QueryNodeMetrics(c.Request().Context(), req, params.refresh)
+	if err != nil {
+		return echo.NewHTTPError(http.StatusInternalServerError, err.Error())
+	}
+	return c.JSON(http.StatusOK, metrics)
+}
+
+func (s *Server) GetRuleMetrics(c echo.Context) error {
+	params, err := parseQueryParams(c)
+	if err != nil {
+		return echo.NewHTTPError(http.StatusBadRequest, err.Error())
+	}
+	req := &ms.QueryRuleMetricsReq{
+		StartTimestamp: params.startTS,
+		EndTimestamp:   params.endTS,
+		Num:            -1,
+		RuleLabel:      c.QueryParam("label"),
+		Remote:         c.QueryParam("remote"),
+	}
+	if params.refresh {
+		req.Num = 1
+	}
+
+	metrics, err := s.connMgr.QueryRuleMetrics(c.Request().Context(), req, params.refresh)
+	if err != nil {
+		return echo.NewHTTPError(http.StatusInternalServerError, err.Error())
+	}
+	return c.JSON(http.StatusOK, metrics)
+}
+
+func (s *Server) CurrentConfig(c echo.Context) error {
+	ret, err := json.Marshal(s.cfg)
+	if err != nil {
+		return echo.NewHTTPError(http.StatusBadRequest, err.Error())
+	}
+
+	return c.JSONBlob(http.StatusOK, ret)
+}
+
+func (s *Server) HandleReload(c echo.Context) error {
+	if s.Reloader == nil {
+		return echo.NewHTTPError(http.StatusBadRequest, "reload not support")
+	}
+	err := s.Reloader.Reload(true)
+	if err != nil {
+		return echo.NewHTTPError(http.StatusBadRequest, err.Error())
+	}
+
+	if _, err := c.Response().Write([]byte("reload success")); err != nil {
+		s.l.Errorf("write response meet err=%v", err)
+		return echo.NewHTTPError(http.StatusBadRequest, err.Error())
+	}
+	return nil
+}
+
+func (s *Server) HandleHealthCheck(c echo.Context) error {
+	relayLabel := c.QueryParam("relay_label")
+	if relayLabel == "" {
+		return echo.NewHTTPError(http.StatusBadRequest, "relay_label is required")
+	}
+	latency, err := s.HealthCheck(c.Request().Context(), relayLabel)
+	if err != nil {
+		res := HealthCheckResp{Message: err.Error(), ErrorCode: -1}
+		return c.JSON(http.StatusBadRequest, res)
+	}
+	return c.JSON(http.StatusOK, HealthCheckResp{Message: "connect success", Latency: latency})
+}
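The new handler_api.go above accepts start_ts, end_ts and latest as query parameters and answers with JSON. A hedged client sketch follows: the /api/v1/node_metrics/ path matches the constants used by the frontend code later in this diff, but the route registration itself is not shown here, the host and port are placeholders, and the response struct below only mirrors a subset of the fields visible in the diff.

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

// Partial mirror of the node-metrics response for illustration only.
type nodeMetricsResp struct {
	Total int `json:"total"`
	Data  []struct {
		Timestamp int64   `json:"timestamp"`
		CPUUsage  float64 `json:"cpu_usage"`
	} `json:"data"`
}

func main() {
	// "latest=true" asks for a single refreshed sample (the handler sets Num = 1).
	resp, err := http.Get("http://127.0.0.1:9000/api/v1/node_metrics/?latest=true") // assumed address and path
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var out nodeMetricsResp
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		panic(err)
	}
	fmt.Println(out.Total, out.Data)
}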
@@ -1,13 +1,10 @@
 package web

 import (
-	"encoding/json"
 	"fmt"
 	"net/http"
 	"strconv"
-	"time"

-	"github.com/Ehco1996/ehco/internal/cmgr/ms"
 	"github.com/Ehco1996/ehco/internal/config"
 	"github.com/Ehco1996/ehco/internal/constant"
 	"github.com/labstack/echo/v4"

@@ -42,44 +39,6 @@ func (s *Server) index(c echo.Context) error {
 	return c.Render(http.StatusOK, "index.html", data)
 }

-func (s *Server) HandleReload(c echo.Context) error {
-	if s.Reloader == nil {
-		return echo.NewHTTPError(http.StatusBadRequest, "reload not support")
-	}
-	err := s.Reloader.Reload(true)
-	if err != nil {
-		return echo.NewHTTPError(http.StatusBadRequest, err.Error())
-	}
-
-	if _, err := c.Response().Write([]byte("reload success")); err != nil {
-		s.l.Errorf("write response meet err=%v", err)
-		return echo.NewHTTPError(http.StatusBadRequest, err.Error())
-	}
-	return nil
-}
-
-func (s *Server) HandleHealthCheck(c echo.Context) error {
-	relayLabel := c.QueryParam("relay_label")
-	if relayLabel == "" {
-		return echo.NewHTTPError(http.StatusBadRequest, "relay_label is required")
-	}
-	latency, err := s.HealthCheck(c.Request().Context(), relayLabel)
-	if err != nil {
-		res := HealthCheckResp{Message: err.Error(), ErrorCode: -1}
-		return c.JSON(http.StatusBadRequest, res)
-	}
-	return c.JSON(http.StatusOK, HealthCheckResp{Message: "connect success", Latency: latency})
-}
-
-func (s *Server) CurrentConfig(c echo.Context) error {
-	ret, err := json.Marshal(s.cfg)
-	if err != nil {
-		return echo.NewHTTPError(http.StatusBadRequest, err.Error())
-	}
-
-	return c.JSONBlob(http.StatusOK, ret)
-}
-
 func (s *Server) ListConnections(c echo.Context) error {
 	pageStr := c.QueryParam("page")
 	page, err := strconv.Atoi(pageStr)

@@ -126,36 +85,12 @@ func (s *Server) ListRules(c echo.Context) error {
 	})
 }

-func (s *Server) GetNodeMetrics(c echo.Context) error {
-	startTS := time.Now().Unix() - 60
-	if c.QueryParam("start_ts") != "" {
-		star, err := strconv.ParseInt(c.QueryParam("start_ts"), 10, 64)
-		if err != nil {
-			return echo.NewHTTPError(http.StatusBadRequest, err.Error())
-		}
-		startTS = star
-	}
-	endTS := time.Now().Unix()
-	if c.QueryParam("end_ts") != "" {
-		end, err := strconv.ParseInt(c.QueryParam("end_ts"), 10, 64)
-		if err != nil {
-			return echo.NewHTTPError(http.StatusBadRequest, err.Error())
-		}
-		endTS = end
-	}
-	req := &ms.QueryNodeMetricsReq{StartTimestamp: startTS, EndTimestamp: endTS}
-	latest := c.QueryParam("latest")
-	if latest != "" {
-		r, err := strconv.ParseBool(latest)
-		if err != nil {
-			return echo.NewHTTPError(http.StatusBadRequest, err.Error())
-		}
-		req.Latest = r
-	}
-
-	metrics, err := s.connMgr.QueryNodeMetrics(c.Request().Context(), req)
-	if err != nil {
-		return echo.NewHTTPError(http.StatusBadRequest, err.Error())
-	}
-	return c.JSON(http.StatusOK, metrics)
-}
+func (s *Server) RuleMetrics(c echo.Context) error {
+	return c.Render(http.StatusOK, "rule_metrics.html", map[string]interface{}{
+		"Configs": s.cfg.RelayConfigs,
+	})
+}
+
+func (s *Server) LogsPage(c echo.Context) error {
+	return c.Render(http.StatusOK, "logs.html", nil)
+}
echo/internal/web/handlers_ws.go (new file, 33 lines)
@@ -0,0 +1,33 @@
+package web
+
+import (
+	"net"
+
+	"github.com/Ehco1996/ehco/pkg/log"
+	"github.com/gobwas/ws"
+	"github.com/labstack/echo/v4"
+)
+
+func (s *Server) handleWebSocketLogs(c echo.Context) error {
+	conn, _, _, err := ws.UpgradeHTTP(c.Request(), c.Response())
+	if err != nil {
+		return err
+	}
+	defer conn.Close()
+
+	log.SetWebSocketConn(conn)
+
+	// keep the connection open and handle any inbound messages
+	for {
+		_, err := ws.ReadFrame(conn)
+		if err != nil {
+			if _, ok := err.(net.Error); ok {
+				// handle network errors
+				s.l.Errorf("WebSocket read error: %v", err)
+			}
+			break
+		}
+	}
+	log.SetWebSocketConn(nil)
+	return nil
+}
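handleWebSocketLogs above upgrades the request with gobwas/ws and streams log output to the connection until the peer goes away. A minimal client sketch using the same library is shown below; the ws://.../api/v1/logs URL is an assumption (only the server-side handler appears in the diff), and the dial target would need to match however the route is actually registered.

package main

import (
	"context"
	"fmt"

	"github.com/gobwas/ws"
	"github.com/gobwas/ws/wsutil"
)

func main() {
	// Assumed URL; adjust to the real route registration.
	conn, _, _, err := ws.Dial(context.Background(), "ws://127.0.0.1:9000/api/v1/logs")
	if err != nil {
		panic(err)
	}
	defer conn.Close()

	// Print each text frame pushed by the log handler until the server closes.
	for {
		msg, err := wsutil.ReadServerText(conn)
		if err != nil {
			return
		}
		fmt.Println(string(msg))
	}
}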
@@ -1,393 +0,0 @@
|
|||||||
const MetricsModule = (function () {
|
|
||||||
// Constants
|
|
||||||
const API_BASE_URL = '/api/v1';
|
|
||||||
const NODE_METRICS_PATH = '/node_metrics/';
|
|
||||||
const BYTE_TO_MB = 1024 * 1024;
|
|
||||||
|
|
||||||
const handleError = (error) => {
|
|
||||||
console.error('Error:', error);
|
|
||||||
};
|
|
||||||
|
|
||||||
// API functions
|
|
||||||
const fetchData = async (path, params = {}) => {
|
|
||||||
const url = new URL(API_BASE_URL + path, window.location.origin);
|
|
||||||
Object.entries(params).forEach(([key, value]) => url.searchParams.append(key, value));
|
|
||||||
try {
|
|
||||||
const response = await fetch(url.toString());
|
|
||||||
if (!response.ok) {
|
|
||||||
throw new Error(`HTTP error! status: ${response.status}`);
|
|
||||||
}
|
|
||||||
return await response.json();
|
|
||||||
} catch (error) {
|
|
||||||
handleError(error);
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const fetchLatestMetric = () => fetchData(NODE_METRICS_PATH, { latest: true }).then((data) => data?.data[0]);
|
|
||||||
const fetchMetrics = (startTs, endTs) => fetchData(NODE_METRICS_PATH, { start_ts: startTs, end_ts: endTs }).then((data) => data?.data);
|
|
||||||
|
|
||||||
// Chart functions
|
|
||||||
const initChart = (canvasId, type, datasets, legendPosition = '', yDisplayText = '', title = '', unit = '') => {
|
|
||||||
const ctx = $(`#${canvasId}`)[0].getContext('2d');
|
|
||||||
const colors = {
|
|
||||||
cpu: 'rgba(255, 99, 132, 1)',
|
|
||||||
memory: 'rgba(54, 162, 235, 1)',
|
|
||||||
disk: 'rgba(255, 206, 86, 1)',
|
|
||||||
receive: 'rgba(0, 150, 255, 1)',
|
|
||||||
transmit: 'rgba(255, 140, 0, 1)',
|
|
||||||
};
|
|
||||||
|
|
||||||
const getDatasetConfig = (label) => {
|
|
||||||
const color = colors[label.toLowerCase()] || 'rgba(0, 0, 0, 1)';
|
|
||||||
return {
|
|
||||||
label,
|
|
||||||
borderColor: color,
|
|
||||||
backgroundColor: color.replace('1)', '0.2)'),
|
|
||||||
borderWidth: 2,
|
|
||||||
pointRadius: 2,
|
|
||||||
pointHoverRadius: 2,
|
|
||||||
fill: true,
|
|
||||||
data: [],
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
const data = {
|
|
||||||
labels: [],
|
|
||||||
datasets: $.isArray(datasets) ? datasets.map((dataset) => getDatasetConfig(dataset.label)) : [getDatasetConfig(datasets.label)],
|
|
||||||
};
|
|
||||||
|
|
||||||
return new Chart(ctx, {
|
|
||||||
type,
|
|
||||||
data,
|
|
||||||
options: {
|
|
||||||
line: {
|
|
||||||
spanGaps: false, // 设置为 false,不连接空值
|
|
||||||
},
|
|
||||||
responsive: true,
|
|
||||||
plugins: {
|
|
||||||
legend: { position: legendPosition },
|
|
||||||
title: {
|
|
||||||
display: !!title,
|
|
||||||
text: title,
|
|
||||||
position: 'bottom',
|
|
||||||
font: { size: 14, weight: 'bold' },
|
|
||||||
},
|
|
||||||
tooltip: {
|
|
||||||
callbacks: {
|
|
||||||
title: function (tooltipItems) {
|
|
||||||
return new Date(tooltipItems[0].label).toLocaleString();
|
|
||||||
},
|
|
||||||
label: function (context) {
|
|
||||||
let label = context.dataset.label || '';
|
|
||||||
if (label) {
|
|
||||||
label += ': ';
|
|
||||||
}
|
|
||||||
if (context.parsed.y !== null) {
|
|
||||||
label += context.parsed.y.toFixed(2) + ' ' + unit;
|
|
||||||
}
|
|
||||||
return label;
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
scales: {
|
|
||||||
x: {
|
|
||||||
type: 'time',
|
|
||||||
time: {
|
|
||||||
unit: 'minute',
|
|
||||||
displayFormats: {
|
|
||||||
minute: 'HH:mm',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
ticks: {
|
|
||||||
maxRotation: 0,
|
|
||||||
autoSkip: true,
|
|
||||||
maxTicksLimit: 10,
|
|
||||||
},
|
|
||||||
adapters: {
|
|
||||||
date: {
|
|
||||||
locale: 'en',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
y: {
|
|
||||||
beginAtZero: true,
|
|
||||||
title: { display: true, text: yDisplayText, font: { weight: 'bold' } },
|
|
||||||
},
|
|
||||||
},
|
|
||||||
elements: { line: { tension: 0.4 } },
|
|
||||||
downsample: {
|
|
||||||
enabled: true,
|
|
||||||
threshold: 200,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
const updateChart = (chart, newData, labels) => {
|
|
||||||
if (!newData || !labels) {
|
|
||||||
console.error('Invalid data or labels provided');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if ($.isArray(newData) && $.isArray(newData[0])) {
|
|
||||||
$.each(chart.data.datasets, (index, dataset) => {
|
|
||||||
if (newData[index]) {
|
|
||||||
dataset.data = newData[index].map((value, i) => ({ x: moment(labels[i]), y: value }));
|
|
||||||
}
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
chart.data.datasets[0].data = newData.map((value, i) => ({ x: moment(labels[i]), y: value }));
|
|
||||||
}
|
|
||||||
|
|
||||||
chart.options.scales.x.min = moment(labels[0]);
|
|
||||||
chart.options.scales.x.max = moment(labels[labels.length - 1]);
|
|
||||||
chart.update();
|
|
||||||
};
|
|
||||||
|
|
||||||
const updateCharts = (charts, metrics, startTs, endTs) => {
|
|
||||||
console.log('Raw metrics data:', metrics);
|
|
||||||
|
|
||||||
const generateTimestamps = (start, end) => {
|
|
||||||
const timestamps = [];
|
|
||||||
let current = moment.unix(start);
|
|
||||||
const endMoment = moment.unix(end);
|
|
||||||
while (current.isSameOrBefore(endMoment)) {
|
|
||||||
timestamps.push(current.toISOString());
|
|
||||||
current.add(1, 'minute');
|
|
||||||
}
|
|
||||||
return timestamps;
|
|
||||||
};
|
|
||||||
|
|
||||||
const timestamps = generateTimestamps(startTs, endTs);
|
|
||||||
|
|
||||||
const processData = (dataKey) => {
|
|
||||||
const data = new Array(timestamps.length).fill(null);
|
|
||||||
metrics.forEach((metric) => {
|
|
||||||
const index = Math.floor((metric.timestamp - startTs) / 60);
|
|
||||||
if (index >= 0 && index < data.length) {
|
|
||||||
data[index] = metric[dataKey];
|
|
||||||
}
|
|
||||||
});
|
|
||||||
return data;
|
|
||||||
};
|
|
||||||
|
|
||||||
updateChart(charts.cpu, processData('cpu_usage'), timestamps);
|
|
||||||
updateChart(charts.memory, processData('memory_usage'), timestamps);
|
|
||||||
updateChart(charts.disk, processData('disk_usage'), timestamps);
|
|
||||||
updateChart(
|
|
||||||
charts.network,
|
|
||||||
[
|
|
||||||
processData('network_in').map((v) => (v === null ? null : v / BYTE_TO_MB)),
|
|
||||||
processData('network_out').map((v) => (v === null ? null : v / BYTE_TO_MB)),
|
|
||||||
],
|
|
||||||
timestamps
|
|
||||||
);
|
|
||||||
};
|
|
||||||
|
|
||||||
const addLatestDataToCharts = (charts, latestMetric) => {
|
|
||||||
console.log('Raw latestMetric data:', latestMetric);
|
|
||||||
const timestamp = moment.unix(latestMetric.timestamp);
|
|
||||||
|
|
||||||
$.each(charts, (key, chart) => {
|
|
||||||
// 检查是否已经有这个时间戳的数据
|
|
||||||
const existingDataIndex = chart.data.labels.findIndex((label) => label.isSame(timestamp));
|
|
||||||
|
|
||||||
if (existingDataIndex === -1) {
|
|
||||||
// 如果是新数据,添加到末尾
|
|
||||||
chart.data.labels.push(timestamp);
|
|
||||||
if (key === 'network') {
|
|
||||||
chart.data.datasets[0].data.push({ x: timestamp, y: latestMetric.network_in / BYTE_TO_MB });
|
|
||||||
chart.data.datasets[1].data.push({ x: timestamp, y: latestMetric.network_out / BYTE_TO_MB });
|
|
||||||
} else {
|
|
||||||
chart.data.datasets[0].data.push({ x: timestamp, y: latestMetric[`${key}_usage`] });
|
|
||||||
}
|
|
||||||
|
|
||||||
// 更新x轴范围,但保持一定的时间窗口
|
|
||||||
const timeWindow = moment.duration(30, 'minutes'); // 设置显示的时间窗口,例如30分钟
|
|
||||||
const oldestAllowedTime = moment(timestamp).subtract(timeWindow);
|
|
||||||
|
|
||||||
chart.options.scales.x.min = oldestAllowedTime;
|
|
||||||
chart.options.scales.x.max = timestamp;
|
|
||||||
|
|
||||||
// 开启图表的平移和缩放功能
|
|
||||||
chart.options.plugins.zoom = {
|
|
||||||
pan: {
|
|
||||||
enabled: true,
|
|
||||||
mode: 'x',
|
|
||||||
},
|
|
||||||
zoom: {
|
|
||||||
wheel: {
|
|
||||||
enabled: true,
|
|
||||||
},
|
|
||||||
pinch: {
|
|
||||||
enabled: true,
|
|
||||||
},
|
|
||||||
mode: 'x',
|
|
||||||
},
|
|
||||||
};
|
|
||||||
|
|
||||||
chart.update();
|
|
||||||
}
|
|
||||||
// 如果数据已存在,我们不做任何操作,保持现有数据
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
// Chart initialization
|
|
||||||
const initializeCharts = async () => {
|
|
||||||
const metric = await fetchLatestMetric();
|
|
||||||
if (!metric) return null;
|
|
||||||
return {
|
|
||||||
cpu: initChart('cpuChart', 'line', { label: 'CPU' }, 'top', 'Usage (%)', `CPU`, '%'),
|
|
||||||
memory: initChart('memoryChart', 'line', { label: 'Memory' }, 'top', 'Usage (%)', `Memory`, '%'),
|
|
||||||
disk: initChart('diskChart', 'line', { label: 'Disk' }, 'top', 'Usage (%)', `Disk`, '%'),
|
|
||||||
network: initChart(
|
|
||||||
'networkChart',
|
|
||||||
'line',
|
|
||||||
[{ label: 'Receive' }, { label: 'Transmit' }],
|
|
||||||
'top',
|
|
||||||
'Rate (MB/s)',
|
|
||||||
'Network Rate',
|
|
||||||
'MB/s'
|
|
||||||
),
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
// Date range functions
|
|
||||||
const setupDateRangeDropdown = (charts) => {
|
|
||||||
const $dateRangeDropdown = $('#dateRangeDropdown');
|
|
||||||
const $dateRangeButton = $('#dateRangeButton');
|
|
||||||
const $dateRangeText = $('#dateRangeText');
|
|
||||||
const $dateRangeInput = $('#dateRangeInput');
|
|
||||||
|
|
||||||
$dateRangeDropdown.find('.dropdown-item[data-range]').on('click', function (e) {
|
|
||||||
e.preventDefault();
|
|
||||||
const range = $(this).data('range');
|
|
||||||
const now = new Date();
|
|
||||||
let start, end;
|
|
||||||
switch (range) {
|
|
||||||
case '30m':
|
|
||||||
start = new Date(now - 30 * 60 * 1000);
|
|
||||||
break;
|
|
||||||
case '1h':
|
|
||||||
start = new Date(now - 60 * 60 * 1000);
|
|
||||||
break;
|
|
||||||
case '3h':
|
|
||||||
start = new Date(now - 3 * 60 * 60 * 1000);
|
|
||||||
break;
|
|
||||||
case '6h':
|
|
||||||
start = new Date(now - 6 * 60 * 60 * 1000);
|
|
||||||
break;
|
|
||||||
case '12h':
|
|
||||||
start = new Date(now - 12 * 60 * 60 * 1000);
|
|
||||||
break;
|
|
||||||
case '24h':
|
|
||||||
start = new Date(now - 24 * 60 * 60 * 1000);
|
|
||||||
break;
|
|
||||||
case '7d':
|
|
||||||
start = new Date(now - 7 * 24 * 60 * 60 * 1000);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
end = now;
|
|
||||||
|
|
||||||
const startTs = Math.floor(start.getTime() / 1000);
|
|
||||||
const endTs = Math.floor(end.getTime() / 1000);
|
|
||||||
fetchDataForRange(charts, startTs, endTs);
|
|
||||||
$dateRangeText.text($(this).text());
|
|
||||||
$dateRangeDropdown.removeClass('is-active');
|
|
||||||
});
|
|
||||||
|
|
||||||
$dateRangeButton.on('click', (event) => {
|
|
||||||
event.stopPropagation();
|
|
||||||
$dateRangeDropdown.toggleClass('is-active');
|
|
||||||
});
|
|
||||||
|
|
||||||
$(document).on('click', (event) => {
|
|
||||||
if (!$dateRangeDropdown.has(event.target).length) {
|
|
||||||
$dateRangeDropdown.removeClass('is-active');
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
const picker = flatpickr($dateRangeInput[0], {
|
|
||||||
mode: 'range',
|
|
||||||
enableTime: true,
|
|
||||||
dateFormat: 'Y-m-d H:i',
|
|
||||||
onChange: function (selectedDates) {
|
|
||||||
if (selectedDates.length === 2) {
|
|
||||||
const startTs = Math.floor(selectedDates[0].getTime() / 1000);
|
|
||||||
const endTs = Math.floor(selectedDates[1].getTime() / 1000);
|
|
||||||
fetchDataForRange(charts, startTs, endTs);
|
|
||||||
|
|
||||||
const formattedStart = selectedDates[0].toLocaleString();
|
|
||||||
const formattedEnd = selectedDates[1].toLocaleString();
|
|
||||||
$dateRangeText.text(`${formattedStart} - ${formattedEnd}`);
|
|
||||||
|
|
||||||
// 关闭下拉菜单
|
|
||||||
$dateRangeDropdown.removeClass('is-active');
|
|
||||||
}
|
|
||||||
},
|
|
||||||
onClose: function () {
|
|
||||||
// 确保在日期选择器关闭时也关闭下拉菜单
|
|
||||||
$dateRangeDropdown.removeClass('is-active');
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
// 防止点击日期选择器时关闭下拉菜单
|
|
||||||
$dateRangeInput.on('click', (event) => {
|
|
||||||
event.stopPropagation();
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
const fetchDataForRange = async (charts, startTs, endTs) => {
|
|
||||||
const metrics = await fetchMetrics(startTs, endTs);
|
|
||||||
if (metrics) {
|
|
||||||
console.log('Raw metrics data:', metrics);
|
|
||||||
updateCharts(charts, metrics, startTs, endTs);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Auto refresh functions
|
|
||||||
const setupAutoRefresh = (charts) => {
|
|
||||||
let autoRefreshInterval;
|
|
||||||
let isAutoRefreshing = false;
|
|
||||||
$('#refreshButton').click(function () {
|
|
||||||
if (isAutoRefreshing) {
|
|
||||||
clearInterval(autoRefreshInterval);
|
|
||||||
$(this).removeClass('is-info');
|
|
||||||
$(this).find('span:last').text('Auto Refresh');
|
|
||||||
isAutoRefreshing = false;
|
|
||||||
} else {
|
|
||||||
$(this).addClass('is-info');
|
|
||||||
$(this).find('span:last').text('Stop Refresh');
|
|
||||||
isAutoRefreshing = true;
|
|
||||||
refreshData(charts);
|
|
||||||
autoRefreshInterval = setInterval(() => refreshData(charts), 5000);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
const refreshData = async (charts) => {
|
|
||||||
const latestMetric = await fetchLatestMetric();
|
|
||||||
if (latestMetric) {
|
|
||||||
addLatestDataToCharts(charts, latestMetric);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Main initialization function
|
|
||||||
const init = async () => {
|
|
||||||
const charts = await initializeCharts();
|
|
||||||
if (charts) {
|
|
||||||
setupDateRangeDropdown(charts);
|
|
||||||
setupAutoRefresh(charts);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
// Public API
|
|
||||||
return {
|
|
||||||
init: init,
|
|
||||||
};
|
|
||||||
})();
|
|
||||||
|
|
||||||
// Initialize when the DOM is ready
|
|
||||||
document.addEventListener('DOMContentLoaded', MetricsModule.init);
|
|
echo/internal/web/js/node_metrics.js (new file, 404 lines)
@@ -0,0 +1,404 @@
|
const Config = {
|
||||||
|
API_BASE_URL: '/api/v1',
|
||||||
|
NODE_METRICS_PATH: '/node_metrics/',
|
||||||
|
BYTE_TO_MB: 1024 * 1024,
|
||||||
|
CHART_COLORS: {
|
||||||
|
cpu: 'rgba(255, 99, 132, 1)',
|
||||||
|
memory: 'rgba(54, 162, 235, 1)',
|
||||||
|
disk: 'rgba(255, 206, 86, 1)',
|
||||||
|
receive: 'rgba(0, 150, 255, 1)',
|
||||||
|
transmit: 'rgba(255, 140, 0, 1)',
|
||||||
|
},
|
||||||
|
TIME_WINDOW: 30, // minutes
|
||||||
|
AUTO_REFRESH_INTERVAL: 5000, // milliseconds
|
||||||
|
};
|
||||||
|
|
||||||
|
class ApiService {
|
||||||
|
static async fetchData(path, params = {}) {
|
||||||
|
const url = new URL(Config.API_BASE_URL + path, window.location.origin);
|
||||||
|
Object.entries(params).forEach(([key, value]) => url.searchParams.append(key, value));
|
||||||
|
try {
|
||||||
|
const response = await fetch(url.toString());
|
||||||
|
if (!response.ok) {
|
||||||
|
throw new Error(`HTTP error! status: ${response.status}`);
|
||||||
|
}
|
||||||
|
return await response.json();
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error:', error);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
static async fetchLatestMetric() {
|
||||||
|
const data = await this.fetchData(Config.NODE_METRICS_PATH, { latest: true });
|
||||||
|
return data?.data[0];
|
||||||
|
}
|
||||||
|
|
||||||
|
static async fetchMetrics(startTs, endTs) {
|
||||||
|
const data = await this.fetchData(Config.NODE_METRICS_PATH, { start_ts: startTs, end_ts: endTs });
|
||||||
|
return data?.data;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
class ChartManager {
|
||||||
|
constructor() {
|
||||||
|
this.charts = {};
|
||||||
|
}
|
||||||
|
|
||||||
|
initializeCharts() {
|
||||||
|
this.charts = {
|
||||||
|
cpu: this.initChart('cpuChart', 'line', { label: 'CPU' }, 'top', 'Usage (%)', 'CPU', '%'),
|
||||||
|
memory: this.initChart('memoryChart', 'line', { label: 'Memory' }, 'top', 'Usage (%)', 'Memory', '%'),
|
||||||
|
disk: this.initChart('diskChart', 'line', { label: 'Disk' }, 'top', 'Usage (%)', 'Disk', '%'),
|
||||||
|
network: this.initChart(
|
||||||
|
'networkChart',
|
||||||
|
'line',
|
||||||
|
[{ label: 'Receive' }, { label: 'Transmit' }],
|
||||||
|
'top',
|
||||||
|
'Rate (MB/s)',
|
||||||
|
'Network Rate',
|
||||||
|
'MB/s'
|
||||||
|
),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
initChart(canvasId, type, datasets, legendPosition, yDisplayText, title, unit) {
|
||||||
|
const ctx = $(`#${canvasId}`)[0].getContext('2d');
|
||||||
|
const data = {
|
||||||
|
labels: [],
|
||||||
|
datasets: Array.isArray(datasets)
|
||||||
|
? datasets.map((dataset) => this.getDatasetConfig(dataset.label))
|
||||||
|
: [this.getDatasetConfig(datasets.label)],
|
||||||
|
};
|
||||||
|
|
||||||
|
return new Chart(ctx, {
|
||||||
|
type,
|
||||||
|
data,
|
||||||
|
options: this.getChartOptions(legendPosition, yDisplayText, title, unit),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
getDatasetConfig(label) {
|
||||||
|
const color = Config.CHART_COLORS[label.toLowerCase()] || 'rgba(0, 0, 0, 1)';
|
||||||
|
return {
|
||||||
|
label,
|
||||||
|
borderColor: color,
|
||||||
|
backgroundColor: color.replace('1)', '0.2)'),
|
||||||
|
borderWidth: 2,
|
||||||
|
pointRadius: 2,
|
||||||
|
pointHoverRadius: 2,
|
||||||
|
fill: true,
|
||||||
|
data: [],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
getChartOptions(legendPosition, yDisplayText, title, unit) {
|
||||||
|
return {
|
||||||
|
line: { spanGaps: false },
|
||||||
|
responsive: true,
|
||||||
|
plugins: {
|
||||||
|
legend: { position: legendPosition },
|
||||||
|
title: {
|
||||||
|
display: !!title,
|
||||||
|
text: title,
|
||||||
|
position: 'bottom',
|
||||||
|
font: { size: 14, weight: 'bold' },
|
||||||
|
},
|
||||||
|
tooltip: {
|
||||||
|
callbacks: {
|
||||||
|
title: (tooltipItems) => new Date(tooltipItems[0].label).toLocaleString(),
|
||||||
|
label: (context) => {
|
||||||
|
let label = context.dataset.label || '';
|
||||||
|
if (label) {
|
||||||
|
label += ': ';
|
||||||
|
}
|
||||||
|
if (context.parsed.y !== null) {
|
||||||
|
label += context.parsed.y.toFixed(2) + ' ' + unit;
|
||||||
|
}
|
||||||
|
return label;
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
zoom: {
|
||||||
|
pan: { enabled: true, mode: 'x' },
|
||||||
|
zoom: {
|
||||||
|
wheel: { enabled: true },
|
||||||
|
pinch: { enabled: true },
|
||||||
|
mode: 'x',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
scales: {
|
||||||
|
x: {
|
||||||
|
type: 'time',
|
||||||
|
time: {
|
||||||
|
unit: 'minute',
|
||||||
|
displayFormats: { minute: 'HH:mm' },
|
||||||
|
},
|
||||||
|
ticks: {
|
||||||
|
maxRotation: 0,
|
||||||
|
autoSkip: true,
|
||||||
|
maxTicksLimit: 10,
|
||||||
|
},
|
||||||
|
adapters: {
|
||||||
|
date: { locale: 'en' },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
y: {
|
||||||
|
beginAtZero: true,
|
||||||
|
title: { display: true, text: yDisplayText, font: { weight: 'bold' } },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
elements: { line: { tension: 0.4 } },
|
||||||
|
downsample: {
|
||||||
|
enabled: true,
|
||||||
|
threshold: 200,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
  updateCharts(metrics, startTs, endTs) {
    const timestamps = this.generateTimestamps(startTs, endTs);
    const processData = (dataKey) => {
      const data = new Array(timestamps.length).fill(null);
      metrics.forEach((metric) => {
        const index = Math.floor((metric.timestamp - startTs) / 60);
        if (index >= 0 && index < data.length) {
          data[index] = metric[dataKey];
        }
      });
      return data;
    };

    this.updateChart(this.charts.cpu, processData('cpu_usage'), timestamps);
    this.updateChart(this.charts.memory, processData('memory_usage'), timestamps);
    this.updateChart(this.charts.disk, processData('disk_usage'), timestamps);
    this.updateChart(
      this.charts.network,
      [
        processData('network_in').map((v) => (v === null ? null : v / Config.BYTE_TO_MB)),
        processData('network_out').map((v) => (v === null ? null : v / Config.BYTE_TO_MB)),
      ],
      timestamps
    );
  }

  updateChart(chart, newData, labels) {
    if (!newData || !labels) {
      console.error('Invalid data or labels provided');
      return;
    }

    if (Array.isArray(newData) && Array.isArray(newData[0])) {
      chart.data.datasets.forEach((dataset, index) => {
        if (newData[index]) {
          dataset.data = newData[index].map((value, i) => ({ x: moment(labels[i]), y: value }));
        }
      });
    } else {
      chart.data.datasets[0].data = newData.map((value, i) => ({ x: moment(labels[i]), y: value }));
    }

    chart.options.scales.x.min = moment(labels[0]);
    chart.options.scales.x.max = moment(labels[labels.length - 1]);
    chart.update();
  }

  addLatestDataToCharts(latestMetric) {
    const timestamp = moment.unix(latestMetric.timestamp);

    Object.entries(this.charts).forEach(([key, chart]) => {
      const existingDataIndex = chart.data.labels.findIndex((label) => label.isSame(timestamp));

      if (existingDataIndex === -1) {
        chart.data.labels.push(timestamp);
        if (key === 'network') {
          chart.data.datasets[0].data.push({ x: timestamp, y: latestMetric.network_in / Config.BYTE_TO_MB });
          chart.data.datasets[1].data.push({ x: timestamp, y: latestMetric.network_out / Config.BYTE_TO_MB });
        } else {
          chart.data.datasets[0].data.push({ x: timestamp, y: latestMetric[`${key}_usage`] });
        }

        const timeWindow = moment.duration(Config.TIME_WINDOW, 'minutes');
        const oldestAllowedTime = moment(timestamp).subtract(timeWindow);

        chart.options.scales.x.min = oldestAllowedTime;
        chart.options.scales.x.max = timestamp;

        chart.update();
      }
    });
  }

  generateTimestamps(start, end) {
    const timestamps = [];
    let current = moment.unix(start);
    const endMoment = moment.unix(end);
    while (current.isSameOrBefore(endMoment)) {
      timestamps.push(current.toISOString());
      current.add(1, 'minute');
    }
    return timestamps;
  }
}

class DateRangeManager {
  constructor(chartManager) {
    this.chartManager = chartManager;
    this.$dateRangeDropdown = $('#dateRangeDropdown');
    this.$dateRangeButton = $('#dateRangeButton');
    this.$dateRangeText = $('#dateRangeText');
    this.$dateRangeInput = $('#dateRangeInput');
    this.setupEventListeners();
  }

  setupEventListeners() {
    this.$dateRangeDropdown.find('.dropdown-item[data-range]').on('click', (e) => this.handlePresetDateRange(e));
    this.$dateRangeButton.on('click', (event) => this.toggleDropdown(event));
    $(document).on('click', (event) => this.closeDropdownOnOutsideClick(event));
    this.initializeDatePicker();
  }

  handlePresetDateRange(e) {
    e.preventDefault();
    const range = $(e.currentTarget).data('range');
    const [start, end] = this.calculateDateRange(range);
    this.fetchAndUpdateCharts(start, end);
    this.$dateRangeText.text($(e.currentTarget).text());
    this.$dateRangeDropdown.removeClass('is-active');
  }

  calculateDateRange(range) {
    const now = new Date();
    let start;
    switch (range) {
      case '30m':
        start = new Date(now - 30 * 60 * 1000);
        break;
      case '1h':
        start = new Date(now - 60 * 60 * 1000);
        break;
      case '3h':
        start = new Date(now - 3 * 60 * 60 * 1000);
        break;
      case '6h':
        start = new Date(now - 6 * 60 * 60 * 1000);
        break;
      case '12h':
        start = new Date(now - 12 * 60 * 60 * 1000);
        break;
      case '24h':
        start = new Date(now - 24 * 60 * 60 * 1000);
        break;
      case '7d':
        start = new Date(now - 7 * 24 * 60 * 60 * 1000);
        break;
    }
    return [start, now];
  }

  toggleDropdown(event) {
    event.stopPropagation();
    this.$dateRangeDropdown.toggleClass('is-active');
  }

  closeDropdownOnOutsideClick(event) {
    if (!this.$dateRangeDropdown.has(event.target).length) {
      this.$dateRangeDropdown.removeClass('is-active');
    }
  }

  initializeDatePicker() {
    flatpickr(this.$dateRangeInput[0], {
      mode: 'range',
      enableTime: true,
      dateFormat: 'Y-m-d H:i',
      onChange: (selectedDates) => this.handleDatePickerChange(selectedDates),
      onClose: () => this.$dateRangeDropdown.removeClass('is-active'),
    });

    this.$dateRangeInput.on('click', (event) => event.stopPropagation());
  }

  handleDatePickerChange(selectedDates) {
    if (selectedDates.length === 2) {
      const [start, end] = selectedDates;
      this.fetchAndUpdateCharts(start, end);
      const formattedStart = start.toLocaleString();
      const formattedEnd = end.toLocaleString();
      this.$dateRangeText.text(`${formattedStart} - ${formattedEnd}`);
      this.$dateRangeDropdown.removeClass('is-active');
    }
  }

  async fetchAndUpdateCharts(start, end) {
    const startTs = Math.floor(start.getTime() / 1000);
    const endTs = Math.floor(end.getTime() / 1000);
    const metrics = await ApiService.fetchMetrics(startTs, endTs);
    if (metrics) {
      this.chartManager.updateCharts(metrics, startTs, endTs);
    }
  }
}

class AutoRefreshManager {
  constructor(chartManager) {
    this.chartManager = chartManager;
    this.autoRefreshInterval = null;
    this.isAutoRefreshing = false;
    this.$refreshButton = $('#refreshButton');
    this.setupEventListeners();
  }

  setupEventListeners() {
    this.$refreshButton.click(() => this.toggleAutoRefresh());
  }

  toggleAutoRefresh() {
    if (this.isAutoRefreshing) {
      this.stopAutoRefresh();
    } else {
      this.startAutoRefresh();
    }
  }

  startAutoRefresh() {
    this.isAutoRefreshing = true;
    this.$refreshButton.addClass('is-info');
    this.$refreshButton.find('span:last').text('Stop Refresh');
    this.refreshData();
    this.autoRefreshInterval = setInterval(() => this.refreshData(), Config.AUTO_REFRESH_INTERVAL);
  }

  stopAutoRefresh() {
    this.isAutoRefreshing = false;
    clearInterval(this.autoRefreshInterval);
    this.$refreshButton.removeClass('is-info');
    this.$refreshButton.find('span:last').text('Auto Refresh');
  }

  async refreshData() {
    const latestMetric = await ApiService.fetchLatestMetric();
    if (latestMetric) {
      this.chartManager.addLatestDataToCharts(latestMetric);
    }
  }
}

class MetricsModule {
  constructor() {
    this.chartManager = new ChartManager();
    this.dateRangeManager = new DateRangeManager(this.chartManager);
    this.autoRefreshManager = new AutoRefreshManager(this.chartManager);
  }

  async init() {
    this.chartManager.initializeCharts();
  }
}

// Initialize when the DOM is ready
document.addEventListener('DOMContentLoaded', () => {
  const metricsModule = new MetricsModule();
  metricsModule.init();
});
402
echo/internal/web/js/rule_metrics.js
Normal file
@@ -0,0 +1,402 @@
const Config = {
  API_BASE_URL: '/api/v1',
  RULE_METRICS_PATH: '/rule_metrics/',
  BYTE_TO_MB: 1024 * 1024,
  CHART_COLORS: {
    connectionCount: 'rgba(255, 99, 132, 1)',
    handshakeDuration: 'rgba(54, 162, 235, 1)',
    pingLatency: 'rgba(255, 206, 86, 1)',
    networkTransmitBytes: 'rgba(75, 192, 192, 1)',
  },
  TIME_WINDOW: 30, // minutes
  AUTO_REFRESH_INTERVAL: 5000, // milliseconds
};

class ApiService {
  static async fetchData(path, params = {}) {
    const url = new URL(Config.API_BASE_URL + path, window.location.origin);
    Object.entries(params).forEach(([key, value]) => url.searchParams.append(key, value));
    try {
      const response = await fetch(url.toString());
      if (!response.ok) {
        throw new Error(`HTTP error! status: ${response.status}`);
      }
      return await response.json();
    } catch (error) {
      console.error('Error:', error);
      return null;
    }
  }

  static async fetchRuleMetrics(startTs, endTs, label = '', remote = '') {
    const params = { start_ts: startTs, end_ts: endTs };
    if (label) params.label = label;
    if (remote) params.remote = remote;
    return await this.fetchData(Config.RULE_METRICS_PATH, params);
  }

  static async fetchConfig() {
    return await this.fetchData('/config/');
  }

  static async fetchLabelsAndRemotes() {
    const config = await this.fetchConfig();
    if (!config || !config.relay_configs) {
      return { labels: [], remotes: [] };
    }

    const labels = new Set();
    const remotes = new Set();

    config.relay_configs.forEach((relayConfig) => {
      if (relayConfig.label) labels.add(relayConfig.label);
      if (relayConfig.remotes) {
        relayConfig.remotes.forEach((remote) => remotes.add(remote));
      }
    });

    return {
      labels: Array.from(labels),
      remotes: Array.from(remotes),
    };
  }
}

class ChartManager {
  constructor() {
    this.charts = {};
  }

  initializeCharts() {
    this.charts = {
      connectionCount: this.initChart('connectionCountChart', 'line', 'Connection Count', 'Count'),
      handshakeDuration: this.initChart('handshakeDurationChart', 'line', 'Handshake Duration', 'ms'),
      pingLatency: this.initChart('pingLatencyChart', 'line', 'Ping Latency', 'ms'),
      networkTransmitBytes: this.initChart('networkTransmitBytesChart', 'line', 'Network Transmit', 'MB'),
    };
  }

  initChart(canvasId, type, title, unit) {
    const ctx = $(`#${canvasId}`)[0].getContext('2d');
    const color = Config.CHART_COLORS[canvasId.replace('Chart', '')];

    return new Chart(ctx, {
      type: type,
      data: {
        labels: [],
        datasets: [
          {
            label: title,
            borderColor: color,
            backgroundColor: color.replace('1)', '0.2)'),
            borderWidth: 2,
            data: [],
          },
        ],
      },
      options: this.getChartOptions(title, unit),
    });
  }

  getChartOptions(title, unit) {
    return {
      responsive: true,
      plugins: {
        title: {
          display: true,
          text: title,
          font: { size: 16, weight: 'bold' },
        },
        tooltip: {
          callbacks: {
            label: (context) => `${context.dataset.label}: ${context.parsed.y.toFixed(2)} ${unit}`,
          },
        },
      },
      scales: {
        x: {
          type: 'time',
          time: { unit: 'minute', displayFormats: { minute: 'HH:mm' } },
          title: { display: true, text: 'Time' },
        },
        y: {
          beginAtZero: true,
          title: { display: true, text: unit },
        },
      },
    };
  }

  fillMissingDataPoints(data, startTime, endTime) {
    const filledData = [];
    let currentTime = new Date(startTime);
    const endTimeDate = new Date(endTime);

    while (currentTime <= endTimeDate) {
      const existingPoint = data.find((point) => Math.abs(point.x.getTime() - currentTime.getTime()) < 60000);
      if (existingPoint) {
        filledData.push(existingPoint);
      } else {
        filledData.push({ x: new Date(currentTime), y: null });
      }
      currentTime.setMinutes(currentTime.getMinutes() + 1);
    }

    return filledData;
  }

  updateCharts(metrics, startTime, endTime) {
    // Check whether metrics is null or undefined
    if (!metrics) {
      // If there is no data, reset every chart to an empty state
      Object.values(this.charts).forEach((chart) => {
        chart.data.datasets = [
          {
            label: 'No Data',
            data: [],
          },
        ];
        chart.update();
      });
      return;
    }
    // First sort the data by timestamp in ascending order
    metrics.sort((a, b) => a.timestamp - b.timestamp);
    // Group by label-remote
    const groupedMetrics = this.groupMetricsByLabelRemote(metrics);
    console.log('groupedMetrics', groupedMetrics);

    // Pre-process the data for every metric type
    const processedData = {};

    Object.keys(this.charts).forEach((key) => {
      processedData[key] = groupedMetrics.map((group, index) => {
        const data = group.metrics.map((m) => ({
          x: new Date(m.timestamp * 1000),
          y: this.getMetricValue(key, m),
        }));
        const filledData = this.fillMissingDataPoints(data, startTime, endTime);
        return {
          label: `${group.label} - ${group.remote}`,
          borderColor: this.getColor(index),
          backgroundColor: this.getColor(index, 0.2),
          borderWidth: 2,
          data: filledData,
        };
      });
    });

    // Update each chart
    Object.entries(this.charts).forEach(([key, chart]) => {
      chart.data.datasets = processedData[key];
      chart.update();
    });
  }

  groupMetricsByLabelRemote(metrics) {
    const groups = {};
    metrics.forEach((metric) => {
      const key = `${metric.label}-${metric.remote}`;
      if (!groups[key]) {
        groups[key] = { label: metric.label, remote: metric.remote, metrics: [] };
      }
      groups[key].metrics.push(metric);
    });
    return Object.values(groups);
  }

  getMetricValue(metricType, metric) {
    switch (metricType) {
      case 'connectionCount':
        return metric.tcp_connection_count + metric.udp_connection_count;
      case 'handshakeDuration':
        return Math.max(metric.tcp_handshake_duration, metric.udp_handshake_duration);
      case 'pingLatency':
        return metric.ping_latency;
      case 'networkTransmitBytes':
        return (metric.tcp_network_transmit_bytes + metric.udp_network_transmit_bytes) / Config.BYTE_TO_MB;
      default:
        return 0;
    }
  }

  getColor(index, alpha = 1) {
    const colors = [
      `rgba(255, 99, 132, ${alpha})`,
      `rgba(54, 162, 235, ${alpha})`,
      `rgba(255, 206, 86, ${alpha})`,
      `rgba(75, 192, 192, ${alpha})`,
      `rgba(153, 102, 255, ${alpha})`,
      `rgba(255, 159, 64, ${alpha})`,
    ];
    return colors[index % colors.length];
  }
}

class FilterManager {
  constructor(chartManager, dateRangeManager) {
    this.chartManager = chartManager;
    this.dateRangeManager = dateRangeManager;
    this.$labelFilter = $('#labelFilter');
    this.$remoteFilter = $('#remoteFilter');
    this.relayConfigs = [];
    this.currentStartDate = null;
    this.currentEndDate = null;
    this.setupEventListeners();
    this.loadFilters();
  }

  setupEventListeners() {
    this.$labelFilter.on('change', () => this.onLabelChange());
    this.$remoteFilter.on('change', () => this.applyFilters());
  }

  async loadFilters() {
    const config = await ApiService.fetchConfig();
    if (config && config.relay_configs) {
      this.relayConfigs = config.relay_configs;
      this.populateLabelFilter();
      this.onLabelChange(); // Initialize remotes for the first label
    }
  }

  populateLabelFilter() {
    const labels = [...new Set(this.relayConfigs.map((config) => config.label))];
    this.populateFilter(this.$labelFilter, labels);
  }

  onLabelChange() {
    const selectedLabel = this.$labelFilter.val();
    const remotes = this.getRemotesForLabel(selectedLabel);
    this.populateFilter(this.$remoteFilter, remotes);
    this.applyFilters();
  }

  getRemotesForLabel(label) {
    const config = this.relayConfigs.find((c) => c.label === label);
    return config ? config.remotes : [];
  }

  populateFilter($select, options) {
    $select.empty().append($('<option>', { value: '', text: 'All' }));
    options.forEach((option) => {
      $select.append($('<option>', { value: option, text: option }));
    });
  }

  async applyFilters() {
    const label = this.$labelFilter.val();
    const remote = this.$remoteFilter.val();

    // Use the currently saved date range; fall back to the default 30-minute window
    const endDate = this.currentEndDate || new Date();
    const startDate = this.currentStartDate || new Date(endDate - Config.TIME_WINDOW * 60 * 1000);

    const metrics = await ApiService.fetchRuleMetrics(
      Math.floor(startDate.getTime() / 1000),
      Math.floor(endDate.getTime() / 1000),
      label,
      remote
    );

    this.chartManager.updateCharts(metrics.data, startDate, endDate);
  }

  setDateRange(start, end) {
    this.currentStartDate = start;
    this.currentEndDate = end;
  }
}

class DateRangeManager {
  constructor(chartManager, filterManager) {
    this.chartManager = chartManager;
    this.filterManager = filterManager;
    this.$dateRangeDropdown = $('#dateRangeDropdown');
    this.$dateRangeButton = $('#dateRangeButton');
    this.$dateRangeText = $('#dateRangeText');
    this.$dateRangeInput = $('#dateRangeInput');
    this.setupEventListeners();
  }

  setupEventListeners() {
    this.$dateRangeDropdown.find('.dropdown-item[data-range]').on('click', (e) => this.handlePresetDateRange(e));
    this.$dateRangeButton.on('click', () => this.$dateRangeDropdown.toggleClass('is-active'));
    $(document).on('click', (e) => {
      if (!this.$dateRangeDropdown.has(e.target).length) {
        this.$dateRangeDropdown.removeClass('is-active');
      }
    });
    this.initializeDatePicker();
  }

  handlePresetDateRange(e) {
    e.preventDefault();
    const range = $(e.currentTarget).data('range');
    const [start, end] = this.calculateDateRange(range);
    this.fetchAndUpdateCharts(start, end);
    this.$dateRangeText.text($(e.currentTarget).text());
    this.$dateRangeDropdown.removeClass('is-active');
  }

  calculateDateRange(range) {
    const now = new Date();
    const start = new Date(now - this.getMillisecondsFromRange(range));
    return [start, now];
  }

  getMillisecondsFromRange(range) {
    const rangeMap = {
      '30m': 30 * 60 * 1000,
      '1h': 60 * 60 * 1000,
      '3h': 3 * 60 * 60 * 1000,
      '6h': 6 * 60 * 60 * 1000,
      '12h': 12 * 60 * 60 * 1000,
      '24h': 24 * 60 * 60 * 1000,
      '7d': 7 * 24 * 60 * 60 * 1000,
    };
    return rangeMap[range] || 30 * 60 * 1000; // Default to 30 minutes
  }

  initializeDatePicker() {
    flatpickr(this.$dateRangeInput[0], {
      mode: 'range',
      enableTime: true,
      dateFormat: 'Y-m-d H:i',
      onChange: (selectedDates) => this.handleDatePickerChange(selectedDates),
    });
  }

  handleDatePickerChange(selectedDates) {
    if (selectedDates.length === 2) {
      const [start, end] = selectedDates;
      this.fetchAndUpdateCharts(start, end);
      this.$dateRangeText.text(`${start.toLocaleString()} - ${end.toLocaleString()}`);
      this.$dateRangeDropdown.removeClass('is-active');
    }
  }

  async fetchAndUpdateCharts(start, end) {
    this.filterManager.setDateRange(start, end);
    await this.filterManager.applyFilters();
  }
}

class RuleMetricsModule {
  constructor() {
    this.chartManager = new ChartManager();
    this.filterManager = new FilterManager(this.chartManager);
    this.dateRangeManager = new DateRangeManager(this.chartManager, this.filterManager);
    this.filterManager.dateRangeManager = this.dateRangeManager;
  }

  init() {
    this.chartManager.initializeCharts();
    this.filterManager.applyFilters();
  }
}

// Initialize when the DOM is ready
$(document).ready(() => {
  const ruleMetricsModule = new RuleMetricsModule();
  ruleMetricsModule.init();
});
@@ -165,15 +165,22 @@ func setupRoutes(s *Server) {
	e.GET(metricsPath, echo.WrapHandler(promhttp.Handler()))
	e.GET("/debug/pprof/*", echo.WrapHandler(http.DefaultServeMux))

	// web pages
	e.GET(indexPath, s.index)
	e.GET(connectionsPath, s.ListConnections)
	e.GET(rulesPath, s.ListRules)
	e.GET("/rule_metrics/", s.RuleMetrics)
	e.GET("/logs/", s.LogsPage)

	api := e.Group(apiPrefix)
	api.GET("/config/", s.CurrentConfig)
	api.POST("/config/reload/", s.HandleReload)
	api.GET("/health_check/", s.HandleHealthCheck)
	api.GET("/node_metrics/", s.GetNodeMetrics)
	api.GET("/rule_metrics/", s.GetRuleMetrics)

	// ws
	e.GET("/ws/logs", s.handleWebSocketLogs)
}

func (s *Server) Start() error {
@@ -16,18 +16,20 @@
|
|||||||
<div class="navbar-start">
|
<div class="navbar-start">
|
||||||
<a href="/rules/" class="navbar-item">
|
<a href="/rules/" class="navbar-item">
|
||||||
<span class="icon"><i class="fas fa-list"></i></span>
|
<span class="icon"><i class="fas fa-list"></i></span>
|
||||||
<span>Rule List</span>
|
<span>Rules</span>
|
||||||
|
</a>
|
||||||
|
<a href="/rule_metrics/" class="navbar-item">
|
||||||
|
<span class="icon"><i class="fas fa-chart-line"></i></span>
|
||||||
|
<span>Metrics</span>
|
||||||
|
</a>
|
||||||
|
<a href="/logs/" class="navbar-item">
|
||||||
|
<span class="icon"><i class="fas fa-file-alt"></i></span>
|
||||||
|
<span>Logs</span>
|
||||||
|
</a>
|
||||||
|
<a href="/connections/?conn_type=active/" class="navbar-item">
|
||||||
|
<span class="icon"><i class="fas fa-link"></i></span>
|
||||||
|
<span>Connections</span>
|
||||||
</a>
|
</a>
|
||||||
<div class="navbar-item has-dropdown is-hoverable">
|
|
||||||
<a class="navbar-link">
|
|
||||||
<span class="icon"><i class="fas fa-link"></i></span>
|
|
||||||
<span>Connections</span>
|
|
||||||
</a>
|
|
||||||
<div class="navbar-dropdown">
|
|
||||||
<a href="/connections/?conn_type=active" class="navbar-item">Active</a>
|
|
||||||
<a href="/connections/?conn_type=closed" class="navbar-item">Closed</a>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div class="navbar-end">
|
<div class="navbar-end">
|
||||||
|
@@ -52,6 +52,7 @@
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<!-- </div> -->
|
|
||||||
<script src="js/metrics.js"></script>
|
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<script src="/js/node_metrics.js"></script>
|
||||||
|
</script>
82
echo/internal/web/templates/_rule_metrics_dash.html
Normal file
@@ -0,0 +1,82 @@
|
|||||||
|
<div class="card" id="rule-metrics-card">
|
||||||
|
<header class="card-header is-flex is-flex-wrap-wrap">
|
||||||
|
<p class="card-header-title has-text-centered">Rule Metrics</p>
|
||||||
|
<div class="card-header-icon is-flex-grow-1 is-flex is-justify-content-space-between">
|
||||||
|
<div class="field is-horizontal mr-2">
|
||||||
|
<div class="field-label is-small mr-2">
|
||||||
|
<label class="label" for="labelFilter">Label:</label>
|
||||||
|
</div>
|
||||||
|
<div class="field-body">
|
||||||
|
<div class="field">
|
||||||
|
<div class="control">
|
||||||
|
<div class="select is-small">
|
||||||
|
<select id="labelFilter" aria-label="Filter by label">
|
||||||
|
<option value="">All Labels</option>
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="field is-horizontal mr-2">
|
||||||
|
<div class="field-label is-small mr-2">
|
||||||
|
<label class="label" for="remoteFilter">Remote:</label>
|
||||||
|
</div>
|
||||||
|
<div class="field-body">
|
||||||
|
<div class="field">
|
||||||
|
<div class="control">
|
||||||
|
<div class="select is-small">
|
||||||
|
<select id="remoteFilter" aria-label="Filter by remote">
|
||||||
|
<option value="">All Remotes</option>
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="dropdown" id="dateRangeDropdown">
|
||||||
|
<div class="dropdown-trigger">
|
||||||
|
<button class="button is-small" aria-haspopup="true" aria-controls="dropdown-menu" id="dateRangeButton">
|
||||||
|
<span id="dateRangeText">Select date range</span>
|
||||||
|
<span class="icon is-small">
|
||||||
|
<i class="fas fa-angle-down" aria-hidden="true"></i>
|
||||||
|
</span>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
<div class="dropdown-menu" id="dropdown-menu" role="menu">
|
||||||
|
<div class="dropdown-content">
|
||||||
|
<a href="#" class="dropdown-item" data-range="30m">Last 30 minutes</a>
|
||||||
|
<a href="#" class="dropdown-item" data-range="1h">Last 1 hour</a>
|
||||||
|
<a href="#" class="dropdown-item" data-range="3h">Last 3 hours</a>
|
||||||
|
<a href="#" class="dropdown-item" data-range="6h">Last 6 hours</a>
|
||||||
|
<a href="#" class="dropdown-item" data-range="12h">Last 12 hours</a>
|
||||||
|
<a href="#" class="dropdown-item" data-range="24h">Last 24 hours</a>
|
||||||
|
<a href="#" class="dropdown-item" data-range="7d">Last 7 days</a>
|
||||||
|
<hr class="dropdown-divider" />
|
||||||
|
<a href="#" class="dropdown-item" id="dateRangeInput">Custom range</a>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</header>
|
||||||
|
<div class="card-content">
|
||||||
|
<div class="content">
|
||||||
|
<div class="columns is-multiline">
|
||||||
|
<div class="column is-6">
|
||||||
|
<canvas id="connectionCountChart"></canvas>
|
||||||
|
</div>
|
||||||
|
<div class="column is-6">
|
||||||
|
<canvas id="handshakeDurationChart"></canvas>
|
||||||
|
</div>
|
||||||
|
<div class="column is-12">
|
||||||
|
<canvas id="pingLatencyChart"></canvas>
|
||||||
|
</div>
|
||||||
|
<div class="column is-12">
|
||||||
|
<canvas id="networkTransmitBytesChart"></canvas>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<script src="/js/rule_metrics.js"></script>
|
@@ -1,4 +1,4 @@
|
|||||||
<!doctype html>
|
<!DOCTYPE html>
|
||||||
<html lang="en">
|
<html lang="en">
|
||||||
{{template "_head.html" .}}
|
{{template "_head.html" .}}
|
||||||
<body>
|
<body>
|
||||||
@@ -32,7 +32,7 @@
|
|||||||
</div>
|
</div>
|
||||||
</div>
|
</div>
|
||||||
<!-- metrics -->
|
<!-- metrics -->
|
||||||
{{template "_metrics.html" .}}
|
{{template "_node_metrics_dash.html" .}}
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<!-- Footer -->
|
<!-- Footer -->
|
||||||
176
echo/internal/web/templates/logs.html
Normal file
@@ -0,0 +1,176 @@
|
|||||||
|
<!DOCTYPE html>
|
||||||
|
<html lang="en">
|
||||||
|
{{template "_head.html" .}}
|
||||||
|
<style>
|
||||||
|
.logs-container {
|
||||||
|
height: 700px;
|
||||||
|
overflow-y: auto;
|
||||||
|
font-family: 'Fira Code', monospace;
|
||||||
|
border-radius: 6px;
|
||||||
|
background-color: #f5f5f5;
|
||||||
|
padding: 1rem;
|
||||||
|
transition: background-color 0.3s ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
.log-entry {
|
||||||
|
margin-bottom: 0.5rem;
|
||||||
|
padding: 0.5rem;
|
||||||
|
border-radius: 4px;
|
||||||
|
transition: background-color 0.3s ease;
|
||||||
|
}
|
||||||
|
</style>
|
||||||
|
|
||||||
|
<body>
|
||||||
|
{{ template "_navbar.html" . }}
|
||||||
|
<section class="section">
|
||||||
|
<div class="container-fluid">
|
||||||
|
<h1 class="title is-2 mb-6">Real-time Logs</h1>
|
||||||
|
<div class="card">
|
||||||
|
<div class="card-content">
|
||||||
|
<div class="field has-addons mb-4">
|
||||||
|
<div class="control is-expanded">
|
||||||
|
<input class="input is-medium" type="text" id="filterInput" placeholder="Filter logs..." />
|
||||||
|
</div>
|
||||||
|
<div class="control">
|
||||||
|
<button class="button is-info is-medium" id="filterButton">
|
||||||
|
<span class="icon">
|
||||||
|
<i class="fas fa-filter"></i>
|
||||||
|
</span>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
<div class="buttons mb-4">
|
||||||
|
<button class="button is-warning is-medium" id="pauseButton">
|
||||||
|
<span class="icon">
|
||||||
|
<i class="fas fa-pause"></i>
|
||||||
|
</span>
|
||||||
|
<span>Pause</span>
|
||||||
|
</button>
|
||||||
|
<button class="button is-danger is-medium" id="clearButton">
|
||||||
|
<span class="icon">
|
||||||
|
<i class="fas fa-trash"></i>
|
||||||
|
</span>
|
||||||
|
<span>Clear</span>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
<div id="logsContainer" class="logs-container"></div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</section>
|
||||||
|
|
||||||
|
<script>
|
||||||
|
const logsContainer = document.getElementById('logsContainer');
|
||||||
|
const filterInput = document.getElementById('filterInput');
|
||||||
|
const filterButton = document.getElementById('filterButton');
|
||||||
|
const pauseButton = document.getElementById('pauseButton');
|
||||||
|
const clearButton = document.getElementById('clearButton');
|
||||||
|
let isPaused = false;
|
||||||
|
let ws;
|
||||||
|
let filterTerm = '';
|
||||||
|
|
||||||
|
function connectWebSocket() {
|
||||||
|
ws = new WebSocket('ws://' + window.location.host + '/ws/logs');
|
||||||
|
ws.onopen = function (event) {
|
||||||
|
console.log('WebSocket connection established');
|
||||||
|
};
|
||||||
|
|
||||||
|
ws.onerror = function (event) {
|
||||||
|
console.error('WebSocket error observed:', event);
|
||||||
|
};
|
||||||
|
|
||||||
|
ws.onmessage = function (event) {
|
||||||
|
if (!isPaused) {
|
||||||
|
const logEntry = document.createElement('div');
|
||||||
|
logEntry.innerHTML = formatLogMessage(event.data);
|
||||||
|
if (shouldDisplayLog(logEntry.textContent)) {
|
||||||
|
logsContainer.appendChild(logEntry);
|
||||||
|
logsContainer.scrollTop = logsContainer.scrollHeight;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
ws.onclose = function (event) {
|
||||||
|
console.log('WebSocket connection closed. Reconnecting...');
|
||||||
|
setTimeout(connectWebSocket, 1000);
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function formatLogMessage(message) {
|
||||||
|
try {
|
||||||
|
const logEntry = JSON.parse(message);
|
||||||
|
const timestamp = logEntry.ts;
|
||||||
|
const level = logEntry.level;
|
||||||
|
const msg = logEntry.msg;
|
||||||
|
const logger = logEntry.logger;
|
||||||
|
console.log('Log entry:', logEntry);
|
||||||
|
const levelClass = getLevelClass(level);
|
||||||
|
|
||||||
|
return `
|
||||||
|
<div class="columns is-mobile">
|
||||||
|
<div class="column is-3"><span class="has-text-grey-light is-small">${timestamp}</span></div>
|
||||||
|
<div class="column is-1"><span class="tag ${levelClass} is-small">${level.toUpperCase()}</span></div>
|
||||||
|
<div class="column is-2"><span class="is-info is-light is-small">${logger}</span></div>
|
||||||
|
<div class="column"><span>${msg}</span></div>
|
||||||
|
</div>
|
||||||
|
`;
|
||||||
|
} catch (e) {
|
||||||
|
console.error('Error parsing log message:', e);
|
||||||
|
return `<div class="has-text-danger">${message}</div>`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function getLevelClass(level) {
|
||||||
|
switch (level) {
|
||||||
|
case 'debug':
|
||||||
|
return 'is-light';
|
||||||
|
case 'info':
|
||||||
|
return 'is-info';
|
||||||
|
case 'warn':
|
||||||
|
return 'is-warning';
|
||||||
|
case 'error':
|
||||||
|
return 'is-danger';
|
||||||
|
case 'fatal':
|
||||||
|
return 'is-dark';
|
||||||
|
default:
|
||||||
|
return 'is-light';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function shouldDisplayLog(logText) {
|
||||||
|
return filterTerm === '' || logText.toLowerCase().includes(filterTerm.toLowerCase());
|
||||||
|
}
|
||||||
|
|
||||||
|
function applyFilter() {
|
||||||
|
filterTerm = filterInput.value;
|
||||||
|
const logEntries = logsContainer.getElementsByClassName('log-entry');
|
||||||
|
for (let entry of logEntries) {
|
||||||
|
if (shouldDisplayLog(entry.textContent)) {
|
||||||
|
entry.style.display = '';
|
||||||
|
} else {
|
||||||
|
entry.style.display = 'none';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
connectWebSocket();
|
||||||
|
|
||||||
|
filterButton.addEventListener('click', applyFilter);
|
||||||
|
filterInput.addEventListener('keyup', function (event) {
|
||||||
|
if (event.key === 'Enter') {
|
||||||
|
applyFilter();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
pauseButton.addEventListener('click', function () {
|
||||||
|
isPaused = !isPaused;
|
||||||
|
pauseButton.textContent = isPaused ? 'Resume' : 'Pause';
|
||||||
|
pauseButton.classList.toggle('is-warning');
|
||||||
|
pauseButton.classList.toggle('is-success');
|
||||||
|
});
|
||||||
|
|
||||||
|
clearButton.addEventListener('click', function () {
|
||||||
|
logsContainer.innerHTML = '';
|
||||||
|
});
|
||||||
|
</script>
|
||||||
|
</body>
|
||||||
|
</html>
|
@@ -1,4 +1,4 @@
|
|||||||
<!doctype html>
|
<!DOCTYPE html>
|
||||||
<html lang="en">
|
<html lang="en">
|
||||||
{{template "_head.html" .}}
|
{{template "_head.html" .}}
|
||||||
<body>
|
<body>
|
||||||
@@ -53,7 +53,7 @@
|
|||||||
response.msg + // Use 'msg' as per Go struct
|
response.msg + // Use 'msg' as per Go struct
|
||||||
' (Latency: ' +
|
' (Latency: ' +
|
||||||
response.latency + // Ensure this matches the Go struct field name
|
response.latency + // Ensure this matches the Go struct field name
|
||||||
'ms)',
|
'ms)'
|
||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
// If error code is not 0, show error message
|
// If error code is not 0, show error message
|
||||||
14
echo/internal/web/templates/rule_metrics.html
Normal file
@@ -0,0 +1,14 @@
<!DOCTYPE html>
<html lang="en">
  {{template "_head.html" .}}
  <body>
    {{ template "_navbar.html" . }}
    <section class="section">
      <div class="container">
        <h1 class="title">Rule Metrics</h1>

        {{template "_rule_metrics_dash.html" .}}
      </div>
    </section>
  </body>
</html>
@@ -11,6 +11,8 @@ import (
|
|||||||
var (
|
var (
|
||||||
doOnce sync.Once
|
doOnce sync.Once
|
||||||
globalInitd bool
|
globalInitd bool
|
||||||
|
|
||||||
|
globalWebSocketSyncher *WebSocketLogSyncher
|
||||||
)
|
)
|
||||||
|
|
||||||
func initLogger(logLevel string, replaceGlobal bool) (*zap.Logger, error) {
|
func initLogger(logLevel string, replaceGlobal bool) (*zap.Logger, error) {
|
||||||
@@ -18,8 +20,8 @@ func initLogger(logLevel string, replaceGlobal bool) (*zap.Logger, error) {
|
|||||||
if err := level.UnmarshalText([]byte(logLevel)); err != nil {
|
if err := level.UnmarshalText([]byte(logLevel)); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
writers := []zapcore.WriteSyncer{zapcore.AddSync(os.Stdout)}
|
|
||||||
encoder := zapcore.EncoderConfig{
|
consoleEncoder := zapcore.NewConsoleEncoder(zapcore.EncoderConfig{
|
||||||
TimeKey: "ts",
|
TimeKey: "ts",
|
||||||
LevelKey: "level",
|
LevelKey: "level",
|
||||||
MessageKey: "msg",
|
MessageKey: "msg",
|
||||||
@@ -27,12 +29,29 @@ func initLogger(logLevel string, replaceGlobal bool) (*zap.Logger, error) {
|
|||||||
EncodeLevel: zapcore.LowercaseColorLevelEncoder,
|
EncodeLevel: zapcore.LowercaseColorLevelEncoder,
|
||||||
EncodeTime: zapcore.RFC3339TimeEncoder,
|
EncodeTime: zapcore.RFC3339TimeEncoder,
|
||||||
EncodeName: zapcore.FullNameEncoder,
|
EncodeName: zapcore.FullNameEncoder,
|
||||||
}
|
})
|
||||||
core := zapcore.NewCore(
|
stdoutCore := zapcore.NewCore(consoleEncoder, zapcore.AddSync(os.Stdout), level)
|
||||||
zapcore.NewConsoleEncoder(encoder),
|
|
||||||
zapcore.NewMultiWriteSyncer(writers...),
|
jsonEncoder := zapcore.NewJSONEncoder(zapcore.EncoderConfig{
|
||||||
level,
|
TimeKey: "ts",
|
||||||
)
|
LevelKey: "level",
|
||||||
|
NameKey: "logger",
|
||||||
|
CallerKey: "caller",
|
||||||
|
MessageKey: "msg",
|
||||||
|
StacktraceKey: "stacktrace",
|
||||||
|
LineEnding: zapcore.DefaultLineEnding,
|
||||||
|
EncodeLevel: zapcore.LowercaseLevelEncoder,
|
||||||
|
EncodeTime: zapcore.ISO8601TimeEncoder,
|
||||||
|
EncodeDuration: zapcore.SecondsDurationEncoder,
|
||||||
|
EncodeCaller: zapcore.ShortCallerEncoder,
|
||||||
|
})
|
||||||
|
|
||||||
|
globalWebSocketSyncher = NewWebSocketLogSyncher()
|
||||||
|
wsCore := zapcore.NewCore(jsonEncoder, globalWebSocketSyncher, level)
|
||||||
|
|
||||||
|
// combine the stdout core and the websocket core
|
||||||
|
core := zapcore.NewTee(stdoutCore, wsCore)
|
||||||
|
|
||||||
l := zap.New(core)
|
l := zap.New(core)
|
||||||
if replaceGlobal {
|
if replaceGlobal {
|
||||||
zap.ReplaceGlobals(l)
|
zap.ReplaceGlobals(l)
|
||||||
52
echo/pkg/log/ws.go
Normal file
@@ -0,0 +1,52 @@
package log

import (
	"encoding/json"
	"net"
	"sync"

	"github.com/gobwas/ws"
)

type WebSocketLogSyncher struct {
	conn net.Conn
	mu   sync.Mutex
}

func NewWebSocketLogSyncher() *WebSocketLogSyncher {
	return &WebSocketLogSyncher{}
}

func (wsSync *WebSocketLogSyncher) Write(p []byte) (n int, err error) {
	wsSync.mu.Lock()
	defer wsSync.mu.Unlock()

	if wsSync.conn != nil {
		var logEntry map[string]interface{}
		if err := json.Unmarshal(p, &logEntry); err == nil {
			jsonData, _ := json.Marshal(logEntry)
			_ = ws.WriteFrame(wsSync.conn, ws.NewTextFrame(jsonData))
		}

		if err != nil {
			return 0, err
		}
	}
	return len(p), nil
}

func (wsSync *WebSocketLogSyncher) Sync() error {
	return nil
}

func (wsSync *WebSocketLogSyncher) SetWSConn(conn net.Conn) {
	wsSync.mu.Lock()
	defer wsSync.mu.Unlock()
	wsSync.conn = conn
}

func SetWebSocketConn(conn net.Conn) {
	if globalWebSocketSyncher != nil {
		globalWebSocketSyncher.SetWSConn(conn)
	}
}
165
echo/pkg/metric_reader/node.go
Normal file
@@ -0,0 +1,165 @@
|
|||||||
|
package metric_reader
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"math"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
dto "github.com/prometheus/client_model/go"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
metricCPUSecondsTotal = "node_cpu_seconds_total"
|
||||||
|
metricLoad1 = "node_load1"
|
||||||
|
metricLoad5 = "node_load5"
|
||||||
|
metricLoad15 = "node_load15"
|
||||||
|
metricMemoryTotalBytes = "node_memory_total_bytes"
|
||||||
|
metricMemoryActiveBytes = "node_memory_active_bytes"
|
||||||
|
metricMemoryWiredBytes = "node_memory_wired_bytes"
|
||||||
|
metricMemoryMemTotalBytes = "node_memory_MemTotal_bytes"
|
||||||
|
metricMemoryMemAvailableBytes = "node_memory_MemAvailable_bytes"
|
||||||
|
metricFilesystemSizeBytes = "node_filesystem_size_bytes"
|
||||||
|
metricFilesystemAvailBytes = "node_filesystem_avail_bytes"
|
||||||
|
metricNetworkReceiveBytesTotal = "node_network_receive_bytes_total"
|
||||||
|
metricNetworkTransmitBytesTotal = "node_network_transmit_bytes_total"
|
||||||
|
)
|
||||||
|
|
||||||
|
type NodeMetrics struct {
|
||||||
|
// cpu
|
||||||
|
CpuCoreCount int `json:"cpu_core_count"`
|
||||||
|
CpuLoadInfo string `json:"cpu_load_info"`
|
||||||
|
CpuUsagePercent float64 `json:"cpu_usage_percent"`
|
||||||
|
|
||||||
|
// memory
|
||||||
|
MemoryTotalBytes int64 `json:"memory_total_bytes"`
|
||||||
|
MemoryUsageBytes int64 `json:"memory_usage_bytes"`
|
||||||
|
MemoryUsagePercent float64 `json:"memory_usage_percent"`
|
||||||
|
|
||||||
|
// disk
|
||||||
|
DiskTotalBytes int64 `json:"disk_total_bytes"`
|
||||||
|
DiskUsageBytes int64 `json:"disk_usage_bytes"`
|
||||||
|
DiskUsagePercent float64 `json:"disk_usage_percent"`
|
||||||
|
|
||||||
|
// network
|
||||||
|
NetworkReceiveBytesTotal int64 `json:"network_receive_bytes_total"`
|
||||||
|
NetworkTransmitBytesTotal int64 `json:"network_transmit_bytes_total"`
|
||||||
|
NetworkReceiveBytesRate float64 `json:"network_receive_bytes_rate"`
|
||||||
|
NetworkTransmitBytesRate float64 `json:"network_transmit_bytes_rate"`
|
||||||
|
|
||||||
|
SyncTime time.Time
|
||||||
|
}
|
||||||
|
type cpuStats struct {
|
||||||
|
totalTime float64
|
||||||
|
idleTime float64
|
||||||
|
cores int
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *readerImpl) ParseNodeMetrics(metricMap map[string]*dto.MetricFamily, nm *NodeMetrics) error {
|
||||||
|
isMac := metricMap[metricMemoryTotalBytes] != nil
|
||||||
|
cpu := &cpuStats{}
|
||||||
|
|
||||||
|
b.processCPUMetrics(metricMap, cpu)
|
||||||
|
b.processMemoryMetrics(metricMap, nm, isMac)
|
||||||
|
b.processDiskMetrics(metricMap, nm)
|
||||||
|
b.processNetworkMetrics(metricMap, nm)
|
||||||
|
b.processLoadMetrics(metricMap, nm)
|
||||||
|
|
||||||
|
b.calculateFinalMetrics(nm, cpu)
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *readerImpl) processCPUMetrics(metricMap map[string]*dto.MetricFamily, cpu *cpuStats) {
|
||||||
|
if cpuMetric, ok := metricMap[metricCPUSecondsTotal]; ok {
|
||||||
|
for _, metric := range cpuMetric.Metric {
|
||||||
|
value := getMetricValue(metric, cpuMetric.GetType())
|
||||||
|
cpu.totalTime += value
|
||||||
|
if getLabel(metric, "mode") == "idle" {
|
||||||
|
cpu.idleTime += value
|
||||||
|
cpu.cores++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *readerImpl) processMemoryMetrics(metricMap map[string]*dto.MetricFamily, nm *NodeMetrics, isMac bool) {
|
||||||
|
if isMac {
|
||||||
|
nm.MemoryTotalBytes = sumInt64Metric(metricMap, metricMemoryTotalBytes)
|
||||||
|
nm.MemoryUsageBytes = sumInt64Metric(metricMap, metricMemoryActiveBytes) + sumInt64Metric(metricMap, metricMemoryWiredBytes)
|
||||||
|
} else {
|
||||||
|
nm.MemoryTotalBytes = sumInt64Metric(metricMap, metricMemoryMemTotalBytes)
|
||||||
|
availableMemory := sumInt64Metric(metricMap, metricMemoryMemAvailableBytes)
|
||||||
|
nm.MemoryUsageBytes = nm.MemoryTotalBytes - availableMemory
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *readerImpl) processDiskMetrics(metricMap map[string]*dto.MetricFamily, nm *NodeMetrics) {
|
||||||
|
nm.DiskTotalBytes = sumInt64Metric(metricMap, metricFilesystemSizeBytes)
|
||||||
|
availableDisk := sumInt64Metric(metricMap, metricFilesystemAvailBytes)
|
||||||
|
nm.DiskUsageBytes = nm.DiskTotalBytes - availableDisk
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *readerImpl) processNetworkMetrics(metricMap map[string]*dto.MetricFamily, nm *NodeMetrics) {
|
||||||
|
nm.NetworkReceiveBytesTotal = sumInt64Metric(metricMap, metricNetworkReceiveBytesTotal)
|
||||||
|
nm.NetworkTransmitBytesTotal = sumInt64Metric(metricMap, metricNetworkTransmitBytesTotal)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *readerImpl) processLoadMetrics(metricMap map[string]*dto.MetricFamily, nm *NodeMetrics) {
|
||||||
|
loads := []string{metricLoad1, metricLoad5, metricLoad15}
|
||||||
|
for _, load := range loads {
|
||||||
|
value := sumFloat64Metric(metricMap, load)
|
||||||
|
nm.CpuLoadInfo += fmt.Sprintf("%.2f|", value)
|
||||||
|
}
|
||||||
|
nm.CpuLoadInfo = strings.TrimRight(nm.CpuLoadInfo, "|")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *readerImpl) calculateFinalMetrics(nm *NodeMetrics, cpu *cpuStats) {
|
||||||
|
nm.CpuCoreCount = cpu.cores
|
||||||
|
nm.CpuUsagePercent = 100 * (cpu.totalTime - cpu.idleTime) / cpu.totalTime
|
||||||
|
nm.MemoryUsagePercent = 100 * float64(nm.MemoryUsageBytes) / float64(nm.MemoryTotalBytes)
|
||||||
|
nm.DiskUsagePercent = 100 * float64(nm.DiskUsageBytes) / float64(nm.DiskTotalBytes)
|
||||||
|
|
||||||
|
nm.CpuUsagePercent = math.Round(nm.CpuUsagePercent*100) / 100
|
||||||
|
nm.MemoryUsagePercent = math.Round(nm.MemoryUsagePercent*100) / 100
|
||||||
|
nm.DiskUsagePercent = math.Round(nm.DiskUsagePercent*100) / 100
|
||||||
|
|
||||||
|
if b.lastMetrics != nil {
|
||||||
|
duration := time.Since(b.lastMetrics.SyncTime).Seconds()
|
||||||
|
if duration > 0.1 {
|
||||||
|
nm.NetworkReceiveBytesRate = math.Max(0, float64(nm.NetworkReceiveBytesTotal-b.lastMetrics.NetworkReceiveBytesTotal)/duration)
|
||||||
|
nm.NetworkTransmitBytesRate = math.Max(0, float64(nm.NetworkTransmitBytesTotal-b.lastMetrics.NetworkTransmitBytesTotal)/duration)
|
||||||
|
nm.NetworkReceiveBytesRate = math.Round(nm.NetworkReceiveBytesRate)
|
||||||
|
nm.NetworkTransmitBytesRate = math.Round(nm.NetworkTransmitBytesRate)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func sumInt64Metric(metricMap map[string]*dto.MetricFamily, metricName string) int64 {
|
||||||
|
ret := int64(0)
|
||||||
|
if metric, ok := metricMap[metricName]; ok && len(metric.Metric) > 0 {
|
||||||
|
for _, m := range metric.Metric {
|
||||||
|
ret += int64(getMetricValue(m, metric.GetType()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ret
|
||||||
|
}
|
||||||
|
|
||||||
|
func sumFloat64Metric(metricMap map[string]*dto.MetricFamily, metricName string) float64 {
|
||||||
|
ret := float64(0)
|
||||||
|
if metric, ok := metricMap[metricName]; ok && len(metric.Metric) > 0 {
|
||||||
|
for _, m := range metric.Metric {
|
||||||
|
ret += getMetricValue(m, metric.GetType())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
func getLabel(metric *dto.Metric, name string) string {
|
||||||
|
for _, label := range metric.Label {
|
||||||
|
if label.GetName() == name {
|
||||||
|
return label.GetValue()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
@@ -2,25 +2,28 @@ package metric_reader
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
|
||||||
"io"
|
"io"
|
||||||
"net/http"
|
"net/http"
|
||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
|
"github.com/pkg/errors"
|
||||||
dto "github.com/prometheus/client_model/go"
|
dto "github.com/prometheus/client_model/go"
|
||||||
"github.com/prometheus/common/expfmt"
|
"github.com/prometheus/common/expfmt"
|
||||||
"go.uber.org/zap"
|
"go.uber.org/zap"
|
||||||
)
|
)
|
||||||
|
|
||||||
type Reader interface {
|
type Reader interface {
|
||||||
ReadOnce(ctx context.Context) (*NodeMetrics, error)
|
ReadOnce(ctx context.Context) (*NodeMetrics, map[string]*RuleMetrics, error)
|
||||||
}
|
}
|
||||||
|
|
||||||
type readerImpl struct {
|
type readerImpl struct {
|
||||||
metricsURL string
|
metricsURL string
|
||||||
httpClient *http.Client
|
httpClient *http.Client
|
||||||
lastMetrics *NodeMetrics
|
|
||||||
|
lastMetrics *NodeMetrics
|
||||||
|
lastRuleMetrics map[string]*RuleMetrics // key: label value: RuleMetrics
|
||||||
|
l *zap.SugaredLogger
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewReader(metricsURL string) *readerImpl {
|
func NewReader(metricsURL string) *readerImpl {
|
||||||
@@ -28,267 +31,47 @@ func NewReader(metricsURL string) *readerImpl {
|
|||||||
return &readerImpl{
|
return &readerImpl{
|
||||||
httpClient: c,
|
httpClient: c,
|
||||||
metricsURL: metricsURL,
|
metricsURL: metricsURL,
|
||||||
|
l: zap.S().Named("metric_reader"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (b *readerImpl) parsePingInfo(metricMap map[string]*dto.MetricFamily, nm *NodeMetrics) error {
|
func (b *readerImpl) ReadOnce(ctx context.Context) (*NodeMetrics, map[string]*RuleMetrics, error) {
|
||||||
metric, ok := metricMap["ehco_ping_response_duration_seconds"]
|
metricMap, err := b.fetchMetrics(ctx)
|
||||||
if !ok {
|
|
||||||
// this metric is optional when enable_ping = false
|
|
||||||
zap.S().Debug("ping metric not found")
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
for _, m := range metric.Metric {
|
|
||||||
g := m.GetHistogram()
|
|
||||||
ip := ""
|
|
||||||
val := float64(g.GetSampleSum()) / float64(g.GetSampleCount()) * 1000 // to ms
|
|
||||||
for _, label := range m.GetLabel() {
|
|
||||||
if label.GetName() == "ip" {
|
|
||||||
ip = label.GetValue()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
nm.PingMetrics = append(nm.PingMetrics, PingMetric{Latency: val, Target: ip})
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *readerImpl) parseCpuInfo(metricMap map[string]*dto.MetricFamily, nm *NodeMetrics) error {
|
|
||||||
handleMetric := func(metricName string, handleValue func(float64, string)) error {
|
|
||||||
metric, ok := metricMap[metricName]
|
|
||||||
if !ok {
|
|
||||||
return fmt.Errorf("%s not found", metricName)
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, m := range metric.Metric {
|
|
||||||
g := m.GetCounter()
|
|
||||||
mode := ""
|
|
||||||
for _, label := range m.GetLabel() {
|
|
||||||
if label.GetName() == "mode" {
|
|
||||||
mode = label.GetValue()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
handleValue(g.GetValue(), mode)
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
var (
|
|
||||||
totalIdleTime float64
|
|
||||||
totalCpuTime float64
|
|
||||||
cpuCores int
|
|
||||||
)
|
|
||||||
|
|
||||||
err := handleMetric("node_cpu_seconds_total", func(val float64, mode string) {
|
|
||||||
totalCpuTime += val
|
|
||||||
if mode == "idle" {
|
|
||||||
totalIdleTime += val
|
|
||||||
cpuCores++
|
|
||||||
}
|
|
||||||
})
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return nil, nil, errors.Wrap(err, "failed to fetch metrics")
|
||||||
|
}
|
||||||
|
nm := &NodeMetrics{SyncTime: time.Now()}
|
||||||
|
if err := b.ParseNodeMetrics(metricMap, nm); err != nil {
|
||||||
|
return nil, nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
nm.CpuCoreCount = cpuCores
|
rm := make(map[string]*RuleMetrics)
|
||||||
nm.CpuUsagePercent = 100 * (totalCpuTime - totalIdleTime) / totalCpuTime
|
if err := b.ParseRuleMetrics(metricMap, rm); err != nil {
|
||||||
for _, load := range []string{"1", "5", "15"} {
|
return nil, nil, err
|
||||||
loadMetricName := fmt.Sprintf("node_load%s", load)
|
|
||||||
loadMetric, ok := metricMap[loadMetricName]
|
|
||||||
if !ok {
|
|
||||||
return fmt.Errorf("%s not found", loadMetricName)
|
|
||||||
}
|
|
||||||
for _, m := range loadMetric.Metric {
|
|
||||||
g := m.GetGauge()
|
|
||||||
nm.CpuLoadInfo += fmt.Sprintf("%.2f|", g.GetValue())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
nm.CpuLoadInfo = strings.TrimRight(nm.CpuLoadInfo, "|")
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *readerImpl) parseMemoryInfo(metricMap map[string]*dto.MetricFamily, nm *NodeMetrics) error {
|
|
||||||
handleMetric := func(metricName string, handleValue func(float64)) error {
|
|
||||||
metric, ok := metricMap[metricName]
|
|
||||||
if !ok {
|
|
||||||
return fmt.Errorf("%s not found", metricName)
|
|
||||||
}
|
|
||||||
for _, m := range metric.Metric {
|
|
||||||
g := m.GetGauge()
|
|
||||||
handleValue(g.GetValue())
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
isMac := false
|
|
||||||
if _, ok := metricMap["node_memory_total_bytes"]; ok {
|
|
||||||
isMac = true
|
|
||||||
}
|
|
||||||
|
|
||||||
if isMac {
|
|
||||||
err := handleMetric("node_memory_total_bytes", func(val float64) {
|
|
||||||
nm.MemoryTotalBytes = val
|
|
||||||
})
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
err = handleMetric("node_memory_active_bytes", func(val float64) {
|
|
||||||
nm.MemoryUsageBytes += val
|
|
||||||
})
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
err = handleMetric("node_memory_wired_bytes", func(val float64) {
|
|
||||||
nm.MemoryUsageBytes += val
|
|
||||||
})
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
err := handleMetric("node_memory_MemTotal_bytes", func(val float64) {
|
|
||||||
nm.MemoryTotalBytes = val
|
|
||||||
})
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
err = handleMetric("node_memory_MemAvailable_bytes", func(val float64) {
|
|
||||||
nm.MemoryUsageBytes = nm.MemoryTotalBytes - val
|
|
||||||
})
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if nm.MemoryTotalBytes != 0 {
|
|
||||||
nm.MemoryUsagePercent = 100 * nm.MemoryUsageBytes / nm.MemoryTotalBytes
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *readerImpl) parseDiskInfo(metricMap map[string]*dto.MetricFamily, nm *NodeMetrics) error {
|
|
||||||
handleMetric := func(metricName string, handleValue func(float64)) error {
|
|
||||||
forMac := false
|
|
||||||
diskMap := make(map[string]float64)
|
|
||||||
metric, ok := metricMap[metricName]
|
|
||||||
if !ok {
|
|
||||||
return fmt.Errorf("%s not found", metricName)
|
|
||||||
}
|
|
||||||
for _, m := range metric.Metric {
|
|
||||||
g := m.GetGauge()
|
|
||||||
disk := ""
|
|
||||||
for _, label := range m.GetLabel() {
|
|
||||||
if label.GetName() == "device" {
|
|
||||||
disk = getDiskName(label.GetValue())
|
|
||||||
}
|
|
||||||
if label.GetName() == "fstype" && label.GetValue() == "apfs" {
|
|
||||||
forMac = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
diskMap[disk] = g.GetValue()
|
|
||||||
}
|
|
||||||
// On macOS with the apfs filesystem several disks may report the same size, because apfs volumes share the underlying physical disk
|
|
||||||
seenVal := map[float64]bool{}
|
|
||||||
for _, val := range diskMap {
|
|
||||||
if seenVal[val] && forMac {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
handleValue(val)
|
|
||||||
seenVal[val] = true
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
err := handleMetric("node_filesystem_size_bytes", func(val float64) {
|
|
||||||
nm.DiskTotalBytes += val
|
|
||||||
})
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
var availBytes float64
|
|
||||||
err = handleMetric("node_filesystem_avail_bytes", func(val float64) {
|
|
||||||
availBytes += val
|
|
||||||
})
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
nm.DiskUsageBytes = nm.DiskTotalBytes - availBytes
|
|
||||||
if nm.DiskTotalBytes != 0 {
|
|
||||||
nm.DiskUsagePercent = 100 * nm.DiskUsageBytes / nm.DiskTotalBytes
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b *readerImpl) parseNetworkInfo(metricMap map[string]*dto.MetricFamily, nm *NodeMetrics) error {
	now := time.Now()
	handleMetric := func(metricName string, handleValue func(float64)) error {
		metric, ok := metricMap[metricName]
		if !ok {
			return fmt.Errorf("%s not found", metricName)
		}
		for _, m := range metric.Metric {
			g := m.GetCounter()
			handleValue(g.GetValue())
		}
		return nil
	}

	err := handleMetric("node_network_receive_bytes_total", func(val float64) {
		nm.NetworkReceiveBytesTotal += val
	})
	if err != nil {
		return err
	}

	err = handleMetric("node_network_transmit_bytes_total", func(val float64) {
		nm.NetworkTransmitBytesTotal += val
	})
	if err != nil {
		return err
	}

	if b.lastMetrics != nil {
		passedTime := now.Sub(b.lastMetrics.SyncTime).Seconds()
		nm.NetworkReceiveBytesRate = (nm.NetworkReceiveBytesTotal - b.lastMetrics.NetworkReceiveBytesTotal) / passedTime
		nm.NetworkTransmitBytesRate = (nm.NetworkTransmitBytesTotal - b.lastMetrics.NetworkTransmitBytesTotal) / passedTime
	}
	return nil
}

func (b *readerImpl) ReadOnce(ctx context.Context) (*NodeMetrics, error) {
	response, err := b.httpClient.Get(b.metricsURL)
	if err != nil {
		return nil, err
	}
	defer response.Body.Close()

	body, err := io.ReadAll(response.Body)
	if err != nil {
		return nil, err
	}
	var parser expfmt.TextParser
	parsed, err := parser.TextToMetricFamilies(strings.NewReader(string(body)))
	if err != nil {
		return nil, err
	}
	nm := &NodeMetrics{SyncTime: time.Now(), PingMetrics: []PingMetric{}}
	if err := b.parseCpuInfo(parsed, nm); err != nil {
		return nil, err
	}
	if err := b.parseMemoryInfo(parsed, nm); err != nil {
		return nil, err
	}
	if err := b.parseDiskInfo(parsed, nm); err != nil {
		return nil, err
	}
	if err := b.parseNetworkInfo(parsed, nm); err != nil {
		return nil, err
	}
	if err := b.parsePingInfo(parsed, nm); err != nil {
		return nil, err
	}

	b.lastMetrics = nm
	return nm, nil
}

In the updated reader shown by this diff, the tail of ReadOnce also records the parsed rule metrics before returning: `b.lastRuleMetrics = rm` followed by `return nm, rm, nil`.

func (r *readerImpl) fetchMetrics(ctx context.Context) (map[string]*dto.MetricFamily, error) {
	req, err := http.NewRequestWithContext(ctx, "GET", r.metricsURL, nil)
	if err != nil {
		return nil, errors.Wrap(err, "failed to create request")
	}

	resp, err := r.httpClient.Do(req)
	if err != nil {
		return nil, errors.Wrap(err, "failed to send request")
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, errors.Wrap(err, "failed to read response body")
	}

	var parser expfmt.TextParser
	return parser.TextToMetricFamilies(strings.NewReader(string(body)))
}
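
For context, a minimal sketch (not part of the diff) of what fetchMetrics hands back: expfmt parses the Prometheus text exposition format into dto.MetricFamily values keyed by metric name. The payload below is invented sample output from a node exporter.

package main

import (
	"fmt"
	"strings"

	"github.com/prometheus/common/expfmt"
)

func main() {
	// Hypothetical exporter output in Prometheus text format.
	payload := `# TYPE node_memory_MemTotal_bytes gauge
node_memory_MemTotal_bytes 8.589934592e+09
# TYPE node_network_receive_bytes_total counter
node_network_receive_bytes_total{device="eth0"} 1.2345e+07
`
	var parser expfmt.TextParser
	families, err := parser.TextToMetricFamilies(strings.NewReader(payload))
	if err != nil {
		panic(err)
	}
	for name, mf := range families {
		// Each family carries its type (GAUGE/COUNTER) and a list of samples.
		fmt.Println(name, mf.GetType(), len(mf.Metric))
	}
}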
echo/pkg/metric_reader/rule.go (new file, 146 lines)
@@ -0,0 +1,146 @@
package metric_reader

import (
	"time"

	dto "github.com/prometheus/client_model/go"
)

const (
	metricConnectionCount   = "ehco_traffic_current_connection_count"
	metricNetworkTransmit   = "ehco_traffic_network_transmit_bytes"
	metricPingResponse      = "ehco_ping_response_duration_milliseconds"
	metricHandshakeDuration = "ehco_traffic_handshake_duration_milliseconds"

	labelKey    = "label"
	remoteKey   = "remote"
	connTypeKey = "conn_type"
	flowKey     = "flow"
	ipKey       = "ip"
)

type PingMetric struct {
	Latency int64  `json:"latency"` // in ms
	Target  string `json:"target"`
}

type RuleMetrics struct {
	Label string // rule label

	PingMetrics map[string]*PingMetric // key: remote

	TCPConnectionCount      map[string]int64 // key: remote
	TCPHandShakeDuration    map[string]int64 // key: remote, in ms
	TCPNetworkTransmitBytes map[string]int64 // key: remote

	UDPConnectionCount      map[string]int64 // key: remote
	UDPHandShakeDuration    map[string]int64 // key: remote, in ms
	UDPNetworkTransmitBytes map[string]int64 // key: remote

	SyncTime time.Time
}

func (b *readerImpl) ParseRuleMetrics(metricMap map[string]*dto.MetricFamily, rm map[string]*RuleMetrics) error {
	requiredMetrics := []string{
		metricConnectionCount,
		metricNetworkTransmit,
		metricPingResponse,
		metricHandshakeDuration,
	}

	for _, metricName := range requiredMetrics {
		metricFamily, ok := metricMap[metricName]
		if !ok {
			continue
		}

		for _, metric := range metricFamily.Metric {
			labels := getLabelMap(metric)
			value := int64(getMetricValue(metric, metricFamily.GetType()))
			label, ok := labels[labelKey]
			if !ok || label == "" {
				continue
			}

			ruleMetric := b.ensureRuleMetric(rm, label)

			switch metricName {
			case metricConnectionCount:
				b.updateConnectionCount(ruleMetric, labels, value)
			case metricNetworkTransmit:
				b.updateNetworkTransmit(ruleMetric, labels, value)
			case metricPingResponse:
				b.updatePingMetrics(ruleMetric, labels, value)
			case metricHandshakeDuration:
				b.updateHandshakeDuration(ruleMetric, labels, value)
			}
		}
	}
	return nil
}

func (b *readerImpl) ensureRuleMetric(rm map[string]*RuleMetrics, label string) *RuleMetrics {
	if _, ok := rm[label]; !ok {
		rm[label] = &RuleMetrics{
			Label:                   label,
			PingMetrics:             make(map[string]*PingMetric),
			TCPConnectionCount:      make(map[string]int64),
			TCPHandShakeDuration:    make(map[string]int64),
			TCPNetworkTransmitBytes: make(map[string]int64),
			UDPConnectionCount:      make(map[string]int64),
			UDPHandShakeDuration:    make(map[string]int64),
			UDPNetworkTransmitBytes: make(map[string]int64),

			SyncTime: time.Now(),
		}
	}
	return rm[label]
}

func (b *readerImpl) updateConnectionCount(rm *RuleMetrics, labels map[string]string, value int64) {
	key := labels[remoteKey]
	switch labels[connTypeKey] {
	case "tcp":
		rm.TCPConnectionCount[key] = value
	default:
		rm.UDPConnectionCount[key] = value
	}
}

func (b *readerImpl) updateNetworkTransmit(rm *RuleMetrics, labels map[string]string, value int64) {
	if labels[flowKey] == "read" {
		key := labels[remoteKey]
		switch labels[connTypeKey] {
		case "tcp":
			rm.TCPNetworkTransmitBytes[key] += value
		default:
			rm.UDPNetworkTransmitBytes[key] += value
		}
	}
}

func (b *readerImpl) updatePingMetrics(rm *RuleMetrics, labels map[string]string, value int64) {
	remote := labels[remoteKey]
	rm.PingMetrics[remote] = &PingMetric{
		Latency: value,
		Target:  labels[ipKey],
	}
}

func (b *readerImpl) updateHandshakeDuration(rm *RuleMetrics, labels map[string]string, value int64) {
	key := labels[remoteKey]
	switch labels[connTypeKey] {
	case "tcp":
		rm.TCPHandShakeDuration[key] = value
	default:
		rm.UDPHandShakeDuration[key] = value
	}
}

func getLabelMap(metric *dto.Metric) map[string]string {
	labels := make(map[string]string)
	for _, label := range metric.Label {
		labels[label.GetName()] = label.GetValue()
	}
	return labels
}
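
To make the label plumbing concrete, here is a small hedged sketch (assumed to live in the same metric_reader package) of how one ehco gauge sample with label/remote/conn_type labels flows through getLabelMap and into a RuleMetrics entry; the label and address values are invented.

package metric_reader

import (
	"fmt"

	dto "github.com/prometheus/client_model/go"
	"google.golang.org/protobuf/proto"
)

func exampleRuleSample(b *readerImpl) {
	// One invented sample of ehco_traffic_current_connection_count.
	m := &dto.Metric{
		Label: []*dto.LabelPair{
			{Name: proto.String("label"), Value: proto.String("tokyo-relay")},
			{Name: proto.String("remote"), Value: proto.String("1.2.3.4:443")},
			{Name: proto.String("conn_type"), Value: proto.String("tcp")},
		},
		Gauge: &dto.Gauge{Value: proto.Float64(7)},
	}

	labels := getLabelMap(m)
	rm := map[string]*RuleMetrics{}
	rule := b.ensureRuleMetric(rm, labels["label"])
	b.updateConnectionCount(rule, labels, int64(getMetricValue(m, dto.MetricType_GAUGE)))

	fmt.Println(rule.TCPConnectionCount["1.2.3.4:443"]) // 7
}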
@@ -1,38 +0,0 @@
package metric_reader

import (
	"time"
)

type NodeMetrics struct {
	// cpu
	CpuCoreCount    int     `json:"cpu_core_count"`
	CpuLoadInfo     string  `json:"cpu_load_info"`
	CpuUsagePercent float64 `json:"cpu_usage_percent"`

	// memory
	MemoryTotalBytes   float64 `json:"memory_total_bytes"`
	MemoryUsageBytes   float64 `json:"memory_usage_bytes"`
	MemoryUsagePercent float64 `json:"memory_usage_percent"`

	// disk
	DiskTotalBytes   float64 `json:"disk_total_bytes"`
	DiskUsageBytes   float64 `json:"disk_usage_bytes"`
	DiskUsagePercent float64 `json:"disk_usage_percent"`

	// network
	NetworkReceiveBytesTotal  float64 `json:"network_receive_bytes_total"`
	NetworkTransmitBytesTotal float64 `json:"network_transmit_bytes_total"`
	NetworkReceiveBytesRate   float64 `json:"network_receive_bytes_rate"`
	NetworkTransmitBytesRate  float64 `json:"network_transmit_bytes_rate"`

	// ping
	PingMetrics []PingMetric `json:"ping_metrics"`

	SyncTime time.Time
}

type PingMetric struct {
	Latency float64 `json:"latency"` // in ms
	Target  string  `json:"target"`
}

@@ -1,22 +1,46 @@
 package metric_reader
 
-import "regexp"
-
-// parse disk name from device path, such as:
-// e.g. /dev/disk1s1 -> disk1
-// e.g. /dev/disk1s2 -> disk1
-// e.g. ntfs://disk1s1 -> disk1
-// e.g. ntfs://disk1s2 -> disk1
-// e.g. /dev/sda1 -> sda
-// e.g. /dev/sda2 -> sda
-var diskNameRegex = regexp.MustCompile(`/dev/disk(\d+)|ntfs://disk(\d+)|/dev/sd[a-zA-Z]`)
-
-func getDiskName(devicePath string) string {
-	matches := diskNameRegex.FindStringSubmatch(devicePath)
-	for _, match := range matches {
-		if match != "" {
-			return match
-		}
-	}
-	return ""
-}
+import (
+	"math"
+
+	dto "github.com/prometheus/client_model/go"
+)
+
+func calculatePercentile(histogram *dto.Histogram, percentile float64) float64 {
+	if histogram == nil {
+		return 0
+	}
+	totalSamples := histogram.GetSampleCount()
+	targetSample := percentile * float64(totalSamples)
+	cumulativeCount := uint64(0)
+	var lastBucketBound float64
+
+	for _, bucket := range histogram.Bucket {
+		cumulativeCount += bucket.GetCumulativeCount()
+		if float64(cumulativeCount) >= targetSample {
+			// Linear interpolation between bucket boundaries
+			if bucket.GetCumulativeCount() > 0 && lastBucketBound != bucket.GetUpperBound() {
+				return lastBucketBound + (float64(targetSample-float64(cumulativeCount-bucket.GetCumulativeCount()))/float64(bucket.GetCumulativeCount()))*(bucket.GetUpperBound()-lastBucketBound)
+			} else {
+				return bucket.GetUpperBound()
+			}
+		}
+		lastBucketBound = bucket.GetUpperBound()
+	}
+	return math.NaN()
+}
+
+func getMetricValue(metric *dto.Metric, metricType dto.MetricType) float64 {
+	switch metricType {
+	case dto.MetricType_COUNTER:
+		return metric.Counter.GetValue()
+	case dto.MetricType_GAUGE:
+		return metric.Gauge.GetValue()
+	case dto.MetricType_HISTOGRAM:
+		histogram := metric.Histogram
+		if histogram != nil {
+			return calculatePercentile(histogram, 0.9)
+		}
+	}
+	return 0
+}
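
A quick, hedged illustration of the new helper, assuming it sits next to calculatePercentile in package metric_reader; the bucket layout is invented sample data, not taken from the diff.

package metric_reader

import (
	"fmt"

	dto "github.com/prometheus/client_model/go"
	"google.golang.org/protobuf/proto"
)

func examplePercentile() {
	// 100 samples total: 60 at or below 10ms, 90 at or below 50ms, all below 100ms.
	h := &dto.Histogram{
		SampleCount: proto.Uint64(100),
		Bucket: []*dto.Bucket{
			{CumulativeCount: proto.Uint64(60), UpperBound: proto.Float64(10)},
			{CumulativeCount: proto.Uint64(90), UpperBound: proto.Float64(50)},
			{CumulativeCount: proto.Uint64(100), UpperBound: proto.Float64(100)},
		},
	}
	// The p90 estimate falls inside the second bucket and is interpolated
	// between the 10ms and 50ms bounds.
	fmt.Println(calculatePercentile(h, 0.9))
}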
@@ -2,6 +2,7 @@
 package echo
 
 import (
+	"errors"
 	"fmt"
 	"io"
 	"log"
@@ -118,6 +119,10 @@ func (s *EchoServer) handleTCPConn(conn net.Conn) {
 	}
 }
 
+func isClosedConnError(err error) bool {
+	return errors.Is(err, net.ErrClosed)
+}
+
 func (s *EchoServer) serveUDP() {
 	defer s.wg.Done()
 	buf := make([]byte, 1024)
@@ -128,6 +133,9 @@ func (s *EchoServer) serveUDP() {
 		default:
 			n, remoteAddr, err := s.udpConn.ReadFromUDP(buf)
 			if err != nil {
+				if isClosedConnError(err) {
+					break
+				}
 				s.logger.Errorf("Error reading UDP: %v", err)
 				continue
 			}

@@ -20,25 +20,27 @@ For an explanation of the mieru protocol, see [mieru Proxy Protocol](./docs/prot
## Features

1. mieru uses a high-strength XChaCha20-Poly1305 encryption algorithm that generates encryption keys based on username, password and system time.
1. mieru implements complete encryption of all transmitted content between the client and the proxy server, without transmitting any plaintext information.
1. When mieru sends a packet, it is padded with random bytes at the end. Even when the same content is transmitted, the packet size varies.
1. When using the UDP transport protocol, mieru does not require a handshake between client and server.
1. When the server cannot decrypt the data sent by the client, no content is returned. It is difficult for the GFW to discover the mieru service through active probing.
1. mieru supports multiple users sharing a single proxy server.
1. mieru supports IPv4 and IPv6.
1. mieru provides socks5, HTTP and HTTPS proxy.
1. The server software supports socks5 outbound (proxy chain).
1. The client software supports Windows, Mac OS, Linux and Android. Android clients include
    - [NekoBox](https://github.com/MatsuriDayo/NekoBoxForAndroid) version 1.3.1 or above, with [mieru plugin](https://github.com/enfein/NekoBoxPlugins).
    - [Exclave](https://github.com/dyhkwong/Exclave), with mieru plugin.
1. If you need advanced features like global proxy or customized routing rules, you can use mieru as the backend of a proxy platform such as [Xray](https://github.com/XTLS/Xray-core) and [sing-box](https://github.com/SagerNet/sing-box).

## User Guide

1. [Server Installation & Configuration](./docs/server-install.md)
1. [Client Installation & Configuration](./docs/client-install.md)
1. [Client Installation & Configuration - OpenWrt](./docs/client-install-openwrt.md)
1. [Maintenance & Troubleshooting](./docs/operation.md)
1. [Security Guide](./docs/security.md)
1. [Compilation](./docs/compile.md)

## Share

@@ -18,25 +18,27 @@ mieru 的翻墙原理与 shadowsocks / v2ray 等软件类似,在客户端和
|
|||||||
## 特性
|
## 特性
|
||||||
|
|
||||||
1. 使用高强度的 XChaCha20-Poly1305 加密算法,基于用户名、密码和系统时间生成密钥。
|
1. 使用高强度的 XChaCha20-Poly1305 加密算法,基于用户名、密码和系统时间生成密钥。
|
||||||
2. mieru 实现了客户端和代理服务器之间所有传输内容的完整加密,不传输任何明文信息。
|
1. mieru 实现了客户端和代理服务器之间所有传输内容的完整加密,不传输任何明文信息。
|
||||||
3. 当 mieru 发送数据包时,会在尾部填充随机信息。即便是传输相同的内容,数据包大小也不相同。
|
1. 当 mieru 发送数据包时,会在尾部填充随机信息。即便是传输相同的内容,数据包大小也不相同。
|
||||||
4. 在使用 UDP 传输协议时,mieru 不需要客户端和服务器进行握手,即可直接发送数据。
|
1. 在使用 UDP 传输协议时,mieru 不需要客户端和服务器进行握手,即可直接发送数据。
|
||||||
5. 当服务器无法解密客户端发送的数据时,不会返回任何内容。GFW 很难通过主动探测发现 mieru 服务。
|
1. 当服务器无法解密客户端发送的数据时,不会返回任何内容。GFW 很难通过主动探测发现 mieru 服务。
|
||||||
6. mieru 支持多个用户共享代理服务器。
|
1. mieru 支持多个用户共享代理服务器。
|
||||||
7. mieru 支持 IPv4 和 IPv6。
|
1. mieru 支持 IPv4 和 IPv6。
|
||||||
8. mieru 提供 socks5, HTTP 和 HTTPS 代理。
|
1. mieru 提供 socks5, HTTP 和 HTTPS 代理。
|
||||||
9. 客户端软件支持 Windows, Mac OS, Linux 和 Android 系统。Android 用户请使用 [NekoBox](https://github.com/MatsuriDayo/NekoBoxForAndroid) 1.3.1 及以上版本,并安装 [mieru 插件](https://github.com/enfein/NekoBoxPlugins)。
|
1. 服务器软件支持 socks5 出站(链式代理)。
|
||||||
10. 服务器软件支持 socks5 出站(链式代理)。
|
1. 客户端软件支持 Windows, Mac OS, Linux 和 Android 系统。Android 客户端包括
|
||||||
11. 如果需要全局代理或自定义路由规则等高级功能,可以将 mieru 作为 [Xray](https://github.com/XTLS/Xray-core) 和 [sing-box](https://github.com/SagerNet/sing-box) 等代理平台的后端。
|
- [NekoBox](https://github.com/MatsuriDayo/NekoBoxForAndroid) 1.3.1 及以上版本,并安装 [mieru 插件](https://github.com/enfein/NekoBoxPlugins)。
|
||||||
|
- [Exclave](https://github.com/dyhkwong/Exclave) 并安装 mieru 插件。
|
||||||
|
1. 如果需要全局代理或自定义路由规则等高级功能,可以将 mieru 作为 [Xray](https://github.com/XTLS/Xray-core) 和 [sing-box](https://github.com/SagerNet/sing-box) 等代理平台的后端。
|
||||||
|
|
||||||
## 使用教程
|
## 使用教程
|
||||||
|
|
||||||
1. [服务器安装与配置](./docs/server-install.zh_CN.md)
|
1. [服务器安装与配置](./docs/server-install.zh_CN.md)
|
||||||
2. [客户端安装与配置](./docs/client-install.zh_CN.md)
|
1. [客户端安装与配置](./docs/client-install.zh_CN.md)
|
||||||
3. [客户端安装与配置 - OpenWrt](./docs/client-install-openwrt.zh_CN.md)
|
1. [客户端安装与配置 - OpenWrt](./docs/client-install-openwrt.zh_CN.md)
|
||||||
4. [运营维护与故障排查](./docs/operation.zh_CN.md)
|
1. [运营维护与故障排查](./docs/operation.zh_CN.md)
|
||||||
5. [翻墙安全指南](./docs/security.zh_CN.md)
|
1. [翻墙安全指南](./docs/security.zh_CN.md)
|
||||||
6. [编译](./docs/compile.zh_CN.md)
|
1. [编译](./docs/compile.zh_CN.md)
|
||||||
|
|
||||||
## 分享
|
## 分享
|
||||||
|
|
||||||
|
@@ -1,9 +1,32 @@
|
|||||||
|
|
||||||
config alist
|
config alist
|
||||||
option 'enabled' '0'
|
option enabled '0'
|
||||||
option 'port' '5244'
|
option port '5244'
|
||||||
option 'temp_dir' '/tmp/alist'
|
option delayed_start '0'
|
||||||
option 'ssl' '0'
|
option allow_wan '0'
|
||||||
option 'token_expires_in' '48'
|
option force '1'
|
||||||
option 'max_connections' '0'
|
option token_expires_in '48'
|
||||||
option 'site_url' ''
|
option max_connections '0'
|
||||||
option 'delayed_start' '0'
|
option tls_insecure_skip_verify '1'
|
||||||
|
option data_dir '/etc/alist'
|
||||||
|
option temp_dir '/tmp/alist'
|
||||||
|
option log '1'
|
||||||
|
option log_max_size '10'
|
||||||
|
option log_max_backups '5'
|
||||||
|
option log_max_age '28'
|
||||||
|
option log_compress '0'
|
||||||
|
option database_type 'sqlite3'
|
||||||
|
option ssl '0'
|
||||||
|
option download_workers '5'
|
||||||
|
option download_max_retry '1'
|
||||||
|
option transfer_workers '5'
|
||||||
|
option transfer_max_retry '2'
|
||||||
|
option upload_workers '5'
|
||||||
|
option upload_max_retry '0'
|
||||||
|
option copy_workers '5'
|
||||||
|
option copy_max_retry '2'
|
||||||
|
option cors_allow_origins '*'
|
||||||
|
option cors_allow_methods '*'
|
||||||
|
option cors_allow_headers '*'
|
||||||
|
option s3 '0'
|
||||||
|
|
||||||
|
@@ -10,21 +10,34 @@ LOG_FILE=/var/log/alist.log
|
|||||||
get_config() {
|
get_config() {
|
||||||
config_get_bool enabled $1 enabled 1
|
config_get_bool enabled $1 enabled 1
|
||||||
config_get port $1 port 5244
|
config_get port $1 port 5244
|
||||||
config_get log $1 log 1
|
|
||||||
config_get site_url $1 site_url ""
|
|
||||||
config_get data_dir $1 data_dir "/etc/alist"
|
|
||||||
config_get temp_dir $1 temp_dir "/tmp/alist"
|
|
||||||
config_get ssl $1 ssl 0
|
|
||||||
config_get ssl_cert $1 ssl_cert ""
|
|
||||||
config_get ssl_key $1 ssl_key ""
|
|
||||||
config_get token_expires_in $1 token_expires_in 48
|
|
||||||
config_get allow_wan $1 allow_wan 0
|
config_get allow_wan $1 allow_wan 0
|
||||||
config_get max_connections $1 max_connections 0
|
|
||||||
config_get delayed_start $1 delayed_start 0
|
config_get delayed_start $1 delayed_start 0
|
||||||
|
|
||||||
# mysql
|
config_get force $1 force 1
|
||||||
config_get mysql $1 mysql 0
|
config_get site_url $1 site_url ""
|
||||||
config_get mysql_type $1 mysql_type "mysql"
|
config_get cdn $1 cdn ""
|
||||||
|
config_get jwt_secret $1 jwt_secret ""
|
||||||
|
config_get data_dir $1 data_dir "/etc/alist"
|
||||||
|
config_get temp_dir $1 temp_dir "/tmp/alist"
|
||||||
|
config_get token_expires_in $1 token_expires_in 48
|
||||||
|
config_get max_connections $1 max_connections 0
|
||||||
|
config_get tls_insecure_skip_verify $1 tls_insecure_skip_verify 1
|
||||||
|
|
||||||
|
# log
|
||||||
|
config_get log $1 log 1
|
||||||
|
config_get log_max_size $1 log_max_size 10
|
||||||
|
config_get log_max_backups $1 log_max_backups 5
|
||||||
|
config_get log_max_age $1 log_max_age 28
|
||||||
|
config_get log_compress $1 log_compress 0
|
||||||
|
|
||||||
|
# scheme
|
||||||
|
config_get ssl $1 ssl 0
|
||||||
|
config_get force_https $1 force_https 0
|
||||||
|
config_get ssl_cert $1 ssl_cert ""
|
||||||
|
config_get ssl_key $1 ssl_key ""
|
||||||
|
|
||||||
|
# database
|
||||||
|
config_get database_type $1 database_type "sqlite3"
|
||||||
config_get mysql_host $1 mysql_host ""
|
config_get mysql_host $1 mysql_host ""
|
||||||
config_get mysql_port $1 mysql_port "3306"
|
config_get mysql_port $1 mysql_port "3306"
|
||||||
config_get mysql_username $1 mysql_username ""
|
config_get mysql_username $1 mysql_username ""
|
||||||
@@ -34,6 +47,26 @@ get_config() {
|
|||||||
config_get mysql_ssl_mode $1 mysql_ssl_mode ""
|
config_get mysql_ssl_mode $1 mysql_ssl_mode ""
|
||||||
config_get mysql_dsn $1 mysql_dsn ""
|
config_get mysql_dsn $1 mysql_dsn ""
|
||||||
|
|
||||||
|
# tasks
|
||||||
|
config_get download_workers $1 download_workers 5
|
||||||
|
config_get download_max_retry $1 download_max_retry 1
|
||||||
|
config_get transfer_workers $1 transfer_workers 5
|
||||||
|
config_get transfer_max_retry $1 transfer_max_retry 2
|
||||||
|
config_get upload_workers $1 upload_workers 5
|
||||||
|
config_get upload_max_retry $1 upload_max_retry 0
|
||||||
|
config_get copy_workers $1 copy_workers 5
|
||||||
|
config_get copy_max_retry $1 copy_max_retry 2
|
||||||
|
|
||||||
|
# cors
|
||||||
|
config_get cors_allow_origins $1 cors_allow_origins '*'
|
||||||
|
config_get cors_allow_methods $1 cors_allow_methods '*'
|
||||||
|
config_get cors_allow_headers $1 cors_allow_headers '*'
|
||||||
|
|
||||||
|
# s3
|
||||||
|
config_get s3 $1 s3 0
|
||||||
|
config_get s3_port $1 s3_port 5246
|
||||||
|
config_get s3_ssl $1 s3_ssl 0
|
||||||
|
|
||||||
config_load network
|
config_load network
|
||||||
config_get lan_addr lan ipaddr "0.0.0.0"
|
config_get lan_addr lan ipaddr "0.0.0.0"
|
||||||
if echo "${lan_addr}" | grep -Fq ' '; then
|
if echo "${lan_addr}" | grep -Fq ' '; then
|
||||||
@@ -41,6 +74,11 @@ get_config() {
|
|||||||
else
|
else
|
||||||
lan_addr=${lan_addr%%/*}
|
lan_addr=${lan_addr%%/*}
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
# init jwt_secret
|
||||||
|
[ -z "$jwt_secret" ] && jwt_secret=$(tr -cd "a-zA-Z0-9" < "/dev/urandom" | head -c16)
|
||||||
|
uci -q set alist.@alist[0].jwt_secret="$jwt_secret"
|
||||||
|
uci commit alist
|
||||||
}
|
}
|
||||||
|
|
||||||
set_firewall() {
|
set_firewall() {
|
||||||
@@ -81,23 +119,20 @@ start_service() {
|
|||||||
external_access="deny"
|
external_access="deny"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# mysql
|
|
||||||
[ "$mysql" -eq 1 ] && database=$mysql_type || database=sqlite3
|
|
||||||
|
|
||||||
set_firewall
|
set_firewall
|
||||||
true > $LOG_FILE
|
true > $LOG_FILE
|
||||||
|
|
||||||
# init config
|
# init config
|
||||||
json_init
|
json_init
|
||||||
json_add_boolean "force" "1"
|
json_add_boolean "force" "$force"
|
||||||
json_add_string "site_url" "$site_url"
|
json_add_string "site_url" "$site_url"
|
||||||
json_add_string "cdn" ""
|
json_add_string "cdn" "$cdn"
|
||||||
json_add_string "jwt_secret" ""
|
json_add_string "jwt_secret" "$jwt_secret"
|
||||||
json_add_int "token_expires_in" "$token_expires_in"
|
json_add_int "token_expires_in" "$token_expires_in"
|
||||||
|
|
||||||
# database
|
# database
|
||||||
json_add_object 'database'
|
json_add_object 'database'
|
||||||
json_add_string "type" "$database"
|
json_add_string "type" "$database_type"
|
||||||
json_add_string "host" "$mysql_host"
|
json_add_string "host" "$mysql_host"
|
||||||
json_add_int "port" "$mysql_port"
|
json_add_int "port" "$mysql_port"
|
||||||
json_add_string "user" "$mysql_username"
|
json_add_string "user" "$mysql_username"
|
||||||
@@ -121,7 +156,7 @@ start_service() {
|
|||||||
json_add_string "address" "$listen_addr"
|
json_add_string "address" "$listen_addr"
|
||||||
json_add_int "http_port" "$http_port"
|
json_add_int "http_port" "$http_port"
|
||||||
json_add_int "https_port" "$https_port"
|
json_add_int "https_port" "$https_port"
|
||||||
json_add_boolean "force_https" "0"
|
json_add_boolean "force_https" "$force_https"
|
||||||
json_add_string "cert_file" "$ssl_cert"
|
json_add_string "cert_file" "$ssl_cert"
|
||||||
json_add_string "key_file" "$ssl_key"
|
json_add_string "key_file" "$ssl_key"
|
||||||
json_add_string "unix_file" ""
|
json_add_string "unix_file" ""
|
||||||
@@ -136,61 +171,62 @@ start_service() {
|
|||||||
json_add_object "log"
|
json_add_object "log"
|
||||||
json_add_boolean "enable" "$log"
|
json_add_boolean "enable" "$log"
|
||||||
json_add_string "name" "$LOG_FILE"
|
json_add_string "name" "$LOG_FILE"
|
||||||
json_add_int "max_size" "10"
|
json_add_int "max_size" "$log_max_size"
|
||||||
json_add_int "max_backups" "5"
|
json_add_int "max_backups" "$log_max_backups"
|
||||||
json_add_int "max_age" "28"
|
json_add_int "max_age" "$log_max_age"
|
||||||
json_add_boolean "compress" "0"
|
json_add_boolean "compress" "$log_compress"
|
||||||
json_close_object
|
json_close_object
|
||||||
|
|
||||||
json_add_int "delayed_start" "$delayed_start"
|
json_add_int "delayed_start" "$delayed_start"
|
||||||
json_add_int "max_connections" "$max_connections"
|
json_add_int "max_connections" "$max_connections"
|
||||||
json_add_boolean "tls_insecure_skip_verify" "1"
|
json_add_boolean "tls_insecure_skip_verify" "$tls_insecure_skip_verify"
|
||||||
|
|
||||||
# tasks
|
# tasks
|
||||||
json_add_object "tasks"
|
json_add_object "tasks"
|
||||||
json_add_object "download"
|
json_add_object "download"
|
||||||
json_add_int "workers" "5"
|
json_add_int "workers" "$download_workers"
|
||||||
json_add_int "max_retry" "1"
|
json_add_int "max_retry" "$download_max_retry"
|
||||||
json_close_object
|
json_close_object
|
||||||
json_add_object "transfer"
|
json_add_object "transfer"
|
||||||
json_add_int "workers" "5"
|
json_add_int "workers" "$transfer_workers"
|
||||||
json_add_int "max_retry" "2"
|
json_add_int "max_retry" "$transfer_max_retry"
|
||||||
json_close_object
|
json_close_object
|
||||||
json_add_object "upload"
|
json_add_object "upload"
|
||||||
json_add_int "workers" "5"
|
json_add_int "workers" "$upload_workers"
|
||||||
json_add_int "max_retry" "0"
|
json_add_int "max_retry" "$upload_max_retry"
|
||||||
json_close_object
|
json_close_object
|
||||||
json_add_object "copy"
|
json_add_object "copy"
|
||||||
json_add_int "workers" "5"
|
json_add_int "workers" "$copy_workers"
|
||||||
json_add_int "max_retry" "2"
|
json_add_int "max_retry" "$copy_max_retry"
|
||||||
json_close_object
|
json_close_object
|
||||||
json_close_object
|
json_close_object
|
||||||
|
|
||||||
# cors
|
# cors
|
||||||
json_add_object "cors"
|
json_add_object "cors"
|
||||||
json_add_array "allow_origins"
|
json_add_array "allow_origins"
|
||||||
json_add_string "" "*"
|
json_add_string "" "$cors_allow_origins"
|
||||||
json_close_array
|
json_close_array
|
||||||
json_add_array "allow_methods"
|
json_add_array "allow_methods"
|
||||||
json_add_string "" "*"
|
json_add_string "" "$cors_allow_methods"
|
||||||
json_close_array
|
json_close_array
|
||||||
json_add_array "allow_headers"
|
json_add_array "allow_headers"
|
||||||
json_add_string "" "*"
|
json_add_string "" "$cors_allow_headers"
|
||||||
json_close_array
|
json_close_array
|
||||||
json_close_object
|
json_close_object
|
||||||
|
|
||||||
# s3
|
# s3
|
||||||
json_add_object "s3"
|
json_add_object "s3"
|
||||||
json_add_boolean "enable" "0"
|
json_add_boolean "enable" "$s3"
|
||||||
json_add_int "port" "5246"
|
json_add_int "port" "$s3_port"
|
||||||
json_add_boolean "ssl" "0"
|
json_add_boolean "ssl" "$s3_ssl"
|
||||||
json_close_object
|
json_close_object
|
||||||
|
|
||||||
json_dump > $data_dir/config.json
|
json_dump > $data_dir/config.json
|
||||||
|
|
||||||
procd_open_instance alist
|
procd_open_instance alist
|
||||||
procd_set_param command $PROG
|
procd_set_param command $PROG
|
||||||
procd_append_param command server --data $data_dir
|
procd_append_param command server
|
||||||
|
procd_append_param command --data $data_dir
|
||||||
procd_set_param stdout 0
|
procd_set_param stdout 0
|
||||||
procd_set_param stderr 0
|
procd_set_param stderr 0
|
||||||
procd_set_param respawn
|
procd_set_param respawn
|
||||||
@@ -201,7 +237,7 @@ start_service() {
|
|||||||
|
|
||||||
reload_service() {
|
reload_service() {
|
||||||
stop
|
stop
|
||||||
sleep 3
|
sleep 2
|
||||||
start
|
start
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -658,8 +658,11 @@ run_chinadns_ng() {
|
|||||||
([ -z "${_default_tag}" ] || [ "${_default_tag}" = "smart" ] || [ "${_default_tag}" = "none_noip" ]) && _default_tag="none"
|
([ -z "${_default_tag}" ] || [ "${_default_tag}" = "smart" ] || [ "${_default_tag}" = "none_noip" ]) && _default_tag="none"
|
||||||
echo "default-tag ${_default_tag}" >> ${_CONF_FILE}
|
echo "default-tag ${_default_tag}" >> ${_CONF_FILE}
|
||||||
|
|
||||||
|
echo "cache 4096" >> ${_CONF_FILE}
|
||||||
|
echo "cache-stale 3600" >> ${_CONF_FILE}
|
||||||
|
|
||||||
[ "${_flag}" = "default" ] && [ "${_default_tag}" = "none" ] && {
|
[ "${_flag}" = "default" ] && [ "${_default_tag}" = "none" ] && {
|
||||||
echo "verdict-cache 4096" >> ${_CONF_FILE}
|
echo "verdict-cache 5000" >> ${_CONF_FILE}
|
||||||
}
|
}
|
||||||
|
|
||||||
ln_run "$(first_type chinadns-ng)" chinadns-ng "${_LOG_FILE}" -C ${_CONF_FILE}
|
ln_run "$(first_type chinadns-ng)" chinadns-ng "${_LOG_FILE}" -C ${_CONF_FILE}
|
||||||
@@ -1379,7 +1382,6 @@ start_dns() {
|
|||||||
LOCAL_DNS=$(config_t_get global direct_dns_udp 223.5.5.5 | sed 's/:/#/g')
|
LOCAL_DNS=$(config_t_get global direct_dns_udp 223.5.5.5 | sed 's/:/#/g')
|
||||||
china_ng_local_dns=${LOCAL_DNS}
|
china_ng_local_dns=${LOCAL_DNS}
|
||||||
sing_box_local_dns="direct_dns_udp_server=${LOCAL_DNS}"
|
sing_box_local_dns="direct_dns_udp_server=${LOCAL_DNS}"
|
||||||
IPT_APPEND_DNS=${LOCAL_DNS}
|
|
||||||
;;
|
;;
|
||||||
tcp)
|
tcp)
|
||||||
LOCAL_DNS="127.0.0.1#${dns_listen_port}"
|
LOCAL_DNS="127.0.0.1#${dns_listen_port}"
|
||||||
@@ -1387,7 +1389,6 @@ start_dns() {
|
|||||||
local DIRECT_DNS=$(config_t_get global direct_dns_tcp 223.5.5.5 | sed 's/:/#/g')
|
local DIRECT_DNS=$(config_t_get global direct_dns_tcp 223.5.5.5 | sed 's/:/#/g')
|
||||||
china_ng_local_dns="tcp://${DIRECT_DNS}"
|
china_ng_local_dns="tcp://${DIRECT_DNS}"
|
||||||
sing_box_local_dns="direct_dns_tcp_server=${DIRECT_DNS}"
|
sing_box_local_dns="direct_dns_tcp_server=${DIRECT_DNS}"
|
||||||
IPT_APPEND_DNS="${LOCAL_DNS},${DIRECT_DNS}"
|
|
||||||
ln_run "$(first_type dns2tcp)" dns2tcp "/dev/null" -L "${LOCAL_DNS}" -R "$(get_first_dns DIRECT_DNS 53)" -v
|
ln_run "$(first_type dns2tcp)" dns2tcp "/dev/null" -L "${LOCAL_DNS}" -R "$(get_first_dns DIRECT_DNS 53)" -v
|
||||||
echolog " - dns2tcp(${LOCAL_DNS}) -> tcp://$(get_first_dns DIRECT_DNS 53 | sed 's/#/:/g')"
|
echolog " - dns2tcp(${LOCAL_DNS}) -> tcp://$(get_first_dns DIRECT_DNS 53 | sed 's/#/:/g')"
|
||||||
echolog " * 请确保上游直连 DNS 支持 TCP 查询。"
|
echolog " * 请确保上游直连 DNS 支持 TCP 查询。"
|
||||||
@@ -1405,8 +1406,8 @@ start_dns() {
|
|||||||
|
|
||||||
local tmp_dot_ip=$(echo "$DIRECT_DNS" | sed -n 's/.*:\/\/\([^@#]*@\)*\([^@#]*\).*/\2/p')
|
local tmp_dot_ip=$(echo "$DIRECT_DNS" | sed -n 's/.*:\/\/\([^@#]*@\)*\([^@#]*\).*/\2/p')
|
||||||
local tmp_dot_port=$(echo "$DIRECT_DNS" | sed -n 's/.*#\([0-9]\+\).*/\1/p')
|
local tmp_dot_port=$(echo "$DIRECT_DNS" | sed -n 's/.*#\([0-9]\+\).*/\1/p')
|
||||||
sing_box_local_dns="direct_dns_dot_server=$tmp_dot_ip#${tmp_dot_port:-853}"
|
DIRECT_DNS=$tmp_dot_ip#${tmp_dot_port:-853}
|
||||||
IPT_APPEND_DNS="${LOCAL_DNS},$tmp_dot_ip#${tmp_dot_port:-853}"
|
sing_box_local_dns="direct_dns_dot_server=${DIRECT_DNS}"
|
||||||
else
|
else
|
||||||
echolog " - 你的ChinaDNS-NG版本不支持DoT,直连DNS将使用默认地址。"
|
echolog " - 你的ChinaDNS-NG版本不支持DoT,直连DNS将使用默认地址。"
|
||||||
fi
|
fi
|
||||||
@@ -1417,6 +1418,21 @@ start_dns() {
|
|||||||
;;
|
;;
|
||||||
esac
|
esac
|
||||||
|
|
||||||
|
# 追加直连DNS到iptables/nftables
|
||||||
|
[ "$(config_t_get global_haproxy balancing_enable 0)" != "1" ] && IPT_APPEND_DNS=
|
||||||
|
add_default_port() {
|
||||||
|
[ -z "$1" ] && echo "" || echo "$1" | awk -F',' '{for(i=1;i<=NF;i++){if($i !~ /#/) $i=$i"#53";} print $0;}' OFS=','
|
||||||
|
}
|
||||||
|
LOCAL_DNS=$(add_default_port "$LOCAL_DNS")
|
||||||
|
IPT_APPEND_DNS=$(add_default_port "${IPT_APPEND_DNS:-$LOCAL_DNS}")
|
||||||
|
echo "$IPT_APPEND_DNS" | grep -q -E "(^|,)$LOCAL_DNS(,|$)" || IPT_APPEND_DNS="${IPT_APPEND_DNS:+$IPT_APPEND_DNS,}$LOCAL_DNS"
|
||||||
|
[ -n "$DIRECT_DNS" ] && {
|
||||||
|
DIRECT_DNS=$(add_default_port "$DIRECT_DNS")
|
||||||
|
echo "$IPT_APPEND_DNS" | grep -q -E "(^|,)$DIRECT_DNS(,|$)" || IPT_APPEND_DNS="${IPT_APPEND_DNS:+$IPT_APPEND_DNS,}$DIRECT_DNS"
|
||||||
|
}
|
||||||
|
# 排除127.0.0.1的条目
|
||||||
|
IPT_APPEND_DNS=$(echo "$IPT_APPEND_DNS" | awk -F',' '{for(i=1;i<=NF;i++) if($i !~ /^127\.0\.0\.1/) printf (i>1?",":"") $i; print ""}' | sed 's/^,\|,$//g')
|
||||||
|
|
||||||
TUN_DNS="127.0.0.1#${dns_listen_port}"
|
TUN_DNS="127.0.0.1#${dns_listen_port}"
|
||||||
[ "${resolve_dns}" == "1" ] && TUN_DNS="127.0.0.1#${resolve_dns_port}"
|
[ "${resolve_dns}" == "1" ] && TUN_DNS="127.0.0.1#${resolve_dns_port}"
|
||||||
|
|
||||||
|
50 small/luci-app-homeproxy/.prepare.sh (new executable file)
@@ -0,0 +1,50 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
PKG_NAME="$1"
|
||||||
|
CURDIR="$2"
|
||||||
|
PKG_BUILD_DIR="$3"
|
||||||
|
PKG_BUILD_BIN="$PKG_BUILD_DIR/bin"
|
||||||
|
export PATH="$PATH:$PKG_BUILD_BIN"
|
||||||
|
OS=linux
|
||||||
|
ARCH=amd64
|
||||||
|
JQVERSION=1.7.1
|
||||||
|
DOCNAME=Ruleset-URI-Scheme
|
||||||
|
|
||||||
|
mkdir -p "$PKG_BUILD_BIN"
|
||||||
|
curl -L "https://github.com/jqlang/jq/releases/download/jq-${JQVERSION}/jq-${OS}-${ARCH}" -o "$PKG_BUILD_BIN"/jq
|
||||||
|
chmod +x "$PKG_BUILD_BIN"/jq
|
||||||
|
latest="$(curl -L https://api.github.com/repos/kpym/gm/releases/latest | jq -rc '.tag_name' 2>/dev/null)"
|
||||||
|
curl -L "https://github.com/kpym/gm/releases/download/${latest}/gm_${latest#v}_Linux_64bit.tar.gz" -o- | tar -xz -C "$PKG_BUILD_BIN"
|
||||||
|
latest="$(curl -L https://api.github.com/repos/tdewolff/minify/releases/latest | jq -rc '.tag_name' 2>/dev/null)"
|
||||||
|
curl -L "https://github.com/tdewolff/minify/releases/download/${latest}/minify_${OS}_${ARCH}.tar.gz" -o- | tar -xz -C "$PKG_BUILD_BIN"
|
||||||
|
chmod -R +x "$PKG_BUILD_BIN"
|
||||||
|
|
||||||
|
cp "$CURDIR"/docs/$DOCNAME.md "$PKG_BUILD_DIR"
|
||||||
|
pushd "$PKG_BUILD_DIR"
|
||||||
|
gm $DOCNAME.md
|
||||||
|
p=$(sed -n '/github.min.css/=' $DOCNAME.html)
|
||||||
|
{
|
||||||
|
head -n$(( $p -1 )) $DOCNAME.html
|
||||||
|
echo '<style>'
|
||||||
|
cat "$CURDIR"/docs/css/ClearnessDark.css
|
||||||
|
echo '</style>'
|
||||||
|
tail -n +$(( $p +1 )) $DOCNAME.html
|
||||||
|
} > buildin.html
|
||||||
|
popd
|
||||||
|
minify "$PKG_BUILD_DIR"/buildin.html | base64 | tr -d '\n' > "$PKG_BUILD_DIR"/base64
|
||||||
|
sed -i "s|'cmxzdHBsYWNlaG9sZGVy'|'$(cat "$PKG_BUILD_DIR"/base64)'|" "$PKG_BUILD_DIR"/htdocs/luci-static/resources/view/homeproxy/ruleset.js
|
||||||
|
|
||||||
|
if [ -d "$CURDIR/.git" ]; then
|
||||||
|
config="$CURDIR/.git/config"
|
||||||
|
else
|
||||||
|
config="$(sed "s|^gitdir:\s*|$CURDIR/|;s|$|/config|" "$CURDIR/.git")"
|
||||||
|
fi
|
||||||
|
[ -n "$(sed -En '/^\[remote /{h;:top;n;/^\[/b;s,(https?://gitcode\.(com|net)),\1,;T top;H;x;s|\n\s*|: |;p;}' "$config")" ] && {
|
||||||
|
for d in luasrc ucode htdocs root src; do
|
||||||
|
rm -rf "$PKG_BUILD_DIR"/$d
|
||||||
|
done
|
||||||
|
mkdir -p "$PKG_BUILD_DIR"/htdocs/luci-static/resources/view
|
||||||
|
touch "$PKG_BUILD_DIR"/htdocs/luci-static/resources/view/$PKG_NAME.js
|
||||||
|
mkdir -p "$PKG_BUILD_DIR"/root/usr/share/luci/menu.d
|
||||||
|
touch "$PKG_BUILD_DIR"/root/usr/share/luci/menu.d/$PKG_NAME.json
|
||||||
|
}
|
||||||
|
exit 0
|
@@ -10,7 +10,8 @@ LUCI_DEPENDS:= \
|
|||||||
+sing-box \
|
+sing-box \
|
||||||
+chinadns-ng \
|
+chinadns-ng \
|
||||||
+firewall4 \
|
+firewall4 \
|
||||||
+kmod-nft-tproxy
|
+kmod-nft-tproxy \
|
||||||
|
+unzip
|
||||||
|
|
||||||
PKG_NAME:=luci-app-homeproxy
|
PKG_NAME:=luci-app-homeproxy
|
||||||
|
|
||||||
@@ -20,8 +21,13 @@ define Package/luci-app-homeproxy/conffiles
|
|||||||
/etc/homeproxy/ruleset/
|
/etc/homeproxy/ruleset/
|
||||||
/etc/homeproxy/resources/direct_list.txt
|
/etc/homeproxy/resources/direct_list.txt
|
||||||
/etc/homeproxy/resources/proxy_list.txt
|
/etc/homeproxy/resources/proxy_list.txt
|
||||||
|
/etc/homeproxy/resources/clash_dashboard.ver
|
||||||
|
/etc/homeproxy/resources/*.zip
|
||||||
|
/etc/homeproxy/cache.db
|
||||||
endef
|
endef
|
||||||
|
|
||||||
|
PKG_UNPACK=$(CURDIR)/.prepare.sh $(PKG_NAME) $(CURDIR) $(PKG_BUILD_DIR)
|
||||||
|
|
||||||
include $(TOPDIR)/feeds/luci/luci.mk
|
include $(TOPDIR)/feeds/luci/luci.mk
|
||||||
|
|
||||||
# call BuildPackage - OpenWrt buildroot signature
|
# call BuildPackage - OpenWrt buildroot signature
|
||||||
|
@@ -1,5 +1,14 @@
|
|||||||
|
Recently, sagernet blocked my account unilaterally and without any reason.
|
||||||
|
So this fork will not continue to provide support for new SB features in the future.
|
||||||
|
Goodbye. everyone.
|
||||||
|
|
||||||
|
For developers:
|
||||||
|
The code of the dev/* branch has supported new SB features up to 1.10.0-beta.3.
|
||||||
|
You can merge it yourself if necessary.
|
||||||
|
|
||||||
TODO:
|
TODO:
|
||||||
- Subscription page slow response with a large number of nodes
|
- Subscription page slow response with a large number of nodes
|
||||||
- Refactor nft rules
|
- Refactor nft rules
|
||||||
|
- Support Clash selector, urltest etc.
|
||||||
- Move ACL settings to a dedicated page
|
- Move ACL settings to a dedicated page
|
||||||
- Any other improvements
|
- Any other improvements
|
||||||
|
54 small/luci-app-homeproxy/docs/Ruleset-URI-Scheme.md (new file)
@@ -0,0 +1,54 @@
|
|||||||
|
# Import rule-set links format
|
||||||
|
|
||||||
|
## Structure
|
||||||
|
|
||||||
|
**remote:** `http[s]://[auth@]<host><path>?file=<rulefmt>[&key=value][#label]`
|
||||||
|
**local:** `file://[host]<path>?file=<rulefmt>[&key=value][#label]`
|
||||||
|
**inline:** `inline://<base64edJsonStr>[#label]`
|
||||||
|
|
||||||
|
## Components
|
||||||
|
|
||||||
|
### Scheme
|
||||||
|
|
||||||
|
Can be `http` or `https` or `file` or `inline`.
|
||||||
|
|
||||||
|
### Auth
|
||||||
|
|
||||||
|
Add it only if required by the target host.
|
||||||
|
|
||||||
|
### Host
|
||||||
|
|
||||||
|
The format is `hostname[:port]`.
|
||||||
|
`hostname` can be **Domain** or **IP Address**.
|
||||||
|
`:port` is optional, add it only if required by the target host.
|
||||||
|
|
||||||
|
### Path
|
||||||
|
|
||||||
|
The shortest format is `/`.
|
||||||
|
|
||||||
|
### Base64edJsonStr
|
||||||
|
|
||||||
|
Generation steps:
|
||||||
|
|
||||||
|
1. Base64 encode **Headless Rule** `.rules`.
|
||||||
|
2. Replace all `+` with `-` and all `/` with `_` in base64 string.
|
||||||
|
3. Remove all `=` from the EOF the base64 string.
|
||||||
|
|
||||||
|
### QueryParameters
|
||||||
|
|
||||||
|
+ `file`: Required. Available values refer to **Rulefmt**.
|
||||||
|
+ `rawquery`: Optional. Available values refer to **rawQuery**.
|
||||||
|
|
||||||
|
#### Rulefmt
|
||||||
|
|
||||||
|
Can be `json` or `srs`. Rule file format.
|
||||||
|
|
||||||
|
#### rawQuery
|
||||||
|
|
||||||
|
This parameter is required if the original link contains a url query.
|
||||||
|
Encode the part `key1=value1&key2=value2` after `?` in the original link with `encodeURIComponent` and use the result as the payload of this parameter.
|
||||||
|
|
||||||
|
### URIFragment
|
||||||
|
|
||||||
|
Ruleset label. Empty strings are not recommended.
|
||||||
|
Need encoded by `encodeURIComponent`.
|
209 small/luci-app-homeproxy/docs/css/ClearnessDark.css (new file)
@@ -0,0 +1,209 @@
|
|||||||
|
h1,
|
||||||
|
h2,
|
||||||
|
h3,
|
||||||
|
h4,
|
||||||
|
h5,
|
||||||
|
h6,
|
||||||
|
p,
|
||||||
|
blockquote {
|
||||||
|
margin: 0;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
body {
|
||||||
|
font-family: "Helvetica Neue", Helvetica, "Hiragino Sans GB", Arial, sans-serif;
|
||||||
|
font-size: 13px;
|
||||||
|
line-height: 18px;
|
||||||
|
color: #fff;
|
||||||
|
background-color: #282a36;
|
||||||
|
margin: 10px 13px 10px 13px;
|
||||||
|
}
|
||||||
|
a {
|
||||||
|
color: #59acf3;
|
||||||
|
}
|
||||||
|
a:hover {
|
||||||
|
color: #a7d8ff;
|
||||||
|
text-decoration: none;
|
||||||
|
}
|
||||||
|
a img {
|
||||||
|
border: none;
|
||||||
|
}
|
||||||
|
p {
|
||||||
|
margin-bottom: 9px;
|
||||||
|
}
|
||||||
|
h1,
|
||||||
|
h2,
|
||||||
|
h3,
|
||||||
|
h4,
|
||||||
|
h5,
|
||||||
|
h6 {
|
||||||
|
color: #fff;
|
||||||
|
line-height: 36px;
|
||||||
|
}
|
||||||
|
h1 {
|
||||||
|
margin-bottom: 18px;
|
||||||
|
font-size: 30px;
|
||||||
|
}
|
||||||
|
h2 {
|
||||||
|
font-size: 24px;
|
||||||
|
}
|
||||||
|
h3 {
|
||||||
|
font-size: 18px;
|
||||||
|
}
|
||||||
|
h4 {
|
||||||
|
font-size: 16px;
|
||||||
|
}
|
||||||
|
h5 {
|
||||||
|
font-size: 14px;
|
||||||
|
}
|
||||||
|
h6 {
|
||||||
|
font-size: 13px;
|
||||||
|
}
|
||||||
|
hr {
|
||||||
|
margin: 0 0 19px;
|
||||||
|
border: 0;
|
||||||
|
border-bottom: 1px solid #ccc;
|
||||||
|
}
|
||||||
|
blockquote {
|
||||||
|
padding: 13px 13px 21px 15px;
|
||||||
|
margin-bottom: 18px;
|
||||||
|
font-family:georgia,serif;
|
||||||
|
font-style: italic;
|
||||||
|
}
|
||||||
|
blockquote:before {
|
||||||
|
content:"\201C";
|
||||||
|
font-size:40px;
|
||||||
|
margin-left:-10px;
|
||||||
|
font-family:georgia,serif;
|
||||||
|
color:#eee;
|
||||||
|
}
|
||||||
|
blockquote p {
|
||||||
|
font-size: 14px;
|
||||||
|
font-weight: 300;
|
||||||
|
line-height: 18px;
|
||||||
|
margin-bottom: 0;
|
||||||
|
font-style: italic;
|
||||||
|
}
|
||||||
|
code, pre {
|
||||||
|
font-family: Monaco, Andale Mono, Courier New, monospace;
|
||||||
|
}
|
||||||
|
code {
|
||||||
|
color: #ff4a14;
|
||||||
|
padding: 1px 3px;
|
||||||
|
font-size: 12px;
|
||||||
|
-webkit-border-radius: 3px;
|
||||||
|
-moz-border-radius: 3px;
|
||||||
|
border-radius: 3px;
|
||||||
|
}
|
||||||
|
pre {
|
||||||
|
display: block;
|
||||||
|
padding: 14px;
|
||||||
|
margin: 0 0 18px;
|
||||||
|
line-height: 16px;
|
||||||
|
font-size: 11px;
|
||||||
|
border: 1px solid #bf370f;
|
||||||
|
white-space: pre;
|
||||||
|
white-space: pre-wrap;
|
||||||
|
word-wrap: break-word;
|
||||||
|
}
|
||||||
|
pre code {
|
||||||
|
background-color: #282a36;
|
||||||
|
color: #ff4a14;
|
||||||
|
font-size: 11px;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
@media screen and (min-width: 768px) {
|
||||||
|
body {
|
||||||
|
width: 748px;
|
||||||
|
margin:10px auto;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* obsidian.css
|
||||||
|
* Obsidian style
|
||||||
|
* ported by Alexander Marenin (http://github.com/ioncreature)
|
||||||
|
*/
|
||||||
|
|
||||||
|
.hljs {
|
||||||
|
display: block;
|
||||||
|
overflow-x: auto;
|
||||||
|
padding: 0.5em;
|
||||||
|
background: #282b2e;
|
||||||
|
}
|
||||||
|
|
||||||
|
.hljs-keyword,
|
||||||
|
.hljs-selector-tag,
|
||||||
|
.hljs-literal,
|
||||||
|
.hljs-selector-id {
|
||||||
|
color: #93c763;
|
||||||
|
}
|
||||||
|
|
||||||
|
.hljs-number {
|
||||||
|
color: #ffcd22;
|
||||||
|
}
|
||||||
|
|
||||||
|
.hljs {
|
||||||
|
color: #e0e2e4;
|
||||||
|
}
|
||||||
|
|
||||||
|
.hljs-attribute {
|
||||||
|
color: #668bb0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.hljs-code,
|
||||||
|
.hljs-class .hljs-title,
|
||||||
|
.hljs-section {
|
||||||
|
color: white;
|
||||||
|
}
|
||||||
|
|
||||||
|
.hljs-regexp,
|
||||||
|
.hljs-link {
|
||||||
|
color: #d39745;
|
||||||
|
}
|
||||||
|
|
||||||
|
.hljs-meta {
|
||||||
|
color: #557182;
|
||||||
|
}
|
||||||
|
|
||||||
|
.hljs-tag,
|
||||||
|
.hljs-name,
|
||||||
|
.hljs-bullet,
|
||||||
|
.hljs-subst,
|
||||||
|
.hljs-emphasis,
|
||||||
|
.hljs-type,
|
||||||
|
.hljs-built_in,
|
||||||
|
.hljs-selector-attr,
|
||||||
|
.hljs-selector-pseudo,
|
||||||
|
.hljs-addition,
|
||||||
|
.hljs-variable,
|
||||||
|
.hljs-template-tag,
|
||||||
|
.hljs-template-variable {
|
||||||
|
color: #8cbbad;
|
||||||
|
}
|
||||||
|
|
||||||
|
.hljs-string,
|
||||||
|
.hljs-symbol {
|
||||||
|
color: #ec7600;
|
||||||
|
}
|
||||||
|
|
||||||
|
.hljs-comment,
|
||||||
|
.hljs-quote,
|
||||||
|
.hljs-deletion {
|
||||||
|
color: #818e96;
|
||||||
|
}
|
||||||
|
|
||||||
|
.hljs-selector-class {
|
||||||
|
color: #A082BD
|
||||||
|
}
|
||||||
|
|
||||||
|
.hljs-keyword,
|
||||||
|
.hljs-selector-tag,
|
||||||
|
.hljs-literal,
|
||||||
|
.hljs-doctag,
|
||||||
|
.hljs-title,
|
||||||
|
.hljs-section,
|
||||||
|
.hljs-type,
|
||||||
|
.hljs-name,
|
||||||
|
.hljs-strong {
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
@@ -11,6 +11,7 @@
|
|||||||
'require rpc';
|
'require rpc';
|
||||||
'require uci';
|
'require uci';
|
||||||
'require ui';
|
'require ui';
|
||||||
|
'require validation';
|
||||||
|
|
||||||
return baseclass.extend({
|
return baseclass.extend({
|
||||||
dns_strategy: {
|
dns_strategy: {
|
||||||
@@ -183,7 +184,8 @@ return baseclass.extend({
|
|||||||
).join('');
|
).join('');
|
||||||
case 'uuid':
|
case 'uuid':
|
||||||
/* Thanks to https://stackoverflow.com/a/2117523 */
|
/* Thanks to https://stackoverflow.com/a/2117523 */
|
||||||
return ([1e7]+-1e3+-4e3+-8e3+-1e11).replace(/[018]/g, (c) =>
|
return (location.protocol === 'https:') ? crypto.randomUUID() :
|
||||||
|
([1e7]+-1e3+-4e3+-8e3+-1e11).replace(/[018]/g, (c) =>
|
||||||
(c ^ crypto.getRandomValues(new Uint8Array(1))[0] & 15 >> c / 4).toString(16)
|
(c ^ crypto.getRandomValues(new Uint8Array(1))[0] & 15 >> c / 4).toString(16)
|
||||||
);
|
);
|
||||||
default:
|
default:
|
||||||
@@ -206,19 +208,55 @@ return baseclass.extend({
|
|||||||
return label ? title + ' » ' + label : addtitle;
|
return label ? title + ' » ' + label : addtitle;
|
||||||
},
|
},
|
||||||
|
|
||||||
renderSectionAdd: function(section, extra_class) {
|
loadSubscriptionInfo: function(uciconfig) {
|
||||||
|
var subs = {};
|
||||||
|
for (var suburl of (uci.get(uciconfig, 'subscription', 'subscription_url') || [])) {
|
||||||
|
const url = new URL(suburl);
|
||||||
|
const urlhash = this.calcStringMD5(suburl.replace(/#.*$/, ''));
|
||||||
|
subs[urlhash] = {
|
||||||
|
"url": suburl.replace(/#.*$/, ''),
|
||||||
|
"name": url.hash ? decodeURIComponent(url.hash.slice(1)) : url.hostname
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return subs;
|
||||||
|
},
|
||||||
|
|
||||||
|
loadNodesList: function(uciconfig, subinfo) {
|
||||||
|
var nodelist = {};
|
||||||
|
uci.sections(uciconfig, 'node', (res) => {
|
||||||
|
var nodeaddr = ((res.type === 'direct') ? res.override_address : res.address) || '',
|
||||||
|
nodeport = ((res.type === 'direct') ? res.override_port : res.port) || '';
|
||||||
|
|
||||||
|
nodelist[res['.name']] =
|
||||||
|
String.format('%s [%s] %s', res.grouphash ?
|
||||||
|
String.format('[%s]', subinfo[res.grouphash]?.name || res.grouphash) : '',
|
||||||
|
res.type, res.label || ((validation.parseIPv6(nodeaddr) ?
|
||||||
|
String.format('[%s]', nodeaddr) : nodeaddr) + ':' + nodeport));
|
||||||
|
});
|
||||||
|
return nodelist;
|
||||||
|
},
|
||||||
|
|
||||||
|
renderSectionAdd: function(section, prefmt, LC, extra_class) {
|
||||||
var el = form.GridSection.prototype.renderSectionAdd.apply(section, [ extra_class ]),
|
var el = form.GridSection.prototype.renderSectionAdd.apply(section, [ extra_class ]),
|
||||||
nameEl = el.querySelector('.cbi-section-create-name');
|
nameEl = el.querySelector('.cbi-section-create-name');
|
||||||
ui.addValidator(nameEl, 'uciname', true, (v) => {
|
ui.addValidator(nameEl, 'uciname', true, (v) => {
|
||||||
var button = el.querySelector('.cbi-section-create > .cbi-button-add');
|
var button = el.querySelector('.cbi-section-create > .cbi-button-add');
|
||||||
var uciconfig = section.uciconfig || section.map.config;
|
var uciconfig = section.uciconfig || section.map.config;
|
||||||
|
var prefix = prefmt?.prefix ? prefmt.prefix : '',
|
||||||
|
suffix = prefmt?.suffix ? prefmt.suffix : '';
|
||||||
|
|
||||||
if (!v) {
|
if (!v) {
|
||||||
button.disabled = true;
|
button.disabled = true;
|
||||||
return true;
|
return true;
|
||||||
|
} else if (LC && (v !== v.toLowerCase())) {
|
||||||
|
button.disabled = true;
|
||||||
|
return _('Expecting: %s').format(_('Lowercase only'));
|
||||||
} else if (uci.get(uciconfig, v)) {
|
} else if (uci.get(uciconfig, v)) {
|
||||||
button.disabled = true;
|
button.disabled = true;
|
||||||
return _('Expecting: %s').format(_('unique UCI identifier'));
|
return _('Expecting: %s').format(_('unique UCI identifier'));
|
||||||
|
} else if (uci.get(uciconfig, prefix + v + suffix)) {
|
||||||
|
button.disabled = true;
|
||||||
|
return _('Expecting: %s').format(_('unique label'));
|
||||||
} else {
|
} else {
|
||||||
button.disabled = null;
|
button.disabled = null;
|
||||||
return true;
|
return true;
|
||||||
@@ -228,6 +266,13 @@ return baseclass.extend({
|
|||||||
return el;
|
return el;
|
||||||
},
|
},
|
||||||
|
|
||||||
|
handleAdd: function(section, prefmt, ev, name) {
|
||||||
|
var prefix = prefmt?.prefix ? prefmt.prefix : '',
|
||||||
|
suffix = prefmt?.suffix ? prefmt.suffix : '';
|
||||||
|
|
||||||
|
return form.GridSection.prototype.handleAdd.apply(section, [ ev, prefix + name + suffix ]);
|
||||||
|
},
|
||||||
|
|
||||||
uploadCertificate: function(option, type, filename, ev) {
|
uploadCertificate: function(option, type, filename, ev) {
|
||||||
var callWriteCertificate = rpc.declare({
|
var callWriteCertificate = rpc.declare({
|
||||||
object: 'luci.homeproxy',
|
object: 'luci.homeproxy',
|
||||||
|
@@ -6,10 +6,12 @@
|
|||||||
|
|
||||||
'use strict';
|
'use strict';
|
||||||
'require form';
|
'require form';
|
||||||
|
'require fs';
|
||||||
'require network';
|
'require network';
|
||||||
'require poll';
|
'require poll';
|
||||||
'require rpc';
|
'require rpc';
|
||||||
'require uci';
|
'require uci';
|
||||||
|
'require ui';
|
||||||
'require validation';
|
'require validation';
|
||||||
'require view';
|
'require view';
|
||||||
|
|
||||||
@@ -38,6 +40,20 @@ var callWriteDomainList = rpc.declare({
|
|||||||
expect: { '': {} }
|
expect: { '': {} }
|
||||||
});
|
});
|
||||||
|
|
||||||
|
var callGetAPISecret = rpc.declare({
|
||||||
|
object: 'luci.homeproxy',
|
||||||
|
method: 'clash_api_get_secret',
|
||||||
|
params: [],
|
||||||
|
expect: { '': {} }
|
||||||
|
});
|
||||||
|
|
||||||
|
var callResVersion = rpc.declare({
|
||||||
|
object: 'luci.homeproxy',
|
||||||
|
method: 'resources_get_version',
|
||||||
|
params: ['type', 'repo'],
|
||||||
|
expect: { '': {} }
|
||||||
|
});
|
||||||
|
|
||||||
function getServiceStatus() {
|
function getServiceStatus() {
|
||||||
return L.resolveDefault(callServiceList('homeproxy'), {}).then((res) => {
|
return L.resolveDefault(callServiceList('homeproxy'), {}).then((res) => {
|
||||||
var isRunning = false;
|
var isRunning = false;
|
||||||
@@ -48,12 +64,35 @@ function getServiceStatus() {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
function renderStatus(isRunning) {
|
function renderStatus(isRunning, args) {
|
||||||
|
let nginx = args.features.hp_has_nginx && args.nginx_support === '1';
|
||||||
var spanTemp = '<em><span style="color:%s"><strong>%s %s</strong></span></em>';
|
var spanTemp = '<em><span style="color:%s"><strong>%s %s</strong></span></em>';
|
||||||
|
var urlParams;
|
||||||
var renderHTML;
|
var renderHTML;
|
||||||
if (isRunning)
|
if (isRunning) {
|
||||||
renderHTML = spanTemp.format('green', _('HomeProxy'), _('RUNNING'));
|
if (args.set_dash_backend) {
|
||||||
else
|
switch (args.dashboard_repo) {
|
||||||
|
case 'metacubex/metacubexd':
|
||||||
|
urlParams = String.format('#/setup?hostname=%s&port=%s&secret=%s', window.location.hostname, args.api_port, args.api_secret);
|
||||||
|
break;
|
||||||
|
case 'metacubex/yacd-meta':
|
||||||
|
urlParams = String.format('?hostname=%s&port=%s&secret=%s', window.location.hostname, args.api_port, args.api_secret);
|
||||||
|
break;
|
||||||
|
case 'metacubex/razord-meta':
|
||||||
|
urlParams = String.format('?host=%s&port=%s&secret=%s', window.location.hostname, args.api_port, args.api_secret);
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (args.dashboard_repo) {
|
||||||
|
var button = String.format(' <a class="btn cbi-button-apply" href="%s" target="_blank" rel="noreferrer noopener">%s</a>',
|
||||||
|
(nginx ? 'https:' : 'http:') + '//' + window.location.hostname +
|
||||||
|
(nginx ? '/homeproxy' : ':' + args.api_port) + '/ui/' + (urlParams || ''),
|
||||||
|
_('Open Clash Dashboard'));
|
||||||
|
}
|
||||||
|
renderHTML = spanTemp.format('green', _('HomeProxy'), _('RUNNING')) + (button || '');
|
||||||
|
} else
|
||||||
renderHTML = spanTemp.format('red', _('HomeProxy'), _('NOT RUNNING'));
|
renderHTML = spanTemp.format('red', _('HomeProxy'), _('NOT RUNNING'));
|
||||||
|
|
||||||
return renderHTML;
|
return renderHTML;
|
||||||
@@ -96,7 +135,8 @@ return view.extend({
|
|||||||
return Promise.all([
|
return Promise.all([
|
||||||
uci.load('homeproxy'),
|
uci.load('homeproxy'),
|
||||||
hp.getBuiltinFeatures(),
|
hp.getBuiltinFeatures(),
|
||||||
network.getHostHints()
|
network.getHostHints(),
|
||||||
|
L.resolveDefault(callGetAPISecret(), {})
|
||||||
]);
|
]);
|
||||||
},
|
},
|
||||||
|
|
||||||
@@ -104,18 +144,12 @@ return view.extend({
     var m, s, o, ss, so;
 
     var features = data[1],
-        hosts = data[2]?.hosts;
-
-    /* Cache all configured proxy nodes, they will be called multiple times */
-    var proxy_nodes = {};
-    uci.sections(data[0], 'node', (res) => {
-        var nodeaddr = ((res.type === 'direct') ? res.override_address : res.address) || '',
-            nodeport = ((res.type === 'direct') ? res.override_port : res.port) || '';
-
-        proxy_nodes[res['.name']] =
-            String.format('[%s] %s', res.type, res.label || ((stubValidator.apply('ip6addr', nodeaddr) ?
-            String.format('[%s]', nodeaddr) : nodeaddr) + ':' + nodeport));
-    });
+        hosts = data[2]?.hosts,
+        api_port = uci.get(data[0], 'experimental', 'clash_api_port'),
+        api_secret = data[3]?.secret || '',
+        nginx_support = uci.get(data[0], 'experimental', 'nginx_support') || '0',
+        dashboard_repo = uci.get(data[0], 'experimental', 'dashboard_repo'),
+        set_dash_backend = uci.get(data[0], 'experimental', 'set_dash_backend');
 
     m = new form.Map('homeproxy', _('HomeProxy'),
         _('The modern ImmortalWrt proxy platform for ARM64/AMD64.'));
@@ -125,7 +159,7 @@ return view.extend({
     poll.add(function () {
         return L.resolveDefault(getServiceStatus()).then((res) => {
             var view = document.getElementById('service_status');
-            view.innerHTML = renderStatus(res);
+            view.innerHTML = renderStatus(res, {features, nginx_support, dashboard_repo, set_dash_backend, api_port, api_secret});
         });
     });
 
@@ -134,14 +168,20 @@ return view.extend({
         ]);
     }
 
+    /* Cache all subscription info, they will be called multiple times */
+    var subs_info = hp.loadSubscriptionInfo(data[0]);
+
+    /* Cache all configured proxy nodes, they will be called multiple times */
+    var proxy_nodes = hp.loadNodesList(data[0], subs_info);
+
     s = m.section(form.NamedSection, 'config', 'homeproxy');
 
     s.tab('routing', _('Routing Settings'));
 
     o = s.taboption('routing', form.ListValue, 'main_node', _('Main node'));
     o.value('nil', _('Disable'));
-    for (var i in proxy_nodes)
-        o.value(i, proxy_nodes[i]);
+    for (var k in proxy_nodes)
+        o.value(k, proxy_nodes[k]);
     o.default = 'nil';
     o.depends({'routing_mode': 'custom', '!reverse': true});
     o.rmempty = false;
@@ -149,8 +189,8 @@ return view.extend({
     o = s.taboption('routing', form.ListValue, 'main_udp_node', _('Main UDP node'));
     o.value('nil', _('Disable'));
     o.value('same', _('Same as main node'));
-    for (var i in proxy_nodes)
-        o.value(i, proxy_nodes[i]);
+    for (var k in proxy_nodes)
+        o.value(k, proxy_nodes[k]);
     o.default = 'nil';
     o.depends({'routing_mode': /^((?!custom).)+$/, 'proxy_mode': /^((?!redirect$).)+$/});
     o.rmempty = false;
@@ -164,7 +204,7 @@ return view.extend({
     o.value('', '---');
     o.value('223.5.5.5', _('Aliyun Public DNS (223.5.5.5)'));
     o.value('119.29.29.29', _('Tencent Public DNS (119.29.29.29)'));
-    o.value('117.50.10.10', _('ThreatBook Public DNS (117.50.10.10)'));
+    o.value('114.114.114.114', _('Xinfeng Public DNS (114.114.114.114)'));
     o.default = '8.8.8.8';
     o.rmempty = false;
     o.depends({'routing_mode': 'custom', '!reverse': true});
@@ -187,7 +227,7 @@ return view.extend({
     o.value('223.5.5.5', _('Aliyun Public DNS (223.5.5.5)'));
     o.value('210.2.4.8', _('CNNIC Public DNS (210.2.4.8)'));
     o.value('119.29.29.29', _('Tencent Public DNS (119.29.29.29)'));
-    o.value('117.50.10.10', _('ThreatBook Public DNS (117.50.10.10)'));
+    o.value('114.114.114.114', _('Xinfeng Public DNS (114.114.114.114)'));
     o.depends('routing_mode', 'bypass_mainland_china');
     o.validate = function(section_id) {
         if (section_id) {
@@ -234,11 +274,14 @@ return view.extend({
 
     o = s.taboption('routing', form.Value, 'routing_port', _('Routing ports'),
        _('Specify target ports to be proxied. Multiple ports must be separated by commas.'));
-    o.value('', _('All ports'));
+    o.value('all', _('All ports'));
     o.value('common', _('Common ports only (bypass P2P traffic)'));
     o.default = 'common';
+    o.rmempty = false;
     o.validate = function(section_id, value) {
-        if (section_id && value && value !== 'common') {
+        if (section_id && value !== 'all' && value !== 'common') {
+            if (!value)
+                return _('Expecting: %s').format(_('valid port value'));
+
             var ports = [];
             for (var i of value.split(',')) {
@@ -324,11 +367,6 @@ return view.extend({
     so.default = so.disabled;
     so.rmempty = false;
 
-    so = ss.option(form.ListValue, 'domain_strategy', _('Domain strategy'),
-        _('If set, the requested domain name will be resolved to IP before routing.'));
-    for (var i in hp.dns_strategy)
-        so.value(i, hp.dns_strategy[i])
-
     so = ss.option(form.Flag, 'sniff_override', _('Override destination'),
        _('Override the connection destination address with the sniffed domain.'));
     so.default = so.enabled;
@@ -351,6 +389,17 @@ return view.extend({
     }
     so.default = 'nil';
     so.rmempty = false;
+
+    so = ss.option(form.Button, '_reload_client', _('Quick Reload'));
+    so.inputtitle = _('Reload');
+    so.inputstyle = 'apply';
+    so.onclick = function() {
+        return fs.exec('/etc/init.d/homeproxy', ['reload', 'client'])
+            .then((res) => { return window.location = window.location.href.split('#')[0] })
+            .catch((e) => {
+                ui.addNotification(null, E('p', _('Failed to execute "/etc/init.d/homeproxy %s %s" reason: %s').format('reload', 'client', e)));
+            });
+    };
     /* Routing settings end */
 
     /* Routing nodes start */
@@ -365,7 +414,8 @@ return view.extend({
     ss.nodescriptions = true;
     ss.modaltitle = L.bind(hp.loadModalTitle, this, _('Routing node'), _('Add a routing node'), data[0]);
     ss.sectiontitle = L.bind(hp.loadDefaultLabel, this, data[0]);
-    ss.renderSectionAdd = L.bind(hp.renderSectionAdd, this, ss);
+    ss.renderSectionAdd = L.bind(hp.renderSectionAdd, this, ss, {}, true);
+    ss.handleAdd = L.bind(hp.handleAdd, this, ss, {});
 
     so = ss.option(form.Value, 'label', _('Label'));
     so.load = L.bind(hp.loadDefaultLabel, this, data[0]);
@@ -379,13 +429,13 @@ return view.extend({
 
     so = ss.option(form.ListValue, 'node', _('Node'),
        _('Outbound node'));
-    for (var i in proxy_nodes)
-        so.value(i, proxy_nodes[i]);
+    for (var k in proxy_nodes)
+        so.value(k, proxy_nodes[k]);
     so.validate = L.bind(hp.validateUniqueValue, this, data[0], 'routing_node', 'node');
     so.editable = true;
 
     so = ss.option(form.ListValue, 'domain_strategy', _('Domain strategy'),
-        _('If set, the server domain name will be resolved to IP before connecting.<br/>'));
+        _('If set, the server domain name will be resolved to IP before connecting.<br/>dns.strategy will be used if empty.'));
     for (var i in hp.dns_strategy)
         so.value(i, hp.dns_strategy[i]);
     so.modalonly = true;
@@ -435,13 +485,15 @@ return view.extend({
     o.depends('routing_mode', 'custom');
 
     ss = o.subsection;
+    var prefmt = { 'prefix': '', 'suffix': '_host' };
     ss.addremove = true;
     ss.rowcolors = true;
     ss.sortable = true;
     ss.nodescriptions = true;
     ss.modaltitle = L.bind(hp.loadModalTitle, this, _('Routing rule'), _('Add a routing rule'), data[0]);
     ss.sectiontitle = L.bind(hp.loadDefaultLabel, this, data[0]);
-    ss.renderSectionAdd = L.bind(hp.renderSectionAdd, this, ss);
+    ss.renderSectionAdd = L.bind(hp.renderSectionAdd, this, ss, prefmt, false);
+    ss.handleAdd = L.bind(hp.handleAdd, this, ss, prefmt);
 
     ss.tab('field_other', _('Other fields'));
     ss.tab('field_host', _('Host fields'));
@@ -516,7 +568,6 @@ return view.extend({
     so = ss.taboption('field_source_ip', form.Flag, 'source_ip_is_private', _('Private source IP'),
        _('Match private source IP.'));
     so.default = so.disabled;
-    so.rmempty = false;
     so.modalonly = true;
 
     so = ss.taboption('field_host', form.DynamicList, 'ip_cidr', _('IP CIDR'),
@@ -527,7 +578,6 @@ return view.extend({
     so = ss.taboption('field_host', form.Flag, 'ip_is_private', _('Private IP'),
        _('Match private IP.'));
     so.default = so.disabled;
-    so.rmempty = false;
     so.modalonly = true;
 
     so = ss.taboption('field_source_port', form.DynamicList, 'source_port', _('Source port'),
@@ -562,6 +612,14 @@ return view.extend({
        _('Match user name.'));
     so.modalonly = true;
 
+    so = ss.taboption('field_other', form.ListValue, 'clash_mode', _('Clash mode'),
+        _('Match clash mode.'));
+    so.value('', _('None'));
+    so.value('global', _('Global'));
+    so.value('rule', _('Rule'));
+    so.value('direct', _('Direct'));
+    so.modalonly = true;
+
     so = ss.taboption('field_other', form.MultiValue, 'rule_set', _('Rule set'),
        _('Match rule set.'));
     so.load = function(section_id) {
@@ -581,7 +639,6 @@ return view.extend({
     so = ss.taboption('field_other', form.Flag, 'rule_set_ipcidr_match_source', _('Match source IP via rule set'),
        _('Make IP CIDR in rule set used to match the source IP.'));
     so.default = so.disabled;
-    so.rmempty = false;
     so.modalonly = true;
 
     so = ss.taboption('field_other', form.Flag, 'invert', _('Invert'),
@@ -672,13 +729,15 @@ return view.extend({
     o.depends('routing_mode', 'custom');
 
     ss = o.subsection;
+    var prefmt = { 'prefix': 'dns_', 'suffix': '' };
     ss.addremove = true;
     ss.rowcolors = true;
     ss.sortable = true;
     ss.nodescriptions = true;
     ss.modaltitle = L.bind(hp.loadModalTitle, this, _('DNS server'), _('Add a DNS server'), data[0]);
     ss.sectiontitle = L.bind(hp.loadDefaultLabel, this, data[0]);
-    ss.renderSectionAdd = L.bind(hp.renderSectionAdd, this, ss);
+    ss.renderSectionAdd = L.bind(hp.renderSectionAdd, this, ss, prefmt, true);
+    ss.handleAdd = L.bind(hp.handleAdd, this, ss, prefmt);
 
     so = ss.option(form.Value, 'label', _('Label'));
     so.load = L.bind(hp.loadDefaultLabel, this, data[0]);
@@ -727,7 +786,7 @@ return view.extend({
     so.modalonly = true;
 
     so = ss.option(form.ListValue, 'address_strategy', _('Address strategy'),
-        _('The domain strategy for resolving the domain name in the address.'));
+        _('The domain strategy for resolving the domain name in the address. dns.strategy will be used if empty.'));
     for (var i in hp.dns_strategy)
         so.value(i, hp.dns_strategy[i]);
     so.modalonly = true;
@@ -767,13 +826,15 @@ return view.extend({
     o.depends('routing_mode', 'custom');
 
     ss = o.subsection;
+    var prefmt = { 'prefix': '', 'suffix': '_domain' };
     ss.addremove = true;
     ss.rowcolors = true;
     ss.sortable = true;
     ss.nodescriptions = true;
     ss.modaltitle = L.bind(hp.loadModalTitle, this, _('DNS rule'), _('Add a DNS rule'), data[0]);
     ss.sectiontitle = L.bind(hp.loadDefaultLabel, this, data[0]);
-    ss.renderSectionAdd = L.bind(hp.renderSectionAdd, this, ss);
+    ss.renderSectionAdd = L.bind(hp.renderSectionAdd, this, ss, prefmt, false);
+    ss.handleAdd = L.bind(hp.handleAdd, this, ss, prefmt);
 
     ss.tab('field_other', _('Other fields'));
     ss.tab('field_host', _('Host fields'));
@@ -896,6 +957,14 @@ return view.extend({
        _('Match user name.'));
     so.modalonly = true;
 
+    so = ss.taboption('field_other', form.ListValue, 'clash_mode', _('Clash mode'),
+        _('Match clash mode.'));
+    so.value('', _('None'));
+    so.value('global', _('Global'));
+    so.value('rule', _('Rule'));
+    so.value('direct', _('Direct'));
+    so.modalonly = true;
+
     so = ss.taboption('field_other', form.MultiValue, 'rule_set', _('Rule set'),
        _('Match rule set.'));
     so.load = function(section_id) {
@@ -938,6 +1007,13 @@ return view.extend({
 
         return this.super('load', section_id);
     }
+    so.validate = function(section_id, value) {
+        let arr = value.trim().split(' ');
+        if (arr.length > 1 && arr.includes('any-out'))
+            return _('Expecting: %s').format(_('If Any is selected, uncheck others'));
+
+        return true;
+    }
     so.modalonly = true;
 
     so = ss.taboption('field_other', form.ListValue, 'server', _('Server'),
@@ -976,93 +1052,82 @@ return view.extend({
     /* DNS rules end */
     /* Custom routing settings end */
 
-    /* Rule set settings start */
-    s.tab('ruleset', _('Rule set'));
-    o = s.taboption('ruleset', form.SectionValue, '_ruleset', form.GridSection, 'ruleset');
+    /* Clash API settings start */
+    s.tab('clash', _('Clash API settings'));
+    o = s.taboption('clash', form.SectionValue, '_clash', form.NamedSection, 'experimental');
     o.depends('routing_mode', 'custom');
 
     ss = o.subsection;
-    ss.addremove = true;
-    ss.rowcolors = true;
-    ss.sortable = true;
-    ss.nodescriptions = true;
-    ss.modaltitle = L.bind(hp.loadModalTitle, this, _('Rule set'), _('Add a rule set'), data[0]);
-    ss.sectiontitle = L.bind(hp.loadDefaultLabel, this, data[0]);
-    ss.renderSectionAdd = L.bind(hp.renderSectionAdd, this, ss);
+    so = ss.option(form.Flag, 'clash_api_enabled', _('Enable Clash API'));
+    so.default = so.disabled;
 
-    so = ss.option(form.Value, 'label', _('Label'));
-    so.load = L.bind(hp.loadDefaultLabel, this, data[0]);
-    so.validate = L.bind(hp.validateUniqueValue, this, data[0], 'ruleset', 'label');
-    so.modalonly = true;
-
-    so = ss.option(form.Flag, 'enabled', _('Enable'));
-    so.default = so.enabled;
-    so.rmempty = false;
-    so.editable = true;
-
-    so = ss.option(form.ListValue, 'type', _('Type'));
-    so.value('local', _('Local'));
-    so.value('remote', _('Remote'));
-    so.default = 'remote';
-    so.rmempty = false;
-
-    so = ss.option(form.ListValue, 'format', _('Format'));
-    so.value('source', _('Source file'));
-    so.value('binary', _('Binary file'));
-    so.default = 'source';
-    so.rmempty = false;
-
-    so = ss.option(form.Value, 'path', _('Path'));
-    so.datatype = 'file';
-    so.placeholder = '/etc/homeproxy/ruleset/example.json';
-    so.rmempty = false;
-    so.depends('type', 'local');
-    so.modalonly = true;
-
-    so = ss.option(form.Value, 'url', _('Rule set URL'));
-    so.validate = function(section_id, value) {
-        if (section_id) {
-            if (!value)
-                return _('Expecting: %s').format(_('non-empty value'));
-
-            try {
-                var url = new URL(value);
-                if (!url.hostname)
-                    return _('Expecting: %s').format(_('valid URL'));
-            }
-            catch(e) {
-                return _('Expecting: %s').format(_('valid URL'));
-            }
-        }
-
-        return true;
+    so = ss.option(form.Flag, 'nginx_support', _('Nginx Support'));
+    so.rmempty = true;
+    if (! features.hp_has_nginx) {
+        so.description = _('To enable this feature you need install <b>luci-nginx</b> and <b>luci-ssl-nginx</b><br/> first');
+        so.readonly = true;
+    }
+    so.write = function(section_id, value) {
+        return uci.set(data[0], section_id, 'nginx_support', features.hp_has_nginx ? value : null);
     }
-    so.rmempty = false;
-    so.depends('type', 'remote');
-    so.modalonly = true;
 
-    so = ss.option(form.ListValue, 'outbound', _('Outbound'),
-        _('Tag of the outbound to download rule set.'));
+    so = ss.option(form.ListValue, 'clash_api_log_level', _('Log level'));
+    so.value('trace', 'Trace');
+    so.value('debug', 'Debug');
+    so.value('info', 'Info');
+    so.value('warn', 'Warning');
+    so.value('error', 'Error');
+    so.value('fatal', 'Fatal');
+    so.value('panic', 'Panic');
+    so.default = 'warn';
+
+    so = ss.option(form.ListValue, 'dashboard_repo', _('Select Clash Dashboard'),
+        _('If the selected dashboard is <code>') + _('Not Installed') + _('</code>.<br/> you will need to check update via <code>') +
+        _('Service Status') + _('</code> » <code>') + _('Clash dashboard version') + _('</code>.'));
     so.load = function(section_id) {
         delete this.keylist;
         delete this.vallist;
 
-        this.value('direct-out', _('Direct'));
-        uci.sections(data[0], 'routing_node', (res) => {
-            if (res.enabled === '1')
-                this.value(res['.name'], res.label);
+        let repos = [
+            ['metacubex/metacubexd', _('metacubexd')],
+            ['metacubex/yacd-meta', _('yacd-meta')],
+            ['metacubex/razord-meta', _('razord-meta')]
+        ];
+
+        this.value('', _('Use Online Dashboard'));
+        repos.forEach((repo) => {
+            callResVersion('clash_dashboard', repo[0]).then((res) => {
+                this.value(repo[0], repo[1] + ' - ' + (res.error ? _('Not Installed') : _('Installed')));
+            });
         });
 
         return this.super('load', section_id);
     }
-    so.default = 'direct-out';
-    so.rmempty = false;
-    so.depends('type', 'remote');
+    so.default = '';
+    if (api_secret) {
+        if (features.hp_has_nginx && nginx_support === '1') {
+            so.description = _('The current API URL is <code>%s</code>')
+                .format('https://' + window.location.hostname + '/homeproxy/');
+        } else {
+            so.description = _('The current API URL is <code>%s</code>')
+                .format('http://' + window.location.hostname + ':' + api_port);
+        }
+    }
 
-    so = ss.option(form.Value, 'update_interval', _('Update interval'),
-        _('Update interval of rule set.<br/><code>1d</code> will be used if empty.'));
-    so.depends('type', 'remote');
-    /* Rule set settings end */
+    so = ss.option(form.Flag, 'set_dash_backend', _('Auto set backend'),
+        _('Auto set backend address for dashboard.'));
+    so.default = so.disabled;
+
+    so = ss.option(form.Value, 'clash_api_port', _('Port'));
+    so.datatype = "and(port, min(1))";
+    so.default = '9090';
+    so.rmempty = false;
+
+    so = ss.option(form.Value, 'clash_api_secret', _('Secret'), _('Automatically generated if empty'));
+    so.password = true;
+    if (api_secret)
+        so.description = _('The current Secret is <code>' + api_secret + '</code>');
+    /* Clash API settings end */
 
     /* ACL settings start */
     s.tab('control', _('Access Control'));
@@ -9,6 +9,7 @@
 'require fs';
 'require uci';
 'require ui';
+'require dom';
 'require view';
 
 'require homeproxy as hp';
@@ -355,7 +356,7 @@ function parseShareLink(uri, features) {
     return config;
 }
 
-function renderNodeSettings(section, data, features, main_node, routing_mode) {
+function renderNodeSettings(section, data, features, main_node, routing_mode, subs_info, proxy_nodes) {
     var s = section, o;
     s.rowcolors = true;
     s.sortable = true;
@@ -408,16 +409,18 @@ function renderNodeSettings(section, data, features, main_node, routing_mode) {
     o.value('wireguard', _('WireGuard'));
     o.value('vless', _('VLESS'));
     o.value('vmess', _('VMess'));
+    o.value('selector', _('Selector'));
+    o.value('urltest', _('URLTest'));
     o.rmempty = false;
 
     o = s.option(form.Value, 'address', _('Address'));
     o.datatype = 'host';
-    o.depends({'type': 'direct', '!reverse': true});
+    o.depends({'type': /^(direct|selector|urltest)$/, '!reverse': true});
     o.rmempty = false;
 
     o = s.option(form.Value, 'port', _('Port'));
     o.datatype = 'port';
-    o.depends({'type': 'direct', '!reverse': true});
+    o.depends({'type': /^(direct|selector|urltest)$/, '!reverse': true});
     o.rmempty = false;
 
     o = s.option(form.Value, 'username', _('Username'));
@@ -462,18 +465,24 @@ function renderNodeSettings(section, data, features, main_node, routing_mode) {
        _('Override the connection destination address.'));
     o.datatype = 'host';
     o.depends('type', 'direct');
+    o.modalonly = true;
 
     o = s.option(form.Value, 'override_port', _('Override port'),
        _('Override the connection destination port.'));
     o.datatype = 'port';
     o.depends('type', 'direct');
+    o.modalonly = true;
 
-    o = s.option(form.ListValue, 'proxy_protocol', _('Proxy protocol'),
+    o = s.option(form.Flag, 'proxy_protocol', _('Proxy protocol'),
        _('Write proxy protocol in the connection header.'));
-    o.value('', _('Disable'));
+    o.depends('type', 'direct');
+    o.modalonly = true;
+
+    o = s.option(form.ListValue, 'proxy_protocol_version', _('Proxy protocol version'));
     o.value('1', _('v1'));
     o.value('2', _('v2'));
-    o.depends('type', 'direct');
+    o.default = '2';
+    o.depends('proxy_protocol', '1');
     o.modalonly = true;
 
     /* Hysteria (2) config start */
@@ -707,6 +716,89 @@ function renderNodeSettings(section, data, features, main_node, routing_mode) {
     o.modalonly = true;
     /* VMess config end */
 
+    /* Selector config start */
+    o = s.option(form.MultiValue, 'group', _('Subscription Groups'),
+        _('List of subscription groups.'));
+    o.value('', _('-- Please choose --'));
+    for (var key in subs_info) {
+        let title = subs_info[key].name;
+        o.value(key, _('Sub (%s)').format(title));
+    }
+    o.depends('type', 'selector');
+    o.depends('type', 'urltest');
+    o.modalonly = true;
+
+    o = s.option(form.MultiValue, 'order', _('Outbounds'),
+        _('List of outbound tags.'));
+    o.value('direct-out', _('Direct'));
+    o.value('block-out', _('Block'));
+    for (var key in proxy_nodes)
+        o.value(key, proxy_nodes[key]);
+    o.depends({'group': /^$/, 'type': /^(selector|urltest)$/});
+    o.modalonly = true;
+
+    o = s.option(form.Value, 'default_selected', _('Default Outbound'),
+        _('The default outbound tag. The first outbound will be used if empty.'));
+    o.value('', _('Default'));
+    o.value('direct-out', _('Direct'));
+    o.value('block-out', _('Block'));
+    for (var key in proxy_nodes)
+        o.value(key, proxy_nodes[key]);
+    o.default = '';
+    o.depends({'group': /^$/, 'type': 'selector'});
+    o.modalonly = true;
+
+    o = s.option(form.ListValue, 'filter_nodes', _('Filter nodes'),
+        _('Drop/keep specific nodes from outbounds.'));
+    o.value('', _('Disable'));
+    o.value('blacklist', _('Blacklist mode'));
+    o.value('whitelist', _('Whitelist mode'));
+    o.default = '';
+    o.depends('type', 'selector');
+    o.depends('type', 'urltest');
+    o.modalonly = true;
+
+    o = s.option(form.DynamicList, 'filter_keywords', _('Filter keywords'),
+        _('Drop/keep nodes that contain the specific keywords. <a target="_blank" href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions">Regex</a> is supported.'));
+    o.depends({'filter_nodes': '', '!reverse': true});
+    o.modalonly = true;
+    /* Selector config end */
+
+    /* URLTest config start */
+    o = s.option(form.Value, 'test_url', _('Test URL'),
+        _('The URL to test. https://www.gstatic.com/generate_204 will be used if empty.'));
+    o.value('', _('Default'));
+    o.default = 'http://cp.cloudflare.com/';
+    o.depends('type', 'urltest');
+    o.modalonly = true;
+
+    o = s.option(form.Value, 'interval', _('Interval'),
+        _('The test interval. <code>3m</code> will be used if empty.'));
+    o.value('', _('Default'));
+    o.default = '10m';
+    o.depends('type', 'urltest');
+    o.modalonly = true;
+
+    o = s.option(form.Value, 'tolerance', _('Tolerance'),
+        _('The test tolerance in milliseconds. 50 will be used if empty.'));
+    o.datatype = 'uinteger';
+    o.depends('type', 'urltest');
+    o.modalonly = true;
+
+    o = s.option(form.Value, 'idle_timeout', _('Idle timeout'),
+        _('The idle timeout. <code>30m</code> will be used if empty.'));
+    o.default = '30m';
+    o.depends('type', 'urltest');
+    o.modalonly = true;
+
+    o = s.option(form.Flag, 'interrupt_exist_connections', _('Interrupt existing connections'),
+        _('Interrupt existing connections when the selected outbound has changed.'));
+    o.default = o.disabled;
+    o.depends('type', 'selector');
+    o.depends('type', 'urltest');
+    o.modalonly = true;
+    /* URLTest config end */
+
     /* Transport config start */
     o = s.option(form.ListValue, 'transport', _('Transport'),
        _('No TCP transport, plain HTTP is merged into the HTTP transport.'));
@@ -1109,15 +1201,18 @@ function renderNodeSettings(section, data, features, main_node, routing_mode) {
     /* Extra settings start */
     o = s.option(form.Flag, 'tcp_fast_open', _('TCP fast open'));
     o.default = o.disabled;
+    o.depends({'type': /^(selector|urltest)$/, '!reverse': true});
     o.modalonly = true;
 
     o = s.option(form.Flag, 'tcp_multi_path', _('MultiPath TCP'));
     o.default = o.disabled;
+    o.depends({'type': /^(selector|urltest)$/, '!reverse': true});
     o.modalonly = true;
 
     o = s.option(form.Flag, 'udp_fragment', _('UDP Fragment'),
        _('Enable UDP fragmentation.'));
     o.default = o.disabled;
+    o.depends({'type': /^(selector|urltest)$/, '!reverse': true});
     o.modalonly = true;
 
     o = s.option(form.Flag, 'udp_over_tcp', _('UDP over TCP'),
@@ -1152,31 +1247,25 @@ return view.extend({
     var routing_mode = uci.get(data[0], 'config', 'routing_mode');
     var features = data[1];
 
-    /* Cache subscription information, it will be called multiple times */
-    var subinfo = [];
-    for (var suburl of (uci.get(data[0], 'subscription', 'subscription_url') || [])) {
-        const url = new URL(suburl);
-        const urlhash = hp.calcStringMD5(suburl.replace(/#.*$/, ''));
-        const title = url.hash ? decodeURIComponent(url.hash.slice(1)) : url.hostname;
-        subinfo.push({ 'hash': urlhash, 'title': title });
-    }
-
     m = new form.Map('homeproxy', _('Edit nodes'));
 
+    /* Cache all subscription info, they will be called multiple times */
+    var subs_info = hp.loadSubscriptionInfo(data[0]);
+
+    /* Cache all configured proxy nodes, they will be called multiple times */
+    var proxy_nodes = hp.loadNodesList(data[0], subs_info);
+
     s = m.section(form.NamedSection, 'subscription', 'homeproxy');
 
     /* Node settings start */
     /* User nodes start */
     s.tab('node', _('Nodes'));
 
     o = s.taboption('node', form.SectionValue, '_node', form.GridSection, 'node');
-    ss = renderNodeSettings(o.subsection, data, features, main_node, routing_mode);
+    ss = renderNodeSettings(o.subsection, data, features, main_node, routing_mode, subs_info, proxy_nodes);
     ss.addremove = true;
     ss.filter = function(section_id) {
-        for (var info of subinfo)
-            if (info.hash === uci.get(data[0], section_id, 'grouphash'))
-                return false;
-
-        return true;
+        return uci.get(data[0], section_id, 'grouphash') ? false : true;
     }
     /* Import subscription links start */
     /* Thanks to luci-app-shadowsocks-libev */
@@ -1239,13 +1328,31 @@ return view.extend({
             ])
         ])
     }
-    ss.renderSectionAdd = function(/* ... */) {
+    ss.renderSectionAdd = function(extra_class) {
         var el = form.GridSection.prototype.renderSectionAdd.apply(this, arguments),
+            selectEl = E('select', {
+                class: 'cbi-input-select',
+                change: L.bind(function(section, ev) {
+                    var el = dom.parent(ev.target, '.cbi-section-create'),
+                        button = el.querySelector('.cbi-section-create > .cbi-button-add'),
+                        inputname = el.querySelector('.cbi-section-create-name').value || '';
+                    var uciconfig = section.uciconfig || section.map.config;
+
+                    button.toggleAttribute('disabled',
+                        !inputname ||
+                        uci.get(uciconfig, inputname) ||
+                        uci.get(uciconfig, ev.target.value + inputname));
+                }, this, ss)
+            }, [
+                E('option', { value: 'node_' }, _('node')),
+                E('option', { value: 'sub_' }, _('sub'))
+            ]),
             nameEl = el.querySelector('.cbi-section-create-name');
 
         ui.addValidator(nameEl, 'uciname', true, (v) => {
             var button = el.querySelector('.cbi-section-create > .cbi-button-add');
             var uciconfig = this.uciconfig || this.map.config;
+            var prefix = el.querySelector('.cbi-input-select').value;
 
             if (!v) {
                 button.disabled = true;
@@ -1253,12 +1360,17 @@ return view.extend({
             } else if (uci.get(uciconfig, v)) {
                 button.disabled = true;
                 return _('Expecting: %s').format(_('unique UCI identifier'));
+            } else if (uci.get(uciconfig, prefix + v)) {
+                button.disabled = true;
+                return _('Expecting: %s').format(_('unique label'));
             } else {
                 button.disabled = null;
                 return true;
             }
         }, 'blur', 'keyup');
 
+        el.prepend(E('div', {}, selectEl));
+
         el.appendChild(E('button', {
             'class': 'cbi-button cbi-button-add',
             'title': _('Import share links'),
@@ -1267,16 +1379,26 @@ return view.extend({
 
         return el;
     }
+    ss.handleAdd = function(ev, name) {
+        var selectEl = ev.target.parentElement.firstElementChild.firstElementChild,
+            prefix = selectEl.value;
+
+        return form.GridSection.prototype.handleAdd.apply(this, [ ev, prefix + name ]);
+    }
     /* Import subscription links end */
     /* User nodes end */
 
     /* Subscription nodes start */
-    for (const info of subinfo) {
-        s.tab('sub_' + info.hash, _('Sub (%s)').format(info.title));
-        o = s.taboption('sub_' + info.hash, form.SectionValue, '_sub_' + info.hash, form.GridSection, 'node');
-        ss = renderNodeSettings(o.subsection, data, features, main_node, routing_mode);
+    for (var key in subs_info) {
+        const urlhash = key,
+            title = subs_info[key].name;
+
+        s.tab('sub_' + urlhash, _('Sub (%s)').format(title));
+
+        o = s.taboption('sub_' + urlhash, form.SectionValue, '_sub_' + urlhash, form.GridSection, 'node');
+        ss = renderNodeSettings(o.subsection, data, features, main_node, routing_mode, subs_info, proxy_nodes);
         ss.filter = function(section_id) {
-            return (uci.get(data[0], section_id, 'grouphash') === info.hash);
+            return (uci.get(data[0], section_id, 'grouphash') === urlhash);
         }
     }
     /* Subscription nodes end */
@@ -1286,14 +1408,16 @@ return view.extend({
     s.tab('subscription', _('Subscriptions'));
 
     o = s.taboption('subscription', form.Flag, 'auto_update', _('Auto update'),
-        _('Auto update subscriptions and geodata.'));
+        _('Auto update subscriptions.'));
     o.default = o.disabled;
     o.rmempty = false;
 
-    o = s.taboption('subscription', form.ListValue, 'auto_update_time', _('Update time'));
-    for (var i = 0; i < 24; i++)
-        o.value(i, i + ':00');
-    o.default = '2';
+    o = s.taboption('subscription', form.Value, 'auto_update_expr', _('Cron expression'),
+        _('The default value is 2:00 every day'));
+    o.default = '0 2 * * *';
+    o.placeholder = '0 2 * * *';
+    o.rmempty = false;
+    o.retain = true;
     o.depends('auto_update', '1');
 
     o = s.taboption('subscription', form.Flag, 'update_via_proxy', _('Update via proxy'),
@@ -1303,6 +1427,7 @@ return view.extend({
 
     o = s.taboption('subscription', form.DynamicList, 'subscription_url', _('Subscription URL-s'),
        _('Support Hysteria, Shadowsocks, Trojan, v2rayN (VMess), and XTLS (VLESS) online configuration delivery standard.'));
+    o.placeholder = 'https://sub_url#sub_name';
     o.validate = function(section_id, value) {
         if (section_id && value) {
             try {
@@ -1320,16 +1445,15 @@ return view.extend({
 
     o = s.taboption('subscription', form.ListValue, 'filter_nodes', _('Filter nodes'),
        _('Drop/keep specific nodes from subscriptions.'));
-    o.value('disabled', _('Disable'));
+    o.value('', _('Disable'));
     o.value('blacklist', _('Blacklist mode'));
     o.value('whitelist', _('Whitelist mode'));
-    o.default = 'disabled';
-    o.rmempty = false;
+    o.default = '';
 
     o = s.taboption('subscription', form.DynamicList, 'filter_keywords', _('Filter keywords'),
        _('Drop/keep nodes that contain the specific keywords. <a target="_blank" href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions">Regex</a> is supported.'));
-    o.depends({'filter_nodes': 'disabled', '!reverse': true});
-    o.rmempty = false;
+    o.depends({'filter_nodes': '', '!reverse': true});
+    o.retain = true;
 
     o = s.taboption('subscription', form.Flag, 'allow_insecure', _('Allow insecure'),
        _('Allow insecure connection by default when add nodes from subscriptions.') +
@@ -0,0 +1,277 @@
+/*
+ * SPDX-License-Identifier: GPL-3.0-only
+ *
+ * Copyright (C) 2023 ImmortalWrt.org
+ */
+
+'use strict';
+'require form';
+'require fs';
+'require uci';
+'require ui';
+'require view';
+
+'require homeproxy as hp';
+
+const docdata = 'base64,' + 'cmxzdHBsYWNlaG9sZGVy'
+
+function parseRulesetLink(uri) {
+    var config,
+        filefmt = new RegExp(/^(json|srs)$/),
+        unuciname = new RegExp(/[^a-zA-Z0-9_]+/, "g");
+
+    uri = uri.split('://');
+    if (uri[0] && uri[1]) {
+        switch (uri[0]) {
+        case 'http':
+        case 'https':
+            var url = new URL('http://' + uri[1]);
+            var file = url.searchParams.get('file');
+            var rawquery = url.searchParams.get('rawquery');
+            var name = decodeURIComponent(url.pathname.split('/').pop())
+                .replace(/[\s\.-]/g, '_').replace(unuciname, '');
+
+            if (filefmt.test(file)) {
+                var fullpath = (url.username ? url.username + '@' : '') + url.host + url.pathname + (rawquery ? '?' + decodeURIComponent(rawquery) : '');
+                config = {
+                    label: url.hash ? decodeURIComponent(url.hash.slice(1)) : name ? name : null,
+                    type: 'remote',
+                    format: file.match(/^json$/) ? 'source' : file.match(/^srs$/) ? 'binary' : 'unknown',
+                    url: String.format('%s://%s', uri[0], fullpath),
+                    href: String.format('http://%s', fullpath)
+                };
+            }
+
+            break;
+        case 'file':
+            var url = new URL('file://' + uri[1]);
+            var file = url.searchParams.get('file');
+            var name = decodeURIComponent(url.pathname.split('/').pop())
+                .replace(/[\s\.-]/g, '_').replace(unuciname, '');
+
+            if (filefmt.test(file)) {
+                config = {
+                    label: url.hash ? decodeURIComponent(url.hash.slice(1)) : name ? name : null,
+                    type: 'local',
+                    format: file.match(/^json$/) ? 'source' : file.match(/^srs$/) ? 'binary' : 'unknown',
+                    path: url.pathname,
+                    href: String.format('file://%s%s', url.host, url.pathname)
+                };
+            }
+
+            break;
+        }
+    }
+
+    if (config) {
+        if (!config.type || !config.href)
+            return null;
+        else if (!config.label)
+            config.label = hp.calcStringMD5(config.href);
+    }
+
+    return config;
+}
+
+return view.extend({
+    load: function() {
+        return Promise.all([
+            uci.load('homeproxy')
+        ]);
+    },
+
+    render: function(data) {
+        var m, s, o;
+
+        m = new form.Map('homeproxy', _('Edit ruleset'));
+
+        /* Rule set settings start */
+        var prefix = 'rule_';
+        s = m.section(form.GridSection, 'ruleset');
+        s.addremove = true;
+        s.rowcolors = true;
+        s.sortable = true;
+        s.nodescriptions = true;
+        s.modaltitle = L.bind(hp.loadModalTitle, this, _('Rule set'), _('Add a rule set'), data[0]);
+        s.sectiontitle = L.bind(hp.loadDefaultLabel, this, data[0]);
+        /* Import rule-set links start */
+        s.handleLinkImport = function() {
+            var textarea = new ui.Textarea('', {
+                'placeholder': 'http(s)://github.com/sagernet/sing-geoip/raw/rule-set/geoip-hk.srs?file=srs&rawquery=good%3Djob#GeoIP-HK\n' +
+                    'file:///etc/homeproxy/ruleset/example.json?file=json#Example%20file\n'
+            });
+            ui.showModal(_('Import rule-set links'), [
+                E('p', _('Supports rule-set links of type: <code>local, remote</code> and format: <code>source, binary</code>.</br>') +
+                    _('Please refer to <a href="%s" target="_blank">%s</a> for link format standards.')
+                    .format('data:text/html;' + docdata, _('Ruleset-URI-Scheme'))),
+                textarea.render(),
+                E('div', { class: 'right' }, [
+                    E('button', {
+                        class: 'btn',
+                        click: ui.hideModal
+                    }, [ _('Cancel') ]),
+                    '',
+                    E('button', {
+                        class: 'btn cbi-button-action',
+                        click: ui.createHandlerFn(this, function() {
+                            var input_links = textarea.getValue().trim().split('\n');
+                            if (input_links && input_links[0]) {
+                                /* Remove duplicate lines */
+                                input_links = input_links.reduce((pre, cur) =>
+                                    (!pre.includes(cur) && pre.push(cur), pre), []);
+
+                                var imported_ruleset = 0;
+                                input_links.forEach((l) => {
+                                    var config = parseRulesetLink(l);
+                                    if (config) {
+                                        var hrefHash = hp.calcStringMD5(config.href);
+                                        config.href = null;
+                                        var sid = uci.add(data[0], 'ruleset', hrefHash);
+                                        Object.keys(config).forEach((k) => {
+                                            uci.set(data[0], sid, k, config[k]);
+                                        });
+                                        imported_ruleset++;
+                                    }
+                                });
+
+                                if (imported_ruleset === 0)
+                                    ui.addNotification(null, E('p', _('No valid rule-set link found.')));
+                                else
+                                    ui.addNotification(null, E('p', _('Successfully imported %s rule-set of total %s.').format(
+                                        imported_ruleset, input_links.length)));
+
+                                return uci.save()
+                                    .then(L.bind(this.map.load, this.map))
+                                    .then(L.bind(this.map.reset, this.map))
+                                    .then(L.ui.hideModal)
+                                    .catch(function() {});
+                            } else {
+                                return ui.hideModal();
+                            }
+                        })
+                    }, [ _('Import') ])
+                ])
+            ])
+        }
+        s.renderSectionAdd = function(extra_class) {
+            var el = form.GridSection.prototype.renderSectionAdd.apply(this, arguments),
+                nameEl = el.querySelector('.cbi-section-create-name');
+
+            ui.addValidator(nameEl, 'uciname', true, (v) => {
+                var button = el.querySelector('.cbi-section-create > .cbi-button-add');
+                var uciconfig = this.uciconfig || this.map.config;
+
+                if (!v) {
+                    button.disabled = true;
+                    return true;
+                } else if (uci.get(uciconfig, v)) {
+                    button.disabled = true;
+                    return _('Expecting: %s').format(_('unique UCI identifier'));
+                } else if (uci.get(uciconfig, prefix + v)) {
+                    button.disabled = true;
+                    return _('Expecting: %s').format(_('unique label'));
+                } else {
+                    button.disabled = null;
+                    return true;
+                }
+            }, 'blur', 'keyup');
+
+            el.appendChild(E('button', {
+                'class': 'cbi-button cbi-button-add',
+                'title': _('Import rule-set links'),
+                'click': ui.createHandlerFn(this, 'handleLinkImport')
+            }, [ _('Import rule-set links') ]));
+
+            return el;
+        }
+        s.handleAdd = function(ev, name) {
+            return form.GridSection.prototype.handleAdd.apply(this, [ ev, prefix + name ]);
+        }
+        /* Import rule-set links end */
+
+        o = s.option(form.Value, 'label', _('Label'));
+        o.load = L.bind(hp.loadDefaultLabel, this, data[0]);
+        o.validate = L.bind(hp.validateUniqueValue, this, data[0], 'ruleset', 'label');
+        o.modalonly = true;
+
+        o = s.option(form.Flag, 'enabled', _('Enable'));
+        o.default = o.enabled;
+        o.rmempty = false;
+        o.editable = true;
+
+        o = s.option(form.ListValue, 'type', _('Type'));
+        o.value('local', _('Local'));
+        o.value('remote', _('Remote'));
+        o.default = 'remote';
+        o.rmempty = false;
+
+        o = s.option(form.ListValue, 'format', _('Format'));
+        o.value('source', _('Source file'));
+        o.value('binary', _('Binary file'));
+        o.default = 'source';
+        o.rmempty = false;
+
+        o = s.option(form.Value, 'path', _('Path'));
+        o.datatype = 'file';
+        o.placeholder = '/etc/homeproxy/ruleset/example.json';
+        o.rmempty = false;
+        o.depends('type', 'local');
+        o.modalonly = true;
+
+        o = s.option(form.Value, 'url', _('Rule set URL'));
+        o.validate = function(section_id, value) {
+            if (section_id) {
+                if (!value)
+                    return _('Expecting: %s').format(_('non-empty value'));
+
+                try {
+                    var url = new URL(value);
+                    if (!url.hostname)
+                        return _('Expecting: %s').format(_('valid URL'));
+                }
+                catch(e) {
+                    return _('Expecting: %s').format(_('valid URL'));
+                }
+            }
+
+            return true;
+        }
+        o.rmempty = false;
+        o.depends('type', 'remote');
+        o.modalonly = true;
+
+        o = s.option(form.ListValue, 'outbound', _('Outbound'),
+            _('Tag of the outbound to download rule set.'));
+        o.load = function(section_id) {
+            delete this.keylist;
+            delete this.vallist;
+
+            this.value('direct-out', _('Direct'));
+            uci.sections(data[0], 'routing_node', (res) => {
+                if (res.enabled === '1')
+                    this.value(res['.name'], res.label);
+            });
+
+            return this.super('load', section_id);
+        }
+        o.default = 'direct-out';
+        o.rmempty = false;
+        //o.editable = true;
+        o.textvalue = function(section_id) {
+            var cval = this.cfgvalue(section_id) || this.default;
+            var remote = L.bind(function() {
+                let cval = this.cfgvalue(section_id) || this.default;
+                return (cval === 'remote') ? true : false;
+            }, s.getOption('type'))
+            return remote() ? cval : _('none');
+        };
+        o.depends('type', 'remote');
+
+        o = s.option(form.Value, 'update_interval', _('Update interval'),
+            _('Update interval of rule set.<br/><code>1d</code> will be used if empty.'));
+        o.depends('type', 'remote');
+        /* Rule set settings end */
+
+        return m.render();
+    }
+});
@@ -6,6 +6,7 @@
 
 'use strict';
 'require form';
+'require fs';
 'require poll';
 'require rpc';
 'require uci';
@@ -54,6 +55,7 @@ function handleGenKey(option) {
     required_method = this.section.getOption('shadowsocks_encrypt_method')?.formvalue(section_id);
 
     switch (required_method) {
+    /* AEAD */
     case 'aes-128-gcm':
     case '2022-blake3-aes-128-gcm':
         password = hp.generateRand('base64', 16);
@@ -68,12 +70,15 @@ function handleGenKey(option) {
|
|||||||
case '2022-blake3-chacha20-poly1305':
|
case '2022-blake3-chacha20-poly1305':
|
||||||
password = hp.generateRand('base64', 32);
|
password = hp.generateRand('base64', 32);
|
||||||
break;
|
break;
|
||||||
|
/* NONE */
|
||||||
case 'none':
|
case 'none':
|
||||||
password = '';
|
password = '';
|
||||||
break;
|
break;
|
||||||
|
/* UUID */
|
||||||
case 'uuid':
|
case 'uuid':
|
||||||
password = hp.generateRand('uuid');
|
password = hp.generateRand('uuid');
|
||||||
break;
|
break;
|
||||||
|
/* PLAIN */
|
||||||
default:
|
default:
|
||||||
password = hp.generateRand('hex', 16);
|
password = hp.generateRand('hex', 16);
|
||||||
break;
|
break;
|
||||||
@@ -113,6 +118,17 @@ return view.extend({
|
|||||||
|
|
||||||
s = m.section(form.NamedSection, 'server', 'homeproxy', _('Global settings'));
|
s = m.section(form.NamedSection, 'server', 'homeproxy', _('Global settings'));
|
||||||
|
|
||||||
|
o = s.option(form.Button, '_reload_server', _('Quick Reload'));
|
||||||
|
o.inputtitle = _('Reload');
|
||||||
|
o.inputstyle = 'apply';
|
||||||
|
o.onclick = function() {
|
||||||
|
return fs.exec('/etc/init.d/homeproxy', ['reload', 'server'])
|
||||||
|
.then((res) => { return window.location = window.location.href.split('#')[0] })
|
||||||
|
.catch((e) => {
|
||||||
|
ui.addNotification(null, E('p', _('Failed to execute "/etc/init.d/homeproxy %s %s" reason: %s').format('reload', 'server', e)));
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
o = s.option(form.Flag, 'enabled', _('Enable'));
|
o = s.option(form.Flag, 'enabled', _('Enable'));
|
||||||
o.default = o.disabled;
|
o.default = o.disabled;
|
||||||
o.rmempty = false;
|
o.rmempty = false;
|
||||||
@@ -122,13 +138,15 @@ return view.extend({
|
|||||||
o.rmempty = false;
|
o.rmempty = false;
|
||||||
|
|
||||||
s = m.section(form.GridSection, 'server', _('Server settings'));
|
s = m.section(form.GridSection, 'server', _('Server settings'));
|
||||||
|
var prefmt = { 'prefix': 'server_', 'suffix': '' };
|
||||||
s.addremove = true;
|
s.addremove = true;
|
||||||
s.rowcolors = true;
|
s.rowcolors = true;
|
||||||
s.sortable = true;
|
s.sortable = true;
|
||||||
s.nodescriptions = true;
|
s.nodescriptions = true;
|
||||||
s.modaltitle = L.bind(hp.loadModalTitle, this, _('Server'), _('Add a server'), data[0]);
|
s.modaltitle = L.bind(hp.loadModalTitle, this, _('Server'), _('Add a server'), data[0]);
|
||||||
s.sectiontitle = L.bind(hp.loadDefaultLabel, this, data[0]);
|
s.sectiontitle = L.bind(hp.loadDefaultLabel, this, data[0]);
|
||||||
s.renderSectionAdd = L.bind(hp.renderSectionAdd, this, s);
|
s.renderSectionAdd = L.bind(hp.renderSectionAdd, this, s, prefmt, false);
|
||||||
|
s.handleAdd = L.bind(hp.handleAdd, this, s, prefmt);
|
||||||
|
|
||||||
o = s.option(form.Value, 'label', _('Label'));
|
o = s.option(form.Value, 'label', _('Label'));
|
||||||
o.load = L.bind(hp.loadDefaultLabel, this, data[0]);
|
o.load = L.bind(hp.loadDefaultLabel, this, data[0]);
|
||||||
|
@@ -1,5 +1,4 @@
-/*
- * SPDX-License-Identifier: GPL-2.0-only
+/* SPDX-License-Identifier: GPL-2.0-only
 *
 * Copyright (C) 2022-2023 ImmortalWrt.org
 */
@@ -60,27 +59,27 @@ function getConnStat(self, site) {
 ]);
 }
 
-function getResVersion(self, type) {
+function getResVersion(self, type, repo) {
 var callResVersion = rpc.declare({
 object: 'luci.homeproxy',
 method: 'resources_get_version',
-params: ['type'],
+params: ['type', 'repo'],
 expect: { '': {} }
 });
 
 var callResUpdate = rpc.declare({
 object: 'luci.homeproxy',
 method: 'resources_update',
-params: ['type'],
+params: ['type', 'repo'],
 expect: { '': {} }
 });
 
-return L.resolveDefault(callResVersion(type), {}).then((res) => {
+return L.resolveDefault(callResVersion(type, repo), {}).then((res) => {
 var spanTemp = E('div', { 'style': 'cbi-value-field' }, [
 E('button', {
 'class': 'btn cbi-button cbi-button-action',
 'click': ui.createHandlerFn(this, function() {
-return L.resolveDefault(callResUpdate(type), {}).then((res) => {
+return L.resolveDefault(callResUpdate(type, repo), {}).then((res) => {
 switch (res.status) {
 case 0:
 self.description = _('Successfully updated.');
@@ -184,7 +183,8 @@ return view.extend({
 
 render: function(data) {
 var m, s, o;
-var routing_mode = uci.get(data[0], 'config', 'routing_mode') || 'bypass_mainland_china';
+var routing_mode = uci.get(data[0], 'config', 'routing_mode') || 'bypass_mainland_china',
+dashboard_repo = uci.get(data[0], 'experimental', 'dashboard_repo') || '';
 
 m = new form.Map('homeproxy');
 
@@ -201,6 +201,12 @@ return view.extend({
 s = m.section(form.NamedSection, 'config', 'homeproxy', _('Resources management'));
 s.anonymous = true;
 
+if (routing_mode === 'custom' && dashboard_repo !== '') {
+o = s.option(form.DummyValue, '_clash_dashboard_version', _('Clash dashboard version'));
+o.cfgvalue = function() { return getResVersion(this, 'clash_dashboard', dashboard_repo) };
+o.rawhtml = true;
+}
+
 o = s.option(form.DummyValue, '_china_ip4_version', _('China IPv4 list version'));
 o.cfgvalue = function() { return getResVersion(this, 'china_ip4') };
 o.rawhtml = true;
File diff suppressed because it is too large
@@ -27,6 +27,9 @@ config homeproxy 'config'
 option proxy_mode 'redirect_tproxy'
 option ipv6_support '1'
 
+config homeproxy 'experimental'
+option clash_api_port '9090'
+
 config homeproxy 'control'
 option lan_proxy_mode 'disabled'
 list wan_proxy_ipv4_ips '91.105.192.0/23'
@@ -50,7 +50,7 @@ if (routing_mode !== 'custom') {
 bypass_cn_traffic = uci.get(cfgname, 'routing', 'bypass_cn_traffic') || '0';
 }
 
-let routing_port = uci.get(cfgname, 'config', 'routing_port');
+let routing_port = uci.get(cfgname, 'config', 'routing_port') || 'common';
 if (routing_port === 'common')
 routing_port = uci.get(cfgname, 'infra', 'common_port') || '22,53,80,143,443,465,587,853,873,993,995,8080,8443,9418';
 
@@ -88,9 +88,6 @@ const control_info = {};
 
 for (let i in control_options)
 control_info[i] = uci.get(cfgname, 'control', i);
 
-const dns_hijacked = uci.get('dhcp', '@dnsmasq[0]', 'dns_redirect') || '0',
-dns_port = uci.get('dhcp', '@dnsmasq[0]', 'port') || '53';
-
 /* UCI config end */
 -%}
 
@@ -222,7 +219,7 @@ set homeproxy_wan_direct_addr_v6 {
 }
 {% endif /* ipv6_support */ %}
 
-{% if (routing_port): %}
+{% if (routing_port !== 'all'): %}
 set homeproxy_routing_port {
 type inet_service
 flags interval
@@ -231,16 +228,6 @@ set homeproxy_routing_port {
 }
 {% endif %}
 
-{# DNS hijack & TCP redirect #}
-chain dstnat {
-{% if (dns_hijacked !== '1'): %}
-meta nfproto { ipv4, ipv6 } udp dport 53 counter redirect to :{{ dns_port }} comment "!{{ cfgname }}: DNS hijack"
-{% endif /* dns_hijacked */ %}
-{% if (match(proxy_mode, /redirect/)): %}
-meta nfproto { {{ (ipv6_support === '1') ? 'ipv4, ipv6' : 'ipv4' }} } meta l4proto tcp jump homeproxy_redirect_lanac
-{% endif /* proxy_mode */ %}
-}
-
 {# TCP redirect #}
 {% if (match(proxy_mode, /redirect/)): %}
 chain homeproxy_redirect_proxy {
@@ -248,7 +235,7 @@ chain homeproxy_redirect_proxy {
 }
 
 chain homeproxy_redirect_proxy_port {
-{% if (routing_port): %}
+{% if (routing_port !== 'all'): %}
 tcp dport != @homeproxy_routing_port counter return
 {% endif %}
 goto homeproxy_redirect_proxy
@@ -351,6 +338,10 @@ chain homeproxy_output_redir {
 type nat hook output priority filter -105; policy accept
 meta nfproto { {{ (ipv6_support === '1') ? 'ipv4, ipv6' : 'ipv4' }} } meta l4proto tcp jump homeproxy_redirect
 }
+
+chain dstnat {
+meta nfproto { {{ (ipv6_support === '1') ? 'ipv4, ipv6' : 'ipv4' }} } meta l4proto tcp jump homeproxy_redirect_lanac
+}
 {% endif %}
 
 {# UDP tproxy #}
@@ -363,14 +354,14 @@ chain homeproxy_mangle_tproxy {
 }
 
 chain homeproxy_mangle_tproxy_port {
-{% if (routing_port): %}
+{% if (routing_port !== 'all'): %}
 udp dport != @homeproxy_routing_port counter return
 {% endif %}
 goto homeproxy_mangle_tproxy
 }
 
 chain homeproxy_mangle_mark {
-{% if (routing_port): %}
+{% if (routing_port !== 'all'): %}
 udp dport != @homeproxy_routing_port counter return
 {% endif %}
 meta l4proto udp mark set {{ tproxy_mark }} counter accept
@@ -380,7 +371,6 @@ chain homeproxy_mangle_lanac {
 {% if (control_info.listen_interfaces): %}
 meta iifname != {{ array_to_nftarr(split(join(' ', control_info.listen_interfaces) + ' lo', ' ')) }} counter return
 {% endif %}
-meta iifname != lo udp dport 53 counter return
 meta mark {{ self_mark }} counter return
 
 {% if (control_info.lan_proxy_mode === 'listed_only'): %}
@@ -523,7 +513,6 @@ chain mangle_output {
 {% if (match(proxy_mode, /tun/)): %}
 chain homeproxy_mangle_lanac {
 iifname {{ tun_name }} counter return
-udp dport 53 counter return
 
 {% if (control_info.listen_interfaces): %}
 meta iifname != {{ array_to_nftarr(control_info.listen_interfaces) }} counter return
@@ -557,7 +546,7 @@ chain homeproxy_mangle_lanac {
 }
 
 chain homeproxy_mangle_tun_mark {
-{% if (routing_port): %}
+{% if (routing_port !== 'all'): %}
 {% if (proxy_mode === 'tun'): %}
 tcp dport != @homeproxy_routing_port counter return
 {% endif /* proxy_mode */ %}
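Read together, the hunks above change the port-filtering switch from a simple truthiness test on routing_port to an explicit comparison against 'all', with 'common' as the fallback when the option is unset. A rough ucode-style sketch of the intended selection logic (port list abbreviated; only the option names from the template above are real):

// 'all'    -> no homeproxy_routing_port set is emitted; every port is handled by the proxy chains
// 'common' -> expanded to the infra.common_port list
// explicit -> used as given
let routing_port = uci.get(cfgname, 'config', 'routing_port') || 'common';
if (routing_port === 'common')
	routing_port = uci.get(cfgname, 'infra', 'common_port') || '22,53,80,443,8080,8443';
const need_port_set = (routing_port !== 'all');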
@@ -11,9 +11,11 @@ import { readfile, writefile } from 'fs';
 import { isnan } from 'math';
 import { cursor } from 'uci';
 
+import { urldecode } from 'luci.http';
+
 import {
-executeCommand, isEmpty, strToBool, strToInt,
-removeBlankAttrs, validateHostname, validation,
+executeCommand, shellQuote, calcStringCRC8, calcStringMD5, isEmpty, strToBool, strToInt,
+removeBlankAttrs, parseURL, validateHostname, validation, filterCheck,
 HP_DIR, RUN_DIR
 } from 'homeproxy';
 
@@ -25,6 +27,7 @@ uci.load(uciconfig);
 
 const uciinfra = 'infra',
 ucimain = 'config',
+ucisub = 'subscription',
 uciexp = 'experimental',
 ucicontrol = 'control';
 
@@ -49,7 +52,7 @@ else
 
 const dns_port = uci.get(uciconfig, uciinfra, 'dns_port') || '5333';
 
-let main_node, main_udp_node, dedicated_udp_node, default_outbound, domain_strategy, sniff_override = '1',
+let main_node, main_udp_node, dedicated_udp_node, default_outbound, sniff_override = '1',
 dns_server, dns_default_strategy, dns_default_server, dns_disable_cache, dns_disable_cache_expire,
 dns_independent_cache, dns_client_subnet, direct_domain_list, proxy_domain_list;
 
@@ -80,7 +83,6 @@ if (routing_mode !== 'custom') {
 
 /* Routing settings */
 default_outbound = uci.get(uciconfig, uciroutingsetting, 'default_outbound') || 'nil';
-domain_strategy = uci.get(uciconfig, uciroutingsetting, 'domain_strategy');
 sniff_override = uci.get(uciconfig, uciroutingsetting, 'sniff_override');
 }
 
@@ -91,6 +93,13 @@ const proxy_mode = uci.get(uciconfig, ucimain, 'proxy_mode') || 'redirect_tproxy
 const cache_file_store_rdrc = uci.get(uciconfig, uciexp, 'cache_file_store_rdrc'),
 cache_file_rdrc_timeout = uci.get(uciconfig, uciexp, 'cache_file_rdrc_timeout');
 
+const clash_api_enabled = uci.get(uciconfig, uciexp, 'clash_api_enabled'),
+nginx_support = uci.get(uciconfig, uciexp, 'nginx_support'),
+clash_api_log_level = uci.get(uciconfig, uciexp, 'clash_api_log_level') || 'warn',
+dashboard_repo = uci.get(uciconfig, uciexp, 'dashboard_repo'),
+clash_api_port = uci.get(uciconfig, uciexp, 'clash_api_port') || '9090',
+clash_api_secret = uci.get(uciconfig, uciexp, 'clash_api_secret') || trim(readfile('/proc/sys/kernel/random/uuid'));
+
 const mixed_port = uci.get(uciconfig, uciinfra, 'mixed_port') || '5330';
 let self_mark, redirect_port, tproxy_port,
 tun_name, tun_addr4, tun_addr6, tun_mtu, tun_gso,
@@ -118,6 +127,24 @@ if (match(proxy_mode), /tun/) {
 endpoint_independent_nat = uci.get(uciconfig, uciroutingsetting, 'endpoint_independent_nat');
 }
 }
+
+let subs_info = {};
+{
+const suburls = uci.get(uciconfig, ucisub, 'subscription_url') || [];
+for (let i = 0; i < length(suburls); i++) {
+const url = parseURL(suburls[i]);
+const urlhash = calcStringMD5(replace(suburls[i], /#.*$/, ''));
+subs_info[urlhash] = {
+"url": replace(suburls[i], /#.*$/, ''),
+"name": url.hash ? urldecode(url.hash) : url.hostname
+};
+}
+}
+
+let checkedout_nodes = [],
+nodes_tobe_checkedout = [],
+checkedout_groups = [],
+groups_tobe_checkedout = [];
 /* UCI config end */
 
 /* Config helper start */
@@ -145,13 +172,75 @@ function parse_dnsquery(strquery) {
 
 }
 
+function get_tag(cfg, failback_tag, filterable) {
+if (isEmpty(cfg))
+return null;
+
+let node = {};
+if (type(cfg) === 'object')
+node = cfg;
+else {
+if (cfg in ['direct-out', 'block-out'])
+return cfg;
+else
+node = uci.get_all(uciconfig, cfg);
+}
+
+//filter check
+if (!isEmpty(filterable))
+if (filterCheck(node.label, filterable.filter_nodes, filterable.filter_keywords))
+return null;
+
+const sub_info = subs_info[node.grouphash];
+return node.label ? sprintf("%s%s", node.grouphash ?
+sprintf("[%s] ", sub_info ? sub_info.name : calcStringCRC8(node.grouphash)) : '',
+node.label) :
+(failback_tag || null);
+}
+
 function generate_outbound(node) {
 if (type(node) !== 'object' || isEmpty(node))
 return null;
 
+push(checkedout_nodes, node['.name']);
+
+if (node.type in ['selector', 'urltest']) {
+let outbounds = [];
+for (let grouphash in node.group) {
+if (!isEmpty(grouphash)) {
+const output = executeCommand(`/sbin/uci -q show ${shellQuote(uciconfig)} | /bin/grep "\.grouphash='*${shellQuote(grouphash)}'*" | /usr/bin/cut -f2 -d'.'`) || {};
+if (!isEmpty(trim(output.stdout)))
+for (let order in split(trim(output.stdout), /\n/))
+push(outbounds, get_tag(order, 'cfg-' + order + '-out', { "filter_nodes": node.filter_nodes, "filter_keywords": node.filter_keywords }));
+if (!(grouphash in groups_tobe_checkedout))
+push(groups_tobe_checkedout, grouphash);
+}
+}
+for (let order in node.order) {
+push(outbounds, get_tag(order, 'cfg-' + order + '-out', { "filter_nodes": node.filter_nodes, "filter_keywords": node.filter_keywords }));
+if (!(order in ['direct-out', 'block-out']) && !(order in nodes_tobe_checkedout))
+push(nodes_tobe_checkedout, order);
+}
+if (length(outbounds) === 0)
+push(outbounds, 'direct-out', 'block-out');
+return {
+type: node.type,
+tag: get_tag(node, 'cfg-' + node['.name'] + '-out'),
+/* Selector */
+outbounds: outbounds,
+default: node.default_selected ? (get_tag(node.default_selected, 'cfg-' + node.default_selected + '-out')) : null,
+/* URLTest */
+url: node.test_url,
+interval: node.interval,
+tolerance: strToInt(node.tolerance),
+idle_timeout: node.idle_timeout,
+interrupt_exist_connections: strToBool(node.interrupt_exist_connections)
+};
+}
+
 const outbound = {
 type: node.type,
-tag: 'cfg-' + node['.name'] + '-out',
+tag: get_tag(node, 'cfg-' + node['.name'] + '-out'),
 routing_mark: strToInt(self_mark),
 
 server: node.address,
@@ -164,7 +253,10 @@ function generate_outbound(node) {
 /* Direct */
 override_address: node.override_address,
 override_port: strToInt(node.override_port),
-proxy_protocol: strToInt(node.proxy_protocol),
+proxy_protocol: (node.proxy_protocol === '1') ? {
+enabled: true,
+version: strToInt(node.proxy_protocol_version)
+} : null,
 /* Hysteria (2) */
 up_mbps: strToInt(node.hysteria_up_mbps),
 down_mbps: strToInt(node.hysteria_down_mbps),
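To make generated outbound tags readable, the new get_tag() helper above prefixes a node label with the subscription it came from: the display name parsed from the URL fragment when known, otherwise a CRC8 digest of the group hash. A rough JavaScript illustration of the resulting tags (the node records and subscription entry here are made up, and the two-character slice only stands in for the CRC8 fallback):

// Hypothetical subscription entry keyed by group hash.
const subs_info = { 'd41d8cd9': { url: 'https://example.com/sub', name: 'My Provider' } };

function tagFor(node) {
	// Mirrors get_tag(): "[group] label" when the node belongs to a subscription,
	// plain label otherwise, falling back to the cfg-…-out tag when unlabeled.
	const group = node.grouphash ? '[' + (subs_info[node.grouphash]?.name || node.grouphash.slice(0, 2)) + '] ' : '';
	return node.label ? group + node.label : 'cfg-' + node.name + '-out';
}

tagFor({ name: 'node1', label: 'HK-01', grouphash: 'd41d8cd9' });  // "[My Provider] HK-01"
tagFor({ name: 'node2', label: 'Manual node' });                   // "Manual node"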
@@ -299,7 +391,7 @@ function get_outbound(cfg) {
 if (isEmpty(node))
 die(sprintf("%s's node is missing, please check your configuration.", cfg));
 else
-return 'cfg-' + node + '-out';
+return get_tag(node, 'cfg-' + node + '-out');
 }
 }
 }
@@ -330,7 +422,7 @@ const config = {};
 /* Log */
 config.log = {
 disabled: false,
-level: 'warn',
+level: (clash_api_enabled === '1') ? clash_api_log_level : 'warn',
 output: RUN_DIR + '/sing-box-c.log',
 timestamp: true
 };
@@ -454,6 +546,7 @@ if (!isEmpty(main_node)) {
 process_name: cfg.process_name,
 process_path: cfg.process_path,
 user: cfg.user,
+clash_mode: cfg.clash_mode,
 rule_set: get_ruleset(cfg.rule_set),
 rule_set_ipcidr_match_source: (cfg.rule_set_ipcidr_match_source === '1') || null,
 invert: (cfg.invert === '1') || null,
@@ -490,7 +583,6 @@ push(config.inbounds, {
 udp_timeout: udp_timeout ? (udp_timeout + 's') : null,
 sniff: true,
 sniff_override_destination: (sniff_override === '1'),
-domain_strategy: domain_strategy,
 set_system_proxy: false
 });
 
@@ -502,8 +594,7 @@ if (match(proxy_mode, /redirect/))
 listen: '::',
 listen_port: int(redirect_port),
 sniff: true,
-sniff_override_destination: (sniff_override === '1'),
-domain_strategy: domain_strategy,
+sniff_override_destination: (sniff_override === '1')
 });
 if (match(proxy_mode, /tproxy/))
 push(config.inbounds, {
@@ -515,8 +606,7 @@ if (match(proxy_mode, /tproxy/))
 network: 'udp',
 udp_timeout: udp_timeout ? (udp_timeout + 's') : null,
 sniff: true,
-sniff_override_destination: (sniff_override === '1'),
-domain_strategy: domain_strategy,
+sniff_override_destination: (sniff_override === '1')
 });
 if (match(proxy_mode, /tun/))
 push(config.inbounds, {
@@ -534,7 +624,6 @@ if (match(proxy_mode, /tun/))
 stack: tcpip_stack,
 sniff: true,
 sniff_override_destination: (sniff_override === '1'),
-domain_strategy: domain_strategy,
 });
 /* Inbound end */
 
@@ -574,10 +663,45 @@ if (!isEmpty(main_node)) {
 
 const outbound = uci.get_all(uciconfig, cfg.node) || {};
 push(config.outbounds, generate_outbound(outbound));
-config.outbounds[length(config.outbounds)-1].domain_strategy = cfg.domain_strategy;
-config.outbounds[length(config.outbounds)-1].bind_interface = cfg.bind_interface;
-config.outbounds[length(config.outbounds)-1].detour = get_outbound(cfg.outbound);
+const type = config.outbounds[length(config.outbounds)-1].type;
+if (!(type in ['selector', 'urltest'])) {
+config.outbounds[length(config.outbounds)-1].domain_strategy = cfg.domain_strategy;
+config.outbounds[length(config.outbounds)-1].bind_interface = cfg.bind_interface;
+config.outbounds[length(config.outbounds)-1].detour = get_outbound(cfg.outbound);
+}
 });
+/* Second level outbounds */
+while (length(nodes_tobe_checkedout) > 0) {
+const oldarr = uniq(nodes_tobe_checkedout);
+
+nodes_tobe_checkedout = [];
+map(oldarr, (k) => {
+if (!(k in checkedout_nodes)) {
+const outbound = uci.get_all(uciconfig, k) || {};
+push(config.outbounds, generate_outbound(outbound));
+push(checkedout_nodes, k);
+}
+});
+}
+while (length(groups_tobe_checkedout) > 0) {
+const oldarr = uniq(groups_tobe_checkedout);
+let newarr = [];
+
+groups_tobe_checkedout = [];
+map(oldarr, (k) => {
+if (!(k in checkedout_groups)) {
+push(newarr, k);
+push(checkedout_groups, k);
+}
+});
+const hashexp = regexp('^' + replace(replace(replace(sprintf("%J", newarr), /^\[(.*)\]$/g, "($1)"), /[" ]/g, ''), ',', '|') + '$', 'is');
+uci.foreach(uciconfig, ucinode, (cfg) => {
+if (!(cfg['.name'] in checkedout_nodes) && match(cfg?.grouphash, hashexp)) {
+push(config.outbounds, generate_outbound(cfg));
+push(checkedout_nodes, cfg['.name']);
+}
+});
+}
 /* Outbound end */
 
 /* Routing rules start */
@@ -639,6 +763,7 @@ if (!isEmpty(main_node)) {
 process_name: cfg.process_name,
 process_path: cfg.process_path,
 user: cfg.user,
+clash_mode: cfg.clash_mode,
 rule_set: get_ruleset(cfg.rule_set),
 rule_set_ipcidr_match_source: (cfg.rule_set_ipcidr_match_source === '1') || null,
 invert: (cfg.invert === '1') || null,
@@ -673,11 +798,23 @@ if (routing_mode === 'custom') {
 config.experimental = {
 cache_file: {
 enabled: true,
-path: RUN_DIR + '/cache.db',
+path: HP_DIR + '/cache.db',
 store_rdrc: (cache_file_store_rdrc === '1') || null,
 rdrc_timeout: cache_file_rdrc_timeout
 }
 };
+/* Clash API */
+if (dashboard_repo) {
+system('rm -rf ' + RUN_DIR + '/ui');
+const dashpkg = HP_DIR + '/resources/' + replace(dashboard_repo, '/', '_') + '.zip';
+system('unzip -qo ' + dashpkg + ' -d ' + RUN_DIR + '/');
+system('mv ' + RUN_DIR + '/*-gh-pages/ ' + RUN_DIR + '/ui/');
+}
+config.experimental.clash_api = {
+external_controller: (clash_api_enabled === '1') ? (nginx_support ? '[::1]:' : '[::]:') + clash_api_port : null,
+external_ui: dashboard_repo ? RUN_DIR + '/ui' : null,
+secret: clash_api_secret
+};
 }
 /* Experimental end */
 
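With the new experimental options set, the generated client configuration gains a Clash API block alongside the persistent cache file. A sketch of the resulting sing-box experimental section, using the variable names and defaults from the script above (the concrete port and paths are only the defaults shown in the diff):

config.experimental = {
	cache_file: {
		enabled: true,
		path: HP_DIR + '/cache.db'
	},
	clash_api: {
		// Bound to loopback when the Nginx reverse proxy fronts the API,
		// otherwise exposed on all addresses at clash_api_port (default 9090).
		external_controller: nginx_support ? '[::1]:9090' : '[::]:9090',
		external_ui: RUN_DIR + '/ui',      // unpacked dashboard, only when dashboard_repo is set
		secret: clash_api_secret           // random UUID when no secret is configured
	}
};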
@@ -52,6 +52,48 @@ export function executeCommand(...args) {
 };
 };
 
+export function hexencArray(str) {
+if (!str || type(str) !== 'string')
+return null;
+
+const hexstr = hexenc(str);
+let arr = [];
+
+for (let i = 0; i < length(hexstr) / 2; i++)
+push(arr, hex('0x' + substr(hexstr, i * 2, 2)));
+return arr;
+};
+
+export function calcStringCRC8(str) {
+if (!str || type(str) !== 'string')
+return null;
+
+const crc8Table = [
+0, 7, 14, 9, 28, 27, 18, 21, 56, 63, 54, 49, 36, 35, 42, 45,
+112, 119, 126, 121, 108, 107, 98, 101, 72, 79, 70, 65, 84, 83, 90, 93,
+224, 231, 238, 233, 252, 251, 242, 245, 216, 223, 214, 209, 196, 195, 202, 205,
+144, 151, 158, 153, 140, 139, 130, 133, 168, 175, 166, 161, 180, 179, 186, 189,
+199, 192, 201, 206, 219, 220, 213, 210, 255, 248, 241, 246, 227, 228, 237, 234,
+183, 176, 185, 190, 171, 172, 165, 162, 143, 136, 129, 134, 147, 148, 157, 154,
+39, 32, 41, 46, 59, 60, 53, 50, 31, 24, 17, 22, 3, 4, 13, 10,
+87, 80, 89, 94, 75, 76, 69, 66, 111, 104, 97, 102, 115, 116, 125, 122,
+137, 142, 135, 128, 149, 146, 155, 156, 177, 182, 191, 184, 173, 170, 163, 164,
+249, 254, 247, 240, 229, 226, 235, 236, 193, 198, 207, 200, 221, 218, 211, 212,
+105, 110, 103, 96, 117, 114, 123, 124, 81, 86, 95, 88, 77, 74, 67, 68,
+25, 30, 23, 16, 5, 2, 11, 12, 33, 38, 47, 40, 61, 58, 51, 52,
+78, 73, 64, 71, 82, 85, 92, 91, 118, 113, 120, 127, 106, 109, 100, 99,
+62, 57, 48, 55, 34, 37, 44, 43, 6, 1, 8, 15, 26, 29, 20, 19,
+174, 169, 160, 167, 178, 181, 188, 187, 150, 145, 152, 159, 138, 141, 132, 131,
+222, 217, 208, 215, 194, 197, 204, 203, 230, 225, 232, 239, 250, 253, 244, 243
+];
+const strArray = hexencArray(str);
+let crc8 = 0;
+
+for (let i = 0; i < length(strArray); i++)
+crc8 = crc8Table[(crc8 ^ strArray[i]) & 255];
+return substr('00' + sprintf("%X", crc8), -2);
+};
+
 export function calcStringMD5(str) {
 if (!str || type(str) !== 'string')
 return null;
@@ -134,6 +176,22 @@ export function validation(datatype, data) {
 const ret = system(`/sbin/validate_data ${shellQuote(datatype)} ${shellQuote(data)} 2>/dev/null`);
 return (ret === 0);
 };
+
+export function filterCheck(name, filter_mode, filter_keywords) {
+if (isEmpty(name) || isEmpty(filter_mode) || isEmpty(filter_keywords))
+return false;
+
+let ret = false;
+for (let i in filter_keywords) {
+const patten = regexp(i);
+if (match(name, patten))
+ret = true;
+}
+if (filter_mode === 'whitelist')
+ret = !ret;
+
+return ret;
+};
 /* String helper end */
 
 /* String parser start */
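filterCheck() generalises the per-script helper that used to live in the subscription updater: it returns true when a node name should be skipped, matching each keyword as a regular expression and inverting the result in whitelist mode. A small usage sketch (labels, keywords and the non-whitelist mode string are illustrative only):

// Blacklist-style mode: drop anything whose label matches a keyword.
filterCheck('HK-01 [expired]', 'blacklist', ['expired', 'test']);   // true  -> skipped
// Whitelist mode: keep only labels that match, so non-matching names are dropped.
filterCheck('US-02', 'whitelist', ['HK', 'JP']);                    // true  -> skipped
filterCheck('HK-01', 'whitelist', ['HK', 'JP']);                    // false -> kept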
@@ -3,6 +3,8 @@
 #
 # Copyright (C) 2022-2023 ImmortalWrt.org
 
+. /usr/share/libubox/jshn.sh
+
 NAME="homeproxy"
 
 RESOURCES_DIR="/etc/$NAME/resources"
@@ -37,6 +39,67 @@ to_upper() {
 echo -e "$1" | tr "[a-z]" "[A-Z]"
 }
 
+get_local_vers() {
+local ver_file="$1"
+local repoid="$2"
+
+local ver="$(eval "jsonfilter -qi \"$ver_file\" -e '@[\"$repoid\"].version'")"
+[ -n "$ver" ] && echo "$ver" || return 1
+}
+
+check_clash_dashboard_update() {
+local dashtype="$1"
+local dashrepo="$2"
+local dashrepoid="$(echo -n "$dashrepo" | md5sum | cut -f1 -d' ')"
+local wget="wget --timeout=10 -q"
+
+set_lock "set" "$dashtype"
+
+local dashdata_ver="$($wget -O- "https://api.github.com/repos/$dashrepo/releases/latest" | jsonfilter -e "@.tag_name")"
+[ -n "$dashdata_ver" ] || {
+dashdata_ver="$($wget -O- "https://api.github.com/repos/$dashrepo/tags" | jsonfilter -e "@[*].name" | head -n1)"
+}
+if [ -z "$dashdata_ver" ]; then
+log "[$(to_upper "$dashtype")] [$dashrepo] Failed to get the latest version, please retry later."
+
+set_lock "remove" "$dashtype"
+return 1
+fi
+
+local local_dashdata_ver="$(get_local_vers "$RESOURCES_DIR/$dashtype.ver" "$dashrepoid" || echo "NOT FOUND")"
+if [ "$local_dashdata_ver" = "$dashdata_ver" ]; then
+log "[$(to_upper "$dashtype")] [$dashrepo] Current version: $dashdata_ver."
+log "[$(to_upper "$dashtype")] [$dashrepo] You're already at the latest version."
+
+set_lock "remove" "$dashtype"
+return 3
+else
+log "[$(to_upper "$dashtype")] [$dashrepo] Local version: $local_dashdata_ver, latest version: $dashdata_ver."
+fi
+
+$wget "https://codeload.github.com/$dashrepo/zip/refs/heads/gh-pages" -O "$RUN_DIR/$dashtype.zip"
+if [ ! -s "$RUN_DIR/$dashtype.zip" ]; then
+rm -f "$RUN_DIR/$dashtype.zip"
+log "[$(to_upper "$dashtype")] [$dashrepo] Update failed."
+
+set_lock "remove" "$dashtype"
+return 1
+fi
+
+mv -f "$RUN_DIR/$dashtype.zip" "$RESOURCES_DIR/${dashrepo//\//_}.zip"
+touch "$RESOURCES_DIR/$dashtype.ver"
+json_init
+json_load_file "$RESOURCES_DIR/$dashtype.ver"
+json_select "$dashrepoid" 2>/dev/null || json_add_object "$dashrepoid"
+json_add_string repo "$dashrepo"
+json_add_string version "$dashdata_ver"
+json_dump > "$RESOURCES_DIR/$dashtype.ver"
+log "[$(to_upper "$dashtype")] [$dashrepo] Successfully updated."
+
+set_lock "remove" "$dashtype"
+return 0
+}
+
 check_list_update() {
 local listtype="$1"
 local listrepo="$2"
@@ -85,6 +148,9 @@ check_list_update() {
 }
 
 case "$1" in
+"clash_dashboard")
+check_clash_dashboard_update "$1" "$2"
+;;
 "china_ip4")
 check_list_update "$1" "1715173329/IPCIDR-CHINA" "master" "ipv4.txt"
 ;;
@@ -99,7 +165,7 @@ case "$1" in
 sed -i -e "s/full://g" -e "/:/d" "$RESOURCES_DIR/china_list.txt"
 ;;
 *)
-echo -e "Usage: $0 <china_ip4 / china_ip6 / gfw_list / china_list>"
+echo -e "Usage: $0 <clash_dashboard / china_ip4 / china_ip6 / gfw_list / china_list>"
 exit 1
 ;;
 esac
@@ -16,7 +16,7 @@ import { init_action } from 'luci.sys';
 
 import {
 calcStringMD5, wGET, executeCommand, decodeBase64Str,
-getTime, isEmpty, parseURL, validation,
+getTime, isEmpty, parseURL, validation, filterCheck,
 HP_DIR, RUN_DIR
 } from 'homeproxy';
 
@@ -31,7 +31,7 @@ const ucimain = 'config',
 ucisubscription = 'subscription';
 
 const allow_insecure = uci.get(uciconfig, ucisubscription, 'allow_insecure') || '0',
-filter_mode = uci.get(uciconfig, ucisubscription, 'filter_nodes') || 'disabled',
+filter_mode = uci.get(uciconfig, ucisubscription, 'filter_nodes') || 'nil',
 filter_keywords = uci.get(uciconfig, ucisubscription, 'filter_keywords') || [],
 packet_encoding = uci.get(uciconfig, ucisubscription, 'packet_encoding') || 'xudp',
 subscription_urls = uci.get(uciconfig, ucisubscription, 'subscription_url') || [],
@@ -46,21 +46,6 @@ if (routing_mode !== 'custom') {
 /* UCI config end */
 
 /* String helper start */
-function filter_check(name) {
-if (isEmpty(name) || filter_mode === 'disabled' || isEmpty(filter_keywords))
-return false;
-
-let ret = false;
-for (let i in filter_keywords) {
-const patten = regexp(i);
-if (match(name, patten))
-ret = true;
-}
-if (filter_mode === 'whitelist')
-ret = !ret;
-
-return ret;
-}
 /* String helper end */
 
 /* Common var start */
@@ -489,7 +474,7 @@ function main() {
 nameHash = calcStringMD5(label);
 config.label = label;
 
-if (filter_check(config.label))
+if (filterCheck(config.label, filter_mode, filter_keywords))
 log(sprintf('Skipping blacklist node: %s.', config.label));
 else if (node_cache[groupHash][confHash] || node_cache[groupHash][nameHash])
 log(sprintf('Skipping duplicate node: %s.', config.label));
@@ -543,10 +528,7 @@ function main() {
 log(sprintf('Removing node: %s.', cfg.label || cfg['name']));
 } else {
 map(keys(node_cache[cfg.grouphash][cfg['.name']]), (v) => {
-if (v in node_cache[cfg.grouphash][cfg['.name']])
-uci.set(uciconfig, cfg['.name'], v, node_cache[cfg.grouphash][cfg['.name']][v]);
-else
-uci.delete(uciconfig, cfg['.name'], v);
+uci.set(uciconfig, cfg['.name'], v, node_cache[cfg.grouphash][cfg['.name']][v]);
 });
 node_cache[cfg.grouphash][cfg['.name']].isExisting = true;
 }
@@ -15,6 +15,7 @@ HP_DIR="/etc/homeproxy"
 RUN_DIR="/var/run/homeproxy"
 LOG_PATH="$RUN_DIR/homeproxy.log"
 DNSMASQ_DIR="/tmp/dnsmasq.d/dnsmasq-homeproxy.d"
+APILOCATION_PATH="/etc/nginx/conf.d/homeproxy.locations"
 
 log() {
 echo -e "$(date "+%Y-%m-%d %H:%M:%S") [DAEMON] $*" >> "$LOG_PATH"
@@ -56,14 +57,24 @@ start_service() {
 fi
 
 # Auto update
-local auto_update auto_update_time
+local auto_update auto_update_expr
 config_get_bool auto_update "subscription" "auto_update" "0"
 if [ "$auto_update" = "1" ]; then
-config_get auto_update_time "subscription" "auto_update_time" "2"
-echo -e "0 $auto_update_time * * * $HP_DIR/scripts/update_crond.sh" >> "/etc/crontabs/root"
+config_get auto_update_expr "subscription" "auto_update_expr" "0 2 * * *"
+echo -e "$auto_update_expr $HP_DIR/scripts/update_crond.sh" >> "/etc/crontabs/root"
 /etc/init.d/cron restart
 fi
 
+# Clash API uses Nginx reverse proxy
+local clash_api_enabled clash_api_port nginx_support
+config_get_bool clash_api_enabled "experimental" "clash_api_enabled" "0"
+config_get_bool nginx_support "experimental" "nginx_support" "0"
+if [ "$clash_api_enabled" = "1" -a "$nginx_support" = "1" ]; then
+config_get clash_api_port "experimental" "clash_api_port" "9090"
+[ "$(sed -En "s|^\s*proxy_pass\s+https?://[^:]+:(\d+).*|\1|p" "$APILOCATION_PATH")" = "$clash_api_port" ] || sed -Ei "/\bproxy_pass\b/{s|(proxy_pass\s+https?://[^:]+:)(\d+)(.*)|\1$clash_api_port\3|}" "$APILOCATION_PATH"
+/etc/init.d/nginx reload
+fi
+
 # DNSMasq rules
 local ipv6_support
 config_get_bool ipv6_support "config" "ipv6_support" "0"
@@ -0,0 +1,12 @@
+location /homeproxy/ {
+proxy_set_header Host $host;
+proxy_set_header X-Real-IP $remote_addr;
+proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+proxy_set_header X-Forwarded-Proto $scheme;
+proxy_pass http://localhost:9090/;
+proxy_redirect default;
+proxy_set_header Upgrade $http_upgrade;
+proxy_set_header Connection "upgrade";
+proxy_http_version 1.1;
+add_header Cache-Control no-cache;
+}
@@ -15,4 +15,6 @@ uci -q batch <<-EOF >"/dev/null"
 commit firewall
 EOF
 
+[ -z "$(uci -q get homeproxy.experimental)" ] && uci set homeproxy.experimental=homeproxy && uci commit homeproxy
+
 exit 0
|
|||||||
uci -q add_list "homeproxy.config.china_dns_server"="$dns"
|
uci -q add_list "homeproxy.config.china_dns_server"="$dns"
|
||||||
done
|
done
|
||||||
fi
|
fi
|
||||||
|
# rm Subscription Name-s
|
||||||
if [ "$(uci -q get homeproxy.config.routing_port)" = "all" ]; then
|
subscription_urls="$(uci -q get "homeproxy.subscription.subscription_url")"
|
||||||
uci -q delete "homeproxy.config.routing_port"
|
subscription_names="$(uci -q get "homeproxy.subscription.subscription_name")"
|
||||||
|
if [ -n "$subscription_names" ]; then
|
||||||
|
uci -q delete "homeproxy.subscription.subscription_url"
|
||||||
|
uci -q delete "homeproxy.subscription.subscription_name"
|
||||||
|
i=1
|
||||||
|
for suburl in $subscription_urls; do
|
||||||
|
uci -q add_list "homeproxy.subscription.subscription_url"="${suburl}#$(echo "$subscription_names" | cut -f$i -d' ')"
|
||||||
|
let i++
|
||||||
|
done
|
||||||
fi
|
fi
|
||||||
|
|
||||||
[ -z "$(uci -q changes "homeproxy")" ] || uci -q commit "homeproxy"
|
[ -z "$(uci -q changes "homeproxy")" ] || uci -q commit "homeproxy"
|
||||||
|
|
||||||
exit 0
|
exit 0
|
||||||
|
@@ -26,6 +26,14 @@
|
|||||||
"path": "homeproxy/node"
|
"path": "homeproxy/node"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"admin/services/homeproxy/ruleset": {
|
||||||
|
"title": "Ruleset Settings",
|
||||||
|
"order": 18,
|
||||||
|
"action": {
|
||||||
|
"type": "view",
|
||||||
|
"path": "homeproxy/ruleset"
|
||||||
|
}
|
||||||
|
},
|
||||||
"admin/services/homeproxy/server": {
|
"admin/services/homeproxy/server": {
|
||||||
"title": "Server Settings",
|
"title": "Server Settings",
|
||||||
"order": 20,
|
"order": 20,
|
||||||
|
@@ -4,6 +4,7 @@
 "read": {
 "file": {
 "/etc/homeproxy/scripts/update_subscriptions.uc": [ "exec" ],
+"/etc/init.d/homeproxy reload *": [ "exec" ],
 "/var/run/homeproxy/homeproxy.log": [ "read" ],
 "/var/run/homeproxy/sing-box-c.log": [ "read" ],
 "/var/run/homeproxy/sing-box-s.log": [ "read" ]
@@ -179,28 +179,45 @@ const methods = {
 features.hp_has_tcp_brutal = hasKernelModule('brutal.ko');
 features.hp_has_tproxy = hasKernelModule('nft_tproxy.ko') || access('/etc/modules.d/nft-tproxy');
 features.hp_has_tun = hasKernelModule('tun.ko') || access('/etc/modules.d/30-tun');
+features.hp_has_nginx = access('/usr/sbin/nginx');
 
 return features;
 }
 },
 
 resources_get_version: {
-args: { type: 'type' },
+args: { type: 'type', repo: 'repo' },
 call: function(req) {
-const version = trim(readfile(`${HP_DIR}/resources/${req.args?.type}.ver`));
-return { version: version, error: error() };
+const versions = trim(readfile(`${HP_DIR}/resources/${req.args?.type}.ver`));
+if (req.args?.repo && versions) {
+const vers_arr = values(json(versions));
+for (obj in vers_arr) {
+if (obj.repo === req.args?.repo)
+return { version: obj.version, error: 0 };
+}
+return { version: '', error: 1 };
+} else
+return { version: versions, error: error() };
 }
 },
 resources_update: {
-args: { type: 'type' },
+args: { type: 'type', repo: 'repo' },
 call: function(req) {
 if (req.args?.type) {
-const type = shellquote(req.args?.type);
-const exit_code = system(`${HP_DIR}/scripts/update_resources.sh ${type}`);
+const type = shellquote(req.args?.type),
+repo = shellquote(req.args?.repo);
+const exit_code = system(`${HP_DIR}/scripts/update_resources.sh ${type} ${repo}`);
 return { status: exit_code };
 } else
 return { status: 255, error: 'illegal type' };
 }
+},
+
+clash_api_get_secret: {
+call: function() {
+const client_json = json(trim(readfile(`${RUN_DIR}/sing-box-c.json`)));
+return { secret: client_json.experimental.clash_api.secret };
+}
 }
 };
 
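resources_get_version now distinguishes the multi-repo clash_dashboard version file from the plain single-value .ver files. update_resources.sh writes the dashboard file keyed by the MD5 of the repo slug, so the lookup loop above expects data shaped roughly like this (repo, hash and version values are hypothetical):

// /etc/homeproxy/resources/clash_dashboard.ver
// {
//   "5f4dcc3b5aa765d61d8327deb882cf99": { "repo": "example/dashboard", "version": "v1.2.3" }
// }
// rpc call:  resources_get_version { type: 'clash_dashboard', repo: 'example/dashboard' }
// result:    { version: 'v1.2.3', error: 0 }   (or { version: '', error: 1 } when the repo is unknown)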
@@ -658,8 +658,11 @@ run_chinadns_ng() {
 ([ -z "${_default_tag}" ] || [ "${_default_tag}" = "smart" ] || [ "${_default_tag}" = "none_noip" ]) && _default_tag="none"
 echo "default-tag ${_default_tag}" >> ${_CONF_FILE}
 
+echo "cache 4096" >> ${_CONF_FILE}
+echo "cache-stale 3600" >> ${_CONF_FILE}
+
 [ "${_flag}" = "default" ] && [ "${_default_tag}" = "none" ] && {
-echo "verdict-cache 4096" >> ${_CONF_FILE}
+echo "verdict-cache 5000" >> ${_CONF_FILE}
 }
 
 ln_run "$(first_type chinadns-ng)" chinadns-ng "${_LOG_FILE}" -C ${_CONF_FILE}
@@ -1379,7 +1382,6 @@ start_dns() {
 LOCAL_DNS=$(config_t_get global direct_dns_udp 223.5.5.5 | sed 's/:/#/g')
 china_ng_local_dns=${LOCAL_DNS}
 sing_box_local_dns="direct_dns_udp_server=${LOCAL_DNS}"
-IPT_APPEND_DNS=${LOCAL_DNS}
 ;;
 tcp)
 LOCAL_DNS="127.0.0.1#${dns_listen_port}"
@@ -1387,7 +1389,6 @@ start_dns() {
 local DIRECT_DNS=$(config_t_get global direct_dns_tcp 223.5.5.5 | sed 's/:/#/g')
 china_ng_local_dns="tcp://${DIRECT_DNS}"
 sing_box_local_dns="direct_dns_tcp_server=${DIRECT_DNS}"
-IPT_APPEND_DNS="${LOCAL_DNS},${DIRECT_DNS}"
 ln_run "$(first_type dns2tcp)" dns2tcp "/dev/null" -L "${LOCAL_DNS}" -R "$(get_first_dns DIRECT_DNS 53)" -v
 echolog " - dns2tcp(${LOCAL_DNS}) -> tcp://$(get_first_dns DIRECT_DNS 53 | sed 's/#/:/g')"
 echolog " * 请确保上游直连 DNS 支持 TCP 查询。"
@@ -1405,8 +1406,8 @@ start_dns() {
 
 local tmp_dot_ip=$(echo "$DIRECT_DNS" | sed -n 's/.*:\/\/\([^@#]*@\)*\([^@#]*\).*/\2/p')
 local tmp_dot_port=$(echo "$DIRECT_DNS" | sed -n 's/.*#\([0-9]\+\).*/\1/p')
-sing_box_local_dns="direct_dns_dot_server=$tmp_dot_ip#${tmp_dot_port:-853}"
-IPT_APPEND_DNS="${LOCAL_DNS},$tmp_dot_ip#${tmp_dot_port:-853}"
+DIRECT_DNS=$tmp_dot_ip#${tmp_dot_port:-853}
+sing_box_local_dns="direct_dns_dot_server=${DIRECT_DNS}"
 else
 echolog " - 你的ChinaDNS-NG版本不支持DoT,直连DNS将使用默认地址。"
 fi
@@ -1417,6 +1418,21 @@ start_dns() {
 ;;
 esac
 
+# 追加直连DNS到iptables/nftables
+[ "$(config_t_get global_haproxy balancing_enable 0)" != "1" ] && IPT_APPEND_DNS=
+add_default_port() {
+[ -z "$1" ] && echo "" || echo "$1" | awk -F',' '{for(i=1;i<=NF;i++){if($i !~ /#/) $i=$i"#53";} print $0;}' OFS=','
+}
+LOCAL_DNS=$(add_default_port "$LOCAL_DNS")
+IPT_APPEND_DNS=$(add_default_port "${IPT_APPEND_DNS:-$LOCAL_DNS}")
+echo "$IPT_APPEND_DNS" | grep -q -E "(^|,)$LOCAL_DNS(,|$)" || IPT_APPEND_DNS="${IPT_APPEND_DNS:+$IPT_APPEND_DNS,}$LOCAL_DNS"
+[ -n "$DIRECT_DNS" ] && {
+DIRECT_DNS=$(add_default_port "$DIRECT_DNS")
+echo "$IPT_APPEND_DNS" | grep -q -E "(^|,)$DIRECT_DNS(,|$)" || IPT_APPEND_DNS="${IPT_APPEND_DNS:+$IPT_APPEND_DNS,}$DIRECT_DNS"
+}
+# 排除127.0.0.1的条目
+IPT_APPEND_DNS=$(echo "$IPT_APPEND_DNS" | awk -F',' '{for(i=1;i<=NF;i++) if($i !~ /^127\.0\.0\.1/) printf (i>1?",":"") $i; print ""}' | sed 's/^,\|,$//g')
+
 TUN_DNS="127.0.0.1#${dns_listen_port}"
 [ "${resolve_dns}" == "1" ] && TUN_DNS="127.0.0.1#${resolve_dns_port}"
 
@@ -21,13 +21,13 @@ define Download/geoip
 HASH:=944465ad5f3a3cccebf2930624f528cae3ca054f69295979cf4c4e002a575e90
 endef
 
-GEOSITE_VER:=20240905094227
+GEOSITE_VER:=20240905162746
 GEOSITE_FILE:=dlc.dat.$(GEOSITE_VER)
 define Download/geosite
 URL:=https://github.com/v2fly/domain-list-community/releases/download/$(GEOSITE_VER)/
 URL_FILE:=dlc.dat
 FILE:=$(GEOSITE_FILE)
-HASH:=8edb9186aea5ef40b310f29b89bcf2be67ea65b04c010b4cdb9ddb02408557f0
+HASH:=859306b7bc3a7891d5e0f5c8f38c2eaa8ede776c3a0aa1512b96c4956cf511c1
 endef
 
 GEOSITE_IRAN_VER:=202409020032
@@ -14,6 +14,7 @@ var (
 DialUDP = net.DialUDP
 DialUnix = net.DialUnix
 FileConn = net.FileConn
+FileListener = net.FileListener
 Listen = net.Listen
 ListenTCP = net.ListenTCP
 ListenUDP = net.ListenUDP
@@ -15,7 +15,7 @@ require (
 	github.com/google/go-cmp v0.6.0
 	github.com/google/gopacket v1.1.19
 	github.com/gorilla/websocket v1.5.3
-	github.com/jhump/protoreflect v1.16.0
+	github.com/jhump/protoreflect v1.17.0
 	github.com/lunixbochs/struc v0.0.0-20200707160740-784aaebc1d40
 	github.com/miekg/dns v1.1.62
 	github.com/mustafaturan/bus v1.0.2
@@ -34,10 +34,10 @@ require (
 	github.com/xiaokangwang/VLite v0.0.0-20220418190619-cff95160a432
 	go.starlark.net v0.0.0-20230612165344-9532f5667272
 	go4.org/netipx v0.0.0-20230303233057-f1b76eb4bb35
-	golang.org/x/crypto v0.26.0
-	golang.org/x/net v0.28.0
+	golang.org/x/crypto v0.27.0
+	golang.org/x/net v0.29.0
 	golang.org/x/sync v0.8.0
-	golang.org/x/sys v0.24.0
+	golang.org/x/sys v0.25.0
 	google.golang.org/grpc v1.65.0
 	google.golang.org/protobuf v1.34.2
 	gopkg.in/yaml.v3 v3.0.1
@@ -51,7 +51,7 @@ require (
 	github.com/ajg/form v1.5.1 // indirect
 	github.com/andybalholm/brotli v1.0.6 // indirect
 	github.com/boljen/go-bitmap v0.0.0-20151001105940-23cd2fb0ce7d // indirect
-	github.com/bufbuild/protocompile v0.10.0 // indirect
+	github.com/bufbuild/protocompile v0.14.1 // indirect
 	github.com/cloudflare/circl v1.3.7 // indirect
 	github.com/davecgh/go-spew v1.1.1 // indirect
 	github.com/dgryski/go-metro v0.0.0-20211217172704-adc40b04c140 // indirect
@@ -80,7 +80,7 @@ require (
 	go.uber.org/mock v0.4.0 // indirect
 	golang.org/x/exp v0.0.0-20240506185415-9bf2ced13842 // indirect
 	golang.org/x/mod v0.18.0 // indirect
-	golang.org/x/text v0.17.0 // indirect
+	golang.org/x/text v0.18.0 // indirect
 	golang.org/x/time v0.5.0 // indirect
 	golang.org/x/tools v0.22.0 // indirect
 	google.golang.org/genproto/googleapis/rpc v0.0.0-20240528184218-531527333157 // indirect
@@ -34,8 +34,8 @@ github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kB
 github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod h1:MKsuJmJgSg28kpZDP6UIiPt0e0Oz0kqKNGyRaWEPv84=
 github.com/boljen/go-bitmap v0.0.0-20151001105940-23cd2fb0ce7d h1:zsO4lp+bjv5XvPTF58Vq+qgmZEYZttJK+CWtSZhKenI=
 github.com/boljen/go-bitmap v0.0.0-20151001105940-23cd2fb0ce7d/go.mod h1:f1iKL6ZhUWvbk7PdWVmOaak10o86cqMUYEmn1CZNGEI=
-github.com/bufbuild/protocompile v0.10.0 h1:+jW/wnLMLxaCEG8AX9lD0bQ5v9h1RUiMKOBOT5ll9dM=
-github.com/bufbuild/protocompile v0.10.0/go.mod h1:G9qQIQo0xZ6Uyj6CMNz0saGmx2so+KONo8/KrELABiY=
+github.com/bufbuild/protocompile v0.14.1 h1:iA73zAf/fyljNjQKwYzUHD6AD4R8KMasmwa/FBatYVw=
+github.com/bufbuild/protocompile v0.14.1/go.mod h1:ppVdAIhbr2H8asPk6k4pY7t9zB1OU5DoEw9xY/FUi1c=
 github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
 github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
 github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
@@ -168,8 +168,8 @@ github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0m
 github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I=
 github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc=
 github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
-github.com/jhump/protoreflect v1.16.0 h1:54fZg+49widqXYQ0b+usAFHbMkBGR4PpXrsHc8+TBDg=
-github.com/jhump/protoreflect v1.16.0/go.mod h1:oYPd7nPvcBw/5wlDfm/AVmU9zH9BgqGCI469pGxfj/8=
+github.com/jhump/protoreflect v1.17.0 h1:qOEr613fac2lOuTgWN4tPAtLL7fUSbuJL5X5XumQh94=
+github.com/jhump/protoreflect v1.17.0/go.mod h1:h9+vUUL38jiBzck8ck+6G/aeMX8Z4QUY/NiJPwPNi+8=
 github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
 github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU=
 github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
@@ -366,8 +366,8 @@ golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm
 golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
 golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw=
 golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg=
-golang.org/x/crypto v0.26.0 h1:RrRspgV4mU+YwB4FYnuBoKsUapNIL5cohGAmSH3azsw=
-golang.org/x/crypto v0.26.0/go.mod h1:GY7jblb9wI+FOo5y8/S2oY4zWP07AkOJ4+jxCqdqn54=
+golang.org/x/crypto v0.27.0 h1:GXm2NjJrPaiv/h1tb2UH8QfgC/hOf/+z0p6PT8o1w7A=
+golang.org/x/crypto v0.27.0/go.mod h1:1Xngt8kV6Dvbssa53Ziq6Eqn0HqbZi5Z6R0ZpwQzt70=
 golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
 golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
 golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -420,8 +420,8 @@ golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc=
 golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
 golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI=
 golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY=
-golang.org/x/net v0.28.0 h1:a9JDOJc5GMUJ0+UDqmLT86WiEy7iWyIhz8gz8E4e5hE=
-golang.org/x/net v0.28.0/go.mod h1:yqtgsTWOOnlGLG9GFRrK3++bGOUEkNBoHZc8MEDWPNg=
+golang.org/x/net v0.29.0 h1:5ORfpBpCs4HzDYoodCDBbwHzdR5UrLBZ3sOnUJmFoHo=
+golang.org/x/net v0.29.0/go.mod h1:gLkgy8jTGERgjzMic6DS9+SP0ajcu6Xu3Orq/SpETg0=
 golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
 golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
 golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@@ -469,8 +469,8 @@ golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
-golang.org/x/sys v0.24.0 h1:Twjiwq9dn6R1fQcyiK+wQyHWfaz/BJB+YIpzU/Cv3Xg=
-golang.org/x/sys v0.24.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34=
+golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
 golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
 golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
 golang.org/x/term v0.0.0-20220526004731-065cf7ba2467/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
@@ -489,8 +489,8 @@ golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
 golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
 golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
 golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
-golang.org/x/text v0.17.0 h1:XtiM5bkSOt+ewxlOE/aE/AKEHibwj/6gvWMl9Rsh0Qc=
-golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
+golang.org/x/text v0.18.0 h1:XvMDiNzPAl0jr17s6W9lcaIhGUfUORdGCNsuLmPG224=
+golang.org/x/text v0.18.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
 golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
 golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
 golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk=
@@ -14,6 +14,7 @@ import (
 	"github.com/v2fly/v2ray-core/v5/common/cmdarg"
 	"github.com/v2fly/v2ray-core/v5/common/platform"
 	"github.com/v2fly/v2ray-core/v5/main/commands/base"
+	"github.com/v2fly/v2ray-core/v5/main/plugins"
 )

 // CmdRun runs V2Ray with config
@@ -75,6 +76,12 @@ func setConfigFlags(cmd *base.Command) {

 func executeRun(cmd *base.Command, args []string) {
 	setConfigFlags(cmd)
+	var pluginFuncs []func() error
+	for _, plugin := range plugins.Plugins {
+		if f := plugin(cmd); f != nil {
+			pluginFuncs = append(pluginFuncs, f)
+		}
+	}
 	cmd.Flag.Parse(args)
 	printVersion()
 	configFiles = getConfigFilePath()
@@ -83,6 +90,14 @@ func executeRun(cmd *base.Command, args []string) {
 		base.Fatalf("Failed to start: %s", err)
 	}

+	for _, f := range pluginFuncs {
+		go func(f func() error) {
+			if err := f(); err != nil {
+				log.Print(err)
+			}
+		}(f)
+	}
+
 	if err := server.Start(); err != nil {
 		base.Fatalf("Failed to start: %s", err)
 	}
@@ -33,7 +33,6 @@ import (
 	// Developer preview features
 	_ "github.com/v2fly/v2ray-core/v5/app/instman"
 	_ "github.com/v2fly/v2ray-core/v5/app/observatory"
-	_ "github.com/v2fly/v2ray-core/v5/app/restfulapi"
 	_ "github.com/v2fly/v2ray-core/v5/app/tun"

 	// Inbound and outbound proxies.
11 v2ray-core/main/plugins/plugin.go Normal file
@@ -0,0 +1,11 @@
+package plugins
+
+import "github.com/v2fly/v2ray-core/v5/main/commands/base"
+
+var Plugins []Plugin
+
+type Plugin func(*base.Command) func() error
+
+func RegisterPlugin(plugin Plugin) {
+	Plugins = append(Plugins, plugin)
+}
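The new plugins package above is only a registry: a Plugin receives the run command (typically to register flags on it) and may return a background task, which executeRun then launches in its own goroutine. As a rough, hypothetical sketch of that contract — the package name, flag, and greeting below are invented for illustration; the plugin_pprof file that follows is the real in-tree user:

// Hypothetical plugin, not part of this commit.
package plugin_hello

import (
	"fmt"

	"github.com/v2fly/v2ray-core/v5/main/commands/base"
	"github.com/v2fly/v2ray-core/v5/main/plugins"
)

// helloPlugin registers a -hello flag on the run command and returns a task
// that executeRun starts once the server is up.
var helloPlugin plugins.Plugin = func(cmd *base.Command) func() error {
	name := cmd.Flag.String("hello", "", "")
	return func() error {
		if *name != "" {
			fmt.Println("hello,", *name)
		}
		return nil
	}
}

func init() {
	plugins.RegisterPlugin(helloPlugin)
}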
29 v2ray-core/main/plugins/plugin_pprof/plugin_pprof.go Normal file
@@ -0,0 +1,29 @@
+package plugin_pprof
+
+import (
+	"github.com/v2fly/v2ray-core/v5/main/plugins"
+	"net/http"
+	"net/http/pprof"
+
+	"github.com/v2fly/v2ray-core/v5/main/commands/base"
+)
+
+var pprofPlugin plugins.Plugin = func(cmd *base.Command) func() error {
+	addr := cmd.Flag.String("pprof", "", "")
+	return func() error {
+		if *addr != "" {
+			h := http.NewServeMux()
+			h.HandleFunc("/debug/pprof/", pprof.Index)
+			h.HandleFunc("/debug/pprof/cmdline", pprof.Cmdline)
+			h.HandleFunc("/debug/pprof/profile", pprof.Profile)
+			h.HandleFunc("/debug/pprof/symbol", pprof.Symbol)
+			h.HandleFunc("/debug/pprof/trace", pprof.Trace)
+			return (&http.Server{Addr: *addr, Handler: h}).ListenAndServe()
+		}
+		return nil
+	}
+}
+
+func init() {
+	plugins.RegisterPlugin(pprofPlugin)
+}
13 v2ray-core/transport/internet/socket_activation_other.go Normal file
@@ -0,0 +1,13 @@
+//go:build !unix
+// +build !unix
+
+package internet
+
+import (
+	"fmt"
+	"github.com/v2fly/v2ray-core/v5/common/net"
+)
+
+func activateSocket(address string, f func(network, address string, fd uintptr)) (net.Listener, error) {
+	return nil, fmt.Errorf("socket activation is not supported on this platform")
+}
63 v2ray-core/transport/internet/socket_activation_unix.go Normal file
@@ -0,0 +1,63 @@
+//go:build unix
+// +build unix
+
+package internet
+
+import (
+	"fmt"
+	"os"
+	"path"
+	"strconv"
+	"syscall"
+
+	"github.com/v2fly/v2ray-core/v5/common/net"
+)
+
+func activateSocket(address string, f func(network, address string, fd uintptr)) (net.Listener, error) {
+	fd, err := strconv.Atoi(path.Base(address))
+	if err != nil {
+		return nil, err
+	}
+
+	err = syscall.SetNonblock(fd, true)
+	if err != nil {
+		return nil, err
+	}
+
+	acceptConn, err := syscall.GetsockoptInt(fd, syscall.SOL_SOCKET, syscall.SO_ACCEPTCONN)
+	if err != nil {
+		return nil, err
+	}
+	if acceptConn == 0 {
+		return nil, fmt.Errorf("socket '%s' has not been marked to accept connections", address)
+	}
+
+	sockType, err := syscall.GetsockoptInt(fd, syscall.SOL_SOCKET, syscall.SO_TYPE)
+	if err != nil {
+		return nil, err
+	}
+	if sockType != syscall.SOCK_STREAM {
+		// XXX: currently only stream socks are supported
+		return nil, fmt.Errorf("socket '%s' is not a stream socket", address)
+	}
+
+	ufd := uintptr(fd)
+
+	sa, err := syscall.Getsockname(fd)
+	if err != nil {
+		return nil, err
+	}
+	switch sa := sa.(type) {
+	case *syscall.SockaddrInet4:
+		addr := net.TCPAddr{IP: sa.Addr[:], Port: sa.Port, Zone: ""}
+		f("tcp4", addr.String(), ufd)
+	case *syscall.SockaddrInet6:
+		addr := net.TCPAddr{IP: sa.Addr[:], Port: sa.Port, Zone: strconv.Itoa(int(sa.ZoneId))}
+		f("tcp6", addr.String(), ufd)
+	}
+
+	file := os.NewFile(ufd, address)
+	defer file.Close()
+
+	return net.FileListener(file)
+}
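activateSocket above consumes an already-listening file descriptor whose number is taken from the tail of a /dev/fd/N listen address. A rough sketch of the parent side that could hand such a descriptor to a child process follows; the binary path, listen address, and the assumption that ExtraFiles[0] shows up as fd 3 in the child (so the child would be configured to listen on /dev/fd/3) are illustrative, not taken from this commit.

// Hypothetical parent-process sketch, not part of this commit.
package main

import (
	"net"
	"os"
	"os/exec"
)

func main() {
	// Open a normal listening socket in the parent.
	ln, err := net.Listen("tcp", "127.0.0.1:8080")
	if err != nil {
		panic(err)
	}
	f, err := ln.(*net.TCPListener).File() // duplicate the listening descriptor
	if err != nil {
		panic(err)
	}

	// Hand the descriptor to the child; ExtraFiles[0] is inherited as fd 3,
	// which the child-side code above would reach via the address /dev/fd/3.
	cmd := exec.Command("/usr/local/bin/v2ray", "run") // illustrative path and arguments
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	cmd.ExtraFiles = []*os.File{f}
	if err := cmd.Run(); err != nil {
		panic(err)
	}
}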
@@ -36,29 +36,35 @@ func (l *combinedListener) Close() error {
 	return l.Listener.Close()
 }

+func getRawControlFunc(network, address string, ctx context.Context, sockopt *SocketConfig, controllers []controller) func(fd uintptr) {
+	return func(fd uintptr) {
+		if sockopt != nil {
+			if err := applyInboundSocketOptions(network, fd, sockopt); err != nil {
+				newError("failed to apply socket options to incoming connection").Base(err).WriteToLog(session.ExportIDToError(ctx))
+			}
+		}
+
+		setReusePort(fd) // nolint: staticcheck
+
+		for _, controller := range controllers {
+			if err := controller(network, address, fd); err != nil {
+				newError("failed to apply external controller").Base(err).WriteToLog(session.ExportIDToError(ctx))
+			}
+		}
+	}
+}
+
 func getControlFunc(ctx context.Context, sockopt *SocketConfig, controllers []controller) func(network, address string, c syscall.RawConn) error {
 	return func(network, address string, c syscall.RawConn) error {
-		return c.Control(func(fd uintptr) {
-			if sockopt != nil {
-				if err := applyInboundSocketOptions(network, fd, sockopt); err != nil {
-					newError("failed to apply socket options to incoming connection").Base(err).WriteToLog(session.ExportIDToError(ctx))
-				}
-			}
-
-			setReusePort(fd) // nolint: staticcheck
-
-			for _, controller := range controllers {
-				if err := controller(network, address, fd); err != nil {
-					newError("failed to apply external controller").Base(err).WriteToLog(session.ExportIDToError(ctx))
-				}
-			}
-		})
+		return c.Control(getRawControlFunc(network, address, ctx, sockopt, controllers))
 	}
 }

 func (dl *DefaultListener) Listen(ctx context.Context, addr net.Addr, sockopt *SocketConfig) (net.Listener, error) {
 	var lc net.ListenConfig
 	var network, address string
+	var l net.Listener
+	var err error
 	// callback is called after the Listen function returns
 	// this is used to wrap the listener and do some post processing
 	callback := func(l net.Listener, err error) (net.Listener, error) {
@@ -93,6 +99,14 @@ func (dl *DefaultListener) Listen(ctx context.Context, addr net.Addr, sockopt *S
 			copy(fullAddr, address[1:])
 			address = string(fullAddr)
 		}
+	} else if strings.HasPrefix(address, "/dev/fd/") {
+		// socket activation
+		l, err = activateSocket(address, func(network, address string, fd uintptr) {
+			getRawControlFunc(network, address, ctx, sockopt, dl.controllers)(fd)
+		})
+		if err != nil {
+			return nil, err
+		}
 	} else {
 		// normal unix domain socket
 		var fileMode *os.FileMode
@@ -133,13 +147,18 @@ func (dl *DefaultListener) Listen(ctx context.Context, addr net.Addr, sockopt *S
 		}
 	}

-	l, err := lc.Listen(ctx, network, address)
-	l, err = callback(l, err)
-	if err == nil && sockopt != nil && sockopt.AcceptProxyProtocol {
+	if l == nil {
+		l, err = lc.Listen(ctx, network, address)
+		l, err = callback(l, err)
+		if err != nil {
+			return nil, err
+		}
+	}
+	if sockopt != nil && sockopt.AcceptProxyProtocol {
 		policyFunc := func(upstream net.Addr) (proxyproto.Policy, error) { return proxyproto.REQUIRE, nil }
 		l = &proxyproto.Listener{Listener: l, Policy: policyFunc}
 	}
-	return l, err
+	return l, nil
 }

 func (dl *DefaultListener) ListenPacket(ctx context.Context, addr net.Addr, sockopt *SocketConfig) (net.PacketConn, error) {
@@ -237,6 +237,10 @@ func (c *Config) GetTLSConfig(opts ...Option) *tls.Config {
 		ClientCAs: clientRoot,
 	}

+	if c.AllowInsecureIfPinnedPeerCertificate && c.PinnedPeerCertificateChainSha256 != nil {
+		config.InsecureSkipVerify = true
+	}
+
 	for _, opt := range opts {
 		opt(config)
 	}
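The new AllowInsecureIfPinnedPeerCertificate flag only makes GetTLSConfig set InsecureSkipVerify when a pinned peer certificate chain hash is configured; the pin remains the effective trust anchor. As a generic illustration of that pinning pattern — this is not v2ray-core's actual verification code, and hashing the concatenated raw certificates is an assumption made for the example:

// Illustrative sketch, not part of this commit.
package tlspin

import (
	"bytes"
	"crypto/sha256"
	"crypto/tls"
	"crypto/x509"
	"errors"
)

// pinnedTLSConfig skips the default chain/hostname verification and instead
// requires the peer's presented certificate chain to hash to the given pin.
func pinnedTLSConfig(pin [sha256.Size]byte) *tls.Config {
	return &tls.Config{
		InsecureSkipVerify: true, // deliberately disabled: the pin below is the only trust anchor
		VerifyPeerCertificate: func(rawCerts [][]byte, _ [][]*x509.Certificate) error {
			h := sha256.New()
			for _, raw := range rawCerts {
				h.Write(raw)
			}
			if !bytes.Equal(h.Sum(nil), pin[:]) {
				return errors.New("peer certificate chain does not match the pinned hash")
			}
			return nil
		},
	}
}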
@@ -232,6 +232,8 @@ type Config struct {
 	MinVersion Config_TLSVersion `protobuf:"varint,9,opt,name=min_version,json=minVersion,proto3,enum=v2ray.core.transport.internet.tls.Config_TLSVersion" json:"min_version,omitempty"`
 	// Maximum TLS version to support.
 	MaxVersion Config_TLSVersion `protobuf:"varint,10,opt,name=max_version,json=maxVersion,proto3,enum=v2ray.core.transport.internet.tls.Config_TLSVersion" json:"max_version,omitempty"`
+	// Whether or not to allow self-signed certificates when pinned_peer_certificate_chain_sha256 is present.
+	AllowInsecureIfPinnedPeerCertificate bool `protobuf:"varint,11,opt,name=allow_insecure_if_pinned_peer_certificate,json=allowInsecureIfPinnedPeerCertificate,proto3" json:"allow_insecure_if_pinned_peer_certificate,omitempty"`
 }

 func (x *Config) Reset() {
@@ -336,6 +338,13 @@ func (x *Config) GetMaxVersion() Config_TLSVersion {
 	return Config_Default
 }

+func (x *Config) GetAllowInsecureIfPinnedPeerCertificate() bool {
+	if x != nil {
+		return x.AllowInsecureIfPinnedPeerCertificate
+	}
+	return false
+}
+
 var File_transport_internet_tls_config_proto protoreflect.FileDescriptor

 var file_transport_internet_tls_config_proto_rawDesc = []byte{
@@ -367,7 +376,7 @@ var file_transport_internet_tls_config_proto_rawDesc = []byte{
 	0x59, 0x10, 0x01, 0x12, 0x13, 0x0a, 0x0f, 0x41, 0x55, 0x54, 0x48, 0x4f, 0x52, 0x49, 0x54, 0x59,
 	0x5f, 0x49, 0x53, 0x53, 0x55, 0x45, 0x10, 0x02, 0x12, 0x1b, 0x0a, 0x17, 0x41, 0x55, 0x54, 0x48,
 	0x4f, 0x52, 0x49, 0x54, 0x59, 0x5f, 0x56, 0x45, 0x52, 0x49, 0x46, 0x59, 0x5f, 0x43, 0x4c, 0x49,
-	0x45, 0x4e, 0x54, 0x10, 0x03, 0x22, 0xd9, 0x05, 0x0a, 0x06, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67,
+	0x45, 0x4e, 0x54, 0x10, 0x03, 0x22, 0xb2, 0x06, 0x0a, 0x06, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67,
 	0x12, 0x2d, 0x0a, 0x0e, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x5f, 0x69, 0x6e, 0x73, 0x65, 0x63, 0x75,
 	0x72, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x42, 0x06, 0x82, 0xb5, 0x18, 0x02, 0x28, 0x01,
 	0x52, 0x0d, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x49, 0x6e, 0x73, 0x65, 0x63, 0x75, 0x72, 0x65, 0x12,
@@ -406,22 +415,28 @@ var file_transport_internet_tls_config_proto_rawDesc = []byte{
 	0x72, 0x61, 0x79, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x70, 0x6f,
 	0x72, 0x74, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x2e, 0x74, 0x6c, 0x73, 0x2e,
 	0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x54, 0x4c, 0x53, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f,
-	0x6e, 0x52, 0x0a, 0x6d, 0x61, 0x78, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x49, 0x0a,
-	0x0a, 0x54, 0x4c, 0x53, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x0b, 0x0a, 0x07, 0x44,
-	0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x10, 0x00, 0x12, 0x0a, 0x0a, 0x06, 0x54, 0x4c, 0x53, 0x31,
-	0x5f, 0x30, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x54, 0x4c, 0x53, 0x31, 0x5f, 0x31, 0x10, 0x02,
-	0x12, 0x0a, 0x0a, 0x06, 0x54, 0x4c, 0x53, 0x31, 0x5f, 0x32, 0x10, 0x03, 0x12, 0x0a, 0x0a, 0x06,
-	0x54, 0x4c, 0x53, 0x31, 0x5f, 0x33, 0x10, 0x04, 0x3a, 0x17, 0x82, 0xb5, 0x18, 0x13, 0x0a, 0x08,
-	0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x12, 0x03, 0x74, 0x6c, 0x73, 0x90, 0xff, 0x29,
-	0x01, 0x42, 0x84, 0x01, 0x0a, 0x25, 0x63, 0x6f, 0x6d, 0x2e, 0x76, 0x32, 0x72, 0x61, 0x79, 0x2e,
-	0x63, 0x6f, 0x72, 0x65, 0x2e, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x70, 0x6f, 0x72, 0x74, 0x2e, 0x69,
-	0x6e, 0x74, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x2e, 0x74, 0x6c, 0x73, 0x50, 0x01, 0x5a, 0x35, 0x67,
-	0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x76, 0x32, 0x66, 0x6c, 0x79, 0x2f,
-	0x76, 0x32, 0x72, 0x61, 0x79, 0x2d, 0x63, 0x6f, 0x72, 0x65, 0x2f, 0x76, 0x35, 0x2f, 0x74, 0x72,
-	0x61, 0x6e, 0x73, 0x70, 0x6f, 0x72, 0x74, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x65, 0x74,
-	0x2f, 0x74, 0x6c, 0x73, 0xaa, 0x02, 0x21, 0x56, 0x32, 0x52, 0x61, 0x79, 0x2e, 0x43, 0x6f, 0x72,
-	0x65, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x70, 0x6f, 0x72, 0x74, 0x2e, 0x49, 0x6e, 0x74, 0x65,
-	0x72, 0x6e, 0x65, 0x74, 0x2e, 0x54, 0x6c, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
+	0x6e, 0x52, 0x0a, 0x6d, 0x61, 0x78, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x57, 0x0a,
+	0x29, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x5f, 0x69, 0x6e, 0x73, 0x65, 0x63, 0x75, 0x72, 0x65, 0x5f,
+	0x69, 0x66, 0x5f, 0x70, 0x69, 0x6e, 0x6e, 0x65, 0x64, 0x5f, 0x70, 0x65, 0x65, 0x72, 0x5f, 0x63,
+	0x65, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x65, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x08,
+	0x52, 0x24, 0x61, 0x6c, 0x6c, 0x6f, 0x77, 0x49, 0x6e, 0x73, 0x65, 0x63, 0x75, 0x72, 0x65, 0x49,
+	0x66, 0x50, 0x69, 0x6e, 0x6e, 0x65, 0x64, 0x50, 0x65, 0x65, 0x72, 0x43, 0x65, 0x72, 0x74, 0x69,
+	0x66, 0x69, 0x63, 0x61, 0x74, 0x65, 0x22, 0x49, 0x0a, 0x0a, 0x54, 0x4c, 0x53, 0x56, 0x65, 0x72,
+	0x73, 0x69, 0x6f, 0x6e, 0x12, 0x0b, 0x0a, 0x07, 0x44, 0x65, 0x66, 0x61, 0x75, 0x6c, 0x74, 0x10,
+	0x00, 0x12, 0x0a, 0x0a, 0x06, 0x54, 0x4c, 0x53, 0x31, 0x5f, 0x30, 0x10, 0x01, 0x12, 0x0a, 0x0a,
+	0x06, 0x54, 0x4c, 0x53, 0x31, 0x5f, 0x31, 0x10, 0x02, 0x12, 0x0a, 0x0a, 0x06, 0x54, 0x4c, 0x53,
+	0x31, 0x5f, 0x32, 0x10, 0x03, 0x12, 0x0a, 0x0a, 0x06, 0x54, 0x4c, 0x53, 0x31, 0x5f, 0x33, 0x10,
+	0x04, 0x3a, 0x17, 0x82, 0xb5, 0x18, 0x13, 0x0a, 0x08, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74,
+	0x79, 0x12, 0x03, 0x74, 0x6c, 0x73, 0x90, 0xff, 0x29, 0x01, 0x42, 0x84, 0x01, 0x0a, 0x25, 0x63,
+	0x6f, 0x6d, 0x2e, 0x76, 0x32, 0x72, 0x61, 0x79, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e, 0x74, 0x72,
+	0x61, 0x6e, 0x73, 0x70, 0x6f, 0x72, 0x74, 0x2e, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x65, 0x74,
+	0x2e, 0x74, 0x6c, 0x73, 0x50, 0x01, 0x5a, 0x35, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63,
+	0x6f, 0x6d, 0x2f, 0x76, 0x32, 0x66, 0x6c, 0x79, 0x2f, 0x76, 0x32, 0x72, 0x61, 0x79, 0x2d, 0x63,
+	0x6f, 0x72, 0x65, 0x2f, 0x76, 0x35, 0x2f, 0x74, 0x72, 0x61, 0x6e, 0x73, 0x70, 0x6f, 0x72, 0x74,
+	0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x2f, 0x74, 0x6c, 0x73, 0xaa, 0x02, 0x21,
+	0x56, 0x32, 0x52, 0x61, 0x79, 0x2e, 0x43, 0x6f, 0x72, 0x65, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73,
+	0x70, 0x6f, 0x72, 0x74, 0x2e, 0x49, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x2e, 0x54, 0x6c,
+	0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
 }

 var (
@@ -438,7 +453,7 @@ func file_transport_internet_tls_config_proto_rawDescGZIP() []byte {

 var file_transport_internet_tls_config_proto_enumTypes = make([]protoimpl.EnumInfo, 2)
 var file_transport_internet_tls_config_proto_msgTypes = make([]protoimpl.MessageInfo, 2)
-var file_transport_internet_tls_config_proto_goTypes = []interface{}{
+var file_transport_internet_tls_config_proto_goTypes = []any{
 	(Certificate_Usage)(0), // 0: v2ray.core.transport.internet.tls.Certificate.Usage
 	(Config_TLSVersion)(0), // 1: v2ray.core.transport.internet.tls.Config.TLSVersion
 	(*Certificate)(nil),    // 2: v2ray.core.transport.internet.tls.Certificate
@@ -462,7 +477,7 @@ func file_transport_internet_tls_config_proto_init() {
 			return
 		}
 	if !protoimpl.UnsafeEnabled {
-		file_transport_internet_tls_config_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
+		file_transport_internet_tls_config_proto_msgTypes[0].Exporter = func(v any, i int) any {
 			switch v := v.(*Certificate); i {
 			case 0:
 				return &v.state
@@ -474,7 +489,7 @@ func file_transport_internet_tls_config_proto_init() {
 				return nil
 			}
 		}
-		file_transport_internet_tls_config_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
+		file_transport_internet_tls_config_proto_msgTypes[1].Exporter = func(v any, i int) any {
 			switch v := v.(*Config); i {
 			case 0:
 				return &v.state
@@ -76,4 +76,6 @@ message Config {
   // Maximum TLS version to support.
   TLSVersion max_version = 10;
+
+  // Whether or not to allow self-signed certificates when pinned_peer_certificate_chain_sha256 is present.
+  bool allow_insecure_if_pinned_peer_certificate = 11;
 }
@@ -1,4 +1,5 @@
-using YamlDotNet.Serialization;
+using YamlDotNet.Core;
+using YamlDotNet.Serialization;
 using YamlDotNet.Serialization.NamingConventions;

 namespace ServiceLib.Common
@@ -35,13 +36,17 @@ namespace ServiceLib.Common
         /// </summary>
         /// <param name="obj"></param>
         /// <returns></returns>
-        public static string ToYaml(Object obj)
+        public static string ToYaml(Object? obj)
         {
+            string result = string.Empty;
+            if (obj == null)
+            {
+                return result;
+            }
             var serializer = new SerializerBuilder()
                 .WithNamingConvention(HyphenatedNamingConvention.Instance)
                 .Build();

-            string result = string.Empty;
             try
             {
                 result = serializer.Serialize(obj);
@@ -53,6 +58,24 @@ namespace ServiceLib.Common
             return result;
         }

+        public static string? PreprocessYaml(string str)
+        {
+            var deserializer = new DeserializerBuilder()
+                .WithNamingConvention(PascalCaseNamingConvention.Instance)
+                .Build();
+            try
+            {
+                var mergingParser = new MergingParser(new Parser(new StringReader(str)));
+                var obj = new DeserializerBuilder().Build().Deserialize(mergingParser);
+                return ToYaml(obj);
+            }
+            catch (Exception ex)
+            {
+                Logging.SaveLog("PreprocessYaml", ex);
+                return null;
+            }
+        }
+
         #endregion YAML
     }
 }
Some files were not shown because too many files have changed in this diff.