fix: clippy errors with new config (#4428)

* refactor: improve code quality with clippy fixes and standardized logging

- Replace dangerous unwrap()/expect() calls with proper error handling (see the fallback sketch below)
- Standardize logging from log:: to the logging! macro with Type:: classifications
- Fix app handle panics with graceful fallback patterns
- Improve error resilience across 35+ modules without breaking functionality
- Reduce clippy warnings from 300+ to 0 in main library code
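
Most of the unwrap()/expect() replacements in the diffs below follow the same "log and fall back" shape. A minimal sketch of that pattern, assuming the serde_yaml and log crates; the helper name is hypothetical, not from the codebase:

    use serde_yaml::{Mapping, Value};

    /// Hypothetical helper mirroring the fallback pattern used throughout this PR:
    /// instead of panicking when a value has the wrong shape, log the problem and
    /// fall back to a sensible default.
    fn into_mapping_or_empty(value: Value) -> Mapping {
        value.as_mapping().cloned().unwrap_or_else(|| {
            // The `log` crate is assumed here; the project routes this through its own macro.
            log::error!("merged config is not a mapping, using an empty one");
            Mapping::new()
        })
    }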

* chore: update Cargo.toml configuration

* refactor: resolve all clippy warnings
- Fix Arc clone warnings with explicit Arc::clone syntax across 9 files (see the sketch after this list)
- Add #[allow(clippy::expect_used)] to test functions where expect usage is appropriate
- Remove no-effect statements left over from debug code cleanup
- Apply clippy auto-fixes for dbg! macro removals and path statements
- Achieve zero clippy warnings on all targets with the -D warnings flag
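
A small self-contained sketch of two recurring changes in this commit — the explicit Arc::clone form and the test-only expect allowance. The test itself is illustrative, not taken from the codebase:

    use std::sync::Arc;

    #[test]
    #[allow(clippy::expect_used)] // expect() is acceptable in tests, where a panic *is* the failure report
    fn shares_outputs_buffer() {
        let outputs = Arc::new(vec![1, 2, 3]);
        // `Arc::clone(&x)` instead of `x.clone()`: the explicit form satisfies
        // clippy's `clone_on_ref_ptr` lint and makes it obvious that only the
        // reference count is bumped, not the underlying Vec.
        let shared = Arc::clone(&outputs);
        assert_eq!(shared.len(), outputs.len());
        let n: i32 = "42".parse().expect("literal should parse");
        assert_eq!(n, 42);
    }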

* chore: update Cargo.toml clippy configuration

* refactor: simplify macOS job configuration and improve caching

* refactor: remove unnecessary async/await from service and proxy functions

* refactor: streamline pnpm installation in CI configuration

* refactor: simplify error handling and remove unnecessary else statements

* refactor: replace async/await with synchronous locks for core management

* refactor: add workflow_dispatch trigger to clippy job

* refactor: convert async functions to synchronous for service management

* refactor: convert async functions to synchronous for UWP tool invocation
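
A hedged sketch of what the async-to-sync conversions in the three entries above boil down to; the types and names here are hypothetical, not the project's actual core manager. Where an async mutex only guarded quick, non-blocking state, a std::sync::Mutex suffices and the `async`/`.await` pair can be dropped:

    use std::sync::Mutex;

    // Hypothetical core-state holder; the real manager is more involved.
    struct CoreManager {
        running: Mutex<bool>,
    }

    impl CoreManager {
        // Before (sketch): `pub async fn is_running(&self) -> bool`, guarded by an
        // async mutex and called with `.await` everywhere.
        // After: for a short, non-blocking critical section a std::sync::Mutex is
        // enough, so the function (and its callers) no longer need to be async.
        pub fn is_running(&self) -> bool {
            *self
                .running
                .lock()
                .unwrap_or_else(|poisoned| poisoned.into_inner())
        }
    }

    fn main() {
        let manager = CoreManager {
            running: Mutex::new(true),
        };
        println!("core running: {}", manager.is_running());
    }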

* fix: correct wrong logging calls

* refactor: convert proxy restoration functions to async

* Revert "refactor: convert proxy restoration functions to async"

This reverts commit b82f5d250b2af7151e4dfd7dd411630b34ed2c18.

* refactor: update proxy restoration functions to return Result types

* fix: handle errors during proxy restoration and update async function signatures

* fix: handle errors during proxy restoration and update async function signatures

* refactor: update restore_pac_proxy and restore_sys_proxy functions to async

* fix: convert restore_pac_proxy and restore_sys_proxy functions to async

* fix: await restore_sys_proxy calls in proxy restoration logic
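
A hedged sketch of the call-site shape these proxy-restoration commits converge on, assuming the anyhow and log crates; both function names are hypothetical stand-ins. The restore helper is async and returns Result, and the caller awaits it and logs a failure instead of unwrapping:

    use anyhow::Result;

    // Hypothetical stand-in for the proxy-restoration helpers touched by this PR.
    async fn restore_sys_proxy() -> Result<()> {
        // ... platform-specific work would go here ...
        Ok(())
    }

    async fn restore_proxies() {
        if let Err(err) = restore_sys_proxy().await {
            // Log and continue rather than unwrapping: a restoration failure
            // should not take the whole shutdown path down with it.
            log::error!("failed to restore system proxy: {err}");
        }
    }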

* fix: suppress clippy warnings for unused async functions in proxy restoration

* fix: suppress clippy warnings for unused async functions in proxy restoration
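
These last two entries suppress clippy::unused_async, which fires on an `async fn` whose body never awaits. A minimal sketch of the suppression; the function below is hypothetical, not the project's actual restoration code:

    // The function is kept `async` so call sites and API signatures stay stable,
    // even though the current body has no `.await`; the attribute records that
    // this is intentional.
    #[allow(clippy::unused_async)]
    pub async fn restore_proxy_settings() {
        println!("restoring proxy settings");
    }
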
Author: Tunglies
Date: 2025-08-18 02:02:25 +08:00
Committed by: GitHub
Parent: a5fdd3f1a2
Commit: 537d27d10b
49 changed files with 1275 additions and 923 deletions

View File

@@ -18,7 +18,10 @@ pub fn use_merge(merge: Mapping, config: Mapping) -> Mapping {
     deep_merge(&mut config, &Value::from(merge));
-    let config = config.as_mapping().unwrap().clone();
+    let config = config.as_mapping().cloned().unwrap_or_else(|| {
+        log::error!("Failed to convert merged config to mapping, using empty mapping");
+        Mapping::new()
+    });
     config
 }

View File

@@ -14,7 +14,7 @@ type ResultLog = Vec<(String, String)>;
 /// Enhance mode
 /// Returns the final subscription, the keys it contains, and the script execution results
-pub async fn enhance() -> (Mapping, Vec<String>, HashMap<String, ResultLog>) {
+pub fn enhance() -> (Mapping, Vec<String>, HashMap<String, ResultLog>) {
     // the subscription from config.yaml
     let clash_config = { Config::clash().latest_ref().0.clone() };
@@ -274,7 +274,7 @@ pub async fn enhance() -> (Mapping, Vec<String>, HashMap<String, ResultLog>) {
         });
     }
-    config = use_tun(config, enable_tun).await;
+    config = use_tun(config, enable_tun);
     config = use_sort(config);
     // Apply a standalone DNS config if enabled

View File

@@ -7,23 +7,42 @@ pub fn use_script(
     config: Mapping,
     name: String,
 ) -> Result<(Mapping, Vec<(String, String)>)> {
-    use boa_engine::{native_function::NativeFunction, Context, JsValue, Source};
+    use boa_engine::{native_function::NativeFunction, Context, JsString, JsValue, Source};
     use std::{cell::RefCell, rc::Rc};
     let mut context = Context::default();
     let outputs = Rc::new(RefCell::new(vec![]));
-    let copy_outputs = outputs.clone();
+    let copy_outputs = Rc::clone(&outputs);
     unsafe {
         let _ = context.register_global_builtin_callable(
             "__verge_log__".into(),
             2,
             NativeFunction::from_closure(
                 move |_: &JsValue, args: &[JsValue], context: &mut Context| {
-                    let level = args.first().unwrap().to_string(context)?;
-                    let level = level.to_std_string().unwrap();
-                    let data = args.get(1).unwrap().to_string(context)?;
-                    let data = data.to_std_string().unwrap();
+                    let level = args.first().ok_or_else(|| {
+                        boa_engine::JsError::from_opaque(
+                            JsString::from("Missing level argument").into(),
+                        )
+                    })?;
+                    let level = level.to_string(context)?;
+                    let level = level.to_std_string().map_err(|_| {
+                        boa_engine::JsError::from_opaque(
+                            JsString::from("Failed to convert level to string").into(),
+                        )
+                    })?;
+                    let data = args.get(1).ok_or_else(|| {
+                        boa_engine::JsError::from_opaque(
+                            JsString::from("Missing data argument").into(),
+                        )
+                    })?;
+                    let data = data.to_string(context)?;
+                    let data = data.to_std_string().map_err(|_| {
+                        boa_engine::JsError::from_opaque(
+                            JsString::from("Failed to convert data to string").into(),
+                        )
+                    })?;
                     let mut out = copy_outputs.borrow_mut();
                     out.push((level, data));
                     Ok(JsValue::undefined())
@@ -49,20 +68,24 @@ pub fn use_script(
     let safe_name = escape_js_string_for_single_quote(&name);
     let code = format!(
-        r#"try{{
+        r"try{{
 {script};
 JSON.stringify(main({config_str},'{safe_name}')||'')
 }} catch(err) {{
 `__error_flag__ ${{err.toString()}}`
-}}"#
+}}"
     );
     if let Ok(result) = context.eval(Source::from_bytes(code.as_str())) {
         if !result.is_string() {
             anyhow::bail!("main function should return object");
         }
-        let result = result.to_string(&mut context).unwrap();
-        let result = result.to_std_string().unwrap();
+        let result = result
+            .to_string(&mut context)
+            .map_err(|e| anyhow::anyhow!("Failed to convert JS result to string: {}", e))?;
+        let result = result
+            .to_std_string()
+            .map_err(|_| anyhow::anyhow!("Failed to convert JS string to std string"))?;
         // Parse the JSON result directly, without any other parsing
         let res: Result<Mapping, Error> = parse_json_safely(&result);
@@ -102,6 +125,8 @@ fn escape_js_string_for_single_quote(s: &str) -> String {
 }
 #[test]
+#[allow(unused_variables)]
+#[allow(clippy::expect_used)]
 fn test_script() {
     let script = r#"
 function main(config) {
@@ -114,7 +139,7 @@ fn test_script() {
 }
 "#;
-    let config = r#"
+    let config = r"
 rules:
 - 111
 - 222
@@ -122,22 +147,21 @@ fn test_script() {
   enable: false
 dns:
   enable: false
-"#;
+";
-    let config = serde_yaml::from_str(config).unwrap();
-    let (config, results) = use_script(script.into(), config, "".to_string()).unwrap();
+    let config = serde_yaml::from_str(config).expect("Failed to parse test config YAML");
+    let (config, results) = use_script(script.into(), config, "".to_string())
+        .expect("Script execution should succeed in test");
-    let _ = serde_yaml::to_string(&config).unwrap();
+    let _ = serde_yaml::to_string(&config).expect("Failed to serialize config to YAML");
     let yaml_config_size = std::mem::size_of_val(&config);
     dbg!(yaml_config_size);
     let box_yaml_config_size = std::mem::size_of_val(&Box::new(config));
     dbg!(box_yaml_config_size);
     dbg!(results);
     assert!(box_yaml_config_size < yaml_config_size);
 }
 // Test escaping of special characters
 #[test]
+#[allow(clippy::expect_used)]
 fn test_escape_unescape() {
     let test_string = r#"Hello "World"!\nThis is a test with \u00A9 copyright symbol."#;
     let escaped = escape_js_string_for_single_quote(test_string);
@@ -145,13 +169,14 @@ fn test_escape_unescape() {
println!("Escaped: {escaped}");
let json_str = r#"{"key":"value","nested":{"key":"value"}}"#;
let parsed = parse_json_safely(json_str).unwrap();
let parsed = parse_json_safely(json_str).expect("Failed to parse test JSON safely");
assert!(parsed.contains_key("key"));
assert!(parsed.contains_key("nested"));
let quoted_json_str = r#""{"key":"value","nested":{"key":"value"}}""#;
let parsed_quoted = parse_json_safely(quoted_json_str).unwrap();
let parsed_quoted =
parse_json_safely(quoted_json_str).expect("Failed to parse quoted test JSON safely");
assert!(parsed_quoted.contains_key("key"));
assert!(parsed_quoted.contains_key("nested"));

View File

@@ -89,6 +89,8 @@ mod tests {
     use serde_yaml::Value;
     #[test]
+    #[allow(clippy::unwrap_used)]
+    #[allow(clippy::expect_used)]
     fn test_delete_proxy_and_references() {
         let config_str = r#"
 proxies:
@@ -107,7 +109,8 @@ proxy-groups:
     proxies:
       - "proxy1"
 "#;
-        let mut config: Mapping = serde_yaml::from_str(config_str).unwrap();
+        let mut config: Mapping =
+            serde_yaml::from_str(config_str).expect("Failed to parse test config YAML");
         let seq = SeqMap {
             prepend: Sequence::new(),
@@ -118,38 +121,51 @@ proxy-groups:
         config = use_seq(seq, config, "proxies");
         // Check if proxy1 is removed from proxies
-        let proxies = config.get("proxies").unwrap().as_sequence().unwrap();
+        let proxies = config
+            .get("proxies")
+            .expect("proxies field should exist")
+            .as_sequence()
+            .expect("proxies should be a sequence");
         assert_eq!(proxies.len(), 1);
         assert_eq!(
             proxies[0]
                 .as_mapping()
-                .unwrap()
+                .expect("proxy should be a mapping")
                 .get("name")
-                .unwrap()
+                .expect("proxy should have name")
                 .as_str()
-                .unwrap(),
+                .expect("name should be string"),
             "proxy2"
         );
         // Check if proxy1 is removed from all groups
-        let groups = config.get("proxy-groups").unwrap().as_sequence().unwrap();
+        let groups = config
+            .get("proxy-groups")
+            .expect("proxy-groups field should exist")
+            .as_sequence()
+            .expect("proxy-groups should be a sequence");
         let group1_proxies = groups[0]
             .as_mapping()
-            .unwrap()
+            .expect("group should be a mapping")
             .get("proxies")
-            .unwrap()
+            .expect("group should have proxies")
             .as_sequence()
-            .unwrap();
+            .expect("group proxies should be a sequence");
         let group2_proxies = groups[1]
             .as_mapping()
-            .unwrap()
+            .expect("group should be a mapping")
             .get("proxies")
-            .unwrap()
+            .expect("group should have proxies")
             .as_sequence()
-            .unwrap();
+            .expect("group proxies should be a sequence");
         assert_eq!(group1_proxies.len(), 1);
-        assert_eq!(group1_proxies[0].as_str().unwrap(), "proxy2");
+        assert_eq!(
+            group1_proxies[0]
+                .as_str()
+                .expect("proxy name should be string"),
+            "proxy2"
+        );
         assert_eq!(group2_proxies.len(), 0);
     }
 }

View File

@@ -18,7 +18,7 @@ macro_rules! append {
     };
 }
-pub async fn use_tun(mut config: Mapping, enable: bool) -> Mapping {
+pub fn use_tun(mut config: Mapping, enable: bool) -> Mapping {
     let tun_key = Value::from("tun");
     let tun_val = config.get(&tun_key);
     let mut tun_val = tun_val.map_or(Mapping::new(), |val| {
@@ -59,8 +59,10 @@ pub async fn use_tun(mut config: Mapping, enable: bool) -> Mapping {
         #[cfg(target_os = "macos")]
         {
-            crate::utils::resolve::restore_public_dns().await;
-            crate::utils::resolve::set_public_dns("223.6.6.6".to_string()).await;
+            tokio::spawn(async {
+                crate::utils::resolve::restore_public_dns().await;
+                crate::utils::resolve::set_public_dns("223.6.6.6".to_string()).await;
+            });
         }
     }
@@ -69,7 +71,9 @@ pub async fn use_tun(mut config: Mapping, enable: bool) -> Mapping {
     } else {
         // When TUN is not enabled, only restore the system DNS; do not modify the DNS settings in the config file
         #[cfg(target_os = "macos")]
-        crate::utils::resolve::restore_public_dns().await;
+        tokio::spawn(async {
+            crate::utils::resolve::restore_public_dns().await;
+        });
     }
     // Update the TUN config