592 Commits
v0.2.7 ... dev

Author SHA1 Message Date
Tunglies
4a7859bdae refactor: replace hardcoded DNS config filename with constant reference (#5280)
* refactor: replace hardcoded DNS config filename with constant reference

* refactor: remove redundant import of constants in IClashTemp template method

* refactor: add conditional compilation for DEFAULT_REDIR based on OS

* refactor: simplify default TPROXY port handling and remove unused trace_err macro

* refactor: simplify default TPROXY port fallback logic
2025-11-01 22:50:19 +08:00
Tunglies
c0f9920531 refactor: remove orphaned process cleanup functionality
It might break mihomo startup.

Due to potential failures in process name handling, permission verification, unsafe FORCE KILL, and process lookup.
2025-11-01 22:11:16 +08:00
Tunglies
d3d32006c3 feat: add logging check to pre-commit and CI workflow 2025-11-01 21:22:41 +08:00
Tunglies
fb260fb33d Refactor logging to use a centralized logging utility across the application (#5277)
- Replaced direct log calls with a new logging macro that includes a logging type for better categorization.
- Updated logging in various modules including `merge.rs`, `mod.rs`, `tun.rs`, `clash.rs`, `profile.rs`, `proxy.rs`, `window.rs`, `lightweight.rs`, `guard.rs`, `autostart.rs`, `dirs.rs`, `dns.rs`, `scheme.rs`, `server.rs`, and `window_manager.rs`.
- Introduced logging types such as `Core`, `Network`, `ProxyMode`, `Window`, `Lightweight`, `Service`, and `File` to enhance log clarity and filtering.
2025-11-01 20:47:01 +08:00
Slinetrac
50567d9b97 refactor(profiles): remove import verification and simplify post-import refresh 2025-11-01 20:09:00 +08:00
Tunglies
9370a56337 refactor: reduce clone operation (#5268)
* refactor: optimize item handling and improve profile management

* refactor: update IVerge references to use references instead of owned values

* refactor: update patch_verge to use data_ref for improved data handling

* refactor: move handle_copy function to improve resource initialization logic

* refactor: update profile handling to use references for improved memory efficiency

* refactor: simplify get_item method and update profile item retrieval to use string slices

* refactor: update profile validation and patching to use references for improved performance

* refactor: update profile functions to use references for improved performance and memory efficiency

* refactor: update profile patching functions to use references for improved memory efficiency

* refactor: simplify merge function in PrfOption to enhance readability

* refactor: update change_core function to accept a reference for improved memory efficiency

* refactor: update PrfItem and profile functions to use references for improved memory efficiency

* refactor: update resolve_scheme function to accept a reference for improved memory efficiency

* refactor: update resolve_scheme function to accept a string slice for improved flexibility

* refactor: simplify update_profile parameters and logic
2025-11-01 20:03:56 +08:00
renovate[bot]
73e53eb33f chore(deps): update npm dependencies (#5278)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-11-01 19:42:34 +08:00
Slinetrac
30d1655e07 docs: UPDATELOG.md 2025-11-01 19:25:04 +08:00
Slinetrac
9dc50da167 fix: profile auto refresh #5274 2025-11-01 19:24:54 +08:00
Tunglies
b3b8eeb577 refactor: convert file operations to async using tokio fs (#5267)
* refactor: convert file operations to async using tokio fs

* refactor: integrate AsyncHandler for file operations in backup processes
2025-11-01 19:24:52 +08:00
Sline
413f29e22a fix: linux theme sync (#5273) 2025-11-01 19:24:47 +08:00
Tunglies
ae319279ae chore(deps): update cc, clash_verge_logger, and version-compare to latest versions 2025-11-01 10:15:12 +08:00
Tunglies
c0e111e756 fix: resolve macOS lightweight mode exit synchronization issues and improve logging levels #5241 2025-11-01 10:09:51 +08:00
Slinetrac
52545a626c chore(lint): enforce no warnings in pre hooks 2025-11-01 09:49:52 +08:00
Tunglies
518875acde refactor: update draft handling and improve benchmark structure 2025-10-31 23:31:04 +08:00
renovate[bot]
b672dd7055 chore(deps): update dependency sass to ^1.93.3 (#5265)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-31 22:54:52 +08:00
renovate[bot]
804641425b chore(deps): update dependency vitest to ^4.0.6 (#5264)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-31 22:51:12 +08:00
Tunglies
d3386908ff fix: improve caching strategy for autobuild jobs 2025-10-31 19:48:36 +08:00
Slinetrac
59e7095b0f chore: up sysproxy git hash 2025-10-31 19:20:07 +08:00
Tunglies
8fc8eb1789 chore: add acknowledgments for contributors in update log 2025-10-31 18:15:46 +08:00
Slinetrac
0f1537ef48 chore: up Cargo.lock 2025-10-31 17:36:33 +08:00
oomeow
8c734a5a35 fix: disable tun mode menu on tray when tun mode is unavailable (#4975)
* fix: check if service installed when toggle tun mode on tray

* chore: cargo fmt

* fix: auto disable tun mode

* docs: update UPDATELOG.md

* fix: init Tun mode status

* chore: update

* feat: disable tun mode tray menu when tun mode is unavailable

* fix: restart core when uninstall service is canceled

* chore: remove check notification when toggle tun mode

* chore: fix updatelog

---------

Co-authored-by: Tunglies <77394545+Tunglies@users.noreply.github.com>
2025-10-31 17:31:40 +08:00
Tunglies
5187712a71 chore(deps): remove zustand and update vite-plugin-monaco-editor to esm version 2025-10-31 16:55:52 +08:00
renovate[bot]
7b7fa2239b chore(deps): update dependency dayjs to v1.11.19 (#5261)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-31 13:36:35 +08:00
Slinetrac
85ff296912 chore(deps): update deps 2025-10-31 11:28:14 +08:00
renovate[bot]
5e7adf76ca chore(deps): update npm dependencies (#5258)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-31 10:46:42 +08:00
Slinetrac
d094d3885c chore: move CONTRIBUTING_i18n.md to /docs 2025-10-31 10:41:48 +08:00
Tunglies
648c93c066 chore(i18n): update localization files sorting and add i18n contribution guideline
- Add i18n contribution guideline
- Chinese (zh.json): fix TPROXY port missing translation
2025-10-31 05:15:47 +08:00
Tunglies
1e9df69ffc fix: remove unused dependencies from Cargo.toml and Cargo.lock 2025-10-31 00:55:50 +08:00
Tunglies
ef35752d84 fix: specify version for sysproxy dependency in Cargo.toml 2025-10-31 00:33:11 +08:00
Tunglies
ffb7400a22 fix: add updateCargoLock to postUpdateOptions in renovate.json 2025-10-31 00:11:55 +08:00
oomeow
4d5f1f4327 fix: incorrect proxies route 2025-10-30 20:28:56 +08:00
Tunglies
999830aaf5 fix: correct download link for ARM64 Windows setup in autobuild workflow 2025-10-30 20:18:23 +08:00
Tunglies
6d7efbbf28 fix: reorder import statements and enhance normalizeDetailsTags function 2025-10-30 20:08:57 +08:00
Tunglies
a869dbb441 Revert "refactor: profile switch (#5197)"
This reverts commit c2dcd86722.
2025-10-30 18:11:04 +08:00
Tunglies
928f226d10 fix: update clash_verge_service_ipc version to 2.0.21 2025-10-30 18:02:24 +08:00
Tunglies
c27ad3fdcb feat: add log opening functionality in tray menu and update localization 2025-10-30 17:34:41 +08:00
Sline
c2dcd86722 refactor: profile switch (#5197)
* refactor: proxy refresh

* fix(proxy-store): properly hydrate and filter backend provider snapshots

* fix(proxy-store): add monotonic fetch guard and event bridge cleanup

* fix(proxy-store): tweak fetch sequencing guard to prevent snapshot invalidation from wiping fast responses

* docs: UPDATELOG.md

* fix(proxy-snapshot, proxy-groups): restore last-selected proxy and group info

* fix(proxy): merge static and provider entries in snapshot; fix Virtuoso viewport height

* fix(proxy-groups): restrict reduced-height viewport to chain-mode column

* refactor(profiles): introduce a state machine

* refactor:replace state machine with reducer

* refactor:introduce a profile switch worker

* refactor: hooked up a backend-driven profile switch flow

* refactor(profile-switch): serialize switches with async queue and enrich frontend events

* feat(profiles): centralize profile switching with reducer/driver queue to fix stuck UI on rapid toggles

* chore: translate comments and log messages to English to avoid encoding issues

* refactor: migrate backend queue to SwitchDriver actor

* fix(profile): unify error string types in validation helper

* refactor(profile): make switch driver fully async and handle panics safely

* refactor(cmd): move switch-validation helper into new profile_switch module

* refactor(profile): modularize switch logic into profile_switch.rs

* refactor(profile_switch): modularize switch handler

- Break monolithic switch handler into proper module hierarchy
- Move shared globals, constants, and SwitchScope guard to state.rs
- Isolate queue orchestration and async task spawning in driver.rs
- Consolidate switch pipeline and config patching in workflow.rs
- Extract request pre-checks/YAML validation into validation.rs

* refactor(profile_switch): centralize state management and add cancellation flow

- Introduced SwitchManager in state.rs to unify mutex, sequencing, and SwitchScope handling.
- Added SwitchCancellation and SwitchRequest wrappers to encapsulate cancel tokens and notifications.
- Updated driver to allocate task IDs via SwitchManager, cancel old tokens, and queue next jobs in order.
- Updated workflow to check cancellation and sequence at each phase, replacing global flags with manager APIs.

* feat(profile_switch): integrate explicit state machine for profile switching

- workflow.rs:24 now delegates each switch to SwitchStateMachine, passing an owned SwitchRequest.
  Queue cancellation and state-sequence checks are centralized inside the machine instead of scattered guards.
- workflow.rs:176 replaces the old helper with `SwitchStateMachine::new(manager(), None, profiles).run().await`,
  ensuring manual profile patches follow the same workflow (locking, validation, rollback) as queued switches.
- workflow.rs:180 & 275 expose `validate_profile_yaml` and `restore_previous_profile` for reuse inside the state machine.

- workflow/state_machine.rs:1 introduces a dedicated state machine module.
  It manages global mutex acquisition, request/cancellation state, YAML validation, draft patching,
  `CoreManager::update_config`, failure rollback, and tray/notification side-effects.
  Transitions check for cancellations and stale sequences; completions release guards via `SwitchScope` drop.

* refactor(profile-switch): integrate stage-aware panic handling

- src-tauri/src/cmd/profile_switch/workflow/state_machine.rs:1
  Defines SwitchStage and SwitchPanicInfo as crate-visible, wraps each transition in with_stage(...) with catch_unwind, and propagates CmdResult<bool> to distinguish validation failures from panics while keeping cancellation semantics.

- src-tauri/src/cmd/profile_switch/workflow.rs:25
  Updates run_switch_job to return Result<bool, SwitchPanicInfo>, routing timeout, validation, config, and stage panic cases separately. Reuses SwitchPanicInfo for logging/UI notifications; patch_profiles_config maps state-machine panics into user-facing error strings.

- src-tauri/src/cmd/profile_switch/driver.rs:1
  Adds SwitchJobOutcome to unify workflow results: normal completions carry bool, and panics propagate SwitchPanicInfo. The driver loop now logs panics explicitly and uses AssertUnwindSafe(...).catch_unwind() to guard setup-phase panics.

* refactor(profile-switch): add watchdog, heartbeat, and async timeout guards

- Introduce SwitchHeartbeat for stage tracking and timing; log stage transitions with elapsed durations.
- Add watchdog in driver to cancel stalled switches (5s heartbeat timeout).
- Wrap blocking ops (Config::apply, tray updates, profiles_save_file_safe, etc.) with time::timeout to prevent async stalls.
- Improve logs for stage transitions and watchdog timeouts to clarify cancellation points.

* refactor(profile-switch): async post-switch tasks, early lock release, and spawn_blocking for IO

* feat(profile-switch): track cleanup and coordinate pipeline

- Add explicit cleanup tracking in the driver (`cleanup_profiles` map + `CleanupDone` messages) to know when background post-switch work is still running before starting a new workflow. (driver.rs:29-50)
- Update `handle_enqueue` to detect “cleanup in progress”: same-profile retries are short-circuited; other requests collapse the pending queue, cancelling old tokens so only the latest intent survives. (driver.rs:176-247)
- Rework scheduling helpers: `start_next_job` refuses to start while cleanup is outstanding; discarded requests release cancellation tokens; cleanup completion explicitly restarts the pipeline. (driver.rs:258-442)

* feat(profile-switch): unify post-switch cleanup handling

- workflow.rs (25-427) returns `SwitchWorkflowResult` (success + CleanupHandle) or `SwitchWorkflowError`.
  All failure/timeout paths stash post-switch work into a single CleanupHandle.
  Cleanup helpers (`notify_profile_switch_finished` and `close_connections_after_switch`) run inside that task for proper lifetime handling.

- driver.rs (29-439) propagates CleanupHandle through `SwitchJobOutcome`, spawns a bridge to wait for completion, and blocks `start_next_job` until done.
  Direct driver-side panics now schedule failure cleanup via the shared helper.

* tmp

* Revert "tmp"

This reverts commit e582cf4a652231a67a7c951802cb19b385f6afd7.

* refactor: queue frontend events through async dispatcher

* refactor: queue frontend switch/proxy events and throttle notices

* chore: frontend debug log

* fix: re-enable only ProfileSwitchFinished events - keep others suppressed for crash isolation

- Re-enabled only ProfileSwitchFinished events; RefreshClash, RefreshProxy, and ProfileChanged remain suppressed (they log suppression messages)
- Allows frontend to receive task completion notifications for UI feedback while crash isolation continues
- src-tauri/src/core/handle.rs now only suppresses notify_profile_changed
- Serialized emitter, frontend logging bridge, and other diagnostics unchanged

* refactor: refreshClashData

* refactor(proxy): stabilize proxy switch pipeline and rendering

- Add coalescing buffer in notification.rs to emit only the latest proxies-updated snapshot
- Replace nextTick with queueMicrotask in asyncQueue.ts for same-frame hydration
- Hide auto-generated GLOBAL snapshot and preserve optional metadata in proxy-snapshot.ts
- Introduce stable proxy rendering state in AppDataProvider (proxyTargetProfileId, proxyDisplayProfileId, isProxyRefreshPending)
- Update proxy page to fade content during refresh and overlay status banner instead of showing incomplete snapshot

* refactor(profiles): move manual activating logic to reducer for deterministic queue tracking

* refactor: replace proxy-data event bridge with pure polling and simplify proxy store

- Replaced the proxy-data event bridge with pure polling: AppDataProvider now fetches the initial snapshot and drives refreshes from the polled switchStatus, removing verge://refresh-* listeners (src/providers/app-data-provider.tsx).
- Simplified proxy-store by dropping the proxies-updated listener queue and unused payload/normalizer helpers; relies on SWR/provider fetch path + calcuProxies for live updates (src/stores/proxy-store.ts).
- Trimmed layout-level event wiring to keep only notice/show/hide subscriptions, removing obsolete refresh listeners (src/pages/_layout/useLayoutEvents.ts).

* refactor(proxy): streamline proxies-updated handling and store event flow

- AppDataProvider now treats `proxies-updated` as the fast path: the listener
  calls `applyLiveProxyPayload` immediately and schedules only a single fallback
  `fetchLiveProxies` ~600 ms later (replacing the old 0/250/1000/2000 cascade).
  Expensive provider/rule refreshes run in parallel via `Promise.allSettled`, and
  the multi-stage queue on profile updates completion was removed
  (src/providers/app-data-provider.tsx).

- Rebuilt proxy-store to support the event flow: restored `setLive`, provider
  normalization, and an animation-frame + async queue that applies payloads without
  blocking. Exposed `applyLiveProxyPayload` so providers can push events directly
  into the store (src/stores/proxy-store.ts).

* refactor: switch delay

* refactor(app-data-provider): trigger getProfileSwitchStatus revalidation on profile-switch-finished

- AppDataProvider now listens to `profile-switch-finished` and calls `mutate("getProfileSwitchStatus")` to immediately update state and unlock buttons (src/providers/app-data-provider.tsx).
- Retain existing detailed timing logs for monitoring other stages.
- Frontend success notifications remain instant; background refreshes continue asynchronously.

* fix(profiles): prevent duplicate toast on page remount

* refactor(profile-switch): make active switches preemptible and prevent queue piling

- Add notify mechanism to SwitchCancellation to await cancellation without busy-waiting (state.rs:82)
- Collapse pending queue to a single entry in the driver; cancel in-flight task on newer request (driver.rs:232)
- Update handle_update_core to watch cancel token and 30s timeout; release locks, discard draft, and exit early if canceled (state_machine.rs:301)
- Providers revalidate status immediately on profile-switch-finished events (app-data-provider.tsx:208)

* refactor(core): make core reload phase controllable, reduce 0xcfffffff risk

- CoreManager::apply_config now calls `reload_config_with_retry`, each attempt waits up to 5s, retries 3 times; on failure, returns error with duration logged and triggers core restart if needed (src-tauri/src/core/manager/config.rs:175, 205)
- `reload_config_with_retry` logs attempt info on timeout or error; if error is a Mihomo connection issue, fallback to original restart logic (src-tauri/src/core/manager/config.rs:211)
- `reload_config_once` retains original Mihomo call for retry wrapper usage (src-tauri/src/core/manager/config.rs:247)

* chore(frontend-logs): downgrade routine event logs from info to debug

- Logs like `emit_via_app entering spawn_blocking`, `Async emit…`, `Buffered proxies…` are now debug-level (src-tauri/src/core/notification.rs:155, :265, :309…)
- Genuine warnings/errors (failures/timeouts) remain at warn/error
- Core stage logs remain info to keep backend tracking visible

* refactor(frontend-emit): make emit_via_app fire-and-forget async

- `emit_via_app` now a regular function; spawns with `tokio::spawn` and logs a warn if `emit_to` fails, caller returns immediately (src-tauri/src/core/notification.rs:269)
- Removed `.await` at Async emit and flush_proxies calls; only record dispatch duration and warn on failure (src-tauri/src/core/notification.rs:211, :329)

* refactor(ui): restructure profile switch for event-driven speed + polling stability

- Backend
  - SwitchManager maintains a lightweight event queue: added `event_sequence`, `recent_events`, and `SwitchResultEvent`; provides `push_event` / `events_after` (state.rs)
  - `handle_completion` pushes events on success/failure and keeps `last_result` (driver.rs) for frontend incremental fetch
  - New Tauri command `get_profile_switch_events(after_sequence)` exposes `events_after` (profile_switch/mod.rs → profile.rs → lib.rs)
- Notification system
  - `NotificationSystem::process_event` only logs debug, disables WebView `emit_to`, fixes 0xcfffffff
  - Related emit/buffer functions now safe no-op, removed unused structures and warnings (notification.rs)
- Frontend
  - services/cmds.ts defines `SwitchResultEvent` and `getProfileSwitchEvents`
  - `AppDataProvider` holds `switchEventSeqRef`, polls incremental events every 0.25s (busy) / 1s (idle); each event triggers:
      - immediate `globalMutate("getProfiles")` to refresh current profile
      - background refresh of proxies/providers/rules via `Promise.allSettled` (failures logged, non-blocking)
      - forced `mutateSwitchStatus` to correct state
  - original switchStatus effect calls `handleSwitchResult` as fallback; other toast/activation logic handled in profiles.tsx
- Commands / API cleanup
  - removed `pub use profile_switch::*;` in cmd::mod.rs to avoid conflicts; frontend uses new command polling

* refactor(frontend): optimize profile switch with optimistic updates

* refactor(profile-switch): switch to event-driven flow with Profile Store

- SwitchManager pushes events; frontend polls get_profile_switch_events
- Zustand store handles optimistic profiles; AppDataProvider applies updates and background-fetches
- UI flicker removed

* fix(app-data): re-hook profile store updates during switch hydration

* fix(notification): restore frontend event dispatch and non-blocking emits

* fix(app-data-provider): restore proxy refresh and seed snapshot after refactor

* fix: ensure switch completion events are received and handle proxies-updated

* fix(app-data-provider): dedupe switch results by taskId and fix stale profile state

* fix(profile-switch): ensure patch_profiles_config_by_profile_index waits for real completion and handle join failures in apply_config_with_timeout

* docs: UPDATELOG.md

* chore: add necessary comments

* fix(core): always dispatch async proxy snapshot after RefreshClash event

* fix(proxy-store, provider): handle pending snapshots and proxy profiles

- Added pending snapshot tracking in proxy-store so `lastAppliedFetchId` no longer jumps on seed. Profile adoption is deferred until a qualifying fetch completes. Exposed `clearPendingProfile` for rollback support.
- Cleared pending snapshot state whenever live payloads apply or the store resets, preventing stale optimistic profile IDs after failures.
- In provider integration, subscribed to the pending proxy profile and fed it into target-profile derivation. Cleared it on failed switch results so hydration can advance and UI status remains accurate.

* fix(proxy): re-hook tray refresh events into proxy refresh queue

- Reattached listen("verge://refresh-proxy-config", …) at src/providers/app-data-provider.tsx:402 and registered it for cleanup.
- Added matching window fallback handler at src/providers/app-data-provider.tsx:430 so in-app dispatches share the same refresh path.

* fix(proxy-snapshot/proxy-groups): address review findings on snapshot placeholders

- src/utils/proxy-snapshot.ts:72-95 now derives snapshot group members solely from proxy-groups.proxies, so provider ids under `use` no longer generate placeholder proxy items.
- src/components/proxy/proxy-groups.tsx:665-677 lets the hydration overlay capture pointer events (and shows a wait cursor) so users can’t interact with snapshot-only placeholders before live data is ready.

* fix(profile-switch): preserve queued requests and avoid stale connection teardown

- Keep earlier queued switches intact by dropping the blanket “collapse” call: after removing duplicates for the same profile, new requests are simply appended, leaving other profiles pending (driver.rs:376). Resolves queue-loss scenario.
- Gate connection cleanup on real successes so cancelled/stale runs no longer tear down Mihomo connections; success handler now skips close_connections_after_switch when success == false (workflow.rs:419).

* fix(profile-switch, layout): improve profile validation and restore backend refresh

- Hardened profile validation using `tokio::fs` with a 5s timeout and offloading YAML parsing to `AsyncHandler::spawn_blocking`, preventing slow disks or malformed files from freezing the runtime (src-tauri/src/cmd/profile_switch/validation.rs:9, 71).
- Restored backend-triggered refresh handling by listening for `verge://refresh-clash-config` / `verge://refresh-verge-config` and invoking shared refresh services so SWR caches stay in sync with core events (src/pages/_layout/useLayoutEvents.ts:6, 45, 55).

* feat(profile-switch): handle cancellations for superseded requests

- Added a `cancelled` flag and constructor so superseded requests publish an explicit cancellation instead of a failure (src-tauri/src/cmd/profile_switch/state.rs:249, src-tauri/src/cmd/profile_switch/driver.rs:482)
- Updated the profile switch effect to log cancellations as info, retain the shared `mutate` call, and skip emitting error toasts while still refreshing follow-up work (src/pages/profiles.tsx:554, src/pages/profiles.tsx:581)
- Exposed the new flag on the TypeScript contract to keep downstream consumers type-safe (src/services/cmds.ts:20)

* fix(profiles): wrap logging payload for Tauri frontend_log

* fix(profile-switch): add rollback and error propagation for failed persistence

- Added rollback on apply failure so Mihomo restores to the previous profile
  before exiting the success path early (state_machine.rs:474).
- Reworked persist_profiles_with_timeout to surface timeout/join/save errors,
  convert them into CmdResult failures, and trigger rollback + error propagation
  when persistence fails (state_machine.rs:703).

* fix(profile-switch): prevent mid-finalize reentrancy and lingering tasks

* fix(profile-switch): preserve pending queue and surface discarded switches

* fix(profile-switch): avoid draining Mihomo sockets on failed/cancelled switches

* fix(app-data-provider): restore backend-driven refresh and reattach fallbacks

* fix(profile-switch): queue concurrent updates and add bounded wait/backoff

* fix(proxy): trigger live refresh on app start for proxy snapshot

* refactor(profile-switch): split flow into layers and centralize async cleanup

- Introduced `SwitchDriver` to encapsulate queue and driver logic while keeping the public Tauri command API.
- Added workflow/cleanup helpers for notification dispatch and Mihomo connection draining, re-exported for API consistency.
- Replaced monolithic state machine with `core.rs`, `context.rs`, and `stages.rs`, plus a thin `mod.rs` re-export layer; stage methods are now individually testable.
- Removed legacy `workflow/state_machine.rs` and adjusted visibility on re-exported types/constants to ensure compilation.
2025-10-30 17:29:15 +08:00
renovate[bot]
af79bcd1cf chore(deps): update dependency react-i18next to v16.2.2 (#5251)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-30 17:12:20 +08:00
Tunglies
2d73afdff2 style: update UPDATELOG.md using details and summary 2025-10-30 17:00:00 +08:00
miewx
e7a9f8f755 add support x-oss-meta-subscription-userinfo (#5234)
* add support x-oss-meta-subscription-userinfo

* Update prfitem.rs

match any subscription-userinfo

* Update prfitem.rs

改为 ends_with 更好

* feat(config): enforce stricter header match for subscription usage

---------

Co-authored-by: i18n <i18n.site@gmail.com>
Co-authored-by: Slinetrac <realakayuki@gmail.com>
2025-10-30 11:24:40 +08:00
renovate[bot]
d209238009 chore(deps): update npm dependencies (#5245)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-30 10:24:35 +08:00
Sukka
dfcdb33e58 chore: use vite-swc-react (#5246) 2025-10-30 10:19:29 +08:00
Tunglies
37359ffc27 fix: add check for allow_auto_update in timer task filtering 2025-10-30 01:40:43 +08:00
oomeow
fb09e6c85d fix: notification can not notify frontend (#5243) 2025-10-30 00:34:45 +08:00
oomeow
d10665091b chore: update eslint ignorePattern 2025-10-29 21:16:18 +08:00
Tunglies
d8b0e9929c fix: include Mihomo-go122 by default for macOS 10.15+ to resolve Intel architecture compatibility issues 2025-10-29 21:14:02 +08:00
Tunglies
73323edf06 chore(deps): update clash_verge_service_ipc to version 2.0.20
Reduce memory usage, avoid duplicated clients
2025-10-29 20:35:45 +08:00
Tunglies
f4de4738f1 refactor(logger): replace ClashLogger with CLASH_LOGGER and update log handling; improve log retrieval and management 2025-10-29 17:58:02 +08:00
Tunglies
2e9f6dd174 fix: prevent service duplicate start_core and early-return after stop_core; fix start failures
Update clash_verge_service_ipc version to 2.0.18
2025-10-29 16:09:19 +08:00
renovate[bot]
e928089a77 chore(deps): update npm dependencies (#5231)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-29 08:29:47 +08:00
Tunglies
f41998284a fix(verge_patch): add tray_inline_proxy_groups handling to update flags and refresh tray 2025-10-29 02:41:07 +08:00
Tunglies
9375674c91 refactor(validate): simplify validation process management and remove unused code 2025-10-28 19:36:17 +08:00
Tunglies
2a7ccb5bde refactor(core): optimize RunningMode handling and improve state management 2025-10-28 19:16:42 +08:00
Tunglies
2af0af0837 refactor(tray): comment out enable_tray_icon references for future removal #5161
Since network speed display in Tray on menu has been removed
2025-10-28 14:37:57 +08:00
renovate[bot]
0fcf168b08 chore(deps): update dependency axios to ^1.13.0 (#5225)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-28 10:47:36 +08:00
Tunglies
f39436f1d0 refactor(i18n): optimize translation handling with Arc<str> for better memory efficiency
refactor(tray): change menu text storage to use Arc<str> for improved performance
refactor(service): utilize SmartString for error messages to enhance memory management
2025-10-28 00:26:20 +08:00
❤是纱雾酱哟~
a9eb512f20 docs(autobuild): update download links for release assets (#5224)
- To match those in actual "Assets" section

Signed-off-by: Dragon1573 <49941141+Dragon1573@users.noreply.github.com>
2025-10-28 00:00:01 +08:00
Tunglies
713162ca37 perf(i18n): change TRANSLATIONS type to use Box<Value> for better memory management
This reduces memory usage from 72 to 48
2025-10-27 23:17:40 +08:00
Tunglies
87168b6ce0 perf(tray): improve menu handling localization support
refactor(tray): replace string literals with MenuIds for menu event handling
2025-10-27 23:08:05 +08:00
Slinetrac
2ee8d164fd chore(i18n): upload zhtw.json
Co-authored-by: LiMoon <azhe9335@gmail.com>
2025-10-27 21:39:45 +08:00
Tunglies
c736796380 feat(clippy): cognitive-complexity rule (#5215)
* feat(config): enhance configuration initialization and validation process

* refactor(profile): streamline profile update logic and enhance error handling

* refactor(config): simplify profile item checks and streamline update flag processing

* refactor(disney_plus): add cognitive complexity allowance for check_disney_plus function

* refactor(enhance): restructure configuration and profile item handling for improved clarity and maintainability

* refactor(tray): add cognitive complexity allowance for create_tray_menu function

* refactor(config): add cognitive complexity allowance for patch_config function

* refactor(profiles): simplify item removal logic by introducing take_item_file_by_uid helper function

* refactor(profile): add new validation logic for profile configuration syntax

* refactor(profiles): improve formatting and readability of take_item_file_by_uid function

* refactor(cargo): change cognitive complexity level from warn to deny

* refactor(cargo): ensure cognitive complexity is denied in Cargo.toml

* refactor(i18n): clean up imports and improve code readability
refactor(proxy): simplify system proxy toggle logic
refactor(service): remove unnecessary `as_str()` conversion in error handling
refactor(tray): modularize tray menu creation for better maintainability

* refactor(tray): update menu item text handling to use references for improved performance
2025-10-27 20:55:51 +08:00
renovate[bot]
6df1e137f3 chore(deps): update dependency vitest to ^4.0.4 (#5221)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-27 20:40:13 +08:00
renovate[bot]
5a29508407 chore(deps): update cargo dependencies (#5217)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-27 20:39:00 +08:00
renovate[bot]
45c68424f0 chore(deps): update npm dependencies (#5218)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-27 20:38:17 +08:00
renovate[bot]
9426fc1b1c chore(deps): update dependency react-i18next to v16.2.1 (#5216)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-27 18:34:00 +08:00
Slinetrac
c234d1dc16 fix(groups-editor): persist deletions/restorations and normalize YAML
Prevent state resets while editing groups so deletions/restorations persist instead of being overwritten.
Ensure YAML is normalized and the latest visual state is saved.

- Add `normalizeDeleteSeq` to handle legacy `{name: ...}` entries and `buildGroupsYaml` for consistent serialization.
- Guard reassigning `deleteSeq` unless normalized value changes to avoid effect loops.
- Normalize proxy deletions and deduplicate policy names without extra backend writes.
- Split “on open” effect from proxy-policy refresh; toggling delete no longer triggers `fetchContent()`.
- Write composed YAML in `handleSave`, keep `currData`/`prevData` aligned, and provide accurate payloads to `onSave`.
2025-10-27 16:20:47 +08:00
Tunglies
11035db307 feat: add signal handling for graceful shutdown on Windows and Unix (#5023)
* feat: add signal handling for graceful shutdown on Windows and Unix

Co-authored-by: oomeow <oomeow@outlook.com>

* chore: update Cargo.lock

* fix(windows): restore shutdown hook build by enabling missing Win32 APIs and removing stray tracing call

Includes the required windows-sys feature expansions and replaces a leftover tracing reference so the Windows shutdown hook builds successfully.

* fix: add deprecation warnings for encrypt_data and decrypt_data functions

---------

Co-authored-by: oomeow <oomeow@outlook.com>
Co-authored-by: Slinetrac <realakayuki@gmail.com>
2025-10-27 14:02:27 +08:00
renovate[bot]
d2614396da chore(deps): update npm dependencies (#5212)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-27 13:06:15 +08:00
renovate[bot]
a2bbb69b73 chore(deps): update cargo dependencies (#5210)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-27 08:30:10 +08:00
renovate[bot]
b23b2a95c5 chore(deps): update npm dependencies (#5211)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-27 08:29:39 +08:00
❤是纱雾酱哟~
8fa9bcc650 Refactor(zhtw): Refines terminology for consistency (#5189)
* Refactor(zhtw): Refines terminology for consistency

- Updates: "連接" -> "連線"
- Updates: "高級" -> "進階"
- Updates: "局域網連接" -> "區域網路連線"

Signed-off-by: Dragon1573 <49941141+Dragon1573@users.noreply.github.com>

* chore(i18n): Refine Traditional Chinese translations

- Improves the quality and consistency to match Traditional Chinese localized expressions.

Signed-off-by: Dragon1573 <49941141+Dragon1573@users.noreply.github.com>

* chore(i18n): Refines Traditional Chinese translations

Signed-off-by: Dragon1573 <49941141+Dragon1573@users.noreply.github.com>

---------

Signed-off-by: Dragon1573 <49941141+Dragon1573@users.noreply.github.com>
2025-10-26 14:01:56 +08:00
renovate[bot]
e544203ca0 chore(deps): update dependency commander to ^14.0.2 (#5205)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-25 21:19:05 +08:00
renovate[bot]
ed9eed226d chore(deps): update npm dependencies (#5198)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-25 09:59:54 +08:00
renovate[bot]
806769b307 chore(deps): update npm dependencies (#5185)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-24 09:55:18 +08:00
Tunglies
d291fc5c64 chore(dependencies): update clash_verge_logger source to specific commit 2025-10-24 00:08:10 +08:00
Tunglies
bf45e487f9 feat(tauri-plugin-mihomo): configure IPC pool settings with connection limits and health checks 2025-10-23 22:32:23 +08:00
Tunglies
99ef0e51fc feat(init_logger): improve log level retrieval from environment variable 2025-10-23 22:13:43 +08:00
oomeow
cc2dc66d5f chore: update tauri-plugin-mihomo dep 2025-10-23 20:45:34 +08:00
renovate[bot]
001c11913a chore(deps): update dependency vite to ^7.1.12 (#5180)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-23 16:25:38 +08:00
Tunglies
f061bce2a1 refactor(window_manager): simplify window state handling and improve method organization 2025-10-23 16:21:51 +08:00
oomeow
d7859b07a6 fix: parse hotkey (#5167)
* fix: incorrectly parse hotkey

* refactor: parse hotkey

* fix: panic on linux

* chore: update

* chore: update style

* fix: register hotkey error on windows

* chore: update style

---------

Co-authored-by: Tunglies <tunglies.dev@outlook.com>
2025-10-23 15:54:48 +08:00
renovate[bot]
585963e751 chore(deps): update dependency lint-staged to ^16.2.6 (#5175)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-23 13:18:51 +08:00
renovate[bot]
d84b762ef3 chore(deps): update dependency vitest to v4 (#5176)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-23 13:18:30 +08:00
Sline
8657cedca0 feat: add configurable hover jump navigator delay (#5178)
* fix: hover options

* feat: add configurable hover jump navigator delay

- Added `hover_jump_navigator_delay` to Verge config defaults, patch flow, and response payload for persistent app-wide settings.
- Made proxy navigator respect configurable delay via `DEFAULT_HOVER_DELAY` and new `hoverDelay` prop.
- Threaded stored delay through proxy list so hover scrolling uses Verge-configured value.
- Added "Hover Jump Navigator Delay" control in Layout settings with clamped numeric input, tooltip, and toggle-aware disabling.
- Localized new labels in English, Simplified Chinese, and Traditional Chinese.
- Extended frontend Verge config type to include delay field for type-safe access.

* docs: UPDATELOG.md
2025-10-23 13:14:01 +08:00
Tunglies
9ea9704bbf refactor(sysopt): replace Arc<TokioMutex> with AtomicBool for sysproxy state management 2025-10-23 00:53:57 +08:00
renovate[bot]
302677aed0 chore(deps): update dependency react-i18next to v16.1.5 (#5171)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-22 22:01:37 +08:00
renovate[bot]
1415df1d23 chore(deps): update cargo dependencies (#5169)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-22 21:59:53 +08:00
renovate[bot]
b7d2bc7c74 chore(deps): update dependency @tauri-apps/cli to v2.9.1 (#5170)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-22 21:59:31 +08:00
Sline
4f7d069f19 feat(ui): add left menu lock/unlock with reorder mode and context menu (#5168)
* feat: free menu

* feat(ui): add left menu lock/unlock with reorder mode and context menu

* docs: UPDATELOG.md
2025-10-22 21:39:12 +08:00
Sline
3bedf7ec35 feat(backup): add dns_config into backup (#5166) 2025-10-22 19:52:44 +08:00
Sline
c4c37bf291 feat(tray): close all connections (#5165) 2025-10-22 19:28:00 +08:00
Tunglies
815a865265 perf(tray): ignore unnecessary tray icon events to reduce refreshes 2025-10-22 18:49:28 +08:00
Tunglies
2d2167e048 refactor: replace unwrap_or with unwrap_or_else for improved error handling (#5163)
In Rust, the `or` and `or_else` methods have distinct behavioral differences. The `or` method always eagerly evaluates its argument and executes any associated function calls. This can lead to unnecessary performance costs—especially in expensive operations like string processing or file handling—and may even trigger unintended side effects.

In contrast, `or_else` evaluates its closure lazily, only when necessary. Introducing a Clippy lint to disallow `or` sacrifices a bit of code simplicity but ensures predictable behavior and enforces lazy evaluation for better performance.
2025-10-22 17:33:55 +08:00
Tunglies
a05ea64bcd perf: utilize smartstring for string handling (#5149)
* perf: utilize smartstring for string handling

- Updated various modules to replace standard String with smartstring::alias::String for improved performance and memory efficiency.
- Adjusted string manipulations and conversions throughout the codebase to ensure compatibility with the new smartstring type.
- Enhanced readability and maintainability by using `.into()` for conversions where applicable.
- Ensured that all instances of string handling in configuration, logging, and network management leverage the benefits of smartstring.

* fix: replace wrap_err with stringify_err for better error handling in UWP tool invocation

* refactor: update import path for StringifyErr and adjust string handling in sysopt

* fix: correct import path for CmdResult in UWP module

* fix: update argument type for execute_sysproxy_command to use std::string::String

* fix: add missing CmdResult import in UWP platform module

* fix: improve string handling and error messaging across multiple files

* style: format code for improved readability and consistency across multiple files

* fix: remove unused file
2025-10-22 16:25:44 +08:00
renovate[bot]
fe96a7030a chore(deps): update rust crate boa_engine to 0.21.0 (#5159)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-22 09:11:57 +08:00
renovate[bot]
9050e56cdb chore(deps): update npm dependencies (#5158)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-22 09:02:32 +08:00
Tunglies
b813cbdfc8 fix: update clash_verge_service_ipc to version 2.0.17 2025-10-22 01:51:11 +08:00
Slinetrac
95aee6ec81 chore: better pre-hooks 2025-10-22 00:08:16 +08:00
Slinetrac
4d2c1b4dc2 fix: resolve clippy lint in linux utils 2025-10-21 23:20:27 +08:00
Tunglies
d9fdf261d1 fix: optimize process ID collection and remove unnecessary async handler in window destruction
fix: remove unnecessary condition check in process name matching
2025-10-21 23:14:57 +08:00
Slinetrac
a8b17926ed refactor: adjust MIME detection to merge duplicates and follow Freedesktop standard
- Honor Freedesktop precedence when locating mimeapps.list
- Replace per-scheme HashSet with index-tracking HashMap
- Merge duplicate handler entries instead of discarding them
- Ensure all schemes exist using the new tracking structure
2025-10-21 22:53:47 +08:00
Tunglies
afb049ca17 fix: simplify conditional checks and improve async handler usage across multiple files (#5156)
* fix: simplify conditional checks and improve async handler usage across multiple files

* fix: add missing AsyncHandler import in find_processes_by_name function

* fix: remove redundant AsyncHandler import in find_processes_by_name function
2025-10-21 22:39:32 +08:00
Sline
9c9aefe4cd fix: MIME config (#5154)
* fix: MIME config #2487

* fix: path

* refactor: enhance logic
2025-10-21 22:02:41 +08:00
Tunglies
e7a4415d1f ci: improve clippy lint workflow to handle manual triggers and src-tauri changes 2025-10-21 18:10:44 +08:00
xmk23333
ef3f8e1839 style: clean up whitespace and improve code formatting across multiple files 2025-10-21 17:53:02 +08:00
xmk23333
0e933597f5 refactor: streamline SWR configuration and improve error handling in AppDataProvider 2025-10-21 17:51:12 +08:00
Slinetrac
bafe2ae164 fix: home card save 2025-10-21 15:10:48 +08:00
renovate[bot]
6d93e21bc7 chore(deps): update dependency react-i18next to v16.1.3 (#5150)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-21 13:35:33 +08:00
renovate[bot]
91fb0d9ffa chore(deps): update npm dependencies (#5147)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-21 13:31:52 +08:00
Tunglies
8fc72814c8 build: optimize release profile for better performance
Upstream Tauri fixed the stack overflow in the invoke handler
(https://github.com/tauri-apps/tauri/pull/14170) in v2.9.0,
so we can safely use opt-level 3 now.
2025-10-21 00:19:27 +08:00
renovate[bot]
a5d3d6fc50 chore(deps): update dependency @tauri-apps/cli to v2.9.0 (#5145)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-20 23:57:28 +08:00
renovate[bot]
b6d51d6fe4 chore(deps): update cargo dependencies (#5141)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-20 23:57:19 +08:00
oomeow
a5c00ecb12 fix: failed to reset sysproxy (#5139) 2025-10-20 23:09:13 +08:00
renovate[bot]
91e12798e4 chore(deps): update dependency @tauri-apps/api to v2.9.0 (#5142)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-20 22:19:00 +08:00
Sline
9ee4b067d0 fix(current-proxy-card): stabilize match rule lookup and dependencies (#5138)
- memoize policy name normalization and include it in hook deps
- guard MATCH rule checks against partial controller data
- register MATCH policy groups when rebuilding selector selectors
2025-10-20 17:59:21 +08:00
xmk23333
366deb2756 Merge branch 'dev' of https://github.com/clash-verge-rev/clash-verge-rev into dev 2025-10-20 16:48:41 +08:00
xmk23333
98778fe6a3 refactor: improve code formatting and enhance logging consistency across core and event handling modules 2025-10-20 16:47:26 +08:00
xmk23333
b9dd62e2e6 refactor: enhance error handling and logging in core components and server initialization 2025-10-20 16:34:38 +08:00
Tunglies
a1dcdd04a7 fix: improve TUN mode handling logic to prevent unnecessary state changes #5122 (#5124) 2025-10-20 16:09:29 +08:00
Slinetrac
8ebf915330 docs: README.md i18n 2025-10-20 15:56:38 +08:00
AltZed
5281449e26 Translate readme to russian (#5136)
* Add Russian translation README_ru.md and update README.md

* add language badges for Chinese and Russian README

* add language badges for Chinese and Russian README

* Update README.md

Co-authored-by: Sline <realakayuki@gmail.com>

* Update README_ru.md

---------

Co-authored-by: Sline <realakayuki@gmail.com>
2025-10-20 15:33:52 +08:00
renovate[bot]
b2f0bf2f69 chore(deps): update dependency vite to ^7.1.11 (#5134)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-20 13:44:30 +08:00
xmk23333
d6bba4f68c Merge branch 'dev' of https://github.com/clash-verge-rev/clash-verge-rev into dev 2025-10-20 13:27:57 +08:00
xmk23333
278ab30d40 refactor: improve code formatting and enhance logging in resolve_setup_async function 2025-10-20 13:27:01 +08:00
xmk23333
786c981fe0 refactor: improve error handling in AppDataProvider and enhance configuration update logic in CoreManager 2025-10-20 13:26:24 +08:00
xmk23333
b77cc012e1 refactor: streamline app initialization and enhance WebSocket cleanup logic 2025-10-20 13:15:51 +08:00
renovate[bot]
dc31ec524b chore(deps): update dependency eslint-plugin-unused-imports to ^4.3.0 (#5132)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-20 12:54:57 +08:00
Slinetrac
a0ef64cda8 docs: UPDATELOG.md 2025-10-19 18:43:20 +08:00
oomeow
7be790c6fb chore: remove unused file and improve traffic monitor 2025-10-19 16:34:45 +08:00
Slinetrac
fbe6cefbdb fix: hover jump navigator 2025-10-19 16:00:45 +08:00
Tunglies
a60cab989d feat: add tracing support to logger initialization (#5120) 2025-10-18 22:57:51 +08:00
Tunglies
385ffafc67 fix: WindowProvider awful performance (#5119) 2025-10-18 22:49:55 +08:00
Tunglies
462b11d96a feat: implement caching for version and file hash to optimize prebuild process 2025-10-18 20:48:53 +08:00
oomeow
96ce529b16 refactor: react router (#5073)
* refactor: react router

* chore: update

* fix: router

* refactor: generate router children by navItems

* chore: set start page when create window

* docs: update UPDATELOG.md
2025-10-18 20:25:31 +08:00
Tunglies
8e20b1b0a0 feat: enhance profile update logic to include auto-update option handling 2025-10-18 17:40:55 +08:00
Tunglies
c2d7bf296a feat: add allow auto update option for profiles and update UI components 2025-10-18 17:04:03 +08:00
Slinetrac
98725bbecf fix: TS errors 2025-10-18 16:11:42 +08:00
Sline
c465000178 fix: update fallback (#5115)
* fix: update fallback

* test: introduce Vitest and add semver helper tests

* chore: merge vitest config into vite
2025-10-18 15:51:34 +08:00
Slinetrac
3d09cf0666 chore: remove unused imports 2025-10-18 14:51:27 +08:00
Tunglies
70770b3c13 refactor: optimize TUN mode cleanup process and improve task execution flow 2025-10-18 14:26:56 +08:00
Tunglies
0cdb9a05ce refactor: remove immediate window hide on quit for improved cleanup flow 2025-10-18 14:01:22 +08:00
Tunglies
c97c4cbd41 refactor: optimize async setup flow and add config verification 2025-10-18 14:01:21 +08:00
Sline
fecae38c63 refactor: Linux environment detection logic (#5108)
* fix: wayland framebuffer

* refactor(utils): move linux env heuristics into platform helper

* refactor(linux): let DMABUF override helper use resolved decision

* fix: clippy

* fix: clippy

* feat: NVIDIA detection

* fix: clippy
2025-10-18 12:13:00 +08:00
Tunglies
210c12a74e feat: implement CoreConfigValidator for configuration validation and enhance logging types (#5112) 2025-10-18 10:57:57 +08:00
Sline
a1c0a09423 refactor(core): elegant retry (#5113) 2025-10-18 10:12:36 +08:00
renovate[bot]
fc99f24802 chore(deps): update npm dependencies (#5111)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-18 10:10:18 +08:00
Tunglies
28bcdc3706 feat: enhance ProxyControlSwitches with Tun Mode functionality and state management 2025-10-18 00:00:20 +08:00
wonfen
859d09ff8c fix: undefined is not an object on connection table 2025-10-17 21:48:58 +08:00
Sline
10f155da78 refactor: retry with backoff (#5104)
* Reapply "refactor: retry with backoff"

This reverts commit 4e31dc8728.

* fix: clippy

* fix: clippy
2025-10-17 20:43:06 +08:00
Slinetrac
4e31dc8728 Revert "refactor: retry with backoff"
This reverts commit 0b63bebb6c.
2025-10-17 20:14:15 +08:00
Tunglies
98a52c5c33 fix: remove dead code from event-driven proxy and handle modules (#5103)
* refactor: remove dead code from event-driven proxy and handle modules

* refactor: remove dead code for set_activation_policy_prohibited function
2025-10-17 20:12:03 +08:00
Slinetrac
0b63bebb6c refactor: retry with backoff 2025-10-17 19:48:50 +08:00
Tunglies
bccde5ef6d feat(locales): add "App Log Max Size" and "App Log Max Count" entries to English and Chinese localization files 2025-10-17 19:20:51 +08:00
Slinetrac
215ba4da63 perf(delay): cache latency updates and smooth proxy list refresh
- track delay as structured updates with TTL-backed cache
- batch listener notifications to avoid render storms during checks
- surface cached latency in proxy items for quicker, steadier UI feedback
2025-10-17 18:27:21 +08:00
Slinetrac
886d1a551a refactor: useLayoutEffect 2025-10-17 15:11:36 +08:00
Slinetrac
d05bcc17f7 fix: untested and testing sort 2025-10-17 15:01:38 +08:00
Slinetrac
c63584daca fix: timeout sort 2025-10-17 14:51:33 +08:00
renovate[bot]
c2f59ffc02 chore(deps): update dependency @mui/x-data-grid to ^8.14.1 (#5093)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-17 07:41:57 +08:00
oomeow
f90361f8e4 chore: temporarily allow clippy to pass 2025-10-16 20:22:46 +08:00
Slinetrac
67d254236d fix(profile): verify saved proxy exists before activation
- check selector-like groups’ all list before applying saved nodes
- warn and fall back when a stored proxy disappeared from the group
- keep existing auto-switch flow for matching nodes to avoid regressions
2025-10-16 19:29:19 +08:00
Slinetrac
fd5bddeb80 fix(backup): prevent immediate deletion before confirmation dialog 2025-10-16 16:54:25 +08:00
Slinetrac
4835d68222 docs: UPDATELOG.md 2025-10-16 16:17:13 +08:00
Slinetrac
fe78e2d5cd fix(windows): wait for service readiness before enabling TUN 2025-10-16 15:09:17 +08:00
Slinetrac
e73217ad5f fix(core): restart core when config reload fails
- add retry path that restarts Mihomo on connection-related reload errors
- guard runtime config state by discarding on repeated failures and returning rich errors
2025-10-16 14:30:57 +08:00
Slinetrac
88cde5d99d fix(proxy): place timeout nodes at the end when sorting by latency 2025-10-16 11:51:57 +08:00
Slinetrac
41bc0e62a1 fix(home): scope cached proxy selection to active profile to prevent fallback reset on profile switch
Previously, Home card used global localStorage keys for selected proxy/group
(`clash-verge-selected-proxy(-group)`), causing cached selections from other
profiles to override the current one and reset the fallback to Direct when switching.

Now the cache keys are namespaced per profile, with migration of legacy values
and unified helpers (src/components/home/current-proxy-card.tsx:110–147),
ensuring each profile restores its own proxy state independently.
2025-10-16 09:39:57 +08:00
Slinetrac
b05799cfae fix: clippy warnings 2025-10-16 09:01:16 +08:00
oomeow
592e7f846d fix: incorrect delay status 2025-10-15 23:44:38 +08:00
renovate[bot]
bcd54bf995 chore(deps): update rust crate gethostname to 1.1.0 (#5065)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-15 19:02:22 +08:00
Sline
0b4403b67b refactor: frontend (#5068)
* refactor: setting components

* refactor: frontend

* fix: settings router
2025-10-15 18:57:44 +08:00
renovate[bot]
a591ee1efc chore(deps): update dependency eslint-plugin-react-refresh to ^0.4.24 (#5070)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-15 18:20:00 +08:00
Slinetrac
ef9ccafe61 refactor: proxy components 2025-10-15 09:00:03 +08:00
Tunglies
e6b7d512fb feat: implement draft management system for concurrent editing and committing of data 2025-10-15 08:32:52 +08:00
renovate[bot]
6113be3b6c chore(deps): update rust crate getrandom to 0.3.4 (#5064)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-15 08:22:56 +08:00
Tunglies
7fab8eeaf6 feat: add "Prefer System Titlebar" localization to English and Chinese JSON files 2025-10-15 07:24:36 +08:00
renovate[bot]
15d5113729 chore(deps): update npm dependencies (#5060)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-14 23:52:56 +08:00
renovate[bot]
e5eaff37a4 chore(deps): update rust crate tokio to 1.48.0 (#5063)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-14 23:50:37 +08:00
Slinetrac
4f2633a62b refactor: profile components 2025-10-14 23:45:12 +08:00
Slinetrac
5d114806f7 refactor: layout and log components 2025-10-14 22:13:14 +08:00
Slinetrac
06dc7a6ef4 refactor: home components 2025-10-14 21:57:33 +08:00
Slinetrac
8dbe3f8c48 refactor: connection components 2025-10-14 21:33:36 +08:00
Slinetrac
778d506be7 refactor: common components 2025-10-14 21:22:48 +08:00
Slinetrac
65cf6c387b refactor: base components 2025-10-14 21:13:10 +08:00
Slinetrac
2e3174baa7 feat(proxy-groups, current-proxy-card): auto-refresh delay sorting
- proxy-groups: recalculate active group head and reapply delay sort after tests so list reorders automatically when "按延迟排序" is active.
- current-proxy-card: add delaySortRefresh trigger after auto/manual latency checks to immediately refresh selector and proxy list ordering.
- current-proxy-card: listen for delaySortRefresh to keep displayed delay chips and option ordering aligned with latest measurements.
2025-10-14 20:25:52 +08:00
Tunglies
7c71d07ad2 refactor: convert synchronous file operations to asynchronous for improved performance (#5059)
* refactor: convert synchronous file operations to asynchronous for improved performance

* fix: update copy_icon_file to use asynchronous directory creation

* refactor: remove unnecessary variable assignments in shortcut management functions
2025-10-14 19:55:22 +08:00
Slinetrac
8760ed17dc feat(current-proxy-card): add toggle for automatic delay detection 2025-10-14 19:27:41 +08:00
Tunglies
02b44d83af chore: replace pre-commit hook with cargo alias 2025-10-14 18:21:25 +08:00
Tunglies
bb2059c76f fix: resolve issue with file deletion during subscription removal 2025-10-14 17:56:38 +08:00
Sline
f541464ff4 feat: tray enhance (#5058)
* feat: proxy group sorting for tray

* feat(tray): add inline proxy groups toggle
2025-10-14 17:03:37 +08:00
Slinetrac
98527d5038 feat: import profiles with enter 2025-10-14 15:54:26 +08:00
Slinetrac
2ba2f4d42c refactor: use logging 2025-10-14 15:10:06 +08:00
Sline
51b08be87e feat: local backup (#5054)
* feat: local backup

* refactor(backup): make local backup helpers synchronous and clean up redundant checks

- Converted local backup helpers to synchronous functions to remove unused async warnings and align command signatures.
- Updated list/delete/export commands to call the sync feature functions directly without awaits while preserving behavior.
- Simplified destination directory creation to always ensure parent folders exist without redundant checks, satisfying Clippy.
2025-10-14 14:52:04 +08:00
Tunglies
4dd811330b feat: add Clippy alias commands and improve build process for Clippy integration (#5055)
* feat: add Clippy alias commands and improve build process for Clippy integration

* fix(lint-clippy): update Clippy run command to use working directory for src-tauri
2025-10-14 14:43:03 +08:00
Tunglies
76ca24086b fix: specify type for mode and host variables to improve clarity and type safety (#5052)
* fix: specify type for mode and host variables to improve clarity and type safety

* fix: specify types for pac_url and host variables to enhance type safety

* fix: change type of pac_url from Url to String for consistency in handling PAC output
2025-10-14 13:16:11 +08:00
Tunglies
3d96a575c0 refactor: streamline profile import logic and enhance error handling (#5051) 2025-10-14 12:39:22 +08:00
Slinetrac
db091f5d2e feat(current-proxy-card): add automatic delay checks 2025-10-14 12:27:12 +08:00
Slinetrac
baebce4aad fix(connection-table): patch DataGrid event handling to prevent Safari crash
- Ensure api.publishEvent is patched only once, retrying until the API is ready.
- Normalize missing event objects for Safari to avoid crashes.
- Restore the original handler and clear timers on unmount to keep the grid stable.
2025-10-14 11:52:51 +08:00
Tunglies
fefc5c23fd fix: simplify error handling in change_clash_core and validate_dns_config functions 2025-10-14 11:48:53 +08:00
Tunglies
924e7d1022 Refactor string handling to use into() instead of to_string() for improved performance and consistency across the codebase. This change affects various modules including app.rs, clash.rs, config.rs, core.rs, service.rs, and others, ensuring that string conversions are streamlined and more idiomatic. 2025-10-14 09:26:20 +08:00
renovate[bot]
44eb781060 chore(deps): update npm dependencies (#5045)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-14 09:02:49 +08:00
renovate[bot]
3bd981d47b chore(deps): update cargo dependencies (#5048)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-14 08:56:51 +08:00
Slinetrac
1d725b8bde feat: add claude, spotify and tiktok into unlock checker 2025-10-13 23:55:40 +08:00
Moon
15a0c30ccb fix: linux border render (#5046) 2025-10-13 22:42:25 +08:00
Slinetrac
537a3000b6 chore: rm success notice 2025-10-13 19:04:40 +08:00
Sline
965ee9844d refactor(unlock): restructure media unlock checker (#5044)
- Split the monolithic unlock checker into a module tree (mod.rs:9–133), wiring service-specific tasks while keeping exported Tauri commands untouched.
- Centralize shared data and helpers in types.rs (1–40) and utils.rs (1–21) for reusable timestamp and emoji logic.
- Move each provider’s logic into its own file (bilibili.rs, disney_plus.rs, netflix.rs, etc.), preserving behavior and making future additions or fixes localized.
2025-10-13 18:56:15 +08:00
Tunglies
fa39cfc41b fix: reorganize imports in logging.rs for clarity and consistency 2025-10-13 13:10:55 +08:00
Tunglies
902c8fcaf2 fix: update clash_verge_service_ipc to version 2.0.16 and improve log handling 2025-10-13 11:39:16 +08:00
renovate[bot]
5fb770c113 chore(deps): update dependency @eslint-react/eslint-plugin to ^2.1.1 (#5040)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-13 11:24:43 +08:00
Tunglies
ca3fa869d5 Squashed commit of the following:
commit 2a9f2f20e9c6d88c2f96fd40589740e1f236f64a
Author: Tunglies <77394545+Tunglies@users.noreply.github.com>
Date:   Mon Oct 13 11:07:31 2025 +0800

    fix: improve message handling in CommandEvent logging with CompactString

commit c77fc18accefeaf471594035d61bd13e235c87d6
Author: Tunglies <77394545+Tunglies@users.noreply.github.com>
Date:   Mon Oct 13 10:47:16 2025 +0800

    fix: optimize shared writer locking in CommandEvent handling

commit d5286ee5f1612f17b7a97eead84d430669816d98
Author: Tunglies <77394545+Tunglies@users.noreply.github.com>
Date:   Mon Oct 13 10:30:19 2025 +0800

    feat: integrate CompactString for improved logging and dependency management

commit 951fb2b120ce159c00dc57d43c5a519990f34cee
Author: Tunglies <77394545+Tunglies@users.noreply.github.com>
Date:   Mon Oct 13 09:39:29 2025 +0800

    refactor: remove write_sidecar_log function and streamline logging in CommandEvent handling

commit fd48d66c55a2c62fd32741fd3c65cc06d4cc693f
Author: Tunglies <77394545+Tunglies@users.noreply.github.com>
Date:   Mon Oct 13 09:38:05 2025 +0800

    Revert "refactor(core): stabilize 'static backing for sidecar logging"

    This reverts commit fe7eb59f18.
2025-10-13 11:08:44 +08:00
Slinetrac
51ba1d1e34 refactor(nsis): use tauri v2 installer template 2025-10-13 10:30:03 +08:00
Slinetrac
7e3804bf34 chore: issue template 2025-10-13 08:46:04 +08:00
oomeow
55e8582ee4 chore: bump clash_verge_service_ipc to 2.0.15 2025-10-13 00:09:12 +08:00
oomeow
0e9595f255 feat: add get clash logs from service
chore: log message
2025-10-13 00:02:46 +08:00
Sline
19246ac616 fix(profile): fix false failure notice after successful import (#5038)
- normalize profile urls so matching ignores casing/trailing slashes
- capture baseline profile state and confirm landing before showing success
- reuse shared success handler for normal and clash proxy retries
2025-10-12 23:21:32 +08:00
oomeow
b91087e175 feat: support for reopen app via desktop shortcuts (#5037)
* fix: singleton check

* docs: update UPDATELOG.md

---------

Co-authored-by: Slinetrac <realakayuki@gmail.com>
2025-10-12 22:55:40 +08:00
Slinetrac
7789d0bd5c Revert "chore: update ipc crate version"
This reverts commit 8e5c150a4f.
2025-10-12 21:26:45 +08:00
oomeow
1875e1b513 fix: incorrect maximize status on custom windows controller (#5033)
* fix: windows controller

* chore: update style

* chore: update style
2025-10-12 20:51:25 +08:00
Slinetrac
8e5c150a4f chore: update ipc crate version 2025-10-12 16:18:17 +08:00
oomeow
85f4afe2a1 perf: reduce reset scroller position 2025-10-12 15:09:20 +08:00
oomeow
0b8b3c5a1a feat: set clash log level default to info 2025-10-12 13:18:26 +08:00
oomeow
5ce95d74a9 chore: cleanup 2025-10-12 13:03:42 +08:00
Tunglies
d531432f4a fix: improve Service connection method and permissions for Windows and Unix 2025-10-11 23:41:24 +08:00
Tunglies
4f1d61a56e Revert "fix: improve Service connection method and permissions for Windows and Unix"
This reverts commit 601e99f0b5.

Revert "refactor: clash-verge-rev-service-ipc (#4841)"

This reverts commit 5370bd45ed.
2025-10-11 21:21:23 +08:00
oomeow
121b8c433b chore: update tauri-plugin-mihomo dep 2025-10-11 20:57:30 +08:00
Sline
3d2507430b fix(shutdown): mark shutdown as exiting to stop background tasks (#5024)
* fix(shutdown): mark shutdown as exiting to stop background tasks

- lib.rs:570 → Flag app as exiting on ExitRequested, notify proxy guard, start cleanup immediately, with fallback in Exit event
- tray/mod.rs:190 → Add unified exit checks around tray init/updates to prevent UI recreation during shutdown
- event_driven_proxy.rs:252 → Ensure proxy guard skips all restore/re-enable work (including sysproxy.exe calls) once exit flag is set

* fix(shutdown): refine exit handling and proxy guard notifications

* fix(shutdown): add guard to run shutdown routine only once per lifecycle
2025-10-11 16:49:47 +08:00
Tunglies
601e99f0b5 fix: improve Service connection method and permissions for Windows and Unix 2025-10-11 15:40:59 +08:00
Tunglies
5370bd45ed refactor: clash-verge-rev-service-ipc (#4841)
* feat: update service installation scripts and IPC integration

- Updated `Cargo.toml` to use version 2.0.8 of `clash_verge_service_ipc` with "client" feature.
- Renamed service installation and uninstallation scripts in `post-install.sh` and `pre-remove.sh`.
- Removed `service_ipc` module and refactored IPC handling in `service.rs` to use the new `clash_verge_service_ipc` directly.
- Adjusted service version checking and core management to align with the new IPC structure.
- Simplified directory checks in `dirs.rs` and updated logging configurations in `init.rs`.
- Updated Linux configuration file to reflect new script names.
- Enhanced service installer hook to manage state more effectively.

* refactor: simplify ClashConfig instantiation and remove unused service log file function

* feat: update clash_verge_service_ipc to version 2.0.9 and enhance service initialization logging

* chore: update clash_verge_service_ipc to version 2.0.10 and refactor async service manager initialization

* fix: update clash_verge_service_ipc to version 2.0.11 and improve service manager initialization

* fix: increase sleep duration for socket readiness check to improve stability

* fix: update clash_verge_service_ipc to version 2.0.12 and kode-bridge to version 0.3.4; refactor service management and IPC path checks

* fix: update clash_verge_service_ipc to version 2.0.13; refactor service connection and initialization logic
2025-10-11 15:35:26 +08:00
renovate[bot]
1246a66b35 chore(deps): update dependency lint-staged to ^16.2.4 (#5021)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-11 15:17:53 +08:00
renovate[bot]
7d6fb54783 chore(deps): update npm dependencies (#5015)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-11 13:14:56 +08:00
renovate[bot]
79d14f1d51 chore(deps): update rust crate regex to 1.12.1 (#5017)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-11 13:12:27 +08:00
Tunglies
59dd71ebaa refactor: simplify logging calls by removing unnecessary parameters 2025-10-10 13:46:26 +08:00
Tunglies
ea319e951c fix: update clippy command to include all features and targets 2025-10-10 13:05:59 +08:00
Tunglies
8c0af66ca9 Refactor logging macros to remove print control parameter
- Updated logging macros to eliminate the boolean parameter for print control, simplifying the logging calls throughout the codebase.
- Adjusted all logging calls in various modules (lib.rs, lightweight.rs, help.rs, init.rs, logging.rs, resolve/mod.rs, resolve/scheme.rs, resolve/ui.rs, resolve/window.rs, server.rs, singleton.rs, window_manager.rs) to reflect the new macro structure.
- Ensured consistent logging behavior across the application by standardizing the logging format.
2025-10-10 13:05:37 +08:00
Sline
a4d94c8bc9 refactor: enhance compositor detection logic (#5007)
* refactor: enhance compositor detection logic

* docs: UPDATELOG.md
2025-10-10 10:13:24 +08:00
renovate[bot]
ea8ca1b739 chore(deps): update dependency @mui/x-data-grid to ^8.14.0 (#5004)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-10 08:38:22 +08:00
Slinetrac
e4f1bab8fb Reapply "fix: windows title bar only"
This reverts commit 7e05b8f13b.
2025-10-10 08:25:08 +08:00
Tunglies
7e05b8f13b Revert "fix: windows title bar and refactor old code (#4988)" this breaks UI page switch function
This reverts commit 03ab2410cc.
2025-10-10 07:38:52 +08:00
renovate[bot]
0a771bd67a chore(deps): update rust crate libc to 0.2.177 (#5003)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-09 22:24:19 +08:00
renovate[bot]
2798e930ac chore(deps): update rust crate zip to v6 (#5000)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-09 22:17:45 +08:00
oomeow
1357913f8b fix: restart app failed (#4974)
* fix: failed to restart app

* chore: cleanup

* chore: cargo fmt

* chore: use AsyncHandler

* chore: clippy

* chore: update
2025-10-09 20:08:25 +08:00
❤是纱雾酱哟~
14b990ad9f Revert "build(tauri): add pkexec dependency for Linux packages (#4833)" (#4996)
- This reverts commit b608a38
- This may reopen issue #4831
- Issue #4992 may still remain unresolved

Signed-off-by: Dragon1573 <49941141+Dragon1573@users.noreply.github.com>
2025-10-09 18:45:02 +08:00
Slinetrac
58bd2032f3 refactor(sidecar): inline Record construction for safer logging
- Build Record inline so fmt::Arguments temporary only lives through writer.write, avoiding dropped-temporary borrow.
- Add explicit 'static annotation on leaked string before reboxing for clarity.
2025-10-09 18:41:09 +08:00
Slinetrac
fe7eb59f18 refactor(core): stabilize 'static backing for sidecar logging
Introduced `write_sidecar_log` to prevent temporary `format_args!` values
from dropping early.

- src-tauri/src/core/core.rs:60 — adds `write_sidecar_log`, which temporarily
  leaks the message into a `Box<str>`, builds the `Record`, writes it, then
  immediately reclaims the boxed string. The `unsafe` block is limited to
  `Box::from_raw` needed to undo `Box::leak`.
- src-tauri/src/core/core.rs:794, 802, 806 — all three sidecar events now route
  through this helper, reusing the returned string for the in-memory log and
  avoiding extra UTF-8 decoding.
2025-10-09 16:46:11 +08:00
Sline
e3cd16189b fix: linux app theme (#4997) 2025-10-09 16:03:28 +08:00
Sline
bd9db1b4f7 fix: linux webkit error (#4995)
* fix: linux webkit error

* docs: UPDATELOG.md
2025-10-09 15:44:11 +08:00
Sline
5db4677ff8 fix: linux tun timeout (#4993)
* fix: linux tun timeout

* docs: UPDATELOG.md
2025-10-09 15:09:17 +08:00
Slinetrac
44280b23e4 Merge remote-tracking branch 'origin/dev' into dev 2025-10-09 14:05:37 +08:00
Slinetrac
7cfc31b6e5 docs: UPDATELOG.md 2025-10-09 14:05:05 +08:00
Sline
c7cd47fbdc fix: silent start (#4990) 2025-10-09 14:02:27 +08:00
renovate[bot]
a9d91a09c4 chore(deps): update dependency eslint-plugin-react-hooks to v7 (#4987)
* chore(deps): update dependency eslint-plugin-react-hooks to v7

* style: format

---------

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Slinetrac <realakayuki@gmail.com>
2025-10-09 12:58:14 +08:00
Slinetrac
d18b98304b fix: windows title bar only 2025-10-09 12:34:36 +08:00
Slinetrac
a80bc10719 Revert "fix: windows title bar and refactor old code (#4988)"
This reverts commit 03ab2410cc.
2025-10-09 12:31:54 +08:00
Slinetrac
0f34d63b6d docs: up UPDATELOG.md 2025-10-09 11:44:52 +08:00
Tunglies
02c271dfb2 feat: update tauri-plugin-mihomo to version 0.1.1 with new source reference 2025-10-09 11:32:09 +08:00
Sline
03ab2410cc fix: windows title bar and refactor old code (#4988) 2025-10-09 10:53:20 +08:00
Slinetrac
f5c2b2a23d chore: light hook 2025-10-09 10:29:20 +08:00
Tunglies
4417fe6cd9 feat: update tray tooltip to include reassembled version format #4727 2025-10-09 05:53:49 +08:00
renovate[bot]
57c031a8f8 chore(deps): update dependency react-router-dom to v7.9.4 (#4985)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-09 05:10:48 +08:00
renovate[bot]
a9733d9746 chore(deps): update rust crate flexi_logger to 0.31.7 (#4986)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-09 05:10:32 +08:00
❤是纱雾酱哟~
936764e6ce feat: Enable git hooks with husky (#4984)
* build(deps): Adds husky for Git hooks

- Integrates the husky package as a development dependency.
- Enables the configuration and enforcement of pre-commit and pre-push Git hooks.
- Improves code quality and consistency by automating checks before commits.

Signed-off-by: Dragon1573 <49941141+Dragon1573@users.noreply.github.com>

* feat: Add Husky prepare hook

- Automatically installs Git hooks for developers
- Ensures consistent code quality checks before commits or pushes
- Streamlines the developer setup process

Signed-off-by: Dragon1573 <49941141+Dragon1573@users.noreply.github.com>

* ci: Avoid installing Git Hooks on GitHub Workflows

- Adds `HUSKY: 0` environment variable to all workflow definitions.
- Prevents local development hooks from executing in CI, which can cause unnecessary failures or overhead.
- See https://typicode.github.io/husky/how-to.html#ci-server-and-docker

Signed-off-by: Dragon1573 <49941141+Dragon1573@users.noreply.github.com>

---------

Signed-off-by: Dragon1573 <49941141+Dragon1573@users.noreply.github.com>
2025-10-09 05:10:03 +08:00
Sline
9750cd3ce6 fix: multi-monitor-crash (#4980) 2025-10-08 22:02:17 +08:00
Tunglies
bfd1274a8c feat: Implement custom window controls and titlebar management (#4919)
- Added WindowControls component for managing window actions (minimize, maximize, close) based on the operating system.
- Integrated window decoration toggle functionality to allow users to prefer system titlebar.
- Updated layout styles to accommodate new titlebar and window controls.
- Refactored layout components to utilize new window management hooks.
- Enhanced layout viewer to include a switch for enabling/disabling window decorations.
- Improved overall window management by introducing useWindow and useWindowDecorations hooks for better state handling.
2025-10-08 20:23:26 +08:00
renovate[bot]
f195b3bccf chore(deps): update rust crate flexi_logger to 0.31.6 (#4979)
* chore(deps): update rust crate flexi_logger to 0.31.6

* chore: up lock

---------

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Slinetrac <realakayuki@gmail.com>
2025-10-08 19:14:00 +08:00
Tunglies
5376d50cfb feat: add clash_verge_logger and clash_verge_service_ipc dependencies; refactor logging and process management 2025-10-08 18:06:11 +08:00
Slinetrac
0b6681436a style: lint 2025-10-08 14:19:07 +08:00
oomeow
7fc238c27b refactor: invoke mihomo api by using tauri-plugin-mihomo (#4926)
* feat: add tauri-plugin-mihomo

* refactor: invoke mihomo api by using tauri-plugin-mihomo

* chore: todo

* chore: update

* chore: update

* chore: update

* chore: update

* fix: incorrect delay status and update pretty config

* chore: update

* chore: remove cache

* chore: update

* chore: update

* fix: app freezed when change group proxy

* chore: update

* chore: update

* chore: add rustfmt.toml to tauri-plugin-mihomo

* chore: happy clippy

* refactor: connect mihomo websocket

* chore: update

* chore: update

* fix: parse bigint to number

* chore: update

* Revert "fix: parse bigint to number"

This reverts commit 74c006522e23aa52cf8979a8fb47d2b1ae0bb043.

* chore: use number instead of bigint

* chore: cleanup

* fix: rule data not refresh when switch profile

* chore: update

* chore: cleanup

* chore: update

* fix: traffic graph data display

* feat: add ipc connection pool

* chore: update

* chore: clippy

* fix: incorrect delay status

* fix: typo

* fix: empty proxies tray menu

* chore: clippy

* chore: import tauri-plugin-mihomo by using git repo

* chore: cleanup

* fix: mihomo api

* fix: incorrect delay status

* chore: update tauri-plugin-mihomo dep

chore: update
2025-10-08 12:32:40 +08:00
Sline
72aa56007c feat(ui): implement profiles batch select and i18n (#4972)
* feat(ui): implement profiles batch select and i18n

* refactor: adjust button position and icon

* style: lint fmt
2025-10-08 12:02:55 +08:00
renovate[bot]
2bc720534d chore(deps): update rust crate flexi_logger to 0.31.5 (#4965)
* chore(deps): update rust crate flexi_logger to 0.31.5

* chore: up lock

---------

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Slinetrac <realakayuki@gmail.com>
2025-10-08 08:34:41 +08:00
renovate[bot]
1e88f95b43 chore(deps): update dependency lint-staged to v16 (#4968)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Slinetrac <realakayuki@gmail.com>
2025-10-08 08:34:03 +08:00
Slinetrac
9d47bc66c4 chore: unify hook behavior to prevent cross-platform CI failures 2025-10-08 08:12:16 +08:00
Slinetrac
10f250b7e7 Revert "fix(windows): show UAC prompt for TUN service install/uninstall (#4959)"
This reverts commit c05395c258.
2025-10-08 07:47:05 +08:00
oomeow
f492580864 chore: pretty lint-staged.config.js 2025-10-07 18:50:18 +08:00
Slinetrac
86b4712beb chore: more friendly lint 2025-10-07 18:28:32 +08:00
Slinetrac
0d12103085 chore: add lint-staged and edit pre-commit 2025-10-07 18:02:37 +08:00
Sline
bf4e1a3270 feat: url test button for proxy card and type safety (#4964)
* feat: url test button for proxy card and type safety

* fix: resolve ESLint hook dependency error in current-proxy-card.tsx
2025-10-07 16:39:22 +08:00
renovate[bot]
3f1f53434c chore(deps): update npm dependencies (#4962)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-07 16:38:32 +08:00
Sline
d3477159a8 fix: improve Linux tray support and add --no-tray option (#4958) 2025-10-07 10:02:11 +08:00
Sline
c05395c258 fix(windows): show UAC prompt for TUN service install/uninstall (#4959) 2025-10-07 10:01:35 +08:00
Sline
d25eb49bfe fix(tray): resolve "Restart App" failure on Windows (#4960)
* fix(tray): resolve "Restart App" failure on Windows

* style: rm useless comment
2025-10-07 10:01:15 +08:00
renovate[bot]
f3f8ea0481 chore(deps): update cargo dependencies (#4899)
* chore(deps): update cargo dependencies

* chore: up lock

---------

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Slinetrac <realakayuki@gmail.com>
2025-10-07 08:22:35 +08:00
renovate[bot]
0af971b08a chore(deps): update npm dependencies (#4955)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-10-07 07:53:24 +08:00
Sline
a7fa63f054 fix: implement proper error handling for "Restart App" failures (#4951)
* fix: implement proper error handling for "Restart App" failures

* fix: make clippy happy
2025-10-07 07:32:01 +08:00
Sline
b20b30baad refactor(config): move verify_config_initialization with backoff retry (#4952) 2025-10-07 07:31:35 +08:00
Sline
cb15a38cb3 fix: restore periodic proxy guard checks removed in event-driven migration (#4954)
* fix: restore periodic proxy guard checks removed in event-driven migration

* style: cargo fmt
2025-10-07 07:18:07 +08:00
Sline
39673af46f fix: ensure frontend sync after profile create/delete (#4956) 2025-10-07 07:17:21 +08:00
wonfen
c6afbf6ee8 chore: resolve deprecation warnings and add missing translations 2025-10-06 23:40:12 +08:00
Tunglies
abb0df59df refactor: simplify auto proxy disabling logic in clean_async function 2025-10-06 18:26:01 +08:00
Sline
b0decf824e fix(webdav/app): reset client on errors and improve app restart (#4941)
* fix(webdav/app): reset client on errors and improve app restart

* refactor: rm unused function
2025-10-06 16:54:35 +08:00
Sline
5ec5fdcfc7 fix(init): ensure runtime config is ready before core manager startup (#4942)
* fix(init): ensure runtime config is ready before core manager startup

* refactor: simplify verify_config_initialization function
2025-10-06 16:51:47 +08:00
wonfen
f9bc739c51 feat: add system proxy cleanup on system shutdown & prevent DLL errors 2025-10-06 11:19:23 +08:00
Tunglies
a1b3f267de feat: enhance exit handling to prevent initialization and event processing during application exit 2025-10-05 18:07:47 +08:00
wonfen
dbcad24093 refactor: Uses tokio Command with CREATE_NO_WINDOW flag to avoid DLL initialization issues during shutdown 2025-10-05 11:11:14 +08:00
Tunglies
1176f8c863 feat: refactor app data provider and context for improved data management and performance 2025-10-04 21:20:31 +08:00
oomeow
90b98f695b fix: app freeze when core run by service mode and open app window (#4922)
* fix: app freeze when core run by service mode

* chore: update

* chore: update UPDATELOG

---------

Co-authored-by: Tunglies <77394545+Tunglies@users.noreply.github.com>
2025-10-04 20:44:49 +08:00
renovate[bot]
600b0b52f4 chore(deps): update npm dependencies (#4939)
* chore(deps): update npm dependencies

* Refactor components to use function syntax instead of forwardRef for better type handling and clarity. Updated imports and adjusted prop types accordingly across multiple viewer components including TrafficGraph, ProfileViewer, BackupViewer, ClashCoreViewer, ControllerViewer, DnsViewer, LiteModeViewer, NetworkInterfaceViewer, ThemeViewer, TunViewer, UpdateViewer, and WebUIViewer.

---------

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Tunglies <77394545+Tunglies@users.noreply.github.com>
2025-10-04 20:26:10 +08:00
renovate[bot]
8b3bc18ea8 chore(deps): update dependency eslint-plugin-react-hooks to v6 (#4940)
* chore(deps): update dependency eslint-plugin-react-hooks to v6

* fix: update ESLint configuration to use correct imports and recommended settings

* chore: clean up unused code and improve readability across components

---------

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Tunglies <77394545+Tunglies@users.noreply.github.com>
2025-10-04 19:42:34 +08:00
Tunglies
9a9c9a2da1 feat: enhance versioning script to include latest Tauri commit hash in autobuild and deploytest tags 2025-10-04 17:03:05 +08:00
oomeow
c5023b4505 ci: use ubuntu-22.04 on dev bundle workflow 2025-10-02 12:46:56 +08:00
oomeow
18b79d3693 ci: add linux dev bundle 2025-10-02 12:33:48 +08:00
oomeow
982c8b4df2 fix: drag and drop (#4924) 2025-10-02 12:32:55 +08:00
Tunglies
8a4f2de887 Revert "Refactor components to remove forwardRef and simplify props handling"
This reverts commit 1cd013fb94.
2025-09-30 18:13:02 +08:00
Tunglies
14288568bf feat: optimize backend i18n resource usage and improve language loading 2025-09-30 15:22:08 +08:00
Tunglies
1cd013fb94 Refactor components to remove forwardRef and simplify props handling
- Updated multiple components to remove the use of forwardRef, simplifying the props structure.
- Adjusted imports and component definitions accordingly.
- Ensured consistent handling of refs and props across various viewer components.
- Improved readability and maintainability of the codebase.
2025-09-30 14:26:40 +08:00
Sukka
0c88568cd7 chore: make eslint happy (part 1) (#4890) 2025-09-30 14:19:49 +08:00
Tunglies
ecdeadfe1e feat: enhance CI workflows with paths filtering for Rust and web changes 2025-09-30 03:35:38 +08:00
Junkai W.
d86bdea127 feat: add Quick navigation bar in the rule mode agent group (#4889) 2025-09-29 11:51:53 +08:00
renovate[bot]
40f0e1bb19 chore(deps): update dependency @types/react to v19.1.15 (#4888)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-09-28 19:55:08 +08:00
renovate[bot]
78496312ec chore(deps): update npm dependencies (#4857)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-09-28 19:54:58 +08:00
renovate[bot]
3e23609b68 chore(deps): update cargo dependencies (#4842)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-09-28 19:51:25 +08:00
renovate[bot]
8488a92026 chore(deps): update npm dependencies (#4843)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-09-28 19:51:10 +08:00
wonfen
1b4691d0ac chore: update log 2025-09-27 18:40:59 +08:00
wonfen
fae2c27648 refactor: remove duplicate and inconsistent isTunAvailable definitions 2025-09-26 14:08:32 +08:00
wonfen
7a14e90802 feat: unify TUN mode availability checks across components 2025-09-26 14:00:57 +08:00
Tunglies
c8c79d9baa feat: group all GitHub Actions updates into a single PR 2025-09-25 19:24:26 +08:00
Tunglies
a2d33c5447 fix: update rust-toolchain action to use master branch for consistency 2025-09-25 19:21:06 +08:00
Sukka
fb5d5a7d37 chore(eslint): replace eslint-plugin-react w/ eslint-react (#4844)
* chore(eslint): replace `eslint-plugin-react` w/ `eslint-react`

* chore(eslint): replace `eslint-plugin-import` w/ `import-x`
2025-09-24 13:25:22 +08:00
❤是纱雾酱哟~
b608a389c5 build(tauri): add pkexec dependency for linux packages (#4833)
* build(tauri): add pkexec dependency for linux packages

- Include pkexec in deb package dependencies
- Include pkexec in rpm package dependencies
- Update dependency arrays formatting in tauri linux config

Signed-off-by: Dragon1573 <49941141+Dragon1573@users.noreply.github.com>

* refactor(tauri): Prettify configurations according to suggestions

- Format `src-tauri/tauri.linux.conf.json` using Prettier

Signed-off-by: Dragon1573 <49941141+Dragon1573@users.noreply.github.com>

* feat: add pkexec dependency for Linux .deb and .rpm packaging

---------

Signed-off-by: Dragon1573 <49941141+Dragon1573@users.noreply.github.com>
Co-authored-by: Tunglies <77394545+Tunglies@users.noreply.github.com>
2025-09-24 08:42:14 +08:00
Hank
9de90200f5 fix: main thread block by sidecar launching. (#4795)
* fix: main thread block by sidecar launching.

#4791

* Refactor async runtime spawn to use AsyncHandler::spawn

for unify debugging.

* Fix compile error on non-windows platform

by remove Windows-specific configuration import.
2025-09-24 08:38:06 +08:00
renovate[bot]
860f154d54 chore(deps): update cargo dependencies (#4796)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-09-23 14:02:43 +08:00
renovate[bot]
f7d4040ac7 chore(deps): update npm dependencies (#4783)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-09-23 14:02:37 +08:00
Thomas
b3075cce24 recommit #4765 overwritten by #4815 (#4834) 2025-09-23 13:59:41 +08:00
Junkai W.
909c4028f1 添加链式代理下规则适配 2025-09-22 18:16:17 +08:00
Tunglies
620922f82e feat: add configurable log size and count options in settings 2025-09-22 16:31:38 +08:00
Tunglies
22e1d329cb fix: breaks the behavior of entering lightweight mode from tray menu #4815 2025-09-21 21:34:47 +08:00
Tunglies
33fdcc38b5 Revert "还原轻量模式的进入和退出行为 (#4817)"
This reverts commit 8f88270cdf.
2025-09-21 21:33:01 +08:00
infinite-illusion
fef2728a7c remove incorrect setupCloseListener from _layout.tsx (#4818) 2025-09-21 15:05:37 +08:00
Junkai W.
8f88270cdf 还原轻量模式的进入和退出行为 (#4817)
同时把轻量模式系统托盘id改为lightweight_mode
2025-09-21 15:02:55 +08:00
Junkai W.
3493580236 修复win下系统托盘代理组与gui顺序不一致 (#4815) 2025-09-21 09:45:26 +08:00
Tunglies
7f2729fd0f fix: restore LogLineFilter import for proper logging functionality 2025-09-21 01:59:02 +08:00
Tunglies
b8c82320d3 refactor: enhance logging system with NoExternModule for better log filtering 2025-09-21 01:31:08 +08:00
Tunglies
94ec25c193 feat: add logging for ClashVergeRev version in async setup 2025-09-20 23:24:43 +08:00
Tunglies
2366530622 refactor: change lto option to "thin" for improved build performance 2025-09-20 22:54:52 +08:00
Tunglies
1cf8e2384e fix: Cargo.toml dependencies option (#4805)
* chore: update cargo lock

* refactor: streamline dependency declarations in Cargo.toml

* refactor: reorder import statements and comment out Cargo.toml version update

* fix: enable Cargo version update in release script and update tauri-plugin-devtools dependency
2025-09-20 15:15:24 +08:00
Tunglies
d9a5c11d6a refactor: improve code readability and consistency in proxy-chain and uri-parser utilities
refactor: add keys to icons in routers for improved rendering and performance
refactor: optimize RegExp polyfill by using Object.prototype.hasOwnProperty.call
refactor: reorder imports in chain-proxy-provider for consistency
refactor: remove unused "obfs-opts" property from IProxySnellConfig interface

refactor: reorganize imports and enhance refresh logic in app data provider

refactor: re-enable prop-types linting for better type safety in BaseDialog component

refactor: update dependencies in effect hooks for improved stability and performance
2025-09-20 11:19:36 +08:00
Tunglies
7811714f89 refactor: enhance logging system and add new development commands (#4803)
* refactor: enhance logging system and add new development commands

* refactor: add cfg-if dependency and improve logging configuration
2025-09-20 00:04:46 +08:00
Tunglies
e869da8d4c refactor: remove unused HTTP-specific structs to streamline service code 2025-09-19 00:04:51 +08:00
Tunglies
e414b49879 Refactor imports across multiple components for consistency and clarity
- Reorganized import statements in various components to ensure consistent ordering and grouping.
- Removed unnecessary imports and added missing ones where applicable.
- Improved readability and maintainability of the codebase by standardizing import styles.
2025-09-19 00:01:04 +08:00
Tunglies
627119bb22 Refactor imports and improve code organization across multiple components and hooks
- Consolidated and reordered imports in various files for better readability and maintainability.
- Removed unused imports and ensured consistent import styles.
- Enhanced the structure of components by grouping related imports together.
- Updated the layout and organization of hooks to streamline functionality.
- Improved the overall code quality by following best practices in import management.
2025-09-18 23:34:38 +08:00
Tunglies
74ade3ee41 refactor: update ESLint configuration and improve lint command with cache 2025-09-18 23:13:13 +08:00
Tunglies
324628dd3d refactor: replace 'let' with 'const' for better variable scoping and immutability 2025-09-18 23:07:18 +08:00
Tunglies
9d96ac0f6a feat: Integrate HTTP plugin and update IP detection to use fetch API #4712 2025-09-18 19:35:14 +08:00
Tunglies
409571f54b refactor: remove unused notification permission hook and related code 2025-09-18 19:13:23 +08:00
TianHua Liu
a995a13163 chore: use jsx-runtime presets of eslint-plugin-react (#4794) 2025-09-18 19:04:12 +08:00
Tunglies
7848d6b1de refactor: window handle usage (#4788)
* refactor: Remove unused UI reset function and streamline window creation logic

* refactor: Remove debug print statements and streamline lightweight mode initialization

* fix: Ensure tray status refresh during silent startup and lightweight mode entry is independent of window creation

* refactor: Simplify window creation process and remove debug print statements
2025-09-18 10:22:43 +08:00
Tunglies
5d2e114b4d fix: Update tray menu event handling to toggle lightweight mode based on current state #4785 2025-09-18 00:46:49 +08:00
Tunglies
c207516b47 refactor: clash-verge-service management (#4674)
* refactor: clash-verge-service management

* fix: correct service state checks in ProxyControlSwitches component
refactor: improve logging in service state update functions

* fix: add missing async handler for Windows and adjust logging import for macOS

* fix: streamline logging imports and add missing async handler for Windows

* refactor: remove unused useServiceStateSync hook and update imports in _layout

* refactor: remove unused useServiceStateSync import and clean up code in ProxyControlSwitches and _layout

* refactor: simplify service status checks and reduce wait time in useServiceInstaller hook

* refactor: remove unnecessary logging statements in service checks and IPC connection

* refactor: extract SwitchRow component for better code organization and readability

* refactor: enhance service state management and update related mutations in layout

* refactor: streamline core stopping logic and improve IPC connection logging

* refactor: consolidate service uninstallation logic and improve error handling

* fix: simplify conditional statements in CoreManager and service functions

* feat: add backoff dependency and implement retry strategy for IPC requests

* refactor: remove redundant Windows conditional and improve error handling in IPC tests

* test: improve error handling in IPC tests for message signing and verification

* fix: adjust IPC backoff retry parameters

* refactor: Remove service state tracking and related logic from service management

* feat: Enhance service status handling with logging and running mode updates

* fix: Improve service status handling with enhanced error logging

* fix: Ensure proper handling of service operations with error propagation

* refactor: Simplify service operation execution and enhance service status handling

* fix: Improve error message formatting in service operation execution and simplify service status retrieval

* refactor: Replace Cache with CacheProxy in multiple modules and update CacheEntry to be generic

* fix: Remove unnecessary success message from config validation

* refactor: Comment out logging statements in service version check and IPC request handling
2025-09-17 22:59:02 +08:00
Tunglies
6724f1ae35 feat: Implement caching mechanism with Cache struct and update related commands 2025-09-17 19:37:42 +08:00
Tunglies
1787d5372e fix: Update OS_PLATFORM definition to reflect the current platform 2025-09-17 16:10:22 +08:00
Tunglies
4c41144dd0 fix: Update error message to include details of accumulated startup errors 2025-09-17 16:10:01 +08:00
Tunglies
27636c848f fix: update changelog to reflect removal of hidden groups in tray node switching #4765 2025-09-17 13:39:38 +08:00
Tunglies
8060d699f0 fix: enhance prebuild script to support shorthand for force update #4777 2025-09-17 13:36:31 +08:00
renovate[bot]
f36f31a636 chore(deps): update npm dependencies (#4686)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-09-17 12:45:57 +08:00
renovate[bot]
d300fac3d9 chore(deps): update cargo dependencies (#4687)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-09-17 12:45:50 +08:00
Thomas
8c2262dd95 stop showing hidden groups in the tray menu (#4765)
托盘菜单不显示隐藏代理组
2025-09-17 12:45:35 +08:00
wonfen
c438e916ca perf: remove system-level unlock test timeout notice 2025-09-15 22:16:13 +08:00
ZShab Niba
0855bd4896 fix: Fix icon to RGBA/sRGB Colorspace (#4753)
* fix: Fixed icon pixel size

* fix: fix icon to RGBA/sRGB Colorspace

* chore: update UPDATELOG

---------

Co-authored-by: Tunglies <77394545+Tunglies@users.noreply.github.com>
2025-09-15 12:29:30 +08:00
Junkai W.
f2073a2f83 Add Func 链式代理 (#4624)
* 添加链式代理gui和语言支持
在Iruntime中添跟新链式代理配置方法
同时添加了cmd

* 修复读取运行时代理链配置文件bug

* t

* 完成链式代理配置构造

* 修复获取链式代理运行时配置的bug

* 完整的链式代理功能
2025-09-15 07:44:54 +08:00
Tunglies
a1f468202f Revert "fix: Fixed icon pixel size (#4698)"
revert due to runtime error [[Setup]] Error: failed to process image: Format error decoding Ico: The PNG is not in RGBA format!

This reverts commit a24bf4042c.
2025-09-15 00:56:59 +08:00
ZShab Niba
a24bf4042c fix: Fixed icon pixel size (#4698) 2025-09-13 10:56:07 +08:00
❤是纱雾酱哟~
15d22b4bf6 chore(issue template): disable blank issue template in GitHub config (#4731)
- Prevent users from creating issue without a template
- Always use a template for guiding users to provide necessary information for us

Signed-off-by: Dragon1573 <49941141+Dragon1573@users.noreply.github.com>
2025-09-13 10:11:40 +08:00
Tunglies
231d264652 fix: update alpha version description in README 2025-09-12 18:45:40 +08:00
Tunglies
67ac353fd5 chore: update cron schedule for auto build workflow 2025-09-12 18:43:13 +08:00
Tunglies
1c5534ad36 fix: remove deprecated updater for alpha endpoints in tauri configuration 2025-09-11 17:14:57 +08:00
Tunglies
251678493c edition 2024 (#4702)
* feat: update Cargo.toml for 2024 edition and optimize release profiles

* feat: refactor environment variable settings for Linux and improve code organization

* Refactor conditional statements to use `&&` for improved readability

- Updated multiple files to combine nested `if let` statements using `&&` for better clarity and conciseness.
- This change enhances the readability of the code by reducing indentation levels and making the conditions more straightforward.
- Affected files include: media_unlock_checker.rs, profile.rs, clash.rs, profiles.rs, async_proxy_query.rs, core.rs, handle.rs, hotkey.rs, service.rs, timer.rs, tray/mod.rs, merge.rs, seq.rs, config.rs, proxy.rs, window.rs, general.rs, dirs.rs, i18n.rs, init.rs, network.rs, and window.rs in the resolve module.

* refactor: streamline conditional checks using `&&` for improved readability

* fix: update release profile settings for panic behavior and optimization

* fix: adjust optimization level in Cargo.toml and reorder imports in lightweight.rs
2025-09-10 09:49:06 +08:00
Tunglies
ccbffa14f0 fix: replace toggle with show for main window in lightweight mode exit #4697 2025-09-09 21:25:38 +08:00
Tunglies
dfc1f736af fix: resolve from lightweight cause crash (#4682)
* refactor: streamline lightweight mode handling and improve window management

* refactor: replace mutex-based window creation lock with atomic operations for improved performance

* refactor: remove startup completed event handling and simplify initialization logic

* refactor: remove conditional compilation for emit_update_event function

* refactor: simplify return statements and clean up commented code in lightweight and window manager modules

* refactor: streamline lightweight mode handling by consolidating window management calls

* refactor: prevent unnecessary window toggle when exiting lightweight mode

* refactor: reorder imports for consistency in lightweight module

* refactor: move macOS specific logging_error import for clarity
2025-09-09 18:50:24 +08:00
Tunglies
c54d89a465 feat: add support for Windows ARM64 in development workflow 2025-09-09 16:55:46 +08:00
Tunglies
55b95a1985 Revert "feat: update Cargo.toml for 2024 edition and optimize release profiles (#4681)"
This reverts commit 31e3104c7f.
2025-09-08 21:48:09 +08:00
Tunglies
31e3104c7f feat: update Cargo.toml for 2024 edition and optimize release profiles (#4681)
* feat: update Cargo.toml for 2024 edition and optimize release profiles

* feat: refactor environment variable settings for Linux and improve code organization

* Refactor conditional statements to use `&&` for improved readability

- Updated multiple files to combine nested `if let` statements using `&&` for better clarity and conciseness.
- This change enhances the readability of the code by reducing indentation levels and making the conditions more straightforward.
- Affected files include: media_unlock_checker.rs, profile.rs, clash.rs, profiles.rs, async_proxy_query.rs, core.rs, handle.rs, hotkey.rs, service.rs, timer.rs, tray/mod.rs, merge.rs, seq.rs, config.rs, proxy.rs, window.rs, general.rs, dirs.rs, i18n.rs, init.rs, network.rs, and window.rs in the resolve module.

* refactor: streamline conditional checks using `&&` for improved readability
2025-09-08 13:57:32 +08:00
wonfen
58a0089b19 fix: workflow file name 2025-09-07 15:24:14 +08:00
Tunglies
043ed4cb31 feat: add known issues section and update lightweight mode handling 2025-09-07 13:00:51 +08:00
Tunglies
f64c01044c feat: bump version to 2.4.3 and update changelog for macOS intel Mihomo compatibility 2025-09-07 12:06:33 +08:00
Tunglies
5dca724017 feat: update Vite configuration for improved chunking and build options 2025-09-06 21:24:46 +08:00
Tunglies
579f9bd1f8 feat: add path and process dependencies; remove unused SCSS preprocessor options 2025-09-06 21:04:58 +08:00
Tunglies
7c9104a5b9 feat: optimize home page loading with lazy loading and improve card rendering logic 2025-09-06 20:38:33 +08:00
renovate[bot]
14d1531469 chore(deps): update npm dependencies (#4567)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-09-06 14:11:30 +08:00
renovate[bot]
74e1e92607 chore(deps): update rust crate zip to v5 (#4653)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-09-06 14:11:09 +08:00
renovate[bot]
f7a56c0eb3 chore(deps): update cargo dependencies (#4586)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-09-06 14:10:35 +08:00
Tunglies
e2fa76332a fix: remove unused ProxyRequestCache import and improve proxy update error handling 2025-09-06 14:10:09 +08:00
Tunglies
0daa8720cd feat: implement i18n lazy loading optimization
🚀 Performance improvements:
- Replace static language imports with dynamic imports
- Load only current language on startup instead of all 13 languages
- Implement on-demand loading when switching languages

📦 Bundle optimization:
- Reduce initial bundle size by avoiding preloading all language files
- Add resource caching to prevent reloading same language
- Support all 13 languages: en, ru, zh, fa, tt, id, ar, ko, tr, de, es, jp, zhtw

🔧 Technical changes:
- Convert i18n.ts to use dynamic import() for language resources
- Add async initializeLanguage() for app startup
- Create useI18n hook for language management with loading states
- Update main.tsx for async language initialization
- Fix language display labels in settings dropdown
- Maintain backward compatibility with existing language system

 Fixed issues:
- Resolve infinite loop in React components
- Fix missing language labels in settings UI
- Prevent circular dependencies in language loading
- Add proper error handling and fallback mechanisms
2025-09-06 14:05:36 +08:00
Tunglies
f70b8b1213 Revert "fix: auto-detect KDE/Plasma and switch to X11 as backend to fix titlebar button freeze (#4523)"
This reverts commit d58c0a7df5.
2025-09-06 11:59:33 +08:00
wonfen
feb3dfbe86 chore: use UTC+8 time to build 2025-09-05 11:21:34 +08:00
Tunglies
f38e4a6cac fix: refactor proxy fetching to use command methods and improve error handling 2025-09-04 15:32:46 +08:00
Tunglies
893188d693 fix: add basic authorization header support for URL parsing in NetworkManager #4618 2025-09-03 01:07:45 +08:00
Tunglies
b989aeb7b0 refactor: clean up imports and remove unused initialization in NetworkManager 2025-09-02 23:48:27 +08:00
Tunglies
40f87c834d fix: update ClientConfig settings for improved connection management 2025-09-02 23:21:05 +08:00
Tunglies
0bb9cb5097 fix: enhance startup speed and fix connection issues during initialization 2025-09-02 23:10:02 +08:00
Tunglies
b51797e238 fix: update logging types and clean up ProxyRequestCache usage 2025-09-02 22:19:22 +08:00
Tunglies
926c095409 perf: update ProxyRequestCache to use boxed CacheEntry for improved memory management 2025-09-02 19:09:44 +08:00
Tunglies
0c65f8ebad fix: remove macOS specific conditional compilation for logging_error import 2025-09-02 18:36:13 +08:00
Tunglies
63f4295063 fix: update required service version to 1.1.2 2025-09-02 16:09:21 +08:00
Tunglies
d2b38a8a3c fix: optimize async handler usage in singleton checks and resource initialization #4576, #4590, #4609 2025-09-02 13:37:14 +08:00
Tunglies
45ddb15d56 fix: remove redundant service stop call in CoreManager and clean up unused Mutex import 2025-09-02 12:12:19 +08:00
Tunglies
7aef9d2a5a fix: resolve lightweight mode state detection issues and improve logging #3814 2025-09-02 08:00:53 +08:00
wonfen
45fdebeaca style: simplify and improve proxy settings UI 2025-09-01 13:57:04 +08:00
wonfen
0ea875f7f7 fix: unify homepage node selection 2025-09-01 11:30:27 +08:00
Guanghui Qin
1b54c9fc1b fix rpm/deb package naming issues (#4582)
* fix arm64 deb package name

* fix rpm package naming issue

* fix rpm package naming issue

---------

Co-authored-by: hiaoxui <hiaoxui@gmail.com>
2025-09-01 01:12:40 +08:00
Tunglies
89f3adcbef fix: add redirect policy to HTTP client builder 2025-09-01 00:45:39 +08:00
Tunglies
b13fef5ad9 fix: add missing allow(dead_code) attribute to block_on method 2025-08-31 16:22:35 +08:00
Tunglies
9110955b63 fix: streamline service availability checks and improve logging for core startup 2025-08-31 16:19:31 +08:00
Tunglies
4508d062f1 chore: bump version to 2.4.2 2025-08-31 15:53:50 +08:00
Junkai W.
bea0dde074 Win 下添加代理节点的系统托盘 (#4562)
* add proxy memu in tray

* 添加win下系统托盘 节点
代理->代理组->nodes
同时添加了对应gui同步

* 添加win 系统托盘显示代理节点
且gui和托盘刷新机制

* rust format

* 添加 win下系统托盘节点延迟

* Squashed commit of the following:

commit 44caaa62c54be198718ad93638c97f2b56560149
Merge: 1916e539 3939741a
Author: Junkai W. <129588175+Be-Forever223@users.noreply.github.com>
Date:   Sat Aug 30 02:37:07 2025 +0800

    Merge branch 'dev' into dev

commit 3939741a06
Author: Tunglies <tunglies.dev@outlook.com>
Date:   Sat Aug 30 02:24:47 2025 +0800

    refactor: migrate from serde_yaml to serde_yaml_ng for improved YAML handling (#4568)

    * refactor: migrate from serde_yaml to serde_yaml_ng for improved YAML handling

    * refactor: format code for better readability in DNS configuration

commit f86a1816e0
Author: Tunglies <tunglies.dev@outlook.com>
Date:   Sat Aug 30 02:15:34 2025 +0800

    chore(deps): update sysinfo to 0.37.0 and zip to 4.5.0 in Cargo.toml (#4564)

    * chore(deps): update sysinfo to 0.37.0 and zip to 4.5.0 in Cargo.toml

    * chore(deps): remove libnghttp2-sys dependency and update isahc features in Cargo.toml

    * chore(deps): remove sysinfo and zip from ignoreDeps in renovate.json

commit 9cbd8b4529
Author: Tunglies <77394545+Tunglies@users.noreply.github.com>
Date:   Sat Aug 30 01:30:48 2025 +0800

    feat: add x86 OpenSSL installation step for macOS in workflows

commit 5dea73fc2a
Author: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Date:   Sat Aug 30 01:21:53 2025 +0800

    chore(deps): update npm dependencies (#4542)

    Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>

commit 01af1bea23
Author: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Date:   Sat Aug 30 01:21:46 2025 +0800

    chore(deps): update rust crate reqwest_dav to 0.2.2 (#4554)

    Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>

commit 1227e86134
Author: Tunglies <77394545+Tunglies@users.noreply.github.com>
Date:   Sat Aug 30 01:12:03 2025 +0800

    Remove unnecessary "rustls-tls" feature from reqwest dependency in Cargo.toml

commit c6a6ea48dd
Author: Tunglies <tunglies.dev@outlook.com>
Date:   Fri Aug 29 23:51:09 2025 +0800

    refactor: enhance async initialization and streamline setup process (#4560)

    * feat: Implement DNS management for macOS

    - Added `set_public_dns` and `restore_public_dns` functions in `dns.rs` to manage system DNS settings.
    - Introduced `resolve` module to encapsulate DNS and scheme resolution functionalities.
    - Implemented `resolve_scheme` function in `scheme.rs` to handle deep links and profile imports.
    - Created UI readiness management in `ui.rs` to track and update UI loading states.
    - Developed window management logic in `window.rs` to handle window creation and visibility.
    - Added initial loading overlay script in `window_script.rs` for better user experience during startup.
    - Updated server handling in `server.rs` to integrate new resolve functionalities.
    - Refactored window creation calls in `window_manager.rs` to use the new window management logic.

    * refactor: streamline asynchronous handling in config and resolve setup

    * Revert "refactor: streamline asynchronous handling in config and resolve setup"

    This reverts commit 23d7dc86d5b87a3a34df2ae69c2caacef803ef81.

    * fix: optimize asynchronous memory handling

    * fix: enhance task logging by adding size check for special cases

    * refactor: enhance async initialization and streamline setup process

    * refactor: optimize async setup by consolidating initialization tasks

    * chore: update changelog for Mihomo(Meta) kernel upgrade to v1.19.13

    * fix: improve startup phase initialization performance

    * refactor: optimize file read/write performance to reduce application wait time

    * refactor: simplify app instance exit logic and adjust system proxy guard initialization

    * refactor: change resolve_setup_async to synchronous execution for improved performance

    * refactor: update resolve_setup_async to accept AppHandle for improved initialization flow

    * refactor: remove unnecessary initialization of portable flag in run function

    * refactor: consolidate async initialization tasks into a single blocking call for improved execution flow

    * refactor: optimize resolve_setup_async by restructuring async tasks for improved concurrency

    * refactor: streamline resolve_setup_async and embed_server for improved async handling

    * refactor: separate synchronous and asynchronous setup functions for improved clarity

    * refactor: simplify async notification handling and remove redundant network manager initialization

    * refactor: enhance async handling in proxy request cache and window creation logic

    * refactor: improve code formatting and readability in ProxyRequestCache

    * refactor: adjust singleton check timeout and optimize trace size conditions

    * refactor: update TRACE_SPECIAL_SIZE to include additional size condition

    * refactor: update kode-bridge dependency to version 0.2.1-rc2

    * refactor: replace RwLock with AtomicBool for UI readiness and implement event-driven monitoring

    * refactor: convert async functions to synchronous for window management

    * Update src-tauri/src/utils/resolve/window.rs

    * fix: handle missing app_handle in create_window function

    * Update src-tauri/src/module/lightweight.rs

* format
2025-08-31 14:20:57 +08:00
wonfen
3e674b186f fix: refine release workflow 2025-08-31 08:49:30 +08:00
Tunglies
92d9c94e87 fix: resolve crashes when exiting lightweight mode by ensuring async window operations 2025-08-30 20:04:21 +08:00
Tunglies
c09066c0a3 refactor: restructure async initialization and standardize logging system
### Major Improvements

- **Async initialization refactoring**: Complete async migration of init_config, improving app startup performance and stability
  - Change init_work_config from blocking to async execution
  - Optimize error handling for directory creation and config file initialization
  - Enhance structure and robustness of initialization process

- **Logging system standardization**: Unify usage of project's built-in logging! macro
  - Replace all log::info!/warn!/error!/debug! with logging!(level, Type::Setup, true, ...) format
  - Maintain consistency in log categorization and formatting
  - Improve convenience for log tracking and debugging

### Technical Optimizations

- **Error handling improvements**: Remove crate::log_err! macro, use standard Result error propagation
- **Directory management optimization**: Refactor ensure_directories function with clearer directory creation logic
- **Config initialization enhancement**: Separate initialize_config_files function for better code maintainability
- **Async task management**: Use AsyncHandler::spawn to optimize background log cleanup tasks

### Bug Fixes

- Fix potential race conditions in async config initialization
- Improve error feedback and logging during app startup
- Enhance error handling for DNS config and resource file initialization

### Updates

- Update wording in UPDATELOG.md issue descriptions
2025-08-30 17:58:26 +08:00
Tunglies
3a7be3dfb7 refactor: streamline resolve_scheme function calls and visibility in utils 2025-08-30 17:22:52 +08:00
Tunglies
09f14c23e4 fix: update required service version to 1.1.1 2025-08-30 11:02:58 +08:00
Tunglies
eaaae3b393 fix: improve stability during profile switching to prevent crashes 2025-08-30 07:40:31 +08:00
Tunglies
3939741a06 refactor: migrate from serde_yaml to serde_yaml_ng for improved YAML handling (#4568)
* refactor: migrate from serde_yaml to serde_yaml_ng for improved YAML handling

* refactor: format code for better readability in DNS configuration
2025-08-30 02:24:47 +08:00
Tunglies
f86a1816e0 chore(deps): update sysinfo to 0.37.0 and zip to 4.5.0 in Cargo.toml (#4564)
* chore(deps): update sysinfo to 0.37.0 and zip to 4.5.0 in Cargo.toml

* chore(deps): remove libnghttp2-sys dependency and update isahc features in Cargo.toml

* chore(deps): remove sysinfo and zip from ignoreDeps in renovate.json
2025-08-30 02:15:34 +08:00
Tunglies
9cbd8b4529 feat: add x86 OpenSSL installation step for macOS in workflows 2025-08-30 01:31:02 +08:00
renovate[bot]
5dea73fc2a chore(deps): update npm dependencies (#4542)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-30 01:21:53 +08:00
renovate[bot]
01af1bea23 chore(deps): update rust crate reqwest_dav to 0.2.2 (#4554)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-30 01:21:46 +08:00
Tunglies
1227e86134 Remove unnecessary "rustls-tls" feature from reqwest dependency in Cargo.toml 2025-08-30 01:12:03 +08:00
Tunglies
c6a6ea48dd refactor: enhance async initialization and streamline setup process (#4560)
* feat: Implement DNS management for macOS

- Added `set_public_dns` and `restore_public_dns` functions in `dns.rs` to manage system DNS settings.
- Introduced `resolve` module to encapsulate DNS and scheme resolution functionalities.
- Implemented `resolve_scheme` function in `scheme.rs` to handle deep links and profile imports.
- Created UI readiness management in `ui.rs` to track and update UI loading states.
- Developed window management logic in `window.rs` to handle window creation and visibility.
- Added initial loading overlay script in `window_script.rs` for better user experience during startup.
- Updated server handling in `server.rs` to integrate new resolve functionalities.
- Refactored window creation calls in `window_manager.rs` to use the new window management logic.

* refactor: streamline asynchronous handling in config and resolve setup

* Revert "refactor: streamline asynchronous handling in config and resolve setup"

This reverts commit 23d7dc86d5b87a3a34df2ae69c2caacef803ef81.

* fix: optimize asynchronous memory handling

* fix: enhance task logging by adding size check for special cases

* refactor: enhance async initialization and streamline setup process

* refactor: optimize async setup by consolidating initialization tasks

* chore: update changelog for Mihomo(Meta) kernel upgrade to v1.19.13

* fix: improve startup phase initialization performance

* refactor: optimize file read/write performance to reduce application wait time

* refactor: simplify app instance exit logic and adjust system proxy guard initialization

* refactor: change resolve_setup_async to synchronous execution for improved performance

* refactor: update resolve_setup_async to accept AppHandle for improved initialization flow

* refactor: remove unnecessary initialization of portable flag in run function

* refactor: consolidate async initialization tasks into a single blocking call for improved execution flow

* refactor: optimize resolve_setup_async by restructuring async tasks for improved concurrency

* refactor: streamline resolve_setup_async and embed_server for improved async handling

* refactor: separate synchronous and asynchronous setup functions for improved clarity

* refactor: simplify async notification handling and remove redundant network manager initialization

* refactor: enhance async handling in proxy request cache and window creation logic

* refactor: improve code formatting and readability in ProxyRequestCache

* refactor: adjust singleton check timeout and optimize trace size conditions

* refactor: update TRACE_SPECIAL_SIZE to include additional size condition

* refactor: update kode-bridge dependency to version 0.2.1-rc2

* refactor: replace RwLock with AtomicBool for UI readiness and implement event-driven monitoring

* refactor: convert async functions to synchronous for window management

* Update src-tauri/src/utils/resolve/window.rs

* fix: handle missing app_handle in create_window function

* Update src-tauri/src/module/lightweight.rs
2025-08-29 23:57:42 +08:00
wonfen
2080dbdc0f refactor: proxy control component and system settings UI
fix: handle tun toggle state after service uninstall
2025-08-29 20:46:45 +08:00
Tunglies
6eecd70bd5 fix(subscription): resolve issues causing import failures in some cases #4534, #4436, #4552, #4519, #4517, #4503, #4336, #4301 (#4553)
* fix(subscription): resolve issues causing import failures in some cases #4534, #4436, #4552, #4519, #4517, #4503, #4336, #4301

* fix(profile): update profile creation to include file data handling

* fix(app): improve singleton instance exit handling

* fix: remove unused handle method
2025-08-29 17:46:46 +08:00
Tunglies
a9951e4eca refactor: replace AppHandleManager with handle::Handle for macOS activation policy management 2025-08-28 17:27:54 +08:00
Tunglies
53688f332f fix: replace tokio::runtime::Handle with tauri::async_runtime::handle 2025-08-28 04:58:24 +08:00
Tunglies
d23d1d9a1d fix: remove auto clean up profiles behavior in resolve process 2025-08-28 04:41:12 +08:00
Tunglies
51ff9e1851 fix: resolve issue with application not restoring after tray restart 2025-08-27 22:20:04 +08:00
Tunglies
824814da56 fix: unexpected restart behavior #4438 2025-08-27 22:04:44 +08:00
renovate[bot]
040fcd059f chore(deps): update npm dependencies (#4467)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-27 21:19:22 +08:00
renovate[bot]
f2339620a5 chore(deps): update cargo dependencies (#4468)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-27 21:19:13 +08:00
Sergey Kharenko
d58c0a7df5 fix: auto-detect KDE/Plasma and switch to X11 as backend to fix titlebar button freeze (#4523) 2025-08-26 01:50:44 +08:00
Tunglies
355a18e5eb refactor(async): migrate from sync-blocking async execution to true async with unified AsyncHandler::spawn (#4502)
* feat: replace all tokio::spawn with unified AsyncHandler::spawn

- 🚀 Core Improvements:
  * Replace all tokio::spawn calls with AsyncHandler::spawn for unified Tauri async task management
  * Prioritize converting sync functions to async functions to reduce spawn usage
  * Use .await directly in async contexts instead of spawn

- 🔧 Major Changes:
  * core/hotkey.rs: Use AsyncHandler::spawn for hotkey callback functions
  * module/lightweight.rs: Async lightweight mode switching
  * feat/window.rs: Convert window operation functions to async, use .await internally
  * feat/proxy.rs, feat/clash.rs: Async proxy and mode switching functions
  * lib.rs: Window focus handling with AsyncHandler::spawn
  * core/tray/mod.rs: Complete async tray event handling

-  Technical Advantages:
  * Unified task tracking and debugging capabilities (via tokio-trace feature)
  * Better error handling and task management
  * Consistency with Tauri runtime
  * Reduced async boundaries for better performance

- 🧪 Verification:
  * Compilation successful with 0 errors, 0 warnings
  * Maintains complete original functionality
  * Optimized async execution flow

* feat: complete tokio fs migration and replace tokio::spawn with AsyncHandler

🚀 Major achievements:
- Migrate 8 core modules from std::fs to tokio::fs
- Create 6 Send-safe wrapper functions using spawn_blocking pattern
- Replace all tokio::spawn calls with AsyncHandler::spawn for unified async task management
- Solve all 19 Send trait compilation errors through innovative spawn_blocking architecture

🔧 Core changes:
- config/profiles.rs: Add profiles_*_safe functions to handle Send trait constraints
- cmd/profile.rs: Update all Tauri commands to use Send-safe operations
- config/prfitem.rs: Replace append_item calls with profiles_append_item_safe
- utils/help.rs: Convert YAML operations to async (read_yaml, save_yaml)
- Multiple modules: Replace tokio::task::spawn_blocking with AsyncHandler::spawn_blocking

 Technical innovations:
- spawn_blocking wrapper pattern resolves parking_lot RwLock Send trait conflicts
- Maintain parking_lot performance while achieving Tauri async command compatibility
- Preserve backwards compatibility with gradual migration strategy

🎯 Results:
- Zero compilation errors
- Zero warnings
- All async file operations working correctly
- Complete Send trait compliance for Tauri commands

* feat: refactor app handle and command functions to use async/await for improved performance

* feat: update async handling in profiles and logging functions for improved error handling and performance

* fix: update TRACE_MINI_SIZE constant to improve task logging threshold

* fix(windows): convert service management functions to async for improved performance

* fix: convert service management functions to async for improved responsiveness

* fix(ubuntu): convert install and reinstall service functions to async for improved performance

* fix(linux): convert uninstall_service function to async for improved performance

* fix: convert uninstall_service call to async for improved performance

* fix: convert file and directory creation calls to async for improved performance

* fix: convert hotkey functions to async for improved responsiveness

* chore: update UPDATELOG.md for v2.4.1 with major improvements and performance optimizations
2025-08-26 01:49:51 +08:00
Tunglies
4598c805eb refactor: remove Oxlint workflow and update linting to use ESLint 2025-08-24 16:40:05 +08:00
Tunglies
aa204649fa fix: add web asset build step before running Clippy 2025-08-24 16:38:17 +08:00
Tunglies
fbaff3e90c fix: remove logLevel parameter from fetchLogsViaIPCPeriodically for consistency 2025-08-23 00:28:39 +08:00
Tunglies
0d070fb934 refactor: update AppHandle usage to use Arc<AppHandle> for improved memory management (#4491)
* refactor: update AppHandle usage to use Arc<AppHandle> for improved memory management

* fix: clippy ci

* fix: ensure default_latency_test is safely accessed with non-null assertion
2025-08-23 00:20:58 +08:00
Tunglies
c416bd5755 fix: reorder pnpm installation step in Clippy workflow 2025-08-23 00:15:25 +08:00
Tunglies
90406ae883 fix: clippy ci 2025-08-23 00:11:44 +08:00
wonfen
600b3dfbac fix: release workflow & refine telegram notify format 2025-08-22 20:47:02 +08:00
Tunglies
72e4491dc4 Implement code changes to enhance functionality and improve performance 2025-08-22 19:56:51 +08:00
Tunglies
76c3695567 feat: add Clippy and Oxlint workflows for enhanced linting 2025-08-22 19:20:35 +08:00
Tunglies
475a09bb54 feat: comprehensive oxlint cleanup - remove unused code
🧹 Cleanup Summary:
- Fixed 83 oxlint warnings across 50+ files
- Removed unused imports, variables, and functions
- Maintained all functional code and error handling
- Improved bundle size and code maintainability

📝 Key Changes:
- Cleaned unused React hooks (useState, useEffect, useClashInfo)
- Removed unused Material-UI imports (useTheme, styled components)
- Deleted unused interfaces and type definitions
- Fixed spread operator usage and boolean casting
- Simplified catch parameters where appropriate

🎯 Files Modified:
- React components: home.tsx, settings, profiles, etc.
- Custom hooks: use-*.ts files
- Utility functions and type definitions
- Configuration files

 Result: 0 oxlint warnings (from 83 warnings)
🔧 All functionality preserved
📦 Reduced bundle size through dead code elimination
2025-08-22 18:48:56 +08:00
Tunglies
6a1fce69e0 refactor: comment out includeUpdaterJson in autobuild jobs for clarity 2025-08-22 17:41:33 +08:00
Tunglies
485fd0169b chore: bump version to 2.4.1 2025-08-22 17:31:32 +08:00
wonfen
a9464ff776 chore: update release log & fix workflow2 2025-08-22 16:30:07 +08:00
Tunglies
335ca817d2 refactor: restrict AsyncHandler usage to Windows platform only 2025-08-22 04:18:21 +08:00
Tunglies
6d112c387d refactor: replace tokio::task::spawn_blocking with AsyncHandler::spawn_blocking for improved task management 2025-08-22 04:05:35 +08:00
Tunglies
e4c243de2d refactor: Replace tokio::spawn with AsyncHandler::spawn for better task management
- Replace direct tokio::spawn calls with AsyncHandler::spawn across multiple modules
- Improves task lifecycle management and error handling consistency
- Affected files:
  - src-tauri/src/cmd/network.rs
  - src-tauri/src/core/core.rs
  - src-tauri/src/core/event_driven_proxy.rs
  - src-tauri/src/enhance/tun.rs
  - src-tauri/src/ipc/logs.rs
  - src-tauri/src/ipc/memory.rs
  - src-tauri/src/ipc/monitor.rs
  - src-tauri/src/ipc/traffic.rs
  - src-tauri/src/utils/network.rs
  - src-tauri/src/utils/resolve.rs

This change provides better control over async task spawning and helps prevent
potential issues with unmanaged background tasks.
2025-08-22 03:41:14 +08:00
Tunglies
02f67961a9 feat: add tokio-stream dependency and refactor event loop handling in EventDrivenProxyManager 2025-08-22 03:16:59 +08:00
Tunglies
7d5fd295ed feat: add dev:trace script for enhanced debugging in development 2025-08-22 02:28:44 +08:00
Tunglies
daa0b1592d chore: bump version to 2.4.1 2025-08-22 00:29:42 +08:00
Tunglies
b411783bbe fix: enhance tag version check for consistency in release workflow 2025-08-22 00:15:56 +08:00
Tunglies
40a59bbc1a fix: disable automatic generation of release notes in workflow 2025-08-21 23:56:02 +08:00
wonfen
93fc4932ee Release 2.4.0 2025-08-21 22:45:55 +08:00
Tunglies
2277d7232e refactor: improve code formatting and readability in autobuild and telegram scripts 2025-08-21 21:23:47 +08:00
Tunglies
435318cf1d fix: simplify return statements in updateProxy method for clarity 2025-08-21 21:23:46 +08:00
wonfen
a9a9d8a78f fix(workflow): file version & format 2025-08-21 21:21:52 +08:00
❤是纱雾酱哟~
a2544d237e ci: improve commit checking and update release version script (#4471)
- Increase `fetch-depth` to 50 for more accurate commit history in CI
- Update `release-version.mjs` to use `bash` explicitly for improved compatibility
  - Also avoid errors when invoking the script

Signed-off-by: Dragon1573 <49941141+Dragon1573@users.noreply.github.com>
2025-08-21 21:16:15 +08:00
wonfen
9397ac0174 fix(workflow): download files name & release note 2025-08-21 19:49:17 +08:00
wonfen
4c719da096 chore(workflow): add release info & telegram notification 2025-08-21 18:33:54 +08:00
Tunglies
7613417c33 feat: enhance Tauri build steps with clearer naming and include updater JSON 2025-08-20 23:06:55 +08:00
renovate[bot]
a2a65cade7 chore(deps): update npm dependencies (#4461)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-20 22:54:47 +08:00
renovate[bot]
3a07402aa2 chore(deps): update npm dependencies (#4454)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-20 22:49:45 +08:00
renovate[bot]
59b67f5d3f chore(deps): update cargo dependencies (#4451)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Tunglies <tunglies.dev@outlook.com>
2025-08-20 22:49:37 +08:00
Tunglies
e30cfc3a2f chore: update dependencies and improve IPC request handling 2025-08-20 22:48:08 +08:00
Tunglies
52655d9702 feat: add configuration options to IpcManager for improved client setup 2025-08-20 18:05:31 +08:00
Tunglies
e93846ddc1 refactor: improve log management by introducing a constant for max logs and simplifying log level handling 2025-08-19 22:43:36 +08:00
renovate[bot]
4cf2f6b1e6 chore(deps): update cargo dependencies (#4443)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-19 16:36:36 +08:00
renovate[bot]
43a3cb74ac chore(deps): update npm dependencies (#4430)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-19 16:36:24 +08:00
TianHua Liu
3360339c08 docs: move Windows ARM devices note to CONTRIBUTING.md (#4446)
* docs: how to develop locally

* docs: move Windows ARM devices note to CONTRIBUTING.md
2025-08-19 16:28:42 +08:00
Tunglies
f0dbe9fa60 feat: add console-subscriber for improved logging and tracing support 2025-08-18 23:40:25 +08:00
Tunglies
756751b765 refactor: simplify app restart logic and improve error handling 2025-08-18 22:33:29 +08:00
Tunglies
85a9f6c8d4 fix: correct log cleanup day mapping and update logging level #4434 2025-08-18 19:14:08 +08:00
Ahao
7fe0381850 renew: remove whether to enable_random_port (#4401)
* refactor: streamline clean old assets job by using reusable workflow

* refactor: update clean old assets job to include steps section

* refactor: add checkout step in clean_old_assets job for improved repository access

* fix: correct path to clean old assets workflow in autobuild.yml

* fix: update path to clean old assets workflow in autobuild.yml

* refactor: simplify clean_old_assets job by removing unnecessary steps

* refactor: enhance clean_old_assets job dependencies for improved execution flow

* Revert "refactor: enhance clean_old_assets job dependencies for improved execution flow"

This reverts commit 1a5108b5ad932e888a2b52f13616b483ebb9856e.

* feat: implement get_latest_tauri_commit script and update release versioning logic

* renew: remove whether to enable_random_port

---------

Co-authored-by: Tunglies <77394545+Tunglies@users.noreply.github.com>
2025-08-18 02:03:06 +08:00
Tunglies
537d27d10b fix: clippy errors with new config (#4428)
* refactor: improve code quality with clippy fixes and standardized logging

- Replace dangerous unwrap()/expect() calls with proper error handling
- Standardize logging from log:: to logging\! macro with Type:: classifications
- Fix app handle panics with graceful fallback patterns
- Improve error resilience across 35+ modules without breaking functionality
- Reduce clippy warnings from 300+ to 0 in main library code

* chore: update Cargo.toml configuration

* refactor: resolve all clippy warnings
- Fix Arc clone warnings using explicit Arc::clone syntax across 9 files
- Add #[allow(clippy::expect_used)] to test functions for appropriate expect usage
- Remove no-effect statements from debug code cleanup
- Apply clippy auto-fixes for dbg\! macro removals and path statements
- Achieve zero clippy warnings on all targets with -D warnings flag

* chore: update Cargo.toml clippy configuration

* refactor: simplify macOS job configuration and improve caching

* refactor: remove unnecessary async/await from service and proxy functions

* refactor: streamline pnpm installation in CI configuration

* refactor: simplify error handling and remove unnecessary else statements

* refactor: replace async/await with synchronous locks for core management

* refactor: add workflow_dispatch trigger to clippy job

* refactor: convert async functions to synchronous for service management

* refactor: convert async functions to synchronous for UWP tool invocation

* fix: change wrong logging

* refactor: convert proxy restoration functions to async

* Revert "refactor: convert proxy restoration functions to async"

This reverts commit b82f5d250b2af7151e4dfd7dd411630b34ed2c18.

* refactor: update proxy restoration functions to return Result types

* fix: handle errors during proxy restoration and update async function signatures

* fix: handle errors during proxy restoration and update async function signatures

* refactor: update restore_pac_proxy and restore_sys_proxy functions to async

* fix: convert restore_pac_proxy and restore_sys_proxy functions to async

* fix: await restore_sys_proxy calls in proxy restoration logic

* fix: suppress clippy warnings for unused async functions in proxy restoration

* fix: suppress clippy warnings for unused async functions in proxy restoration
2025-08-18 02:02:25 +08:00
renovate[bot]
a5fdd3f1a2 chore(deps): update npm dependencies (#4400)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-16 15:43:15 +08:00
renovate[bot]
be8a632a09 chore(deps): update rust crate async-trait to 0.1.89 (#4404)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-16 15:42:31 +08:00
ONEUI8
95c34f1df5 Update profile.rs (#4408)
Co-authored-by: ONEUI-7 <150009830+ONEUI-7@users.noreply.github.com>
2025-08-16 15:42:21 +08:00
TianHua Liu
8b53a7bd99 docs: how to develop locally (#4409) 2025-08-16 15:38:32 +08:00
Tunglies
b169ee8149 Revert "refactor: replace tokio Mutex with parking_lot Mutex for improved performance"
This reverts commit 9cc6dde999.
2025-08-16 04:24:03 +00:00
Tunglies
9cc6dde999 refactor: replace tokio Mutex with parking_lot Mutex for improved performance 2025-08-16 02:44:00 +08:00
Tunglies
cf1fbb63c4 refactor: replace log macros with logging utility for IPC monitoring 2025-08-15 20:41:45 +08:00
Tunglies
2aa629ff5d feat: refactor check_commit job to use new workflow inputs and streamline build checks
fix: update tag_name in check_commit job to use a static value

refactor: streamline latest.json handling and improve commit hash comparison logic

fix: update curl command to follow redirects when fetching latest.json
2025-08-14 20:42:26 +08:00
Tunglies
1e2b453c24 refactor: streamline clean old assets job by using reusable workflow
refactor: update clean old assets job to include steps section

refactor: add checkout step in clean_old_assets job for improved repository access

fix: correct path to clean old assets workflow in autobuild.yml

fix: update path to clean old assets workflow in autobuild.yml

refactor: simplify clean_old_assets job by removing unnecessary steps

refactor: enhance clean_old_assets job dependencies for improved execution flow

Revert "refactor: enhance clean_old_assets job dependencies for improved execution flow"

This reverts commit 1a5108b5ad932e888a2b52f13616b483ebb9856e.

feat: implement get_latest_tauri_commit script and update release versioning logic
2025-08-14 20:42:24 +08:00
renovate[bot]
331e4a4970 chore(deps): update cargo dependencies (#4360)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-13 16:27:14 +08:00
Tunglies
ee3ffaef1d refactor: simplify log warning message formatting and remove unused LogLevel enum 2025-08-13 16:25:36 +08:00
renovate[bot]
7cc3bc83a0 chore(deps): update npm dependencies (#4367)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-13 16:09:56 +08:00
Tunglies
558e28ddaf refactor: simplify log retrieval by removing level parameter and relying on server-side filtering #4293 2025-08-13 01:15:33 +08:00
Sergey Kharenko
45e69543b3 fix: auto-detect KDE/Plasma and disable GTK CSD to fix titlebar button freeze (#4380)
* fix: auto-detect KDE/Plasma and disable GTK CSD to fix titlebar button freeze

* chore: update UPDATELOG

---------

Co-authored-by: Tunglies <77394545+Tunglies@users.noreply.github.com>
2025-08-12 19:27:46 +08:00
wonfen
67ee41c5ea fix: ensure TUN is disabled before core exit 2025-08-11 23:19:49 +08:00
wonfen
160ed05178 fix: unify TUN status detection logic 2025-08-11 20:45:23 +08:00
renovate[bot]
5ecfe121b3 chore(deps): update dependency swr to ^2.3.5 (#4359)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-09 17:42:09 +08:00
Tunglies
a654137af9 fix: adjust cleanup function and reduce refresh interval to 1 second 2025-08-09 10:04:38 +08:00
Tunglies
cb591f19fb feat: add workflow to clean old release assets with versioning and dry run support 2025-08-08 23:50:52 +08:00
renovate[bot]
4823a348be chore(deps): update rust crate warp to 0.4.1 (#4342)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-08 23:49:09 +08:00
renovate[bot]
32da6ae808 chore(deps): update npm dependencies (#4347)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-08 23:48:55 +08:00
renovate[bot]
7eb70b0f0d chore(deps): update dependency @vitejs/plugin-react to v5 (#4348)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-08 23:47:18 +08:00
Tunglies
52e8e45daf fix: correct log level hierarchy and enhance log filtering logic 2025-08-08 23:30:24 +08:00
Tunglies
319c5b84fa Revert "feat: enhance log filtering by adding support for debug level and updating log hierarchy #4293"
This reverts commit a9cfb2cfaa.
2025-08-08 15:17:46 +08:00
Tunglies
6a93ff1fc1 fix: streamline autobuild versioning by utilizing outputs from check_commit job 2025-08-08 15:15:27 +08:00
Tunglies
6069b654d1 fix: ensure app quit notification is sent before quitting 2025-08-08 14:57:47 +08:00
Tunglies
2af8c32497 Revert "add: home card drag (#4215)"
This reverts commit 84989e0ea3.
2025-08-07 20:05:36 +08:00
Tunglies
a3957289c8 fix: enhance asset cleanup and versioning logic in autobuild workflow 2025-08-07 19:47:31 +08:00
Tunglies
3f5cd6c26a fix: resolve system proxy status detection and display inconsistency
- Fixed getSystemProxyActualState logic to properly check actual system status
- Unified system proxy state display across all components
- Replaced systemProxyIndicator with actualState for consistent UI display
- Updated components: setting-system, ProxyControlSwitches, proxy-tun-card
- Added entry to v2.4.0 changelog
2025-08-07 02:18:50 +08:00
renovate[bot]
8046dad56d chore(deps): update npm dependencies (#4324)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-06 22:47:01 +08:00
renovate[bot]
cdc1fd2d87 chore(deps): update cargo dependencies (#4329)
* chore(deps): update cargo dependencies

* fix: update warp dependency to include server feature

* fix: update return type of scheme_handler to String for consistency

* fix: add hyper-util dependency and update warp path handlers for response status

---------

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Tunglies <tunglies.dev@outlook.com>
Co-authored-by: Tunglies <77394545+Tunglies@users.noreply.github.com>
2025-08-06 22:46:46 +08:00
Tunglies
499626b946 fix: resolve intermittent startup deadlock issues
- Optimize configuration access locks to prevent race conditions
- Enhance UI monitoring thread with non-blocking lock operations
- Improve window creation timing and synchronization
- Add comprehensive deadlock detection and debugging logs
- Simplify code structure with better error handling patterns
- Update changelog with user-friendly descriptions
2025-08-06 22:12:00 +08:00
Tunglies
a9cfb2cfaa feat: enhance log filtering by adding support for debug level and updating log hierarchy #4293 2025-08-06 20:49:54 +08:00
Tunglies
7b976c16eb feat: enhance autobuild logic to check for Tauri-related changes and manage versioning 2025-08-06 20:34:57 +08:00
Tunglies
44e8a035aa fix: improve profile import validation and handle async lock correctly
fix: refactor import_profile function for improved readability and maintainability
2025-08-05 23:23:11 +08:00
Tunglies
6b57607926 fix: update autobuild versioning logic and improve asset checking regex 2025-08-05 23:12:15 +08:00
Tunglies
c3675e48fd fix: update tauri.conf.json version to use full version information including build metadata 2025-08-05 22:56:34 +08:00
Tunglies
a66393c609 feat: enhance profile import functionality with timeout and robust refresh strategy 2025-08-05 20:29:36 +08:00
Tunglies
776abaf56d fix: update service messages to use PRODUCTNAME variable for consistency 2025-08-05 20:09:35 +08:00
Tunglies
18808004f4 fix: update Cargo.toml version handling to use the provided version directly 2025-08-05 20:02:47 +08:00
Tunglies
db8761946d feat: add autobuild check logic workflow for version and source changes 2025-08-05 06:31:49 +00:00
renovate[bot]
2194a96145 chore(deps): update npm dependencies (#4294)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-04 14:57:24 +08:00
renovate[bot]
ecd396d70f chore(deps): update cargo dependencies (#4297)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-08-04 14:46:54 +08:00
Tunglies
5ab0438397 fix: duplicated build version checks 2025-08-04 14:44:51 +08:00
Tunglies
bcde047695 refactor: enhance logging in autobuild workflow with emojis for better visibility 2025-08-03 18:52:26 +08:00
Tunglies
109f5f9648 fix: resolve macOS window management issues and improve logging during app reopen events 2025-08-03 18:40:19 +08:00
Tunglies
d16c691c0f fix: cannot filter log level as expected 2025-08-03 09:52:27 +08:00
Tunglies
3eb2a5b3ef refactor: optimize timer and network management with atomic operations 2025-08-01 23:02:11 +08:00
Tunglies
569e2d5192 refactor: enhance traffic monitoring system with unified data management
 New Features:
- Implement unified traffic monitoring hook with reference counting
- Add intelligent data sampling and compression for better performance
- Introduce enhanced canvas traffic graph with mouse hover tooltips
- Add Y-axis labels and improved time axis display strategies
- Support multiple time ranges (1, 5, 10 minutes) with adaptive formatting

🚀 Performance Improvements:
- Smart data compression reduces memory usage by 80%
- Reference counting prevents unnecessary data collection when no components need it
- Debounced data updates reduce UI thrashing
- Optimized canvas rendering with controlled frame rates

🔧 Technical Improvements:
- Consolidate traffic monitoring logic into single hook (use-traffic-monitor.ts)
- Remove duplicate hook implementations
- Improve error handling with fallback to last valid data
- Add comprehensive traffic statistics and monitoring diagnostics
- Enhance tooltip system with precise data point highlighting

🐞 Bug Fixes:
- Fix connection speed display issues after IPC migration
- Improve data freshness indicators
- Better handling of network errors and stale data
- Consistent traffic parsing across all components

📝 Code Quality:
- Add TypeScript interfaces for better type safety
- Implement proper cleanup for animation frames and references
- Add error boundaries for traffic components
- Improve component naming and organization

This refactoring provides a more robust, performant, and feature-rich traffic monitoring system while maintaining backward compatibility.
2025-07-31 20:38:11 +08:00
renovate[bot]
0077157d28 chore(deps): update cargo dependencies (#4275)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-31 14:43:09 +08:00
renovate[bot]
9e19bab5a7 chore(deps): update dependency zustand to ^5.0.7 (#4286)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-31 14:42:45 +08:00
Tunglies
b1e2940db6 refactor: optimize singleton macro usage with Default trait implementations (#4279)
* refactor: implement DRY principle improvements across backend

Major DRY violations identified and addressed:

1. **IPC Stream Monitor Pattern**:
   - Created `utils/ipc_monitor.rs` with generic `IpcStreamMonitor` trait
   - Added `IpcMonitorManager` for common async task management patterns
   - Eliminates duplication across traffic.rs, memory.rs, and logs.rs

2. **Singleton Pattern Duplication**:
   - Created `utils/singleton.rs` with `singleton\!` and `singleton_with_logging\!` macros
   - Replaces 16+ duplicate singleton implementations across codebase
   - Provides consistent, tested patterns for global instances

3. **macOS Activation Policy Refactoring**:
   - Consolidated 3 duplicate methods into single parameterized `set_activation_policy()`
   - Eliminated code duplication while maintaining backward compatibility
   - Reduced maintenance burden for macOS-specific functionality

These improvements enhance maintainability, reduce bug potential, and ensure consistent patterns across the backend codebase.

* fix: resolve test failures and clippy warnings

- Fix doctest in singleton.rs by using rust,ignore syntax and proper code examples
- Remove unused time::Instant import from ipc_monitor.rs
- Add #[allow(dead_code)] attributes to future-use utility modules
- All 11 unit tests now pass successfully
- All clippy checks pass with -D warnings strict mode
- Documentation tests properly ignore example code that requires full context

* refactor: migrate code to use new utility tools (partial)

Progress on systematic migration to use created utility tools:

1. **Reorganized IPC Monitor**:
   - Moved ipc_monitor.rs to src-tauri/src/ipc/monitor.rs for better organization
   - Updated module structure to emphasize IPC relationship

2. **IpcManager Singleton Migration**:
   - Replaced manual OnceLock singleton pattern with singleton_with_logging\! macro
   - Simplified initialization code and added consistent logging
   - Removed unused imports (OnceLock, logging::Type)

3. **ProxyRequestCache Singleton Migration**:
   - Migrated from once_cell::sync::OnceCell to singleton\! macro
   - Cleaner, more maintainable singleton pattern
   - Consistent with project-wide singleton approach

These migrations demonstrate the utility and effectiveness of the created tools:
- Less boilerplate code
- Consistent patterns across codebase
- Easier maintenance and debugging

* feat: complete migration to new utility tools - phase 1

Successfully migrated core components to use the created utility tools:

- Moved `ipc_monitor.rs` to `src-tauri/src/ipc/monitor.rs`
- Better organization emphasizing IPC relationship
- Updated module exports and imports

- **IpcManager**: Migrated to `singleton_with_logging\!` macro
- **ProxyRequestCache**: Migrated to `singleton\!` macro
- Eliminated ~30 lines of boilerplate singleton code
- Consistent logging and initialization patterns

- Removed unused imports (OnceLock, once_cell, logging::Type)
- Cleaner, more maintainable code structure
- All 11 unit tests pass successfully
- Zero compilation warnings

- **Lines of code reduced**: ~50+ lines of boilerplate
- **Consistency improved**: Unified singleton patterns
- **Maintainability enhanced**: Centralized utility functions
- **Test coverage maintained**: 100% test pass rate

Remaining complex monitors (traffic, memory, logs) will be migrated to use the shared IPC monitoring patterns in the next phase, which requires careful refactoring of their streaming logic.

* refactor: complete singleton pattern migration to utility macros

Migrate remaining singleton patterns across the backend to use standardized
utility macros, achieving significant code reduction and consistency improvements.

- **LogsMonitor** (ipc/logs.rs): `OnceLock` → `singleton_with_logging\!`
- **Sysopt** (core/sysopt.rs): `OnceCell` → `singleton_lazy\!`
- **Tray** (core/tray/mod.rs): Complex `OnceCell` → `singleton_lazy\!`
- **Handle** (core/handle.rs): `OnceCell` → `singleton\!`
- **CoreManager** (core/core.rs): `OnceCell` → `singleton_lazy\!`
- **TrafficMonitor** (ipc/traffic.rs): `OnceLock` → `singleton_lazy_with_logging\!`
- **MemoryMonitor** (ipc/memory.rs): `OnceLock` → `singleton_lazy_with_logging\!`

- `singleton_lazy\!` - For complex initialization patterns
- `singleton_lazy_with_logging\!` - For complex initialization with logging

- **Code Reduction**: -33 lines of boilerplate singleton code
- **DRY Compliance**: Eliminated duplicate initialization patterns
- **Consistency**: Unified singleton approach across codebase
- **Maintainability**: Centralized singleton logic in utility macros
- **Zero Breaking Changes**: All existing APIs remain compatible

All tests pass and clippy warnings resolved.

* refactor: optimize singleton macros using Default trait implementation

Simplify singleton macro usage by implementing Default trait for complex
initialization patterns, significantly improving code readability and maintainability.

- **MemoryMonitor**: Move IPC client initialization to Default impl
- **TrafficMonitor**: Move IPC client initialization to Default impl
- **Sysopt**: Move Arc<Mutex> initialization to Default impl
- **Tray**: Move struct field initialization to Default impl
- **CoreManager**: Move Arc<Mutex> initialization to Default impl

```rust
singleton_lazy_with_logging\!(MemoryMonitor, INSTANCE, "MemoryMonitor", || {
    let ipc_path_buf = ipc_path().unwrap();
    let ipc_path = ipc_path_buf.to_str().unwrap_or_default();
    let client = IpcStreamClient::new(ipc_path).unwrap();
    MemoryMonitor::new(client)
});
```

```rust
impl Default for MemoryMonitor { /* initialization logic */ }
singleton_lazy_with_logging\!(MemoryMonitor, INSTANCE, "MemoryMonitor", MemoryMonitor::default);
```

- **Code Reduction**: -17 lines of macro closure code (80%+ simplification)
- **Separation of Concerns**: Initialization logic moved to proper Default impl
- **Readability**: Single-line macro calls vs multi-line closures
- **Testability**: Default implementations can be tested independently
- **Rust Idioms**: Using standard Default trait pattern
- **Performance**: Function calls more efficient than closures

All tests pass and clippy warnings resolved.

* refactor: implement MonitorData and StreamingParser traits for IPC monitors

* refactor: add timeout and retry_interval fields to IpcStreamMonitor; update TrafficMonitorState to derive Default

* refactor: migrate AppHandleManager to unified singleton control

- Replace manual singleton implementation with singleton_with_logging\! macro
- Remove std::sync::Once dependency in favor of OnceLock-based pattern
- Improve error handling for macOS activation policy methods
- Maintain thread safety with parking_lot::Mutex for AppHandle storage
- Add proper initialization check to prevent duplicate handle assignment
- Enhance logging consistency across AppHandleManager operations

* refactor: improve hotkey management with enum-based operations

- Add HotkeyFunction enum for type-safe function selection
- Add SystemHotkey enum for predefined system shortcuts
- Implement Display and FromStr traits for type conversions
- Replace string-based hotkey registration with enum methods
- Add register_system_hotkey() and unregister_system_hotkey() methods
- Maintain backward compatibility with string-based register() method
- Migrate singleton pattern to use singleton_with_logging\! macro
- Extract hotkey function execution logic into centralized execute_function()
- Update lib.rs to use new enum-based SystemHotkey operations
- Improve type safety and reduce string manipulation errors

Benefits:
- Type safety prevents invalid hotkey function names
- Centralized function execution reduces code duplication
- Enum-based API provides better IDE autocomplete support
- Maintains full backward compatibility with existing configurations

* fix: resolve LightWeightState initialization order panic

- Modify with_lightweight_status() to safely handle unmanaged state using try_state()
- Return Option<R> instead of R to gracefully handle state unavailability
- Update is_in_lightweight_mode() to use unwrap_or(false) for safe defaults
- Add state availability check in auto_lightweight_mode_init() before access
- Maintain singleton check priority while preventing early state access panics
- Fix clippy warnings for redundant pattern matching

Resolves runtime panic: "state() called before manage() for LightWeightState"

* refactor: add unreachable patterns for non-macOS in hotkey handling

* refactor: simplify SystemHotkey enum by removing redundant cfg attributes

* refactor: add macOS conditional compilation for system hotkey registration methods

* refactor: streamline hotkey unregistration and error logging for macOS
2025-07-31 14:35:13 +08:00
Tunglies
4113cd619c feat: add devcontainer configuration for Docker and Codespaces support 2025-07-31 11:39:14 +08:00
Tunglies
1f78d576a3 feat: migrate logs API from REST to IPC streaming (#4277)
* feat: migrate logs API from REST to IPC streaming

- Replace REST API `/logs` calls with IPC streaming implementation
- Add new `src-tauri/src/ipc/logs.rs` with `LogsMonitor` for real-time log streaming
- Implement duplicate stream prevention with level tracking
- Add frontend-backend communication via Tauri commands for log management
- Remove WebSocket compatibility, maintain IPC-only mode
- Fix duplicate monitoring task startup when toggling log service
- Add proper task lifecycle management with JoinHandle cleanup

* refactor: remove dead code from logs.rs to fix clippy warnings

- Remove unused `timestamp` field from LogItem struct
- Remove unused `client` field from LogsMonitor struct
- Remove unused methods: `is_fresh`, `get_current_monitoring_level`, `get_current_logs`
- Simplify LogsMonitor initialization by removing client dependency
- All clippy warnings with -D warnings now resolved

* refactor: extract duplicate fmt_bytes function to utils module

- Create new utils/format.rs module with fmt_bytes function
- Remove duplicate fmt_bytes implementations from traffic.rs and memory.rs
- Update imports to use shared utils::format::fmt_bytes
- Add comprehensive unit tests for fmt_bytes function
- Ensure DRY principle compliance and code maintainability
2025-07-30 23:11:21 +08:00
Tunglies
e2a548f6a5 fix: update bzip2 and sysinfo versions, remove unused libbz2-rs-sys package 2025-07-30 23:00:46 +08:00
renovate[bot]
2956725e66 chore(deps): update cargo dependencies (#4231)
* chore(deps): update cargo dependencies

* chore(deps): update cargo dependencies

* fix: sysinfo crate use limit features

* chore(deps): update npm dependencies (#4254)

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>

* chore: update sysinfo and zip dependency versions in Cargo.toml; add ignored dependencies in renovate.json

---------

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Tunglies <77394545+Tunglies@users.noreply.github.com>
2025-07-30 21:22:33 +08:00
Dyna
5131d37d58 fix: the issue of home page port being out of sync (#4271)
* synchronize port 7897

* Update UPDATELOG.md
2025-07-30 19:59:11 +08:00
renovate[bot]
1dfba159e0 chore(deps): update npm dependencies (#4254)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-29 20:12:54 +08:00
Tunglies
631718a138 feat: enhance autobuild workflow to check for existing assets before running 2025-07-29 20:09:55 +08:00
Tunglies
3172ab2d1c chore: update Mihomo version references in prebuild script 2025-07-29 19:55:25 +08:00
Dyna
6a6bde3764 refactor: external-controller-cors to always show (#4243)
* refactor: external-controller-cors to always show

* i18n: update translation

* Update UPDATELOG.md
2025-07-28 11:24:16 +08:00
wonfen
f756b37f97 i18n: add missing trans 2025-07-28 09:12:40 +08:00
Tunglies
a26c28517a chore: update dependencies and versions in Cargo files 2025-07-27 22:51:51 +08:00
Tunglies
894428642b fix: add sysinfo to ignored dependencies in renovate configuration 2025-07-27 08:42:44 +08:00
Tunglies
36d58d05b3 chore: downgrade sysinfo version 2025-07-27 08:30:23 +08:00
Tunglies
80de055fc2 fix: resolve connection speed display issues after IPC migration #4208 (#4229)
🐞 Bug Fixes:
- Fix missing upload/download rate display in connections page after IPC migration
- Implement real-time connection speed calculation based on data differences
- Add connection speed data types and calculation logic

🔧 Technical Improvements:
- Add connection speed calculation function in AppDataProvider
- Use useRef to store previous connection data for speed difference calculation
- Add curUpload and curDownload fields to connection data
- Optimize connection data processing flow for accurate speed calculations

📝 Changelog:
- Update relevant issue descriptions in UPDATELOG.md
2025-07-27 03:00:08 +08:00
Tunglies
4905b44c8a fix: resolve speed test functionality issue after IPC migration #4221, #4218 (#4228)
* chore(deps): update cargo dependencies

* fix: sysinfo crate use limit features

* fix: update headers-core dependency and kode-bridge version; enhance system monitor status validation

* fix: extend overall_status type in ISystemMonitorOverview to include 'healthy'

* refactor: update URL encoding strategy in IpcManager and cmdGetProxyDelay function

* fix: resolve speed test functionality issue after IPC migration

* fix: resolve speed test functionality issue after IPC migration #4221, #4218

---------

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-27 02:07:00 +08:00
renovate[bot]
02e19bb132 chore(deps): update dependency cross-env to v10 (#4214)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-26 22:55:16 +08:00
Dyna
84989e0ea3 add: home card drag (#4215)
* add home page control buttons

* Update UPDATELOG.md

* update zh en

* Revert "add home page control buttons"

This reverts commit e184843855932905facd55bae9847cf0b4e0bd46.

* Update UPDATELOG.md

* Revert "update zh en"

This reverts commit cbaddf59fb583af255d501f6784d3a4bd7ade8b0.

* remove unnecessary code

* fix: home.tsx

* add  react-beautiful-dnd script

* add home page drag

* fix: react-beautiful-dndA setup problem was encountered

* Revert "fix: react-beautiful-dndA setup problem was encountered"

This reverts commit 81c34dd4721cf7e60bed5bb1feeb43d230c4ca80.

* fix: react-beautiful-dndA setup problem was encountered

* Update types.d.ts

* Revert "Update types.d.ts"

This reverts commit 854046cf2f87ce2b2d66682dfbdf96d3638a103d.

* update @types/react-beautiful-dnd

* Update home.tsx

* Update UPDATELOG.md

* remove unnecessary components

* Revert "add  react-beautiful-dnd script"

This reverts commit e84d56922568a53084f3bf0b2322428d5102863d.

* Reapply "add  react-beautiful-dnd script"

This reverts commit 2379fd27c4b33a23773562657ca4d192de28522a.

* fix: home page error
2025-07-26 22:13:44 +08:00
Dyna
9661c5fd82 fix: Windows installer parameter problem (#4213)
* repair and delete the startup item

* fix: parameter usage issues

* Update UPDATELOG.md

* Update UPDATELOG.md

* fix: install
2025-07-26 07:52:21 +08:00
Tunglies
c8dfdb7a5a feat: add shared cache key for autobuild workflows 2025-07-25 18:36:27 +08:00
Tunglies
df5897c908 Squashed commit of the following:
commit 8928e6438277995f7167e400d4d77657a0ab0113
Author: Tunglies <77394545+Tunglies@users.noreply.github.com>
Date:   Fri Jul 25 18:25:13 2025 +0800

    feat: add release step to development workflow for versioning

commit 14085c4f7c8943669fdacae3bd2b6a07c0c0389a
Author: Tunglies <77394545+Tunglies@users.noreply.github.com>
Date:   Fri Jul 25 18:19:36 2025 +0800

    feat: add release commands for autobuild and deploytest to package.json and update version script
2025-07-25 18:25:40 +08:00
renovate[bot]
8d0af75145 chore(deps): update npm dependencies (#4098)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-25 17:58:20 +08:00
Mimi
36c6f0ca8d feat: unify switch component styles and BaseDialog behavior in the UI (#4183)
* feat: unify switch component styles in the UI

* fix: prettier

* feat: unify Item styles
2025-07-25 17:57:33 +08:00
Tunglies
3048a2ae08 feat: Implement configuration caching mechanism and force-refresh feature
 **New Features**:

* Added API and frontend support for forcibly refreshing Clash configuration cache
* Implemented a configuration cache TTL mechanism (60 seconds) to reduce redundant requests
* Introduced `ProxyRequestCache` system to manage backend data caching
* Automatically refresh frontend state after core operations to enhance user experience

🚀 **Performance Optimizations**:

* Increased Clash configuration refresh interval from 5 seconds to 60 seconds
* Force refresh cache after configuration updates to resolve data inconsistency
* Automatically trigger state refresh after core switch, start, stop, and restart actions

🔧 **Technical Improvements**:

* Removed unused dependencies: `ab_glyph`, `owned_ttf_parser`, `ttf-parser`
* Simplified WebSocket dependency management, unified `tungstenite` version
* Refactored configuration save validation process, improved merge file handling
* Improved error handling and overall user experience
2025-07-24 01:59:25 +08:00
Tunglies
27535c7bb7 refactor: remove unused dependencies from Cargo.toml and Cargo.lock (#4179) 2025-07-24 00:51:36 +08:00
Tunglies
15a1770ee9 feat: migrate mihomo to use kode-bridge IPC on Windows and Unix (#4051)
* Refactor Mihomo API integration and remove crate_mihomo_api

- Removed the `mihomo_api` crate and its dependencies from the project.
- Introduced `IpcManager` for handling IPC communication with Mihomo.
- Implemented IPC methods for managing proxies, connections, and configurations.
- Updated `MihomoManager` to utilize `IpcManager` instead of the removed crate.
- Added platform-specific IPC socket path handling for macOS, Linux, and Windows.
- Cleaned up related tests and configuration files.

* fix: remove duplicate permission entry in desktop capabilities

* refactor: replace MihomoManager with IpcManager and remove Mihomo module

* fix: restore tempfile dependency in dev-dependencies

* fix: update kode-bridge dependency to use git source from the dev branch

* feat: migrate mihomo to use kode-bridge IPC on Windows

This commit implements a comprehensive migration from legacy service IPC to the kode-bridge library for Windows IPC communication. Key changes include:

Replace service_ipc with kode-bridge IpcManager for all mihomo communications
Simplify proxy commands using new caching mechanism with ProxyRequestCache
Add Windows named pipe (\.\pipe\mihomo) and Unix socket IPC endpoint configuration
Update Tauri permissions and dependencies (dashmap, tauri-plugin-notification)
Add IPC logging support and improve error handling
Fix Windows IPC path handling in directory utilities
This migration enables better cross-platform IPC support and improved performance for mihomo proxy core communication.

* doc: add IPC communication with Mihomo kernel, removing Restful API dependency

* fix: standardize logging type naming from IPC to Ipc for consistency

* refactor: clean up and optimize code structure across multiple components and services

- Removed unnecessary comments and whitespace in various files.
- Improved code readability and maintainability by restructuring functions and components.
- Updated localization files for consistency and accuracy.
- Enhanced performance by optimizing hooks and utility functions.
- General code cleanup in settings, pages, and services to adhere to best practices.

* fix: simplify URL formatting in test_proxy_delay method

* fix: update kode-bridge dependency to version 0.1.3 and change source to crates.io

* fix: update macOS target versions in development workflow

* Revert "fix: update macOS target versions in development workflow"

This reverts commit b9831357e462e0f308d11a9a53cb718f98ae1295.

* feat: enhance IPC path handling for Unix systems and improve directory safety checks

* feat: add conditional compilation for Unix-specific IPC path handling

* chore: update cargo.lock

* feat: add external controller configuration and UI support

* Refactor proxy and connection management to use IPC-based commands

- Updated `get_proxies` function in `proxy.rs` to call the new IPC command.
- Renamed `get_refresh_proxies` to `get_proxies` in `ipc/general.rs` for consistency.
- Added new IPC commands for managing proxies, connections, and configurations in `cmds.ts`.
- Refactored API calls in various components to use the new IPC commands instead of HTTP requests.
- Improved error handling and response management in the new IPC functions.
- Cleaned up unused API functions in `api.ts` and redirected relevant calls to `cmds.ts`.
- Enhanced connection management features including health checks and updates for proxy providers.

* chore: update dependencies and improve error handling in IPC manager

* fix: downgrade zip dependency from 4.3.0 to 4.2.0

* feat: Implement traffic and memory data monitoring service

- Added `TrafficService` and `TrafficManager` to manage traffic and memory data collection.
- Introduced commands to get traffic and memory data, start and stop the traffic service.
- Integrated IPC calls for traffic and memory data retrieval in the frontend.
- Updated `AppDataProvider` and `EnhancedTrafficStats` components to utilize new data fetching methods.
- Removed WebSocket connections for traffic and memory data, replaced with IPC polling.
- Added logging for better traceability of data fetching and service status.

* refactor: unify external controller handling and improve IPC path resolution

* fix: replace direct IPC path retrieval with guard function for external controller

* fix: convert external controller IPC path to string for proper insertion in config map

* fix: update dependencies and improve IPC response handling

* fix: remove unnecessary unix conditional for ipc path import

* Refactor traffic and memory monitoring to use IPC stream; remove TrafficService and TrafficManager. Introduce new IPC-based data retrieval methods for traffic and memory, including formatted data and system overview. Update frontend components to utilize new APIs for enhanced data display and management.

* chore: bump crate rand version to 0.9.2

* feat: Implement enhanced traffic monitoring system with data compression and sampling

- Introduced `useTrafficMonitorEnhanced` hook for advanced traffic data management.
- Added `TrafficDataSampler` class for handling raw and compressed traffic data.
- Implemented reference counting to manage data collection based on component usage.
- Enhanced data validation with `SystemMonitorValidator` for API responses.
- Created diagnostic tools for monitoring performance and error tracking.
- Updated existing hooks to utilize the new enhanced monitoring features.
- Added utility functions for generating and formatting diagnostic reports.

* feat(ipc): improve URL encoding and error handling for IPC requests

- Add percent-encoding for URL paths to handle special characters properly
- Enhance error handling in update_proxy with proper logging
- Remove excessive debug logging to reduce noise
- Update kode-bridge dependency to v0.1.5
- Fix JSON parsing error handling in PUT requests

Changes include:
- Proper URL encoding for connection IDs, proxy names, and test URLs
- Enhanced error handling with fallback responses in updateProxy
- Comment out verbose debug logs in traffic monitoring and data validation
- Update dependency version for improved IPC functionality

* feat: major improvements in architecture, traffic monitoring, and data validation

* Refactor traffic graph components: Replace EnhancedTrafficGraph with EnhancedCanvasTrafficGraph, improve rendering performance, and enhance visual elements. Remove deprecated code and ensure compatibility with global data management.

* chore: update UPDATELOG.md for v2.4.0 release, refine traffic monitoring system details, and enhance IPC functionality

* chore: update UPDATELOG.md to reflect removal of deprecated MihomoManager and unify IPC control

* refactor: remove global traffic service testing method from cmds.ts

* Update src/components/home/enhanced-canvas-traffic-graph.tsx

* Update src/hooks/use-traffic-monitor-enhanced.ts

* Update src/components/layout/layout-traffic.tsx

* refactor: remove debug state management from LayoutTraffic component

---------
2025-07-24 00:49:55 +08:00
Dyna
f580409ade add: Verge Version copy button (#4164) 2025-07-23 11:11:34 +08:00
Tunglies
f209d17e3c chore: bump clash-verge-rev version to 2.4.0 2025-07-22 18:42:25 +08:00
Dyna
5047b0f614 fix: the problem of inconsistent color of system theme window (#4133)
* fix-title

* Update UPDATELOG.md
2025-07-22 11:01:23 +08:00
wonfen
ad623da86d chore: update i18n (#4128) 2025-07-22 08:41:02 +08:00
Tunglies
3ec7772e5d fix: update sysinfo and zip dependencies to specific versions in Cargo.toml 2025-07-22 08:05:19 +08:00
Tunglies
e6b57561e3 chore: update cargo.lock 2025-07-22 08:02:07 +08:00
Tunglies
3b1c2c95ec Revert "chore(deps): update rust crate zip to 4.3.0 (#4073)"
This reverts commit fed64bcd08.
2025-07-22 07:59:26 +08:00
Tunglies
364abafffe fix: update prerequisites link in CONTRIBUTING.md and update dependencies in Cargo.lock
- Updated the link for installing Rust and Node.js in CONTRIBUTING.md.
- Added new dependencies: wayland-backend, wayland-client, wayland-protocols.
- Updated versions for several dependencies including async-channel, async-io, async-process, and others.
- Removed deprecated dispatch2 version and added new dlib dependency.
- Updated various dependencies to their latest versions for improved stability and features.
2025-07-20 04:19:25 +08:00
Copilot
389281b96c Revert sysinfo dependency from 0.36.0 to 0.35.2 (#4086) 2025-07-16 09:34:03 +08:00
renovate[bot]
fed64bcd08 chore(deps): update rust crate zip to 4.3.0 (#4073)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-15 22:25:25 +08:00
Tunglies
17a5c4d094 chore: update sysinfo dependency to version 0.36.0 (#4082)
* chore: update sysinfo dependency to version 0.36.0

* fix: update sysinfo dependency to enable specific features
2025-07-15 22:23:24 +08:00
Dyna
f2317c7816 fix: log levels will not display their respective logs correctly (#4074)
* fix-log levels will not display their respective logs correctly

* fix-logs

* Update UPDATELOG.md

* Update UPDATELOG.md
2025-07-15 18:55:53 +08:00
Tunglies
7fce7ca62f Revert "chore(deps): update cargo dependencies (#4023)" (#4072)
This reverts commit 2c9aa4bca7.
2025-07-14 23:55:39 +08:00
Tunglies
167bcb222d fix: add platform-specific input options for workflow dispatch in dev.yml 2025-07-14 20:14:35 +08:00
Tunglies
108a599666 fix: add platform input options for workflow dispatch in dev.yml 2025-07-14 20:09:46 +08:00
Tunglies
57476741cd fix: improve logging format for old shortcut removal failure 2025-07-14 19:31:19 +08:00
Tunglies
077f3e79f8 fix: unify startup shortcut name to "Clash Verge" on Windows 2025-07-14 04:22:06 +08:00
renovate[bot]
2c9aa4bca7 chore(deps): update cargo dependencies (#4023)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-11 23:34:47 +08:00
renovate[bot]
9cf0f1e0a7 chore(deps): update npm dependencies (#4014)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-11 23:34:38 +08:00
Dyna
660c03a564 optimization: (fix: the crash when inputting special characters in the search box (#4026)) (#4029)
* add a test rule

* add translation support
2025-07-10 21:09:02 +08:00
Dyna
1d49d79af2 fix: the crash when inputting special characters in the search box (#4026)
* add special character escapes

* update logs
2025-07-10 13:34:37 +08:00
Dyna
4ccb17dde6 fix: the problem that the DNS override nameserver-policy field cannot correctly recognize multiple writing methods (#4011)
* fix: the problem that the DNS override nameserver-policy field cannot correctly recognize multiple writing methods

* update logs

* fix-dns-viewer.tsx
2025-07-08 17:05:17 +08:00
renovate[bot]
8bc433711d chore(deps): update rust crate tokio to 1.46.1 (#3989)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-07 18:04:40 +08:00
renovate[bot]
2e82bd8624 chore(deps): update npm dependencies (#3986)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-07 18:04:32 +08:00
❤是纱雾酱哟~
4b57d513a2 Chore (Renovate): Disable Dependency Dashboard (#3999)
* Chore (Renovate): Disable Dependency Dashboard

- Close #3658

* Fix (Renovate): Prettify JSON format according to "prettier"
2025-07-07 18:02:29 +08:00
Dyna
d36c3a83a9 fix:DNS override server support is left blank by default (#3997)
* fix:DNS server support being left blank by default

* update logs
2025-07-06 11:01:05 +08:00
Tunglies
ac3afe4dee fix: manage setup Mutex crash (#3995)
* Revert "Revert "refactor: Replace std::sync::Mutex with parking_lot::Mutex for improved performance and consistency in multiple files" (#3990)"

This reverts commit 667844aa12.

* refactor: Manage lightweight state in the app setup and clean up unused proxy client code

* refactor: Move macOS-specific Manager import under conditional compilation
2025-07-06 02:14:48 +08:00
Dyna
667844aa12 Revert "refactor: Replace std::sync::Mutex with parking_lot::Mutex for improved performance and consistency in multiple files" (#3990) 2025-07-05 12:17:02 +08:00
Tunglies
6d192233d1 refactor: Replace std::sync::Mutex with parking_lot::Mutex for improved performance and consistency in multiple files 2025-07-05 00:33:53 +08:00
Tunglies
764ef48fd1 refactor(Draft): Replace latest() with latest_ref() and data() with data_mut() in multiple files for improved mutability handling and consistency across the codebase (#3987)
* feat: add benchmarking for draft operations and new draft management structure

* Refactor Config Access: Replace `latest()` with `latest_ref()` and `data()` with `data_mut()` in multiple files for improved mutability handling and consistency across the codebase.

* refactor: remove DraftNew implementation and related benchmarks for cleaner codebase
2025-07-04 22:43:23 +08:00
renovate[bot]
3f95c81243 chore(deps): update rust crate tokio to 1.46.0 (#3943)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-04 18:51:25 +08:00
renovate[bot]
7c385d329d chore(deps): update npm dependencies (#3944)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-07-04 18:51:18 +08:00
Dyna
553438d219 fix windows installer (#3979)
* add loop detection Clash Verge Lnk

* add delete the window-state.json file .window-state.json file

* add cycle through the registry and delete useless registry entries

* add delete the specified registry path

* update logs
2025-07-04 12:28:15 +08:00
rebecca554owen
941921904c fix: remove unused redir-port and tproxy-port on Windows (#3969)
Directly remove redir-port and tproxy-port configurations on Windows platform
2025-07-04 12:27:59 +08:00
wonfen
7d808ed798 fix: prevent freeze caused by enabling Proxy Guard 2025-07-04 12:22:22 +08:00
❤是纱雾酱哟~
7704072a65 Chore (ISSUE_TEMPLATE): 进一步优化问题反馈模板 (#3975)
- 当前模板强依赖于用户把日志贴到正确的位置上
- 参考 WinGet 的问题模板,放弃日志折叠功能,强制用户以代码块形式粘贴日志
2025-07-03 18:41:46 +08:00
❤是纱雾酱哟~
9076baf1c8 Chore (ISSUE_TEMPLATE): 修复错误的 YAML 语法 (#3951)
- 修改「OS」分类的类型:"dropdown" -> "checkbox"
  - 一个问题可能不只影响一个特定的操作系统
- 修改「软件版本」分类,增加 "render" 属性
  - 这能让 GFM 直接把这部分内容渲染为 render 指定语法的代码片段
2025-07-03 14:30:46 +08:00
Sukka
954ff53d9b refactor: use React in its intended way (#3963)
* refactor: replace `useEffect` w/ `useLocalStorage`

* refactor: replace `useEffect` w/ `useSWR`

* refactor: replace `useEffect` and `useSWR`. clean up `useRef`

* refactor: use `requestIdleCallback`

* refactor: replace `useEffect` w/ `useMemo`

* fix: clean up `useEffect`

* refactor: replace `useEffect` w/ `useSWR`

* refactor: remove unused `useCallback`

* refactor: enhance performance and memory management in frontend processes

* refactor: improve pre-push script structure and readability

---------

Co-authored-by: Tunglies <77394545+Tunglies@users.noreply.github.com>
Co-authored-by: Tunglies <tunglies.dev@outlook.com>
2025-07-02 23:34:13 +08:00
Tunglies
37d268bb16 chore(deps): remove deprecated dependencies (#3960)
* chore(deps): remove deprecated dependencies

* refactor: simplify string formatting in make_key function
2025-07-02 03:07:50 +08:00
Tunglies
5befa90f81 chore(deps): update dashmap to version 7.0.0-rc2 2025-07-01 02:02:14 +08:00
Tunglies
6e3313f00d chore: remove redundant bug fix section from UPDATELOG.md 2025-06-30 21:11:01 +08:00
Dyna
7b5afb7afe Add cors (#3909)
* add external `cors` control panel

* optimize format

* fix-script.rs

* fix-service.rs

* fix-rs

async_proxy_query.rs

event_driven_proxy.rs

service_ipc.rs

service.rs

sysopt.rs

* lower the prettier version number to 3.5.3

* Revert "lower the prettier version number to 3.5.3"

This reverts commit 0f1c3dfa8abad9f451f32d2da6211e86341bda84.

* fix: prettier errors

* add developer environment detection and controlled the display of development environment URL

* submit required

* fix-external-controller-cors

* use the custom component ToggleButton to ensure a uniform button style

* fix-tsx

hotkey-viewer.tsx

external-controller-cors.tsx

* fix-bug_report.yml

* remove the annoying title

* fix-write overload problem

* Individual button settings

* fix-setting-clash.tsx

---------

Co-authored-by: Tunglies <77394545+Tunglies@users.noreply.github.com>
Co-authored-by: Tunglies <tunglies.dev@outlook.com>
2025-06-30 20:48:20 +08:00
renovate[bot]
fe13dad06f chore(deps): update rust crate ab_glyph to 0.2.30 (#3932)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-06-30 20:22:43 +08:00
renovate[bot]
b41bad0ae2 chore(deps): update npm dependencies (#3918)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-06-30 20:22:37 +08:00
Tunglies
18ef7f0272 feat: enhance proxy management with caching and refresh logic 2025-06-30 20:14:04 +08:00
345 changed files with 37434 additions and 22872 deletions

View File

@@ -3,3 +3,7 @@ linker = "aarch64-linux-gnu-gcc"
[target.armv7-unknown-linux-gnueabihf] [target.armv7-unknown-linux-gnueabihf]
linker = "arm-linux-gnueabihf-gcc" linker = "arm-linux-gnueabihf-gcc"
[alias]
clippy-all = "clippy --all-targets --all-features -- -D warnings"
clippy-only = "clippy --all-targets --features clippy -- -D warnings"

View File

@@ -0,0 +1,101 @@
{
"name": "Clash Verge Rev Development Environment",
"image": "mcr.microsoft.com/devcontainers/base:ubuntu-22.04",
"features": {
"ghcr.io/devcontainers/features/node:1": {
"version": "20"
},
"ghcr.io/devcontainers/features/rust:1": {
"version": "latest",
"profile": "default"
},
"ghcr.io/devcontainers/features/git:1": {},
"ghcr.io/devcontainers/features/github-cli:1": {},
"ghcr.io/devcontainers/features/docker-in-docker:2": {}
},
"customizations": {
"vscode": {
"extensions": [
"rust-lang.rust-analyzer",
"tauri-apps.tauri-vscode",
"ms-vscode.vscode-typescript-next",
"esbenp.prettier-vscode",
"bradlc.vscode-tailwindcss",
"ms-vscode.vscode-json",
"redhat.vscode-yaml",
"formulahendry.auto-rename-tag",
"ms-vscode.hexeditor",
"christian-kohler.path-intellisense",
"yzhang.markdown-all-in-one",
"streetsidesoftware.code-spell-checker",
"ms-vscode.vscode-eslint"
],
"settings": {
"rust-analyzer.cargo.features": ["verge-dev"],
"rust-analyzer.check.command": "clippy",
"editor.formatOnSave": true,
"editor.defaultFormatter": "esbenp.prettier-vscode",
"[rust]": {
"editor.defaultFormatter": "rust-lang.rust-analyzer"
},
"[json]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[yaml]": {
"editor.defaultFormatter": "redhat.vscode-yaml"
},
"[typescript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"[typescriptreact]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
}
}
}
},
"forwardPorts": [1420, 3000, 8080, 9090, 7890, 7891],
"portsAttributes": {
"1420": {
"label": "Tauri Dev Server",
"onAutoForward": "notify"
},
"3000": {
"label": "Vite Dev Server",
"onAutoForward": "notify"
},
"7890": {
"label": "Clash HTTP Proxy",
"onAutoForward": "silent"
},
"7891": {
"label": "Clash SOCKS Proxy",
"onAutoForward": "silent"
},
"9090": {
"label": "Clash API",
"onAutoForward": "silent"
}
},
"postCreateCommand": "bash .devcontainer/post-create.sh",
"mounts": [
"source=clash-verge-node-modules,target=${containerWorkspaceFolder}/node_modules,type=volume",
"source=clash-verge-cargo-registry,target=/usr/local/cargo/registry,type=volume",
"source=clash-verge-cargo-git,target=/usr/local/cargo/git,type=volume"
],
"containerEnv": {
"RUST_BACKTRACE": "1",
"NODE_OPTIONS": "--max-old-space-size=4096",
"TAURI_DEV_WATCHER_IGNORE_FILE": ".taurignore"
},
"remoteUser": "vscode",
"workspaceFolder": "/workspaces/clash-verge-rev",
"shutdownAction": "stopContainer"
}

View File

@@ -9,18 +9,21 @@ body:
attributes: attributes:
value: | value: |
## 在提交问题之前,请确认以下事项: ## 在提交问题之前,请确认以下事项:
1. 请 **确保** 您已经查阅了 [Clash Verge Rev 官方文档](https://clash-verge-rev.github.io/guide/term.html) 以及 [常见问题](https://clash-verge-rev.github.io/faq/windows.html) 1. 请 **确保** 您已经查阅了 [Clash Verge Rev 官方文档](https://clash-verge-rev.github.io/guide/term.html) 以及 [常见问题](https://clash-verge-rev.github.io/faq/windows.html)
2. 请 **确保** [已有的问题](https://github.com/clash-verge-rev/clash-verge-rev/issues?q=is%3Aissue) 中没有人提交过相似issue否则请在已有的issue下进行讨论 2. 请 **确保** [已有的问题](https://github.com/clash-verge-rev/clash-verge-rev/issues?q=is%3Aissue) 中没有人提交过相似issue否则请在已有的issue下进行讨论
3. 请 **务必** 给issue填写一个简洁明了的标题以便他人快速检索 3. 请 **务必** 给issue填写一个简洁明了的标题以便他人快速检索
4. 请 **务必** 查看 [Alpha](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/alpha) 版本更新日志 4. 请 **务必** 查看 [AutoBuild](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/autobuild) 版本更新日志
5. 请 **务必** 尝试 [Alpha](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/alpha) 版本,确定问题是否仍然存在 5. 请 **务必** 尝试 [AutoBuild](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/autobuild) 版本,确定问题是否仍然存在
6. 请 **务必** 按照模板规范详细描述问题以及尝试更新 Alpha 版本否则issue将会被直接关闭 6. 请 **务必** 按照模板规范详细描述问题以及尝试更新 Alpha 版本否则issue将会被直接关闭
## Before submitting the issue, please make sure of the following checklist: ## Before submitting the issue, please make sure of the following checklist:
1. Please make sure you have read the [Clash Verge Rev official documentation](https://clash-verge-rev.github.io/guide/term.html) and [FAQ](https://clash-verge-rev.github.io/faq/windows.html) 1. Please make sure you have read the [Clash Verge Rev official documentation](https://clash-verge-rev.github.io/guide/term.html) and [FAQ](https://clash-verge-rev.github.io/faq/windows.html)
2. Please make sure there is no similar issue in the [existing issues](https://github.com/clash-verge-rev/clash-verge-rev/issues?q=is%3Aissue), otherwise please discuss under the existing issue 2. Please make sure there is no similar issue in the [existing issues](https://github.com/clash-verge-rev/clash-verge-rev/issues?q=is%3Aissue), otherwise please discuss under the existing issue
3. Please be sure to fill in a concise and clear title for the issue so that others can quickly search 3. Please be sure to fill in a concise and clear title for the issue so that others can quickly search
4. Please be sure to check out [Alpha](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/alpha) version update log 4. Please be sure to check out [AutoBuild](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/autobuild) version update log
5. Please be sure to try the [Alpha](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/alpha) version to ensure that the problem still exists 5. Please be sure to try the [AutoBuild](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/autobuild) version to ensure that the problem still exists
6. Please describe the problem in detail according to the template specification and try to update the Alpha version, otherwise the issue will be closed 6. Please describe the problem in detail according to the template specification and try to update the Alpha version, otherwise the issue will be closed
- type: textarea - type: textarea
@@ -32,8 +35,9 @@ body:
required: true required: true
- type: textarea - type: textarea
attributes: attributes:
label: 软件版本 / Verge Version label: 软件版本 / CVR Version
description: 请提供Verge的具体版本,如果是alpha版本,请注明下载时间(精确到小时分钟) / Please provide the specific version of Verge. If it is an alpha version, please indicate the download time (accurate to hours and minutes) description: 请提供 CVR 的具体版本,如果是 AutoBuild 版本,请注明下载时间(精确到小时分钟) / Please provide the specific version of CVR. If it is an AutoBuild version, please indicate the download time (accurate to hours and minutes)
render: text
validations: validations:
required: true required: true
- type: textarea - type: textarea
@@ -42,13 +46,13 @@ body:
description: 请提供复现问题的步骤 / Steps to reproduce the behavior description: 请提供复现问题的步骤 / Steps to reproduce the behavior
validations: validations:
required: true required: true
- type: dropdown - type: checkboxes
attributes: attributes:
label: 操作系统 / OS label: 操作系统 / OS
options: options:
- Windows - label: Windows
- Linux - label: Linux
- MacOS - label: MacOS
validations: validations:
required: true required: true
- type: input - type: input
@@ -61,12 +65,9 @@ body:
attributes: attributes:
label: 日志(勿上传日志文件,请粘贴日志内容) / Log (Do not upload the log file, paste the log content directly) label: 日志(勿上传日志文件,请粘贴日志内容) / Log (Do not upload the log file, paste the log content directly)
description: 请提供完整或相关部分的Debug日志请在“软件左侧菜单”->“设置”->“日志等级”调整到debugVerge错误请把“杂项设置”->“app日志等级”调整到debug并重启Verge生效。日志文件在“软件左侧菜单”->“设置”->“日志目录”下) / Please provide a complete or relevant part of the Debug log (please adjust the "Log level" to debug in "Software left menu" -> "Settings" -> "Log level". If there is a Verge error, please adjust "Miscellaneous settings" -> "app log level" to debug, and restart Verge to take effect. The log file is under "Software left menu" -> "Settings" -> "Log directory") description: 请提供完整或相关部分的Debug日志请在“软件左侧菜单”->“设置”->“日志等级”调整到debugVerge错误请把“杂项设置”->“app日志等级”调整到debug并重启Verge生效。日志文件在“软件左侧菜单”->“设置”->“日志目录”下) / Please provide a complete or relevant part of the Debug log (please adjust the "Log level" to debug in "Software left menu" -> "Settings" -> "Log level". If there is a Verge error, please adjust "Miscellaneous settings" -> "app log level" to debug, and restart Verge to take effect. The log file is under "Software left menu" -> "Settings" -> "Log directory")
value: | placeholder: |
<details><summary>日志内容 / Log Content</summary> 日志目录一般位于 Clash Verge Rev 安装目录的 "logs/" 子目录中,请将日志内容粘贴到此处。
```log Log directory is usually located in the "logs/" subdirectory of the Clash Verge Rev installation directory, please paste the log content here.
<!-- 在此处粘贴完整日志 / Paste the full log here --> render: log
```
</details>
validations: validations:
required: true required: true

View File

@@ -1,3 +1,4 @@
blank_issues_enabled: false
contact_links: contact_links:
- name: 讨论交流 / Communication - name: 讨论交流 / Communication
url: https://t.me/clash_verge_rev url: https://t.me/clash_verge_rev

View File

@@ -12,13 +12,13 @@ body:
1. 请 **确保** 您已经查阅了 [Clash Verge Rev 官方文档](https://clash-verge-rev.github.io/guide/term.html) 确认软件不存在类似的功能 1. 请 **确保** 您已经查阅了 [Clash Verge Rev 官方文档](https://clash-verge-rev.github.io/guide/term.html) 确认软件不存在类似的功能
2. 请 **确保** [已有的问题](https://github.com/clash-verge-rev/clash-verge-rev/issues?q=is%3Aissue) 中没有人提交过相似issue否则请在已有的issue下进行讨论 2. 请 **确保** [已有的问题](https://github.com/clash-verge-rev/clash-verge-rev/issues?q=is%3Aissue) 中没有人提交过相似issue否则请在已有的issue下进行讨论
3. 请 **务必** 给issue填写一个简洁明了的标题以便他人快速检索 3. 请 **务必** 给issue填写一个简洁明了的标题以便他人快速检索
4. 请 **务必** 先下载 [Alpha](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/alpha) 版本测试,确保该功能还未实现 4. 请 **务必** 先下载 [AutoBuild](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/autobuild) 版本测试,确保该功能还未实现
5. 请 **务必** 按照模板规范详细描述问题否则issue将会被关闭 5. 请 **务必** 按照模板规范详细描述问题否则issue将会被关闭
## Before submitting the issue, please make sure of the following checklist: ## Before submitting the issue, please make sure of the following checklist:
1. Please make sure you have read the [Clash Verge Rev official documentation](https://clash-verge-rev.github.io/guide/term.html) to confirm that the software does not have similar functions 1. Please make sure you have read the [Clash Verge Rev official documentation](https://clash-verge-rev.github.io/guide/term.html) to confirm that the software does not have similar functions
2. Please make sure there is no similar issue in the [existing issues](https://github.com/clash-verge-rev/clash-verge-rev/issues?q=is%3Aissue), otherwise please discuss under the existing issue 2. Please make sure there is no similar issue in the [existing issues](https://github.com/clash-verge-rev/clash-verge-rev/issues?q=is%3Aissue), otherwise please discuss under the existing issue
3. Please be sure to fill in a concise and clear title for the issue so that others can quickly search 3. Please be sure to fill in a concise and clear title for the issue so that others can quickly search
4. Please be sure to download the [Alpha](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/alpha) version for testing to ensure that the function has not been implemented 4. Please be sure to download the [AutoBuild](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/autobuild) version for testing to ensure that the function has not been implemented
5. Please describe the problem in detail according to the template specification, otherwise the issue will be closed 5. Please describe the problem in detail according to the template specification, otherwise the issue will be closed
- type: textarea - type: textarea

View File

@@ -52,7 +52,7 @@ body:
- type: input - type: input
id: verge-version id: verge-version
attributes: attributes:
label: 软件版本 / Verge Version label: 软件版本 / CVR Version
description: 请提供你使用的 Verge 具体版本 / Please provide the specific version of Verge you are using description: 请提供你使用的 CVR 具体版本 / Please provide the specific version of CVR you are using
validations: validations:
required: true required: true

View File

@@ -25,6 +25,7 @@ env:
TAG_CHANNEL: Alpha TAG_CHANNEL: Alpha
CARGO_INCREMENTAL: 0 CARGO_INCREMENTAL: 0
RUST_BACKTRACE: short RUST_BACKTRACE: short
HUSKY: 0
concurrency: concurrency:
group: "${{ github.workflow }} - ${{ github.head_ref || github.ref }}" group: "${{ github.workflow }} - ${{ github.head_ref || github.ref }}"
@@ -294,6 +295,15 @@ jobs:
sudo apt-get update sudo apt-get update
sudo apt-get install -y libxslt1.1 libwebkit2gtk-4.1-dev libayatana-appindicator3-dev librsvg2-dev patchelf sudo apt-get install -y libxslt1.1 libwebkit2gtk-4.1-dev libayatana-appindicator3-dev librsvg2-dev patchelf
- name: Install x86 OpenSSL (macOS only)
if: matrix.target == 'x86_64-apple-darwin'
run: |
arch -x86_64 brew install openssl@3
echo "OPENSSL_DIR=$(brew --prefix openssl@3)" >> $GITHUB_ENV
echo "OPENSSL_INCLUDE_DIR=$(brew --prefix openssl@3)/include" >> $GITHUB_ENV
echo "OPENSSL_LIB_DIR=$(brew --prefix openssl@3)/lib" >> $GITHUB_ENV
echo "PKG_CONFIG_PATH=$(brew --prefix openssl@3)/lib/pkgconfig" >> $GITHUB_ENV
- name: Install Node - name: Install Node
uses: actions/setup-node@v4 uses: actions/setup-node@v4
with: with:

View File

@@ -0,0 +1,133 @@
name: Autobuild Check Logic Test
on:
workflow_dispatch:
jobs:
check_autobuild_logic:
name: Check Autobuild Should Run Logic
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 2
- name: Check if version or source changed, or assets already exist
id: check
run: |
# # 仅用于测试逻辑,手动触发自动跳过
# if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
# echo "should_run=skip" >> $GITHUB_OUTPUT
# echo "🟡 手动触发,跳过 should_run 检查"
# exit 0
# fi
# 确保有 HEAD~1
if ! git rev-parse HEAD~1 > /dev/null 2>&1; then
echo "should_run=true" >> $GITHUB_OUTPUT
echo "🟢 没有前一个提交,默认需要构建"
exit 0
fi
# 版本号变更判断
CURRENT_VERSION=$(jq -r '.version' package.json)
PREVIOUS_VERSION=$(git show HEAD~1:package.json | jq -r '.version' 2>/dev/null || echo "")
if [ "$CURRENT_VERSION" != "$PREVIOUS_VERSION" ]; then
echo "should_run=true" >> $GITHUB_OUTPUT
echo "🟢 版本号变更: $PREVIOUS_VERSION → $CURRENT_VERSION"
exit 0
fi
# 检查 src 变更(排除常见产物与缓存)
SRC_DIFF=$(git diff --name-only HEAD~1 HEAD -- src/ | grep -Ev '^src/(dist|build|node_modules|\.next|\.cache)' || true)
TAURI_DIFF=$(git diff --name-only HEAD~1 HEAD -- src-tauri/ | grep -Ev '^src-tauri/(target|node_modules|dist|\.cache)' || true)
if [ -n "$SRC_DIFF" ] || [ -n "$TAURI_DIFF" ]; then
echo "should_run=true" >> $GITHUB_OUTPUT
echo "🟢 源码变更 detected"
exit 0
fi
# 找到最后一个修改 Tauri 相关文件的 commit
echo "🔍 查找最后一个 Tauri 相关变更的 commit..."
LAST_TAURI_COMMIT=""
for commit in $(git rev-list HEAD --max-count=50); do
# 检查此 commit 是否修改了 Tauri 相关文件
CHANGED_FILES=$(git show --name-only --pretty=format: $commit | tr '\n' ' ')
HAS_TAURI_CHANGES=false
# 检查各个模式
if echo "$CHANGED_FILES" | grep -q "src/" && echo "$CHANGED_FILES" | grep -qvE "src/(dist|build|node_modules|\.next|\.cache)"; then
HAS_TAURI_CHANGES=true
elif echo "$CHANGED_FILES" | grep -qE "src-tauri/(src|Cargo\.(toml|lock)|tauri\..*\.conf\.json|build\.rs|capabilities)"; then
HAS_TAURI_CHANGES=true
fi
if [ "$HAS_TAURI_CHANGES" = true ]; then
LAST_TAURI_COMMIT=$(git rev-parse --short $commit)
break
fi
done
if [ -z "$LAST_TAURI_COMMIT" ]; then
echo "⚠️ 最近的 commits 中未找到 Tauri 相关变更,使用当前 commit"
LAST_TAURI_COMMIT=$(git rev-parse --short HEAD)
fi
CURRENT_COMMIT=$(git rev-parse --short HEAD)
echo "📝 最后 Tauri 相关 commit: $LAST_TAURI_COMMIT"
echo "📝 当前 commit: $CURRENT_COMMIT"
# 检查 autobuild release 是否存在
AUTOBUILD_RELEASE_EXISTS=$(gh release view "autobuild" --json id -q '.id' 2>/dev/null || echo "")
if [ -z "$AUTOBUILD_RELEASE_EXISTS" ]; then
echo "should_run=true" >> $GITHUB_OUTPUT
echo "🟢 没有 autobuild release需构建"
else
# 检查 latest.json 是否存在
LATEST_JSON_EXISTS=$(gh release view "autobuild" --json assets -q '.assets[] | select(.name == "latest.json") | .name' 2>/dev/null || echo "")
if [ -z "$LATEST_JSON_EXISTS" ]; then
echo "should_run=true" >> $GITHUB_OUTPUT
echo "🟢 没有 latest.json需构建"
else
# 下载并解析 latest.json 检查版本和 commit hash
echo "📥 下载 latest.json 检查版本..."
LATEST_JSON_URL=$(gh release view "autobuild" --json assets -q '.assets[] | select(.name == "latest.json") | .browser_download_url' 2>/dev/null)
if [ -n "$LATEST_JSON_URL" ]; then
LATEST_JSON_CONTENT=$(curl -s "$LATEST_JSON_URL" 2>/dev/null || echo "")
if [ -n "$LATEST_JSON_CONTENT" ]; then
LATEST_VERSION=$(echo "$LATEST_JSON_CONTENT" | jq -r '.version' 2>/dev/null || echo "")
echo "📦 最新 autobuild 版本: $LATEST_VERSION"
# 从版本字符串中提取 commit hash (格式: X.Y.Z+autobuild.MMDD.commit)
LATEST_COMMIT=$(echo "$LATEST_VERSION" | sed -n 's/.*+autobuild\.[0-9]\{4\}\.\([a-f0-9]*\)$/\1/p' || echo "")
echo "📝 最新 autobuild commit: $LATEST_COMMIT"
if [ "$LAST_TAURI_COMMIT" != "$LATEST_COMMIT" ]; then
echo "should_run=true" >> $GITHUB_OUTPUT
echo "🟢 Tauri commit hash 不匹配 ($LAST_TAURI_COMMIT != $LATEST_COMMIT),需构建"
else
echo "should_run=false" >> $GITHUB_OUTPUT
echo "🔴 相同 Tauri commit hash ($LAST_TAURI_COMMIT),不需构建"
fi
else
echo "should_run=true" >> $GITHUB_OUTPUT
echo "⚠️ 无法下载或解析 latest.json需构建"
fi
else
echo "should_run=true" >> $GITHUB_OUTPUT
echo "⚠️ 无法获取 latest.json 下载 URL需构建"
fi
fi
fi
- name: Output should_run result
run: |
echo "Result: ${{ steps.check.outputs.should_run }}"

View File

@@ -3,14 +3,15 @@ name: Auto Build
on: on:
workflow_dispatch: workflow_dispatch:
schedule: schedule:
# UTC+8 0,6,12,18 # UTC+8 12:00, 18:00 -> UTC 4:00, 10:00
- cron: "0 16,22,4,10 * * *" - cron: "0 4,10 * * *"
permissions: write-all permissions: write-all
env: env:
TAG_NAME: autobuild TAG_NAME: autobuild
TAG_CHANNEL: AutoBuild TAG_CHANNEL: AutoBuild
CARGO_INCREMENTAL: 0 CARGO_INCREMENTAL: 0
RUST_BACKTRACE: short RUST_BACKTRACE: short
HUSKY: 0
concurrency: concurrency:
group: "${{ github.workflow }} - ${{ github.head_ref || github.ref }}" group: "${{ github.workflow }} - ${{ github.head_ref || github.ref }}"
cancel-in-progress: ${{ github.ref != 'refs/heads/main' }} cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}
@@ -18,55 +19,10 @@ concurrency:
jobs: jobs:
check_commit: check_commit:
name: Check Commit Needs Build name: Check Commit Needs Build
runs-on: ubuntu-latest uses: clash-verge-rev/clash-verge-rev/.github/workflows/check-commit-needs-build.yml@dev
outputs: with:
should_run: ${{ steps.check.outputs.should_run }} tag_name: autobuild
steps: force_build: ${{ github.event_name == 'workflow_dispatch' }}
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 2
- name: Check if version changed or src changed
id: check
run: |
if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
echo "should_run=true" >> $GITHUB_OUTPUT
exit 0
fi
CURRENT_VERSION=$(cat package.json | jq -r '.version')
echo "Current version: $CURRENT_VERSION"
git checkout HEAD~1 package.json
PREVIOUS_VERSION=$(cat package.json | jq -r '.version')
echo "Previous version: $PREVIOUS_VERSION"
git checkout HEAD package.json
if [ "$CURRENT_VERSION" != "$PREVIOUS_VERSION" ]; then
echo "Version changed from $PREVIOUS_VERSION to $CURRENT_VERSION"
echo "should_run=true" >> $GITHUB_OUTPUT
exit 0
fi
CURRENT_SRC_HASH=$(git rev-parse HEAD:src)
PREVIOUS_SRC_HASH=$(git rev-parse HEAD~1:src 2>/dev/null || echo "")
CURRENT_TAURI_HASH=$(git rev-parse HEAD:src-tauri 2>/dev/null || echo "")
PREVIOUS_TAURI_HASH=$(git rev-parse HEAD~1:src-tauri 2>/dev/null || echo "")
echo "Current src hash: $CURRENT_SRC_HASH"
echo "Previous src hash: $PREVIOUS_SRC_HASH"
echo "Current tauri hash: $CURRENT_TAURI_HASH"
echo "Previous tauri hash: $PREVIOUS_TAURI_HASH"
if [ "$CURRENT_SRC_HASH" != "$PREVIOUS_SRC_HASH" ] || [ "$CURRENT_TAURI_HASH" != "$PREVIOUS_TAURI_HASH" ]; then
echo "Source directories changed"
echo "should_run=true" >> $GITHUB_OUTPUT
else
echo "Version and source directories unchanged"
echo "should_run=false" >> $GITHUB_OUTPUT
fi
update_tag: update_tag:
name: Update tag name: Update tag
@@ -95,9 +51,28 @@ jobs:
fi fi
shell: bash shell: bash
- uses: pnpm/action-setup@v4
name: Install pnpm
with:
run_install: false
- name: Install Node
uses: actions/setup-node@v4
with:
node-version: "22"
- name: Install dependencies
run: pnpm install --frozen-lockfile
- name: Release AutoBuild Version
run: pnpm release-version autobuild-latest
- name: Set Env - name: Set Env
run: | run: |
echo "BUILDTIME=$(TZ=Asia/Shanghai date)" >> $GITHUB_ENV echo "BUILDTIME=$(TZ=Asia/Shanghai date)" >> $GITHUB_ENV
VERSION=$(jq -r .version package.json)
echo "VERSION=$VERSION" >> $GITHUB_ENV
echo "DOWNLOAD_URL=https://github.com/clash-verge-rev/clash-verge-rev/releases/download/autobuild" >> $GITHUB_ENV
shell: bash shell: bash
- run: | - run: |
@@ -111,25 +86,24 @@ jobs:
cat > release.txt << EOF cat > release.txt << EOF
$UPDATE_LOGS $UPDATE_LOGS
## 我应该下载哪个版本? ## 下载地址
### MacOS
- MacOS intel芯片: x64.dmg
- MacOS apple M芯片: aarch64.dmg
### Linux
- Linux 64位: amd64.deb/amd64.rpm
- Linux arm64 architecture: arm64.deb/aarch64.rpm
- Linux armv7架构: armhf.deb/armhfp.rpm
### Windows (不再支持Win7) ### Windows (不再支持Win7)
#### 正常版本(推荐) #### 正常版本(推荐)
- 64位: x64-setup.exe - [64位(常用)](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_x64-setup_windows.exe) | [ARM64(不常用)](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_arm64-setup.exe)
- arm64架构: arm64-setup.exe
#### 便携版问题很多不再提供
#### 内置Webview2版(体积较大仅在企业版系统或无法安装webview2时使用) #### 内置Webview2版(体积较大仅在企业版系统或无法安装webview2时使用)
- 64位: x64_fixed_webview2-setup.exe - [64位](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_x64_fixed_webview2-setup.exe) | [ARM64](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_arm64_fixed_webview2-setup.exe)
- arm64架构: arm64_fixed_webview2-setup.exe
### macOS
- [Apple M芯片](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_aarch64_darwin.dmg) | [Intel芯片](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_x64_darwin.dmg)
### Linux
#### DEB包(Debian系) 使用 apt ./路径 安装
- [64位](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_amd64_linux.deb) | [ARM64](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_arm64.deb) | [ARMv7](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_armhf.deb)
#### RPM包(Redhat系) 使用 dnf ./路径 安装
- [64位](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}-1.x86_64_linux.rpm) | [ARM64](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}-1.aarch64.rpm) | [ARMv7](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}-1.armhfp.rpm)
### FAQ ### FAQ
- [常见问题](https://clash-verge-rev.github.io/faq/windows.html) - [常见问题](https://clash-verge-rev.github.io/faq/windows.html)
@@ -152,29 +126,18 @@ jobs:
clean_old_assets: clean_old_assets:
name: Clean Old Release Assets name: Clean Old Release Assets
runs-on: ubuntu-latest needs: [check_commit, update_tag]
needs: update_tag if: ${{ needs.check_commit.outputs.should_run == 'true' && needs.update_tag.result == 'success' }}
if: ${{ needs.update_tag.result == 'success' }}
steps: uses: clash-verge-rev/clash-verge-rev/.github/workflows/clean-old-assets.yml@dev
- name: Checkout repository with:
uses: actions/checkout@v4 tag_name: autobuild
- name: Remove old assets from release dry_run: false
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
TAG_NAME: ${{ env.TAG_NAME }}
run: |
VERSION=$(cat package.json | jq -r '.version')
assets=$(gh release view "$TAG_NAME" --json assets -q '.assets[].name' || true)
for asset in $assets; do
if [[ "$asset" != *"$VERSION"* ]]; then
echo "Deleting old asset: $asset"
gh release delete-asset "$TAG_NAME" "$asset" -y
fi
done
autobuild-x86-windows-macos-linux: autobuild-x86-windows-macos-linux:
name: Autobuild x86 Windows, MacOS and Linux name: Autobuild x86 Windows, MacOS and Linux
needs: update_tag needs: [check_commit, update_tag]
if: ${{ needs.check_commit.outputs.should_run == 'true' }}
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
@@ -206,6 +169,8 @@ jobs:
workspaces: src-tauri workspaces: src-tauri
cache-all-crates: true cache-all-crates: true
save-if: ${{ github.ref == 'refs/heads/dev' }} save-if: ${{ github.ref == 'refs/heads/dev' }}
shared-key: autobuild-${{ runner.os }}-${{ matrix.target }}
key: ${{ runner.os }}-${{ matrix.target }}-${{ hashFiles('src-tauri/Cargo.lock') }}
- name: Install dependencies (ubuntu only) - name: Install dependencies (ubuntu only)
if: matrix.os == 'ubuntu-22.04' if: matrix.os == 'ubuntu-22.04'
@@ -213,6 +178,15 @@ jobs:
sudo apt-get update sudo apt-get update
sudo apt-get install -y libxslt1.1 libwebkit2gtk-4.1-dev libayatana-appindicator3-dev librsvg2-dev patchelf sudo apt-get install -y libxslt1.1 libwebkit2gtk-4.1-dev libayatana-appindicator3-dev librsvg2-dev patchelf
- name: Install x86 OpenSSL (macOS only)
if: matrix.target == 'x86_64-apple-darwin'
run: |
arch -x86_64 brew install openssl@3
echo "OPENSSL_DIR=$(brew --prefix openssl@3)" >> $GITHUB_ENV
echo "OPENSSL_INCLUDE_DIR=$(brew --prefix openssl@3)/include" >> $GITHUB_ENV
echo "OPENSSL_LIB_DIR=$(brew --prefix openssl@3)/lib" >> $GITHUB_ENV
echo "PKG_CONFIG_PATH=$(brew --prefix openssl@3)/lib/pkgconfig" >> $GITHUB_ENV
- uses: pnpm/action-setup@v4 - uses: pnpm/action-setup@v4
name: Install pnpm name: Install pnpm
with: with:
@@ -224,15 +198,23 @@ jobs:
node-version: "22" node-version: "22"
cache: "pnpm" cache: "pnpm"
- name: Cache pnpm store
uses: actions/cache@v4
with:
path: ~/.pnpm-store
key: ${{ runner.os }}-pnpm-${{ hashFiles('pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-
- name: Pnpm install and check - name: Pnpm install and check
run: | run: |
pnpm i pnpm i
pnpm run prebuild ${{ matrix.target }} pnpm run prebuild ${{ matrix.target }}
- name: Release ${{ env.TAG_CHANNEL }} Version - name: Release ${{ env.TAG_CHANNEL }} Version
run: pnpm release-version ${{ env.TAG_NAME }} run: pnpm release-version autobuild-latest
- name: Tauri build - name: Tauri build for Windows-macOS-Linux
uses: tauri-apps/tauri-action@v0 uses: tauri-apps/tauri-action@v0
env: env:
NODE_OPTIONS: "--max_old_space_size=4096" NODE_OPTIONS: "--max_old_space_size=4096"
@@ -253,10 +235,12 @@ jobs:
prerelease: true prerelease: true
tauriScript: pnpm tauriScript: pnpm
args: --target ${{ matrix.target }} args: --target ${{ matrix.target }}
# includeUpdaterJson: true
autobuild-arm-linux: autobuild-arm-linux:
name: Autobuild ARM Linux name: Autobuild ARM Linux
needs: update_tag needs: [check_commit, update_tag]
if: ${{ needs.check_commit.outputs.should_run == 'true' }}
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
@@ -284,6 +268,8 @@ jobs:
workspaces: src-tauri workspaces: src-tauri
cache-all-crates: true cache-all-crates: true
save-if: ${{ github.ref == 'refs/heads/dev' }} save-if: ${{ github.ref == 'refs/heads/dev' }}
shared-key: autobuild-${{ runner.os }}-${{ matrix.target }}
key: ${{ runner.os }}-${{ matrix.target }}-${{ hashFiles('src-tauri/Cargo.lock') }}
- name: Install pnpm - name: Install pnpm
uses: pnpm/action-setup@v4 uses: pnpm/action-setup@v4
@@ -296,13 +282,21 @@ jobs:
node-version: "22" node-version: "22"
cache: "pnpm" cache: "pnpm"
- name: Cache pnpm store
uses: actions/cache@v4
with:
path: ~/.pnpm-store
key: ${{ runner.os }}-pnpm-${{ hashFiles('pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-
- name: Pnpm install and check - name: Pnpm install and check
run: | run: |
pnpm i pnpm i
pnpm run prebuild ${{ matrix.target }} pnpm run prebuild ${{ matrix.target }}
- name: Release ${{ env.TAG_CHANNEL }} Version - name: Release ${{ env.TAG_CHANNEL }} Version
run: pnpm release-version ${{ env.TAG_NAME }} run: pnpm release-version autobuild-latest
- name: Setup for linux - name: Setup for linux
run: | run: |
@@ -353,7 +347,7 @@ jobs:
gcc-arm-linux-gnueabihf \ gcc-arm-linux-gnueabihf \
g++-arm-linux-gnueabihf g++-arm-linux-gnueabihf
- name: Build for Linux - name: Tauri Build for Linux
run: | run: |
export PKG_CONFIG_ALLOW_CROSS=1 export PKG_CONFIG_ALLOW_CROSS=1
if [ "${{ matrix.target }}" == "aarch64-unknown-linux-gnu" ]; then if [ "${{ matrix.target }}" == "aarch64-unknown-linux-gnu" ]; then
@@ -389,7 +383,8 @@ jobs:
autobuild-x86-arm-windows_webview2: autobuild-x86-arm-windows_webview2:
name: Autobuild x86 and ARM Windows with WebView2 name: Autobuild x86 and ARM Windows with WebView2
needs: update_tag needs: [check_commit, update_tag]
if: ${{ needs.check_commit.outputs.should_run == 'true' }}
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
@@ -414,6 +409,8 @@ jobs:
workspaces: src-tauri workspaces: src-tauri
cache-all-crates: true cache-all-crates: true
save-if: ${{ github.ref == 'refs/heads/dev' }} save-if: ${{ github.ref == 'refs/heads/dev' }}
shared-key: autobuild-${{ runner.os }}-${{ matrix.target }}
key: ${{ runner.os }}-${{ matrix.target }}-${{ hashFiles('src-tauri/Cargo.lock') }}
- name: Install pnpm - name: Install pnpm
uses: pnpm/action-setup@v4 uses: pnpm/action-setup@v4
@@ -426,13 +423,21 @@ jobs:
node-version: "22" node-version: "22"
cache: "pnpm" cache: "pnpm"
- name: Cache pnpm store
uses: actions/cache@v4
with:
path: ~/.pnpm-store
key: ${{ runner.os }}-pnpm-${{ hashFiles('pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-
- name: Pnpm install and check - name: Pnpm install and check
run: | run: |
pnpm i pnpm i
pnpm run prebuild ${{ matrix.target }} pnpm run prebuild ${{ matrix.target }}
- name: Release ${{ env.TAG_CHANNEL }} Version - name: Release ${{ env.TAG_CHANNEL }} Version
run: pnpm release-version ${{ env.TAG_NAME }} run: pnpm release-version autobuild-latest
- name: Download WebView2 Runtime - name: Download WebView2 Runtime
run: | run: |
@@ -441,7 +446,7 @@ jobs:
Remove-Item .\src-tauri\tauri.windows.conf.json Remove-Item .\src-tauri\tauri.windows.conf.json
Rename-Item .\src-tauri\webview2.${{ matrix.arch }}.json tauri.windows.conf.json Rename-Item .\src-tauri\webview2.${{ matrix.arch }}.json tauri.windows.conf.json
- name: Tauri build - name: Tauri build for Windows
id: build id: build
uses: tauri-apps/tauri-action@v0 uses: tauri-apps/tauri-action@v0
env: env:
@@ -452,6 +457,7 @@ jobs:
with: with:
tauriScript: pnpm tauriScript: pnpm
args: --target ${{ matrix.target }} args: --target ${{ matrix.target }}
# includeUpdaterJson: true
- name: Rename - name: Rename
run: | run: |
@@ -486,3 +492,107 @@ jobs:
run: pnpm portable-fixed-webview2 ${{ matrix.target }} --${{ env.TAG_NAME }} run: pnpm portable-fixed-webview2 ${{ matrix.target }} --${{ env.TAG_NAME }}
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
notify-telegram:
name: Notify Telegram
runs-on: ubuntu-latest
needs:
[
update_tag,
autobuild-x86-windows-macos-linux,
autobuild-arm-linux,
autobuild-x86-arm-windows_webview2,
]
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Fetch UPDATE logs
id: fetch_update_logs
run: |
if [ -f "UPDATELOG.md" ]; then
UPDATE_LOGS=$(awk '/^## v/{if(flag) exit; flag=1} flag' UPDATELOG.md)
if [ -n "$UPDATE_LOGS" ]; then
echo "Found update logs"
echo "UPDATE_LOGS<<EOF" >> $GITHUB_ENV
echo "$UPDATE_LOGS" >> $GITHUB_ENV
echo "EOF" >> $GITHUB_ENV
else
echo "No update sections found in UPDATELOG.md"
fi
else
echo "UPDATELOG.md file not found"
fi
shell: bash
- name: Install Node
uses: actions/setup-node@v4
with:
node-version: "22"
- uses: pnpm/action-setup@v4
name: Install pnpm
with:
run_install: false
- name: Install dependencies
run: pnpm install --frozen-lockfile
- name: Release AutoBuild Version
run: pnpm release-version autobuild-latest
- name: Get Version and Release Info
run: |
sudo apt-get update
sudo apt-get install jq
echo "VERSION=$(cat package.json | jq '.version' | tr -d '"')" >> $GITHUB_ENV
echo "DOWNLOAD_URL=https://github.com/clash-verge-rev/clash-verge-rev/releases/download/autobuild" >> $GITHUB_ENV
echo "BUILDTIME=$(TZ=Asia/Shanghai date)" >> $GITHUB_ENV
- name: Generate release.txt
run: |
if [ -z "$UPDATE_LOGS" ]; then
echo "No update logs found, using default message"
UPDATE_LOGS="More new features are now supported. Check for detailed changelog soon."
else
echo "Using found update logs"
fi
cat > release.txt << EOF
$UPDATE_LOGS
## 下载地址
### Windows (不再支持Win7)
#### 正常版本(推荐)
- [64位(常用)](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_x64-setup_windows.exe) | [ARM64(不常用)](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_arm64-setup_windows.exe)
#### 内置Webview2版(体积较大仅在企业版系统或无法安装webview2时使用)
- [64位](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_x64_fixed_webview2-setup.exe) | [ARM64](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_arm64_fixed_webview2-setup.exe)
### macOS
- [Apple M芯片](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_aarch64_darwin.dmg) | [Intel芯片](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_x64_darwin.dmg)
### Linux
#### DEB包(Debian系) 使用 apt ./路径 安装
- [64位](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_amd64_linux.deb) | [ARM64](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_arm64.deb) | [ARMv7](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_armhf.deb)
#### RPM包(Redhat系) 使用 dnf ./路径 安装
- [64位](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}-1.x86_64_linux.rpm) | [ARM64](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}-1.aarch64.rpm) | [ARMv7](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}-1.armhfp.rpm)
### FAQ
- [常见问题](https://clash-verge-rev.github.io/faq/windows.html)
### 稳定机场VPN推荐
- [狗狗加速](https://verge.dginv.click/#/register?code=oaxsAGo6)
Created at ${{ env.BUILDTIME }}.
EOF
- name: Send Telegram Notification
run: node scripts/telegram.mjs
env:
TELEGRAM_BOT_TOKEN: ${{ secrets.TELEGRAM_BOT_TOKEN }}
BUILD_TYPE: autobuild
VERSION: ${{ env.VERSION }}
DOWNLOAD_URL: ${{ env.DOWNLOAD_URL }}

View File

@@ -0,0 +1,159 @@
name: Check Commit Needs Build
on:
workflow_dispatch:
inputs:
tag_name:
description: "Release tag name to check against (default: autobuild)"
required: false
default: "autobuild"
type: string
force_build:
description: "Force build regardless of checks"
required: false
default: false
type: boolean
workflow_call:
inputs:
tag_name:
description: "Release tag name to check against (default: autobuild)"
required: false
default: "autobuild"
type: string
force_build:
description: "Force build regardless of checks"
required: false
default: false
type: boolean
outputs:
should_run:
description: "Whether the build should run"
value: ${{ jobs.check_commit.outputs.should_run }}
last_tauri_commit:
description: "The last commit hash with Tauri-related changes"
value: ${{ jobs.check_commit.outputs.last_tauri_commit }}
autobuild_version:
description: "The generated autobuild version string"
value: ${{ jobs.check_commit.outputs.autobuild_version }}
permissions:
contents: read
actions: read
env:
TAG_NAME: ${{ inputs.tag_name || 'autobuild' }}
jobs:
check_commit:
name: Check Commit Needs Build
runs-on: ubuntu-latest
outputs:
should_run: ${{ steps.check.outputs.should_run }}
last_tauri_commit: ${{ steps.check.outputs.last_tauri_commit }}
autobuild_version: ${{ steps.check.outputs.autobuild_version }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 50
- name: Check if version changed or src changed
id: check
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
# Force build if requested
if [ "${{ inputs.force_build }}" == "true" ]; then
echo "🚀 Force build requested"
echo "should_run=true" >> $GITHUB_OUTPUT
exit 0
fi
CURRENT_VERSION=$(cat package.json | jq -r '.version')
echo "📦 Current version: $CURRENT_VERSION"
git checkout HEAD~1 package.json
PREVIOUS_VERSION=$(cat package.json | jq -r '.version')
echo "📦 Previous version: $PREVIOUS_VERSION"
git checkout HEAD package.json
if [ "$CURRENT_VERSION" != "$PREVIOUS_VERSION" ]; then
echo "✅ Version changed from $PREVIOUS_VERSION to $CURRENT_VERSION"
echo "should_run=true" >> $GITHUB_OUTPUT
exit 0
fi
# Use get_latest_tauri_commit.bash to find the latest Tauri-related commit
echo "🔍 Finding last commit with Tauri-related changes using script..."
# Make script executable
chmod +x scripts-workflow/get_latest_tauri_commit.bash
# Get the latest Tauri-related commit hash (full hash)
LAST_TAURI_COMMIT_FULL=$(./scripts-workflow/get_latest_tauri_commit.bash)
if [[ $? -ne 0 ]] || [[ -z "$LAST_TAURI_COMMIT_FULL" ]]; then
echo "❌ Failed to get Tauri-related commit, using current commit"
LAST_TAURI_COMMIT_FULL=$(git rev-parse HEAD)
fi
# Get short hash for display and version tagging
LAST_TAURI_COMMIT=$(git rev-parse --short "$LAST_TAURI_COMMIT_FULL")
echo "📝 Last Tauri-related commit: $LAST_TAURI_COMMIT"
# Generate autobuild version using autobuild-latest format
CURRENT_BASE_VERSION=$(echo "$CURRENT_VERSION" | sed -E 's/-(alpha|beta|rc)(\.[0-9]+)?//g' | sed -E 's/\+[a-zA-Z0-9.-]+//g')
MONTH=$(TZ=Asia/Shanghai date +%m)
DAY=$(TZ=Asia/Shanghai date +%d)
AUTOBUILD_VERSION="${CURRENT_BASE_VERSION}+autobuild.${MONTH}${DAY}.${LAST_TAURI_COMMIT}"
echo "🏷️ Autobuild version: $AUTOBUILD_VERSION"
echo "📝 Last Tauri commit: $LAST_TAURI_COMMIT"
# Set outputs for other jobs to use
echo "last_tauri_commit=$LAST_TAURI_COMMIT" >> $GITHUB_OUTPUT
echo "autobuild_version=$AUTOBUILD_VERSION" >> $GITHUB_OUTPUT
# Check if autobuild release exists
echo "🔍 Checking autobuild release and latest.json..."
AUTOBUILD_RELEASE_EXISTS=$(gh release view "${{ env.TAG_NAME }}" --json id -q '.id' 2>/dev/null || echo "")
if [ -z "$AUTOBUILD_RELEASE_EXISTS" ]; then
echo "✅ No autobuild release exists, build needed"
echo "should_run=true" >> $GITHUB_OUTPUT
else
# Check if latest.json exists in the release
LATEST_JSON_EXISTS=$(gh release view "${{ env.TAG_NAME }}" --json assets -q '.assets[] | select(.name == "latest.json") | .name' 2>/dev/null || echo "")
if [ -z "$LATEST_JSON_EXISTS" ]; then
echo "✅ No latest.json found in autobuild release, build needed"
echo "should_run=true" >> $GITHUB_OUTPUT
else
# Download and parse latest.json to check version and commit hash
echo "📥 Downloading latest.json to check version..."
LATEST_JSON_URL="https://github.com/clash-verge-rev/clash-verge-rev/releases/download/autobuild/latest.json"
LATEST_JSON_CONTENT=$(curl -sL "$LATEST_JSON_URL" 2>/dev/null || echo "")
if [ -n "$LATEST_JSON_CONTENT" ]; then
LATEST_VERSION=$(echo "$LATEST_JSON_CONTENT" | jq -r '.version' 2>/dev/null || echo "")
echo "📦 Latest autobuild version: $LATEST_VERSION"
# Extract commit hash from version string (format: X.Y.Z+autobuild.MMDD.commit)
LATEST_COMMIT=$(echo "$LATEST_VERSION" | sed -n 's/.*+autobuild\.[0-9]\{4\}\.\([a-f0-9]*\)$/\1/p' || echo "")
echo "📝 Latest autobuild commit: $LATEST_COMMIT"
if [ "$LAST_TAURI_COMMIT" != "$LATEST_COMMIT" ]; then
echo "✅ Tauri commit hash mismatch ($LAST_TAURI_COMMIT != $LATEST_COMMIT), build needed"
echo "should_run=true" >> $GITHUB_OUTPUT
else
echo "❌ Same Tauri commit hash ($LAST_TAURI_COMMIT), no build needed"
echo "should_run=false" >> $GITHUB_OUTPUT
fi
else
echo "⚠️ Failed to download or parse latest.json, build needed"
echo "should_run=true" >> $GITHUB_OUTPUT
fi
fi
fi

220
.github/workflows/clean-old-assets.yml vendored Normal file
View File

@@ -0,0 +1,220 @@
name: Clean Old Assets
on:
workflow_dispatch:
inputs:
tag_name:
description: "Release tag name to clean (default: autobuild)"
required: false
default: "autobuild"
type: string
dry_run:
description: "Dry run mode (only show what would be deleted)"
required: false
default: false
type: boolean
workflow_call:
inputs:
tag_name:
description: "Release tag name to clean (default: autobuild)"
required: false
default: "autobuild"
type: string
dry_run:
description: "Dry run mode (only show what would be deleted)"
required: false
default: false
type: boolean
permissions: write-all
env:
TAG_NAME: ${{ inputs.tag_name || 'autobuild' }}
TAG_CHANNEL: AutoBuild
jobs:
check_current_version:
name: Check Current Version and Commit
runs-on: ubuntu-latest
outputs:
current_version: ${{ steps.check.outputs.current_version }}
last_tauri_commit: ${{ steps.check.outputs.last_tauri_commit }}
autobuild_version: ${{ steps.check.outputs.autobuild_version }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 50
- name: Get current version and find last Tauri commit
id: check
run: |
CURRENT_VERSION=$(cat package.json | jq -r '.version')
echo "📦 Current version: $CURRENT_VERSION"
# Find the last commit that changed Tauri-related files
echo "🔍 Finding last commit with Tauri-related changes..."
# Define patterns for Tauri-related files
TAURI_PATTERNS="src/ src-tauri/src src-tauri/Cargo.toml src-tauri/Cargo.lock src-tauri/tauri.*.conf.json src-tauri/build.rs src-tauri/capabilities"
# Get the last commit that changed any of these patterns (excluding build artifacts)
LAST_TAURI_COMMIT=""
for commit in $(git rev-list HEAD --max-count=50); do
# Check if this commit changed any Tauri-related files
CHANGED_FILES=$(git show --name-only --pretty=format: $commit | tr '\n' ' ')
HAS_TAURI_CHANGES=false
# Check each pattern
if echo "$CHANGED_FILES" | grep -q "src/" && echo "$CHANGED_FILES" | grep -qvE "src/(dist|build|node_modules|\.next|\.cache)"; then
HAS_TAURI_CHANGES=true
elif echo "$CHANGED_FILES" | grep -qE "src-tauri/(src|Cargo\.(toml|lock)|tauri\..*\.conf\.json|build\.rs|capabilities)"; then
HAS_TAURI_CHANGES=true
fi
if [ "$HAS_TAURI_CHANGES" = true ]; then
LAST_TAURI_COMMIT=$(git rev-parse --short $commit)
break
fi
done
if [ -z "$LAST_TAURI_COMMIT" ]; then
echo "⚠️ No Tauri-related changes found in recent commits, using current commit"
LAST_TAURI_COMMIT=$(git rev-parse --short HEAD)
fi
echo "📝 Last Tauri-related commit: $LAST_TAURI_COMMIT"
echo "📝 Current commit: $(git rev-parse --short HEAD)"
# Generate autobuild version for consistency
CURRENT_BASE_VERSION=$(echo "$CURRENT_VERSION" | sed -E 's/-(alpha|beta|rc)(\.[0-9]+)?//g' | sed -E 's/\+[a-zA-Z0-9.-]+//g')
MONTH=$(TZ=Asia/Shanghai date +%m)
DAY=$(TZ=Asia/Shanghai date +%d)
AUTOBUILD_VERSION="${CURRENT_BASE_VERSION}+autobuild.${MONTH}${DAY}.${LAST_TAURI_COMMIT}"
echo "🏷️ Current autobuild version: $AUTOBUILD_VERSION"
# Set outputs for other jobs to use
echo "current_version=$CURRENT_VERSION" >> $GITHUB_OUTPUT
echo "last_tauri_commit=$LAST_TAURI_COMMIT" >> $GITHUB_OUTPUT
echo "autobuild_version=$AUTOBUILD_VERSION" >> $GITHUB_OUTPUT
clean_old_assets:
name: Clean Old Release Assets
runs-on: ubuntu-latest
needs: check_current_version
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Clean old assets from release
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
TAG_NAME: ${{ env.TAG_NAME }}
DRY_RUN: ${{ inputs.dry_run }}
run: |
# Use values from check_current_version job
CURRENT_AUTOBUILD_VERSION="${{ needs.check_current_version.outputs.autobuild_version }}"
LAST_TAURI_COMMIT="${{ needs.check_current_version.outputs.last_tauri_commit }}"
CURRENT_VERSION="${{ needs.check_current_version.outputs.current_version }}"
echo "📦 Current version: $CURRENT_VERSION"
echo "📦 Current autobuild version: $CURRENT_AUTOBUILD_VERSION"
echo "📝 Last Tauri commit: $LAST_TAURI_COMMIT"
echo "🏷️ Target tag: $TAG_NAME"
echo "🔍 Dry run mode: $DRY_RUN"
# Check if release exists
RELEASE_EXISTS=$(gh release view "$TAG_NAME" --json id -q '.id' 2>/dev/null || echo "")
if [ -z "$RELEASE_EXISTS" ]; then
echo "❌ Release '$TAG_NAME' not found"
exit 1
fi
echo "✅ Found release '$TAG_NAME'"
# Get all assets
echo "📋 Getting list of all assets..."
assets=$(gh release view "$TAG_NAME" --json assets -q '.assets[].name' || true)
if [ -z "$assets" ]; then
echo " No assets found in release '$TAG_NAME'"
exit 0
fi
echo "📋 Found assets:"
echo "$assets" | sed 's/^/ - /'
# Count assets to keep and delete
ASSETS_TO_KEEP=""
ASSETS_TO_DELETE=""
for asset in $assets; do
# Keep assets that match current autobuild version or are non-versioned files (like latest.json)
if [[ "$asset" == *"$CURRENT_AUTOBUILD_VERSION"* ]] || [[ "$asset" == "latest.json" ]]; then
ASSETS_TO_KEEP="$ASSETS_TO_KEEP$asset\n"
else
ASSETS_TO_DELETE="$ASSETS_TO_DELETE$asset\n"
fi
done
echo ""
echo "🔒 Assets to keep (current version: $CURRENT_AUTOBUILD_VERSION):"
if [ -n "$ASSETS_TO_KEEP" ]; then
echo -e "$ASSETS_TO_KEEP" | grep -v '^$' | sed 's/^/ - /'
else
echo " - None"
fi
echo ""
echo "🗑️ Assets to delete:"
if [ -n "$ASSETS_TO_DELETE" ]; then
echo -e "$ASSETS_TO_DELETE" | grep -v '^$' | sed 's/^/ - /'
else
echo " - None"
echo " No old assets to clean"
exit 0
fi
if [ "$DRY_RUN" = "true" ]; then
echo ""
echo "🔍 DRY RUN MODE: No assets will actually be deleted"
echo " To actually delete these assets, run this workflow again with dry_run=false"
else
echo ""
echo "🗑️ Deleting old assets..."
DELETED_COUNT=0
FAILED_COUNT=0
for asset in $assets; do
# Skip assets that should be kept
if [[ "$asset" == *"$CURRENT_AUTOBUILD_VERSION"* ]] || [[ "$asset" == "latest.json" ]]; then
continue
fi
echo " Deleting: $asset"
if gh release delete-asset "$TAG_NAME" "$asset" -y 2>/dev/null; then
DELETED_COUNT=$((DELETED_COUNT + 1))
else
echo " ⚠️ Failed to delete $asset"
FAILED_COUNT=$((FAILED_COUNT + 1))
fi
done
echo ""
echo "📊 Cleanup summary:"
echo " - Deleted: $DELETED_COUNT assets"
if [ $FAILED_COUNT -gt 0 ]; then
echo " - Failed: $FAILED_COUNT assets"
fi
echo " - Kept: $(echo -e "$ASSETS_TO_KEEP" | grep -v '^$' | wc -l) assets"
if [ $FAILED_COUNT -gt 0 ]; then
echo "⚠️ Some assets failed to delete. Please check the logs above."
exit 1
else
echo "✅ Cleanup completed successfully!"
fi
fi

View File

@@ -1,63 +0,0 @@
name: Clippy Lint
on:
pull_request:
jobs:
clippy:
strategy:
fail-fast: false
matrix:
include:
- os: windows-latest
target: x86_64-pc-windows-msvc
- os: macos-latest
target: aarch64-apple-darwin
- os: ubuntu-22.04
target: x86_64-unknown-linux-gnu
runs-on: ${{ matrix.os }}
steps:
- name: Checkout Repository
uses: actions/checkout@v4
- name: Install Rust Stable
uses: dtolnay/rust-toolchain@stable
- name: Add Rust Target
run: rustup target add ${{ matrix.target }}
- name: Rust Cache
uses: Swatinem/rust-cache@v2
with:
workspaces: src-tauri
save-if: false
- name: Install dependencies (ubuntu only)
if: matrix.os == 'ubuntu-22.04'
run: |
sudo apt-get update
sudo apt-get install -y libxslt1.1 libwebkit2gtk-4.1-dev libayatana-appindicator3-dev librsvg2-dev patchelf
- name: Install Node
uses: actions/setup-node@v4
with:
node-version: "22"
- name: Install pnpm
uses: pnpm/action-setup@v4
with:
run_install: false
- name: Pnpm install and check
run: |
pnpm i
pnpm run prebuild ${{ matrix.target }}
- name: Build Web Assets
run: pnpm run web:build
env:
NODE_OPTIONS: "--max_old_space_size=4096"
- name: Run Clippy
run: cargo clippy --manifest-path src-tauri/Cargo.toml --all-targets --all-features -- -D warnings

View File

@@ -9,6 +9,9 @@ on:
permissions: permissions:
contents: read contents: read
env:
HUSKY: 0
jobs: jobs:
cargo-check: cargo-check:
# Treat all Rust compiler warnings as errors # Treat all Rust compiler warnings as errors

View File

@@ -2,12 +2,36 @@ name: Development Test
on: on:
workflow_dispatch: workflow_dispatch:
inputs:
run_windows:
description: "运行 Windows"
required: false
type: boolean
default: true
run_macos_aarch64:
description: "运行 macOS aarch64"
required: false
type: boolean
default: true
run_windows_arm64:
description: "运行 Windows ARM64"
required: false
type: boolean
default: true
run_linux_amd64:
description: "运行 Linux amd64"
required: false
type: boolean
default: true
permissions: write-all permissions: write-all
env: env:
TAG_NAME: deploytest
TAG_CHANNEL: DeployTest
CARGO_INCREMENTAL: 0 CARGO_INCREMENTAL: 0
RUST_BACKTRACE: short RUST_BACKTRACE: short
HUSKY: 0
concurrency: concurrency:
# only allow per workflow per commit (and not pr) to run at a time
group: "${{ github.workflow }} - ${{ github.head_ref || github.ref }}" group: "${{ github.workflow }} - ${{ github.head_ref || github.ref }}"
cancel-in-progress: ${{ github.ref != 'refs/heads/main' }} cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}
@@ -20,46 +44,82 @@ jobs:
- os: windows-latest - os: windows-latest
target: x86_64-pc-windows-msvc target: x86_64-pc-windows-msvc
bundle: nsis bundle: nsis
id: windows
input: run_windows
- os: macos-latest - os: macos-latest
target: aarch64-apple-darwin target: aarch64-apple-darwin
bundle: dmg bundle: dmg
- os: macos-latest id: macos-aarch64
target: x86_64-apple-darwin input: run_macos_aarch64
bundle: dmg - os: windows-latest
target: aarch64-pc-windows-msvc
bundle: nsis
id: windows-arm64
input: run_windows_arm64
- os: ubuntu-22.04
target: x86_64-unknown-linux-gnu
bundle: deb
id: linux-amd64
input: run_linux_amd64
runs-on: ${{ matrix.os }} runs-on: ${{ matrix.os }}
steps: steps:
- name: Skip job if not selected
if: github.event.inputs[matrix.input] != 'true'
run: echo "Job ${{ matrix.id }} skipped as requested"
- name: Checkout Repository - name: Checkout Repository
if: github.event.inputs[matrix.input] == 'true'
uses: actions/checkout@v4 uses: actions/checkout@v4
- name: Install Rust Stable - name: Install Rust Stable
if: github.event.inputs[matrix.input] == 'true'
uses: dtolnay/rust-toolchain@stable uses: dtolnay/rust-toolchain@stable
- name: Add Rust Target - name: Add Rust Target
if: github.event.inputs[matrix.input] == 'true'
run: rustup target add ${{ matrix.target }} run: rustup target add ${{ matrix.target }}
- name: Rust Cache - name: Rust Cache
if: github.event.inputs[matrix.input] == 'true'
uses: Swatinem/rust-cache@v2 uses: Swatinem/rust-cache@v2
with: with:
workspaces: src-tauri workspaces: src-tauri
save-if: false save-if: false
cache-all-crates: true
shared-key: autobuild-shared
- name: Install Node - name: Install dependencies (ubuntu only)
uses: actions/setup-node@v4 if: matrix.os == 'ubuntu-22.04' && github.event.inputs[matrix.input] == 'true'
with: run: |
node-version: "20" sudo apt-get update
sudo apt-get install -y libxslt1.1 libwebkit2gtk-4.1-dev libayatana-appindicator3-dev librsvg2-dev patchelf
- uses: pnpm/action-setup@v4 - uses: pnpm/action-setup@v4
name: Install pnpm name: Install pnpm
if: github.event.inputs[matrix.input] == 'true'
with: with:
run_install: false run_install: false
- name: Install Node
if: github.event.inputs[matrix.input] == 'true'
uses: actions/setup-node@v4
with:
node-version: "20"
cache: "pnpm"
- name: Pnpm install and check - name: Pnpm install and check
if: github.event.inputs[matrix.input] == 'true'
run: | run: |
pnpm i pnpm i
pnpm run prebuild ${{ matrix.target }} pnpm run prebuild ${{ matrix.target }}
- name: Release ${{ env.TAG_CHANNEL }} Version
if: github.event.inputs[matrix.input] == 'true'
run: pnpm release-version ${{ env.TAG_NAME }}
- name: Tauri build - name: Tauri build
if: github.event.inputs[matrix.input] == 'true'
uses: tauri-apps/tauri-action@v0 uses: tauri-apps/tauri-action@v0
env: env:
NODE_OPTIONS: "--max_old_space_size=4096" NODE_OPTIONS: "--max_old_space_size=4096"
@@ -76,18 +136,26 @@ jobs:
tauriScript: pnpm tauriScript: pnpm
args: --target ${{ matrix.target }} -b ${{ matrix.bundle }} args: --target ${{ matrix.target }} -b ${{ matrix.bundle }}
- name: Upload Artifacts - name: Upload Artifacts (macOS)
if: matrix.os == 'macos-latest' if: matrix.os == 'macos-latest' && github.event.inputs[matrix.input] == 'true'
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v4
with: with:
name: ${{ matrix.target }} name: ${{ matrix.target }}
path: src-tauri/target/${{ matrix.target }}/release/bundle/dmg/*.dmg path: src-tauri/target/${{ matrix.target }}/release/bundle/dmg/*.dmg
if-no-files-found: error if-no-files-found: error
- name: Upload Artifacts - name: Upload Artifacts (Windows)
if: matrix.os == 'windows-latest' if: matrix.os == 'windows-latest' && github.event.inputs[matrix.input] == 'true'
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v4
with: with:
name: ${{ matrix.target }} name: ${{ matrix.target }}
path: src-tauri/target/${{ matrix.target }}/release/bundle/nsis/*.exe path: src-tauri/target/${{ matrix.target }}/release/bundle/nsis/*.exe
if-no-files-found: error if-no-files-found: error
- name: Upload Artifacts (Linux)
if: matrix.os == 'ubuntu-22.04' && github.event.inputs[matrix.input] == 'true'
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.target }}
path: src-tauri/target/${{ matrix.target }}/release/bundle/deb/*.deb
if-no-files-found: error

View File

@@ -7,31 +7,73 @@ name: Check Formatting
on: on:
pull_request: pull_request:
env:
HUSKY: 0
jobs: jobs:
rustfmt: rustfmt:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Check Rust changes
id: check_rust
uses: dorny/paths-filter@v3
with:
filters: |
rust:
- 'src-tauri/**'
- '**/*.rs'
- name: Skip if no Rust changes
if: steps.check_rust.outputs.rust != 'true'
run: echo "No Rust changes, skipping rustfmt."
- name: install Rust stable and rustfmt - name: install Rust stable and rustfmt
if: steps.check_rust.outputs.rust == 'true'
uses: dtolnay/rust-toolchain@stable uses: dtolnay/rust-toolchain@stable
with: with:
components: rustfmt components: rustfmt
- name: run cargo fmt - name: run cargo fmt
if: steps.check_rust.outputs.rust == 'true'
run: cargo fmt --manifest-path ./src-tauri/Cargo.toml --all -- --check run: cargo fmt --manifest-path ./src-tauri/Cargo.toml --all -- --check
prettier: prettier:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Check Web changes
id: check_web
uses: dorny/paths-filter@v3
with:
filters: |
web:
- 'src/**'
- '**/*.js'
- '**/*.ts'
- '**/*.tsx'
- '**/*.css'
- '**/*.scss'
- '**/*.json'
- '**/*.md'
- '**/*.json'
- name: Skip if no Web changes
if: steps.check_web.outputs.web != 'true'
run: echo "No web changes, skipping prettier."
- uses: actions/setup-node@v4 - uses: actions/setup-node@v4
if: steps.check_web.outputs.web == 'true'
with: with:
node-version: "lts/*" node-version: "lts/*"
- run: corepack enable - run: corepack enable
if: steps.check_web.outputs.web == 'true'
- run: pnpm install --frozen-lockfile - run: pnpm install --frozen-lockfile
if: steps.check_web.outputs.web == 'true'
- run: pnpm format:check - run: pnpm format:check
if: steps.check_web.outputs.web == 'true'
# taplo: # taplo:
# name: taplo (.toml files) # name: taplo (.toml files)

82
.github/workflows/lint-clippy.yml vendored Normal file
View File

@@ -0,0 +1,82 @@
name: Clippy Lint
on:
pull_request:
workflow_dispatch:
env:
HUSKY: 0
jobs:
clippy:
strategy:
fail-fast: false
matrix:
include:
- os: windows-latest
target: x86_64-pc-windows-msvc
- os: macos-latest
target: aarch64-apple-darwin
- os: ubuntu-22.04
target: x86_64-unknown-linux-gnu
runs-on: ${{ matrix.os }}
steps:
- name: Check src-tauri changes
if: github.event_name != 'workflow_dispatch'
id: check_changes
uses: dorny/paths-filter@v3
with:
filters: |
rust:
- 'src-tauri/**'
- name: Skip if src-tauri not changed
if: github.event_name != 'workflow_dispatch' && steps.check_changes.outputs.rust != 'true'
run: echo "No src-tauri changes, skipping clippy lint."
- name: Continue if src-tauri changed
if: github.event_name != 'workflow_dispatch' && steps.check_changes.outputs.rust == 'true'
run: echo "src-tauri changed, running clippy lint."
- name: Manual trigger - always run
if: github.event_name == 'workflow_dispatch'
run: |
echo "Manual trigger detected: skipping changes check and running clippy."
- name: Checkout Repository
uses: actions/checkout@v4
- name: Install Rust Stable
uses: dtolnay/rust-toolchain@master
with:
toolchain: stable
components: clippy
- name: Add Rust Target
run: rustup target add ${{ matrix.target }}
- name: Rust Cache
uses: Swatinem/rust-cache@v2
with:
workspaces: src-tauri
cache-all-crates: true
save-if: false
shared-key: autobuild-${{ runner.os }}-${{ matrix.target }}
key: ${{ runner.os }}-${{ matrix.target }}-${{ hashFiles('src-tauri/Cargo.lock') }}
- name: Install dependencies (ubuntu only)
if: matrix.os == 'ubuntu-22.04'
run: |
sudo apt-get update
sudo apt-get install -y libxslt1.1 libwebkit2gtk-4.1-dev libayatana-appindicator3-dev librsvg2-dev patchelf
- name: Run Clippy
working-directory: ./src-tauri
run: cargo clippy-all
- name: Run Logging Check
working-directory: ./src-tauri
shell: bash
run: |
cargo install --git https://github.com/clash-verge-rev/clash-verge-logging-check.git
clash-verge-logging-check

View File

@@ -5,16 +5,13 @@ on:
# ! 不再使用 workflow_dispatch 触发。 # ! 不再使用 workflow_dispatch 触发。
# workflow_dispatch: # workflow_dispatch:
push: push:
# 应当限制在 main 分支上触发发布。
branches:
- main
# 应当限制 v*.*.* 的 tag 触发发布。
tags: tags:
- "v*.*.*" - "v*.*.*"
permissions: write-all permissions: write-all
env: env:
CARGO_INCREMENTAL: 0 CARGO_INCREMENTAL: 0
RUST_BACKTRACE: short RUST_BACKTRACE: short
HUSKY: 0
concurrency: concurrency:
# only allow per workflow per commit (and not pr) to run at a time # only allow per workflow per commit (and not pr) to run at a time
group: "${{ github.workflow }} - ${{ github.head_ref || github.ref }}" group: "${{ github.workflow }} - ${{ github.head_ref || github.ref }}"
@@ -27,22 +24,132 @@ jobs:
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v4 uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Check if tag is from main branch
run: |
TAG_REF="${GITHUB_REF##*/}"
echo "Checking if tag $TAG_REF is from main branch..."
TAG_COMMIT=$(git rev-list -n 1 $TAG_REF)
MAIN_COMMITS=$(git rev-list origin/main)
if echo "$MAIN_COMMITS" | grep -q "$TAG_COMMIT"; then
echo "✅ Tag $TAG_REF is from main branch"
else
echo "❌ Tag $TAG_REF is not from main branch"
echo "This release workflow only accepts tags from main branch."
exit 1
fi
- name: Check tag and package.json version - name: Check tag and package.json version
run: | run: |
TAG_REF="${GITHUB_REF##*/}" TAG_REF="${GITHUB_REF_NAME:-${GITHUB_REF##*/}}"
echo "Current tag: $TAG_REF" echo "Current tag: $TAG_REF"
PKG_VERSION=$(jq -r .version package.json) PKG_VERSION=$(jq -r .version package.json)
echo "package.json version: $PKG_VERSION" echo "package.json version: $PKG_VERSION"
if [[ "$TAG_REF" != "v$PKG_VERSION" ]]; then
echo "Tag ($TAG_REF) does not match package.json version (v$PKG_VERSION)." EXPECTED_TAG="v$PKG_VERSION"
if [[ "$TAG_REF" != "$EXPECTED_TAG" ]]; then
echo "❌ Version mismatch:"
echo " Git tag : $TAG_REF"
echo " package.json : $EXPECTED_TAG"
exit 1 exit 1
fi fi
echo "Tag and package.json version are consistent."
echo "✅ Tag and package.json version are consistent."
update_tag:
name: Update tag
runs-on: ubuntu-latest
needs: [release, release-for-linux-arm, release-for-fixed-webview2]
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Fetch UPDATE logs
id: fetch_update_logs
run: |
if [ -f "UPDATELOG.md" ]; then
UPDATE_LOGS=$(awk '/^## v/{if(flag) exit; flag=1} flag' UPDATELOG.md)
if [ -n "$UPDATE_LOGS" ]; then
echo "Found update logs"
echo "UPDATE_LOGS<<EOF" >> $GITHUB_ENV
echo "$UPDATE_LOGS" >> $GITHUB_ENV
echo "EOF" >> $GITHUB_ENV
else
echo "No update sections found in UPDATELOG.md"
fi
else
echo "UPDATELOG.md file not found"
fi
shell: bash
- name: Set Env
run: |
echo "BUILDTIME=$(TZ=Asia/Shanghai date)" >> $GITHUB_ENV
TAG_REF="${GITHUB_REF##*/}"
echo "TAG_NAME=$TAG_REF" >> $GITHUB_ENV
VERSION=$(echo "$TAG_REF" | sed 's/^v//')
echo "VERSION=$VERSION" >> $GITHUB_ENV
echo "DOWNLOAD_URL=https://github.com/clash-verge-rev/clash-verge-rev/releases/download/$TAG_REF" >> $GITHUB_ENV
shell: bash
- run: |
if [ -z "$UPDATE_LOGS" ]; then
echo "No update logs found, using default message"
UPDATE_LOGS="More new features are now supported. Check for detailed changelog soon."
else
echo "Using found update logs"
fi
cat > release.txt << EOF
$UPDATE_LOGS
## 下载地址
### Windows (不再支持Win7)
#### 正常版本(推荐)
- [64位(常用)](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_x64-setup.exe) | [ARM64(不常用)](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_arm64-setup.exe)
#### 内置Webview2版(体积较大仅在企业版系统或无法安装webview2时使用)
- [64位](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_x64_fixed_webview2-setup.exe) | [ARM64](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_arm64_fixed_webview2-setup.exe)
### macOS
- [Apple M芯片](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_aarch64.dmg) | [Intel芯片](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_x64.dmg)
### Linux
#### DEB包(Debian系) 使用 apt ./路径 安装
- [64位](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_amd64.deb) | [ARM64](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_arm64.deb) | [ARMv7](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_armhf.deb)
#### RPM包(Redhat系) 使用 dnf ./路径 安装
- [64位](${{ env.DOWNLOAD_URL }}/Clash.Verge-${{ env.VERSION }}-1.x86_64.rpm) | [ARM64](${{ env.DOWNLOAD_URL }}/Clash.Verge-${{ env.VERSION }}-1.aarch64.rpm) | [ARMv7](${{ env.DOWNLOAD_URL }}/Clash.Verge-${{ env.VERSION }}-1.armhfp.rpm)
### FAQ
- [常见问题](https://clash-verge-rev.github.io/faq/windows.html)
### 稳定机场VPN推荐
- [狗狗加速](https://verge.dginv.click/#/register?code=oaxsAGo6)
Created at ${{ env.BUILDTIME }}.
EOF
- name: Upload Release
uses: softprops/action-gh-release@v2
with:
tag_name: ${{ env.TAG_NAME }}
name: "Clash Verge Rev ${{ env.TAG_NAME }}"
body_path: release.txt
draft: false
prerelease: false
token: ${{ secrets.GITHUB_TOKEN }}
# generate_release_notes: true
release: release:
name: Release Build name: Release Build
needs: check_tag_version needs: [check_tag_version]
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
@@ -81,6 +188,15 @@ jobs:
sudo apt-get update sudo apt-get update
sudo apt-get install -y libxslt1.1 libwebkit2gtk-4.1-dev libayatana-appindicator3-dev librsvg2-dev patchelf sudo apt-get install -y libxslt1.1 libwebkit2gtk-4.1-dev libayatana-appindicator3-dev librsvg2-dev patchelf
- name: Install x86 OpenSSL (macOS only)
if: matrix.target == 'x86_64-apple-darwin'
run: |
arch -x86_64 brew install openssl@3
echo "OPENSSL_DIR=$(brew --prefix openssl@3)" >> $GITHUB_ENV
echo "OPENSSL_INCLUDE_DIR=$(brew --prefix openssl@3)/include" >> $GITHUB_ENV
echo "OPENSSL_LIB_DIR=$(brew --prefix openssl@3)/lib" >> $GITHUB_ENV
echo "PKG_CONFIG_PATH=$(brew --prefix openssl@3)/lib/pkgconfig" >> $GITHUB_ENV
- name: Install Node - name: Install Node
uses: actions/setup-node@v4 uses: actions/setup-node@v4
with: with:
@@ -110,14 +226,18 @@ jobs:
APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }} APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }} APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
with: with:
tagName: v__VERSION__ tagName: ${{ github.ref_name }}
releaseName: "Clash Verge Rev v__VERSION__" releaseName: "Clash Verge Rev ${{ github.ref_name }}"
releaseBody: "More new features are now supported." releaseBody: "Draft release, will be updated later."
releaseDraft: true
prerelease: false
tauriScript: pnpm tauriScript: pnpm
args: --target ${{ matrix.target }} args: --target ${{ matrix.target }}
includeUpdaterJson: true
release-for-linux-arm: release-for-linux-arm:
name: Release Build for Linux ARM name: Release Build for Linux ARM
needs: [check_tag_version]
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
@@ -232,7 +352,7 @@ jobs:
with: with:
tag_name: v${{env.VERSION}} tag_name: v${{env.VERSION}}
name: "Clash Verge Rev v${{env.VERSION}}" name: "Clash Verge Rev v${{env.VERSION}}"
body: "More new features are now supported." body: "See release notes for detailed changelog."
token: ${{ secrets.GITHUB_TOKEN }} token: ${{ secrets.GITHUB_TOKEN }}
files: | files: |
src-tauri/target/${{ matrix.target }}/release/bundle/deb/*.deb src-tauri/target/${{ matrix.target }}/release/bundle/deb/*.deb
@@ -240,6 +360,7 @@ jobs:
release-for-fixed-webview2: release-for-fixed-webview2:
name: Release Build for Fixed WebView2 name: Release Build for Fixed WebView2
needs: [check_tag_version]
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
@@ -323,7 +444,7 @@ jobs:
with: with:
tag_name: v${{steps.build.outputs.appVersion}} tag_name: v${{steps.build.outputs.appVersion}}
name: "Clash Verge Rev v${{steps.build.outputs.appVersion}}" name: "Clash Verge Rev v${{steps.build.outputs.appVersion}}"
body: "More new features are now supported." body: "See release notes for detailed changelog."
token: ${{ secrets.GITHUB_TOKEN }} token: ${{ secrets.GITHUB_TOKEN }}
files: src-tauri/target/${{ matrix.target }}/release/bundle/nsis/*setup* files: src-tauri/target/${{ matrix.target }}/release/bundle/nsis/*setup*
@@ -335,7 +456,7 @@ jobs:
release-update: release-update:
name: Release Update name: Release Update
runs-on: ubuntu-latest runs-on: ubuntu-latest
needs: [release, release-for-linux-arm] needs: [update_tag]
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v4 uses: actions/checkout@v4
@@ -360,7 +481,7 @@ jobs:
release-update-for-fixed-webview2: release-update-for-fixed-webview2:
runs-on: ubuntu-latest runs-on: ubuntu-latest
needs: [release-for-fixed-webview2] needs: [update_tag]
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v4 uses: actions/checkout@v4
@@ -386,7 +507,7 @@ jobs:
submit-to-winget: submit-to-winget:
name: Submit to Winget name: Submit to Winget
runs-on: ubuntu-latest runs-on: ubuntu-latest
needs: [release-update] needs: [update_tag, release-update]
steps: steps:
- name: Checkout repository - name: Checkout repository
uses: actions/checkout@v4 uses: actions/checkout@v4
@@ -405,3 +526,103 @@ jobs:
release-tag: v${{env.VERSION}} release-tag: v${{env.VERSION}}
installers-regex: '_(arm64|x64|x86)-setup\.exe$' installers-regex: '_(arm64|x64|x86)-setup\.exe$'
token: ${{ secrets.WINGET_TOKEN }} token: ${{ secrets.WINGET_TOKEN }}
notify-telegram:
name: Notify Telegram
runs-on: ubuntu-latest
needs:
[
update_tag,
release-update,
release-update-for-fixed-webview2,
submit-to-winget,
]
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Fetch UPDATE logs
id: fetch_update_logs
run: |
if [ -f "UPDATELOG.md" ]; then
UPDATE_LOGS=$(awk '/^## v/{if(flag) exit; flag=1} flag' UPDATELOG.md)
if [ -n "$UPDATE_LOGS" ]; then
echo "Found update logs"
echo "UPDATE_LOGS<<EOF" >> $GITHUB_ENV
echo "$UPDATE_LOGS" >> $GITHUB_ENV
echo "EOF" >> $GITHUB_ENV
else
echo "No update sections found in UPDATELOG.md"
fi
else
echo "UPDATELOG.md file not found"
fi
shell: bash
- name: Install Node
uses: actions/setup-node@v4
with:
node-version: "22"
- uses: pnpm/action-setup@v4
name: Install pnpm
with:
run_install: false
- name: Install dependencies
run: pnpm install --frozen-lockfile
- name: Get Version and Release Info
run: |
sudo apt-get update
sudo apt-get install jq
echo "VERSION=$(cat package.json | jq '.version' | tr -d '"')" >> $GITHUB_ENV
echo "DOWNLOAD_URL=https://github.com/clash-verge-rev/clash-verge-rev/releases/download/v$(cat package.json | jq '.version' | tr -d '"')" >> $GITHUB_ENV
echo "BUILDTIME=$(TZ=Asia/Shanghai date)" >> $GITHUB_ENV
- name: Generate release.txt
run: |
if [ -z "$UPDATE_LOGS" ]; then
echo "No update logs found, using default message"
UPDATE_LOGS="More new features are now supported. Check for detailed changelog soon."
else
echo "Using found update logs"
fi
cat > release.txt << EOF
$UPDATE_LOGS
## 下载地址
### Windows (不再支持Win7)
#### 正常版本(推荐)
- [64位(常用)](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_x64-setup.exe) | [ARM64(不常用)](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_arm64-setup.exe)
#### 内置Webview2版(体积较大仅在企业版系统或无法安装webview2时使用)
- [64位](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_x64_fixed_webview2-setup.exe) | [ARM64](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_arm64_fixed_webview2-setup.exe)
### macOS
- [Apple M芯片](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_aarch64.dmg) | [Intel芯片](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_x64.dmg)
### Linux
#### DEB包(Debian系) 使用 apt ./路径 安装
- [64位](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_amd64.deb) | [ARM64](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_arm64.deb) | [ARMv7](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_armhf.deb)
#### RPM包(Redhat系) 使用 dnf ./路径 安装
- [64位](${{ env.DOWNLOAD_URL }}/Clash.Verge-${{ env.VERSION }}-1.x86_64.rpm) | [ARM64](${{ env.DOWNLOAD_URL }}/Clash.Verge-${{ env.VERSION }}-1.aarch64.rpm) | [ARMv7](${{ env.DOWNLOAD_URL }}/Clash.Verge-${{ env.VERSION }}-1.armhfp.rpm)
### FAQ
- [常见问题](https://clash-verge-rev.github.io/faq/windows.html)
### 稳定机场VPN推荐
- [狗狗加速](https://verge.dginv.click/#/register?code=oaxsAGo6)
Created at ${{ env.BUILDTIME }}.
EOF
- name: Send Telegram Notification
run: node scripts/telegram.mjs
env:
TELEGRAM_BOT_TOKEN: ${{ secrets.TELEGRAM_BOT_TOKEN }}
BUILD_TYPE: release
VERSION: ${{ env.VERSION }}
DOWNLOAD_URL: ${{ env.DOWNLOAD_URL }}

View File

@@ -2,6 +2,9 @@ name: Updater CI
on: workflow_dispatch on: workflow_dispatch
permissions: write-all permissions: write-all
env:
HUSKY: 0
jobs: jobs:
release-update: release-update:
runs-on: ubuntu-latest runs-on: ubuntu-latest

2
.gitignore vendored
View File

@@ -10,3 +10,5 @@ scripts/_env.sh
.tool-versions .tool-versions
.idea .idea
.old .old
.eslintcache
target

View File

@@ -1,26 +1,44 @@
#!/bin/bash #!/bin/bash
set -euo pipefail
#pnpm pretty-quick --staged ROOT_DIR="$(git rev-parse --show-toplevel)"
cd "$ROOT_DIR"
if git diff --cached --name-only | grep -q '^src/'; then if ! command -v pnpm >/dev/null 2>&1; then
pnpm format:check echo "❌ pnpm is required for pre-commit checks."
if [ $? -ne 0 ]; then exit 1
echo "Code format check failed in src/. Please fix formatting issues."
exit 1
fi
fi fi
if git diff --cached --name-only | grep -q '^src-tauri/'; then echo "[pre-commit] Running lint-staged for JS/TS files..."
pnpm exec lint-staged
RUST_FILES="$(git diff --cached --name-only --diff-filter=ACMR | grep -E '^src-tauri/.*\.rs$' || true)"
if [ -n "$RUST_FILES" ]; then
echo "[pre-commit] Formatting Rust changes with cargo fmt..."
(
cd src-tauri cd src-tauri
cargo fmt cargo fmt
if [ $? -ne 0 ]; then )
echo "rustfmt failed to format the code. Please fix the issues and try again." while IFS= read -r file; do
exit 1 [ -n "$file" ] && git add "$file"
done <<< "$RUST_FILES"
echo "[pre-commit] Linting Rust changes with cargo clippy..."
(
cd src-tauri
cargo clippy-all
if ! command -v clash-verge-logging-check >/dev/null 2>&1; then
echo "[pre-commit] Installing clash-verge-logging-check..."
cargo install --git https://github.com/clash-verge-rev/clash-verge-logging-check.git
fi fi
cd .. clash-verge-logging-check
)
fi fi
#git add . TS_FILES="$(git diff --cached --name-only --diff-filter=ACMR | grep -E '\.(ts|tsx)$' || true)"
if [ -n "$TS_FILES" ]; then
echo "[pre-commit] Running TypeScript type check..."
pnpm typecheck
fi
# 允许提交 echo "[pre-commit] All checks completed successfully."
exit 0

View File

@@ -1,28 +1,43 @@
#!/bin/bash #!/bin/bash
set -euo pipefail
# $1: remote name (e.g., origin) remote_name="${1:-origin}"
# $2: remote url (e.g., git@github.com:clash-verge-rev/clash-verge-rev.git) remote_url="${2:-unknown}"
if git diff --cached --name-only | grep -q '^src-tauri/'; then ROOT_DIR="$(git rev-parse --show-toplevel)"
cargo clippy --manifest-path ./src-tauri/Cargo.toml cd "$ROOT_DIR"
if [ $? -ne 0 ]; then
echo "Clippy found issues in src-tauri. Please fix them before pushing." if ! command -v pnpm >/dev/null 2>&1; then
exit 1 echo "❌ pnpm is required for pre-push checks."
fi exit 1
fi fi
# 只在 push 到 origin 并且 origin 指向目标仓库时执行格式检查 echo "[pre-push] Preparing to push to '$remote_name' ($remote_url). Running full validation..."
if [ "$1" = "origin" ] && echo "$2" | grep -Eq 'github\.com[:/]+clash-verge-rev/clash-verge-rev(\.git)?$'; then
echo "[pre-push] Detected push to origin (clash-verge-rev/clash-verge-rev)"
echo "[pre-push] Running pnpm format:check..."
pnpm format:check echo "[pre-push] Checking Prettier formatting..."
if [ $? -ne 0 ]; then pnpm format:check
echo "❌ Code format check failed. Please fix formatting before pushing."
exit 1 echo "[pre-push] Running ESLint..."
fi pnpm lint
echo "[pre-push] Running TypeScript type checking..."
pnpm typecheck
if command -v cargo >/dev/null 2>&1; then
echo "[pre-push] Verifying Rust formatting..."
(
cd src-tauri
cargo fmt --check
)
echo "[pre-push] Running cargo clippy..."
(
cd src-tauri
cargo clippy-all
)
else else
echo "[pre-push] Not pushing to target repo. Skipping format check." echo "[pre-push] ⚠️ cargo not found; skipping Rust checks."
fi fi
echo "[pre-push] All checks passed."
exit 0 exit 0

View File

@@ -6,3 +6,5 @@ pnpm-lock.yaml
src-tauri/target/ src-tauri/target/
src-tauri/gen/ src-tauri/gen/
target

View File

@@ -11,6 +11,6 @@
"arrowParens": "always", "arrowParens": "always",
"proseWrap": "preserve", "proseWrap": "preserve",
"htmlWhitespaceSensitivity": "css", "htmlWhitespaceSensitivity": "css",
"endOfLine": "lf", "endOfLine": "auto",
"embeddedLanguageFormatting": "auto" "embeddedLanguageFormatting": "auto"
} }

View File

@@ -2,16 +2,25 @@
Thank you for your interest in contributing to Clash Verge Rev! This document provides guidelines and instructions to help you set up your development environment and start contributing. Thank you for your interest in contributing to Clash Verge Rev! This document provides guidelines and instructions to help you set up your development environment and start contributing.
## Internationalization (i18n)
We welcome translations and improvements to existing locales. Please follow the detailed guidelines in [CONTRIBUTING_i18n.md](docs/CONTRIBUTING_i18n.md) for instructions on extracting strings, file naming conventions, testing translations, and submitting translation PRs.
## Development Setup ## Development Setup
Before you start contributing to the project, you need to set up your development environment. Here are the steps you need to follow: Before you start contributing to the project, you need to set up your development environment. Here are the steps you need to follow:
### Prerequisites ### Prerequisites
1. **Install Rust and Node.js**: Our project requires both Rust and Node.js. Please follow the instructions provided [here](https://tauri.app/v1/guides/getting-started/prerequisites) to install them on your system. 1. **Install Rust and Node.js**: Our project requires both Rust and Node.js. Please follow the instructions provided [here](https://tauri.app/start/prerequisites/) to install them on your system.
### Setup for Windows Users ### Setup for Windows Users
> [!NOTE]
> **If you are using a Windows ARM device, you additionally need to install [LLVM](https://github.com/llvm/llvm-project/releases) (including clang) and set the environment variable.**
>
> Because the `ring` crate is compiled based on `clang` under Windows ARM.
If you're a Windows user, you may need to perform some additional steps: If you're a Windows user, you may need to perform some additional steps:
- Make sure to add Rust and Node.js to your system's PATH. This is usually done during the installation process, but you can verify and manually add them if necessary. - Make sure to add Rust and Node.js to your system's PATH. This is usually done during the installation process, but you can verify and manually add them if necessary.
@@ -51,11 +60,14 @@ apt-get install -y libxslt1.1 libwebkit2gtk-4.1-dev libayatana-appindicator3-dev
You have two options for downloading the clash binary: You have two options for downloading the clash binary:
- Automatically download it via the provided script: - Automatically download it via the provided script:
```shell ```shell
pnpm run prebuild pnpm run prebuild
# Use '--force' to force update to the latest version # Use '--force' or '-f' to update both the Mihomo core version
# pnpm run prebuild --force # and the Clash Verge Rev service version to the latest available.
pnpm run prebuild --force
``` ```
- Manually download it from the [Mihomo release](https://github.com/MetaCubeX/mihomo/releases). After downloading, rename the binary according to the [Tauri configuration](https://tauri.app/v1/api/config#bundleconfig.externalbin). - Manually download it from the [Mihomo release](https://github.com/MetaCubeX/mihomo/releases). After downloading, rename the binary according to the [Tauri configuration](https://tauri.app/v1/api/config#bundleconfig.externalbin).
### Run the Development Server ### Run the Development Server
@@ -66,6 +78,8 @@ To run the development server, use the following command:
pnpm dev pnpm dev
# If an app instance already exists, use a different command # If an app instance already exists, use a different command
pnpm dev:diff pnpm dev:diff
# To using tauri built-in dev tool
pnpm dev:tauri
``` ```
### Build the Project ### Build the Project

View File

@@ -9,6 +9,15 @@
A Clash Meta GUI based on <a href="https://github.com/tauri-apps/tauri">Tauri</a>. A Clash Meta GUI based on <a href="https://github.com/tauri-apps/tauri">Tauri</a>.
</h3> </h3>
<p align="center">
Languages:
<a href="./README.md">简体中文</a> ·
<a href="./docs/README_en.md">English</a> ·
<a href="./docs/README_es.md">Español</a> ·
<a href="./docs/README_ru.md">Русский</a> ·
<a href="./docs/README_ja.md">日本語</a>
</p>
## Preview ## Preview
| Dark | Light | | Dark | Light |
@@ -23,11 +32,11 @@ Supports Windows (x64/x86), Linux (x64/arm64) and macOS 10.15+ (intel/apple).
#### 我应当怎样选择发行版 #### 我应当怎样选择发行版
| 版本 | 特征 | 链接 | | 版本 | 特征 | 链接 |
| :-------- | :--------------------------------------- | :------------------------------------------------------------------------------------- | | :---------- | :--------------------------------------- | :------------------------------------------------------------------------------------- |
| Stable | 正式版,高可靠性,适合日常使用。 | [Release](https://github.com/clash-verge-rev/clash-verge-rev/releases) | | Stable | 正式版,高可靠性,适合日常使用。 | [Release](https://github.com/clash-verge-rev/clash-verge-rev/releases) |
| Alpha | 早期测试版,功能未完善,可能存在缺陷。 | [Alpha](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/alpha) | | Alpha(废弃) | 测试发布流程。 | [Alpha](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/alpha) |
| AutoBuild | 滚动更新版,持续集成更新,适合开发测试。 | [AutoBuild](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/autobuild) | | AutoBuild | 滚动更新版,适合测试反馈,可能存在缺陷。 | [AutoBuild](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/autobuild) |
#### 安装说明和常见问题,请到 [文档页](https://clash-verge-rev.github.io/) 查看 #### 安装说明和常见问题,请到 [文档页](https://clash-verge-rev.github.io/) 查看
@@ -88,7 +97,7 @@ To run the development server, execute the following commands after all prerequi
```shell ```shell
pnpm i pnpm i
pnpm run check pnpm run prebuild
pnpm dev pnpm dev
``` ```

View File

@@ -1,3 +1,218 @@
## v2.4.3
感谢 @Slinetrac, @oomeow 以及 @Lythrilla 的出色贡献
### 🐞 修复问题
- 优化服务模式重装逻辑,避免不必要的重复检查
- 修复轻量模式退出无响应的问题
- 修复托盘轻量模式支持退出/进入
- 修复静默启动和自动进入轻量模式时,托盘状态刷新不再依赖窗口创建流程
- macOS Tun/系统代理 模式下图标大小不统一
- 托盘节点切换不再显示隐藏组
- 修复前端 IP 检测无法使用 ipapi, ipsb 提供商
- 修复MacOS 下 Tun开启后 系统代理无法打开的问题
- 修复服务模式启动时,修改、生成配置文件或重启内核可能导致页面卡死的问题
- 修复 Webdav 恢复备份不重启
- 修复 Linux 开机后无法正常代理需要手动设置
- 修复增加订阅或导入订阅文件时订阅页面无更新
- 修复系统代理守卫功能不工作
- 修复 KDE + Wayland 下多屏显示 UI 异常
- 修复 Windows 深色模式下首次启动客户端标题栏颜色异常
- 修复静默启动不加载完整 WebView 的问题
- 修复 Linux WebKit 网络进程的崩溃
- 修复无法导入订阅
- 修复实际导入成功但显示导入失败的问题
- 修复服务不可用时,自动关闭 Tun 模式导致应用卡死问题
- 修复删除订阅时未能实际删除相关文件
- 修复 macOS 连接界面显示异常
- 修复规则配置项在不同配置文件间全局共享导致切换被重置的问题
- 修复 Linux Wayland 下部分 GPU 可能出现的 UI 渲染问题
- 修复自动更新使版本回退的问题
- 修复首页自定义卡片在切换轻量模式时失效
- 修复悬浮跳转导航失效
- 修复小键盘热键映射错误
- 修复前端无法及时刷新操作状态
- 修复 macOS 从 Dock 栏退出轻量模式状态不同步
- 修复 Linux 系统主题切换不生效
- 修复 `允许自动更新` 字段使手动订阅刷新失效
<details>
<summary><strong> ✨ 新增功能 </strong></summary>
- **Mihomo(Meta) 内核升级至 v1.19.15**
- 支持前端修改日志(最大文件大小、最大保留数量)
- 新增链式代理图形化设置功能
- 新增系统标题栏与程序标题栏切换 (设置-页面设置-倾向系统标题栏)
- 监听关机事件,自动关闭系统代理
- 主界面“当前节点”卡片新增“延迟测试”按钮
- 新增批量选择配置文件功能
- Windows / Linux / MacOS 监听关机信号,优雅恢复网络设置
- 新增本地备份功能
- 主界面“当前节点”卡片新增自动延迟检测开关(默认关闭)
- 允许独立控制订阅自动更新
- 托盘 `更多` 中新增 `关闭所有连接` 按钮
- 新增左侧菜单栏的排序功能(右键点击左侧菜单栏)
- 托盘 `打开目录` 中新增 `应用日志``内核日志`
</details>
<details>
<summary><strong> 🚀 优化改进 </strong></summary>
- 重构并简化服务模式启动检测流程,消除重复检测
- 重构并简化窗口创建流程
- 重构日志系统,单个日志默认最大 10 MB
- 优化前端资源占用
- 改进 macos 下系统代理设置的方法
- 优化 TUN 模式可用性的判断
- 移除流媒体检测的系统级提示(使用软件内通知)
- 优化后端 i18n 资源占用
- 改进 Linux 托盘支持并添加 `--no-tray` 选项
- Linux 现在在新生成的配置中默认将 TUN 栈恢复为 mixed 模式
- 为代理延迟测试的 URL 设置增加了保护以及添加了安全的备用 URL
- 更新了 Wayland 合成器检测逻辑,从而在 Hyprland 会话中保留原生 Wayland 后端
- 改进 Windows 和 Unix 的 服务连接方式以及权限,避免无法连接服务或内核
- 修改内核默认日志级别为 Info
- 支持通过桌面快捷方式重新打开应用
- 支持订阅界面输入链接后回车导入
- 选择按延迟排序时每次延迟测试自动刷新节点顺序
- 配置重载失败时自动重启核心
- 启用 TUN 前等待服务就绪
- 卸载 TUN 时会先关闭
- 优化应用启动页
- 优化首页当前节点对MATCH规则的支持
- 允许在 `界面设置` 修改 `悬浮跳转导航延迟`
- 添加热键绑定错误的提示信息
- 在 macOS 10.15 及更高版本默认包含 Mihomo-go122以解决 Intel 架构 Mac 无法运行内核的问题
- Tun 模式不可用时,禁用系统托盘的 Tun 模式菜单
</details>
## v2.4.2
### ✨ 新增功能
- 增加托盘节点选择
### 🚀 性能优化
- 优化前端首页加载速度
- 优化前端未使用 i18n 文件缓存
- 优化后端内存占用
- 优化后端启动速度
### 🐞 修复问题
- 修复首页节点切换失效的问题
- 修复和优化服务检查流程
- 修复2.4.1引入的订阅地址重定向报错问题
- 修复 rpm/deb 包名称问题
- 修复托盘轻量模式状态检测异常
- 修复通过 scheme 导入订阅崩溃
- 修复单例检测实效
- 修复启动阶段可能导致的无法连接内核
- 修复导入订阅无法 Auth Basic
### 👙 界面样式
- 简化和改进代理设置样式
## v2.4.1
### 🏆 重大改进
- **应用响应速度提升**:采用全新异步处理架构,大幅提升应用响应速度和稳定性
### ✨ 新增功能
- **Mihomo(Meta) 内核升级至 v1.19.13**
### 🚀 性能优化
- 优化热键响应速度,提升快捷键操作体验
- 改进服务管理响应性,减少系统服务操作等待时间
- 提升文件和配置处理性能
- 优化任务管理和日志记录效率
- 优化异步内存管理,减少内存占用并提升多任务处理效率
- 优化启动阶段初始化性能
### 🐞 修复问题
- 修复应用在某些操作中可能出现的响应延迟问题
- 修复任务管理中的潜在并发问题
- 修复通过托盘重启应用无法恢复
- 修复订阅在某些情况下无法导入
- 修复无法新建订阅时使用远程链接
- 修复卸载服务后的 tun 开关状态问题
- 修复页面快速切换订阅时导致崩溃
- 修复丢失工作目录时无法恢复环境
- 修复从轻量模式恢复导致崩溃
### 👙 界面样式
- 统一代理设置样式
### 🗑️ 移除内容
- 移除启动阶段自动清理过期订阅
## v2.4.0
**发行代号:融**
代号释义: 「融」象征融合与贯通,寓意新版本通过全新 IPC 通信机制 将系统各部分紧密衔接,打破壁垒,实现更高效的 数据流通与全面性能优化。
### 🏆 重大改进
- **核心通信架构升级**:采用全新通信机制,提升应用性能和稳定性
- **流量监控系统重构**:全新的流量监控界面,支持更丰富的数据展示
- **数据缓存优化**:改进配置和节点数据缓存,提升响应速度
### ✨ 新增功能
- **Mihomo(Meta) 内核升级至 v1.19.12**
- 新增版本信息复制按钮
- 增强型流量监控,支持更详细的数据分析
- 新增流量图表多种显示模式
- 新增强制刷新配置和节点缓存功能
- 首页流量统计支持查看刻度线详情
### 🚀 性能优化
- 全面提升数据传输和处理效率
- 优化内存使用,减少系统资源消耗
- 改进流量图表渲染性能
- 优化配置和节点刷新策略从5秒延长到60秒
- 改进数据缓存机制,减少重复请求
- 优化异步程序性能
### 🐞 修复问题
- 修复系统代理状态检测和显示不一致问题
- 修复系统主题窗口颜色不一致问题
- 修复特殊字符 URL 处理问题
- 修复配置修改后缓存不同步问题
- 修复 Windows 安装器自启设置问题
- 修复 macOS 下 Dock 图标恢复窗口问题
- 修复 linux 下 KDE/Plasma 异常标题栏按钮
- 修复架构升级后节点测速功能异常
- 修复架构升级后流量统计功能异常
- 修复架构升级后日志功能异常
- 修复外部控制器跨域配置保存问题
- 修复首页端口显示不一致问题
- 修复首页流量统计刻度线显示问题
- 修复日志页面按钮功能混淆问题
- 修复日志等级设置保存问题
- 修复日志等级异常过滤
- 修复清理日志天数功能异常
- 修复偶发性启动卡死问题
- 修复首页虚拟网卡开关在管理模式下的状态问题
### 🔧 技术改进
- 统一使用新的内核通信方式
- 新增外部控制器配置界面
- 改进跨平台兼容性支持
## v2.3.2 ## v2.3.2
### 🐞 修复问题 ### 🐞 修复问题
@@ -8,18 +223,28 @@
- 修复同时开启静默启动与自动进入轻量模式后,自动进入轻量模式失效的问题 - 修复同时开启静默启动与自动进入轻量模式后,自动进入轻量模式失效的问题
- 修复静默启动时托盘工具栏轻量模式开启与关闭状态的同步 - 修复静默启动时托盘工具栏轻量模式开启与关闭状态的同步
- 修复导入订阅时非 http 协议链接被错误尝试导入 - 修复导入订阅时非 http 协议链接被错误尝试导入
- 修复切换节点后页面长时间 loading 及缓存过期导致的数据不同步问题
- 修复将快捷键名称更名为 `Clash Verge`之后无法删除图标和无法删除注册表
- 修复`DNS`覆写 `fallback` `proxy server` `nameserver` `direct Nameserver` 字段支持留空
- 修复`DNS`覆写 `nameserver-policy` 字段无法正确识别 `geo`
- 修复搜索框输入特殊字符崩溃
- 修复 Windows 下 Start UP 名称与 exe 名称不统一
- 修复显示 Mihomo 内核日志等级应该大于设置等级
### ✨ 新增功能 ### ✨ 新增功能
- `sidecar` 模式下清理多余的内核进程,防止运行出现异常 - `sidecar` 模式下清理多余的内核进程,防止运行出现异常
- 新 macOS 下 TUN 和系统代理模式托盘图标(暂测) - 新 macOS 下 TUN 和系统代理模式托盘图标(暂测)
- 快捷键事件通过系统通知 - 快捷键事件通过系统通知
- 添加外部 `cors` 控制面板
### 🚀 优化改进 ### 🚀 优化改进
- 优化重构订阅切换逻辑,可以随时中断载入过程,防止卡死 - 优化重构订阅切换逻辑,可以随时中断载入过程,防止卡死
- 引入事件驱动代理管理器,优化代理配置更新逻辑,防止卡死 - 引入事件驱动代理管理器,优化代理配置更新逻辑,防止卡死
- 改进主页订阅卡流量已使用比例计算精度 - 改进主页订阅卡流量已使用比例计算精度
- 优化后端缓存刷新机制,支持毫秒级 TTL默认 3000ms减少重复请求并提升性能切换节点时强制刷新后端数据前端 UI 实时更新,操作更流畅
- 解耦前端数据拉取与后端缓存刷新,提升节点切换速度和一致性
### 🗑️ 移除内容 ### 🗑️ 移除内容
@@ -49,7 +274,8 @@
- 优化 托盘 统一响应 - 优化 托盘 统一响应
- 优化 静默启动+自启动轻量模式 运行方式 - 优化 静默启动+自启动轻量模式 运行方式
- 升级依赖 - 降低前端潜在内存泄漏风险,提升运行时性能
- 优化 React 状态、副作用、数据获取、清理等流程。
## v2.3.0 ## v2.3.0
@@ -379,7 +605,7 @@
- 新增窗口状态实时监控与自动保存功能 - 新增窗口状态实时监控与自动保存功能
- 增强核心配置变更时的验证与错误处理机制 - 增强核心配置变更时的验证与错误处理机制
- 支持通过环境变量`CLASH_VERGE_REV_IP`自定义复制IP地址 - 支持通过环境变量 `CLASH_VERGE_REV_IP`自定义复制IP地址
- 添加连接表列宽持久化设置与进程过滤功能 - 添加连接表列宽持久化设置与进程过滤功能
- 新增代理组首字母导航与动态滚动定位功能 - 新增代理组首字母导航与动态滚动定位功能
- 实现连接追踪暂停/恢复功能 - 实现连接追踪暂停/恢复功能
@@ -700,7 +926,7 @@
- 禁用部分 Webview2 快捷键 - 禁用部分 Webview2 快捷键
- 热键配置新增连接符 + 号 - 热键配置新增连接符 + 号
- 新增部分悬浮提示按钮,用于解释说明 - 新增部分悬浮提示按钮,用于解释说明
- 当日志等级为`Debug`时(更改需重启软件生效),支持点击内存主动内存回收(绿色文字) - 当日志等级为 `Debug`时(更改需重启软件生效),支持点击内存主动内存回收(绿色文字)
- 设置页面右上角新增 TG 频道链接 - 设置页面右上角新增 TG 频道链接
- 各种细节优化和界面性能优化 - 各种细节优化和界面性能优化
@@ -740,7 +966,7 @@
- 禁用部分 Webview2 快捷键 - 禁用部分 Webview2 快捷键
- 热键配置新增连接符 + 号 - 热键配置新增连接符 + 号
- 新增部分悬浮提示按钮,用于解释说明 - 新增部分悬浮提示按钮,用于解释说明
- 当日志等级为`Debug`时(更改需重启软件生效),支持点击内存主动内存回收(绿色文字) - 当日志等级为 `Debug`时(更改需重启软件生效),支持点击内存主动内存回收(绿色文字)
- 设置页面右上角新增 TG 频道链接 - 设置页面右上角新增 TG 频道链接
- 各种细节优化和界面性能优化 - 各种细节优化和界面性能优化
@@ -776,7 +1002,7 @@
- 禁用部分 Webview2 快捷键 - 禁用部分 Webview2 快捷键
- 热键配置新增连接符 + 号 - 热键配置新增连接符 + 号
- 新增部分悬浮提示按钮,用于解释说明 - 新增部分悬浮提示按钮,用于解释说明
- 当日志等级为`Debug`时(更改需重启软件生效),支持点击内存主动内存回收(绿色文字) - 当日志等级为 `Debug`时(更改需重启软件生效),支持点击内存主动内存回收(绿色文字)
- 设置页面右上角新增 TG 频道链接 - 设置页面右上角新增 TG 频道链接
### Bugs Fixes ### Bugs Fixes
@@ -950,7 +1176,7 @@
### Features ### Features
- 缓存代理组图标 - 缓存代理组图标
- 使用`boa_engine` 代替 `rquickjs` - 使用 `boa_engine` 代替 `rquickjs`
- 支持 Linux armv7 - 支持 Linux armv7
### Bugs Fixes ### Bugs Fixes
@@ -1015,7 +1241,7 @@
- 支持自定义托盘图标 - 支持自定义托盘图标
- 支持禁用代理组图标 - 支持禁用代理组图标
- 代理组显示当前代理 - 代理组显示当前代理
- 修改 `打开面板` 快捷键为`打开/关闭面板` - 修改 `打开面板` 快捷键为 `打开/关闭面板`
--- ---
@@ -1179,7 +1405,7 @@
### Bugs Fixes ### Bugs Fixes
- Windows 下更新时无法覆盖`clash-verge-service.exe`的问题(需要卸载重装一次服务,下次更新生效) - Windows 下更新时无法覆盖 `clash-verge-service.exe`的问题(需要卸载重装一次服务,下次更新生效)
- 窗口最大化按钮变化问题 - 窗口最大化按钮变化问题
- 窗口尺寸保存错误问题 - 窗口尺寸保存错误问题
- 复制环境变量类型无法切换问题 - 复制环境变量类型无法切换问题

81
docs/CONTRIBUTING_i18n.md Normal file
View File

@@ -0,0 +1,81 @@
# CONTRIBUTING — i18n
Thank you for considering contributing to our localization work — your help is appreciated.
Quick overview
- cvr-i18 is a CLI that helps manage simple top-level JSON locale files:
- Detect duplicated top-level keys
- Find keys missing versus a base file (default: en.json)
- Export missing entries for translators
- Reorder keys to match the base file for predictable diffs
- Operate on a directory or a single file
Get the CLI (No binary provided yet)
```bash
git clone https://github.com/clash-verge-rev/clash-verge-rev-i18n-cli
cd clash-verge-rev-i18n-cli
cargo install --path .
# or
cargo install --git https://github.com/clash-verge-rev/clash-verge-rev-i18n-cli
```
Common commands
- Show help: `cvr-i18`
- Directory (auto-detects `./locales` or `./src/locales`): `cvr-i18 -d /path/to/locales`
- Check duplicates: `cvr-i18 -k`
- Check missing keys: `cvr-i18 -m`
- Export missing keys: `cvr-i18 -m -e ./exports`
- Sort keys to base file: `cvr-i18 -s`
- Use a base file: `cvr-i18 -b base.json`
- Single file: `cvr-i18 -f locales/zh.json`
Options (short)
- `-d, --directory <DIR>`
- `-f, --file <FILE>`
- `-k, --duplicated-key`
- `-m, --missing-key`
- `-e, --export <DIR>`
- `-s, --sort`
- `-b, --base <FILE>`
Exit codes
- `0` — success (no issues)
- `1` — issues found (duplicates/missing)
- `2` — error (IO/parse/runtime)
How to contribute (recommended steps)
- Start small: fix typos, improve phrasing, or refine tone and consistency.
- Run the CLI against your locale files to detect duplicates or missing keys.
- Export starter JSONs for translators with `-m -e <DIR>`.
- Prefer incremental PRs or draft PRs; leave a comment on the issue if you want guidance.
- Open an issue to report missing strings, UI context, or localization bugs.
- Add or improve docs and tests to make future contributions easier.
PR checklist
- Keep JSON files UTF-8 encoded.
- Follow the repo's locale file structure and naming conventions.
- Reorder keys to match the base file (`-s`) for minimal diffs.
- Test translations in a local dev build before opening a PR.
- Reference related issues and explain any context for translations or changes.
Notes
- The tool expects simple top-level JSON key/value maps.
- Exported JSONs are starter files for translators (fill in values, keep keys).
- Sorting keeps diffs consistent and reviewable.
Repository
https://github.com/clash-verge-rev/clash-verge-rev-i18n-cli
## Feedback & Contributions
- For tool usage issues or feedback: please open an Issue in the [repository](https://github.com/clash-verge-rev/clash-verge-rev-i18n-cli) so it can be tracked and addressed.
- For localization contributions (translations, fixes, context notes, etc.): submit a PR or Issue in this repository and include examples, context, and testing instructions when possible.
- If you need help or a review, leave a comment on your submission requesting assistance.

124
docs/README_en.md Normal file
View File

@@ -0,0 +1,124 @@
<h1 align="center">
<img src="../src-tauri/icons/icon.png" alt="Clash" width="128" />
<br>
Continuation of <a href="https://github.com/zzzgydi/clash-verge">Clash Verge</a>
<br>
</h1>
<h3 align="center">
A Clash Meta GUI built with <a href="https://github.com/tauri-apps/tauri">Tauri</a>.
</h3>
<p align="center">
Languages:
<a href="../README.md">简体中文</a> ·
<a href="./README_en.md">English</a> ·
<a href="./README_es.md">Español</a> ·
<a href="./README_ru.md">Русский</a> ·
<a href="./README_ja.md">日本語</a>
</p>
## Preview
| Dark | Light |
| ----------------------------------- | ------------------------------------- |
| ![Dark Preview](./preview_dark.png) | ![Light Preview](./preview_light.png) |
## Install
Visit the [Release page](https://github.com/clash-verge-rev/clash-verge-rev/releases) to download the installer that matches your platform.<br>
We provide packages for Windows (x64/x86), Linux (x64/arm64), and macOS 10.15+ (Intel/Apple).
#### Choosing a Release Channel
| Channel | Description | Link |
| :---------- | :-------------------------------------------------------------------- | :------------------------------------------------------------------------------------- |
| Stable | Official builds with high reliability, ideal for daily use. | [Release](https://github.com/clash-verge-rev/clash-verge-rev/releases) |
| Alpha (EOL) | Legacy builds used to validate the publish pipeline. | [Alpha](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/alpha) |
| AutoBuild | Rolling builds for testing and feedback. Expect experimental changes. | [AutoBuild](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/autobuild) |
#### Installation Guides & FAQ
Read the [project documentation](https://clash-verge-rev.github.io/) for install steps, troubleshooting, and frequently asked questions.
---
### Telegram Channel
Join [@clash_verge_rev](https://t.me/clash_verge_re) for update announcements.
## Promotion
#### [Doggygo VPN — Performance-oriented global accelerator](https://verge.dginv.click/#/register?code=oaxsAGo6)
- High-performance overseas network service with free trials, discounted plans, streaming unlocks, and first-class Hysteria protocol support.
- Register through the exclusive Clash Verge link to get a 3-day trial with 1 GB of traffic per day: [Sign up](https://verge.dginv.click/#/register?code=oaxsAGo6)
- Exclusive 20% off coupon for Clash Verge users: `verge20` (limited to 500 uses)
- Discounted bundle from ¥15.8 per month for 160 GB, plus an additional 20% off for yearly billing
- Operated by an overseas team with reliable service and up to 50% revenue share
- Load-balanced clusters with high-speed dedicated routes (compatible with legacy clients), exceptionally low latency, smooth 4K playback
- First global provider to support the `Hysteria2` protocol—perfect fit for the Clash Verge client
- Supports streaming services and ChatGPT access
- Official site: [https://狗狗加速.com](https://verge.dginv.click/#/register?code=oaxsAGo6)
#### Build Infrastructure Sponsor — [YXVM Dedicated Servers](https://yxvm.com/aff.php?aff=827)
Our builds and releases run on YXVM dedicated servers that deliver premium resources, strong performance, and high-speed networking. If downloads feel fast and usage feels snappy, it is thanks to robust hardware.
🧩 Highlights of YXVM Dedicated Servers:
- 🌎 Optimized global routes for dramatically faster downloads
- 🔧 Bare-metal resources instead of shared VPS capacity for maximum performance
- 🧠 Great for proxy workloads, hosting web/CDN services, CI/CD pipelines, or any high-load tasks
- 💡 Ready to use instantly with multiple datacenter options, including CN2 and IEPL
- 📦 The configuration used by this project is on sale—feel free to get the same setup
- 🎯 Want the same build environment? [Order a YXVM server today](https://yxvm.com/aff.php?aff=827)
## Features
- Built on high-performance Rust with the Tauri 2 framework
- Ships with the embedded [Clash.Meta (mihomo)](https://github.com/MetaCubeX/mihomo) core and supports switching to the `Alpha` channel
- Clean, polished UI with theme color controls, proxy group/tray icons, and `CSS Injection`
- Enhanced profile management (Merge and Script helpers) with configuration syntax hints
- System proxy controls, guard mode, and `TUN` (virtual network adapter) support
- Visual editors for nodes and rules
- WebDAV-based backup and sync for configurations
### FAQ
See the [FAQ page](https://clash-verge-rev.github.io/faq/windows.html) for platform-specific guidance.
### Donation
[Support Clash Verge Rev development](https://github.com/sponsors/clash-verge-rev)
## Development
See [CONTRIBUTING.md](../CONTRIBUTING.md) for detailed contribution guidelines.
After installing all **Tauri** prerequisites, run the development shell with:
```shell
pnpm i
pnpm run prebuild
pnpm dev
```
## Contributions
Issues and pull requests are welcome!
## Acknowledgement
Clash Verge Rev builds on or draws inspiration from these projects:
- [zzzgydi/clash-verge](https://github.com/zzzgydi/clash-verge): A Tauri-based Clash GUI for Windows, macOS, and Linux.
- [tauri-apps/tauri](https://github.com/tauri-apps/tauri): Build smaller, faster, more secure desktop apps with a web frontend.
- [Dreamacro/clash](https://github.com/Dreamacro/clash): A rule-based tunnel written in Go.
- [MetaCubeX/mihomo](https://github.com/MetaCubeX/mihomo): A rule-based tunnel written in Go.
- [Fndroid/clash_for_windows_pkg](https://github.com/Fndroid/clash_for_windows_pkg): A Clash GUI for Windows and macOS.
- [vitejs/vite](https://github.com/vitejs/vite): Next-generation frontend tooling with blazing-fast DX.
## License
GPL-3.0 License. See the [license file](../LICENSE) for details.

124
docs/README_es.md Normal file
View File

@@ -0,0 +1,124 @@
<h1 align="center">
<img src="../src-tauri/icons/icon.png" alt="Clash" width="128" />
<br>
Continuación de <a href="https://github.com/zzzgydi/clash-verge">Clash Verge</a>
<br>
</h1>
<h3 align="center">
Una interfaz gráfica para Clash Meta construida con <a href="https://github.com/tauri-apps/tauri">Tauri</a>.
</h3>
<p align="center">
Idiomas:
<a href="../README.md">简体中文</a> ·
<a href="./README_en.md">English</a> ·
<a href="./README_es.md">Español</a> ·
<a href="./README_ru.md">Русский</a> ·
<a href="./README_ja.md">日本語</a>
</p>
## Vista previa
| Oscuro | Claro |
| ----------------------------------- | ----------------------------------- |
| ![Vista oscura](./preview_dark.png) | ![Vista clara](./preview_light.png) |
## Instalación
Visita la [página de lanzamientos](https://github.com/clash-verge-rev/clash-verge-rev/releases) y descarga el instalador que corresponda a tu plataforma.<br>
Ofrecemos paquetes para Windows (x64/x86), Linux (x64/arm64) y macOS 10.15+ (Intel/Apple).
#### Cómo elegir el canal de lanzamiento
| Canal | Descripción | Enlace |
| :---------- | :----------------------------------------------------------------------------- | :------------------------------------------------------------------------------------- |
| Stable | Compilaciones oficiales de alta fiabilidad; ideales para el uso diario. | [Release](https://github.com/clash-verge-rev/clash-verge-rev/releases) |
| Alpha (EOL) | Compilaciones heredadas usadas para validar el flujo de publicación. | [Alpha](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/alpha) |
| AutoBuild | Compilaciones continuas para pruebas y retroalimentación. Espera cambios beta. | [AutoBuild](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/autobuild) |
#### Guías de instalación y preguntas frecuentes
Consulta la [documentación del proyecto](https://clash-verge-rev.github.io/) para encontrar los pasos de instalación, solución de problemas y preguntas frecuentes.
---
### Canal de Telegram
Únete a [@clash_verge_rev](https://t.me/clash_verge_re) para enterarte de las novedades.
## Promociones
#### [Doggygo VPN — Acelerador global orientado al rendimiento](https://verge.dginv.click/#/register?code=oaxsAGo6)
- Servicio internacional de alto rendimiento con prueba gratuita, planes con descuento, desbloqueo de streaming y soporte de protocolo Hysteria de primera clase.
- Regístrate mediante el enlace exclusivo de Clash Verge y obtén una prueba de 3 días con 1 GB de tráfico diario: [Regístrate](https://verge.dginv.click/#/register?code=oaxsAGo6)
- Cupón exclusivo de 20% de descuento para usuarios de Clash Verge: `verge20` (limitado a 500 usos)
- Plan promocional desde ¥15.8 al mes con 160 GB, más 20% de descuento adicional por pago anual
- Equipo ubicado en el extranjero para un servicio confiable, con hasta 50% de comisión compartida
- Clústeres balanceados con rutas dedicadas de alta velocidad (compatibles con clientes antiguos), latencia extremadamente baja, reproducción 4K sin interrupciones
- Primer proveedor global que soporta el protocolo `Hysteria2`, ideal para el cliente Clash Verge
- Desbloquea servicios de streaming y acceso a ChatGPT
- Sitio oficial: [https://狗狗加速.com](https://verge.dginv.click/#/register?code=oaxsAGo6)
#### Patrocinador de la infraestructura de compilación — [Servidores dedicados YXVM](https://yxvm.com/aff.php?aff=827)
Las compilaciones y lanzamientos del proyecto se ejecutan en servidores dedicados de YXVM, que proporcionan recursos premium, alto rendimiento y redes de alta velocidad. Si las descargas son rápidas y el uso es fluido, es gracias a este hardware robusto.
🧩 Ventajas de los servidores dedicados YXVM:
- 🌎 Rutas globales optimizadas para descargas significativamente más rápidas
- 🔧 Recursos bare-metal, en lugar de VPS compartidos, para obtener el máximo rendimiento
- 🧠 Ideales para proxys, alojamiento de sitios web/CDN, pipelines de CI/CD o cualquier carga elevada
- 💡 Listos para usar al instante, con múltiples centros de datos disponibles (incluidos CN2 e IEPL)
- 📦 La misma configuración utilizada por este proyecto está disponible para su compra
- 🎯 ¿Quieres el mismo entorno de compilación? [Solicita un servidor YXVM hoy](https://yxvm.com/aff.php?aff=827)
## Funciones
- Basado en Rust de alto rendimiento y en el framework Tauri 2
- Incluye el núcleo integrado [Clash.Meta (mihomo)](https://github.com/MetaCubeX/mihomo) y permite cambiar al canal `Alpha`
- Interfaz limpia y elegante con controles de color de tema, iconos de grupos proxy/bandeja y `CSS Injection`
- Gestión avanzada de perfiles (herramientas Merge y Script) con sugerencias de sintaxis para configuraciones
- Control del proxy del sistema, modo guardián y soporte para `TUN` (adaptador de red virtual)
- Editores visuales para nodos y reglas
- Copias de seguridad y sincronización mediante WebDAV
### Preguntas frecuentes
Visita la [página de FAQ](https://clash-verge-rev.github.io/faq/windows.html) para obtener instrucciones específicas por plataforma.
### Donaciones
[Apoya el desarrollo de Clash Verge Rev](https://github.com/sponsors/clash-verge-rev)
## Desarrollo
Consulta [CONTRIBUTING.md](../CONTRIBUTING.md) para conocer las pautas de contribución.
Después de instalar todos los requisitos de **Tauri**, ejecuta el entorno de desarrollo con:
```shell
pnpm i
pnpm run prebuild
pnpm dev
```
## Contribuciones
Se agradecen los issues y pull requests.
## Agradecimientos
Clash Verge Rev se basa en, o se inspira en, los siguientes proyectos:
- [zzzgydi/clash-verge](https://github.com/zzzgydi/clash-verge): Interfaz gráfica para Clash basada en Tauri. Compatible con Windows, macOS y Linux.
- [tauri-apps/tauri](https://github.com/tauri-apps/tauri): Construye aplicaciones de escritorio más pequeñas, rápidas y seguras con un frontend web.
- [Dreamacro/clash](https://github.com/Dreamacro/clash): Túnel basado en reglas escrito en Go.
- [MetaCubeX/mihomo](https://github.com/MetaCubeX/mihomo): Túnel basado en reglas escrito en Go.
- [Fndroid/clash_for_windows_pkg](https://github.com/Fndroid/clash_for_windows_pkg): Interfaz de Clash para Windows y macOS.
- [vitejs/vite](https://github.com/vitejs/vite): Herramientas de frontend de nueva generación con una experiencia rapidísima.
## Licencia
Licencia GPL-3.0. Consulta el [archivo de licencia](../LICENSE) para más detalles.

124
docs/README_ja.md Normal file
View File

@@ -0,0 +1,124 @@
<h1 align="center">
<img src="../src-tauri/icons/icon.png" alt="Clash" width="128" />
<br>
<a href="https://github.com/zzzgydi/clash-verge">Clash Verge</a> の継続プロジェクト
<br>
</h1>
<h3 align="center">
<a href="https://github.com/tauri-apps/tauri">Tauri</a> で構築された Clash Meta GUI。
</h3>
<p align="center">
言語:
<a href="../README.md">简体中文</a> ·
<a href="./README_en.md">English</a> ·
<a href="./README_es.md">Español</a> ·
<a href="./README_ru.md">Русский</a> ·
<a href="./README_ja.md">日本語</a>
</p>
## プレビュー
| ダーク | ライト |
| --------------------------------------- | ---------------------------------------- |
| ![ダークプレビュー](./preview_dark.png) | ![ライトプレビュー](./preview_light.png) |
## インストール
[リリースページ](https://github.com/clash-verge-rev/clash-verge-rev/releases) から、ご利用のプラットフォームに対応したインストーラーをダウンロードしてください。<br>
Windows (x64/x86)、Linux (x64/arm64)、macOS 10.15+ (Intel/Apple) をサポートしています。
#### リリースチャンネルの選び方
| チャンネル | 説明 | リンク |
| :---------- | :--------------------------------------------------------------- | :------------------------------------------------------------------------------------- |
| Stable | 安定版。信頼性が高く、日常利用に最適です。 | [Release](https://github.com/clash-verge-rev/clash-verge-rev/releases) |
| Alpha (EOL) | 公開フローの検証に使用した旧テスト版。 | [Alpha](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/alpha) |
| AutoBuild | 継続的に更新されるテスト版。フィードバックや新機能検証向けです。 | [AutoBuild](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/autobuild) |
#### インストール手順と FAQ
詳しい導入手順やトラブルシュートは [ドキュメントサイト](https://clash-verge-rev.github.io/) を参照してください。
---
### Telegram チャンネル
更新情報は [@clash_verge_rev](https://t.me/clash_verge_re) をフォローしてください。
## プロモーション
#### [Doggygo VPN — 高性能グローバルアクセラレータ](https://verge.dginv.click/#/register?code=oaxsAGo6)
- 無料トライアル、割引プラン、ストリーミング解放、世界初の Hysteria プロトコル対応を備えた高性能海外ネットワークサービス。
- Clash Verge 専用リンクから登録すると、3 日間・1 日 1 GB の無料体験が利用できます。 [登録はこちら](https://verge.dginv.click/#/register?code=oaxsAGo6)
- Clash Verge 利用者限定 20% オフクーポン: `verge20`(先着 500 名)
- 月額 15.8 元で 160 GB を利用できるプラン、年額契約ならさらに 20% オフ
- 海外チーム運営による高信頼サービス、収益シェアは最大 50%
- 負荷分散クラスタと高速専用回線(旧クライアント互換)、極低レイテンシで 4K も快適
- 世界初の `Hysteria2` プロトコル対応。Clash Verge クライアントとの相性抜群
- ストリーミングおよび ChatGPT の利用にも対応
- 公式サイト: [https://狗狗加速.com](https://verge.dginv.click/#/register?code=oaxsAGo6)
#### ビルド環境スポンサー — [YXVM 専用サーバー](https://yxvm.com/aff.php?aff=827)
本プロジェクトのビルドとリリースは、YXVM の専用サーバーによって支えられています。高速ダウンロードや快適な操作性は、強力なハードウェアがあってこそです。
🧩 YXVM 専用サーバーの特長:
- 🌎 最適化されたグローバル回線で圧倒的なダウンロード速度
- 🔧 VPS とは異なるベアメタル資源で最高性能を発揮
- 🧠 プロキシ運用、Web/CDN ホスティング、CI/CD など高負荷ワークロードに最適
- 💡 複数データセンターから即時利用可能。CN2 や IEPL も選択可
- 📦 本プロジェクトが使用している構成も販売中。同じ環境を入手できます
- 🎯 同じビルド体験をしたい方は [今すぐ YXVM サーバーを注文](https://yxvm.com/aff.php?aff=827)
## 機能
- 高性能な Rust と Tauri 2 フレームワークに基づくデスクトップアプリ
- 組み込みの [Clash.Meta (mihomo)](https://github.com/MetaCubeX/mihomo) コアを搭載し、`Alpha` チャンネルへの切り替えも可能
- テーマカラーやプロキシグループ/トレイアイコン、`CSS Injection` をカスタマイズできる洗練された UI
- 設定ファイルの管理および拡張(Merge・Script 支援)、構成シンタックスヒントを提供
- システムプロキシ制御、ガード機能、`TUN`(仮想ネットワークアダプタ)モード
- ノードとルールのビジュアルエディタ
- WebDAV による設定のバックアップと同期
### FAQ
プラットフォーム別の案内は [FAQ ページ](https://clash-verge-rev.github.io/faq/windows.html) を参照してください。
### 寄付
[Clash Verge Rev の開発を支援する](https://github.com/sponsors/clash-verge-rev)
## 開発
詳細な貢献ガイドは [CONTRIBUTING.md](../CONTRIBUTING.md) をご覧ください。
**Tauri** の前提条件を整えたら、以下のコマンドで開発サーバーを起動できます:
```shell
pnpm i
pnpm run prebuild
pnpm dev
```
## コントリビューション
Issue や Pull Request を歓迎します。
## 謝辞
Clash Verge Rev は、以下のプロジェクトに影響を受けています。
- [zzzgydi/clash-verge](https://github.com/zzzgydi/clash-verge): Tauri ベースの Clash GUI。Windows / macOS / Linux に対応。
- [tauri-apps/tauri](https://github.com/tauri-apps/tauri): Web フロントエンドで小型・高速・安全なデスクトップアプリを構築するためのフレームワーク。
- [Dreamacro/clash](https://github.com/Dreamacro/clash): Go 製のルールベーストンネル。
- [MetaCubeX/mihomo](https://github.com/MetaCubeX/mihomo): Go 製のルールベーストンネル。
- [Fndroid/clash_for_windows_pkg](https://github.com/Fndroid/clash_for_windows_pkg): Windows / macOS 向けの Clash GUI。
- [vitejs/vite](https://github.com/vitejs/vite): 次世代のフロントエンドツール群。高速な開発体験を提供。
## ライセンス
GPL-3.0 ライセンス。詳細は [LICENSE](../LICENSE) を参照してください。

120
docs/README_ru.md Normal file
View File

@@ -0,0 +1,120 @@
<h1 align="center">
<img src="../src-tauri/icons/icon.png" alt="Clash" width="128" />
<br>
Continuation of <a href="https://github.com/zzzgydi/clash-verge">Clash Verge</a>
<br>
</h1>
<h3 align="center">
Clash Meta GUI базируется на <a href="https://github.com/tauri-apps/tauri">Tauri</a>.
</h3>
<p align="center">
Языки:
<a href="../README.md">简体中文</a> ·
<a href="./README_en.md">English</a> ·
<a href="./README_es.md">Español</a> ·
<a href="./README_ru.md">Русский</a> ·
<a href="./README_ja.md">日本語</a>
</p>
## Предпросмотр
| Тёмная тема | Светлая тема |
| ---------------------------------- | ------------------------------------ |
| ![Тёмная тема](./preview_dark.png) | ![Светлая тема](./preview_light.png) |
## Установка
Пожалуйста, перейдите на страницу релизов, чтобы скачать соответствующий установочный пакет: [Страница релизов](https://github.com/clash-verge-rev/clash-verge-rev/releases)<br>
Перейдите на [Страницу релизов](https://github.com/clash-verge-rev/clash-verge-rev/releases), чтобы скачать соответствующий установочный пакет<br>
Поддержка Windows (x64/x86), Linux (x64/arm64) и macOS 10.15+ (intel/apple).
#### Как выбрать дистрибутив?
| Версия | Характеристики | Ссылка |
| :-------------------- | :------------------------------------------------------------------------------------------------------ | :------------------------------------------------------------------------------------- |
| Stable | Официальный релиз, высокая надежность, подходит для повседневного использования. | [Release](https://github.com/clash-verge-rev/clash-verge-rev/releases) |
| Alpha(неиспользуемый) | Тестирование процесса публикации. | [Alpha](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/alpha) |
| AutoBuild | Версия с постоянным обновлением, подходящая для тестирования и обратной связи. Может содержать дефекты. | [AutoBuild](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/autobuild) |
#### Инструкции по установке и ответы на часто задаваемые вопросы можно найти на [странице документации](https://clash-verge-rev.github.io/)
---
### TG канал: [@clash_verge_rev](https://t.me/clash_verge_re)
## Продвижение
#### [Doggygo VPN —— технический VPN-сервис (айрпорт)](https://verge.dginv.click/#/register?code=oaxsAGo6)
- Высокопроизводительный иностранный VPN-сервис (айрпорт) с бесплатным пробным периодом, выгодными тарифами, возможностью разблокировки потокового ТВ и первым в мире поддержкой протокола Hysteria.
- Зарегистрируйтесь по эксклюзивной ссылке Clash Verge и получите 3 дня бесплатного использования, 1 Гб трафика в день: [регистрация](https://verge.dginv.click/#/register?code=oaxsAGo6)
- Эксклюзивный промо-код на скидку 20% для Clash Verge: verge20 (только 500 штук)
- Специальный тарифный план всего за 15,8 юаней в месяц, 160 Гб трафика, скидка 20% при оплате за год
- Команда за рубежом, без риска побега, до 50% кэшбэка
- Архитектура с балансировкой нагрузки, высокоскоростная выделенная линия (совместима со старыми клиентами), чрезвычайно низкая задержка, без проблем в часы пик, 4K видео загружается мгновенно
- Первый в мире VPN-сервис (айрпорт), поддерживающий протокол Hysteria, теперь доступен более быстрый протокол `Hysteria2` (лучшее сочетание с клиентом Clash Verge)
- Разблокировка потоковые сервисы и ChatGPT
- Официальный сайт: [https://狗狗加速.com](https://verge.dginv.click/#/register?code=oaxsAGo6)
#### Среда сборки и публикации этого проекта полностью поддерживается выделенным сервером [YXVM](https://yxvm.com/aff.php?aff=827)
Благодарим вас за предоставление надежной бэкэнд-среды с эксклюзивными ресурсами, высокой производительностью и высокоскоростной сетью. Если вы считаете, что загрузка файлов происходит достаточно быстро, а использование — достаточно плавно, то это потому, что мы используем серверы высшего уровня!
🧩 Преимущества выделенного сервера YXVM:
- 🌎 Премиум-сеть с оптимизацией обратного пути для молниеносной скорости загрузки
- 🔧 Выделенные физические серверные ресурсы, не имеющие аналогов среди VPS, обеспечивающие максимальную производительность
- 🧠 Идеально подходит для прокси, хостинга веб-сайтов/CDN-сайтов, рабочих процессов CI/CD или любых приложений с высокой нагрузкой
- 💡 Поддержка использования сразу после включения, выбор нескольких дата-центров, CN2 / IEPL на выбор
- 📦 Эта конфигурация в настоящее время доступна для покупки — не стесняйтесь заказывать ту же модель!
- 🎯 Хотите попробовать такую же сборку? [Закажите выделенный сервер YXVM прямо сейчас!](https://yxvm.com/aff.php?aff=827)
## Фичи
- Основан на производительном Rust и фреймворке Tauri 2
- Имеет встроенное ядро [Clash.Meta(mihomo)](https://github.com/MetaCubeX/mihomo) и поддерживает переключение на ядро версии `Alpha`.
- Чистый и эстетичный пользовательский интерфейс, поддержка настраиваемых цветов темы, значков прокси-группы/системного трея и `CSS Injection`
- Управление и расширение конфигурационными файлами (Merge и Script), подсказки по синтаксису конфигурационных файлов.
- Режим системного прокси и защита, `TUN (Tunneled Network Interface)` режим.
- Визуальное редактирование узлов и правил
- Резервное копирование и синхронизация конфигурации WebDAV
### FAQ
Смотрите [Страница часто задаваемых вопросов](https://clash-verge-rev.github.io/faq/windows.html)
### Донат
[Поддержите развитие Clash Verge Rev](https://github.com/sponsors/clash-verge-rev)
## Разработка
Дополнительные сведения смотреть в файле [CONTRIBUTING.md](../CONTRIBUTING.md).
Для запуска сервера разработки выполните следующие команды после установки всех необходимых компонентов для **Tauri**:
```shell
pnpm i
pnpm run prebuild
pnpm dev
```
## Вклад
Обращения и запросы на PR приветствуются!
## Благодарность
Clash Verge rev был основан на этих проектах или вдохновлен ими, и так далее:
- [zzzgydi/clash-verge](https://github.com/zzzgydi/clash-verge): Графический интерфейс Clash на основе tauri. Поддерживает Windows, macOS и Linux.
- [tauri-apps/tauri](https://github.com/tauri-apps/tauri): Создавайте более компактные, быстрые и безопасные настольные приложения с веб-интерфейсом.
- [Dreamacro/clash](https://github.com/Dreamacro/clash): Правило-ориентированный туннель на Go.
- [MetaCubeX/mihomo](https://github.com/MetaCubeX/mihomo): Правило-ориентированный туннель на Go.
- [Fndroid/clash_for_windows_pkg](https://github.com/Fndroid/clash_for_windows_pkg): Графический интерфейс пользователя для Windows/macOS на основе Clash.
- [vitejs/vite](https://github.com/vitejs/vite): Инструменты нового поколения для фронтенда. Они быстрые!
## Лицензия
GPL-3.0 License. Подробности смотрите в [Лицензии](../LICENSE).

135
eslint.config.ts Normal file
View File

@@ -0,0 +1,135 @@
import eslintJS from "@eslint/js";
import eslintReact from "@eslint-react/eslint-plugin";
import { defineConfig } from "eslint/config";
import configPrettier from "eslint-config-prettier";
import { createTypeScriptImportResolver } from "eslint-import-resolver-typescript";
import pluginImportX from "eslint-plugin-import-x";
import pluginPrettier from "eslint-plugin-prettier";
import pluginReactHooks from "eslint-plugin-react-hooks";
import pluginReactRefresh from "eslint-plugin-react-refresh";
import pluginUnusedImports from "eslint-plugin-unused-imports";
import globals from "globals";
import tseslint from "typescript-eslint";
// ESLint flat config for the frontend sources.
// NOTE: the order of `extends` entries and the exact rule values below are
// load-bearing — later entries override earlier ones — so they must not be
// reordered casually.
export default defineConfig([
  {
    // Lint all JS/TS source flavors (plain, module, CommonJS, and JSX/TSX).
    files: ["**/*.{js,mjs,cjs,ts,mts,cts,jsx,tsx}"],
    plugins: {
      js: eslintJS,
      "react-hooks": pluginReactHooks,
      // @ts-expect-error -- https://github.com/un-ts/eslint-plugin-import-x/issues/421
      "import-x": pluginImportX,
      "react-refresh": pluginReactRefresh,
      "unused-imports": pluginUnusedImports,
      prettier: pluginPrettier,
    },
    extends: [
      // Base recommended sets; `configPrettier` comes last so it can disable
      // any formatting rules that would conflict with Prettier.
      eslintJS.configs.recommended,
      tseslint.configs.recommended,
      eslintReact.configs["recommended-typescript"],
      configPrettier,
    ],
    languageOptions: {
      // Browser globals (window, document, …) — this is a web frontend.
      globals: globals.browser,
    },
    settings: {
      react: {
        // Detect the installed React version instead of pinning one here.
        version: "detect",
      },
      // Resolve import paths via tsconfig (path aliases, extensionless
      // imports) so `import-x/no-unresolved` works for TS sources.
      "import-x/resolver-next": [
        createTypeScriptImportResolver({
          project: "./tsconfig.json",
        }),
      ],
    },
    rules: {
      // React
      "react-hooks/rules-of-hooks": "error",
      "react-hooks/exhaustive-deps": "error",
      "react-refresh/only-export-components": [
        "warn",
        { allowConstantExport: true },
      ],
      "@eslint-react/no-forward-ref": "off",
      // React performance and production quality rules
      "@eslint-react/no-array-index-key": "warn",
      "@eslint-react/no-children-count": "error",
      "@eslint-react/no-children-for-each": "error",
      "@eslint-react/no-children-map": "error",
      "@eslint-react/no-children-only": "error",
      "@eslint-react/no-children-prop": "error",
      "@eslint-react/no-children-to-array": "error",
      "@eslint-react/no-class-component": "error",
      "@eslint-react/no-clone-element": "error",
      "@eslint-react/no-create-ref": "error",
      "@eslint-react/no-default-props": "error",
      "@eslint-react/no-direct-mutation-state": "error",
      "@eslint-react/no-implicit-key": "error",
      "@eslint-react/no-prop-types": "error",
      "@eslint-react/no-set-state-in-component-did-mount": "error",
      "@eslint-react/no-set-state-in-component-did-update": "error",
      "@eslint-react/no-set-state-in-component-will-update": "error",
      "@eslint-react/no-string-refs": "error",
      "@eslint-react/no-unstable-context-value": "warn",
      "@eslint-react/no-unstable-default-props": "warn",
      "@eslint-react/no-unused-class-component-members": "error",
      "@eslint-react/no-unused-state": "error",
      "@eslint-react/no-useless-fragment": "warn",
      "@eslint-react/prefer-destructuring-assignment": "warn",
      // TypeScript
      "@typescript-eslint/no-explicit-any": "off",
      // unused-imports replaces no-unused-vars: the base rule is disabled so
      // the plugin can both report AND auto-remove dead imports on --fix.
      "@typescript-eslint/no-unused-vars": "off",
      "unused-imports/no-unused-imports": "error",
      "unused-imports/no-unused-vars": [
        "warn",
        {
          vars: "all",
          // `_`-prefixed bindings are deliberately unused (convention).
          varsIgnorePattern: "^_",
          args: "after-used",
          argsIgnorePattern: "^_",
          // NOTE(review): caught errors are ignored when named `ignore…`,
          // not `_…` — presumably intentional; confirm against usage.
          caughtErrorsIgnorePattern: "^ignore",
        },
      ],
      // Import
      "import-x/no-unresolved": "error",
      // Enforce a stable, grouped, alphabetized import order for small diffs.
      "import-x/order": [
        "warn",
        {
          groups: [
            "builtin",
            "external",
            "internal",
            "parent",
            "sibling",
            "index",
          ],
          "newlines-between": "always",
          alphabetize: {
            order: "asc",
            caseInsensitive: true,
          },
        },
      ],
      // Other common rules
      "prefer-const": "warn",
      "no-case-declarations": "error",
      "no-fallthrough": "error",
      "no-empty": ["warn", { allowEmptyCatch: true }],
      // Surface Prettier formatting issues as lint warnings
      "prettier/prettier": "warn",
    },
  },
]);

View File

@@ -1,10 +1,13 @@
{ {
"name": "clash-verge", "name": "clash-verge",
"version": "2.3.2", "version": "2.4.3",
"license": "GPL-3.0-only", "license": "GPL-3.0-only",
"scripts": { "scripts": {
"dev": "cross-env RUST_BACKTRACE=1 tauri dev -f verge-dev", "prepare": "husky || true",
"dev:diff": "cross-env RUST_BACKTRACE=1 tauri dev -f verge-dev", "dev": "cross-env RUST_BACKTRACE=full tauri dev -f verge-dev",
"dev:diff": "cross-env RUST_BACKTRACE=full tauri dev -f verge-dev",
"dev:trace": "cross-env RUST_BACKTRACE=full RUSTFLAGS=\"--cfg tokio_unstable\" tauri dev -f verge-dev tokio-trace",
"dev:tauri": "cross-env RUST_BACKTRACE=full tauri dev -f tauri-dev",
"build": "cross-env NODE_OPTIONS='--max-old-space-size=4096' tauri build", "build": "cross-env NODE_OPTIONS='--max-old-space-size=4096' tauri build",
"build:fast": "cross-env NODE_OPTIONS='--max-old-space-size=4096' tauri build -- --profile fast-release", "build:fast": "cross-env NODE_OPTIONS='--max-old-space-size=4096' tauri build -- --profile fast-release",
"tauri": "tauri", "tauri": "tauri",
@@ -18,11 +21,17 @@
"portable-fixed-webview2": "node scripts/portable-fixed-webview2.mjs", "portable-fixed-webview2": "node scripts/portable-fixed-webview2.mjs",
"fix-alpha-version": "node scripts/fix-alpha_version.mjs", "fix-alpha-version": "node scripts/fix-alpha_version.mjs",
"release-version": "node scripts/release-version.mjs", "release-version": "node scripts/release-version.mjs",
"release:autobuild": "pnpm release-version autobuild",
"release:deploytest": "pnpm release-version deploytest",
"publish-version": "node scripts/publish-version.mjs", "publish-version": "node scripts/publish-version.mjs",
"fmt": "cargo fmt --manifest-path ./src-tauri/Cargo.toml", "fmt": "cargo fmt --manifest-path ./src-tauri/Cargo.toml",
"clippy": "cargo clippy --manifest-path ./src-tauri/Cargo.toml", "clippy": "cargo clippy --all-features --all-targets --manifest-path ./src-tauri/Cargo.toml",
"lint": "eslint -c eslint.config.ts --max-warnings=0 --cache --cache-location .eslintcache src",
"lint:fix": "eslint -c eslint.config.ts --max-warnings=0 --cache --cache-location .eslintcache --fix src",
"format": "prettier --write .", "format": "prettier --write .",
"format:check": "prettier --check ." "format:check": "prettier --check .",
"typecheck": "tsc --noEmit",
"test": "vitest run"
}, },
"dependencies": { "dependencies": {
"@dnd-kit/core": "^6.3.1", "@dnd-kit/core": "^6.3.1",
@@ -31,74 +40,96 @@
"@emotion/react": "^11.14.0", "@emotion/react": "^11.14.0",
"@emotion/styled": "^11.14.1", "@emotion/styled": "^11.14.1",
"@juggle/resize-observer": "^3.4.0", "@juggle/resize-observer": "^3.4.0",
"@mui/icons-material": "^7.1.2", "@mui/icons-material": "^7.3.4",
"@mui/lab": "7.0.0-beta.14", "@mui/lab": "7.0.0-beta.17",
"@mui/material": "^7.1.2", "@mui/material": "^7.3.4",
"@mui/x-data-grid": "^8.6.0", "@mui/x-data-grid": "^8.16.0",
"@tauri-apps/api": "2.6.0", "@tauri-apps/api": "2.9.0",
"@tauri-apps/plugin-clipboard-manager": "^2.3.0", "@tauri-apps/plugin-clipboard-manager": "^2.3.2",
"@tauri-apps/plugin-dialog": "^2.3.0", "@tauri-apps/plugin-dialog": "^2.4.2",
"@tauri-apps/plugin-fs": "^2.4.0", "@tauri-apps/plugin-fs": "^2.4.4",
"@tauri-apps/plugin-global-shortcut": "^2.3.0", "@tauri-apps/plugin-http": "~2.5.4",
"@tauri-apps/plugin-notification": "^2.3.0", "@tauri-apps/plugin-process": "^2.3.1",
"@tauri-apps/plugin-process": "^2.3.0", "@tauri-apps/plugin-shell": "2.3.3",
"@tauri-apps/plugin-shell": "2.3.0",
"@tauri-apps/plugin-updater": "2.9.0", "@tauri-apps/plugin-updater": "2.9.0",
"@tauri-apps/plugin-window-state": "^2.3.0",
"@types/json-schema": "^7.0.15", "@types/json-schema": "^7.0.15",
"ahooks": "^3.8.5", "ahooks": "^3.9.6",
"axios": "^1.10.0", "axios": "^1.13.1",
"chart.js": "^4.5.0", "dayjs": "1.11.19",
"cli-color": "^2.0.4",
"dayjs": "1.11.13",
"foxact": "^0.2.49", "foxact": "^0.2.49",
"glob": "^11.0.3", "i18next": "^25.6.0",
"i18next": "^25.2.1",
"js-yaml": "^4.1.0", "js-yaml": "^4.1.0",
"json-schema": "^0.4.0", "json-schema": "^0.4.0",
"lodash-es": "^4.17.21", "lodash-es": "^4.17.21",
"monaco-editor": "^0.52.2", "monaco-editor": "^0.54.0",
"monaco-yaml": "^5.4.0", "monaco-yaml": "^5.4.0",
"nanoid": "^5.1.5", "nanoid": "^5.1.6",
"react": "19.1.0", "react": "19.2.0",
"react-chartjs-2": "^5.3.0", "react-dom": "19.2.0",
"react-dom": "19.1.0",
"react-error-boundary": "6.0.0", "react-error-boundary": "6.0.0",
"react-hook-form": "^7.58.1", "react-hook-form": "^7.66.0",
"react-i18next": "15.5.3", "react-i18next": "16.2.3",
"react-markdown": "10.1.0", "react-markdown": "10.1.0",
"react-monaco-editor": "0.58.0", "react-monaco-editor": "0.59.0",
"react-router-dom": "7.6.2", "react-router": "^7.9.5",
"react-virtuoso": "^4.13.0", "react-virtuoso": "^4.14.1",
"sockette": "^2.0.6", "swr": "^2.3.6",
"swr": "^2.3.3", "tauri-plugin-mihomo-api": "git+https://github.com/clash-verge-rev/tauri-plugin-mihomo",
"tar": "^7.4.3", "types-pac": "^1.0.3"
"types-pac": "^1.0.3",
"zustand": "^5.0.6"
}, },
"devDependencies": { "devDependencies": {
"@actions/github": "^6.0.1", "@actions/github": "^6.0.1",
"@tauri-apps/cli": "2.6.1", "@eslint-react/eslint-plugin": "^2.2.4",
"@eslint/js": "^9.39.0",
"@tauri-apps/cli": "2.9.2",
"@types/js-yaml": "^4.0.9", "@types/js-yaml": "^4.0.9",
"@types/lodash-es": "^4.17.12", "@types/lodash-es": "^4.17.12",
"@types/react": "19.1.8", "@types/node": "^24.9.2",
"@types/react-dom": "19.1.6", "@types/react": "19.2.2",
"@vitejs/plugin-legacy": "^7.0.0", "@types/react-dom": "19.2.2",
"@vitejs/plugin-react": "4.6.0", "@vitejs/plugin-legacy": "^7.2.1",
"@vitejs/plugin-react-swc": "^4.2.0",
"adm-zip": "^0.5.16", "adm-zip": "^0.5.16",
"commander": "^14.0.0", "cli-color": "^2.0.4",
"cross-env": "^7.0.3", "commander": "^14.0.2",
"cross-env": "^10.1.0",
"eslint": "^9.39.0",
"eslint-config-prettier": "^10.1.8",
"eslint-import-resolver-typescript": "^4.4.4",
"eslint-plugin-import-x": "^4.16.1",
"eslint-plugin-prettier": "^5.5.4",
"eslint-plugin-react-hooks": "^7.0.1",
"eslint-plugin-react-refresh": "^0.4.24",
"eslint-plugin-unused-imports": "^4.3.0",
"glob": "^11.0.3",
"globals": "^16.5.0",
"https-proxy-agent": "^7.0.6", "https-proxy-agent": "^7.0.6",
"meta-json-schema": "^1.19.11", "husky": "^9.1.7",
"jiti": "^2.6.1",
"lint-staged": "^16.2.6",
"meta-json-schema": "^1.19.14",
"node-fetch": "^3.3.2", "node-fetch": "^3.3.2",
"prettier": "^3.6.2", "prettier": "^3.6.2",
"prettier-plugin-organize-imports": "^4.1.0", "sass": "^1.93.3",
"sass": "^1.89.2", "tar": "^7.5.2",
"terser": "^5.43.1", "terser": "^5.44.0",
"typescript": "^5.8.3", "typescript": "^5.9.3",
"vite": "^7.0.0", "typescript-eslint": "^8.46.2",
"vite-plugin-monaco-editor": "^1.1.0", "vite": "^7.1.12",
"vite-plugin-svgr": "^4.3.0" "vite-plugin-monaco-editor-esm": "^2.0.2",
"vite-plugin-svgr": "^4.5.0",
"vitest": "^4.0.6"
},
"lint-staged": {
"*.{ts,tsx,js,jsx}": [
"eslint --fix --max-warnings=0",
"prettier --write",
"git add"
],
"*.{css,scss,json,md}": [
"prettier --write",
"git add"
]
}, },
"type": "module", "type": "module",
"packageManager": "pnpm@9.13.2" "packageManager": "pnpm@9.13.2"

5606
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,5 +1,5 @@
{ {
"extends": ["config:recommended"], "extends": ["config:recommended", ":disableDependencyDashboard"],
"baseBranches": ["dev"], "baseBranches": ["dev"],
"enabledManagers": ["cargo", "npm"], "enabledManagers": ["cargo", "npm"],
"labels": ["dependencies"], "labels": ["dependencies"],
@@ -35,8 +35,13 @@
"description": "Group all npm dependencies into a single PR", "description": "Group all npm dependencies into a single PR",
"matchManagers": ["npm"], "matchManagers": ["npm"],
"groupName": "npm dependencies" "groupName": "npm dependencies"
},
{
"description": "Group all GitHub Actions updates into a single PR",
"matchManagers": ["github-actions"],
"groupName": "github actions"
} }
], ],
"postUpdateOptions": ["pnpmDedupe"], "postUpdateOptions": ["pnpmDedupe", "updateCargoLock"],
"ignoreDeps": ["serde_yaml"] "ignoreDeps": ["criterion"]
} }

View File

@@ -0,0 +1,56 @@
#!/bin/bash
# 获取最近一个和 Tauri 相关的改动的 commit hash
# This script finds the latest commit that modified Tauri-related files
# Tauri 相关文件的模式
TAURI_PATTERNS=(
"src-tauri/"
"Cargo.toml"
"Cargo.lock"
"tauri.*.conf.json"
"package.json"
"pnpm-lock.yaml"
"src/"
)
# 排除的文件模式build artifacts 等)
EXCLUDE_PATTERNS=(
"src-tauri/target/"
"src-tauri/gen/"
"*.log"
"*.tmp"
"node_modules/"
".git/"
)
# 构建 git log 的路径过滤参数
PATHS=""
for pattern in "${TAURI_PATTERNS[@]}"; do
if [[ -e "$pattern" ]]; then
PATHS="$PATHS $pattern"
fi
done
# 如果没有找到相关路径,返回错误
if [[ -z "$PATHS" ]]; then
echo "Error: No Tauri-related paths found in current directory" >&2
exit 1
fi
# 获取最新的 commit hash
# 使用 git log 查找最近修改了 Tauri 相关文件的提交
LATEST_COMMIT=$(git log --format="%H" -n 1 -- $PATHS)
# 验证是否找到了 commit
if [[ -z "$LATEST_COMMIT" ]]; then
echo "Error: No commits found for Tauri-related files" >&2
exit 1
fi
# 输出结果
echo "$LATEST_COMMIT"
# 如果需要更多信息,可以取消注释以下行
# echo "Latest Tauri-related commit: $LATEST_COMMIT"
# git show --stat --oneline "$LATEST_COMMIT"

View File

@@ -1,18 +1,30 @@
import AdmZip from "adm-zip";
import { execSync } from "child_process";
import { createHash } from "crypto";
import fs from "fs"; import fs from "fs";
import fsp from "fs/promises"; import fsp from "fs/promises";
import zlib from "zlib";
import { extract } from "tar";
import path from "path";
import AdmZip from "adm-zip";
import fetch from "node-fetch";
import { HttpsProxyAgent } from "https-proxy-agent";
import { execSync } from "child_process";
import { log_info, log_debug, log_error, log_success } from "./utils.mjs";
import { glob } from "glob"; import { glob } from "glob";
import { HttpsProxyAgent } from "https-proxy-agent";
import fetch from "node-fetch";
import path from "path";
import { extract } from "tar";
import zlib from "zlib";
import { log_debug, log_error, log_info, log_success } from "./utils.mjs";
/**
* Prebuild script with optimization features:
* 1. Skip downloading mihomo core if it already exists (unless --force is used)
* 2. Cache version information for 1 hour to avoid repeated version checks
* 3. Use file hash to detect changes and skip unnecessary chmod/copy operations
* 4. Use --force or -f flag to force re-download and update all resources
*
*/
const cwd = process.cwd(); const cwd = process.cwd();
const TEMP_DIR = path.join(cwd, "node_modules/.verge"); const TEMP_DIR = path.join(cwd, "node_modules/.verge");
const FORCE = process.argv.includes("--force"); const FORCE = process.argv.includes("--force") || process.argv.includes("-f");
const VERSION_CACHE_FILE = path.join(TEMP_DIR, ".version_cache.json");
const HASH_CACHE_FILE = path.join(TEMP_DIR, ".hash_cache.json");
const PLATFORM_MAP = { const PLATFORM_MAP = {
"x86_64-pc-windows-msvc": "win32", "x86_64-pc-windows-msvc": "win32",
@@ -43,7 +55,7 @@ const ARCH_MAP = {
const arg1 = process.argv.slice(2)[0]; const arg1 = process.argv.slice(2)[0];
const arg2 = process.argv.slice(2)[1]; const arg2 = process.argv.slice(2)[1];
const target = arg1 === "--force" ? arg2 : arg1; let target = arg1 === "--force" || arg1 === "-f" ? arg2 : arg1;
const { platform, arch } = target const { platform, arch } = target
? { platform: PLATFORM_MAP[target], arch: ARCH_MAP[target] } ? { platform: PLATFORM_MAP[target], arch: ARCH_MAP[target] }
: process; : process;
@@ -54,66 +66,120 @@ const SIDECAR_HOST = target
.toString() .toString()
.match(/(?<=host: ).+(?=\s*)/g)[0]; .match(/(?<=host: ).+(?=\s*)/g)[0];
/* ======= clash meta alpha======= */ // =======================
// Version Cache
// =======================
async function loadVersionCache() {
try {
if (fs.existsSync(VERSION_CACHE_FILE)) {
const data = await fsp.readFile(VERSION_CACHE_FILE, "utf-8");
return JSON.parse(data);
}
} catch (err) {
log_debug("Failed to load version cache:", err.message);
}
return {};
}
async function saveVersionCache(cache) {
try {
await fsp.mkdir(TEMP_DIR, { recursive: true });
await fsp.writeFile(VERSION_CACHE_FILE, JSON.stringify(cache, null, 2));
log_debug("Version cache saved");
} catch (err) {
log_debug("Failed to save version cache:", err.message);
}
}
async function getCachedVersion(key) {
const cache = await loadVersionCache();
const cached = cache[key];
if (cached && Date.now() - cached.timestamp < 3600000) {
log_info(`Using cached version for ${key}: ${cached.version}`);
return cached.version;
}
return null;
}
async function setCachedVersion(key, version) {
const cache = await loadVersionCache();
cache[key] = { version, timestamp: Date.now() };
await saveVersionCache(cache);
}
// =======================
// Hash Cache & File Hash
// =======================
async function calculateFileHash(filePath) {
try {
const fileBuffer = await fsp.readFile(filePath);
const hashSum = createHash("sha256");
hashSum.update(fileBuffer);
return hashSum.digest("hex");
} catch (err) {
return null;
}
}
async function loadHashCache() {
try {
if (fs.existsSync(HASH_CACHE_FILE)) {
const data = await fsp.readFile(HASH_CACHE_FILE, "utf-8");
return JSON.parse(data);
}
} catch (err) {
log_debug("Failed to load hash cache:", err.message);
}
return {};
}
async function saveHashCache(cache) {
try {
await fsp.mkdir(TEMP_DIR, { recursive: true });
await fsp.writeFile(HASH_CACHE_FILE, JSON.stringify(cache, null, 2));
log_debug("Hash cache saved");
} catch (err) {
log_debug("Failed to save hash cache:", err.message);
}
}
async function hasFileChanged(filePath, targetPath) {
if (FORCE) return true;
if (!fs.existsSync(targetPath)) return true;
const hashCache = await loadHashCache();
const sourceHash = await calculateFileHash(filePath);
const targetHash = await calculateFileHash(targetPath);
if (!sourceHash || !targetHash) return true;
const cacheKey = targetPath;
const cachedHash = hashCache[cacheKey];
if (cachedHash === sourceHash && sourceHash === targetHash) {
return false;
}
return true;
}
async function updateHashCache(targetPath) {
const hashCache = await loadHashCache();
const hash = await calculateFileHash(targetPath);
if (hash) {
hashCache[targetPath] = hash;
await saveHashCache(hashCache);
}
}
// =======================
// Meta maps (stable & alpha)
// =======================
const META_ALPHA_VERSION_URL = const META_ALPHA_VERSION_URL =
"https://github.com/MetaCubeX/mihomo/releases/download/Prerelease-Alpha/version.txt"; "https://github.com/MetaCubeX/mihomo/releases/download/Prerelease-Alpha/version.txt";
const META_ALPHA_URL_PREFIX = `https://github.com/MetaCubeX/mihomo/releases/download/Prerelease-Alpha`; const META_ALPHA_URL_PREFIX = `https://github.com/MetaCubeX/mihomo/releases/download/Prerelease-Alpha`;
let META_ALPHA_VERSION; let META_ALPHA_VERSION;
const META_ALPHA_MAP = {
"win32-x64": "mihomo-windows-amd64-compatible",
"win32-ia32": "mihomo-windows-386",
"win32-arm64": "mihomo-windows-arm64",
"darwin-x64": "mihomo-darwin-amd64-compatible",
"darwin-arm64": "mihomo-darwin-arm64",
"linux-x64": "mihomo-linux-amd64-compatible",
"linux-ia32": "mihomo-linux-386",
"linux-arm64": "mihomo-linux-arm64",
"linux-arm": "mihomo-linux-armv7",
"linux-riscv64": "mihomo-linux-riscv64",
"linux-loong64": "mihomo-linux-loong64",
};
// Fetch the latest alpha release version from the version.txt file
async function getLatestAlphaVersion() {
const options = {};
const httpProxy =
process.env.HTTP_PROXY ||
process.env.http_proxy ||
process.env.HTTPS_PROXY ||
process.env.https_proxy;
if (httpProxy) {
options.agent = new HttpsProxyAgent(httpProxy);
}
try {
const response = await fetch(META_ALPHA_VERSION_URL, {
...options,
method: "GET",
});
let v = await response.text();
META_ALPHA_VERSION = v.trim(); // Trim to remove extra whitespaces
log_info(`Latest alpha version: ${META_ALPHA_VERSION}`);
} catch (error) {
log_error("Error fetching latest alpha version:", error.message);
process.exit(1);
}
}
/* ======= clash meta stable ======= */
const META_VERSION_URL = const META_VERSION_URL =
"https://github.com/MetaCubeX/mihomo/releases/latest/download/version.txt"; "https://github.com/MetaCubeX/mihomo/releases/latest/download/version.txt";
const META_URL_PREFIX = `https://github.com/MetaCubeX/mihomo/releases/download`; const META_URL_PREFIX = `https://github.com/MetaCubeX/mihomo/releases/download`;
let META_VERSION; let META_VERSION;
const META_MAP = { const META_ALPHA_MAP = {
"win32-x64": "mihomo-windows-amd64-compatible", "win32-x64": "mihomo-windows-amd64-v2",
"win32-ia32": "mihomo-windows-386", "win32-ia32": "mihomo-windows-386",
"win32-arm64": "mihomo-windows-arm64", "win32-arm64": "mihomo-windows-arm64",
"darwin-x64": "mihomo-darwin-amd64-compatible", "darwin-x64": "mihomo-darwin-amd64-v1-go122",
"darwin-arm64": "mihomo-darwin-arm64", "darwin-arm64": "mihomo-darwin-arm64-go122",
"linux-x64": "mihomo-linux-amd64-compatible", "linux-x64": "mihomo-linux-amd64-v2",
"linux-ia32": "mihomo-linux-386", "linux-ia32": "mihomo-linux-386",
"linux-arm64": "mihomo-linux-arm64", "linux-arm64": "mihomo-linux-arm64",
"linux-arm": "mihomo-linux-armv7", "linux-arm": "mihomo-linux-armv7",
@@ -121,65 +187,116 @@ const META_MAP = {
"linux-loong64": "mihomo-linux-loong64", "linux-loong64": "mihomo-linux-loong64",
}; };
// Fetch the latest release version from the version.txt file const META_MAP = {
async function getLatestReleaseVersion() { "win32-x64": "mihomo-windows-amd64-v2",
const options = {}; "win32-ia32": "mihomo-windows-386",
"win32-arm64": "mihomo-windows-arm64",
"darwin-x64": "mihomo-darwin-amd64-v2-go122",
"darwin-arm64": "mihomo-darwin-arm64-go122",
"linux-x64": "mihomo-linux-amd64-v2",
"linux-ia32": "mihomo-linux-386",
"linux-arm64": "mihomo-linux-arm64",
"linux-arm": "mihomo-linux-armv7",
"linux-riscv64": "mihomo-linux-riscv64",
"linux-loong64": "mihomo-linux-loong64",
};
// =======================
// Fetch latest versions
// =======================
async function getLatestAlphaVersion() {
if (!FORCE) {
const cached = await getCachedVersion("META_ALPHA_VERSION");
if (cached) {
META_ALPHA_VERSION = cached;
return;
}
}
const options = {};
const httpProxy = const httpProxy =
process.env.HTTP_PROXY || process.env.HTTP_PROXY ||
process.env.http_proxy || process.env.http_proxy ||
process.env.HTTPS_PROXY || process.env.HTTPS_PROXY ||
process.env.https_proxy; process.env.https_proxy;
if (httpProxy) options.agent = new HttpsProxyAgent(httpProxy);
if (httpProxy) { try {
options.agent = new HttpsProxyAgent(httpProxy); const response = await fetch(META_ALPHA_VERSION_URL, {
...options,
method: "GET",
});
if (!response.ok)
throw new Error(
`Failed to fetch ${META_ALPHA_VERSION_URL}: ${response.status}`,
);
META_ALPHA_VERSION = (await response.text()).trim();
log_info(`Latest alpha version: ${META_ALPHA_VERSION}`);
await setCachedVersion("META_ALPHA_VERSION", META_ALPHA_VERSION);
} catch (err) {
log_error("Error fetching latest alpha version:", err.message);
process.exit(1);
} }
}
async function getLatestReleaseVersion() {
if (!FORCE) {
const cached = await getCachedVersion("META_VERSION");
if (cached) {
META_VERSION = cached;
return;
}
}
const options = {};
const httpProxy =
process.env.HTTP_PROXY ||
process.env.http_proxy ||
process.env.HTTPS_PROXY ||
process.env.https_proxy;
if (httpProxy) options.agent = new HttpsProxyAgent(httpProxy);
try { try {
const response = await fetch(META_VERSION_URL, { const response = await fetch(META_VERSION_URL, {
...options, ...options,
method: "GET", method: "GET",
}); });
let v = await response.text(); if (!response.ok)
META_VERSION = v.trim(); // Trim to remove extra whitespaces throw new Error(
`Failed to fetch ${META_VERSION_URL}: ${response.status}`,
);
META_VERSION = (await response.text()).trim();
log_info(`Latest release version: ${META_VERSION}`); log_info(`Latest release version: ${META_VERSION}`);
} catch (error) { await setCachedVersion("META_VERSION", META_VERSION);
log_error("Error fetching latest release version:", error.message); } catch (err) {
log_error("Error fetching latest release version:", err.message);
process.exit(1); process.exit(1);
} }
} }
/* // =======================
* check available // Validate availability
*/ // =======================
if (!META_MAP[`${platform}-${arch}`]) { if (!META_MAP[`${platform}-${arch}`]) {
throw new Error( throw new Error(`clash meta unsupported platform "${platform}-${arch}"`);
`clash meta alpha unsupported platform "${platform}-${arch}"`,
);
} }
if (!META_ALPHA_MAP[`${platform}-${arch}`]) { if (!META_ALPHA_MAP[`${platform}-${arch}`]) {
throw new Error( throw new Error(
`clash meta alpha unsupported platform "${platform}-${arch}"`, `clash meta alpha unsupported platform "${platform}-${arch}"`,
); );
} }
/** // =======================
* core info // Build meta objects
*/ // =======================
function clashMetaAlpha() { function clashMetaAlpha() {
const name = META_ALPHA_MAP[`${platform}-${arch}`]; const name = META_ALPHA_MAP[`${platform}-${arch}`];
const isWin = platform === "win32"; const isWin = platform === "win32";
const urlExt = isWin ? "zip" : "gz"; const urlExt = isWin ? "zip" : "gz";
const downloadURL = `${META_ALPHA_URL_PREFIX}/${name}-${META_ALPHA_VERSION}.${urlExt}`;
const exeFile = `${name}${isWin ? ".exe" : ""}`;
const zipFile = `${name}-${META_ALPHA_VERSION}.${urlExt}`;
return { return {
name: "verge-mihomo-alpha", name: "verge-mihomo-alpha",
targetFile: `verge-mihomo-alpha-${SIDECAR_HOST}${isWin ? ".exe" : ""}`, targetFile: `verge-mihomo-alpha-${SIDECAR_HOST}${isWin ? ".exe" : ""}`,
exeFile, exeFile: `${name}${isWin ? ".exe" : ""}`,
zipFile, zipFile: `${name}-${META_ALPHA_VERSION}.${urlExt}`,
downloadURL, downloadURL: `${META_ALPHA_URL_PREFIX}/${name}-${META_ALPHA_VERSION}.${urlExt}`,
}; };
} }
@@ -187,35 +304,83 @@ function clashMeta() {
const name = META_MAP[`${platform}-${arch}`]; const name = META_MAP[`${platform}-${arch}`];
const isWin = platform === "win32"; const isWin = platform === "win32";
const urlExt = isWin ? "zip" : "gz"; const urlExt = isWin ? "zip" : "gz";
const downloadURL = `${META_URL_PREFIX}/${META_VERSION}/${name}-${META_VERSION}.${urlExt}`;
const exeFile = `${name}${isWin ? ".exe" : ""}`;
const zipFile = `${name}-${META_VERSION}.${urlExt}`;
return { return {
name: "verge-mihomo", name: "verge-mihomo",
targetFile: `verge-mihomo-${SIDECAR_HOST}${isWin ? ".exe" : ""}`, targetFile: `verge-mihomo-${SIDECAR_HOST}${isWin ? ".exe" : ""}`,
exeFile, exeFile: `${name}${isWin ? ".exe" : ""}`,
zipFile, zipFile: `${name}-${META_VERSION}.${urlExt}`,
downloadURL, downloadURL: `${META_URL_PREFIX}/${META_VERSION}/${name}-${META_VERSION}.${urlExt}`,
}; };
} }
/**
* download sidecar and rename // =======================
*/ // download helper (增强status + magic bytes)
// =======================
async function downloadFile(url, outPath) {
const options = {};
const httpProxy =
process.env.HTTP_PROXY ||
process.env.http_proxy ||
process.env.HTTPS_PROXY ||
process.env.https_proxy;
if (httpProxy) options.agent = new HttpsProxyAgent(httpProxy);
const response = await fetch(url, {
...options,
method: "GET",
headers: { "Content-Type": "application/octet-stream" },
});
if (!response.ok) {
const body = await response.text().catch(() => "");
// 将 body 写到文件以便排查(可通过临时目录查看)
await fsp.mkdir(path.dirname(outPath), { recursive: true });
await fsp.writeFile(outPath, body);
throw new Error(`Failed to download ${url}: status ${response.status}`);
}
const buf = Buffer.from(await response.arrayBuffer());
await fsp.mkdir(path.dirname(outPath), { recursive: true });
// 简单 magic 字节检查
if (url.endsWith(".gz") || url.endsWith(".tgz")) {
if (!(buf[0] === 0x1f && buf[1] === 0x8b)) {
await fsp.writeFile(outPath, buf);
throw new Error(
`Downloaded file for ${url} is not a valid gzip (magic mismatch).`,
);
}
} else if (url.endsWith(".zip")) {
if (!(buf[0] === 0x50 && buf[1] === 0x4b)) {
await fsp.writeFile(outPath, buf);
throw new Error(
`Downloaded file for ${url} is not a valid zip (magic mismatch).`,
);
}
}
await fsp.writeFile(outPath, buf);
log_success(`download finished: ${url}`);
}
// =======================
// resolveSidecar (支持 zip / tgz / gz)
// =======================
async function resolveSidecar(binInfo) { async function resolveSidecar(binInfo) {
const { name, targetFile, zipFile, exeFile, downloadURL } = binInfo; const { name, targetFile, zipFile, exeFile, downloadURL } = binInfo;
const sidecarDir = path.join(cwd, "src-tauri", "sidecar"); const sidecarDir = path.join(cwd, "src-tauri", "sidecar");
const sidecarPath = path.join(sidecarDir, targetFile); const sidecarPath = path.join(sidecarDir, targetFile);
await fsp.mkdir(sidecarDir, { recursive: true }); await fsp.mkdir(sidecarDir, { recursive: true });
if (!FORCE && fs.existsSync(sidecarPath)) return;
if (!FORCE && fs.existsSync(sidecarPath)) {
log_success(`"${name}" already exists, skipping download`);
return;
}
const tempDir = path.join(TEMP_DIR, name); const tempDir = path.join(TEMP_DIR, name);
const tempZip = path.join(tempDir, zipFile); const tempZip = path.join(tempDir, zipFile);
const tempExe = path.join(tempDir, exeFile); const tempExe = path.join(tempDir, exeFile);
await fsp.mkdir(tempDir, { recursive: true }); await fsp.mkdir(tempDir, { recursive: true });
try { try {
if (!fs.existsSync(tempZip)) { if (!fs.existsSync(tempZip)) {
await downloadFile(downloadURL, tempZip); await downloadFile(downloadURL, tempZip);
@@ -224,140 +389,118 @@ async function resolveSidecar(binInfo) {
if (zipFile.endsWith(".zip")) { if (zipFile.endsWith(".zip")) {
const zip = new AdmZip(tempZip); const zip = new AdmZip(tempZip);
zip.getEntries().forEach((entry) => { zip.getEntries().forEach((entry) => {
log_debug(`"${name}" entry name`, entry.entryName); log_debug(`"${name}" entry: ${entry.entryName}`);
}); });
zip.extractAllTo(tempDir, true); zip.extractAllTo(tempDir, true);
await fsp.rename(tempExe, sidecarPath); // 尝试按 exeFile 重命名,否则找第一个可执行文件
if (fs.existsSync(tempExe)) {
await fsp.rename(tempExe, sidecarPath);
} else {
// 搜索候选
const files = await fsp.readdir(tempDir);
const candidate = files.find(
(f) =>
f === path.basename(exeFile) ||
f.endsWith(".exe") ||
!f.includes("."),
);
if (!candidate)
throw new Error(`Expected binary not found in ${tempDir}`);
await fsp.rename(path.join(tempDir, candidate), sidecarPath);
}
if (platform !== "win32") execSync(`chmod 755 ${sidecarPath}`);
log_success(`unzip finished: "${name}"`); log_success(`unzip finished: "${name}"`);
} else if (zipFile.endsWith(".tgz")) { } else if (zipFile.endsWith(".tgz")) {
// tgz await extract({ cwd: tempDir, file: tempZip });
await fsp.mkdir(tempDir, { recursive: true });
await extract({
cwd: tempDir,
file: tempZip,
//strip: 1, // 可能需要根据实际的 .tgz 文件结构调整
});
const files = await fsp.readdir(tempDir); const files = await fsp.readdir(tempDir);
log_debug(`"${name}" files in tempDir:`, files); log_debug(`"${name}" extracted files:`, files);
const extractedFile = files.find((file) => file.startsWith("虚空终端-")); // 优先寻找给定 exeFile 或已知前缀
if (extractedFile) { let extracted = files.find(
const extractedFilePath = path.join(tempDir, extractedFile); (f) =>
await fsp.rename(extractedFilePath, sidecarPath); f === path.basename(exeFile) ||
log_success(`"${name}" file renamed to "${sidecarPath}"`); f.startsWith("虚空终端-") ||
execSync(`chmod 755 ${sidecarPath}`); !f.includes("."),
log_success(`chmod binary finished: "${name}"`); );
} else { if (!extracted) extracted = files[0];
throw new Error(`Expected file not found in ${tempDir}`); if (!extracted) throw new Error(`Expected file not found in ${tempDir}`);
} await fsp.rename(path.join(tempDir, extracted), sidecarPath);
execSync(`chmod 755 ${sidecarPath}`);
log_success(`tgz processed: "${name}"`);
} else { } else {
// gz // .gz
const readStream = fs.createReadStream(tempZip); const readStream = fs.createReadStream(tempZip);
const writeStream = fs.createWriteStream(sidecarPath); const writeStream = fs.createWriteStream(sidecarPath);
await new Promise((resolve, reject) => { await new Promise((resolve, reject) => {
const onError = (error) => {
log_error(`"${name}" gz failed:`, error.message);
reject(error);
};
readStream readStream
.pipe(zlib.createGunzip().on("error", onError)) .pipe(zlib.createGunzip())
.on("error", (e) => {
log_error(`gunzip error for ${name}:`, e.message);
reject(e);
})
.pipe(writeStream) .pipe(writeStream)
.on("finish", () => { .on("finish", () => {
execSync(`chmod 755 ${sidecarPath}`); if (platform !== "win32") execSync(`chmod 755 ${sidecarPath}`);
log_success(`chmod binary finished: "${name}"`);
resolve(); resolve();
}) })
.on("error", onError); .on("error", (e) => {
log_error(`write stream error for ${name}:`, e.message);
reject(e);
});
}); });
log_success(`gz binary processed: "${name}"`);
} }
} catch (err) { } catch (err) {
// 需要删除文件
await fsp.rm(sidecarPath, { recursive: true, force: true }); await fsp.rm(sidecarPath, { recursive: true, force: true });
throw err; throw err;
} finally { } finally {
// delete temp dir
await fsp.rm(tempDir, { recursive: true, force: true }); await fsp.rm(tempDir, { recursive: true, force: true });
} }
} }
const resolveSetDnsScript = () =>
resolveResource({
file: "set_dns.sh",
localPath: path.join(cwd, "scripts/set_dns.sh"),
});
const resolveUnSetDnsScript = () =>
resolveResource({
file: "unset_dns.sh",
localPath: path.join(cwd, "scripts/unset_dns.sh"),
});
/**
* download the file to the resources dir
*/
async function resolveResource(binInfo) { async function resolveResource(binInfo) {
const { file, downloadURL, localPath } = binInfo; const { file, downloadURL, localPath } = binInfo;
const resDir = path.join(cwd, "src-tauri/resources"); const resDir = path.join(cwd, "src-tauri/resources");
const targetPath = path.join(resDir, file); const targetPath = path.join(resDir, file);
if (!FORCE && fs.existsSync(targetPath)) return; if (!FORCE && fs.existsSync(targetPath) && !downloadURL && !localPath) {
log_success(`"${file}" already exists, skipping`);
return;
}
if (downloadURL) { if (downloadURL) {
if (!FORCE && fs.existsSync(targetPath)) {
log_success(`"${file}" already exists, skipping download`);
return;
}
await fsp.mkdir(resDir, { recursive: true }); await fsp.mkdir(resDir, { recursive: true });
await downloadFile(downloadURL, targetPath); await downloadFile(downloadURL, targetPath);
await updateHashCache(targetPath);
} }
if (localPath) { if (localPath) {
await fs.copyFile(localPath, targetPath, (err) => { if (!(await hasFileChanged(localPath, targetPath))) {
if (err) { return;
console.error("Error copying file:", err); }
} else { await fsp.mkdir(resDir, { recursive: true });
console.log("File was copied successfully"); await fsp.copyFile(localPath, targetPath);
} await updateHashCache(targetPath);
}); log_success(`Copied file: ${file}`);
log_debug(`copy file finished: "${localPath}"`);
} }
log_success(`${file} finished`); log_success(`${file} finished`);
} }
/** // SimpleSC.dll (win plugin)
* download file and save to `path`
*/ async function downloadFile(url, path) {
const options = {};
const httpProxy =
process.env.HTTP_PROXY ||
process.env.http_proxy ||
process.env.HTTPS_PROXY ||
process.env.https_proxy;
if (httpProxy) {
options.agent = new HttpsProxyAgent(httpProxy);
}
const response = await fetch(url, {
...options,
method: "GET",
headers: { "Content-Type": "application/octet-stream" },
});
const buffer = await response.arrayBuffer();
await fsp.writeFile(path, new Uint8Array(buffer));
log_success(`download finished: ${url}`);
}
// SimpleSC.dll
const resolvePlugin = async () => { const resolvePlugin = async () => {
const url = const url =
"https://nsis.sourceforge.io/mediawiki/images/e/ef/NSIS_Simple_Service_Plugin_Unicode_1.30.zip"; "https://nsis.sourceforge.io/mediawiki/images/e/ef/NSIS_Simple_Service_Plugin_Unicode_1.30.zip";
const tempDir = path.join(TEMP_DIR, "SimpleSC"); const tempDir = path.join(TEMP_DIR, "SimpleSC");
const tempZip = path.join( const tempZip = path.join(
tempDir, tempDir,
"NSIS_Simple_Service_Plugin_Unicode_1.30.zip", "NSIS_Simple_Service_Plugin_Unicode_1.30.zip",
); );
const tempDll = path.join(tempDir, "SimpleSC.dll"); const tempDll = path.join(tempDir, "SimpleSC.dll");
const pluginDir = path.join(process.env.APPDATA, "Local/NSIS"); const pluginDir = path.join(process.env.APPDATA || "", "Local/NSIS");
const pluginPath = path.join(pluginDir, "SimpleSC.dll"); const pluginPath = path.join(pluginDir, "SimpleSC.dll");
await fsp.mkdir(pluginDir, { recursive: true }); await fsp.mkdir(pluginDir, { recursive: true });
await fsp.mkdir(tempDir, { recursive: true }); await fsp.mkdir(tempDir, { recursive: true });
@@ -367,95 +510,118 @@ const resolvePlugin = async () => {
await downloadFile(url, tempZip); await downloadFile(url, tempZip);
} }
const zip = new AdmZip(tempZip); const zip = new AdmZip(tempZip);
zip.getEntries().forEach((entry) => { zip
log_debug(`"SimpleSC" entry name`, entry.entryName); .getEntries()
}); .forEach((entry) => log_debug(`"SimpleSC" entry`, entry.entryName));
zip.extractAllTo(tempDir, true); zip.extractAllTo(tempDir, true);
await fsp.cp(tempDll, pluginPath, { recursive: true, force: true }); if (fs.existsSync(tempDll)) {
log_success(`unzip finished: "SimpleSC"`); await fsp.cp(tempDll, pluginPath, { recursive: true, force: true });
log_success(`unzip finished: "SimpleSC"`);
} else {
// 如果 dll 名称不同,尝试找到 dll
const files = await fsp.readdir(tempDir);
const dll = files.find((f) => f.toLowerCase().endsWith(".dll"));
if (dll) {
await fsp.cp(path.join(tempDir, dll), pluginPath, {
recursive: true,
force: true,
});
log_success(`unzip finished: "SimpleSC" (found ${dll})`);
} else {
throw new Error("SimpleSC.dll not found in zip");
}
}
} finally { } finally {
await fsp.rm(tempDir, { recursive: true, force: true }); await fsp.rm(tempDir, { recursive: true, force: true });
} }
}; };
// service chmod // service chmod (保留并使用 glob)
const resolveServicePermission = async () => { const resolveServicePermission = async () => {
const serviceExecutables = [ const serviceExecutables = [
"clash-verge-service*", "clash-verge-service*",
"install-service*", "clash-verge-service-install*",
"uninstall-service*", "clash-verge-service-uninstall*",
]; ];
const resDir = path.join(cwd, "src-tauri/resources"); const resDir = path.join(cwd, "src-tauri/resources");
const hashCache = await loadHashCache();
let hasChanges = false;
for (let f of serviceExecutables) { for (let f of serviceExecutables) {
// 使用glob模块来处理通配符
const files = glob.sync(path.join(resDir, f)); const files = glob.sync(path.join(resDir, f));
for (let filePath of files) { for (let filePath of files) {
if (fs.existsSync(filePath)) { if (fs.existsSync(filePath)) {
execSync(`chmod 755 ${filePath}`); const currentHash = await calculateFileHash(filePath);
log_success(`chmod finished: "${filePath}"`); const cacheKey = `${filePath}_chmod`;
if (!FORCE && hashCache[cacheKey] === currentHash) {
continue;
}
try {
execSync(`chmod 755 ${filePath}`);
log_success(`chmod finished: "${filePath}"`);
} catch (e) {
log_error(`chmod failed for ${filePath}:`, e.message);
}
hashCache[cacheKey] = currentHash;
hasChanges = true;
} }
} }
} }
if (hasChanges) {
await saveHashCache(hashCache);
}
}; };
// resolveResource 函数后添加新函数 // resolve locales (从 src/locales 复制到 resources/locales并使用 hash 检查)
async function resolveLocales() { async function resolveLocales() {
const srcLocalesDir = path.join(cwd, "src/locales"); const srcLocalesDir = path.join(cwd, "src/locales");
const targetLocalesDir = path.join(cwd, "src-tauri/resources/locales"); const targetLocalesDir = path.join(cwd, "src-tauri/resources/locales");
try { try {
// 确保目标目录存在
await fsp.mkdir(targetLocalesDir, { recursive: true }); await fsp.mkdir(targetLocalesDir, { recursive: true });
// 读取所有语言文件
const files = await fsp.readdir(srcLocalesDir); const files = await fsp.readdir(srcLocalesDir);
// 复制每个文件
for (const file of files) { for (const file of files) {
const srcPath = path.join(srcLocalesDir, file); const srcPath = path.join(srcLocalesDir, file);
const targetPath = path.join(targetLocalesDir, file); const targetPath = path.join(targetLocalesDir, file);
if (!(await hasFileChanged(srcPath, targetPath))) continue;
await fsp.copyFile(srcPath, targetPath); await fsp.copyFile(srcPath, targetPath);
await updateHashCache(targetPath);
log_success(`Copied locale file: ${file}`); log_success(`Copied locale file: ${file}`);
} }
log_success("All locale files processed successfully");
log_success("All locale files copied successfully");
} catch (err) { } catch (err) {
log_error("Error copying locale files:", err.message); log_error("Error copying locale files:", err.message);
throw err; throw err;
} }
} }
/** // =======================
* main // Other resource resolvers (service, mmdb, geosite, geoip, enableLoopback, sysproxy)
*/ // =======================
const SERVICE_URL = `https://github.com/clash-verge-rev/clash-verge-service/releases/download/${SIDECAR_HOST}`; const SERVICE_URL = `https://github.com/clash-verge-rev/clash-verge-service-ipc/releases/download/${SIDECAR_HOST}`;
const resolveService = () => { const resolveService = () => {
let ext = platform === "win32" ? ".exe" : ""; let ext = platform === "win32" ? ".exe" : "";
let suffix = platform === "linux" ? "-" + SIDECAR_HOST : ""; let suffix = platform === "linux" ? "-" + SIDECAR_HOST : "";
resolveResource({ return resolveResource({
file: "clash-verge-service" + suffix + ext, file: "clash-verge-service" + suffix + ext,
downloadURL: `${SERVICE_URL}/clash-verge-service${ext}`, downloadURL: `${SERVICE_URL}/clash-verge-service${ext}`,
}); });
}; };
const resolveInstall = () => { const resolveInstall = () => {
let ext = platform === "win32" ? ".exe" : ""; let ext = platform === "win32" ? ".exe" : "";
let suffix = platform === "linux" ? "-" + SIDECAR_HOST : ""; let suffix = platform === "linux" ? "-" + SIDECAR_HOST : "";
resolveResource({ return resolveResource({
file: "install-service" + suffix + ext, file: "clash-verge-service-install" + suffix + ext,
downloadURL: `${SERVICE_URL}/install-service${ext}`, downloadURL: `${SERVICE_URL}/clash-verge-service-install${ext}`,
}); });
}; };
const resolveUninstall = () => { const resolveUninstall = () => {
let ext = platform === "win32" ? ".exe" : ""; let ext = platform === "win32" ? ".exe" : "";
let suffix = platform === "linux" ? "-" + SIDECAR_HOST : ""; let suffix = platform === "linux" ? "-" + SIDECAR_HOST : "";
return resolveResource({
resolveResource({ file: "clash-verge-service-uninstall" + suffix + ext,
file: "uninstall-service" + suffix + ext, downloadURL: `${SERVICE_URL}/clash-verge-service-uninstall${ext}`,
downloadURL: `${SERVICE_URL}/uninstall-service${ext}`,
}); });
}; };
@@ -479,15 +645,27 @@ const resolveEnableLoopback = () =>
file: "enableLoopback.exe", file: "enableLoopback.exe",
downloadURL: `https://github.com/Kuingsmile/uwp-tool/releases/download/latest/enableLoopback.exe`, downloadURL: `https://github.com/Kuingsmile/uwp-tool/releases/download/latest/enableLoopback.exe`,
}); });
const resolveWinSysproxy = () => const resolveWinSysproxy = () =>
resolveResource({ resolveResource({
file: "sysproxy.exe", file: "sysproxy.exe",
downloadURL: `https://github.com/clash-verge-rev/sysproxy/releases/download/${arch}/sysproxy.exe`, downloadURL: `https://github.com/clash-verge-rev/sysproxy/releases/download/${arch}/sysproxy.exe`,
}); });
const resolveSetDnsScript = () =>
resolveResource({
file: "set_dns.sh",
localPath: path.join(cwd, "scripts/set_dns.sh"),
});
const resolveUnSetDnsScript = () =>
resolveResource({
file: "unset_dns.sh",
localPath: path.join(cwd, "scripts/unset_dns.sh"),
});
// =======================
// Tasks
// =======================
const tasks = [ const tasks = [
// { name: "clash", func: resolveClash, retry: 5 },
{ {
name: "verge-mihomo-alpha", name: "verge-mihomo-alpha",
func: () => func: () =>
@@ -537,11 +715,7 @@ const tasks = [
retry: 5, retry: 5,
macosOnly: true, macosOnly: true,
}, },
{ { name: "locales", func: resolveLocales, retry: 2 },
name: "locales",
func: resolveLocales,
retry: 2,
},
]; ];
async function runTask() { async function runTask() {

View File

@@ -54,7 +54,7 @@ async function run() {
execSync(`git tag ${tag}`, { stdio: "inherit" }); execSync(`git tag ${tag}`, { stdio: "inherit" });
execSync(`git push origin ${tag}`, { stdio: "inherit" }); execSync(`git push origin ${tag}`, { stdio: "inherit" });
console.log(`[INFO]: Git tag ${tag} created and pushed.`); console.log(`[INFO]: Git tag ${tag} created and pushed.`);
} catch (e) { } catch {
console.error(`[ERROR]: Failed to create or push git tag: ${tag}`); console.error(`[ERROR]: Failed to create or push git tag: ${tag}`);
process.exit(1); process.exit(1);
} }

View File

@@ -6,15 +6,19 @@
* *
* <version> can be: * <version> can be:
* - A full semver version (e.g., 1.2.3, v1.2.3, 1.2.3-beta, v1.2.3+build) * - A full semver version (e.g., 1.2.3, v1.2.3, 1.2.3-beta, v1.2.3+build)
* - A tag: "alpha", "beta", "rc", or "autobuild" * - A tag: "alpha", "beta", "rc", "autobuild", "autobuild-latest", or "deploytest"
* - "alpha", "beta", "rc": Appends the tag to the current base version (e.g., 1.2.3-beta) * - "alpha", "beta", "rc": Appends the tag to the current base version (e.g., 1.2.3-beta)
* - "autobuild": Appends a timestamped autobuild tag (e.g., 1.2.3+autobuild.2406101530) * - "autobuild": Appends a timestamped autobuild tag (e.g., 1.2.3+autobuild.2406101530)
* - "autobuild-latest": Appends an autobuild tag with latest Tauri commit (e.g., 1.2.3+autobuild.0614.a1b2c3d)
* - "deploytest": Appends a timestamped deploytest tag (e.g., 1.2.3+deploytest.2406101530)
* *
* Examples: * Examples:
* pnpm release-version 1.2.3 * pnpm release-version 1.2.3
* pnpm release-version v1.2.3-beta * pnpm release-version v1.2.3-beta
* pnpm release-version beta * pnpm release-version beta
* pnpm release-version autobuild * pnpm release-version autobuild
* pnpm release-version autobuild-latest
* pnpm release-version deploytest
* *
* The script will: * The script will:
* - Validate and normalize the version argument * - Validate and normalize the version argument
@@ -25,10 +29,10 @@
* Errors are logged and the process exits with code 1 on failure. * Errors are logged and the process exits with code 1 on failure.
*/ */
import { execSync } from "child_process";
import { program } from "commander";
import fs from "fs/promises"; import fs from "fs/promises";
import path from "path"; import path from "path";
import { program } from "commander";
import { execSync } from "child_process";
/** /**
* 获取当前 git 短 commit hash * 获取当前 git 短 commit hash
@@ -37,23 +41,61 @@ import { execSync } from "child_process";
function getGitShortCommit() { function getGitShortCommit() {
try { try {
return execSync("git rev-parse --short HEAD").toString().trim(); return execSync("git rev-parse --short HEAD").toString().trim();
} catch (e) { } catch {
console.warn("[WARN]: Failed to get git short commit, fallback to 'nogit'"); console.warn("[WARN]: Failed to get git short commit, fallback to 'nogit'");
return "nogit"; return "nogit";
} }
} }
/** /**
* 生成短时间戳格式YYMMDD或带 commit格式YYMMDD.cc39b27 * 获取最新 Tauri 相关提交的短 hash
* @param {boolean} withCommit 是否带 commit
* @returns {string} * @returns {string}
*/ */
function generateShortTimestamp(withCommit = false) { function getLatestTauriCommit() {
try {
const fullHash = execSync(
"bash ./scripts-workflow/get_latest_tauri_commit.bash",
)
.toString()
.trim();
const shortHash = execSync(`git rev-parse --short ${fullHash}`)
.toString()
.trim();
console.log(`[INFO]: Latest Tauri-related commit: ${shortHash}`);
return shortHash;
} catch (error) {
console.warn(
"[WARN]: Failed to get latest Tauri commit, fallback to current git short commit",
);
console.warn(`[WARN]: Error details: ${error.message}`);
return getGitShortCommit();
}
}
/**
* 生成短时间戳格式MMDD或带 commit格式MMDD.cc39b27
* 使用 Asia/Shanghai 时区
* @param {boolean} withCommit 是否带 commit
* @param {boolean} useTauriCommit 是否使用 Tauri 相关的 commit仅当 withCommit 为 true 时有效)
* @returns {string}
*/
function generateShortTimestamp(withCommit = false, useTauriCommit = false) {
const now = new Date(); const now = new Date();
const month = String(now.getMonth() + 1).padStart(2, "0");
const day = String(now.getDate()).padStart(2, "0"); const formatter = new Intl.DateTimeFormat("en-CA", {
timeZone: "Asia/Shanghai",
month: "2-digit",
day: "2-digit",
});
const parts = formatter.formatToParts(now);
const month = parts.find((part) => part.type === "month").value;
const day = parts.find((part) => part.type === "day").value;
if (withCommit) { if (withCommit) {
const gitShort = getGitShortCommit(); const gitShort = useTauriCommit
? getLatestTauriCommit()
: getGitShortCommit();
return `${month}${day}.${gitShort}`; return `${month}${day}.${gitShort}`;
} }
return `${month}${day}`; return `${month}${day}`;
@@ -135,20 +177,19 @@ async function updateCargoVersion(newVersion) {
const versionWithoutV = newVersion.startsWith("v") const versionWithoutV = newVersion.startsWith("v")
? newVersion.slice(1) ? newVersion.slice(1)
: newVersion; : newVersion;
const baseVersion = getBaseVersion(versionWithoutV);
const updatedLines = lines.map((line) => { const updatedLines = lines.map((line) => {
if (line.trim().startsWith("version =")) { if (line.trim().startsWith("version =")) {
return line.replace( return line.replace(
/version\s*=\s*"[^"]+"/, /version\s*=\s*"[^"]+"/,
`version = "${baseVersion}"`, `version = "${versionWithoutV}"`,
); );
} }
return line; return line;
}); });
await fs.writeFile(cargoTomlPath, updatedLines.join("\n"), "utf8"); await fs.writeFile(cargoTomlPath, updatedLines.join("\n"), "utf8");
console.log(`[INFO]: Cargo.toml version updated to: ${baseVersion}`); console.log(`[INFO]: Cargo.toml version updated to: ${versionWithoutV}`);
} catch (error) { } catch (error) {
console.error("Error updating Cargo.toml version:", error); console.error("Error updating Cargo.toml version:", error);
throw error; throw error;
@@ -168,19 +209,23 @@ async function updateTauriConfigVersion(newVersion) {
const versionWithoutV = newVersion.startsWith("v") const versionWithoutV = newVersion.startsWith("v")
? newVersion.slice(1) ? newVersion.slice(1)
: newVersion; : newVersion;
const baseVersion = getBaseVersion(versionWithoutV);
console.log( console.log(
"[INFO]: Current tauri.conf.json version is: ", "[INFO]: Current tauri.conf.json version is: ",
tauriConfig.version, tauriConfig.version,
); );
tauriConfig.version = baseVersion;
// 使用完整版本信息包含build metadata
tauriConfig.version = versionWithoutV;
await fs.writeFile( await fs.writeFile(
tauriConfigPath, tauriConfigPath,
JSON.stringify(tauriConfig, null, 2), JSON.stringify(tauriConfig, null, 2),
"utf8", "utf8",
); );
console.log(`[INFO]: tauri.conf.json version updated to: ${baseVersion}`); console.log(
`[INFO]: tauri.conf.json version updated to: ${versionWithoutV}`,
);
} catch (error) { } catch (error) {
console.error("Error updating tauri.conf.json version:", error); console.error("Error updating tauri.conf.json version:", error);
throw error; throw error;
@@ -214,15 +259,31 @@ async function main(versionArg) {
try { try {
let newVersion; let newVersion;
const validTags = ["alpha", "beta", "rc", "autobuild"]; const validTags = [
"alpha",
"beta",
"rc",
"autobuild",
"autobuild-latest",
"deploytest",
];
if (validTags.includes(versionArg.toLowerCase())) { if (validTags.includes(versionArg.toLowerCase())) {
const currentVersion = await getCurrentVersion(); const currentVersion = await getCurrentVersion();
const baseVersion = getBaseVersion(currentVersion); const baseVersion = getBaseVersion(currentVersion);
if (versionArg.toLowerCase() === "autobuild") { if (versionArg.toLowerCase() === "autobuild") {
// 格式: 2.3.0+autobuild.250613.cc39b27 // 格式: 2.3.0+autobuild.1004.cc39b27
newVersion = `${baseVersion}+autobuild.${generateShortTimestamp(true)}`; // 使用 Tauri 相关的最新 commit hash
newVersion = `${baseVersion}+autobuild.${generateShortTimestamp(true, true)}`;
} else if (versionArg.toLowerCase() === "autobuild-latest") {
// 格式: 2.3.0+autobuild.1004.a1b2c3d (使用最新 Tauri 提交)
const latestTauriCommit = getLatestTauriCommit();
newVersion = `${baseVersion}+autobuild.${generateShortTimestamp()}.${latestTauriCommit}`;
} else if (versionArg.toLowerCase() === "deploytest") {
// 格式: 2.3.0+deploytest.1004.cc39b27
// 使用 Tauri 相关的最新 commit hash
newVersion = `${baseVersion}+deploytest.${generateShortTimestamp(true, true)}`;
} else { } else {
newVersion = `${baseVersion}-${versionArg.toLowerCase()}`; newVersion = `${baseVersion}-${versionArg.toLowerCase()}`;
} }

123
scripts/telegram.mjs Normal file
View File

@@ -0,0 +1,123 @@
import axios from "axios";
import { readFileSync } from "fs";
import { log_error, log_info, log_success } from "./utils.mjs";
const CHAT_ID_RELEASE = "@clash_verge_re"; // 正式发布频道
const CHAT_ID_TEST = "@vergetest"; // 测试频道
async function sendTelegramNotification() {
if (!process.env.TELEGRAM_BOT_TOKEN) {
throw new Error("TELEGRAM_BOT_TOKEN is required");
}
const version =
process.env.VERSION ||
(() => {
const pkg = readFileSync("package.json", "utf-8");
return JSON.parse(pkg).version;
})();
const downloadUrl =
process.env.DOWNLOAD_URL ||
`https://github.com/clash-verge-rev/clash-verge-rev/releases/download/v${version}`;
const isAutobuild =
process.env.BUILD_TYPE === "autobuild" || version.includes("autobuild");
const chatId = isAutobuild ? CHAT_ID_TEST : CHAT_ID_RELEASE;
const buildType = isAutobuild ? "滚动更新版" : "正式版";
log_info(`Preparing Telegram notification for ${buildType} ${version}`);
log_info(`Target channel: ${chatId}`);
log_info(`Download URL: ${downloadUrl}`);
// 读取发布说明和下载地址
let releaseContent = "";
try {
releaseContent = readFileSync("release.txt", "utf-8");
log_info("成功读取 release.txt 文件");
} catch (error) {
log_error("无法读取 release.txt使用默认发布说明", error);
releaseContent = "更多新功能现已支持,详细更新日志请查看发布页面。";
}
// Markdown 转换为 HTML
function convertMarkdownToTelegramHTML(content) {
return content
.split("\n")
.map((line) => {
if (line.trim().length === 0) {
return "";
} else if (line.startsWith("## ")) {
return `<b>${line.replace("## ", "")}</b>`;
} else if (line.startsWith("### ")) {
return `<b>${line.replace("### ", "")}</b>`;
} else if (line.startsWith("#### ")) {
return `<b>${line.replace("#### ", "")}</b>`;
} else {
let processedLine = line.replace(
/\[([^\]]+)\]\(([^)]+)\)/g,
(match, text, url) => {
const encodedUrl = encodeURI(url);
return `<a href="${encodedUrl}">${text}</a>`;
},
);
processedLine = processedLine.replace(
/\*\*([^*]+)\*\*/g,
"<b>$1</b>",
);
return processedLine;
}
})
.join("\n");
}
function normalizeDetailsTags(content) {
return content
.replace(
/<summary>\s*<strong>\s*(.*?)\s*<\/strong>\s*<\/summary>/g,
"\n<b>$1</b>\n",
)
.replace(/<summary>\s*(.*?)\s*<\/summary>/g, "\n<b>$1</b>\n")
.replace(/<\/?details>/g, "")
.replace(/<\/?strong>/g, (m) => (m === "</strong>" ? "</b>" : "<b>"))
.replace(/<br\s*\/?>/g, "\n");
}
releaseContent = normalizeDetailsTags(releaseContent);
const formattedContent = convertMarkdownToTelegramHTML(releaseContent);
const releaseTitle = isAutobuild ? "滚动更新版发布" : "正式发布";
const encodedVersion = encodeURIComponent(version);
const content = `<b>🎉 <a href="https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/autobuild">Clash Verge Rev v${version}</a> ${releaseTitle}</b>\n\n${formattedContent}`;
// 发送到 Telegram
try {
await axios.post(
`https://api.telegram.org/bot${process.env.TELEGRAM_BOT_TOKEN}/sendMessage`,
{
chat_id: chatId,
text: content,
link_preview_options: {
is_disabled: false,
url: `https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/v${encodedVersion}`,
prefer_large_media: true,
},
parse_mode: "HTML",
},
);
log_success(`✅ Telegram 通知发送成功到 ${chatId}`);
} catch (error) {
log_error(
`❌ Telegram 通知发送失败到 ${chatId}:`,
error.response?.data || error.message,
error,
);
process.exit(1);
}
}
// 执行函数
sendTelegramNotification().catch((error) => {
log_error("脚本执行失败:", error);
process.exit(1);
});

View File

@@ -8,7 +8,7 @@ const UPDATE_LOG = "UPDATELOG.md";
export async function resolveUpdateLog(tag) { export async function resolveUpdateLog(tag) {
const cwd = process.cwd(); const cwd = process.cwd();
const reTitle = /^## v[\d\.]+/; const reTitle = /^## v[\d.]+/;
const reEnd = /^---/; const reEnd = /^---/;
const file = path.join(cwd, UPDATE_LOG); const file = path.join(cwd, UPDATE_LOG);
@@ -54,7 +54,7 @@ export async function resolveUpdateLogDefault() {
const data = await fsp.readFile(file, "utf-8"); const data = await fsp.readFile(file, "utf-8");
const reTitle = /^## v[\d\.]+/; const reTitle = /^## v[\d.]+/;
const reEnd = /^---/; const reEnd = /^---/;
let isCapturing = false; let isCapturing = false;

View File

@@ -1 +1,2 @@
avoid-breaking-exported-api = true avoid-breaking-exported-api = true
cognitive-complexity-threshold = 25

4416
src-tauri/Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,87 +1,91 @@
[package] [package]
name = "clash-verge" name = "clash-verge"
version = "2.3.2" version = "2.4.3"
description = "clash verge" description = "clash verge"
authors = ["zzzgydi", "wonfen", "MystiPanda"] authors = ["zzzgydi", "Tunglies", "wonfen", "MystiPanda"]
license = "GPL-3.0-only" license = "GPL-3.0-only"
repository = "https://github.com/clash-verge-rev/clash-verge-rev.git" repository = "https://github.com/clash-verge-rev/clash-verge-rev.git"
default-run = "clash-verge" default-run = "clash-verge"
edition = "2021" edition = "2024"
build = "build.rs" build = "build.rs"
[package.metadata.bundle] [package.metadata.bundle]
identifier = "io.github.clash-verge-rev.clash-verge-rev" identifier = "io.github.clash-verge-rev.clash-verge-rev"
[build-dependencies] [build-dependencies]
tauri-build = { version = "2.3.0", features = [] } tauri-build = { version = "2.5.1", features = [] }
[dependencies] [dependencies]
warp = "0.3.7" warp = { version = "0.4.2", features = ["server"] }
anyhow = "1.0.98" anyhow = "1.0.100"
dirs = "6.0"
open = "5.3.2" open = "5.3.2"
log = "0.4.27" log = "0.4.28"
dunce = "1.0.5" dunce = "1.0.5"
log4rs = "1.3.0"
nanoid = "0.4" nanoid = "0.4"
chrono = "0.4.41" chrono = "0.4.42"
sysinfo = "0.35.2" sysinfo = { version = "0.37.2", features = ["network", "system"] }
boa_engine = "0.20.0" boa_engine = "0.21.0"
serde_json = "1.0.140" serde_json = "1.0.145"
serde_yaml = "0.9.34-deprecated" serde_yaml_ng = "0.10.0"
once_cell = "1.21.3" once_cell = "1.21.3"
lazy_static = "1.5.0"
port_scanner = "0.1.5" port_scanner = "0.1.5"
delay_timer = "0.11.6" delay_timer = "0.11.6"
parking_lot = "0.12.4" parking_lot = "0.12.5"
percent-encoding = "2.3.1" percent-encoding = "2.3.2"
tokio = { version = "1.45.1", features = [ tokio = { version = "1.48.0", features = [
"rt-multi-thread", "rt-multi-thread",
"macros", "macros",
"time", "time",
"sync", "sync",
] } ] }
serde = { version = "1.0.219", features = ["derive"] } serde = { version = "1.0.228", features = ["derive"] }
reqwest = { version = "0.12.20", features = ["json", "rustls-tls", "cookies"] } reqwest = { version = "0.12.24", features = ["json", "cookies"] }
regex = "1.11.1" regex = "1.12.2"
sysproxy = { git = "https://github.com/clash-verge-rev/sysproxy-rs" } sysproxy = { git = "https://github.com/clash-verge-rev/sysproxy-rs" }
image = "0.25.6" tauri = { version = "2.9.2", features = [
imageproc = "0.25.0"
tauri = { version = "2.6.2", features = [
"protocol-asset", "protocol-asset",
"devtools", "devtools",
"tray-icon", "tray-icon",
"image-ico", "image-ico",
"image-png", "image-png",
] } ] }
network-interface = { version = "2.0.1", features = ["serde"] } network-interface = { version = "2.0.3", features = ["serde"] }
tauri-plugin-shell = "2.3.0" tauri-plugin-shell = "2.3.3"
tauri-plugin-dialog = "2.3.0" tauri-plugin-dialog = "2.4.2"
tauri-plugin-fs = "2.4.0" tauri-plugin-fs = "2.4.4"
tauri-plugin-process = "2.3.0" tauri-plugin-process = "2.3.1"
tauri-plugin-clipboard-manager = "2.3.0" tauri-plugin-clipboard-manager = "2.3.2"
tauri-plugin-deep-link = "2.4.0" tauri-plugin-deep-link = "2.4.5"
tauri-plugin-devtools = "2.0.0" tauri-plugin-window-state = "2.4.1"
tauri-plugin-window-state = "2.3.0" zip = "6.0.0"
zip = "4.2.0" reqwest_dav = "0.2.2"
reqwest_dav = "0.2.1"
aes-gcm = { version = "0.10.3", features = ["std"] } aes-gcm = { version = "0.10.3", features = ["std"] }
base64 = "0.22.1" base64 = "0.22.1"
getrandom = "0.3.3" getrandom = "0.3.4"
tokio-tungstenite = "0.27.0"
futures = "0.3.31" futures = "0.3.31"
sys-locale = "0.3.2" sys-locale = "0.3.2"
async-trait = "0.1.88" libc = "0.2.177"
mihomo_api = { path = "src_crates/crate_mihomo_api" } gethostname = "1.1.0"
ab_glyph = "0.2.29"
tungstenite = "0.27.0"
libc = "0.2.174"
gethostname = "1.0.2"
hmac = "0.12.1"
sha2 = "0.10.9"
hex = "0.4.3"
scopeguard = "1.2.0" scopeguard = "1.2.0"
tauri-plugin-notification = "2.3.0" tauri-plugin-notification = "2.3.3"
tokio-stream = "0.1.17"
isahc = { version = "1.7.2", default-features = false, features = [
"text-decoding",
"parking_lot",
] }
backoff = { version = "0.4.0", features = ["tokio"] }
compact_str = { version = "0.9.0", features = ["serde"] }
tauri-plugin-http = "2.5.4"
flexi_logger = "0.31.7"
console-subscriber = { version = "0.5.0", optional = true }
tauri-plugin-devtools = { version = "2.0.1" }
tauri-plugin-mihomo = { git = "https://github.com/clash-verge-rev/tauri-plugin-mihomo" }
clash_verge_logger = { git = "https://github.com/clash-verge-rev/clash-verge-logger" }
async-trait = "0.1.89"
smartstring = { version = "1.0.1", features = ["serde"] }
clash_verge_service_ipc = { version = "2.0.21", features = [
"client",
], git = "https://github.com/clash-verge-rev/clash-verge-service-ipc" }
[target.'cfg(windows)'.dependencies] [target.'cfg(windows)'.dependencies]
runas = "=1.2.0" runas = "=1.2.0"
@@ -100,54 +104,124 @@ winapi = { version = "0.3.9", features = [
"winhttp", "winhttp",
"winreg", "winreg",
] } ] }
windows-sys = { version = "0.61.2", features = [
"Win32_Foundation",
"Win32_Graphics_Gdi",
"Win32_System_SystemServices",
"Win32_UI_WindowsAndMessaging",
] }
[target.'cfg(target_os = "linux")'.dependencies] [target.'cfg(target_os = "linux")'.dependencies]
users = "0.11.0" users = "0.11.0"
[target.'cfg(unix)'.dependencies]
signal-hook = "0.3.18"
[target.'cfg(not(any(target_os = "android", target_os = "ios")))'.dependencies] [target.'cfg(not(any(target_os = "android", target_os = "ios")))'.dependencies]
tauri-plugin-autostart = "2.5.0" tauri-plugin-autostart = "2.5.1"
tauri-plugin-global-shortcut = "2.3.0" tauri-plugin-global-shortcut = "2.3.1"
tauri-plugin-updater = "2.9.0" tauri-plugin-updater = "2.9.0"
[features] [features]
default = ["custom-protocol"] default = ["custom-protocol"]
custom-protocol = ["tauri/custom-protocol"] custom-protocol = ["tauri/custom-protocol"]
verge-dev = [] verge-dev = ["clash_verge_logger/color"]
tauri-dev = []
tokio-trace = ["console-subscriber"]
clippy = ["tauri/test"]
tracing = []
[[bench]]
name = "draft_benchmark"
path = "benches/draft_benchmark.rs"
harness = false
[profile.release] [profile.release]
panic = "abort" panic = "abort"
codegen-units = 1 codegen-units = 1
lto = true lto = "thin"
opt-level = "s" opt-level = 3
debug = false
strip = true strip = true
overflow-checks = false
rpath = false
[profile.dev] [profile.dev]
incremental = true incremental = true
codegen-units = 256 # 增加编译单元,提升编译速度 codegen-units = 64
opt-level = 0 # 禁用优化,进一步提升编译速度 opt-level = 0
debug = true # 保留调试信息 debug = true
strip = false # 不剥离符号,保留调试信息 strip = "none"
overflow-checks = true
lto = false
rpath = false
[profile.fast-release] [profile.fast-release]
inherits = "release" # 继承 release 的配置 inherits = "release"
panic = "abort" # 与 release 相同 codegen-units = 64
codegen-units = 256 # 增加编译单元,提升编译速度 incremental = true
lto = false # 禁用 LTO提升编译速度 lto = false
opt-level = 0 # 禁用优化,大幅提升编译速度 opt-level = 0
debug = true # 保留调试信息 debug = true
strip = false # 不剥离符号,保留调试信息 strip = false
[lib] [lib]
name = "app_lib" name = "app_lib"
crate-type = ["staticlib", "cdylib", "rlib"] crate-type = ["staticlib", "cdylib", "rlib"]
[dev-dependencies] [dev-dependencies]
tempfile = "3.20.0" criterion = { version = "0.7.0", features = ["async_tokio"] }
[workspace] [lints.clippy]
members = ["src_crates/crate_mihomo_api"] # Core categories - most important for code safety and correctness
correctness = { level = "deny", priority = -1 }
suspicious = { level = "deny", priority = -1 }
# [patch.crates-io] # Critical safety lints - warn for now due to extensive existing usage
# bitflags = { git = "https://github.com/bitflags/bitflags", rev = "2.9.0" } unwrap_used = "warn"
# zerocopy = { git = "https://github.com/google/zerocopy", rev = "v0.8.24" } expect_used = "warn"
# tungstenite = { git = "https://github.com/snapview/tungstenite-rs", rev = "v0.26.2" } panic = "deny"
unimplemented = "deny"
# Development quality lints
todo = "warn"
dbg_macro = "warn"
#print_stdout = "warn"
#print_stderr = "warn"
# Performance lints for proxy application
clone_on_ref_ptr = "warn"
rc_clone_in_vec_init = "warn"
large_stack_arrays = "warn"
large_const_arrays = "warn"
# Security lints
#integer_division = "warn"
#lossy_float_literal = "warn"
#default_numeric_fallback = "warn"
# Mutex and async lints - strict control
async_yields_async = "deny" # Prevents missing await in async blocks
mutex_atomic = "deny" # Use atomics instead of Mutex<bool/int>
mutex_integer = "deny" # Use AtomicInt instead of Mutex<int>
rc_mutex = "deny" # Single-threaded Rc with Mutex is wrong
unused_async = "deny" # Too many false positives in Tauri/framework code
await_holding_lock = "deny"
large_futures = "deny"
future_not_send = "deny"
# Common style improvements
redundant_else = "deny" # Too many in existing code
needless_continue = "deny" # Too many in existing code
needless_raw_string_hashes = "deny" # Too many in existing code
# Disable noisy categories for existing codebase but keep them available
#style = { level = "allow", priority = -1 }
#complexity = { level = "allow", priority = -1 }
#perf = { level = "allow", priority = -1 }
#pedantic = { level = "allow", priority = -1 }
#nursery = { level = "allow", priority = -1 }
#restriction = { level = "allow", priority = -1 }
or_fun_call = "deny"
cognitive_complexity = "deny"

View File

@@ -0,0 +1,111 @@
use criterion::{Criterion, criterion_group, criterion_main};
use std::hint::black_box;
use std::process;
use tokio::runtime::Runtime;
use app_lib::config::IVerge;
use app_lib::utils::Draft as DraftNew;
/// 创建测试数据
fn make_draft() -> DraftNew<Box<IVerge>> {
let verge = Box::new(IVerge {
enable_auto_launch: Some(true),
enable_tun_mode: Some(false),
..Default::default()
});
DraftNew::from(verge)
}
pub fn bench_draft(c: &mut Criterion) {
let rt = Runtime::new().unwrap_or_else(|e| {
eprintln!("Tokio runtime init failed: {e}");
process::exit(1);
});
let mut group = c.benchmark_group("draft");
group.sample_size(100);
group.warm_up_time(std::time::Duration::from_millis(300));
group.measurement_time(std::time::Duration::from_secs(1));
group.bench_function("data_mut", |b| {
b.iter(|| {
let draft = black_box(make_draft());
let mut data = draft.data_mut();
data.enable_tun_mode = Some(true);
black_box(&data.enable_tun_mode);
});
});
group.bench_function("draft_mut_first", |b| {
b.iter(|| {
let draft = black_box(make_draft());
let mut d = draft.draft_mut();
d.enable_auto_launch = Some(false);
black_box(&d.enable_auto_launch);
});
});
group.bench_function("draft_mut_existing", |b| {
b.iter(|| {
let draft = black_box(make_draft());
{
let mut first = draft.draft_mut();
first.enable_tun_mode = Some(true);
black_box(&first.enable_tun_mode);
}
let mut second = draft.draft_mut();
second.enable_tun_mode = Some(false);
black_box(&second.enable_tun_mode);
});
});
group.bench_function("latest_ref", |b| {
b.iter(|| {
let draft = black_box(make_draft());
let latest = draft.latest_ref();
black_box(&latest.enable_auto_launch);
});
});
group.bench_function("apply", |b| {
b.iter(|| {
let draft = black_box(make_draft());
{
let mut d = draft.draft_mut();
d.enable_auto_launch = Some(false);
}
draft.apply();
black_box(&draft);
});
});
group.bench_function("discard", |b| {
b.iter(|| {
let draft = black_box(make_draft());
{
let mut d = draft.draft_mut();
d.enable_auto_launch = Some(false);
}
draft.discard();
black_box(&draft);
});
});
group.bench_function("with_data_modify_async", |b| {
b.to_async(&rt).iter(|| async {
let draft = black_box(make_draft());
let _: Result<(), anyhow::Error> = draft
.with_data_modify::<_, _, _, anyhow::Error>(|mut box_data| async move {
box_data.enable_auto_launch =
Some(!box_data.enable_auto_launch.unwrap_or(false));
Ok((box_data, ()))
})
.await;
});
});
group.finish();
}
criterion_group!(benches, bench_draft);
criterion_main!(benches);

View File

@@ -1,3 +1,9 @@
fn main() { fn main() {
tauri_build::build() #[cfg(feature = "clippy")]
{
println!("cargo:warning=Skipping tauri_build during Clippy");
}
#[cfg(not(feature = "clippy"))]
tauri_build::build();
} }

View File

@@ -18,6 +18,13 @@
"autostart:allow-disable", "autostart:allow-disable",
"autostart:allow-is-enabled", "autostart:allow-is-enabled",
"core:window:allow-set-theme", "core:window:allow-set-theme",
"notification:default" "notification:default",
"http:default",
"http:allow-fetch",
{
"identifier": "http:default",
"allow": [{ "url": "https://*/*" }, { "url": "http://*/*" }]
},
"mihomo:default"
] ]
} }

Binary file not shown.

Before

Width:  |  Height:  |  Size: 111 KiB

After

Width:  |  Height:  |  Size: 44 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 107 KiB

After

Width:  |  Height:  |  Size: 41 KiB

View File

@@ -1,4 +1,4 @@
#!/bin/bash #!/bin/bash
chmod +x /usr/bin/install-service chmod +x /usr/bin/clash-verge-service-install
chmod +x /usr/bin/uninstall-service chmod +x /usr/bin/clash-verge-service-uninstall
chmod +x /usr/bin/clash-verge-service chmod +x /usr/bin/clash-verge-service

View File

@@ -1,2 +1,2 @@
#!/bin/bash #!/bin/bash
/usr/bin/uninstall-service /usr/bin/clash-verge-service-uninstall

File diff suppressed because it is too large Load Diff

View File

@@ -1,42 +1,64 @@
use super::CmdResult; use super::CmdResult;
use crate::core::sysopt::Sysopt;
use crate::{ use crate::{
cmd::StringifyErr,
feat, logging, feat, logging,
utils::{dirs, logging::Type}, utils::{
wrap_err, dirs::{self, PathBufExec},
logging::Type,
},
}; };
use tauri::Manager; use smartstring::alias::String;
use std::path::Path;
use tauri::{AppHandle, Manager};
use tokio::fs;
use tokio::io::AsyncWriteExt;
/// 打开应用程序所在目录 /// 打开应用程序所在目录
#[tauri::command] #[tauri::command]
pub fn open_app_dir() -> CmdResult<()> { pub async fn open_app_dir() -> CmdResult<()> {
let app_dir = wrap_err!(dirs::app_home_dir())?; let app_dir = dirs::app_home_dir().stringify_err()?;
wrap_err!(open::that(app_dir)) open::that(app_dir).stringify_err()
} }
/// 打开核心所在目录 /// 打开核心所在目录
#[tauri::command] #[tauri::command]
pub fn open_core_dir() -> CmdResult<()> { pub async fn open_core_dir() -> CmdResult<()> {
let core_dir = wrap_err!(tauri::utils::platform::current_exe())?; let core_dir = tauri::utils::platform::current_exe().stringify_err()?;
let core_dir = core_dir.parent().ok_or("failed to get core dir")?; let core_dir = core_dir.parent().ok_or("failed to get core dir")?;
wrap_err!(open::that(core_dir)) open::that(core_dir).stringify_err()
} }
/// 打开日志目录 /// 打开日志目录
#[tauri::command] #[tauri::command]
pub fn open_logs_dir() -> CmdResult<()> { pub async fn open_logs_dir() -> CmdResult<()> {
let log_dir = wrap_err!(dirs::app_logs_dir())?; let log_dir = dirs::app_logs_dir().stringify_err()?;
wrap_err!(open::that(log_dir)) open::that(log_dir).stringify_err()
} }
/// 打开网页链接 /// 打开网页链接
#[tauri::command] #[tauri::command]
pub fn open_web_url(url: String) -> CmdResult<()> { pub fn open_web_url(url: String) -> CmdResult<()> {
wrap_err!(open::that(url)) open::that(url.as_str()).stringify_err()
}
// TODO 后续可以为前端提供接口,当前作为托盘菜单使用
/// 打开 Verge 最新日志
#[tauri::command]
pub async fn open_app_log() -> CmdResult<()> {
open::that(dirs::app_latest_log().stringify_err()?).stringify_err()
}
// TODO 后续可以为前端提供接口,当前作为托盘菜单使用
/// 打开 Clash 最新日志
#[tauri::command]
pub async fn open_core_log() -> CmdResult<()> {
open::that(dirs::clash_latest_log().stringify_err()?).stringify_err()
} }
/// 打开/关闭开发者工具 /// 打开/关闭开发者工具
#[tauri::command] #[tauri::command]
pub fn open_devtools(app_handle: tauri::AppHandle) { pub fn open_devtools(app_handle: AppHandle) {
if let Some(window) = app_handle.get_webview_window("main") { if let Some(window) = app_handle.get_webview_window("main") {
if !window.is_devtools_open() { if !window.is_devtools_open() {
window.open_devtools(); window.open_devtools();
@@ -48,14 +70,14 @@ pub fn open_devtools(app_handle: tauri::AppHandle) {
/// 退出应用 /// 退出应用
#[tauri::command] #[tauri::command]
pub fn exit_app() { pub async fn exit_app() {
feat::quit(); feat::quit().await;
} }
/// 重启应用 /// 重启应用
#[tauri::command] #[tauri::command]
pub async fn restart_app() -> CmdResult<()> { pub async fn restart_app() -> CmdResult<()> {
feat::restart_app(); feat::restart_app().await;
Ok(()) Ok(())
} }
@@ -68,36 +90,39 @@ pub fn get_portable_flag() -> CmdResult<bool> {
/// 获取应用目录 /// 获取应用目录
#[tauri::command] #[tauri::command]
pub fn get_app_dir() -> CmdResult<String> { pub fn get_app_dir() -> CmdResult<String> {
let app_home_dir = wrap_err!(dirs::app_home_dir())? let app_home_dir = dirs::app_home_dir()
.stringify_err()?
.to_string_lossy() .to_string_lossy()
.to_string(); .into();
Ok(app_home_dir) Ok(app_home_dir)
} }
/// 获取当前自启动状态 /// 获取当前自启动状态
#[tauri::command] #[tauri::command]
pub fn get_auto_launch_status() -> CmdResult<bool> { pub fn get_auto_launch_status() -> CmdResult<bool> {
use crate::core::sysopt::Sysopt; Sysopt::global().get_launch_status().stringify_err()
wrap_err!(Sysopt::global().get_launch_status())
} }
/// 下载图标缓存 /// 下载图标缓存
#[tauri::command] #[tauri::command]
pub async fn download_icon_cache(url: String, name: String) -> CmdResult<String> { pub async fn download_icon_cache(url: String, name: String) -> CmdResult<String> {
let icon_cache_dir = wrap_err!(dirs::app_home_dir())?.join("icons").join("cache"); let icon_cache_dir = dirs::app_home_dir()
let icon_path = icon_cache_dir.join(&name); .stringify_err()?
.join("icons")
.join("cache");
let icon_path = icon_cache_dir.join(name.as_str());
if icon_path.exists() { if icon_path.exists() {
return Ok(icon_path.to_string_lossy().to_string()); return Ok(icon_path.to_string_lossy().into());
} }
if !icon_cache_dir.exists() { if !icon_cache_dir.exists() {
let _ = std::fs::create_dir_all(&icon_cache_dir); let _ = fs::create_dir_all(&icon_cache_dir).await;
} }
let temp_path = icon_cache_dir.join(format!("{}.downloading", &name)); let temp_path = icon_cache_dir.join(format!("{}.downloading", name.as_str()));
let response = wrap_err!(reqwest::get(&url).await)?; let response = reqwest::get(url.as_str()).await.stringify_err()?;
let content_type = response let content_type = response
.headers() .headers()
@@ -107,7 +132,7 @@ pub async fn download_icon_cache(url: String, name: String) -> CmdResult<String>
let is_image = content_type.starts_with("image/"); let is_image = content_type.starts_with("image/");
let content = wrap_err!(response.bytes().await)?; let content = response.bytes().await.stringify_err()?;
let is_html = content.len() > 15 let is_html = content.len() > 15
&& (content.starts_with(b"<!DOCTYPE html") && (content.starts_with(b"<!DOCTYPE html")
@@ -116,38 +141,37 @@ pub async fn download_icon_cache(url: String, name: String) -> CmdResult<String>
if is_image && !is_html { if is_image && !is_html {
{ {
let mut file = match std::fs::File::create(&temp_path) { let mut file = match fs::File::create(&temp_path).await {
Ok(file) => file, Ok(file) => file,
Err(_) => { Err(_) => {
if icon_path.exists() { if icon_path.exists() {
return Ok(icon_path.to_string_lossy().to_string()); return Ok(icon_path.to_string_lossy().into());
} else {
return Err("Failed to create temporary file".into());
} }
return Err("Failed to create temporary file".into());
} }
}; };
file.write_all(content.as_ref()).await.stringify_err()?;
wrap_err!(std::io::copy(&mut content.as_ref(), &mut file))?; file.flush().await.stringify_err()?;
} }
if !icon_path.exists() { if !icon_path.exists() {
match std::fs::rename(&temp_path, &icon_path) { match fs::rename(&temp_path, &icon_path).await {
Ok(_) => {} Ok(_) => {}
Err(_) => { Err(_) => {
let _ = std::fs::remove_file(&temp_path); let _ = temp_path.remove_if_exists().await;
if icon_path.exists() { if icon_path.exists() {
return Ok(icon_path.to_string_lossy().to_string()); return Ok(icon_path.to_string_lossy().into());
} }
} }
} }
} else { } else {
let _ = std::fs::remove_file(&temp_path); let _ = temp_path.remove_if_exists().await;
} }
Ok(icon_path.to_string_lossy().to_string()) Ok(icon_path.to_string_lossy().into())
} else { } else {
let _ = std::fs::remove_file(&temp_path); let _ = temp_path.remove_if_exists().await;
Err(format!("下载的内容不是有效图片: {url}")) Err(format!("下载的内容不是有效图片: {}", url.as_str()).into())
} }
} }
@@ -160,66 +184,74 @@ pub struct IconInfo {
/// 复制图标文件 /// 复制图标文件
#[tauri::command] #[tauri::command]
pub fn copy_icon_file(path: String, icon_info: IconInfo) -> CmdResult<String> { pub async fn copy_icon_file(path: String, icon_info: IconInfo) -> CmdResult<String> {
use std::{fs, path::Path}; let file_path = Path::new(path.as_str());
let file_path = Path::new(&path); let icon_dir = dirs::app_home_dir().stringify_err()?.join("icons");
let icon_dir = wrap_err!(dirs::app_home_dir())?.join("icons");
if !icon_dir.exists() { if !icon_dir.exists() {
let _ = fs::create_dir_all(&icon_dir); let _ = fs::create_dir_all(&icon_dir).await;
} }
let ext = match file_path.extension() { let ext: String = match file_path.extension() {
Some(e) => e.to_string_lossy().to_string(), Some(e) => e.to_string_lossy().into(),
None => "ico".to_string(), None => "ico".into(),
}; };
let dest_path = icon_dir.join(format!( let dest_path = icon_dir.join(format!(
"{0}-{1}.{ext}", "{0}-{1}.{ext}",
icon_info.name, icon_info.current_t icon_info.name.as_str(),
icon_info.current_t.as_str()
)); ));
if file_path.exists() { if file_path.exists() {
if icon_info.previous_t.trim() != "" { if icon_info.previous_t.trim() != "" {
fs::remove_file( icon_dir
icon_dir.join(format!("{0}-{1}.png", icon_info.name, icon_info.previous_t)), .join(format!(
) "{0}-{1}.png",
.unwrap_or_default(); icon_info.name.as_str(),
fs::remove_file( icon_info.previous_t.as_str()
icon_dir.join(format!("{0}-{1}.ico", icon_info.name, icon_info.previous_t)), ))
) .remove_if_exists()
.unwrap_or_default(); .await
.unwrap_or_default();
icon_dir
.join(format!(
"{0}-{1}.ico",
icon_info.name.as_str(),
icon_info.previous_t.as_str()
))
.remove_if_exists()
.await
.unwrap_or_default();
} }
logging!( logging!(
info, info,
Type::Cmd, Type::Cmd,
true,
"Copying icon file path: {:?} -> file dist: {:?}", "Copying icon file path: {:?} -> file dist: {:?}",
path, path,
dest_path dest_path
); );
match fs::copy(file_path, &dest_path) { match fs::copy(file_path, &dest_path).await {
Ok(_) => Ok(dest_path.to_string_lossy().to_string()), Ok(_) => Ok(dest_path.to_string_lossy().into()),
Err(err) => Err(err.to_string()), Err(err) => Err(err.to_string().into()),
} }
} else { } else {
Err("file not found".to_string()) Err("file not found".into())
} }
} }
/// 通知UI已准备就绪 /// 通知UI已准备就绪
#[tauri::command] #[tauri::command]
pub fn notify_ui_ready() -> CmdResult<()> { pub fn notify_ui_ready() -> CmdResult<()> {
log::info!(target: "app", "前端UI已准备就绪"); logging!(info, Type::Cmd, "前端UI已准备就绪");
crate::utils::resolve::mark_ui_ready(); crate::utils::resolve::ui::mark_ui_ready();
Ok(()) Ok(())
} }
/// UI加载阶段 /// UI加载阶段
#[tauri::command] #[tauri::command]
pub fn update_ui_stage(stage: String) -> CmdResult<()> { pub fn update_ui_stage(stage: String) -> CmdResult<()> {
log::info!(target: "app", "UI加载阶段更新: {stage}"); logging!(info, Type::Cmd, "UI加载阶段更新: {}", stage.as_str());
use crate::utils::resolve::UiReadyStage; use crate::utils::resolve::ui::UiReadyStage;
let stage_enum = match stage.as_str() { let stage_enum = match stage.as_str() {
"NotStarted" => UiReadyStage::NotStarted, "NotStarted" => UiReadyStage::NotStarted,
@@ -228,19 +260,16 @@ pub fn update_ui_stage(stage: String) -> CmdResult<()> {
"ResourcesLoaded" => UiReadyStage::ResourcesLoaded, "ResourcesLoaded" => UiReadyStage::ResourcesLoaded,
"Ready" => UiReadyStage::Ready, "Ready" => UiReadyStage::Ready,
_ => { _ => {
log::warn!(target: "app", "未知的UI加载阶段: {stage}"); logging!(
return Err(format!("未知的UI加载阶段: {stage}")); warn,
Type::Cmd,
"Warning: 未知的UI加载阶段: {}",
stage.as_str()
);
return Err(format!("未知的UI加载阶段: {}", stage.as_str()).into());
} }
}; };
crate::utils::resolve::update_ui_ready_stage(stage_enum); crate::utils::resolve::ui::update_ui_ready_stage(stage_enum);
Ok(())
}
/// 重置UI就绪状态
#[tauri::command]
pub fn reset_ui_ready_state() -> CmdResult<()> {
log::info!(target: "app", "重置UI就绪状态");
crate::utils::resolve::reset_ui_ready();
Ok(()) Ok(())
} }

View File

@@ -0,0 +1,36 @@
use super::CmdResult;
use crate::{cmd::StringifyErr, feat};
use feat::LocalBackupFile;
use smartstring::alias::String;
/// Create a local backup.
#[tauri::command]
pub async fn create_local_backup() -> CmdResult<()> {
    let outcome = feat::create_local_backup().await;
    outcome.stringify_err()
}
/// List local backups.
#[tauri::command]
pub async fn list_local_backup() -> CmdResult<Vec<LocalBackupFile>> {
    let backups = feat::list_local_backup().await;
    backups.stringify_err()
}
/// Delete the named local backup.
#[tauri::command]
pub async fn delete_local_backup(filename: String) -> CmdResult<()> {
    let outcome = feat::delete_local_backup(filename).await;
    outcome.stringify_err()
}
/// Restore the named local backup.
#[tauri::command]
pub async fn restore_local_backup(filename: String) -> CmdResult<()> {
    let outcome = feat::restore_local_backup(filename).await;
    outcome.stringify_err()
}
/// Export a local backup to a user selected destination.
#[tauri::command]
pub async fn export_local_backup(filename: String, destination: String) -> CmdResult<()> {
    let outcome = feat::export_local_backup(filename, destination).await;
    outcome.stringify_err()
}

View File

@@ -1,65 +1,75 @@
use super::CmdResult; use super::CmdResult;
use crate::utils::dirs;
use crate::{ use crate::{
config::*, core::*, feat, module::mihomo::MihomoManager, process::AsyncHandler, wrap_err, cmd::StringifyErr,
config::Config,
constants,
core::{CoreManager, handle, validate::CoreConfigValidator},
}; };
use serde_yaml::Mapping; use crate::{config::*, feat, logging, utils::logging::Type};
use compact_str::CompactString;
use serde_yaml_ng::Mapping;
use smartstring::alias::String;
use tokio::fs;
/// 复制Clash环境变量 /// 复制Clash环境变量
#[tauri::command] #[tauri::command]
pub fn copy_clash_env() -> CmdResult { pub async fn copy_clash_env() -> CmdResult {
feat::copy_clash_env(); feat::copy_clash_env().await;
Ok(()) Ok(())
} }
/// 获取Clash信息 /// 获取Clash信息
#[tauri::command] #[tauri::command]
pub fn get_clash_info() -> CmdResult<ClashInfo> { pub async fn get_clash_info() -> CmdResult<ClashInfo> {
Ok(Config::clash().latest().get_client_info()) Ok(Config::clash().await.latest_ref().get_client_info())
} }
/// 修改Clash配置 /// 修改Clash配置
#[tauri::command] #[tauri::command]
pub async fn patch_clash_config(payload: Mapping) -> CmdResult { pub async fn patch_clash_config(payload: Mapping) -> CmdResult {
wrap_err!(feat::patch_clash(payload).await) feat::patch_clash(payload).await.stringify_err()
} }
/// 修改Clash模式 /// 修改Clash模式
#[tauri::command] #[tauri::command]
pub async fn patch_clash_mode(payload: String) -> CmdResult { pub async fn patch_clash_mode(payload: String) -> CmdResult {
feat::change_clash_mode(payload); feat::change_clash_mode(payload).await;
Ok(()) Ok(())
} }
/// 切换Clash核心 /// 切换Clash核心
#[tauri::command] #[tauri::command]
pub async fn change_clash_core(clash_core: String) -> CmdResult<Option<String>> { pub async fn change_clash_core(clash_core: String) -> CmdResult<Option<String>> {
log::info!(target: "app", "changing core to {clash_core}"); logging!(info, Type::Config, "changing core to {clash_core}");
match CoreManager::global() match CoreManager::global().change_core(&clash_core).await {
.change_core(Some(clash_core.clone()))
.await
{
Ok(_) => { Ok(_) => {
// 切换内核后重启内核 // 切换内核后重启内核
match CoreManager::global().restart_core().await { match CoreManager::global().restart_core().await {
Ok(_) => { Ok(_) => {
log::info!(target: "app", "core changed and restarted to {clash_core}"); logging!(
handle::Handle::notice_message("config_core::change_success", &clash_core); info,
Type::Core,
"core changed and restarted to {clash_core}"
);
handle::Handle::notice_message("config_core::change_success", clash_core);
handle::Handle::refresh_clash(); handle::Handle::refresh_clash();
Ok(None) Ok(None)
} }
Err(err) => { Err(err) => {
let error_msg = format!("Core changed but failed to restart: {err}"); let error_msg: String =
log::error!(target: "app", "{error_msg}"); format!("Core changed but failed to restart: {err}").into();
handle::Handle::notice_message("config_core::change_error", &error_msg); handle::Handle::notice_message("config_core::change_error", error_msg.clone());
logging!(error, Type::Core, "{error_msg}");
Ok(Some(error_msg)) Ok(Some(error_msg))
} }
} }
} }
Err(err) => { Err(err) => {
let error_msg = err.to_string(); let error_msg: String = err;
log::error!(target: "app", "failed to change core: {error_msg}"); logging!(error, Type::Core, "failed to change core: {error_msg}");
handle::Handle::notice_message("config_core::change_error", &error_msg); handle::Handle::notice_message("config_core::change_error", error_msg.clone());
Ok(Some(error_msg)) Ok(Some(error_msg))
} }
} }
@@ -68,150 +78,146 @@ pub async fn change_clash_core(clash_core: String) -> CmdResult<Option<String>>
/// 启动核心 /// 启动核心
#[tauri::command] #[tauri::command]
pub async fn start_core() -> CmdResult { pub async fn start_core() -> CmdResult {
wrap_err!(CoreManager::global().start_core().await) let result = CoreManager::global().start_core().await.stringify_err();
if result.is_ok() {
handle::Handle::refresh_clash();
}
result
} }
/// 关闭核心 /// 关闭核心
#[tauri::command] #[tauri::command]
pub async fn stop_core() -> CmdResult { pub async fn stop_core() -> CmdResult {
wrap_err!(CoreManager::global().stop_core().await) let result = CoreManager::global().stop_core().await.stringify_err();
if result.is_ok() {
handle::Handle::refresh_clash();
}
result
} }
/// 重启核心 /// 重启核心
#[tauri::command] #[tauri::command]
pub async fn restart_core() -> CmdResult { pub async fn restart_core() -> CmdResult {
wrap_err!(CoreManager::global().restart_core().await) let result = CoreManager::global().restart_core().await.stringify_err();
} if result.is_ok() {
handle::Handle::refresh_clash();
/// 获取代理延迟 }
#[tauri::command] result
pub async fn clash_api_get_proxy_delay(
name: String,
url: Option<String>,
timeout: i32,
) -> CmdResult<serde_json::Value> {
MihomoManager::global()
.test_proxy_delay(&name, url, timeout)
.await
} }
/// 测试URL延迟 /// 测试URL延迟
#[tauri::command] #[tauri::command]
pub async fn test_delay(url: String) -> CmdResult<u32> { pub async fn test_delay(url: String) -> CmdResult<u32> {
Ok(feat::test_delay(url).await.unwrap_or(10000u32)) let result = match feat::test_delay(url).await {
Ok(delay) => delay,
Err(e) => {
logging!(error, Type::Cmd, "{}", e);
10000u32
}
};
Ok(result)
} }
/// 保存DNS配置到单独文件 /// 保存DNS配置到单独文件
#[tauri::command] #[tauri::command]
pub async fn save_dns_config(dns_config: Mapping) -> CmdResult { pub async fn save_dns_config(dns_config: Mapping) -> CmdResult {
use crate::utils::dirs; use crate::utils::dirs;
use serde_yaml; use serde_yaml_ng;
use std::fs; use tokio::fs;
// 获取DNS配置文件路径 // 获取DNS配置文件路径
let dns_path = dirs::app_home_dir() let dns_path = dirs::app_home_dir()
.map_err(|e| e.to_string())? .stringify_err()?
.join("dns_config.yaml"); .join(constants::files::DNS_CONFIG);
// 保存DNS配置到文件 // 保存DNS配置到文件
let yaml_str = serde_yaml::to_string(&dns_config).map_err(|e| e.to_string())?; let yaml_str = serde_yaml_ng::to_string(&dns_config).stringify_err()?;
fs::write(&dns_path, yaml_str).map_err(|e| e.to_string())?; fs::write(&dns_path, yaml_str).await.stringify_err()?;
log::info!(target: "app", "DNS config saved to {dns_path:?}"); logging!(info, Type::Config, "DNS config saved to {dns_path:?}");
Ok(()) Ok(())
} }
/// 应用或撤销DNS配置 /// 应用或撤销DNS配置
#[tauri::command] #[tauri::command]
pub fn apply_dns_config(apply: bool) -> CmdResult { pub async fn apply_dns_config(apply: bool) -> CmdResult {
use crate::{ use crate::{
config::Config, config::Config,
core::{handle, CoreManager}, core::{CoreManager, handle},
utils::dirs, utils::dirs,
}; };
// 使用spawn来处理异步操作 if apply {
AsyncHandler::spawn(move || async move { // 读取DNS配置文件
if apply { let dns_path = dirs::app_home_dir()
// 读取DNS配置文件 .stringify_err()?
let dns_path = match dirs::app_home_dir() { .join(constants::files::DNS_CONFIG);
Ok(path) => path.join("dns_config.yaml"),
Err(e) => {
log::error!(target: "app", "Failed to get home dir: {e}");
return;
}
};
if !dns_path.exists() { if !dns_path.exists() {
log::warn!(target: "app", "DNS config file not found"); logging!(warn, Type::Config, "DNS config file not found");
return; return Err("DNS config file not found".into());
}
let dns_yaml = match std::fs::read_to_string(&dns_path) {
Ok(content) => content,
Err(e) => {
log::error!(target: "app", "Failed to read DNS config: {e}");
return;
}
};
// 解析DNS配置并创建patch
let patch_config = match serde_yaml::from_str::<serde_yaml::Mapping>(&dns_yaml) {
Ok(config) => {
let mut patch = serde_yaml::Mapping::new();
patch.insert("dns".into(), config.into());
patch
}
Err(e) => {
log::error!(target: "app", "Failed to parse DNS config: {e}");
return;
}
};
log::info!(target: "app", "Applying DNS config from file");
// 重新生成配置确保DNS配置被正确应用
// 这里不调用patch_clash以避免将DNS配置写入config.yaml
Config::runtime()
.latest()
.patch_config(patch_config.clone());
// 首先重新生成配置
if let Err(err) = Config::generate().await {
log::error!(target: "app", "Failed to regenerate config with DNS: {err}");
return;
}
// 然后应用新配置
if let Err(err) = CoreManager::global().update_config().await {
log::error!(target: "app", "Failed to apply config with DNS: {err}");
} else {
log::info!(target: "app", "DNS config successfully applied");
handle::Handle::refresh_clash();
}
} else {
// 当关闭DNS设置时不需要对配置进行任何修改
// 直接重新生成配置让enhance函数自动跳过DNS配置的加载
log::info!(target: "app", "DNS settings disabled, regenerating config");
// 重新生成配置
if let Err(err) = Config::generate().await {
log::error!(target: "app", "Failed to regenerate config: {err}");
return;
}
// 应用新配置
match CoreManager::global().update_config().await {
Ok(_) => {
log::info!(target: "app", "Config regenerated successfully");
handle::Handle::refresh_clash();
}
Err(err) => {
log::error!(target: "app", "Failed to apply regenerated config: {err}");
}
}
} }
});
let dns_yaml = fs::read_to_string(&dns_path).await.stringify_err_log(|e| {
logging!(error, Type::Config, "Failed to read DNS config: {e}");
})?;
// 解析DNS配置
let patch_config = serde_yaml_ng::from_str::<serde_yaml_ng::Mapping>(&dns_yaml)
.stringify_err_log(|e| {
logging!(error, Type::Config, "Failed to parse DNS config: {e}");
})?;
logging!(info, Type::Config, "Applying DNS config from file");
// 创建包含DNS配置的patch
let mut patch = serde_yaml_ng::Mapping::new();
patch.insert("dns".into(), patch_config.into());
// 应用DNS配置到运行时配置
Config::runtime().await.draft_mut().patch_config(patch);
// 重新生成配置
Config::generate().await.stringify_err_log(|err| {
let err = format!("Failed to regenerate config with DNS: {err}");
logging!(error, Type::Config, "{err}");
})?;
// 应用新配置
CoreManager::global()
.update_config()
.await
.stringify_err_log(|err| {
let err = format!("Failed to apply config with DNS: {err}");
logging!(error, Type::Config, "{err}");
})?;
logging!(info, Type::Config, "DNS config successfully applied");
handle::Handle::refresh_clash();
} else {
// 当关闭DNS设置时重新生成配置不加载DNS配置文件
logging!(
info,
Type::Config,
"DNS settings disabled, regenerating config"
);
Config::generate().await.stringify_err_log(|err| {
let err = format!("Failed to regenerate config: {err}");
logging!(error, Type::Config, "{err}");
})?;
CoreManager::global()
.update_config()
.await
.stringify_err_log(|err| {
let err = format!("Failed to apply regenerated config: {err}");
logging!(error, Type::Config, "{err}");
})?;
logging!(info, Type::Config, "Config regenerated successfully");
handle::Handle::refresh_clash();
}
Ok(()) Ok(())
} }
@@ -222,8 +228,8 @@ pub fn check_dns_config_exists() -> CmdResult<bool> {
use crate::utils::dirs; use crate::utils::dirs;
let dns_path = dirs::app_home_dir() let dns_path = dirs::app_home_dir()
.map_err(|e| e.to_string())? .stringify_err()?
.join("dns_config.yaml"); .join(constants::files::DNS_CONFIG);
Ok(dns_path.exists()) Ok(dns_path.exists())
} }
@@ -232,38 +238,41 @@ pub fn check_dns_config_exists() -> CmdResult<bool> {
#[tauri::command] #[tauri::command]
pub async fn get_dns_config_content() -> CmdResult<String> { pub async fn get_dns_config_content() -> CmdResult<String> {
use crate::utils::dirs; use crate::utils::dirs;
use std::fs; use tokio::fs;
let dns_path = dirs::app_home_dir() let dns_path = dirs::app_home_dir()
.map_err(|e| e.to_string())? .stringify_err()?
.join("dns_config.yaml"); .join(constants::files::DNS_CONFIG);
if !dns_path.exists() { if !fs::try_exists(&dns_path).await.stringify_err()? {
return Err("DNS config file not found".into()); return Err("DNS config file not found".into());
} }
let content = fs::read_to_string(&dns_path).map_err(|e| e.to_string())?; let content = fs::read_to_string(&dns_path).await.stringify_err()?.into();
Ok(content) Ok(content)
} }
/// 验证DNS配置文件 /// 验证DNS配置文件
#[tauri::command] #[tauri::command]
pub async fn validate_dns_config() -> CmdResult<(bool, String)> { pub async fn validate_dns_config() -> CmdResult<(bool, String)> {
use crate::{core::CoreManager, utils::dirs}; let app_dir = dirs::app_home_dir().stringify_err()?;
let dns_path = app_dir.join(constants::files::DNS_CONFIG);
let app_dir = dirs::app_home_dir().map_err(|e| e.to_string())?;
let dns_path = app_dir.join("dns_config.yaml");
let dns_path_str = dns_path.to_str().unwrap_or_default(); let dns_path_str = dns_path.to_str().unwrap_or_default();
if !dns_path.exists() { if !dns_path.exists() {
return Ok((false, "DNS config file not found".to_string())); return Ok((false, "DNS config file not found".into()));
} }
match CoreManager::global() CoreConfigValidator::validate_config_file(dns_path_str, None)
.validate_config_file(dns_path_str, None)
.await .await
{ .stringify_err()
Ok(result) => Ok(result), }
Err(e) => Err(e.to_string()),
} #[tauri::command]
pub async fn get_clash_logs() -> CmdResult<Vec<CompactString>> {
let logs = CoreManager::global()
.get_clash_logs()
.await
.unwrap_or_default();
Ok(logs)
} }

View File

@@ -4,12 +4,12 @@ use super::CmdResult;
#[tauri::command] #[tauri::command]
pub async fn entry_lightweight_mode() -> CmdResult { pub async fn entry_lightweight_mode() -> CmdResult {
lightweight::entry_lightweight_mode(); lightweight::entry_lightweight_mode().await;
Ok(()) Ok(())
} }
#[tauri::command] #[tauri::command]
pub async fn exit_lightweight_mode() -> CmdResult { pub async fn exit_lightweight_mode() -> CmdResult {
lightweight::exit_lightweight_mode(); lightweight::exit_lightweight_mode().await;
Ok(()) Ok(())
} }

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,124 @@
use std::sync::Arc;
use regex::Regex;
use reqwest::{Client, cookie::Jar};
use crate::{logging, utils::logging::Type};
use super::UnlockItem;
use super::utils::{country_code_to_emoji, get_local_date_string};
/// Probe whether Bahamut Anime (ani.gamer.com.tw) is unlocked via the current exit.
///
/// Three-step flow: obtain a device id, request an ad token with it, then scrape
/// the landing page for the geo region. A dedicated client with a cookie jar is
/// built because the token endpoint depends on cookies set by the device-id call.
pub(super) async fn check_bahamut_anime(client: &Client) -> UnlockItem {
    // Shorthand for building a result record with a fresh timestamp.
    let make_item = |status: &str, region: Option<String>| UnlockItem {
        name: "Bahamut Anime".to_string(),
        status: status.to_string(),
        region,
        check_time: Some(get_local_date_string()),
    };

    let jar = Arc::new(Jar::default());
    let cookie_client = Client::builder()
        .user_agent("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36")
        .cookie_provider(Arc::clone(&jar))
        .build()
        .unwrap_or_else(|e| {
            logging!(
                error,
                Type::Network,
                "Failed to create client with cookies for Bahamut Anime: {}",
                e
            );
            // Fall back to the shared client; the check may still partially work.
            client.clone()
        });

    // Step 1: fetch a device id from the AJAX endpoint.
    let device_url = "https://ani.gamer.com.tw/ajax/getdeviceid.php";
    let device_id = match cookie_client.get(device_url).send().await {
        Ok(resp) => match resp.text().await {
            Ok(text) => match Regex::new(r#""deviceid"\s*:\s*"([^"]+)"#) {
                Ok(re) => re
                    .captures(&text)
                    .and_then(|caps| caps.get(1))
                    .map(|m| m.as_str().to_string())
                    .unwrap_or_default(),
                Err(e) => {
                    logging!(
                        error,
                        Type::Network,
                        "Failed to compile deviceid regex for Bahamut Anime: {}",
                        e
                    );
                    String::new()
                }
            },
            Err(_) => String::new(),
        },
        Err(_) => String::new(),
    };
    if device_id.is_empty() {
        return make_item("Failed", None);
    }

    // Step 2: request an ad token; an unlocked region returns a payload containing "animeSn".
    let token_url =
        format!("https://ani.gamer.com.tw/ajax/token.php?adID=89422&sn=37783&device={device_id}");
    let unlocked = match cookie_client.get(&token_url).send().await {
        Ok(resp) => resp
            .text()
            .await
            .map(|body| body.contains("animeSn"))
            .unwrap_or(false),
        Err(_) => false,
    };
    if !unlocked {
        return make_item("No", None);
    }

    // Step 3: read the region code embedded in the landing page markup.
    let region = match cookie_client
        .get("https://ani.gamer.com.tw/")
        .send()
        .await
    {
        Ok(resp) => match resp.text().await {
            Ok(body) => match Regex::new(r#"data-geo="([^"]+)"#) {
                Ok(re) => re.captures(&body).and_then(|caps| caps.get(1)).map(|m| {
                    let country_code = m.as_str();
                    let emoji = country_code_to_emoji(country_code);
                    format!("{emoji}{country_code}")
                }),
                Err(e) => {
                    logging!(
                        error,
                        Type::Network,
                        "Failed to compile region regex for Bahamut Anime: {}",
                        e
                    );
                    None
                }
            },
            Err(_) => None,
        },
        Err(_) => None,
    };

    make_item("Yes", region)
}

View File

@@ -0,0 +1,91 @@
use reqwest::Client;
use serde_json::Value;
use super::UnlockItem;
use super::utils::get_local_date_string;
/// Check whether mainland-China Bilibili content is playable from the current exit.
///
/// Queries the PGC playurl API and interprets the returned `code` field:
/// 0 = playable, -10403 = region-blocked, anything else (or a transport/parse
/// failure) = check failed.
pub(super) async fn check_bilibili_china_mainland(client: &Client) -> UnlockItem {
    let url = "https://api.bilibili.com/pgc/player/web/playurl?avid=82846771&qn=0&type=&otype=json&ep_id=307247&fourk=1&fnver=0&fnval=16&module=bangumi";

    // Collapse transport/JSON failures into `None`; a present value is the API code.
    let code = match client.get(url).send().await {
        Ok(response) => response
            .json::<Value>()
            .await
            .ok()
            .and_then(|body| body.get("code").and_then(|v| v.as_i64())),
        Err(_) => None,
    };

    let status = match code {
        Some(0) => "Yes",
        Some(-10403) => "No",
        _ => "Failed",
    };

    UnlockItem {
        name: "哔哩哔哩大陆".to_string(),
        status: status.to_string(),
        region: None,
        check_time: Some(get_local_date_string()),
    }
}
/// Check whether HK/MO/TW Bilibili content is playable from the current exit.
///
/// Same API and `code` interpretation as the mainland check, against an
/// HK/MO/TW-only episode: 0 = playable, -10403 = region-blocked, anything else
/// (or a transport/parse failure) = check failed.
pub(super) async fn check_bilibili_hk_mc_tw(client: &Client) -> UnlockItem {
    let url = "https://api.bilibili.com/pgc/player/web/playurl?avid=18281381&cid=29892777&qn=0&type=&otype=json&ep_id=183799&fourk=1&fnver=0&fnval=16&module=bangumi";

    // Collapse transport/JSON failures into `None`; a present value is the API code.
    let code = match client.get(url).send().await {
        Ok(response) => response
            .json::<Value>()
            .await
            .ok()
            .and_then(|body| body.get("code").and_then(|v| v.as_i64())),
        Err(_) => None,
    };

    let status = match code {
        Some(0) => "Yes",
        Some(-10403) => "No",
        _ => "Failed",
    };

    UnlockItem {
        name: "哔哩哔哩港澳台".to_string(),
        status: status.to_string(),
        region: None,
        check_time: Some(get_local_date_string()),
    }
}

View File

@@ -0,0 +1,94 @@
use std::collections::HashMap;
use reqwest::Client;
use super::UnlockItem;
use super::utils::{country_code_to_emoji, get_local_date_string};
/// Run the combined ChatGPT availability checks (iOS endpoint + Web/API endpoint).
///
/// The exit region is resolved once from the cdn-cgi trace output and attached
/// to both result items.
pub(super) async fn check_chatgpt_combined(client: &Client) -> Vec<UnlockItem> {
    // Resolve the exit region from the `loc=` field of the trace output.
    let region = match client
        .get("https://chat.openai.com/cdn-cgi/trace")
        .send()
        .await
    {
        Ok(response) => match response.text().await {
            Ok(body) => {
                let mut fields = HashMap::new();
                for line in body.lines() {
                    if let Some((key, value)) = line.split_once('=') {
                        fields.insert(key.to_string(), value.to_string());
                    }
                }
                fields.get("loc").map(|loc| {
                    let emoji = country_code_to_emoji(loc);
                    format!("{emoji}{loc}")
                })
            }
            Err(_) => None,
        },
        Err(_) => None,
    };

    // iOS client endpoint: the body text distinguishes allowed / ISP-blocked / blocked.
    let ios_status = match client.get("https://ios.chat.openai.com/").send().await {
        Ok(response) => match response.text().await {
            Ok(body) => {
                let body_lower = body.to_lowercase();
                if body_lower.contains("you may be connected to a disallowed isp") {
                    "Disallowed ISP"
                } else if body_lower.contains("request is not allowed. please try again later.") {
                    "Yes"
                } else if body_lower.contains("sorry, you have been blocked") {
                    "Blocked"
                } else {
                    "Failed"
                }
            }
            Err(_) => "Failed",
        },
        Err(_) => "Failed",
    };

    // Web/API compliance endpoint: an unsupported-country marker means blocked.
    let web_status = match client
        .get("https://api.openai.com/compliance/cookie_requirements")
        .send()
        .await
    {
        Ok(response) => match response.text().await {
            Ok(body) => {
                if body.to_lowercase().contains("unsupported_country") {
                    "Unsupported Country/Region"
                } else {
                    "Yes"
                }
            }
            Err(_) => "Failed",
        },
        Err(_) => "Failed",
    };

    vec![
        UnlockItem {
            name: "ChatGPT iOS".to_string(),
            status: ios_status.to_string(),
            region: region.clone(),
            check_time: Some(get_local_date_string()),
        },
        UnlockItem {
            name: "ChatGPT Web".to_string(),
            status: web_status.to_string(),
            region,
            check_time: Some(get_local_date_string()),
        },
    ]
}

View File

@@ -0,0 +1,60 @@
use reqwest::Client;
use super::UnlockItem;
use super::utils::{country_code_to_emoji, get_local_date_string};
/// Countries where Claude is not offered; the trace country code is mapped
/// onto a Yes/No verdict against this list.
const BLOCKED_CODES: [&str; 10] = ["AF", "BY", "CN", "CU", "HK", "IR", "KP", "MO", "RU", "SY"];
/// Check Claude availability by reading the `loc=` field from claude.ai's
/// cdn-cgi trace endpoint.
pub(super) async fn check_claude(client: &Client) -> UnlockItem {
    let item = |status: &str, region: Option<String>| UnlockItem {
        name: "Claude".to_string(),
        status: status.to_string(),
        region,
        check_time: Some(get_local_date_string()),
    };
    let body = match client.get("https://claude.ai/cdn-cgi/trace").send().await {
        Ok(response) => match response.text().await {
            Ok(body) => body,
            Err(_) => return item("Failed", None),
        },
        Err(_) => return item("Failed", None),
    };
    // Take the first `loc=XX` line from the trace payload.
    let country_code = body
        .lines()
        .find_map(|line| line.strip_prefix("loc="))
        .map(|rest| rest.trim().to_uppercase());
    match country_code {
        Some(code) => {
            let status = if BLOCKED_CODES.contains(&code.as_str()) {
                "No"
            } else {
                "Yes"
            };
            let emoji = country_code_to_emoji(&code);
            item(status, Some(format!("{emoji}{code}")))
        }
        None => item("Failed", None),
    }
}

View File

@@ -0,0 +1,490 @@
use regex::Regex;
use reqwest::Client;
use crate::{logging, utils::logging::Type};
use super::UnlockItem;
use super::utils::{country_code_to_emoji, get_local_date_string};
#[allow(clippy::cognitive_complexity)]
pub(super) async fn check_disney_plus(client: &Client) -> UnlockItem {
let device_api_url = "https://disney.api.edge.bamgrid.com/devices";
let auth_header =
"Bearer ZGlzbmV5JmJyb3dzZXImMS4wLjA.Cu56AgSfBTDag5NiRA81oLHkDZfu5L3CKadnefEAY84";
let device_req_body = serde_json::json!({
"deviceFamily": "browser",
"applicationRuntime": "chrome",
"deviceProfile": "windows",
"attributes": {}
});
let device_result = client
.post(device_api_url)
.header("authorization", auth_header)
.header("content-type", "application/json; charset=UTF-8")
.json(&device_req_body)
.send()
.await;
if device_result.is_err() {
return UnlockItem {
name: "Disney+".to_string(),
status: "Failed (Network Connection)".to_string(),
region: None,
check_time: Some(get_local_date_string()),
};
}
let device_response = match device_result {
Ok(response) => response,
Err(e) => {
logging!(
error,
Type::Network,
"Failed to get Disney+ device response: {}",
e
);
return UnlockItem {
name: "Disney+".to_string(),
status: "Failed (Network Connection)".to_string(),
region: None,
check_time: Some(get_local_date_string()),
};
}
};
if device_response.status().as_u16() == 403 {
return UnlockItem {
name: "Disney+".to_string(),
status: "No (IP Banned By Disney+)".to_string(),
region: None,
check_time: Some(get_local_date_string()),
};
}
let device_body = match device_response.text().await {
Ok(body) => body,
Err(_) => {
return UnlockItem {
name: "Disney+".to_string(),
status: "Failed (Error: Cannot read response)".to_string(),
region: None,
check_time: Some(get_local_date_string()),
};
}
};
let re = match Regex::new(r#""assertion"\s*:\s*"([^"]+)"#) {
Ok(re) => re,
Err(e) => {
logging!(
error,
Type::Network,
"Failed to compile assertion regex for Disney+: {}",
e
);
return UnlockItem {
name: "Disney+".to_string(),
status: "Failed (Regex Error)".to_string(),
region: None,
check_time: Some(get_local_date_string()),
};
}
};
let assertion = match re.captures(&device_body) {
Some(caps) => caps.get(1).map(|m| m.as_str().to_string()),
None => None,
};
if assertion.is_none() {
return UnlockItem {
name: "Disney+".to_string(),
status: "Failed (Error: Cannot extract assertion)".to_string(),
region: None,
check_time: Some(get_local_date_string()),
};
}
let token_url = "https://disney.api.edge.bamgrid.com/token";
let assertion_str = match assertion {
Some(assertion) => assertion,
None => {
logging!(error, Type::Network, "No assertion found for Disney+");
return UnlockItem {
name: "Disney+".to_string(),
status: "Failed (No Assertion)".to_string(),
region: None,
check_time: Some(get_local_date_string()),
};
}
};
let token_body = [
(
"grant_type",
"urn:ietf:params:oauth:grant-type:token-exchange",
),
("latitude", "0"),
("longitude", "0"),
("platform", "browser"),
("subject_token", assertion_str.as_str()),
(
"subject_token_type",
"urn:bamtech:params:oauth:token-type:device",
),
];
let token_result = client
.post(token_url)
.header("authorization", auth_header)
.header("content-type", "application/x-www-form-urlencoded")
.form(&token_body)
.send()
.await;
if token_result.is_err() {
return UnlockItem {
name: "Disney+".to_string(),
status: "Failed (Network Connection)".to_string(),
region: None,
check_time: Some(get_local_date_string()),
};
}
let token_response = match token_result {
Ok(response) => response,
Err(e) => {
logging!(
error,
Type::Network,
"Failed to get Disney+ token response: {}",
e
);
return UnlockItem {
name: "Disney+".to_string(),
status: "Failed (Network Connection)".to_string(),
region: None,
check_time: Some(get_local_date_string()),
};
}
};
let token_status = token_response.status();
let token_body_text = match token_response.text().await {
Ok(body) => body,
Err(_) => {
return UnlockItem {
name: "Disney+".to_string(),
status: "Failed (Error: Cannot read token response)".to_string(),
region: None,
check_time: Some(get_local_date_string()),
};
}
};
if token_body_text.contains("forbidden-location") || token_body_text.contains("403 ERROR") {
return UnlockItem {
name: "Disney+".to_string(),
status: "No (IP Banned By Disney+)".to_string(),
region: None,
check_time: Some(get_local_date_string()),
};
}
let token_json: Result<serde_json::Value, _> = serde_json::from_str(&token_body_text);
let refresh_token = match token_json {
Ok(json) => json
.get("refresh_token")
.and_then(|v| v.as_str())
.map(|s| s.to_string()),
Err(_) => match Regex::new(r#""refresh_token"\s*:\s*"([^"]+)"#) {
Ok(refresh_token_re) => refresh_token_re
.captures(&token_body_text)
.and_then(|caps| caps.get(1).map(|m| m.as_str().to_string())),
Err(e) => {
logging!(
error,
Type::Network,
"Failed to compile refresh_token regex for Disney+: {}",
e
);
None
}
},
};
if refresh_token.is_none() {
return UnlockItem {
name: "Disney+".to_string(),
status: format!(
"Failed (Error: Cannot extract refresh token, status: {}, response: {})",
token_status.as_u16(),
token_body_text.chars().take(100).collect::<String>() + "..."
),
region: None,
check_time: Some(get_local_date_string()),
};
}
let graphql_url = "https://disney.api.edge.bamgrid.com/graph/v1/device/graphql";
let graphql_payload = format!(
r#"{{"query":"mutation refreshToken($input: RefreshTokenInput!) {{ refreshToken(refreshToken: $input) {{ activeSession {{ sessionId }} }} }}","variables":{{"input":{{"refreshToken":"{}"}}}}}}"#,
refresh_token.unwrap_or_default()
);
let graphql_result = client
.post(graphql_url)
.header("authorization", auth_header)
.header("content-type", "application/json")
.body(graphql_payload)
.send()
.await;
if graphql_result.is_err() {
return UnlockItem {
name: "Disney+".to_string(),
status: "Failed (Network Connection)".to_string(),
region: None,
check_time: Some(get_local_date_string()),
};
}
let preview_check = client.get("https://disneyplus.com").send().await;
let is_unavailable = match preview_check {
Ok(response) => {
let url = response.url().to_string();
url.contains("preview") || url.contains("unavailable")
}
Err(_) => true,
};
let graphql_response = match graphql_result {
Ok(response) => response,
Err(e) => {
logging!(
error,
Type::Network,
"Failed to get Disney+ GraphQL response: {}",
e
);
return UnlockItem {
name: "Disney+".to_string(),
status: "Failed (Network Connection)".to_string(),
region: None,
check_time: Some(get_local_date_string()),
};
}
};
let graphql_status = graphql_response.status();
let graphql_body_text = match graphql_response.text().await {
Ok(text) => text,
Err(e) => {
logging!(
error,
Type::Network,
"Failed to read Disney+ GraphQL response text: {}",
e
);
String::new()
}
};
if graphql_body_text.is_empty() || graphql_status.as_u16() >= 400 {
let region_from_main = match client.get("https://www.disneyplus.com/").send().await {
Ok(response) => match response.text().await {
Ok(body) => match Regex::new(r#"region"\s*:\s*"([^"]+)"#) {
Ok(region_re) => region_re
.captures(&body)
.and_then(|caps| caps.get(1).map(|m| m.as_str().to_string())),
Err(e) => {
logging!(
error,
Type::Network,
"Failed to compile Disney+ main page region regex: {}",
e
);
None
}
},
Err(_) => None,
},
Err(_) => None,
};
if let Some(region) = region_from_main {
let emoji = country_code_to_emoji(&region);
return UnlockItem {
name: "Disney+".to_string(),
status: "Yes".to_string(),
region: Some(format!("{emoji}{region} (from main page)")),
check_time: Some(get_local_date_string()),
};
}
if graphql_body_text.is_empty() {
return UnlockItem {
name: "Disney+".to_string(),
status: format!(
"Failed (GraphQL error: empty response, status: {})",
graphql_status.as_u16()
),
region: None,
check_time: Some(get_local_date_string()),
};
}
return UnlockItem {
name: "Disney+".to_string(),
status: format!(
"Failed (GraphQL error: {}, status: {})",
graphql_body_text.chars().take(50).collect::<String>() + "...",
graphql_status.as_u16()
),
region: None,
check_time: Some(get_local_date_string()),
};
}
let region_re = match Regex::new(r#""countryCode"\s*:\s*"([^"]+)"#) {
Ok(re) => re,
Err(e) => {
logging!(
error,
Type::Network,
"Failed to compile Disney+ countryCode regex: {}",
e
);
return UnlockItem {
name: "Disney+".to_string(),
status: "Failed (Regex Error)".to_string(),
region: None,
check_time: Some(get_local_date_string()),
};
}
};
let region_code = region_re
.captures(&graphql_body_text)
.and_then(|caps| caps.get(1).map(|m| m.as_str().to_string()));
let supported_re = match Regex::new(r#""inSupportedLocation"\s*:\s*(false|true)"#) {
Ok(re) => re,
Err(e) => {
logging!(
error,
Type::Network,
"Failed to compile Disney+ supported location regex: {}",
e
);
return UnlockItem {
name: "Disney+".to_string(),
status: "Failed (Regex Error)".to_string(),
region: None,
check_time: Some(get_local_date_string()),
};
}
};
let in_supported_location = supported_re
.captures(&graphql_body_text)
.and_then(|caps| caps.get(1).map(|m| m.as_str() == "true"));
if region_code.is_none() {
let region_from_main = match client.get("https://www.disneyplus.com/").send().await {
Ok(response) => match response.text().await {
Ok(body) => match Regex::new(r#"region"\s*:\s*"([^"]+)"#) {
Ok(region_re) => region_re
.captures(&body)
.and_then(|caps| caps.get(1).map(|m| m.as_str().to_string())),
Err(e) => {
logging!(
error,
Type::Network,
"Failed to compile Disney+ main page region regex: {}",
e
);
None
}
},
Err(_) => None,
},
Err(_) => None,
};
if let Some(region) = region_from_main {
let emoji = country_code_to_emoji(&region);
return UnlockItem {
name: "Disney+".to_string(),
status: "Yes".to_string(),
region: Some(format!("{emoji}{region} (from main page)")),
check_time: Some(get_local_date_string()),
};
}
return UnlockItem {
name: "Disney+".to_string(),
status: "No".to_string(),
region: None,
check_time: Some(get_local_date_string()),
};
}
let region = match region_code {
Some(code) => code,
None => {
logging!(error, Type::Network, "No region code found for Disney+");
return UnlockItem {
name: "Disney+".to_string(),
status: "No".to_string(),
region: None,
check_time: Some(get_local_date_string()),
};
}
};
if region == "JP" {
let emoji = country_code_to_emoji("JP");
return UnlockItem {
name: "Disney+".to_string(),
status: "Yes".to_string(),
region: Some(format!("{emoji}{region}")),
check_time: Some(get_local_date_string()),
};
}
if is_unavailable {
return UnlockItem {
name: "Disney+".to_string(),
status: "No".to_string(),
region: None,
check_time: Some(get_local_date_string()),
};
}
match in_supported_location {
Some(false) => {
let emoji = country_code_to_emoji(&region);
UnlockItem {
name: "Disney+".to_string(),
status: "Soon".to_string(),
region: Some(format!("{emoji}{region}(即将上线)")),
check_time: Some(get_local_date_string()),
}
}
Some(true) => {
let emoji = country_code_to_emoji(&region);
UnlockItem {
name: "Disney+".to_string(),
status: "Yes".to_string(),
region: Some(format!("{emoji}{region}")),
check_time: Some(get_local_date_string()),
}
}
None => UnlockItem {
name: "Disney+".to_string(),
status: format!("Failed (Error: Unknown region status for {region})"),
region: None,
check_time: Some(get_local_date_string()),
},
}
}

View File

@@ -0,0 +1,66 @@
use regex::Regex;
use reqwest::Client;
use crate::{logging, utils::logging::Type};
use super::UnlockItem;
use super::utils::{country_code_to_emoji, get_local_date_string};
pub(super) async fn check_gemini(client: &Client) -> UnlockItem {
let url = "https://gemini.google.com";
match client.get(url).send().await {
Ok(response) => {
if let Ok(body) = response.text().await {
let is_ok = body.contains("45631641,null,true");
let status = if is_ok { "Yes" } else { "No" };
let re = match Regex::new(r#",2,1,200,"([A-Z]{3})""#) {
Ok(re) => re,
Err(e) => {
logging!(
error,
Type::Network,
"Failed to compile Gemini regex: {}",
e
);
return UnlockItem {
name: "Gemini".to_string(),
status: "Failed".to_string(),
region: None,
check_time: Some(get_local_date_string()),
};
}
};
let region = re.captures(&body).and_then(|caps| {
caps.get(1).map(|m| {
let country_code = m.as_str();
let emoji = country_code_to_emoji(country_code);
format!("{emoji}{country_code}")
})
});
UnlockItem {
name: "Gemini".to_string(),
status: status.to_string(),
region,
check_time: Some(get_local_date_string()),
}
} else {
UnlockItem {
name: "Gemini".to_string(),
status: "Failed".to_string(),
region: None,
check_time: Some(get_local_date_string()),
}
}
}
Err(_) => UnlockItem {
name: "Gemini".to_string(),
status: "Failed".to_string(),
region: None,
check_time: Some(get_local_date_string()),
},
}
}

View File

@@ -0,0 +1,188 @@
use std::sync::Arc;
use reqwest::Client;
use tauri::command;
use tokio::{sync::Mutex, task::JoinSet};
use crate::{logging, utils::logging::Type};
mod bahamut;
mod bilibili;
mod chatgpt;
mod claude;
mod disney_plus;
mod gemini;
mod netflix;
mod prime_video;
mod spotify;
mod tiktok;
mod types;
mod utils;
mod youtube;
pub use types::UnlockItem;
use bahamut::check_bahamut_anime;
use bilibili::{check_bilibili_china_mainland, check_bilibili_hk_mc_tw};
use chatgpt::check_chatgpt_combined;
use claude::check_claude;
use disney_plus::check_disney_plus;
use gemini::check_gemini;
use netflix::check_netflix;
use prime_video::check_prime_video;
use spotify::check_spotify;
use tiktok::check_tiktok;
use youtube::check_youtube_premium;
/// Tauri command: return the default all-"Pending" unlock item list so the
/// UI can render placeholders before any network checks have run.
#[command]
pub async fn get_unlock_items() -> Result<Vec<UnlockItem>, String> {
    Ok(types::default_unlock_items())
}
#[command]
pub async fn check_media_unlock() -> Result<Vec<UnlockItem>, String> {
let client = match Client::builder()
.user_agent("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36")
.timeout(std::time::Duration::from_secs(30))
.danger_accept_invalid_certs(true)
.danger_accept_invalid_hostnames(true)
.tcp_keepalive(std::time::Duration::from_secs(60))
.connection_verbose(true)
.build() {
Ok(client) => client,
Err(e) => return Err(format!("创建HTTP客户端失败: {e}")),
};
let results = Arc::new(Mutex::new(Vec::new()));
let mut tasks = JoinSet::new();
let client_arc = Arc::new(client);
{
let client = Arc::clone(&client_arc);
let results = Arc::clone(&results);
tasks.spawn(async move {
let result = check_bilibili_china_mainland(&client).await;
results.lock().await.push(result);
});
}
{
let client = Arc::clone(&client_arc);
let results = Arc::clone(&results);
tasks.spawn(async move {
let result = check_bilibili_hk_mc_tw(&client).await;
results.lock().await.push(result);
});
}
{
let client = Arc::clone(&client_arc);
let results = Arc::clone(&results);
tasks.spawn(async move {
let chatgpt_results = check_chatgpt_combined(&client).await;
let mut results = results.lock().await;
results.extend(chatgpt_results);
});
}
{
let client = Arc::clone(&client_arc);
let results = Arc::clone(&results);
tasks.spawn(async move {
let result = check_claude(&client).await;
results.lock().await.push(result);
});
}
{
let client = Arc::clone(&client_arc);
let results = Arc::clone(&results);
tasks.spawn(async move {
let result = check_gemini(&client).await;
results.lock().await.push(result);
});
}
{
let client = Arc::clone(&client_arc);
let results = Arc::clone(&results);
tasks.spawn(async move {
let result = check_youtube_premium(&client).await;
results.lock().await.push(result);
});
}
{
let client = Arc::clone(&client_arc);
let results = Arc::clone(&results);
tasks.spawn(async move {
let result = check_bahamut_anime(&client).await;
results.lock().await.push(result);
});
}
{
let client = Arc::clone(&client_arc);
let results = Arc::clone(&results);
tasks.spawn(async move {
let result = check_netflix(&client).await;
results.lock().await.push(result);
});
}
{
let client = Arc::clone(&client_arc);
let results = Arc::clone(&results);
tasks.spawn(async move {
let result = check_disney_plus(&client).await;
results.lock().await.push(result);
});
}
{
let client = Arc::clone(&client_arc);
let results = Arc::clone(&results);
tasks.spawn(async move {
let result = check_spotify(&client).await;
results.lock().await.push(result);
});
}
{
let client = Arc::clone(&client_arc);
let results = Arc::clone(&results);
tasks.spawn(async move {
let result = check_tiktok(&client).await;
results.lock().await.push(result);
});
}
{
let client = Arc::clone(&client_arc);
let results = Arc::clone(&results);
tasks.spawn(async move {
let result = check_prime_video(&client).await;
results.lock().await.push(result);
});
}
while let Some(res) = tasks.join_next().await {
if let Err(e) = res {
eprintln!("任务执行失败: {e}");
}
}
let results = match Arc::try_unwrap(results) {
Ok(mutex) => mutex.into_inner(),
Err(_) => {
logging!(
error,
Type::Network,
"Failed to unwrap results Arc, references still exist"
);
return Err("Failed to collect results".to_string());
}
};
Ok(results)
}

View File

@@ -0,0 +1,220 @@
use reqwest::Client;
use serde_json::Value;
use crate::{logging, utils::logging::Type};
use super::UnlockItem;
use super::utils::{country_code_to_emoji, get_local_date_string};
/// Check Netflix availability.
///
/// Strategy: try the fast.com CDN API first (cheap region lookup); if that
/// is not a definite "Yes", probe two licensed (non-original) titles —
/// 404 on both means Originals-only, 403 means blocked, 200/301 means full
/// unlock (region then read from a redirect on a third title).
///
/// Fixes vs the original: the fallthrough status string was missing its
/// closing parenthesis, and the redundant `is_err()` pre-checks made the
/// logging `Err` arms unreachable (the logs never fired).
pub(super) async fn check_netflix(client: &Client) -> UnlockItem {
    let cdn_result = check_netflix_cdn(client).await;
    if cdn_result.status == "Yes" {
        return cdn_result;
    }
    let failed = || UnlockItem {
        name: "Netflix".to_string(),
        status: "Failed".to_string(),
        region: None,
        check_time: Some(get_local_date_string()),
    };
    let url1 = "https://www.netflix.com/title/81280792";
    let url2 = "https://www.netflix.com/title/70143836";
    let status1 = match client
        .get(url1)
        .timeout(std::time::Duration::from_secs(30))
        .send()
        .await
    {
        Ok(response) => response.status().as_u16(),
        Err(e) => {
            eprintln!("Netflix请求错误: {e}");
            logging!(
                error,
                Type::Network,
                "Failed to get Netflix response 1: {}",
                e
            );
            return failed();
        }
    };
    let status2 = match client
        .get(url2)
        .timeout(std::time::Duration::from_secs(30))
        .send()
        .await
    {
        Ok(response) => response.status().as_u16(),
        Err(e) => {
            eprintln!("Netflix请求错误: {e}");
            logging!(
                error,
                Type::Network,
                "Failed to get Netflix response 2: {}",
                e
            );
            return failed();
        }
    };
    if status1 == 404 && status2 == 404 {
        // Only self-produced titles resolve: partial unlock.
        return UnlockItem {
            name: "Netflix".to_string(),
            status: "Originals Only".to_string(),
            region: None,
            check_time: Some(get_local_date_string()),
        };
    }
    if status1 == 403 || status2 == 403 {
        return UnlockItem {
            name: "Netflix".to_string(),
            status: "No".to_string(),
            region: None,
            check_time: Some(get_local_date_string()),
        };
    }
    if status1 == 200 || status1 == 301 || status2 == 200 || status2 == 301 {
        // Region: Netflix redirects /title/... to /<region>/title/...
        let test_url = "https://www.netflix.com/title/80018499";
        match client
            .get(test_url)
            .timeout(std::time::Duration::from_secs(30))
            .send()
            .await
        {
            Ok(response) => {
                if let Some(location) = response.headers().get("location")
                    && let Ok(location_str) = location.to_str()
                {
                    let parts: Vec<&str> = location_str.split('/').collect();
                    if parts.len() >= 4 {
                        let region_code = parts[3].split('-').next().unwrap_or("unknown");
                        let emoji = country_code_to_emoji(region_code);
                        return UnlockItem {
                            name: "Netflix".to_string(),
                            status: "Yes".to_string(),
                            region: Some(format!("{emoji}{region_code}")),
                            check_time: Some(get_local_date_string()),
                        };
                    }
                }
                // No redirect header — US is served without a region prefix.
                let emoji = country_code_to_emoji("us");
                UnlockItem {
                    name: "Netflix".to_string(),
                    status: "Yes".to_string(),
                    region: Some(format!("{emoji}{}", "us")),
                    check_time: Some(get_local_date_string()),
                }
            }
            Err(e) => {
                eprintln!("获取Netflix区域信息失败: {e}");
                UnlockItem {
                    name: "Netflix".to_string(),
                    status: "Yes (但无法获取区域)".to_string(),
                    region: None,
                    check_time: Some(get_local_date_string()),
                }
            }
        }
    } else {
        UnlockItem {
            name: "Netflix".to_string(),
            // BUGFIX: the closing ')' was missing from this status string.
            status: format!("Failed (状态码: {status1}_{status2})"),
            region: None,
            check_time: Some(get_local_date_string()),
        }
    }
}
/// Query fast.com's Netflix-backed speedtest API; the first target's
/// `location.country` doubles as a cheap unlock/region check.
async fn check_netflix_cdn(client: &Client) -> UnlockItem {
    let url = "https://api.fast.com/netflix/speedtest/v2?https=true&token=YXNkZmFzZGxmbnNkYWZoYXNkZmhrYWxm&urlCount=5";
    let item = |status: &str, region: Option<String>| UnlockItem {
        name: "Netflix".to_string(),
        status: status.to_string(),
        region,
        check_time: Some(get_local_date_string()),
    };
    let response = match client
        .get(url)
        .timeout(std::time::Duration::from_secs(30))
        .send()
        .await
    {
        Ok(response) => response,
        Err(e) => {
            eprintln!("Fast.com API请求失败: {e}");
            return item("Failed (CDN API)", None);
        }
    };
    // A 403 from the API means the IP itself is rejected.
    if response.status().as_u16() == 403 {
        return item("No (IP Banned By Netflix)", None);
    }
    let data: Value = match response.json().await {
        Ok(data) => data,
        Err(e) => {
            eprintln!("解析Fast.com API响应失败: {e}");
            return item("Failed (解析错误)", None);
        }
    };
    // Walk targets[0].location.country out of the JSON payload.
    let country = data
        .get("targets")
        .and_then(|t| t.as_array())
        .and_then(|targets| targets.first())
        .and_then(|target| target.get("location"))
        .and_then(|location| location.get("country"))
        .and_then(|c| c.as_str());
    match country {
        Some(country) => {
            let emoji = country_code_to_emoji(country);
            item("Yes", Some(format!("{emoji}{country}")))
        }
        None => item("Unknown", None),
    }
}

View File

@@ -0,0 +1,108 @@
use regex::Regex;
use reqwest::Client;
use crate::{logging, utils::logging::Type};
use super::UnlockItem;
use super::utils::{country_code_to_emoji, get_local_date_string};
/// Check Prime Video availability by scraping primevideo.com:
/// `isServiceRestricted` marks a blocked region and
/// `"currentTerritory":"XX"` carries the storefront region.
///
/// Fixes vs the original: the redundant `is_err()` pre-check made the
/// logging `Err` arm unreachable, and the final "Unknown Region" branch was
/// dead (`!is_blocked` was always true at that point); both are removed
/// with the reachable behavior preserved.
pub(super) async fn check_prime_video(client: &Client) -> UnlockItem {
    let item = |status: &str, region: Option<String>| UnlockItem {
        name: "Prime Video".to_string(),
        status: status.to_string(),
        region,
        check_time: Some(get_local_date_string()),
    };
    let response = match client.get("https://www.primevideo.com").send().await {
        Ok(response) => response,
        Err(e) => {
            logging!(
                error,
                Type::Network,
                "Failed to get Prime Video response: {}",
                e
            );
            return item("Failed (Network Connection)", None);
        }
    };
    let body = match response.text().await {
        Ok(body) => body,
        Err(_) => return item("Failed (Error: Cannot read response)", None),
    };
    if body.contains("isServiceRestricted") {
        return item("No (Service Not Available)", None);
    }
    let region_re = match Regex::new(r#""currentTerritory":"([^"]+)""#) {
        Ok(re) => re,
        Err(e) => {
            logging!(
                error,
                Type::Network,
                "Failed to compile Prime Video region regex: {}",
                e
            );
            return item("Failed (Regex Error)", None);
        }
    };
    let region_code = region_re
        .captures(&body)
        .and_then(|caps| caps.get(1).map(|m| m.as_str().to_string()));
    match region_code {
        Some(region) => {
            let emoji = country_code_to_emoji(&region);
            item("Yes", Some(format!("{emoji}{region}")))
        }
        // Page loaded but carried neither a restriction marker nor a
        // territory — treat as a page-format error.
        None => item("Failed (Error: PAGE ERROR)", None),
    }
}

View File

@@ -0,0 +1,79 @@
use reqwest::{Client, Url};
use super::UnlockItem;
use super::utils::{country_code_to_emoji, get_local_date_string};
/// Probe Spotify's country-selector API and derive the verdict from the
/// final URL (redirect target), HTTP status, and response body.
pub(super) async fn check_spotify(client: &Client) -> UnlockItem {
    let url = "https://www.spotify.com/api/content/v1/country-selector?platform=web&format=json";
    let response = match client.get(url).send().await {
        Ok(response) => response,
        Err(_) => {
            return UnlockItem {
                name: "Spotify".to_string(),
                status: "Failed".to_string(),
                region: None,
                check_time: Some(get_local_date_string()),
            };
        }
    };
    // Capture URL and status before consuming the response body.
    let final_url = response.url().clone();
    let status_code = response.status();
    let body = response.text().await.unwrap_or_default();
    // Region: prefer the redirected URL path, fall back to the JSON body.
    let region = extract_region(&final_url).or_else(|| extract_region_from_body(&body));
    UnlockItem {
        name: "Spotify".to_string(),
        status: determine_status(status_code.as_u16(), &body).to_string(),
        region,
        check_time: Some(get_local_date_string()),
    }
}
/// Map an HTTP status plus response body onto "Yes" / "No" / "Failed":
/// 403/451 and a country-block phrase mean "No", any other non-2xx means
/// "Failed", everything else "Yes".
fn determine_status(status: u16, body: &str) -> &'static str {
    match status {
        403 | 451 => "No",
        s if !(200..300).contains(&s) => "Failed",
        _ if body
            .to_lowercase()
            .contains("not available in your country") =>
        {
            "No"
        }
        _ => "Yes",
    }
}
/// Derive a region (flag emoji + uppercase code) from the first path
/// segment of the final Spotify URL, e.g. `/de-en/...` -> "🇩🇪DE". The `api`
/// segment means no country redirect happened.
fn extract_region(url: &Url) -> Option<String> {
    let first_segment = url.path_segments()?.next()?;
    if first_segment.is_empty() || first_segment == "api" {
        return None;
    }
    // Segments like "de-en" carry the country before the dash.
    let code = first_segment
        .split('-')
        .next()
        .unwrap_or(first_segment)
        .to_uppercase();
    let emoji = country_code_to_emoji(&code);
    Some(format!("{emoji}{code}"))
}
/// Cheap non-regex scan for `"countryCode":"XX"` in the JSON body; returns
/// the region as flag emoji + uppercase code.
fn extract_region_from_body(body: &str) -> Option<String> {
    let marker = "\"countryCode\":\"";
    let start = body.find(marker)? + marker.len();
    let rest = &body[start..];
    let end = rest.find('"')?;
    let code = rest[..end].to_uppercase();
    if code.is_empty() {
        return None;
    }
    let emoji = country_code_to_emoji(&code);
    Some(format!("{emoji}{code}"))
}

View File

@@ -0,0 +1,87 @@
use std::sync::OnceLock;
use regex::Regex;
use reqwest::Client;
use super::UnlockItem;
use super::utils::{country_code_to_emoji, get_local_date_string};
/// Check TikTok availability: first via the cdn-cgi trace endpoint, then
/// fall back to the homepage when the trace gave no region or failed.
pub(super) async fn check_tiktok(client: &Client) -> UnlockItem {
    let mut status = String::from("Failed");
    let mut region = None;
    // Primary pass: the trace endpoint.
    if let Ok(response) = client.get("https://www.tiktok.com/cdn-cgi/trace").send().await {
        let code = response.status().as_u16();
        if let Ok(body) = response.text().await {
            status = determine_status(code, &body).to_string();
            region = extract_region_from_body(&body);
        }
    }
    // Fallback pass: the homepage. A definite "No" from the trace is kept.
    if region.is_none() || status == "Failed" {
        if let Ok(response) = client.get("https://www.tiktok.com/").send().await {
            let code = response.status().as_u16();
            if let Ok(body) = response.text().await {
                if status != "No" {
                    status = determine_status(code, &body).to_string();
                }
                region = region.or_else(|| extract_region_from_body(&body));
            }
        }
    }
    UnlockItem {
        name: "TikTok".to_string(),
        status,
        region,
        check_time: Some(get_local_date_string()),
    }
}
/// Classify a TikTok response: geo-block statuses and block phrases map to
/// "No", other non-2xx statuses to "Failed", everything else to "Yes".
fn determine_status(status: u16, body: &str) -> &'static str {
    if matches!(status, 403 | 451) {
        return "No";
    }
    if !(200..300).contains(&status) {
        return "Failed";
    }
    let lower = body.to_lowercase();
    let blocked = [
        "access denied",
        "not available in your region",
        "tiktok is not available",
    ]
    .iter()
    .any(|needle| lower.contains(needle));
    if blocked { "No" } else { "Yes" }
}
/// Pull `"region":"xx"` (or `xx-yy`) out of a TikTok body and render it as
/// flag emoji + uppercase country code.
fn extract_region_from_body(body: &str) -> Option<String> {
    // Compiled at most once per process; a failed compile is cached as None.
    static REGION_REGEX: OnceLock<Option<Regex>> = OnceLock::new();
    let regex = REGION_REGEX
        .get_or_init(|| Regex::new(r#""region"\s*:\s*"([a-zA-Z-]+)""#).ok())
        .as_ref()?;
    let matched = regex.captures(body)?.get(1)?;
    let raw = matched.as_str();
    // "xx-yy" style values carry the country before the dash.
    let country_code = raw.split('-').next().unwrap_or(raw).to_uppercase();
    if country_code.is_empty() {
        return None;
    }
    let emoji = country_code_to_emoji(&country_code);
    Some(format!("{emoji}{country_code}"))
}

View File

@@ -0,0 +1,43 @@
use serde::{Deserialize, Serialize};
/// A single media-unlock probe result as surfaced to the UI.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UnlockItem {
    // Display name of the checked service (e.g. "Netflix").
    pub name: String,
    // Verdict: "Yes", "No", "Pending", or a "Failed (...)" detail string.
    pub status: String,
    // Optional region, usually a flag emoji followed by the country code.
    pub region: Option<String>,
    // Local timestamp of when the check finished; None while pending.
    pub check_time: Option<String>,
}
impl UnlockItem {
    /// Placeholder item shown before the corresponding check has run.
    pub fn pending(name: &str) -> Self {
        Self {
            name: name.to_string(),
            status: "Pending".to_string(),
            region: None,
            check_time: None,
        }
    }
}
/// Services surfaced in the unlock checker, in display order. Keep in sync
/// with the checks spawned by `check_media_unlock`.
const DEFAULT_UNLOCK_ITEM_NAMES: [&str; 13] = [
    "哔哩哔哩大陆",
    "哔哩哔哩港澳台",
    "ChatGPT iOS",
    "ChatGPT Web",
    "Claude",
    "Gemini",
    "Youtube Premium",
    "Bahamut Anime",
    "Netflix",
    "Disney+",
    "Prime Video",
    "Spotify",
    "TikTok",
];
/// Build the initial all-"Pending" item list handed to the UI.
pub fn default_unlock_items() -> Vec<UnlockItem> {
    DEFAULT_UNLOCK_ITEM_NAMES
        .into_iter()
        .map(UnlockItem::pending)
        .collect()
}

View File

@@ -0,0 +1,21 @@
use chrono::Local;
/// Current local time formatted as `YYYY-MM-DD HH:MM:SS`.
pub fn get_local_date_string() -> String {
    Local::now().format("%Y-%m-%d %H:%M:%S").to_string()
}
/// Convert a two-letter ISO country code into its regional-indicator flag
/// emoji (e.g. "US" -> 🇺🇸), case-insensitively.
///
/// Returns an empty string unless the first two bytes are ASCII letters, so
/// digits or non-Latin input no longer map onto unrelated code points (the
/// old version blindly offset any byte from 'A').
pub fn country_code_to_emoji(country_code: &str) -> String {
    let country_code = country_code.to_uppercase();
    let bytes = country_code.as_bytes();
    // Need at least two ASCII letters; anything else has no flag emoji.
    if bytes.len() < 2 || !bytes[..2].iter().all(|b| b.is_ascii_uppercase()) {
        return String::new();
    }
    // Regional indicator symbols start at U+1F1E6 for 'A'.
    let c1 = 0x1F1E6 + (bytes[0] as u32) - ('A' as u32);
    let c2 = 0x1F1E6 + (bytes[1] as u32) - ('A' as u32);
    char::from_u32(c1)
        .and_then(|c1| char::from_u32(c2).map(|c2| format!("{c1}{c2}")))
        .unwrap_or_default()
}

View File

@@ -0,0 +1,82 @@
use regex::Regex;
use reqwest::Client;
use crate::{logging, utils::logging::Type};
use super::UnlockItem;
use super::utils::{country_code_to_emoji, get_local_date_string};
/// Probe YouTube Premium availability through `client`.
///
/// Status mapping derived from the response body:
/// - "No": the page says Premium is not available in the user's country;
/// - "Yes": the page contains "ad-free"; the region is parsed from the
///   `id="country-code"` element when present;
/// - "Failed": request error, unreadable body, regex compile error, or an
///   unrecognized page.
///
/// Refactor: the `UnlockItem` literal for this service was duplicated five
/// times; a local constructor closure now builds every result, keeping each
/// exit path's status/region identical to before.
pub(super) async fn check_youtube_premium(client: &Client) -> UnlockItem {
    let url = "https://www.youtube.com/premium";

    // Single place that stamps the service name and check time.
    let make = |status: &str, region: Option<String>| UnlockItem {
        name: "Youtube Premium".to_string(),
        status: status.to_string(),
        region,
        check_time: Some(get_local_date_string()),
    };

    let response = match client.get(url).send().await {
        Ok(response) => response,
        Err(_) => return make("Failed", None),
    };
    let body = match response.text().await {
        Ok(body) => body,
        Err(_) => return make("Failed", None),
    };

    let body_lower = body.to_lowercase();
    if body_lower.contains("youtube premium is not available in your country") {
        return make("No", None);
    }
    if !body_lower.contains("ad-free") {
        // Neither marker found: treat the page as unrecognized.
        return make("Failed", None);
    }

    // Page indicates Premium works; try to extract the serving country code.
    let re = match Regex::new(r#"id="country-code"[^>]*>([^<]+)<"#) {
        Ok(re) => re,
        Err(e) => {
            logging!(
                error,
                Type::Network,
                "Failed to compile YouTube Premium regex: {}",
                e
            );
            return make("Failed", None);
        }
    };
    let region = re.captures(&body).and_then(|caps| {
        caps.get(1).map(|m| {
            let country_code = m.as_str().trim();
            let emoji = country_code_to_emoji(country_code);
            format!("{emoji}{country_code}")
        })
    });
    make("Yes", region)
}

View File

@@ -1,10 +1,11 @@
use anyhow::Result; use anyhow::Result;
use smartstring::alias::String;
// Common result type used by command functions
pub type CmdResult<T = ()> = Result<T, String>; pub type CmdResult<T = ()> = Result<T, String>;
// Command modules // Command modules
pub mod app; pub mod app;
pub mod backup;
pub mod clash; pub mod clash;
pub mod lightweight; pub mod lightweight;
pub mod media_unlock_checker; pub mod media_unlock_checker;
@@ -22,6 +23,7 @@ pub mod webdav;
// Re-export all command functions for backwards compatibility // Re-export all command functions for backwards compatibility
pub use app::*; pub use app::*;
pub use backup::*;
pub use clash::*; pub use clash::*;
pub use lightweight::*; pub use lightweight::*;
pub use media_unlock_checker::*; pub use media_unlock_checker::*;
@@ -36,3 +38,27 @@ pub use uwp::*;
pub use validate::*; pub use validate::*;
pub use verge::*; pub use verge::*;
pub use webdav::*; pub use webdav::*;
pub trait StringifyErr<T> {
fn stringify_err(self) -> CmdResult<T>;
fn stringify_err_log<F>(self, log_fn: F) -> CmdResult<T>
where
F: Fn(&str);
}
impl<T, E: std::fmt::Display> StringifyErr<T> for Result<T, E> {
fn stringify_err(self) -> CmdResult<T> {
self.map_err(|e| e.to_string().into())
}
fn stringify_err_log<F>(self, log_fn: F) -> CmdResult<T>
where
F: Fn(&str),
{
self.map_err(|e| {
let msg = String::from(e.to_string());
log_fn(&msg);
msg
})
}
}

View File

@@ -1,13 +1,15 @@
use super::CmdResult; use super::CmdResult;
use crate::core::{async_proxy_query::AsyncProxyQuery, EventDrivenProxyManager}; use crate::cmd::StringifyErr;
use crate::wrap_err; use crate::core::{EventDrivenProxyManager, async_proxy_query::AsyncProxyQuery};
use crate::process::AsyncHandler;
use crate::{logging, utils::logging::Type};
use network_interface::NetworkInterface; use network_interface::NetworkInterface;
use serde_yaml::Mapping; use serde_yaml_ng::Mapping;
/// get the system proxy /// get the system proxy
#[tauri::command] #[tauri::command]
pub async fn get_sys_proxy() -> CmdResult<Mapping> { pub async fn get_sys_proxy() -> CmdResult<Mapping> {
log::debug!(target: "app", "异步获取系统代理配置"); logging!(debug, Type::Network, "异步获取系统代理配置");
let current = AsyncProxyQuery::get_system_proxy().await; let current = AsyncProxyQuery::get_system_proxy().await;
@@ -19,20 +21,27 @@ pub async fn get_sys_proxy() -> CmdResult<Mapping> {
); );
map.insert("bypass".into(), current.bypass.into()); map.insert("bypass".into(), current.bypass.into());
log::debug!(target: "app", "返回系统代理配置: enable={}, {}:{}", current.enable, current.host, current.port); logging!(
debug,
Type::Network,
"返回系统代理配置: enable={}, {}:{}",
current.enable,
current.host,
current.port
);
Ok(map) Ok(map)
} }
/// 获取自动代理配置 /// 获取自动代理配置
#[tauri::command] #[tauri::command]
pub async fn get_auto_proxy() -> CmdResult<Mapping> { pub async fn get_auto_proxy() -> CmdResult<Mapping> {
log::debug!(target: "app", "开始获取自动代理配置(事件驱动)"); logging!(debug, Type::Network, "开始获取自动代理配置(事件驱动)");
let proxy_manager = EventDrivenProxyManager::global(); let proxy_manager = EventDrivenProxyManager::global();
let current = proxy_manager.get_auto_proxy_cached(); let current = proxy_manager.get_auto_proxy_cached().await;
// 异步请求更新,立即返回缓存数据 // 异步请求更新,立即返回缓存数据
tokio::spawn(async move { AsyncHandler::spawn(move || async move {
let _ = proxy_manager.get_auto_proxy_async().await; let _ = proxy_manager.get_auto_proxy_async().await;
}); });
@@ -40,7 +49,13 @@ pub async fn get_auto_proxy() -> CmdResult<Mapping> {
map.insert("enable".into(), current.enable.into()); map.insert("enable".into(), current.enable.into());
map.insert("url".into(), current.url.clone().into()); map.insert("url".into(), current.url.clone().into());
log::debug!(target: "app", "返回自动代理配置(缓存): enable={}, url={}", current.enable, current.url); logging!(
debug,
Type::Network,
"返回自动代理配置(缓存): enable={}, url={}",
current.enable,
current.url
);
Ok(map) Ok(map)
} }
@@ -81,7 +96,7 @@ pub fn get_network_interfaces_info() -> CmdResult<Vec<NetworkInterface>> {
use network_interface::{NetworkInterface, NetworkInterfaceConfig}; use network_interface::{NetworkInterface, NetworkInterfaceConfig};
let names = get_network_interfaces(); let names = get_network_interfaces();
let interfaces = wrap_err!(NetworkInterface::show())?; let interfaces = NetworkInterface::show().stringify_err()?;
let mut result = Vec::new(); let mut result = Vec::new();

File diff suppressed because it is too large Load Diff

View File

@@ -1,99 +1,20 @@
use super::CmdResult; use super::CmdResult;
use crate::{core::handle, module::mihomo::MihomoManager, state::proxy::CmdProxyState}; use crate::{logging, utils::logging::Type};
use std::{
sync::Mutex,
time::{Duration, Instant},
};
use tauri::Manager;
const PROVIDERS_REFRESH_INTERVAL: Duration = Duration::from_secs(3);
const PROXIES_REFRESH_INTERVAL: Duration = Duration::from_secs(1);
// TODO: 前端通过 emit 发送更新事件, tray 监听更新事件
/// 同步托盘和GUI的代理选择状态
#[tauri::command] #[tauri::command]
pub async fn get_proxies() -> CmdResult<serde_json::Value> { pub async fn sync_tray_proxy_selection() -> CmdResult<()> {
let manager = MihomoManager::global(); use crate::core::tray::Tray;
let app_handle = handle::Handle::global().app_handle().unwrap(); match Tray::global().update_menu().await {
let cmd_proxy_state = app_handle.state::<Mutex<CmdProxyState>>(); Ok(_) => {
logging!(info, Type::Cmd, "Tray proxy selection synced successfully");
let should_refresh = { Ok(())
let mut state = cmd_proxy_state.lock().unwrap();
let now = Instant::now();
if now.duration_since(state.last_refresh_time) > PROXIES_REFRESH_INTERVAL {
state.need_refresh = true;
state.last_refresh_time = now;
} }
state.need_refresh Err(e) => {
}; logging!(error, Type::Cmd, "Failed to sync tray proxy selection: {e}");
Err(e.to_string().into())
if should_refresh {
let proxies = manager.get_refresh_proxies().await?;
{
let mut state = cmd_proxy_state.lock().unwrap();
state.proxies = Box::new(proxies);
state.need_refresh = false;
} }
log::debug!(target: "app", "proxies刷新成功");
} }
let proxies = {
let state = cmd_proxy_state.lock().unwrap();
state.proxies.clone()
};
Ok(*proxies)
}
/// 强制刷新代理缓存用于profile切换
#[tauri::command]
pub async fn force_refresh_proxies() -> CmdResult<serde_json::Value> {
let manager = MihomoManager::global();
let app_handle = handle::Handle::global().app_handle().unwrap();
let cmd_proxy_state = app_handle.state::<Mutex<CmdProxyState>>();
log::debug!(target: "app", "强制刷新代理缓存");
let proxies = manager.get_refresh_proxies().await?;
{
let mut state = cmd_proxy_state.lock().unwrap();
state.proxies = Box::new(proxies.clone());
state.need_refresh = false;
state.last_refresh_time = Instant::now();
}
log::debug!(target: "app", "强制刷新代理缓存完成");
Ok(proxies)
}
#[tauri::command]
pub async fn get_providers_proxies() -> CmdResult<serde_json::Value> {
let app_handle = handle::Handle::global().app_handle().unwrap();
let cmd_proxy_state = app_handle.state::<Mutex<CmdProxyState>>();
let should_refresh = {
let mut state = cmd_proxy_state.lock().unwrap();
let now = Instant::now();
if now.duration_since(state.last_refresh_time) > PROVIDERS_REFRESH_INTERVAL {
state.need_refresh = true;
state.last_refresh_time = now;
}
state.need_refresh
};
if should_refresh {
let manager = MihomoManager::global();
let providers = manager.get_providers_proxies().await?;
{
let mut state = cmd_proxy_state.lock().unwrap();
state.providers_proxies = Box::new(providers);
state.need_refresh = false;
}
log::debug!(target: "app", "providers_proxies刷新成功");
}
let providers_proxies = {
let state = cmd_proxy_state.lock().unwrap();
state.providers_proxies.clone()
};
Ok(*providers_proxies)
} }

View File

@@ -1,36 +1,114 @@
use super::CmdResult; use super::CmdResult;
use crate::{config::*, wrap_err}; use crate::{cmd::StringifyErr, config::*, core::CoreManager, log_err};
use anyhow::Context; use anyhow::{Context, anyhow};
use serde_yaml::Mapping; use serde_yaml_ng::Mapping;
use smartstring::alias::String;
use std::collections::HashMap; use std::collections::HashMap;
/// 获取运行时配置 /// 获取运行时配置
#[tauri::command] #[tauri::command]
pub fn get_runtime_config() -> CmdResult<Option<Mapping>> { pub async fn get_runtime_config() -> CmdResult<Option<Mapping>> {
Ok(Config::runtime().latest().config.clone()) Ok(Config::runtime().await.latest_ref().config.clone())
} }
/// 获取运行时YAML配置 /// 获取运行时YAML配置
#[tauri::command] #[tauri::command]
pub fn get_runtime_yaml() -> CmdResult<String> { pub async fn get_runtime_yaml() -> CmdResult<String> {
let runtime = Config::runtime(); let runtime = Config::runtime().await;
let runtime = runtime.latest(); let runtime = runtime.latest_ref();
let config = runtime.config.as_ref(); let config = runtime.config.as_ref();
wrap_err!(config config
.ok_or(anyhow::anyhow!("failed to parse config to yaml file")) .ok_or_else(|| anyhow!("failed to parse config to yaml file"))
.and_then( .and_then(|config| {
|config| serde_yaml::to_string(config).context("failed to convert config to yaml") serde_yaml_ng::to_string(config)
)) .context("failed to convert config to yaml")
.map(|s| s.into())
})
.stringify_err()
} }
/// 获取运行时存在的键 /// 获取运行时存在的键
#[tauri::command] #[tauri::command]
pub fn get_runtime_exists() -> CmdResult<Vec<String>> { pub async fn get_runtime_exists() -> CmdResult<Vec<String>> {
Ok(Config::runtime().latest().exists_keys.clone()) Ok(Config::runtime().await.latest_ref().exists_keys.clone())
} }
/// 获取运行时日志 /// 获取运行时日志
#[tauri::command] #[tauri::command]
pub fn get_runtime_logs() -> CmdResult<HashMap<String, Vec<(String, String)>>> { pub async fn get_runtime_logs() -> CmdResult<HashMap<String, Vec<(String, String)>>> {
Ok(Config::runtime().latest().chain_logs.clone()) Ok(Config::runtime().await.latest_ref().chain_logs.clone())
}
#[tauri::command]
pub async fn get_runtime_proxy_chain_config(proxy_chain_exit_node: String) -> CmdResult<String> {
let runtime = Config::runtime().await;
let runtime = runtime.latest_ref();
let config = runtime
.config
.as_ref()
.ok_or_else(|| anyhow!("failed to parse config to yaml file"))
.stringify_err()?;
if let Some(serde_yaml_ng::Value::Sequence(proxies)) = config.get("proxies") {
let mut proxy_name = Some(Some(proxy_chain_exit_node.as_str()));
let mut proxies_chain = Vec::new();
while let Some(proxy) = proxies.iter().find(|proxy| {
if let serde_yaml_ng::Value::Mapping(proxy_map) = proxy {
proxy_map.get("name").map(|x| x.as_str()) == proxy_name
&& proxy_map.get("dialer-proxy").is_some()
} else {
false
}
}) {
proxies_chain.push(proxy.to_owned());
proxy_name = proxy.get("dialer-proxy").map(|x| x.as_str());
}
if let Some(entry_proxy) = proxies
.iter()
.find(|proxy| proxy.get("name").map(|x| x.as_str()) == proxy_name)
&& !proxies_chain.is_empty()
{
// 添加第一个节点
proxies_chain.push(entry_proxy.to_owned());
}
proxies_chain.reverse();
let mut config: HashMap<String, Vec<serde_yaml_ng::Value>> = HashMap::new();
config.insert("proxies".into(), proxies_chain);
serde_yaml_ng::to_string(&config)
.context("YAML generation failed")
.map(|s| s.into())
.stringify_err()
} else {
Err("failed to get proxies or proxy-groups".into())
}
}
/// 更新运行时链式代理配置
#[tauri::command]
pub async fn update_proxy_chain_config_in_runtime(
proxy_chain_config: Option<serde_yaml_ng::Value>,
) -> CmdResult<()> {
{
let runtime = Config::runtime().await;
let mut draft = runtime.draft_mut();
draft.update_proxy_chain_config(proxy_chain_config);
drop(draft);
runtime.apply();
}
// 生成新的运行配置文件并通知 Clash 核心重新加载
let run_path = Config::generate_file(ConfigType::Run)
.await
.stringify_err()?;
log_err!(CoreManager::global().put_configs_force(run_path).await);
Ok(())
} }

View File

@@ -1,165 +1,171 @@
use super::CmdResult; use super::CmdResult;
use crate::{ use crate::{
cmd::StringifyErr,
config::*, config::*,
core::*, core::{validate::CoreConfigValidator, *},
logging, logging,
utils::{dirs, logging::Type}, utils::{dirs, logging::Type},
wrap_err,
}; };
use std::fs; use smartstring::alias::String;
use tokio::fs;
/// 保存profiles的配置 /// 保存profiles的配置
#[tauri::command] #[tauri::command]
pub async fn save_profile_file(index: String, file_data: Option<String>) -> CmdResult { pub async fn save_profile_file(index: String, file_data: Option<String>) -> CmdResult {
if file_data.is_none() { let file_data = match file_data {
return Ok(()); Some(d) => d,
} None => return Ok(()),
// 在异步操作前完成所有文件操作
let (file_path, original_content, is_merge_file) = {
let profiles = Config::profiles();
let profiles_guard = profiles.latest();
let item = wrap_err!(profiles_guard.get_item(&index))?;
// 确定是否为merge类型文件
let is_merge = item.itype.as_ref().is_some_and(|t| t == "merge");
let content = wrap_err!(item.read_file())?;
let path = item.file.clone().ok_or("file field is null")?;
let profiles_dir = wrap_err!(dirs::app_profiles_dir())?;
(profiles_dir.join(path), content, is_merge)
}; };
// 保存新的配置文件 // 在异步操作前获取必要元数据并释放锁
wrap_err!(fs::write(&file_path, file_data.clone().unwrap()))?; let (rel_path, is_merge_file) = {
let profiles = Config::profiles().await;
let profiles_guard = profiles.latest_ref();
let item = profiles_guard.get_item(&index).stringify_err()?;
let is_merge = item.itype.as_ref().is_some_and(|t| t == "merge");
let path = item.file.clone().ok_or("file field is null")?;
(path, is_merge)
};
// 读取原始内容在释放profiles_guard后进行
let original_content = PrfItem {
file: Some(rel_path.clone()),
..Default::default()
}
.read_file()
.await
.stringify_err()?;
let profiles_dir = dirs::app_profiles_dir().stringify_err()?;
let file_path = profiles_dir.join(rel_path.as_str());
let file_path_str = file_path.to_string_lossy().to_string(); let file_path_str = file_path.to_string_lossy().to_string();
// 保存新的配置文件
fs::write(&file_path, &file_data).await.stringify_err()?;
logging!( logging!(
info, info,
Type::Config, Type::Config,
true,
"[cmd配置save] 开始验证配置文件: {}, 是否为merge文件: {}", "[cmd配置save] 开始验证配置文件: {}, 是否为merge文件: {}",
file_path_str, file_path_str,
is_merge_file is_merge_file
); );
// 对于 merge 文件,只进行语法验证,不进行后续内核验证
if is_merge_file { if is_merge_file {
logging!( return handle_merge_file(&file_path_str, &file_path, &original_content).await;
info, }
Type::Config,
true, handle_full_validation(&file_path_str, &file_path, &original_content).await
"[cmd配置save] 检测到merge文件只进行语法验证" }
);
match CoreManager::global() async fn restore_original(
.validate_config_file(&file_path_str, Some(true)) file_path: &std::path::Path,
.await original_content: &str,
{ ) -> Result<(), String> {
Ok((true, _)) => { fs::write(file_path, original_content).await.stringify_err()
logging!( }
info,
Type::Config, fn is_script_error(err: &str, file_path_str: &str) -> bool {
true, file_path_str.ends_with(".js")
"[cmd配置save] merge文件语法验证通过" || err.contains("Script syntax error")
); || err.contains("Script must contain a main function")
// 成功后尝试更新整体配置 || err.contains("Failed to read script file")
if let Err(e) = CoreManager::global().update_config().await { }
logging!(
warn, async fn handle_merge_file(
Type::Config, file_path_str: &str,
true, file_path: &std::path::Path,
"[cmd配置save] 更新整体配置时发生错误: {}", original_content: &str,
e ) -> CmdResult {
); logging!(
} info,
return Ok(()); Type::Config,
} "[cmd配置save] 检测到merge文件只进行语法验证"
Ok((false, error_msg)) => { );
match CoreConfigValidator::validate_config_file(file_path_str, Some(true)).await {
Ok((true, _)) => {
logging!(info, Type::Config, "[cmd配置save] merge文件语法验证通过");
if let Err(e) = CoreManager::global().update_config().await {
logging!( logging!(
warn, warn,
Type::Config, Type::Config,
true, "[cmd配置save] 更新整体配置时发生错误: {}",
"[cmd配置save] merge文件语法验证失败: {}",
error_msg
);
// 恢复原始配置文件
wrap_err!(fs::write(&file_path, original_content))?;
// 发送合并文件专用错误通知
let result = (false, error_msg.clone());
crate::cmd::validate::handle_yaml_validation_notice(&result, "合并配置文件");
return Ok(());
}
Err(e) => {
logging!(
error,
Type::Config,
true,
"[cmd配置save] 验证过程发生错误: {}",
e e
); );
// 恢复原始配置文件 } else {
wrap_err!(fs::write(&file_path, original_content))?; handle::Handle::refresh_clash();
return Err(e.to_string());
} }
}
}
// 非merge文件使用完整验证流程
match CoreManager::global()
.validate_config_file(&file_path_str, None)
.await
{
Ok((true, _)) => {
logging!(info, Type::Config, true, "[cmd配置save] 验证成功");
Ok(()) Ok(())
} }
Ok((false, error_msg)) => { Ok((false, error_msg)) => {
logging!( logging!(
warn, warn,
Type::Config, Type::Config,
true, "[cmd配置save] merge文件语法验证失败: {}",
"[cmd配置save] 验证失败: {}",
error_msg error_msg
); );
// 恢复原始配置文件 restore_original(file_path, original_content).await?;
wrap_err!(fs::write(&file_path, original_content))?; let result = (false, error_msg.clone());
crate::cmd::validate::handle_yaml_validation_notice(&result, "合并配置文件");
Ok(())
}
Err(e) => {
logging!(error, Type::Config, "[cmd配置save] 验证过程发生错误: {}", e);
restore_original(file_path, original_content).await?;
Err(e.to_string().into())
}
}
}
// 智能判断错误类型 async fn handle_full_validation(
let is_script_error = file_path_str.ends_with(".js") file_path_str: &str,
|| error_msg.contains("Script syntax error") file_path: &std::path::Path,
|| error_msg.contains("Script must contain a main function") original_content: &str,
|| error_msg.contains("Failed to read script file"); ) -> CmdResult {
match CoreConfigValidator::validate_config_file(file_path_str, None).await {
Ok((true, _)) => {
logging!(info, Type::Config, "[cmd配置save] 验证成功");
Ok(())
}
Ok((false, error_msg)) => {
logging!(warn, Type::Config, "[cmd配置save] 验证失败: {}", error_msg);
restore_original(file_path, original_content).await?;
if error_msg.contains("YAML syntax error") if error_msg.contains("YAML syntax error")
|| error_msg.contains("Failed to read file:") || error_msg.contains("Failed to read file:")
|| (!file_path_str.ends_with(".js") && !is_script_error) || (!file_path_str.ends_with(".js") && !is_script_error(&error_msg, file_path_str))
{ {
// 普通YAML错误使用YAML通知处理 logging!(
log::info!(target: "app", "[cmd配置save] YAML配置文件验证失败发送通知"); info,
let result = (false, error_msg.clone()); Type::Config,
"[cmd配置save] YAML配置文件验证失败发送通知"
);
let result = (false, error_msg.to_owned());
crate::cmd::validate::handle_yaml_validation_notice(&result, "YAML配置文件"); crate::cmd::validate::handle_yaml_validation_notice(&result, "YAML配置文件");
} else if is_script_error { } else if is_script_error(&error_msg, file_path_str) {
// 脚本错误使用专门的通知处理 logging!(
log::info!(target: "app", "[cmd配置save] 脚本文件验证失败,发送通知"); info,
let result = (false, error_msg.clone()); Type::Config,
"[cmd配置save] 脚本文件验证失败,发送通知"
);
let result = (false, error_msg.to_owned());
crate::cmd::validate::handle_script_validation_notice(&result, "脚本文件"); crate::cmd::validate::handle_script_validation_notice(&result, "脚本文件");
} else { } else {
// 普通配置错误使用一般通知 logging!(
log::info!(target: "app", "[cmd配置save] 其他类型验证失败,发送一般通知"); info,
handle::Handle::notice_message("config_validate::error", &error_msg); Type::Config,
"[cmd配置save] 其他类型验证失败,发送一般通知"
);
handle::Handle::notice_message("config_validate::error", error_msg.to_owned());
} }
Ok(()) Ok(())
} }
Err(e) => { Err(e) => {
logging!( logging!(error, Type::Config, "[cmd配置save] 验证过程发生错误: {}", e);
error, restore_original(file_path, original_content).await?;
Type::Config, Err(e.to_string().into())
true,
"[cmd配置save] 验证过程发生错误: {}",
e
);
// 恢复原始配置文件
wrap_err!(fs::write(&file_path, original_content))?;
Err(e.to_string())
} }
} }
} }

View File

@@ -1,48 +1,45 @@
use super::CmdResult; use super::{CmdResult, StringifyErr};
use crate::{ use crate::{
core::{service, CoreManager}, core::service::{self, SERVICE_MANAGER, ServiceStatus},
utils::i18n::t, utils::i18n::t,
}; };
use smartstring::SmartString;
async fn execute_service_operation( async fn execute_service_operation_sync(status: ServiceStatus, op_type: &str) -> CmdResult {
service_op: impl std::future::Future<Output = Result<(), impl ToString + std::fmt::Debug>>, if let Err(e) = SERVICE_MANAGER
op_type: &str, .lock()
) -> CmdResult { .await
if service_op.await.is_err() { .handle_service_status(&status)
let emsg = format!("{} {} failed", op_type, "Service"); .await
return Err(t(emsg.as_str())); {
} let emsg = format!("{} Service failed: {}", op_type, e);
if CoreManager::global().restart_core().await.is_err() { return Err(SmartString::from(&*t(emsg.as_str()).await));
let emsg = format!("{} {} failed", "Restart", "Core");
return Err(t(emsg.as_str()));
} }
Ok(()) Ok(())
} }
#[tauri::command] #[tauri::command]
pub async fn install_service() -> CmdResult { pub async fn install_service() -> CmdResult {
execute_service_operation(service::install_service(), "Install").await execute_service_operation_sync(ServiceStatus::InstallRequired, "Install").await
} }
#[tauri::command] #[tauri::command]
pub async fn uninstall_service() -> CmdResult { pub async fn uninstall_service() -> CmdResult {
execute_service_operation(service::uninstall_service(), "Uninstall").await execute_service_operation_sync(ServiceStatus::UninstallRequired, "Uninstall").await
} }
#[tauri::command] #[tauri::command]
pub async fn reinstall_service() -> CmdResult { pub async fn reinstall_service() -> CmdResult {
execute_service_operation(service::reinstall_service(), "Reinstall").await execute_service_operation_sync(ServiceStatus::ReinstallRequired, "Reinstall").await
} }
#[tauri::command] #[tauri::command]
pub async fn repair_service() -> CmdResult { pub async fn repair_service() -> CmdResult {
execute_service_operation(service::force_reinstall_service(), "Repair").await execute_service_operation_sync(ServiceStatus::ForceReinstallRequired, "Repair").await
} }
#[tauri::command] #[tauri::command]
pub async fn is_service_available() -> CmdResult<bool> { pub async fn is_service_available() -> CmdResult<bool> {
service::is_service_available() service::is_service_available().await.stringify_err()?;
.await Ok(true)
.map(|_| true)
.map_err(|e| e.to_string())
} }

View File

@@ -1,7 +1,9 @@
use super::CmdResult; use super::CmdResult;
use crate::{ use crate::{
core::{handle, CoreManager}, core::{CoreManager, handle},
logging,
module::sysinfo::PlatformSpecification, module::sysinfo::PlatformSpecification,
utils::logging::Type,
}; };
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use std::{ use std::{
@@ -23,20 +25,20 @@ static APP_START_TIME: Lazy<AtomicI64> = Lazy::new(|| {
#[tauri::command] #[tauri::command]
pub async fn export_diagnostic_info() -> CmdResult<()> { pub async fn export_diagnostic_info() -> CmdResult<()> {
let sysinfo = PlatformSpecification::new_async().await; let sysinfo = PlatformSpecification::new_sync();
let info = format!("{sysinfo:?}"); let info = format!("{sysinfo:?}");
let app_handle = handle::Handle::global().app_handle().unwrap(); let app_handle = handle::Handle::app_handle();
let cliboard = app_handle.clipboard(); let cliboard = app_handle.clipboard();
if cliboard.write_text(info).is_err() { if cliboard.write_text(info).is_err() {
log::error!(target: "app", "Failed to write to clipboard"); logging!(error, Type::System, "Failed to write to clipboard");
} }
Ok(()) Ok(())
} }
#[tauri::command] #[tauri::command]
pub async fn get_system_info() -> CmdResult<String> { pub async fn get_system_info() -> CmdResult<String> {
let sysinfo = PlatformSpecification::new_async().await; let sysinfo = PlatformSpecification::new_sync();
let info = format!("{sysinfo:?}"); let info = format!("{sysinfo:?}");
Ok(info) Ok(info)
} }
@@ -44,7 +46,7 @@ pub async fn get_system_info() -> CmdResult<String> {
/// 获取当前内核运行模式 /// 获取当前内核运行模式
#[tauri::command] #[tauri::command]
pub async fn get_running_mode() -> Result<String, String> { pub async fn get_running_mode() -> Result<String, String> {
Ok(CoreManager::global().get_running_mode().await.to_string()) Ok(CoreManager::global().get_running_mode().to_string())
} }
/// 获取应用的运行时间(毫秒) /// 获取应用的运行时间(毫秒)

View File

@@ -1,13 +1,14 @@
use super::CmdResult; use crate::cmd::CmdResult;
/// Platform-specific implementation for UWP functionality /// Platform-specific implementation for UWP functionality
#[cfg(windows)] #[cfg(windows)]
mod platform { mod platform {
use super::CmdResult; use crate::cmd::CmdResult;
use crate::{core::win_uwp, wrap_err}; use crate::cmd::StringifyErr;
use crate::core::win_uwp;
pub async fn invoke_uwp_tool() -> CmdResult { pub fn invoke_uwp_tool() -> CmdResult {
wrap_err!(win_uwp::invoke_uwptools().await) win_uwp::invoke_uwptools().stringify_err()
} }
} }
@@ -16,7 +17,7 @@ mod platform {
mod platform { mod platform {
use super::CmdResult; use super::CmdResult;
pub async fn invoke_uwp_tool() -> CmdResult { pub fn invoke_uwp_tool() -> CmdResult {
Ok(()) Ok(())
} }
} }
@@ -24,5 +25,5 @@ mod platform {
/// Command exposed to Tauri /// Command exposed to Tauri
#[tauri::command] #[tauri::command]
pub async fn invoke_uwp_tool() -> CmdResult { pub async fn invoke_uwp_tool() -> CmdResult {
platform::invoke_uwp_tool().await platform::invoke_uwp_tool()
} }

View File

@@ -1,10 +1,15 @@
use super::CmdResult; use super::CmdResult;
use crate::{core::*, logging, utils::logging::Type}; use crate::{
core::{validate::CoreConfigValidator, *},
logging,
utils::logging::Type,
};
use smartstring::alias::String;
/// 发送脚本验证通知消息 /// 发送脚本验证通知消息
#[tauri::command] #[tauri::command]
pub async fn script_validate_notice(status: String, msg: String) -> CmdResult { pub async fn script_validate_notice(status: String, msg: String) -> CmdResult {
handle::Handle::notice_message(&status, &msg); handle::Handle::notice_message(status.as_str(), msg.as_str());
Ok(()) Ok(())
} }
@@ -28,27 +33,17 @@ pub fn handle_script_validation_notice(result: &(bool, String), file_type: &str)
"config_validate::script_error" "config_validate::script_error"
}; };
logging!( logging!(warn, Type::Config, "{} 验证失败: {}", file_type, error_msg);
warn, handle::Handle::notice_message(status, error_msg.to_owned());
Type::Config,
true,
"{} 验证失败: {}",
file_type,
error_msg
);
handle::Handle::notice_message(status, error_msg);
} }
} }
/// 验证指定脚本文件 /// 验证指定脚本文件
#[tauri::command] #[tauri::command]
pub async fn validate_script_file(file_path: String) -> CmdResult<bool> { pub async fn validate_script_file(file_path: String) -> CmdResult<bool> {
logging!(info, Type::Config, true, "验证脚本文件: {}", file_path); logging!(info, Type::Config, "验证脚本文件: {}", file_path);
match CoreManager::global() match CoreConfigValidator::validate_config_file(&file_path, None).await {
.validate_config_file(&file_path, None)
.await
{
Ok(result) => { Ok(result) => {
handle_script_validation_notice(&result, "脚本文件"); handle_script_validation_notice(&result, "脚本文件");
Ok(result.0) // 返回验证结果布尔值 Ok(result.0) // 返回验证结果布尔值
@@ -58,7 +53,6 @@ pub async fn validate_script_file(file_path: String) -> CmdResult<bool> {
logging!( logging!(
error, error,
Type::Config, Type::Config,
true,
"验证脚本文件过程发生错误: {}", "验证脚本文件过程发生错误: {}",
error_msg error_msg
); );
@@ -76,7 +70,6 @@ pub fn handle_yaml_validation_notice(result: &(bool, String), file_type: &str) {
logging!( logging!(
info, info,
Type::Config, Type::Config,
true,
"[通知] 处理{}验证错误: {}", "[通知] 处理{}验证错误: {}",
file_type, file_type,
error_msg error_msg
@@ -117,22 +110,14 @@ pub fn handle_yaml_validation_notice(result: &(bool, String), file_type: &str) {
} }
}; };
logging!( logging!(warn, Type::Config, "{} 验证失败: {}", file_type, error_msg);
warn,
Type::Config,
true,
"{} 验证失败: {}",
file_type,
error_msg
);
logging!( logging!(
info, info,
Type::Config, Type::Config,
true,
"[通知] 发送通知: status={}, msg={}", "[通知] 发送通知: status={}, msg={}",
status, status,
error_msg error_msg
); );
handle::Handle::notice_message(status, error_msg); handle::Handle::notice_message(status, error_msg.to_owned());
} }
} }

View File

@@ -1,16 +1,20 @@
use super::CmdResult; use super::CmdResult;
use crate::{config::*, feat, wrap_err}; use crate::{cmd::StringifyErr, config::*, feat};
/// 获取Verge配置 /// 获取Verge配置
#[tauri::command] #[tauri::command]
pub fn get_verge_config() -> CmdResult<IVergeResponse> { pub async fn get_verge_config() -> CmdResult<IVergeResponse> {
let verge = Config::verge(); let verge = Config::verge().await;
let verge_data = verge.data().clone(); let verge_data = {
Ok(IVergeResponse::from(*verge_data)) let ref_data = verge.latest_ref();
ref_data.clone()
};
let verge_response = IVergeResponse::from(verge_data);
Ok(verge_response)
} }
/// 修改Verge配置 /// 修改Verge配置
#[tauri::command] #[tauri::command]
pub async fn patch_verge_config(payload: IVerge) -> CmdResult { pub async fn patch_verge_config(payload: IVerge) -> CmdResult {
wrap_err!(feat::patch_verge(payload, false).await) feat::patch_verge(&payload, false).await.stringify_err()
} }

View File

@@ -1,6 +1,7 @@
use super::CmdResult; use super::CmdResult;
use crate::{config::*, core, feat, wrap_err}; use crate::{cmd::StringifyErr, config::*, core, feat};
use reqwest_dav::list_cmd::ListFile; use reqwest_dav::list_cmd::ListFile;
use smartstring::alias::String;
/// 保存 WebDAV 配置 /// 保存 WebDAV 配置
#[tauri::command] #[tauri::command]
@@ -11,11 +12,14 @@ pub async fn save_webdav_config(url: String, username: String, password: String)
webdav_password: Some(password), webdav_password: Some(password),
..IVerge::default() ..IVerge::default()
}; };
Config::verge().draft().patch_config(patch.clone()); Config::verge().await.draft_mut().patch_config(&patch);
Config::verge().apply(); Config::verge().await.apply();
Config::verge()
.data() // 分离数据获取和异步调用
let verge_data = Config::verge().await.latest_ref().clone();
verge_data
.save_file() .save_file()
.await
.map_err(|err| err.to_string())?; .map_err(|err| err.to_string())?;
core::backup::WebDavClient::global().reset(); core::backup::WebDavClient::global().reset();
Ok(()) Ok(())
@@ -24,23 +28,25 @@ pub async fn save_webdav_config(url: String, username: String, password: String)
/// 创建 WebDAV 备份并上传 /// 创建 WebDAV 备份并上传
#[tauri::command] #[tauri::command]
pub async fn create_webdav_backup() -> CmdResult<()> { pub async fn create_webdav_backup() -> CmdResult<()> {
wrap_err!(feat::create_backup_and_upload_webdav().await) feat::create_backup_and_upload_webdav()
.await
.stringify_err()
} }
/// 列出 WebDAV 上的备份文件 /// 列出 WebDAV 上的备份文件
#[tauri::command] #[tauri::command]
pub async fn list_webdav_backup() -> CmdResult<Vec<ListFile>> { pub async fn list_webdav_backup() -> CmdResult<Vec<ListFile>> {
wrap_err!(feat::list_wevdav_backup().await) feat::list_wevdav_backup().await.stringify_err()
} }
/// 删除 WebDAV 上的备份文件 /// 删除 WebDAV 上的备份文件
#[tauri::command] #[tauri::command]
pub async fn delete_webdav_backup(filename: String) -> CmdResult<()> { pub async fn delete_webdav_backup(filename: String) -> CmdResult<()> {
wrap_err!(feat::delete_webdav_backup(filename).await) feat::delete_webdav_backup(filename).await.stringify_err()
} }
/// 从 WebDAV 恢复备份文件 /// 从 WebDAV 恢复备份文件
#[tauri::command] #[tauri::command]
pub async fn restore_webdav_backup(filename: String) -> CmdResult<()> { pub async fn restore_webdav_backup(filename: String) -> CmdResult<()> {
wrap_err!(feat::restore_webdav_backup(filename).await) feat::restore_webdav_backup(filename).await.stringify_err()
} }

View File

@@ -1,7 +1,11 @@
use crate::config::Config;
use crate::constants::{network, tun as tun_const};
use crate::utils::dirs::{ipc_path, path_to_str};
use crate::utils::{dirs, help}; use crate::utils::{dirs, help};
use crate::{logging, utils::logging::Type};
use anyhow::Result; use anyhow::Result;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_yaml::{Mapping, Value}; use serde_yaml_ng::{Mapping, Value};
use std::{ use std::{
net::{IpAddr, Ipv4Addr, SocketAddr}, net::{IpAddr, Ipv4Addr, SocketAddr},
str::FromStr, str::FromStr,
@@ -11,25 +15,34 @@ use std::{
pub struct IClashTemp(pub Mapping); pub struct IClashTemp(pub Mapping);
impl IClashTemp { impl IClashTemp {
pub fn new() -> Self { pub async fn new() -> Self {
let template = Self::template(); let template = Self::template();
match dirs::clash_path().and_then(|path| help::read_mapping(&path)) { let clash_path_result = dirs::clash_path();
let map_result = if let Ok(path) = clash_path_result {
help::read_mapping(&path).await
} else {
Err(anyhow::anyhow!("Failed to get clash path"))
};
match map_result {
Ok(mut map) => { Ok(mut map) => {
template.0.keys().for_each(|key| { template.0.keys().for_each(|key| {
if !map.contains_key(key) { if !map.contains_key(key)
map.insert(key.clone(), template.0.get(key).unwrap().clone()); && let Some(value) = template.0.get(key)
{
map.insert(key.clone(), value.clone());
} }
}); });
// 确保 secret 字段存在且不为空 // 确保 secret 字段存在且不为空
if let Some(Value::String(s)) = map.get_mut("secret") { if let Some(Value::String(s)) = map.get_mut("secret")
if s.is_empty() { && s.is_empty()
*s = "set-your-secret".to_string(); {
} *s = "set-your-secret".into();
} }
Self(Self::guard(map)) Self(Self::guard(map))
} }
Err(err) => { Err(err) => {
log::error!(target: "app", "{err}"); logging!(error, Type::Config, "{err}");
template template
} }
} }
@@ -37,26 +50,43 @@ impl IClashTemp {
pub fn template() -> Self { pub fn template() -> Self {
let mut map = Mapping::new(); let mut map = Mapping::new();
let mut tun = Mapping::new(); let mut tun_config = Mapping::new();
let mut cors_map = Mapping::new(); let mut cors_map = Mapping::new();
tun.insert("enable".into(), false.into());
tun.insert("stack".into(), "gvisor".into()); tun_config.insert("enable".into(), false.into());
tun.insert("auto-route".into(), true.into()); tun_config.insert("stack".into(), tun_const::DEFAULT_STACK.into());
tun.insert("strict-route".into(), false.into()); tun_config.insert("auto-route".into(), true.into());
tun.insert("auto-detect-interface".into(), true.into()); tun_config.insert("strict-route".into(), false.into());
tun.insert("dns-hijack".into(), vec!["any:53"].into()); tun_config.insert("auto-detect-interface".into(), true.into());
tun_config.insert("dns-hijack".into(), tun_const::DNS_HIJACK.into());
#[cfg(not(target_os = "windows"))] #[cfg(not(target_os = "windows"))]
map.insert("redir-port".into(), 7895.into()); map.insert("redir-port".into(), network::ports::DEFAULT_REDIR.into());
#[cfg(target_os = "linux")] #[cfg(target_os = "linux")]
map.insert("tproxy-port".into(), 7896.into()); map.insert("tproxy-port".into(), network::ports::DEFAULT_TPROXY.into());
map.insert("mixed-port".into(), 7897.into());
map.insert("socks-port".into(), 7898.into()); map.insert("mixed-port".into(), network::ports::DEFAULT_MIXED.into());
map.insert("port".into(), 7899.into()); map.insert("socks-port".into(), network::ports::DEFAULT_SOCKS.into());
map.insert("log-level".into(), "warning".into()); map.insert("port".into(), network::ports::DEFAULT_HTTP.into());
map.insert("log-level".into(), "info".into());
map.insert("allow-lan".into(), false.into()); map.insert("allow-lan".into(), false.into());
map.insert("ipv6".into(), true.into()); map.insert("ipv6".into(), true.into());
map.insert("mode".into(), "rule".into()); map.insert("mode".into(), "rule".into());
map.insert("external-controller".into(), "127.0.0.1:9097".into()); map.insert(
"external-controller".into(),
network::DEFAULT_EXTERNAL_CONTROLLER.into(),
);
#[cfg(unix)]
map.insert(
"external-controller-unix".into(),
Self::guard_external_controller_ipc().into(),
);
#[cfg(windows)]
map.insert(
"external-controller-pipe".into(),
Self::guard_external_controller_ipc().into(),
);
map.insert("tun".into(), tun_config.into());
cors_map.insert("allow-private-network".into(), true.into()); cors_map.insert("allow-private-network".into(), true.into());
cors_map.insert( cors_map.insert(
"allow-origins".into(), "allow-origins".into(),
@@ -73,7 +103,6 @@ impl IClashTemp {
.into(), .into(),
); );
map.insert("secret".into(), "set-your-secret".into()); map.insert("secret".into(), "set-your-secret".into());
map.insert("tun".into(), tun.into());
map.insert("external-controller-cors".into(), cors_map.into()); map.insert("external-controller-cors".into(), cors_map.into());
map.insert("unified-delay".into(), true.into()); map.insert("unified-delay".into(), true.into());
Self(map) Self(map)
@@ -87,7 +116,12 @@ impl IClashTemp {
let mixed_port = Self::guard_mixed_port(&config); let mixed_port = Self::guard_mixed_port(&config);
let socks_port = Self::guard_socks_port(&config); let socks_port = Self::guard_socks_port(&config);
let port = Self::guard_port(&config); let port = Self::guard_port(&config);
let ctrl = Self::guard_server_ctrl(&config); let ctrl = Self::guard_external_controller(&config);
#[cfg(unix)]
let external_controller_unix = Self::guard_external_controller_ipc();
#[cfg(windows)]
let external_controller_pipe = Self::guard_external_controller_ipc();
#[cfg(not(target_os = "windows"))] #[cfg(not(target_os = "windows"))]
config.insert("redir-port".into(), redir_port.into()); config.insert("redir-port".into(), redir_port.into());
#[cfg(target_os = "linux")] #[cfg(target_os = "linux")]
@@ -97,25 +131,16 @@ impl IClashTemp {
config.insert("port".into(), port.into()); config.insert("port".into(), port.into());
config.insert("external-controller".into(), ctrl.into()); config.insert("external-controller".into(), ctrl.into());
// 强制覆盖 external-controller-cors 字段,允许本地和 tauri 前端 #[cfg(unix)]
let mut cors_map = Mapping::new(); config.insert(
cors_map.insert("allow-private-network".into(), true.into()); "external-controller-unix".into(),
cors_map.insert( external_controller_unix.into(),
"allow-origins".into(), );
vec![ #[cfg(windows)]
"tauri://localhost", config.insert(
"http://tauri.localhost", "external-controller-pipe".into(),
// Only enable this in dev mode external_controller_pipe.into(),
#[cfg(feature = "verge-dev")]
"http://localhost:3000",
"https://yacd.metacubex.one",
"https://metacubex.github.io",
"https://board.zash.run.place",
]
.into(),
); );
config.insert("external-controller-cors".into(), cors_map.into());
config config
} }
@@ -125,12 +150,13 @@ impl IClashTemp {
} }
} }
pub fn save_config(&self) -> Result<()> { pub async fn save_config(&self) -> Result<()> {
help::save_yaml( help::save_yaml(
&dirs::clash_path()?, &dirs::clash_path()?,
&self.0, &self.0,
Some("# Generated by Clash Verge"), Some("# Generated by Clash Verge"),
) )
.await
} }
pub fn get_mixed_port(&self) -> u16 { pub fn get_mixed_port(&self) -> u16 {
@@ -188,9 +214,9 @@ impl IClashTemp {
Value::Number(val_num) => val_num.as_u64().map(|u| u as u16), Value::Number(val_num) => val_num.as_u64().map(|u| u as u16),
_ => None, _ => None,
}) })
.unwrap_or(7896); .unwrap_or(network::ports::DEFAULT_TPROXY);
if port == 0 { if port == 0 {
port = 7896; port = network::ports::DEFAULT_TPROXY;
} }
port port
} }
@@ -261,7 +287,28 @@ impl IClashTemp {
} }
None => None, None => None,
}) })
.unwrap_or("127.0.0.1:9097".into()) .unwrap_or_else(|| "127.0.0.1:9097".into())
}
pub fn guard_external_controller(config: &Mapping) -> String {
// 在初始化阶段,直接返回配置中的值,不进行额外检查
// 这样可以避免在配置加载期间的循环依赖
Self::guard_server_ctrl(config)
}
pub async fn guard_external_controller_with_setting(config: &Mapping) -> String {
// 检查 enable_external_controller 设置,用于运行时配置生成
let enable_external_controller = Config::verge()
.await
.latest_ref()
.enable_external_controller
.unwrap_or(false);
if enable_external_controller {
Self::guard_server_ctrl(config)
} else {
"".into()
}
} }
pub fn guard_client_ctrl(config: &Mapping) -> String { pub fn guard_client_ctrl(config: &Mapping) -> String {
@@ -276,6 +323,17 @@ impl IClashTemp {
Err(_) => "127.0.0.1:9097".into(), Err(_) => "127.0.0.1:9097".into(),
} }
} }
pub fn guard_external_controller_ipc() -> String {
// 总是使用当前的 IPC 路径,确保配置文件与运行时路径一致
ipc_path()
.ok()
.and_then(|path| path_to_str(&path).ok().map(|s| s.into()))
.unwrap_or_else(|| {
logging!(error, Type::Config, "Failed to get IPC path");
crate::constants::network::DEFAULT_EXTERNAL_CONTROLLER.into()
})
}
} }
#[derive(Default, Debug, Clone, Deserialize, Serialize, PartialEq, Eq)] #[derive(Default, Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]

View File

@@ -1,18 +1,18 @@
use super::{Draft, IClashTemp, IProfiles, IRuntime, IVerge}; use super::{IClashTemp, IProfiles, IRuntime, IVerge};
use crate::{ use crate::{
config::PrfItem, cmd,
core::{handle, CoreManager}, config::{PrfItem, profiles_append_item_safe},
enhance, logging, constants::{files, timing},
process::AsyncHandler, core::{CoreManager, handle, service, tray, validate::CoreConfigValidator},
utils::{dirs, help, logging::Type}, enhance, logging, logging_error,
utils::{Draft, dirs, help, logging::Type},
}; };
use anyhow::{anyhow, Result}; use anyhow::{Result, anyhow};
use once_cell::sync::OnceCell; use backoff::{Error as BackoffError, ExponentialBackoff};
use smartstring::alias::String;
use std::path::PathBuf; use std::path::PathBuf;
use tokio::time::{sleep, Duration}; use tokio::sync::OnceCell;
use tokio::time::sleep;
pub const RUNTIME_CONFIG: &str = "clash-verge.yaml";
pub const CHECK_CONFIG: &str = "clash-verge-check.yaml";
pub struct Config { pub struct Config {
clash_config: Draft<Box<IClashTemp>>, clash_config: Draft<Box<IClashTemp>>,
@@ -22,141 +22,182 @@ pub struct Config {
} }
impl Config { impl Config {
pub fn global() -> &'static Config { pub async fn global() -> &'static Config {
static CONFIG: OnceCell<Config> = OnceCell::new(); static CONFIG: OnceCell<Config> = OnceCell::const_new();
CONFIG
CONFIG.get_or_init(|| Config { .get_or_init(|| async {
clash_config: Draft::from(Box::new(IClashTemp::new())), Config {
verge_config: Draft::from(Box::new(IVerge::new())), clash_config: Draft::from(Box::new(IClashTemp::new().await)),
profiles_config: Draft::from(Box::new(IProfiles::new())), verge_config: Draft::from(Box::new(IVerge::new().await)),
runtime_config: Draft::from(Box::new(IRuntime::new())), profiles_config: Draft::from(Box::new(IProfiles::new().await)),
}) runtime_config: Draft::from(Box::new(IRuntime::new())),
}
})
.await
} }
pub fn clash() -> Draft<Box<IClashTemp>> { pub async fn clash() -> Draft<Box<IClashTemp>> {
Self::global().clash_config.clone() Self::global().await.clash_config.clone()
} }
pub fn verge() -> Draft<Box<IVerge>> { pub async fn verge() -> Draft<Box<IVerge>> {
Self::global().verge_config.clone() Self::global().await.verge_config.clone()
} }
pub fn profiles() -> Draft<Box<IProfiles>> { pub async fn profiles() -> Draft<Box<IProfiles>> {
Self::global().profiles_config.clone() Self::global().await.profiles_config.clone()
} }
pub fn runtime() -> Draft<Box<IRuntime>> { pub async fn runtime() -> Draft<Box<IRuntime>> {
Self::global().runtime_config.clone() Self::global().await.runtime_config.clone()
} }
/// 初始化订阅 /// 初始化订阅
pub async fn init_config() -> Result<()> { pub async fn init_config() -> Result<()> {
if Self::profiles() Self::ensure_default_profile_items().await?;
.data()
.get_item(&"Merge".to_string()) // init Tun mode
.is_err() if !cmd::system::is_admin().unwrap_or_default()
&& service::is_service_available().await.is_err()
{ {
let merge_item = PrfItem::from_merge(Some("Merge".to_string()))?; let verge = Config::verge().await;
Self::profiles().data().append_item(merge_item.clone())?; verge.draft_mut().enable_tun_mode = Some(false);
verge.apply();
let _ = tray::Tray::global().update_tray_display().await;
// 分离数据获取和异步调用避免Send问题
let verge_data = Config::verge().await.latest_ref().clone();
logging_error!(Type::Core, verge_data.save_file().await);
} }
if Self::profiles()
.data() let validation_result = Self::generate_and_validate().await?;
.get_item(&"Script".to_string())
.is_err() if let Some((msg_type, msg_content)) = validation_result {
{ sleep(timing::STARTUP_ERROR_DELAY).await;
let script_item = PrfItem::from_script(Some("Script".to_string()))?; handle::Handle::notice_message(msg_type, msg_content);
Self::profiles().data().append_item(script_item.clone())?;
} }
Ok(())
}
// Ensure "Merge" and "Script" profile items exist, adding them if missing.
async fn ensure_default_profile_items() -> Result<()> {
let profiles = Self::profiles().await;
if profiles.latest_ref().get_item("Merge").is_err() {
let merge_item = &mut PrfItem::from_merge(Some("Merge".into()))?;
profiles_append_item_safe(merge_item).await?;
}
if profiles.latest_ref().get_item("Script").is_err() {
let script_item = &mut PrfItem::from_script(Some("Script".into()))?;
profiles_append_item_safe(script_item).await?;
}
Ok(())
}
async fn generate_and_validate() -> Result<Option<(&'static str, String)>> {
// 生成运行时配置 // 生成运行时配置
if let Err(err) = Self::generate().await { if let Err(err) = Self::generate().await {
logging!(error, Type::Config, true, "生成运行时配置失败: {}", err); logging!(error, Type::Config, "生成运行时配置失败: {}", err);
} else { } else {
logging!(info, Type::Config, true, "生成运行时配置成功"); logging!(info, Type::Config, "生成运行时配置成功");
} }
// 生成运行时配置文件并验证 // 生成运行时配置文件并验证
let config_result = Self::generate_file(ConfigType::Run); let config_result = Self::generate_file(ConfigType::Run).await;
let validation_result = if config_result.is_ok() { if config_result.is_ok() {
// 验证配置文件 // 验证配置文件
logging!(info, Type::Config, true, "开始验证配置"); logging!(info, Type::Config, "开始验证配置");
match CoreManager::global().validate_config().await { match CoreConfigValidator::global().validate_config().await {
Ok((is_valid, error_msg)) => { Ok((is_valid, error_msg)) => {
if !is_valid { if !is_valid {
logging!( logging!(
warn, warn,
Type::Config, Type::Config,
true,
"[首次启动] 配置验证失败,使用默认最小配置启动: {}", "[首次启动] 配置验证失败,使用默认最小配置启动: {}",
error_msg error_msg
); );
CoreManager::global() CoreManager::global()
.use_default_config("config_validate::boot_error", &error_msg) .use_default_config("config_validate::boot_error", &error_msg)
.await?; .await?;
Some(("config_validate::boot_error", error_msg)) Ok(Some(("config_validate::boot_error", error_msg)))
} else { } else {
logging!(info, Type::Config, true, "配置验证成功"); logging!(info, Type::Config, "配置验证成功");
Some(("config_validate::success", String::new())) // 前端没有必要知道验证成功的消息,也没有事件驱动
// Some(("config_validate::success", String::new()))
Ok(None)
} }
} }
Err(err) => { Err(err) => {
logging!(warn, Type::Config, true, "验证程执行失败: {}", err); logging!(warn, Type::Config, "验证程执行失败: {}", err);
CoreManager::global() CoreManager::global()
.use_default_config("config_validate::process_terminated", "") .use_default_config("config_validate::process_terminated", "")
.await?; .await?;
Some(("config_validate::process_terminated", String::new())) Ok(Some(("config_validate::process_terminated", String::new())))
} }
} }
} else { } else {
logging!(warn, Type::Config, true, "生成配置文件失败,使用默认配置"); logging!(warn, Type::Config, "生成配置文件失败,使用默认配置");
CoreManager::global() CoreManager::global()
.use_default_config("config_validate::error", "") .use_default_config("config_validate::error", "")
.await?; .await?;
Some(("config_validate::error", String::new())) Ok(Some(("config_validate::error", String::new())))
};
// 在单独的任务中发送通知
if let Some((msg_type, msg_content)) = validation_result {
AsyncHandler::spawn(move || async move {
sleep(Duration::from_secs(2)).await;
handle::Handle::notice_message(msg_type, &msg_content);
});
} }
Ok(())
} }
/// 将订阅丢到对应的文件中 pub async fn generate_file(typ: ConfigType) -> Result<PathBuf> {
pub fn generate_file(typ: ConfigType) -> Result<PathBuf> {
let path = match typ { let path = match typ {
ConfigType::Run => dirs::app_home_dir()?.join(RUNTIME_CONFIG), ConfigType::Run => dirs::app_home_dir()?.join(files::RUNTIME_CONFIG),
ConfigType::Check => dirs::app_home_dir()?.join(CHECK_CONFIG), ConfigType::Check => dirs::app_home_dir()?.join(files::CHECK_CONFIG),
}; };
let runtime = Config::runtime(); let runtime = Config::runtime().await;
let runtime = runtime.latest();
let config = runtime let config = runtime
.latest_ref()
.config .config
.as_ref() .as_ref()
.ok_or(anyhow!("failed to get runtime config"))?; .ok_or_else(|| anyhow!("failed to get runtime config"))?
.clone();
drop(runtime); // 显式释放锁
help::save_yaml(&path, &config, Some("# Generated by Clash Verge"))?; help::save_yaml(&path, &config, Some("# Generated by Clash Verge")).await?;
Ok(path) Ok(path)
} }
/// 生成订阅存好
pub async fn generate() -> Result<()> { pub async fn generate() -> Result<()> {
let (config, exists_keys, logs) = enhance::enhance().await; let (config, exists_keys, logs) = enhance::enhance().await;
*Config::runtime().draft() = Box::new(IRuntime { **Config::runtime().await.draft_mut() = IRuntime {
config: Some(config), config: Some(config),
exists_keys, exists_keys,
chain_logs: logs, chain_logs: logs,
}); };
Ok(()) Ok(())
} }
pub async fn verify_config_initialization() {
let backoff_strategy = ExponentialBackoff {
initial_interval: std::time::Duration::from_millis(100),
max_interval: std::time::Duration::from_secs(2),
max_elapsed_time: Some(std::time::Duration::from_secs(10)),
multiplier: 2.0,
..Default::default()
};
let operation = || async {
if Config::runtime().await.latest_ref().config.is_some() {
return Ok::<(), BackoffError<anyhow::Error>>(());
}
Config::generate().await.map_err(BackoffError::transient)
};
if let Err(e) = backoff::future::retry(backoff_strategy, operation).await {
logging!(error, Type::Setup, "Config init verification failed: {}", e);
}
}
} }
#[derive(Debug)] #[derive(Debug)]
@@ -170,33 +211,33 @@ mod tests {
use std::mem; use std::mem;
#[test] #[test]
#[allow(unused_variables)]
#[allow(clippy::expect_used)]
fn test_prfitem_from_merge_size() { fn test_prfitem_from_merge_size() {
let merge_item = PrfItem::from_merge(Some("Merge".to_string())).unwrap(); let merge_item =
dbg!(&merge_item); PrfItem::from_merge(Some("Merge".into())).expect("Failed to create merge item in test");
let prfitem_size = mem::size_of_val(&merge_item); let prfitem_size = mem::size_of_val(&merge_item);
dbg!(prfitem_size);
// Boxed version // Boxed version
let boxed_merge_item = Box::new(merge_item); let boxed_merge_item = Box::new(merge_item);
let box_prfitem_size = mem::size_of_val(&boxed_merge_item); let box_prfitem_size = mem::size_of_val(&boxed_merge_item);
dbg!(box_prfitem_size);
// The size of Box<T> is always pointer-sized (usually 8 bytes on 64-bit) // The size of Box<T> is always pointer-sized (usually 8 bytes on 64-bit)
// assert_eq!(box_prfitem_size, mem::size_of::<Box<PrfItem>>()); // assert_eq!(box_prfitem_size, mem::size_of::<Box<PrfItem>>());
assert!(box_prfitem_size < prfitem_size); assert!(box_prfitem_size < prfitem_size);
} }
#[test] #[test]
#[allow(unused_variables)]
fn test_draft_size_non_boxed() { fn test_draft_size_non_boxed() {
let draft = Draft::from(IRuntime::new()); let draft = Draft::from(IRuntime::new());
let iruntime_size = std::mem::size_of_val(&draft); let iruntime_size = std::mem::size_of_val(&draft);
dbg!(iruntime_size);
assert_eq!(iruntime_size, std::mem::size_of::<Draft<IRuntime>>()); assert_eq!(iruntime_size, std::mem::size_of::<Draft<IRuntime>>());
} }
#[test] #[test]
#[allow(unused_variables)]
fn test_draft_size_boxed() { fn test_draft_size_boxed() {
let draft = Draft::from(Box::new(IRuntime::new())); let draft = Draft::from(Box::new(IRuntime::new()));
let box_iruntime_size = std::mem::size_of_val(&draft); let box_iruntime_size = std::mem::size_of_val(&draft);
dbg!(box_iruntime_size);
assert_eq!( assert_eq!(
box_iruntime_size, box_iruntime_size,
std::mem::size_of::<Draft<Box<IRuntime>>>() std::mem::size_of::<Draft<Box<IRuntime>>>()

View File

@@ -1,135 +0,0 @@
use super::{IClashTemp, IProfiles, IRuntime, IVerge};
use parking_lot::{MappedMutexGuard, Mutex, MutexGuard};
use std::sync::Arc;
#[derive(Debug, Clone)]
pub struct Draft<T: Clone + ToOwned> {
inner: Arc<Mutex<(T, Option<T>)>>,
}
macro_rules! draft_define {
($id: ident) => {
impl From<$id> for Draft<$id> {
fn from(data: $id) -> Self {
Draft {
inner: Arc::new(Mutex::new((data, None))),
}
}
}
impl Draft<Box<$id>> {
#[allow(unused)]
pub fn data(&self) -> MappedMutexGuard<Box<$id>> {
MutexGuard::map(self.inner.lock(), |guard| &mut guard.0)
}
pub fn latest(&self) -> MappedMutexGuard<Box<$id>> {
MutexGuard::map(self.inner.lock(), |inner| {
if inner.1.is_none() {
&mut inner.0
} else {
inner.1.as_mut().unwrap()
}
})
}
pub fn draft(&self) -> MappedMutexGuard<Box<$id>> {
MutexGuard::map(self.inner.lock(), |inner| {
if inner.1.is_none() {
inner.1 = Some(inner.0.clone());
}
inner.1.as_mut().unwrap()
})
}
pub fn apply(&self) -> Option<Box<$id>> {
let mut inner = self.inner.lock();
match inner.1.take() {
Some(draft) => {
let old_value = inner.0.to_owned();
inner.0 = draft.to_owned();
Some(old_value)
}
None => None,
}
}
pub fn discard(&self) -> Option<Box<$id>> {
let mut inner = self.inner.lock();
inner.1.take()
}
}
impl From<Box<$id>> for Draft<Box<$id>> {
fn from(data: Box<$id>) -> Self {
Draft {
inner: Arc::new(Mutex::new((data, None))),
}
}
}
};
}
// draft_define!(IClash);
draft_define!(IClashTemp);
draft_define!(IProfiles);
draft_define!(IRuntime);
draft_define!(IVerge);
#[test]
fn test_draft_box() {
let verge = Box::new(IVerge {
enable_auto_launch: Some(true),
enable_tun_mode: Some(false),
..IVerge::default()
});
let draft = Draft::from(verge);
assert_eq!(draft.data().enable_auto_launch, Some(true));
assert_eq!(draft.data().enable_tun_mode, Some(false));
assert_eq!(draft.draft().enable_auto_launch, Some(true));
assert_eq!(draft.draft().enable_tun_mode, Some(false));
{
let mut d = draft.draft();
d.enable_auto_launch = Some(false);
d.enable_tun_mode = Some(true);
}
assert_eq!(draft.data().enable_auto_launch, Some(true));
assert_eq!(draft.data().enable_tun_mode, Some(false));
assert_eq!(draft.draft().enable_auto_launch, Some(false));
assert_eq!(draft.draft().enable_tun_mode, Some(true));
assert_eq!(draft.latest().enable_auto_launch, Some(false));
assert_eq!(draft.latest().enable_tun_mode, Some(true));
assert!(draft.apply().is_some());
assert!(draft.apply().is_none());
assert_eq!(draft.data().enable_auto_launch, Some(false));
assert_eq!(draft.data().enable_tun_mode, Some(true));
assert_eq!(draft.draft().enable_auto_launch, Some(false));
assert_eq!(draft.draft().enable_tun_mode, Some(true));
{
let mut d = draft.draft();
d.enable_auto_launch = Some(true);
}
assert_eq!(draft.data().enable_auto_launch, Some(false));
assert_eq!(draft.draft().enable_auto_launch, Some(true));
assert!(draft.discard().is_some());
assert_eq!(draft.data().enable_auto_launch, Some(false));
assert!(draft.discard().is_none());
assert_eq!(draft.draft().enable_auto_launch, Some(false));
}

View File

@@ -1,14 +1,15 @@
use crate::utils::dirs::get_encryption_key; use crate::utils::dirs::get_encryption_key;
use aes_gcm::{ use aes_gcm::{
aead::{Aead, KeyInit},
Aes256Gcm, Key, Aes256Gcm, Key,
aead::{Aead, KeyInit},
}; };
use base64::{engine::general_purpose::STANDARD, Engine}; use base64::{Engine, engine::general_purpose::STANDARD};
use serde::{Deserialize, Deserializer, Serialize, Serializer}; use serde::{Deserialize, Deserializer, Serialize, Serializer};
const NONCE_LENGTH: usize = 12; const NONCE_LENGTH: usize = 12;
/// Encrypt data /// Encrypt data
#[allow(deprecated)]
pub fn encrypt_data(data: &str) -> Result<String, Box<dyn std::error::Error>> { pub fn encrypt_data(data: &str) -> Result<String, Box<dyn std::error::Error>> {
let encryption_key = get_encryption_key()?; let encryption_key = get_encryption_key()?;
let key = Key::<Aes256Gcm>::from_slice(&encryption_key); let key = Key::<Aes256Gcm>::from_slice(&encryption_key);
@@ -30,6 +31,7 @@ pub fn encrypt_data(data: &str) -> Result<String, Box<dyn std::error::Error>> {
} }
/// Decrypt data /// Decrypt data
#[allow(deprecated)]
pub fn decrypt_data(encrypted: &str) -> Result<String, Box<dyn std::error::Error>> { pub fn decrypt_data(encrypted: &str) -> Result<String, Box<dyn std::error::Error>> {
let encryption_key = get_encryption_key()?; let encryption_key = get_encryption_key()?;
let key = Key::<Aes256Gcm>::from_slice(&encryption_key); let key = Key::<Aes256Gcm>::from_slice(&encryption_key);

View File

@@ -1,16 +1,13 @@
mod clash; mod clash;
#[allow(clippy::module_inception)] #[allow(clippy::module_inception)]
mod config; mod config;
mod draft;
mod encrypt; mod encrypt;
mod prfitem; mod prfitem;
mod profiles; pub mod profiles;
mod runtime; mod runtime;
mod verge; mod verge;
pub use self::{ pub use self::{clash::*, config::*, encrypt::*, prfitem::*, profiles::*, runtime::*, verge::*};
clash::*, config::*, draft::*, encrypt::*, prfitem::*, profiles::*, runtime::*, verge::*,
};
pub const DEFAULT_PAC: &str = r#"function FindProxyForURL(url, host) { pub const DEFAULT_PAC: &str = r#"function FindProxyForURL(url, host) {
return "PROXY 127.0.0.1:%mixed-port%; SOCKS5 127.0.0.1:%mixed-port%; DIRECT;"; return "PROXY 127.0.0.1:%mixed-port%; SOCKS5 127.0.0.1:%mixed-port%; DIRECT;";

View File

@@ -1,15 +1,17 @@
use crate::utils::{ use crate::{
dirs, help, config::profiles,
network::{NetworkManager, ProxyType}, utils::{
tmpl, dirs, help,
network::{NetworkManager, ProxyType},
tmpl,
},
}; };
use anyhow::{bail, Context, Result}; use anyhow::{Context, Result, bail};
use reqwest::StatusCode;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_yaml::Mapping; use serde_yaml_ng::Mapping;
use std::{fs, time::Duration}; use smartstring::alias::String;
use std::time::Duration;
use super::Config; use tokio::fs;
#[derive(Debug, Clone, Deserialize, Serialize, Default)] #[derive(Debug, Clone, Deserialize, Serialize, Default)]
pub struct PrfItem { pub struct PrfItem {
@@ -104,6 +106,10 @@ pub struct PrfOption {
#[serde(skip_serializing_if = "Option::is_none")] #[serde(skip_serializing_if = "Option::is_none")]
pub danger_accept_invalid_certs: Option<bool>, pub danger_accept_invalid_certs: Option<bool>,
#[serde(default = "default_allow_auto_update")]
#[serde(skip_serializing_if = "Option::is_none")]
pub allow_auto_update: Option<bool>,
pub merge: Option<String>, pub merge: Option<String>,
pub script: Option<String>, pub script: Option<String>,
@@ -116,25 +122,29 @@ pub struct PrfOption {
} }
impl PrfOption { impl PrfOption {
pub fn merge(one: Option<Self>, other: Option<Self>) -> Option<Self> { pub fn merge(one: Option<&Self>, other: Option<&Self>) -> Option<Self> {
match (one, other) { match (one, other) {
(Some(mut a), Some(b)) => { (Some(a_ref), Some(b_ref)) => {
a.user_agent = b.user_agent.or(a.user_agent); let mut result = a_ref.clone();
a.with_proxy = b.with_proxy.or(a.with_proxy); result.user_agent = b_ref.user_agent.clone().or(result.user_agent);
a.self_proxy = b.self_proxy.or(a.self_proxy); result.with_proxy = b_ref.with_proxy.or(result.with_proxy);
a.danger_accept_invalid_certs = b result.self_proxy = b_ref.self_proxy.or(result.self_proxy);
result.danger_accept_invalid_certs = b_ref
.danger_accept_invalid_certs .danger_accept_invalid_certs
.or(a.danger_accept_invalid_certs); .or(result.danger_accept_invalid_certs);
a.update_interval = b.update_interval.or(a.update_interval); result.allow_auto_update = b_ref.allow_auto_update.or(result.allow_auto_update);
a.merge = b.merge.or(a.merge); result.update_interval = b_ref.update_interval.or(result.update_interval);
a.script = b.script.or(a.script); result.merge = b_ref.merge.clone().or(result.merge);
a.rules = b.rules.or(a.rules); result.script = b_ref.script.clone().or(result.script);
a.proxies = b.proxies.or(a.proxies); result.rules = b_ref.rules.clone().or(result.rules);
a.groups = b.groups.or(a.groups); result.proxies = b_ref.proxies.clone().or(result.proxies);
a.timeout_seconds = b.timeout_seconds.or(a.timeout_seconds); result.groups = b_ref.groups.clone().or(result.groups);
Some(a) result.timeout_seconds = b_ref.timeout_seconds.or(result.timeout_seconds);
Some(result)
} }
t => t.0.or(t.1), (Some(a_ref), None) => Some(a_ref.clone()),
(None, Some(b_ref)) => Some(b_ref.clone()),
(None, None) => None,
} }
} }
} }
@@ -142,25 +152,31 @@ impl PrfOption {
impl PrfItem { impl PrfItem {
/// From partial item /// From partial item
/// must contain `itype` /// must contain `itype`
pub async fn from(item: PrfItem, file_data: Option<String>) -> Result<PrfItem> { pub async fn from(item: &PrfItem, file_data: Option<String>) -> Result<PrfItem> {
if item.itype.is_none() { if item.itype.is_none() {
bail!("type should not be null"); bail!("type should not be null");
} }
match item.itype.unwrap().as_str() { let itype = item
.itype
.as_ref()
.ok_or_else(|| anyhow::anyhow!("type should not be null"))?;
match itype.as_str() {
"remote" => { "remote" => {
if item.url.is_none() { let url = item
bail!("url should not be null"); .url
} .as_ref()
let url = item.url.as_ref().unwrap().as_str(); .ok_or_else(|| anyhow::anyhow!("url should not be null"))?;
let name = item.name; let name = item.name.as_ref();
let desc = item.desc; let desc = item.desc.as_ref();
PrfItem::from_url(url, name, desc, item.option).await let option = item.option.as_ref();
PrfItem::from_url(url, name, desc, option).await
} }
"local" => { "local" => {
let name = item.name.unwrap_or("Local File".into()); let name = item.name.clone().unwrap_or_else(|| "Local File".into());
let desc = item.desc.unwrap_or("".into()); let desc = item.desc.clone().unwrap_or_else(|| "".into());
PrfItem::from_local(name, desc, file_data, item.option) let option = item.option.as_ref();
PrfItem::from_local(name, desc, file_data, option).await
} }
typ => bail!("invalid profile item type \"{typ}\""), typ => bail!("invalid profile item type \"{typ}\""),
} }
@@ -168,14 +184,14 @@ impl PrfItem {
/// ## Local type /// ## Local type
/// create a new item from name/desc /// create a new item from name/desc
pub fn from_local( pub async fn from_local(
name: String, name: String,
desc: String, desc: String,
file_data: Option<String>, file_data: Option<String>,
option: Option<PrfOption>, option: Option<&PrfOption>,
) -> Result<PrfItem> { ) -> Result<PrfItem> {
let uid = help::get_uid("L"); let uid = help::get_uid("L").into();
let file = format!("{uid}.yaml"); let file = format!("{uid}.yaml").into();
let opt_ref = option.as_ref(); let opt_ref = option.as_ref();
let update_interval = opt_ref.and_then(|o| o.update_interval); let update_interval = opt_ref.and_then(|o| o.update_interval);
let mut merge = opt_ref.and_then(|o| o.merge.clone()); let mut merge = opt_ref.and_then(|o| o.merge.clone());
@@ -185,31 +201,29 @@ impl PrfItem {
let mut groups = opt_ref.and_then(|o| o.groups.clone()); let mut groups = opt_ref.and_then(|o| o.groups.clone());
if merge.is_none() { if merge.is_none() {
let merge_item = PrfItem::from_merge(None)?; let merge_item = &mut PrfItem::from_merge(None)?;
Config::profiles().data().append_item(merge_item.clone())?; profiles::profiles_append_item_safe(merge_item).await?;
merge = merge_item.uid; merge = merge_item.uid.clone();
} }
if script.is_none() { if script.is_none() {
let script_item = PrfItem::from_script(None)?; let script_item = &mut PrfItem::from_script(None)?;
Config::profiles().data().append_item(script_item.clone())?; profiles::profiles_append_item_safe(script_item).await?;
script = script_item.uid; script = script_item.uid.clone();
} }
if rules.is_none() { if rules.is_none() {
let rules_item = PrfItem::from_rules()?; let rules_item = &mut PrfItem::from_rules()?;
Config::profiles().data().append_item(rules_item.clone())?; profiles::profiles_append_item_safe(rules_item).await?;
rules = rules_item.uid; rules = rules_item.uid.clone();
} }
if proxies.is_none() { if proxies.is_none() {
let proxies_item = PrfItem::from_proxies()?; let proxies_item = &mut PrfItem::from_proxies()?;
Config::profiles() profiles::profiles_append_item_safe(proxies_item).await?;
.data() proxies = proxies_item.uid.clone();
.append_item(proxies_item.clone())?;
proxies = proxies_item.uid;
} }
if groups.is_none() { if groups.is_none() {
let groups_item = PrfItem::from_groups()?; let groups_item = &mut PrfItem::from_groups()?;
Config::profiles().data().append_item(groups_item.clone())?; profiles::profiles_append_item_safe(groups_item).await?;
groups = groups_item.uid; groups = groups_item.uid.clone();
} }
Ok(PrfItem { Ok(PrfItem {
uid: Some(uid), uid: Some(uid),
@@ -231,7 +245,7 @@ impl PrfItem {
}), }),
home: None, home: None,
updated: Some(chrono::Local::now().timestamp() as usize), updated: Some(chrono::Local::now().timestamp() as usize),
file_data: Some(file_data.unwrap_or(tmpl::ITEM_LOCAL.into())), file_data: Some(file_data.unwrap_or_else(|| tmpl::ITEM_LOCAL.into())),
}) })
} }
@@ -239,23 +253,23 @@ impl PrfItem {
/// create a new item from url /// create a new item from url
pub async fn from_url( pub async fn from_url(
url: &str, url: &str,
name: Option<String>, name: Option<&String>,
desc: Option<String>, desc: Option<&String>,
option: Option<PrfOption>, option: Option<&PrfOption>,
) -> Result<PrfItem> { ) -> Result<PrfItem> {
let opt_ref = option.as_ref(); let with_proxy = option.is_some_and(|o| o.with_proxy.unwrap_or(false));
let with_proxy = opt_ref.is_some_and(|o| o.with_proxy.unwrap_or(false)); let self_proxy = option.is_some_and(|o| o.self_proxy.unwrap_or(false));
let self_proxy = opt_ref.is_some_and(|o| o.self_proxy.unwrap_or(false));
let accept_invalid_certs = let accept_invalid_certs =
opt_ref.is_some_and(|o| o.danger_accept_invalid_certs.unwrap_or(false)); option.is_some_and(|o| o.danger_accept_invalid_certs.unwrap_or(false));
let user_agent = opt_ref.and_then(|o| o.user_agent.clone()); let allow_auto_update = option.map(|o| o.allow_auto_update.unwrap_or(true));
let update_interval = opt_ref.and_then(|o| o.update_interval); let user_agent = option.and_then(|o| o.user_agent.clone());
let timeout = opt_ref.and_then(|o| o.timeout_seconds).unwrap_or(20); let update_interval = option.and_then(|o| o.update_interval);
let mut merge = opt_ref.and_then(|o| o.merge.clone()); let timeout = option.and_then(|o| o.timeout_seconds).unwrap_or(20);
let mut script = opt_ref.and_then(|o| o.script.clone()); let mut merge = option.and_then(|o| o.merge.clone());
let mut rules = opt_ref.and_then(|o| o.rules.clone()); let mut script = option.and_then(|o| o.script.clone());
let mut proxies = opt_ref.and_then(|o| o.proxies.clone()); let mut rules = option.and_then(|o| o.rules.clone());
let mut groups = opt_ref.and_then(|o| o.groups.clone()); let mut proxies = option.and_then(|o| o.proxies.clone());
let mut groups = option.and_then(|o| o.groups.clone());
// 选择代理类型 // 选择代理类型
let proxy_type = if self_proxy { let proxy_type = if self_proxy {
@@ -267,7 +281,7 @@ impl PrfItem {
}; };
// 使用网络管理器发送请求 // 使用网络管理器发送请求
let resp = match NetworkManager::global() let resp = match NetworkManager::new()
.get_with_interrupt( .get_with_interrupt(
url, url,
proxy_type, proxy_type,
@@ -285,25 +299,34 @@ impl PrfItem {
}; };
let status_code = resp.status(); let status_code = resp.status();
if !StatusCode::is_success(&status_code) { if !status_code.is_success() {
bail!("failed to fetch remote profile with status {status_code}") bail!("failed to fetch remote profile with status {status_code}")
} }
let header = resp.headers(); let header = resp.headers();
// parse the Subscription UserInfo // parse the Subscription UserInfo
let extra = match header.get("Subscription-Userinfo") { let extra;
Some(value) => { 'extra: {
let sub_info = value.to_str().unwrap_or(""); for (k, v) in header.iter() {
Some(PrfExtra { let key_lower = k.as_str().to_ascii_lowercase();
upload: help::parse_str(sub_info, "upload").unwrap_or(0), // Accept standard custom-metadata prefixes (x-amz-meta-, x-obs-meta-, x-cos-meta-, etc.).
download: help::parse_str(sub_info, "download").unwrap_or(0), if key_lower
total: help::parse_str(sub_info, "total").unwrap_or(0), .strip_suffix("subscription-userinfo")
expire: help::parse_str(sub_info, "expire").unwrap_or(0), .is_some_and(|prefix| prefix.is_empty() || prefix.ends_with('-'))
}) {
let sub_info = v.to_str().unwrap_or("");
extra = Some(PrfExtra {
upload: help::parse_str(sub_info, "upload").unwrap_or(0),
download: help::parse_str(sub_info, "download").unwrap_or(0),
total: help::parse_str(sub_info, "total").unwrap_or(0),
expire: help::parse_str(sub_info, "expire").unwrap_or(0),
});
break 'extra;
}
} }
None => None, extra = None;
}; }
// parse the Content-Disposition // parse the Content-Disposition
let filename = match header.get("Content-Disposition") { let filename = match header.get("Content-Disposition") {
@@ -314,19 +337,20 @@ impl PrfItem {
Some(filename) => { Some(filename) => {
let iter = percent_encoding::percent_decode(filename.as_bytes()); let iter = percent_encoding::percent_decode(filename.as_bytes());
let filename = iter.decode_utf8().unwrap_or_default(); let filename = iter.decode_utf8().unwrap_or_default();
filename.split("''").last().map(|s| s.to_string()) filename.split("''").last().map(|s| s.into())
} }
None => match help::parse_str::<String>(filename, "filename") { None => match help::parse_str::<String>(filename, "filename") {
Some(filename) => { Some(filename) => {
let filename = filename.trim_matches('"'); let filename = filename.trim_matches('"');
Some(filename.to_string()) Some(filename.into())
} }
None => None, None => None,
}, },
} }
} }
None => Some( None => Some(
crate::utils::help::get_last_part_and_decode(url).unwrap_or("Remote File".into()), crate::utils::help::get_last_part_and_decode(url)
.unwrap_or_else(|| "Remote File".into()),
), ),
}; };
let update_interval = match update_interval { let update_interval = match update_interval {
@@ -343,21 +367,25 @@ impl PrfItem {
let home = match header.get("profile-web-page-url") { let home = match header.get("profile-web-page-url") {
Some(value) => { Some(value) => {
let str_value = value.to_str().unwrap_or(""); let str_value = value.to_str().unwrap_or("");
Some(str_value.to_string()) Some(str_value.into())
} }
None => None, None => None,
}; };
let uid = help::get_uid("R"); let uid = help::get_uid("R").into();
let file = format!("{uid}.yaml"); let file = format!("{uid}.yaml").into();
let name = name.unwrap_or(filename.unwrap_or("Remote File".into())); let name = name.map(|s| s.to_owned()).unwrap_or_else(|| {
let data = resp.text_with_charset("utf-8").await?; filename
.map(|s| s.into())
.unwrap_or_else(|| "Remote File".into())
});
let data = resp.text_with_charset()?;
// process the charset "UTF-8 with BOM" // process the charset "UTF-8 with BOM"
let data = data.trim_start_matches('\u{feff}'); let data = data.trim_start_matches('\u{feff}');
// check the data whether the valid yaml format // check the data whether the valid yaml format
let yaml = serde_yaml::from_str::<Mapping>(data) let yaml = serde_yaml_ng::from_str::<Mapping>(data)
.context("the remote profile data is invalid yaml")?; .context("the remote profile data is invalid yaml")?;
if !yaml.contains_key("proxies") && !yaml.contains_key("proxy-providers") { if !yaml.contains_key("proxies") && !yaml.contains_key("proxy-providers") {
@@ -365,38 +393,36 @@ impl PrfItem {
} }
if merge.is_none() { if merge.is_none() {
let merge_item = PrfItem::from_merge(None)?; let merge_item = &mut PrfItem::from_merge(None)?;
Config::profiles().data().append_item(merge_item.clone())?; profiles::profiles_append_item_safe(merge_item).await?;
merge = merge_item.uid; merge = merge_item.uid.clone();
} }
if script.is_none() { if script.is_none() {
let script_item = PrfItem::from_script(None)?; let script_item = &mut PrfItem::from_script(None)?;
Config::profiles().data().append_item(script_item.clone())?; profiles::profiles_append_item_safe(script_item).await?;
script = script_item.uid; script = script_item.uid.clone();
} }
if rules.is_none() { if rules.is_none() {
let rules_item = PrfItem::from_rules()?; let rules_item = &mut PrfItem::from_rules()?;
Config::profiles().data().append_item(rules_item.clone())?; profiles::profiles_append_item_safe(rules_item).await?;
rules = rules_item.uid; rules = rules_item.uid.clone();
} }
if proxies.is_none() { if proxies.is_none() {
let proxies_item = PrfItem::from_proxies()?; let proxies_item = &mut PrfItem::from_proxies()?;
Config::profiles() profiles::profiles_append_item_safe(proxies_item).await?;
.data() proxies = proxies_item.uid.clone();
.append_item(proxies_item.clone())?;
proxies = proxies_item.uid;
} }
if groups.is_none() { if groups.is_none() {
let groups_item = PrfItem::from_groups()?; let groups_item = &mut PrfItem::from_groups()?;
Config::profiles().data().append_item(groups_item.clone())?; profiles::profiles_append_item_safe(groups_item).await?;
groups = groups_item.uid; groups = groups_item.uid.clone();
} }
Ok(PrfItem { Ok(PrfItem {
uid: Some(uid), uid: Some(uid),
itype: Some("remote".into()), itype: Some("remote".into()),
name: Some(name), name: Some(name),
desc, desc: desc.cloned(),
file: Some(file), file: Some(file),
url: Some(url.into()), url: Some(url.into()),
selected: None, selected: None,
@@ -408,6 +434,7 @@ impl PrfItem {
rules, rules,
proxies, proxies,
groups, groups,
allow_auto_update,
..PrfOption::default() ..PrfOption::default()
}), }),
home, home,
@@ -419,13 +446,13 @@ impl PrfItem {
/// ## Merge type (enhance) /// ## Merge type (enhance)
/// create the enhanced item by using `merge` rule /// create the enhanced item by using `merge` rule
pub fn from_merge(uid: Option<String>) -> Result<PrfItem> { pub fn from_merge(uid: Option<String>) -> Result<PrfItem> {
let mut id = help::get_uid("m"); let mut id = help::get_uid("m").into();
let mut template = tmpl::ITEM_MERGE_EMPTY.into(); let mut template = tmpl::ITEM_MERGE_EMPTY.into();
if let Some(uid) = uid { if let Some(uid) = uid {
id = uid; id = uid;
template = tmpl::ITEM_MERGE.into(); template = tmpl::ITEM_MERGE.into();
} }
let file = format!("{id}.yaml"); let file = format!("{id}.yaml").into();
Ok(PrfItem { Ok(PrfItem {
uid: Some(id), uid: Some(id),
@@ -446,11 +473,11 @@ impl PrfItem {
/// ## Script type (enhance) /// ## Script type (enhance)
/// create the enhanced item by using javascript quick.js /// create the enhanced item by using javascript quick.js
pub fn from_script(uid: Option<String>) -> Result<PrfItem> { pub fn from_script(uid: Option<String>) -> Result<PrfItem> {
let mut id = help::get_uid("s"); let mut id = help::get_uid("s").into();
if let Some(uid) = uid { if let Some(uid) = uid {
id = uid; id = uid;
} }
let file = format!("{id}.js"); // js ext let file = format!("{id}.js").into(); // js ext
Ok(PrfItem { Ok(PrfItem {
uid: Some(id), uid: Some(id),
@@ -470,8 +497,8 @@ impl PrfItem {
/// ## Rules type (enhance) /// ## Rules type (enhance)
pub fn from_rules() -> Result<PrfItem> { pub fn from_rules() -> Result<PrfItem> {
let uid = help::get_uid("r"); let uid = help::get_uid("r").into();
let file = format!("{uid}.yaml"); // yaml ext let file = format!("{uid}.yaml").into(); // yaml ext
Ok(PrfItem { Ok(PrfItem {
uid: Some(uid), uid: Some(uid),
@@ -491,8 +518,8 @@ impl PrfItem {
/// ## Proxies type (enhance) /// ## Proxies type (enhance)
pub fn from_proxies() -> Result<PrfItem> { pub fn from_proxies() -> Result<PrfItem> {
let uid = help::get_uid("p"); let uid = help::get_uid("p").into();
let file = format!("{uid}.yaml"); // yaml ext let file = format!("{uid}.yaml").into(); // yaml ext
Ok(PrfItem { Ok(PrfItem {
uid: Some(uid), uid: Some(uid),
@@ -512,8 +539,8 @@ impl PrfItem {
/// ## Groups type (enhance) /// ## Groups type (enhance)
pub fn from_groups() -> Result<PrfItem> { pub fn from_groups() -> Result<PrfItem> {
let uid = help::get_uid("g"); let uid = help::get_uid("g").into();
let file = format!("{uid}.yaml"); // yaml ext let file = format!("{uid}.yaml").into(); // yaml ext
Ok(PrfItem { Ok(PrfItem {
uid: Some(uid), uid: Some(uid),
@@ -532,24 +559,32 @@ impl PrfItem {
} }
/// get the file data /// get the file data
pub fn read_file(&self) -> Result<String> { pub async fn read_file(&self) -> Result<String> {
if self.file.is_none() { let file = self
bail!("could not find the file"); .file
} .as_ref()
.ok_or_else(|| anyhow::anyhow!("could not find the file"))?;
let file = self.file.clone().unwrap(); let path = dirs::app_profiles_dir()?.join(file.as_str());
let path = dirs::app_profiles_dir()?.join(file); let content = fs::read_to_string(path)
fs::read_to_string(path).context("failed to read the file") .await
.context("failed to read the file")?;
Ok(content.into())
} }
/// save the file data /// save the file data
pub fn save_file(&self, data: String) -> Result<()> { pub async fn save_file(&self, data: String) -> Result<()> {
if self.file.is_none() { let file = self
bail!("could not find the file"); .file
} .as_ref()
.ok_or_else(|| anyhow::anyhow!("could not find the file"))?;
let file = self.file.clone().unwrap(); let path = dirs::app_profiles_dir()?.join(file.as_str());
let path = dirs::app_profiles_dir()?.join(file); fs::write(path, data.as_bytes())
fs::write(path, data.as_bytes()).context("failed to save the file") .await
.context("failed to save the file")
} }
} }
// 向前兼容,默认为订阅启用自动更新
fn default_allow_auto_update() -> Option<bool> {
Some(true)
}

View File

@@ -1,9 +1,15 @@
use super::{prfitem::PrfItem, PrfOption}; use super::{PrfOption, prfitem::PrfItem};
use crate::utils::{dirs, help}; use crate::utils::{
use anyhow::{bail, Context, Result}; dirs::{self, PathBufExec},
help,
};
use crate::{logging, utils::logging::Type};
use anyhow::{Context, Result, bail};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_yaml::Mapping; use serde_yaml_ng::Mapping;
use std::{collections::HashSet, fs, io::Write}; use smartstring::alias::String;
use std::collections::HashSet;
use tokio::fs;
/// Define the `profiles.yaml` schema /// Define the `profiles.yaml` schema
#[derive(Default, Debug, Clone, Deserialize, Serialize)] #[derive(Default, Debug, Clone, Deserialize, Serialize)]
@@ -26,30 +32,48 @@ pub struct CleanupResult {
macro_rules! patch { macro_rules! patch {
($lv: expr, $rv: expr, $key: tt) => { ($lv: expr, $rv: expr, $key: tt) => {
if ($rv.$key).is_some() { if ($rv.$key).is_some() {
$lv.$key = $rv.$key; $lv.$key = $rv.$key.clone();
} }
}; };
} }
impl IProfiles { impl IProfiles {
pub fn new() -> Self { // Helper to find and remove an item by uid from the items vec, returning its file name (if any).
match dirs::profiles_path().and_then(|path| help::read_yaml::<Self>(&path)) { fn take_item_file_by_uid(
Ok(mut profiles) => { items: &mut Vec<PrfItem>,
if profiles.items.is_none() { target_uid: Option<String>,
profiles.items = Some(vec![]); ) -> Option<String> {
} for (i, _) in items.iter().enumerate() {
// compatible with the old old old version if items[i].uid == target_uid {
if let Some(items) = profiles.items.as_mut() { return items.remove(i).file;
for item in items.iter_mut() { }
if item.uid.is_none() { }
item.uid = Some(help::get_uid("d")); None
}
pub async fn new() -> Self {
match dirs::profiles_path() {
Ok(path) => match help::read_yaml::<Self>(&path).await {
Ok(mut profiles) => {
if profiles.items.is_none() {
profiles.items = Some(vec![]);
}
// compatible with the old old old version
if let Some(items) = profiles.items.as_mut() {
for item in items.iter_mut() {
if item.uid.is_none() {
item.uid = Some(help::get_uid("d").into());
}
} }
} }
profiles
} }
profiles Err(err) => {
} logging!(error, Type::Config, "{err}");
Self::template()
}
},
Err(err) => { Err(err) => {
log::error!(target: "app", "{err}"); logging!(error, Type::Config, "{err}");
Self::template() Self::template()
} }
} }
@@ -62,12 +86,13 @@ impl IProfiles {
} }
} }
pub fn save_file(&self) -> Result<()> { pub async fn save_file(&self) -> Result<()> {
help::save_yaml( help::save_yaml(
&dirs::profiles_path()?, &dirs::profiles_path()?,
self, self,
Some("# Profiles Config for Clash Verge"), Some("# Profiles Config for Clash Verge"),
) )
.await
} }
/// 只修改currentvalid和chain /// 只修改currentvalid和chain
@@ -76,8 +101,9 @@ impl IProfiles {
self.items = Some(vec![]); self.items = Some(vec![]);
} }
if let Some(current) = patch.current { if let Some(current) = patch.current
let items = self.items.as_ref().unwrap(); && let Some(items) = self.items.as_ref()
{
let some_uid = Some(current); let some_uid = Some(current);
if items.iter().any(|e| e.uid == some_uid) { if items.iter().any(|e| e.uid == some_uid) {
self.current = some_uid; self.current = some_uid;
@@ -97,28 +123,30 @@ impl IProfiles {
} }
/// find the item by the uid /// find the item by the uid
pub fn get_item(&self, uid: &String) -> Result<&PrfItem> { pub fn get_item(&self, uid: impl AsRef<str>) -> Result<&PrfItem> {
if let Some(items) = self.items.as_ref() { let uid_str = uid.as_ref();
let some_uid = Some(uid.clone());
if let Some(items) = self.items.as_ref() {
for each in items.iter() { for each in items.iter() {
if each.uid == some_uid { if let Some(uid_val) = &each.uid
&& uid_val.as_str() == uid_str
{
return Ok(each); return Ok(each);
} }
} }
} }
bail!("failed to get the profile item \"uid:{uid}\""); bail!("failed to get the profile item \"uid:{}\"", uid_str);
} }
/// append new item /// append new item
/// if the file_data is some /// if the file_data is some
/// then should save the data to file /// then should save the data to file
pub fn append_item(&mut self, mut item: PrfItem) -> Result<()> { pub async fn append_item(&mut self, item: &mut PrfItem) -> Result<()> {
if item.uid.is_none() { let uid = &item.uid;
if uid.is_none() {
bail!("the uid should not be null"); bail!("the uid should not be null");
} }
let uid = item.uid.clone();
// save the file data // save the file data
// move the field value after save // move the field value after save
@@ -127,19 +155,20 @@ impl IProfiles {
bail!("the file should not be null"); bail!("the file should not be null");
} }
let file = item.file.clone().unwrap(); let file = item.file.clone().ok_or_else(|| {
let path = dirs::app_profiles_dir()?.join(&file); anyhow::anyhow!("file field is required when file_data is provided")
})?;
let path = dirs::app_profiles_dir()?.join(file.as_str());
fs::File::create(path) fs::write(&path, file_data.as_bytes())
.with_context(|| format!("failed to create file \"{file}\""))? .await
.write(file_data.as_bytes())
.with_context(|| format!("failed to write to file \"{file}\""))?; .with_context(|| format!("failed to write to file \"{file}\""))?;
} }
if self.current.is_none() if self.current.is_none()
&& (item.itype == Some("remote".to_string()) || item.itype == Some("local".to_string())) && (item.itype == Some("remote".into()) || item.itype == Some("local".into()))
{ {
self.current = uid; self.current = uid.to_owned();
} }
if self.items.is_none() { if self.items.is_none() {
@@ -147,42 +176,43 @@ impl IProfiles {
} }
if let Some(items) = self.items.as_mut() { if let Some(items) = self.items.as_mut() {
items.push(item) items.push(item.to_owned());
} }
self.save_file() Ok(())
} }
/// reorder items /// reorder items
pub fn reorder(&mut self, active_id: String, over_id: String) -> Result<()> { pub async fn reorder(&mut self, active_id: &String, over_id: &String) -> Result<()> {
let mut items = self.items.take().unwrap_or_default(); let mut items = self.items.take().unwrap_or_default();
let mut old_index = None; let mut old_index = None;
let mut new_index = None; let mut new_index = None;
for (i, _) in items.iter().enumerate() { for (i, _) in items.iter().enumerate() {
if items[i].uid == Some(active_id.clone()) { if items[i].uid.as_ref() == Some(active_id) {
old_index = Some(i); old_index = Some(i);
} }
if items[i].uid == Some(over_id.clone()) { if items[i].uid.as_ref() == Some(over_id) {
new_index = Some(i); new_index = Some(i);
} }
} }
if old_index.is_none() || new_index.is_none() { let (old_idx, new_idx) = match (old_index, new_index) {
return Ok(()); (Some(old), Some(new)) => (old, new),
} _ => return Ok(()),
let item = items.remove(old_index.unwrap()); };
items.insert(new_index.unwrap(), item); let item = items.remove(old_idx);
items.insert(new_idx, item);
self.items = Some(items); self.items = Some(items);
self.save_file() self.save_file().await
} }
/// update the item value /// update the item value
pub fn patch_item(&mut self, uid: String, item: PrfItem) -> Result<()> { pub async fn patch_item(&mut self, uid: &String, item: &PrfItem) -> Result<()> {
let mut items = self.items.take().unwrap_or_default(); let mut items = self.items.take().unwrap_or_default();
for each in items.iter_mut() { for each in items.iter_mut() {
if each.uid == Some(uid.clone()) { if each.uid.as_ref() == Some(uid) {
patch!(each, item, itype); patch!(each, item, itype);
patch!(each, item, name); patch!(each, item, name);
patch!(each, item, desc); patch!(each, item, desc);
@@ -194,7 +224,7 @@ impl IProfiles {
patch!(each, item, option); patch!(each, item, option);
self.items = Some(items); self.items = Some(items);
return self.save_file(); return self.save_file().await;
} }
} }
@@ -204,13 +234,13 @@ impl IProfiles {
/// be used to update the remote item /// be used to update the remote item
/// only patch `updated` `extra` `file_data` /// only patch `updated` `extra` `file_data`
pub fn update_item(&mut self, uid: String, mut item: PrfItem) -> Result<()> { pub async fn update_item(&mut self, uid: &String, item: &mut PrfItem) -> Result<()> {
if self.items.is_none() { if self.items.is_none() {
self.items = Some(vec![]); self.items = Some(vec![]);
} }
// find the item // find the item
let _ = self.get_item(&uid)?; let _ = self.get_item(uid)?;
if let Some(items) = self.items.as_mut() { if let Some(items) = self.items.as_mut() {
let some_uid = Some(uid.clone()); let some_uid = Some(uid.clone());
@@ -219,23 +249,25 @@ impl IProfiles {
if each.uid == some_uid { if each.uid == some_uid {
each.extra = item.extra; each.extra = item.extra;
each.updated = item.updated; each.updated = item.updated;
each.home = item.home; each.home = item.home.to_owned();
each.option = PrfOption::merge(each.option.clone(), item.option); each.option = PrfOption::merge(each.option.as_ref(), item.option.as_ref());
// save the file data // save the file data
// move the field value after save // move the field value after save
if let Some(file_data) = item.file_data.take() { if let Some(file_data) = item.file_data.take() {
let file = each.file.take(); let file = each.file.take();
let file = let file = file.unwrap_or_else(|| {
file.unwrap_or(item.file.take().unwrap_or(format!("{}.yaml", &uid))); item.file
.take()
.unwrap_or_else(|| format!("{}.yaml", &uid).into())
});
// the file must exists // the file must exists
each.file = Some(file.clone()); each.file = Some(file.clone());
let path = dirs::app_profiles_dir()?.join(&file); let path = dirs::app_profiles_dir()?.join(file.as_str());
fs::File::create(path) fs::write(&path, file_data.as_bytes())
.with_context(|| format!("failed to create file \"{file}\""))? .await
.write(file_data.as_bytes())
.with_context(|| format!("failed to write to file \"{file}\""))?; .with_context(|| format!("failed to write to file \"{file}\""))?;
} }
@@ -244,137 +276,66 @@ impl IProfiles {
} }
} }
self.save_file() self.save_file().await
} }
/// delete item /// delete item
/// if delete the current then return true /// if delete the current then return true
pub fn delete_item(&mut self, uid: String) -> Result<bool> { pub async fn delete_item(&mut self, uid: &String) -> Result<bool> {
let current = self.current.as_ref().unwrap_or(&uid); let current = self.current.as_ref().unwrap_or(uid);
let current = current.clone(); let current = current.clone();
let item = self.get_item(&uid)?; let item = self.get_item(uid)?;
let merge_uid = item.option.as_ref().and_then(|e| e.merge.clone()); let merge_uid = item.option.as_ref().and_then(|e| e.merge.clone());
let script_uid = item.option.as_ref().and_then(|e| e.script.clone()); let script_uid = item.option.as_ref().and_then(|e| e.script.clone());
let rules_uid = item.option.as_ref().and_then(|e| e.rules.clone()); let rules_uid = item.option.as_ref().and_then(|e| e.rules.clone());
let proxies_uid = item.option.as_ref().and_then(|e| e.proxies.clone()); let proxies_uid = item.option.as_ref().and_then(|e| e.proxies.clone());
let groups_uid = item.option.as_ref().and_then(|e| e.groups.clone()); let groups_uid = item.option.as_ref().and_then(|e| e.groups.clone());
let mut items = self.items.take().unwrap_or_default(); let mut items = self.items.take().unwrap_or_default();
let mut index = None;
let mut merge_index = None;
let mut script_index = None;
let mut rules_index = None;
let mut proxies_index = None;
let mut groups_index = None;
// get the index // remove the main item (if exists) and delete its file
for (i, _) in items.iter().enumerate() { if let Some(file) = Self::take_item_file_by_uid(&mut items, Some(uid.clone())) {
if items[i].uid == Some(uid.clone()) { let _ = dirs::app_profiles_dir()?
index = Some(i); .join(file.as_str())
break; .remove_if_exists()
} .await;
} }
if let Some(index) = index {
if let Some(file) = items.remove(index).file { // remove related extension items (merge, script, rules, proxies, groups)
let _ = dirs::app_profiles_dir().map(|path| { if let Some(file) = Self::take_item_file_by_uid(&mut items, merge_uid.clone()) {
let path = path.join(file); let _ = dirs::app_profiles_dir()?
if path.exists() { .join(file.as_str())
let _ = fs::remove_file(path); .remove_if_exists()
} .await;
});
}
} }
// get the merge index if let Some(file) = Self::take_item_file_by_uid(&mut items, script_uid.clone()) {
for (i, _) in items.iter().enumerate() { let _ = dirs::app_profiles_dir()?
if items[i].uid == merge_uid { .join(file.as_str())
merge_index = Some(i); .remove_if_exists()
break; .await;
}
} }
if let Some(index) = merge_index { if let Some(file) = Self::take_item_file_by_uid(&mut items, rules_uid.clone()) {
if let Some(file) = items.remove(index).file { let _ = dirs::app_profiles_dir()?
let _ = dirs::app_profiles_dir().map(|path| { .join(file.as_str())
let path = path.join(file); .remove_if_exists()
if path.exists() { .await;
let _ = fs::remove_file(path);
}
});
}
} }
// get the script index if let Some(file) = Self::take_item_file_by_uid(&mut items, proxies_uid.clone()) {
for (i, _) in items.iter().enumerate() { let _ = dirs::app_profiles_dir()?
if items[i].uid == script_uid { .join(file.as_str())
script_index = Some(i); .remove_if_exists()
break; .await;
}
} }
if let Some(index) = script_index { if let Some(file) = Self::take_item_file_by_uid(&mut items, groups_uid.clone()) {
if let Some(file) = items.remove(index).file { let _ = dirs::app_profiles_dir()?
let _ = dirs::app_profiles_dir().map(|path| { .join(file.as_str())
let path = path.join(file); .remove_if_exists()
if path.exists() { .await;
let _ = fs::remove_file(path);
}
});
}
}
// get the rules index
for (i, _) in items.iter().enumerate() {
if items[i].uid == rules_uid {
rules_index = Some(i);
break;
}
}
if let Some(index) = rules_index {
if let Some(file) = items.remove(index).file {
let _ = dirs::app_profiles_dir().map(|path| {
let path = path.join(file);
if path.exists() {
let _ = fs::remove_file(path);
}
});
}
}
// get the proxies index
for (i, _) in items.iter().enumerate() {
if items[i].uid == proxies_uid {
proxies_index = Some(i);
break;
}
}
if let Some(index) = proxies_index {
if let Some(file) = items.remove(index).file {
let _ = dirs::app_profiles_dir().map(|path| {
let path = path.join(file);
if path.exists() {
let _ = fs::remove_file(path);
}
});
}
}
// get the groups index
for (i, _) in items.iter().enumerate() {
if items[i].uid == groups_uid {
groups_index = Some(i);
break;
}
}
if let Some(index) = groups_index {
if let Some(file) = items.remove(index).file {
let _ = dirs::app_profiles_dir().map(|path| {
let path = path.join(file);
if path.exists() {
let _ = fs::remove_file(path);
}
});
}
} }
// delete the original uid // delete the original uid
if current == uid { if current == *uid {
self.current = None; self.current = None;
for item in items.iter() { for item in items.iter() {
if item.itype == Some("remote".to_string()) if item.itype == Some("remote".into()) || item.itype == Some("local".into()) {
|| item.itype == Some("local".to_string())
{
self.current = item.uid.clone(); self.current = item.uid.clone();
break; break;
} }
@@ -382,20 +343,20 @@ impl IProfiles {
} }
self.items = Some(items); self.items = Some(items);
self.save_file()?; self.save_file().await?;
Ok(current == uid) Ok(current == *uid)
} }
/// 获取current指向的订阅内容 /// 获取current指向的订阅内容
pub fn current_mapping(&self) -> Result<Mapping> { pub async fn current_mapping(&self) -> Result<Mapping> {
match (self.current.as_ref(), self.items.as_ref()) { match (self.current.as_ref(), self.items.as_ref()) {
(Some(current), Some(items)) => { (Some(current), Some(items)) => {
if let Some(item) = items.iter().find(|e| e.uid.as_ref() == Some(current)) { if let Some(item) = items.iter().find(|e| e.uid.as_ref() == Some(current)) {
let file_path = match item.file.as_ref() { let file_path = match item.file.as_ref() {
Some(file) => dirs::app_profiles_dir()?.join(file), Some(file) => dirs::app_profiles_dir()?.join(file.as_str()),
None => bail!("failed to get the file field"), None => bail!("failed to get the file field"),
}; };
return help::read_mapping(&file_path); return help::read_mapping(&file_path).await;
} }
bail!("failed to find the current profile \"uid:{current}\""); bail!("failed to find the current profile \"uid:{current}\"");
} }
@@ -495,7 +456,7 @@ impl IProfiles {
} }
/// 以 app 中的 profile 列表为准,删除不再需要的文件 /// 以 app 中的 profile 列表为准,删除不再需要的文件
pub fn cleanup_orphaned_files(&self) -> Result<CleanupResult> { pub async fn cleanup_orphaned_files(&self) -> Result<CleanupResult> {
let profiles_dir = dirs::app_profiles_dir()?; let profiles_dir = dirs::app_profiles_dir()?;
if !profiles_dir.exists() { if !profiles_dir.exists() {
@@ -527,25 +488,29 @@ impl IProfiles {
total_files += 1; total_files += 1;
if let Some(file_name) = path.file_name().and_then(|n| n.to_str()) { if let Some(file_name) = path.file_name().and_then(|n| n.to_str())
if Self::is_profile_file(file_name) { && Self::is_profile_file(file_name)
// 检查是否为全局扩展文件 {
if protected_files.contains(file_name) { // 检查是否为全局扩展文件
log::debug!(target: "app", "保护全局扩展配置文件: {file_name}"); if protected_files.contains(file_name) {
continue; logging!(debug, Type::Config, "保护全局扩展配置文件: {file_name}");
} continue;
}
// 检查是否为活跃文件 // 检查是否为活跃文件
if !active_files.contains(file_name) { if !active_files.contains(file_name) {
match std::fs::remove_file(&path) { match path.to_path_buf().remove_if_exists().await {
Ok(_) => { Ok(_) => {
deleted_files.push(file_name.to_string()); deleted_files.push(file_name.into());
log::info!(target: "app", "已清理冗余文件: {file_name}"); logging!(info, Type::Config, "已清理冗余文件: {file_name}");
} }
Err(e) => { Err(e) => {
failed_deletions.push(format!("{file_name}: {e}")); failed_deletions.push(format!("{file_name}: {e}").into());
log::warn!(target: "app", "清理文件失败: {file_name} - {e}"); logging!(
} warn,
Type::Config,
"Warning: 清理文件失败: {file_name} - {e}"
);
} }
} }
} }
@@ -558,8 +523,9 @@ impl IProfiles {
failed_deletions, failed_deletions,
}; };
log::info!( logging!(
target: "app", info,
Type::Config,
"Profile 文件清理完成: 总文件数={}, 删除文件数={}, 失败数={}", "Profile 文件清理完成: 总文件数={}, 删除文件数={}, 失败数={}",
result.total_files, result.total_files,
result.deleted_files.len(), result.deleted_files.len(),
@@ -573,8 +539,8 @@ impl IProfiles {
fn get_protected_global_files(&self) -> HashSet<String> { fn get_protected_global_files(&self) -> HashSet<String> {
let mut protected_files = HashSet::new(); let mut protected_files = HashSet::new();
protected_files.insert("Merge.yaml".to_string()); protected_files.insert("Merge.yaml".into());
protected_files.insert("Script.js".to_string()); protected_files.insert("Script.js".into());
protected_files protected_files
} }
@@ -591,50 +557,44 @@ impl IProfiles {
} }
// 对于主 profile 类型remote/local还需要收集其关联的扩展文件 // 对于主 profile 类型remote/local还需要收集其关联的扩展文件
if let Some(itype) = &item.itype { if let Some(itype) = &item.itype
if itype == "remote" || itype == "local" { && (itype == "remote" || itype == "local")
if let Some(option) = &item.option { && let Some(option) = &item.option
// 收集关联的扩展文件 {
if let Some(merge_uid) = &option.merge { // 收集关联的扩展文件
if let Ok(merge_item) = self.get_item(merge_uid) { if let Some(merge_uid) = &option.merge
if let Some(file) = &merge_item.file { && let Ok(merge_item) = self.get_item(merge_uid)
active_files.insert(file.clone()); && let Some(file) = &merge_item.file
} {
} active_files.insert(file.clone());
} }
if let Some(script_uid) = &option.script { if let Some(script_uid) = &option.script
if let Ok(script_item) = self.get_item(script_uid) { && let Ok(script_item) = self.get_item(script_uid)
if let Some(file) = &script_item.file { && let Some(file) = &script_item.file
active_files.insert(file.clone()); {
} active_files.insert(file.clone());
} }
}
if let Some(rules_uid) = &option.rules { if let Some(rules_uid) = &option.rules
if let Ok(rules_item) = self.get_item(rules_uid) { && let Ok(rules_item) = self.get_item(rules_uid)
if let Some(file) = &rules_item.file { && let Some(file) = &rules_item.file
active_files.insert(file.clone()); {
} active_files.insert(file.clone());
} }
}
if let Some(proxies_uid) = &option.proxies { if let Some(proxies_uid) = &option.proxies
if let Ok(proxies_item) = self.get_item(proxies_uid) { && let Ok(proxies_item) = self.get_item(proxies_uid)
if let Some(file) = &proxies_item.file { && let Some(file) = &proxies_item.file
active_files.insert(file.clone()); {
} active_files.insert(file.clone());
} }
}
if let Some(groups_uid) = &option.groups { if let Some(groups_uid) = &option.groups
if let Ok(groups_item) = self.get_item(groups_uid) { && let Ok(groups_item) = self.get_item(groups_uid)
if let Some(file) = &groups_item.file { && let Some(file) = &groups_item.file
active_files.insert(file.clone()); {
} active_files.insert(file.clone());
}
}
}
} }
} }
} }
@@ -667,23 +627,75 @@ impl IProfiles {
.unwrap_or(false) .unwrap_or(false)
}) })
} }
}
pub fn auto_cleanup(&self) -> Result<()> {
match self.cleanup_orphaned_files() { // 特殊的Send-safe helper函数完全避免跨await持有guard
Ok(result) => { use crate::config::Config;
if !result.deleted_files.is_empty() {
log::info!( pub async fn profiles_append_item_with_filedata_safe(
target: "app", item: &PrfItem,
"自动清理完成,删除了 {} 个冗余文件", file_data: Option<String>,
result.deleted_files.len() ) -> Result<()> {
); let item = &mut PrfItem::from(item, file_data).await?;
} profiles_append_item_safe(item).await
Ok(()) }
}
Err(e) => { pub async fn profiles_append_item_safe(item: &mut PrfItem) -> Result<()> {
log::warn!(target: "app", "自动清理失败: {e}"); Config::profiles()
Ok(()) .await
} .with_data_modify(|mut profiles| async move {
} profiles.append_item(item).await?;
} Ok((profiles, ()))
})
.await
}
pub async fn profiles_patch_item_safe(index: &String, item: &PrfItem) -> Result<()> {
Config::profiles()
.await
.with_data_modify(|mut profiles| async move {
profiles.patch_item(index, item).await?;
Ok((profiles, ()))
})
.await
}
pub async fn profiles_delete_item_safe(index: &String) -> Result<bool> {
Config::profiles()
.await
.with_data_modify(|mut profiles| async move {
let deleted = profiles.delete_item(index).await?;
Ok((profiles, deleted))
})
.await
}
pub async fn profiles_reorder_safe(active_id: &String, over_id: &String) -> Result<()> {
Config::profiles()
.await
.with_data_modify(|mut profiles| async move {
profiles.reorder(active_id, over_id).await?;
Ok((profiles, ()))
})
.await
}
pub async fn profiles_save_file_safe() -> Result<()> {
Config::profiles()
.await
.with_data_modify(|profiles| async move {
profiles.save_file().await?;
Ok((profiles, ()))
})
.await
}
pub async fn profiles_draft_update_item_safe(index: &String, item: &mut PrfItem) -> Result<()> {
Config::profiles()
.await
.with_data_modify(|mut profiles| async move {
profiles.update_item(index, item).await?;
Ok((profiles, ()))
})
.await
} }

View File

@@ -1,7 +1,9 @@
use crate::enhance::field::use_keys; use crate::enhance::field::use_keys;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_yaml::{Mapping, Value}; use serde_yaml_ng::{Mapping, Value};
use smartstring::alias::String;
use std::collections::HashMap; use std::collections::HashMap;
#[derive(Default, Debug, Clone, Deserialize, Serialize)] #[derive(Default, Debug, Clone, Deserialize, Serialize)]
pub struct IRuntime { pub struct IRuntime {
pub config: Option<Mapping>, pub config: Option<Mapping>,
@@ -30,15 +32,15 @@ impl IRuntime {
let patch_tun = patch.get("tun"); let patch_tun = patch.get("tun");
if patch_tun.is_some() { if patch_tun.is_some() {
let tun = config.get("tun"); let tun = config.get("tun");
let mut tun = tun.map_or(Mapping::new(), |val| { let mut tun: Mapping = tun.map_or_else(Mapping::new, |val| {
val.as_mapping().cloned().unwrap_or(Mapping::new()) val.as_mapping().cloned().unwrap_or_else(Mapping::new)
}); });
let patch_tun = patch_tun.map_or(Mapping::new(), |val| { let patch_tun = patch_tun.map_or_else(Mapping::new, |val| {
val.as_mapping().cloned().unwrap_or(Mapping::new()) val.as_mapping().cloned().unwrap_or_else(Mapping::new)
}); });
use_keys(&patch_tun).into_iter().for_each(|key| { use_keys(&patch_tun).into_iter().for_each(|key| {
if let Some(value) = patch_tun.get(&key).to_owned() { if let Some(value) = patch_tun.get(key.as_str()) {
tun.insert(key.into(), value.clone()); tun.insert(Value::from(key.as_str()), value.clone());
} }
}); });
@@ -46,4 +48,76 @@ impl IRuntime {
} }
} }
} }
//跟新链式代理配置文件
/// {
/// "proxies":[
/// {
/// name : 入口节点,
/// type: xxx
/// server: xxx
/// port: xxx
/// ports: xxx
/// password: xxx
/// skip-cert-verify: xxx,
/// },
/// {
/// name : hop_node_1_xxxx,
/// type: xxx
/// server: xxx
/// port: xxx
/// ports: xxx
/// password: xxx
/// skip-cert-verify: xxx,
/// dialer-proxy : "入口节点"
/// },
/// {
/// name : 出口节点,
/// type: xxx
/// server: xxx
/// port: xxx
/// ports: xxx
/// password: xxx
/// skip-cert-verify: xxx,
/// dialer-proxy : "hop_node_1_xxxx"
/// }
/// ],
/// "proxy-groups" : [
/// {
/// name : "proxy_chain",
/// type: "select",
/// proxies ["出口节点"]
/// }
/// ]
/// }
///
/// 传入none 为删除
pub fn update_proxy_chain_config(&mut self, proxy_chain_config: Option<Value>) {
if let Some(config) = self.config.as_mut() {
if let Some(Value::Sequence(proxies)) = config.get_mut("proxies") {
proxies.iter_mut().for_each(|proxy| {
if let Some(proxy) = proxy.as_mapping_mut()
&& proxy.get("dialer-proxy").is_some()
{
proxy.remove("dialer-proxy");
}
});
}
if let Some(Value::Sequence(dialer_proxies)) = proxy_chain_config
&& let Some(Value::Sequence(proxies)) = config.get_mut("proxies")
{
for (i, dialer_proxy) in dialer_proxies.iter().enumerate() {
if let Some(Value::Mapping(proxy)) = proxies
.iter_mut()
.find(|proxy| proxy.get("name") == Some(dialer_proxy))
&& i != 0
&& let Some(dialer_proxy) = dialer_proxies.get(i - 1)
{
proxy.insert("dialer-proxy".into(), dialer_proxy.to_owned());
}
}
}
}
}
} }

View File

@@ -1,11 +1,13 @@
use crate::config::Config;
use crate::{ use crate::{
config::{deserialize_encrypted, serialize_encrypted, DEFAULT_PAC}, config::{DEFAULT_PAC, deserialize_encrypted, serialize_encrypted},
logging, logging,
utils::{dirs, help, i18n, logging::Type}, utils::{dirs, help, i18n, logging::Type},
}; };
use anyhow::Result; use anyhow::Result;
use log::LevelFilter; use log::LevelFilter;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use smartstring::alias::String;
/// ### `verge.yaml` schema /// ### `verge.yaml` schema
#[derive(Default, Debug, Clone, Deserialize, Serialize)] #[derive(Default, Debug, Clone, Deserialize, Serialize)]
@@ -14,6 +16,12 @@ pub struct IVerge {
/// silent | error | warn | info | debug | trace /// silent | error | warn | info | debug | trace
pub app_log_level: Option<String>, pub app_log_level: Option<String>,
/// app log max size in KB
pub app_log_max_size: Option<u64>,
/// app log max count
pub app_log_max_count: Option<usize>,
// i18n // i18n
pub language: Option<String>, pub language: Option<String>,
@@ -50,6 +58,9 @@ pub struct IVerge {
/// menu icon /// menu icon
pub menu_icon: Option<String>, pub menu_icon: Option<String>,
/// menu order
pub menu_order: Option<Vec<String>>,
/// sysproxy tray icon /// sysproxy tray icon
pub sysproxy_tray_icon: Option<bool>, pub sysproxy_tray_icon: Option<bool>,
@@ -125,6 +136,9 @@ pub struct IVerge {
/// 默认的延迟测试超时时间 /// 默认的延迟测试超时时间
pub default_latency_timeout: Option<i32>, pub default_latency_timeout: Option<i32>,
/// 是否自动检测当前节点延迟
pub enable_auto_delay_detection: Option<bool>,
/// 是否使用内部的脚本支持,默认为真 /// 是否使用内部的脚本支持,默认为真
pub enable_builtin_enhanced: Option<bool>, pub enable_builtin_enhanced: Option<bool>,
@@ -138,9 +152,6 @@ pub struct IVerge {
/// 0: 不清理; 1: 1天2: 7天; 3: 30天; 4: 90天 /// 0: 不清理; 1: 1天2: 7天; 3: 30天; 4: 90天
pub auto_log_clean: Option<i32>, pub auto_log_clean: Option<i32>,
/// 是否启用随机端口
pub enable_random_port: Option<bool>,
/// verge 的各种 port 用于覆盖 clash 的各种 port /// verge 的各种 port 用于覆盖 clash 的各种 port
#[cfg(not(target_os = "windows"))] #[cfg(not(target_os = "windows"))]
pub verge_redir_port: Option<u16>, pub verge_redir_port: Option<u16>,
@@ -193,7 +204,9 @@ pub struct IVerge {
pub enable_tray_speed: Option<bool>, pub enable_tray_speed: Option<bool>,
pub enable_tray_icon: Option<bool>, // pub enable_tray_icon: Option<bool>,
/// show proxy groups directly on tray root menu
pub tray_inline_proxy_groups: Option<bool>,
/// 自动进入轻量模式 /// 自动进入轻量模式
pub enable_auto_light_weight_mode: Option<bool>, pub enable_auto_light_weight_mode: Option<bool>,
@@ -204,8 +217,11 @@ pub struct IVerge {
/// 启用代理页面自动滚动 /// 启用代理页面自动滚动
pub enable_hover_jump_navigator: Option<bool>, pub enable_hover_jump_navigator: Option<bool>,
/// 服务状态跟踪 /// 代理页面自动滚动延迟(毫秒)
pub service_state: Option<crate::core::service::ServiceState>, pub hover_jump_navigator_delay: Option<u64>,
/// 启用外部控制器
pub enable_external_controller: Option<bool>,
} }
#[derive(Default, Debug, Clone, Deserialize, Serialize)] #[derive(Default, Debug, Clone, Deserialize, Serialize)]
@@ -237,9 +253,9 @@ impl IVerge {
pub const VALID_CLASH_CORES: &'static [&'static str] = &["verge-mihomo", "verge-mihomo-alpha"]; pub const VALID_CLASH_CORES: &'static [&'static str] = &["verge-mihomo", "verge-mihomo-alpha"];
/// 验证并修正配置文件中的clash_core值 /// 验证并修正配置文件中的clash_core值
pub fn validate_and_fix_config() -> Result<()> { pub async fn validate_and_fix_config() -> Result<()> {
let config_path = dirs::verge_path()?; let config_path = dirs::verge_path()?;
let mut config = match help::read_yaml::<IVerge>(&config_path) { let mut config = match help::read_yaml::<IVerge>(&config_path).await {
Ok(config) => config, Ok(config) => config,
Err(_) => Self::template(), Err(_) => Self::template(),
}; };
@@ -252,41 +268,33 @@ impl IVerge {
logging!( logging!(
warn, warn,
Type::Config, Type::Config,
true,
"启动时发现无效的clash_core配置: '{}', 将自动修正为 'verge-mihomo'", "启动时发现无效的clash_core配置: '{}', 将自动修正为 'verge-mihomo'",
core core
); );
config.clash_core = Some("verge-mihomo".to_string()); config.clash_core = Some("verge-mihomo".into());
needs_fix = true; needs_fix = true;
} }
} else { } else {
logging!( logging!(
info, info,
Type::Config, Type::Config,
true,
"启动时发现未配置clash_core, 将设置为默认值 'verge-mihomo'" "启动时发现未配置clash_core, 将设置为默认值 'verge-mihomo'"
); );
config.clash_core = Some("verge-mihomo".to_string()); config.clash_core = Some("verge-mihomo".into());
needs_fix = true; needs_fix = true;
} }
// 修正后保存配置 // 修正后保存配置
if needs_fix { if needs_fix {
logging!(info, Type::Config, true, "正在保存修正后的配置文件..."); logging!(info, Type::Config, "正在保存修正后的配置文件...");
help::save_yaml(&config_path, &config, Some("# Clash Verge Config"))?; help::save_yaml(&config_path, &config, Some("# Clash Verge Config")).await?;
logging!( logging!(info, Type::Config, "配置文件修正完成,需要重新加载配置");
info,
Type::Config,
true,
"配置文件修正完成,需要重新加载配置"
);
Self::reload_config_after_fix(config)?; Self::reload_config_after_fix(config).await?;
} else { } else {
logging!( logging!(
info, info,
Type::Config, Type::Config,
true,
"clash_core配置验证通过: {:?}", "clash_core配置验证通过: {:?}",
config.clash_core config.clash_core
); );
@@ -296,50 +304,61 @@ impl IVerge {
} }
/// 配置修正后重新加载配置 /// 配置修正后重新加载配置
fn reload_config_after_fix(updated_config: IVerge) -> Result<()> { async fn reload_config_after_fix(updated_config: IVerge) -> Result<()> {
use crate::config::Config;
let config_draft = Config::verge();
*config_draft.draft() = Box::new(updated_config.clone());
config_draft.apply();
logging!( logging!(
info, info,
Type::Config, Type::Config,
true,
"内存配置已强制更新新的clash_core: {:?}", "内存配置已强制更新新的clash_core: {:?}",
updated_config.clash_core &updated_config.clash_core
); );
let config_draft = Config::verge().await;
**config_draft.draft_mut() = updated_config;
config_draft.apply();
Ok(()) Ok(())
} }
pub fn get_valid_clash_core(&self) -> String { pub fn get_valid_clash_core(&self) -> String {
self.clash_core self.clash_core
.clone() .clone()
.unwrap_or_else(|| "verge-mihomo".to_string()) .unwrap_or_else(|| "verge-mihomo".into())
} }
fn get_system_language() -> String { fn get_system_language() -> String {
let sys_lang = sys_locale::get_locale() let sys_lang = sys_locale::get_locale()
.unwrap_or_else(|| String::from("en")) .unwrap_or_else(|| "en".into())
.to_lowercase(); .to_lowercase();
let lang_code = sys_lang.split(['_', '-']).next().unwrap_or("en"); let lang_code = sys_lang.split(['_', '-']).next().unwrap_or("en");
let supported_languages = i18n::get_supported_languages(); let supported_languages = i18n::get_supported_languages();
if supported_languages.contains(&lang_code.to_string()) { if supported_languages.contains(&lang_code.into()) {
lang_code.to_string() lang_code.into()
} else { } else {
String::from("en") String::from("en")
} }
} }
pub fn new() -> Self { pub async fn new() -> Self {
match dirs::verge_path().and_then(|path| help::read_yaml::<IVerge>(&path)) { match dirs::verge_path() {
Ok(config) => config, Ok(path) => match help::read_yaml::<IVerge>(&path).await {
Ok(mut config) => {
// compatibility
if let Some(start_page) = config.start_page.clone()
&& start_page == "/home"
{
config.start_page = Some(String::from("/"));
}
config
}
Err(err) => {
logging!(error, Type::Config, "{err}");
Self::template()
}
},
Err(err) => { Err(err) => {
log::error!(target: "app", "{err}"); logging!(error, Type::Config, "{err}");
Self::template() Self::template()
} }
} }
@@ -347,6 +366,8 @@ impl IVerge {
pub fn template() -> Self { pub fn template() -> Self {
Self { Self {
app_log_max_size: Some(128),
app_log_max_count: Some(8),
clash_core: Some("verge-mihomo".into()), clash_core: Some("verge-mihomo".into()),
language: Some(Self::get_system_language()), language: Some(Self::get_system_language()),
theme_mode: Some("system".into()), theme_mode: Some("system".into()),
@@ -354,7 +375,7 @@ impl IVerge {
env_type: Some("bash".into()), env_type: Some("bash".into()),
#[cfg(target_os = "windows")] #[cfg(target_os = "windows")]
env_type: Some("powershell".into()), env_type: Some("powershell".into()),
start_page: Some("/home".into()), start_page: Some("/".into()),
traffic_graph: Some(true), traffic_graph: Some(true),
enable_memory_usage: Some(true), enable_memory_usage: Some(true),
enable_group_icon: Some(true), enable_group_icon: Some(true),
@@ -367,11 +388,11 @@ impl IVerge {
enable_auto_launch: Some(false), enable_auto_launch: Some(false),
enable_silent_start: Some(false), enable_silent_start: Some(false),
enable_hover_jump_navigator: Some(true), enable_hover_jump_navigator: Some(true),
hover_jump_navigator_delay: Some(280),
enable_system_proxy: Some(false), enable_system_proxy: Some(false),
proxy_auto_config: Some(false), proxy_auto_config: Some(false),
pac_file_content: Some(DEFAULT_PAC.into()), pac_file_content: Some(DEFAULT_PAC.into()),
proxy_host: Some("127.0.0.1".into()), proxy_host: Some("127.0.0.1".into()),
enable_random_port: Some(false),
#[cfg(not(target_os = "windows"))] #[cfg(not(target_os = "windows"))]
verge_redir_port: Some(7895), verge_redir_port: Some(7895),
#[cfg(not(target_os = "windows"))] #[cfg(not(target_os = "windows"))]
@@ -391,39 +412,44 @@ impl IVerge {
auto_close_connection: Some(true), auto_close_connection: Some(true),
auto_check_update: Some(true), auto_check_update: Some(true),
enable_builtin_enhanced: Some(true), enable_builtin_enhanced: Some(true),
auto_log_clean: Some(2), auto_log_clean: Some(2), // 1: 1天, 2: 7天, 3: 30天, 4: 90天
webdav_url: None, webdav_url: None,
webdav_username: None, webdav_username: None,
webdav_password: None, webdav_password: None,
enable_tray_speed: Some(false), enable_tray_speed: Some(false),
enable_tray_icon: Some(true), // enable_tray_icon: Some(true),
tray_inline_proxy_groups: Some(false),
enable_global_hotkey: Some(true), enable_global_hotkey: Some(true),
enable_auto_light_weight_mode: Some(false), enable_auto_light_weight_mode: Some(false),
auto_light_weight_minutes: Some(10), auto_light_weight_minutes: Some(10),
enable_dns_settings: Some(false), enable_dns_settings: Some(false),
home_cards: None, home_cards: None,
service_state: None, enable_external_controller: Some(false),
..Self::default() ..Self::default()
} }
} }
/// Save IVerge App Config /// Save IVerge App Config
pub fn save_file(&self) -> Result<()> { pub async fn save_file(&self) -> Result<()> {
help::save_yaml(&dirs::verge_path()?, &self, Some("# Clash Verge Config")) help::save_yaml(&dirs::verge_path()?, &self, Some("# Clash Verge Config")).await
} }
/// patch verge config /// patch verge config
/// only save to file /// only save to file
pub fn patch_config(&mut self, patch: IVerge) { #[allow(clippy::cognitive_complexity)]
pub fn patch_config(&mut self, patch: &IVerge) {
macro_rules! patch { macro_rules! patch {
($key: tt) => { ($key: tt) => {
if patch.$key.is_some() { if patch.$key.is_some() {
self.$key = patch.$key; self.$key = patch.$key.clone();
} }
}; };
} }
patch!(app_log_level); patch!(app_log_level);
patch!(app_log_max_size);
patch!(app_log_max_count);
patch!(language); patch!(language);
patch!(theme_mode); patch!(theme_mode);
patch!(tray_event); patch!(tray_event);
@@ -436,6 +462,7 @@ impl IVerge {
#[cfg(target_os = "macos")] #[cfg(target_os = "macos")]
patch!(tray_icon); patch!(tray_icon);
patch!(menu_icon); patch!(menu_icon);
patch!(menu_order);
patch!(common_tray_icon); patch!(common_tray_icon);
patch!(sysproxy_tray_icon); patch!(sysproxy_tray_icon);
patch!(tun_tray_icon); patch!(tun_tray_icon);
@@ -444,7 +471,7 @@ impl IVerge {
patch!(enable_auto_launch); patch!(enable_auto_launch);
patch!(enable_silent_start); patch!(enable_silent_start);
patch!(enable_hover_jump_navigator); patch!(enable_hover_jump_navigator);
patch!(enable_random_port); patch!(hover_jump_navigator_delay);
#[cfg(not(target_os = "windows"))] #[cfg(not(target_os = "windows"))]
patch!(verge_redir_port); patch!(verge_redir_port);
#[cfg(not(target_os = "windows"))] #[cfg(not(target_os = "windows"))]
@@ -476,6 +503,7 @@ impl IVerge {
patch!(auto_check_update); patch!(auto_check_update);
patch!(default_latency_test); patch!(default_latency_test);
patch!(default_latency_timeout); patch!(default_latency_timeout);
patch!(enable_auto_delay_detection);
patch!(enable_builtin_enhanced); patch!(enable_builtin_enhanced);
patch!(proxy_layout_column); patch!(proxy_layout_column);
patch!(test_list); patch!(test_list);
@@ -485,21 +513,17 @@ impl IVerge {
patch!(webdav_username); patch!(webdav_username);
patch!(webdav_password); patch!(webdav_password);
patch!(enable_tray_speed); patch!(enable_tray_speed);
patch!(enable_tray_icon); // patch!(enable_tray_icon);
patch!(tray_inline_proxy_groups);
patch!(enable_auto_light_weight_mode); patch!(enable_auto_light_weight_mode);
patch!(auto_light_weight_minutes); patch!(auto_light_weight_minutes);
patch!(enable_dns_settings); patch!(enable_dns_settings);
patch!(home_cards); patch!(home_cards);
patch!(service_state); patch!(enable_external_controller);
} }
/// 在初始化前尝试拿到单例端口的值
pub fn get_singleton_port() -> u16 { pub fn get_singleton_port() -> u16 {
#[cfg(not(feature = "verge-dev"))] crate::constants::network::ports::SINGLETON_SERVER
const SERVER_PORT: u16 = 33331;
#[cfg(feature = "verge-dev")]
const SERVER_PORT: u16 = 11233;
SERVER_PORT
} }
/// 获取日志等级 /// 获取日志等级
@@ -523,6 +547,8 @@ impl IVerge {
#[derive(Debug, Clone, Serialize)] #[derive(Debug, Clone, Serialize)]
pub struct IVergeResponse { pub struct IVergeResponse {
pub app_log_level: Option<String>, pub app_log_level: Option<String>,
pub app_log_max_size: Option<u64>,
pub app_log_max_count: Option<usize>,
pub language: Option<String>, pub language: Option<String>,
pub theme_mode: Option<String>, pub theme_mode: Option<String>,
pub tray_event: Option<String>, pub tray_event: Option<String>,
@@ -536,6 +562,7 @@ pub struct IVergeResponse {
#[cfg(target_os = "macos")] #[cfg(target_os = "macos")]
pub tray_icon: Option<String>, pub tray_icon: Option<String>,
pub menu_icon: Option<String>, pub menu_icon: Option<String>,
pub menu_order: Option<Vec<String>>,
pub sysproxy_tray_icon: Option<bool>, pub sysproxy_tray_icon: Option<bool>,
pub tun_tray_icon: Option<bool>, pub tun_tray_icon: Option<bool>,
pub enable_tun_mode: Option<bool>, pub enable_tun_mode: Option<bool>,
@@ -558,11 +585,11 @@ pub struct IVergeResponse {
pub auto_check_update: Option<bool>, pub auto_check_update: Option<bool>,
pub default_latency_test: Option<String>, pub default_latency_test: Option<String>,
pub default_latency_timeout: Option<i32>, pub default_latency_timeout: Option<i32>,
pub enable_auto_delay_detection: Option<bool>,
pub enable_builtin_enhanced: Option<bool>, pub enable_builtin_enhanced: Option<bool>,
pub proxy_layout_column: Option<i32>, pub proxy_layout_column: Option<i32>,
pub test_list: Option<Vec<IVergeTestItem>>, pub test_list: Option<Vec<IVergeTestItem>>,
pub auto_log_clean: Option<i32>, pub auto_log_clean: Option<i32>,
pub enable_random_port: Option<bool>,
#[cfg(not(target_os = "windows"))] #[cfg(not(target_os = "windows"))]
pub verge_redir_port: Option<u16>, pub verge_redir_port: Option<u16>,
#[cfg(not(target_os = "windows"))] #[cfg(not(target_os = "windows"))]
@@ -580,13 +607,15 @@ pub struct IVergeResponse {
pub webdav_username: Option<String>, pub webdav_username: Option<String>,
pub webdav_password: Option<String>, pub webdav_password: Option<String>,
pub enable_tray_speed: Option<bool>, pub enable_tray_speed: Option<bool>,
pub enable_tray_icon: Option<bool>, // pub enable_tray_icon: Option<bool>,
pub tray_inline_proxy_groups: Option<bool>,
pub enable_auto_light_weight_mode: Option<bool>, pub enable_auto_light_weight_mode: Option<bool>,
pub auto_light_weight_minutes: Option<u64>, pub auto_light_weight_minutes: Option<u64>,
pub enable_dns_settings: Option<bool>, pub enable_dns_settings: Option<bool>,
pub home_cards: Option<serde_json::Value>, pub home_cards: Option<serde_json::Value>,
pub enable_hover_jump_navigator: Option<bool>, pub enable_hover_jump_navigator: Option<bool>,
pub service_state: Option<crate::core::service::ServiceState>, pub hover_jump_navigator_delay: Option<u64>,
pub enable_external_controller: Option<bool>,
} }
impl From<IVerge> for IVergeResponse { impl From<IVerge> for IVergeResponse {
@@ -595,6 +624,8 @@ impl From<IVerge> for IVergeResponse {
let valid_clash_core = verge.get_valid_clash_core(); let valid_clash_core = verge.get_valid_clash_core();
Self { Self {
app_log_level: verge.app_log_level, app_log_level: verge.app_log_level,
app_log_max_size: verge.app_log_max_size,
app_log_max_count: verge.app_log_max_count,
language: verge.language, language: verge.language,
theme_mode: verge.theme_mode, theme_mode: verge.theme_mode,
tray_event: verge.tray_event, tray_event: verge.tray_event,
@@ -608,6 +639,7 @@ impl From<IVerge> for IVergeResponse {
#[cfg(target_os = "macos")] #[cfg(target_os = "macos")]
tray_icon: verge.tray_icon, tray_icon: verge.tray_icon,
menu_icon: verge.menu_icon, menu_icon: verge.menu_icon,
menu_order: verge.menu_order,
sysproxy_tray_icon: verge.sysproxy_tray_icon, sysproxy_tray_icon: verge.sysproxy_tray_icon,
tun_tray_icon: verge.tun_tray_icon, tun_tray_icon: verge.tun_tray_icon,
enable_tun_mode: verge.enable_tun_mode, enable_tun_mode: verge.enable_tun_mode,
@@ -630,11 +662,11 @@ impl From<IVerge> for IVergeResponse {
auto_check_update: verge.auto_check_update, auto_check_update: verge.auto_check_update,
default_latency_test: verge.default_latency_test, default_latency_test: verge.default_latency_test,
default_latency_timeout: verge.default_latency_timeout, default_latency_timeout: verge.default_latency_timeout,
enable_auto_delay_detection: verge.enable_auto_delay_detection,
enable_builtin_enhanced: verge.enable_builtin_enhanced, enable_builtin_enhanced: verge.enable_builtin_enhanced,
proxy_layout_column: verge.proxy_layout_column, proxy_layout_column: verge.proxy_layout_column,
test_list: verge.test_list, test_list: verge.test_list,
auto_log_clean: verge.auto_log_clean, auto_log_clean: verge.auto_log_clean,
enable_random_port: verge.enable_random_port,
#[cfg(not(target_os = "windows"))] #[cfg(not(target_os = "windows"))]
verge_redir_port: verge.verge_redir_port, verge_redir_port: verge.verge_redir_port,
#[cfg(not(target_os = "windows"))] #[cfg(not(target_os = "windows"))]
@@ -652,13 +684,21 @@ impl From<IVerge> for IVergeResponse {
webdav_username: verge.webdav_username, webdav_username: verge.webdav_username,
webdav_password: verge.webdav_password, webdav_password: verge.webdav_password,
enable_tray_speed: verge.enable_tray_speed, enable_tray_speed: verge.enable_tray_speed,
enable_tray_icon: verge.enable_tray_icon, // enable_tray_icon: verge.enable_tray_icon,
tray_inline_proxy_groups: verge.tray_inline_proxy_groups,
enable_auto_light_weight_mode: verge.enable_auto_light_weight_mode, enable_auto_light_weight_mode: verge.enable_auto_light_weight_mode,
auto_light_weight_minutes: verge.auto_light_weight_minutes, auto_light_weight_minutes: verge.auto_light_weight_minutes,
enable_dns_settings: verge.enable_dns_settings, enable_dns_settings: verge.enable_dns_settings,
home_cards: verge.home_cards, home_cards: verge.home_cards,
enable_hover_jump_navigator: verge.enable_hover_jump_navigator, enable_hover_jump_navigator: verge.enable_hover_jump_navigator,
service_state: verge.service_state, hover_jump_navigator_delay: verge.hover_jump_navigator_delay,
enable_external_controller: verge.enable_external_controller,
} }
} }
} }
impl From<Box<IVerge>> for IVergeResponse {
fn from(verge: Box<IVerge>) -> Self {
IVergeResponse::from(*verge)
}
}

View File

@@ -0,0 +1,78 @@
use std::time::Duration;
pub mod network {
pub const DEFAULT_PROXY_HOST: &str = "127.0.0.1";
pub const DEFAULT_EXTERNAL_CONTROLLER: &str = "127.0.0.1:9097";
pub mod ports {
#[cfg(not(target_os = "windows"))]
pub const DEFAULT_REDIR: u16 = 7895;
#[cfg(target_os = "linux")]
pub const DEFAULT_TPROXY: u16 = 7896;
pub const DEFAULT_MIXED: u16 = 7897;
pub const DEFAULT_SOCKS: u16 = 7898;
pub const DEFAULT_HTTP: u16 = 7899;
#[cfg(not(feature = "verge-dev"))]
pub const SINGLETON_SERVER: u16 = 33331;
#[cfg(feature = "verge-dev")]
pub const SINGLETON_SERVER: u16 = 11233;
}
}
pub mod bypass {
#[cfg(target_os = "windows")]
pub const DEFAULT: &str = "localhost;127.*;192.168.*;10.*;172.16.*;172.17.*;172.18.*;172.19.*;172.20.*;172.21.*;172.22.*;172.23.*;172.24.*;172.25.*;172.26.*;172.27.*;172.28.*;172.29.*;172.30.*;172.31.*;<local>";
#[cfg(target_os = "linux")]
pub const DEFAULT: &str =
"localhost,127.0.0.1,192.168.0.0/16,10.0.0.0/8,172.16.0.0/12,172.29.0.0/16,::1";
#[cfg(target_os = "macos")]
pub const DEFAULT: &str = "127.0.0.1,192.168.0.0/16,10.0.0.0/8,172.16.0.0/12,172.29.0.0/16,localhost,*.local,*.crashlytics.com,<local>";
}
pub mod timing {
use super::Duration;
pub const CONFIG_UPDATE_DEBOUNCE: Duration = Duration::from_millis(500);
pub const CONFIG_RELOAD_DELAY: Duration = Duration::from_millis(300);
pub const EVENT_EMIT_DELAY: Duration = Duration::from_millis(20);
pub const STARTUP_ERROR_DELAY: Duration = Duration::from_secs(2);
pub const ERROR_BATCH_DELAY: Duration = Duration::from_millis(300);
#[cfg(target_os = "windows")]
pub const SERVICE_WAIT_MAX: Duration = Duration::from_millis(3000);
#[cfg(target_os = "windows")]
pub const SERVICE_WAIT_INTERVAL: Duration = Duration::from_millis(200);
}
pub mod retry {
pub const EVENT_EMIT_THRESHOLD: u64 = 10;
}
pub mod files {
pub const RUNTIME_CONFIG: &str = "clash-verge.yaml";
pub const CHECK_CONFIG: &str = "clash-verge-check.yaml";
pub const DNS_CONFIG: &str = "dns_config.yaml";
pub const WINDOW_STATE: &str = "window_state.json";
}
pub mod error_patterns {
pub const CONNECTION_ERRORS: &[&str] = &[
"Failed to create connection",
"The system cannot find the file specified",
"operation timed out",
"connection refused",
];
}
pub mod tun {
#[cfg(target_os = "linux")]
pub const DEFAULT_STACK: &str = "mixed";
#[cfg(not(target_os = "linux"))]
pub const DEFAULT_STACK: &str = "gvisor";
pub const DNS_HIJACK: &[&str] = &["any:53"];
}

View File

@@ -1,6 +1,9 @@
#[cfg(target_os = "windows")]
use crate::process::AsyncHandler;
use crate::{logging, utils::logging::Type};
use anyhow::Result; use anyhow::Result;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tokio::time::{timeout, Duration}; use tokio::time::{Duration, timeout};
#[cfg(target_os = "linux")] #[cfg(target_os = "linux")]
use anyhow::anyhow; use anyhow::anyhow;
@@ -25,8 +28,8 @@ impl Default for AsyncSysproxy {
fn default() -> Self { fn default() -> Self {
Self { Self {
enable: false, enable: false,
host: "127.0.0.1".to_string(), host: "127.0.0.1".into(),
port: 7890, port: 7897,
bypass: String::new(), bypass: String::new(),
} }
} }
@@ -39,15 +42,21 @@ impl AsyncProxyQuery {
pub async fn get_auto_proxy() -> AsyncAutoproxy { pub async fn get_auto_proxy() -> AsyncAutoproxy {
match timeout(Duration::from_secs(3), Self::get_auto_proxy_impl()).await { match timeout(Duration::from_secs(3), Self::get_auto_proxy_impl()).await {
Ok(Ok(proxy)) => { Ok(Ok(proxy)) => {
log::debug!(target: "app", "异步获取自动代理成功: enable={}, url={}", proxy.enable, proxy.url); logging!(
debug,
Type::Network,
"异步获取自动代理成功: enable={}, url={}",
proxy.enable,
proxy.url
);
proxy proxy
} }
Ok(Err(e)) => { Ok(Err(e)) => {
log::warn!(target: "app", "异步获取自动代理失败: {e}"); logging!(warn, Type::Network, "Warning: 异步获取自动代理失败: {e}");
AsyncAutoproxy::default() AsyncAutoproxy::default()
} }
Err(_) => { Err(_) => {
log::warn!(target: "app", "异步获取自动代理超时"); logging!(warn, Type::Network, "Warning: 异步获取自动代理超时");
AsyncAutoproxy::default() AsyncAutoproxy::default()
} }
} }
@@ -57,15 +66,22 @@ impl AsyncProxyQuery {
pub async fn get_system_proxy() -> AsyncSysproxy { pub async fn get_system_proxy() -> AsyncSysproxy {
match timeout(Duration::from_secs(3), Self::get_system_proxy_impl()).await { match timeout(Duration::from_secs(3), Self::get_system_proxy_impl()).await {
Ok(Ok(proxy)) => { Ok(Ok(proxy)) => {
log::debug!(target: "app", "异步获取系统代理成功: enable={}, {}:{}", proxy.enable, proxy.host, proxy.port); logging!(
debug,
Type::Network,
"异步获取系统代理成功: enable={}, {}:{}",
proxy.enable,
proxy.host,
proxy.port
);
proxy proxy
} }
Ok(Err(e)) => { Ok(Err(e)) => {
log::warn!(target: "app", "异步获取系统代理失败: {e}"); logging!(warn, Type::Network, "Warning: 异步获取系统代理失败: {e}");
AsyncSysproxy::default() AsyncSysproxy::default()
} }
Err(_) => { Err(_) => {
log::warn!(target: "app", "异步获取系统代理超时"); logging!(warn, Type::Network, "Warning: 异步获取系统代理超时");
AsyncSysproxy::default() AsyncSysproxy::default()
} }
} }
@@ -74,7 +90,7 @@ impl AsyncProxyQuery {
#[cfg(target_os = "windows")] #[cfg(target_os = "windows")]
async fn get_auto_proxy_impl() -> Result<AsyncAutoproxy> { async fn get_auto_proxy_impl() -> Result<AsyncAutoproxy> {
// Windows: 从注册表读取PAC配置 // Windows: 从注册表读取PAC配置
tokio::task::spawn_blocking(move || -> Result<AsyncAutoproxy> { AsyncHandler::spawn_blocking(move || -> Result<AsyncAutoproxy> {
Self::get_pac_config_from_registry() Self::get_pac_config_from_registry()
}) })
.await? .await?
@@ -85,7 +101,7 @@ impl AsyncProxyQuery {
use std::ptr; use std::ptr;
use winapi::shared::minwindef::{DWORD, HKEY}; use winapi::shared::minwindef::{DWORD, HKEY};
use winapi::um::winnt::{KEY_READ, REG_DWORD, REG_SZ}; use winapi::um::winnt::{KEY_READ, REG_DWORD, REG_SZ};
use winapi::um::winreg::{RegCloseKey, RegOpenKeyExW, RegQueryValueExW, HKEY_CURRENT_USER}; use winapi::um::winreg::{HKEY_CURRENT_USER, RegCloseKey, RegOpenKeyExW, RegQueryValueExW};
unsafe { unsafe {
let key_path = "Software\\Microsoft\\Windows\\CurrentVersion\\Internet Settings\0" let key_path = "Software\\Microsoft\\Windows\\CurrentVersion\\Internet Settings\0"
@@ -97,7 +113,7 @@ impl AsyncProxyQuery {
RegOpenKeyExW(HKEY_CURRENT_USER, key_path.as_ptr(), 0, KEY_READ, &mut hkey); RegOpenKeyExW(HKEY_CURRENT_USER, key_path.as_ptr(), 0, KEY_READ, &mut hkey);
if result != 0 { if result != 0 {
log::debug!(target: "app", "无法打开注册表项"); logging!(debug, Type::Network, "无法打开注册表项");
return Ok(AsyncAutoproxy::default()); return Ok(AsyncAutoproxy::default());
} }
@@ -123,7 +139,7 @@ impl AsyncProxyQuery {
.position(|&x| x == 0) .position(|&x| x == 0)
.unwrap_or(url_buffer.len()); .unwrap_or(url_buffer.len());
pac_url = String::from_utf16_lossy(&url_buffer[..end_pos]); pac_url = String::from_utf16_lossy(&url_buffer[..end_pos]);
log::debug!(target: "app", "从注册表读取到PAC URL: {}", pac_url); logging!(debug, Type::Network, "从注册表读取到PAC URL: {pac_url}");
} }
// 2. 检查自动检测设置是否启用 // 2. 检查自动检测设置是否启用
@@ -148,10 +164,14 @@ impl AsyncProxyQuery {
|| (detect_query_result == 0 && detect_value_type == REG_DWORD && auto_detect != 0); || (detect_query_result == 0 && detect_value_type == REG_DWORD && auto_detect != 0);
if pac_enabled { if pac_enabled {
log::debug!(target: "app", "PAC配置启用: URL={}, AutoDetect={}", pac_url, auto_detect); logging!(
debug,
Type::Network,
"PAC配置启用: URL={pac_url}, AutoDetect={auto_detect}"
);
if pac_url.is_empty() && auto_detect != 0 { if pac_url.is_empty() && auto_detect != 0 {
pac_url = "auto-detect".to_string(); pac_url = "auto-detect".into();
} }
Ok(AsyncAutoproxy { Ok(AsyncAutoproxy {
@@ -159,7 +179,7 @@ impl AsyncProxyQuery {
url: pac_url, url: pac_url,
}) })
} else { } else {
log::debug!(target: "app", "PAC配置未启用"); logging!(debug, Type::Network, "PAC配置未启用");
Ok(AsyncAutoproxy::default()) Ok(AsyncAutoproxy::default())
} }
} }
@@ -175,7 +195,11 @@ impl AsyncProxyQuery {
} }
let stdout = String::from_utf8_lossy(&output.stdout); let stdout = String::from_utf8_lossy(&output.stdout);
log::debug!(target: "app", "scutil output: {stdout}"); crate::logging!(
debug,
crate::utils::logging::Type::Network,
"scutil output: {stdout}"
);
let mut pac_enabled = false; let mut pac_enabled = false;
let mut pac_url = String::new(); let mut pac_url = String::new();
@@ -189,12 +213,16 @@ impl AsyncProxyQuery {
// 正确解析包含冒号的URL // 正确解析包含冒号的URL
// 格式: "ProxyAutoConfigURLString : http://127.0.0.1:11233/commands/pac" // 格式: "ProxyAutoConfigURLString : http://127.0.0.1:11233/commands/pac"
if let Some(colon_pos) = line.find(" : ") { if let Some(colon_pos) = line.find(" : ") {
pac_url = line[colon_pos + 3..].trim().to_string(); pac_url = line[colon_pos + 3..].trim().into();
} }
} }
} }
log::debug!(target: "app", "解析结果: pac_enabled={pac_enabled}, pac_url={pac_url}"); crate::logging!(
debug,
crate::utils::logging::Type::Network,
"解析结果: pac_enabled={pac_enabled}, pac_url={pac_url}"
);
Ok(AsyncAutoproxy { Ok(AsyncAutoproxy {
enable: pac_enabled && !pac_url.is_empty(), enable: pac_enabled && !pac_url.is_empty(),
@@ -207,13 +235,13 @@ impl AsyncProxyQuery {
// Linux: 检查环境变量和GNOME设置 // Linux: 检查环境变量和GNOME设置
// 首先检查环境变量 // 首先检查环境变量
if let Ok(auto_proxy) = std::env::var("auto_proxy") { if let Ok(auto_proxy) = std::env::var("auto_proxy")
if !auto_proxy.is_empty() { && !auto_proxy.is_empty()
return Ok(AsyncAutoproxy { {
enable: true, return Ok(AsyncAutoproxy {
url: auto_proxy, enable: true,
}); url: auto_proxy,
} });
} }
// 尝试使用 gsettings 获取 GNOME 代理设置 // 尝试使用 gsettings 获取 GNOME 代理设置
@@ -222,31 +250,31 @@ impl AsyncProxyQuery {
.output() .output()
.await; .await;
if let Ok(output) = output { if let Ok(output) = output
if output.status.success() { && output.status.success()
let mode = String::from_utf8_lossy(&output.stdout).trim().to_string(); {
if mode.contains("auto") { let mode: String = String::from_utf8_lossy(&output.stdout).trim().into();
// 获取 PAC URL if mode.contains("auto") {
let pac_output = Command::new("gsettings") // 获取 PAC URL
.args(["get", "org.gnome.system.proxy", "autoconfig-url"]) let pac_output = Command::new("gsettings")
.output() .args(["get", "org.gnome.system.proxy", "autoconfig-url"])
.await; .output()
.await;
if let Ok(pac_output) = pac_output { if let Ok(pac_output) = pac_output
if pac_output.status.success() { && pac_output.status.success()
let pac_url = String::from_utf8_lossy(&pac_output.stdout) {
.trim() let pac_url: String = String::from_utf8_lossy(&pac_output.stdout)
.trim_matches('\'') .trim()
.trim_matches('"') .trim_matches('\'')
.to_string(); .trim_matches('"')
.into();
if !pac_url.is_empty() { if !pac_url.is_empty() {
return Ok(AsyncAutoproxy { return Ok(AsyncAutoproxy {
enable: true, enable: true,
url: pac_url, url: pac_url,
}); });
}
}
} }
} }
} }
@@ -258,7 +286,7 @@ impl AsyncProxyQuery {
#[cfg(target_os = "windows")] #[cfg(target_os = "windows")]
async fn get_system_proxy_impl() -> Result<AsyncSysproxy> { async fn get_system_proxy_impl() -> Result<AsyncSysproxy> {
// Windows: 使用注册表直接读取代理设置 // Windows: 使用注册表直接读取代理设置
tokio::task::spawn_blocking(move || -> Result<AsyncSysproxy> { AsyncHandler::spawn_blocking(move || -> Result<AsyncSysproxy> {
Self::get_system_proxy_from_registry() Self::get_system_proxy_from_registry()
}) })
.await? .await?
@@ -269,7 +297,7 @@ impl AsyncProxyQuery {
use std::ptr; use std::ptr;
use winapi::shared::minwindef::{DWORD, HKEY}; use winapi::shared::minwindef::{DWORD, HKEY};
use winapi::um::winnt::{KEY_READ, REG_DWORD, REG_SZ}; use winapi::um::winnt::{KEY_READ, REG_DWORD, REG_SZ};
use winapi::um::winreg::{RegCloseKey, RegOpenKeyExW, RegQueryValueExW, HKEY_CURRENT_USER}; use winapi::um::winreg::{HKEY_CURRENT_USER, RegCloseKey, RegOpenKeyExW, RegQueryValueExW};
unsafe { unsafe {
let key_path = "Software\\Microsoft\\Windows\\CurrentVersion\\Internet Settings\0" let key_path = "Software\\Microsoft\\Windows\\CurrentVersion\\Internet Settings\0"
@@ -354,14 +382,18 @@ impl AsyncProxyQuery {
if !proxy_server.is_empty() { if !proxy_server.is_empty() {
// 解析服务器地址和端口 // 解析服务器地址和端口
let (host, port) = if let Some(colon_pos) = proxy_server.rfind(':') { let (host, port) = if let Some(colon_pos) = proxy_server.rfind(':') {
let host = proxy_server[..colon_pos].to_string(); let host = proxy_server[..colon_pos].into();
let port = proxy_server[colon_pos + 1..].parse::<u16>().unwrap_or(8080); let port = proxy_server[colon_pos + 1..].parse::<u16>().unwrap_or(8080);
(host, port) (host, port)
} else { } else {
(proxy_server, 8080) (proxy_server, 8080)
}; };
log::debug!(target: "app", "从注册表读取到代理设置: {}:{}, bypass: {}", host, port, bypass_list); logging!(
debug,
Type::Network,
"从注册表读取到代理设置: {host}:{port}, bypass: {bypass_list}"
);
Ok(AsyncSysproxy { Ok(AsyncSysproxy {
enable: true, enable: true,
@@ -384,12 +416,12 @@ impl AsyncProxyQuery {
} }
let stdout = String::from_utf8_lossy(&output.stdout); let stdout = String::from_utf8_lossy(&output.stdout);
log::debug!(target: "app", "scutil proxy output: {stdout}"); logging!(debug, Type::Network, "scutil proxy output: {stdout}");
let mut http_enabled = false; let mut http_enabled = false;
let mut http_host = String::new(); let mut http_host = String::new();
let mut http_port = 8080u16; let mut http_port = 8080u16;
let mut exceptions = Vec::new(); let mut exceptions: Vec<String> = Vec::new();
for line in stdout.lines() { for line in stdout.lines() {
let line = line.trim(); let line = line.trim();
@@ -397,20 +429,20 @@ impl AsyncProxyQuery {
http_enabled = true; http_enabled = true;
} else if line.contains("HTTPProxy") && !line.contains("Port") { } else if line.contains("HTTPProxy") && !line.contains("Port") {
if let Some(host_part) = line.split(':').nth(1) { if let Some(host_part) = line.split(':').nth(1) {
http_host = host_part.trim().to_string(); http_host = host_part.trim().into();
} }
} else if line.contains("HTTPPort") { } else if line.contains("HTTPPort") {
if let Some(port_part) = line.split(':').nth(1) { if let Some(port_part) = line.split(':').nth(1)
if let Ok(port) = port_part.trim().parse::<u16>() { && let Ok(port) = port_part.trim().parse::<u16>()
http_port = port; {
} http_port = port;
} }
} else if line.contains("ExceptionsList") { } else if line.contains("ExceptionsList") {
// 解析异常列表 // 解析异常列表
if let Some(list_part) = line.split(':').nth(1) { if let Some(list_part) = line.split(':').nth(1) {
let list = list_part.trim(); let list = list_part.trim();
if !list.is_empty() { if !list.is_empty() {
exceptions.push(list.to_string()); exceptions.push(list.into());
} }
} }
} }
@@ -429,16 +461,16 @@ impl AsyncProxyQuery {
// Linux: 检查环境变量和桌面环境设置 // Linux: 检查环境变量和桌面环境设置
// 首先检查环境变量 // 首先检查环境变量
if let Ok(http_proxy) = std::env::var("http_proxy") { if let Ok(http_proxy) = std::env::var("http_proxy")
if let Ok(proxy_info) = Self::parse_proxy_url(&http_proxy) { && let Ok(proxy_info) = Self::parse_proxy_url(&http_proxy)
return Ok(proxy_info); {
} return Ok(proxy_info);
} }
if let Ok(https_proxy) = std::env::var("https_proxy") { if let Ok(https_proxy) = std::env::var("https_proxy")
if let Ok(proxy_info) = Self::parse_proxy_url(&https_proxy) { && let Ok(proxy_info) = Self::parse_proxy_url(&https_proxy)
return Ok(proxy_info); {
} return Ok(proxy_info);
} }
// 尝试使用 gsettings 获取 GNOME 代理设置 // 尝试使用 gsettings 获取 GNOME 代理设置
@@ -447,45 +479,44 @@ impl AsyncProxyQuery {
.output() .output()
.await; .await;
if let Ok(mode_output) = mode_output { if let Ok(mode_output) = mode_output
if mode_output.status.success() { && mode_output.status.success()
let mode = String::from_utf8_lossy(&mode_output.stdout) {
.trim() let mode: String = String::from_utf8_lossy(&mode_output.stdout).trim().into();
.to_string(); if mode.contains("manual") {
if mode.contains("manual") { // 获取HTTP代理设置
// 获取HTTP代理设置 let host_result = Command::new("gsettings")
let host_result = Command::new("gsettings") .args(["get", "org.gnome.system.proxy.http", "host"])
.args(["get", "org.gnome.system.proxy.http", "host"]) .output()
.output() .await;
.await;
let port_result = Command::new("gsettings") let port_result = Command::new("gsettings")
.args(["get", "org.gnome.system.proxy.http", "port"]) .args(["get", "org.gnome.system.proxy.http", "port"])
.output() .output()
.await; .await;
if let (Ok(host_output), Ok(port_output)) = (host_result, port_result) { if let (Ok(host_output), Ok(port_output)) = (host_result, port_result)
if host_output.status.success() && port_output.status.success() { && host_output.status.success()
let host = String::from_utf8_lossy(&host_output.stdout) && port_output.status.success()
.trim() {
.trim_matches('\'') let host: String = String::from_utf8_lossy(&host_output.stdout)
.trim_matches('"') .trim()
.to_string(); .trim_matches('\'')
.trim_matches('"')
.into();
let port = String::from_utf8_lossy(&port_output.stdout) let port = String::from_utf8_lossy(&port_output.stdout)
.trim() .trim()
.parse::<u16>() .parse::<u16>()
.unwrap_or(8080); .unwrap_or(8080);
if !host.is_empty() { if !host.is_empty() {
return Ok(AsyncSysproxy { return Ok(AsyncSysproxy {
enable: true, enable: true,
host, host,
port, port,
bypass: String::new(), bypass: String::new(),
}); });
}
}
} }
} }
} }
@@ -510,11 +541,11 @@ impl AsyncProxyQuery {
// 解析主机和端口 // 解析主机和端口
let (host, port) = if let Some(colon_pos) = url.rfind(':') { let (host, port) = if let Some(colon_pos) = url.rfind(':') {
let host = url[..colon_pos].to_string(); let host: String = url[..colon_pos].into();
let port = url[colon_pos + 1..].parse::<u16>().unwrap_or(8080); let port = url[colon_pos + 1..].parse::<u16>().unwrap_or(8080);
(host, port) (host, port)
} else { } else {
(url.to_string(), 8080) (url.into(), 8080)
}; };
if host.is_empty() { if host.is_empty() {

View File

@@ -1,18 +1,24 @@
use crate::{config::Config, utils::dirs}; use crate::constants::files::DNS_CONFIG;
use crate::{
config::Config,
logging,
process::AsyncHandler,
utils::{dirs, logging::Type},
};
use anyhow::Error; use anyhow::Error;
use once_cell::sync::OnceCell; use once_cell::sync::OnceCell;
use parking_lot::Mutex; use parking_lot::Mutex;
use reqwest_dav::list_cmd::{ListEntity, ListFile}; use reqwest_dav::list_cmd::{ListEntity, ListFile};
use smartstring::alias::String;
use std::{ use std::{
collections::HashMap, collections::HashMap,
env::{consts::OS, temp_dir}, env::{consts::OS, temp_dir},
fs,
io::Write, io::Write,
path::PathBuf, path::PathBuf,
sync::Arc, sync::Arc,
time::Duration, time::Duration,
}; };
use tokio::time::timeout; use tokio::{fs, time::timeout};
use zip::write::SimpleFileOptions; use zip::write::SimpleFileOptions;
// 应用版本常量,来自 tauri.conf.json // 应用版本常量,来自 tauri.conf.json
@@ -74,16 +80,19 @@ impl WebDavClient {
// 获取或创建配置 // 获取或创建配置
let config = { let config = {
let mut lock = self.config.lock(); // 首先检查是否已有配置
if let Some(cfg) = lock.as_ref() { let existing_config = self.config.lock().as_ref().cloned();
cfg.clone()
if let Some(cfg) = existing_config {
cfg
} else { } else {
let verge = Config::verge().latest().clone(); // 释放锁后获取异步配置
let verge = Config::verge().await.latest_ref().clone();
if verge.webdav_url.is_none() if verge.webdav_url.is_none()
|| verge.webdav_username.is_none() || verge.webdav_username.is_none()
|| verge.webdav_password.is_none() || verge.webdav_password.is_none()
{ {
let msg = "Unable to create web dav client, please make sure the webdav config is correct".to_string(); let msg: String = "Unable to create web dav client, please make sure the webdav config is correct".into();
return Err(anyhow::Error::msg(msg)); return Err(anyhow::Error::msg(msg));
} }
@@ -92,12 +101,13 @@ impl WebDavClient {
.webdav_url .webdav_url
.unwrap_or_default() .unwrap_or_default()
.trim_end_matches('/') .trim_end_matches('/')
.to_string(), .into(),
username: verge.webdav_username.unwrap_or_default(), username: verge.webdav_username.unwrap_or_default(),
password: verge.webdav_password.unwrap_or_default(), password: verge.webdav_password.unwrap_or_default(),
}; };
*lock = Some(config.clone()); // 重新获取锁并存储配置
*self.config.lock() = Some(config.clone());
config config
} }
}; };
@@ -117,20 +127,38 @@ impl WebDavClient {
attempt.follow() attempt.follow()
} }
})) }))
.build() .build()?,
.unwrap(),
) )
.set_host(config.url) .set_host(config.url.into())
.set_auth(reqwest_dav::Auth::Basic(config.username, config.password)) .set_auth(reqwest_dav::Auth::Basic(
config.username.into(),
config.password.into(),
))
.build()?; .build()?;
// 尝试检查目录是否存在,如果不存在尝试创建,但创建失败不报错 // 尝试检查目录是否存在,如果不存在尝试创建
if client if client
.list(dirs::BACKUP_DIR, reqwest_dav::Depth::Number(0)) .list(dirs::BACKUP_DIR, reqwest_dav::Depth::Number(0))
.await .await
.is_err() .is_err()
{ {
let _ = client.mkcol(dirs::BACKUP_DIR).await; match client.mkcol(dirs::BACKUP_DIR).await {
Ok(_) => logging!(info, Type::Backup, "Successfully created backup directory"),
Err(e) => {
logging!(
warn,
Type::Backup,
"Warning: Failed to create backup directory: {}",
e
);
// 清除缓存,强制下次重新尝试
self.reset();
return Err(anyhow::Error::msg(format!(
"Failed to create backup directory: {}",
e
)));
}
}
} }
// 缓存客户端 // 缓存客户端
@@ -149,10 +177,10 @@ impl WebDavClient {
pub async fn upload(&self, file_path: PathBuf, file_name: String) -> Result<(), Error> { pub async fn upload(&self, file_path: PathBuf, file_name: String) -> Result<(), Error> {
let client = self.get_client(Operation::Upload).await?; let client = self.get_client(Operation::Upload).await?;
let webdav_path: String = format!("{}/{}", dirs::BACKUP_DIR, file_name); let webdav_path: String = format!("{}/{}", dirs::BACKUP_DIR, file_name).into();
// 读取文件并上传,如果失败尝试一次重试 // 读取文件并上传,如果失败尝试一次重试
let file_content = fs::read(&file_path)?; let file_content = fs::read(&file_path).await?;
// 添加超时保护 // 添加超时保护
let upload_result = timeout( let upload_result = timeout(
@@ -163,7 +191,11 @@ impl WebDavClient {
match upload_result { match upload_result {
Err(_) => { Err(_) => {
log::warn!("Upload timed out, retrying once"); logging!(
warn,
Type::Backup,
"Warning: Upload timed out, retrying once"
);
tokio::time::sleep(Duration::from_millis(500)).await; tokio::time::sleep(Duration::from_millis(500)).await;
timeout( timeout(
Duration::from_secs(TIMEOUT_UPLOAD), Duration::from_secs(TIMEOUT_UPLOAD),
@@ -174,7 +206,11 @@ impl WebDavClient {
} }
Ok(Err(e)) => { Ok(Err(e)) => {
log::warn!("Upload failed, retrying once: {e}"); logging!(
warn,
Type::Backup,
"Warning: Upload failed, retrying once: {e}"
);
tokio::time::sleep(Duration::from_millis(500)).await; tokio::time::sleep(Duration::from_millis(500)).await;
timeout( timeout(
Duration::from_secs(TIMEOUT_UPLOAD), Duration::from_secs(TIMEOUT_UPLOAD),
@@ -194,7 +230,7 @@ impl WebDavClient {
let fut = async { let fut = async {
let response = client.get(path.as_str()).await?; let response = client.get(path.as_str()).await?;
let content = response.bytes().await?; let content = response.bytes().await?;
fs::write(&storage_path, &content)?; fs::write(&storage_path, &content).await?;
Ok::<(), Error>(()) Ok::<(), Error>(())
}; };
@@ -232,41 +268,53 @@ impl WebDavClient {
} }
} }
pub fn create_backup() -> Result<(String, PathBuf), Error> { pub async fn create_backup() -> Result<(String, PathBuf), Error> {
let now = chrono::Local::now().format("%Y-%m-%d_%H-%M-%S").to_string(); let now = chrono::Local::now().format("%Y-%m-%d_%H-%M-%S").to_string();
let zip_file_name = format!("{OS}-backup-{now}.zip"); let zip_file_name: String = format!("{OS}-backup-{now}.zip").into();
let zip_path = temp_dir().join(&zip_file_name); let zip_path = temp_dir().join(zip_file_name.as_str());
let file = fs::File::create(&zip_path)?; let value = zip_path.clone();
let file = AsyncHandler::spawn_blocking(move || std::fs::File::create(&value)).await??;
let mut zip = zip::ZipWriter::new(file); let mut zip = zip::ZipWriter::new(file);
zip.add_directory("profiles/", SimpleFileOptions::default())?; zip.add_directory("profiles/", SimpleFileOptions::default())?;
let options = SimpleFileOptions::default().compression_method(zip::CompressionMethod::Stored); let options = SimpleFileOptions::default().compression_method(zip::CompressionMethod::Stored);
if let Ok(entries) = fs::read_dir(dirs::app_profiles_dir()?) {
for entry in entries { if let Ok(mut entries) = fs::read_dir(dirs::app_profiles_dir()?).await {
let entry = entry.unwrap(); while let Some(entry) = entries.next_entry().await? {
let path = entry.path(); let path = entry.path();
if path.is_file() { if path.is_file() {
let backup_path = format!("profiles/{}", entry.file_name().to_str().unwrap()); let file_name_os = entry.file_name();
let file_name = file_name_os
.to_str()
.ok_or_else(|| anyhow::Error::msg("Invalid file name encoding"))?;
let backup_path = format!("profiles/{}", file_name);
zip.start_file(backup_path, options)?; zip.start_file(backup_path, options)?;
zip.write_all(fs::read(path).unwrap().as_slice())?; let file_content = fs::read(&path).await?;
zip.write_all(&file_content)?;
} }
} }
} }
zip.start_file(dirs::CLASH_CONFIG, options)?; zip.start_file(dirs::CLASH_CONFIG, options)?;
zip.write_all(fs::read(dirs::clash_path()?)?.as_slice())?; zip.write_all(fs::read(dirs::clash_path()?).await?.as_slice())?;
let mut verge_config: serde_json::Value = let verge_text = fs::read_to_string(dirs::verge_path()?).await?;
serde_yaml::from_str(&fs::read_to_string(dirs::verge_path()?)?)?; let mut verge_config: serde_json::Value = serde_yaml_ng::from_str(&verge_text)?;
if let Some(obj) = verge_config.as_object_mut() { if let Some(obj) = verge_config.as_object_mut() {
obj.remove("webdav_username"); obj.remove("webdav_username");
obj.remove("webdav_password"); obj.remove("webdav_password");
obj.remove("webdav_url"); obj.remove("webdav_url");
} }
zip.start_file(dirs::VERGE_CONFIG, options)?; zip.start_file(dirs::VERGE_CONFIG, options)?;
zip.write_all(serde_yaml::to_string(&verge_config)?.as_bytes())?; zip.write_all(serde_yaml_ng::to_string(&verge_config)?.as_bytes())?;
let dns_config_path = dirs::app_home_dir()?.join(DNS_CONFIG);
if dns_config_path.exists() {
zip.start_file(DNS_CONFIG, options)?;
zip.write_all(fs::read(&dns_config_path).await?.as_slice())?;
}
zip.start_file(dirs::PROFILE_YAML, options)?; zip.start_file(dirs::PROFILE_YAML, options)?;
zip.write_all(fs::read(dirs::profiles_path()?)?.as_slice())?; zip.write_all(fs::read(dirs::profiles_path()?).await?.as_slice())?;
zip.finish()?; zip.finish()?;
Ok((zip_file_name, zip_path)) Ok((zip_file_name, zip_path))
} }

File diff suppressed because it is too large Load Diff

View File

@@ -1,38 +1,24 @@
use parking_lot::RwLock;
use std::sync::Arc; use std::sync::Arc;
use tokio::sync::RwLock;
use tokio::sync::{mpsc, oneshot}; use tokio::sync::{mpsc, oneshot};
use tokio::time::{sleep, timeout, Duration}; use tokio::time::{Duration, sleep, timeout};
use tokio_stream::{StreamExt, wrappers::UnboundedReceiverStream};
use crate::config::{Config, IVerge}; use crate::config::{Config, IVerge};
use crate::core::async_proxy_query::AsyncProxyQuery; use crate::core::{async_proxy_query::AsyncProxyQuery, handle};
use crate::logging_error; use crate::process::AsyncHandler;
use crate::utils::logging::Type; use crate::{logging, utils::logging::Type};
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use smartstring::alias::String;
use sysproxy::{Autoproxy, Sysproxy}; use sysproxy::{Autoproxy, Sysproxy};
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum ProxyEvent { pub enum ProxyEvent {
/// 配置变更事件 /// 配置变更事件
ConfigChanged, ConfigChanged,
/// 强制检查代理状态
#[allow(dead_code)]
ForceCheck,
/// 启用系统代理
#[allow(dead_code)]
EnableProxy,
/// 禁用系统代理
#[allow(dead_code)]
DisableProxy,
/// 切换到PAC模式
#[allow(dead_code)]
SwitchToPac,
/// 切换到HTTP代理模式
#[allow(dead_code)]
SwitchToHttp,
/// 应用启动事件 /// 应用启动事件
AppStarted, AppStarted,
/// 应用关闭事件 /// 应用关闭事件
#[allow(dead_code)]
AppStopping, AppStopping,
} }
@@ -53,13 +39,13 @@ impl Default for ProxyState {
pac_enabled: false, pac_enabled: false,
auto_proxy: Autoproxy { auto_proxy: Autoproxy {
enable: false, enable: false,
url: "".to_string(), url: "".into(),
}, },
sys_proxy: Sysproxy { sys_proxy: Sysproxy {
enable: false, enable: false,
host: "127.0.0.1".to_string(), host: "127.0.0.1".into(),
port: 7890, port: 7897,
bypass: "".to_string(), bypass: "".into(),
}, },
last_updated: std::time::Instant::now(), last_updated: std::time::Instant::now(),
is_healthy: true, is_healthy: true,
@@ -74,7 +60,7 @@ pub struct EventDrivenProxyManager {
} }
#[derive(Debug)] #[derive(Debug)]
struct QueryRequest { pub struct QueryRequest {
response_tx: oneshot::Sender<Autoproxy>, response_tx: oneshot::Sender<Autoproxy>,
} }
@@ -83,6 +69,7 @@ struct ProxyConfig {
sys_enabled: bool, sys_enabled: bool,
pac_enabled: bool, pac_enabled: bool,
guard_enabled: bool, guard_enabled: bool,
guard_duration: u64,
} }
static PROXY_MANAGER: Lazy<EventDrivenProxyManager> = Lazy::new(EventDrivenProxyManager::new); static PROXY_MANAGER: Lazy<EventDrivenProxyManager> = Lazy::new(EventDrivenProxyManager::new);
@@ -97,7 +84,8 @@ impl EventDrivenProxyManager {
let (event_tx, event_rx) = mpsc::unbounded_channel(); let (event_tx, event_rx) = mpsc::unbounded_channel();
let (query_tx, query_rx) = mpsc::unbounded_channel(); let (query_tx, query_rx) = mpsc::unbounded_channel();
Self::start_event_loop(state.clone(), event_rx, query_rx); let state_clone = Arc::clone(&state);
AsyncHandler::spawn(move || Self::start_event_loop(state_clone, event_rx, query_rx));
Self { Self {
state, state,
@@ -107,8 +95,8 @@ impl EventDrivenProxyManager {
} }
/// 获取自动代理配置(缓存) /// 获取自动代理配置(缓存)
pub fn get_auto_proxy_cached(&self) -> Autoproxy { pub async fn get_auto_proxy_cached(&self) -> Autoproxy {
self.state.read().auto_proxy.clone() self.state.read().await.auto_proxy.clone()
} }
/// 异步获取最新的自动代理配置 /// 异步获取最新的自动代理配置
@@ -117,15 +105,15 @@ impl EventDrivenProxyManager {
let query = QueryRequest { response_tx: tx }; let query = QueryRequest { response_tx: tx };
if self.query_sender.send(query).is_err() { if self.query_sender.send(query).is_err() {
log::error!(target: "app", "发送查询请求失败,返回缓存数据"); logging!(error, Type::Network, "发送查询请求失败,返回缓存数据");
return self.get_auto_proxy_cached(); return self.get_auto_proxy_cached().await;
} }
match timeout(Duration::from_secs(5), rx).await { match timeout(Duration::from_secs(5), rx).await {
Ok(Ok(result)) => result, Ok(Ok(result)) => result,
_ => { _ => {
log::warn!(target: "app", "查询超时,返回缓存数据"); logging!(warn, Type::Network, "Warning: 查询超时,返回缓存数据");
self.get_auto_proxy_cached() self.get_auto_proxy_cached().await
} }
} }
} }
@@ -141,96 +129,86 @@ impl EventDrivenProxyManager {
} }
/// 通知应用即将关闭 /// 通知应用即将关闭
#[allow(dead_code)]
pub fn notify_app_stopping(&self) { pub fn notify_app_stopping(&self) {
self.send_event(ProxyEvent::AppStopping); self.send_event(ProxyEvent::AppStopping);
} }
/// 启用系统代理
#[allow(dead_code)]
pub fn enable_proxy(&self) {
self.send_event(ProxyEvent::EnableProxy);
}
/// 禁用系统代理
#[allow(dead_code)]
pub fn disable_proxy(&self) {
self.send_event(ProxyEvent::DisableProxy);
}
/// 强制检查代理状态
#[allow(dead_code)]
pub fn force_check(&self) {
self.send_event(ProxyEvent::ForceCheck);
}
fn send_event(&self, event: ProxyEvent) { fn send_event(&self, event: ProxyEvent) {
if let Err(e) = self.event_sender.send(event) { if let Err(e) = self.event_sender.send(event) {
log::error!(target: "app", "发送代理事件失败: {e}"); logging!(error, Type::Network, "发送代理事件失败: {e}");
} }
} }
fn start_event_loop( pub async fn start_event_loop(
state: Arc<RwLock<ProxyState>>, state: Arc<RwLock<ProxyState>>,
mut event_rx: mpsc::UnboundedReceiver<ProxyEvent>, event_rx: mpsc::UnboundedReceiver<ProxyEvent>,
mut query_rx: mpsc::UnboundedReceiver<QueryRequest>, query_rx: mpsc::UnboundedReceiver<QueryRequest>,
) { ) {
tokio::spawn(async move { logging!(info, Type::Network, "事件驱动代理管理器启动");
log::info!(target: "app", "事件驱动代理管理器启动");
loop { // 将 mpsc 接收器包装成 Stream避免每次循环创建 future
tokio::select! { let mut event_stream = UnboundedReceiverStream::new(event_rx);
event = event_rx.recv() => { let mut query_stream = UnboundedReceiverStream::new(query_rx);
match event {
Some(event) => { // 初始化定时器,用于周期性检查代理设置
log::debug!(target: "app", "处理代理事件: {event:?}"); let config = Self::get_proxy_config().await;
Self::handle_event(&state, event).await; let mut guard_interval = tokio::time::interval(Duration::from_secs(config.guard_duration));
} // 防止首次立即触发
None => { guard_interval.tick().await;
log::info!(target: "app", "事件通道关闭,代理管理器停止");
break; loop {
} tokio::select! {
} Some(event) = event_stream.next() => {
} logging!(debug, Type::Network, "处理代理事件: {event:?}");
query = query_rx.recv() => { let event_clone = event.clone(); // 保存一份副本用于后续检查
match query { Self::handle_event(&state, event).await;
Some(query) => {
let result = Self::handle_query(&state).await; // 检查是否是配置变更事件,如果是,则可能需要更新定时器
let _ = query.response_tx.send(result); if matches!(event_clone, ProxyEvent::ConfigChanged | ProxyEvent::AppStarted) {
} let new_config = Self::get_proxy_config().await;
None => { // 重新设置定时器间隔
log::info!(target: "app", "查询通道关闭"); guard_interval = tokio::time::interval(Duration::from_secs(new_config.guard_duration));
break; // 防止首次立即触发
} guard_interval.tick().await;
}
} }
} }
Some(query) = query_stream.next() => {
let result = Self::handle_query(&state).await;
let _ = query.response_tx.send(result);
}
_ = guard_interval.tick() => {
// 定时检查代理设置
let config = Self::get_proxy_config().await;
if config.guard_enabled && config.sys_enabled {
logging!(debug, Type::Network, "定时检查代理设置");
Self::check_and_restore_proxy(&state).await;
}
}
else => {
// 两个通道都关闭时退出
logging!(info, Type::Network, "事件或查询通道关闭,代理管理器停止");
break;
}
} }
}); }
} }
async fn handle_event(state: &Arc<RwLock<ProxyState>>, event: ProxyEvent) { async fn handle_event(state: &Arc<RwLock<ProxyState>>, event: ProxyEvent) {
match event { match event {
ProxyEvent::ConfigChanged | ProxyEvent::ForceCheck => { ProxyEvent::ConfigChanged => {
Self::update_proxy_config(state).await; Self::update_proxy_config(state).await;
} }
ProxyEvent::EnableProxy => {
Self::enable_system_proxy(state).await;
}
ProxyEvent::DisableProxy => {
Self::disable_system_proxy(state).await;
}
ProxyEvent::SwitchToPac => {
Self::switch_proxy_mode(state, true).await;
}
ProxyEvent::SwitchToHttp => {
Self::switch_proxy_mode(state, false).await;
}
ProxyEvent::AppStarted => { ProxyEvent::AppStarted => {
Self::initialize_proxy_state(state).await; Self::initialize_proxy_state(state).await;
} }
ProxyEvent::AppStopping => { ProxyEvent::AppStopping => {
log::info!(target: "app", "清理代理状态"); logging!(info, Type::Network, "清理代理状态");
Self::update_state_timestamp(state, |s| {
s.sys_enabled = false;
s.pac_enabled = false;
s.is_healthy = false;
})
.await;
} }
} }
} }
@@ -240,15 +218,16 @@ impl EventDrivenProxyManager {
Self::update_state_timestamp(state, |s| { Self::update_state_timestamp(state, |s| {
s.auto_proxy = auto_proxy.clone(); s.auto_proxy = auto_proxy.clone();
}); })
.await;
auto_proxy auto_proxy
} }
async fn initialize_proxy_state(state: &Arc<RwLock<ProxyState>>) { async fn initialize_proxy_state(state: &Arc<RwLock<ProxyState>>) {
log::info!(target: "app", "初始化代理状态"); logging!(info, Type::Network, "初始化代理状态");
let config = Self::get_proxy_config(); let config = Self::get_proxy_config().await;
let auto_proxy = Self::get_auto_proxy_with_timeout().await; let auto_proxy = Self::get_auto_proxy_with_timeout().await;
let sys_proxy = Self::get_sys_proxy_with_timeout().await; let sys_proxy = Self::get_sys_proxy_with_timeout().await;
@@ -258,20 +237,28 @@ impl EventDrivenProxyManager {
s.auto_proxy = auto_proxy; s.auto_proxy = auto_proxy;
s.sys_proxy = sys_proxy; s.sys_proxy = sys_proxy;
s.is_healthy = true; s.is_healthy = true;
}); })
.await;
log::info!(target: "app", "代理状态初始化完成: sys={}, pac={}", config.sys_enabled, config.pac_enabled); logging!(
info,
Type::Network,
"代理状态初始化完成: sys={}, pac={}",
config.sys_enabled,
config.pac_enabled
);
} }
async fn update_proxy_config(state: &Arc<RwLock<ProxyState>>) { async fn update_proxy_config(state: &Arc<RwLock<ProxyState>>) {
log::debug!(target: "app", "更新代理配置"); logging!(debug, Type::Network, "更新代理配置");
let config = Self::get_proxy_config(); let config = Self::get_proxy_config().await;
Self::update_state_timestamp(state, |s| { Self::update_state_timestamp(state, |s| {
s.sys_enabled = config.sys_enabled; s.sys_enabled = config.sys_enabled;
s.pac_enabled = config.pac_enabled; s.pac_enabled = config.pac_enabled;
}); })
.await;
if config.guard_enabled && config.sys_enabled { if config.guard_enabled && config.sys_enabled {
Self::check_and_restore_proxy(state).await; Self::check_and_restore_proxy(state).await;
@@ -279,8 +266,12 @@ impl EventDrivenProxyManager {
} }
async fn check_and_restore_proxy(state: &Arc<RwLock<ProxyState>>) { async fn check_and_restore_proxy(state: &Arc<RwLock<ProxyState>>) {
if handle::Handle::global().is_exiting() {
logging!(debug, Type::Network, "应用正在退出,跳过系统代理守卫检查");
return;
}
let (sys_enabled, pac_enabled) = { let (sys_enabled, pac_enabled) = {
let s = state.read(); let s = state.read().await;
(s.sys_enabled, s.pac_enabled) (s.sys_enabled, s.pac_enabled)
}; };
@@ -288,7 +279,7 @@ impl EventDrivenProxyManager {
return; return;
} }
log::debug!(target: "app", "检查代理状态"); logging!(debug, Type::Network, "检查代理状态");
if pac_enabled { if pac_enabled {
Self::check_and_restore_pac_proxy(state).await; Self::check_and_restore_pac_proxy(state).await;
@@ -298,16 +289,24 @@ impl EventDrivenProxyManager {
} }
async fn check_and_restore_pac_proxy(state: &Arc<RwLock<ProxyState>>) { async fn check_and_restore_pac_proxy(state: &Arc<RwLock<ProxyState>>) {
if handle::Handle::global().is_exiting() {
logging!(debug, Type::Network, "应用正在退出跳过PAC代理恢复检查");
return;
}
let current = Self::get_auto_proxy_with_timeout().await; let current = Self::get_auto_proxy_with_timeout().await;
let expected = Self::get_expected_pac_config(); let expected = Self::get_expected_pac_config().await;
Self::update_state_timestamp(state, |s| { Self::update_state_timestamp(state, |s| {
s.auto_proxy = current.clone(); s.auto_proxy = current.clone();
}); })
.await;
if !current.enable || current.url != expected.url { if !current.enable || current.url != expected.url {
log::info!(target: "app", "PAC代理设置异常正在恢复..."); logging!(info, Type::Network, "PAC代理设置异常正在恢复...");
Self::restore_pac_proxy(&expected.url).await; if let Err(e) = Self::restore_pac_proxy(&expected.url).await {
logging!(error, Type::Network, "恢复PAC代理失败: {}", e);
}
sleep(Duration::from_millis(500)).await; sleep(Duration::from_millis(500)).await;
let restored = Self::get_auto_proxy_with_timeout().await; let restored = Self::get_auto_proxy_with_timeout().await;
@@ -315,21 +314,30 @@ impl EventDrivenProxyManager {
Self::update_state_timestamp(state, |s| { Self::update_state_timestamp(state, |s| {
s.is_healthy = restored.enable && restored.url == expected.url; s.is_healthy = restored.enable && restored.url == expected.url;
s.auto_proxy = restored; s.auto_proxy = restored;
}); })
.await;
} }
} }
async fn check_and_restore_sys_proxy(state: &Arc<RwLock<ProxyState>>) { async fn check_and_restore_sys_proxy(state: &Arc<RwLock<ProxyState>>) {
if handle::Handle::global().is_exiting() {
logging!(debug, Type::Network, "应用正在退出,跳过系统代理恢复检查");
return;
}
let current = Self::get_sys_proxy_with_timeout().await; let current = Self::get_sys_proxy_with_timeout().await;
let expected = Self::get_expected_sys_proxy(); let expected = Self::get_expected_sys_proxy().await;
Self::update_state_timestamp(state, |s| { Self::update_state_timestamp(state, |s| {
s.sys_proxy = current.clone(); s.sys_proxy = current.clone();
}); })
.await;
if !current.enable || current.host != expected.host || current.port != expected.port { if !current.enable || current.host != expected.host || current.port != expected.port {
log::info!(target: "app", "系统代理设置异常,正在恢复..."); logging!(info, Type::Network, "系统代理设置异常,正在恢复...");
Self::restore_sys_proxy(&expected).await; if let Err(e) = Self::restore_sys_proxy(&expected).await {
logging!(error, Type::Network, "恢复系统代理失败: {}", e);
}
sleep(Duration::from_millis(500)).await; sleep(Duration::from_millis(500)).await;
let restored = Self::get_sys_proxy_with_timeout().await; let restored = Self::get_sys_proxy_with_timeout().await;
@@ -339,60 +347,11 @@ impl EventDrivenProxyManager {
&& restored.host == expected.host && restored.host == expected.host
&& restored.port == expected.port; && restored.port == expected.port;
s.sys_proxy = restored; s.sys_proxy = restored;
}); })
.await;
} }
} }
async fn enable_system_proxy(state: &Arc<RwLock<ProxyState>>) {
log::info!(target: "app", "启用系统代理");
let pac_enabled = state.read().pac_enabled;
if pac_enabled {
let expected = Self::get_expected_pac_config();
Self::restore_pac_proxy(&expected.url).await;
} else {
let expected = Self::get_expected_sys_proxy();
Self::restore_sys_proxy(&expected).await;
}
Self::check_and_restore_proxy(state).await;
}
async fn disable_system_proxy(_state: &Arc<RwLock<ProxyState>>) {
log::info!(target: "app", "禁用系统代理");
#[cfg(not(target_os = "windows"))]
{
let disabled_sys = Sysproxy::default();
let disabled_auto = Autoproxy::default();
logging_error!(Type::System, true, disabled_auto.set_auto_proxy());
logging_error!(Type::System, true, disabled_sys.set_system_proxy());
}
}
async fn switch_proxy_mode(state: &Arc<RwLock<ProxyState>>, to_pac: bool) {
log::info!(target: "app", "切换到{}模式", if to_pac { "PAC" } else { "HTTP代理" });
if to_pac {
let disabled_sys = Sysproxy::default();
logging_error!(Type::System, true, disabled_sys.set_system_proxy());
let expected = Self::get_expected_pac_config();
Self::restore_pac_proxy(&expected.url).await;
} else {
let disabled_auto = Autoproxy::default();
logging_error!(Type::System, true, disabled_auto.set_auto_proxy());
let expected = Self::get_expected_sys_proxy();
Self::restore_sys_proxy(&expected).await;
}
Self::update_state_timestamp(state, |s| s.pac_enabled = to_pac);
Self::check_and_restore_proxy(state).await;
}
async fn get_auto_proxy_with_timeout() -> Autoproxy { async fn get_auto_proxy_with_timeout() -> Autoproxy {
let async_proxy = AsyncProxyQuery::get_auto_proxy().await; let async_proxy = AsyncProxyQuery::get_auto_proxy().await;
@@ -416,116 +375,149 @@ impl EventDrivenProxyManager {
} }
// 统一的状态更新方法 // 统一的状态更新方法
fn update_state_timestamp<F>(state: &Arc<RwLock<ProxyState>>, update_fn: F) async fn update_state_timestamp<F>(state: &Arc<RwLock<ProxyState>>, update_fn: F)
where where
F: FnOnce(&mut ProxyState), F: FnOnce(&mut ProxyState),
{ {
let mut state_guard = state.write(); let mut state_guard = state.write().await;
update_fn(&mut state_guard); update_fn(&mut state_guard);
state_guard.last_updated = std::time::Instant::now(); state_guard.last_updated = std::time::Instant::now();
} }
fn get_proxy_config() -> ProxyConfig { async fn get_proxy_config() -> ProxyConfig {
let verge_config = Config::verge(); let (sys_enabled, pac_enabled, guard_enabled, guard_duration) = {
let verge = verge_config.latest(); let verge_config = Config::verge().await;
let verge = verge_config.latest_ref();
(
verge.enable_system_proxy.unwrap_or(false),
verge.proxy_auto_config.unwrap_or(false),
verge.enable_proxy_guard.unwrap_or(false),
verge.proxy_guard_duration.unwrap_or(30), // 默认30秒
)
};
ProxyConfig { ProxyConfig {
sys_enabled: verge.enable_system_proxy.unwrap_or(false), sys_enabled,
pac_enabled: verge.proxy_auto_config.unwrap_or(false), pac_enabled,
guard_enabled: verge.enable_proxy_guard.unwrap_or(false), guard_enabled,
guard_duration,
} }
} }
fn get_expected_pac_config() -> Autoproxy { async fn get_expected_pac_config() -> Autoproxy {
let verge_config = Config::verge(); let proxy_host = {
let verge = verge_config.latest(); let verge_config = Config::verge().await;
let (proxy_host, pac_port) = ( let verge = verge_config.latest_ref();
verge verge
.proxy_host .proxy_host
.clone() .clone()
.unwrap_or_else(|| "127.0.0.1".to_string()), .unwrap_or_else(|| "127.0.0.1".into())
IVerge::get_singleton_port(), };
); let pac_port = IVerge::get_singleton_port();
Autoproxy { Autoproxy {
enable: true, enable: true,
url: format!("http://{proxy_host}:{pac_port}/commands/pac"), url: format!("http://{proxy_host}:{pac_port}/commands/pac"),
} }
} }
fn get_expected_sys_proxy() -> Sysproxy { async fn get_expected_sys_proxy() -> Sysproxy {
let verge_config = Config::verge(); use crate::constants::network;
let verge = verge_config.latest();
let port = verge let (verge_mixed_port, proxy_host) = {
.verge_mixed_port let verge_config = Config::verge().await;
.unwrap_or(Config::clash().data().get_mixed_port()); let verge_ref = verge_config.latest_ref();
let proxy_host = verge (verge_ref.verge_mixed_port, verge_ref.proxy_host.clone())
.proxy_host };
.clone()
.unwrap_or_else(|| "127.0.0.1".to_string()); let default_port = {
let clash_config = Config::clash().await;
clash_config.latest_ref().get_mixed_port()
};
let port = verge_mixed_port.unwrap_or(default_port);
let host = proxy_host
.unwrap_or_else(|| network::DEFAULT_PROXY_HOST.into())
.into();
Sysproxy { Sysproxy {
enable: true, enable: true,
host: proxy_host, host,
port, port,
bypass: Self::get_bypass_config(), bypass: Self::get_bypass_config().await.into(),
} }
} }
fn get_bypass_config() -> String { async fn get_bypass_config() -> String {
let verge_config = Config::verge(); use crate::constants::bypass;
let verge = verge_config.latest();
let verge_config = Config::verge().await;
let verge = verge_config.latest_ref();
let use_default = verge.use_default_bypass.unwrap_or(true); let use_default = verge.use_default_bypass.unwrap_or(true);
let custom_bypass = verge.system_proxy_bypass.clone().unwrap_or_default(); let custom = verge.system_proxy_bypass.as_deref().unwrap_or("");
#[cfg(target_os = "windows")] match (use_default, custom.is_empty()) {
let default_bypass = "localhost;127.*;192.168.*;10.*;172.16.*;172.17.*;172.18.*;172.19.*;172.20.*;172.21.*;172.22.*;172.23.*;172.24.*;172.25.*;172.26.*;172.27.*;172.28.*;172.29.*;172.30.*;172.31.*;<local>"; (_, true) => bypass::DEFAULT.into(),
(true, false) => format!("{},{}", bypass::DEFAULT, custom).into(),
#[cfg(target_os = "linux")] (false, false) => custom.into(),
let default_bypass =
"localhost,127.0.0.1,192.168.0.0/16,10.0.0.0/8,172.16.0.0/12,172.29.0.0/16,::1";
#[cfg(target_os = "macos")]
let default_bypass = "127.0.0.1,192.168.0.0/16,10.0.0.0/8,172.16.0.0/12,172.29.0.0/16,localhost,*.local,*.crashlytics.com,<local>";
if custom_bypass.is_empty() {
default_bypass.to_string()
} else if use_default {
format!("{default_bypass},{custom_bypass}")
} else {
custom_bypass
} }
} }
async fn restore_pac_proxy(expected_url: &str) { #[cfg(target_os = "windows")]
#[cfg(not(target_os = "windows"))] async fn restore_pac_proxy(expected_url: &str) -> Result<(), anyhow::Error> {
if handle::Handle::global().is_exiting() {
logging!(debug, Type::Network, "应用正在退出跳过PAC代理恢复");
return Ok(());
}
Self::execute_sysproxy_command(&["pac", expected_url]).await
}
#[allow(clippy::unused_async)]
#[cfg(not(target_os = "windows"))]
async fn restore_pac_proxy(expected_url: &str) -> Result<(), anyhow::Error> {
{ {
let new_autoproxy = Autoproxy { let new_autoproxy = Autoproxy {
enable: true, enable: true,
url: expected_url.to_string(), url: expected_url.to_string(),
}; };
logging_error!(Type::System, true, new_autoproxy.set_auto_proxy()); // logging_error!(Type::System, true, new_autoproxy.set_auto_proxy());
} new_autoproxy
.set_auto_proxy()
#[cfg(target_os = "windows")] .map_err(|e| anyhow::anyhow!("Failed to set auto proxy: {}", e))
{
Self::execute_sysproxy_command(&["pac", expected_url]).await;
}
}
async fn restore_sys_proxy(expected: &Sysproxy) {
#[cfg(not(target_os = "windows"))]
{
logging_error!(Type::System, true, expected.set_system_proxy());
}
#[cfg(target_os = "windows")]
{
let address = format!("{}:{}", expected.host, expected.port);
Self::execute_sysproxy_command(&["global", &address, &expected.bypass]).await;
} }
} }
#[cfg(target_os = "windows")] #[cfg(target_os = "windows")]
async fn execute_sysproxy_command(args: &[&str]) { async fn restore_sys_proxy(expected: &Sysproxy) -> Result<(), anyhow::Error> {
if handle::Handle::global().is_exiting() {
logging!(debug, Type::Network, "应用正在退出,跳过系统代理恢复");
return Ok(());
}
let address = format!("{}:{}", expected.host, expected.port);
Self::execute_sysproxy_command(&["global", &address, &expected.bypass]).await
}
#[allow(clippy::unused_async)]
#[cfg(not(target_os = "windows"))]
async fn restore_sys_proxy(expected: &Sysproxy) -> Result<(), anyhow::Error> {
{
// logging_error!(Type::System, true, expected.set_system_proxy());
expected
.set_system_proxy()
.map_err(|e| anyhow::anyhow!("Failed to set system proxy: {}", e))
}
}
#[cfg(target_os = "windows")]
async fn execute_sysproxy_command(args: &[&str]) -> Result<(), anyhow::Error> {
if handle::Handle::global().is_exiting() {
logging!(
debug,
Type::Network,
"应用正在退出,取消调用 sysproxy.exe参数: {:?}",
args
);
return Ok(());
}
use crate::utils::dirs; use crate::utils::dirs;
#[allow(unused_imports)] // creation_flags必须 #[allow(unused_imports)] // creation_flags必须
use std::os::windows::process::CommandExt; use std::os::windows::process::CommandExt;
@@ -534,38 +526,23 @@ impl EventDrivenProxyManager {
let binary_path = match dirs::service_path() { let binary_path = match dirs::service_path() {
Ok(path) => path, Ok(path) => path,
Err(e) => { Err(e) => {
log::error!(target: "app", "获取服务路径失败: {}", e); logging!(error, Type::Network, "获取服务路径失败: {e}");
return; return Err(e);
} }
}; };
let sysproxy_exe = binary_path.with_file_name("sysproxy.exe"); let sysproxy_exe = binary_path.with_file_name("sysproxy.exe");
if !sysproxy_exe.exists() { if !sysproxy_exe.exists() {
log::error!(target: "app", "sysproxy.exe 不存在"); logging!(error, Type::Network, "sysproxy.exe 不存在");
return;
} }
anyhow::ensure!(sysproxy_exe.exists(), "sysproxy.exe does not exist");
let output = Command::new(sysproxy_exe) let _output = Command::new(sysproxy_exe)
.args(args) .args(args)
.creation_flags(0x08000000) // CREATE_NO_WINDOW - 隐藏窗口 .creation_flags(0x08000000) // CREATE_NO_WINDOW - 隐藏窗口
.output() .output()
.await; .await?;
match output { Ok(())
Ok(output) => {
if !output.status.success() {
log::error!(target: "app", "执行sysproxy命令失败: {:?}", args);
let stderr = String::from_utf8_lossy(&output.stderr);
if !stderr.is_empty() {
log::error!(target: "app", "sysproxy错误输出: {}", stderr);
}
} else {
log::debug!(target: "app", "成功执行sysproxy命令: {:?}", args);
}
}
Err(e) => {
log::error!(target: "app", "执行sysproxy命令出错: {}", e);
}
}
} }
} }

View File

@@ -1,269 +1,24 @@
use once_cell::sync::OnceCell; use crate::{APP_HANDLE, constants::timing, singleton};
use parking_lot::RwLock; use parking_lot::RwLock;
use std::{ use smartstring::alias::String;
sync::{ use std::{sync::Arc, thread};
atomic::{AtomicU64, Ordering}, use tauri::{AppHandle, Manager, WebviewWindow};
mpsc, Arc, use tauri_plugin_mihomo::{Mihomo, MihomoExt};
}, use tokio::sync::RwLockReadGuard;
thread,
time::{Duration, Instant},
};
use tauri::{AppHandle, Emitter, Manager, WebviewWindow};
use crate::{logging, utils::logging::Type}; use super::notification::{ErrorMessage, FrontendEvent, NotificationSystem};
/// 不同类型的前端通知
#[derive(Debug, Clone)]
enum FrontendEvent {
RefreshClash,
RefreshVerge,
NoticeMessage { status: String, message: String },
ProfileChanged { current_profile_id: String },
TimerUpdated { profile_index: String },
StartupCompleted,
ProfileUpdateStarted { uid: String },
ProfileUpdateCompleted { uid: String },
}
/// 事件发送统计和监控
#[derive(Debug, Default)]
struct EventStats {
total_sent: AtomicU64,
total_errors: AtomicU64,
last_error_time: RwLock<Option<Instant>>,
}
/// 存储启动期间的错误消息
#[derive(Debug, Clone)]
struct ErrorMessage {
status: String,
message: String,
}
/// 全局前端通知系统
#[derive(Debug)]
struct NotificationSystem {
sender: Option<mpsc::Sender<FrontendEvent>>,
worker_handle: Option<thread::JoinHandle<()>>,
is_running: bool,
stats: EventStats,
last_emit_time: RwLock<Instant>,
/// 当通知系统失败超过阈值时,进入紧急模式
emergency_mode: RwLock<bool>,
}
impl Default for NotificationSystem {
fn default() -> Self {
Self::new()
}
}
impl NotificationSystem {
fn new() -> Self {
Self {
sender: None,
worker_handle: None,
is_running: false,
stats: EventStats::default(),
last_emit_time: RwLock::new(Instant::now()),
emergency_mode: RwLock::new(false),
}
}
/// 启动通知处理线程
fn start(&mut self) {
if self.is_running {
return;
}
let (tx, rx) = mpsc::channel();
self.sender = Some(tx);
self.is_running = true;
*self.last_emit_time.write() = Instant::now();
self.worker_handle = Some(
thread::Builder::new()
.name("frontend-notifier".into())
.spawn(move || {
let handle = Handle::global();
while !handle.is_exiting() {
match rx.recv_timeout(Duration::from_millis(100)) {
Ok(event) => {
let system_guard = handle.notification_system.read();
if system_guard.as_ref().is_none() {
log::warn!("NotificationSystem not found in handle while processing event.");
continue;
}
let system = system_guard.as_ref().unwrap();
let is_emergency = *system.emergency_mode.read();
if is_emergency {
if let FrontendEvent::NoticeMessage { ref status, .. } = event {
if status == "info" {
log::warn!(
"Emergency mode active, skipping info message"
);
continue;
}
}
}
if let Some(window) = handle.get_window() {
*system.last_emit_time.write() = Instant::now();
let (event_name_str, payload_result) = match event {
FrontendEvent::RefreshClash => {
("verge://refresh-clash-config", Ok(serde_json::json!("yes")))
}
FrontendEvent::RefreshVerge => {
("verge://refresh-verge-config", Ok(serde_json::json!("yes")))
}
FrontendEvent::NoticeMessage { status, message } => {
match serde_json::to_value((status, message)) {
Ok(p) => ("verge://notice-message", Ok(p)),
Err(e) => {
log::error!("Failed to serialize NoticeMessage payload: {e}");
("verge://notice-message", Err(e))
}
}
}
FrontendEvent::ProfileChanged { current_profile_id } => {
("profile-changed", Ok(serde_json::json!(current_profile_id)))
}
FrontendEvent::TimerUpdated { profile_index } => {
("verge://timer-updated", Ok(serde_json::json!(profile_index)))
}
FrontendEvent::StartupCompleted => {
("verge://startup-completed", Ok(serde_json::json!(null)))
}
FrontendEvent::ProfileUpdateStarted { uid } => {
("profile-update-started", Ok(serde_json::json!({ "uid": uid })))
}
FrontendEvent::ProfileUpdateCompleted { uid } => {
("profile-update-completed", Ok(serde_json::json!({ "uid": uid })))
}
};
if let Ok(payload) = payload_result {
match window.emit(event_name_str, payload) {
Ok(_) => {
system.stats.total_sent.fetch_add(1, Ordering::SeqCst);
// 记录成功发送的事件
if log::log_enabled!(log::Level::Debug) {
log::debug!("Successfully emitted event: {event_name_str}");
}
}
Err(e) => {
log::warn!("Failed to emit event {event_name_str}: {e}");
system.stats.total_errors.fetch_add(1, Ordering::SeqCst);
*system.stats.last_error_time.write() = Some(Instant::now());
let errors = system.stats.total_errors.load(Ordering::SeqCst);
const EMIT_ERROR_THRESHOLD: u64 = 10;
if errors > EMIT_ERROR_THRESHOLD && !*system.emergency_mode.read() {
log::warn!(
"Reached {EMIT_ERROR_THRESHOLD} emit errors, entering emergency mode"
);
*system.emergency_mode.write() = true;
}
}
}
} else {
system.stats.total_errors.fetch_add(1, Ordering::SeqCst);
*system.stats.last_error_time.write() = Some(Instant::now());
log::warn!("Skipped emitting event due to payload serialization error for {event_name_str}");
}
} else {
log::warn!("No window found, skipping event emit.");
}
thread::sleep(Duration::from_millis(20));
}
Err(mpsc::RecvTimeoutError::Timeout) => {
continue;
}
Err(mpsc::RecvTimeoutError::Disconnected) => {
log::info!(
"Notification channel disconnected, exiting worker thread"
);
break;
}
}
}
log::info!("Notification worker thread exiting");
})
.expect("Failed to start notification worker thread"),
);
}
/// 发送事件到队列
fn send_event(&self, event: FrontendEvent) -> bool {
if *self.emergency_mode.read() {
if let FrontendEvent::NoticeMessage { ref status, .. } = event {
if status == "info" {
log::info!("Skipping info message in emergency mode");
return false;
}
}
}
if let Some(sender) = &self.sender {
match sender.send(event) {
Ok(_) => true,
Err(e) => {
log::warn!("Failed to send event to notification queue: {e:?}");
self.stats.total_errors.fetch_add(1, Ordering::SeqCst);
*self.stats.last_error_time.write() = Some(Instant::now());
false
}
}
} else {
log::warn!("Notification system not started, can't send event");
false
}
}
fn shutdown(&mut self) {
log::info!("NotificationSystem shutdown initiated");
self.is_running = false;
// 先关闭发送端,让接收端知道不会再有新消息
if let Some(sender) = self.sender.take() {
drop(sender);
}
// 设置超时避免无限等待
if let Some(handle) = self.worker_handle.take() {
match handle.join() {
Ok(_) => {
log::info!("NotificationSystem worker thread joined successfully");
}
Err(e) => {
log::error!("NotificationSystem worker thread join failed: {e:?}");
}
}
}
log::info!("NotificationSystem shutdown completed");
}
}
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct Handle { pub struct Handle {
pub app_handle: Arc<RwLock<Option<AppHandle>>>, is_exiting: Arc<RwLock<bool>>,
pub is_exiting: Arc<RwLock<bool>>,
startup_errors: Arc<RwLock<Vec<ErrorMessage>>>, startup_errors: Arc<RwLock<Vec<ErrorMessage>>>,
startup_completed: Arc<RwLock<bool>>, startup_completed: Arc<RwLock<bool>>,
notification_system: Arc<RwLock<Option<NotificationSystem>>>, pub(crate) notification_system: Arc<RwLock<Option<NotificationSystem>>>,
} }
impl Default for Handle { impl Default for Handle {
fn default() -> Self { fn default() -> Self {
Self { Self {
app_handle: Arc::new(RwLock::new(None)),
is_exiting: Arc::new(RwLock::new(false)), is_exiting: Arc::new(RwLock::new(false)),
startup_errors: Arc::new(RwLock::new(Vec::new())), startup_errors: Arc::new(RwLock::new(Vec::new())),
startup_completed: Arc::new(RwLock::new(false)), startup_completed: Arc::new(RwLock::new(false)),
@@ -272,35 +27,37 @@ impl Default for Handle {
} }
} }
singleton!(Handle, HANDLE);
impl Handle { impl Handle {
pub fn global() -> &'static Handle { pub fn new() -> Self {
static HANDLE: OnceCell<Handle> = OnceCell::new(); Self::default()
HANDLE.get_or_init(Handle::default)
} }
pub fn init(&self, app_handle: &AppHandle) { pub fn init(&self) {
{ if self.is_exiting() {
let mut handle = self.app_handle.write(); return;
*handle = Some(app_handle.clone());
} }
let mut system_opt = self.notification_system.write(); let mut system_opt = self.notification_system.write();
if let Some(system) = system_opt.as_mut() { if let Some(system) = system_opt.as_mut()
&& !system.is_running
{
system.start(); system.start();
} }
} }
pub fn app_handle(&self) -> Option<AppHandle> { pub fn app_handle() -> &'static AppHandle {
self.app_handle.read().clone() #[allow(clippy::expect_used)]
APP_HANDLE.get().expect("App handle not initialized")
} }
pub fn get_window(&self) -> Option<WebviewWindow> { pub async fn mihomo() -> RwLockReadGuard<'static, Mihomo> {
let app_handle = self.app_handle()?; Self::app_handle().mihomo().read().await
let window: Option<WebviewWindow> = app_handle.get_webview_window("main"); }
if window.is_none() {
log::debug!(target:"app", "main window not found"); pub fn get_window() -> Option<WebviewWindow> {
} Self::app_handle().get_webview_window("main")
window
} }
pub fn refresh_clash() { pub fn refresh_clash() {
@@ -328,99 +85,29 @@ impl Handle {
} }
pub fn notify_profile_changed(profile_id: String) { pub fn notify_profile_changed(profile_id: String) {
let handle = Self::global(); Self::send_event(FrontendEvent::ProfileChanged {
if handle.is_exiting() { current_profile_id: profile_id,
return; });
}
let system_opt = handle.notification_system.read();
if let Some(system) = system_opt.as_ref() {
system.send_event(FrontendEvent::ProfileChanged {
current_profile_id: profile_id,
});
} else {
log::warn!(
"Notification system not initialized when trying to send ProfileChanged event."
);
}
} }
pub fn notify_timer_updated(profile_index: String) { pub fn notify_timer_updated(profile_index: String) {
let handle = Self::global(); Self::send_event(FrontendEvent::TimerUpdated { profile_index });
if handle.is_exiting() {
return;
}
let system_opt = handle.notification_system.read();
if let Some(system) = system_opt.as_ref() {
system.send_event(FrontendEvent::TimerUpdated { profile_index });
} else {
log::warn!(
"Notification system not initialized when trying to send TimerUpdated event."
);
}
}
pub fn notify_startup_completed() {
let handle = Self::global();
if handle.is_exiting() {
return;
}
let system_opt = handle.notification_system.read();
if let Some(system) = system_opt.as_ref() {
system.send_event(FrontendEvent::StartupCompleted);
} else {
log::warn!(
"Notification system not initialized when trying to send StartupCompleted event."
);
}
} }
pub fn notify_profile_update_started(uid: String) { pub fn notify_profile_update_started(uid: String) {
let handle = Self::global(); Self::send_event(FrontendEvent::ProfileUpdateStarted { uid });
if handle.is_exiting() {
return;
}
let system_opt = handle.notification_system.read();
if let Some(system) = system_opt.as_ref() {
system.send_event(FrontendEvent::ProfileUpdateStarted { uid });
} else {
log::warn!("Notification system not initialized when trying to send ProfileUpdateStarted event.");
}
} }
pub fn notify_profile_update_completed(uid: String) { pub fn notify_profile_update_completed(uid: String) {
let handle = Self::global(); Self::send_event(FrontendEvent::ProfileUpdateCompleted { uid });
if handle.is_exiting() {
return;
}
let system_opt = handle.notification_system.read();
if let Some(system) = system_opt.as_ref() {
system.send_event(FrontendEvent::ProfileUpdateCompleted { uid });
} else {
log::warn!("Notification system not initialized when trying to send ProfileUpdateCompleted event.");
}
} }
/// 通知前端显示消息队列
pub fn notice_message<S: Into<String>, M: Into<String>>(status: S, msg: M) { pub fn notice_message<S: Into<String>, M: Into<String>>(status: S, msg: M) {
let handle = Self::global(); let handle = Self::global();
let status_str = status.into(); let status_str = status.into();
let msg_str = msg.into(); let msg_str = msg.into();
if !*handle.startup_completed.read() { if !*handle.startup_completed.read() {
logging!(
info,
Type::Frontend,
true,
"启动过程中发现错误,加入消息队列: {} - {}",
status_str,
msg_str
);
let mut errors = handle.startup_errors.write(); let mut errors = handle.startup_errors.write();
errors.push(ErrorMessage { errors.push(ErrorMessage {
status: status_str, status: status_str,
@@ -433,25 +120,29 @@ impl Handle {
return; return;
} }
Self::send_event(FrontendEvent::NoticeMessage {
status: status_str,
message: msg_str,
});
}
fn send_event(event: FrontendEvent) {
let handle = Self::global();
if handle.is_exiting() {
return;
}
let system_opt = handle.notification_system.read(); let system_opt = handle.notification_system.read();
if let Some(system) = system_opt.as_ref() { if let Some(system) = system_opt.as_ref() {
system.send_event(FrontendEvent::NoticeMessage { system.send_event(event);
status: status_str,
message: msg_str,
});
} }
} }
pub fn mark_startup_completed(&self) { pub fn mark_startup_completed(&self) {
{ *self.startup_completed.write() = true;
let mut completed = self.startup_completed.write();
*completed = true;
}
self.send_startup_errors(); self.send_startup_errors();
} }
/// 发送启动时累积的所有错误消息
fn send_startup_errors(&self) { fn send_startup_errors(&self) {
let errors = { let errors = {
let mut errors = self.startup_errors.write(); let mut errors = self.startup_errors.write();
@@ -462,19 +153,10 @@ impl Handle {
return; return;
} }
logging!( let _ = thread::Builder::new()
info,
Type::Frontend,
true,
"发送{}条启动时累积的错误消息",
errors.len()
);
// 启动单独线程处理启动错误,避免阻塞主线程
let thread_result = thread::Builder::new()
.name("startup-errors-sender".into()) .name("startup-errors-sender".into())
.spawn(move || { .spawn(move || {
thread::sleep(Duration::from_secs(2)); thread::sleep(timing::STARTUP_ERROR_DELAY);
let handle = Handle::global(); let handle = Handle::global();
if handle.is_exiting() { if handle.is_exiting() {
@@ -493,19 +175,14 @@ impl Handle {
message: error.message, message: error.message,
}); });
thread::sleep(Duration::from_millis(300)); thread::sleep(timing::ERROR_BATCH_DELAY);
} }
} }
}); });
if let Err(e) = thread_result {
log::error!("Failed to spawn startup errors thread: {e}");
}
} }
pub fn set_is_exiting(&self) { pub fn set_is_exiting(&self) {
let mut is_exiting = self.is_exiting.write(); *self.is_exiting.write() = true;
*is_exiting = true;
let mut system_opt = self.notification_system.write(); let mut system_opt = self.notification_system.write();
if let Some(system) = system_opt.as_mut() { if let Some(system) = system_opt.as_mut() {
@@ -517,3 +194,20 @@ impl Handle {
*self.is_exiting.read() *self.is_exiting.read()
} }
} }
#[cfg(target_os = "macos")]
impl Handle {
pub fn set_activation_policy(&self, policy: tauri::ActivationPolicy) -> Result<(), String> {
Self::app_handle()
.set_activation_policy(policy)
.map_err(|e| e.to_string().into())
}
pub fn set_activation_policy_regular(&self) {
let _ = self.set_activation_policy(tauri::ActivationPolicy::Regular);
}
pub fn set_activation_policy_accessory(&self) {
let _ = self.set_activation_policy(tauri::ActivationPolicy::Accessory);
}
}

View File

@@ -1,36 +1,284 @@
use crate::utils::notification::{notify_event, NotificationEvent}; use crate::process::AsyncHandler;
use crate::utils::notification::{NotificationEvent, notify_event};
use crate::{ use crate::{
config::Config, core::handle, feat, logging, logging_error, config::Config, core::handle, feat, logging, module::lightweight::entry_lightweight_mode,
module::lightweight::entry_lightweight_mode, utils::logging::Type, singleton_with_logging, utils::logging::Type,
}; };
use anyhow::{bail, Result}; use anyhow::{Result, bail};
use once_cell::sync::OnceCell;
use parking_lot::Mutex; use parking_lot::Mutex;
use std::{collections::HashMap, sync::Arc}; use smartstring::alias::String;
use tauri::Manager; use std::{collections::HashMap, fmt, str::FromStr, sync::Arc};
use tauri_plugin_global_shortcut::{Code, GlobalShortcutExt, ShortcutState}; use tauri_plugin_global_shortcut::{Code, GlobalShortcutExt, ShortcutState};
/// Enum representing all available hotkey functions
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum HotkeyFunction {
OpenOrCloseDashboard,
ClashModeRule,
ClashModeGlobal,
ClashModeDirect,
ToggleSystemProxy,
ToggleTunMode,
EntryLightweightMode,
Quit,
#[cfg(target_os = "macos")]
Hide,
}
impl fmt::Display for HotkeyFunction {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let s = match self {
HotkeyFunction::OpenOrCloseDashboard => "open_or_close_dashboard",
HotkeyFunction::ClashModeRule => "clash_mode_rule",
HotkeyFunction::ClashModeGlobal => "clash_mode_global",
HotkeyFunction::ClashModeDirect => "clash_mode_direct",
HotkeyFunction::ToggleSystemProxy => "toggle_system_proxy",
HotkeyFunction::ToggleTunMode => "toggle_tun_mode",
HotkeyFunction::EntryLightweightMode => "entry_lightweight_mode",
HotkeyFunction::Quit => "quit",
#[cfg(target_os = "macos")]
HotkeyFunction::Hide => "hide",
};
write!(f, "{s}")
}
}
impl FromStr for HotkeyFunction {
type Err = anyhow::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s.trim() {
"open_or_close_dashboard" => Ok(HotkeyFunction::OpenOrCloseDashboard),
"clash_mode_rule" => Ok(HotkeyFunction::ClashModeRule),
"clash_mode_global" => Ok(HotkeyFunction::ClashModeGlobal),
"clash_mode_direct" => Ok(HotkeyFunction::ClashModeDirect),
"toggle_system_proxy" => Ok(HotkeyFunction::ToggleSystemProxy),
"toggle_tun_mode" => Ok(HotkeyFunction::ToggleTunMode),
"entry_lightweight_mode" => Ok(HotkeyFunction::EntryLightweightMode),
"quit" => Ok(HotkeyFunction::Quit),
#[cfg(target_os = "macos")]
"hide" => Ok(HotkeyFunction::Hide),
_ => bail!("invalid hotkey function: {}", s),
}
}
}
#[cfg(target_os = "macos")]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
/// Enum representing predefined system hotkeys
pub enum SystemHotkey {
CmdQ,
CmdW,
}
#[cfg(target_os = "macos")]
impl fmt::Display for SystemHotkey {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let s = match self {
SystemHotkey::CmdQ => "CMD+Q",
SystemHotkey::CmdW => "CMD+W",
};
write!(f, "{s}")
}
}
#[cfg(target_os = "macos")]
impl SystemHotkey {
pub fn function(self) -> HotkeyFunction {
match self {
SystemHotkey::CmdQ => HotkeyFunction::Quit,
SystemHotkey::CmdW => HotkeyFunction::Hide,
}
}
}
pub struct Hotkey { pub struct Hotkey {
current: Arc<Mutex<Vec<String>>>, current: Arc<Mutex<Vec<String>>>,
} }
impl Hotkey { impl Hotkey {
pub fn global() -> &'static Hotkey { fn new() -> Self {
static HOTKEY: OnceCell<Hotkey> = OnceCell::new(); Self {
HOTKEY.get_or_init(|| Hotkey {
current: Arc::new(Mutex::new(Vec::new())), current: Arc::new(Mutex::new(Vec::new())),
}) }
} }
pub fn init(&self) -> Result<()> { /// Execute the function associated with a hotkey function enum
let verge = Config::verge(); fn execute_function(function: HotkeyFunction) {
let enable_global_hotkey = verge.latest().enable_global_hotkey.unwrap_or(true); match function {
HotkeyFunction::OpenOrCloseDashboard => {
AsyncHandler::spawn(async move || {
crate::feat::open_or_close_dashboard().await;
notify_event(NotificationEvent::DashboardToggled).await;
});
}
HotkeyFunction::ClashModeRule => {
AsyncHandler::spawn(async move || {
feat::change_clash_mode("rule".into()).await;
notify_event(NotificationEvent::ClashModeChanged { mode: "Rule" }).await;
});
}
HotkeyFunction::ClashModeGlobal => {
AsyncHandler::spawn(async move || {
feat::change_clash_mode("global".into()).await;
notify_event(NotificationEvent::ClashModeChanged { mode: "Global" }).await;
});
}
HotkeyFunction::ClashModeDirect => {
AsyncHandler::spawn(async move || {
feat::change_clash_mode("direct".into()).await;
notify_event(NotificationEvent::ClashModeChanged { mode: "Direct" }).await;
});
}
HotkeyFunction::ToggleSystemProxy => {
AsyncHandler::spawn(async move || {
feat::toggle_system_proxy().await;
notify_event(NotificationEvent::SystemProxyToggled).await;
});
}
HotkeyFunction::ToggleTunMode => {
AsyncHandler::spawn(async move || {
feat::toggle_tun_mode(None).await;
notify_event(NotificationEvent::TunModeToggled).await;
});
}
HotkeyFunction::EntryLightweightMode => {
AsyncHandler::spawn(async move || {
entry_lightweight_mode().await;
notify_event(NotificationEvent::LightweightModeEntered).await;
});
}
HotkeyFunction::Quit => {
AsyncHandler::spawn(async move || {
notify_event(NotificationEvent::AppQuit).await;
feat::quit().await;
});
}
#[cfg(target_os = "macos")]
HotkeyFunction::Hide => {
AsyncHandler::spawn(async move || {
feat::hide().await;
notify_event(NotificationEvent::AppHidden).await;
});
}
}
}
#[cfg(target_os = "macos")]
/// Register a system hotkey using enum
pub async fn register_system_hotkey(&self, hotkey: SystemHotkey) -> Result<()> {
let hotkey_str = hotkey.to_string();
let function = hotkey.function();
self.register_hotkey_with_function(&hotkey_str, function)
.await
}
#[cfg(target_os = "macos")]
/// Unregister a system hotkey using enum
pub fn unregister_system_hotkey(&self, hotkey: SystemHotkey) -> Result<()> {
let hotkey_str = hotkey.to_string();
self.unregister(&hotkey_str)
}
/// Register a hotkey with function enum
#[allow(clippy::unused_async)]
pub async fn register_hotkey_with_function(
&self,
hotkey: &str,
function: HotkeyFunction,
) -> Result<()> {
let app_handle = handle::Handle::app_handle();
let manager = app_handle.global_shortcut();
logging!(
debug,
Type::Hotkey,
"Attempting to register hotkey: {} for function: {}",
hotkey,
function
);
if manager.is_registered(hotkey) {
logging!(
debug,
Type::Hotkey,
"Hotkey {} was already registered, unregistering first",
hotkey
);
manager.unregister(hotkey)?;
}
let is_quit = matches!(function, HotkeyFunction::Quit);
manager.on_shortcut(hotkey, move |_app_handle, hotkey_event, event| {
let hotkey_event_owned = *hotkey_event;
let event_owned = event;
let function_owned = function;
let is_quit_owned = is_quit;
AsyncHandler::spawn(move || async move {
if event_owned.state == ShortcutState::Pressed {
logging!(
debug,
Type::Hotkey,
"Hotkey pressed: {:?}",
hotkey_event_owned
);
if hotkey_event_owned.key == Code::KeyQ && is_quit_owned {
if let Some(window) = handle::Handle::get_window()
&& window.is_focused().unwrap_or(false)
{
logging!(debug, Type::Hotkey, "Executing quit function");
Self::execute_function(function_owned);
}
} else {
logging!(debug, Type::Hotkey, "Executing function directly");
let is_enable_global_hotkey = Config::verge()
.await
.latest_ref()
.enable_global_hotkey
.unwrap_or(true);
if is_enable_global_hotkey {
Self::execute_function(function_owned);
} else {
use crate::utils::window_manager::WindowManager;
let is_visible = WindowManager::is_main_window_visible();
let is_focused = WindowManager::is_main_window_focused();
if is_focused && is_visible {
Self::execute_function(function_owned);
}
}
}
}
});
})?;
logging!(
debug,
Type::Hotkey,
"Successfully registered hotkey {} for {}",
hotkey,
function
);
Ok(())
}
}
// Use the unified singleton macro to expose the process-wide `Hotkey`
// instance backed by a static `INSTANCE`; presumably creation is logged
// under the "Hotkey" tag — see the macro definition for exact semantics.
singleton_with_logging!(Hotkey, INSTANCE, "Hotkey");
impl Hotkey {
pub async fn init(&self) -> Result<()> {
let verge = Config::verge().await;
let enable_global_hotkey = verge.latest_ref().enable_global_hotkey.unwrap_or(true);
logging!( logging!(
debug, debug,
Type::Hotkey, Type::Hotkey,
true,
"Initializing global hotkeys: {}", "Initializing global hotkeys: {}",
enable_global_hotkey enable_global_hotkey
); );
@@ -39,11 +287,13 @@ impl Hotkey {
return Ok(()); return Ok(());
} }
if let Some(hotkeys) = verge.latest().hotkeys.as_ref() { // Extract hotkeys data before async operations
let hotkeys = verge.latest_ref().hotkeys.as_ref().cloned();
if let Some(hotkeys) = hotkeys {
logging!( logging!(
debug, debug,
Type::Hotkey, Type::Hotkey,
true,
"Has {} hotkeys need to register", "Has {} hotkeys need to register",
hotkeys.len() hotkeys.len()
); );
@@ -58,16 +308,14 @@ impl Hotkey {
logging!( logging!(
debug, debug,
Type::Hotkey, Type::Hotkey,
true,
"Registering hotkey: {} -> {}", "Registering hotkey: {} -> {}",
key, key,
func func
); );
if let Err(e) = self.register(key, func) { if let Err(e) = self.register(key, func).await {
logging!( logging!(
error, error,
Type::Hotkey, Type::Hotkey,
true,
"Failed to register hotkey {} -> {}: {:?}", "Failed to register hotkey {} -> {}: {:?}",
key, key,
func, func,
@@ -89,7 +337,6 @@ impl Hotkey {
logging!( logging!(
error, error,
Type::Hotkey, Type::Hotkey,
true,
"Invalid hotkey configuration: `{}`:`{}`", "Invalid hotkey configuration: `{}`:`{}`",
key, key,
func func
@@ -97,7 +344,7 @@ impl Hotkey {
} }
} }
} }
self.current.lock().clone_from(hotkeys); self.current.lock().clone_from(&hotkeys);
} else { } else {
logging!(debug, Type::Hotkey, "No hotkeys configured"); logging!(debug, Type::Hotkey, "No hotkeys configured");
} }
@@ -106,192 +353,30 @@ impl Hotkey {
} }
pub fn reset(&self) -> Result<()> { pub fn reset(&self) -> Result<()> {
let app_handle = handle::Handle::global().app_handle().unwrap(); let app_handle = handle::Handle::app_handle();
let manager = app_handle.global_shortcut(); let manager = app_handle.global_shortcut();
manager.unregister_all()?; manager.unregister_all()?;
Ok(()) Ok(())
} }
pub fn register(&self, hotkey: &str, func: &str) -> Result<()> { /// Register a hotkey with string-based function (backward compatibility)
let app_handle = handle::Handle::global().app_handle().unwrap(); pub async fn register(&self, hotkey: &str, func: &str) -> Result<()> {
let manager = app_handle.global_shortcut(); let function = HotkeyFunction::from_str(func)?;
self.register_hotkey_with_function(hotkey, function).await
logging!(
debug,
Type::Hotkey,
"Attempting to register hotkey: {} for function: {}",
hotkey,
func
);
if manager.is_registered(hotkey) {
logging!(
debug,
Type::Hotkey,
"Hotkey {} was already registered, unregistering first",
hotkey
);
manager.unregister(hotkey)?;
}
let app_handle_clone = app_handle.clone();
let f: Box<dyn Fn() + Send + Sync> = match func.trim() {
"open_or_close_dashboard" => {
let app_handle = app_handle_clone.clone();
Box::new(move || {
logging!(
debug,
Type::Hotkey,
true,
"=== Hotkey Dashboard Window Operation Start ==="
);
logging!(
info,
Type::Hotkey,
true,
"Using unified WindowManager for hotkey operation (bypass debounce)"
);
crate::feat::open_or_close_dashboard_hotkey();
logging!(
debug,
Type::Hotkey,
"=== Hotkey Dashboard Window Operation End ==="
);
notify_event(&app_handle, NotificationEvent::DashboardToggled);
})
}
"clash_mode_rule" => {
let app_handle = app_handle_clone.clone();
Box::new(move || {
feat::change_clash_mode("rule".into());
notify_event(
&app_handle,
NotificationEvent::ClashModeChanged { mode: "Rule" },
);
})
}
"clash_mode_global" => {
let app_handle = app_handle_clone.clone();
Box::new(move || {
feat::change_clash_mode("global".into());
notify_event(
&app_handle,
NotificationEvent::ClashModeChanged { mode: "Global" },
);
})
}
"clash_mode_direct" => {
let app_handle = app_handle_clone.clone();
Box::new(move || {
feat::change_clash_mode("direct".into());
notify_event(
&app_handle,
NotificationEvent::ClashModeChanged { mode: "Direct" },
);
})
}
"toggle_system_proxy" => {
let app_handle = app_handle_clone.clone();
Box::new(move || {
feat::toggle_system_proxy();
notify_event(&app_handle, NotificationEvent::SystemProxyToggled);
})
}
"toggle_tun_mode" => {
let app_handle = app_handle_clone.clone();
Box::new(move || {
feat::toggle_tun_mode(None);
notify_event(&app_handle, NotificationEvent::TunModeToggled);
})
}
"entry_lightweight_mode" => {
let app_handle = app_handle_clone.clone();
Box::new(move || {
entry_lightweight_mode();
notify_event(&app_handle, NotificationEvent::LightweightModeEntered);
})
}
"quit" => {
let app_handle = app_handle_clone.clone();
Box::new(move || {
feat::quit();
notify_event(&app_handle, NotificationEvent::AppQuit);
})
}
#[cfg(target_os = "macos")]
"hide" => {
let app_handle = app_handle_clone.clone();
Box::new(move || {
feat::hide();
notify_event(&app_handle, NotificationEvent::AppHidden);
})
}
_ => {
logging!(error, Type::Hotkey, "Invalid function: {}", func);
bail!("invalid function \"{func}\"");
}
};
let is_quit = func.trim() == "quit";
let _ = manager.on_shortcut(hotkey, move |app_handle, hotkey, event| {
if event.state == ShortcutState::Pressed {
logging!(debug, Type::Hotkey, "Hotkey pressed: {:?}", hotkey);
if hotkey.key == Code::KeyQ && is_quit {
if let Some(window) = app_handle.get_webview_window("main") {
if window.is_focused().unwrap_or(false) {
logging!(debug, Type::Hotkey, "Executing quit function");
f();
}
}
} else {
logging!(debug, Type::Hotkey, "Executing function directly");
let is_enable_global_hotkey = Config::verge()
.latest()
.enable_global_hotkey
.unwrap_or(true);
if is_enable_global_hotkey {
f();
} else {
use crate::utils::window_manager::WindowManager;
let is_visible = WindowManager::is_main_window_visible();
let is_focused = WindowManager::is_main_window_focused();
if is_focused && is_visible {
f();
}
}
}
}
});
logging!(
debug,
Type::Hotkey,
"Successfully registered hotkey {} for {}",
hotkey,
func
);
Ok(())
} }
pub fn unregister(&self, hotkey: &str) -> Result<()> { pub fn unregister(&self, hotkey: &str) -> Result<()> {
let app_handle = handle::Handle::global().app_handle().unwrap(); let app_handle = handle::Handle::app_handle();
let manager = app_handle.global_shortcut(); let manager = app_handle.global_shortcut();
manager.unregister(hotkey)?; manager.unregister(hotkey)?;
logging!(debug, Type::Hotkey, "Unregister hotkey {}", hotkey); logging!(debug, Type::Hotkey, "Unregister hotkey {}", hotkey);
Ok(()) Ok(())
} }
pub fn update(&self, new_hotkeys: Vec<String>) -> Result<()> { pub async fn update(&self, new_hotkeys: Vec<String>) -> Result<()> {
let mut current = self.current.lock(); // Extract current hotkeys before async operations
let old_map = Self::get_map_from_vec(&current); let current_hotkeys = self.current.lock().clone();
let old_map = Self::get_map_from_vec(&current_hotkeys);
let new_map = Self::get_map_from_vec(&new_hotkeys); let new_map = Self::get_map_from_vec(&new_hotkeys);
let (del, add) = Self::get_diff(old_map, new_map); let (del, add) = Self::get_diff(old_map, new_map);
@@ -300,11 +385,12 @@ impl Hotkey {
let _ = self.unregister(key); let _ = self.unregister(key);
}); });
add.iter().for_each(|(key, func)| { for (key, func) in add.iter() {
logging_error!(Type::Hotkey, self.register(key, func)); self.register(key, func).await?;
}); }
*current = new_hotkeys; // Update the current hotkeys after all async operations
*self.current.lock() = new_hotkeys;
Ok(()) Ok(())
} }
@@ -356,12 +442,11 @@ impl Hotkey {
impl Drop for Hotkey { impl Drop for Hotkey {
fn drop(&mut self) { fn drop(&mut self) {
let app_handle = handle::Handle::global().app_handle().unwrap(); let app_handle = handle::Handle::app_handle();
if let Err(e) = app_handle.global_shortcut().unregister_all() { if let Err(e) = app_handle.global_shortcut().unregister_all() {
logging!( logging!(
error, error,
Type::Hotkey, Type::Hotkey,
true,
"Error unregistering all hotkeys: {:?}", "Error unregistering all hotkeys: {:?}",
e e
); );

View File

@@ -0,0 +1,6 @@
use std::sync::Arc;
use clash_verge_logger::AsyncLogger;
use once_cell::sync::Lazy;
/// Process-wide async logger, lazily initialized on first access and
/// shared across threads via `Arc`.
pub static CLASH_LOGGER: Lazy<Arc<AsyncLogger>> = Lazy::new(|| Arc::new(AsyncLogger::new()));

Some files were not shown because too many files have changed in this diff Show More