refactor: profile switch (#5197)
* refactor: proxy refresh
* fix(proxy-store): properly hydrate and filter backend provider snapshots
* fix(proxy-store): add monotonic fetch guard and event bridge cleanup
* fix(proxy-store): tweak fetch sequencing guard to prevent snapshot invalidation from wiping fast responses
* docs: UPDATELOG.md
* fix(proxy-snapshot, proxy-groups): restore last-selected proxy and group info
* fix(proxy): merge static and provider entries in snapshot; fix Virtuoso viewport height
* fix(proxy-groups): restrict reduced-height viewport to chain-mode column
* refactor(profiles): introduce a state machine
* refactor: replace state machine with reducer
* refactor: introduce a profile switch worker
* refactor: hooked up a backend-driven profile switch flow
* refactor(profile-switch): serialize switches with async queue and enrich frontend events
* feat(profiles): centralize profile switching with reducer/driver queue to fix stuck UI on rapid toggles
* chore: translate comments and log messages to English to avoid encoding issues
* refactor: migrate backend queue to SwitchDriver actor
* fix(profile): unify error string types in validation helper
* refactor(profile): make switch driver fully async and handle panics safely
* refactor(cmd): move switch-validation helper into new profile_switch module
* refactor(profile): modularize switch logic into profile_switch.rs
* refactor(profile_switch): modularize switch handler
- Break monolithic switch handler into proper module hierarchy
- Move shared globals, constants, and SwitchScope guard to state.rs
- Isolate queue orchestration and async task spawning in driver.rs
- Consolidate switch pipeline and config patching in workflow.rs
- Extract request pre-checks/YAML validation into validation.rs
* refactor(profile_switch): centralize state management and add cancellation flow
- Introduced SwitchManager in state.rs to unify mutex, sequencing, and SwitchScope handling.
- Added SwitchCancellation and SwitchRequest wrappers to encapsulate cancel tokens and notifications.
- Updated driver to allocate task IDs via SwitchManager, cancel old tokens, and queue next jobs in order.
- Updated workflow to check cancellation and sequence at each phase, replacing global flags with manager APIs.
* feat(profile_switch): integrate explicit state machine for profile switching
- workflow.rs:24 now delegates each switch to SwitchStateMachine, passing an owned SwitchRequest.
Queue cancellation and state-sequence checks are centralized inside the machine instead of scattered guards.
- workflow.rs:176 replaces the old helper with `SwitchStateMachine::new(manager(), None, profiles).run().await`,
ensuring manual profile patches follow the same workflow (locking, validation, rollback) as queued switches.
- workflow.rs:180 & 275 expose `validate_profile_yaml` and `restore_previous_profile` for reuse inside the state machine.
- workflow/state_machine.rs:1 introduces a dedicated state machine module.
It manages global mutex acquisition, request/cancellation state, YAML validation, draft patching,
`CoreManager::update_config`, failure rollback, and tray/notification side-effects.
Transitions check for cancellations and stale sequences; completions release guards via `SwitchScope` drop.
* refactor(profile-switch): integrate stage-aware panic handling
- src-tauri/src/cmd/profile_switch/workflow/state_machine.rs:1
Defines SwitchStage and SwitchPanicInfo as crate-visible, wraps each transition in with_stage(...) with catch_unwind, and propagates CmdResult<bool> to distinguish validation failures from panics while keeping cancellation semantics.
- src-tauri/src/cmd/profile_switch/workflow.rs:25
Updates run_switch_job to return Result<bool, SwitchPanicInfo>, routing timeout, validation, config, and stage panic cases separately. Reuses SwitchPanicInfo for logging/UI notifications; patch_profiles_config maps state-machine panics into user-facing error strings.
- src-tauri/src/cmd/profile_switch/driver.rs:1
Adds SwitchJobOutcome to unify workflow results: normal completions carry bool, and panics propagate SwitchPanicInfo. The driver loop now logs panics explicitly and uses AssertUnwindSafe(...).catch_unwind() to guard setup-phase panics.
* refactor(profile-switch): add watchdog, heartbeat, and async timeout guards
- Introduce SwitchHeartbeat for stage tracking and timing; log stage transitions with elapsed durations.
- Add watchdog in driver to cancel stalled switches (5s heartbeat timeout).
- Wrap blocking ops (Config::apply, tray updates, profiles_save_file_safe, etc.) with time::timeout to prevent async stalls.
- Improve logs for stage transitions and watchdog timeouts to clarify cancellation points.
* refactor(profile-switch): async post-switch tasks, early lock release, and spawn_blocking for IO
* feat(profile-switch): track cleanup and coordinate pipeline
- Add explicit cleanup tracking in the driver (`cleanup_profiles` map + `CleanupDone` messages) to know when background post-switch work is still running before starting a new workflow. (driver.rs:29-50)
- Update `handle_enqueue` to detect “cleanup in progress”: same-profile retries are short-circuited; other requests collapse the pending queue, cancelling old tokens so only the latest intent survives. (driver.rs:176-247)
- Rework scheduling helpers: `start_next_job` refuses to start while cleanup is outstanding; discarded requests release cancellation tokens; cleanup completion explicitly restarts the pipeline. (driver.rs:258-442)
* feat(profile-switch): unify post-switch cleanup handling
- workflow.rs (25-427) returns `SwitchWorkflowResult` (success + CleanupHandle) or `SwitchWorkflowError`.
All failure/timeout paths stash post-switch work into a single CleanupHandle.
Cleanup helpers (`notify_profile_switch_finished` and `close_connections_after_switch`) run inside that task for proper lifetime handling.
- driver.rs (29-439) propagates CleanupHandle through `SwitchJobOutcome`, spawns a bridge to wait for completion, and blocks `start_next_job` until done.
Direct driver-side panics now schedule failure cleanup via the shared helper.
* tmp
* Revert "tmp"
This reverts commit e582cf4a652231a67a7c951802cb19b385f6afd7.
* refactor: queue frontend events through async dispatcher
* refactor: queue frontend switch/proxy events and throttle notices
* chore: frontend debug log
* fix: re-enable only ProfileSwitchFinished events - keep others suppressed for crash isolation
- Re-enabled only ProfileSwitchFinished events; RefreshClash, RefreshProxy, and ProfileChanged remain suppressed (they log suppression messages)
- Allows frontend to receive task completion notifications for UI feedback while crash isolation continues
- src-tauri/src/core/handle.rs now only suppresses notify_profile_changed
- Serialized emitter, frontend logging bridge, and other diagnostics unchanged
* refactor: refreshClashData
* refactor(proxy): stabilize proxy switch pipeline and rendering
- Add coalescing buffer in notification.rs to emit only the latest proxies-updated snapshot
- Replace nextTick with queueMicrotask in asyncQueue.ts for same-frame hydration
- Hide auto-generated GLOBAL snapshot and preserve optional metadata in proxy-snapshot.ts
- Introduce stable proxy rendering state in AppDataProvider (proxyTargetProfileId, proxyDisplayProfileId, isProxyRefreshPending)
- Update proxy page to fade content during refresh and overlay status banner instead of showing incomplete snapshot
* refactor(profiles): move manual activating logic to reducer for deterministic queue tracking
* refactor: replace proxy-data event bridge with pure polling and simplify proxy store
- Replaced the proxy-data event bridge with pure polling: AppDataProvider now fetches the initial snapshot and drives refreshes from the polled switchStatus, removing verge://refresh-* listeners (src/providers/app-data-provider.tsx).
- Simplified proxy-store by dropping the proxies-updated listener queue and unused payload/normalizer helpers; relies on SWR/provider fetch path + calcuProxies for live updates (src/stores/proxy-store.ts).
- Trimmed layout-level event wiring to keep only notice/show/hide subscriptions, removing obsolete refresh listeners (src/pages/_layout/useLayoutEvents.ts).
* refactor(proxy): streamline proxies-updated handling and store event flow
- AppDataProvider now treats `proxies-updated` as the fast path: the listener
calls `applyLiveProxyPayload` immediately and schedules only a single fallback
`fetchLiveProxies` ~600 ms later (replacing the old 0/250/1000/2000 cascade).
Expensive provider/rule refreshes run in parallel via `Promise.allSettled`, and
the multi-stage queue on profile updates completion was removed
(src/providers/app-data-provider.tsx).
- Rebuilt proxy-store to support the event flow: restored `setLive`, provider
normalization, and an animation-frame + async queue that applies payloads without
blocking. Exposed `applyLiveProxyPayload` so providers can push events directly
into the store (src/stores/proxy-store.ts).
* refactor: switch delay
* refactor(app-data-provider): trigger getProfileSwitchStatus revalidation on profile-switch-finished
- AppDataProvider now listens to `profile-switch-finished` and calls `mutate("getProfileSwitchStatus")` to immediately update state and unlock buttons (src/providers/app-data-provider.tsx).
- Retain existing detailed timing logs for monitoring other stages.
- Frontend success notifications remain instant; background refreshes continue asynchronously.
* fix(profiles): prevent duplicate toast on page remount
* refactor(profile-switch): make active switches preemptible and prevent queue piling
- Add notify mechanism to SwitchCancellation to await cancellation without busy-waiting (state.rs:82)
- Collapse pending queue to a single entry in the driver; cancel in-flight task on newer request (driver.rs:232)
- Update handle_update_core to watch cancel token and 30s timeout; release locks, discard draft, and exit early if canceled (state_machine.rs:301)
- Providers revalidate status immediately on profile-switch-finished events (app-data-provider.tsx:208)
* refactor(core): make core reload phase controllable, reduce 0xcfffffff risk
- CoreManager::apply_config now calls `reload_config_with_retry`, each attempt waits up to 5s, retries 3 times; on failure, returns error with duration logged and triggers core restart if needed (src-tauri/src/core/manager/config.rs:175, 205)
- `reload_config_with_retry` logs attempt info on timeout or error; if error is a Mihomo connection issue, fallback to original restart logic (src-tauri/src/core/manager/config.rs:211)
- `reload_config_once` retains original Mihomo call for retry wrapper usage (src-tauri/src/core/manager/config.rs:247)
* chore(frontend-logs): downgrade routine event logs from info to debug
- Logs like `emit_via_app entering spawn_blocking`, `Async emit…`, `Buffered proxies…` are now debug-level (src-tauri/src/core/notification.rs:155, :265, :309…)
- Genuine warnings/errors (failures/timeouts) remain at warn/error
- Core stage logs remain info to keep backend tracking visible
* refactor(frontend-emit): make emit_via_app fire-and-forget async
- `emit_via_app` now a regular function; spawns with `tokio::spawn` and logs a warn if `emit_to` fails, caller returns immediately (src-tauri/src/core/notification.rs:269)
- Removed `.await` at Async emit and flush_proxies calls; only record dispatch duration and warn on failure (src-tauri/src/core/notification.rs:211, :329)
* refactor(ui): restructure profile switch for event-driven speed + polling stability
- Backend
- SwitchManager maintains a lightweight event queue: added `event_sequence`, `recent_events`, and `SwitchResultEvent`; provides `push_event` / `events_after` (state.rs)
- `handle_completion` pushes events on success/failure and keeps `last_result` (driver.rs) for frontend incremental fetch
- New Tauri command `get_profile_switch_events(after_sequence)` exposes `events_after` (profile_switch/mod.rs → profile.rs → lib.rs)
- Notification system
- `NotificationSystem::process_event` only logs debug, disables WebView `emit_to`, fixes 0xcfffffff
- Related emit/buffer functions now safe no-op, removed unused structures and warnings (notification.rs)
- Frontend
- services/cmds.ts defines `SwitchResultEvent` and `getProfileSwitchEvents`
- `AppDataProvider` holds `switchEventSeqRef`, polls incremental events every 0.25s (busy) / 1s (idle); each event triggers:
- immediate `globalMutate("getProfiles")` to refresh current profile
- background refresh of proxies/providers/rules via `Promise.allSettled` (failures logged, non-blocking)
- forced `mutateSwitchStatus` to correct state
- original switchStatus effect calls `handleSwitchResult` as fallback; other toast/activation logic handled in profiles.tsx
- Commands / API cleanup
- removed `pub use profile_switch::*;` in cmd::mod.rs to avoid conflicts; frontend uses new command polling
* refactor(frontend): optimize profile switch with optimistic updates
* refactor(profile-switch): switch to event-driven flow with Profile Store
- SwitchManager pushes events; frontend polls get_profile_switch_events
- Zustand store handles optimistic profiles; AppDataProvider applies updates and background-fetches
- UI flicker removed
* fix(app-data): re-hook profile store updates during switch hydration
* fix(notification): restore frontend event dispatch and non-blocking emits
* fix(app-data-provider): restore proxy refresh and seed snapshot after refactor
* fix: ensure switch completion events are received and handle proxies-updated
* fix(app-data-provider): dedupe switch results by taskId and fix stale profile state
* fix(profile-switch): ensure patch_profiles_config_by_profile_index waits for real completion and handle join failures in apply_config_with_timeout
* docs: UPDATELOG.md
* chore: add necessary comments
* fix(core): always dispatch async proxy snapshot after RefreshClash event
* fix(proxy-store, provider): handle pending snapshots and proxy profiles
- Added pending snapshot tracking in proxy-store so `lastAppliedFetchId` no longer jumps on seed. Profile adoption is deferred until a qualifying fetch completes. Exposed `clearPendingProfile` for rollback support.
- Cleared pending snapshot state whenever live payloads apply or the store resets, preventing stale optimistic profile IDs after failures.
- In provider integration, subscribed to the pending proxy profile and fed it into target-profile derivation. Cleared it on failed switch results so hydration can advance and UI status remains accurate.
* fix(proxy): re-hook tray refresh events into proxy refresh queue
- Reattached listen("verge://refresh-proxy-config", …) at src/providers/app-data-provider.tsx:402 and registered it for cleanup.
- Added matching window fallback handler at src/providers/app-data-provider.tsx:430 so in-app dispatches share the same refresh path.
* fix(proxy-snapshot/proxy-groups): address review findings on snapshot placeholders
- src/utils/proxy-snapshot.ts:72-95 now derives snapshot group members solely from proxy-groups.proxies, so provider ids under `use` no longer generate placeholder proxy items.
- src/components/proxy/proxy-groups.tsx:665-677 lets the hydration overlay capture pointer events (and shows a wait cursor) so users can’t interact with snapshot-only placeholders before live data is ready.
* fix(profile-switch): preserve queued requests and avoid stale connection teardown
- Keep earlier queued switches intact by dropping the blanket “collapse” call: after removing duplicates for the same profile, new requests are simply appended, leaving other profiles pending (driver.rs:376). Resolves queue-loss scenario.
- Gate connection cleanup on real successes so cancelled/stale runs no longer tear down Mihomo connections; success handler now skips close_connections_after_switch when success == false (workflow.rs:419).
* fix(profile-switch, layout): improve profile validation and restore backend refresh
- Hardened profile validation using `tokio::fs` with a 5s timeout and offloading YAML parsing to `AsyncHandler::spawn_blocking`, preventing slow disks or malformed files from freezing the runtime (src-tauri/src/cmd/profile_switch/validation.rs:9, 71).
- Restored backend-triggered refresh handling by listening for `verge://refresh-clash-config` / `verge://refresh-verge-config` and invoking shared refresh services so SWR caches stay in sync with core events (src/pages/_layout/useLayoutEvents.ts:6, 45, 55).
* feat(profile-switch): handle cancellations for superseded requests
- Added a `cancelled` flag and constructor so superseded requests publish an explicit cancellation instead of a failure (src-tauri/src/cmd/profile_switch/state.rs:249, src-tauri/src/cmd/profile_switch/driver.rs:482)
- Updated the profile switch effect to log cancellations as info, retain the shared `mutate` call, and skip emitting error toasts while still refreshing follow-up work (src/pages/profiles.tsx:554, src/pages/profiles.tsx:581)
- Exposed the new flag on the TypeScript contract to keep downstream consumers type-safe (src/services/cmds.ts:20)
* fix(profiles): wrap logging payload for Tauri frontend_log
* fix(profile-switch): add rollback and error propagation for failed persistence
- Added rollback on apply failure so Mihomo restores to the previous profile
before exiting the success path early (state_machine.rs:474).
- Reworked persist_profiles_with_timeout to surface timeout/join/save errors,
convert them into CmdResult failures, and trigger rollback + error propagation
when persistence fails (state_machine.rs:703).
* fix(profile-switch): prevent mid-finalize reentrancy and lingering tasks
* fix(profile-switch): preserve pending queue and surface discarded switches
* fix(profile-switch): avoid draining Mihomo sockets on failed/cancelled switches
* fix(app-data-provider): restore backend-driven refresh and reattach fallbacks
* fix(profile-switch): queue concurrent updates and add bounded wait/backoff
* fix(proxy): trigger live refresh on app start for proxy snapshot
* refactor(profile-switch): split flow into layers and centralize async cleanup
- Introduced `SwitchDriver` to encapsulate queue and driver logic while keeping the public Tauri command API.
- Added workflow/cleanup helpers for notification dispatch and Mihomo connection draining, re-exported for API consistency.
- Replaced monolithic state machine with `core.rs`, `context.rs`, and `stages.rs`, plus a thin `mod.rs` re-export layer; stage methods are now individually testable.
- Removed legacy `workflow/state_machine.rs` and adjusted visibility on re-exported types/constants to ensure compilation.
This commit is contained in:
@@ -1,6 +1,7 @@
|
||||
import { RefreshRounded, StorageOutlined } from "@mui/icons-material";
|
||||
import {
|
||||
Box,
|
||||
Chip,
|
||||
Button,
|
||||
Dialog,
|
||||
DialogActions,
|
||||
@@ -18,7 +19,7 @@ import {
|
||||
} from "@mui/material";
|
||||
import { useLockFn } from "ahooks";
|
||||
import dayjs from "dayjs";
|
||||
import { useState } from "react";
|
||||
import { useMemo, useState } from "react";
|
||||
import { useTranslation } from "react-i18next";
|
||||
import { updateProxyProvider } from "tauri-plugin-mihomo-api";
|
||||
|
||||
@@ -48,29 +49,61 @@ const parseExpire = (expire?: number) => {
|
||||
export const ProviderButton = () => {
|
||||
const { t } = useTranslation();
|
||||
const [open, setOpen] = useState(false);
|
||||
const { proxyProviders, refreshProxy, refreshProxyProviders } = useAppData();
|
||||
const {
|
||||
proxyProviders,
|
||||
proxyHydration,
|
||||
refreshProxy,
|
||||
refreshProxyProviders,
|
||||
} = useAppData();
|
||||
|
||||
const isHydrating = proxyHydration !== "live";
|
||||
const [updating, setUpdating] = useState<Record<string, boolean>>({});
|
||||
|
||||
// 检查是否有提供者
|
||||
const hasProviders = Object.keys(proxyProviders || {}).length > 0;
|
||||
|
||||
// Hydration hint badge keeps users aware of sync state
|
||||
const hydrationChip = useMemo(() => {
|
||||
if (proxyHydration === "live") return null;
|
||||
|
||||
return (
|
||||
<Chip
|
||||
size="small"
|
||||
color={proxyHydration === "snapshot" ? "warning" : "info"}
|
||||
label={
|
||||
proxyHydration === "snapshot"
|
||||
? t("Snapshot data")
|
||||
: t("Proxy data is syncing, please wait")
|
||||
}
|
||||
sx={{ fontWeight: 500 }}
|
||||
/>
|
||||
);
|
||||
}, [proxyHydration, t]);
|
||||
|
||||
// 更新单个代理提供者
|
||||
const updateProvider = useLockFn(async (name: string) => {
|
||||
if (isHydrating) {
|
||||
showNotice("info", t("Proxy data is syncing, please wait"));
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// 设置更新状态
|
||||
setUpdating((prev) => ({ ...prev, [name]: true }));
|
||||
|
||||
await updateProxyProvider(name);
|
||||
|
||||
// 刷新数据
|
||||
await refreshProxy();
|
||||
await refreshProxyProviders();
|
||||
|
||||
showNotice("success", `${name} 更新成功`);
|
||||
await refreshProxy();
|
||||
showNotice(
|
||||
"success",
|
||||
t("Provider {{name}} updated successfully", { name }),
|
||||
);
|
||||
} catch (err: any) {
|
||||
showNotice(
|
||||
"error",
|
||||
`${name} 更新失败: ${err?.message || err.toString()}`,
|
||||
t("Provider {{name}} update failed: {{message}}", {
|
||||
name,
|
||||
message: err?.message || err.toString(),
|
||||
}),
|
||||
);
|
||||
} finally {
|
||||
// 清除更新状态
|
||||
@@ -80,11 +113,16 @@ export const ProviderButton = () => {
|
||||
|
||||
// 更新所有代理提供者
|
||||
const updateAllProviders = useLockFn(async () => {
|
||||
if (isHydrating) {
|
||||
showNotice("info", t("Proxy data is syncing, please wait"));
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// 获取所有provider的名称
|
||||
const allProviders = Object.keys(proxyProviders || {});
|
||||
if (allProviders.length === 0) {
|
||||
showNotice("info", "没有可更新的代理提供者");
|
||||
showNotice("info", t("No providers to update"));
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -110,54 +148,67 @@ export const ProviderButton = () => {
|
||||
}
|
||||
}
|
||||
|
||||
// 刷新数据
|
||||
await refreshProxy();
|
||||
await refreshProxyProviders();
|
||||
|
||||
showNotice("success", "全部代理提供者更新成功");
|
||||
await refreshProxy();
|
||||
showNotice("success", t("All providers updated successfully"));
|
||||
} catch (err: any) {
|
||||
showNotice("error", `更新失败: ${err?.message || err.toString()}`);
|
||||
showNotice(
|
||||
"error",
|
||||
t("Failed to update providers: {{message}}", {
|
||||
message: err?.message || err.toString(),
|
||||
}),
|
||||
);
|
||||
} finally {
|
||||
// 清除所有更新状态
|
||||
setUpdating({});
|
||||
}
|
||||
});
|
||||
|
||||
const handleClose = () => {
|
||||
setOpen(false);
|
||||
};
|
||||
const handleClose = () => setOpen(false);
|
||||
|
||||
if (!hasProviders) return null;
|
||||
|
||||
return (
|
||||
<>
|
||||
<Button
|
||||
variant="outlined"
|
||||
size="small"
|
||||
startIcon={<StorageOutlined />}
|
||||
onClick={() => setOpen(true)}
|
||||
sx={{ mr: 1 }}
|
||||
>
|
||||
{t("Proxy Provider")}
|
||||
</Button>
|
||||
<Box sx={{ display: "flex", alignItems: "center", gap: 1, mr: 1 }}>
|
||||
<Button
|
||||
variant="outlined"
|
||||
size="small"
|
||||
startIcon={<StorageOutlined />}
|
||||
onClick={() => setOpen(true)}
|
||||
disabled={isHydrating}
|
||||
title={
|
||||
isHydrating ? t("Proxy data is syncing, please wait") : undefined
|
||||
}
|
||||
>
|
||||
{t("Proxy Provider")}
|
||||
</Button>
|
||||
{hydrationChip}
|
||||
</Box>
|
||||
|
||||
<Dialog open={open} onClose={handleClose} maxWidth="sm" fullWidth>
|
||||
<DialogTitle>
|
||||
<Box
|
||||
display="flex"
|
||||
justifyContent="space-between"
|
||||
alignItems="center"
|
||||
sx={{
|
||||
display: "flex",
|
||||
justifyContent: "space-between",
|
||||
alignItems: "center",
|
||||
}}
|
||||
>
|
||||
<Typography variant="h6">{t("Proxy Provider")}</Typography>
|
||||
<Box>
|
||||
<Button
|
||||
variant="contained"
|
||||
size="small"
|
||||
onClick={updateAllProviders}
|
||||
>
|
||||
{t("Update All")}
|
||||
</Button>
|
||||
</Box>
|
||||
<Button
|
||||
variant="contained"
|
||||
size="small"
|
||||
onClick={updateAllProviders}
|
||||
disabled={isHydrating}
|
||||
title={
|
||||
isHydrating
|
||||
? t("Proxy data is syncing, please wait")
|
||||
: undefined
|
||||
}
|
||||
>
|
||||
{t("Update All")}
|
||||
</Button>
|
||||
</Box>
|
||||
</DialogTitle>
|
||||
|
||||
@@ -166,54 +217,63 @@ export const ProviderButton = () => {
|
||||
{Object.entries(proxyProviders || {})
|
||||
.sort()
|
||||
.map(([key, item]) => {
|
||||
const provider = item;
|
||||
const time = dayjs(provider.updatedAt);
|
||||
if (!item) return null;
|
||||
|
||||
const time = dayjs(item.updatedAt);
|
||||
const isUpdating = updating[key];
|
||||
|
||||
// 订阅信息
|
||||
const sub = provider.subscriptionInfo;
|
||||
const hasSubInfo = !!sub;
|
||||
const upload = sub?.Upload || 0;
|
||||
const download = sub?.Download || 0;
|
||||
const total = sub?.Total || 0;
|
||||
const expire = sub?.Expire || 0;
|
||||
|
||||
// 流量使用进度
|
||||
const sub = item.subscriptionInfo;
|
||||
const hasSubInfo = Boolean(sub);
|
||||
const upload = sub?.Upload ?? 0;
|
||||
const download = sub?.Download ?? 0;
|
||||
const total = sub?.Total ?? 0;
|
||||
const expire = sub?.Expire ?? 0;
|
||||
const progress =
|
||||
total > 0
|
||||
? Math.min(
|
||||
Math.round(((download + upload) * 100) / total) + 1,
|
||||
100,
|
||||
Math.max(0, ((upload + download) / total) * 100),
|
||||
)
|
||||
: 0;
|
||||
|
||||
return (
|
||||
<ListItem
|
||||
key={key}
|
||||
sx={[
|
||||
{
|
||||
p: 0,
|
||||
mb: "8px",
|
||||
borderRadius: 2,
|
||||
overflow: "hidden",
|
||||
transition: "all 0.2s",
|
||||
},
|
||||
({ palette: { mode, primary } }) => {
|
||||
const bgcolor =
|
||||
mode === "light" ? "#ffffff" : "#24252f";
|
||||
const hoverColor =
|
||||
mode === "light"
|
||||
? alpha(primary.main, 0.1)
|
||||
: alpha(primary.main, 0.2);
|
||||
|
||||
return {
|
||||
backgroundColor: bgcolor,
|
||||
"&:hover": {
|
||||
backgroundColor: hoverColor,
|
||||
},
|
||||
};
|
||||
},
|
||||
]}
|
||||
secondaryAction={
|
||||
<Box
|
||||
sx={{
|
||||
width: 40,
|
||||
display: "flex",
|
||||
justifyContent: "center",
|
||||
alignItems: "center",
|
||||
}}
|
||||
>
|
||||
<IconButton
|
||||
size="small"
|
||||
color="primary"
|
||||
onClick={() => updateProvider(key)}
|
||||
disabled={isUpdating || isHydrating}
|
||||
sx={{
|
||||
animation: isUpdating
|
||||
? "spin 1s linear infinite"
|
||||
: "none",
|
||||
"@keyframes spin": {
|
||||
"0%": { transform: "rotate(0deg)" },
|
||||
"100%": { transform: "rotate(360deg)" },
|
||||
},
|
||||
}}
|
||||
title={t("Update Provider") as string}
|
||||
>
|
||||
<RefreshRounded />
|
||||
</IconButton>
|
||||
</Box>
|
||||
}
|
||||
sx={{
|
||||
mb: 1,
|
||||
borderRadius: 1,
|
||||
border: "1px solid",
|
||||
borderColor: alpha("#ccc", 0.4),
|
||||
backgroundColor: alpha("#fff", 0.02),
|
||||
}}
|
||||
>
|
||||
<ListItemText
|
||||
sx={{ px: 2, py: 1 }}
|
||||
@@ -223,6 +283,7 @@ export const ProviderButton = () => {
|
||||
display: "flex",
|
||||
justifyContent: "space-between",
|
||||
alignItems: "center",
|
||||
gap: 1,
|
||||
}}
|
||||
>
|
||||
<Typography
|
||||
@@ -232,12 +293,12 @@ export const ProviderButton = () => {
|
||||
title={key}
|
||||
sx={{ display: "flex", alignItems: "center" }}
|
||||
>
|
||||
<span style={{ marginRight: "8px" }}>{key}</span>
|
||||
<span style={{ marginRight: 8 }}>{key}</span>
|
||||
<TypeBox component="span">
|
||||
{provider.proxies.length}
|
||||
{item.proxies.length}
|
||||
</TypeBox>
|
||||
<TypeBox component="span">
|
||||
{provider.vehicleType}
|
||||
{item.vehicleType}
|
||||
</TypeBox>
|
||||
</Typography>
|
||||
|
||||
@@ -252,72 +313,39 @@ export const ProviderButton = () => {
|
||||
</Box>
|
||||
}
|
||||
secondary={
|
||||
<>
|
||||
{/* 订阅信息 */}
|
||||
{hasSubInfo && (
|
||||
<>
|
||||
<Box
|
||||
sx={{
|
||||
mb: 1,
|
||||
display: "flex",
|
||||
alignItems: "center",
|
||||
justifyContent: "space-between",
|
||||
}}
|
||||
>
|
||||
<span title={t("Used / Total") as string}>
|
||||
{parseTraffic(upload + download)} /{" "}
|
||||
{parseTraffic(total)}
|
||||
</span>
|
||||
<span title={t("Expire Time") as string}>
|
||||
{parseExpire(expire)}
|
||||
</span>
|
||||
</Box>
|
||||
hasSubInfo ? (
|
||||
<>
|
||||
<Box
|
||||
sx={{
|
||||
mb: 1,
|
||||
display: "flex",
|
||||
alignItems: "center",
|
||||
justifyContent: "space-between",
|
||||
}}
|
||||
>
|
||||
<span title={t("Used / Total") as string}>
|
||||
{parseTraffic(upload + download)} /{" "}
|
||||
{parseTraffic(total)}
|
||||
</span>
|
||||
<span title={t("Expire Time") as string}>
|
||||
{parseExpire(expire)}
|
||||
</span>
|
||||
</Box>
|
||||
|
||||
{/* 进度条 */}
|
||||
<LinearProgress
|
||||
variant="determinate"
|
||||
value={progress}
|
||||
sx={{
|
||||
height: 6,
|
||||
borderRadius: 3,
|
||||
opacity: total > 0 ? 1 : 0,
|
||||
}}
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
</>
|
||||
<LinearProgress
|
||||
variant="determinate"
|
||||
value={progress}
|
||||
sx={{
|
||||
height: 6,
|
||||
borderRadius: 3,
|
||||
opacity: total > 0 ? 1 : 0,
|
||||
}}
|
||||
/>
|
||||
</>
|
||||
) : null
|
||||
}
|
||||
/>
|
||||
<Divider orientation="vertical" flexItem />
|
||||
<Box
|
||||
sx={{
|
||||
width: 40,
|
||||
display: "flex",
|
||||
justifyContent: "center",
|
||||
alignItems: "center",
|
||||
}}
|
||||
>
|
||||
<IconButton
|
||||
size="small"
|
||||
color="primary"
|
||||
onClick={() => {
|
||||
updateProvider(key);
|
||||
}}
|
||||
disabled={isUpdating}
|
||||
sx={{
|
||||
animation: isUpdating
|
||||
? "spin 1s linear infinite"
|
||||
: "none",
|
||||
"@keyframes spin": {
|
||||
"0%": { transform: "rotate(0deg)" },
|
||||
"100%": { transform: "rotate(360deg)" },
|
||||
},
|
||||
}}
|
||||
title={t("Update Provider") as string}
|
||||
>
|
||||
<RefreshRounded />
|
||||
</IconButton>
|
||||
</Box>
|
||||
</ListItem>
|
||||
);
|
||||
})}
|
||||
|
||||
@@ -61,10 +61,17 @@ export const ProxyGroups = (props: Props) => {
|
||||
}>({ open: false, message: "" });
|
||||
|
||||
const { verge } = useVerge();
|
||||
const { proxies: proxiesData } = useAppData();
|
||||
const {
|
||||
proxies: proxiesData,
|
||||
proxyHydration,
|
||||
proxyTargetProfileId,
|
||||
proxyDisplayProfileId,
|
||||
isProxyRefreshPending,
|
||||
} = useAppData();
|
||||
const groups = proxiesData?.groups;
|
||||
const availableGroups = useMemo(() => groups ?? [], [groups]);
|
||||
|
||||
const showHydrationOverlay = isProxyRefreshPending;
|
||||
const pendingProfileSwitch = proxyTargetProfileId !== proxyDisplayProfileId;
|
||||
const defaultRuleGroup = useMemo(() => {
|
||||
if (isChainMode && mode === "rule" && availableGroups.length > 0) {
|
||||
return availableGroups[0].name;
|
||||
@@ -76,6 +83,35 @@ export const ProxyGroups = (props: Props) => {
|
||||
() => selectedGroup ?? defaultRuleGroup,
|
||||
[selectedGroup, defaultRuleGroup],
|
||||
);
|
||||
const hydrationChip = useMemo(() => {
|
||||
if (proxyHydration === "live") return null;
|
||||
|
||||
const label =
|
||||
proxyHydration === "snapshot" ? t("Snapshot data") : t("Syncing...");
|
||||
|
||||
return (
|
||||
<Chip
|
||||
size="small"
|
||||
color={proxyHydration === "snapshot" ? "warning" : "info"}
|
||||
label={label}
|
||||
sx={{ fontWeight: 500, height: 22 }}
|
||||
/>
|
||||
);
|
||||
}, [proxyHydration, t]);
|
||||
|
||||
const overlayMessage = useMemo(() => {
|
||||
if (!showHydrationOverlay) return null;
|
||||
|
||||
if (pendingProfileSwitch) {
|
||||
return t("Loading proxy data for the selected profile...");
|
||||
}
|
||||
|
||||
if (proxyHydration === "snapshot") {
|
||||
return t("Preparing proxy snapshot...");
|
||||
}
|
||||
|
||||
return t("Syncing proxy data...");
|
||||
}, [showHydrationOverlay, pendingProfileSwitch, proxyHydration, t]);
|
||||
|
||||
const { renderList, onProxies, onHeadState } = useRenderList(
|
||||
mode,
|
||||
@@ -93,7 +129,7 @@ export const ProxyGroups = (props: Props) => {
|
||||
[renderList],
|
||||
);
|
||||
|
||||
// 统代理选择
|
||||
// 系统代理选择
|
||||
const { handleProxyGroupChange } = useProxySelection({
|
||||
onSuccess: () => {
|
||||
onProxies();
|
||||
@@ -306,12 +342,7 @@ export const ProxyGroups = (props: Props) => {
|
||||
try {
|
||||
await Promise.race([
|
||||
delayManager.checkListDelay(names, groupName, timeout),
|
||||
delayGroup(groupName, url, timeout).then((result) => {
|
||||
console.log(
|
||||
`[ProxyGroups] getGroupProxyDelays返回结果数量:`,
|
||||
Object.keys(result || {}).length,
|
||||
);
|
||||
}), // 查询group delays 将清除fixed(不关注调用结果)
|
||||
delayGroup(groupName, url, timeout),
|
||||
]);
|
||||
console.log(`[ProxyGroups] 延迟测试完成,组: ${groupName}`);
|
||||
} catch (error) {
|
||||
@@ -376,6 +407,11 @@ export const ProxyGroups = (props: Props) => {
|
||||
}
|
||||
|
||||
if (isChainMode) {
|
||||
const chainVirtuosoHeight =
|
||||
mode === "rule" && proxyGroupNames.length > 0
|
||||
? "calc(100% - 80px)"
|
||||
: "calc(100% - 14px)";
|
||||
|
||||
// 获取所有代理组
|
||||
const proxyGroups = proxiesData?.groups || [];
|
||||
|
||||
@@ -454,10 +490,7 @@ export const ProxyGroups = (props: Props) => {
|
||||
<Virtuoso
|
||||
ref={virtuosoRef}
|
||||
style={{
|
||||
height:
|
||||
mode === "rule" && proxyGroups.length > 0
|
||||
? "calc(100% - 80px)" // 只有标题的高度
|
||||
: "calc(100% - 14px)",
|
||||
height: chainVirtuosoHeight,
|
||||
}}
|
||||
totalCount={renderList.length}
|
||||
increaseViewportBy={{ top: 200, bottom: 200 }}
|
||||
@@ -548,7 +581,9 @@ export const ProxyGroups = (props: Props) => {
|
||||
{group.name}
|
||||
</Typography>
|
||||
<Typography variant="caption" color="text.secondary">
|
||||
{group.type} · {group.all.length} 节点
|
||||
{`${t("Group Type")}: ${group.type} · ${t("Proxy Count")}: ${
|
||||
Array.isArray(group.all) ? group.all.length : 0
|
||||
}`}
|
||||
</Typography>
|
||||
</Box>
|
||||
</MenuItem>
|
||||
@@ -556,7 +591,7 @@ export const ProxyGroups = (props: Props) => {
|
||||
{availableGroups.length === 0 && (
|
||||
<MenuItem disabled>
|
||||
<Typography variant="body2" color="text.secondary">
|
||||
暂无可用代理组
|
||||
{t("Empty")}
|
||||
</Typography>
|
||||
</MenuItem>
|
||||
)}
|
||||
@@ -567,9 +602,29 @@ export const ProxyGroups = (props: Props) => {
|
||||
|
||||
return (
|
||||
<div
|
||||
style={{ position: "relative", height: "100%", willChange: "transform" }}
|
||||
style={{
|
||||
position: "relative",
|
||||
height: "100%",
|
||||
willChange: "transform",
|
||||
opacity: showHydrationOverlay ? 0.45 : 1,
|
||||
transition: "opacity 120ms ease",
|
||||
}}
|
||||
>
|
||||
{/* 代理组导航栏 */}
|
||||
{hydrationChip && (
|
||||
<Box
|
||||
sx={{
|
||||
position: "absolute",
|
||||
top: 8,
|
||||
right: 16,
|
||||
zIndex: 2,
|
||||
display: "flex",
|
||||
alignItems: "center",
|
||||
gap: 8,
|
||||
}}
|
||||
>
|
||||
{hydrationChip}
|
||||
</Box>
|
||||
)}
|
||||
{mode === "rule" && (
|
||||
<ProxyGroupNavigator
|
||||
proxyGroupNames={proxyGroupNames}
|
||||
@@ -608,6 +663,39 @@ export const ProxyGroups = (props: Props) => {
|
||||
)}
|
||||
/>
|
||||
<ScrollTopButton show={showScrollTop} onClick={scrollToTop} />
|
||||
{showHydrationOverlay && overlayMessage && (
|
||||
<Box
|
||||
sx={{
|
||||
position: "absolute",
|
||||
inset: 0,
|
||||
zIndex: 3,
|
||||
display: "flex",
|
||||
alignItems: "center",
|
||||
justifyContent: "center",
|
||||
pointerEvents: "auto",
|
||||
cursor: "wait",
|
||||
backgroundColor: "rgba(8, 8, 8, 0.12)",
|
||||
}}
|
||||
>
|
||||
<Box
|
||||
sx={{
|
||||
px: 2.5,
|
||||
py: 1.5,
|
||||
borderRadius: 1,
|
||||
bgcolor: "background.paper",
|
||||
boxShadow: 3,
|
||||
}}
|
||||
>
|
||||
<Typography
|
||||
variant="body2"
|
||||
color="text.secondary"
|
||||
sx={{ fontWeight: 500 }}
|
||||
>
|
||||
{overlayMessage}
|
||||
</Typography>
|
||||
</Box>
|
||||
</Box>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -14,50 +14,13 @@ import {
|
||||
} from "./use-head-state";
|
||||
import { useWindowWidth } from "./use-window-width";
|
||||
|
||||
// 定义代理项接口
|
||||
interface IProxyItem {
|
||||
name: string;
|
||||
type: string;
|
||||
udp: boolean;
|
||||
xudp: boolean;
|
||||
tfo: boolean;
|
||||
mptcp: boolean;
|
||||
smux: boolean;
|
||||
history: {
|
||||
time: string;
|
||||
delay: number;
|
||||
}[];
|
||||
provider?: string;
|
||||
testUrl?: string;
|
||||
[key: string]: any; // 添加索引签名以适应其他可能的属性
|
||||
}
|
||||
|
||||
// 代理组类型
|
||||
type ProxyGroup = {
|
||||
name: string;
|
||||
type: string;
|
||||
udp: boolean;
|
||||
xudp: boolean;
|
||||
tfo: boolean;
|
||||
mptcp: boolean;
|
||||
smux: boolean;
|
||||
history: {
|
||||
time: string;
|
||||
delay: number;
|
||||
}[];
|
||||
now: string;
|
||||
all: IProxyItem[];
|
||||
hidden?: boolean;
|
||||
icon?: string;
|
||||
testUrl?: string;
|
||||
provider?: string;
|
||||
};
|
||||
type RenderGroup = IProxyGroupItem;
|
||||
|
||||
export interface IRenderItem {
|
||||
// 组 | head | item | empty | item col
|
||||
type: 0 | 1 | 2 | 3 | 4;
|
||||
key: string;
|
||||
group: ProxyGroup;
|
||||
group: RenderGroup;
|
||||
proxy?: IProxyItem;
|
||||
col?: number;
|
||||
proxyCol?: IProxyItem[];
|
||||
@@ -99,7 +62,7 @@ export const useRenderList = (
|
||||
selectedGroup?: string | null,
|
||||
) => {
|
||||
// 使用全局数据提供者
|
||||
const { proxies: proxiesData, refreshProxy } = useAppData();
|
||||
const { proxies: proxiesData, proxyHydration, refreshProxy } = useAppData();
|
||||
const { verge } = useVerge();
|
||||
const { width } = useWindowWidth();
|
||||
const [headStates, setHeadState] = useHeadStateNew();
|
||||
@@ -123,17 +86,29 @@ export const useRenderList = (
|
||||
|
||||
// 确保代理数据加载
|
||||
useEffect(() => {
|
||||
if (!proxiesData) return;
|
||||
if (!proxiesData || proxyHydration !== "live") return;
|
||||
const { groups, proxies } = proxiesData;
|
||||
|
||||
if (
|
||||
(mode === "rule" && !groups.length) ||
|
||||
(mode === "global" && proxies.length < 2)
|
||||
) {
|
||||
const handle = setTimeout(() => refreshProxy(), 500);
|
||||
const handle = setTimeout(() => {
|
||||
void refreshProxy().catch(() => {});
|
||||
}, 500);
|
||||
return () => clearTimeout(handle);
|
||||
}
|
||||
}, [proxiesData, mode, refreshProxy]);
|
||||
}, [proxiesData, proxyHydration, mode, refreshProxy]);
|
||||
|
||||
useEffect(() => {
|
||||
if (proxyHydration !== "snapshot") return;
|
||||
|
||||
const handle = setTimeout(() => {
|
||||
void refreshProxy().catch(() => {});
|
||||
}, 1800);
|
||||
|
||||
return () => clearTimeout(handle);
|
||||
}, [proxyHydration, refreshProxy]);
|
||||
|
||||
// 链式代理模式节点自动计算延迟
|
||||
useEffect(() => {
|
||||
@@ -147,7 +122,7 @@ export const useRenderList = (
|
||||
// 设置组监听器,当有延迟更新时自动刷新
|
||||
const groupListener = () => {
|
||||
console.log("[ChainMode] 延迟更新,刷新UI");
|
||||
refreshProxy();
|
||||
void refreshProxy().catch(() => {});
|
||||
};
|
||||
|
||||
delayManager.setGroupListener("chain-mode", groupListener);
|
||||
@@ -188,9 +163,12 @@ export const useRenderList = (
|
||||
// 链式代理模式下,显示代理组和其节点
|
||||
if (isChainMode && runtimeConfig && mode === "rule") {
|
||||
// 使用正常的规则模式代理组
|
||||
const allGroups = proxiesData.groups.length
|
||||
? proxiesData.groups
|
||||
: [proxiesData.global!];
|
||||
const chainGroups = proxiesData.groups ?? [];
|
||||
const allGroups = chainGroups.length
|
||||
? chainGroups
|
||||
: proxiesData.global
|
||||
? [proxiesData.global]
|
||||
: [];
|
||||
|
||||
// 如果选择了特定代理组,只显示该组的节点
|
||||
if (selectedGroup) {
|
||||
@@ -282,7 +260,7 @@ export const useRenderList = (
|
||||
});
|
||||
|
||||
// 创建一个虚拟的组来容纳所有节点
|
||||
const virtualGroup: ProxyGroup = {
|
||||
const virtualGroup: RenderGroup = {
|
||||
name: "All Proxies",
|
||||
type: "Selector",
|
||||
udp: false,
|
||||
@@ -340,7 +318,7 @@ export const useRenderList = (
|
||||
});
|
||||
|
||||
// 创建一个虚拟的组来容纳所有节点
|
||||
const virtualGroup: ProxyGroup = {
|
||||
const virtualGroup: RenderGroup = {
|
||||
name: "All Proxies",
|
||||
type: "Selector",
|
||||
udp: false,
|
||||
@@ -380,12 +358,15 @@ export const useRenderList = (
|
||||
|
||||
// 正常模式的渲染逻辑
|
||||
const useRule = mode === "rule" || mode === "script";
|
||||
const renderGroups =
|
||||
useRule && proxiesData.groups.length
|
||||
? proxiesData.groups
|
||||
: [proxiesData.global!];
|
||||
const renderGroups = (() => {
|
||||
const groups = proxiesData.groups ?? [];
|
||||
if (useRule && groups.length) {
|
||||
return groups;
|
||||
}
|
||||
return proxiesData.global ? [proxiesData.global] : groups;
|
||||
})();
|
||||
|
||||
const retList = renderGroups.flatMap((group: ProxyGroup) => {
|
||||
const retList = renderGroups.flatMap((group: RenderGroup) => {
|
||||
const headState = headStates[group.name] || DEFAULT_STATE;
|
||||
const ret: IRenderItem[] = [
|
||||
{
|
||||
|
||||
Reference in New Issue
Block a user