Compare commits
592 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4a7859bdae | ||
|
|
c0f9920531 | ||
|
|
d3d32006c3 | ||
|
|
fb260fb33d | ||
|
|
50567d9b97 | ||
|
|
9370a56337 | ||
|
|
73e53eb33f | ||
|
|
30d1655e07 | ||
|
|
9dc50da167 | ||
|
|
b3b8eeb577 | ||
|
|
413f29e22a | ||
|
|
ae319279ae | ||
|
|
c0e111e756 | ||
|
|
52545a626c | ||
|
|
518875acde | ||
|
|
b672dd7055 | ||
|
|
804641425b | ||
|
|
d3386908ff | ||
|
|
59e7095b0f | ||
|
|
8fc8eb1789 | ||
|
|
0f1537ef48 | ||
|
|
8c734a5a35 | ||
|
|
5187712a71 | ||
|
|
7b7fa2239b | ||
|
|
85ff296912 | ||
|
|
5e7adf76ca | ||
|
|
d094d3885c | ||
|
|
648c93c066 | ||
|
|
1e9df69ffc | ||
|
|
ef35752d84 | ||
|
|
ffb7400a22 | ||
|
|
4d5f1f4327 | ||
|
|
999830aaf5 | ||
|
|
6d7efbbf28 | ||
|
|
a869dbb441 | ||
|
|
928f226d10 | ||
|
|
c27ad3fdcb | ||
|
|
c2dcd86722 | ||
|
|
af79bcd1cf | ||
|
|
2d73afdff2 | ||
|
|
e7a9f8f755 | ||
|
|
d209238009 | ||
|
|
dfcdb33e58 | ||
|
|
37359ffc27 | ||
|
|
fb09e6c85d | ||
|
|
d10665091b | ||
|
|
d8b0e9929c | ||
|
|
73323edf06 | ||
|
|
f4de4738f1 | ||
|
|
2e9f6dd174 | ||
|
|
e928089a77 | ||
|
|
f41998284a | ||
|
|
9375674c91 | ||
|
|
2a7ccb5bde | ||
|
|
2af0af0837 | ||
|
|
0fcf168b08 | ||
|
|
f39436f1d0 | ||
|
|
a9eb512f20 | ||
|
|
713162ca37 | ||
|
|
87168b6ce0 | ||
|
|
2ee8d164fd | ||
|
|
c736796380 | ||
|
|
6df1e137f3 | ||
|
|
5a29508407 | ||
|
|
45c68424f0 | ||
|
|
9426fc1b1c | ||
|
|
c234d1dc16 | ||
|
|
11035db307 | ||
|
|
d2614396da | ||
|
|
a2bbb69b73 | ||
|
|
b23b2a95c5 | ||
|
|
8fa9bcc650 | ||
|
|
e544203ca0 | ||
|
|
ed9eed226d | ||
|
|
806769b307 | ||
|
|
d291fc5c64 | ||
|
|
bf45e487f9 | ||
|
|
99ef0e51fc | ||
|
|
cc2dc66d5f | ||
|
|
001c11913a | ||
|
|
f061bce2a1 | ||
|
|
d7859b07a6 | ||
|
|
585963e751 | ||
|
|
d84b762ef3 | ||
|
|
8657cedca0 | ||
|
|
9ea9704bbf | ||
|
|
302677aed0 | ||
|
|
1415df1d23 | ||
|
|
b7d2bc7c74 | ||
|
|
4f7d069f19 | ||
|
|
3bedf7ec35 | ||
|
|
c4c37bf291 | ||
|
|
815a865265 | ||
|
|
2d2167e048 | ||
|
|
a05ea64bcd | ||
|
|
fe96a7030a | ||
|
|
9050e56cdb | ||
|
|
b813cbdfc8 | ||
|
|
95aee6ec81 | ||
|
|
4d2c1b4dc2 | ||
|
|
d9fdf261d1 | ||
|
|
a8b17926ed | ||
|
|
afb049ca17 | ||
|
|
9c9aefe4cd | ||
|
|
e7a4415d1f | ||
|
|
ef3f8e1839 | ||
|
|
0e933597f5 | ||
|
|
bafe2ae164 | ||
|
|
6d93e21bc7 | ||
|
|
91fb0d9ffa | ||
|
|
8fc72814c8 | ||
|
|
a5d3d6fc50 | ||
|
|
b6d51d6fe4 | ||
|
|
a5c00ecb12 | ||
|
|
91e12798e4 | ||
|
|
9ee4b067d0 | ||
|
|
366deb2756 | ||
|
|
98778fe6a3 | ||
|
|
b9dd62e2e6 | ||
|
|
a1dcdd04a7 | ||
|
|
8ebf915330 | ||
|
|
5281449e26 | ||
|
|
b2f0bf2f69 | ||
|
|
d6bba4f68c | ||
|
|
278ab30d40 | ||
|
|
786c981fe0 | ||
|
|
b77cc012e1 | ||
|
|
dc31ec524b | ||
|
|
a0ef64cda8 | ||
|
|
7be790c6fb | ||
|
|
fbe6cefbdb | ||
|
|
a60cab989d | ||
|
|
385ffafc67 | ||
|
|
462b11d96a | ||
|
|
96ce529b16 | ||
|
|
8e20b1b0a0 | ||
|
|
c2d7bf296a | ||
|
|
98725bbecf | ||
|
|
c465000178 | ||
|
|
3d09cf0666 | ||
|
|
70770b3c13 | ||
|
|
0cdb9a05ce | ||
|
|
c97c4cbd41 | ||
|
|
fecae38c63 | ||
|
|
210c12a74e | ||
|
|
a1c0a09423 | ||
|
|
fc99f24802 | ||
|
|
28bcdc3706 | ||
|
|
859d09ff8c | ||
|
|
10f155da78 | ||
|
|
4e31dc8728 | ||
|
|
98a52c5c33 | ||
|
|
0b63bebb6c | ||
|
|
bccde5ef6d | ||
|
|
215ba4da63 | ||
|
|
886d1a551a | ||
|
|
d05bcc17f7 | ||
|
|
c63584daca | ||
|
|
c2f59ffc02 | ||
|
|
f90361f8e4 | ||
|
|
67d254236d | ||
|
|
fd5bddeb80 | ||
|
|
4835d68222 | ||
|
|
fe78e2d5cd | ||
|
|
e73217ad5f | ||
|
|
88cde5d99d | ||
|
|
41bc0e62a1 | ||
|
|
b05799cfae | ||
|
|
592e7f846d | ||
|
|
bcd54bf995 | ||
|
|
0b4403b67b | ||
|
|
a591ee1efc | ||
|
|
ef9ccafe61 | ||
|
|
e6b7d512fb | ||
|
|
6113be3b6c | ||
|
|
7fab8eeaf6 | ||
|
|
15d5113729 | ||
|
|
e5eaff37a4 | ||
|
|
4f2633a62b | ||
|
|
5d114806f7 | ||
|
|
06dc7a6ef4 | ||
|
|
8dbe3f8c48 | ||
|
|
778d506be7 | ||
|
|
65cf6c387b | ||
|
|
2e3174baa7 | ||
|
|
7c71d07ad2 | ||
|
|
8760ed17dc | ||
|
|
02b44d83af | ||
|
|
bb2059c76f | ||
|
|
f541464ff4 | ||
|
|
98527d5038 | ||
|
|
2ba2f4d42c | ||
|
|
51b08be87e | ||
|
|
4dd811330b | ||
|
|
76ca24086b | ||
|
|
3d96a575c0 | ||
|
|
db091f5d2e | ||
|
|
baebce4aad | ||
|
|
fefc5c23fd | ||
|
|
924e7d1022 | ||
|
|
44eb781060 | ||
|
|
3bd981d47b | ||
|
|
1d725b8bde | ||
|
|
15a0c30ccb | ||
|
|
537a3000b6 | ||
|
|
965ee9844d | ||
|
|
fa39cfc41b | ||
|
|
902c8fcaf2 | ||
|
|
5fb770c113 | ||
|
|
ca3fa869d5 | ||
|
|
51ba1d1e34 | ||
|
|
7e3804bf34 | ||
|
|
55e8582ee4 | ||
|
|
0e9595f255 | ||
|
|
19246ac616 | ||
|
|
b91087e175 | ||
|
|
7789d0bd5c | ||
|
|
1875e1b513 | ||
|
|
8e5c150a4f | ||
|
|
85f4afe2a1 | ||
|
|
0b8b3c5a1a | ||
|
|
5ce95d74a9 | ||
|
|
d531432f4a | ||
|
|
4f1d61a56e | ||
|
|
121b8c433b | ||
|
|
3d2507430b | ||
|
|
601e99f0b5 | ||
|
|
5370bd45ed | ||
|
|
1246a66b35 | ||
|
|
7d6fb54783 | ||
|
|
79d14f1d51 | ||
|
|
59dd71ebaa | ||
|
|
ea319e951c | ||
|
|
8c0af66ca9 | ||
|
|
a4d94c8bc9 | ||
|
|
ea8ca1b739 | ||
|
|
e4f1bab8fb | ||
|
|
7e05b8f13b | ||
|
|
0a771bd67a | ||
|
|
2798e930ac | ||
|
|
1357913f8b | ||
|
|
14b990ad9f | ||
|
|
58bd2032f3 | ||
|
|
fe7eb59f18 | ||
|
|
e3cd16189b | ||
|
|
bd9db1b4f7 | ||
|
|
5db4677ff8 | ||
|
|
44280b23e4 | ||
|
|
7cfc31b6e5 | ||
|
|
c7cd47fbdc | ||
|
|
a9d91a09c4 | ||
|
|
d18b98304b | ||
|
|
a80bc10719 | ||
|
|
0f34d63b6d | ||
|
|
02c271dfb2 | ||
|
|
03ab2410cc | ||
|
|
f5c2b2a23d | ||
|
|
4417fe6cd9 | ||
|
|
57c031a8f8 | ||
|
|
a9733d9746 | ||
|
|
936764e6ce | ||
|
|
9750cd3ce6 | ||
|
|
bfd1274a8c | ||
|
|
f195b3bccf | ||
|
|
5376d50cfb | ||
|
|
0b6681436a | ||
|
|
7fc238c27b | ||
|
|
72aa56007c | ||
|
|
2bc720534d | ||
|
|
1e88f95b43 | ||
|
|
9d47bc66c4 | ||
|
|
10f250b7e7 | ||
|
|
f492580864 | ||
|
|
86b4712beb | ||
|
|
0d12103085 | ||
|
|
bf4e1a3270 | ||
|
|
3f1f53434c | ||
|
|
d3477159a8 | ||
|
|
c05395c258 | ||
|
|
d25eb49bfe | ||
|
|
f3f8ea0481 | ||
|
|
0af971b08a | ||
|
|
a7fa63f054 | ||
|
|
b20b30baad | ||
|
|
cb15a38cb3 | ||
|
|
39673af46f | ||
|
|
c6afbf6ee8 | ||
|
|
abb0df59df | ||
|
|
b0decf824e | ||
|
|
5ec5fdcfc7 | ||
|
|
f9bc739c51 | ||
|
|
a1b3f267de | ||
|
|
dbcad24093 | ||
|
|
1176f8c863 | ||
|
|
90b98f695b | ||
|
|
600b0b52f4 | ||
|
|
8b3bc18ea8 | ||
|
|
9a9c9a2da1 | ||
|
|
c5023b4505 | ||
|
|
18b79d3693 | ||
|
|
982c8b4df2 | ||
|
|
8a4f2de887 | ||
|
|
14288568bf | ||
|
|
1cd013fb94 | ||
|
|
0c88568cd7 | ||
|
|
ecdeadfe1e | ||
|
|
d86bdea127 | ||
|
|
40f0e1bb19 | ||
|
|
78496312ec | ||
|
|
3e23609b68 | ||
|
|
8488a92026 | ||
|
|
1b4691d0ac | ||
|
|
fae2c27648 | ||
|
|
7a14e90802 | ||
|
|
c8c79d9baa | ||
|
|
a2d33c5447 | ||
|
|
fb5d5a7d37 | ||
|
|
b608a389c5 | ||
|
|
9de90200f5 | ||
|
|
860f154d54 | ||
|
|
f7d4040ac7 | ||
|
|
b3075cce24 | ||
|
|
909c4028f1 | ||
|
|
620922f82e | ||
|
|
22e1d329cb | ||
|
|
33fdcc38b5 | ||
|
|
fef2728a7c | ||
|
|
8f88270cdf | ||
|
|
3493580236 | ||
|
|
7f2729fd0f | ||
|
|
b8c82320d3 | ||
|
|
94ec25c193 | ||
|
|
2366530622 | ||
|
|
1cf8e2384e | ||
|
|
d9a5c11d6a | ||
|
|
7811714f89 | ||
|
|
e869da8d4c | ||
|
|
e414b49879 | ||
|
|
627119bb22 | ||
|
|
74ade3ee41 | ||
|
|
324628dd3d | ||
|
|
9d96ac0f6a | ||
|
|
409571f54b | ||
|
|
a995a13163 | ||
|
|
7848d6b1de | ||
|
|
5d2e114b4d | ||
|
|
c207516b47 | ||
|
|
6724f1ae35 | ||
|
|
1787d5372e | ||
|
|
4c41144dd0 | ||
|
|
27636c848f | ||
|
|
8060d699f0 | ||
|
|
f36f31a636 | ||
|
|
d300fac3d9 | ||
|
|
8c2262dd95 | ||
|
|
c438e916ca | ||
|
|
0855bd4896 | ||
|
|
f2073a2f83 | ||
|
|
a1f468202f | ||
|
|
a24bf4042c | ||
|
|
15d22b4bf6 | ||
|
|
231d264652 | ||
|
|
67ac353fd5 | ||
|
|
1c5534ad36 | ||
|
|
251678493c | ||
|
|
ccbffa14f0 | ||
|
|
dfc1f736af | ||
|
|
c54d89a465 | ||
|
|
55b95a1985 | ||
|
|
31e3104c7f | ||
|
|
58a0089b19 | ||
|
|
043ed4cb31 | ||
|
|
f64c01044c | ||
|
|
5dca724017 | ||
|
|
579f9bd1f8 | ||
|
|
7c9104a5b9 | ||
|
|
14d1531469 | ||
|
|
74e1e92607 | ||
|
|
f7a56c0eb3 | ||
|
|
e2fa76332a | ||
|
|
0daa8720cd | ||
|
|
f70b8b1213 | ||
|
|
feb3dfbe86 | ||
|
|
f38e4a6cac | ||
|
|
893188d693 | ||
|
|
b989aeb7b0 | ||
|
|
40f87c834d | ||
|
|
0bb9cb5097 | ||
|
|
b51797e238 | ||
|
|
926c095409 | ||
|
|
0c65f8ebad | ||
|
|
63f4295063 | ||
|
|
d2b38a8a3c | ||
|
|
45ddb15d56 | ||
|
|
7aef9d2a5a | ||
|
|
45fdebeaca | ||
|
|
0ea875f7f7 | ||
|
|
1b54c9fc1b | ||
|
|
89f3adcbef | ||
|
|
b13fef5ad9 | ||
|
|
9110955b63 | ||
|
|
4508d062f1 | ||
|
|
bea0dde074 | ||
|
|
3e674b186f | ||
|
|
92d9c94e87 | ||
|
|
c09066c0a3 | ||
|
|
3a7be3dfb7 | ||
|
|
09f14c23e4 | ||
|
|
eaaae3b393 | ||
|
|
3939741a06 | ||
|
|
f86a1816e0 | ||
|
|
9cbd8b4529 | ||
|
|
5dea73fc2a | ||
|
|
01af1bea23 | ||
|
|
1227e86134 | ||
|
|
c6a6ea48dd | ||
|
|
2080dbdc0f | ||
|
|
6eecd70bd5 | ||
|
|
a9951e4eca | ||
|
|
53688f332f | ||
|
|
d23d1d9a1d | ||
|
|
51ff9e1851 | ||
|
|
824814da56 | ||
|
|
040fcd059f | ||
|
|
f2339620a5 | ||
|
|
d58c0a7df5 | ||
|
|
355a18e5eb | ||
|
|
4598c805eb | ||
|
|
aa204649fa | ||
|
|
fbaff3e90c | ||
|
|
0d070fb934 | ||
|
|
c416bd5755 | ||
|
|
90406ae883 | ||
|
|
600b3dfbac | ||
|
|
72e4491dc4 | ||
|
|
76c3695567 | ||
|
|
475a09bb54 | ||
|
|
6a1fce69e0 | ||
|
|
485fd0169b | ||
|
|
a9464ff776 | ||
|
|
335ca817d2 | ||
|
|
6d112c387d | ||
|
|
e4c243de2d | ||
|
|
02f67961a9 | ||
|
|
7d5fd295ed | ||
|
|
daa0b1592d | ||
|
|
b411783bbe | ||
|
|
40a59bbc1a | ||
|
|
93fc4932ee | ||
|
|
2277d7232e | ||
|
|
435318cf1d | ||
|
|
a9a9d8a78f | ||
|
|
a2544d237e | ||
|
|
9397ac0174 | ||
|
|
4c719da096 | ||
|
|
7613417c33 | ||
|
|
a2a65cade7 | ||
|
|
3a07402aa2 | ||
|
|
59b67f5d3f | ||
|
|
e30cfc3a2f | ||
|
|
52655d9702 | ||
|
|
e93846ddc1 | ||
|
|
4cf2f6b1e6 | ||
|
|
43a3cb74ac | ||
|
|
3360339c08 | ||
|
|
f0dbe9fa60 | ||
|
|
756751b765 | ||
|
|
85a9f6c8d4 | ||
|
|
7fe0381850 | ||
|
|
537d27d10b | ||
|
|
a5fdd3f1a2 | ||
|
|
be8a632a09 | ||
|
|
95c34f1df5 | ||
|
|
8b53a7bd99 | ||
|
|
b169ee8149 | ||
|
|
9cc6dde999 | ||
|
|
cf1fbb63c4 | ||
|
|
2aa629ff5d | ||
|
|
1e2b453c24 | ||
|
|
331e4a4970 | ||
|
|
ee3ffaef1d | ||
|
|
7cc3bc83a0 | ||
|
|
558e28ddaf | ||
|
|
45e69543b3 | ||
|
|
67ee41c5ea | ||
|
|
160ed05178 | ||
|
|
5ecfe121b3 | ||
|
|
a654137af9 | ||
|
|
cb591f19fb | ||
|
|
4823a348be | ||
|
|
32da6ae808 | ||
|
|
7eb70b0f0d | ||
|
|
52e8e45daf | ||
|
|
319c5b84fa | ||
|
|
6a93ff1fc1 | ||
|
|
6069b654d1 | ||
|
|
2af8c32497 | ||
|
|
a3957289c8 | ||
|
|
3f5cd6c26a | ||
|
|
8046dad56d | ||
|
|
cdc1fd2d87 | ||
|
|
499626b946 | ||
|
|
a9cfb2cfaa | ||
|
|
7b976c16eb | ||
|
|
44e8a035aa | ||
|
|
6b57607926 | ||
|
|
c3675e48fd | ||
|
|
a66393c609 | ||
|
|
776abaf56d | ||
|
|
18808004f4 | ||
|
|
db8761946d | ||
|
|
2194a96145 | ||
|
|
ecd396d70f | ||
|
|
5ab0438397 | ||
|
|
bcde047695 | ||
|
|
109f5f9648 | ||
|
|
d16c691c0f | ||
|
|
3eb2a5b3ef | ||
|
|
569e2d5192 | ||
|
|
0077157d28 | ||
|
|
9e19bab5a7 | ||
|
|
b1e2940db6 | ||
|
|
4113cd619c | ||
|
|
1f78d576a3 | ||
|
|
e2a548f6a5 | ||
|
|
2956725e66 | ||
|
|
5131d37d58 | ||
|
|
1dfba159e0 | ||
|
|
631718a138 | ||
|
|
3172ab2d1c | ||
|
|
6a6bde3764 | ||
|
|
f756b37f97 | ||
|
|
a26c28517a | ||
|
|
894428642b | ||
|
|
36d58d05b3 | ||
|
|
80de055fc2 | ||
|
|
4905b44c8a | ||
|
|
02e19bb132 | ||
|
|
84989e0ea3 | ||
|
|
9661c5fd82 | ||
|
|
c8dfdb7a5a | ||
|
|
df5897c908 | ||
|
|
8d0af75145 | ||
|
|
36c6f0ca8d | ||
|
|
3048a2ae08 | ||
|
|
27535c7bb7 | ||
|
|
15a1770ee9 | ||
|
|
f580409ade | ||
|
|
f209d17e3c | ||
|
|
5047b0f614 | ||
|
|
ad623da86d | ||
|
|
3ec7772e5d | ||
|
|
e6b57561e3 | ||
|
|
3b1c2c95ec | ||
|
|
364abafffe | ||
|
|
389281b96c | ||
|
|
fed64bcd08 | ||
|
|
17a5c4d094 | ||
|
|
f2317c7816 | ||
|
|
7fce7ca62f | ||
|
|
167bcb222d | ||
|
|
108a599666 | ||
|
|
57476741cd | ||
|
|
077f3e79f8 | ||
|
|
2c9aa4bca7 | ||
|
|
9cf0f1e0a7 | ||
|
|
660c03a564 | ||
|
|
1d49d79af2 | ||
|
|
4ccb17dde6 | ||
|
|
8bc433711d | ||
|
|
2e82bd8624 | ||
|
|
4b57d513a2 | ||
|
|
d36c3a83a9 | ||
|
|
ac3afe4dee | ||
|
|
667844aa12 | ||
|
|
6d192233d1 | ||
|
|
764ef48fd1 | ||
|
|
3f95c81243 | ||
|
|
7c385d329d | ||
|
|
553438d219 | ||
|
|
941921904c | ||
|
|
7d808ed798 | ||
|
|
7704072a65 | ||
|
|
9076baf1c8 | ||
|
|
954ff53d9b | ||
|
|
37d268bb16 | ||
|
|
5befa90f81 | ||
|
|
6e3313f00d | ||
|
|
7b5afb7afe | ||
|
|
fe13dad06f | ||
|
|
b41bad0ae2 | ||
|
|
18ef7f0272 |
@@ -3,3 +3,7 @@ linker = "aarch64-linux-gnu-gcc"
|
||||
|
||||
[target.armv7-unknown-linux-gnueabihf]
|
||||
linker = "arm-linux-gnueabihf-gcc"
|
||||
|
||||
[alias]
|
||||
clippy-all = "clippy --all-targets --all-features -- -D warnings"
|
||||
clippy-only = "clippy --all-targets --features clippy -- -D warnings"
|
||||
|
||||
101
.devcontainer/devcontainer.json
Normal file
101
.devcontainer/devcontainer.json
Normal file
@@ -0,0 +1,101 @@
|
||||
{
|
||||
"name": "Clash Verge Rev Development Environment",
|
||||
"image": "mcr.microsoft.com/devcontainers/base:ubuntu-22.04",
|
||||
|
||||
"features": {
|
||||
"ghcr.io/devcontainers/features/node:1": {
|
||||
"version": "20"
|
||||
},
|
||||
"ghcr.io/devcontainers/features/rust:1": {
|
||||
"version": "latest",
|
||||
"profile": "default"
|
||||
},
|
||||
"ghcr.io/devcontainers/features/git:1": {},
|
||||
"ghcr.io/devcontainers/features/github-cli:1": {},
|
||||
"ghcr.io/devcontainers/features/docker-in-docker:2": {}
|
||||
},
|
||||
|
||||
"customizations": {
|
||||
"vscode": {
|
||||
"extensions": [
|
||||
"rust-lang.rust-analyzer",
|
||||
"tauri-apps.tauri-vscode",
|
||||
"ms-vscode.vscode-typescript-next",
|
||||
"esbenp.prettier-vscode",
|
||||
"bradlc.vscode-tailwindcss",
|
||||
"ms-vscode.vscode-json",
|
||||
"redhat.vscode-yaml",
|
||||
"formulahendry.auto-rename-tag",
|
||||
"ms-vscode.hexeditor",
|
||||
"christian-kohler.path-intellisense",
|
||||
"yzhang.markdown-all-in-one",
|
||||
"streetsidesoftware.code-spell-checker",
|
||||
"ms-vscode.vscode-eslint"
|
||||
],
|
||||
"settings": {
|
||||
"rust-analyzer.cargo.features": ["verge-dev"],
|
||||
"rust-analyzer.check.command": "clippy",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"[rust]": {
|
||||
"editor.defaultFormatter": "rust-lang.rust-analyzer"
|
||||
},
|
||||
"[json]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[yaml]": {
|
||||
"editor.defaultFormatter": "redhat.vscode-yaml"
|
||||
},
|
||||
"[typescript]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
},
|
||||
"[typescriptreact]": {
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
"forwardPorts": [1420, 3000, 8080, 9090, 7890, 7891],
|
||||
|
||||
"portsAttributes": {
|
||||
"1420": {
|
||||
"label": "Tauri Dev Server",
|
||||
"onAutoForward": "notify"
|
||||
},
|
||||
"3000": {
|
||||
"label": "Vite Dev Server",
|
||||
"onAutoForward": "notify"
|
||||
},
|
||||
"7890": {
|
||||
"label": "Clash HTTP Proxy",
|
||||
"onAutoForward": "silent"
|
||||
},
|
||||
"7891": {
|
||||
"label": "Clash SOCKS Proxy",
|
||||
"onAutoForward": "silent"
|
||||
},
|
||||
"9090": {
|
||||
"label": "Clash API",
|
||||
"onAutoForward": "silent"
|
||||
}
|
||||
},
|
||||
|
||||
"postCreateCommand": "bash .devcontainer/post-create.sh",
|
||||
|
||||
"mounts": [
|
||||
"source=clash-verge-node-modules,target=${containerWorkspaceFolder}/node_modules,type=volume",
|
||||
"source=clash-verge-cargo-registry,target=/usr/local/cargo/registry,type=volume",
|
||||
"source=clash-verge-cargo-git,target=/usr/local/cargo/git,type=volume"
|
||||
],
|
||||
|
||||
"containerEnv": {
|
||||
"RUST_BACKTRACE": "1",
|
||||
"NODE_OPTIONS": "--max-old-space-size=4096",
|
||||
"TAURI_DEV_WATCHER_IGNORE_FILE": ".taurignore"
|
||||
},
|
||||
|
||||
"remoteUser": "vscode",
|
||||
"workspaceFolder": "/workspaces/clash-verge-rev",
|
||||
"shutdownAction": "stopContainer"
|
||||
}
|
||||
35
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
35
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
@@ -9,18 +9,21 @@ body:
|
||||
attributes:
|
||||
value: |
|
||||
## 在提交问题之前,请确认以下事项:
|
||||
|
||||
1. 请 **确保** 您已经查阅了 [Clash Verge Rev 官方文档](https://clash-verge-rev.github.io/guide/term.html) 以及 [常见问题](https://clash-verge-rev.github.io/faq/windows.html)
|
||||
2. 请 **确保** [已有的问题](https://github.com/clash-verge-rev/clash-verge-rev/issues?q=is%3Aissue) 中没有人提交过相似issue,否则请在已有的issue下进行讨论
|
||||
3. 请 **务必** 给issue填写一个简洁明了的标题,以便他人快速检索
|
||||
4. 请 **务必** 查看 [Alpha](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/alpha) 版本更新日志
|
||||
5. 请 **务必** 尝试 [Alpha](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/alpha) 版本,确定问题是否仍然存在
|
||||
4. 请 **务必** 查看 [AutoBuild](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/autobuild) 版本更新日志
|
||||
5. 请 **务必** 尝试 [AutoBuild](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/autobuild) 版本,确定问题是否仍然存在
|
||||
6. 请 **务必** 按照模板规范详细描述问题以及尝试更新 Alpha 版本,否则issue将会被直接关闭
|
||||
|
||||
## Before submitting the issue, please make sure of the following checklist:
|
||||
|
||||
1. Please make sure you have read the [Clash Verge Rev official documentation](https://clash-verge-rev.github.io/guide/term.html) and [FAQ](https://clash-verge-rev.github.io/faq/windows.html)
|
||||
2. Please make sure there is no similar issue in the [existing issues](https://github.com/clash-verge-rev/clash-verge-rev/issues?q=is%3Aissue), otherwise please discuss under the existing issue
|
||||
3. Please be sure to fill in a concise and clear title for the issue so that others can quickly search
|
||||
4. Please be sure to check out [Alpha](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/alpha) version update log
|
||||
5. Please be sure to try the [Alpha](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/alpha) version to ensure that the problem still exists
|
||||
4. Please be sure to check out [AutoBuild](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/autobuild) version update log
|
||||
5. Please be sure to try the [AutoBuild](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/autobuild) version to ensure that the problem still exists
|
||||
6. Please describe the problem in detail according to the template specification and try to update the Alpha version, otherwise the issue will be closed
|
||||
|
||||
- type: textarea
|
||||
@@ -32,8 +35,9 @@ body:
|
||||
required: true
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: 软件版本 / Verge Version
|
||||
description: 请提供Verge的具体版本,如果是alpha版本,请注明下载时间(精确到小时分钟) / Please provide the specific version of Verge. If it is an alpha version, please indicate the download time (accurate to hours and minutes)
|
||||
label: 软件版本 / CVR Version
|
||||
description: 请提供 CVR 的具体版本,如果是 AutoBuild 版本,请注明下载时间(精确到小时分钟) / Please provide the specific version of CVR. If it is an AutoBuild version, please indicate the download time (accurate to hours and minutes)
|
||||
render: text
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
@@ -42,13 +46,13 @@ body:
|
||||
description: 请提供复现问题的步骤 / Steps to reproduce the behavior
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: 操作系统 / OS
|
||||
options:
|
||||
- Windows
|
||||
- Linux
|
||||
- MacOS
|
||||
- label: Windows
|
||||
- label: Linux
|
||||
- label: MacOS
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
@@ -61,12 +65,9 @@ body:
|
||||
attributes:
|
||||
label: 日志(勿上传日志文件,请粘贴日志内容) / Log (Do not upload the log file, paste the log content directly)
|
||||
description: 请提供完整或相关部分的Debug日志(请在“软件左侧菜单”->“设置”->“日志等级”调整到debug,Verge错误请把“杂项设置”->“app日志等级”调整到debug,并重启Verge生效。日志文件在“软件左侧菜单”->“设置”->“日志目录”下) / Please provide a complete or relevant part of the Debug log (please adjust the "Log level" to debug in "Software left menu" -> "Settings" -> "Log level". If there is a Verge error, please adjust "Miscellaneous settings" -> "app log level" to debug, and restart Verge to take effect. The log file is under "Software left menu" -> "Settings" -> "Log directory")
|
||||
value: |
|
||||
<details><summary>日志内容 / Log Content</summary>
|
||||
```log
|
||||
<!-- 在此处粘贴完整日志 / Paste the full log here -->
|
||||
|
||||
```
|
||||
</details>
|
||||
placeholder: |
|
||||
日志目录一般位于 Clash Verge Rev 安装目录的 "logs/" 子目录中,请将日志内容粘贴到此处。
|
||||
Log directory is usually located in the "logs/" subdirectory of the Clash Verge Rev installation directory, please paste the log content here.
|
||||
render: log
|
||||
validations:
|
||||
required: true
|
||||
|
||||
1
.github/ISSUE_TEMPLATE/config.yml
vendored
1
.github/ISSUE_TEMPLATE/config.yml
vendored
@@ -1,3 +1,4 @@
|
||||
blank_issues_enabled: false
|
||||
contact_links:
|
||||
- name: 讨论交流 / Communication
|
||||
url: https://t.me/clash_verge_rev
|
||||
|
||||
4
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
4
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
@@ -12,13 +12,13 @@ body:
|
||||
1. 请 **确保** 您已经查阅了 [Clash Verge Rev 官方文档](https://clash-verge-rev.github.io/guide/term.html) 确认软件不存在类似的功能
|
||||
2. 请 **确保** [已有的问题](https://github.com/clash-verge-rev/clash-verge-rev/issues?q=is%3Aissue) 中没有人提交过相似issue,否则请在已有的issue下进行讨论
|
||||
3. 请 **务必** 给issue填写一个简洁明了的标题,以便他人快速检索
|
||||
4. 请 **务必** 先下载 [Alpha](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/alpha) 版本测试,确保该功能还未实现
|
||||
4. 请 **务必** 先下载 [AutoBuild](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/autobuild) 版本测试,确保该功能还未实现
|
||||
5. 请 **务必** 按照模板规范详细描述问题,否则issue将会被关闭
|
||||
## Before submitting the issue, please make sure of the following checklist:
|
||||
1. Please make sure you have read the [Clash Verge Rev official documentation](https://clash-verge-rev.github.io/guide/term.html) to confirm that the software does not have similar functions
|
||||
2. Please make sure there is no similar issue in the [existing issues](https://github.com/clash-verge-rev/clash-verge-rev/issues?q=is%3Aissue), otherwise please discuss under the existing issue
|
||||
3. Please be sure to fill in a concise and clear title for the issue so that others can quickly search
|
||||
4. Please be sure to download the [Alpha](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/alpha) version for testing to ensure that the function has not been implemented
|
||||
4. Please be sure to download the [AutoBuild](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/autobuild) version for testing to ensure that the function has not been implemented
|
||||
5. Please describe the problem in detail according to the template specification, otherwise the issue will be closed
|
||||
|
||||
- type: textarea
|
||||
|
||||
4
.github/ISSUE_TEMPLATE/i18n_request.yml
vendored
4
.github/ISSUE_TEMPLATE/i18n_request.yml
vendored
@@ -52,7 +52,7 @@ body:
|
||||
- type: input
|
||||
id: verge-version
|
||||
attributes:
|
||||
label: 软件版本 / Verge Version
|
||||
description: 请提供你使用的 Verge 具体版本 / Please provide the specific version of Verge you are using
|
||||
label: 软件版本 / CVR Version
|
||||
description: 请提供你使用的 CVR 具体版本 / Please provide the specific version of CVR you are using
|
||||
validations:
|
||||
required: true
|
||||
|
||||
10
.github/workflows/alpha.yml
vendored
10
.github/workflows/alpha.yml
vendored
@@ -25,6 +25,7 @@ env:
|
||||
TAG_CHANNEL: Alpha
|
||||
CARGO_INCREMENTAL: 0
|
||||
RUST_BACKTRACE: short
|
||||
HUSKY: 0
|
||||
concurrency:
|
||||
group: "${{ github.workflow }} - ${{ github.head_ref || github.ref }}"
|
||||
|
||||
@@ -294,6 +295,15 @@ jobs:
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libxslt1.1 libwebkit2gtk-4.1-dev libayatana-appindicator3-dev librsvg2-dev patchelf
|
||||
|
||||
- name: Install x86 OpenSSL (macOS only)
|
||||
if: matrix.target == 'x86_64-apple-darwin'
|
||||
run: |
|
||||
arch -x86_64 brew install openssl@3
|
||||
echo "OPENSSL_DIR=$(brew --prefix openssl@3)" >> $GITHUB_ENV
|
||||
echo "OPENSSL_INCLUDE_DIR=$(brew --prefix openssl@3)/include" >> $GITHUB_ENV
|
||||
echo "OPENSSL_LIB_DIR=$(brew --prefix openssl@3)/lib" >> $GITHUB_ENV
|
||||
echo "PKG_CONFIG_PATH=$(brew --prefix openssl@3)/lib/pkgconfig" >> $GITHUB_ENV
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
|
||||
133
.github/workflows/autobuild-check-test.yml
vendored
Normal file
133
.github/workflows/autobuild-check-test.yml
vendored
Normal file
@@ -0,0 +1,133 @@
|
||||
name: Autobuild Check Logic Test
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
check_autobuild_logic:
|
||||
name: Check Autobuild Should Run Logic
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 2
|
||||
|
||||
- name: Check if version or source changed, or assets already exist
|
||||
id: check
|
||||
run: |
|
||||
# # 仅用于测试逻辑,手动触发自动跳过
|
||||
# if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
|
||||
# echo "should_run=skip" >> $GITHUB_OUTPUT
|
||||
# echo "🟡 手动触发,跳过 should_run 检查"
|
||||
# exit 0
|
||||
# fi
|
||||
|
||||
# 确保有 HEAD~1
|
||||
if ! git rev-parse HEAD~1 > /dev/null 2>&1; then
|
||||
echo "should_run=true" >> $GITHUB_OUTPUT
|
||||
echo "🟢 没有前一个提交,默认需要构建"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# 版本号变更判断
|
||||
CURRENT_VERSION=$(jq -r '.version' package.json)
|
||||
PREVIOUS_VERSION=$(git show HEAD~1:package.json | jq -r '.version' 2>/dev/null || echo "")
|
||||
|
||||
if [ "$CURRENT_VERSION" != "$PREVIOUS_VERSION" ]; then
|
||||
echo "should_run=true" >> $GITHUB_OUTPUT
|
||||
echo "🟢 版本号变更: $PREVIOUS_VERSION → $CURRENT_VERSION"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# 检查 src 变更(排除常见产物与缓存)
|
||||
SRC_DIFF=$(git diff --name-only HEAD~1 HEAD -- src/ | grep -Ev '^src/(dist|build|node_modules|\.next|\.cache)' || true)
|
||||
TAURI_DIFF=$(git diff --name-only HEAD~1 HEAD -- src-tauri/ | grep -Ev '^src-tauri/(target|node_modules|dist|\.cache)' || true)
|
||||
|
||||
if [ -n "$SRC_DIFF" ] || [ -n "$TAURI_DIFF" ]; then
|
||||
echo "should_run=true" >> $GITHUB_OUTPUT
|
||||
echo "🟢 源码变更 detected"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# 找到最后一个修改 Tauri 相关文件的 commit
|
||||
echo "🔍 查找最后一个 Tauri 相关变更的 commit..."
|
||||
|
||||
LAST_TAURI_COMMIT=""
|
||||
for commit in $(git rev-list HEAD --max-count=50); do
|
||||
# 检查此 commit 是否修改了 Tauri 相关文件
|
||||
CHANGED_FILES=$(git show --name-only --pretty=format: $commit | tr '\n' ' ')
|
||||
HAS_TAURI_CHANGES=false
|
||||
|
||||
# 检查各个模式
|
||||
if echo "$CHANGED_FILES" | grep -q "src/" && echo "$CHANGED_FILES" | grep -qvE "src/(dist|build|node_modules|\.next|\.cache)"; then
|
||||
HAS_TAURI_CHANGES=true
|
||||
elif echo "$CHANGED_FILES" | grep -qE "src-tauri/(src|Cargo\.(toml|lock)|tauri\..*\.conf\.json|build\.rs|capabilities)"; then
|
||||
HAS_TAURI_CHANGES=true
|
||||
fi
|
||||
|
||||
if [ "$HAS_TAURI_CHANGES" = true ]; then
|
||||
LAST_TAURI_COMMIT=$(git rev-parse --short $commit)
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
if [ -z "$LAST_TAURI_COMMIT" ]; then
|
||||
echo "⚠️ 最近的 commits 中未找到 Tauri 相关变更,使用当前 commit"
|
||||
LAST_TAURI_COMMIT=$(git rev-parse --short HEAD)
|
||||
fi
|
||||
|
||||
CURRENT_COMMIT=$(git rev-parse --short HEAD)
|
||||
echo "📝 最后 Tauri 相关 commit: $LAST_TAURI_COMMIT"
|
||||
echo "📝 当前 commit: $CURRENT_COMMIT"
|
||||
|
||||
# 检查 autobuild release 是否存在
|
||||
AUTOBUILD_RELEASE_EXISTS=$(gh release view "autobuild" --json id -q '.id' 2>/dev/null || echo "")
|
||||
|
||||
if [ -z "$AUTOBUILD_RELEASE_EXISTS" ]; then
|
||||
echo "should_run=true" >> $GITHUB_OUTPUT
|
||||
echo "🟢 没有 autobuild release,需构建"
|
||||
else
|
||||
# 检查 latest.json 是否存在
|
||||
LATEST_JSON_EXISTS=$(gh release view "autobuild" --json assets -q '.assets[] | select(.name == "latest.json") | .name' 2>/dev/null || echo "")
|
||||
|
||||
if [ -z "$LATEST_JSON_EXISTS" ]; then
|
||||
echo "should_run=true" >> $GITHUB_OUTPUT
|
||||
echo "🟢 没有 latest.json,需构建"
|
||||
else
|
||||
# 下载并解析 latest.json 检查版本和 commit hash
|
||||
echo "📥 下载 latest.json 检查版本..."
|
||||
LATEST_JSON_URL=$(gh release view "autobuild" --json assets -q '.assets[] | select(.name == "latest.json") | .browser_download_url' 2>/dev/null)
|
||||
|
||||
if [ -n "$LATEST_JSON_URL" ]; then
|
||||
LATEST_JSON_CONTENT=$(curl -s "$LATEST_JSON_URL" 2>/dev/null || echo "")
|
||||
|
||||
if [ -n "$LATEST_JSON_CONTENT" ]; then
|
||||
LATEST_VERSION=$(echo "$LATEST_JSON_CONTENT" | jq -r '.version' 2>/dev/null || echo "")
|
||||
echo "📦 最新 autobuild 版本: $LATEST_VERSION"
|
||||
|
||||
# 从版本字符串中提取 commit hash (格式: X.Y.Z+autobuild.MMDD.commit)
|
||||
LATEST_COMMIT=$(echo "$LATEST_VERSION" | sed -n 's/.*+autobuild\.[0-9]\{4\}\.\([a-f0-9]*\)$/\1/p' || echo "")
|
||||
echo "📝 最新 autobuild commit: $LATEST_COMMIT"
|
||||
|
||||
if [ "$LAST_TAURI_COMMIT" != "$LATEST_COMMIT" ]; then
|
||||
echo "should_run=true" >> $GITHUB_OUTPUT
|
||||
echo "🟢 Tauri commit hash 不匹配 ($LAST_TAURI_COMMIT != $LATEST_COMMIT),需构建"
|
||||
else
|
||||
echo "should_run=false" >> $GITHUB_OUTPUT
|
||||
echo "🔴 相同 Tauri commit hash ($LAST_TAURI_COMMIT),不需构建"
|
||||
fi
|
||||
else
|
||||
echo "should_run=true" >> $GITHUB_OUTPUT
|
||||
echo "⚠️ 无法下载或解析 latest.json,需构建"
|
||||
fi
|
||||
else
|
||||
echo "should_run=true" >> $GITHUB_OUTPUT
|
||||
echo "⚠️ 无法获取 latest.json 下载 URL,需构建"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
- name: Output should_run result
|
||||
run: |
|
||||
echo "Result: ${{ steps.check.outputs.should_run }}"
|
||||
296
.github/workflows/autobuild.yml
vendored
296
.github/workflows/autobuild.yml
vendored
@@ -3,14 +3,15 @@ name: Auto Build
|
||||
on:
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
# UTC+8 0,6,12,18
|
||||
- cron: "0 16,22,4,10 * * *"
|
||||
# UTC+8 12:00, 18:00 -> UTC 4:00, 10:00
|
||||
- cron: "0 4,10 * * *"
|
||||
permissions: write-all
|
||||
env:
|
||||
TAG_NAME: autobuild
|
||||
TAG_CHANNEL: AutoBuild
|
||||
CARGO_INCREMENTAL: 0
|
||||
RUST_BACKTRACE: short
|
||||
HUSKY: 0
|
||||
concurrency:
|
||||
group: "${{ github.workflow }} - ${{ github.head_ref || github.ref }}"
|
||||
cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}
|
||||
@@ -18,55 +19,10 @@ concurrency:
|
||||
jobs:
|
||||
check_commit:
|
||||
name: Check Commit Needs Build
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
should_run: ${{ steps.check.outputs.should_run }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
uses: clash-verge-rev/clash-verge-rev/.github/workflows/check-commit-needs-build.yml@dev
|
||||
with:
|
||||
fetch-depth: 2
|
||||
|
||||
- name: Check if version changed or src changed
|
||||
id: check
|
||||
run: |
|
||||
if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
|
||||
echo "should_run=true" >> $GITHUB_OUTPUT
|
||||
exit 0
|
||||
fi
|
||||
|
||||
CURRENT_VERSION=$(cat package.json | jq -r '.version')
|
||||
echo "Current version: $CURRENT_VERSION"
|
||||
|
||||
git checkout HEAD~1 package.json
|
||||
PREVIOUS_VERSION=$(cat package.json | jq -r '.version')
|
||||
echo "Previous version: $PREVIOUS_VERSION"
|
||||
|
||||
git checkout HEAD package.json
|
||||
|
||||
if [ "$CURRENT_VERSION" != "$PREVIOUS_VERSION" ]; then
|
||||
echo "Version changed from $PREVIOUS_VERSION to $CURRENT_VERSION"
|
||||
echo "should_run=true" >> $GITHUB_OUTPUT
|
||||
exit 0
|
||||
fi
|
||||
|
||||
CURRENT_SRC_HASH=$(git rev-parse HEAD:src)
|
||||
PREVIOUS_SRC_HASH=$(git rev-parse HEAD~1:src 2>/dev/null || echo "")
|
||||
CURRENT_TAURI_HASH=$(git rev-parse HEAD:src-tauri 2>/dev/null || echo "")
|
||||
PREVIOUS_TAURI_HASH=$(git rev-parse HEAD~1:src-tauri 2>/dev/null || echo "")
|
||||
|
||||
echo "Current src hash: $CURRENT_SRC_HASH"
|
||||
echo "Previous src hash: $PREVIOUS_SRC_HASH"
|
||||
echo "Current tauri hash: $CURRENT_TAURI_HASH"
|
||||
echo "Previous tauri hash: $PREVIOUS_TAURI_HASH"
|
||||
|
||||
if [ "$CURRENT_SRC_HASH" != "$PREVIOUS_SRC_HASH" ] || [ "$CURRENT_TAURI_HASH" != "$PREVIOUS_TAURI_HASH" ]; then
|
||||
echo "Source directories changed"
|
||||
echo "should_run=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "Version and source directories unchanged"
|
||||
echo "should_run=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
tag_name: autobuild
|
||||
force_build: ${{ github.event_name == 'workflow_dispatch' }}
|
||||
|
||||
update_tag:
|
||||
name: Update tag
|
||||
@@ -95,9 +51,28 @@ jobs:
|
||||
fi
|
||||
shell: bash
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "22"
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Release AutoBuild Version
|
||||
run: pnpm release-version autobuild-latest
|
||||
|
||||
- name: Set Env
|
||||
run: |
|
||||
echo "BUILDTIME=$(TZ=Asia/Shanghai date)" >> $GITHUB_ENV
|
||||
VERSION=$(jq -r .version package.json)
|
||||
echo "VERSION=$VERSION" >> $GITHUB_ENV
|
||||
echo "DOWNLOAD_URL=https://github.com/clash-verge-rev/clash-verge-rev/releases/download/autobuild" >> $GITHUB_ENV
|
||||
shell: bash
|
||||
|
||||
- run: |
|
||||
@@ -111,25 +86,24 @@ jobs:
|
||||
cat > release.txt << EOF
|
||||
$UPDATE_LOGS
|
||||
|
||||
## 我应该下载哪个版本?
|
||||
|
||||
### MacOS
|
||||
- MacOS intel芯片: x64.dmg
|
||||
- MacOS apple M芯片: aarch64.dmg
|
||||
|
||||
### Linux
|
||||
- Linux 64位: amd64.deb/amd64.rpm
|
||||
- Linux arm64 architecture: arm64.deb/aarch64.rpm
|
||||
- Linux armv7架构: armhf.deb/armhfp.rpm
|
||||
## 下载地址
|
||||
|
||||
### Windows (不再支持Win7)
|
||||
#### 正常版本(推荐)
|
||||
- 64位: x64-setup.exe
|
||||
- arm64架构: arm64-setup.exe
|
||||
#### 便携版问题很多不再提供
|
||||
- [64位(常用)](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_x64-setup_windows.exe) | [ARM64(不常用)](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_arm64-setup.exe)
|
||||
|
||||
#### 内置Webview2版(体积较大,仅在企业版系统或无法安装webview2时使用)
|
||||
- 64位: x64_fixed_webview2-setup.exe
|
||||
- arm64架构: arm64_fixed_webview2-setup.exe
|
||||
- [64位](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_x64_fixed_webview2-setup.exe) | [ARM64](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_arm64_fixed_webview2-setup.exe)
|
||||
|
||||
### macOS
|
||||
- [Apple M芯片](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_aarch64_darwin.dmg) | [Intel芯片](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_x64_darwin.dmg)
|
||||
|
||||
### Linux
|
||||
#### DEB包(Debian系) 使用 apt ./路径 安装
|
||||
- [64位](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_amd64_linux.deb) | [ARM64](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_arm64.deb) | [ARMv7](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_armhf.deb)
|
||||
|
||||
#### RPM包(Redhat系) 使用 dnf ./路径 安装
|
||||
- [64位](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}-1.x86_64_linux.rpm) | [ARM64](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}-1.aarch64.rpm) | [ARMv7](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}-1.armhfp.rpm)
|
||||
|
||||
### FAQ
|
||||
- [常见问题](https://clash-verge-rev.github.io/faq/windows.html)
|
||||
@@ -152,29 +126,18 @@ jobs:
|
||||
|
||||
clean_old_assets:
|
||||
name: Clean Old Release Assets
|
||||
runs-on: ubuntu-latest
|
||||
needs: update_tag
|
||||
if: ${{ needs.update_tag.result == 'success' }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
- name: Remove old assets from release
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
TAG_NAME: ${{ env.TAG_NAME }}
|
||||
run: |
|
||||
VERSION=$(cat package.json | jq -r '.version')
|
||||
assets=$(gh release view "$TAG_NAME" --json assets -q '.assets[].name' || true)
|
||||
for asset in $assets; do
|
||||
if [[ "$asset" != *"$VERSION"* ]]; then
|
||||
echo "Deleting old asset: $asset"
|
||||
gh release delete-asset "$TAG_NAME" "$asset" -y
|
||||
fi
|
||||
done
|
||||
needs: [check_commit, update_tag]
|
||||
if: ${{ needs.check_commit.outputs.should_run == 'true' && needs.update_tag.result == 'success' }}
|
||||
|
||||
uses: clash-verge-rev/clash-verge-rev/.github/workflows/clean-old-assets.yml@dev
|
||||
with:
|
||||
tag_name: autobuild
|
||||
dry_run: false
|
||||
|
||||
autobuild-x86-windows-macos-linux:
|
||||
name: Autobuild x86 Windows, MacOS and Linux
|
||||
needs: update_tag
|
||||
needs: [check_commit, update_tag]
|
||||
if: ${{ needs.check_commit.outputs.should_run == 'true' }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
@@ -206,6 +169,8 @@ jobs:
|
||||
workspaces: src-tauri
|
||||
cache-all-crates: true
|
||||
save-if: ${{ github.ref == 'refs/heads/dev' }}
|
||||
shared-key: autobuild-${{ runner.os }}-${{ matrix.target }}
|
||||
key: ${{ runner.os }}-${{ matrix.target }}-${{ hashFiles('src-tauri/Cargo.lock') }}
|
||||
|
||||
- name: Install dependencies (ubuntu only)
|
||||
if: matrix.os == 'ubuntu-22.04'
|
||||
@@ -213,6 +178,15 @@ jobs:
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libxslt1.1 libwebkit2gtk-4.1-dev libayatana-appindicator3-dev librsvg2-dev patchelf
|
||||
|
||||
- name: Install x86 OpenSSL (macOS only)
|
||||
if: matrix.target == 'x86_64-apple-darwin'
|
||||
run: |
|
||||
arch -x86_64 brew install openssl@3
|
||||
echo "OPENSSL_DIR=$(brew --prefix openssl@3)" >> $GITHUB_ENV
|
||||
echo "OPENSSL_INCLUDE_DIR=$(brew --prefix openssl@3)/include" >> $GITHUB_ENV
|
||||
echo "OPENSSL_LIB_DIR=$(brew --prefix openssl@3)/lib" >> $GITHUB_ENV
|
||||
echo "PKG_CONFIG_PATH=$(brew --prefix openssl@3)/lib/pkgconfig" >> $GITHUB_ENV
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
with:
|
||||
@@ -224,15 +198,23 @@ jobs:
|
||||
node-version: "22"
|
||||
cache: "pnpm"
|
||||
|
||||
- name: Cache pnpm store
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ~/.pnpm-store
|
||||
key: ${{ runner.os }}-pnpm-${{ hashFiles('pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-
|
||||
|
||||
- name: Pnpm install and check
|
||||
run: |
|
||||
pnpm i
|
||||
pnpm run prebuild ${{ matrix.target }}
|
||||
|
||||
- name: Release ${{ env.TAG_CHANNEL }} Version
|
||||
run: pnpm release-version ${{ env.TAG_NAME }}
|
||||
run: pnpm release-version autobuild-latest
|
||||
|
||||
- name: Tauri build
|
||||
- name: Tauri build for Windows-macOS-Linux
|
||||
uses: tauri-apps/tauri-action@v0
|
||||
env:
|
||||
NODE_OPTIONS: "--max_old_space_size=4096"
|
||||
@@ -253,10 +235,12 @@ jobs:
|
||||
prerelease: true
|
||||
tauriScript: pnpm
|
||||
args: --target ${{ matrix.target }}
|
||||
# includeUpdaterJson: true
|
||||
|
||||
autobuild-arm-linux:
|
||||
name: Autobuild ARM Linux
|
||||
needs: update_tag
|
||||
needs: [check_commit, update_tag]
|
||||
if: ${{ needs.check_commit.outputs.should_run == 'true' }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
@@ -284,6 +268,8 @@ jobs:
|
||||
workspaces: src-tauri
|
||||
cache-all-crates: true
|
||||
save-if: ${{ github.ref == 'refs/heads/dev' }}
|
||||
shared-key: autobuild-${{ runner.os }}-${{ matrix.target }}
|
||||
key: ${{ runner.os }}-${{ matrix.target }}-${{ hashFiles('src-tauri/Cargo.lock') }}
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
@@ -296,13 +282,21 @@ jobs:
|
||||
node-version: "22"
|
||||
cache: "pnpm"
|
||||
|
||||
- name: Cache pnpm store
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ~/.pnpm-store
|
||||
key: ${{ runner.os }}-pnpm-${{ hashFiles('pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-
|
||||
|
||||
- name: Pnpm install and check
|
||||
run: |
|
||||
pnpm i
|
||||
pnpm run prebuild ${{ matrix.target }}
|
||||
|
||||
- name: Release ${{ env.TAG_CHANNEL }} Version
|
||||
run: pnpm release-version ${{ env.TAG_NAME }}
|
||||
run: pnpm release-version autobuild-latest
|
||||
|
||||
- name: Setup for linux
|
||||
run: |
|
||||
@@ -353,7 +347,7 @@ jobs:
|
||||
gcc-arm-linux-gnueabihf \
|
||||
g++-arm-linux-gnueabihf
|
||||
|
||||
- name: Build for Linux
|
||||
- name: Tauri Build for Linux
|
||||
run: |
|
||||
export PKG_CONFIG_ALLOW_CROSS=1
|
||||
if [ "${{ matrix.target }}" == "aarch64-unknown-linux-gnu" ]; then
|
||||
@@ -389,7 +383,8 @@ jobs:
|
||||
|
||||
autobuild-x86-arm-windows_webview2:
|
||||
name: Autobuild x86 and ARM Windows with WebView2
|
||||
needs: update_tag
|
||||
needs: [check_commit, update_tag]
|
||||
if: ${{ needs.check_commit.outputs.should_run == 'true' }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
@@ -414,6 +409,8 @@ jobs:
|
||||
workspaces: src-tauri
|
||||
cache-all-crates: true
|
||||
save-if: ${{ github.ref == 'refs/heads/dev' }}
|
||||
shared-key: autobuild-${{ runner.os }}-${{ matrix.target }}
|
||||
key: ${{ runner.os }}-${{ matrix.target }}-${{ hashFiles('src-tauri/Cargo.lock') }}
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
@@ -426,13 +423,21 @@ jobs:
|
||||
node-version: "22"
|
||||
cache: "pnpm"
|
||||
|
||||
- name: Cache pnpm store
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ~/.pnpm-store
|
||||
key: ${{ runner.os }}-pnpm-${{ hashFiles('pnpm-lock.yaml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-
|
||||
|
||||
- name: Pnpm install and check
|
||||
run: |
|
||||
pnpm i
|
||||
pnpm run prebuild ${{ matrix.target }}
|
||||
|
||||
- name: Release ${{ env.TAG_CHANNEL }} Version
|
||||
run: pnpm release-version ${{ env.TAG_NAME }}
|
||||
run: pnpm release-version autobuild-latest
|
||||
|
||||
- name: Download WebView2 Runtime
|
||||
run: |
|
||||
@@ -441,7 +446,7 @@ jobs:
|
||||
Remove-Item .\src-tauri\tauri.windows.conf.json
|
||||
Rename-Item .\src-tauri\webview2.${{ matrix.arch }}.json tauri.windows.conf.json
|
||||
|
||||
- name: Tauri build
|
||||
- name: Tauri build for Windows
|
||||
id: build
|
||||
uses: tauri-apps/tauri-action@v0
|
||||
env:
|
||||
@@ -452,6 +457,7 @@ jobs:
|
||||
with:
|
||||
tauriScript: pnpm
|
||||
args: --target ${{ matrix.target }}
|
||||
# includeUpdaterJson: true
|
||||
|
||||
- name: Rename
|
||||
run: |
|
||||
@@ -486,3 +492,107 @@ jobs:
|
||||
run: pnpm portable-fixed-webview2 ${{ matrix.target }} --${{ env.TAG_NAME }}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
notify-telegram:
|
||||
name: Notify Telegram
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
[
|
||||
update_tag,
|
||||
autobuild-x86-windows-macos-linux,
|
||||
autobuild-arm-linux,
|
||||
autobuild-x86-arm-windows_webview2,
|
||||
]
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Fetch UPDATE logs
|
||||
id: fetch_update_logs
|
||||
run: |
|
||||
if [ -f "UPDATELOG.md" ]; then
|
||||
UPDATE_LOGS=$(awk '/^## v/{if(flag) exit; flag=1} flag' UPDATELOG.md)
|
||||
if [ -n "$UPDATE_LOGS" ]; then
|
||||
echo "Found update logs"
|
||||
echo "UPDATE_LOGS<<EOF" >> $GITHUB_ENV
|
||||
echo "$UPDATE_LOGS" >> $GITHUB_ENV
|
||||
echo "EOF" >> $GITHUB_ENV
|
||||
else
|
||||
echo "No update sections found in UPDATELOG.md"
|
||||
fi
|
||||
else
|
||||
echo "UPDATELOG.md file not found"
|
||||
fi
|
||||
shell: bash
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "22"
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Release AutoBuild Version
|
||||
run: pnpm release-version autobuild-latest
|
||||
|
||||
- name: Get Version and Release Info
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install jq
|
||||
echo "VERSION=$(cat package.json | jq '.version' | tr -d '"')" >> $GITHUB_ENV
|
||||
echo "DOWNLOAD_URL=https://github.com/clash-verge-rev/clash-verge-rev/releases/download/autobuild" >> $GITHUB_ENV
|
||||
echo "BUILDTIME=$(TZ=Asia/Shanghai date)" >> $GITHUB_ENV
|
||||
|
||||
- name: Generate release.txt
|
||||
run: |
|
||||
if [ -z "$UPDATE_LOGS" ]; then
|
||||
echo "No update logs found, using default message"
|
||||
UPDATE_LOGS="More new features are now supported. Check for detailed changelog soon."
|
||||
else
|
||||
echo "Using found update logs"
|
||||
fi
|
||||
|
||||
cat > release.txt << EOF
|
||||
$UPDATE_LOGS
|
||||
|
||||
## 下载地址
|
||||
|
||||
### Windows (不再支持Win7)
|
||||
#### 正常版本(推荐)
|
||||
- [64位(常用)](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_x64-setup_windows.exe) | [ARM64(不常用)](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_arm64-setup_windows.exe)
|
||||
|
||||
#### 内置Webview2版(体积较大,仅在企业版系统或无法安装webview2时使用)
|
||||
- [64位](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_x64_fixed_webview2-setup.exe) | [ARM64](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_arm64_fixed_webview2-setup.exe)
|
||||
|
||||
### macOS
|
||||
- [Apple M芯片](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_aarch64_darwin.dmg) | [Intel芯片](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_x64_darwin.dmg)
|
||||
|
||||
### Linux
|
||||
#### DEB包(Debian系) 使用 apt ./路径 安装
|
||||
- [64位](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_amd64_linux.deb) | [ARM64](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_arm64.deb) | [ARMv7](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_armhf.deb)
|
||||
|
||||
#### RPM包(Redhat系) 使用 dnf ./路径 安装
|
||||
- [64位](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}-1.x86_64_linux.rpm) | [ARM64](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}-1.aarch64.rpm) | [ARMv7](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}-1.armhfp.rpm)
|
||||
|
||||
### FAQ
|
||||
- [常见问题](https://clash-verge-rev.github.io/faq/windows.html)
|
||||
|
||||
### 稳定机场VPN推荐
|
||||
- [狗狗加速](https://verge.dginv.click/#/register?code=oaxsAGo6)
|
||||
|
||||
Created at ${{ env.BUILDTIME }}.
|
||||
EOF
|
||||
|
||||
- name: Send Telegram Notification
|
||||
run: node scripts/telegram.mjs
|
||||
env:
|
||||
TELEGRAM_BOT_TOKEN: ${{ secrets.TELEGRAM_BOT_TOKEN }}
|
||||
BUILD_TYPE: autobuild
|
||||
VERSION: ${{ env.VERSION }}
|
||||
DOWNLOAD_URL: ${{ env.DOWNLOAD_URL }}
|
||||
|
||||
159
.github/workflows/check-commit-needs-build.yml
vendored
Normal file
159
.github/workflows/check-commit-needs-build.yml
vendored
Normal file
@@ -0,0 +1,159 @@
|
||||
name: Check Commit Needs Build
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
tag_name:
|
||||
description: "Release tag name to check against (default: autobuild)"
|
||||
required: false
|
||||
default: "autobuild"
|
||||
type: string
|
||||
force_build:
|
||||
description: "Force build regardless of checks"
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
workflow_call:
|
||||
inputs:
|
||||
tag_name:
|
||||
description: "Release tag name to check against (default: autobuild)"
|
||||
required: false
|
||||
default: "autobuild"
|
||||
type: string
|
||||
force_build:
|
||||
description: "Force build regardless of checks"
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
outputs:
|
||||
should_run:
|
||||
description: "Whether the build should run"
|
||||
value: ${{ jobs.check_commit.outputs.should_run }}
|
||||
last_tauri_commit:
|
||||
description: "The last commit hash with Tauri-related changes"
|
||||
value: ${{ jobs.check_commit.outputs.last_tauri_commit }}
|
||||
autobuild_version:
|
||||
description: "The generated autobuild version string"
|
||||
value: ${{ jobs.check_commit.outputs.autobuild_version }}
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
actions: read
|
||||
|
||||
env:
|
||||
TAG_NAME: ${{ inputs.tag_name || 'autobuild' }}
|
||||
|
||||
jobs:
|
||||
check_commit:
|
||||
name: Check Commit Needs Build
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
should_run: ${{ steps.check.outputs.should_run }}
|
||||
last_tauri_commit: ${{ steps.check.outputs.last_tauri_commit }}
|
||||
autobuild_version: ${{ steps.check.outputs.autobuild_version }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 50
|
||||
|
||||
- name: Check if version changed or src changed
|
||||
id: check
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
# Force build if requested
|
||||
if [ "${{ inputs.force_build }}" == "true" ]; then
|
||||
echo "🚀 Force build requested"
|
||||
echo "should_run=true" >> $GITHUB_OUTPUT
|
||||
exit 0
|
||||
fi
|
||||
|
||||
CURRENT_VERSION=$(cat package.json | jq -r '.version')
|
||||
echo "📦 Current version: $CURRENT_VERSION"
|
||||
|
||||
git checkout HEAD~1 package.json
|
||||
PREVIOUS_VERSION=$(cat package.json | jq -r '.version')
|
||||
echo "📦 Previous version: $PREVIOUS_VERSION"
|
||||
|
||||
git checkout HEAD package.json
|
||||
|
||||
if [ "$CURRENT_VERSION" != "$PREVIOUS_VERSION" ]; then
|
||||
echo "✅ Version changed from $PREVIOUS_VERSION to $CURRENT_VERSION"
|
||||
echo "should_run=true" >> $GITHUB_OUTPUT
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Use get_latest_tauri_commit.bash to find the latest Tauri-related commit
|
||||
echo "🔍 Finding last commit with Tauri-related changes using script..."
|
||||
|
||||
# Make script executable
|
||||
chmod +x scripts-workflow/get_latest_tauri_commit.bash
|
||||
|
||||
# Get the latest Tauri-related commit hash (full hash)
|
||||
LAST_TAURI_COMMIT_FULL=$(./scripts-workflow/get_latest_tauri_commit.bash)
|
||||
if [[ $? -ne 0 ]] || [[ -z "$LAST_TAURI_COMMIT_FULL" ]]; then
|
||||
echo "❌ Failed to get Tauri-related commit, using current commit"
|
||||
LAST_TAURI_COMMIT_FULL=$(git rev-parse HEAD)
|
||||
fi
|
||||
|
||||
# Get short hash for display and version tagging
|
||||
LAST_TAURI_COMMIT=$(git rev-parse --short "$LAST_TAURI_COMMIT_FULL")
|
||||
|
||||
echo "📝 Last Tauri-related commit: $LAST_TAURI_COMMIT"
|
||||
|
||||
# Generate autobuild version using autobuild-latest format
|
||||
CURRENT_BASE_VERSION=$(echo "$CURRENT_VERSION" | sed -E 's/-(alpha|beta|rc)(\.[0-9]+)?//g' | sed -E 's/\+[a-zA-Z0-9.-]+//g')
|
||||
MONTH=$(TZ=Asia/Shanghai date +%m)
|
||||
DAY=$(TZ=Asia/Shanghai date +%d)
|
||||
AUTOBUILD_VERSION="${CURRENT_BASE_VERSION}+autobuild.${MONTH}${DAY}.${LAST_TAURI_COMMIT}"
|
||||
|
||||
echo "🏷️ Autobuild version: $AUTOBUILD_VERSION"
|
||||
echo "📝 Last Tauri commit: $LAST_TAURI_COMMIT"
|
||||
|
||||
# Set outputs for other jobs to use
|
||||
echo "last_tauri_commit=$LAST_TAURI_COMMIT" >> $GITHUB_OUTPUT
|
||||
echo "autobuild_version=$AUTOBUILD_VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
# Check if autobuild release exists
|
||||
echo "🔍 Checking autobuild release and latest.json..."
|
||||
AUTOBUILD_RELEASE_EXISTS=$(gh release view "${{ env.TAG_NAME }}" --json id -q '.id' 2>/dev/null || echo "")
|
||||
|
||||
if [ -z "$AUTOBUILD_RELEASE_EXISTS" ]; then
|
||||
echo "✅ No autobuild release exists, build needed"
|
||||
echo "should_run=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
# Check if latest.json exists in the release
|
||||
LATEST_JSON_EXISTS=$(gh release view "${{ env.TAG_NAME }}" --json assets -q '.assets[] | select(.name == "latest.json") | .name' 2>/dev/null || echo "")
|
||||
|
||||
if [ -z "$LATEST_JSON_EXISTS" ]; then
|
||||
echo "✅ No latest.json found in autobuild release, build needed"
|
||||
echo "should_run=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
# Download and parse latest.json to check version and commit hash
|
||||
echo "📥 Downloading latest.json to check version..."
|
||||
LATEST_JSON_URL="https://github.com/clash-verge-rev/clash-verge-rev/releases/download/autobuild/latest.json"
|
||||
|
||||
LATEST_JSON_CONTENT=$(curl -sL "$LATEST_JSON_URL" 2>/dev/null || echo "")
|
||||
|
||||
if [ -n "$LATEST_JSON_CONTENT" ]; then
|
||||
LATEST_VERSION=$(echo "$LATEST_JSON_CONTENT" | jq -r '.version' 2>/dev/null || echo "")
|
||||
echo "📦 Latest autobuild version: $LATEST_VERSION"
|
||||
|
||||
# Extract commit hash from version string (format: X.Y.Z+autobuild.MMDD.commit)
|
||||
LATEST_COMMIT=$(echo "$LATEST_VERSION" | sed -n 's/.*+autobuild\.[0-9]\{4\}\.\([a-f0-9]*\)$/\1/p' || echo "")
|
||||
echo "📝 Latest autobuild commit: $LATEST_COMMIT"
|
||||
|
||||
if [ "$LAST_TAURI_COMMIT" != "$LATEST_COMMIT" ]; then
|
||||
echo "✅ Tauri commit hash mismatch ($LAST_TAURI_COMMIT != $LATEST_COMMIT), build needed"
|
||||
echo "should_run=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "❌ Same Tauri commit hash ($LAST_TAURI_COMMIT), no build needed"
|
||||
echo "should_run=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
else
|
||||
echo "⚠️ Failed to download or parse latest.json, build needed"
|
||||
echo "should_run=true" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
220
.github/workflows/clean-old-assets.yml
vendored
Normal file
220
.github/workflows/clean-old-assets.yml
vendored
Normal file
@@ -0,0 +1,220 @@
|
||||
name: Clean Old Assets
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
tag_name:
|
||||
description: "Release tag name to clean (default: autobuild)"
|
||||
required: false
|
||||
default: "autobuild"
|
||||
type: string
|
||||
dry_run:
|
||||
description: "Dry run mode (only show what would be deleted)"
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
workflow_call:
|
||||
inputs:
|
||||
tag_name:
|
||||
description: "Release tag name to clean (default: autobuild)"
|
||||
required: false
|
||||
default: "autobuild"
|
||||
type: string
|
||||
dry_run:
|
||||
description: "Dry run mode (only show what would be deleted)"
|
||||
required: false
|
||||
default: false
|
||||
type: boolean
|
||||
|
||||
permissions: write-all
|
||||
|
||||
env:
|
||||
TAG_NAME: ${{ inputs.tag_name || 'autobuild' }}
|
||||
TAG_CHANNEL: AutoBuild
|
||||
|
||||
jobs:
|
||||
check_current_version:
|
||||
name: Check Current Version and Commit
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
current_version: ${{ steps.check.outputs.current_version }}
|
||||
last_tauri_commit: ${{ steps.check.outputs.last_tauri_commit }}
|
||||
autobuild_version: ${{ steps.check.outputs.autobuild_version }}
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 50
|
||||
|
||||
- name: Get current version and find last Tauri commit
|
||||
id: check
|
||||
run: |
|
||||
CURRENT_VERSION=$(cat package.json | jq -r '.version')
|
||||
echo "📦 Current version: $CURRENT_VERSION"
|
||||
|
||||
# Find the last commit that changed Tauri-related files
|
||||
echo "🔍 Finding last commit with Tauri-related changes..."
|
||||
|
||||
# Define patterns for Tauri-related files
|
||||
TAURI_PATTERNS="src/ src-tauri/src src-tauri/Cargo.toml src-tauri/Cargo.lock src-tauri/tauri.*.conf.json src-tauri/build.rs src-tauri/capabilities"
|
||||
|
||||
# Get the last commit that changed any of these patterns (excluding build artifacts)
|
||||
LAST_TAURI_COMMIT=""
|
||||
for commit in $(git rev-list HEAD --max-count=50); do
|
||||
# Check if this commit changed any Tauri-related files
|
||||
CHANGED_FILES=$(git show --name-only --pretty=format: $commit | tr '\n' ' ')
|
||||
HAS_TAURI_CHANGES=false
|
||||
|
||||
# Check each pattern
|
||||
if echo "$CHANGED_FILES" | grep -q "src/" && echo "$CHANGED_FILES" | grep -qvE "src/(dist|build|node_modules|\.next|\.cache)"; then
|
||||
HAS_TAURI_CHANGES=true
|
||||
elif echo "$CHANGED_FILES" | grep -qE "src-tauri/(src|Cargo\.(toml|lock)|tauri\..*\.conf\.json|build\.rs|capabilities)"; then
|
||||
HAS_TAURI_CHANGES=true
|
||||
fi
|
||||
|
||||
if [ "$HAS_TAURI_CHANGES" = true ]; then
|
||||
LAST_TAURI_COMMIT=$(git rev-parse --short $commit)
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
if [ -z "$LAST_TAURI_COMMIT" ]; then
|
||||
echo "⚠️ No Tauri-related changes found in recent commits, using current commit"
|
||||
LAST_TAURI_COMMIT=$(git rev-parse --short HEAD)
|
||||
fi
|
||||
|
||||
echo "📝 Last Tauri-related commit: $LAST_TAURI_COMMIT"
|
||||
echo "📝 Current commit: $(git rev-parse --short HEAD)"
|
||||
|
||||
# Generate autobuild version for consistency
|
||||
CURRENT_BASE_VERSION=$(echo "$CURRENT_VERSION" | sed -E 's/-(alpha|beta|rc)(\.[0-9]+)?//g' | sed -E 's/\+[a-zA-Z0-9.-]+//g')
|
||||
MONTH=$(TZ=Asia/Shanghai date +%m)
|
||||
DAY=$(TZ=Asia/Shanghai date +%d)
|
||||
AUTOBUILD_VERSION="${CURRENT_BASE_VERSION}+autobuild.${MONTH}${DAY}.${LAST_TAURI_COMMIT}"
|
||||
|
||||
echo "🏷️ Current autobuild version: $AUTOBUILD_VERSION"
|
||||
|
||||
# Set outputs for other jobs to use
|
||||
echo "current_version=$CURRENT_VERSION" >> $GITHUB_OUTPUT
|
||||
echo "last_tauri_commit=$LAST_TAURI_COMMIT" >> $GITHUB_OUTPUT
|
||||
echo "autobuild_version=$AUTOBUILD_VERSION" >> $GITHUB_OUTPUT
|
||||
|
||||
clean_old_assets:
|
||||
name: Clean Old Release Assets
|
||||
runs-on: ubuntu-latest
|
||||
needs: check_current_version
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Clean old assets from release
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
TAG_NAME: ${{ env.TAG_NAME }}
|
||||
DRY_RUN: ${{ inputs.dry_run }}
|
||||
run: |
|
||||
# Use values from check_current_version job
|
||||
CURRENT_AUTOBUILD_VERSION="${{ needs.check_current_version.outputs.autobuild_version }}"
|
||||
LAST_TAURI_COMMIT="${{ needs.check_current_version.outputs.last_tauri_commit }}"
|
||||
CURRENT_VERSION="${{ needs.check_current_version.outputs.current_version }}"
|
||||
|
||||
echo "📦 Current version: $CURRENT_VERSION"
|
||||
echo "📦 Current autobuild version: $CURRENT_AUTOBUILD_VERSION"
|
||||
echo "📝 Last Tauri commit: $LAST_TAURI_COMMIT"
|
||||
echo "🏷️ Target tag: $TAG_NAME"
|
||||
echo "🔍 Dry run mode: $DRY_RUN"
|
||||
|
||||
# Check if release exists
|
||||
RELEASE_EXISTS=$(gh release view "$TAG_NAME" --json id -q '.id' 2>/dev/null || echo "")
|
||||
|
||||
if [ -z "$RELEASE_EXISTS" ]; then
|
||||
echo "❌ Release '$TAG_NAME' not found"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "✅ Found release '$TAG_NAME'"
|
||||
|
||||
# Get all assets
|
||||
echo "📋 Getting list of all assets..."
|
||||
assets=$(gh release view "$TAG_NAME" --json assets -q '.assets[].name' || true)
|
||||
|
||||
if [ -z "$assets" ]; then
|
||||
echo "ℹ️ No assets found in release '$TAG_NAME'"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "📋 Found assets:"
|
||||
echo "$assets" | sed 's/^/ - /'
|
||||
|
||||
# Count assets to keep and delete
|
||||
ASSETS_TO_KEEP=""
|
||||
ASSETS_TO_DELETE=""
|
||||
|
||||
for asset in $assets; do
|
||||
# Keep assets that match current autobuild version or are non-versioned files (like latest.json)
|
||||
if [[ "$asset" == *"$CURRENT_AUTOBUILD_VERSION"* ]] || [[ "$asset" == "latest.json" ]]; then
|
||||
ASSETS_TO_KEEP="$ASSETS_TO_KEEP$asset\n"
|
||||
else
|
||||
ASSETS_TO_DELETE="$ASSETS_TO_DELETE$asset\n"
|
||||
fi
|
||||
done
|
||||
|
||||
echo ""
|
||||
echo "🔒 Assets to keep (current version: $CURRENT_AUTOBUILD_VERSION):"
|
||||
if [ -n "$ASSETS_TO_KEEP" ]; then
|
||||
echo -e "$ASSETS_TO_KEEP" | grep -v '^$' | sed 's/^/ - /'
|
||||
else
|
||||
echo " - None"
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo "🗑️ Assets to delete:"
|
||||
if [ -n "$ASSETS_TO_DELETE" ]; then
|
||||
echo -e "$ASSETS_TO_DELETE" | grep -v '^$' | sed 's/^/ - /'
|
||||
else
|
||||
echo " - None"
|
||||
echo "ℹ️ No old assets to clean"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [ "$DRY_RUN" = "true" ]; then
|
||||
echo ""
|
||||
echo "🔍 DRY RUN MODE: No assets will actually be deleted"
|
||||
echo " To actually delete these assets, run this workflow again with dry_run=false"
|
||||
else
|
||||
echo ""
|
||||
echo "🗑️ Deleting old assets..."
|
||||
|
||||
DELETED_COUNT=0
|
||||
FAILED_COUNT=0
|
||||
|
||||
for asset in $assets; do
|
||||
# Skip assets that should be kept
|
||||
if [[ "$asset" == *"$CURRENT_AUTOBUILD_VERSION"* ]] || [[ "$asset" == "latest.json" ]]; then
|
||||
continue
|
||||
fi
|
||||
|
||||
echo " Deleting: $asset"
|
||||
if gh release delete-asset "$TAG_NAME" "$asset" -y 2>/dev/null; then
|
||||
DELETED_COUNT=$((DELETED_COUNT + 1))
|
||||
else
|
||||
echo " ⚠️ Failed to delete $asset"
|
||||
FAILED_COUNT=$((FAILED_COUNT + 1))
|
||||
fi
|
||||
done
|
||||
|
||||
echo ""
|
||||
echo "📊 Cleanup summary:"
|
||||
echo " - Deleted: $DELETED_COUNT assets"
|
||||
if [ $FAILED_COUNT -gt 0 ]; then
|
||||
echo " - Failed: $FAILED_COUNT assets"
|
||||
fi
|
||||
echo " - Kept: $(echo -e "$ASSETS_TO_KEEP" | grep -v '^$' | wc -l) assets"
|
||||
|
||||
if [ $FAILED_COUNT -gt 0 ]; then
|
||||
echo "⚠️ Some assets failed to delete. Please check the logs above."
|
||||
exit 1
|
||||
else
|
||||
echo "✅ Cleanup completed successfully!"
|
||||
fi
|
||||
fi
|
||||
63
.github/workflows/clippy.yml
vendored
63
.github/workflows/clippy.yml
vendored
@@ -1,63 +0,0 @@
|
||||
name: Clippy Lint
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
clippy:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: windows-latest
|
||||
target: x86_64-pc-windows-msvc
|
||||
- os: macos-latest
|
||||
target: aarch64-apple-darwin
|
||||
- os: ubuntu-22.04
|
||||
target: x86_64-unknown-linux-gnu
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Checkout Repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Install Rust Stable
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
|
||||
- name: Add Rust Target
|
||||
run: rustup target add ${{ matrix.target }}
|
||||
|
||||
- name: Rust Cache
|
||||
uses: Swatinem/rust-cache@v2
|
||||
with:
|
||||
workspaces: src-tauri
|
||||
save-if: false
|
||||
|
||||
- name: Install dependencies (ubuntu only)
|
||||
if: matrix.os == 'ubuntu-22.04'
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libxslt1.1 libwebkit2gtk-4.1-dev libayatana-appindicator3-dev librsvg2-dev patchelf
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "22"
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Pnpm install and check
|
||||
run: |
|
||||
pnpm i
|
||||
pnpm run prebuild ${{ matrix.target }}
|
||||
|
||||
- name: Build Web Assets
|
||||
run: pnpm run web:build
|
||||
env:
|
||||
NODE_OPTIONS: "--max_old_space_size=4096"
|
||||
|
||||
- name: Run Clippy
|
||||
run: cargo clippy --manifest-path src-tauri/Cargo.toml --all-targets --all-features -- -D warnings
|
||||
3
.github/workflows/cross_check.yaml
vendored
3
.github/workflows/cross_check.yaml
vendored
@@ -9,6 +9,9 @@ on:
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
env:
|
||||
HUSKY: 0
|
||||
|
||||
jobs:
|
||||
cargo-check:
|
||||
# Treat all Rust compiler warnings as errors
|
||||
|
||||
92
.github/workflows/dev.yml
vendored
92
.github/workflows/dev.yml
vendored
@@ -2,12 +2,36 @@ name: Development Test
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
run_windows:
|
||||
description: "运行 Windows"
|
||||
required: false
|
||||
type: boolean
|
||||
default: true
|
||||
run_macos_aarch64:
|
||||
description: "运行 macOS aarch64"
|
||||
required: false
|
||||
type: boolean
|
||||
default: true
|
||||
run_windows_arm64:
|
||||
description: "运行 Windows ARM64"
|
||||
required: false
|
||||
type: boolean
|
||||
default: true
|
||||
run_linux_amd64:
|
||||
description: "运行 Linux amd64"
|
||||
required: false
|
||||
type: boolean
|
||||
default: true
|
||||
|
||||
permissions: write-all
|
||||
env:
|
||||
TAG_NAME: deploytest
|
||||
TAG_CHANNEL: DeployTest
|
||||
CARGO_INCREMENTAL: 0
|
||||
RUST_BACKTRACE: short
|
||||
HUSKY: 0
|
||||
concurrency:
|
||||
# only allow per workflow per commit (and not pr) to run at a time
|
||||
group: "${{ github.workflow }} - ${{ github.head_ref || github.ref }}"
|
||||
cancel-in-progress: ${{ github.ref != 'refs/heads/main' }}
|
||||
|
||||
@@ -20,46 +44,82 @@ jobs:
|
||||
- os: windows-latest
|
||||
target: x86_64-pc-windows-msvc
|
||||
bundle: nsis
|
||||
id: windows
|
||||
input: run_windows
|
||||
- os: macos-latest
|
||||
target: aarch64-apple-darwin
|
||||
bundle: dmg
|
||||
- os: macos-latest
|
||||
target: x86_64-apple-darwin
|
||||
bundle: dmg
|
||||
id: macos-aarch64
|
||||
input: run_macos_aarch64
|
||||
- os: windows-latest
|
||||
target: aarch64-pc-windows-msvc
|
||||
bundle: nsis
|
||||
id: windows-arm64
|
||||
input: run_windows_arm64
|
||||
- os: ubuntu-22.04
|
||||
target: x86_64-unknown-linux-gnu
|
||||
bundle: deb
|
||||
id: linux-amd64
|
||||
input: run_linux_amd64
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Skip job if not selected
|
||||
if: github.event.inputs[matrix.input] != 'true'
|
||||
run: echo "Job ${{ matrix.id }} skipped as requested"
|
||||
|
||||
- name: Checkout Repository
|
||||
if: github.event.inputs[matrix.input] == 'true'
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Install Rust Stable
|
||||
if: github.event.inputs[matrix.input] == 'true'
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
|
||||
- name: Add Rust Target
|
||||
if: github.event.inputs[matrix.input] == 'true'
|
||||
run: rustup target add ${{ matrix.target }}
|
||||
|
||||
- name: Rust Cache
|
||||
if: github.event.inputs[matrix.input] == 'true'
|
||||
uses: Swatinem/rust-cache@v2
|
||||
with:
|
||||
workspaces: src-tauri
|
||||
save-if: false
|
||||
cache-all-crates: true
|
||||
shared-key: autobuild-shared
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
- name: Install dependencies (ubuntu only)
|
||||
if: matrix.os == 'ubuntu-22.04' && github.event.inputs[matrix.input] == 'true'
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libxslt1.1 libwebkit2gtk-4.1-dev libayatana-appindicator3-dev librsvg2-dev patchelf
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
if: github.event.inputs[matrix.input] == 'true'
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Install Node
|
||||
if: github.event.inputs[matrix.input] == 'true'
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
cache: "pnpm"
|
||||
|
||||
- name: Pnpm install and check
|
||||
if: github.event.inputs[matrix.input] == 'true'
|
||||
run: |
|
||||
pnpm i
|
||||
pnpm run prebuild ${{ matrix.target }}
|
||||
|
||||
- name: Release ${{ env.TAG_CHANNEL }} Version
|
||||
if: github.event.inputs[matrix.input] == 'true'
|
||||
run: pnpm release-version ${{ env.TAG_NAME }}
|
||||
|
||||
- name: Tauri build
|
||||
if: github.event.inputs[matrix.input] == 'true'
|
||||
uses: tauri-apps/tauri-action@v0
|
||||
env:
|
||||
NODE_OPTIONS: "--max_old_space_size=4096"
|
||||
@@ -76,18 +136,26 @@ jobs:
|
||||
tauriScript: pnpm
|
||||
args: --target ${{ matrix.target }} -b ${{ matrix.bundle }}
|
||||
|
||||
- name: Upload Artifacts
|
||||
if: matrix.os == 'macos-latest'
|
||||
- name: Upload Artifacts (macOS)
|
||||
if: matrix.os == 'macos-latest' && github.event.inputs[matrix.input] == 'true'
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.target }}
|
||||
path: src-tauri/target/${{ matrix.target }}/release/bundle/dmg/*.dmg
|
||||
if-no-files-found: error
|
||||
|
||||
- name: Upload Artifacts
|
||||
if: matrix.os == 'windows-latest'
|
||||
- name: Upload Artifacts (Windows)
|
||||
if: matrix.os == 'windows-latest' && github.event.inputs[matrix.input] == 'true'
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.target }}
|
||||
path: src-tauri/target/${{ matrix.target }}/release/bundle/nsis/*.exe
|
||||
if-no-files-found: error
|
||||
|
||||
- name: Upload Artifacts (Linux)
|
||||
if: matrix.os == 'ubuntu-22.04' && github.event.inputs[matrix.input] == 'true'
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: ${{ matrix.target }}
|
||||
path: src-tauri/target/${{ matrix.target }}/release/bundle/deb/*.deb
|
||||
if-no-files-found: error
|
||||
|
||||
44
.github/workflows/fmt.yml
vendored
44
.github/workflows/fmt.yml
vendored
@@ -7,31 +7,73 @@ name: Check Formatting
|
||||
on:
|
||||
pull_request:
|
||||
|
||||
env:
|
||||
HUSKY: 0
|
||||
|
||||
jobs:
|
||||
rustfmt:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check Rust changes
|
||||
id: check_rust
|
||||
uses: dorny/paths-filter@v3
|
||||
with:
|
||||
filters: |
|
||||
rust:
|
||||
- 'src-tauri/**'
|
||||
- '**/*.rs'
|
||||
|
||||
- name: Skip if no Rust changes
|
||||
if: steps.check_rust.outputs.rust != 'true'
|
||||
run: echo "No Rust changes, skipping rustfmt."
|
||||
|
||||
- name: install Rust stable and rustfmt
|
||||
if: steps.check_rust.outputs.rust == 'true'
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
components: rustfmt
|
||||
|
||||
- name: run cargo fmt
|
||||
if: steps.check_rust.outputs.rust == 'true'
|
||||
run: cargo fmt --manifest-path ./src-tauri/Cargo.toml --all -- --check
|
||||
|
||||
prettier:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check Web changes
|
||||
id: check_web
|
||||
uses: dorny/paths-filter@v3
|
||||
with:
|
||||
filters: |
|
||||
web:
|
||||
- 'src/**'
|
||||
- '**/*.js'
|
||||
- '**/*.ts'
|
||||
- '**/*.tsx'
|
||||
- '**/*.css'
|
||||
- '**/*.scss'
|
||||
- '**/*.json'
|
||||
- '**/*.md'
|
||||
- '**/*.json'
|
||||
|
||||
- name: Skip if no Web changes
|
||||
if: steps.check_web.outputs.web != 'true'
|
||||
run: echo "No web changes, skipping prettier."
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
if: steps.check_web.outputs.web == 'true'
|
||||
with:
|
||||
node-version: "lts/*"
|
||||
- run: corepack enable
|
||||
if: steps.check_web.outputs.web == 'true'
|
||||
- run: pnpm install --frozen-lockfile
|
||||
if: steps.check_web.outputs.web == 'true'
|
||||
- run: pnpm format:check
|
||||
if: steps.check_web.outputs.web == 'true'
|
||||
|
||||
# taplo:
|
||||
# name: taplo (.toml files)
|
||||
|
||||
82
.github/workflows/lint-clippy.yml
vendored
Normal file
82
.github/workflows/lint-clippy.yml
vendored
Normal file
@@ -0,0 +1,82 @@
|
||||
name: Clippy Lint
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
workflow_dispatch:
|
||||
env:
|
||||
HUSKY: 0
|
||||
|
||||
jobs:
|
||||
clippy:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- os: windows-latest
|
||||
target: x86_64-pc-windows-msvc
|
||||
- os: macos-latest
|
||||
target: aarch64-apple-darwin
|
||||
- os: ubuntu-22.04
|
||||
target: x86_64-unknown-linux-gnu
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- name: Check src-tauri changes
|
||||
if: github.event_name != 'workflow_dispatch'
|
||||
id: check_changes
|
||||
uses: dorny/paths-filter@v3
|
||||
with:
|
||||
filters: |
|
||||
rust:
|
||||
- 'src-tauri/**'
|
||||
|
||||
- name: Skip if src-tauri not changed
|
||||
if: github.event_name != 'workflow_dispatch' && steps.check_changes.outputs.rust != 'true'
|
||||
run: echo "No src-tauri changes, skipping clippy lint."
|
||||
|
||||
- name: Continue if src-tauri changed
|
||||
if: github.event_name != 'workflow_dispatch' && steps.check_changes.outputs.rust == 'true'
|
||||
run: echo "src-tauri changed, running clippy lint."
|
||||
|
||||
- name: Manual trigger - always run
|
||||
if: github.event_name == 'workflow_dispatch'
|
||||
run: |
|
||||
echo "Manual trigger detected: skipping changes check and running clippy."
|
||||
|
||||
- name: Checkout Repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Install Rust Stable
|
||||
uses: dtolnay/rust-toolchain@master
|
||||
with:
|
||||
toolchain: stable
|
||||
components: clippy
|
||||
|
||||
- name: Add Rust Target
|
||||
run: rustup target add ${{ matrix.target }}
|
||||
|
||||
- name: Rust Cache
|
||||
uses: Swatinem/rust-cache@v2
|
||||
with:
|
||||
workspaces: src-tauri
|
||||
cache-all-crates: true
|
||||
save-if: false
|
||||
shared-key: autobuild-${{ runner.os }}-${{ matrix.target }}
|
||||
key: ${{ runner.os }}-${{ matrix.target }}-${{ hashFiles('src-tauri/Cargo.lock') }}
|
||||
|
||||
- name: Install dependencies (ubuntu only)
|
||||
if: matrix.os == 'ubuntu-22.04'
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libxslt1.1 libwebkit2gtk-4.1-dev libayatana-appindicator3-dev librsvg2-dev patchelf
|
||||
|
||||
- name: Run Clippy
|
||||
working-directory: ./src-tauri
|
||||
run: cargo clippy-all
|
||||
|
||||
- name: Run Logging Check
|
||||
working-directory: ./src-tauri
|
||||
shell: bash
|
||||
run: |
|
||||
cargo install --git https://github.com/clash-verge-rev/clash-verge-logging-check.git
|
||||
clash-verge-logging-check
|
||||
255
.github/workflows/release.yml
vendored
255
.github/workflows/release.yml
vendored
@@ -5,16 +5,13 @@ on:
|
||||
# ! 不再使用 workflow_dispatch 触发。
|
||||
# workflow_dispatch:
|
||||
push:
|
||||
# 应当限制在 main 分支上触发发布。
|
||||
branches:
|
||||
- main
|
||||
# 应当限制 v*.*.* 的 tag 触发发布。
|
||||
tags:
|
||||
- "v*.*.*"
|
||||
permissions: write-all
|
||||
env:
|
||||
CARGO_INCREMENTAL: 0
|
||||
RUST_BACKTRACE: short
|
||||
HUSKY: 0
|
||||
concurrency:
|
||||
# only allow per workflow per commit (and not pr) to run at a time
|
||||
group: "${{ github.workflow }} - ${{ github.head_ref || github.ref }}"
|
||||
@@ -27,22 +24,132 @@ jobs:
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Check if tag is from main branch
|
||||
run: |
|
||||
TAG_REF="${GITHUB_REF##*/}"
|
||||
echo "Checking if tag $TAG_REF is from main branch..."
|
||||
|
||||
TAG_COMMIT=$(git rev-list -n 1 $TAG_REF)
|
||||
MAIN_COMMITS=$(git rev-list origin/main)
|
||||
|
||||
if echo "$MAIN_COMMITS" | grep -q "$TAG_COMMIT"; then
|
||||
echo "✅ Tag $TAG_REF is from main branch"
|
||||
else
|
||||
echo "❌ Tag $TAG_REF is not from main branch"
|
||||
echo "This release workflow only accepts tags from main branch."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
- name: Check tag and package.json version
|
||||
run: |
|
||||
TAG_REF="${GITHUB_REF##*/}"
|
||||
TAG_REF="${GITHUB_REF_NAME:-${GITHUB_REF##*/}}"
|
||||
echo "Current tag: $TAG_REF"
|
||||
|
||||
PKG_VERSION=$(jq -r .version package.json)
|
||||
echo "package.json version: $PKG_VERSION"
|
||||
if [[ "$TAG_REF" != "v$PKG_VERSION" ]]; then
|
||||
echo "Tag ($TAG_REF) does not match package.json version (v$PKG_VERSION)."
|
||||
|
||||
EXPECTED_TAG="v$PKG_VERSION"
|
||||
|
||||
if [[ "$TAG_REF" != "$EXPECTED_TAG" ]]; then
|
||||
echo "❌ Version mismatch:"
|
||||
echo " Git tag : $TAG_REF"
|
||||
echo " package.json : $EXPECTED_TAG"
|
||||
exit 1
|
||||
fi
|
||||
echo "Tag and package.json version are consistent."
|
||||
|
||||
echo "✅ Tag and package.json version are consistent."
|
||||
|
||||
update_tag:
|
||||
name: Update tag
|
||||
runs-on: ubuntu-latest
|
||||
needs: [release, release-for-linux-arm, release-for-fixed-webview2]
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Fetch UPDATE logs
|
||||
id: fetch_update_logs
|
||||
run: |
|
||||
if [ -f "UPDATELOG.md" ]; then
|
||||
UPDATE_LOGS=$(awk '/^## v/{if(flag) exit; flag=1} flag' UPDATELOG.md)
|
||||
if [ -n "$UPDATE_LOGS" ]; then
|
||||
echo "Found update logs"
|
||||
echo "UPDATE_LOGS<<EOF" >> $GITHUB_ENV
|
||||
echo "$UPDATE_LOGS" >> $GITHUB_ENV
|
||||
echo "EOF" >> $GITHUB_ENV
|
||||
else
|
||||
echo "No update sections found in UPDATELOG.md"
|
||||
fi
|
||||
else
|
||||
echo "UPDATELOG.md file not found"
|
||||
fi
|
||||
shell: bash
|
||||
|
||||
- name: Set Env
|
||||
run: |
|
||||
echo "BUILDTIME=$(TZ=Asia/Shanghai date)" >> $GITHUB_ENV
|
||||
TAG_REF="${GITHUB_REF##*/}"
|
||||
echo "TAG_NAME=$TAG_REF" >> $GITHUB_ENV
|
||||
VERSION=$(echo "$TAG_REF" | sed 's/^v//')
|
||||
echo "VERSION=$VERSION" >> $GITHUB_ENV
|
||||
echo "DOWNLOAD_URL=https://github.com/clash-verge-rev/clash-verge-rev/releases/download/$TAG_REF" >> $GITHUB_ENV
|
||||
shell: bash
|
||||
|
||||
- run: |
|
||||
if [ -z "$UPDATE_LOGS" ]; then
|
||||
echo "No update logs found, using default message"
|
||||
UPDATE_LOGS="More new features are now supported. Check for detailed changelog soon."
|
||||
else
|
||||
echo "Using found update logs"
|
||||
fi
|
||||
|
||||
cat > release.txt << EOF
|
||||
$UPDATE_LOGS
|
||||
|
||||
## 下载地址
|
||||
|
||||
### Windows (不再支持Win7)
|
||||
#### 正常版本(推荐)
|
||||
- [64位(常用)](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_x64-setup.exe) | [ARM64(不常用)](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_arm64-setup.exe)
|
||||
|
||||
#### 内置Webview2版(体积较大,仅在企业版系统或无法安装webview2时使用)
|
||||
- [64位](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_x64_fixed_webview2-setup.exe) | [ARM64](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_arm64_fixed_webview2-setup.exe)
|
||||
|
||||
### macOS
|
||||
- [Apple M芯片](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_aarch64.dmg) | [Intel芯片](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_x64.dmg)
|
||||
|
||||
### Linux
|
||||
#### DEB包(Debian系) 使用 apt ./路径 安装
|
||||
- [64位](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_amd64.deb) | [ARM64](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_arm64.deb) | [ARMv7](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_armhf.deb)
|
||||
|
||||
#### RPM包(Redhat系) 使用 dnf ./路径 安装
|
||||
- [64位](${{ env.DOWNLOAD_URL }}/Clash.Verge-${{ env.VERSION }}-1.x86_64.rpm) | [ARM64](${{ env.DOWNLOAD_URL }}/Clash.Verge-${{ env.VERSION }}-1.aarch64.rpm) | [ARMv7](${{ env.DOWNLOAD_URL }}/Clash.Verge-${{ env.VERSION }}-1.armhfp.rpm)
|
||||
|
||||
### FAQ
|
||||
- [常见问题](https://clash-verge-rev.github.io/faq/windows.html)
|
||||
|
||||
### 稳定机场VPN推荐
|
||||
- [狗狗加速](https://verge.dginv.click/#/register?code=oaxsAGo6)
|
||||
Created at ${{ env.BUILDTIME }}.
|
||||
EOF
|
||||
|
||||
- name: Upload Release
|
||||
uses: softprops/action-gh-release@v2
|
||||
with:
|
||||
tag_name: ${{ env.TAG_NAME }}
|
||||
name: "Clash Verge Rev ${{ env.TAG_NAME }}"
|
||||
body_path: release.txt
|
||||
draft: false
|
||||
prerelease: false
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
# generate_release_notes: true
|
||||
|
||||
release:
|
||||
name: Release Build
|
||||
needs: check_tag_version
|
||||
needs: [check_tag_version]
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
@@ -81,6 +188,15 @@ jobs:
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libxslt1.1 libwebkit2gtk-4.1-dev libayatana-appindicator3-dev librsvg2-dev patchelf
|
||||
|
||||
- name: Install x86 OpenSSL (macOS only)
|
||||
if: matrix.target == 'x86_64-apple-darwin'
|
||||
run: |
|
||||
arch -x86_64 brew install openssl@3
|
||||
echo "OPENSSL_DIR=$(brew --prefix openssl@3)" >> $GITHUB_ENV
|
||||
echo "OPENSSL_INCLUDE_DIR=$(brew --prefix openssl@3)/include" >> $GITHUB_ENV
|
||||
echo "OPENSSL_LIB_DIR=$(brew --prefix openssl@3)/lib" >> $GITHUB_ENV
|
||||
echo "PKG_CONFIG_PATH=$(brew --prefix openssl@3)/lib/pkgconfig" >> $GITHUB_ENV
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
@@ -110,14 +226,18 @@ jobs:
|
||||
APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
|
||||
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
|
||||
with:
|
||||
tagName: v__VERSION__
|
||||
releaseName: "Clash Verge Rev v__VERSION__"
|
||||
releaseBody: "More new features are now supported."
|
||||
tagName: ${{ github.ref_name }}
|
||||
releaseName: "Clash Verge Rev ${{ github.ref_name }}"
|
||||
releaseBody: "Draft release, will be updated later."
|
||||
releaseDraft: true
|
||||
prerelease: false
|
||||
tauriScript: pnpm
|
||||
args: --target ${{ matrix.target }}
|
||||
includeUpdaterJson: true
|
||||
|
||||
release-for-linux-arm:
|
||||
name: Release Build for Linux ARM
|
||||
needs: [check_tag_version]
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
@@ -232,7 +352,7 @@ jobs:
|
||||
with:
|
||||
tag_name: v${{env.VERSION}}
|
||||
name: "Clash Verge Rev v${{env.VERSION}}"
|
||||
body: "More new features are now supported."
|
||||
body: "See release notes for detailed changelog."
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
files: |
|
||||
src-tauri/target/${{ matrix.target }}/release/bundle/deb/*.deb
|
||||
@@ -240,6 +360,7 @@ jobs:
|
||||
|
||||
release-for-fixed-webview2:
|
||||
name: Release Build for Fixed WebView2
|
||||
needs: [check_tag_version]
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
@@ -323,7 +444,7 @@ jobs:
|
||||
with:
|
||||
tag_name: v${{steps.build.outputs.appVersion}}
|
||||
name: "Clash Verge Rev v${{steps.build.outputs.appVersion}}"
|
||||
body: "More new features are now supported."
|
||||
body: "See release notes for detailed changelog."
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
files: src-tauri/target/${{ matrix.target }}/release/bundle/nsis/*setup*
|
||||
|
||||
@@ -335,7 +456,7 @@ jobs:
|
||||
release-update:
|
||||
name: Release Update
|
||||
runs-on: ubuntu-latest
|
||||
needs: [release, release-for-linux-arm]
|
||||
needs: [update_tag]
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
@@ -360,7 +481,7 @@ jobs:
|
||||
|
||||
release-update-for-fixed-webview2:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [release-for-fixed-webview2]
|
||||
needs: [update_tag]
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
@@ -386,7 +507,7 @@ jobs:
|
||||
submit-to-winget:
|
||||
name: Submit to Winget
|
||||
runs-on: ubuntu-latest
|
||||
needs: [release-update]
|
||||
needs: [update_tag, release-update]
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
@@ -405,3 +526,103 @@ jobs:
|
||||
release-tag: v${{env.VERSION}}
|
||||
installers-regex: '_(arm64|x64|x86)-setup\.exe$'
|
||||
token: ${{ secrets.WINGET_TOKEN }}
|
||||
|
||||
notify-telegram:
|
||||
name: Notify Telegram
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
[
|
||||
update_tag,
|
||||
release-update,
|
||||
release-update-for-fixed-webview2,
|
||||
submit-to-winget,
|
||||
]
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Fetch UPDATE logs
|
||||
id: fetch_update_logs
|
||||
run: |
|
||||
if [ -f "UPDATELOG.md" ]; then
|
||||
UPDATE_LOGS=$(awk '/^## v/{if(flag) exit; flag=1} flag' UPDATELOG.md)
|
||||
if [ -n "$UPDATE_LOGS" ]; then
|
||||
echo "Found update logs"
|
||||
echo "UPDATE_LOGS<<EOF" >> $GITHUB_ENV
|
||||
echo "$UPDATE_LOGS" >> $GITHUB_ENV
|
||||
echo "EOF" >> $GITHUB_ENV
|
||||
else
|
||||
echo "No update sections found in UPDATELOG.md"
|
||||
fi
|
||||
else
|
||||
echo "UPDATELOG.md file not found"
|
||||
fi
|
||||
shell: bash
|
||||
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "22"
|
||||
|
||||
- uses: pnpm/action-setup@v4
|
||||
name: Install pnpm
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Install dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Get Version and Release Info
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install jq
|
||||
echo "VERSION=$(cat package.json | jq '.version' | tr -d '"')" >> $GITHUB_ENV
|
||||
echo "DOWNLOAD_URL=https://github.com/clash-verge-rev/clash-verge-rev/releases/download/v$(cat package.json | jq '.version' | tr -d '"')" >> $GITHUB_ENV
|
||||
echo "BUILDTIME=$(TZ=Asia/Shanghai date)" >> $GITHUB_ENV
|
||||
|
||||
- name: Generate release.txt
|
||||
run: |
|
||||
if [ -z "$UPDATE_LOGS" ]; then
|
||||
echo "No update logs found, using default message"
|
||||
UPDATE_LOGS="More new features are now supported. Check for detailed changelog soon."
|
||||
else
|
||||
echo "Using found update logs"
|
||||
fi
|
||||
|
||||
cat > release.txt << EOF
|
||||
$UPDATE_LOGS
|
||||
|
||||
## 下载地址
|
||||
|
||||
### Windows (不再支持Win7)
|
||||
#### 正常版本(推荐)
|
||||
- [64位(常用)](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_x64-setup.exe) | [ARM64(不常用)](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_arm64-setup.exe)
|
||||
|
||||
#### 内置Webview2版(体积较大,仅在企业版系统或无法安装webview2时使用)
|
||||
- [64位](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_x64_fixed_webview2-setup.exe) | [ARM64](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_arm64_fixed_webview2-setup.exe)
|
||||
|
||||
### macOS
|
||||
- [Apple M芯片](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_aarch64.dmg) | [Intel芯片](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_x64.dmg)
|
||||
|
||||
### Linux
|
||||
#### DEB包(Debian系) 使用 apt ./路径 安装
|
||||
- [64位](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_amd64.deb) | [ARM64](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_arm64.deb) | [ARMv7](${{ env.DOWNLOAD_URL }}/Clash.Verge_${{ env.VERSION }}_armhf.deb)
|
||||
|
||||
#### RPM包(Redhat系) 使用 dnf ./路径 安装
|
||||
- [64位](${{ env.DOWNLOAD_URL }}/Clash.Verge-${{ env.VERSION }}-1.x86_64.rpm) | [ARM64](${{ env.DOWNLOAD_URL }}/Clash.Verge-${{ env.VERSION }}-1.aarch64.rpm) | [ARMv7](${{ env.DOWNLOAD_URL }}/Clash.Verge-${{ env.VERSION }}-1.armhfp.rpm)
|
||||
|
||||
### FAQ
|
||||
- [常见问题](https://clash-verge-rev.github.io/faq/windows.html)
|
||||
|
||||
### 稳定机场VPN推荐
|
||||
- [狗狗加速](https://verge.dginv.click/#/register?code=oaxsAGo6)
|
||||
Created at ${{ env.BUILDTIME }}.
|
||||
EOF
|
||||
|
||||
- name: Send Telegram Notification
|
||||
run: node scripts/telegram.mjs
|
||||
env:
|
||||
TELEGRAM_BOT_TOKEN: ${{ secrets.TELEGRAM_BOT_TOKEN }}
|
||||
BUILD_TYPE: release
|
||||
VERSION: ${{ env.VERSION }}
|
||||
DOWNLOAD_URL: ${{ env.DOWNLOAD_URL }}
|
||||
|
||||
3
.github/workflows/updater.yml
vendored
3
.github/workflows/updater.yml
vendored
@@ -2,6 +2,9 @@ name: Updater CI
|
||||
|
||||
on: workflow_dispatch
|
||||
permissions: write-all
|
||||
env:
|
||||
HUSKY: 0
|
||||
|
||||
jobs:
|
||||
release-update:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -10,3 +10,5 @@ scripts/_env.sh
|
||||
.tool-versions
|
||||
.idea
|
||||
.old
|
||||
.eslintcache
|
||||
target
|
||||
@@ -1,26 +1,44 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
#pnpm pretty-quick --staged
|
||||
ROOT_DIR="$(git rev-parse --show-toplevel)"
|
||||
cd "$ROOT_DIR"
|
||||
|
||||
if git diff --cached --name-only | grep -q '^src/'; then
|
||||
pnpm format:check
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "Code format check failed in src/. Please fix formatting issues."
|
||||
if ! command -v pnpm >/dev/null 2>&1; then
|
||||
echo "❌ pnpm is required for pre-commit checks."
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
if git diff --cached --name-only | grep -q '^src-tauri/'; then
|
||||
echo "[pre-commit] Running lint-staged for JS/TS files..."
|
||||
pnpm exec lint-staged
|
||||
|
||||
RUST_FILES="$(git diff --cached --name-only --diff-filter=ACMR | grep -E '^src-tauri/.*\.rs$' || true)"
|
||||
if [ -n "$RUST_FILES" ]; then
|
||||
echo "[pre-commit] Formatting Rust changes with cargo fmt..."
|
||||
(
|
||||
cd src-tauri
|
||||
cargo fmt
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "rustfmt failed to format the code. Please fix the issues and try again."
|
||||
exit 1
|
||||
)
|
||||
while IFS= read -r file; do
|
||||
[ -n "$file" ] && git add "$file"
|
||||
done <<< "$RUST_FILES"
|
||||
|
||||
echo "[pre-commit] Linting Rust changes with cargo clippy..."
|
||||
(
|
||||
cd src-tauri
|
||||
cargo clippy-all
|
||||
if ! command -v clash-verge-logging-check >/dev/null 2>&1; then
|
||||
echo "[pre-commit] Installing clash-verge-logging-check..."
|
||||
cargo install --git https://github.com/clash-verge-rev/clash-verge-logging-check.git
|
||||
fi
|
||||
cd ..
|
||||
clash-verge-logging-check
|
||||
)
|
||||
fi
|
||||
|
||||
#git add .
|
||||
TS_FILES="$(git diff --cached --name-only --diff-filter=ACMR | grep -E '\.(ts|tsx)$' || true)"
|
||||
if [ -n "$TS_FILES" ]; then
|
||||
echo "[pre-commit] Running TypeScript type check..."
|
||||
pnpm typecheck
|
||||
fi
|
||||
|
||||
# 允许提交
|
||||
exit 0
|
||||
echo "[pre-commit] All checks completed successfully."
|
||||
|
||||
@@ -1,28 +1,43 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
# $1: remote name (e.g., origin)
|
||||
# $2: remote url (e.g., git@github.com:clash-verge-rev/clash-verge-rev.git)
|
||||
remote_name="${1:-origin}"
|
||||
remote_url="${2:-unknown}"
|
||||
|
||||
if git diff --cached --name-only | grep -q '^src-tauri/'; then
|
||||
cargo clippy --manifest-path ./src-tauri/Cargo.toml
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "Clippy found issues in src-tauri. Please fix them before pushing."
|
||||
ROOT_DIR="$(git rev-parse --show-toplevel)"
|
||||
cd "$ROOT_DIR"
|
||||
|
||||
if ! command -v pnpm >/dev/null 2>&1; then
|
||||
echo "❌ pnpm is required for pre-push checks."
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
# 只在 push 到 origin 并且 origin 指向目标仓库时执行格式检查
|
||||
if [ "$1" = "origin" ] && echo "$2" | grep -Eq 'github\.com[:/]+clash-verge-rev/clash-verge-rev(\.git)?$'; then
|
||||
echo "[pre-push] Detected push to origin (clash-verge-rev/clash-verge-rev)"
|
||||
echo "[pre-push] Running pnpm format:check..."
|
||||
echo "[pre-push] Preparing to push to '$remote_name' ($remote_url). Running full validation..."
|
||||
|
||||
pnpm format:check
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "❌ Code format check failed. Please fix formatting before pushing."
|
||||
exit 1
|
||||
fi
|
||||
echo "[pre-push] Checking Prettier formatting..."
|
||||
pnpm format:check
|
||||
|
||||
echo "[pre-push] Running ESLint..."
|
||||
pnpm lint
|
||||
|
||||
echo "[pre-push] Running TypeScript type checking..."
|
||||
pnpm typecheck
|
||||
|
||||
if command -v cargo >/dev/null 2>&1; then
|
||||
echo "[pre-push] Verifying Rust formatting..."
|
||||
(
|
||||
cd src-tauri
|
||||
cargo fmt --check
|
||||
)
|
||||
|
||||
echo "[pre-push] Running cargo clippy..."
|
||||
(
|
||||
cd src-tauri
|
||||
cargo clippy-all
|
||||
)
|
||||
else
|
||||
echo "[pre-push] Not pushing to target repo. Skipping format check."
|
||||
echo "[pre-push] ⚠️ cargo not found; skipping Rust checks."
|
||||
fi
|
||||
|
||||
echo "[pre-push] All checks passed."
|
||||
exit 0
|
||||
|
||||
@@ -6,3 +6,5 @@ pnpm-lock.yaml
|
||||
|
||||
src-tauri/target/
|
||||
src-tauri/gen/
|
||||
|
||||
target
|
||||
|
||||
@@ -11,6 +11,6 @@
|
||||
"arrowParens": "always",
|
||||
"proseWrap": "preserve",
|
||||
"htmlWhitespaceSensitivity": "css",
|
||||
"endOfLine": "lf",
|
||||
"endOfLine": "auto",
|
||||
"embeddedLanguageFormatting": "auto"
|
||||
}
|
||||
|
||||
@@ -2,16 +2,25 @@
|
||||
|
||||
Thank you for your interest in contributing to Clash Verge Rev! This document provides guidelines and instructions to help you set up your development environment and start contributing.
|
||||
|
||||
## Internationalization (i18n)
|
||||
|
||||
We welcome translations and improvements to existing locales. Please follow the detailed guidelines in [CONTRIBUTING_i18n.md](docs/CONTRIBUTING_i18n.md) for instructions on extracting strings, file naming conventions, testing translations, and submitting translation PRs.
|
||||
|
||||
## Development Setup
|
||||
|
||||
Before you start contributing to the project, you need to set up your development environment. Here are the steps you need to follow:
|
||||
|
||||
### Prerequisites
|
||||
|
||||
1. **Install Rust and Node.js**: Our project requires both Rust and Node.js. Please follow the instructions provided [here](https://tauri.app/v1/guides/getting-started/prerequisites) to install them on your system.
|
||||
1. **Install Rust and Node.js**: Our project requires both Rust and Node.js. Please follow the instructions provided [here](https://tauri.app/start/prerequisites/) to install them on your system.
|
||||
|
||||
### Setup for Windows Users
|
||||
|
||||
> [!NOTE]
|
||||
> **If you are using a Windows ARM device, you additionally need to install [LLVM](https://github.com/llvm/llvm-project/releases) (including clang) and set the environment variable.**
|
||||
>
|
||||
> Because the `ring` crate is compiled based on `clang` under Windows ARM.
|
||||
|
||||
If you're a Windows user, you may need to perform some additional steps:
|
||||
|
||||
- Make sure to add Rust and Node.js to your system's PATH. This is usually done during the installation process, but you can verify and manually add them if necessary.
|
||||
@@ -51,11 +60,14 @@ apt-get install -y libxslt1.1 libwebkit2gtk-4.1-dev libayatana-appindicator3-dev
|
||||
You have two options for downloading the clash binary:
|
||||
|
||||
- Automatically download it via the provided script:
|
||||
|
||||
```shell
|
||||
pnpm run prebuild
|
||||
# Use '--force' to force update to the latest version
|
||||
# pnpm run prebuild --force
|
||||
# Use '--force' or '-f' to update both the Mihomo core version
|
||||
# and the Clash Verge Rev service version to the latest available.
|
||||
pnpm run prebuild --force
|
||||
```
|
||||
|
||||
- Manually download it from the [Mihomo release](https://github.com/MetaCubeX/mihomo/releases). After downloading, rename the binary according to the [Tauri configuration](https://tauri.app/v1/api/config#bundleconfig.externalbin).
|
||||
|
||||
### Run the Development Server
|
||||
@@ -66,6 +78,8 @@ To run the development server, use the following command:
|
||||
pnpm dev
|
||||
# If an app instance already exists, use a different command
|
||||
pnpm dev:diff
|
||||
# To using tauri built-in dev tool
|
||||
pnpm dev:tauri
|
||||
```
|
||||
|
||||
### Build the Project
|
||||
|
||||
17
README.md
17
README.md
@@ -9,6 +9,15 @@
|
||||
A Clash Meta GUI based on <a href="https://github.com/tauri-apps/tauri">Tauri</a>.
|
||||
</h3>
|
||||
|
||||
<p align="center">
|
||||
Languages:
|
||||
<a href="./README.md">简体中文</a> ·
|
||||
<a href="./docs/README_en.md">English</a> ·
|
||||
<a href="./docs/README_es.md">Español</a> ·
|
||||
<a href="./docs/README_ru.md">Русский</a> ·
|
||||
<a href="./docs/README_ja.md">日本語</a>
|
||||
</p>
|
||||
|
||||
## Preview
|
||||
|
||||
| Dark | Light |
|
||||
@@ -24,10 +33,10 @@ Supports Windows (x64/x86), Linux (x64/arm64) and macOS 10.15+ (intel/apple).
|
||||
#### 我应当怎样选择发行版
|
||||
|
||||
| 版本 | 特征 | 链接 |
|
||||
| :-------- | :--------------------------------------- | :------------------------------------------------------------------------------------- |
|
||||
| :---------- | :--------------------------------------- | :------------------------------------------------------------------------------------- |
|
||||
| Stable | 正式版,高可靠性,适合日常使用。 | [Release](https://github.com/clash-verge-rev/clash-verge-rev/releases) |
|
||||
| Alpha | 早期测试版,功能未完善,可能存在缺陷。 | [Alpha](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/alpha) |
|
||||
| AutoBuild | 滚动更新版,持续集成更新,适合开发测试。 | [AutoBuild](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/autobuild) |
|
||||
| Alpha(废弃) | 测试发布流程。 | [Alpha](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/alpha) |
|
||||
| AutoBuild | 滚动更新版,适合测试反馈,可能存在缺陷。 | [AutoBuild](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/autobuild) |
|
||||
|
||||
#### 安装说明和常见问题,请到 [文档页](https://clash-verge-rev.github.io/) 查看
|
||||
|
||||
@@ -88,7 +97,7 @@ To run the development server, execute the following commands after all prerequi
|
||||
|
||||
```shell
|
||||
pnpm i
|
||||
pnpm run check
|
||||
pnpm run prebuild
|
||||
pnpm dev
|
||||
```
|
||||
|
||||
|
||||
242
UPDATELOG.md
242
UPDATELOG.md
@@ -1,3 +1,218 @@
|
||||
## v2.4.3
|
||||
|
||||
感谢 @Slinetrac, @oomeow 以及 @Lythrilla 的出色贡献
|
||||
|
||||
### 🐞 修复问题
|
||||
|
||||
- 优化服务模式重装逻辑,避免不必要的重复检查
|
||||
- 修复轻量模式退出无响应的问题
|
||||
- 修复托盘轻量模式支持退出/进入
|
||||
- 修复静默启动和自动进入轻量模式时,托盘状态刷新不再依赖窗口创建流程
|
||||
- macOS Tun/系统代理 模式下图标大小不统一
|
||||
- 托盘节点切换不再显示隐藏组
|
||||
- 修复前端 IP 检测无法使用 ipapi, ipsb 提供商
|
||||
- 修复MacOS 下 Tun开启后 系统代理无法打开的问题
|
||||
- 修复服务模式启动时,修改、生成配置文件或重启内核可能导致页面卡死的问题
|
||||
- 修复 Webdav 恢复备份不重启
|
||||
- 修复 Linux 开机后无法正常代理需要手动设置
|
||||
- 修复增加订阅或导入订阅文件时订阅页面无更新
|
||||
- 修复系统代理守卫功能不工作
|
||||
- 修复 KDE + Wayland 下多屏显示 UI 异常
|
||||
- 修复 Windows 深色模式下首次启动客户端标题栏颜色异常
|
||||
- 修复静默启动不加载完整 WebView 的问题
|
||||
- 修复 Linux WebKit 网络进程的崩溃
|
||||
- 修复无法导入订阅
|
||||
- 修复实际导入成功但显示导入失败的问题
|
||||
- 修复服务不可用时,自动关闭 Tun 模式导致应用卡死问题
|
||||
- 修复删除订阅时未能实际删除相关文件
|
||||
- 修复 macOS 连接界面显示异常
|
||||
- 修复规则配置项在不同配置文件间全局共享导致切换被重置的问题
|
||||
- 修复 Linux Wayland 下部分 GPU 可能出现的 UI 渲染问题
|
||||
- 修复自动更新使版本回退的问题
|
||||
- 修复首页自定义卡片在切换轻量模式时失效
|
||||
- 修复悬浮跳转导航失效
|
||||
- 修复小键盘热键映射错误
|
||||
- 修复前端无法及时刷新操作状态
|
||||
- 修复 macOS 从 Dock 栏退出轻量模式状态不同步
|
||||
- 修复 Linux 系统主题切换不生效
|
||||
- 修复 `允许自动更新` 字段使手动订阅刷新失效
|
||||
|
||||
<details>
|
||||
<summary><strong> ✨ 新增功能 </strong></summary>
|
||||
|
||||
- **Mihomo(Meta) 内核升级至 v1.19.15**
|
||||
- 支持前端修改日志(最大文件大小、最大保留数量)
|
||||
- 新增链式代理图形化设置功能
|
||||
- 新增系统标题栏与程序标题栏切换 (设置-页面设置-倾向系统标题栏)
|
||||
- 监听关机事件,自动关闭系统代理
|
||||
- 主界面“当前节点”卡片新增“延迟测试”按钮
|
||||
- 新增批量选择配置文件功能
|
||||
- Windows / Linux / MacOS 监听关机信号,优雅恢复网络设置
|
||||
- 新增本地备份功能
|
||||
- 主界面“当前节点”卡片新增自动延迟检测开关(默认关闭)
|
||||
- 允许独立控制订阅自动更新
|
||||
- 托盘 `更多` 中新增 `关闭所有连接` 按钮
|
||||
- 新增左侧菜单栏的排序功能(右键点击左侧菜单栏)
|
||||
- 托盘 `打开目录` 中新增 `应用日志` 和 `内核日志`
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong> 🚀 优化改进 </strong></summary>
|
||||
|
||||
- 重构并简化服务模式启动检测流程,消除重复检测
|
||||
- 重构并简化窗口创建流程
|
||||
- 重构日志系统,单个日志默认最大 10 MB
|
||||
- 优化前端资源占用
|
||||
- 改进 macOS 下系统代理设置的方法
|
||||
- 优化 TUN 模式可用性的判断
|
||||
- 移除流媒体检测的系统级提示(使用软件内通知)
|
||||
- 优化后端 i18n 资源占用
|
||||
- 改进 Linux 托盘支持并添加 `--no-tray` 选项
|
||||
- Linux 现在在新生成的配置中默认将 TUN 栈恢复为 mixed 模式
|
||||
- 为代理延迟测试的 URL 设置增加了保护以及添加了安全的备用 URL
|
||||
- 更新了 Wayland 合成器检测逻辑,从而在 Hyprland 会话中保留原生 Wayland 后端
|
||||
- 改进 Windows 和 Unix 的 服务连接方式以及权限,避免无法连接服务或内核
|
||||
- 修改内核默认日志级别为 Info
|
||||
- 支持通过桌面快捷方式重新打开应用
|
||||
- 支持订阅界面输入链接后回车导入
|
||||
- 选择按延迟排序时每次延迟测试自动刷新节点顺序
|
||||
- 配置重载失败时自动重启核心
|
||||
- 启用 TUN 前等待服务就绪
|
||||
- 卸载 TUN 时会先关闭
|
||||
- 优化应用启动页
|
||||
- 优化首页当前节点对MATCH规则的支持
|
||||
- 允许在 `界面设置` 修改 `悬浮跳转导航延迟`
|
||||
- 添加热键绑定错误的提示信息
|
||||
- 在 macOS 10.15 及更高版本默认包含 Mihomo-go122,以解决 Intel 架构 Mac 无法运行内核的问题
|
||||
- Tun 模式不可用时,禁用系统托盘的 Tun 模式菜单
|
||||
|
||||
</details>
|
||||
|
||||
## v2.4.2
|
||||
|
||||
### ✨ 新增功能
|
||||
|
||||
- 增加托盘节点选择
|
||||
|
||||
### 🚀 性能优化
|
||||
|
||||
- 优化前端首页加载速度
|
||||
- 优化前端未使用 i18n 文件缓存
|
||||
- 优化后端内存占用
|
||||
- 优化后端启动速度
|
||||
|
||||
### 🐞 修复问题
|
||||
|
||||
- 修复首页节点切换失效的问题
|
||||
- 修复和优化服务检查流程
|
||||
- 修复2.4.1引入的订阅地址重定向报错问题
|
||||
- 修复 rpm/deb 包名称问题
|
||||
- 修复托盘轻量模式状态检测异常
|
||||
- 修复通过 scheme 导入订阅崩溃
|
||||
- 修复单例检测失效
|
||||
- 修复启动阶段可能导致的无法连接内核
|
||||
- 修复导入订阅无法 Auth Basic
|
||||
|
||||
### 👙 界面样式
|
||||
|
||||
- 简化和改进代理设置样式
|
||||
|
||||
## v2.4.1
|
||||
|
||||
### 🏆 重大改进
|
||||
|
||||
- **应用响应速度提升**:采用全新异步处理架构,大幅提升应用响应速度和稳定性
|
||||
|
||||
### ✨ 新增功能
|
||||
|
||||
- **Mihomo(Meta) 内核升级至 v1.19.13**
|
||||
|
||||
### 🚀 性能优化
|
||||
|
||||
- 优化热键响应速度,提升快捷键操作体验
|
||||
- 改进服务管理响应性,减少系统服务操作等待时间
|
||||
- 提升文件和配置处理性能
|
||||
- 优化任务管理和日志记录效率
|
||||
- 优化异步内存管理,减少内存占用并提升多任务处理效率
|
||||
- 优化启动阶段初始化性能
|
||||
|
||||
### 🐞 修复问题
|
||||
|
||||
- 修复应用在某些操作中可能出现的响应延迟问题
|
||||
- 修复任务管理中的潜在并发问题
|
||||
- 修复通过托盘重启应用无法恢复
|
||||
- 修复订阅在某些情况下无法导入
|
||||
- 修复无法新建订阅时使用远程链接
|
||||
- 修复卸载服务后的 tun 开关状态问题
|
||||
- 修复页面快速切换订阅时导致崩溃
|
||||
- 修复丢失工作目录时无法恢复环境
|
||||
- 修复从轻量模式恢复导致崩溃
|
||||
|
||||
### 👙 界面样式
|
||||
|
||||
- 统一代理设置样式
|
||||
|
||||
### 🗑️ 移除内容
|
||||
|
||||
- 移除启动阶段自动清理过期订阅
|
||||
|
||||
## v2.4.0
|
||||
|
||||
**发行代号:融**
|
||||
代号释义: 「融」象征融合与贯通,寓意新版本通过全新 IPC 通信机制 将系统各部分紧密衔接,打破壁垒,实现更高效的 数据流通与全面性能优化。
|
||||
|
||||
### 🏆 重大改进
|
||||
|
||||
- **核心通信架构升级**:采用全新通信机制,提升应用性能和稳定性
|
||||
- **流量监控系统重构**:全新的流量监控界面,支持更丰富的数据展示
|
||||
- **数据缓存优化**:改进配置和节点数据缓存,提升响应速度
|
||||
|
||||
### ✨ 新增功能
|
||||
|
||||
- **Mihomo(Meta) 内核升级至 v1.19.12**
|
||||
- 新增版本信息复制按钮
|
||||
- 增强型流量监控,支持更详细的数据分析
|
||||
- 新增流量图表多种显示模式
|
||||
- 新增强制刷新配置和节点缓存功能
|
||||
- 首页流量统计支持查看刻度线详情
|
||||
|
||||
### 🚀 性能优化
|
||||
|
||||
- 全面提升数据传输和处理效率
|
||||
- 优化内存使用,减少系统资源消耗
|
||||
- 改进流量图表渲染性能
|
||||
- 优化配置和节点刷新策略,从5秒延长到60秒
|
||||
- 改进数据缓存机制,减少重复请求
|
||||
- 优化异步程序性能
|
||||
|
||||
### 🐞 修复问题
|
||||
|
||||
- 修复系统代理状态检测和显示不一致问题
|
||||
- 修复系统主题窗口颜色不一致问题
|
||||
- 修复特殊字符 URL 处理问题
|
||||
- 修复配置修改后缓存不同步问题
|
||||
- 修复 Windows 安装器自启设置问题
|
||||
- 修复 macOS 下 Dock 图标恢复窗口问题
|
||||
- 修复 Linux 下 KDE/Plasma 异常标题栏按钮
|
||||
- 修复架构升级后节点测速功能异常
|
||||
- 修复架构升级后流量统计功能异常
|
||||
- 修复架构升级后日志功能异常
|
||||
- 修复外部控制器跨域配置保存问题
|
||||
- 修复首页端口显示不一致问题
|
||||
- 修复首页流量统计刻度线显示问题
|
||||
- 修复日志页面按钮功能混淆问题
|
||||
- 修复日志等级设置保存问题
|
||||
- 修复日志等级异常过滤
|
||||
- 修复清理日志天数功能异常
|
||||
- 修复偶发性启动卡死问题
|
||||
- 修复首页虚拟网卡开关在管理模式下的状态问题
|
||||
|
||||
### 🔧 技术改进
|
||||
|
||||
- 统一使用新的内核通信方式
|
||||
- 新增外部控制器配置界面
|
||||
- 改进跨平台兼容性支持
|
||||
|
||||
## v2.3.2
|
||||
|
||||
### 🐞 修复问题
|
||||
@@ -8,18 +223,28 @@
|
||||
- 修复同时开启静默启动与自动进入轻量模式后,自动进入轻量模式失效的问题
|
||||
- 修复静默启动时托盘工具栏轻量模式开启与关闭状态的同步
|
||||
- 修复导入订阅时非 http 协议链接被错误尝试导入
|
||||
- 修复切换节点后页面长时间 loading 及缓存过期导致的数据不同步问题
|
||||
- 修复将快捷键名称更名为 `Clash Verge`之后无法删除图标和无法删除注册表
|
||||
- 修复`DNS`覆写 `fallback` `proxy server` `nameserver` `direct Nameserver` 字段支持留空
|
||||
- 修复`DNS`覆写 `nameserver-policy` 字段无法正确识别 `geo` 库
|
||||
- 修复搜索框输入特殊字符崩溃
|
||||
- 修复 Windows 下 Start UP 名称与 exe 名称不统一
|
||||
- 修复显示 Mihomo 内核日志等级应该大于设置等级
|
||||
|
||||
### ✨ 新增功能
|
||||
|
||||
- `sidecar` 模式下清理多余的内核进程,防止运行出现异常
|
||||
- 新 macOS 下 TUN 和系统代理模式托盘图标(暂测)
|
||||
- 快捷键事件通过系统通知
|
||||
- 添加外部 `cors` 控制面板
|
||||
|
||||
### 🚀 优化改进
|
||||
|
||||
- 优化重构订阅切换逻辑,可以随时中断载入过程,防止卡死
|
||||
- 引入事件驱动代理管理器,优化代理配置更新逻辑,防止卡死
|
||||
- 改进主页订阅卡流量已使用比例计算精度
|
||||
- 优化后端缓存刷新机制,支持毫秒级 TTL(默认 3000ms),减少重复请求并提升性能,切换节点时强制刷新后端数据,前端 UI 实时更新,操作更流畅
|
||||
- 解耦前端数据拉取与后端缓存刷新,提升节点切换速度和一致性
|
||||
|
||||
### 🗑️ 移除内容
|
||||
|
||||
@@ -49,7 +274,8 @@
|
||||
|
||||
- 优化 托盘 统一响应
|
||||
- 优化 静默启动+自启动轻量模式 运行方式
|
||||
- 升级依赖
|
||||
- 降低前端潜在内存泄漏风险,提升运行时性能
|
||||
- 优化 React 状态、副作用、数据获取、清理等流程。
|
||||
|
||||
## v2.3.0
|
||||
|
||||
@@ -379,7 +605,7 @@
|
||||
|
||||
- 新增窗口状态实时监控与自动保存功能
|
||||
- 增强核心配置变更时的验证与错误处理机制
|
||||
- 支持通过环境变量`CLASH_VERGE_REV_IP`自定义复制IP地址
|
||||
- 支持通过环境变量 `CLASH_VERGE_REV_IP`自定义复制IP地址
|
||||
- 添加连接表列宽持久化设置与进程过滤功能
|
||||
- 新增代理组首字母导航与动态滚动定位功能
|
||||
- 实现连接追踪暂停/恢复功能
|
||||
@@ -700,7 +926,7 @@
|
||||
- 禁用部分 Webview2 快捷键
|
||||
- 热键配置新增连接符 + 号
|
||||
- 新增部分悬浮提示按钮,用于解释说明
|
||||
- 当日志等级为`Debug`时(更改需重启软件生效),支持点击内存主动内存回收(绿色文字)
|
||||
- 当日志等级为 `Debug`时(更改需重启软件生效),支持点击内存主动内存回收(绿色文字)
|
||||
- 设置页面右上角新增 TG 频道链接
|
||||
- 各种细节优化和界面性能优化
|
||||
|
||||
@@ -740,7 +966,7 @@
|
||||
- 禁用部分 Webview2 快捷键
|
||||
- 热键配置新增连接符 + 号
|
||||
- 新增部分悬浮提示按钮,用于解释说明
|
||||
- 当日志等级为`Debug`时(更改需重启软件生效),支持点击内存主动内存回收(绿色文字)
|
||||
- 当日志等级为 `Debug`时(更改需重启软件生效),支持点击内存主动内存回收(绿色文字)
|
||||
- 设置页面右上角新增 TG 频道链接
|
||||
- 各种细节优化和界面性能优化
|
||||
|
||||
@@ -776,7 +1002,7 @@
|
||||
- 禁用部分 Webview2 快捷键
|
||||
- 热键配置新增连接符 + 号
|
||||
- 新增部分悬浮提示按钮,用于解释说明
|
||||
- 当日志等级为`Debug`时(更改需重启软件生效),支持点击内存主动内存回收(绿色文字)
|
||||
- 当日志等级为 `Debug`时(更改需重启软件生效),支持点击内存主动内存回收(绿色文字)
|
||||
- 设置页面右上角新增 TG 频道链接
|
||||
|
||||
### Bugs Fixes
|
||||
@@ -950,7 +1176,7 @@
|
||||
### Features
|
||||
|
||||
- 缓存代理组图标
|
||||
- 使用`boa_engine` 代替 `rquickjs`
|
||||
- 使用 `boa_engine` 代替 `rquickjs`
|
||||
- 支持 Linux armv7
|
||||
|
||||
### Bugs Fixes
|
||||
@@ -1015,7 +1241,7 @@
|
||||
- 支持自定义托盘图标
|
||||
- 支持禁用代理组图标
|
||||
- 代理组显示当前代理
|
||||
- 修改 `打开面板` 快捷键为`打开/关闭面板`
|
||||
- 修改 `打开面板` 快捷键为 `打开/关闭面板`
|
||||
|
||||
---
|
||||
|
||||
@@ -1179,7 +1405,7 @@
|
||||
|
||||
### Bugs Fixes
|
||||
|
||||
- Windows 下更新时无法覆盖`clash-verge-service.exe`的问题(需要卸载重装一次服务,下次更新生效)
|
||||
- Windows 下更新时无法覆盖 `clash-verge-service.exe`的问题(需要卸载重装一次服务,下次更新生效)
|
||||
- 窗口最大化按钮变化问题
|
||||
- 窗口尺寸保存错误问题
|
||||
- 复制环境变量类型无法切换问题
|
||||
|
||||
81
docs/CONTRIBUTING_i18n.md
Normal file
81
docs/CONTRIBUTING_i18n.md
Normal file
@@ -0,0 +1,81 @@
|
||||
# CONTRIBUTING — i18n
|
||||
|
||||
Thank you for considering contributing to our localization work — your help is appreciated.
|
||||
|
||||
Quick overview
|
||||
|
||||
- cvr-i18 is a CLI that helps manage simple top-level JSON locale files:
|
||||
- Detect duplicated top-level keys
|
||||
- Find keys missing versus a base file (default: en.json)
|
||||
- Export missing entries for translators
|
||||
- Reorder keys to match the base file for predictable diffs
|
||||
- Operate on a directory or a single file
|
||||
|
||||
Get the CLI (No binary provided yet)
|
||||
|
||||
```bash
|
||||
git clone https://github.com/clash-verge-rev/clash-verge-rev-i18n-cli
|
||||
cd clash-verge-rev-i18n-cli
|
||||
cargo install --path .
|
||||
# or
|
||||
cargo install --git https://github.com/clash-verge-rev/clash-verge-rev-i18n-cli
|
||||
```
|
||||
|
||||
Common commands
|
||||
|
||||
- Show help: `cvr-i18`
|
||||
- Directory (auto-detects `./locales` or `./src/locales`): `cvr-i18 -d /path/to/locales`
|
||||
- Check duplicates: `cvr-i18 -k`
|
||||
- Check missing keys: `cvr-i18 -m`
|
||||
- Export missing keys: `cvr-i18 -m -e ./exports`
|
||||
- Sort keys to base file: `cvr-i18 -s`
|
||||
- Use a base file: `cvr-i18 -b base.json`
|
||||
- Single file: `cvr-i18 -f locales/zh.json`
|
||||
|
||||
Options (short)
|
||||
|
||||
- `-d, --directory <DIR>`
|
||||
- `-f, --file <FILE>`
|
||||
- `-k, --duplicated-key`
|
||||
- `-m, --missing-key`
|
||||
- `-e, --export <DIR>`
|
||||
- `-s, --sort`
|
||||
- `-b, --base <FILE>`
|
||||
|
||||
Exit codes
|
||||
|
||||
- `0` — success (no issues)
|
||||
- `1` — issues found (duplicates/missing)
|
||||
- `2` — error (IO/parse/runtime)
|
||||
|
||||
How to contribute (recommended steps)
|
||||
|
||||
- Start small: fix typos, improve phrasing, or refine tone and consistency.
|
||||
- Run the CLI against your locale files to detect duplicates or missing keys.
|
||||
- Export starter JSONs for translators with `-m -e <DIR>`.
|
||||
- Prefer incremental PRs or draft PRs; leave a comment on the issue if you want guidance.
|
||||
- Open an issue to report missing strings, UI context, or localization bugs.
|
||||
- Add or improve docs and tests to make future contributions easier.
|
||||
|
||||
PR checklist
|
||||
|
||||
- Keep JSON files UTF-8 encoded.
|
||||
- Follow the repo’s locale file structure and naming conventions.
|
||||
- Reorder keys to match the base file (`-s`) for minimal diffs.
|
||||
- Test translations in a local dev build before opening a PR.
|
||||
- Reference related issues and explain any context for translations or changes.
|
||||
|
||||
Notes
|
||||
|
||||
- The tool expects simple top-level JSON key/value maps.
|
||||
- Exported JSONs are starter files for translators (fill in values, keep keys).
|
||||
- Sorting keeps diffs consistent and reviewable.
|
||||
|
||||
Repository
|
||||
https://github.com/clash-verge-rev/clash-verge-rev-i18n-cli
|
||||
|
||||
## Feedback & Contributions
|
||||
|
||||
- For tool usage issues or feedback: please open an Issue in the [repository](https://github.com/clash-verge-rev/clash-verge-rev-i18n-cli) so it can be tracked and addressed.
|
||||
- For localization contributions (translations, fixes, context notes, etc.): submit a PR or Issue in this repository and include examples, context, and testing instructions when possible.
|
||||
- If you need help or a review, leave a comment on your submission requesting assistance.
|
||||
124
docs/README_en.md
Normal file
124
docs/README_en.md
Normal file
@@ -0,0 +1,124 @@
|
||||
<h1 align="center">
|
||||
<img src="../src-tauri/icons/icon.png" alt="Clash" width="128" />
|
||||
<br>
|
||||
Continuation of <a href="https://github.com/zzzgydi/clash-verge">Clash Verge</a>
|
||||
<br>
|
||||
</h1>
|
||||
|
||||
<h3 align="center">
|
||||
A Clash Meta GUI built with <a href="https://github.com/tauri-apps/tauri">Tauri</a>.
|
||||
</h3>
|
||||
|
||||
<p align="center">
|
||||
Languages:
|
||||
<a href="../README.md">简体中文</a> ·
|
||||
<a href="./README_en.md">English</a> ·
|
||||
<a href="./README_es.md">Español</a> ·
|
||||
<a href="./README_ru.md">Русский</a> ·
|
||||
<a href="./README_ja.md">日本語</a>
|
||||
</p>
|
||||
|
||||
## Preview
|
||||
|
||||
| Dark | Light |
|
||||
| ----------------------------------- | ------------------------------------- |
|
||||
|  |  |
|
||||
|
||||
## Install
|
||||
|
||||
Visit the [Release page](https://github.com/clash-verge-rev/clash-verge-rev/releases) to download the installer that matches your platform.<br>
|
||||
We provide packages for Windows (x64/x86), Linux (x64/arm64), and macOS 10.15+ (Intel/Apple).
|
||||
|
||||
#### Choosing a Release Channel
|
||||
|
||||
| Channel | Description | Link |
|
||||
| :---------- | :-------------------------------------------------------------------- | :------------------------------------------------------------------------------------- |
|
||||
| Stable | Official builds with high reliability, ideal for daily use. | [Release](https://github.com/clash-verge-rev/clash-verge-rev/releases) |
|
||||
| Alpha (EOL) | Legacy builds used to validate the publish pipeline. | [Alpha](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/alpha) |
|
||||
| AutoBuild | Rolling builds for testing and feedback. Expect experimental changes. | [AutoBuild](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/autobuild) |
|
||||
|
||||
#### Installation Guides & FAQ
|
||||
|
||||
Read the [project documentation](https://clash-verge-rev.github.io/) for install steps, troubleshooting, and frequently asked questions.
|
||||
|
||||
---
|
||||
|
||||
### Telegram Channel
|
||||
|
||||
Join [@clash_verge_rev](https://t.me/clash_verge_re) for update announcements.
|
||||
|
||||
## Promotion
|
||||
|
||||
#### [Doggygo VPN — Performance-oriented global accelerator](https://verge.dginv.click/#/register?code=oaxsAGo6)
|
||||
|
||||
- High-performance overseas network service with free trials, discounted plans, streaming unlocks, and first-class Hysteria protocol support.
|
||||
- Register through the exclusive Clash Verge link to get a 3-day trial with 1 GB of traffic per day: [Sign up](https://verge.dginv.click/#/register?code=oaxsAGo6)
|
||||
- Exclusive 20% off coupon for Clash Verge users: `verge20` (limited to 500 uses)
|
||||
- Discounted bundle from ¥15.8 per month for 160 GB, plus an additional 20% off for yearly billing
|
||||
- Operated by an overseas team with reliable service and up to 50% revenue share
|
||||
- Load-balanced clusters with high-speed dedicated routes (compatible with legacy clients), exceptionally low latency, smooth 4K playback
|
||||
- First global provider to support the `Hysteria2` protocol—perfect fit for the Clash Verge client
|
||||
- Supports streaming services and ChatGPT access
|
||||
- Official site: [https://狗狗加速.com](https://verge.dginv.click/#/register?code=oaxsAGo6)
|
||||
|
||||
#### Build Infrastructure Sponsor — [YXVM Dedicated Servers](https://yxvm.com/aff.php?aff=827)
|
||||
|
||||
Our builds and releases run on YXVM dedicated servers that deliver premium resources, strong performance, and high-speed networking. If downloads feel fast and usage feels snappy, it is thanks to robust hardware.
|
||||
|
||||
🧩 Highlights of YXVM Dedicated Servers:
|
||||
|
||||
- 🌎 Optimized global routes for dramatically faster downloads
|
||||
- 🔧 Bare-metal resources instead of shared VPS capacity for maximum performance
|
||||
- 🧠 Great for proxy workloads, hosting web/CDN services, CI/CD pipelines, or any high-load tasks
|
||||
- 💡 Ready to use instantly with multiple datacenter options, including CN2 and IEPL
|
||||
- 📦 The configuration used by this project is on sale—feel free to get the same setup
|
||||
- 🎯 Want the same build environment? [Order a YXVM server today](https://yxvm.com/aff.php?aff=827)
|
||||
|
||||
## Features
|
||||
|
||||
- Built on high-performance Rust with the Tauri 2 framework
|
||||
- Ships with the embedded [Clash.Meta (mihomo)](https://github.com/MetaCubeX/mihomo) core and supports switching to the `Alpha` channel
|
||||
- Clean, polished UI with theme color controls, proxy group/tray icons, and `CSS Injection`
|
||||
- Enhanced profile management (Merge and Script helpers) with configuration syntax hints
|
||||
- System proxy controls, guard mode, and `TUN` (virtual network adapter) support
|
||||
- Visual editors for nodes and rules
|
||||
- WebDAV-based backup and sync for configurations
|
||||
|
||||
### FAQ
|
||||
|
||||
See the [FAQ page](https://clash-verge-rev.github.io/faq/windows.html) for platform-specific guidance.
|
||||
|
||||
### Donation
|
||||
|
||||
[Support Clash Verge Rev development](https://github.com/sponsors/clash-verge-rev)
|
||||
|
||||
## Development
|
||||
|
||||
See [CONTRIBUTING.md](../CONTRIBUTING.md) for detailed contribution guidelines.
|
||||
|
||||
After installing all **Tauri** prerequisites, run the development shell with:
|
||||
|
||||
```shell
|
||||
pnpm i
|
||||
pnpm run prebuild
|
||||
pnpm dev
|
||||
```
|
||||
|
||||
## Contributions
|
||||
|
||||
Issues and pull requests are welcome!
|
||||
|
||||
## Acknowledgement
|
||||
|
||||
Clash Verge Rev builds on or draws inspiration from these projects:
|
||||
|
||||
- [zzzgydi/clash-verge](https://github.com/zzzgydi/clash-verge): A Tauri-based Clash GUI for Windows, macOS, and Linux.
|
||||
- [tauri-apps/tauri](https://github.com/tauri-apps/tauri): Build smaller, faster, more secure desktop apps with a web frontend.
|
||||
- [Dreamacro/clash](https://github.com/Dreamacro/clash): A rule-based tunnel written in Go.
|
||||
- [MetaCubeX/mihomo](https://github.com/MetaCubeX/mihomo): A rule-based tunnel written in Go.
|
||||
- [Fndroid/clash_for_windows_pkg](https://github.com/Fndroid/clash_for_windows_pkg): A Clash GUI for Windows and macOS.
|
||||
- [vitejs/vite](https://github.com/vitejs/vite): Next-generation frontend tooling with blazing-fast DX.
|
||||
|
||||
## License
|
||||
|
||||
GPL-3.0 License. See the [license file](../LICENSE) for details.
|
||||
124
docs/README_es.md
Normal file
124
docs/README_es.md
Normal file
@@ -0,0 +1,124 @@
|
||||
<h1 align="center">
|
||||
<img src="../src-tauri/icons/icon.png" alt="Clash" width="128" />
|
||||
<br>
|
||||
Continuación de <a href="https://github.com/zzzgydi/clash-verge">Clash Verge</a>
|
||||
<br>
|
||||
</h1>
|
||||
|
||||
<h3 align="center">
|
||||
Una interfaz gráfica para Clash Meta construida con <a href="https://github.com/tauri-apps/tauri">Tauri</a>.
|
||||
</h3>
|
||||
|
||||
<p align="center">
|
||||
Idiomas:
|
||||
<a href="../README.md">简体中文</a> ·
|
||||
<a href="./README_en.md">English</a> ·
|
||||
<a href="./README_es.md">Español</a> ·
|
||||
<a href="./README_ru.md">Русский</a> ·
|
||||
<a href="./README_ja.md">日本語</a>
|
||||
</p>
|
||||
|
||||
## Vista previa
|
||||
|
||||
| Oscuro | Claro |
|
||||
| ----------------------------------- | ----------------------------------- |
|
||||
|  |  |
|
||||
|
||||
## Instalación
|
||||
|
||||
Visita la [página de lanzamientos](https://github.com/clash-verge-rev/clash-verge-rev/releases) y descarga el instalador que corresponda a tu plataforma.<br>
|
||||
Ofrecemos paquetes para Windows (x64/x86), Linux (x64/arm64) y macOS 10.15+ (Intel/Apple).
|
||||
|
||||
#### Cómo elegir el canal de lanzamiento
|
||||
|
||||
| Canal | Descripción | Enlace |
|
||||
| :---------- | :----------------------------------------------------------------------------- | :------------------------------------------------------------------------------------- |
|
||||
| Stable | Compilaciones oficiales de alta fiabilidad; ideales para el uso diario. | [Release](https://github.com/clash-verge-rev/clash-verge-rev/releases) |
|
||||
| Alpha (EOL) | Compilaciones heredadas usadas para validar el flujo de publicación. | [Alpha](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/alpha) |
|
||||
| AutoBuild | Compilaciones continuas para pruebas y retroalimentación. Espera cambios beta. | [AutoBuild](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/autobuild) |
|
||||
|
||||
#### Guías de instalación y preguntas frecuentes
|
||||
|
||||
Consulta la [documentación del proyecto](https://clash-verge-rev.github.io/) para encontrar los pasos de instalación, solución de problemas y preguntas frecuentes.
|
||||
|
||||
---
|
||||
|
||||
### Canal de Telegram
|
||||
|
||||
Únete a [@clash_verge_rev](https://t.me/clash_verge_re) para enterarte de las novedades.
|
||||
|
||||
## Promociones
|
||||
|
||||
#### [Doggygo VPN — Acelerador global orientado al rendimiento](https://verge.dginv.click/#/register?code=oaxsAGo6)
|
||||
|
||||
- Servicio internacional de alto rendimiento con prueba gratuita, planes con descuento, desbloqueo de streaming y soporte de protocolo Hysteria de primera clase.
|
||||
- Regístrate mediante el enlace exclusivo de Clash Verge y obtén una prueba de 3 días con 1 GB de tráfico diario: [Regístrate](https://verge.dginv.click/#/register?code=oaxsAGo6)
|
||||
- Cupón exclusivo de 20% de descuento para usuarios de Clash Verge: `verge20` (limitado a 500 usos)
|
||||
- Plan promocional desde ¥15.8 al mes con 160 GB, más 20% de descuento adicional por pago anual
|
||||
- Equipo ubicado en el extranjero para un servicio confiable, con hasta 50% de comisión compartida
|
||||
- Clústeres balanceados con rutas dedicadas de alta velocidad (compatibles con clientes antiguos), latencia extremadamente baja, reproducción 4K sin interrupciones
|
||||
- Primer proveedor global que soporta el protocolo `Hysteria2`, ideal para el cliente Clash Verge
|
||||
- Desbloquea servicios de streaming y acceso a ChatGPT
|
||||
- Sitio oficial: [https://狗狗加速.com](https://verge.dginv.click/#/register?code=oaxsAGo6)
|
||||
|
||||
#### Patrocinador de la infraestructura de compilación — [Servidores dedicados YXVM](https://yxvm.com/aff.php?aff=827)
|
||||
|
||||
Las compilaciones y lanzamientos del proyecto se ejecutan en servidores dedicados de YXVM, que proporcionan recursos premium, alto rendimiento y redes de alta velocidad. Si las descargas son rápidas y el uso es fluido, es gracias a este hardware robusto.
|
||||
|
||||
🧩 Ventajas de los servidores dedicados YXVM:
|
||||
|
||||
- 🌎 Rutas globales optimizadas para descargas significativamente más rápidas
|
||||
- 🔧 Recursos bare-metal, en lugar de VPS compartidos, para obtener el máximo rendimiento
|
||||
- 🧠 Ideales para proxys, alojamiento de sitios web/CDN, pipelines de CI/CD o cualquier carga elevada
|
||||
- 💡 Listos para usar al instante, con múltiples centros de datos disponibles (incluidos CN2 e IEPL)
|
||||
- 📦 La misma configuración utilizada por este proyecto está disponible para su compra
|
||||
- 🎯 ¿Quieres el mismo entorno de compilación? [Solicita un servidor YXVM hoy](https://yxvm.com/aff.php?aff=827)
|
||||
|
||||
## Funciones
|
||||
|
||||
- Basado en Rust de alto rendimiento y en el framework Tauri 2
|
||||
- Incluye el núcleo integrado [Clash.Meta (mihomo)](https://github.com/MetaCubeX/mihomo) y permite cambiar al canal `Alpha`
|
||||
- Interfaz limpia y elegante con controles de color de tema, iconos de grupos proxy/bandeja y `CSS Injection`
|
||||
- Gestión avanzada de perfiles (herramientas Merge y Script) con sugerencias de sintaxis para configuraciones
|
||||
- Control del proxy del sistema, modo guardián y soporte para `TUN` (adaptador de red virtual)
|
||||
- Editores visuales para nodos y reglas
|
||||
- Copias de seguridad y sincronización mediante WebDAV
|
||||
|
||||
### Preguntas frecuentes
|
||||
|
||||
Visita la [página de FAQ](https://clash-verge-rev.github.io/faq/windows.html) para obtener instrucciones específicas por plataforma.
|
||||
|
||||
### Donaciones
|
||||
|
||||
[Apoya el desarrollo de Clash Verge Rev](https://github.com/sponsors/clash-verge-rev)
|
||||
|
||||
## Desarrollo
|
||||
|
||||
Consulta [CONTRIBUTING.md](../CONTRIBUTING.md) para conocer las pautas de contribución.
|
||||
|
||||
Después de instalar todos los requisitos de **Tauri**, ejecuta el entorno de desarrollo con:
|
||||
|
||||
```shell
|
||||
pnpm i
|
||||
pnpm run prebuild
|
||||
pnpm dev
|
||||
```
|
||||
|
||||
## Contribuciones
|
||||
|
||||
Se agradecen los issues y pull requests.
|
||||
|
||||
## Agradecimientos
|
||||
|
||||
Clash Verge Rev se basa en, o se inspira en, los siguientes proyectos:
|
||||
|
||||
- [zzzgydi/clash-verge](https://github.com/zzzgydi/clash-verge): Interfaz gráfica para Clash basada en Tauri. Compatible con Windows, macOS y Linux.
|
||||
- [tauri-apps/tauri](https://github.com/tauri-apps/tauri): Construye aplicaciones de escritorio más pequeñas, rápidas y seguras con un frontend web.
|
||||
- [Dreamacro/clash](https://github.com/Dreamacro/clash): Túnel basado en reglas escrito en Go.
|
||||
- [MetaCubeX/mihomo](https://github.com/MetaCubeX/mihomo): Túnel basado en reglas escrito en Go.
|
||||
- [Fndroid/clash_for_windows_pkg](https://github.com/Fndroid/clash_for_windows_pkg): Interfaz de Clash para Windows y macOS.
|
||||
- [vitejs/vite](https://github.com/vitejs/vite): Herramientas de frontend de nueva generación con una experiencia rapidísima.
|
||||
|
||||
## Licencia
|
||||
|
||||
Licencia GPL-3.0. Consulta el [archivo de licencia](../LICENSE) para más detalles.
|
||||
124
docs/README_ja.md
Normal file
124
docs/README_ja.md
Normal file
@@ -0,0 +1,124 @@
|
||||
<h1 align="center">
|
||||
<img src="../src-tauri/icons/icon.png" alt="Clash" width="128" />
|
||||
<br>
|
||||
<a href="https://github.com/zzzgydi/clash-verge">Clash Verge</a> の継続プロジェクト
|
||||
<br>
|
||||
</h1>
|
||||
|
||||
<h3 align="center">
|
||||
<a href="https://github.com/tauri-apps/tauri">Tauri</a> で構築された Clash Meta GUI。
|
||||
</h3>
|
||||
|
||||
<p align="center">
|
||||
言語:
|
||||
<a href="../README.md">简体中文</a> ·
|
||||
<a href="./README_en.md">English</a> ·
|
||||
<a href="./README_es.md">Español</a> ·
|
||||
<a href="./README_ru.md">Русский</a> ·
|
||||
<a href="./README_ja.md">日本語</a>
|
||||
</p>
|
||||
|
||||
## プレビュー
|
||||
|
||||
| ダーク | ライト |
|
||||
| --------------------------------------- | ---------------------------------------- |
|
||||
|  |  |
|
||||
|
||||
## インストール
|
||||
|
||||
[リリースページ](https://github.com/clash-verge-rev/clash-verge-rev/releases) から、ご利用のプラットフォームに対応したインストーラーをダウンロードしてください。<br>
|
||||
Windows (x64/x86)、Linux (x64/arm64)、macOS 10.15+ (Intel/Apple) をサポートしています。
|
||||
|
||||
#### リリースチャンネルの選び方
|
||||
|
||||
| チャンネル | 説明 | リンク |
|
||||
| :---------- | :--------------------------------------------------------------- | :------------------------------------------------------------------------------------- |
|
||||
| Stable | 安定版。信頼性が高く、日常利用に最適です。 | [Release](https://github.com/clash-verge-rev/clash-verge-rev/releases) |
|
||||
| Alpha (EOL) | 公開フローの検証に使用した旧テスト版。 | [Alpha](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/alpha) |
|
||||
| AutoBuild | 継続的に更新されるテスト版。フィードバックや新機能検証向けです。 | [AutoBuild](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/autobuild) |
|
||||
|
||||
#### インストール手順と FAQ
|
||||
|
||||
詳しい導入手順やトラブルシュートは [ドキュメントサイト](https://clash-verge-rev.github.io/) を参照してください。
|
||||
|
||||
---
|
||||
|
||||
### Telegram チャンネル
|
||||
|
||||
更新情報は [@clash_verge_rev](https://t.me/clash_verge_re) をフォローしてください。
|
||||
|
||||
## プロモーション
|
||||
|
||||
#### [Doggygo VPN — 高性能グローバルアクセラレータ](https://verge.dginv.click/#/register?code=oaxsAGo6)
|
||||
|
||||
- 無料トライアル、割引プラン、ストリーミング解放、世界初の Hysteria プロトコル対応を備えた高性能海外ネットワークサービス。
|
||||
- Clash Verge 専用リンクから登録すると、3 日間・1 日 1 GB の無料体験が利用できます。 [登録はこちら](https://verge.dginv.click/#/register?code=oaxsAGo6)
|
||||
- Clash Verge 利用者限定 20% オフクーポン: `verge20`(先着 500 名)
|
||||
- 月額 15.8 元で 160 GB を利用できるプラン、年額契約ならさらに 20% オフ
|
||||
- 海外チーム運営による高信頼サービス、収益シェアは最大 50%
|
||||
- 負荷分散クラスタと高速専用回線(旧クライアント互換)、極低レイテンシで 4K も快適
|
||||
- 世界初の `Hysteria2` プロトコル対応。Clash Verge クライアントとの相性抜群
|
||||
- ストリーミングおよび ChatGPT の利用にも対応
|
||||
- 公式サイト: [https://狗狗加速.com](https://verge.dginv.click/#/register?code=oaxsAGo6)
|
||||
|
||||
#### ビルド環境スポンサー — [YXVM 専用サーバー](https://yxvm.com/aff.php?aff=827)
|
||||
|
||||
本プロジェクトのビルドとリリースは、YXVM の専用サーバーによって支えられています。高速ダウンロードや快適な操作性は、強力なハードウェアがあってこそです。
|
||||
|
||||
🧩 YXVM 専用サーバーの特長:
|
||||
|
||||
- 🌎 最適化されたグローバル回線で圧倒的なダウンロード速度
|
||||
- 🔧 VPS とは異なるベアメタル資源で最高性能を発揮
|
||||
- 🧠 プロキシ運用、Web/CDN ホスティング、CI/CD など高負荷ワークロードに最適
|
||||
- 💡 複数データセンターから即時利用可能。CN2 や IEPL も選択可
|
||||
- 📦 本プロジェクトが使用している構成も販売中。同じ環境を入手できます
|
||||
- 🎯 同じビルド体験をしたい方は [今すぐ YXVM サーバーを注文](https://yxvm.com/aff.php?aff=827)
|
||||
|
||||
## 機能
|
||||
|
||||
- 高性能な Rust と Tauri 2 フレームワークに基づくデスクトップアプリ
|
||||
- 組み込みの [Clash.Meta (mihomo)](https://github.com/MetaCubeX/mihomo) コアを搭載し、`Alpha` チャンネルへの切り替えも可能
|
||||
- テーマカラーやプロキシグループ/トレイアイコン、`CSS Injection` をカスタマイズできる洗練された UI
|
||||
- 設定ファイルの管理および拡張(Merge・Script 支援)、構成シンタックスヒントを提供
|
||||
- システムプロキシ制御、ガード機能、`TUN`(仮想ネットワークアダプタ)モード
|
||||
- ノードとルールのビジュアルエディタ
|
||||
- WebDAV による設定のバックアップと同期
|
||||
|
||||
### FAQ
|
||||
|
||||
プラットフォーム別の案内は [FAQ ページ](https://clash-verge-rev.github.io/faq/windows.html) を参照してください。
|
||||
|
||||
### 寄付
|
||||
|
||||
[Clash Verge Rev の開発を支援する](https://github.com/sponsors/clash-verge-rev)
|
||||
|
||||
## 開発
|
||||
|
||||
詳細な貢献ガイドは [CONTRIBUTING.md](../CONTRIBUTING.md) をご覧ください。
|
||||
|
||||
**Tauri** の前提条件を整えたら、以下のコマンドで開発サーバーを起動できます:
|
||||
|
||||
```shell
|
||||
pnpm i
|
||||
pnpm run prebuild
|
||||
pnpm dev
|
||||
```
|
||||
|
||||
## コントリビューション
|
||||
|
||||
Issue や Pull Request を歓迎します。
|
||||
|
||||
## 謝辞
|
||||
|
||||
Clash Verge Rev は、以下のプロジェクトに影響を受けています。
|
||||
|
||||
- [zzzgydi/clash-verge](https://github.com/zzzgydi/clash-verge): Tauri ベースの Clash GUI。Windows / macOS / Linux に対応。
|
||||
- [tauri-apps/tauri](https://github.com/tauri-apps/tauri): Web フロントエンドで小型・高速・安全なデスクトップアプリを構築するためのフレームワーク。
|
||||
- [Dreamacro/clash](https://github.com/Dreamacro/clash): Go 製のルールベーストンネル。
|
||||
- [MetaCubeX/mihomo](https://github.com/MetaCubeX/mihomo): Go 製のルールベーストンネル。
|
||||
- [Fndroid/clash_for_windows_pkg](https://github.com/Fndroid/clash_for_windows_pkg): Windows / macOS 向けの Clash GUI。
|
||||
- [vitejs/vite](https://github.com/vitejs/vite): 次世代のフロントエンドツール群。高速な開発体験を提供。
|
||||
|
||||
## ライセンス
|
||||
|
||||
GPL-3.0 ライセンス。詳細は [LICENSE](../LICENSE) を参照してください。
|
||||
120
docs/README_ru.md
Normal file
120
docs/README_ru.md
Normal file
@@ -0,0 +1,120 @@
|
||||
<h1 align="center">
|
||||
<img src="../src-tauri/icons/icon.png" alt="Clash" width="128" />
|
||||
<br>
|
||||
Continuation of <a href="https://github.com/zzzgydi/clash-verge">Clash Verge</a>
|
||||
<br>
|
||||
</h1>
|
||||
|
||||
<h3 align="center">
|
||||
Clash Meta GUI базируется на <a href="https://github.com/tauri-apps/tauri">Tauri</a>.
|
||||
</h3>
|
||||
|
||||
<p align="center">
|
||||
Языки:
|
||||
<a href="../README.md">简体中文</a> ·
|
||||
<a href="./README_en.md">English</a> ·
|
||||
<a href="./README_es.md">Español</a> ·
|
||||
<a href="./README_ru.md">Русский</a> ·
|
||||
<a href="./README_ja.md">日本語</a>
|
||||
</p>
|
||||
## Предпросмотр
|
||||
|
||||
| Тёмная тема | Светлая тема |
|
||||
| ---------------------------------- | ------------------------------------ |
|
||||
|  |  |
|
||||
|
||||
## Установка
|
||||
|
||||
Пожалуйста, перейдите на страницу релизов, чтобы скачать соответствующий установочный пакет: [Страница релизов](https://github.com/clash-verge-rev/clash-verge-rev/releases)<br>
|
||||
Перейдите на [страницу релизов](https://github.com/clash-verge-rev/clash-verge-rev/releases), чтобы скачать соответствующий установочный пакет<br>
|
||||
Поддержка Windows (x64/x86), Linux (x64/arm64) и macOS 10.15+ (intel/apple).
|
||||
|
||||
#### Как выбрать дистрибутив?
|
||||
|
||||
| Версия | Характеристики | Ссылка |
|
||||
| :-------------------- | :------------------------------------------------------------------------------------------------------ | :------------------------------------------------------------------------------------- |
|
||||
| Stable | Официальный релиз, высокая надежность, подходит для повседневного использования. | [Release](https://github.com/clash-verge-rev/clash-verge-rev/releases) |
|
||||
| Alpha(неиспользуемый) | Тестирование процесса публикации. | [Alpha](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/alpha) |
|
||||
| AutoBuild | Версия с постоянным обновлением, подходящая для тестирования и обратной связи. Может содержать дефекты. | [AutoBuild](https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/autobuild) |
|
||||
|
||||
#### Инструкции по установке и ответы на часто задаваемые вопросы можно найти на [странице документации](https://clash-verge-rev.github.io/)
|
||||
|
||||
---
|
||||
|
||||
### TG канал: [@clash_verge_rev](https://t.me/clash_verge_rev)
|
||||
|
||||
## Продвижение
|
||||
|
||||
#### [Doggygo VPN —— технический VPN-сервис (айрпорт)](https://verge.dginv.click/#/register?code=oaxsAGo6)
|
||||
|
||||
- Высокопроизводительный иностранный VPN-сервис (айрпорт) с бесплатным пробным периодом, выгодными тарифами, возможностью разблокировки потокового ТВ и первой в мире поддержкой протокола Hysteria.
|
||||
- Зарегистрируйтесь по эксклюзивной ссылке Clash Verge и получите 3 дня бесплатного использования, 1 Гб трафика в день: [регистрация](https://verge.dginv.click/#/register?code=oaxsAGo6)
|
||||
- Эксклюзивный промо-код на скидку 20% для Clash Verge: verge20 (только 500 штук)
|
||||
- Специальный тарифный план всего за 15,8 юаней в месяц, 160 Гб трафика, скидка 20% при оплате за год
|
||||
- Команда за рубежом, без риска побега, до 50% кэшбэка
|
||||
- Архитектура с балансировкой нагрузки, высокоскоростная выделенная линия (совместима со старыми клиентами), чрезвычайно низкая задержка, без проблем в часы пик, 4K видео загружается мгновенно
|
||||
- Первый в мире VPN-сервис (айрпорт), поддерживающий протокол Hysteria, теперь доступен более быстрый протокол `Hysteria2` (лучшее сочетание с клиентом Clash Verge)
|
||||
- Разблокировка потоковых сервисов и ChatGPT
|
||||
- Официальный сайт: [https://狗狗加速.com](https://verge.dginv.click/#/register?code=oaxsAGo6)
|
||||
|
||||
#### Среда сборки и публикации этого проекта полностью поддерживается выделенным сервером [YXVM](https://yxvm.com/aff.php?aff=827)
|
||||
|
||||
Благодарим вас за предоставление надежной бэкэнд-среды с эксклюзивными ресурсами, высокой производительностью и высокоскоростной сетью. Если вы считаете, что загрузка файлов происходит достаточно быстро, а использование — достаточно плавно, то это потому, что мы используем серверы высшего уровня!
|
||||
|
||||
🧩 Преимущества выделенного сервера YXVM:
|
||||
|
||||
- 🌎 Премиум-сеть с оптимизацией обратного пути для молниеносной скорости загрузки
|
||||
- 🔧 Выделенные физические серверные ресурсы, не имеющие аналогов среди VPS, обеспечивающие максимальную производительность
|
||||
- 🧠 Идеально подходит для прокси, хостинга веб-сайтов/CDN-сайтов, рабочих процессов CI/CD или любых приложений с высокой нагрузкой
|
||||
- 💡 Поддержка использования сразу после включения, выбор нескольких дата-центров, CN2 / IEPL на выбор
|
||||
- 📦 Эта конфигурация в настоящее время доступна для покупки — не стесняйтесь заказывать ту же модель!
|
||||
- 🎯 Хотите попробовать такую же сборку? [Закажите выделенный сервер YXVM прямо сейчас!](https://yxvm.com/aff.php?aff=827)
|
||||
|
||||
## Фичи
|
||||
|
||||
- Основан на производительном Rust и фреймворке Tauri 2
|
||||
- Имеет встроенное ядро [Clash.Meta(mihomo)](https://github.com/MetaCubeX/mihomo) и поддерживает переключение на ядро версии `Alpha`.
|
||||
- Чистый и эстетичный пользовательский интерфейс, поддержка настраиваемых цветов темы, значков прокси-группы/системного трея и `CSS Injection`.
|
||||
- Управление и расширение конфигурационными файлами (Merge и Script), подсказки по синтаксису конфигурационных файлов.
|
||||
- Режим системного прокси и его защита, режим `TUN (Tunneled Network Interface)`.
|
||||
- Визуальное редактирование узлов и правил
|
||||
- Резервное копирование и синхронизация конфигурации WebDAV
|
||||
|
||||
### FAQ
|
||||
|
||||
Смотрите [Страница часто задаваемых вопросов](https://clash-verge-rev.github.io/faq/windows.html)
|
||||
|
||||
### Донат
|
||||
|
||||
[Поддержите развитие Clash Verge Rev](https://github.com/sponsors/clash-verge-rev)
|
||||
|
||||
## Разработка
|
||||
|
||||
Дополнительные сведения смотреть в файле [CONTRIBUTING.md](../CONTRIBUTING.md).
|
||||
|
||||
Для запуска сервера разработки выполните следующие команды после установки всех необходимых компонентов для **Tauri**:
|
||||
|
||||
```shell
|
||||
pnpm i
|
||||
pnpm run prebuild
|
||||
pnpm dev
|
||||
```
|
||||
|
||||
## Вклад
|
||||
|
||||
Обращения и запросы на PR приветствуются!
|
||||
|
||||
## Благодарность
|
||||
|
||||
Clash Verge rev был основан на этих проектах или вдохновлен ими, и так далее:
|
||||
|
||||
- [zzzgydi/clash-verge](https://github.com/zzzgydi/clash-verge): Графический интерфейс Clash на основе tauri. Поддерживает Windows, macOS и Linux.
|
||||
- [tauri-apps/tauri](https://github.com/tauri-apps/tauri): Создавайте более компактные, быстрые и безопасные настольные приложения с веб-интерфейсом.
|
||||
- [Dreamacro/clash](https://github.com/Dreamacro/clash): Правило-ориентированный туннель на Go.
|
||||
- [MetaCubeX/mihomo](https://github.com/MetaCubeX/mihomo): Правило-ориентированный туннель на Go.
|
||||
- [Fndroid/clash_for_windows_pkg](https://github.com/Fndroid/clash_for_windows_pkg): Графический интерфейс пользователя для Windows/macOS на основе Clash.
|
||||
- [vitejs/vite](https://github.com/vitejs/vite): Инструменты нового поколения для фронтенда. Они быстрые!
|
||||
|
||||
## Лицензия
|
||||
|
||||
GPL-3.0 License. Подробности смотрите в [Лицензии](../LICENSE).
|
||||
135
eslint.config.ts
Normal file
135
eslint.config.ts
Normal file
@@ -0,0 +1,135 @@
|
||||
import eslintJS from "@eslint/js";
|
||||
import eslintReact from "@eslint-react/eslint-plugin";
|
||||
import { defineConfig } from "eslint/config";
|
||||
import configPrettier from "eslint-config-prettier";
|
||||
import { createTypeScriptImportResolver } from "eslint-import-resolver-typescript";
|
||||
import pluginImportX from "eslint-plugin-import-x";
|
||||
import pluginPrettier from "eslint-plugin-prettier";
|
||||
import pluginReactHooks from "eslint-plugin-react-hooks";
|
||||
import pluginReactRefresh from "eslint-plugin-react-refresh";
|
||||
import pluginUnusedImports from "eslint-plugin-unused-imports";
|
||||
import globals from "globals";
|
||||
import tseslint from "typescript-eslint";
|
||||
|
||||
export default defineConfig([
|
||||
{
|
||||
files: ["**/*.{js,mjs,cjs,ts,mts,cts,jsx,tsx}"],
|
||||
|
||||
plugins: {
|
||||
js: eslintJS,
|
||||
"react-hooks": pluginReactHooks,
|
||||
// @ts-expect-error -- https://github.com/un-ts/eslint-plugin-import-x/issues/421
|
||||
"import-x": pluginImportX,
|
||||
"react-refresh": pluginReactRefresh,
|
||||
"unused-imports": pluginUnusedImports,
|
||||
prettier: pluginPrettier,
|
||||
},
|
||||
|
||||
extends: [
|
||||
eslintJS.configs.recommended,
|
||||
tseslint.configs.recommended,
|
||||
eslintReact.configs["recommended-typescript"],
|
||||
configPrettier,
|
||||
],
|
||||
|
||||
languageOptions: {
|
||||
globals: globals.browser,
|
||||
},
|
||||
|
||||
settings: {
|
||||
react: {
|
||||
version: "detect",
|
||||
},
|
||||
"import-x/resolver-next": [
|
||||
createTypeScriptImportResolver({
|
||||
project: "./tsconfig.json",
|
||||
}),
|
||||
],
|
||||
},
|
||||
|
||||
rules: {
|
||||
// React
|
||||
"react-hooks/rules-of-hooks": "error",
|
||||
"react-hooks/exhaustive-deps": "error",
|
||||
"react-refresh/only-export-components": [
|
||||
"warn",
|
||||
{ allowConstantExport: true },
|
||||
],
|
||||
|
||||
"@eslint-react/no-forward-ref": "off",
|
||||
|
||||
// React performance and production quality rules
|
||||
"@eslint-react/no-array-index-key": "warn",
|
||||
"@eslint-react/no-children-count": "error",
|
||||
"@eslint-react/no-children-for-each": "error",
|
||||
"@eslint-react/no-children-map": "error",
|
||||
"@eslint-react/no-children-only": "error",
|
||||
"@eslint-react/no-children-prop": "error",
|
||||
"@eslint-react/no-children-to-array": "error",
|
||||
"@eslint-react/no-class-component": "error",
|
||||
"@eslint-react/no-clone-element": "error",
|
||||
"@eslint-react/no-create-ref": "error",
|
||||
"@eslint-react/no-default-props": "error",
|
||||
"@eslint-react/no-direct-mutation-state": "error",
|
||||
"@eslint-react/no-implicit-key": "error",
|
||||
"@eslint-react/no-prop-types": "error",
|
||||
"@eslint-react/no-set-state-in-component-did-mount": "error",
|
||||
"@eslint-react/no-set-state-in-component-did-update": "error",
|
||||
"@eslint-react/no-set-state-in-component-will-update": "error",
|
||||
"@eslint-react/no-string-refs": "error",
|
||||
"@eslint-react/no-unstable-context-value": "warn",
|
||||
"@eslint-react/no-unstable-default-props": "warn",
|
||||
"@eslint-react/no-unused-class-component-members": "error",
|
||||
"@eslint-react/no-unused-state": "error",
|
||||
"@eslint-react/no-useless-fragment": "warn",
|
||||
"@eslint-react/prefer-destructuring-assignment": "warn",
|
||||
|
||||
// TypeScript
|
||||
"@typescript-eslint/no-explicit-any": "off",
|
||||
|
||||
// unused-imports 代替 no-unused-vars
|
||||
"@typescript-eslint/no-unused-vars": "off",
|
||||
"unused-imports/no-unused-imports": "error",
|
||||
"unused-imports/no-unused-vars": [
|
||||
"warn",
|
||||
{
|
||||
vars: "all",
|
||||
varsIgnorePattern: "^_",
|
||||
args: "after-used",
|
||||
argsIgnorePattern: "^_",
|
||||
caughtErrorsIgnorePattern: "^ignore",
|
||||
},
|
||||
],
|
||||
|
||||
// Import
|
||||
"import-x/no-unresolved": "error",
|
||||
"import-x/order": [
|
||||
"warn",
|
||||
{
|
||||
groups: [
|
||||
"builtin",
|
||||
"external",
|
||||
"internal",
|
||||
"parent",
|
||||
"sibling",
|
||||
"index",
|
||||
],
|
||||
"newlines-between": "always",
|
||||
alphabetize: {
|
||||
order: "asc",
|
||||
caseInsensitive: true,
|
||||
},
|
||||
},
|
||||
],
|
||||
|
||||
// 其他常见
|
||||
"prefer-const": "warn",
|
||||
"no-case-declarations": "error",
|
||||
"no-fallthrough": "error",
|
||||
"no-empty": ["warn", { allowEmptyCatch: true }],
|
||||
|
||||
// Prettier 格式化问题
|
||||
"prettier/prettier": "warn",
|
||||
},
|
||||
},
|
||||
]);
|
||||
141
package.json
141
package.json
@@ -1,10 +1,13 @@
|
||||
{
|
||||
"name": "clash-verge",
|
||||
"version": "2.3.2",
|
||||
"version": "2.4.3",
|
||||
"license": "GPL-3.0-only",
|
||||
"scripts": {
|
||||
"dev": "cross-env RUST_BACKTRACE=1 tauri dev -f verge-dev",
|
||||
"dev:diff": "cross-env RUST_BACKTRACE=1 tauri dev -f verge-dev",
|
||||
"prepare": "husky || true",
|
||||
"dev": "cross-env RUST_BACKTRACE=full tauri dev -f verge-dev",
|
||||
"dev:diff": "cross-env RUST_BACKTRACE=full tauri dev -f verge-dev",
|
||||
"dev:trace": "cross-env RUST_BACKTRACE=full RUSTFLAGS=\"--cfg tokio_unstable\" tauri dev -f verge-dev tokio-trace",
|
||||
"dev:tauri": "cross-env RUST_BACKTRACE=full tauri dev -f tauri-dev",
|
||||
"build": "cross-env NODE_OPTIONS='--max-old-space-size=4096' tauri build",
|
||||
"build:fast": "cross-env NODE_OPTIONS='--max-old-space-size=4096' tauri build -- --profile fast-release",
|
||||
"tauri": "tauri",
|
||||
@@ -18,11 +21,17 @@
|
||||
"portable-fixed-webview2": "node scripts/portable-fixed-webview2.mjs",
|
||||
"fix-alpha-version": "node scripts/fix-alpha_version.mjs",
|
||||
"release-version": "node scripts/release-version.mjs",
|
||||
"release:autobuild": "pnpm release-version autobuild",
|
||||
"release:deploytest": "pnpm release-version deploytest",
|
||||
"publish-version": "node scripts/publish-version.mjs",
|
||||
"fmt": "cargo fmt --manifest-path ./src-tauri/Cargo.toml",
|
||||
"clippy": "cargo clippy --manifest-path ./src-tauri/Cargo.toml",
|
||||
"clippy": "cargo clippy --all-features --all-targets --manifest-path ./src-tauri/Cargo.toml",
|
||||
"lint": "eslint -c eslint.config.ts --max-warnings=0 --cache --cache-location .eslintcache src",
|
||||
"lint:fix": "eslint -c eslint.config.ts --max-warnings=0 --cache --cache-location .eslintcache --fix src",
|
||||
"format": "prettier --write .",
|
||||
"format:check": "prettier --check ."
|
||||
"format:check": "prettier --check .",
|
||||
"typecheck": "tsc --noEmit",
|
||||
"test": "vitest run"
|
||||
},
|
||||
"dependencies": {
|
||||
"@dnd-kit/core": "^6.3.1",
|
||||
@@ -31,74 +40,96 @@
|
||||
"@emotion/react": "^11.14.0",
|
||||
"@emotion/styled": "^11.14.1",
|
||||
"@juggle/resize-observer": "^3.4.0",
|
||||
"@mui/icons-material": "^7.1.2",
|
||||
"@mui/lab": "7.0.0-beta.14",
|
||||
"@mui/material": "^7.1.2",
|
||||
"@mui/x-data-grid": "^8.6.0",
|
||||
"@tauri-apps/api": "2.6.0",
|
||||
"@tauri-apps/plugin-clipboard-manager": "^2.3.0",
|
||||
"@tauri-apps/plugin-dialog": "^2.3.0",
|
||||
"@tauri-apps/plugin-fs": "^2.4.0",
|
||||
"@tauri-apps/plugin-global-shortcut": "^2.3.0",
|
||||
"@tauri-apps/plugin-notification": "^2.3.0",
|
||||
"@tauri-apps/plugin-process": "^2.3.0",
|
||||
"@tauri-apps/plugin-shell": "2.3.0",
|
||||
"@mui/icons-material": "^7.3.4",
|
||||
"@mui/lab": "7.0.0-beta.17",
|
||||
"@mui/material": "^7.3.4",
|
||||
"@mui/x-data-grid": "^8.16.0",
|
||||
"@tauri-apps/api": "2.9.0",
|
||||
"@tauri-apps/plugin-clipboard-manager": "^2.3.2",
|
||||
"@tauri-apps/plugin-dialog": "^2.4.2",
|
||||
"@tauri-apps/plugin-fs": "^2.4.4",
|
||||
"@tauri-apps/plugin-http": "~2.5.4",
|
||||
"@tauri-apps/plugin-process": "^2.3.1",
|
||||
"@tauri-apps/plugin-shell": "2.3.3",
|
||||
"@tauri-apps/plugin-updater": "2.9.0",
|
||||
"@tauri-apps/plugin-window-state": "^2.3.0",
|
||||
"@types/json-schema": "^7.0.15",
|
||||
"ahooks": "^3.8.5",
|
||||
"axios": "^1.10.0",
|
||||
"chart.js": "^4.5.0",
|
||||
"cli-color": "^2.0.4",
|
||||
"dayjs": "1.11.13",
|
||||
"ahooks": "^3.9.6",
|
||||
"axios": "^1.13.1",
|
||||
"dayjs": "1.11.19",
|
||||
"foxact": "^0.2.49",
|
||||
"glob": "^11.0.3",
|
||||
"i18next": "^25.2.1",
|
||||
"i18next": "^25.6.0",
|
||||
"js-yaml": "^4.1.0",
|
||||
"json-schema": "^0.4.0",
|
||||
"lodash-es": "^4.17.21",
|
||||
"monaco-editor": "^0.52.2",
|
||||
"monaco-editor": "^0.54.0",
|
||||
"monaco-yaml": "^5.4.0",
|
||||
"nanoid": "^5.1.5",
|
||||
"react": "19.1.0",
|
||||
"react-chartjs-2": "^5.3.0",
|
||||
"react-dom": "19.1.0",
|
||||
"nanoid": "^5.1.6",
|
||||
"react": "19.2.0",
|
||||
"react-dom": "19.2.0",
|
||||
"react-error-boundary": "6.0.0",
|
||||
"react-hook-form": "^7.58.1",
|
||||
"react-i18next": "15.5.3",
|
||||
"react-hook-form": "^7.66.0",
|
||||
"react-i18next": "16.2.3",
|
||||
"react-markdown": "10.1.0",
|
||||
"react-monaco-editor": "0.58.0",
|
||||
"react-router-dom": "7.6.2",
|
||||
"react-virtuoso": "^4.13.0",
|
||||
"sockette": "^2.0.6",
|
||||
"swr": "^2.3.3",
|
||||
"tar": "^7.4.3",
|
||||
"types-pac": "^1.0.3",
|
||||
"zustand": "^5.0.6"
|
||||
"react-monaco-editor": "0.59.0",
|
||||
"react-router": "^7.9.5",
|
||||
"react-virtuoso": "^4.14.1",
|
||||
"swr": "^2.3.6",
|
||||
"tauri-plugin-mihomo-api": "git+https://github.com/clash-verge-rev/tauri-plugin-mihomo",
|
||||
"types-pac": "^1.0.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@actions/github": "^6.0.1",
|
||||
"@tauri-apps/cli": "2.6.1",
|
||||
"@eslint-react/eslint-plugin": "^2.2.4",
|
||||
"@eslint/js": "^9.39.0",
|
||||
"@tauri-apps/cli": "2.9.2",
|
||||
"@types/js-yaml": "^4.0.9",
|
||||
"@types/lodash-es": "^4.17.12",
|
||||
"@types/react": "19.1.8",
|
||||
"@types/react-dom": "19.1.6",
|
||||
"@vitejs/plugin-legacy": "^7.0.0",
|
||||
"@vitejs/plugin-react": "4.6.0",
|
||||
"@types/node": "^24.9.2",
|
||||
"@types/react": "19.2.2",
|
||||
"@types/react-dom": "19.2.2",
|
||||
"@vitejs/plugin-legacy": "^7.2.1",
|
||||
"@vitejs/plugin-react-swc": "^4.2.0",
|
||||
"adm-zip": "^0.5.16",
|
||||
"commander": "^14.0.0",
|
||||
"cross-env": "^7.0.3",
|
||||
"cli-color": "^2.0.4",
|
||||
"commander": "^14.0.2",
|
||||
"cross-env": "^10.1.0",
|
||||
"eslint": "^9.39.0",
|
||||
"eslint-config-prettier": "^10.1.8",
|
||||
"eslint-import-resolver-typescript": "^4.4.4",
|
||||
"eslint-plugin-import-x": "^4.16.1",
|
||||
"eslint-plugin-prettier": "^5.5.4",
|
||||
"eslint-plugin-react-hooks": "^7.0.1",
|
||||
"eslint-plugin-react-refresh": "^0.4.24",
|
||||
"eslint-plugin-unused-imports": "^4.3.0",
|
||||
"glob": "^11.0.3",
|
||||
"globals": "^16.5.0",
|
||||
"https-proxy-agent": "^7.0.6",
|
||||
"meta-json-schema": "^1.19.11",
|
||||
"husky": "^9.1.7",
|
||||
"jiti": "^2.6.1",
|
||||
"lint-staged": "^16.2.6",
|
||||
"meta-json-schema": "^1.19.14",
|
||||
"node-fetch": "^3.3.2",
|
||||
"prettier": "^3.6.2",
|
||||
"prettier-plugin-organize-imports": "^4.1.0",
|
||||
"sass": "^1.89.2",
|
||||
"terser": "^5.43.1",
|
||||
"typescript": "^5.8.3",
|
||||
"vite": "^7.0.0",
|
||||
"vite-plugin-monaco-editor": "^1.1.0",
|
||||
"vite-plugin-svgr": "^4.3.0"
|
||||
"sass": "^1.93.3",
|
||||
"tar": "^7.5.2",
|
||||
"terser": "^5.44.0",
|
||||
"typescript": "^5.9.3",
|
||||
"typescript-eslint": "^8.46.2",
|
||||
"vite": "^7.1.12",
|
||||
"vite-plugin-monaco-editor-esm": "^2.0.2",
|
||||
"vite-plugin-svgr": "^4.5.0",
|
||||
"vitest": "^4.0.6"
|
||||
},
|
||||
"lint-staged": {
|
||||
"*.{ts,tsx,js,jsx}": [
|
||||
"eslint --fix --max-warnings=0",
|
||||
"prettier --write",
|
||||
"git add"
|
||||
],
|
||||
"*.{css,scss,json,md}": [
|
||||
"prettier --write",
|
||||
"git add"
|
||||
]
|
||||
},
|
||||
"type": "module",
|
||||
"packageManager": "pnpm@9.13.2"
|
||||
|
||||
5606
pnpm-lock.yaml
generated
5606
pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"extends": ["config:recommended"],
|
||||
"extends": ["config:recommended", ":disableDependencyDashboard"],
|
||||
"baseBranches": ["dev"],
|
||||
"enabledManagers": ["cargo", "npm"],
|
||||
"labels": ["dependencies"],
|
||||
@@ -35,8 +35,13 @@
|
||||
"description": "Group all npm dependencies into a single PR",
|
||||
"matchManagers": ["npm"],
|
||||
"groupName": "npm dependencies"
|
||||
},
|
||||
{
|
||||
"description": "Group all GitHub Actions updates into a single PR",
|
||||
"matchManagers": ["github-actions"],
|
||||
"groupName": "github actions"
|
||||
}
|
||||
],
|
||||
"postUpdateOptions": ["pnpmDedupe"],
|
||||
"ignoreDeps": ["serde_yaml"]
|
||||
"postUpdateOptions": ["pnpmDedupe", "updateCargoLock"],
|
||||
"ignoreDeps": ["criterion"]
|
||||
}
|
||||
|
||||
56
scripts-workflow/get_latest_tauri_commit.bash
Executable file
56
scripts-workflow/get_latest_tauri_commit.bash
Executable file
@@ -0,0 +1,56 @@
|
||||
#!/bin/bash

# Print the hash of the most recent commit that touched Tauri-related files.
# Output: a single commit hash on stdout; errors go to stderr with exit 1.
#
# Fixes over the previous version:
#   - glob patterns (e.g. "tauri.*.conf.json") are now expanded; `[[ -e ]]`
#     does not perform globbing, so such patterns could never match before
#   - paths are collected in an array instead of an unquoted string, so
#     names containing spaces survive
#   - EXCLUDE_PATTERNS was previously declared but never used; it is now
#     applied via git's ":(exclude)" pathspec magic

# Tauri-related file patterns (literal paths or globs)
TAURI_PATTERNS=(
  "src-tauri/"
  "Cargo.toml"
  "Cargo.lock"
  "tauri.*.conf.json"
  "package.json"
  "pnpm-lock.yaml"
  "src/"
)

# Patterns excluded from the history query (build artifacts etc.)
EXCLUDE_PATTERNS=(
  "src-tauri/target/"
  "src-tauri/gen/"
  "*.log"
  "*.tmp"
  "node_modules/"
  ".git/"
)

# Collect existing paths; expand glob patterns explicitly with compgen -G,
# which prints matches and succeeds only when at least one exists.
PATHS=()
for pattern in "${TAURI_PATTERNS[@]}"; do
  if [[ -e "$pattern" ]]; then
    PATHS+=("$pattern")
  else
    while IFS= read -r match; do
      [[ -n "$match" ]] && PATHS+=("$match")
    done < <(compgen -G "$pattern" || true)
  fi
done

# Fail when nothing Tauri-related exists in the working directory
if [[ ${#PATHS[@]} -eq 0 ]]; then
  echo "Error: No Tauri-related paths found in current directory" >&2
  exit 1
fi

# Translate the exclusion list into git pathspec magic
EXCLUDES=()
for pattern in "${EXCLUDE_PATTERNS[@]}"; do
  EXCLUDES+=(":(exclude)$pattern")
done

# Latest commit that modified any of the collected paths, honoring exclusions
LATEST_COMMIT=$(git log --format="%H" -n 1 -- "${PATHS[@]}" "${EXCLUDES[@]}")

if [[ -z "$LATEST_COMMIT" ]]; then
  echo "Error: No commits found for Tauri-related files" >&2
  exit 1
fi

echo "$LATEST_COMMIT"

# For more detail, uncomment:
# echo "Latest Tauri-related commit: $LATEST_COMMIT"
# git show --stat --oneline "$LATEST_COMMIT"
|
||||
@@ -1,18 +1,30 @@
|
||||
import AdmZip from "adm-zip";
|
||||
import { execSync } from "child_process";
|
||||
import { createHash } from "crypto";
|
||||
import fs from "fs";
|
||||
import fsp from "fs/promises";
|
||||
import zlib from "zlib";
|
||||
import { extract } from "tar";
|
||||
import path from "path";
|
||||
import AdmZip from "adm-zip";
|
||||
import fetch from "node-fetch";
|
||||
import { HttpsProxyAgent } from "https-proxy-agent";
|
||||
import { execSync } from "child_process";
|
||||
import { log_info, log_debug, log_error, log_success } from "./utils.mjs";
|
||||
import { glob } from "glob";
|
||||
import { HttpsProxyAgent } from "https-proxy-agent";
|
||||
import fetch from "node-fetch";
|
||||
import path from "path";
|
||||
import { extract } from "tar";
|
||||
import zlib from "zlib";
|
||||
import { log_debug, log_error, log_info, log_success } from "./utils.mjs";
|
||||
|
||||
/**
|
||||
* Prebuild script with optimization features:
|
||||
* 1. Skip downloading mihomo core if it already exists (unless --force is used)
|
||||
* 2. Cache version information for 1 hour to avoid repeated version checks
|
||||
* 3. Use file hash to detect changes and skip unnecessary chmod/copy operations
|
||||
* 4. Use --force or -f flag to force re-download and update all resources
|
||||
*
|
||||
*/
|
||||
|
||||
const cwd = process.cwd();
|
||||
const TEMP_DIR = path.join(cwd, "node_modules/.verge");
|
||||
const FORCE = process.argv.includes("--force");
|
||||
const FORCE = process.argv.includes("--force") || process.argv.includes("-f");
|
||||
const VERSION_CACHE_FILE = path.join(TEMP_DIR, ".version_cache.json");
|
||||
const HASH_CACHE_FILE = path.join(TEMP_DIR, ".hash_cache.json");
|
||||
|
||||
const PLATFORM_MAP = {
|
||||
"x86_64-pc-windows-msvc": "win32",
|
||||
@@ -43,7 +55,7 @@ const ARCH_MAP = {
|
||||
|
||||
const arg1 = process.argv.slice(2)[0];
|
||||
const arg2 = process.argv.slice(2)[1];
|
||||
const target = arg1 === "--force" ? arg2 : arg1;
|
||||
let target = arg1 === "--force" || arg1 === "-f" ? arg2 : arg1;
|
||||
const { platform, arch } = target
|
||||
? { platform: PLATFORM_MAP[target], arch: ARCH_MAP[target] }
|
||||
: process;
|
||||
@@ -54,66 +66,120 @@ const SIDECAR_HOST = target
|
||||
.toString()
|
||||
.match(/(?<=host: ).+(?=\s*)/g)[0];
|
||||
|
||||
/* ======= clash meta alpha======= */
|
||||
// =======================
|
||||
// Version Cache
|
||||
// =======================
|
||||
/**
 * Read the version cache file from disk.
 * Returns the parsed object, or an empty object when the file is
 * absent, unreadable, or contains invalid JSON.
 */
async function loadVersionCache() {
  if (!fs.existsSync(VERSION_CACHE_FILE)) return {};
  try {
    const raw = await fsp.readFile(VERSION_CACHE_FILE, "utf-8");
    return JSON.parse(raw);
  } catch (err) {
    log_debug("Failed to load version cache:", err.message);
    return {};
  }
}
|
||||
/**
 * Persist the version cache object as pretty-printed JSON, creating the
 * cache directory if needed. Failures are logged, never thrown.
 */
async function saveVersionCache(cache) {
  const body = JSON.stringify(cache, null, 2);
  try {
    await fsp.mkdir(TEMP_DIR, { recursive: true });
    await fsp.writeFile(VERSION_CACHE_FILE, body);
    log_debug("Version cache saved");
  } catch (err) {
    log_debug("Failed to save version cache:", err.message);
  }
}
|
||||
/**
 * Return the cached version string for `key` when the entry is younger
 * than one hour (3600000 ms); otherwise null.
 */
async function getCachedVersion(key) {
  const entry = (await loadVersionCache())[key];
  const isFresh = entry && Date.now() - entry.timestamp < 3600000;
  if (!isFresh) return null;
  log_info(`Using cached version for ${key}: ${entry.version}`);
  return entry.version;
}
|
||||
/**
 * Record `version` under `key` with the current timestamp and flush the
 * whole cache back to disk.
 */
async function setCachedVersion(key, version) {
  const cache = await loadVersionCache();
  const entry = { version, timestamp: Date.now() };
  cache[key] = entry;
  await saveVersionCache(cache);
}
|
||||
|
||||
// =======================
|
||||
// Hash Cache & File Hash
|
||||
// =======================
|
||||
/**
 * Compute the SHA-256 digest of a file's contents as a hex string.
 * Returns null when the file cannot be read (e.g. it does not exist).
 */
async function calculateFileHash(filePath) {
  let bytes;
  try {
    bytes = await fsp.readFile(filePath);
  } catch (err) {
    return null;
  }
  return createHash("sha256").update(bytes).digest("hex");
}
|
||||
/**
 * Read the file-hash cache from disk.
 * Returns the parsed object, or an empty object when the file is
 * absent, unreadable, or contains invalid JSON.
 */
async function loadHashCache() {
  if (!fs.existsSync(HASH_CACHE_FILE)) return {};
  try {
    const raw = await fsp.readFile(HASH_CACHE_FILE, "utf-8");
    return JSON.parse(raw);
  } catch (err) {
    log_debug("Failed to load hash cache:", err.message);
    return {};
  }
}
|
||||
/**
 * Persist the file-hash cache as pretty-printed JSON, creating the cache
 * directory if needed. Failures are logged, never thrown.
 */
async function saveHashCache(cache) {
  const body = JSON.stringify(cache, null, 2);
  try {
    await fsp.mkdir(TEMP_DIR, { recursive: true });
    await fsp.writeFile(HASH_CACHE_FILE, body);
    log_debug("Hash cache saved");
  } catch (err) {
    log_debug("Failed to save hash cache:", err.message);
  }
}
|
||||
/**
 * Decide whether `filePath` needs to be (re)copied to `targetPath`.
 *
 * Returns true (i.e. "changed, do the copy") when:
 *   - --force/-f was passed (FORCE short-circuits every other check),
 *   - the target does not exist yet,
 *   - either file cannot be hashed,
 *   - or the cached hash does not match both current hashes.
 * Returns false only when the cached hash, the source hash, and the
 * target hash all agree — meaning the copy is already up to date.
 */
async function hasFileChanged(filePath, targetPath) {
  if (FORCE) return true;
  if (!fs.existsSync(targetPath)) return true;
  const hashCache = await loadHashCache();
  const sourceHash = await calculateFileHash(filePath);
  const targetHash = await calculateFileHash(targetPath);
  // A read failure on either side is treated as "changed" so the copy retries.
  if (!sourceHash || !targetHash) return true;
  // Cache entries are keyed by the destination path (see updateHashCache).
  const cacheKey = targetPath;
  const cachedHash = hashCache[cacheKey];
  if (cachedHash === sourceHash && sourceHash === targetHash) {
    return false;
  }
  return true;
}
|
||||
/**
 * Re-hash `targetPath` and store the digest in the hash cache (keyed by
 * the target path). Skips the write when the file cannot be hashed.
 */
async function updateHashCache(targetPath) {
  const cache = await loadHashCache();
  const digest = await calculateFileHash(targetPath);
  if (!digest) return;
  cache[targetPath] = digest;
  await saveHashCache(cache);
}
|
||||
|
||||
// =======================
|
||||
// Meta maps (stable & alpha)
|
||||
// =======================
|
||||
const META_ALPHA_VERSION_URL =
|
||||
"https://github.com/MetaCubeX/mihomo/releases/download/Prerelease-Alpha/version.txt";
|
||||
const META_ALPHA_URL_PREFIX = `https://github.com/MetaCubeX/mihomo/releases/download/Prerelease-Alpha`;
|
||||
let META_ALPHA_VERSION;
|
||||
|
||||
const META_ALPHA_MAP = {
|
||||
"win32-x64": "mihomo-windows-amd64-compatible",
|
||||
"win32-ia32": "mihomo-windows-386",
|
||||
"win32-arm64": "mihomo-windows-arm64",
|
||||
"darwin-x64": "mihomo-darwin-amd64-compatible",
|
||||
"darwin-arm64": "mihomo-darwin-arm64",
|
||||
"linux-x64": "mihomo-linux-amd64-compatible",
|
||||
"linux-ia32": "mihomo-linux-386",
|
||||
"linux-arm64": "mihomo-linux-arm64",
|
||||
"linux-arm": "mihomo-linux-armv7",
|
||||
"linux-riscv64": "mihomo-linux-riscv64",
|
||||
"linux-loong64": "mihomo-linux-loong64",
|
||||
};
|
||||
|
||||
// Fetch the latest alpha release version from the version.txt file
|
||||
async function getLatestAlphaVersion() {
|
||||
const options = {};
|
||||
|
||||
const httpProxy =
|
||||
process.env.HTTP_PROXY ||
|
||||
process.env.http_proxy ||
|
||||
process.env.HTTPS_PROXY ||
|
||||
process.env.https_proxy;
|
||||
|
||||
if (httpProxy) {
|
||||
options.agent = new HttpsProxyAgent(httpProxy);
|
||||
}
|
||||
try {
|
||||
const response = await fetch(META_ALPHA_VERSION_URL, {
|
||||
...options,
|
||||
method: "GET",
|
||||
});
|
||||
let v = await response.text();
|
||||
META_ALPHA_VERSION = v.trim(); // Trim to remove extra whitespaces
|
||||
log_info(`Latest alpha version: ${META_ALPHA_VERSION}`);
|
||||
} catch (error) {
|
||||
log_error("Error fetching latest alpha version:", error.message);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
/* ======= clash meta stable ======= */
|
||||
const META_VERSION_URL =
|
||||
"https://github.com/MetaCubeX/mihomo/releases/latest/download/version.txt";
|
||||
const META_URL_PREFIX = `https://github.com/MetaCubeX/mihomo/releases/download`;
|
||||
let META_VERSION;
|
||||
|
||||
const META_MAP = {
|
||||
"win32-x64": "mihomo-windows-amd64-compatible",
|
||||
const META_ALPHA_MAP = {
|
||||
"win32-x64": "mihomo-windows-amd64-v2",
|
||||
"win32-ia32": "mihomo-windows-386",
|
||||
"win32-arm64": "mihomo-windows-arm64",
|
||||
"darwin-x64": "mihomo-darwin-amd64-compatible",
|
||||
"darwin-arm64": "mihomo-darwin-arm64",
|
||||
"linux-x64": "mihomo-linux-amd64-compatible",
|
||||
"darwin-x64": "mihomo-darwin-amd64-v1-go122",
|
||||
"darwin-arm64": "mihomo-darwin-arm64-go122",
|
||||
"linux-x64": "mihomo-linux-amd64-v2",
|
||||
"linux-ia32": "mihomo-linux-386",
|
||||
"linux-arm64": "mihomo-linux-arm64",
|
||||
"linux-arm": "mihomo-linux-armv7",
|
||||
@@ -121,65 +187,116 @@ const META_MAP = {
|
||||
"linux-loong64": "mihomo-linux-loong64",
|
||||
};
|
||||
|
||||
// Fetch the latest release version from the version.txt file
|
||||
async function getLatestReleaseVersion() {
|
||||
const options = {};
|
||||
const META_MAP = {
|
||||
"win32-x64": "mihomo-windows-amd64-v2",
|
||||
"win32-ia32": "mihomo-windows-386",
|
||||
"win32-arm64": "mihomo-windows-arm64",
|
||||
"darwin-x64": "mihomo-darwin-amd64-v2-go122",
|
||||
"darwin-arm64": "mihomo-darwin-arm64-go122",
|
||||
"linux-x64": "mihomo-linux-amd64-v2",
|
||||
"linux-ia32": "mihomo-linux-386",
|
||||
"linux-arm64": "mihomo-linux-arm64",
|
||||
"linux-arm": "mihomo-linux-armv7",
|
||||
"linux-riscv64": "mihomo-linux-riscv64",
|
||||
"linux-loong64": "mihomo-linux-loong64",
|
||||
};
|
||||
|
||||
// =======================
|
||||
// Fetch latest versions
|
||||
// =======================
|
||||
async function getLatestAlphaVersion() {
|
||||
if (!FORCE) {
|
||||
const cached = await getCachedVersion("META_ALPHA_VERSION");
|
||||
if (cached) {
|
||||
META_ALPHA_VERSION = cached;
|
||||
return;
|
||||
}
|
||||
}
|
||||
const options = {};
|
||||
const httpProxy =
|
||||
process.env.HTTP_PROXY ||
|
||||
process.env.http_proxy ||
|
||||
process.env.HTTPS_PROXY ||
|
||||
process.env.https_proxy;
|
||||
if (httpProxy) options.agent = new HttpsProxyAgent(httpProxy);
|
||||
|
||||
if (httpProxy) {
|
||||
options.agent = new HttpsProxyAgent(httpProxy);
|
||||
try {
|
||||
const response = await fetch(META_ALPHA_VERSION_URL, {
|
||||
...options,
|
||||
method: "GET",
|
||||
});
|
||||
if (!response.ok)
|
||||
throw new Error(
|
||||
`Failed to fetch ${META_ALPHA_VERSION_URL}: ${response.status}`,
|
||||
);
|
||||
META_ALPHA_VERSION = (await response.text()).trim();
|
||||
log_info(`Latest alpha version: ${META_ALPHA_VERSION}`);
|
||||
await setCachedVersion("META_ALPHA_VERSION", META_ALPHA_VERSION);
|
||||
} catch (err) {
|
||||
log_error("Error fetching latest alpha version:", err.message);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
async function getLatestReleaseVersion() {
|
||||
if (!FORCE) {
|
||||
const cached = await getCachedVersion("META_VERSION");
|
||||
if (cached) {
|
||||
META_VERSION = cached;
|
||||
return;
|
||||
}
|
||||
}
|
||||
const options = {};
|
||||
const httpProxy =
|
||||
process.env.HTTP_PROXY ||
|
||||
process.env.http_proxy ||
|
||||
process.env.HTTPS_PROXY ||
|
||||
process.env.https_proxy;
|
||||
if (httpProxy) options.agent = new HttpsProxyAgent(httpProxy);
|
||||
|
||||
try {
|
||||
const response = await fetch(META_VERSION_URL, {
|
||||
...options,
|
||||
method: "GET",
|
||||
});
|
||||
let v = await response.text();
|
||||
META_VERSION = v.trim(); // Trim to remove extra whitespaces
|
||||
if (!response.ok)
|
||||
throw new Error(
|
||||
`Failed to fetch ${META_VERSION_URL}: ${response.status}`,
|
||||
);
|
||||
META_VERSION = (await response.text()).trim();
|
||||
log_info(`Latest release version: ${META_VERSION}`);
|
||||
} catch (error) {
|
||||
log_error("Error fetching latest release version:", error.message);
|
||||
await setCachedVersion("META_VERSION", META_VERSION);
|
||||
} catch (err) {
|
||||
log_error("Error fetching latest release version:", err.message);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* check available
|
||||
*/
|
||||
// =======================
|
||||
// Validate availability
|
||||
// =======================
|
||||
if (!META_MAP[`${platform}-${arch}`]) {
|
||||
throw new Error(
|
||||
`clash meta alpha unsupported platform "${platform}-${arch}"`,
|
||||
);
|
||||
throw new Error(`clash meta unsupported platform "${platform}-${arch}"`);
|
||||
}
|
||||
|
||||
if (!META_ALPHA_MAP[`${platform}-${arch}`]) {
|
||||
throw new Error(
|
||||
`clash meta alpha unsupported platform "${platform}-${arch}"`,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* core info
|
||||
*/
|
||||
// =======================
|
||||
// Build meta objects
|
||||
// =======================
|
||||
function clashMetaAlpha() {
|
||||
const name = META_ALPHA_MAP[`${platform}-${arch}`];
|
||||
const isWin = platform === "win32";
|
||||
const urlExt = isWin ? "zip" : "gz";
|
||||
const downloadURL = `${META_ALPHA_URL_PREFIX}/${name}-${META_ALPHA_VERSION}.${urlExt}`;
|
||||
const exeFile = `${name}${isWin ? ".exe" : ""}`;
|
||||
const zipFile = `${name}-${META_ALPHA_VERSION}.${urlExt}`;
|
||||
|
||||
return {
|
||||
name: "verge-mihomo-alpha",
|
||||
targetFile: `verge-mihomo-alpha-${SIDECAR_HOST}${isWin ? ".exe" : ""}`,
|
||||
exeFile,
|
||||
zipFile,
|
||||
downloadURL,
|
||||
exeFile: `${name}${isWin ? ".exe" : ""}`,
|
||||
zipFile: `${name}-${META_ALPHA_VERSION}.${urlExt}`,
|
||||
downloadURL: `${META_ALPHA_URL_PREFIX}/${name}-${META_ALPHA_VERSION}.${urlExt}`,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -187,35 +304,83 @@ function clashMeta() {
|
||||
const name = META_MAP[`${platform}-${arch}`];
|
||||
const isWin = platform === "win32";
|
||||
const urlExt = isWin ? "zip" : "gz";
|
||||
const downloadURL = `${META_URL_PREFIX}/${META_VERSION}/${name}-${META_VERSION}.${urlExt}`;
|
||||
const exeFile = `${name}${isWin ? ".exe" : ""}`;
|
||||
const zipFile = `${name}-${META_VERSION}.${urlExt}`;
|
||||
|
||||
return {
|
||||
name: "verge-mihomo",
|
||||
targetFile: `verge-mihomo-${SIDECAR_HOST}${isWin ? ".exe" : ""}`,
|
||||
exeFile,
|
||||
zipFile,
|
||||
downloadURL,
|
||||
exeFile: `${name}${isWin ? ".exe" : ""}`,
|
||||
zipFile: `${name}-${META_VERSION}.${urlExt}`,
|
||||
downloadURL: `${META_URL_PREFIX}/${META_VERSION}/${name}-${META_VERSION}.${urlExt}`,
|
||||
};
|
||||
}
|
||||
/**
|
||||
* download sidecar and rename
|
||||
*/
|
||||
|
||||
// =======================
|
||||
// download helper (增强:status + magic bytes)
|
||||
// =======================
|
||||
async function downloadFile(url, outPath) {
|
||||
const options = {};
|
||||
const httpProxy =
|
||||
process.env.HTTP_PROXY ||
|
||||
process.env.http_proxy ||
|
||||
process.env.HTTPS_PROXY ||
|
||||
process.env.https_proxy;
|
||||
if (httpProxy) options.agent = new HttpsProxyAgent(httpProxy);
|
||||
|
||||
const response = await fetch(url, {
|
||||
...options,
|
||||
method: "GET",
|
||||
headers: { "Content-Type": "application/octet-stream" },
|
||||
});
|
||||
if (!response.ok) {
|
||||
const body = await response.text().catch(() => "");
|
||||
// 将 body 写到文件以便排查(可通过临时目录查看)
|
||||
await fsp.mkdir(path.dirname(outPath), { recursive: true });
|
||||
await fsp.writeFile(outPath, body);
|
||||
throw new Error(`Failed to download ${url}: status ${response.status}`);
|
||||
}
|
||||
|
||||
const buf = Buffer.from(await response.arrayBuffer());
|
||||
await fsp.mkdir(path.dirname(outPath), { recursive: true });
|
||||
|
||||
// 简单 magic 字节检查
|
||||
if (url.endsWith(".gz") || url.endsWith(".tgz")) {
|
||||
if (!(buf[0] === 0x1f && buf[1] === 0x8b)) {
|
||||
await fsp.writeFile(outPath, buf);
|
||||
throw new Error(
|
||||
`Downloaded file for ${url} is not a valid gzip (magic mismatch).`,
|
||||
);
|
||||
}
|
||||
} else if (url.endsWith(".zip")) {
|
||||
if (!(buf[0] === 0x50 && buf[1] === 0x4b)) {
|
||||
await fsp.writeFile(outPath, buf);
|
||||
throw new Error(
|
||||
`Downloaded file for ${url} is not a valid zip (magic mismatch).`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
await fsp.writeFile(outPath, buf);
|
||||
log_success(`download finished: ${url}`);
|
||||
}
|
||||
|
||||
// =======================
|
||||
// resolveSidecar (支持 zip / tgz / gz)
|
||||
// =======================
|
||||
async function resolveSidecar(binInfo) {
|
||||
const { name, targetFile, zipFile, exeFile, downloadURL } = binInfo;
|
||||
|
||||
const sidecarDir = path.join(cwd, "src-tauri", "sidecar");
|
||||
const sidecarPath = path.join(sidecarDir, targetFile);
|
||||
|
||||
await fsp.mkdir(sidecarDir, { recursive: true });
|
||||
if (!FORCE && fs.existsSync(sidecarPath)) return;
|
||||
|
||||
if (!FORCE && fs.existsSync(sidecarPath)) {
|
||||
log_success(`"${name}" already exists, skipping download`);
|
||||
return;
|
||||
}
|
||||
|
||||
const tempDir = path.join(TEMP_DIR, name);
|
||||
const tempZip = path.join(tempDir, zipFile);
|
||||
const tempExe = path.join(tempDir, exeFile);
|
||||
|
||||
await fsp.mkdir(tempDir, { recursive: true });
|
||||
|
||||
try {
|
||||
if (!fs.existsSync(tempZip)) {
|
||||
await downloadFile(downloadURL, tempZip);
|
||||
@@ -224,140 +389,118 @@ async function resolveSidecar(binInfo) {
|
||||
if (zipFile.endsWith(".zip")) {
|
||||
const zip = new AdmZip(tempZip);
|
||||
zip.getEntries().forEach((entry) => {
|
||||
log_debug(`"${name}" entry name`, entry.entryName);
|
||||
log_debug(`"${name}" entry: ${entry.entryName}`);
|
||||
});
|
||||
zip.extractAllTo(tempDir, true);
|
||||
// 尝试按 exeFile 重命名,否则找第一个可执行文件
|
||||
if (fs.existsSync(tempExe)) {
|
||||
await fsp.rename(tempExe, sidecarPath);
|
||||
} else {
|
||||
// 搜索候选
|
||||
const files = await fsp.readdir(tempDir);
|
||||
const candidate = files.find(
|
||||
(f) =>
|
||||
f === path.basename(exeFile) ||
|
||||
f.endsWith(".exe") ||
|
||||
!f.includes("."),
|
||||
);
|
||||
if (!candidate)
|
||||
throw new Error(`Expected binary not found in ${tempDir}`);
|
||||
await fsp.rename(path.join(tempDir, candidate), sidecarPath);
|
||||
}
|
||||
if (platform !== "win32") execSync(`chmod 755 ${sidecarPath}`);
|
||||
log_success(`unzip finished: "${name}"`);
|
||||
} else if (zipFile.endsWith(".tgz")) {
|
||||
// tgz
|
||||
await fsp.mkdir(tempDir, { recursive: true });
|
||||
await extract({
|
||||
cwd: tempDir,
|
||||
file: tempZip,
|
||||
//strip: 1, // 可能需要根据实际的 .tgz 文件结构调整
|
||||
});
|
||||
await extract({ cwd: tempDir, file: tempZip });
|
||||
const files = await fsp.readdir(tempDir);
|
||||
log_debug(`"${name}" files in tempDir:`, files);
|
||||
const extractedFile = files.find((file) => file.startsWith("虚空终端-"));
|
||||
if (extractedFile) {
|
||||
const extractedFilePath = path.join(tempDir, extractedFile);
|
||||
await fsp.rename(extractedFilePath, sidecarPath);
|
||||
log_success(`"${name}" file renamed to "${sidecarPath}"`);
|
||||
log_debug(`"${name}" extracted files:`, files);
|
||||
// 优先寻找给定 exeFile 或已知前缀
|
||||
let extracted = files.find(
|
||||
(f) =>
|
||||
f === path.basename(exeFile) ||
|
||||
f.startsWith("虚空终端-") ||
|
||||
!f.includes("."),
|
||||
);
|
||||
if (!extracted) extracted = files[0];
|
||||
if (!extracted) throw new Error(`Expected file not found in ${tempDir}`);
|
||||
await fsp.rename(path.join(tempDir, extracted), sidecarPath);
|
||||
execSync(`chmod 755 ${sidecarPath}`);
|
||||
log_success(`chmod binary finished: "${name}"`);
|
||||
log_success(`tgz processed: "${name}"`);
|
||||
} else {
|
||||
throw new Error(`Expected file not found in ${tempDir}`);
|
||||
}
|
||||
} else {
|
||||
// gz
|
||||
// .gz
|
||||
const readStream = fs.createReadStream(tempZip);
|
||||
const writeStream = fs.createWriteStream(sidecarPath);
|
||||
await new Promise((resolve, reject) => {
|
||||
const onError = (error) => {
|
||||
log_error(`"${name}" gz failed:`, error.message);
|
||||
reject(error);
|
||||
};
|
||||
readStream
|
||||
.pipe(zlib.createGunzip().on("error", onError))
|
||||
.pipe(zlib.createGunzip())
|
||||
.on("error", (e) => {
|
||||
log_error(`gunzip error for ${name}:`, e.message);
|
||||
reject(e);
|
||||
})
|
||||
.pipe(writeStream)
|
||||
.on("finish", () => {
|
||||
execSync(`chmod 755 ${sidecarPath}`);
|
||||
log_success(`chmod binary finished: "${name}"`);
|
||||
if (platform !== "win32") execSync(`chmod 755 ${sidecarPath}`);
|
||||
resolve();
|
||||
})
|
||||
.on("error", onError);
|
||||
.on("error", (e) => {
|
||||
log_error(`write stream error for ${name}:`, e.message);
|
||||
reject(e);
|
||||
});
|
||||
});
|
||||
log_success(`gz binary processed: "${name}"`);
|
||||
}
|
||||
} catch (err) {
|
||||
// 需要删除文件
|
||||
await fsp.rm(sidecarPath, { recursive: true, force: true });
|
||||
throw err;
|
||||
} finally {
|
||||
// delete temp dir
|
||||
await fsp.rm(tempDir, { recursive: true, force: true });
|
||||
}
|
||||
}
|
||||
|
||||
const resolveSetDnsScript = () =>
|
||||
resolveResource({
|
||||
file: "set_dns.sh",
|
||||
localPath: path.join(cwd, "scripts/set_dns.sh"),
|
||||
});
|
||||
const resolveUnSetDnsScript = () =>
|
||||
resolveResource({
|
||||
file: "unset_dns.sh",
|
||||
localPath: path.join(cwd, "scripts/unset_dns.sh"),
|
||||
});
|
||||
|
||||
/**
|
||||
* download the file to the resources dir
|
||||
*/
|
||||
async function resolveResource(binInfo) {
|
||||
const { file, downloadURL, localPath } = binInfo;
|
||||
|
||||
const resDir = path.join(cwd, "src-tauri/resources");
|
||||
const targetPath = path.join(resDir, file);
|
||||
|
||||
if (!FORCE && fs.existsSync(targetPath)) return;
|
||||
if (!FORCE && fs.existsSync(targetPath) && !downloadURL && !localPath) {
|
||||
log_success(`"${file}" already exists, skipping`);
|
||||
return;
|
||||
}
|
||||
|
||||
if (downloadURL) {
|
||||
if (!FORCE && fs.existsSync(targetPath)) {
|
||||
log_success(`"${file}" already exists, skipping download`);
|
||||
return;
|
||||
}
|
||||
await fsp.mkdir(resDir, { recursive: true });
|
||||
await downloadFile(downloadURL, targetPath);
|
||||
await updateHashCache(targetPath);
|
||||
}
|
||||
|
||||
if (localPath) {
|
||||
await fs.copyFile(localPath, targetPath, (err) => {
|
||||
if (err) {
|
||||
console.error("Error copying file:", err);
|
||||
} else {
|
||||
console.log("File was copied successfully");
|
||||
if (!(await hasFileChanged(localPath, targetPath))) {
|
||||
return;
|
||||
}
|
||||
});
|
||||
log_debug(`copy file finished: "${localPath}"`);
|
||||
await fsp.mkdir(resDir, { recursive: true });
|
||||
await fsp.copyFile(localPath, targetPath);
|
||||
await updateHashCache(targetPath);
|
||||
log_success(`Copied file: ${file}`);
|
||||
}
|
||||
|
||||
log_success(`${file} finished`);
|
||||
}
|
||||
|
||||
/**
|
||||
* download file and save to `path`
|
||||
*/ async function downloadFile(url, path) {
|
||||
const options = {};
|
||||
|
||||
const httpProxy =
|
||||
process.env.HTTP_PROXY ||
|
||||
process.env.http_proxy ||
|
||||
process.env.HTTPS_PROXY ||
|
||||
process.env.https_proxy;
|
||||
|
||||
if (httpProxy) {
|
||||
options.agent = new HttpsProxyAgent(httpProxy);
|
||||
}
|
||||
|
||||
const response = await fetch(url, {
|
||||
...options,
|
||||
method: "GET",
|
||||
headers: { "Content-Type": "application/octet-stream" },
|
||||
});
|
||||
const buffer = await response.arrayBuffer();
|
||||
await fsp.writeFile(path, new Uint8Array(buffer));
|
||||
|
||||
log_success(`download finished: ${url}`);
|
||||
}
|
||||
|
||||
// SimpleSC.dll
|
||||
// SimpleSC.dll (win plugin)
|
||||
const resolvePlugin = async () => {
|
||||
const url =
|
||||
"https://nsis.sourceforge.io/mediawiki/images/e/ef/NSIS_Simple_Service_Plugin_Unicode_1.30.zip";
|
||||
|
||||
const tempDir = path.join(TEMP_DIR, "SimpleSC");
|
||||
const tempZip = path.join(
|
||||
tempDir,
|
||||
"NSIS_Simple_Service_Plugin_Unicode_1.30.zip",
|
||||
);
|
||||
const tempDll = path.join(tempDir, "SimpleSC.dll");
|
||||
const pluginDir = path.join(process.env.APPDATA, "Local/NSIS");
|
||||
const pluginDir = path.join(process.env.APPDATA || "", "Local/NSIS");
|
||||
const pluginPath = path.join(pluginDir, "SimpleSC.dll");
|
||||
await fsp.mkdir(pluginDir, { recursive: true });
|
||||
await fsp.mkdir(tempDir, { recursive: true });
|
||||
@@ -367,95 +510,118 @@ const resolvePlugin = async () => {
|
||||
await downloadFile(url, tempZip);
|
||||
}
|
||||
const zip = new AdmZip(tempZip);
|
||||
zip.getEntries().forEach((entry) => {
|
||||
log_debug(`"SimpleSC" entry name`, entry.entryName);
|
||||
});
|
||||
zip
|
||||
.getEntries()
|
||||
.forEach((entry) => log_debug(`"SimpleSC" entry`, entry.entryName));
|
||||
zip.extractAllTo(tempDir, true);
|
||||
if (fs.existsSync(tempDll)) {
|
||||
await fsp.cp(tempDll, pluginPath, { recursive: true, force: true });
|
||||
log_success(`unzip finished: "SimpleSC"`);
|
||||
} else {
|
||||
// 如果 dll 名称不同,尝试找到 dll
|
||||
const files = await fsp.readdir(tempDir);
|
||||
const dll = files.find((f) => f.toLowerCase().endsWith(".dll"));
|
||||
if (dll) {
|
||||
await fsp.cp(path.join(tempDir, dll), pluginPath, {
|
||||
recursive: true,
|
||||
force: true,
|
||||
});
|
||||
log_success(`unzip finished: "SimpleSC" (found ${dll})`);
|
||||
} else {
|
||||
throw new Error("SimpleSC.dll not found in zip");
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
await fsp.rm(tempDir, { recursive: true, force: true });
|
||||
}
|
||||
};
|
||||
|
||||
// service chmod
|
||||
// service chmod (保留并使用 glob)
|
||||
const resolveServicePermission = async () => {
|
||||
const serviceExecutables = [
|
||||
"clash-verge-service*",
|
||||
"install-service*",
|
||||
"uninstall-service*",
|
||||
"clash-verge-service-install*",
|
||||
"clash-verge-service-uninstall*",
|
||||
];
|
||||
const resDir = path.join(cwd, "src-tauri/resources");
|
||||
const hashCache = await loadHashCache();
|
||||
let hasChanges = false;
|
||||
|
||||
for (let f of serviceExecutables) {
|
||||
// 使用glob模块来处理通配符
|
||||
const files = glob.sync(path.join(resDir, f));
|
||||
for (let filePath of files) {
|
||||
if (fs.existsSync(filePath)) {
|
||||
const currentHash = await calculateFileHash(filePath);
|
||||
const cacheKey = `${filePath}_chmod`;
|
||||
if (!FORCE && hashCache[cacheKey] === currentHash) {
|
||||
continue;
|
||||
}
|
||||
try {
|
||||
execSync(`chmod 755 ${filePath}`);
|
||||
log_success(`chmod finished: "${filePath}"`);
|
||||
} catch (e) {
|
||||
log_error(`chmod failed for ${filePath}:`, e.message);
|
||||
}
|
||||
hashCache[cacheKey] = currentHash;
|
||||
hasChanges = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (hasChanges) {
|
||||
await saveHashCache(hashCache);
|
||||
}
|
||||
};
|
||||
|
||||
// 在 resolveResource 函数后添加新函数
|
||||
// resolve locales (从 src/locales 复制到 resources/locales,并使用 hash 检查)
|
||||
async function resolveLocales() {
|
||||
const srcLocalesDir = path.join(cwd, "src/locales");
|
||||
const targetLocalesDir = path.join(cwd, "src-tauri/resources/locales");
|
||||
|
||||
try {
|
||||
// 确保目标目录存在
|
||||
await fsp.mkdir(targetLocalesDir, { recursive: true });
|
||||
|
||||
// 读取所有语言文件
|
||||
const files = await fsp.readdir(srcLocalesDir);
|
||||
|
||||
// 复制每个文件
|
||||
for (const file of files) {
|
||||
const srcPath = path.join(srcLocalesDir, file);
|
||||
const targetPath = path.join(targetLocalesDir, file);
|
||||
|
||||
if (!(await hasFileChanged(srcPath, targetPath))) continue;
|
||||
await fsp.copyFile(srcPath, targetPath);
|
||||
await updateHashCache(targetPath);
|
||||
log_success(`Copied locale file: ${file}`);
|
||||
}
|
||||
|
||||
log_success("All locale files copied successfully");
|
||||
log_success("All locale files processed successfully");
|
||||
} catch (err) {
|
||||
log_error("Error copying locale files:", err.message);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* main
|
||||
*/
|
||||
const SERVICE_URL = `https://github.com/clash-verge-rev/clash-verge-service/releases/download/${SIDECAR_HOST}`;
|
||||
|
||||
// =======================
|
||||
// Other resource resolvers (service, mmdb, geosite, geoip, enableLoopback, sysproxy)
|
||||
// =======================
|
||||
const SERVICE_URL = `https://github.com/clash-verge-rev/clash-verge-service-ipc/releases/download/${SIDECAR_HOST}`;
|
||||
const resolveService = () => {
|
||||
let ext = platform === "win32" ? ".exe" : "";
|
||||
let suffix = platform === "linux" ? "-" + SIDECAR_HOST : "";
|
||||
resolveResource({
|
||||
return resolveResource({
|
||||
file: "clash-verge-service" + suffix + ext,
|
||||
downloadURL: `${SERVICE_URL}/clash-verge-service${ext}`,
|
||||
});
|
||||
};
|
||||
|
||||
const resolveInstall = () => {
|
||||
let ext = platform === "win32" ? ".exe" : "";
|
||||
let suffix = platform === "linux" ? "-" + SIDECAR_HOST : "";
|
||||
resolveResource({
|
||||
file: "install-service" + suffix + ext,
|
||||
downloadURL: `${SERVICE_URL}/install-service${ext}`,
|
||||
return resolveResource({
|
||||
file: "clash-verge-service-install" + suffix + ext,
|
||||
downloadURL: `${SERVICE_URL}/clash-verge-service-install${ext}`,
|
||||
});
|
||||
};
|
||||
|
||||
const resolveUninstall = () => {
|
||||
let ext = platform === "win32" ? ".exe" : "";
|
||||
let suffix = platform === "linux" ? "-" + SIDECAR_HOST : "";
|
||||
|
||||
resolveResource({
|
||||
file: "uninstall-service" + suffix + ext,
|
||||
downloadURL: `${SERVICE_URL}/uninstall-service${ext}`,
|
||||
return resolveResource({
|
||||
file: "clash-verge-service-uninstall" + suffix + ext,
|
||||
downloadURL: `${SERVICE_URL}/clash-verge-service-uninstall${ext}`,
|
||||
});
|
||||
};
|
||||
|
||||
@@ -479,15 +645,27 @@ const resolveEnableLoopback = () =>
|
||||
file: "enableLoopback.exe",
|
||||
downloadURL: `https://github.com/Kuingsmile/uwp-tool/releases/download/latest/enableLoopback.exe`,
|
||||
});
|
||||
|
||||
const resolveWinSysproxy = () =>
|
||||
resolveResource({
|
||||
file: "sysproxy.exe",
|
||||
downloadURL: `https://github.com/clash-verge-rev/sysproxy/releases/download/${arch}/sysproxy.exe`,
|
||||
});
|
||||
|
||||
const resolveSetDnsScript = () =>
|
||||
resolveResource({
|
||||
file: "set_dns.sh",
|
||||
localPath: path.join(cwd, "scripts/set_dns.sh"),
|
||||
});
|
||||
const resolveUnSetDnsScript = () =>
|
||||
resolveResource({
|
||||
file: "unset_dns.sh",
|
||||
localPath: path.join(cwd, "scripts/unset_dns.sh"),
|
||||
});
|
||||
|
||||
// =======================
|
||||
// Tasks
|
||||
// =======================
|
||||
const tasks = [
|
||||
// { name: "clash", func: resolveClash, retry: 5 },
|
||||
{
|
||||
name: "verge-mihomo-alpha",
|
||||
func: () =>
|
||||
@@ -537,11 +715,7 @@ const tasks = [
|
||||
retry: 5,
|
||||
macosOnly: true,
|
||||
},
|
||||
{
|
||||
name: "locales",
|
||||
func: resolveLocales,
|
||||
retry: 2,
|
||||
},
|
||||
{ name: "locales", func: resolveLocales, retry: 2 },
|
||||
];
|
||||
|
||||
async function runTask() {
|
||||
|
||||
@@ -54,7 +54,7 @@ async function run() {
|
||||
execSync(`git tag ${tag}`, { stdio: "inherit" });
|
||||
execSync(`git push origin ${tag}`, { stdio: "inherit" });
|
||||
console.log(`[INFO]: Git tag ${tag} created and pushed.`);
|
||||
} catch (e) {
|
||||
} catch {
|
||||
console.error(`[ERROR]: Failed to create or push git tag: ${tag}`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
@@ -6,15 +6,19 @@
|
||||
*
|
||||
* <version> can be:
|
||||
* - A full semver version (e.g., 1.2.3, v1.2.3, 1.2.3-beta, v1.2.3+build)
|
||||
* - A tag: "alpha", "beta", "rc", or "autobuild"
|
||||
* - A tag: "alpha", "beta", "rc", "autobuild", "autobuild-latest", or "deploytest"
|
||||
* - "alpha", "beta", "rc": Appends the tag to the current base version (e.g., 1.2.3-beta)
|
||||
* - "autobuild": Appends a timestamped autobuild tag (e.g., 1.2.3+autobuild.2406101530)
|
||||
* - "autobuild-latest": Appends an autobuild tag with latest Tauri commit (e.g., 1.2.3+autobuild.0614.a1b2c3d)
|
||||
* - "deploytest": Appends a timestamped deploytest tag (e.g., 1.2.3+deploytest.2406101530)
|
||||
*
|
||||
* Examples:
|
||||
* pnpm release-version 1.2.3
|
||||
* pnpm release-version v1.2.3-beta
|
||||
* pnpm release-version beta
|
||||
* pnpm release-version autobuild
|
||||
* pnpm release-version autobuild-latest
|
||||
* pnpm release-version deploytest
|
||||
*
|
||||
* The script will:
|
||||
* - Validate and normalize the version argument
|
||||
@@ -25,10 +29,10 @@
|
||||
* Errors are logged and the process exits with code 1 on failure.
|
||||
*/
|
||||
|
||||
import { execSync } from "child_process";
|
||||
import { program } from "commander";
|
||||
import fs from "fs/promises";
|
||||
import path from "path";
|
||||
import { program } from "commander";
|
||||
import { execSync } from "child_process";
|
||||
|
||||
/**
|
||||
* 获取当前 git 短 commit hash
|
||||
@@ -37,23 +41,61 @@ import { execSync } from "child_process";
|
||||
function getGitShortCommit() {
|
||||
try {
|
||||
return execSync("git rev-parse --short HEAD").toString().trim();
|
||||
} catch (e) {
|
||||
} catch {
|
||||
console.warn("[WARN]: Failed to get git short commit, fallback to 'nogit'");
|
||||
return "nogit";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 生成短时间戳(格式:YYMMDD)或带 commit(格式:YYMMDD.cc39b27)
|
||||
* @param {boolean} withCommit 是否带 commit
|
||||
* 获取最新 Tauri 相关提交的短 hash
|
||||
* @returns {string}
|
||||
*/
|
||||
function generateShortTimestamp(withCommit = false) {
|
||||
function getLatestTauriCommit() {
|
||||
try {
|
||||
const fullHash = execSync(
|
||||
"bash ./scripts-workflow/get_latest_tauri_commit.bash",
|
||||
)
|
||||
.toString()
|
||||
.trim();
|
||||
const shortHash = execSync(`git rev-parse --short ${fullHash}`)
|
||||
.toString()
|
||||
.trim();
|
||||
console.log(`[INFO]: Latest Tauri-related commit: ${shortHash}`);
|
||||
return shortHash;
|
||||
} catch (error) {
|
||||
console.warn(
|
||||
"[WARN]: Failed to get latest Tauri commit, fallback to current git short commit",
|
||||
);
|
||||
console.warn(`[WARN]: Error details: ${error.message}`);
|
||||
return getGitShortCommit();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 生成短时间戳(格式:MMDD)或带 commit(格式:MMDD.cc39b27)
|
||||
* 使用 Asia/Shanghai 时区
|
||||
* @param {boolean} withCommit 是否带 commit
|
||||
* @param {boolean} useTauriCommit 是否使用 Tauri 相关的 commit(仅当 withCommit 为 true 时有效)
|
||||
* @returns {string}
|
||||
*/
|
||||
function generateShortTimestamp(withCommit = false, useTauriCommit = false) {
|
||||
const now = new Date();
|
||||
const month = String(now.getMonth() + 1).padStart(2, "0");
|
||||
const day = String(now.getDate()).padStart(2, "0");
|
||||
|
||||
const formatter = new Intl.DateTimeFormat("en-CA", {
|
||||
timeZone: "Asia/Shanghai",
|
||||
month: "2-digit",
|
||||
day: "2-digit",
|
||||
});
|
||||
|
||||
const parts = formatter.formatToParts(now);
|
||||
const month = parts.find((part) => part.type === "month").value;
|
||||
const day = parts.find((part) => part.type === "day").value;
|
||||
|
||||
if (withCommit) {
|
||||
const gitShort = getGitShortCommit();
|
||||
const gitShort = useTauriCommit
|
||||
? getLatestTauriCommit()
|
||||
: getGitShortCommit();
|
||||
return `${month}${day}.${gitShort}`;
|
||||
}
|
||||
return `${month}${day}`;
|
||||
@@ -135,20 +177,19 @@ async function updateCargoVersion(newVersion) {
|
||||
const versionWithoutV = newVersion.startsWith("v")
|
||||
? newVersion.slice(1)
|
||||
: newVersion;
|
||||
const baseVersion = getBaseVersion(versionWithoutV);
|
||||
|
||||
const updatedLines = lines.map((line) => {
|
||||
if (line.trim().startsWith("version =")) {
|
||||
return line.replace(
|
||||
/version\s*=\s*"[^"]+"/,
|
||||
`version = "${baseVersion}"`,
|
||||
`version = "${versionWithoutV}"`,
|
||||
);
|
||||
}
|
||||
return line;
|
||||
});
|
||||
|
||||
await fs.writeFile(cargoTomlPath, updatedLines.join("\n"), "utf8");
|
||||
console.log(`[INFO]: Cargo.toml version updated to: ${baseVersion}`);
|
||||
console.log(`[INFO]: Cargo.toml version updated to: ${versionWithoutV}`);
|
||||
} catch (error) {
|
||||
console.error("Error updating Cargo.toml version:", error);
|
||||
throw error;
|
||||
@@ -168,19 +209,23 @@ async function updateTauriConfigVersion(newVersion) {
|
||||
const versionWithoutV = newVersion.startsWith("v")
|
||||
? newVersion.slice(1)
|
||||
: newVersion;
|
||||
const baseVersion = getBaseVersion(versionWithoutV);
|
||||
|
||||
console.log(
|
||||
"[INFO]: Current tauri.conf.json version is: ",
|
||||
tauriConfig.version,
|
||||
);
|
||||
tauriConfig.version = baseVersion;
|
||||
|
||||
// 使用完整版本信息,包含build metadata
|
||||
tauriConfig.version = versionWithoutV;
|
||||
|
||||
await fs.writeFile(
|
||||
tauriConfigPath,
|
||||
JSON.stringify(tauriConfig, null, 2),
|
||||
"utf8",
|
||||
);
|
||||
console.log(`[INFO]: tauri.conf.json version updated to: ${baseVersion}`);
|
||||
console.log(
|
||||
`[INFO]: tauri.conf.json version updated to: ${versionWithoutV}`,
|
||||
);
|
||||
} catch (error) {
|
||||
console.error("Error updating tauri.conf.json version:", error);
|
||||
throw error;
|
||||
@@ -214,15 +259,31 @@ async function main(versionArg) {
|
||||
|
||||
try {
|
||||
let newVersion;
|
||||
const validTags = ["alpha", "beta", "rc", "autobuild"];
|
||||
const validTags = [
|
||||
"alpha",
|
||||
"beta",
|
||||
"rc",
|
||||
"autobuild",
|
||||
"autobuild-latest",
|
||||
"deploytest",
|
||||
];
|
||||
|
||||
if (validTags.includes(versionArg.toLowerCase())) {
|
||||
const currentVersion = await getCurrentVersion();
|
||||
const baseVersion = getBaseVersion(currentVersion);
|
||||
|
||||
if (versionArg.toLowerCase() === "autobuild") {
|
||||
// 格式: 2.3.0+autobuild.250613.cc39b27
|
||||
newVersion = `${baseVersion}+autobuild.${generateShortTimestamp(true)}`;
|
||||
// 格式: 2.3.0+autobuild.1004.cc39b27
|
||||
// 使用 Tauri 相关的最新 commit hash
|
||||
newVersion = `${baseVersion}+autobuild.${generateShortTimestamp(true, true)}`;
|
||||
} else if (versionArg.toLowerCase() === "autobuild-latest") {
|
||||
// 格式: 2.3.0+autobuild.1004.a1b2c3d (使用最新 Tauri 提交)
|
||||
const latestTauriCommit = getLatestTauriCommit();
|
||||
newVersion = `${baseVersion}+autobuild.${generateShortTimestamp()}.${latestTauriCommit}`;
|
||||
} else if (versionArg.toLowerCase() === "deploytest") {
|
||||
// 格式: 2.3.0+deploytest.1004.cc39b27
|
||||
// 使用 Tauri 相关的最新 commit hash
|
||||
newVersion = `${baseVersion}+deploytest.${generateShortTimestamp(true, true)}`;
|
||||
} else {
|
||||
newVersion = `${baseVersion}-${versionArg.toLowerCase()}`;
|
||||
}
|
||||
|
||||
123
scripts/telegram.mjs
Normal file
123
scripts/telegram.mjs
Normal file
@@ -0,0 +1,123 @@
|
||||
import axios from "axios";
|
||||
import { readFileSync } from "fs";
|
||||
import { log_error, log_info, log_success } from "./utils.mjs";
|
||||
|
||||
const CHAT_ID_RELEASE = "@clash_verge_re"; // 正式发布频道
|
||||
const CHAT_ID_TEST = "@vergetest"; // 测试频道
|
||||
|
||||
async function sendTelegramNotification() {
|
||||
if (!process.env.TELEGRAM_BOT_TOKEN) {
|
||||
throw new Error("TELEGRAM_BOT_TOKEN is required");
|
||||
}
|
||||
|
||||
const version =
|
||||
process.env.VERSION ||
|
||||
(() => {
|
||||
const pkg = readFileSync("package.json", "utf-8");
|
||||
return JSON.parse(pkg).version;
|
||||
})();
|
||||
|
||||
const downloadUrl =
|
||||
process.env.DOWNLOAD_URL ||
|
||||
`https://github.com/clash-verge-rev/clash-verge-rev/releases/download/v${version}`;
|
||||
|
||||
const isAutobuild =
|
||||
process.env.BUILD_TYPE === "autobuild" || version.includes("autobuild");
|
||||
const chatId = isAutobuild ? CHAT_ID_TEST : CHAT_ID_RELEASE;
|
||||
const buildType = isAutobuild ? "滚动更新版" : "正式版";
|
||||
|
||||
log_info(`Preparing Telegram notification for ${buildType} ${version}`);
|
||||
log_info(`Target channel: ${chatId}`);
|
||||
log_info(`Download URL: ${downloadUrl}`);
|
||||
|
||||
// 读取发布说明和下载地址
|
||||
let releaseContent = "";
|
||||
try {
|
||||
releaseContent = readFileSync("release.txt", "utf-8");
|
||||
log_info("成功读取 release.txt 文件");
|
||||
} catch (error) {
|
||||
log_error("无法读取 release.txt,使用默认发布说明", error);
|
||||
releaseContent = "更多新功能现已支持,详细更新日志请查看发布页面。";
|
||||
}
|
||||
|
||||
// Markdown 转换为 HTML
|
||||
function convertMarkdownToTelegramHTML(content) {
|
||||
return content
|
||||
.split("\n")
|
||||
.map((line) => {
|
||||
if (line.trim().length === 0) {
|
||||
return "";
|
||||
} else if (line.startsWith("## ")) {
|
||||
return `<b>${line.replace("## ", "")}</b>`;
|
||||
} else if (line.startsWith("### ")) {
|
||||
return `<b>${line.replace("### ", "")}</b>`;
|
||||
} else if (line.startsWith("#### ")) {
|
||||
return `<b>${line.replace("#### ", "")}</b>`;
|
||||
} else {
|
||||
let processedLine = line.replace(
|
||||
/\[([^\]]+)\]\(([^)]+)\)/g,
|
||||
(match, text, url) => {
|
||||
const encodedUrl = encodeURI(url);
|
||||
return `<a href="${encodedUrl}">${text}</a>`;
|
||||
},
|
||||
);
|
||||
processedLine = processedLine.replace(
|
||||
/\*\*([^*]+)\*\*/g,
|
||||
"<b>$1</b>",
|
||||
);
|
||||
return processedLine;
|
||||
}
|
||||
})
|
||||
.join("\n");
|
||||
}
|
||||
|
||||
function normalizeDetailsTags(content) {
|
||||
return content
|
||||
.replace(
|
||||
/<summary>\s*<strong>\s*(.*?)\s*<\/strong>\s*<\/summary>/g,
|
||||
"\n<b>$1</b>\n",
|
||||
)
|
||||
.replace(/<summary>\s*(.*?)\s*<\/summary>/g, "\n<b>$1</b>\n")
|
||||
.replace(/<\/?details>/g, "")
|
||||
.replace(/<\/?strong>/g, (m) => (m === "</strong>" ? "</b>" : "<b>"))
|
||||
.replace(/<br\s*\/?>/g, "\n");
|
||||
}
|
||||
|
||||
releaseContent = normalizeDetailsTags(releaseContent);
|
||||
const formattedContent = convertMarkdownToTelegramHTML(releaseContent);
|
||||
|
||||
const releaseTitle = isAutobuild ? "滚动更新版发布" : "正式发布";
|
||||
const encodedVersion = encodeURIComponent(version);
|
||||
const content = `<b>🎉 <a href="https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/autobuild">Clash Verge Rev v${version}</a> ${releaseTitle}</b>\n\n${formattedContent}`;
|
||||
|
||||
// 发送到 Telegram
|
||||
try {
|
||||
await axios.post(
|
||||
`https://api.telegram.org/bot${process.env.TELEGRAM_BOT_TOKEN}/sendMessage`,
|
||||
{
|
||||
chat_id: chatId,
|
||||
text: content,
|
||||
link_preview_options: {
|
||||
is_disabled: false,
|
||||
url: `https://github.com/clash-verge-rev/clash-verge-rev/releases/tag/v${encodedVersion}`,
|
||||
prefer_large_media: true,
|
||||
},
|
||||
parse_mode: "HTML",
|
||||
},
|
||||
);
|
||||
log_success(`✅ Telegram 通知发送成功到 ${chatId}`);
|
||||
} catch (error) {
|
||||
log_error(
|
||||
`❌ Telegram 通知发送失败到 ${chatId}:`,
|
||||
error.response?.data || error.message,
|
||||
error,
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
// 执行函数
|
||||
sendTelegramNotification().catch((error) => {
|
||||
log_error("脚本执行失败:", error);
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -8,7 +8,7 @@ const UPDATE_LOG = "UPDATELOG.md";
|
||||
export async function resolveUpdateLog(tag) {
|
||||
const cwd = process.cwd();
|
||||
|
||||
const reTitle = /^## v[\d\.]+/;
|
||||
const reTitle = /^## v[\d.]+/;
|
||||
const reEnd = /^---/;
|
||||
|
||||
const file = path.join(cwd, UPDATE_LOG);
|
||||
@@ -54,7 +54,7 @@ export async function resolveUpdateLogDefault() {
|
||||
|
||||
const data = await fsp.readFile(file, "utf-8");
|
||||
|
||||
const reTitle = /^## v[\d\.]+/;
|
||||
const reTitle = /^## v[\d.]+/;
|
||||
const reEnd = /^---/;
|
||||
|
||||
let isCapturing = false;
|
||||
|
||||
@@ -1 +1,2 @@
|
||||
avoid-breaking-exported-api = true
|
||||
cognitive-complexity-threshold = 25
|
||||
4416
src-tauri/Cargo.lock
generated
4416
src-tauri/Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
@@ -1,87 +1,91 @@
|
||||
[package]
|
||||
name = "clash-verge"
|
||||
version = "2.3.2"
|
||||
version = "2.4.3"
|
||||
description = "clash verge"
|
||||
authors = ["zzzgydi", "wonfen", "MystiPanda"]
|
||||
authors = ["zzzgydi", "Tunglies", "wonfen", "MystiPanda"]
|
||||
license = "GPL-3.0-only"
|
||||
repository = "https://github.com/clash-verge-rev/clash-verge-rev.git"
|
||||
default-run = "clash-verge"
|
||||
edition = "2021"
|
||||
edition = "2024"
|
||||
build = "build.rs"
|
||||
|
||||
[package.metadata.bundle]
|
||||
identifier = "io.github.clash-verge-rev.clash-verge-rev"
|
||||
|
||||
[build-dependencies]
|
||||
tauri-build = { version = "2.3.0", features = [] }
|
||||
tauri-build = { version = "2.5.1", features = [] }
|
||||
|
||||
[dependencies]
|
||||
warp = "0.3.7"
|
||||
anyhow = "1.0.98"
|
||||
dirs = "6.0"
|
||||
warp = { version = "0.4.2", features = ["server"] }
|
||||
anyhow = "1.0.100"
|
||||
open = "5.3.2"
|
||||
log = "0.4.27"
|
||||
log = "0.4.28"
|
||||
dunce = "1.0.5"
|
||||
log4rs = "1.3.0"
|
||||
nanoid = "0.4"
|
||||
chrono = "0.4.41"
|
||||
sysinfo = "0.35.2"
|
||||
boa_engine = "0.20.0"
|
||||
serde_json = "1.0.140"
|
||||
serde_yaml = "0.9.34-deprecated"
|
||||
chrono = "0.4.42"
|
||||
sysinfo = { version = "0.37.2", features = ["network", "system"] }
|
||||
boa_engine = "0.21.0"
|
||||
serde_json = "1.0.145"
|
||||
serde_yaml_ng = "0.10.0"
|
||||
once_cell = "1.21.3"
|
||||
lazy_static = "1.5.0"
|
||||
port_scanner = "0.1.5"
|
||||
delay_timer = "0.11.6"
|
||||
parking_lot = "0.12.4"
|
||||
percent-encoding = "2.3.1"
|
||||
tokio = { version = "1.45.1", features = [
|
||||
parking_lot = "0.12.5"
|
||||
percent-encoding = "2.3.2"
|
||||
tokio = { version = "1.48.0", features = [
|
||||
"rt-multi-thread",
|
||||
"macros",
|
||||
"time",
|
||||
"sync",
|
||||
] }
|
||||
serde = { version = "1.0.219", features = ["derive"] }
|
||||
reqwest = { version = "0.12.20", features = ["json", "rustls-tls", "cookies"] }
|
||||
regex = "1.11.1"
|
||||
serde = { version = "1.0.228", features = ["derive"] }
|
||||
reqwest = { version = "0.12.24", features = ["json", "cookies"] }
|
||||
regex = "1.12.2"
|
||||
sysproxy = { git = "https://github.com/clash-verge-rev/sysproxy-rs" }
|
||||
image = "0.25.6"
|
||||
imageproc = "0.25.0"
|
||||
tauri = { version = "2.6.2", features = [
|
||||
tauri = { version = "2.9.2", features = [
|
||||
"protocol-asset",
|
||||
"devtools",
|
||||
"tray-icon",
|
||||
"image-ico",
|
||||
"image-png",
|
||||
] }
|
||||
network-interface = { version = "2.0.1", features = ["serde"] }
|
||||
tauri-plugin-shell = "2.3.0"
|
||||
tauri-plugin-dialog = "2.3.0"
|
||||
tauri-plugin-fs = "2.4.0"
|
||||
tauri-plugin-process = "2.3.0"
|
||||
tauri-plugin-clipboard-manager = "2.3.0"
|
||||
tauri-plugin-deep-link = "2.4.0"
|
||||
tauri-plugin-devtools = "2.0.0"
|
||||
tauri-plugin-window-state = "2.3.0"
|
||||
zip = "4.2.0"
|
||||
reqwest_dav = "0.2.1"
|
||||
network-interface = { version = "2.0.3", features = ["serde"] }
|
||||
tauri-plugin-shell = "2.3.3"
|
||||
tauri-plugin-dialog = "2.4.2"
|
||||
tauri-plugin-fs = "2.4.4"
|
||||
tauri-plugin-process = "2.3.1"
|
||||
tauri-plugin-clipboard-manager = "2.3.2"
|
||||
tauri-plugin-deep-link = "2.4.5"
|
||||
tauri-plugin-window-state = "2.4.1"
|
||||
zip = "6.0.0"
|
||||
reqwest_dav = "0.2.2"
|
||||
aes-gcm = { version = "0.10.3", features = ["std"] }
|
||||
base64 = "0.22.1"
|
||||
getrandom = "0.3.3"
|
||||
tokio-tungstenite = "0.27.0"
|
||||
getrandom = "0.3.4"
|
||||
futures = "0.3.31"
|
||||
sys-locale = "0.3.2"
|
||||
async-trait = "0.1.88"
|
||||
mihomo_api = { path = "src_crates/crate_mihomo_api" }
|
||||
ab_glyph = "0.2.29"
|
||||
tungstenite = "0.27.0"
|
||||
libc = "0.2.174"
|
||||
gethostname = "1.0.2"
|
||||
hmac = "0.12.1"
|
||||
sha2 = "0.10.9"
|
||||
hex = "0.4.3"
|
||||
libc = "0.2.177"
|
||||
gethostname = "1.1.0"
|
||||
scopeguard = "1.2.0"
|
||||
tauri-plugin-notification = "2.3.0"
|
||||
tauri-plugin-notification = "2.3.3"
|
||||
tokio-stream = "0.1.17"
|
||||
isahc = { version = "1.7.2", default-features = false, features = [
|
||||
"text-decoding",
|
||||
"parking_lot",
|
||||
] }
|
||||
backoff = { version = "0.4.0", features = ["tokio"] }
|
||||
compact_str = { version = "0.9.0", features = ["serde"] }
|
||||
tauri-plugin-http = "2.5.4"
|
||||
flexi_logger = "0.31.7"
|
||||
console-subscriber = { version = "0.5.0", optional = true }
|
||||
tauri-plugin-devtools = { version = "2.0.1" }
|
||||
tauri-plugin-mihomo = { git = "https://github.com/clash-verge-rev/tauri-plugin-mihomo" }
|
||||
clash_verge_logger = { git = "https://github.com/clash-verge-rev/clash-verge-logger" }
|
||||
async-trait = "0.1.89"
|
||||
smartstring = { version = "1.0.1", features = ["serde"] }
|
||||
clash_verge_service_ipc = { version = "2.0.21", features = [
|
||||
"client",
|
||||
], git = "https://github.com/clash-verge-rev/clash-verge-service-ipc" }
|
||||
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
runas = "=1.2.0"
|
||||
@@ -100,54 +104,124 @@ winapi = { version = "0.3.9", features = [
|
||||
"winhttp",
|
||||
"winreg",
|
||||
] }
|
||||
windows-sys = { version = "0.61.2", features = [
|
||||
"Win32_Foundation",
|
||||
"Win32_Graphics_Gdi",
|
||||
"Win32_System_SystemServices",
|
||||
"Win32_UI_WindowsAndMessaging",
|
||||
] }
|
||||
|
||||
[target.'cfg(target_os = "linux")'.dependencies]
|
||||
users = "0.11.0"
|
||||
|
||||
[target.'cfg(unix)'.dependencies]
|
||||
signal-hook = "0.3.18"
|
||||
|
||||
[target.'cfg(not(any(target_os = "android", target_os = "ios")))'.dependencies]
|
||||
tauri-plugin-autostart = "2.5.0"
|
||||
tauri-plugin-global-shortcut = "2.3.0"
|
||||
tauri-plugin-autostart = "2.5.1"
|
||||
tauri-plugin-global-shortcut = "2.3.1"
|
||||
tauri-plugin-updater = "2.9.0"
|
||||
|
||||
[features]
|
||||
default = ["custom-protocol"]
|
||||
custom-protocol = ["tauri/custom-protocol"]
|
||||
verge-dev = []
|
||||
verge-dev = ["clash_verge_logger/color"]
|
||||
tauri-dev = []
|
||||
tokio-trace = ["console-subscriber"]
|
||||
clippy = ["tauri/test"]
|
||||
tracing = []
|
||||
|
||||
[[bench]]
|
||||
name = "draft_benchmark"
|
||||
path = "benches/draft_benchmark.rs"
|
||||
harness = false
|
||||
|
||||
[profile.release]
|
||||
panic = "abort"
|
||||
codegen-units = 1
|
||||
lto = true
|
||||
opt-level = "s"
|
||||
lto = "thin"
|
||||
opt-level = 3
|
||||
debug = false
|
||||
strip = true
|
||||
overflow-checks = false
|
||||
rpath = false
|
||||
|
||||
[profile.dev]
|
||||
incremental = true
|
||||
codegen-units = 256 # 增加编译单元,提升编译速度
|
||||
opt-level = 0 # 禁用优化,进一步提升编译速度
|
||||
debug = true # 保留调试信息
|
||||
strip = false # 不剥离符号,保留调试信息
|
||||
codegen-units = 64
|
||||
opt-level = 0
|
||||
debug = true
|
||||
strip = "none"
|
||||
overflow-checks = true
|
||||
lto = false
|
||||
rpath = false
|
||||
|
||||
[profile.fast-release]
|
||||
inherits = "release" # 继承 release 的配置
|
||||
panic = "abort" # 与 release 相同
|
||||
codegen-units = 256 # 增加编译单元,提升编译速度
|
||||
lto = false # 禁用 LTO,提升编译速度
|
||||
opt-level = 0 # 禁用优化,大幅提升编译速度
|
||||
debug = true # 保留调试信息
|
||||
strip = false # 不剥离符号,保留调试信息
|
||||
inherits = "release"
|
||||
codegen-units = 64
|
||||
incremental = true
|
||||
lto = false
|
||||
opt-level = 0
|
||||
debug = true
|
||||
strip = false
|
||||
|
||||
[lib]
|
||||
name = "app_lib"
|
||||
crate-type = ["staticlib", "cdylib", "rlib"]
|
||||
|
||||
[dev-dependencies]
|
||||
tempfile = "3.20.0"
|
||||
criterion = { version = "0.7.0", features = ["async_tokio"] }
|
||||
|
||||
[workspace]
|
||||
members = ["src_crates/crate_mihomo_api"]
|
||||
[lints.clippy]
|
||||
# Core categories - most important for code safety and correctness
|
||||
correctness = { level = "deny", priority = -1 }
|
||||
suspicious = { level = "deny", priority = -1 }
|
||||
|
||||
# [patch.crates-io]
|
||||
# bitflags = { git = "https://github.com/bitflags/bitflags", rev = "2.9.0" }
|
||||
# zerocopy = { git = "https://github.com/google/zerocopy", rev = "v0.8.24" }
|
||||
# tungstenite = { git = "https://github.com/snapview/tungstenite-rs", rev = "v0.26.2" }
|
||||
# Critical safety lints - warn for now due to extensive existing usage
|
||||
unwrap_used = "warn"
|
||||
expect_used = "warn"
|
||||
panic = "deny"
|
||||
unimplemented = "deny"
|
||||
|
||||
# Development quality lints
|
||||
todo = "warn"
|
||||
dbg_macro = "warn"
|
||||
#print_stdout = "warn"
|
||||
#print_stderr = "warn"
|
||||
|
||||
# Performance lints for proxy application
|
||||
clone_on_ref_ptr = "warn"
|
||||
rc_clone_in_vec_init = "warn"
|
||||
large_stack_arrays = "warn"
|
||||
large_const_arrays = "warn"
|
||||
|
||||
# Security lints
|
||||
#integer_division = "warn"
|
||||
#lossy_float_literal = "warn"
|
||||
#default_numeric_fallback = "warn"
|
||||
|
||||
# Mutex and async lints - strict control
|
||||
async_yields_async = "deny" # Prevents missing await in async blocks
|
||||
mutex_atomic = "deny" # Use atomics instead of Mutex<bool/int>
|
||||
mutex_integer = "deny" # Use AtomicInt instead of Mutex<int>
|
||||
rc_mutex = "deny" # Single-threaded Rc with Mutex is wrong
|
||||
unused_async = "deny" # Too many false positives in Tauri/framework code
|
||||
await_holding_lock = "deny"
|
||||
large_futures = "deny"
|
||||
future_not_send = "deny"
|
||||
|
||||
# Common style improvements
|
||||
redundant_else = "deny" # Too many in existing code
|
||||
needless_continue = "deny" # Too many in existing code
|
||||
needless_raw_string_hashes = "deny" # Too many in existing code
|
||||
|
||||
# Disable noisy categories for existing codebase but keep them available
|
||||
#style = { level = "allow", priority = -1 }
|
||||
#complexity = { level = "allow", priority = -1 }
|
||||
#perf = { level = "allow", priority = -1 }
|
||||
#pedantic = { level = "allow", priority = -1 }
|
||||
#nursery = { level = "allow", priority = -1 }
|
||||
#restriction = { level = "allow", priority = -1 }
|
||||
|
||||
or_fun_call = "deny"
|
||||
cognitive_complexity = "deny"
|
||||
|
||||
111
src-tauri/benches/draft_benchmark.rs
Normal file
111
src-tauri/benches/draft_benchmark.rs
Normal file
@@ -0,0 +1,111 @@
|
||||
use criterion::{Criterion, criterion_group, criterion_main};
|
||||
use std::hint::black_box;
|
||||
use std::process;
|
||||
use tokio::runtime::Runtime;
|
||||
|
||||
use app_lib::config::IVerge;
|
||||
use app_lib::utils::Draft as DraftNew;
|
||||
|
||||
/// 创建测试数据
|
||||
fn make_draft() -> DraftNew<Box<IVerge>> {
|
||||
let verge = Box::new(IVerge {
|
||||
enable_auto_launch: Some(true),
|
||||
enable_tun_mode: Some(false),
|
||||
..Default::default()
|
||||
});
|
||||
DraftNew::from(verge)
|
||||
}
|
||||
|
||||
pub fn bench_draft(c: &mut Criterion) {
|
||||
let rt = Runtime::new().unwrap_or_else(|e| {
|
||||
eprintln!("Tokio runtime init failed: {e}");
|
||||
process::exit(1);
|
||||
});
|
||||
|
||||
let mut group = c.benchmark_group("draft");
|
||||
group.sample_size(100);
|
||||
group.warm_up_time(std::time::Duration::from_millis(300));
|
||||
group.measurement_time(std::time::Duration::from_secs(1));
|
||||
|
||||
group.bench_function("data_mut", |b| {
|
||||
b.iter(|| {
|
||||
let draft = black_box(make_draft());
|
||||
let mut data = draft.data_mut();
|
||||
data.enable_tun_mode = Some(true);
|
||||
black_box(&data.enable_tun_mode);
|
||||
});
|
||||
});
|
||||
|
||||
group.bench_function("draft_mut_first", |b| {
|
||||
b.iter(|| {
|
||||
let draft = black_box(make_draft());
|
||||
let mut d = draft.draft_mut();
|
||||
d.enable_auto_launch = Some(false);
|
||||
black_box(&d.enable_auto_launch);
|
||||
});
|
||||
});
|
||||
|
||||
group.bench_function("draft_mut_existing", |b| {
|
||||
b.iter(|| {
|
||||
let draft = black_box(make_draft());
|
||||
{
|
||||
let mut first = draft.draft_mut();
|
||||
first.enable_tun_mode = Some(true);
|
||||
black_box(&first.enable_tun_mode);
|
||||
}
|
||||
let mut second = draft.draft_mut();
|
||||
second.enable_tun_mode = Some(false);
|
||||
black_box(&second.enable_tun_mode);
|
||||
});
|
||||
});
|
||||
|
||||
group.bench_function("latest_ref", |b| {
|
||||
b.iter(|| {
|
||||
let draft = black_box(make_draft());
|
||||
let latest = draft.latest_ref();
|
||||
black_box(&latest.enable_auto_launch);
|
||||
});
|
||||
});
|
||||
|
||||
group.bench_function("apply", |b| {
|
||||
b.iter(|| {
|
||||
let draft = black_box(make_draft());
|
||||
{
|
||||
let mut d = draft.draft_mut();
|
||||
d.enable_auto_launch = Some(false);
|
||||
}
|
||||
draft.apply();
|
||||
black_box(&draft);
|
||||
});
|
||||
});
|
||||
|
||||
group.bench_function("discard", |b| {
|
||||
b.iter(|| {
|
||||
let draft = black_box(make_draft());
|
||||
{
|
||||
let mut d = draft.draft_mut();
|
||||
d.enable_auto_launch = Some(false);
|
||||
}
|
||||
draft.discard();
|
||||
black_box(&draft);
|
||||
});
|
||||
});
|
||||
|
||||
group.bench_function("with_data_modify_async", |b| {
|
||||
b.to_async(&rt).iter(|| async {
|
||||
let draft = black_box(make_draft());
|
||||
let _: Result<(), anyhow::Error> = draft
|
||||
.with_data_modify::<_, _, _, anyhow::Error>(|mut box_data| async move {
|
||||
box_data.enable_auto_launch =
|
||||
Some(!box_data.enable_auto_launch.unwrap_or(false));
|
||||
Ok((box_data, ()))
|
||||
})
|
||||
.await;
|
||||
});
|
||||
});
|
||||
|
||||
group.finish();
|
||||
}
|
||||
|
||||
criterion_group!(benches, bench_draft);
|
||||
criterion_main!(benches);
|
||||
@@ -1,3 +1,9 @@
|
||||
fn main() {
|
||||
tauri_build::build()
|
||||
#[cfg(feature = "clippy")]
|
||||
{
|
||||
println!("cargo:warning=Skipping tauri_build during Clippy");
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "clippy"))]
|
||||
tauri_build::build();
|
||||
}
|
||||
|
||||
@@ -18,6 +18,13 @@
|
||||
"autostart:allow-disable",
|
||||
"autostart:allow-is-enabled",
|
||||
"core:window:allow-set-theme",
|
||||
"notification:default"
|
||||
"notification:default",
|
||||
"http:default",
|
||||
"http:allow-fetch",
|
||||
{
|
||||
"identifier": "http:default",
|
||||
"allow": [{ "url": "https://*/*" }, { "url": "http://*/*" }]
|
||||
},
|
||||
"mihomo:default"
|
||||
]
|
||||
}
|
||||
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 111 KiB After Width: | Height: | Size: 44 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 107 KiB After Width: | Height: | Size: 41 KiB |
@@ -1,4 +1,4 @@
|
||||
#!/bin/bash
|
||||
chmod +x /usr/bin/install-service
|
||||
chmod +x /usr/bin/uninstall-service
|
||||
chmod +x /usr/bin/clash-verge-service-install
|
||||
chmod +x /usr/bin/clash-verge-service-uninstall
|
||||
chmod +x /usr/bin/clash-verge-service
|
||||
|
||||
@@ -1,2 +1,2 @@
|
||||
#!/bin/bash
|
||||
/usr/bin/uninstall-service
|
||||
/usr/bin/clash-verge-service-uninstall
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,42 +1,64 @@
|
||||
use super::CmdResult;
|
||||
use crate::core::sysopt::Sysopt;
|
||||
use crate::{
|
||||
cmd::StringifyErr,
|
||||
feat, logging,
|
||||
utils::{dirs, logging::Type},
|
||||
wrap_err,
|
||||
utils::{
|
||||
dirs::{self, PathBufExec},
|
||||
logging::Type,
|
||||
},
|
||||
};
|
||||
use tauri::Manager;
|
||||
use smartstring::alias::String;
|
||||
use std::path::Path;
|
||||
use tauri::{AppHandle, Manager};
|
||||
use tokio::fs;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
|
||||
/// 打开应用程序所在目录
|
||||
#[tauri::command]
|
||||
pub fn open_app_dir() -> CmdResult<()> {
|
||||
let app_dir = wrap_err!(dirs::app_home_dir())?;
|
||||
wrap_err!(open::that(app_dir))
|
||||
pub async fn open_app_dir() -> CmdResult<()> {
|
||||
let app_dir = dirs::app_home_dir().stringify_err()?;
|
||||
open::that(app_dir).stringify_err()
|
||||
}
|
||||
|
||||
/// 打开核心所在目录
|
||||
#[tauri::command]
|
||||
pub fn open_core_dir() -> CmdResult<()> {
|
||||
let core_dir = wrap_err!(tauri::utils::platform::current_exe())?;
|
||||
pub async fn open_core_dir() -> CmdResult<()> {
|
||||
let core_dir = tauri::utils::platform::current_exe().stringify_err()?;
|
||||
let core_dir = core_dir.parent().ok_or("failed to get core dir")?;
|
||||
wrap_err!(open::that(core_dir))
|
||||
open::that(core_dir).stringify_err()
|
||||
}
|
||||
|
||||
/// 打开日志目录
|
||||
#[tauri::command]
|
||||
pub fn open_logs_dir() -> CmdResult<()> {
|
||||
let log_dir = wrap_err!(dirs::app_logs_dir())?;
|
||||
wrap_err!(open::that(log_dir))
|
||||
pub async fn open_logs_dir() -> CmdResult<()> {
|
||||
let log_dir = dirs::app_logs_dir().stringify_err()?;
|
||||
open::that(log_dir).stringify_err()
|
||||
}
|
||||
|
||||
/// 打开网页链接
|
||||
#[tauri::command]
|
||||
pub fn open_web_url(url: String) -> CmdResult<()> {
|
||||
wrap_err!(open::that(url))
|
||||
open::that(url.as_str()).stringify_err()
|
||||
}
|
||||
|
||||
// TODO 后续可以为前端提供接口,当前作为托盘菜单使用
|
||||
/// 打开 Verge 最新日志
|
||||
#[tauri::command]
|
||||
pub async fn open_app_log() -> CmdResult<()> {
|
||||
open::that(dirs::app_latest_log().stringify_err()?).stringify_err()
|
||||
}
|
||||
|
||||
// TODO 后续可以为前端提供接口,当前作为托盘菜单使用
|
||||
/// 打开 Clash 最新日志
|
||||
#[tauri::command]
|
||||
pub async fn open_core_log() -> CmdResult<()> {
|
||||
open::that(dirs::clash_latest_log().stringify_err()?).stringify_err()
|
||||
}
|
||||
|
||||
/// 打开/关闭开发者工具
|
||||
#[tauri::command]
|
||||
pub fn open_devtools(app_handle: tauri::AppHandle) {
|
||||
pub fn open_devtools(app_handle: AppHandle) {
|
||||
if let Some(window) = app_handle.get_webview_window("main") {
|
||||
if !window.is_devtools_open() {
|
||||
window.open_devtools();
|
||||
@@ -48,14 +70,14 @@ pub fn open_devtools(app_handle: tauri::AppHandle) {
|
||||
|
||||
/// 退出应用
|
||||
#[tauri::command]
|
||||
pub fn exit_app() {
|
||||
feat::quit();
|
||||
pub async fn exit_app() {
|
||||
feat::quit().await;
|
||||
}
|
||||
|
||||
/// 重启应用
|
||||
#[tauri::command]
|
||||
pub async fn restart_app() -> CmdResult<()> {
|
||||
feat::restart_app();
|
||||
feat::restart_app().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -68,36 +90,39 @@ pub fn get_portable_flag() -> CmdResult<bool> {
|
||||
/// 获取应用目录
|
||||
#[tauri::command]
|
||||
pub fn get_app_dir() -> CmdResult<String> {
|
||||
let app_home_dir = wrap_err!(dirs::app_home_dir())?
|
||||
let app_home_dir = dirs::app_home_dir()
|
||||
.stringify_err()?
|
||||
.to_string_lossy()
|
||||
.to_string();
|
||||
.into();
|
||||
Ok(app_home_dir)
|
||||
}
|
||||
|
||||
/// 获取当前自启动状态
|
||||
#[tauri::command]
|
||||
pub fn get_auto_launch_status() -> CmdResult<bool> {
|
||||
use crate::core::sysopt::Sysopt;
|
||||
wrap_err!(Sysopt::global().get_launch_status())
|
||||
Sysopt::global().get_launch_status().stringify_err()
|
||||
}
|
||||
|
||||
/// 下载图标缓存
|
||||
#[tauri::command]
|
||||
pub async fn download_icon_cache(url: String, name: String) -> CmdResult<String> {
|
||||
let icon_cache_dir = wrap_err!(dirs::app_home_dir())?.join("icons").join("cache");
|
||||
let icon_path = icon_cache_dir.join(&name);
|
||||
let icon_cache_dir = dirs::app_home_dir()
|
||||
.stringify_err()?
|
||||
.join("icons")
|
||||
.join("cache");
|
||||
let icon_path = icon_cache_dir.join(name.as_str());
|
||||
|
||||
if icon_path.exists() {
|
||||
return Ok(icon_path.to_string_lossy().to_string());
|
||||
return Ok(icon_path.to_string_lossy().into());
|
||||
}
|
||||
|
||||
if !icon_cache_dir.exists() {
|
||||
let _ = std::fs::create_dir_all(&icon_cache_dir);
|
||||
let _ = fs::create_dir_all(&icon_cache_dir).await;
|
||||
}
|
||||
|
||||
let temp_path = icon_cache_dir.join(format!("{}.downloading", &name));
|
||||
let temp_path = icon_cache_dir.join(format!("{}.downloading", name.as_str()));
|
||||
|
||||
let response = wrap_err!(reqwest::get(&url).await)?;
|
||||
let response = reqwest::get(url.as_str()).await.stringify_err()?;
|
||||
|
||||
let content_type = response
|
||||
.headers()
|
||||
@@ -107,7 +132,7 @@ pub async fn download_icon_cache(url: String, name: String) -> CmdResult<String>
|
||||
|
||||
let is_image = content_type.starts_with("image/");
|
||||
|
||||
let content = wrap_err!(response.bytes().await)?;
|
||||
let content = response.bytes().await.stringify_err()?;
|
||||
|
||||
let is_html = content.len() > 15
|
||||
&& (content.starts_with(b"<!DOCTYPE html")
|
||||
@@ -116,38 +141,37 @@ pub async fn download_icon_cache(url: String, name: String) -> CmdResult<String>
|
||||
|
||||
if is_image && !is_html {
|
||||
{
|
||||
let mut file = match std::fs::File::create(&temp_path) {
|
||||
let mut file = match fs::File::create(&temp_path).await {
|
||||
Ok(file) => file,
|
||||
Err(_) => {
|
||||
if icon_path.exists() {
|
||||
return Ok(icon_path.to_string_lossy().to_string());
|
||||
} else {
|
||||
return Ok(icon_path.to_string_lossy().into());
|
||||
}
|
||||
return Err("Failed to create temporary file".into());
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
wrap_err!(std::io::copy(&mut content.as_ref(), &mut file))?;
|
||||
file.write_all(content.as_ref()).await.stringify_err()?;
|
||||
file.flush().await.stringify_err()?;
|
||||
}
|
||||
|
||||
if !icon_path.exists() {
|
||||
match std::fs::rename(&temp_path, &icon_path) {
|
||||
match fs::rename(&temp_path, &icon_path).await {
|
||||
Ok(_) => {}
|
||||
Err(_) => {
|
||||
let _ = std::fs::remove_file(&temp_path);
|
||||
let _ = temp_path.remove_if_exists().await;
|
||||
if icon_path.exists() {
|
||||
return Ok(icon_path.to_string_lossy().to_string());
|
||||
return Ok(icon_path.to_string_lossy().into());
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
let _ = std::fs::remove_file(&temp_path);
|
||||
let _ = temp_path.remove_if_exists().await;
|
||||
}
|
||||
|
||||
Ok(icon_path.to_string_lossy().to_string())
|
||||
Ok(icon_path.to_string_lossy().into())
|
||||
} else {
|
||||
let _ = std::fs::remove_file(&temp_path);
|
||||
Err(format!("下载的内容不是有效图片: {url}"))
|
||||
let _ = temp_path.remove_if_exists().await;
|
||||
Err(format!("下载的内容不是有效图片: {}", url.as_str()).into())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -160,66 +184,74 @@ pub struct IconInfo {
|
||||
|
||||
/// 复制图标文件
|
||||
#[tauri::command]
|
||||
pub fn copy_icon_file(path: String, icon_info: IconInfo) -> CmdResult<String> {
|
||||
use std::{fs, path::Path};
|
||||
pub async fn copy_icon_file(path: String, icon_info: IconInfo) -> CmdResult<String> {
|
||||
let file_path = Path::new(path.as_str());
|
||||
|
||||
let file_path = Path::new(&path);
|
||||
|
||||
let icon_dir = wrap_err!(dirs::app_home_dir())?.join("icons");
|
||||
let icon_dir = dirs::app_home_dir().stringify_err()?.join("icons");
|
||||
if !icon_dir.exists() {
|
||||
let _ = fs::create_dir_all(&icon_dir);
|
||||
let _ = fs::create_dir_all(&icon_dir).await;
|
||||
}
|
||||
let ext = match file_path.extension() {
|
||||
Some(e) => e.to_string_lossy().to_string(),
|
||||
None => "ico".to_string(),
|
||||
let ext: String = match file_path.extension() {
|
||||
Some(e) => e.to_string_lossy().into(),
|
||||
None => "ico".into(),
|
||||
};
|
||||
|
||||
let dest_path = icon_dir.join(format!(
|
||||
"{0}-{1}.{ext}",
|
||||
icon_info.name, icon_info.current_t
|
||||
icon_info.name.as_str(),
|
||||
icon_info.current_t.as_str()
|
||||
));
|
||||
if file_path.exists() {
|
||||
if icon_info.previous_t.trim() != "" {
|
||||
fs::remove_file(
|
||||
icon_dir.join(format!("{0}-{1}.png", icon_info.name, icon_info.previous_t)),
|
||||
)
|
||||
icon_dir
|
||||
.join(format!(
|
||||
"{0}-{1}.png",
|
||||
icon_info.name.as_str(),
|
||||
icon_info.previous_t.as_str()
|
||||
))
|
||||
.remove_if_exists()
|
||||
.await
|
||||
.unwrap_or_default();
|
||||
fs::remove_file(
|
||||
icon_dir.join(format!("{0}-{1}.ico", icon_info.name, icon_info.previous_t)),
|
||||
)
|
||||
icon_dir
|
||||
.join(format!(
|
||||
"{0}-{1}.ico",
|
||||
icon_info.name.as_str(),
|
||||
icon_info.previous_t.as_str()
|
||||
))
|
||||
.remove_if_exists()
|
||||
.await
|
||||
.unwrap_or_default();
|
||||
}
|
||||
logging!(
|
||||
info,
|
||||
Type::Cmd,
|
||||
true,
|
||||
"Copying icon file path: {:?} -> file dist: {:?}",
|
||||
path,
|
||||
dest_path
|
||||
);
|
||||
match fs::copy(file_path, &dest_path) {
|
||||
Ok(_) => Ok(dest_path.to_string_lossy().to_string()),
|
||||
Err(err) => Err(err.to_string()),
|
||||
match fs::copy(file_path, &dest_path).await {
|
||||
Ok(_) => Ok(dest_path.to_string_lossy().into()),
|
||||
Err(err) => Err(err.to_string().into()),
|
||||
}
|
||||
} else {
|
||||
Err("file not found".to_string())
|
||||
Err("file not found".into())
|
||||
}
|
||||
}
|
||||
|
||||
/// 通知UI已准备就绪
|
||||
#[tauri::command]
|
||||
pub fn notify_ui_ready() -> CmdResult<()> {
|
||||
log::info!(target: "app", "前端UI已准备就绪");
|
||||
crate::utils::resolve::mark_ui_ready();
|
||||
logging!(info, Type::Cmd, "前端UI已准备就绪");
|
||||
crate::utils::resolve::ui::mark_ui_ready();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// UI加载阶段
|
||||
#[tauri::command]
|
||||
pub fn update_ui_stage(stage: String) -> CmdResult<()> {
|
||||
log::info!(target: "app", "UI加载阶段更新: {stage}");
|
||||
logging!(info, Type::Cmd, "UI加载阶段更新: {}", stage.as_str());
|
||||
|
||||
use crate::utils::resolve::UiReadyStage;
|
||||
use crate::utils::resolve::ui::UiReadyStage;
|
||||
|
||||
let stage_enum = match stage.as_str() {
|
||||
"NotStarted" => UiReadyStage::NotStarted,
|
||||
@@ -228,19 +260,16 @@ pub fn update_ui_stage(stage: String) -> CmdResult<()> {
|
||||
"ResourcesLoaded" => UiReadyStage::ResourcesLoaded,
|
||||
"Ready" => UiReadyStage::Ready,
|
||||
_ => {
|
||||
log::warn!(target: "app", "未知的UI加载阶段: {stage}");
|
||||
return Err(format!("未知的UI加载阶段: {stage}"));
|
||||
logging!(
|
||||
warn,
|
||||
Type::Cmd,
|
||||
"Warning: 未知的UI加载阶段: {}",
|
||||
stage.as_str()
|
||||
);
|
||||
return Err(format!("未知的UI加载阶段: {}", stage.as_str()).into());
|
||||
}
|
||||
};
|
||||
|
||||
crate::utils::resolve::update_ui_ready_stage(stage_enum);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// 重置UI就绪状态
|
||||
#[tauri::command]
|
||||
pub fn reset_ui_ready_state() -> CmdResult<()> {
|
||||
log::info!(target: "app", "重置UI就绪状态");
|
||||
crate::utils::resolve::reset_ui_ready();
|
||||
crate::utils::resolve::ui::update_ui_ready_stage(stage_enum);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
36
src-tauri/src/cmd/backup.rs
Normal file
36
src-tauri/src/cmd/backup.rs
Normal file
@@ -0,0 +1,36 @@
|
||||
use super::CmdResult;
|
||||
use crate::{cmd::StringifyErr, feat};
|
||||
use feat::LocalBackupFile;
|
||||
use smartstring::alias::String;
|
||||
|
||||
/// Create a local backup
|
||||
#[tauri::command]
|
||||
pub async fn create_local_backup() -> CmdResult<()> {
|
||||
feat::create_local_backup().await.stringify_err()
|
||||
}
|
||||
|
||||
/// List local backups
|
||||
#[tauri::command]
|
||||
pub async fn list_local_backup() -> CmdResult<Vec<LocalBackupFile>> {
|
||||
feat::list_local_backup().await.stringify_err()
|
||||
}
|
||||
|
||||
/// Delete local backup
|
||||
#[tauri::command]
|
||||
pub async fn delete_local_backup(filename: String) -> CmdResult<()> {
|
||||
feat::delete_local_backup(filename).await.stringify_err()
|
||||
}
|
||||
|
||||
/// Restore local backup
|
||||
#[tauri::command]
|
||||
pub async fn restore_local_backup(filename: String) -> CmdResult<()> {
|
||||
feat::restore_local_backup(filename).await.stringify_err()
|
||||
}
|
||||
|
||||
/// Export local backup to a user selected destination
|
||||
#[tauri::command]
|
||||
pub async fn export_local_backup(filename: String, destination: String) -> CmdResult<()> {
|
||||
feat::export_local_backup(filename, destination)
|
||||
.await
|
||||
.stringify_err()
|
||||
}
|
||||
@@ -1,65 +1,75 @@
|
||||
use super::CmdResult;
|
||||
use crate::utils::dirs;
|
||||
use crate::{
|
||||
config::*, core::*, feat, module::mihomo::MihomoManager, process::AsyncHandler, wrap_err,
|
||||
cmd::StringifyErr,
|
||||
config::Config,
|
||||
constants,
|
||||
core::{CoreManager, handle, validate::CoreConfigValidator},
|
||||
};
|
||||
use serde_yaml::Mapping;
|
||||
use crate::{config::*, feat, logging, utils::logging::Type};
|
||||
use compact_str::CompactString;
|
||||
use serde_yaml_ng::Mapping;
|
||||
use smartstring::alias::String;
|
||||
use tokio::fs;
|
||||
|
||||
/// 复制Clash环境变量
|
||||
#[tauri::command]
|
||||
pub fn copy_clash_env() -> CmdResult {
|
||||
feat::copy_clash_env();
|
||||
pub async fn copy_clash_env() -> CmdResult {
|
||||
feat::copy_clash_env().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// 获取Clash信息
|
||||
#[tauri::command]
|
||||
pub fn get_clash_info() -> CmdResult<ClashInfo> {
|
||||
Ok(Config::clash().latest().get_client_info())
|
||||
pub async fn get_clash_info() -> CmdResult<ClashInfo> {
|
||||
Ok(Config::clash().await.latest_ref().get_client_info())
|
||||
}
|
||||
|
||||
/// 修改Clash配置
|
||||
#[tauri::command]
|
||||
pub async fn patch_clash_config(payload: Mapping) -> CmdResult {
|
||||
wrap_err!(feat::patch_clash(payload).await)
|
||||
feat::patch_clash(payload).await.stringify_err()
|
||||
}
|
||||
|
||||
/// 修改Clash模式
|
||||
#[tauri::command]
|
||||
pub async fn patch_clash_mode(payload: String) -> CmdResult {
|
||||
feat::change_clash_mode(payload);
|
||||
feat::change_clash_mode(payload).await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// 切换Clash核心
|
||||
#[tauri::command]
|
||||
pub async fn change_clash_core(clash_core: String) -> CmdResult<Option<String>> {
|
||||
log::info!(target: "app", "changing core to {clash_core}");
|
||||
logging!(info, Type::Config, "changing core to {clash_core}");
|
||||
|
||||
match CoreManager::global()
|
||||
.change_core(Some(clash_core.clone()))
|
||||
.await
|
||||
{
|
||||
match CoreManager::global().change_core(&clash_core).await {
|
||||
Ok(_) => {
|
||||
// 切换内核后重启内核
|
||||
match CoreManager::global().restart_core().await {
|
||||
Ok(_) => {
|
||||
log::info!(target: "app", "core changed and restarted to {clash_core}");
|
||||
handle::Handle::notice_message("config_core::change_success", &clash_core);
|
||||
logging!(
|
||||
info,
|
||||
Type::Core,
|
||||
"core changed and restarted to {clash_core}"
|
||||
);
|
||||
handle::Handle::notice_message("config_core::change_success", clash_core);
|
||||
handle::Handle::refresh_clash();
|
||||
Ok(None)
|
||||
}
|
||||
Err(err) => {
|
||||
let error_msg = format!("Core changed but failed to restart: {err}");
|
||||
log::error!(target: "app", "{error_msg}");
|
||||
handle::Handle::notice_message("config_core::change_error", &error_msg);
|
||||
let error_msg: String =
|
||||
format!("Core changed but failed to restart: {err}").into();
|
||||
handle::Handle::notice_message("config_core::change_error", error_msg.clone());
|
||||
logging!(error, Type::Core, "{error_msg}");
|
||||
Ok(Some(error_msg))
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(err) => {
|
||||
let error_msg = err.to_string();
|
||||
log::error!(target: "app", "failed to change core: {error_msg}");
|
||||
handle::Handle::notice_message("config_core::change_error", &error_msg);
|
||||
let error_msg: String = err;
|
||||
logging!(error, Type::Core, "failed to change core: {error_msg}");
|
||||
handle::Handle::notice_message("config_core::change_error", error_msg.clone());
|
||||
Ok(Some(error_msg))
|
||||
}
|
||||
}
|
||||
@@ -68,150 +78,146 @@ pub async fn change_clash_core(clash_core: String) -> CmdResult<Option<String>>
|
||||
/// 启动核心
|
||||
#[tauri::command]
|
||||
pub async fn start_core() -> CmdResult {
|
||||
wrap_err!(CoreManager::global().start_core().await)
|
||||
let result = CoreManager::global().start_core().await.stringify_err();
|
||||
if result.is_ok() {
|
||||
handle::Handle::refresh_clash();
|
||||
}
|
||||
result
|
||||
}
|
||||
|
||||
/// 关闭核心
|
||||
#[tauri::command]
|
||||
pub async fn stop_core() -> CmdResult {
|
||||
wrap_err!(CoreManager::global().stop_core().await)
|
||||
let result = CoreManager::global().stop_core().await.stringify_err();
|
||||
if result.is_ok() {
|
||||
handle::Handle::refresh_clash();
|
||||
}
|
||||
result
|
||||
}
|
||||
|
||||
/// 重启核心
|
||||
#[tauri::command]
|
||||
pub async fn restart_core() -> CmdResult {
|
||||
wrap_err!(CoreManager::global().restart_core().await)
|
||||
}
|
||||
|
||||
/// 获取代理延迟
|
||||
#[tauri::command]
|
||||
pub async fn clash_api_get_proxy_delay(
|
||||
name: String,
|
||||
url: Option<String>,
|
||||
timeout: i32,
|
||||
) -> CmdResult<serde_json::Value> {
|
||||
MihomoManager::global()
|
||||
.test_proxy_delay(&name, url, timeout)
|
||||
.await
|
||||
let result = CoreManager::global().restart_core().await.stringify_err();
|
||||
if result.is_ok() {
|
||||
handle::Handle::refresh_clash();
|
||||
}
|
||||
result
|
||||
}
|
||||
|
||||
/// 测试URL延迟
|
||||
#[tauri::command]
|
||||
pub async fn test_delay(url: String) -> CmdResult<u32> {
|
||||
Ok(feat::test_delay(url).await.unwrap_or(10000u32))
|
||||
let result = match feat::test_delay(url).await {
|
||||
Ok(delay) => delay,
|
||||
Err(e) => {
|
||||
logging!(error, Type::Cmd, "{}", e);
|
||||
10000u32
|
||||
}
|
||||
};
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
/// 保存DNS配置到单独文件
|
||||
#[tauri::command]
|
||||
pub async fn save_dns_config(dns_config: Mapping) -> CmdResult {
|
||||
use crate::utils::dirs;
|
||||
use serde_yaml;
|
||||
use std::fs;
|
||||
use serde_yaml_ng;
|
||||
use tokio::fs;
|
||||
|
||||
// 获取DNS配置文件路径
|
||||
let dns_path = dirs::app_home_dir()
|
||||
.map_err(|e| e.to_string())?
|
||||
.join("dns_config.yaml");
|
||||
.stringify_err()?
|
||||
.join(constants::files::DNS_CONFIG);
|
||||
|
||||
// 保存DNS配置到文件
|
||||
let yaml_str = serde_yaml::to_string(&dns_config).map_err(|e| e.to_string())?;
|
||||
fs::write(&dns_path, yaml_str).map_err(|e| e.to_string())?;
|
||||
log::info!(target: "app", "DNS config saved to {dns_path:?}");
|
||||
let yaml_str = serde_yaml_ng::to_string(&dns_config).stringify_err()?;
|
||||
fs::write(&dns_path, yaml_str).await.stringify_err()?;
|
||||
logging!(info, Type::Config, "DNS config saved to {dns_path:?}");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// 应用或撤销DNS配置
|
||||
#[tauri::command]
|
||||
pub fn apply_dns_config(apply: bool) -> CmdResult {
|
||||
pub async fn apply_dns_config(apply: bool) -> CmdResult {
|
||||
use crate::{
|
||||
config::Config,
|
||||
core::{handle, CoreManager},
|
||||
core::{CoreManager, handle},
|
||||
utils::dirs,
|
||||
};
|
||||
|
||||
// 使用spawn来处理异步操作
|
||||
AsyncHandler::spawn(move || async move {
|
||||
if apply {
|
||||
// 读取DNS配置文件
|
||||
let dns_path = match dirs::app_home_dir() {
|
||||
Ok(path) => path.join("dns_config.yaml"),
|
||||
Err(e) => {
|
||||
log::error!(target: "app", "Failed to get home dir: {e}");
|
||||
return;
|
||||
}
|
||||
};
|
||||
let dns_path = dirs::app_home_dir()
|
||||
.stringify_err()?
|
||||
.join(constants::files::DNS_CONFIG);
|
||||
|
||||
if !dns_path.exists() {
|
||||
log::warn!(target: "app", "DNS config file not found");
|
||||
return;
|
||||
logging!(warn, Type::Config, "DNS config file not found");
|
||||
return Err("DNS config file not found".into());
|
||||
}
|
||||
|
||||
let dns_yaml = match std::fs::read_to_string(&dns_path) {
|
||||
Ok(content) => content,
|
||||
Err(e) => {
|
||||
log::error!(target: "app", "Failed to read DNS config: {e}");
|
||||
return;
|
||||
}
|
||||
};
|
||||
let dns_yaml = fs::read_to_string(&dns_path).await.stringify_err_log(|e| {
|
||||
logging!(error, Type::Config, "Failed to read DNS config: {e}");
|
||||
})?;
|
||||
|
||||
// 解析DNS配置并创建patch
|
||||
let patch_config = match serde_yaml::from_str::<serde_yaml::Mapping>(&dns_yaml) {
|
||||
Ok(config) => {
|
||||
let mut patch = serde_yaml::Mapping::new();
|
||||
patch.insert("dns".into(), config.into());
|
||||
patch
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!(target: "app", "Failed to parse DNS config: {e}");
|
||||
return;
|
||||
}
|
||||
};
|
||||
// 解析DNS配置
|
||||
let patch_config = serde_yaml_ng::from_str::<serde_yaml_ng::Mapping>(&dns_yaml)
|
||||
.stringify_err_log(|e| {
|
||||
logging!(error, Type::Config, "Failed to parse DNS config: {e}");
|
||||
})?;
|
||||
|
||||
log::info!(target: "app", "Applying DNS config from file");
|
||||
logging!(info, Type::Config, "Applying DNS config from file");
|
||||
|
||||
// 重新生成配置,确保DNS配置被正确应用
|
||||
// 这里不调用patch_clash以避免将DNS配置写入config.yaml
|
||||
Config::runtime()
|
||||
.latest()
|
||||
.patch_config(patch_config.clone());
|
||||
// 创建包含DNS配置的patch
|
||||
let mut patch = serde_yaml_ng::Mapping::new();
|
||||
patch.insert("dns".into(), patch_config.into());
|
||||
|
||||
// 首先重新生成配置
|
||||
if let Err(err) = Config::generate().await {
|
||||
log::error!(target: "app", "Failed to regenerate config with DNS: {err}");
|
||||
return;
|
||||
}
|
||||
|
||||
// 然后应用新配置
|
||||
if let Err(err) = CoreManager::global().update_config().await {
|
||||
log::error!(target: "app", "Failed to apply config with DNS: {err}");
|
||||
} else {
|
||||
log::info!(target: "app", "DNS config successfully applied");
|
||||
handle::Handle::refresh_clash();
|
||||
}
|
||||
} else {
|
||||
// 当关闭DNS设置时,不需要对配置进行任何修改
|
||||
// 直接重新生成配置,让enhance函数自动跳过DNS配置的加载
|
||||
log::info!(target: "app", "DNS settings disabled, regenerating config");
|
||||
// 应用DNS配置到运行时配置
|
||||
Config::runtime().await.draft_mut().patch_config(patch);
|
||||
|
||||
// 重新生成配置
|
||||
if let Err(err) = Config::generate().await {
|
||||
log::error!(target: "app", "Failed to regenerate config: {err}");
|
||||
return;
|
||||
}
|
||||
Config::generate().await.stringify_err_log(|err| {
|
||||
let err = format!("Failed to regenerate config with DNS: {err}");
|
||||
logging!(error, Type::Config, "{err}");
|
||||
})?;
|
||||
|
||||
// 应用新配置
|
||||
match CoreManager::global().update_config().await {
|
||||
Ok(_) => {
|
||||
log::info!(target: "app", "Config regenerated successfully");
|
||||
CoreManager::global()
|
||||
.update_config()
|
||||
.await
|
||||
.stringify_err_log(|err| {
|
||||
let err = format!("Failed to apply config with DNS: {err}");
|
||||
logging!(error, Type::Config, "{err}");
|
||||
})?;
|
||||
|
||||
logging!(info, Type::Config, "DNS config successfully applied");
|
||||
handle::Handle::refresh_clash();
|
||||
} else {
|
||||
// 当关闭DNS设置时,重新生成配置(不加载DNS配置文件)
|
||||
logging!(
|
||||
info,
|
||||
Type::Config,
|
||||
"DNS settings disabled, regenerating config"
|
||||
);
|
||||
|
||||
Config::generate().await.stringify_err_log(|err| {
|
||||
let err = format!("Failed to regenerate config: {err}");
|
||||
logging!(error, Type::Config, "{err}");
|
||||
})?;
|
||||
|
||||
CoreManager::global()
|
||||
.update_config()
|
||||
.await
|
||||
.stringify_err_log(|err| {
|
||||
let err = format!("Failed to apply regenerated config: {err}");
|
||||
logging!(error, Type::Config, "{err}");
|
||||
})?;
|
||||
|
||||
logging!(info, Type::Config, "Config regenerated successfully");
|
||||
handle::Handle::refresh_clash();
|
||||
}
|
||||
Err(err) => {
|
||||
log::error!(target: "app", "Failed to apply regenerated config: {err}");
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -222,8 +228,8 @@ pub fn check_dns_config_exists() -> CmdResult<bool> {
|
||||
use crate::utils::dirs;
|
||||
|
||||
let dns_path = dirs::app_home_dir()
|
||||
.map_err(|e| e.to_string())?
|
||||
.join("dns_config.yaml");
|
||||
.stringify_err()?
|
||||
.join(constants::files::DNS_CONFIG);
|
||||
|
||||
Ok(dns_path.exists())
|
||||
}
|
||||
@@ -232,38 +238,41 @@ pub fn check_dns_config_exists() -> CmdResult<bool> {
|
||||
#[tauri::command]
|
||||
pub async fn get_dns_config_content() -> CmdResult<String> {
|
||||
use crate::utils::dirs;
|
||||
use std::fs;
|
||||
use tokio::fs;
|
||||
|
||||
let dns_path = dirs::app_home_dir()
|
||||
.map_err(|e| e.to_string())?
|
||||
.join("dns_config.yaml");
|
||||
.stringify_err()?
|
||||
.join(constants::files::DNS_CONFIG);
|
||||
|
||||
if !dns_path.exists() {
|
||||
if !fs::try_exists(&dns_path).await.stringify_err()? {
|
||||
return Err("DNS config file not found".into());
|
||||
}
|
||||
|
||||
let content = fs::read_to_string(&dns_path).map_err(|e| e.to_string())?;
|
||||
let content = fs::read_to_string(&dns_path).await.stringify_err()?.into();
|
||||
Ok(content)
|
||||
}
|
||||
|
||||
/// 验证DNS配置文件
|
||||
#[tauri::command]
|
||||
pub async fn validate_dns_config() -> CmdResult<(bool, String)> {
|
||||
use crate::{core::CoreManager, utils::dirs};
|
||||
|
||||
let app_dir = dirs::app_home_dir().map_err(|e| e.to_string())?;
|
||||
let dns_path = app_dir.join("dns_config.yaml");
|
||||
let app_dir = dirs::app_home_dir().stringify_err()?;
|
||||
let dns_path = app_dir.join(constants::files::DNS_CONFIG);
|
||||
let dns_path_str = dns_path.to_str().unwrap_or_default();
|
||||
|
||||
if !dns_path.exists() {
|
||||
return Ok((false, "DNS config file not found".to_string()));
|
||||
return Ok((false, "DNS config file not found".into()));
|
||||
}
|
||||
|
||||
match CoreManager::global()
|
||||
.validate_config_file(dns_path_str, None)
|
||||
CoreConfigValidator::validate_config_file(dns_path_str, None)
|
||||
.await
|
||||
{
|
||||
Ok(result) => Ok(result),
|
||||
Err(e) => Err(e.to_string()),
|
||||
}
|
||||
.stringify_err()
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_clash_logs() -> CmdResult<Vec<CompactString>> {
|
||||
let logs = CoreManager::global()
|
||||
.get_clash_logs()
|
||||
.await
|
||||
.unwrap_or_default();
|
||||
Ok(logs)
|
||||
}
|
||||
|
||||
@@ -4,12 +4,12 @@ use super::CmdResult;
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn entry_lightweight_mode() -> CmdResult {
|
||||
lightweight::entry_lightweight_mode();
|
||||
lightweight::entry_lightweight_mode().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn exit_lightweight_mode() -> CmdResult {
|
||||
lightweight::exit_lightweight_mode();
|
||||
lightweight::exit_lightweight_mode().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
124
src-tauri/src/cmd/media_unlock_checker/bahamut.rs
Normal file
124
src-tauri/src/cmd/media_unlock_checker/bahamut.rs
Normal file
@@ -0,0 +1,124 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use regex::Regex;
|
||||
use reqwest::{Client, cookie::Jar};
|
||||
|
||||
use crate::{logging, utils::logging::Type};
|
||||
|
||||
use super::UnlockItem;
|
||||
use super::utils::{country_code_to_emoji, get_local_date_string};
|
||||
|
||||
pub(super) async fn check_bahamut_anime(client: &Client) -> UnlockItem {
|
||||
let cookie_store = Arc::new(Jar::default());
|
||||
|
||||
let client_with_cookies = match Client::builder()
|
||||
.user_agent("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36")
|
||||
.cookie_provider(Arc::clone(&cookie_store))
|
||||
.build() {
|
||||
Ok(client) => client,
|
||||
Err(e) => {
|
||||
logging!(
|
||||
error,
|
||||
Type::Network,
|
||||
"Failed to create client with cookies for Bahamut Anime: {}",
|
||||
e
|
||||
);
|
||||
client.clone()
|
||||
}
|
||||
};
|
||||
|
||||
let device_url = "https://ani.gamer.com.tw/ajax/getdeviceid.php";
|
||||
let device_id = match client_with_cookies.get(device_url).send().await {
|
||||
Ok(response) => match response.text().await {
|
||||
Ok(text) => match Regex::new(r#""deviceid"\s*:\s*"([^"]+)"#) {
|
||||
Ok(re) => re
|
||||
.captures(&text)
|
||||
.and_then(|caps| caps.get(1).map(|m| m.as_str().to_string()))
|
||||
.unwrap_or_default(),
|
||||
Err(e) => {
|
||||
logging!(
|
||||
error,
|
||||
Type::Network,
|
||||
"Failed to compile deviceid regex for Bahamut Anime: {}",
|
||||
e
|
||||
);
|
||||
String::new()
|
||||
}
|
||||
},
|
||||
Err(_) => String::new(),
|
||||
},
|
||||
Err(_) => String::new(),
|
||||
};
|
||||
|
||||
if device_id.is_empty() {
|
||||
return UnlockItem {
|
||||
name: "Bahamut Anime".to_string(),
|
||||
status: "Failed".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
|
||||
let url =
|
||||
format!("https://ani.gamer.com.tw/ajax/token.php?adID=89422&sn=37783&device={device_id}");
|
||||
|
||||
let token_result = match client_with_cookies.get(&url).send().await {
|
||||
Ok(response) => match response.text().await {
|
||||
Ok(body) => {
|
||||
if body.contains("animeSn") {
|
||||
Some(body)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
Err(_) => None,
|
||||
},
|
||||
Err(_) => None,
|
||||
};
|
||||
|
||||
if token_result.is_none() {
|
||||
return UnlockItem {
|
||||
name: "Bahamut Anime".to_string(),
|
||||
status: "No".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
|
||||
let region = match client_with_cookies
|
||||
.get("https://ani.gamer.com.tw/")
|
||||
.send()
|
||||
.await
|
||||
{
|
||||
Ok(response) => match response.text().await {
|
||||
Ok(body) => match Regex::new(r#"data-geo="([^"]+)"#) {
|
||||
Ok(region_re) => region_re
|
||||
.captures(&body)
|
||||
.and_then(|caps| caps.get(1))
|
||||
.map(|m| {
|
||||
let country_code = m.as_str();
|
||||
let emoji = country_code_to_emoji(country_code);
|
||||
format!("{emoji}{country_code}")
|
||||
}),
|
||||
Err(e) => {
|
||||
logging!(
|
||||
error,
|
||||
Type::Network,
|
||||
"Failed to compile region regex for Bahamut Anime: {}",
|
||||
e
|
||||
);
|
||||
None
|
||||
}
|
||||
},
|
||||
Err(_) => None,
|
||||
},
|
||||
Err(_) => None,
|
||||
};
|
||||
|
||||
UnlockItem {
|
||||
name: "Bahamut Anime".to_string(),
|
||||
status: "Yes".to_string(),
|
||||
region,
|
||||
check_time: Some(get_local_date_string()),
|
||||
}
|
||||
}
|
||||
91
src-tauri/src/cmd/media_unlock_checker/bilibili.rs
Normal file
91
src-tauri/src/cmd/media_unlock_checker/bilibili.rs
Normal file
@@ -0,0 +1,91 @@
|
||||
use reqwest::Client;
|
||||
use serde_json::Value;
|
||||
|
||||
use super::UnlockItem;
|
||||
use super::utils::get_local_date_string;
|
||||
|
||||
pub(super) async fn check_bilibili_china_mainland(client: &Client) -> UnlockItem {
|
||||
let url = "https://api.bilibili.com/pgc/player/web/playurl?avid=82846771&qn=0&type=&otype=json&ep_id=307247&fourk=1&fnver=0&fnval=16&module=bangumi";
|
||||
|
||||
match client.get(url).send().await {
|
||||
Ok(response) => match response.json::<Value>().await {
|
||||
Ok(body) => {
|
||||
let status = body
|
||||
.get("code")
|
||||
.and_then(|v| v.as_i64())
|
||||
.map(|code| {
|
||||
if code == 0 {
|
||||
"Yes"
|
||||
} else if code == -10403 {
|
||||
"No"
|
||||
} else {
|
||||
"Failed"
|
||||
}
|
||||
})
|
||||
.unwrap_or("Failed");
|
||||
|
||||
UnlockItem {
|
||||
name: "哔哩哔哩大陆".to_string(),
|
||||
status: status.to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
}
|
||||
}
|
||||
Err(_) => UnlockItem {
|
||||
name: "哔哩哔哩大陆".to_string(),
|
||||
status: "Failed".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
},
|
||||
},
|
||||
Err(_) => UnlockItem {
|
||||
name: "哔哩哔哩大陆".to_string(),
|
||||
status: "Failed".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) async fn check_bilibili_hk_mc_tw(client: &Client) -> UnlockItem {
|
||||
let url = "https://api.bilibili.com/pgc/player/web/playurl?avid=18281381&cid=29892777&qn=0&type=&otype=json&ep_id=183799&fourk=1&fnver=0&fnval=16&module=bangumi";
|
||||
|
||||
match client.get(url).send().await {
|
||||
Ok(response) => match response.json::<Value>().await {
|
||||
Ok(body) => {
|
||||
let status = body
|
||||
.get("code")
|
||||
.and_then(|v| v.as_i64())
|
||||
.map(|code| {
|
||||
if code == 0 {
|
||||
"Yes"
|
||||
} else if code == -10403 {
|
||||
"No"
|
||||
} else {
|
||||
"Failed"
|
||||
}
|
||||
})
|
||||
.unwrap_or("Failed");
|
||||
|
||||
UnlockItem {
|
||||
name: "哔哩哔哩港澳台".to_string(),
|
||||
status: status.to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
}
|
||||
}
|
||||
Err(_) => UnlockItem {
|
||||
name: "哔哩哔哩港澳台".to_string(),
|
||||
status: "Failed".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
},
|
||||
},
|
||||
Err(_) => UnlockItem {
|
||||
name: "哔哩哔哩港澳台".to_string(),
|
||||
status: "Failed".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
},
|
||||
}
|
||||
}
|
||||
94
src-tauri/src/cmd/media_unlock_checker/chatgpt.rs
Normal file
94
src-tauri/src/cmd/media_unlock_checker/chatgpt.rs
Normal file
@@ -0,0 +1,94 @@
|
||||
use std::collections::HashMap;
|
||||
|
||||
use reqwest::Client;
|
||||
|
||||
use super::UnlockItem;
|
||||
use super::utils::{country_code_to_emoji, get_local_date_string};
|
||||
|
||||
pub(super) async fn check_chatgpt_combined(client: &Client) -> Vec<UnlockItem> {
|
||||
let mut results = Vec::new();
|
||||
|
||||
let url_country = "https://chat.openai.com/cdn-cgi/trace";
|
||||
let result_country = client.get(url_country).send().await;
|
||||
|
||||
let region = match result_country {
|
||||
Ok(response) => {
|
||||
if let Ok(body) = response.text().await {
|
||||
let mut map = HashMap::new();
|
||||
for line in body.lines() {
|
||||
if let Some(index) = line.find('=') {
|
||||
let key = &line[..index];
|
||||
let value = &line[index + 1..];
|
||||
map.insert(key.to_string(), value.to_string());
|
||||
}
|
||||
}
|
||||
|
||||
map.get("loc").map(|loc| {
|
||||
let emoji = country_code_to_emoji(loc);
|
||||
format!("{emoji}{loc}")
|
||||
})
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
Err(_) => None,
|
||||
};
|
||||
|
||||
let url_ios = "https://ios.chat.openai.com/";
|
||||
let result_ios = client.get(url_ios).send().await;
|
||||
|
||||
let ios_status = match result_ios {
|
||||
Ok(response) => {
|
||||
if let Ok(body) = response.text().await {
|
||||
let body_lower = body.to_lowercase();
|
||||
if body_lower.contains("you may be connected to a disallowed isp") {
|
||||
"Disallowed ISP"
|
||||
} else if body_lower.contains("request is not allowed. please try again later.") {
|
||||
"Yes"
|
||||
} else if body_lower.contains("sorry, you have been blocked") {
|
||||
"Blocked"
|
||||
} else {
|
||||
"Failed"
|
||||
}
|
||||
} else {
|
||||
"Failed"
|
||||
}
|
||||
}
|
||||
Err(_) => "Failed",
|
||||
};
|
||||
|
||||
let url_web = "https://api.openai.com/compliance/cookie_requirements";
|
||||
let result_web = client.get(url_web).send().await;
|
||||
|
||||
let web_status = match result_web {
|
||||
Ok(response) => {
|
||||
if let Ok(body) = response.text().await {
|
||||
let body_lower = body.to_lowercase();
|
||||
if body_lower.contains("unsupported_country") {
|
||||
"Unsupported Country/Region"
|
||||
} else {
|
||||
"Yes"
|
||||
}
|
||||
} else {
|
||||
"Failed"
|
||||
}
|
||||
}
|
||||
Err(_) => "Failed",
|
||||
};
|
||||
|
||||
results.push(UnlockItem {
|
||||
name: "ChatGPT iOS".to_string(),
|
||||
status: ios_status.to_string(),
|
||||
region: region.clone(),
|
||||
check_time: Some(get_local_date_string()),
|
||||
});
|
||||
|
||||
results.push(UnlockItem {
|
||||
name: "ChatGPT Web".to_string(),
|
||||
status: web_status.to_string(),
|
||||
region,
|
||||
check_time: Some(get_local_date_string()),
|
||||
});
|
||||
|
||||
results
|
||||
}
|
||||
60
src-tauri/src/cmd/media_unlock_checker/claude.rs
Normal file
60
src-tauri/src/cmd/media_unlock_checker/claude.rs
Normal file
@@ -0,0 +1,60 @@
|
||||
use reqwest::Client;
|
||||
|
||||
use super::UnlockItem;
|
||||
use super::utils::{country_code_to_emoji, get_local_date_string};
|
||||
|
||||
const BLOCKED_CODES: [&str; 10] = ["AF", "BY", "CN", "CU", "HK", "IR", "KP", "MO", "RU", "SY"];
|
||||
|
||||
pub(super) async fn check_claude(client: &Client) -> UnlockItem {
|
||||
let url = "https://claude.ai/cdn-cgi/trace";
|
||||
|
||||
match client.get(url).send().await {
|
||||
Ok(response) => match response.text().await {
|
||||
Ok(body) => {
|
||||
let mut country_code: Option<String> = None;
|
||||
|
||||
for line in body.lines() {
|
||||
if let Some(rest) = line.strip_prefix("loc=") {
|
||||
country_code = Some(rest.trim().to_uppercase());
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(code) = country_code {
|
||||
let emoji = country_code_to_emoji(&code);
|
||||
let status = if BLOCKED_CODES.contains(&code.as_str()) {
|
||||
"No"
|
||||
} else {
|
||||
"Yes"
|
||||
};
|
||||
|
||||
UnlockItem {
|
||||
name: "Claude".to_string(),
|
||||
status: status.to_string(),
|
||||
region: Some(format!("{emoji}{code}")),
|
||||
check_time: Some(get_local_date_string()),
|
||||
}
|
||||
} else {
|
||||
UnlockItem {
|
||||
name: "Claude".to_string(),
|
||||
status: "Failed".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(_) => UnlockItem {
|
||||
name: "Claude".to_string(),
|
||||
status: "Failed".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
},
|
||||
},
|
||||
Err(_) => UnlockItem {
|
||||
name: "Claude".to_string(),
|
||||
status: "Failed".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
},
|
||||
}
|
||||
}
|
||||
490
src-tauri/src/cmd/media_unlock_checker/disney_plus.rs
Normal file
490
src-tauri/src/cmd/media_unlock_checker/disney_plus.rs
Normal file
@@ -0,0 +1,490 @@
|
||||
use regex::Regex;
|
||||
use reqwest::Client;
|
||||
|
||||
use crate::{logging, utils::logging::Type};
|
||||
|
||||
use super::UnlockItem;
|
||||
use super::utils::{country_code_to_emoji, get_local_date_string};
|
||||
|
||||
#[allow(clippy::cognitive_complexity)]
|
||||
pub(super) async fn check_disney_plus(client: &Client) -> UnlockItem {
|
||||
let device_api_url = "https://disney.api.edge.bamgrid.com/devices";
|
||||
let auth_header =
|
||||
"Bearer ZGlzbmV5JmJyb3dzZXImMS4wLjA.Cu56AgSfBTDag5NiRA81oLHkDZfu5L3CKadnefEAY84";
|
||||
|
||||
let device_req_body = serde_json::json!({
|
||||
"deviceFamily": "browser",
|
||||
"applicationRuntime": "chrome",
|
||||
"deviceProfile": "windows",
|
||||
"attributes": {}
|
||||
});
|
||||
|
||||
let device_result = client
|
||||
.post(device_api_url)
|
||||
.header("authorization", auth_header)
|
||||
.header("content-type", "application/json; charset=UTF-8")
|
||||
.json(&device_req_body)
|
||||
.send()
|
||||
.await;
|
||||
|
||||
if device_result.is_err() {
|
||||
return UnlockItem {
|
||||
name: "Disney+".to_string(),
|
||||
status: "Failed (Network Connection)".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
|
||||
let device_response = match device_result {
|
||||
Ok(response) => response,
|
||||
Err(e) => {
|
||||
logging!(
|
||||
error,
|
||||
Type::Network,
|
||||
"Failed to get Disney+ device response: {}",
|
||||
e
|
||||
);
|
||||
return UnlockItem {
|
||||
name: "Disney+".to_string(),
|
||||
status: "Failed (Network Connection)".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
if device_response.status().as_u16() == 403 {
|
||||
return UnlockItem {
|
||||
name: "Disney+".to_string(),
|
||||
status: "No (IP Banned By Disney+)".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
|
||||
let device_body = match device_response.text().await {
|
||||
Ok(body) => body,
|
||||
Err(_) => {
|
||||
return UnlockItem {
|
||||
name: "Disney+".to_string(),
|
||||
status: "Failed (Error: Cannot read response)".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
let re = match Regex::new(r#""assertion"\s*:\s*"([^"]+)"#) {
|
||||
Ok(re) => re,
|
||||
Err(e) => {
|
||||
logging!(
|
||||
error,
|
||||
Type::Network,
|
||||
"Failed to compile assertion regex for Disney+: {}",
|
||||
e
|
||||
);
|
||||
return UnlockItem {
|
||||
name: "Disney+".to_string(),
|
||||
status: "Failed (Regex Error)".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
};
|
||||
let assertion = match re.captures(&device_body) {
|
||||
Some(caps) => caps.get(1).map(|m| m.as_str().to_string()),
|
||||
None => None,
|
||||
};
|
||||
|
||||
if assertion.is_none() {
|
||||
return UnlockItem {
|
||||
name: "Disney+".to_string(),
|
||||
status: "Failed (Error: Cannot extract assertion)".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
|
||||
let token_url = "https://disney.api.edge.bamgrid.com/token";
|
||||
let assertion_str = match assertion {
|
||||
Some(assertion) => assertion,
|
||||
None => {
|
||||
logging!(error, Type::Network, "No assertion found for Disney+");
|
||||
return UnlockItem {
|
||||
name: "Disney+".to_string(),
|
||||
status: "Failed (No Assertion)".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
};
|
||||
let token_body = [
|
||||
(
|
||||
"grant_type",
|
||||
"urn:ietf:params:oauth:grant-type:token-exchange",
|
||||
),
|
||||
("latitude", "0"),
|
||||
("longitude", "0"),
|
||||
("platform", "browser"),
|
||||
("subject_token", assertion_str.as_str()),
|
||||
(
|
||||
"subject_token_type",
|
||||
"urn:bamtech:params:oauth:token-type:device",
|
||||
),
|
||||
];
|
||||
|
||||
let token_result = client
|
||||
.post(token_url)
|
||||
.header("authorization", auth_header)
|
||||
.header("content-type", "application/x-www-form-urlencoded")
|
||||
.form(&token_body)
|
||||
.send()
|
||||
.await;
|
||||
|
||||
if token_result.is_err() {
|
||||
return UnlockItem {
|
||||
name: "Disney+".to_string(),
|
||||
status: "Failed (Network Connection)".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
|
||||
let token_response = match token_result {
|
||||
Ok(response) => response,
|
||||
Err(e) => {
|
||||
logging!(
|
||||
error,
|
||||
Type::Network,
|
||||
"Failed to get Disney+ token response: {}",
|
||||
e
|
||||
);
|
||||
return UnlockItem {
|
||||
name: "Disney+".to_string(),
|
||||
status: "Failed (Network Connection)".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
};
|
||||
let token_status = token_response.status();
|
||||
|
||||
let token_body_text = match token_response.text().await {
|
||||
Ok(body) => body,
|
||||
Err(_) => {
|
||||
return UnlockItem {
|
||||
name: "Disney+".to_string(),
|
||||
status: "Failed (Error: Cannot read token response)".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
if token_body_text.contains("forbidden-location") || token_body_text.contains("403 ERROR") {
|
||||
return UnlockItem {
|
||||
name: "Disney+".to_string(),
|
||||
status: "No (IP Banned By Disney+)".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
|
||||
let token_json: Result<serde_json::Value, _> = serde_json::from_str(&token_body_text);
|
||||
|
||||
let refresh_token = match token_json {
|
||||
Ok(json) => json
|
||||
.get("refresh_token")
|
||||
.and_then(|v| v.as_str())
|
||||
.map(|s| s.to_string()),
|
||||
Err(_) => match Regex::new(r#""refresh_token"\s*:\s*"([^"]+)"#) {
|
||||
Ok(refresh_token_re) => refresh_token_re
|
||||
.captures(&token_body_text)
|
||||
.and_then(|caps| caps.get(1).map(|m| m.as_str().to_string())),
|
||||
Err(e) => {
|
||||
logging!(
|
||||
error,
|
||||
Type::Network,
|
||||
"Failed to compile refresh_token regex for Disney+: {}",
|
||||
e
|
||||
);
|
||||
None
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
if refresh_token.is_none() {
|
||||
return UnlockItem {
|
||||
name: "Disney+".to_string(),
|
||||
status: format!(
|
||||
"Failed (Error: Cannot extract refresh token, status: {}, response: {})",
|
||||
token_status.as_u16(),
|
||||
token_body_text.chars().take(100).collect::<String>() + "..."
|
||||
),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
|
||||
let graphql_url = "https://disney.api.edge.bamgrid.com/graph/v1/device/graphql";
|
||||
|
||||
let graphql_payload = format!(
|
||||
r#"{{"query":"mutation refreshToken($input: RefreshTokenInput!) {{ refreshToken(refreshToken: $input) {{ activeSession {{ sessionId }} }} }}","variables":{{"input":{{"refreshToken":"{}"}}}}}}"#,
|
||||
refresh_token.unwrap_or_default()
|
||||
);
|
||||
|
||||
let graphql_result = client
|
||||
.post(graphql_url)
|
||||
.header("authorization", auth_header)
|
||||
.header("content-type", "application/json")
|
||||
.body(graphql_payload)
|
||||
.send()
|
||||
.await;
|
||||
|
||||
if graphql_result.is_err() {
|
||||
return UnlockItem {
|
||||
name: "Disney+".to_string(),
|
||||
status: "Failed (Network Connection)".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
|
||||
let preview_check = client.get("https://disneyplus.com").send().await;
|
||||
|
||||
let is_unavailable = match preview_check {
|
||||
Ok(response) => {
|
||||
let url = response.url().to_string();
|
||||
url.contains("preview") || url.contains("unavailable")
|
||||
}
|
||||
Err(_) => true,
|
||||
};
|
||||
|
||||
let graphql_response = match graphql_result {
|
||||
Ok(response) => response,
|
||||
Err(e) => {
|
||||
logging!(
|
||||
error,
|
||||
Type::Network,
|
||||
"Failed to get Disney+ GraphQL response: {}",
|
||||
e
|
||||
);
|
||||
return UnlockItem {
|
||||
name: "Disney+".to_string(),
|
||||
status: "Failed (Network Connection)".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
};
|
||||
let graphql_status = graphql_response.status();
|
||||
let graphql_body_text = match graphql_response.text().await {
|
||||
Ok(text) => text,
|
||||
Err(e) => {
|
||||
logging!(
|
||||
error,
|
||||
Type::Network,
|
||||
"Failed to read Disney+ GraphQL response text: {}",
|
||||
e
|
||||
);
|
||||
String::new()
|
||||
}
|
||||
};
|
||||
|
||||
if graphql_body_text.is_empty() || graphql_status.as_u16() >= 400 {
|
||||
let region_from_main = match client.get("https://www.disneyplus.com/").send().await {
|
||||
Ok(response) => match response.text().await {
|
||||
Ok(body) => match Regex::new(r#"region"\s*:\s*"([^"]+)"#) {
|
||||
Ok(region_re) => region_re
|
||||
.captures(&body)
|
||||
.and_then(|caps| caps.get(1).map(|m| m.as_str().to_string())),
|
||||
Err(e) => {
|
||||
logging!(
|
||||
error,
|
||||
Type::Network,
|
||||
"Failed to compile Disney+ main page region regex: {}",
|
||||
e
|
||||
);
|
||||
None
|
||||
}
|
||||
},
|
||||
Err(_) => None,
|
||||
},
|
||||
Err(_) => None,
|
||||
};
|
||||
|
||||
if let Some(region) = region_from_main {
|
||||
let emoji = country_code_to_emoji(®ion);
|
||||
return UnlockItem {
|
||||
name: "Disney+".to_string(),
|
||||
status: "Yes".to_string(),
|
||||
region: Some(format!("{emoji}{region} (from main page)")),
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
|
||||
if graphql_body_text.is_empty() {
|
||||
return UnlockItem {
|
||||
name: "Disney+".to_string(),
|
||||
status: format!(
|
||||
"Failed (GraphQL error: empty response, status: {})",
|
||||
graphql_status.as_u16()
|
||||
),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
return UnlockItem {
|
||||
name: "Disney+".to_string(),
|
||||
status: format!(
|
||||
"Failed (GraphQL error: {}, status: {})",
|
||||
graphql_body_text.chars().take(50).collect::<String>() + "...",
|
||||
graphql_status.as_u16()
|
||||
),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
|
||||
let region_re = match Regex::new(r#""countryCode"\s*:\s*"([^"]+)"#) {
|
||||
Ok(re) => re,
|
||||
Err(e) => {
|
||||
logging!(
|
||||
error,
|
||||
Type::Network,
|
||||
"Failed to compile Disney+ countryCode regex: {}",
|
||||
e
|
||||
);
|
||||
return UnlockItem {
|
||||
name: "Disney+".to_string(),
|
||||
status: "Failed (Regex Error)".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
};
|
||||
let region_code = region_re
|
||||
.captures(&graphql_body_text)
|
||||
.and_then(|caps| caps.get(1).map(|m| m.as_str().to_string()));
|
||||
|
||||
let supported_re = match Regex::new(r#""inSupportedLocation"\s*:\s*(false|true)"#) {
|
||||
Ok(re) => re,
|
||||
Err(e) => {
|
||||
logging!(
|
||||
error,
|
||||
Type::Network,
|
||||
"Failed to compile Disney+ supported location regex: {}",
|
||||
e
|
||||
);
|
||||
return UnlockItem {
|
||||
name: "Disney+".to_string(),
|
||||
status: "Failed (Regex Error)".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
};
|
||||
let in_supported_location = supported_re
|
||||
.captures(&graphql_body_text)
|
||||
.and_then(|caps| caps.get(1).map(|m| m.as_str() == "true"));
|
||||
|
||||
if region_code.is_none() {
|
||||
let region_from_main = match client.get("https://www.disneyplus.com/").send().await {
|
||||
Ok(response) => match response.text().await {
|
||||
Ok(body) => match Regex::new(r#"region"\s*:\s*"([^"]+)"#) {
|
||||
Ok(region_re) => region_re
|
||||
.captures(&body)
|
||||
.and_then(|caps| caps.get(1).map(|m| m.as_str().to_string())),
|
||||
Err(e) => {
|
||||
logging!(
|
||||
error,
|
||||
Type::Network,
|
||||
"Failed to compile Disney+ main page region regex: {}",
|
||||
e
|
||||
);
|
||||
None
|
||||
}
|
||||
},
|
||||
Err(_) => None,
|
||||
},
|
||||
Err(_) => None,
|
||||
};
|
||||
|
||||
if let Some(region) = region_from_main {
|
||||
let emoji = country_code_to_emoji(®ion);
|
||||
return UnlockItem {
|
||||
name: "Disney+".to_string(),
|
||||
status: "Yes".to_string(),
|
||||
region: Some(format!("{emoji}{region} (from main page)")),
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
|
||||
return UnlockItem {
|
||||
name: "Disney+".to_string(),
|
||||
status: "No".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
|
||||
let region = match region_code {
|
||||
Some(code) => code,
|
||||
None => {
|
||||
logging!(error, Type::Network, "No region code found for Disney+");
|
||||
return UnlockItem {
|
||||
name: "Disney+".to_string(),
|
||||
status: "No".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
if region == "JP" {
|
||||
let emoji = country_code_to_emoji("JP");
|
||||
return UnlockItem {
|
||||
name: "Disney+".to_string(),
|
||||
status: "Yes".to_string(),
|
||||
region: Some(format!("{emoji}{region}")),
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
|
||||
if is_unavailable {
|
||||
return UnlockItem {
|
||||
name: "Disney+".to_string(),
|
||||
status: "No".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
|
||||
match in_supported_location {
|
||||
Some(false) => {
|
||||
let emoji = country_code_to_emoji(®ion);
|
||||
UnlockItem {
|
||||
name: "Disney+".to_string(),
|
||||
status: "Soon".to_string(),
|
||||
region: Some(format!("{emoji}{region}(即将上线)")),
|
||||
check_time: Some(get_local_date_string()),
|
||||
}
|
||||
}
|
||||
Some(true) => {
|
||||
let emoji = country_code_to_emoji(®ion);
|
||||
UnlockItem {
|
||||
name: "Disney+".to_string(),
|
||||
status: "Yes".to_string(),
|
||||
region: Some(format!("{emoji}{region}")),
|
||||
check_time: Some(get_local_date_string()),
|
||||
}
|
||||
}
|
||||
None => UnlockItem {
|
||||
name: "Disney+".to_string(),
|
||||
status: format!("Failed (Error: Unknown region status for {region})"),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
},
|
||||
}
|
||||
}
|
||||
66
src-tauri/src/cmd/media_unlock_checker/gemini.rs
Normal file
66
src-tauri/src/cmd/media_unlock_checker/gemini.rs
Normal file
@@ -0,0 +1,66 @@
|
||||
use regex::Regex;
|
||||
use reqwest::Client;
|
||||
|
||||
use crate::{logging, utils::logging::Type};
|
||||
|
||||
use super::UnlockItem;
|
||||
use super::utils::{country_code_to_emoji, get_local_date_string};
|
||||
|
||||
pub(super) async fn check_gemini(client: &Client) -> UnlockItem {
|
||||
let url = "https://gemini.google.com";
|
||||
|
||||
match client.get(url).send().await {
|
||||
Ok(response) => {
|
||||
if let Ok(body) = response.text().await {
|
||||
let is_ok = body.contains("45631641,null,true");
|
||||
let status = if is_ok { "Yes" } else { "No" };
|
||||
|
||||
let re = match Regex::new(r#",2,1,200,"([A-Z]{3})""#) {
|
||||
Ok(re) => re,
|
||||
Err(e) => {
|
||||
logging!(
|
||||
error,
|
||||
Type::Network,
|
||||
"Failed to compile Gemini regex: {}",
|
||||
e
|
||||
);
|
||||
return UnlockItem {
|
||||
name: "Gemini".to_string(),
|
||||
status: "Failed".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
let region = re.captures(&body).and_then(|caps| {
|
||||
caps.get(1).map(|m| {
|
||||
let country_code = m.as_str();
|
||||
let emoji = country_code_to_emoji(country_code);
|
||||
format!("{emoji}{country_code}")
|
||||
})
|
||||
});
|
||||
|
||||
UnlockItem {
|
||||
name: "Gemini".to_string(),
|
||||
status: status.to_string(),
|
||||
region,
|
||||
check_time: Some(get_local_date_string()),
|
||||
}
|
||||
} else {
|
||||
UnlockItem {
|
||||
name: "Gemini".to_string(),
|
||||
status: "Failed".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(_) => UnlockItem {
|
||||
name: "Gemini".to_string(),
|
||||
status: "Failed".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
},
|
||||
}
|
||||
}
|
||||
188
src-tauri/src/cmd/media_unlock_checker/mod.rs
Normal file
188
src-tauri/src/cmd/media_unlock_checker/mod.rs
Normal file
@@ -0,0 +1,188 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use reqwest::Client;
|
||||
use tauri::command;
|
||||
use tokio::{sync::Mutex, task::JoinSet};
|
||||
|
||||
use crate::{logging, utils::logging::Type};
|
||||
|
||||
mod bahamut;
|
||||
mod bilibili;
|
||||
mod chatgpt;
|
||||
mod claude;
|
||||
mod disney_plus;
|
||||
mod gemini;
|
||||
mod netflix;
|
||||
mod prime_video;
|
||||
mod spotify;
|
||||
mod tiktok;
|
||||
mod types;
|
||||
mod utils;
|
||||
mod youtube;
|
||||
|
||||
pub use types::UnlockItem;
|
||||
|
||||
use bahamut::check_bahamut_anime;
|
||||
use bilibili::{check_bilibili_china_mainland, check_bilibili_hk_mc_tw};
|
||||
use chatgpt::check_chatgpt_combined;
|
||||
use claude::check_claude;
|
||||
use disney_plus::check_disney_plus;
|
||||
use gemini::check_gemini;
|
||||
use netflix::check_netflix;
|
||||
use prime_video::check_prime_video;
|
||||
use spotify::check_spotify;
|
||||
use tiktok::check_tiktok;
|
||||
use youtube::check_youtube_premium;
|
||||
|
||||
/// Tauri command: return the static list of known services, each in the
/// "Pending" state, so the UI can render placeholder rows before any network
/// probing has run.
#[command]
pub async fn get_unlock_items() -> Result<Vec<UnlockItem>, String> {
    Ok(types::default_unlock_items())
}
|
||||
|
||||
#[command]
|
||||
pub async fn check_media_unlock() -> Result<Vec<UnlockItem>, String> {
|
||||
let client = match Client::builder()
|
||||
.user_agent("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36")
|
||||
.timeout(std::time::Duration::from_secs(30))
|
||||
.danger_accept_invalid_certs(true)
|
||||
.danger_accept_invalid_hostnames(true)
|
||||
.tcp_keepalive(std::time::Duration::from_secs(60))
|
||||
.connection_verbose(true)
|
||||
.build() {
|
||||
Ok(client) => client,
|
||||
Err(e) => return Err(format!("创建HTTP客户端失败: {e}")),
|
||||
};
|
||||
|
||||
let results = Arc::new(Mutex::new(Vec::new()));
|
||||
let mut tasks = JoinSet::new();
|
||||
let client_arc = Arc::new(client);
|
||||
|
||||
{
|
||||
let client = Arc::clone(&client_arc);
|
||||
let results = Arc::clone(&results);
|
||||
tasks.spawn(async move {
|
||||
let result = check_bilibili_china_mainland(&client).await;
|
||||
results.lock().await.push(result);
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
let client = Arc::clone(&client_arc);
|
||||
let results = Arc::clone(&results);
|
||||
tasks.spawn(async move {
|
||||
let result = check_bilibili_hk_mc_tw(&client).await;
|
||||
results.lock().await.push(result);
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
let client = Arc::clone(&client_arc);
|
||||
let results = Arc::clone(&results);
|
||||
tasks.spawn(async move {
|
||||
let chatgpt_results = check_chatgpt_combined(&client).await;
|
||||
let mut results = results.lock().await;
|
||||
results.extend(chatgpt_results);
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
let client = Arc::clone(&client_arc);
|
||||
let results = Arc::clone(&results);
|
||||
tasks.spawn(async move {
|
||||
let result = check_claude(&client).await;
|
||||
results.lock().await.push(result);
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
let client = Arc::clone(&client_arc);
|
||||
let results = Arc::clone(&results);
|
||||
tasks.spawn(async move {
|
||||
let result = check_gemini(&client).await;
|
||||
results.lock().await.push(result);
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
let client = Arc::clone(&client_arc);
|
||||
let results = Arc::clone(&results);
|
||||
tasks.spawn(async move {
|
||||
let result = check_youtube_premium(&client).await;
|
||||
results.lock().await.push(result);
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
let client = Arc::clone(&client_arc);
|
||||
let results = Arc::clone(&results);
|
||||
tasks.spawn(async move {
|
||||
let result = check_bahamut_anime(&client).await;
|
||||
results.lock().await.push(result);
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
let client = Arc::clone(&client_arc);
|
||||
let results = Arc::clone(&results);
|
||||
tasks.spawn(async move {
|
||||
let result = check_netflix(&client).await;
|
||||
results.lock().await.push(result);
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
let client = Arc::clone(&client_arc);
|
||||
let results = Arc::clone(&results);
|
||||
tasks.spawn(async move {
|
||||
let result = check_disney_plus(&client).await;
|
||||
results.lock().await.push(result);
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
let client = Arc::clone(&client_arc);
|
||||
let results = Arc::clone(&results);
|
||||
tasks.spawn(async move {
|
||||
let result = check_spotify(&client).await;
|
||||
results.lock().await.push(result);
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
let client = Arc::clone(&client_arc);
|
||||
let results = Arc::clone(&results);
|
||||
tasks.spawn(async move {
|
||||
let result = check_tiktok(&client).await;
|
||||
results.lock().await.push(result);
|
||||
});
|
||||
}
|
||||
|
||||
{
|
||||
let client = Arc::clone(&client_arc);
|
||||
let results = Arc::clone(&results);
|
||||
tasks.spawn(async move {
|
||||
let result = check_prime_video(&client).await;
|
||||
results.lock().await.push(result);
|
||||
});
|
||||
}
|
||||
|
||||
while let Some(res) = tasks.join_next().await {
|
||||
if let Err(e) = res {
|
||||
eprintln!("任务执行失败: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
let results = match Arc::try_unwrap(results) {
|
||||
Ok(mutex) => mutex.into_inner(),
|
||||
Err(_) => {
|
||||
logging!(
|
||||
error,
|
||||
Type::Network,
|
||||
"Failed to unwrap results Arc, references still exist"
|
||||
);
|
||||
return Err("Failed to collect results".to_string());
|
||||
}
|
||||
};
|
||||
|
||||
Ok(results)
|
||||
}
|
||||
220
src-tauri/src/cmd/media_unlock_checker/netflix.rs
Normal file
220
src-tauri/src/cmd/media_unlock_checker/netflix.rs
Normal file
@@ -0,0 +1,220 @@
|
||||
use reqwest::Client;
|
||||
use serde_json::Value;
|
||||
|
||||
use crate::{logging, utils::logging::Type};
|
||||
|
||||
use super::UnlockItem;
|
||||
use super::utils::{country_code_to_emoji, get_local_date_string};
|
||||
|
||||
pub(super) async fn check_netflix(client: &Client) -> UnlockItem {
|
||||
let cdn_result = check_netflix_cdn(client).await;
|
||||
if cdn_result.status == "Yes" {
|
||||
return cdn_result;
|
||||
}
|
||||
|
||||
let url1 = "https://www.netflix.com/title/81280792";
|
||||
let url2 = "https://www.netflix.com/title/70143836";
|
||||
|
||||
let result1 = client
|
||||
.get(url1)
|
||||
.timeout(std::time::Duration::from_secs(30))
|
||||
.send()
|
||||
.await;
|
||||
|
||||
if let Err(e) = &result1 {
|
||||
eprintln!("Netflix请求错误: {e}");
|
||||
return UnlockItem {
|
||||
name: "Netflix".to_string(),
|
||||
status: "Failed".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
|
||||
let result2 = client
|
||||
.get(url2)
|
||||
.timeout(std::time::Duration::from_secs(30))
|
||||
.send()
|
||||
.await;
|
||||
|
||||
if let Err(e) = &result2 {
|
||||
eprintln!("Netflix请求错误: {e}");
|
||||
return UnlockItem {
|
||||
name: "Netflix".to_string(),
|
||||
status: "Failed".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
|
||||
let status1 = match result1 {
|
||||
Ok(response) => response.status().as_u16(),
|
||||
Err(e) => {
|
||||
logging!(
|
||||
error,
|
||||
Type::Network,
|
||||
"Failed to get Netflix response 1: {}",
|
||||
e
|
||||
);
|
||||
return UnlockItem {
|
||||
name: "Netflix".to_string(),
|
||||
status: "Failed".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
let status2 = match result2 {
|
||||
Ok(response) => response.status().as_u16(),
|
||||
Err(e) => {
|
||||
logging!(
|
||||
error,
|
||||
Type::Network,
|
||||
"Failed to get Netflix response 2: {}",
|
||||
e
|
||||
);
|
||||
return UnlockItem {
|
||||
name: "Netflix".to_string(),
|
||||
status: "Failed".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
if status1 == 404 && status2 == 404 {
|
||||
return UnlockItem {
|
||||
name: "Netflix".to_string(),
|
||||
status: "Originals Only".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
|
||||
if status1 == 403 || status2 == 403 {
|
||||
return UnlockItem {
|
||||
name: "Netflix".to_string(),
|
||||
status: "No".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
|
||||
if status1 == 200 || status1 == 301 || status2 == 200 || status2 == 301 {
|
||||
let test_url = "https://www.netflix.com/title/80018499";
|
||||
match client
|
||||
.get(test_url)
|
||||
.timeout(std::time::Duration::from_secs(30))
|
||||
.send()
|
||||
.await
|
||||
{
|
||||
Ok(response) => {
|
||||
if let Some(location) = response.headers().get("location")
|
||||
&& let Ok(location_str) = location.to_str()
|
||||
{
|
||||
let parts: Vec<&str> = location_str.split('/').collect();
|
||||
if parts.len() >= 4 {
|
||||
let region_code = parts[3].split('-').next().unwrap_or("unknown");
|
||||
let emoji = country_code_to_emoji(region_code);
|
||||
return UnlockItem {
|
||||
name: "Netflix".to_string(),
|
||||
status: "Yes".to_string(),
|
||||
region: Some(format!("{emoji}{region_code}")),
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
let emoji = country_code_to_emoji("us");
|
||||
UnlockItem {
|
||||
name: "Netflix".to_string(),
|
||||
status: "Yes".to_string(),
|
||||
region: Some(format!("{emoji}{}", "us")),
|
||||
check_time: Some(get_local_date_string()),
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("获取Netflix区域信息失败: {e}");
|
||||
UnlockItem {
|
||||
name: "Netflix".to_string(),
|
||||
status: "Yes (但无法获取区域)".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
UnlockItem {
|
||||
name: "Netflix".to_string(),
|
||||
status: format!("Failed (状态码: {status1}_{status2}"),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Query the fast.com speed-test API (which is served by Netflix's CDN) to
/// detect Netflix access and region without scraping title pages.
///
/// Returns a "Netflix"-named item: "No (IP Banned By Netflix)" on HTTP 403,
/// "Yes" with a region when a CDN target carries a country field, "Unknown"
/// when the JSON has no usable target, and "Failed (…)" on request/parse
/// errors.
async fn check_netflix_cdn(client: &Client) -> UnlockItem {
    // The token is a fixed public value used by the fast.com web client.
    let url = "https://api.fast.com/netflix/speedtest/v2?https=true&token=YXNkZmFzZGxmbnNkYWZoYXNkZmhrYWxm&urlCount=5";

    match client
        .get(url)
        .timeout(std::time::Duration::from_secs(30))
        .send()
        .await
    {
        Ok(response) => {
            // 403 from the CDN API is treated as an outright IP ban.
            if response.status().as_u16() == 403 {
                return UnlockItem {
                    name: "Netflix".to_string(),
                    status: "No (IP Banned By Netflix)".to_string(),
                    region: None,
                    check_time: Some(get_local_date_string()),
                };
            }

            match response.json::<Value>().await {
                Ok(data) => {
                    // Drill into targets[0].location.country; any missing step
                    // falls through to the "Unknown" result below.
                    if let Some(targets) = data.get("targets").and_then(|t| t.as_array())
                        && !targets.is_empty()
                        && let Some(location) = targets[0].get("location")
                        && let Some(country) = location.get("country").and_then(|c| c.as_str())
                    {
                        let emoji = country_code_to_emoji(country);
                        return UnlockItem {
                            name: "Netflix".to_string(),
                            status: "Yes".to_string(),
                            region: Some(format!("{emoji}{country}")),
                            check_time: Some(get_local_date_string()),
                        };
                    }

                    // Valid JSON but no target/country information.
                    UnlockItem {
                        name: "Netflix".to_string(),
                        status: "Unknown".to_string(),
                        region: None,
                        check_time: Some(get_local_date_string()),
                    }
                }
                Err(e) => {
                    eprintln!("解析Fast.com API响应失败: {e}");
                    UnlockItem {
                        name: "Netflix".to_string(),
                        status: "Failed (解析错误)".to_string(),
                        region: None,
                        check_time: Some(get_local_date_string()),
                    }
                }
            }
        }
        Err(e) => {
            eprintln!("Fast.com API请求失败: {e}");
            UnlockItem {
                name: "Netflix".to_string(),
                status: "Failed (CDN API)".to_string(),
                region: None,
                check_time: Some(get_local_date_string()),
            }
        }
    }
}
|
||||
108
src-tauri/src/cmd/media_unlock_checker/prime_video.rs
Normal file
108
src-tauri/src/cmd/media_unlock_checker/prime_video.rs
Normal file
@@ -0,0 +1,108 @@
|
||||
use regex::Regex;
|
||||
use reqwest::Client;
|
||||
|
||||
use crate::{logging, utils::logging::Type};
|
||||
|
||||
use super::UnlockItem;
|
||||
use super::utils::{country_code_to_emoji, get_local_date_string};
|
||||
|
||||
pub(super) async fn check_prime_video(client: &Client) -> UnlockItem {
|
||||
let url = "https://www.primevideo.com";
|
||||
|
||||
let result = client.get(url).send().await;
|
||||
|
||||
if result.is_err() {
|
||||
return UnlockItem {
|
||||
name: "Prime Video".to_string(),
|
||||
status: "Failed (Network Connection)".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
|
||||
let response = match result {
|
||||
Ok(response) => response,
|
||||
Err(e) => {
|
||||
logging!(
|
||||
error,
|
||||
Type::Network,
|
||||
"Failed to get Prime Video response: {}",
|
||||
e
|
||||
);
|
||||
return UnlockItem {
|
||||
name: "Prime Video".to_string(),
|
||||
status: "Failed (Network Connection)".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
match response.text().await {
|
||||
Ok(body) => {
|
||||
let is_blocked = body.contains("isServiceRestricted");
|
||||
|
||||
let region_re = match Regex::new(r#""currentTerritory":"([^"]+)""#) {
|
||||
Ok(re) => re,
|
||||
Err(e) => {
|
||||
logging!(
|
||||
error,
|
||||
Type::Network,
|
||||
"Failed to compile Prime Video region regex: {}",
|
||||
e
|
||||
);
|
||||
return UnlockItem {
|
||||
name: "Prime Video".to_string(),
|
||||
status: "Failed (Regex Error)".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
};
|
||||
let region_code = region_re
|
||||
.captures(&body)
|
||||
.and_then(|caps| caps.get(1).map(|m| m.as_str().to_string()));
|
||||
|
||||
if is_blocked {
|
||||
return UnlockItem {
|
||||
name: "Prime Video".to_string(),
|
||||
status: "No (Service Not Available)".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
|
||||
if let Some(region) = region_code {
|
||||
let emoji = country_code_to_emoji(®ion);
|
||||
return UnlockItem {
|
||||
name: "Prime Video".to_string(),
|
||||
status: "Yes".to_string(),
|
||||
region: Some(format!("{emoji}{region}")),
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
|
||||
if !is_blocked {
|
||||
return UnlockItem {
|
||||
name: "Prime Video".to_string(),
|
||||
status: "Failed (Error: PAGE ERROR)".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
};
|
||||
}
|
||||
|
||||
UnlockItem {
|
||||
name: "Prime Video".to_string(),
|
||||
status: "Failed (Error: Unknown Region)".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
}
|
||||
}
|
||||
Err(_) => UnlockItem {
|
||||
name: "Prime Video".to_string(),
|
||||
status: "Failed (Error: Cannot read response)".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
},
|
||||
}
|
||||
}
|
||||
79
src-tauri/src/cmd/media_unlock_checker/spotify.rs
Normal file
79
src-tauri/src/cmd/media_unlock_checker/spotify.rs
Normal file
@@ -0,0 +1,79 @@
|
||||
use reqwest::{Client, Url};
|
||||
|
||||
use super::UnlockItem;
|
||||
use super::utils::{country_code_to_emoji, get_local_date_string};
|
||||
|
||||
pub(super) async fn check_spotify(client: &Client) -> UnlockItem {
|
||||
let url = "https://www.spotify.com/api/content/v1/country-selector?platform=web&format=json";
|
||||
|
||||
match client.get(url).send().await {
|
||||
Ok(response) => {
|
||||
let final_url = response.url().clone();
|
||||
let status_code = response.status();
|
||||
let body = response.text().await.unwrap_or_default();
|
||||
|
||||
let region = extract_region(&final_url).or_else(|| extract_region_from_body(&body));
|
||||
let status = determine_status(status_code.as_u16(), &body);
|
||||
|
||||
UnlockItem {
|
||||
name: "Spotify".to_string(),
|
||||
status: status.to_string(),
|
||||
region,
|
||||
check_time: Some(get_local_date_string()),
|
||||
}
|
||||
}
|
||||
Err(_) => UnlockItem {
|
||||
name: "Spotify".to_string(),
|
||||
status: "Failed".to_string(),
|
||||
region: None,
|
||||
check_time: Some(get_local_date_string()),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
/// Map an HTTP status plus response body onto a coarse availability verdict.
fn determine_status(status: u16, body: &str) -> &'static str {
    match status {
        // Explicit geo-block responses.
        403 | 451 => "No",
        // Any other non-2xx counts as a probe failure.
        s if !(200..300).contains(&s) => "Failed",
        // A 2xx page can still announce unavailability in its text.
        _ if body
            .to_lowercase()
            .contains("not available in your country") =>
        {
            "No"
        }
        _ => "Yes",
    }
}
|
||||
|
||||
fn extract_region(url: &Url) -> Option<String> {
|
||||
let mut segments = url.path_segments()?;
|
||||
let first_segment = segments.next()?;
|
||||
|
||||
if first_segment.is_empty() || first_segment == "api" {
|
||||
return None;
|
||||
}
|
||||
|
||||
let country_code = first_segment.split('-').next().unwrap_or(first_segment);
|
||||
let upper = country_code.to_uppercase();
|
||||
let emoji = country_code_to_emoji(&upper);
|
||||
Some(format!("{emoji}{upper}"))
|
||||
}
|
||||
|
||||
fn extract_region_from_body(body: &str) -> Option<String> {
|
||||
let marker = "\"countryCode\":\"";
|
||||
if let Some(idx) = body.find(marker) {
|
||||
let start = idx + marker.len();
|
||||
let rest = &body[start..];
|
||||
if let Some(end) = rest.find('"') {
|
||||
let code = rest[..end].to_uppercase();
|
||||
if !code.is_empty() {
|
||||
let emoji = country_code_to_emoji(&code);
|
||||
return Some(format!("{emoji}{code}"));
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
87
src-tauri/src/cmd/media_unlock_checker/tiktok.rs
Normal file
87
src-tauri/src/cmd/media_unlock_checker/tiktok.rs
Normal file
@@ -0,0 +1,87 @@
|
||||
use std::sync::OnceLock;
|
||||
|
||||
use regex::Regex;
|
||||
use reqwest::Client;
|
||||
|
||||
use super::UnlockItem;
|
||||
use super::utils::{country_code_to_emoji, get_local_date_string};
|
||||
|
||||
/// Check TikTok availability.
///
/// Primary probe is the cdn-cgi/trace endpoint; if it yields no region or a
/// "Failed" status, the homepage is fetched as a fallback. A "No" verdict
/// from the trace probe is never overwritten by the fallback, and a region
/// found by the trace probe is kept.
pub(super) async fn check_tiktok(client: &Client) -> UnlockItem {
    let trace_url = "https://www.tiktok.com/cdn-cgi/trace";

    // Pessimistic defaults; refined by whichever probe succeeds.
    let mut status = String::from("Failed");
    let mut region = None;

    if let Ok(response) = client.get(trace_url).send().await {
        let status_code = response.status().as_u16();
        if let Ok(body) = response.text().await {
            status = determine_status(status_code, &body).to_string();
            region = extract_region_from_body(&body);
        }
    }

    // Fallback to the homepage only when the trace probe was inconclusive.
    if (region.is_none() || status == "Failed")
        && let Ok(response) = client.get("https://www.tiktok.com/").send().await
    {
        let status_code = response.status().as_u16();
        if let Ok(body) = response.text().await {
            let fallback_status = determine_status(status_code, &body);
            let fallback_region = extract_region_from_body(&body);

            // Keep an already-established "No" — the fallback must not
            // upgrade a confirmed block.
            if status != "No" {
                status = fallback_status.to_string();
            }

            if region.is_none() {
                region = fallback_region;
            }
        }
    }

    UnlockItem {
        name: "TikTok".to_string(),
        status,
        region,
        check_time: Some(get_local_date_string()),
    }
}
|
||||
|
||||
/// Classify a TikTok HTTP response into "Yes" / "No" / "Failed".
fn determine_status(status: u16, body: &str) -> &'static str {
    // Hard geo-block status codes.
    if matches!(status, 403 | 451) {
        return "No";
    }

    // Anything else outside 2xx is a transport-level failure.
    if !(200..300).contains(&status) {
        return "Failed";
    }

    // The block can also be announced inside an otherwise-200 page.
    const BLOCK_MARKERS: [&str; 3] = [
        "access denied",
        "not available in your region",
        "tiktok is not available",
    ];
    let lowered = body.to_lowercase();
    if BLOCK_MARKERS.iter().any(|marker| lowered.contains(marker)) {
        return "No";
    }

    "Yes"
}
|
||||
|
||||
/// Extract a `"region":"xx"` (or "xx-yy") value from a response body and
/// format it as "<flag><CODE>".
///
/// The regex is compiled once and cached in a `OnceLock`; if compilation ever
/// fails the cached `None` makes this function permanently return None
/// instead of retrying.
fn extract_region_from_body(body: &str) -> Option<String> {
    static REGION_REGEX: OnceLock<Option<Regex>> = OnceLock::new();
    let regex = REGION_REGEX
        .get_or_init(|| Regex::new(r#""region"\s*:\s*"([a-zA-Z-]+)""#).ok())
        .as_ref()?;

    if let Some(caps) = regex.captures(body)
        && let Some(matched) = caps.get(1)
    {
        // Values look like "sg" or "sg-en"; keep only the country part.
        let raw = matched.as_str();
        let country_code = raw.split('-').next().unwrap_or(raw).to_uppercase();
        if !country_code.is_empty() {
            let emoji = country_code_to_emoji(&country_code);
            return Some(format!("{emoji}{country_code}"));
        }
    }

    None
}
|
||||
43
src-tauri/src/cmd/media_unlock_checker/types.rs
Normal file
43
src-tauri/src/cmd/media_unlock_checker/types.rs
Normal file
@@ -0,0 +1,43 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// One row in the media-unlock result table sent to the frontend.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UnlockItem {
    /// Service display name, e.g. "Netflix".
    pub name: String,
    /// Verdict consumed by the UI: "Yes", "No", "Pending", "Failed (…)", etc.
    pub status: String,
    /// Optional region, usually "<flag emoji><country code>".
    pub region: Option<String>,
    /// Local timestamp of the check ("%Y-%m-%d %H:%M:%S"); None until checked.
    pub check_time: Option<String>,
}
|
||||
|
||||
impl UnlockItem {
|
||||
pub fn pending(name: &str) -> Self {
|
||||
Self {
|
||||
name: name.to_string(),
|
||||
status: "Pending".to_string(),
|
||||
region: None,
|
||||
check_time: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const DEFAULT_UNLOCK_ITEM_NAMES: [&str; 13] = [
|
||||
"哔哩哔哩大陆",
|
||||
"哔哩哔哩港澳台",
|
||||
"ChatGPT iOS",
|
||||
"ChatGPT Web",
|
||||
"Claude",
|
||||
"Gemini",
|
||||
"Youtube Premium",
|
||||
"Bahamut Anime",
|
||||
"Netflix",
|
||||
"Disney+",
|
||||
"Prime Video",
|
||||
"Spotify",
|
||||
"TikTok",
|
||||
];
|
||||
|
||||
pub fn default_unlock_items() -> Vec<UnlockItem> {
|
||||
DEFAULT_UNLOCK_ITEM_NAMES
|
||||
.iter()
|
||||
.map(|name| UnlockItem::pending(name))
|
||||
.collect()
|
||||
}
|
||||
21
src-tauri/src/cmd/media_unlock_checker/utils.rs
Normal file
21
src-tauri/src/cmd/media_unlock_checker/utils.rs
Normal file
@@ -0,0 +1,21 @@
|
||||
use chrono::Local;
|
||||
|
||||
/// Current local time formatted as "YYYY-MM-DD HH:MM:SS", used as the
/// `check_time` stamp on results.
pub fn get_local_date_string() -> String {
    let now = Local::now();
    now.format("%Y-%m-%d %H:%M:%S").to_string()
}
|
||||
|
||||
/// Convert an ISO 3166-1 alpha-2 country code (case-insensitive) into its
/// regional-indicator flag emoji, e.g. "US" -> "🇺🇸".
///
/// Returns an empty string for inputs that cannot form a flag: fewer than two
/// bytes, or a first/second character outside A–Z (previously digits or
/// punctuation were silently mapped onto arbitrary non-flag code points).
/// Codes longer than two letters use only the first two, which also covers
/// the three-letter codes some checkers report.
pub fn country_code_to_emoji(country_code: &str) -> String {
    let country_code = country_code.to_uppercase();
    let bytes = country_code.as_bytes();

    // Need at least two ASCII uppercase letters to build the two regional
    // indicator symbols.
    if bytes.len() < 2 || !bytes[..2].iter().all(|b| b.is_ascii_uppercase()) {
        return String::new();
    }

    // Regional indicator symbols start at U+1F1E6, which represents 'A'.
    const REGIONAL_INDICATOR_A: u32 = 0x1F1E6;
    let c1 = REGIONAL_INDICATOR_A + (bytes[0] - b'A') as u32;
    let c2 = REGIONAL_INDICATOR_A + (bytes[1] - b'A') as u32;

    char::from_u32(c1)
        .and_then(|c1| char::from_u32(c2).map(|c2| format!("{c1}{c2}")))
        .unwrap_or_default()
}
|
||||
82
src-tauri/src/cmd/media_unlock_checker/youtube.rs
Normal file
82
src-tauri/src/cmd/media_unlock_checker/youtube.rs
Normal file
@@ -0,0 +1,82 @@
|
||||
use regex::Regex;
|
||||
use reqwest::Client;
|
||||
|
||||
use crate::{logging, utils::logging::Type};
|
||||
|
||||
use super::UnlockItem;
|
||||
use super::utils::{country_code_to_emoji, get_local_date_string};
|
||||
|
||||
/// Check YouTube Premium availability from the /premium landing page.
///
/// "No" when the page says Premium is unavailable in the country; "Yes" (with
/// a region scraped from the `country-code` element) when the ad-free pitch
/// is present; otherwise "Failed".
pub(super) async fn check_youtube_premium(client: &Client) -> UnlockItem {
    let url = "https://www.youtube.com/premium";

    match client.get(url).send().await {
        Ok(response) => {
            if let Ok(body) = response.text().await {
                let body_lower = body.to_lowercase();

                // Explicit unavailability banner.
                if body_lower.contains("youtube premium is not available in your country") {
                    return UnlockItem {
                        name: "Youtube Premium".to_string(),
                        status: "No".to_string(),
                        region: None,
                        check_time: Some(get_local_date_string()),
                    };
                }

                // The ad-free pitch only renders where Premium is offered.
                if body_lower.contains("ad-free") {
                    let re = match Regex::new(r#"id="country-code"[^>]*>([^<]+)<"#) {
                        Ok(re) => re,
                        Err(e) => {
                            logging!(
                                error,
                                Type::Network,
                                "Failed to compile YouTube Premium regex: {}",
                                e
                            );
                            return UnlockItem {
                                name: "Youtube Premium".to_string(),
                                status: "Failed".to_string(),
                                region: None,
                                check_time: Some(get_local_date_string()),
                            };
                        }
                    };
                    // Region scraped from the original (non-lowercased) body so
                    // the country code keeps its case.
                    let region = re.captures(&body).and_then(|caps| {
                        caps.get(1).map(|m| {
                            let country_code = m.as_str().trim();
                            let emoji = country_code_to_emoji(country_code);
                            format!("{emoji}{country_code}")
                        })
                    });

                    return UnlockItem {
                        name: "Youtube Premium".to_string(),
                        status: "Yes".to_string(),
                        region,
                        check_time: Some(get_local_date_string()),
                    };
                }

                // Page loaded but matched neither marker.
                UnlockItem {
                    name: "Youtube Premium".to_string(),
                    status: "Failed".to_string(),
                    region: None,
                    check_time: Some(get_local_date_string()),
                }
            } else {
                UnlockItem {
                    name: "Youtube Premium".to_string(),
                    status: "Failed".to_string(),
                    region: None,
                    check_time: Some(get_local_date_string()),
                }
            }
        }
        Err(_) => UnlockItem {
            name: "Youtube Premium".to_string(),
            status: "Failed".to_string(),
            region: None,
            check_time: Some(get_local_date_string()),
        },
    }
}
|
||||
@@ -1,10 +1,11 @@
|
||||
use anyhow::Result;
|
||||
use smartstring::alias::String;
|
||||
|
||||
// Common result type used by command functions
|
||||
pub type CmdResult<T = ()> = Result<T, String>;
|
||||
|
||||
// Command modules
|
||||
pub mod app;
|
||||
pub mod backup;
|
||||
pub mod clash;
|
||||
pub mod lightweight;
|
||||
pub mod media_unlock_checker;
|
||||
@@ -22,6 +23,7 @@ pub mod webdav;
|
||||
|
||||
// Re-export all command functions for backwards compatibility
|
||||
pub use app::*;
|
||||
pub use backup::*;
|
||||
pub use clash::*;
|
||||
pub use lightweight::*;
|
||||
pub use media_unlock_checker::*;
|
||||
@@ -36,3 +38,27 @@ pub use uwp::*;
|
||||
pub use validate::*;
|
||||
pub use verge::*;
|
||||
pub use webdav::*;
|
||||
|
||||
pub trait StringifyErr<T> {
|
||||
fn stringify_err(self) -> CmdResult<T>;
|
||||
fn stringify_err_log<F>(self, log_fn: F) -> CmdResult<T>
|
||||
where
|
||||
F: Fn(&str);
|
||||
}
|
||||
|
||||
impl<T, E: std::fmt::Display> StringifyErr<T> for Result<T, E> {
|
||||
fn stringify_err(self) -> CmdResult<T> {
|
||||
self.map_err(|e| e.to_string().into())
|
||||
}
|
||||
|
||||
fn stringify_err_log<F>(self, log_fn: F) -> CmdResult<T>
|
||||
where
|
||||
F: Fn(&str),
|
||||
{
|
||||
self.map_err(|e| {
|
||||
let msg = String::from(e.to_string());
|
||||
log_fn(&msg);
|
||||
msg
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,13 +1,15 @@
|
||||
use super::CmdResult;
|
||||
use crate::core::{async_proxy_query::AsyncProxyQuery, EventDrivenProxyManager};
|
||||
use crate::wrap_err;
|
||||
use crate::cmd::StringifyErr;
|
||||
use crate::core::{EventDrivenProxyManager, async_proxy_query::AsyncProxyQuery};
|
||||
use crate::process::AsyncHandler;
|
||||
use crate::{logging, utils::logging::Type};
|
||||
use network_interface::NetworkInterface;
|
||||
use serde_yaml::Mapping;
|
||||
use serde_yaml_ng::Mapping;
|
||||
|
||||
/// get the system proxy
|
||||
#[tauri::command]
|
||||
pub async fn get_sys_proxy() -> CmdResult<Mapping> {
|
||||
log::debug!(target: "app", "异步获取系统代理配置");
|
||||
logging!(debug, Type::Network, "异步获取系统代理配置");
|
||||
|
||||
let current = AsyncProxyQuery::get_system_proxy().await;
|
||||
|
||||
@@ -19,20 +21,27 @@ pub async fn get_sys_proxy() -> CmdResult<Mapping> {
|
||||
);
|
||||
map.insert("bypass".into(), current.bypass.into());
|
||||
|
||||
log::debug!(target: "app", "返回系统代理配置: enable={}, {}:{}", current.enable, current.host, current.port);
|
||||
logging!(
|
||||
debug,
|
||||
Type::Network,
|
||||
"返回系统代理配置: enable={}, {}:{}",
|
||||
current.enable,
|
||||
current.host,
|
||||
current.port
|
||||
);
|
||||
Ok(map)
|
||||
}
|
||||
|
||||
/// 获取自动代理配置
|
||||
#[tauri::command]
|
||||
pub async fn get_auto_proxy() -> CmdResult<Mapping> {
|
||||
log::debug!(target: "app", "开始获取自动代理配置(事件驱动)");
|
||||
logging!(debug, Type::Network, "开始获取自动代理配置(事件驱动)");
|
||||
|
||||
let proxy_manager = EventDrivenProxyManager::global();
|
||||
|
||||
let current = proxy_manager.get_auto_proxy_cached();
|
||||
let current = proxy_manager.get_auto_proxy_cached().await;
|
||||
// 异步请求更新,立即返回缓存数据
|
||||
tokio::spawn(async move {
|
||||
AsyncHandler::spawn(move || async move {
|
||||
let _ = proxy_manager.get_auto_proxy_async().await;
|
||||
});
|
||||
|
||||
@@ -40,7 +49,13 @@ pub async fn get_auto_proxy() -> CmdResult<Mapping> {
|
||||
map.insert("enable".into(), current.enable.into());
|
||||
map.insert("url".into(), current.url.clone().into());
|
||||
|
||||
log::debug!(target: "app", "返回自动代理配置(缓存): enable={}, url={}", current.enable, current.url);
|
||||
logging!(
|
||||
debug,
|
||||
Type::Network,
|
||||
"返回自动代理配置(缓存): enable={}, url={}",
|
||||
current.enable,
|
||||
current.url
|
||||
);
|
||||
Ok(map)
|
||||
}
|
||||
|
||||
@@ -81,7 +96,7 @@ pub fn get_network_interfaces_info() -> CmdResult<Vec<NetworkInterface>> {
|
||||
use network_interface::{NetworkInterface, NetworkInterfaceConfig};
|
||||
|
||||
let names = get_network_interfaces();
|
||||
let interfaces = wrap_err!(NetworkInterface::show())?;
|
||||
let interfaces = NetworkInterface::show().stringify_err()?;
|
||||
|
||||
let mut result = Vec::new();
|
||||
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,99 +1,20 @@
|
||||
use super::CmdResult;
|
||||
use crate::{core::handle, module::mihomo::MihomoManager, state::proxy::CmdProxyState};
|
||||
use std::{
|
||||
sync::Mutex,
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
use tauri::Manager;
|
||||
|
||||
const PROVIDERS_REFRESH_INTERVAL: Duration = Duration::from_secs(3);
|
||||
const PROXIES_REFRESH_INTERVAL: Duration = Duration::from_secs(1);
|
||||
use crate::{logging, utils::logging::Type};
|
||||
|
||||
// TODO: 前端通过 emit 发送更新事件, tray 监听更新事件
|
||||
/// 同步托盘和GUI的代理选择状态
|
||||
#[tauri::command]
|
||||
pub async fn get_proxies() -> CmdResult<serde_json::Value> {
|
||||
let manager = MihomoManager::global();
|
||||
pub async fn sync_tray_proxy_selection() -> CmdResult<()> {
|
||||
use crate::core::tray::Tray;
|
||||
|
||||
let app_handle = handle::Handle::global().app_handle().unwrap();
|
||||
let cmd_proxy_state = app_handle.state::<Mutex<CmdProxyState>>();
|
||||
|
||||
let should_refresh = {
|
||||
let mut state = cmd_proxy_state.lock().unwrap();
|
||||
let now = Instant::now();
|
||||
if now.duration_since(state.last_refresh_time) > PROXIES_REFRESH_INTERVAL {
|
||||
state.need_refresh = true;
|
||||
state.last_refresh_time = now;
|
||||
match Tray::global().update_menu().await {
|
||||
Ok(_) => {
|
||||
logging!(info, Type::Cmd, "Tray proxy selection synced successfully");
|
||||
Ok(())
|
||||
}
|
||||
state.need_refresh
|
||||
};
|
||||
|
||||
if should_refresh {
|
||||
let proxies = manager.get_refresh_proxies().await?;
|
||||
{
|
||||
let mut state = cmd_proxy_state.lock().unwrap();
|
||||
state.proxies = Box::new(proxies);
|
||||
state.need_refresh = false;
|
||||
Err(e) => {
|
||||
logging!(error, Type::Cmd, "Failed to sync tray proxy selection: {e}");
|
||||
Err(e.to_string().into())
|
||||
}
|
||||
log::debug!(target: "app", "proxies刷新成功");
|
||||
}
|
||||
|
||||
let proxies = {
|
||||
let state = cmd_proxy_state.lock().unwrap();
|
||||
state.proxies.clone()
|
||||
};
|
||||
Ok(*proxies)
|
||||
}
|
||||
|
||||
/// 强制刷新代理缓存用于profile切换
|
||||
#[tauri::command]
|
||||
pub async fn force_refresh_proxies() -> CmdResult<serde_json::Value> {
|
||||
let manager = MihomoManager::global();
|
||||
let app_handle = handle::Handle::global().app_handle().unwrap();
|
||||
let cmd_proxy_state = app_handle.state::<Mutex<CmdProxyState>>();
|
||||
|
||||
log::debug!(target: "app", "强制刷新代理缓存");
|
||||
|
||||
let proxies = manager.get_refresh_proxies().await?;
|
||||
|
||||
{
|
||||
let mut state = cmd_proxy_state.lock().unwrap();
|
||||
state.proxies = Box::new(proxies.clone());
|
||||
state.need_refresh = false;
|
||||
state.last_refresh_time = Instant::now();
|
||||
}
|
||||
|
||||
log::debug!(target: "app", "强制刷新代理缓存完成");
|
||||
Ok(proxies)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_providers_proxies() -> CmdResult<serde_json::Value> {
|
||||
let app_handle = handle::Handle::global().app_handle().unwrap();
|
||||
let cmd_proxy_state = app_handle.state::<Mutex<CmdProxyState>>();
|
||||
|
||||
let should_refresh = {
|
||||
let mut state = cmd_proxy_state.lock().unwrap();
|
||||
let now = Instant::now();
|
||||
if now.duration_since(state.last_refresh_time) > PROVIDERS_REFRESH_INTERVAL {
|
||||
state.need_refresh = true;
|
||||
state.last_refresh_time = now;
|
||||
}
|
||||
state.need_refresh
|
||||
};
|
||||
|
||||
if should_refresh {
|
||||
let manager = MihomoManager::global();
|
||||
let providers = manager.get_providers_proxies().await?;
|
||||
{
|
||||
let mut state = cmd_proxy_state.lock().unwrap();
|
||||
state.providers_proxies = Box::new(providers);
|
||||
state.need_refresh = false;
|
||||
}
|
||||
log::debug!(target: "app", "providers_proxies刷新成功");
|
||||
}
|
||||
|
||||
let providers_proxies = {
|
||||
let state = cmd_proxy_state.lock().unwrap();
|
||||
state.providers_proxies.clone()
|
||||
};
|
||||
Ok(*providers_proxies)
|
||||
}
|
||||
|
||||
@@ -1,36 +1,114 @@
|
||||
use super::CmdResult;
|
||||
use crate::{config::*, wrap_err};
|
||||
use anyhow::Context;
|
||||
use serde_yaml::Mapping;
|
||||
use crate::{cmd::StringifyErr, config::*, core::CoreManager, log_err};
|
||||
use anyhow::{Context, anyhow};
|
||||
use serde_yaml_ng::Mapping;
|
||||
use smartstring::alias::String;
|
||||
use std::collections::HashMap;
|
||||
|
||||
/// 获取运行时配置
|
||||
#[tauri::command]
|
||||
pub fn get_runtime_config() -> CmdResult<Option<Mapping>> {
|
||||
Ok(Config::runtime().latest().config.clone())
|
||||
pub async fn get_runtime_config() -> CmdResult<Option<Mapping>> {
|
||||
Ok(Config::runtime().await.latest_ref().config.clone())
|
||||
}
|
||||
|
||||
/// 获取运行时YAML配置
|
||||
#[tauri::command]
|
||||
pub fn get_runtime_yaml() -> CmdResult<String> {
|
||||
let runtime = Config::runtime();
|
||||
let runtime = runtime.latest();
|
||||
pub async fn get_runtime_yaml() -> CmdResult<String> {
|
||||
let runtime = Config::runtime().await;
|
||||
let runtime = runtime.latest_ref();
|
||||
|
||||
let config = runtime.config.as_ref();
|
||||
wrap_err!(config
|
||||
.ok_or(anyhow::anyhow!("failed to parse config to yaml file"))
|
||||
.and_then(
|
||||
|config| serde_yaml::to_string(config).context("failed to convert config to yaml")
|
||||
))
|
||||
config
|
||||
.ok_or_else(|| anyhow!("failed to parse config to yaml file"))
|
||||
.and_then(|config| {
|
||||
serde_yaml_ng::to_string(config)
|
||||
.context("failed to convert config to yaml")
|
||||
.map(|s| s.into())
|
||||
})
|
||||
.stringify_err()
|
||||
}
|
||||
|
||||
/// 获取运行时存在的键
|
||||
#[tauri::command]
|
||||
pub fn get_runtime_exists() -> CmdResult<Vec<String>> {
|
||||
Ok(Config::runtime().latest().exists_keys.clone())
|
||||
pub async fn get_runtime_exists() -> CmdResult<Vec<String>> {
|
||||
Ok(Config::runtime().await.latest_ref().exists_keys.clone())
|
||||
}
|
||||
|
||||
/// 获取运行时日志
|
||||
#[tauri::command]
|
||||
pub fn get_runtime_logs() -> CmdResult<HashMap<String, Vec<(String, String)>>> {
|
||||
Ok(Config::runtime().latest().chain_logs.clone())
|
||||
pub async fn get_runtime_logs() -> CmdResult<HashMap<String, Vec<(String, String)>>> {
|
||||
Ok(Config::runtime().await.latest_ref().chain_logs.clone())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_runtime_proxy_chain_config(proxy_chain_exit_node: String) -> CmdResult<String> {
|
||||
let runtime = Config::runtime().await;
|
||||
let runtime = runtime.latest_ref();
|
||||
|
||||
let config = runtime
|
||||
.config
|
||||
.as_ref()
|
||||
.ok_or_else(|| anyhow!("failed to parse config to yaml file"))
|
||||
.stringify_err()?;
|
||||
|
||||
if let Some(serde_yaml_ng::Value::Sequence(proxies)) = config.get("proxies") {
|
||||
let mut proxy_name = Some(Some(proxy_chain_exit_node.as_str()));
|
||||
let mut proxies_chain = Vec::new();
|
||||
|
||||
while let Some(proxy) = proxies.iter().find(|proxy| {
|
||||
if let serde_yaml_ng::Value::Mapping(proxy_map) = proxy {
|
||||
proxy_map.get("name").map(|x| x.as_str()) == proxy_name
|
||||
&& proxy_map.get("dialer-proxy").is_some()
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}) {
|
||||
proxies_chain.push(proxy.to_owned());
|
||||
proxy_name = proxy.get("dialer-proxy").map(|x| x.as_str());
|
||||
}
|
||||
|
||||
if let Some(entry_proxy) = proxies
|
||||
.iter()
|
||||
.find(|proxy| proxy.get("name").map(|x| x.as_str()) == proxy_name)
|
||||
&& !proxies_chain.is_empty()
|
||||
{
|
||||
// 添加第一个节点
|
||||
proxies_chain.push(entry_proxy.to_owned());
|
||||
}
|
||||
|
||||
proxies_chain.reverse();
|
||||
|
||||
let mut config: HashMap<String, Vec<serde_yaml_ng::Value>> = HashMap::new();
|
||||
|
||||
config.insert("proxies".into(), proxies_chain);
|
||||
|
||||
serde_yaml_ng::to_string(&config)
|
||||
.context("YAML generation failed")
|
||||
.map(|s| s.into())
|
||||
.stringify_err()
|
||||
} else {
|
||||
Err("failed to get proxies or proxy-groups".into())
|
||||
}
|
||||
}
|
||||
|
||||
/// 更新运行时链式代理配置
|
||||
#[tauri::command]
|
||||
pub async fn update_proxy_chain_config_in_runtime(
|
||||
proxy_chain_config: Option<serde_yaml_ng::Value>,
|
||||
) -> CmdResult<()> {
|
||||
{
|
||||
let runtime = Config::runtime().await;
|
||||
let mut draft = runtime.draft_mut();
|
||||
draft.update_proxy_chain_config(proxy_chain_config);
|
||||
drop(draft);
|
||||
runtime.apply();
|
||||
}
|
||||
|
||||
// 生成新的运行配置文件并通知 Clash 核心重新加载
|
||||
let run_path = Config::generate_file(ConfigType::Run)
|
||||
.await
|
||||
.stringify_err()?;
|
||||
log_err!(CoreManager::global().put_configs_force(run_path).await);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -1,165 +1,171 @@
|
||||
use super::CmdResult;
|
||||
use crate::{
|
||||
cmd::StringifyErr,
|
||||
config::*,
|
||||
core::*,
|
||||
core::{validate::CoreConfigValidator, *},
|
||||
logging,
|
||||
utils::{dirs, logging::Type},
|
||||
wrap_err,
|
||||
};
|
||||
use std::fs;
|
||||
use smartstring::alias::String;
|
||||
use tokio::fs;
|
||||
|
||||
/// 保存profiles的配置
|
||||
#[tauri::command]
|
||||
pub async fn save_profile_file(index: String, file_data: Option<String>) -> CmdResult {
|
||||
if file_data.is_none() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// 在异步操作前完成所有文件操作
|
||||
let (file_path, original_content, is_merge_file) = {
|
||||
let profiles = Config::profiles();
|
||||
let profiles_guard = profiles.latest();
|
||||
let item = wrap_err!(profiles_guard.get_item(&index))?;
|
||||
// 确定是否为merge类型文件
|
||||
let is_merge = item.itype.as_ref().is_some_and(|t| t == "merge");
|
||||
let content = wrap_err!(item.read_file())?;
|
||||
let path = item.file.clone().ok_or("file field is null")?;
|
||||
let profiles_dir = wrap_err!(dirs::app_profiles_dir())?;
|
||||
(profiles_dir.join(path), content, is_merge)
|
||||
let file_data = match file_data {
|
||||
Some(d) => d,
|
||||
None => return Ok(()),
|
||||
};
|
||||
|
||||
// 保存新的配置文件
|
||||
wrap_err!(fs::write(&file_path, file_data.clone().unwrap()))?;
|
||||
// 在异步操作前获取必要元数据并释放锁
|
||||
let (rel_path, is_merge_file) = {
|
||||
let profiles = Config::profiles().await;
|
||||
let profiles_guard = profiles.latest_ref();
|
||||
let item = profiles_guard.get_item(&index).stringify_err()?;
|
||||
let is_merge = item.itype.as_ref().is_some_and(|t| t == "merge");
|
||||
let path = item.file.clone().ok_or("file field is null")?;
|
||||
(path, is_merge)
|
||||
};
|
||||
|
||||
// 读取原始内容(在释放profiles_guard后进行)
|
||||
let original_content = PrfItem {
|
||||
file: Some(rel_path.clone()),
|
||||
..Default::default()
|
||||
}
|
||||
.read_file()
|
||||
.await
|
||||
.stringify_err()?;
|
||||
|
||||
let profiles_dir = dirs::app_profiles_dir().stringify_err()?;
|
||||
let file_path = profiles_dir.join(rel_path.as_str());
|
||||
let file_path_str = file_path.to_string_lossy().to_string();
|
||||
|
||||
// 保存新的配置文件
|
||||
fs::write(&file_path, &file_data).await.stringify_err()?;
|
||||
|
||||
logging!(
|
||||
info,
|
||||
Type::Config,
|
||||
true,
|
||||
"[cmd配置save] 开始验证配置文件: {}, 是否为merge文件: {}",
|
||||
file_path_str,
|
||||
is_merge_file
|
||||
);
|
||||
|
||||
// 对于 merge 文件,只进行语法验证,不进行后续内核验证
|
||||
if is_merge_file {
|
||||
return handle_merge_file(&file_path_str, &file_path, &original_content).await;
|
||||
}
|
||||
|
||||
handle_full_validation(&file_path_str, &file_path, &original_content).await
|
||||
}
|
||||
|
||||
async fn restore_original(
|
||||
file_path: &std::path::Path,
|
||||
original_content: &str,
|
||||
) -> Result<(), String> {
|
||||
fs::write(file_path, original_content).await.stringify_err()
|
||||
}
|
||||
|
||||
fn is_script_error(err: &str, file_path_str: &str) -> bool {
|
||||
file_path_str.ends_with(".js")
|
||||
|| err.contains("Script syntax error")
|
||||
|| err.contains("Script must contain a main function")
|
||||
|| err.contains("Failed to read script file")
|
||||
}
|
||||
|
||||
async fn handle_merge_file(
|
||||
file_path_str: &str,
|
||||
file_path: &std::path::Path,
|
||||
original_content: &str,
|
||||
) -> CmdResult {
|
||||
logging!(
|
||||
info,
|
||||
Type::Config,
|
||||
true,
|
||||
"[cmd配置save] 检测到merge文件,只进行语法验证"
|
||||
);
|
||||
match CoreManager::global()
|
||||
.validate_config_file(&file_path_str, Some(true))
|
||||
.await
|
||||
{
|
||||
|
||||
match CoreConfigValidator::validate_config_file(file_path_str, Some(true)).await {
|
||||
Ok((true, _)) => {
|
||||
logging!(
|
||||
info,
|
||||
Type::Config,
|
||||
true,
|
||||
"[cmd配置save] merge文件语法验证通过"
|
||||
);
|
||||
// 成功后尝试更新整体配置
|
||||
logging!(info, Type::Config, "[cmd配置save] merge文件语法验证通过");
|
||||
if let Err(e) = CoreManager::global().update_config().await {
|
||||
logging!(
|
||||
warn,
|
||||
Type::Config,
|
||||
true,
|
||||
"[cmd配置save] 更新整体配置时发生错误: {}",
|
||||
e
|
||||
);
|
||||
} else {
|
||||
handle::Handle::refresh_clash();
|
||||
}
|
||||
return Ok(());
|
||||
Ok(())
|
||||
}
|
||||
Ok((false, error_msg)) => {
|
||||
logging!(
|
||||
warn,
|
||||
Type::Config,
|
||||
true,
|
||||
"[cmd配置save] merge文件语法验证失败: {}",
|
||||
error_msg
|
||||
);
|
||||
// 恢复原始配置文件
|
||||
wrap_err!(fs::write(&file_path, original_content))?;
|
||||
// 发送合并文件专用错误通知
|
||||
restore_original(file_path, original_content).await?;
|
||||
let result = (false, error_msg.clone());
|
||||
crate::cmd::validate::handle_yaml_validation_notice(&result, "合并配置文件");
|
||||
return Ok(());
|
||||
}
|
||||
Err(e) => {
|
||||
logging!(
|
||||
error,
|
||||
Type::Config,
|
||||
true,
|
||||
"[cmd配置save] 验证过程发生错误: {}",
|
||||
e
|
||||
);
|
||||
// 恢复原始配置文件
|
||||
wrap_err!(fs::write(&file_path, original_content))?;
|
||||
return Err(e.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 非merge文件使用完整验证流程
|
||||
match CoreManager::global()
|
||||
.validate_config_file(&file_path_str, None)
|
||||
.await
|
||||
{
|
||||
Ok((true, _)) => {
|
||||
logging!(info, Type::Config, true, "[cmd配置save] 验证成功");
|
||||
Ok(())
|
||||
}
|
||||
Ok((false, error_msg)) => {
|
||||
logging!(
|
||||
warn,
|
||||
Type::Config,
|
||||
true,
|
||||
"[cmd配置save] 验证失败: {}",
|
||||
error_msg
|
||||
);
|
||||
// 恢复原始配置文件
|
||||
wrap_err!(fs::write(&file_path, original_content))?;
|
||||
|
||||
// 智能判断错误类型
|
||||
let is_script_error = file_path_str.ends_with(".js")
|
||||
|| error_msg.contains("Script syntax error")
|
||||
|| error_msg.contains("Script must contain a main function")
|
||||
|| error_msg.contains("Failed to read script file");
|
||||
|
||||
if error_msg.contains("YAML syntax error")
|
||||
|| error_msg.contains("Failed to read file:")
|
||||
|| (!file_path_str.ends_with(".js") && !is_script_error)
|
||||
{
|
||||
// 普通YAML错误使用YAML通知处理
|
||||
log::info!(target: "app", "[cmd配置save] YAML配置文件验证失败,发送通知");
|
||||
let result = (false, error_msg.clone());
|
||||
crate::cmd::validate::handle_yaml_validation_notice(&result, "YAML配置文件");
|
||||
} else if is_script_error {
|
||||
// 脚本错误使用专门的通知处理
|
||||
log::info!(target: "app", "[cmd配置save] 脚本文件验证失败,发送通知");
|
||||
let result = (false, error_msg.clone());
|
||||
crate::cmd::validate::handle_script_validation_notice(&result, "脚本文件");
|
||||
} else {
|
||||
// 普通配置错误使用一般通知
|
||||
log::info!(target: "app", "[cmd配置save] 其他类型验证失败,发送一般通知");
|
||||
handle::Handle::notice_message("config_validate::error", &error_msg);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
Err(e) => {
|
||||
logging!(
|
||||
error,
|
||||
Type::Config,
|
||||
true,
|
||||
"[cmd配置save] 验证过程发生错误: {}",
|
||||
e
|
||||
);
|
||||
// 恢复原始配置文件
|
||||
wrap_err!(fs::write(&file_path, original_content))?;
|
||||
Err(e.to_string())
|
||||
logging!(error, Type::Config, "[cmd配置save] 验证过程发生错误: {}", e);
|
||||
restore_original(file_path, original_content).await?;
|
||||
Err(e.to_string().into())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_full_validation(
|
||||
file_path_str: &str,
|
||||
file_path: &std::path::Path,
|
||||
original_content: &str,
|
||||
) -> CmdResult {
|
||||
match CoreConfigValidator::validate_config_file(file_path_str, None).await {
|
||||
Ok((true, _)) => {
|
||||
logging!(info, Type::Config, "[cmd配置save] 验证成功");
|
||||
Ok(())
|
||||
}
|
||||
Ok((false, error_msg)) => {
|
||||
logging!(warn, Type::Config, "[cmd配置save] 验证失败: {}", error_msg);
|
||||
restore_original(file_path, original_content).await?;
|
||||
|
||||
if error_msg.contains("YAML syntax error")
|
||||
|| error_msg.contains("Failed to read file:")
|
||||
|| (!file_path_str.ends_with(".js") && !is_script_error(&error_msg, file_path_str))
|
||||
{
|
||||
logging!(
|
||||
info,
|
||||
Type::Config,
|
||||
"[cmd配置save] YAML配置文件验证失败,发送通知"
|
||||
);
|
||||
let result = (false, error_msg.to_owned());
|
||||
crate::cmd::validate::handle_yaml_validation_notice(&result, "YAML配置文件");
|
||||
} else if is_script_error(&error_msg, file_path_str) {
|
||||
logging!(
|
||||
info,
|
||||
Type::Config,
|
||||
"[cmd配置save] 脚本文件验证失败,发送通知"
|
||||
);
|
||||
let result = (false, error_msg.to_owned());
|
||||
crate::cmd::validate::handle_script_validation_notice(&result, "脚本文件");
|
||||
} else {
|
||||
logging!(
|
||||
info,
|
||||
Type::Config,
|
||||
"[cmd配置save] 其他类型验证失败,发送一般通知"
|
||||
);
|
||||
handle::Handle::notice_message("config_validate::error", error_msg.to_owned());
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
Err(e) => {
|
||||
logging!(error, Type::Config, "[cmd配置save] 验证过程发生错误: {}", e);
|
||||
restore_original(file_path, original_content).await?;
|
||||
Err(e.to_string().into())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,48 +1,45 @@
|
||||
use super::CmdResult;
|
||||
use super::{CmdResult, StringifyErr};
|
||||
use crate::{
|
||||
core::{service, CoreManager},
|
||||
core::service::{self, SERVICE_MANAGER, ServiceStatus},
|
||||
utils::i18n::t,
|
||||
};
|
||||
use smartstring::SmartString;
|
||||
|
||||
async fn execute_service_operation(
|
||||
service_op: impl std::future::Future<Output = Result<(), impl ToString + std::fmt::Debug>>,
|
||||
op_type: &str,
|
||||
) -> CmdResult {
|
||||
if service_op.await.is_err() {
|
||||
let emsg = format!("{} {} failed", op_type, "Service");
|
||||
return Err(t(emsg.as_str()));
|
||||
}
|
||||
if CoreManager::global().restart_core().await.is_err() {
|
||||
let emsg = format!("{} {} failed", "Restart", "Core");
|
||||
return Err(t(emsg.as_str()));
|
||||
async fn execute_service_operation_sync(status: ServiceStatus, op_type: &str) -> CmdResult {
|
||||
if let Err(e) = SERVICE_MANAGER
|
||||
.lock()
|
||||
.await
|
||||
.handle_service_status(&status)
|
||||
.await
|
||||
{
|
||||
let emsg = format!("{} Service failed: {}", op_type, e);
|
||||
return Err(SmartString::from(&*t(emsg.as_str()).await));
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn install_service() -> CmdResult {
|
||||
execute_service_operation(service::install_service(), "Install").await
|
||||
execute_service_operation_sync(ServiceStatus::InstallRequired, "Install").await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn uninstall_service() -> CmdResult {
|
||||
execute_service_operation(service::uninstall_service(), "Uninstall").await
|
||||
execute_service_operation_sync(ServiceStatus::UninstallRequired, "Uninstall").await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn reinstall_service() -> CmdResult {
|
||||
execute_service_operation(service::reinstall_service(), "Reinstall").await
|
||||
execute_service_operation_sync(ServiceStatus::ReinstallRequired, "Reinstall").await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn repair_service() -> CmdResult {
|
||||
execute_service_operation(service::force_reinstall_service(), "Repair").await
|
||||
execute_service_operation_sync(ServiceStatus::ForceReinstallRequired, "Repair").await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn is_service_available() -> CmdResult<bool> {
|
||||
service::is_service_available()
|
||||
.await
|
||||
.map(|_| true)
|
||||
.map_err(|e| e.to_string())
|
||||
service::is_service_available().await.stringify_err()?;
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
use super::CmdResult;
|
||||
use crate::{
|
||||
core::{handle, CoreManager},
|
||||
core::{CoreManager, handle},
|
||||
logging,
|
||||
module::sysinfo::PlatformSpecification,
|
||||
utils::logging::Type,
|
||||
};
|
||||
use once_cell::sync::Lazy;
|
||||
use std::{
|
||||
@@ -23,20 +25,20 @@ static APP_START_TIME: Lazy<AtomicI64> = Lazy::new(|| {
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn export_diagnostic_info() -> CmdResult<()> {
|
||||
let sysinfo = PlatformSpecification::new_async().await;
|
||||
let sysinfo = PlatformSpecification::new_sync();
|
||||
let info = format!("{sysinfo:?}");
|
||||
|
||||
let app_handle = handle::Handle::global().app_handle().unwrap();
|
||||
let app_handle = handle::Handle::app_handle();
|
||||
let cliboard = app_handle.clipboard();
|
||||
if cliboard.write_text(info).is_err() {
|
||||
log::error!(target: "app", "Failed to write to clipboard");
|
||||
logging!(error, Type::System, "Failed to write to clipboard");
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_system_info() -> CmdResult<String> {
|
||||
let sysinfo = PlatformSpecification::new_async().await;
|
||||
let sysinfo = PlatformSpecification::new_sync();
|
||||
let info = format!("{sysinfo:?}");
|
||||
Ok(info)
|
||||
}
|
||||
@@ -44,7 +46,7 @@ pub async fn get_system_info() -> CmdResult<String> {
|
||||
/// 获取当前内核运行模式
|
||||
#[tauri::command]
|
||||
pub async fn get_running_mode() -> Result<String, String> {
|
||||
Ok(CoreManager::global().get_running_mode().await.to_string())
|
||||
Ok(CoreManager::global().get_running_mode().to_string())
|
||||
}
|
||||
|
||||
/// 获取应用的运行时间(毫秒)
|
||||
|
||||
@@ -1,13 +1,14 @@
|
||||
use super::CmdResult;
|
||||
use crate::cmd::CmdResult;
|
||||
|
||||
/// Platform-specific implementation for UWP functionality
|
||||
#[cfg(windows)]
|
||||
mod platform {
|
||||
use super::CmdResult;
|
||||
use crate::{core::win_uwp, wrap_err};
|
||||
use crate::cmd::CmdResult;
|
||||
use crate::cmd::StringifyErr;
|
||||
use crate::core::win_uwp;
|
||||
|
||||
pub async fn invoke_uwp_tool() -> CmdResult {
|
||||
wrap_err!(win_uwp::invoke_uwptools().await)
|
||||
pub fn invoke_uwp_tool() -> CmdResult {
|
||||
win_uwp::invoke_uwptools().stringify_err()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,7 +17,7 @@ mod platform {
|
||||
mod platform {
|
||||
use super::CmdResult;
|
||||
|
||||
pub async fn invoke_uwp_tool() -> CmdResult {
|
||||
pub fn invoke_uwp_tool() -> CmdResult {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@@ -24,5 +25,5 @@ mod platform {
|
||||
/// Command exposed to Tauri
|
||||
#[tauri::command]
|
||||
pub async fn invoke_uwp_tool() -> CmdResult {
|
||||
platform::invoke_uwp_tool().await
|
||||
platform::invoke_uwp_tool()
|
||||
}
|
||||
|
||||
@@ -1,10 +1,15 @@
|
||||
use super::CmdResult;
|
||||
use crate::{core::*, logging, utils::logging::Type};
|
||||
use crate::{
|
||||
core::{validate::CoreConfigValidator, *},
|
||||
logging,
|
||||
utils::logging::Type,
|
||||
};
|
||||
use smartstring::alias::String;
|
||||
|
||||
/// 发送脚本验证通知消息
|
||||
#[tauri::command]
|
||||
pub async fn script_validate_notice(status: String, msg: String) -> CmdResult {
|
||||
handle::Handle::notice_message(&status, &msg);
|
||||
handle::Handle::notice_message(status.as_str(), msg.as_str());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -28,27 +33,17 @@ pub fn handle_script_validation_notice(result: &(bool, String), file_type: &str)
|
||||
"config_validate::script_error"
|
||||
};
|
||||
|
||||
logging!(
|
||||
warn,
|
||||
Type::Config,
|
||||
true,
|
||||
"{} 验证失败: {}",
|
||||
file_type,
|
||||
error_msg
|
||||
);
|
||||
handle::Handle::notice_message(status, error_msg);
|
||||
logging!(warn, Type::Config, "{} 验证失败: {}", file_type, error_msg);
|
||||
handle::Handle::notice_message(status, error_msg.to_owned());
|
||||
}
|
||||
}
|
||||
|
||||
/// 验证指定脚本文件
|
||||
#[tauri::command]
|
||||
pub async fn validate_script_file(file_path: String) -> CmdResult<bool> {
|
||||
logging!(info, Type::Config, true, "验证脚本文件: {}", file_path);
|
||||
logging!(info, Type::Config, "验证脚本文件: {}", file_path);
|
||||
|
||||
match CoreManager::global()
|
||||
.validate_config_file(&file_path, None)
|
||||
.await
|
||||
{
|
||||
match CoreConfigValidator::validate_config_file(&file_path, None).await {
|
||||
Ok(result) => {
|
||||
handle_script_validation_notice(&result, "脚本文件");
|
||||
Ok(result.0) // 返回验证结果布尔值
|
||||
@@ -58,7 +53,6 @@ pub async fn validate_script_file(file_path: String) -> CmdResult<bool> {
|
||||
logging!(
|
||||
error,
|
||||
Type::Config,
|
||||
true,
|
||||
"验证脚本文件过程发生错误: {}",
|
||||
error_msg
|
||||
);
|
||||
@@ -76,7 +70,6 @@ pub fn handle_yaml_validation_notice(result: &(bool, String), file_type: &str) {
|
||||
logging!(
|
||||
info,
|
||||
Type::Config,
|
||||
true,
|
||||
"[通知] 处理{}验证错误: {}",
|
||||
file_type,
|
||||
error_msg
|
||||
@@ -117,22 +110,14 @@ pub fn handle_yaml_validation_notice(result: &(bool, String), file_type: &str) {
|
||||
}
|
||||
};
|
||||
|
||||
logging!(
|
||||
warn,
|
||||
Type::Config,
|
||||
true,
|
||||
"{} 验证失败: {}",
|
||||
file_type,
|
||||
error_msg
|
||||
);
|
||||
logging!(warn, Type::Config, "{} 验证失败: {}", file_type, error_msg);
|
||||
logging!(
|
||||
info,
|
||||
Type::Config,
|
||||
true,
|
||||
"[通知] 发送通知: status={}, msg={}",
|
||||
status,
|
||||
error_msg
|
||||
);
|
||||
handle::Handle::notice_message(status, error_msg);
|
||||
handle::Handle::notice_message(status, error_msg.to_owned());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,16 +1,20 @@
|
||||
use super::CmdResult;
|
||||
use crate::{config::*, feat, wrap_err};
|
||||
use crate::{cmd::StringifyErr, config::*, feat};
|
||||
|
||||
/// 获取Verge配置
|
||||
#[tauri::command]
|
||||
pub fn get_verge_config() -> CmdResult<IVergeResponse> {
|
||||
let verge = Config::verge();
|
||||
let verge_data = verge.data().clone();
|
||||
Ok(IVergeResponse::from(*verge_data))
|
||||
pub async fn get_verge_config() -> CmdResult<IVergeResponse> {
|
||||
let verge = Config::verge().await;
|
||||
let verge_data = {
|
||||
let ref_data = verge.latest_ref();
|
||||
ref_data.clone()
|
||||
};
|
||||
let verge_response = IVergeResponse::from(verge_data);
|
||||
Ok(verge_response)
|
||||
}
|
||||
|
||||
/// 修改Verge配置
|
||||
#[tauri::command]
|
||||
pub async fn patch_verge_config(payload: IVerge) -> CmdResult {
|
||||
wrap_err!(feat::patch_verge(payload, false).await)
|
||||
feat::patch_verge(&payload, false).await.stringify_err()
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
use super::CmdResult;
|
||||
use crate::{config::*, core, feat, wrap_err};
|
||||
use crate::{cmd::StringifyErr, config::*, core, feat};
|
||||
use reqwest_dav::list_cmd::ListFile;
|
||||
use smartstring::alias::String;
|
||||
|
||||
/// 保存 WebDAV 配置
|
||||
#[tauri::command]
|
||||
@@ -11,11 +12,14 @@ pub async fn save_webdav_config(url: String, username: String, password: String)
|
||||
webdav_password: Some(password),
|
||||
..IVerge::default()
|
||||
};
|
||||
Config::verge().draft().patch_config(patch.clone());
|
||||
Config::verge().apply();
|
||||
Config::verge()
|
||||
.data()
|
||||
Config::verge().await.draft_mut().patch_config(&patch);
|
||||
Config::verge().await.apply();
|
||||
|
||||
// 分离数据获取和异步调用
|
||||
let verge_data = Config::verge().await.latest_ref().clone();
|
||||
verge_data
|
||||
.save_file()
|
||||
.await
|
||||
.map_err(|err| err.to_string())?;
|
||||
core::backup::WebDavClient::global().reset();
|
||||
Ok(())
|
||||
@@ -24,23 +28,25 @@ pub async fn save_webdav_config(url: String, username: String, password: String)
|
||||
/// 创建 WebDAV 备份并上传
|
||||
#[tauri::command]
|
||||
pub async fn create_webdav_backup() -> CmdResult<()> {
|
||||
wrap_err!(feat::create_backup_and_upload_webdav().await)
|
||||
feat::create_backup_and_upload_webdav()
|
||||
.await
|
||||
.stringify_err()
|
||||
}
|
||||
|
||||
/// 列出 WebDAV 上的备份文件
|
||||
#[tauri::command]
|
||||
pub async fn list_webdav_backup() -> CmdResult<Vec<ListFile>> {
|
||||
wrap_err!(feat::list_wevdav_backup().await)
|
||||
feat::list_wevdav_backup().await.stringify_err()
|
||||
}
|
||||
|
||||
/// 删除 WebDAV 上的备份文件
|
||||
#[tauri::command]
|
||||
pub async fn delete_webdav_backup(filename: String) -> CmdResult<()> {
|
||||
wrap_err!(feat::delete_webdav_backup(filename).await)
|
||||
feat::delete_webdav_backup(filename).await.stringify_err()
|
||||
}
|
||||
|
||||
/// 从 WebDAV 恢复备份文件
|
||||
#[tauri::command]
|
||||
pub async fn restore_webdav_backup(filename: String) -> CmdResult<()> {
|
||||
wrap_err!(feat::restore_webdav_backup(filename).await)
|
||||
feat::restore_webdav_backup(filename).await.stringify_err()
|
||||
}
|
||||
|
||||
@@ -1,7 +1,11 @@
|
||||
use crate::config::Config;
|
||||
use crate::constants::{network, tun as tun_const};
|
||||
use crate::utils::dirs::{ipc_path, path_to_str};
|
||||
use crate::utils::{dirs, help};
|
||||
use crate::{logging, utils::logging::Type};
|
||||
use anyhow::Result;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_yaml::{Mapping, Value};
|
||||
use serde_yaml_ng::{Mapping, Value};
|
||||
use std::{
|
||||
net::{IpAddr, Ipv4Addr, SocketAddr},
|
||||
str::FromStr,
|
||||
@@ -11,25 +15,34 @@ use std::{
|
||||
pub struct IClashTemp(pub Mapping);
|
||||
|
||||
impl IClashTemp {
|
||||
pub fn new() -> Self {
|
||||
pub async fn new() -> Self {
|
||||
let template = Self::template();
|
||||
match dirs::clash_path().and_then(|path| help::read_mapping(&path)) {
|
||||
let clash_path_result = dirs::clash_path();
|
||||
let map_result = if let Ok(path) = clash_path_result {
|
||||
help::read_mapping(&path).await
|
||||
} else {
|
||||
Err(anyhow::anyhow!("Failed to get clash path"))
|
||||
};
|
||||
|
||||
match map_result {
|
||||
Ok(mut map) => {
|
||||
template.0.keys().for_each(|key| {
|
||||
if !map.contains_key(key) {
|
||||
map.insert(key.clone(), template.0.get(key).unwrap().clone());
|
||||
if !map.contains_key(key)
|
||||
&& let Some(value) = template.0.get(key)
|
||||
{
|
||||
map.insert(key.clone(), value.clone());
|
||||
}
|
||||
});
|
||||
// 确保 secret 字段存在且不为空
|
||||
if let Some(Value::String(s)) = map.get_mut("secret") {
|
||||
if s.is_empty() {
|
||||
*s = "set-your-secret".to_string();
|
||||
}
|
||||
if let Some(Value::String(s)) = map.get_mut("secret")
|
||||
&& s.is_empty()
|
||||
{
|
||||
*s = "set-your-secret".into();
|
||||
}
|
||||
Self(Self::guard(map))
|
||||
}
|
||||
Err(err) => {
|
||||
log::error!(target: "app", "{err}");
|
||||
logging!(error, Type::Config, "{err}");
|
||||
template
|
||||
}
|
||||
}
|
||||
@@ -37,26 +50,43 @@ impl IClashTemp {
|
||||
|
||||
pub fn template() -> Self {
|
||||
let mut map = Mapping::new();
|
||||
let mut tun = Mapping::new();
|
||||
let mut tun_config = Mapping::new();
|
||||
let mut cors_map = Mapping::new();
|
||||
tun.insert("enable".into(), false.into());
|
||||
tun.insert("stack".into(), "gvisor".into());
|
||||
tun.insert("auto-route".into(), true.into());
|
||||
tun.insert("strict-route".into(), false.into());
|
||||
tun.insert("auto-detect-interface".into(), true.into());
|
||||
tun.insert("dns-hijack".into(), vec!["any:53"].into());
|
||||
|
||||
tun_config.insert("enable".into(), false.into());
|
||||
tun_config.insert("stack".into(), tun_const::DEFAULT_STACK.into());
|
||||
tun_config.insert("auto-route".into(), true.into());
|
||||
tun_config.insert("strict-route".into(), false.into());
|
||||
tun_config.insert("auto-detect-interface".into(), true.into());
|
||||
tun_config.insert("dns-hijack".into(), tun_const::DNS_HIJACK.into());
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
map.insert("redir-port".into(), 7895.into());
|
||||
map.insert("redir-port".into(), network::ports::DEFAULT_REDIR.into());
|
||||
#[cfg(target_os = "linux")]
|
||||
map.insert("tproxy-port".into(), 7896.into());
|
||||
map.insert("mixed-port".into(), 7897.into());
|
||||
map.insert("socks-port".into(), 7898.into());
|
||||
map.insert("port".into(), 7899.into());
|
||||
map.insert("log-level".into(), "warning".into());
|
||||
map.insert("tproxy-port".into(), network::ports::DEFAULT_TPROXY.into());
|
||||
|
||||
map.insert("mixed-port".into(), network::ports::DEFAULT_MIXED.into());
|
||||
map.insert("socks-port".into(), network::ports::DEFAULT_SOCKS.into());
|
||||
map.insert("port".into(), network::ports::DEFAULT_HTTP.into());
|
||||
map.insert("log-level".into(), "info".into());
|
||||
map.insert("allow-lan".into(), false.into());
|
||||
map.insert("ipv6".into(), true.into());
|
||||
map.insert("mode".into(), "rule".into());
|
||||
map.insert("external-controller".into(), "127.0.0.1:9097".into());
|
||||
map.insert(
|
||||
"external-controller".into(),
|
||||
network::DEFAULT_EXTERNAL_CONTROLLER.into(),
|
||||
);
|
||||
#[cfg(unix)]
|
||||
map.insert(
|
||||
"external-controller-unix".into(),
|
||||
Self::guard_external_controller_ipc().into(),
|
||||
);
|
||||
#[cfg(windows)]
|
||||
map.insert(
|
||||
"external-controller-pipe".into(),
|
||||
Self::guard_external_controller_ipc().into(),
|
||||
);
|
||||
map.insert("tun".into(), tun_config.into());
|
||||
cors_map.insert("allow-private-network".into(), true.into());
|
||||
cors_map.insert(
|
||||
"allow-origins".into(),
|
||||
@@ -73,7 +103,6 @@ impl IClashTemp {
|
||||
.into(),
|
||||
);
|
||||
map.insert("secret".into(), "set-your-secret".into());
|
||||
map.insert("tun".into(), tun.into());
|
||||
map.insert("external-controller-cors".into(), cors_map.into());
|
||||
map.insert("unified-delay".into(), true.into());
|
||||
Self(map)
|
||||
@@ -87,7 +116,12 @@ impl IClashTemp {
|
||||
let mixed_port = Self::guard_mixed_port(&config);
|
||||
let socks_port = Self::guard_socks_port(&config);
|
||||
let port = Self::guard_port(&config);
|
||||
let ctrl = Self::guard_server_ctrl(&config);
|
||||
let ctrl = Self::guard_external_controller(&config);
|
||||
#[cfg(unix)]
|
||||
let external_controller_unix = Self::guard_external_controller_ipc();
|
||||
#[cfg(windows)]
|
||||
let external_controller_pipe = Self::guard_external_controller_ipc();
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
config.insert("redir-port".into(), redir_port.into());
|
||||
#[cfg(target_os = "linux")]
|
||||
@@ -97,25 +131,16 @@ impl IClashTemp {
|
||||
config.insert("port".into(), port.into());
|
||||
config.insert("external-controller".into(), ctrl.into());
|
||||
|
||||
// 强制覆盖 external-controller-cors 字段,允许本地和 tauri 前端
|
||||
let mut cors_map = Mapping::new();
|
||||
cors_map.insert("allow-private-network".into(), true.into());
|
||||
cors_map.insert(
|
||||
"allow-origins".into(),
|
||||
vec![
|
||||
"tauri://localhost",
|
||||
"http://tauri.localhost",
|
||||
// Only enable this in dev mode
|
||||
#[cfg(feature = "verge-dev")]
|
||||
"http://localhost:3000",
|
||||
"https://yacd.metacubex.one",
|
||||
"https://metacubex.github.io",
|
||||
"https://board.zash.run.place",
|
||||
]
|
||||
.into(),
|
||||
#[cfg(unix)]
|
||||
config.insert(
|
||||
"external-controller-unix".into(),
|
||||
external_controller_unix.into(),
|
||||
);
|
||||
#[cfg(windows)]
|
||||
config.insert(
|
||||
"external-controller-pipe".into(),
|
||||
external_controller_pipe.into(),
|
||||
);
|
||||
config.insert("external-controller-cors".into(), cors_map.into());
|
||||
|
||||
config
|
||||
}
|
||||
|
||||
@@ -125,12 +150,13 @@ impl IClashTemp {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn save_config(&self) -> Result<()> {
|
||||
pub async fn save_config(&self) -> Result<()> {
|
||||
help::save_yaml(
|
||||
&dirs::clash_path()?,
|
||||
&self.0,
|
||||
Some("# Generated by Clash Verge"),
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
pub fn get_mixed_port(&self) -> u16 {
|
||||
@@ -188,9 +214,9 @@ impl IClashTemp {
|
||||
Value::Number(val_num) => val_num.as_u64().map(|u| u as u16),
|
||||
_ => None,
|
||||
})
|
||||
.unwrap_or(7896);
|
||||
.unwrap_or(network::ports::DEFAULT_TPROXY);
|
||||
if port == 0 {
|
||||
port = 7896;
|
||||
port = network::ports::DEFAULT_TPROXY;
|
||||
}
|
||||
port
|
||||
}
|
||||
@@ -261,7 +287,28 @@ impl IClashTemp {
|
||||
}
|
||||
None => None,
|
||||
})
|
||||
.unwrap_or("127.0.0.1:9097".into())
|
||||
.unwrap_or_else(|| "127.0.0.1:9097".into())
|
||||
}
|
||||
|
||||
pub fn guard_external_controller(config: &Mapping) -> String {
|
||||
// 在初始化阶段,直接返回配置中的值,不进行额外检查
|
||||
// 这样可以避免在配置加载期间的循环依赖
|
||||
Self::guard_server_ctrl(config)
|
||||
}
|
||||
|
||||
pub async fn guard_external_controller_with_setting(config: &Mapping) -> String {
|
||||
// 检查 enable_external_controller 设置,用于运行时配置生成
|
||||
let enable_external_controller = Config::verge()
|
||||
.await
|
||||
.latest_ref()
|
||||
.enable_external_controller
|
||||
.unwrap_or(false);
|
||||
|
||||
if enable_external_controller {
|
||||
Self::guard_server_ctrl(config)
|
||||
} else {
|
||||
"".into()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn guard_client_ctrl(config: &Mapping) -> String {
|
||||
@@ -276,6 +323,17 @@ impl IClashTemp {
|
||||
Err(_) => "127.0.0.1:9097".into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn guard_external_controller_ipc() -> String {
|
||||
// 总是使用当前的 IPC 路径,确保配置文件与运行时路径一致
|
||||
ipc_path()
|
||||
.ok()
|
||||
.and_then(|path| path_to_str(&path).ok().map(|s| s.into()))
|
||||
.unwrap_or_else(|| {
|
||||
logging!(error, Type::Config, "Failed to get IPC path");
|
||||
crate::constants::network::DEFAULT_EXTERNAL_CONTROLLER.into()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
|
||||
|
||||
@@ -1,18 +1,18 @@
|
||||
use super::{Draft, IClashTemp, IProfiles, IRuntime, IVerge};
|
||||
use super::{IClashTemp, IProfiles, IRuntime, IVerge};
|
||||
use crate::{
|
||||
config::PrfItem,
|
||||
core::{handle, CoreManager},
|
||||
enhance, logging,
|
||||
process::AsyncHandler,
|
||||
utils::{dirs, help, logging::Type},
|
||||
cmd,
|
||||
config::{PrfItem, profiles_append_item_safe},
|
||||
constants::{files, timing},
|
||||
core::{CoreManager, handle, service, tray, validate::CoreConfigValidator},
|
||||
enhance, logging, logging_error,
|
||||
utils::{Draft, dirs, help, logging::Type},
|
||||
};
|
||||
use anyhow::{anyhow, Result};
|
||||
use once_cell::sync::OnceCell;
|
||||
use anyhow::{Result, anyhow};
|
||||
use backoff::{Error as BackoffError, ExponentialBackoff};
|
||||
use smartstring::alias::String;
|
||||
use std::path::PathBuf;
|
||||
use tokio::time::{sleep, Duration};
|
||||
|
||||
pub const RUNTIME_CONFIG: &str = "clash-verge.yaml";
|
||||
pub const CHECK_CONFIG: &str = "clash-verge-check.yaml";
|
||||
use tokio::sync::OnceCell;
|
||||
use tokio::time::sleep;
|
||||
|
||||
pub struct Config {
|
||||
clash_config: Draft<Box<IClashTemp>>,
|
||||
@@ -22,141 +22,182 @@ pub struct Config {
|
||||
}
|
||||
|
||||
impl Config {
|
||||
pub fn global() -> &'static Config {
|
||||
static CONFIG: OnceCell<Config> = OnceCell::new();
|
||||
|
||||
CONFIG.get_or_init(|| Config {
|
||||
clash_config: Draft::from(Box::new(IClashTemp::new())),
|
||||
verge_config: Draft::from(Box::new(IVerge::new())),
|
||||
profiles_config: Draft::from(Box::new(IProfiles::new())),
|
||||
pub async fn global() -> &'static Config {
|
||||
static CONFIG: OnceCell<Config> = OnceCell::const_new();
|
||||
CONFIG
|
||||
.get_or_init(|| async {
|
||||
Config {
|
||||
clash_config: Draft::from(Box::new(IClashTemp::new().await)),
|
||||
verge_config: Draft::from(Box::new(IVerge::new().await)),
|
||||
profiles_config: Draft::from(Box::new(IProfiles::new().await)),
|
||||
runtime_config: Draft::from(Box::new(IRuntime::new())),
|
||||
}
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub fn clash() -> Draft<Box<IClashTemp>> {
|
||||
Self::global().clash_config.clone()
|
||||
pub async fn clash() -> Draft<Box<IClashTemp>> {
|
||||
Self::global().await.clash_config.clone()
|
||||
}
|
||||
|
||||
pub fn verge() -> Draft<Box<IVerge>> {
|
||||
Self::global().verge_config.clone()
|
||||
pub async fn verge() -> Draft<Box<IVerge>> {
|
||||
Self::global().await.verge_config.clone()
|
||||
}
|
||||
|
||||
pub fn profiles() -> Draft<Box<IProfiles>> {
|
||||
Self::global().profiles_config.clone()
|
||||
pub async fn profiles() -> Draft<Box<IProfiles>> {
|
||||
Self::global().await.profiles_config.clone()
|
||||
}
|
||||
|
||||
pub fn runtime() -> Draft<Box<IRuntime>> {
|
||||
Self::global().runtime_config.clone()
|
||||
pub async fn runtime() -> Draft<Box<IRuntime>> {
|
||||
Self::global().await.runtime_config.clone()
|
||||
}
|
||||
|
||||
/// 初始化订阅
|
||||
pub async fn init_config() -> Result<()> {
|
||||
if Self::profiles()
|
||||
.data()
|
||||
.get_item(&"Merge".to_string())
|
||||
.is_err()
|
||||
Self::ensure_default_profile_items().await?;
|
||||
|
||||
// init Tun mode
|
||||
if !cmd::system::is_admin().unwrap_or_default()
|
||||
&& service::is_service_available().await.is_err()
|
||||
{
|
||||
let merge_item = PrfItem::from_merge(Some("Merge".to_string()))?;
|
||||
Self::profiles().data().append_item(merge_item.clone())?;
|
||||
let verge = Config::verge().await;
|
||||
verge.draft_mut().enable_tun_mode = Some(false);
|
||||
verge.apply();
|
||||
let _ = tray::Tray::global().update_tray_display().await;
|
||||
|
||||
// 分离数据获取和异步调用避免Send问题
|
||||
let verge_data = Config::verge().await.latest_ref().clone();
|
||||
logging_error!(Type::Core, verge_data.save_file().await);
|
||||
}
|
||||
if Self::profiles()
|
||||
.data()
|
||||
.get_item(&"Script".to_string())
|
||||
.is_err()
|
||||
{
|
||||
let script_item = PrfItem::from_script(Some("Script".to_string()))?;
|
||||
Self::profiles().data().append_item(script_item.clone())?;
|
||||
|
||||
let validation_result = Self::generate_and_validate().await?;
|
||||
|
||||
if let Some((msg_type, msg_content)) = validation_result {
|
||||
sleep(timing::STARTUP_ERROR_DELAY).await;
|
||||
handle::Handle::notice_message(msg_type, msg_content);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Ensure "Merge" and "Script" profile items exist, adding them if missing.
|
||||
async fn ensure_default_profile_items() -> Result<()> {
|
||||
let profiles = Self::profiles().await;
|
||||
if profiles.latest_ref().get_item("Merge").is_err() {
|
||||
let merge_item = &mut PrfItem::from_merge(Some("Merge".into()))?;
|
||||
profiles_append_item_safe(merge_item).await?;
|
||||
}
|
||||
if profiles.latest_ref().get_item("Script").is_err() {
|
||||
let script_item = &mut PrfItem::from_script(Some("Script".into()))?;
|
||||
profiles_append_item_safe(script_item).await?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn generate_and_validate() -> Result<Option<(&'static str, String)>> {
|
||||
// 生成运行时配置
|
||||
if let Err(err) = Self::generate().await {
|
||||
logging!(error, Type::Config, true, "生成运行时配置失败: {}", err);
|
||||
logging!(error, Type::Config, "生成运行时配置失败: {}", err);
|
||||
} else {
|
||||
logging!(info, Type::Config, true, "生成运行时配置成功");
|
||||
logging!(info, Type::Config, "生成运行时配置成功");
|
||||
}
|
||||
|
||||
// 生成运行时配置文件并验证
|
||||
let config_result = Self::generate_file(ConfigType::Run);
|
||||
let config_result = Self::generate_file(ConfigType::Run).await;
|
||||
|
||||
let validation_result = if config_result.is_ok() {
|
||||
if config_result.is_ok() {
|
||||
// 验证配置文件
|
||||
logging!(info, Type::Config, true, "开始验证配置");
|
||||
logging!(info, Type::Config, "开始验证配置");
|
||||
|
||||
match CoreManager::global().validate_config().await {
|
||||
match CoreConfigValidator::global().validate_config().await {
|
||||
Ok((is_valid, error_msg)) => {
|
||||
if !is_valid {
|
||||
logging!(
|
||||
warn,
|
||||
Type::Config,
|
||||
true,
|
||||
"[首次启动] 配置验证失败,使用默认最小配置启动: {}",
|
||||
error_msg
|
||||
);
|
||||
CoreManager::global()
|
||||
.use_default_config("config_validate::boot_error", &error_msg)
|
||||
.await?;
|
||||
Some(("config_validate::boot_error", error_msg))
|
||||
Ok(Some(("config_validate::boot_error", error_msg)))
|
||||
} else {
|
||||
logging!(info, Type::Config, true, "配置验证成功");
|
||||
Some(("config_validate::success", String::new()))
|
||||
logging!(info, Type::Config, "配置验证成功");
|
||||
// 前端没有必要知道验证成功的消息,也没有事件驱动
|
||||
// Some(("config_validate::success", String::new()))
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
Err(err) => {
|
||||
logging!(warn, Type::Config, true, "验证进程执行失败: {}", err);
|
||||
logging!(warn, Type::Config, "验证过程执行失败: {}", err);
|
||||
CoreManager::global()
|
||||
.use_default_config("config_validate::process_terminated", "")
|
||||
.await?;
|
||||
Some(("config_validate::process_terminated", String::new()))
|
||||
Ok(Some(("config_validate::process_terminated", String::new())))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
logging!(warn, Type::Config, true, "生成配置文件失败,使用默认配置");
|
||||
logging!(warn, Type::Config, "生成配置文件失败,使用默认配置");
|
||||
CoreManager::global()
|
||||
.use_default_config("config_validate::error", "")
|
||||
.await?;
|
||||
Some(("config_validate::error", String::new()))
|
||||
};
|
||||
|
||||
// 在单独的任务中发送通知
|
||||
if let Some((msg_type, msg_content)) = validation_result {
|
||||
AsyncHandler::spawn(move || async move {
|
||||
sleep(Duration::from_secs(2)).await;
|
||||
handle::Handle::notice_message(msg_type, &msg_content);
|
||||
});
|
||||
Ok(Some(("config_validate::error", String::new())))
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// 将订阅丢到对应的文件中
|
||||
pub fn generate_file(typ: ConfigType) -> Result<PathBuf> {
|
||||
pub async fn generate_file(typ: ConfigType) -> Result<PathBuf> {
|
||||
let path = match typ {
|
||||
ConfigType::Run => dirs::app_home_dir()?.join(RUNTIME_CONFIG),
|
||||
ConfigType::Check => dirs::app_home_dir()?.join(CHECK_CONFIG),
|
||||
ConfigType::Run => dirs::app_home_dir()?.join(files::RUNTIME_CONFIG),
|
||||
ConfigType::Check => dirs::app_home_dir()?.join(files::CHECK_CONFIG),
|
||||
};
|
||||
|
||||
let runtime = Config::runtime();
|
||||
let runtime = runtime.latest();
|
||||
let runtime = Config::runtime().await;
|
||||
let config = runtime
|
||||
.latest_ref()
|
||||
.config
|
||||
.as_ref()
|
||||
.ok_or(anyhow!("failed to get runtime config"))?;
|
||||
.ok_or_else(|| anyhow!("failed to get runtime config"))?
|
||||
.clone();
|
||||
drop(runtime); // 显式释放锁
|
||||
|
||||
help::save_yaml(&path, &config, Some("# Generated by Clash Verge"))?;
|
||||
help::save_yaml(&path, &config, Some("# Generated by Clash Verge")).await?;
|
||||
Ok(path)
|
||||
}
|
||||
|
||||
/// 生成订阅存好
|
||||
pub async fn generate() -> Result<()> {
|
||||
let (config, exists_keys, logs) = enhance::enhance().await;
|
||||
|
||||
*Config::runtime().draft() = Box::new(IRuntime {
|
||||
**Config::runtime().await.draft_mut() = IRuntime {
|
||||
config: Some(config),
|
||||
exists_keys,
|
||||
chain_logs: logs,
|
||||
});
|
||||
};
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn verify_config_initialization() {
|
||||
let backoff_strategy = ExponentialBackoff {
|
||||
initial_interval: std::time::Duration::from_millis(100),
|
||||
max_interval: std::time::Duration::from_secs(2),
|
||||
max_elapsed_time: Some(std::time::Duration::from_secs(10)),
|
||||
multiplier: 2.0,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let operation = || async {
|
||||
if Config::runtime().await.latest_ref().config.is_some() {
|
||||
return Ok::<(), BackoffError<anyhow::Error>>(());
|
||||
}
|
||||
|
||||
Config::generate().await.map_err(BackoffError::transient)
|
||||
};
|
||||
|
||||
if let Err(e) = backoff::future::retry(backoff_strategy, operation).await {
|
||||
logging!(error, Type::Setup, "Config init verification failed: {}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
@@ -170,33 +211,33 @@ mod tests {
|
||||
use std::mem;
|
||||
|
||||
#[test]
|
||||
#[allow(unused_variables)]
|
||||
#[allow(clippy::expect_used)]
|
||||
fn test_prfitem_from_merge_size() {
|
||||
let merge_item = PrfItem::from_merge(Some("Merge".to_string())).unwrap();
|
||||
dbg!(&merge_item);
|
||||
let merge_item =
|
||||
PrfItem::from_merge(Some("Merge".into())).expect("Failed to create merge item in test");
|
||||
let prfitem_size = mem::size_of_val(&merge_item);
|
||||
dbg!(prfitem_size);
|
||||
// Boxed version
|
||||
let boxed_merge_item = Box::new(merge_item);
|
||||
let box_prfitem_size = mem::size_of_val(&boxed_merge_item);
|
||||
dbg!(box_prfitem_size);
|
||||
// The size of Box<T> is always pointer-sized (usually 8 bytes on 64-bit)
|
||||
// assert_eq!(box_prfitem_size, mem::size_of::<Box<PrfItem>>());
|
||||
assert!(box_prfitem_size < prfitem_size);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[allow(unused_variables)]
|
||||
fn test_draft_size_non_boxed() {
|
||||
let draft = Draft::from(IRuntime::new());
|
||||
let iruntime_size = std::mem::size_of_val(&draft);
|
||||
dbg!(iruntime_size);
|
||||
assert_eq!(iruntime_size, std::mem::size_of::<Draft<IRuntime>>());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[allow(unused_variables)]
|
||||
fn test_draft_size_boxed() {
|
||||
let draft = Draft::from(Box::new(IRuntime::new()));
|
||||
let box_iruntime_size = std::mem::size_of_val(&draft);
|
||||
dbg!(box_iruntime_size);
|
||||
assert_eq!(
|
||||
box_iruntime_size,
|
||||
std::mem::size_of::<Draft<Box<IRuntime>>>()
|
||||
|
||||
@@ -1,135 +0,0 @@
|
||||
use super::{IClashTemp, IProfiles, IRuntime, IVerge};
|
||||
use parking_lot::{MappedMutexGuard, Mutex, MutexGuard};
|
||||
use std::sync::Arc;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Draft<T: Clone + ToOwned> {
|
||||
inner: Arc<Mutex<(T, Option<T>)>>,
|
||||
}
|
||||
|
||||
macro_rules! draft_define {
|
||||
($id: ident) => {
|
||||
impl From<$id> for Draft<$id> {
|
||||
fn from(data: $id) -> Self {
|
||||
Draft {
|
||||
inner: Arc::new(Mutex::new((data, None))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Draft<Box<$id>> {
|
||||
#[allow(unused)]
|
||||
pub fn data(&self) -> MappedMutexGuard<Box<$id>> {
|
||||
MutexGuard::map(self.inner.lock(), |guard| &mut guard.0)
|
||||
}
|
||||
|
||||
pub fn latest(&self) -> MappedMutexGuard<Box<$id>> {
|
||||
MutexGuard::map(self.inner.lock(), |inner| {
|
||||
if inner.1.is_none() {
|
||||
&mut inner.0
|
||||
} else {
|
||||
inner.1.as_mut().unwrap()
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn draft(&self) -> MappedMutexGuard<Box<$id>> {
|
||||
MutexGuard::map(self.inner.lock(), |inner| {
|
||||
if inner.1.is_none() {
|
||||
inner.1 = Some(inner.0.clone());
|
||||
}
|
||||
|
||||
inner.1.as_mut().unwrap()
|
||||
})
|
||||
}
|
||||
|
||||
pub fn apply(&self) -> Option<Box<$id>> {
|
||||
let mut inner = self.inner.lock();
|
||||
|
||||
match inner.1.take() {
|
||||
Some(draft) => {
|
||||
let old_value = inner.0.to_owned();
|
||||
inner.0 = draft.to_owned();
|
||||
Some(old_value)
|
||||
}
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn discard(&self) -> Option<Box<$id>> {
|
||||
let mut inner = self.inner.lock();
|
||||
inner.1.take()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Box<$id>> for Draft<Box<$id>> {
|
||||
fn from(data: Box<$id>) -> Self {
|
||||
Draft {
|
||||
inner: Arc::new(Mutex::new((data, None))),
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// draft_define!(IClash);
|
||||
draft_define!(IClashTemp);
|
||||
draft_define!(IProfiles);
|
||||
draft_define!(IRuntime);
|
||||
draft_define!(IVerge);
|
||||
|
||||
#[test]
|
||||
fn test_draft_box() {
|
||||
let verge = Box::new(IVerge {
|
||||
enable_auto_launch: Some(true),
|
||||
enable_tun_mode: Some(false),
|
||||
..IVerge::default()
|
||||
});
|
||||
|
||||
let draft = Draft::from(verge);
|
||||
|
||||
assert_eq!(draft.data().enable_auto_launch, Some(true));
|
||||
assert_eq!(draft.data().enable_tun_mode, Some(false));
|
||||
|
||||
assert_eq!(draft.draft().enable_auto_launch, Some(true));
|
||||
assert_eq!(draft.draft().enable_tun_mode, Some(false));
|
||||
|
||||
{
|
||||
let mut d = draft.draft();
|
||||
d.enable_auto_launch = Some(false);
|
||||
d.enable_tun_mode = Some(true);
|
||||
}
|
||||
|
||||
assert_eq!(draft.data().enable_auto_launch, Some(true));
|
||||
assert_eq!(draft.data().enable_tun_mode, Some(false));
|
||||
|
||||
assert_eq!(draft.draft().enable_auto_launch, Some(false));
|
||||
assert_eq!(draft.draft().enable_tun_mode, Some(true));
|
||||
|
||||
assert_eq!(draft.latest().enable_auto_launch, Some(false));
|
||||
assert_eq!(draft.latest().enable_tun_mode, Some(true));
|
||||
|
||||
assert!(draft.apply().is_some());
|
||||
assert!(draft.apply().is_none());
|
||||
|
||||
assert_eq!(draft.data().enable_auto_launch, Some(false));
|
||||
assert_eq!(draft.data().enable_tun_mode, Some(true));
|
||||
|
||||
assert_eq!(draft.draft().enable_auto_launch, Some(false));
|
||||
assert_eq!(draft.draft().enable_tun_mode, Some(true));
|
||||
|
||||
{
|
||||
let mut d = draft.draft();
|
||||
d.enable_auto_launch = Some(true);
|
||||
}
|
||||
|
||||
assert_eq!(draft.data().enable_auto_launch, Some(false));
|
||||
assert_eq!(draft.draft().enable_auto_launch, Some(true));
|
||||
|
||||
assert!(draft.discard().is_some());
|
||||
|
||||
assert_eq!(draft.data().enable_auto_launch, Some(false));
|
||||
assert!(draft.discard().is_none());
|
||||
|
||||
assert_eq!(draft.draft().enable_auto_launch, Some(false));
|
||||
}
|
||||
@@ -1,14 +1,15 @@
|
||||
use crate::utils::dirs::get_encryption_key;
|
||||
use aes_gcm::{
|
||||
aead::{Aead, KeyInit},
|
||||
Aes256Gcm, Key,
|
||||
aead::{Aead, KeyInit},
|
||||
};
|
||||
use base64::{engine::general_purpose::STANDARD, Engine};
|
||||
use base64::{Engine, engine::general_purpose::STANDARD};
|
||||
use serde::{Deserialize, Deserializer, Serialize, Serializer};
|
||||
|
||||
const NONCE_LENGTH: usize = 12;
|
||||
|
||||
/// Encrypt data
|
||||
#[allow(deprecated)]
|
||||
pub fn encrypt_data(data: &str) -> Result<String, Box<dyn std::error::Error>> {
|
||||
let encryption_key = get_encryption_key()?;
|
||||
let key = Key::<Aes256Gcm>::from_slice(&encryption_key);
|
||||
@@ -30,6 +31,7 @@ pub fn encrypt_data(data: &str) -> Result<String, Box<dyn std::error::Error>> {
|
||||
}
|
||||
|
||||
/// Decrypt data
|
||||
#[allow(deprecated)]
|
||||
pub fn decrypt_data(encrypted: &str) -> Result<String, Box<dyn std::error::Error>> {
|
||||
let encryption_key = get_encryption_key()?;
|
||||
let key = Key::<Aes256Gcm>::from_slice(&encryption_key);
|
||||
|
||||
@@ -1,16 +1,13 @@
|
||||
mod clash;
|
||||
#[allow(clippy::module_inception)]
|
||||
mod config;
|
||||
mod draft;
|
||||
mod encrypt;
|
||||
mod prfitem;
|
||||
mod profiles;
|
||||
pub mod profiles;
|
||||
mod runtime;
|
||||
mod verge;
|
||||
|
||||
pub use self::{
|
||||
clash::*, config::*, draft::*, encrypt::*, prfitem::*, profiles::*, runtime::*, verge::*,
|
||||
};
|
||||
pub use self::{clash::*, config::*, encrypt::*, prfitem::*, profiles::*, runtime::*, verge::*};
|
||||
|
||||
pub const DEFAULT_PAC: &str = r#"function FindProxyForURL(url, host) {
|
||||
return "PROXY 127.0.0.1:%mixed-port%; SOCKS5 127.0.0.1:%mixed-port%; DIRECT;";
|
||||
|
||||
@@ -1,15 +1,17 @@
|
||||
use crate::utils::{
|
||||
use crate::{
|
||||
config::profiles,
|
||||
utils::{
|
||||
dirs, help,
|
||||
network::{NetworkManager, ProxyType},
|
||||
tmpl,
|
||||
},
|
||||
};
|
||||
use anyhow::{bail, Context, Result};
|
||||
use reqwest::StatusCode;
|
||||
use anyhow::{Context, Result, bail};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_yaml::Mapping;
|
||||
use std::{fs, time::Duration};
|
||||
|
||||
use super::Config;
|
||||
use serde_yaml_ng::Mapping;
|
||||
use smartstring::alias::String;
|
||||
use std::time::Duration;
|
||||
use tokio::fs;
|
||||
|
||||
#[derive(Debug, Clone, Deserialize, Serialize, Default)]
|
||||
pub struct PrfItem {
|
||||
@@ -104,6 +106,10 @@ pub struct PrfOption {
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub danger_accept_invalid_certs: Option<bool>,
|
||||
|
||||
#[serde(default = "default_allow_auto_update")]
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub allow_auto_update: Option<bool>,
|
||||
|
||||
pub merge: Option<String>,
|
||||
|
||||
pub script: Option<String>,
|
||||
@@ -116,25 +122,29 @@ pub struct PrfOption {
|
||||
}
|
||||
|
||||
impl PrfOption {
|
||||
pub fn merge(one: Option<Self>, other: Option<Self>) -> Option<Self> {
|
||||
pub fn merge(one: Option<&Self>, other: Option<&Self>) -> Option<Self> {
|
||||
match (one, other) {
|
||||
(Some(mut a), Some(b)) => {
|
||||
a.user_agent = b.user_agent.or(a.user_agent);
|
||||
a.with_proxy = b.with_proxy.or(a.with_proxy);
|
||||
a.self_proxy = b.self_proxy.or(a.self_proxy);
|
||||
a.danger_accept_invalid_certs = b
|
||||
(Some(a_ref), Some(b_ref)) => {
|
||||
let mut result = a_ref.clone();
|
||||
result.user_agent = b_ref.user_agent.clone().or(result.user_agent);
|
||||
result.with_proxy = b_ref.with_proxy.or(result.with_proxy);
|
||||
result.self_proxy = b_ref.self_proxy.or(result.self_proxy);
|
||||
result.danger_accept_invalid_certs = b_ref
|
||||
.danger_accept_invalid_certs
|
||||
.or(a.danger_accept_invalid_certs);
|
||||
a.update_interval = b.update_interval.or(a.update_interval);
|
||||
a.merge = b.merge.or(a.merge);
|
||||
a.script = b.script.or(a.script);
|
||||
a.rules = b.rules.or(a.rules);
|
||||
a.proxies = b.proxies.or(a.proxies);
|
||||
a.groups = b.groups.or(a.groups);
|
||||
a.timeout_seconds = b.timeout_seconds.or(a.timeout_seconds);
|
||||
Some(a)
|
||||
.or(result.danger_accept_invalid_certs);
|
||||
result.allow_auto_update = b_ref.allow_auto_update.or(result.allow_auto_update);
|
||||
result.update_interval = b_ref.update_interval.or(result.update_interval);
|
||||
result.merge = b_ref.merge.clone().or(result.merge);
|
||||
result.script = b_ref.script.clone().or(result.script);
|
||||
result.rules = b_ref.rules.clone().or(result.rules);
|
||||
result.proxies = b_ref.proxies.clone().or(result.proxies);
|
||||
result.groups = b_ref.groups.clone().or(result.groups);
|
||||
result.timeout_seconds = b_ref.timeout_seconds.or(result.timeout_seconds);
|
||||
Some(result)
|
||||
}
|
||||
t => t.0.or(t.1),
|
||||
(Some(a_ref), None) => Some(a_ref.clone()),
|
||||
(None, Some(b_ref)) => Some(b_ref.clone()),
|
||||
(None, None) => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -142,25 +152,31 @@ impl PrfOption {
|
||||
impl PrfItem {
|
||||
/// From partial item
|
||||
/// must contain `itype`
|
||||
pub async fn from(item: PrfItem, file_data: Option<String>) -> Result<PrfItem> {
|
||||
pub async fn from(item: &PrfItem, file_data: Option<String>) -> Result<PrfItem> {
|
||||
if item.itype.is_none() {
|
||||
bail!("type should not be null");
|
||||
}
|
||||
|
||||
match item.itype.unwrap().as_str() {
|
||||
let itype = item
|
||||
.itype
|
||||
.as_ref()
|
||||
.ok_or_else(|| anyhow::anyhow!("type should not be null"))?;
|
||||
match itype.as_str() {
|
||||
"remote" => {
|
||||
if item.url.is_none() {
|
||||
bail!("url should not be null");
|
||||
}
|
||||
let url = item.url.as_ref().unwrap().as_str();
|
||||
let name = item.name;
|
||||
let desc = item.desc;
|
||||
PrfItem::from_url(url, name, desc, item.option).await
|
||||
let url = item
|
||||
.url
|
||||
.as_ref()
|
||||
.ok_or_else(|| anyhow::anyhow!("url should not be null"))?;
|
||||
let name = item.name.as_ref();
|
||||
let desc = item.desc.as_ref();
|
||||
let option = item.option.as_ref();
|
||||
PrfItem::from_url(url, name, desc, option).await
|
||||
}
|
||||
"local" => {
|
||||
let name = item.name.unwrap_or("Local File".into());
|
||||
let desc = item.desc.unwrap_or("".into());
|
||||
PrfItem::from_local(name, desc, file_data, item.option)
|
||||
let name = item.name.clone().unwrap_or_else(|| "Local File".into());
|
||||
let desc = item.desc.clone().unwrap_or_else(|| "".into());
|
||||
let option = item.option.as_ref();
|
||||
PrfItem::from_local(name, desc, file_data, option).await
|
||||
}
|
||||
typ => bail!("invalid profile item type \"{typ}\""),
|
||||
}
|
||||
@@ -168,14 +184,14 @@ impl PrfItem {
|
||||
|
||||
/// ## Local type
|
||||
/// create a new item from name/desc
|
||||
pub fn from_local(
|
||||
pub async fn from_local(
|
||||
name: String,
|
||||
desc: String,
|
||||
file_data: Option<String>,
|
||||
option: Option<PrfOption>,
|
||||
option: Option<&PrfOption>,
|
||||
) -> Result<PrfItem> {
|
||||
let uid = help::get_uid("L");
|
||||
let file = format!("{uid}.yaml");
|
||||
let uid = help::get_uid("L").into();
|
||||
let file = format!("{uid}.yaml").into();
|
||||
let opt_ref = option.as_ref();
|
||||
let update_interval = opt_ref.and_then(|o| o.update_interval);
|
||||
let mut merge = opt_ref.and_then(|o| o.merge.clone());
|
||||
@@ -185,31 +201,29 @@ impl PrfItem {
|
||||
let mut groups = opt_ref.and_then(|o| o.groups.clone());
|
||||
|
||||
if merge.is_none() {
|
||||
let merge_item = PrfItem::from_merge(None)?;
|
||||
Config::profiles().data().append_item(merge_item.clone())?;
|
||||
merge = merge_item.uid;
|
||||
let merge_item = &mut PrfItem::from_merge(None)?;
|
||||
profiles::profiles_append_item_safe(merge_item).await?;
|
||||
merge = merge_item.uid.clone();
|
||||
}
|
||||
if script.is_none() {
|
||||
let script_item = PrfItem::from_script(None)?;
|
||||
Config::profiles().data().append_item(script_item.clone())?;
|
||||
script = script_item.uid;
|
||||
let script_item = &mut PrfItem::from_script(None)?;
|
||||
profiles::profiles_append_item_safe(script_item).await?;
|
||||
script = script_item.uid.clone();
|
||||
}
|
||||
if rules.is_none() {
|
||||
let rules_item = PrfItem::from_rules()?;
|
||||
Config::profiles().data().append_item(rules_item.clone())?;
|
||||
rules = rules_item.uid;
|
||||
let rules_item = &mut PrfItem::from_rules()?;
|
||||
profiles::profiles_append_item_safe(rules_item).await?;
|
||||
rules = rules_item.uid.clone();
|
||||
}
|
||||
if proxies.is_none() {
|
||||
let proxies_item = PrfItem::from_proxies()?;
|
||||
Config::profiles()
|
||||
.data()
|
||||
.append_item(proxies_item.clone())?;
|
||||
proxies = proxies_item.uid;
|
||||
let proxies_item = &mut PrfItem::from_proxies()?;
|
||||
profiles::profiles_append_item_safe(proxies_item).await?;
|
||||
proxies = proxies_item.uid.clone();
|
||||
}
|
||||
if groups.is_none() {
|
||||
let groups_item = PrfItem::from_groups()?;
|
||||
Config::profiles().data().append_item(groups_item.clone())?;
|
||||
groups = groups_item.uid;
|
||||
let groups_item = &mut PrfItem::from_groups()?;
|
||||
profiles::profiles_append_item_safe(groups_item).await?;
|
||||
groups = groups_item.uid.clone();
|
||||
}
|
||||
Ok(PrfItem {
|
||||
uid: Some(uid),
|
||||
@@ -231,7 +245,7 @@ impl PrfItem {
|
||||
}),
|
||||
home: None,
|
||||
updated: Some(chrono::Local::now().timestamp() as usize),
|
||||
file_data: Some(file_data.unwrap_or(tmpl::ITEM_LOCAL.into())),
|
||||
file_data: Some(file_data.unwrap_or_else(|| tmpl::ITEM_LOCAL.into())),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -239,23 +253,23 @@ impl PrfItem {
|
||||
/// create a new item from url
|
||||
pub async fn from_url(
|
||||
url: &str,
|
||||
name: Option<String>,
|
||||
desc: Option<String>,
|
||||
option: Option<PrfOption>,
|
||||
name: Option<&String>,
|
||||
desc: Option<&String>,
|
||||
option: Option<&PrfOption>,
|
||||
) -> Result<PrfItem> {
|
||||
let opt_ref = option.as_ref();
|
||||
let with_proxy = opt_ref.is_some_and(|o| o.with_proxy.unwrap_or(false));
|
||||
let self_proxy = opt_ref.is_some_and(|o| o.self_proxy.unwrap_or(false));
|
||||
let with_proxy = option.is_some_and(|o| o.with_proxy.unwrap_or(false));
|
||||
let self_proxy = option.is_some_and(|o| o.self_proxy.unwrap_or(false));
|
||||
let accept_invalid_certs =
|
||||
opt_ref.is_some_and(|o| o.danger_accept_invalid_certs.unwrap_or(false));
|
||||
let user_agent = opt_ref.and_then(|o| o.user_agent.clone());
|
||||
let update_interval = opt_ref.and_then(|o| o.update_interval);
|
||||
let timeout = opt_ref.and_then(|o| o.timeout_seconds).unwrap_or(20);
|
||||
let mut merge = opt_ref.and_then(|o| o.merge.clone());
|
||||
let mut script = opt_ref.and_then(|o| o.script.clone());
|
||||
let mut rules = opt_ref.and_then(|o| o.rules.clone());
|
||||
let mut proxies = opt_ref.and_then(|o| o.proxies.clone());
|
||||
let mut groups = opt_ref.and_then(|o| o.groups.clone());
|
||||
option.is_some_and(|o| o.danger_accept_invalid_certs.unwrap_or(false));
|
||||
let allow_auto_update = option.map(|o| o.allow_auto_update.unwrap_or(true));
|
||||
let user_agent = option.and_then(|o| o.user_agent.clone());
|
||||
let update_interval = option.and_then(|o| o.update_interval);
|
||||
let timeout = option.and_then(|o| o.timeout_seconds).unwrap_or(20);
|
||||
let mut merge = option.and_then(|o| o.merge.clone());
|
||||
let mut script = option.and_then(|o| o.script.clone());
|
||||
let mut rules = option.and_then(|o| o.rules.clone());
|
||||
let mut proxies = option.and_then(|o| o.proxies.clone());
|
||||
let mut groups = option.and_then(|o| o.groups.clone());
|
||||
|
||||
// 选择代理类型
|
||||
let proxy_type = if self_proxy {
|
||||
@@ -267,7 +281,7 @@ impl PrfItem {
|
||||
};
|
||||
|
||||
// 使用网络管理器发送请求
|
||||
let resp = match NetworkManager::global()
|
||||
let resp = match NetworkManager::new()
|
||||
.get_with_interrupt(
|
||||
url,
|
||||
proxy_type,
|
||||
@@ -285,25 +299,34 @@ impl PrfItem {
|
||||
};
|
||||
|
||||
let status_code = resp.status();
|
||||
if !StatusCode::is_success(&status_code) {
|
||||
if !status_code.is_success() {
|
||||
bail!("failed to fetch remote profile with status {status_code}")
|
||||
}
|
||||
|
||||
let header = resp.headers();
|
||||
|
||||
// parse the Subscription UserInfo
|
||||
let extra = match header.get("Subscription-Userinfo") {
|
||||
Some(value) => {
|
||||
let sub_info = value.to_str().unwrap_or("");
|
||||
Some(PrfExtra {
|
||||
let extra;
|
||||
'extra: {
|
||||
for (k, v) in header.iter() {
|
||||
let key_lower = k.as_str().to_ascii_lowercase();
|
||||
// Accept standard custom-metadata prefixes (x-amz-meta-, x-obs-meta-, x-cos-meta-, etc.).
|
||||
if key_lower
|
||||
.strip_suffix("subscription-userinfo")
|
||||
.is_some_and(|prefix| prefix.is_empty() || prefix.ends_with('-'))
|
||||
{
|
||||
let sub_info = v.to_str().unwrap_or("");
|
||||
extra = Some(PrfExtra {
|
||||
upload: help::parse_str(sub_info, "upload").unwrap_or(0),
|
||||
download: help::parse_str(sub_info, "download").unwrap_or(0),
|
||||
total: help::parse_str(sub_info, "total").unwrap_or(0),
|
||||
expire: help::parse_str(sub_info, "expire").unwrap_or(0),
|
||||
})
|
||||
});
|
||||
break 'extra;
|
||||
}
|
||||
}
|
||||
extra = None;
|
||||
}
|
||||
None => None,
|
||||
};
|
||||
|
||||
// parse the Content-Disposition
|
||||
let filename = match header.get("Content-Disposition") {
|
||||
@@ -314,19 +337,20 @@ impl PrfItem {
|
||||
Some(filename) => {
|
||||
let iter = percent_encoding::percent_decode(filename.as_bytes());
|
||||
let filename = iter.decode_utf8().unwrap_or_default();
|
||||
filename.split("''").last().map(|s| s.to_string())
|
||||
filename.split("''").last().map(|s| s.into())
|
||||
}
|
||||
None => match help::parse_str::<String>(filename, "filename") {
|
||||
Some(filename) => {
|
||||
let filename = filename.trim_matches('"');
|
||||
Some(filename.to_string())
|
||||
Some(filename.into())
|
||||
}
|
||||
None => None,
|
||||
},
|
||||
}
|
||||
}
|
||||
None => Some(
|
||||
crate::utils::help::get_last_part_and_decode(url).unwrap_or("Remote File".into()),
|
||||
crate::utils::help::get_last_part_and_decode(url)
|
||||
.unwrap_or_else(|| "Remote File".into()),
|
||||
),
|
||||
};
|
||||
let update_interval = match update_interval {
|
||||
@@ -343,21 +367,25 @@ impl PrfItem {
|
||||
let home = match header.get("profile-web-page-url") {
|
||||
Some(value) => {
|
||||
let str_value = value.to_str().unwrap_or("");
|
||||
Some(str_value.to_string())
|
||||
Some(str_value.into())
|
||||
}
|
||||
None => None,
|
||||
};
|
||||
|
||||
let uid = help::get_uid("R");
|
||||
let file = format!("{uid}.yaml");
|
||||
let name = name.unwrap_or(filename.unwrap_or("Remote File".into()));
|
||||
let data = resp.text_with_charset("utf-8").await?;
|
||||
let uid = help::get_uid("R").into();
|
||||
let file = format!("{uid}.yaml").into();
|
||||
let name = name.map(|s| s.to_owned()).unwrap_or_else(|| {
|
||||
filename
|
||||
.map(|s| s.into())
|
||||
.unwrap_or_else(|| "Remote File".into())
|
||||
});
|
||||
let data = resp.text_with_charset()?;
|
||||
|
||||
// process the charset "UTF-8 with BOM"
|
||||
let data = data.trim_start_matches('\u{feff}');
|
||||
|
||||
// check the data whether the valid yaml format
|
||||
let yaml = serde_yaml::from_str::<Mapping>(data)
|
||||
let yaml = serde_yaml_ng::from_str::<Mapping>(data)
|
||||
.context("the remote profile data is invalid yaml")?;
|
||||
|
||||
if !yaml.contains_key("proxies") && !yaml.contains_key("proxy-providers") {
|
||||
@@ -365,38 +393,36 @@ impl PrfItem {
|
||||
}
|
||||
|
||||
if merge.is_none() {
|
||||
let merge_item = PrfItem::from_merge(None)?;
|
||||
Config::profiles().data().append_item(merge_item.clone())?;
|
||||
merge = merge_item.uid;
|
||||
let merge_item = &mut PrfItem::from_merge(None)?;
|
||||
profiles::profiles_append_item_safe(merge_item).await?;
|
||||
merge = merge_item.uid.clone();
|
||||
}
|
||||
if script.is_none() {
|
||||
let script_item = PrfItem::from_script(None)?;
|
||||
Config::profiles().data().append_item(script_item.clone())?;
|
||||
script = script_item.uid;
|
||||
let script_item = &mut PrfItem::from_script(None)?;
|
||||
profiles::profiles_append_item_safe(script_item).await?;
|
||||
script = script_item.uid.clone();
|
||||
}
|
||||
if rules.is_none() {
|
||||
let rules_item = PrfItem::from_rules()?;
|
||||
Config::profiles().data().append_item(rules_item.clone())?;
|
||||
rules = rules_item.uid;
|
||||
let rules_item = &mut PrfItem::from_rules()?;
|
||||
profiles::profiles_append_item_safe(rules_item).await?;
|
||||
rules = rules_item.uid.clone();
|
||||
}
|
||||
if proxies.is_none() {
|
||||
let proxies_item = PrfItem::from_proxies()?;
|
||||
Config::profiles()
|
||||
.data()
|
||||
.append_item(proxies_item.clone())?;
|
||||
proxies = proxies_item.uid;
|
||||
let proxies_item = &mut PrfItem::from_proxies()?;
|
||||
profiles::profiles_append_item_safe(proxies_item).await?;
|
||||
proxies = proxies_item.uid.clone();
|
||||
}
|
||||
if groups.is_none() {
|
||||
let groups_item = PrfItem::from_groups()?;
|
||||
Config::profiles().data().append_item(groups_item.clone())?;
|
||||
groups = groups_item.uid;
|
||||
let groups_item = &mut PrfItem::from_groups()?;
|
||||
profiles::profiles_append_item_safe(groups_item).await?;
|
||||
groups = groups_item.uid.clone();
|
||||
}
|
||||
|
||||
Ok(PrfItem {
|
||||
uid: Some(uid),
|
||||
itype: Some("remote".into()),
|
||||
name: Some(name),
|
||||
desc,
|
||||
desc: desc.cloned(),
|
||||
file: Some(file),
|
||||
url: Some(url.into()),
|
||||
selected: None,
|
||||
@@ -408,6 +434,7 @@ impl PrfItem {
|
||||
rules,
|
||||
proxies,
|
||||
groups,
|
||||
allow_auto_update,
|
||||
..PrfOption::default()
|
||||
}),
|
||||
home,
|
||||
@@ -419,13 +446,13 @@ impl PrfItem {
|
||||
/// ## Merge type (enhance)
|
||||
/// create the enhanced item by using `merge` rule
|
||||
pub fn from_merge(uid: Option<String>) -> Result<PrfItem> {
|
||||
let mut id = help::get_uid("m");
|
||||
let mut id = help::get_uid("m").into();
|
||||
let mut template = tmpl::ITEM_MERGE_EMPTY.into();
|
||||
if let Some(uid) = uid {
|
||||
id = uid;
|
||||
template = tmpl::ITEM_MERGE.into();
|
||||
}
|
||||
let file = format!("{id}.yaml");
|
||||
let file = format!("{id}.yaml").into();
|
||||
|
||||
Ok(PrfItem {
|
||||
uid: Some(id),
|
||||
@@ -446,11 +473,11 @@ impl PrfItem {
|
||||
/// ## Script type (enhance)
|
||||
/// create the enhanced item by using javascript quick.js
|
||||
pub fn from_script(uid: Option<String>) -> Result<PrfItem> {
|
||||
let mut id = help::get_uid("s");
|
||||
let mut id = help::get_uid("s").into();
|
||||
if let Some(uid) = uid {
|
||||
id = uid;
|
||||
}
|
||||
let file = format!("{id}.js"); // js ext
|
||||
let file = format!("{id}.js").into(); // js ext
|
||||
|
||||
Ok(PrfItem {
|
||||
uid: Some(id),
|
||||
@@ -470,8 +497,8 @@ impl PrfItem {
|
||||
|
||||
/// ## Rules type (enhance)
|
||||
pub fn from_rules() -> Result<PrfItem> {
|
||||
let uid = help::get_uid("r");
|
||||
let file = format!("{uid}.yaml"); // yaml ext
|
||||
let uid = help::get_uid("r").into();
|
||||
let file = format!("{uid}.yaml").into(); // yaml ext
|
||||
|
||||
Ok(PrfItem {
|
||||
uid: Some(uid),
|
||||
@@ -491,8 +518,8 @@ impl PrfItem {
|
||||
|
||||
/// ## Proxies type (enhance)
|
||||
pub fn from_proxies() -> Result<PrfItem> {
|
||||
let uid = help::get_uid("p");
|
||||
let file = format!("{uid}.yaml"); // yaml ext
|
||||
let uid = help::get_uid("p").into();
|
||||
let file = format!("{uid}.yaml").into(); // yaml ext
|
||||
|
||||
Ok(PrfItem {
|
||||
uid: Some(uid),
|
||||
@@ -512,8 +539,8 @@ impl PrfItem {
|
||||
|
||||
/// ## Groups type (enhance)
|
||||
pub fn from_groups() -> Result<PrfItem> {
|
||||
let uid = help::get_uid("g");
|
||||
let file = format!("{uid}.yaml"); // yaml ext
|
||||
let uid = help::get_uid("g").into();
|
||||
let file = format!("{uid}.yaml").into(); // yaml ext
|
||||
|
||||
Ok(PrfItem {
|
||||
uid: Some(uid),
|
||||
@@ -532,24 +559,32 @@ impl PrfItem {
|
||||
}
|
||||
|
||||
/// get the file data
|
||||
pub fn read_file(&self) -> Result<String> {
|
||||
if self.file.is_none() {
|
||||
bail!("could not find the file");
|
||||
}
|
||||
|
||||
let file = self.file.clone().unwrap();
|
||||
let path = dirs::app_profiles_dir()?.join(file);
|
||||
fs::read_to_string(path).context("failed to read the file")
|
||||
pub async fn read_file(&self) -> Result<String> {
|
||||
let file = self
|
||||
.file
|
||||
.as_ref()
|
||||
.ok_or_else(|| anyhow::anyhow!("could not find the file"))?;
|
||||
let path = dirs::app_profiles_dir()?.join(file.as_str());
|
||||
let content = fs::read_to_string(path)
|
||||
.await
|
||||
.context("failed to read the file")?;
|
||||
Ok(content.into())
|
||||
}
|
||||
|
||||
/// save the file data
|
||||
pub fn save_file(&self, data: String) -> Result<()> {
|
||||
if self.file.is_none() {
|
||||
bail!("could not find the file");
|
||||
}
|
||||
|
||||
let file = self.file.clone().unwrap();
|
||||
let path = dirs::app_profiles_dir()?.join(file);
|
||||
fs::write(path, data.as_bytes()).context("failed to save the file")
|
||||
pub async fn save_file(&self, data: String) -> Result<()> {
|
||||
let file = self
|
||||
.file
|
||||
.as_ref()
|
||||
.ok_or_else(|| anyhow::anyhow!("could not find the file"))?;
|
||||
let path = dirs::app_profiles_dir()?.join(file.as_str());
|
||||
fs::write(path, data.as_bytes())
|
||||
.await
|
||||
.context("failed to save the file")
|
||||
}
|
||||
}
|
||||
|
||||
// 向前兼容,默认为订阅启用自动更新
|
||||
fn default_allow_auto_update() -> Option<bool> {
|
||||
Some(true)
|
||||
}
|
||||
|
||||
@@ -1,9 +1,15 @@
|
||||
use super::{prfitem::PrfItem, PrfOption};
|
||||
use crate::utils::{dirs, help};
|
||||
use anyhow::{bail, Context, Result};
|
||||
use super::{PrfOption, prfitem::PrfItem};
|
||||
use crate::utils::{
|
||||
dirs::{self, PathBufExec},
|
||||
help,
|
||||
};
|
||||
use crate::{logging, utils::logging::Type};
|
||||
use anyhow::{Context, Result, bail};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_yaml::Mapping;
|
||||
use std::{collections::HashSet, fs, io::Write};
|
||||
use serde_yaml_ng::Mapping;
|
||||
use smartstring::alias::String;
|
||||
use std::collections::HashSet;
|
||||
use tokio::fs;
|
||||
|
||||
/// Define the `profiles.yaml` schema
|
||||
#[derive(Default, Debug, Clone, Deserialize, Serialize)]
|
||||
@@ -26,14 +32,27 @@ pub struct CleanupResult {
|
||||
macro_rules! patch {
|
||||
($lv: expr, $rv: expr, $key: tt) => {
|
||||
if ($rv.$key).is_some() {
|
||||
$lv.$key = $rv.$key;
|
||||
$lv.$key = $rv.$key.clone();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl IProfiles {
|
||||
pub fn new() -> Self {
|
||||
match dirs::profiles_path().and_then(|path| help::read_yaml::<Self>(&path)) {
|
||||
// Helper to find and remove an item by uid from the items vec, returning its file name (if any).
|
||||
fn take_item_file_by_uid(
|
||||
items: &mut Vec<PrfItem>,
|
||||
target_uid: Option<String>,
|
||||
) -> Option<String> {
|
||||
for (i, _) in items.iter().enumerate() {
|
||||
if items[i].uid == target_uid {
|
||||
return items.remove(i).file;
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
pub async fn new() -> Self {
|
||||
match dirs::profiles_path() {
|
||||
Ok(path) => match help::read_yaml::<Self>(&path).await {
|
||||
Ok(mut profiles) => {
|
||||
if profiles.items.is_none() {
|
||||
profiles.items = Some(vec![]);
|
||||
@@ -42,14 +61,19 @@ impl IProfiles {
|
||||
if let Some(items) = profiles.items.as_mut() {
|
||||
for item in items.iter_mut() {
|
||||
if item.uid.is_none() {
|
||||
item.uid = Some(help::get_uid("d"));
|
||||
item.uid = Some(help::get_uid("d").into());
|
||||
}
|
||||
}
|
||||
}
|
||||
profiles
|
||||
}
|
||||
Err(err) => {
|
||||
log::error!(target: "app", "{err}");
|
||||
logging!(error, Type::Config, "{err}");
|
||||
Self::template()
|
||||
}
|
||||
},
|
||||
Err(err) => {
|
||||
logging!(error, Type::Config, "{err}");
|
||||
Self::template()
|
||||
}
|
||||
}
|
||||
@@ -62,12 +86,13 @@ impl IProfiles {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn save_file(&self) -> Result<()> {
|
||||
pub async fn save_file(&self) -> Result<()> {
|
||||
help::save_yaml(
|
||||
&dirs::profiles_path()?,
|
||||
self,
|
||||
Some("# Profiles Config for Clash Verge"),
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
/// 只修改current,valid和chain
|
||||
@@ -76,8 +101,9 @@ impl IProfiles {
|
||||
self.items = Some(vec![]);
|
||||
}
|
||||
|
||||
if let Some(current) = patch.current {
|
||||
let items = self.items.as_ref().unwrap();
|
||||
if let Some(current) = patch.current
|
||||
&& let Some(items) = self.items.as_ref()
|
||||
{
|
||||
let some_uid = Some(current);
|
||||
if items.iter().any(|e| e.uid == some_uid) {
|
||||
self.current = some_uid;
|
||||
@@ -97,28 +123,30 @@ impl IProfiles {
|
||||
}
|
||||
|
||||
/// find the item by the uid
|
||||
pub fn get_item(&self, uid: &String) -> Result<&PrfItem> {
|
||||
if let Some(items) = self.items.as_ref() {
|
||||
let some_uid = Some(uid.clone());
|
||||
pub fn get_item(&self, uid: impl AsRef<str>) -> Result<&PrfItem> {
|
||||
let uid_str = uid.as_ref();
|
||||
|
||||
if let Some(items) = self.items.as_ref() {
|
||||
for each in items.iter() {
|
||||
if each.uid == some_uid {
|
||||
if let Some(uid_val) = &each.uid
|
||||
&& uid_val.as_str() == uid_str
|
||||
{
|
||||
return Ok(each);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
bail!("failed to get the profile item \"uid:{uid}\"");
|
||||
bail!("failed to get the profile item \"uid:{}\"", uid_str);
|
||||
}
|
||||
|
||||
/// append new item
|
||||
/// if the file_data is some
|
||||
/// then should save the data to file
|
||||
pub fn append_item(&mut self, mut item: PrfItem) -> Result<()> {
|
||||
if item.uid.is_none() {
|
||||
pub async fn append_item(&mut self, item: &mut PrfItem) -> Result<()> {
|
||||
let uid = &item.uid;
|
||||
if uid.is_none() {
|
||||
bail!("the uid should not be null");
|
||||
}
|
||||
let uid = item.uid.clone();
|
||||
|
||||
// save the file data
|
||||
// move the field value after save
|
||||
@@ -127,19 +155,20 @@ impl IProfiles {
|
||||
bail!("the file should not be null");
|
||||
}
|
||||
|
||||
let file = item.file.clone().unwrap();
|
||||
let path = dirs::app_profiles_dir()?.join(&file);
|
||||
let file = item.file.clone().ok_or_else(|| {
|
||||
anyhow::anyhow!("file field is required when file_data is provided")
|
||||
})?;
|
||||
let path = dirs::app_profiles_dir()?.join(file.as_str());
|
||||
|
||||
fs::File::create(path)
|
||||
.with_context(|| format!("failed to create file \"{file}\""))?
|
||||
.write(file_data.as_bytes())
|
||||
fs::write(&path, file_data.as_bytes())
|
||||
.await
|
||||
.with_context(|| format!("failed to write to file \"{file}\""))?;
|
||||
}
|
||||
|
||||
if self.current.is_none()
|
||||
&& (item.itype == Some("remote".to_string()) || item.itype == Some("local".to_string()))
|
||||
&& (item.itype == Some("remote".into()) || item.itype == Some("local".into()))
|
||||
{
|
||||
self.current = uid;
|
||||
self.current = uid.to_owned();
|
||||
}
|
||||
|
||||
if self.items.is_none() {
|
||||
@@ -147,42 +176,43 @@ impl IProfiles {
|
||||
}
|
||||
|
||||
if let Some(items) = self.items.as_mut() {
|
||||
items.push(item)
|
||||
items.push(item.to_owned());
|
||||
}
|
||||
|
||||
self.save_file()
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// reorder items
|
||||
pub fn reorder(&mut self, active_id: String, over_id: String) -> Result<()> {
|
||||
pub async fn reorder(&mut self, active_id: &String, over_id: &String) -> Result<()> {
|
||||
let mut items = self.items.take().unwrap_or_default();
|
||||
let mut old_index = None;
|
||||
let mut new_index = None;
|
||||
|
||||
for (i, _) in items.iter().enumerate() {
|
||||
if items[i].uid == Some(active_id.clone()) {
|
||||
if items[i].uid.as_ref() == Some(active_id) {
|
||||
old_index = Some(i);
|
||||
}
|
||||
if items[i].uid == Some(over_id.clone()) {
|
||||
if items[i].uid.as_ref() == Some(over_id) {
|
||||
new_index = Some(i);
|
||||
}
|
||||
}
|
||||
|
||||
if old_index.is_none() || new_index.is_none() {
|
||||
return Ok(());
|
||||
}
|
||||
let item = items.remove(old_index.unwrap());
|
||||
items.insert(new_index.unwrap(), item);
|
||||
let (old_idx, new_idx) = match (old_index, new_index) {
|
||||
(Some(old), Some(new)) => (old, new),
|
||||
_ => return Ok(()),
|
||||
};
|
||||
let item = items.remove(old_idx);
|
||||
items.insert(new_idx, item);
|
||||
self.items = Some(items);
|
||||
self.save_file()
|
||||
self.save_file().await
|
||||
}
|
||||
|
||||
/// update the item value
|
||||
pub fn patch_item(&mut self, uid: String, item: PrfItem) -> Result<()> {
|
||||
pub async fn patch_item(&mut self, uid: &String, item: &PrfItem) -> Result<()> {
|
||||
let mut items = self.items.take().unwrap_or_default();
|
||||
|
||||
for each in items.iter_mut() {
|
||||
if each.uid == Some(uid.clone()) {
|
||||
if each.uid.as_ref() == Some(uid) {
|
||||
patch!(each, item, itype);
|
||||
patch!(each, item, name);
|
||||
patch!(each, item, desc);
|
||||
@@ -194,7 +224,7 @@ impl IProfiles {
|
||||
patch!(each, item, option);
|
||||
|
||||
self.items = Some(items);
|
||||
return self.save_file();
|
||||
return self.save_file().await;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -204,13 +234,13 @@ impl IProfiles {
|
||||
|
||||
/// be used to update the remote item
|
||||
/// only patch `updated` `extra` `file_data`
|
||||
pub fn update_item(&mut self, uid: String, mut item: PrfItem) -> Result<()> {
|
||||
pub async fn update_item(&mut self, uid: &String, item: &mut PrfItem) -> Result<()> {
|
||||
if self.items.is_none() {
|
||||
self.items = Some(vec![]);
|
||||
}
|
||||
|
||||
// find the item
|
||||
let _ = self.get_item(&uid)?;
|
||||
let _ = self.get_item(uid)?;
|
||||
|
||||
if let Some(items) = self.items.as_mut() {
|
||||
let some_uid = Some(uid.clone());
|
||||
@@ -219,23 +249,25 @@ impl IProfiles {
|
||||
if each.uid == some_uid {
|
||||
each.extra = item.extra;
|
||||
each.updated = item.updated;
|
||||
each.home = item.home;
|
||||
each.option = PrfOption::merge(each.option.clone(), item.option);
|
||||
each.home = item.home.to_owned();
|
||||
each.option = PrfOption::merge(each.option.as_ref(), item.option.as_ref());
|
||||
// save the file data
|
||||
// move the field value after save
|
||||
if let Some(file_data) = item.file_data.take() {
|
||||
let file = each.file.take();
|
||||
let file =
|
||||
file.unwrap_or(item.file.take().unwrap_or(format!("{}.yaml", &uid)));
|
||||
let file = file.unwrap_or_else(|| {
|
||||
item.file
|
||||
.take()
|
||||
.unwrap_or_else(|| format!("{}.yaml", &uid).into())
|
||||
});
|
||||
|
||||
// the file must exists
|
||||
each.file = Some(file.clone());
|
||||
|
||||
let path = dirs::app_profiles_dir()?.join(&file);
|
||||
let path = dirs::app_profiles_dir()?.join(file.as_str());
|
||||
|
||||
fs::File::create(path)
|
||||
.with_context(|| format!("failed to create file \"{file}\""))?
|
||||
.write(file_data.as_bytes())
|
||||
fs::write(&path, file_data.as_bytes())
|
||||
.await
|
||||
.with_context(|| format!("failed to write to file \"{file}\""))?;
|
||||
}
|
||||
|
||||
@@ -244,137 +276,66 @@ impl IProfiles {
|
||||
}
|
||||
}
|
||||
|
||||
self.save_file()
|
||||
self.save_file().await
|
||||
}
|
||||
|
||||
/// delete item
|
||||
/// if delete the current then return true
|
||||
pub fn delete_item(&mut self, uid: String) -> Result<bool> {
|
||||
let current = self.current.as_ref().unwrap_or(&uid);
|
||||
pub async fn delete_item(&mut self, uid: &String) -> Result<bool> {
|
||||
let current = self.current.as_ref().unwrap_or(uid);
|
||||
let current = current.clone();
|
||||
let item = self.get_item(&uid)?;
|
||||
let item = self.get_item(uid)?;
|
||||
let merge_uid = item.option.as_ref().and_then(|e| e.merge.clone());
|
||||
let script_uid = item.option.as_ref().and_then(|e| e.script.clone());
|
||||
let rules_uid = item.option.as_ref().and_then(|e| e.rules.clone());
|
||||
let proxies_uid = item.option.as_ref().and_then(|e| e.proxies.clone());
|
||||
let groups_uid = item.option.as_ref().and_then(|e| e.groups.clone());
|
||||
let mut items = self.items.take().unwrap_or_default();
|
||||
let mut index = None;
|
||||
let mut merge_index = None;
|
||||
let mut script_index = None;
|
||||
let mut rules_index = None;
|
||||
let mut proxies_index = None;
|
||||
let mut groups_index = None;
|
||||
|
||||
// get the index
|
||||
for (i, _) in items.iter().enumerate() {
|
||||
if items[i].uid == Some(uid.clone()) {
|
||||
index = Some(i);
|
||||
break;
|
||||
// remove the main item (if exists) and delete its file
|
||||
if let Some(file) = Self::take_item_file_by_uid(&mut items, Some(uid.clone())) {
|
||||
let _ = dirs::app_profiles_dir()?
|
||||
.join(file.as_str())
|
||||
.remove_if_exists()
|
||||
.await;
|
||||
}
|
||||
|
||||
// remove related extension items (merge, script, rules, proxies, groups)
|
||||
if let Some(file) = Self::take_item_file_by_uid(&mut items, merge_uid.clone()) {
|
||||
let _ = dirs::app_profiles_dir()?
|
||||
.join(file.as_str())
|
||||
.remove_if_exists()
|
||||
.await;
|
||||
}
|
||||
if let Some(index) = index {
|
||||
if let Some(file) = items.remove(index).file {
|
||||
let _ = dirs::app_profiles_dir().map(|path| {
|
||||
let path = path.join(file);
|
||||
if path.exists() {
|
||||
let _ = fs::remove_file(path);
|
||||
if let Some(file) = Self::take_item_file_by_uid(&mut items, script_uid.clone()) {
|
||||
let _ = dirs::app_profiles_dir()?
|
||||
.join(file.as_str())
|
||||
.remove_if_exists()
|
||||
.await;
|
||||
}
|
||||
});
|
||||
if let Some(file) = Self::take_item_file_by_uid(&mut items, rules_uid.clone()) {
|
||||
let _ = dirs::app_profiles_dir()?
|
||||
.join(file.as_str())
|
||||
.remove_if_exists()
|
||||
.await;
|
||||
}
|
||||
if let Some(file) = Self::take_item_file_by_uid(&mut items, proxies_uid.clone()) {
|
||||
let _ = dirs::app_profiles_dir()?
|
||||
.join(file.as_str())
|
||||
.remove_if_exists()
|
||||
.await;
|
||||
}
|
||||
// get the merge index
|
||||
for (i, _) in items.iter().enumerate() {
|
||||
if items[i].uid == merge_uid {
|
||||
merge_index = Some(i);
|
||||
break;
|
||||
}
|
||||
}
|
||||
if let Some(index) = merge_index {
|
||||
if let Some(file) = items.remove(index).file {
|
||||
let _ = dirs::app_profiles_dir().map(|path| {
|
||||
let path = path.join(file);
|
||||
if path.exists() {
|
||||
let _ = fs::remove_file(path);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
// get the script index
|
||||
for (i, _) in items.iter().enumerate() {
|
||||
if items[i].uid == script_uid {
|
||||
script_index = Some(i);
|
||||
break;
|
||||
}
|
||||
}
|
||||
if let Some(index) = script_index {
|
||||
if let Some(file) = items.remove(index).file {
|
||||
let _ = dirs::app_profiles_dir().map(|path| {
|
||||
let path = path.join(file);
|
||||
if path.exists() {
|
||||
let _ = fs::remove_file(path);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
// get the rules index
|
||||
for (i, _) in items.iter().enumerate() {
|
||||
if items[i].uid == rules_uid {
|
||||
rules_index = Some(i);
|
||||
break;
|
||||
}
|
||||
}
|
||||
if let Some(index) = rules_index {
|
||||
if let Some(file) = items.remove(index).file {
|
||||
let _ = dirs::app_profiles_dir().map(|path| {
|
||||
let path = path.join(file);
|
||||
if path.exists() {
|
||||
let _ = fs::remove_file(path);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
// get the proxies index
|
||||
for (i, _) in items.iter().enumerate() {
|
||||
if items[i].uid == proxies_uid {
|
||||
proxies_index = Some(i);
|
||||
break;
|
||||
}
|
||||
}
|
||||
if let Some(index) = proxies_index {
|
||||
if let Some(file) = items.remove(index).file {
|
||||
let _ = dirs::app_profiles_dir().map(|path| {
|
||||
let path = path.join(file);
|
||||
if path.exists() {
|
||||
let _ = fs::remove_file(path);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
// get the groups index
|
||||
for (i, _) in items.iter().enumerate() {
|
||||
if items[i].uid == groups_uid {
|
||||
groups_index = Some(i);
|
||||
break;
|
||||
}
|
||||
}
|
||||
if let Some(index) = groups_index {
|
||||
if let Some(file) = items.remove(index).file {
|
||||
let _ = dirs::app_profiles_dir().map(|path| {
|
||||
let path = path.join(file);
|
||||
if path.exists() {
|
||||
let _ = fs::remove_file(path);
|
||||
}
|
||||
});
|
||||
}
|
||||
if let Some(file) = Self::take_item_file_by_uid(&mut items, groups_uid.clone()) {
|
||||
let _ = dirs::app_profiles_dir()?
|
||||
.join(file.as_str())
|
||||
.remove_if_exists()
|
||||
.await;
|
||||
}
|
||||
// delete the original uid
|
||||
if current == uid {
|
||||
if current == *uid {
|
||||
self.current = None;
|
||||
for item in items.iter() {
|
||||
if item.itype == Some("remote".to_string())
|
||||
|| item.itype == Some("local".to_string())
|
||||
{
|
||||
if item.itype == Some("remote".into()) || item.itype == Some("local".into()) {
|
||||
self.current = item.uid.clone();
|
||||
break;
|
||||
}
|
||||
@@ -382,20 +343,20 @@ impl IProfiles {
|
||||
}
|
||||
|
||||
self.items = Some(items);
|
||||
self.save_file()?;
|
||||
Ok(current == uid)
|
||||
self.save_file().await?;
|
||||
Ok(current == *uid)
|
||||
}
|
||||
|
||||
/// 获取current指向的订阅内容
|
||||
pub fn current_mapping(&self) -> Result<Mapping> {
|
||||
pub async fn current_mapping(&self) -> Result<Mapping> {
|
||||
match (self.current.as_ref(), self.items.as_ref()) {
|
||||
(Some(current), Some(items)) => {
|
||||
if let Some(item) = items.iter().find(|e| e.uid.as_ref() == Some(current)) {
|
||||
let file_path = match item.file.as_ref() {
|
||||
Some(file) => dirs::app_profiles_dir()?.join(file),
|
||||
Some(file) => dirs::app_profiles_dir()?.join(file.as_str()),
|
||||
None => bail!("failed to get the file field"),
|
||||
};
|
||||
return help::read_mapping(&file_path);
|
||||
return help::read_mapping(&file_path).await;
|
||||
}
|
||||
bail!("failed to find the current profile \"uid:{current}\"");
|
||||
}
|
||||
@@ -495,7 +456,7 @@ impl IProfiles {
|
||||
}
|
||||
|
||||
/// 以 app 中的 profile 列表为准,删除不再需要的文件
|
||||
pub fn cleanup_orphaned_files(&self) -> Result<CleanupResult> {
|
||||
pub async fn cleanup_orphaned_files(&self) -> Result<CleanupResult> {
|
||||
let profiles_dir = dirs::app_profiles_dir()?;
|
||||
|
||||
if !profiles_dir.exists() {
|
||||
@@ -527,25 +488,29 @@ impl IProfiles {
|
||||
|
||||
total_files += 1;
|
||||
|
||||
if let Some(file_name) = path.file_name().and_then(|n| n.to_str()) {
|
||||
if Self::is_profile_file(file_name) {
|
||||
if let Some(file_name) = path.file_name().and_then(|n| n.to_str())
|
||||
&& Self::is_profile_file(file_name)
|
||||
{
|
||||
// 检查是否为全局扩展文件
|
||||
if protected_files.contains(file_name) {
|
||||
log::debug!(target: "app", "保护全局扩展配置文件: {file_name}");
|
||||
logging!(debug, Type::Config, "保护全局扩展配置文件: {file_name}");
|
||||
continue;
|
||||
}
|
||||
|
||||
// 检查是否为活跃文件
|
||||
if !active_files.contains(file_name) {
|
||||
match std::fs::remove_file(&path) {
|
||||
match path.to_path_buf().remove_if_exists().await {
|
||||
Ok(_) => {
|
||||
deleted_files.push(file_name.to_string());
|
||||
log::info!(target: "app", "已清理冗余文件: {file_name}");
|
||||
deleted_files.push(file_name.into());
|
||||
logging!(info, Type::Config, "已清理冗余文件: {file_name}");
|
||||
}
|
||||
Err(e) => {
|
||||
failed_deletions.push(format!("{file_name}: {e}"));
|
||||
log::warn!(target: "app", "清理文件失败: {file_name} - {e}");
|
||||
}
|
||||
failed_deletions.push(format!("{file_name}: {e}").into());
|
||||
logging!(
|
||||
warn,
|
||||
Type::Config,
|
||||
"Warning: 清理文件失败: {file_name} - {e}"
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -558,8 +523,9 @@ impl IProfiles {
|
||||
failed_deletions,
|
||||
};
|
||||
|
||||
log::info!(
|
||||
target: "app",
|
||||
logging!(
|
||||
info,
|
||||
Type::Config,
|
||||
"Profile 文件清理完成: 总文件数={}, 删除文件数={}, 失败数={}",
|
||||
result.total_files,
|
||||
result.deleted_files.len(),
|
||||
@@ -573,8 +539,8 @@ impl IProfiles {
|
||||
fn get_protected_global_files(&self) -> HashSet<String> {
|
||||
let mut protected_files = HashSet::new();
|
||||
|
||||
protected_files.insert("Merge.yaml".to_string());
|
||||
protected_files.insert("Script.js".to_string());
|
||||
protected_files.insert("Merge.yaml".into());
|
||||
protected_files.insert("Script.js".into());
|
||||
|
||||
protected_files
|
||||
}
|
||||
@@ -591,54 +557,48 @@ impl IProfiles {
|
||||
}
|
||||
|
||||
// 对于主 profile 类型(remote/local),还需要收集其关联的扩展文件
|
||||
if let Some(itype) = &item.itype {
|
||||
if itype == "remote" || itype == "local" {
|
||||
if let Some(option) = &item.option {
|
||||
if let Some(itype) = &item.itype
|
||||
&& (itype == "remote" || itype == "local")
|
||||
&& let Some(option) = &item.option
|
||||
{
|
||||
// 收集关联的扩展文件
|
||||
if let Some(merge_uid) = &option.merge {
|
||||
if let Ok(merge_item) = self.get_item(merge_uid) {
|
||||
if let Some(file) = &merge_item.file {
|
||||
if let Some(merge_uid) = &option.merge
|
||||
&& let Ok(merge_item) = self.get_item(merge_uid)
|
||||
&& let Some(file) = &merge_item.file
|
||||
{
|
||||
active_files.insert(file.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(script_uid) = &option.script {
|
||||
if let Ok(script_item) = self.get_item(script_uid) {
|
||||
if let Some(file) = &script_item.file {
|
||||
if let Some(script_uid) = &option.script
|
||||
&& let Ok(script_item) = self.get_item(script_uid)
|
||||
&& let Some(file) = &script_item.file
|
||||
{
|
||||
active_files.insert(file.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(rules_uid) = &option.rules {
|
||||
if let Ok(rules_item) = self.get_item(rules_uid) {
|
||||
if let Some(file) = &rules_item.file {
|
||||
if let Some(rules_uid) = &option.rules
|
||||
&& let Ok(rules_item) = self.get_item(rules_uid)
|
||||
&& let Some(file) = &rules_item.file
|
||||
{
|
||||
active_files.insert(file.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(proxies_uid) = &option.proxies {
|
||||
if let Ok(proxies_item) = self.get_item(proxies_uid) {
|
||||
if let Some(file) = &proxies_item.file {
|
||||
if let Some(proxies_uid) = &option.proxies
|
||||
&& let Ok(proxies_item) = self.get_item(proxies_uid)
|
||||
&& let Some(file) = &proxies_item.file
|
||||
{
|
||||
active_files.insert(file.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(groups_uid) = &option.groups {
|
||||
if let Ok(groups_item) = self.get_item(groups_uid) {
|
||||
if let Some(file) = &groups_item.file {
|
||||
if let Some(groups_uid) = &option.groups
|
||||
&& let Ok(groups_item) = self.get_item(groups_uid)
|
||||
&& let Some(file) = &groups_item.file
|
||||
{
|
||||
active_files.insert(file.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
active_files
|
||||
}
|
||||
@@ -667,23 +627,75 @@ impl IProfiles {
|
||||
.unwrap_or(false)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn auto_cleanup(&self) -> Result<()> {
|
||||
match self.cleanup_orphaned_files() {
|
||||
Ok(result) => {
|
||||
if !result.deleted_files.is_empty() {
|
||||
log::info!(
|
||||
target: "app",
|
||||
"自动清理完成,删除了 {} 个冗余文件",
|
||||
result.deleted_files.len()
|
||||
);
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
Err(e) => {
|
||||
log::warn!(target: "app", "自动清理失败: {e}");
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 特殊的Send-safe helper函数,完全避免跨await持有guard
|
||||
use crate::config::Config;
|
||||
|
||||
pub async fn profiles_append_item_with_filedata_safe(
|
||||
item: &PrfItem,
|
||||
file_data: Option<String>,
|
||||
) -> Result<()> {
|
||||
let item = &mut PrfItem::from(item, file_data).await?;
|
||||
profiles_append_item_safe(item).await
|
||||
}
|
||||
|
||||
pub async fn profiles_append_item_safe(item: &mut PrfItem) -> Result<()> {
|
||||
Config::profiles()
|
||||
.await
|
||||
.with_data_modify(|mut profiles| async move {
|
||||
profiles.append_item(item).await?;
|
||||
Ok((profiles, ()))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn profiles_patch_item_safe(index: &String, item: &PrfItem) -> Result<()> {
|
||||
Config::profiles()
|
||||
.await
|
||||
.with_data_modify(|mut profiles| async move {
|
||||
profiles.patch_item(index, item).await?;
|
||||
Ok((profiles, ()))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn profiles_delete_item_safe(index: &String) -> Result<bool> {
|
||||
Config::profiles()
|
||||
.await
|
||||
.with_data_modify(|mut profiles| async move {
|
||||
let deleted = profiles.delete_item(index).await?;
|
||||
Ok((profiles, deleted))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn profiles_reorder_safe(active_id: &String, over_id: &String) -> Result<()> {
|
||||
Config::profiles()
|
||||
.await
|
||||
.with_data_modify(|mut profiles| async move {
|
||||
profiles.reorder(active_id, over_id).await?;
|
||||
Ok((profiles, ()))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn profiles_save_file_safe() -> Result<()> {
|
||||
Config::profiles()
|
||||
.await
|
||||
.with_data_modify(|profiles| async move {
|
||||
profiles.save_file().await?;
|
||||
Ok((profiles, ()))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn profiles_draft_update_item_safe(index: &String, item: &mut PrfItem) -> Result<()> {
|
||||
Config::profiles()
|
||||
.await
|
||||
.with_data_modify(|mut profiles| async move {
|
||||
profiles.update_item(index, item).await?;
|
||||
Ok((profiles, ()))
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
use crate::enhance::field::use_keys;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_yaml::{Mapping, Value};
|
||||
use serde_yaml_ng::{Mapping, Value};
|
||||
use smartstring::alias::String;
|
||||
use std::collections::HashMap;
|
||||
|
||||
#[derive(Default, Debug, Clone, Deserialize, Serialize)]
|
||||
pub struct IRuntime {
|
||||
pub config: Option<Mapping>,
|
||||
@@ -30,15 +32,15 @@ impl IRuntime {
|
||||
let patch_tun = patch.get("tun");
|
||||
if patch_tun.is_some() {
|
||||
let tun = config.get("tun");
|
||||
let mut tun = tun.map_or(Mapping::new(), |val| {
|
||||
val.as_mapping().cloned().unwrap_or(Mapping::new())
|
||||
let mut tun: Mapping = tun.map_or_else(Mapping::new, |val| {
|
||||
val.as_mapping().cloned().unwrap_or_else(Mapping::new)
|
||||
});
|
||||
let patch_tun = patch_tun.map_or(Mapping::new(), |val| {
|
||||
val.as_mapping().cloned().unwrap_or(Mapping::new())
|
||||
let patch_tun = patch_tun.map_or_else(Mapping::new, |val| {
|
||||
val.as_mapping().cloned().unwrap_or_else(Mapping::new)
|
||||
});
|
||||
use_keys(&patch_tun).into_iter().for_each(|key| {
|
||||
if let Some(value) = patch_tun.get(&key).to_owned() {
|
||||
tun.insert(key.into(), value.clone());
|
||||
if let Some(value) = patch_tun.get(key.as_str()) {
|
||||
tun.insert(Value::from(key.as_str()), value.clone());
|
||||
}
|
||||
});
|
||||
|
||||
@@ -46,4 +48,76 @@ impl IRuntime {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//跟新链式代理配置文件
|
||||
/// {
|
||||
/// "proxies":[
|
||||
/// {
|
||||
/// name : 入口节点,
|
||||
/// type: xxx
|
||||
/// server: xxx
|
||||
/// port: xxx
|
||||
/// ports: xxx
|
||||
/// password: xxx
|
||||
/// skip-cert-verify: xxx,
|
||||
/// },
|
||||
/// {
|
||||
/// name : hop_node_1_xxxx,
|
||||
/// type: xxx
|
||||
/// server: xxx
|
||||
/// port: xxx
|
||||
/// ports: xxx
|
||||
/// password: xxx
|
||||
/// skip-cert-verify: xxx,
|
||||
/// dialer-proxy : "入口节点"
|
||||
/// },
|
||||
/// {
|
||||
/// name : 出口节点,
|
||||
/// type: xxx
|
||||
/// server: xxx
|
||||
/// port: xxx
|
||||
/// ports: xxx
|
||||
/// password: xxx
|
||||
/// skip-cert-verify: xxx,
|
||||
/// dialer-proxy : "hop_node_1_xxxx"
|
||||
/// }
|
||||
/// ],
|
||||
/// "proxy-groups" : [
|
||||
/// {
|
||||
/// name : "proxy_chain",
|
||||
/// type: "select",
|
||||
/// proxies ["出口节点"]
|
||||
/// }
|
||||
/// ]
|
||||
/// }
|
||||
///
|
||||
/// 传入none 为删除
|
||||
pub fn update_proxy_chain_config(&mut self, proxy_chain_config: Option<Value>) {
|
||||
if let Some(config) = self.config.as_mut() {
|
||||
if let Some(Value::Sequence(proxies)) = config.get_mut("proxies") {
|
||||
proxies.iter_mut().for_each(|proxy| {
|
||||
if let Some(proxy) = proxy.as_mapping_mut()
|
||||
&& proxy.get("dialer-proxy").is_some()
|
||||
{
|
||||
proxy.remove("dialer-proxy");
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if let Some(Value::Sequence(dialer_proxies)) = proxy_chain_config
|
||||
&& let Some(Value::Sequence(proxies)) = config.get_mut("proxies")
|
||||
{
|
||||
for (i, dialer_proxy) in dialer_proxies.iter().enumerate() {
|
||||
if let Some(Value::Mapping(proxy)) = proxies
|
||||
.iter_mut()
|
||||
.find(|proxy| proxy.get("name") == Some(dialer_proxy))
|
||||
&& i != 0
|
||||
&& let Some(dialer_proxy) = dialer_proxies.get(i - 1)
|
||||
{
|
||||
proxy.insert("dialer-proxy".into(), dialer_proxy.to_owned());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,13 @@
|
||||
use crate::config::Config;
|
||||
use crate::{
|
||||
config::{deserialize_encrypted, serialize_encrypted, DEFAULT_PAC},
|
||||
config::{DEFAULT_PAC, deserialize_encrypted, serialize_encrypted},
|
||||
logging,
|
||||
utils::{dirs, help, i18n, logging::Type},
|
||||
};
|
||||
use anyhow::Result;
|
||||
use log::LevelFilter;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use smartstring::alias::String;
|
||||
|
||||
/// ### `verge.yaml` schema
|
||||
#[derive(Default, Debug, Clone, Deserialize, Serialize)]
|
||||
@@ -14,6 +16,12 @@ pub struct IVerge {
|
||||
/// silent | error | warn | info | debug | trace
|
||||
pub app_log_level: Option<String>,
|
||||
|
||||
/// app log max size in KB
|
||||
pub app_log_max_size: Option<u64>,
|
||||
|
||||
/// app log max count
|
||||
pub app_log_max_count: Option<usize>,
|
||||
|
||||
// i18n
|
||||
pub language: Option<String>,
|
||||
|
||||
@@ -50,6 +58,9 @@ pub struct IVerge {
|
||||
/// menu icon
|
||||
pub menu_icon: Option<String>,
|
||||
|
||||
/// menu order
|
||||
pub menu_order: Option<Vec<String>>,
|
||||
|
||||
/// sysproxy tray icon
|
||||
pub sysproxy_tray_icon: Option<bool>,
|
||||
|
||||
@@ -125,6 +136,9 @@ pub struct IVerge {
|
||||
/// 默认的延迟测试超时时间
|
||||
pub default_latency_timeout: Option<i32>,
|
||||
|
||||
/// 是否自动检测当前节点延迟
|
||||
pub enable_auto_delay_detection: Option<bool>,
|
||||
|
||||
/// 是否使用内部的脚本支持,默认为真
|
||||
pub enable_builtin_enhanced: Option<bool>,
|
||||
|
||||
@@ -138,9 +152,6 @@ pub struct IVerge {
|
||||
/// 0: 不清理; 1: 1天;2: 7天; 3: 30天; 4: 90天
|
||||
pub auto_log_clean: Option<i32>,
|
||||
|
||||
/// 是否启用随机端口
|
||||
pub enable_random_port: Option<bool>,
|
||||
|
||||
/// verge 的各种 port 用于覆盖 clash 的各种 port
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
pub verge_redir_port: Option<u16>,
|
||||
@@ -193,7 +204,9 @@ pub struct IVerge {
|
||||
|
||||
pub enable_tray_speed: Option<bool>,
|
||||
|
||||
pub enable_tray_icon: Option<bool>,
|
||||
// pub enable_tray_icon: Option<bool>,
|
||||
/// show proxy groups directly on tray root menu
|
||||
pub tray_inline_proxy_groups: Option<bool>,
|
||||
|
||||
/// 自动进入轻量模式
|
||||
pub enable_auto_light_weight_mode: Option<bool>,
|
||||
@@ -204,8 +217,11 @@ pub struct IVerge {
|
||||
/// 启用代理页面自动滚动
|
||||
pub enable_hover_jump_navigator: Option<bool>,
|
||||
|
||||
/// 服务状态跟踪
|
||||
pub service_state: Option<crate::core::service::ServiceState>,
|
||||
/// 代理页面自动滚动延迟(毫秒)
|
||||
pub hover_jump_navigator_delay: Option<u64>,
|
||||
|
||||
/// 启用外部控制器
|
||||
pub enable_external_controller: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, Deserialize, Serialize)]
|
||||
@@ -237,9 +253,9 @@ impl IVerge {
|
||||
pub const VALID_CLASH_CORES: &'static [&'static str] = &["verge-mihomo", "verge-mihomo-alpha"];
|
||||
|
||||
/// 验证并修正配置文件中的clash_core值
|
||||
pub fn validate_and_fix_config() -> Result<()> {
|
||||
pub async fn validate_and_fix_config() -> Result<()> {
|
||||
let config_path = dirs::verge_path()?;
|
||||
let mut config = match help::read_yaml::<IVerge>(&config_path) {
|
||||
let mut config = match help::read_yaml::<IVerge>(&config_path).await {
|
||||
Ok(config) => config,
|
||||
Err(_) => Self::template(),
|
||||
};
|
||||
@@ -252,41 +268,33 @@ impl IVerge {
|
||||
logging!(
|
||||
warn,
|
||||
Type::Config,
|
||||
true,
|
||||
"启动时发现无效的clash_core配置: '{}', 将自动修正为 'verge-mihomo'",
|
||||
core
|
||||
);
|
||||
config.clash_core = Some("verge-mihomo".to_string());
|
||||
config.clash_core = Some("verge-mihomo".into());
|
||||
needs_fix = true;
|
||||
}
|
||||
} else {
|
||||
logging!(
|
||||
info,
|
||||
Type::Config,
|
||||
true,
|
||||
"启动时发现未配置clash_core, 将设置为默认值 'verge-mihomo'"
|
||||
);
|
||||
config.clash_core = Some("verge-mihomo".to_string());
|
||||
config.clash_core = Some("verge-mihomo".into());
|
||||
needs_fix = true;
|
||||
}
|
||||
|
||||
// 修正后保存配置
|
||||
if needs_fix {
|
||||
logging!(info, Type::Config, true, "正在保存修正后的配置文件...");
|
||||
help::save_yaml(&config_path, &config, Some("# Clash Verge Config"))?;
|
||||
logging!(
|
||||
info,
|
||||
Type::Config,
|
||||
true,
|
||||
"配置文件修正完成,需要重新加载配置"
|
||||
);
|
||||
logging!(info, Type::Config, "正在保存修正后的配置文件...");
|
||||
help::save_yaml(&config_path, &config, Some("# Clash Verge Config")).await?;
|
||||
logging!(info, Type::Config, "配置文件修正完成,需要重新加载配置");
|
||||
|
||||
Self::reload_config_after_fix(config)?;
|
||||
Self::reload_config_after_fix(config).await?;
|
||||
} else {
|
||||
logging!(
|
||||
info,
|
||||
Type::Config,
|
||||
true,
|
||||
"clash_core配置验证通过: {:?}",
|
||||
config.clash_core
|
||||
);
|
||||
@@ -296,50 +304,61 @@ impl IVerge {
|
||||
}
|
||||
|
||||
/// 配置修正后重新加载配置
|
||||
fn reload_config_after_fix(updated_config: IVerge) -> Result<()> {
|
||||
use crate::config::Config;
|
||||
|
||||
let config_draft = Config::verge();
|
||||
*config_draft.draft() = Box::new(updated_config.clone());
|
||||
config_draft.apply();
|
||||
|
||||
async fn reload_config_after_fix(updated_config: IVerge) -> Result<()> {
|
||||
logging!(
|
||||
info,
|
||||
Type::Config,
|
||||
true,
|
||||
"内存配置已强制更新,新的clash_core: {:?}",
|
||||
updated_config.clash_core
|
||||
&updated_config.clash_core
|
||||
);
|
||||
|
||||
let config_draft = Config::verge().await;
|
||||
**config_draft.draft_mut() = updated_config;
|
||||
config_draft.apply();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn get_valid_clash_core(&self) -> String {
|
||||
self.clash_core
|
||||
.clone()
|
||||
.unwrap_or_else(|| "verge-mihomo".to_string())
|
||||
.unwrap_or_else(|| "verge-mihomo".into())
|
||||
}
|
||||
|
||||
fn get_system_language() -> String {
|
||||
let sys_lang = sys_locale::get_locale()
|
||||
.unwrap_or_else(|| String::from("en"))
|
||||
.unwrap_or_else(|| "en".into())
|
||||
.to_lowercase();
|
||||
|
||||
let lang_code = sys_lang.split(['_', '-']).next().unwrap_or("en");
|
||||
let supported_languages = i18n::get_supported_languages();
|
||||
|
||||
if supported_languages.contains(&lang_code.to_string()) {
|
||||
lang_code.to_string()
|
||||
if supported_languages.contains(&lang_code.into()) {
|
||||
lang_code.into()
|
||||
} else {
|
||||
String::from("en")
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new() -> Self {
|
||||
match dirs::verge_path().and_then(|path| help::read_yaml::<IVerge>(&path)) {
|
||||
Ok(config) => config,
|
||||
pub async fn new() -> Self {
|
||||
match dirs::verge_path() {
|
||||
Ok(path) => match help::read_yaml::<IVerge>(&path).await {
|
||||
Ok(mut config) => {
|
||||
// compatibility
|
||||
if let Some(start_page) = config.start_page.clone()
|
||||
&& start_page == "/home"
|
||||
{
|
||||
config.start_page = Some(String::from("/"));
|
||||
}
|
||||
config
|
||||
}
|
||||
Err(err) => {
|
||||
log::error!(target: "app", "{err}");
|
||||
logging!(error, Type::Config, "{err}");
|
||||
Self::template()
|
||||
}
|
||||
},
|
||||
Err(err) => {
|
||||
logging!(error, Type::Config, "{err}");
|
||||
Self::template()
|
||||
}
|
||||
}
|
||||
@@ -347,6 +366,8 @@ impl IVerge {
|
||||
|
||||
pub fn template() -> Self {
|
||||
Self {
|
||||
app_log_max_size: Some(128),
|
||||
app_log_max_count: Some(8),
|
||||
clash_core: Some("verge-mihomo".into()),
|
||||
language: Some(Self::get_system_language()),
|
||||
theme_mode: Some("system".into()),
|
||||
@@ -354,7 +375,7 @@ impl IVerge {
|
||||
env_type: Some("bash".into()),
|
||||
#[cfg(target_os = "windows")]
|
||||
env_type: Some("powershell".into()),
|
||||
start_page: Some("/home".into()),
|
||||
start_page: Some("/".into()),
|
||||
traffic_graph: Some(true),
|
||||
enable_memory_usage: Some(true),
|
||||
enable_group_icon: Some(true),
|
||||
@@ -367,11 +388,11 @@ impl IVerge {
|
||||
enable_auto_launch: Some(false),
|
||||
enable_silent_start: Some(false),
|
||||
enable_hover_jump_navigator: Some(true),
|
||||
hover_jump_navigator_delay: Some(280),
|
||||
enable_system_proxy: Some(false),
|
||||
proxy_auto_config: Some(false),
|
||||
pac_file_content: Some(DEFAULT_PAC.into()),
|
||||
proxy_host: Some("127.0.0.1".into()),
|
||||
enable_random_port: Some(false),
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
verge_redir_port: Some(7895),
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
@@ -391,39 +412,44 @@ impl IVerge {
|
||||
auto_close_connection: Some(true),
|
||||
auto_check_update: Some(true),
|
||||
enable_builtin_enhanced: Some(true),
|
||||
auto_log_clean: Some(2),
|
||||
auto_log_clean: Some(2), // 1: 1天, 2: 7天, 3: 30天, 4: 90天
|
||||
webdav_url: None,
|
||||
webdav_username: None,
|
||||
webdav_password: None,
|
||||
enable_tray_speed: Some(false),
|
||||
enable_tray_icon: Some(true),
|
||||
// enable_tray_icon: Some(true),
|
||||
tray_inline_proxy_groups: Some(false),
|
||||
enable_global_hotkey: Some(true),
|
||||
enable_auto_light_weight_mode: Some(false),
|
||||
auto_light_weight_minutes: Some(10),
|
||||
enable_dns_settings: Some(false),
|
||||
home_cards: None,
|
||||
service_state: None,
|
||||
enable_external_controller: Some(false),
|
||||
..Self::default()
|
||||
}
|
||||
}
|
||||
|
||||
/// Save IVerge App Config
|
||||
pub fn save_file(&self) -> Result<()> {
|
||||
help::save_yaml(&dirs::verge_path()?, &self, Some("# Clash Verge Config"))
|
||||
pub async fn save_file(&self) -> Result<()> {
|
||||
help::save_yaml(&dirs::verge_path()?, &self, Some("# Clash Verge Config")).await
|
||||
}
|
||||
|
||||
/// patch verge config
|
||||
/// only save to file
|
||||
pub fn patch_config(&mut self, patch: IVerge) {
|
||||
#[allow(clippy::cognitive_complexity)]
|
||||
pub fn patch_config(&mut self, patch: &IVerge) {
|
||||
macro_rules! patch {
|
||||
($key: tt) => {
|
||||
if patch.$key.is_some() {
|
||||
self.$key = patch.$key;
|
||||
self.$key = patch.$key.clone();
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
patch!(app_log_level);
|
||||
patch!(app_log_max_size);
|
||||
patch!(app_log_max_count);
|
||||
|
||||
patch!(language);
|
||||
patch!(theme_mode);
|
||||
patch!(tray_event);
|
||||
@@ -436,6 +462,7 @@ impl IVerge {
|
||||
#[cfg(target_os = "macos")]
|
||||
patch!(tray_icon);
|
||||
patch!(menu_icon);
|
||||
patch!(menu_order);
|
||||
patch!(common_tray_icon);
|
||||
patch!(sysproxy_tray_icon);
|
||||
patch!(tun_tray_icon);
|
||||
@@ -444,7 +471,7 @@ impl IVerge {
|
||||
patch!(enable_auto_launch);
|
||||
patch!(enable_silent_start);
|
||||
patch!(enable_hover_jump_navigator);
|
||||
patch!(enable_random_port);
|
||||
patch!(hover_jump_navigator_delay);
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
patch!(verge_redir_port);
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
@@ -476,6 +503,7 @@ impl IVerge {
|
||||
patch!(auto_check_update);
|
||||
patch!(default_latency_test);
|
||||
patch!(default_latency_timeout);
|
||||
patch!(enable_auto_delay_detection);
|
||||
patch!(enable_builtin_enhanced);
|
||||
patch!(proxy_layout_column);
|
||||
patch!(test_list);
|
||||
@@ -485,21 +513,17 @@ impl IVerge {
|
||||
patch!(webdav_username);
|
||||
patch!(webdav_password);
|
||||
patch!(enable_tray_speed);
|
||||
patch!(enable_tray_icon);
|
||||
// patch!(enable_tray_icon);
|
||||
patch!(tray_inline_proxy_groups);
|
||||
patch!(enable_auto_light_weight_mode);
|
||||
patch!(auto_light_weight_minutes);
|
||||
patch!(enable_dns_settings);
|
||||
patch!(home_cards);
|
||||
patch!(service_state);
|
||||
patch!(enable_external_controller);
|
||||
}
|
||||
|
||||
/// 在初始化前尝试拿到单例端口的值
|
||||
pub fn get_singleton_port() -> u16 {
|
||||
#[cfg(not(feature = "verge-dev"))]
|
||||
const SERVER_PORT: u16 = 33331;
|
||||
#[cfg(feature = "verge-dev")]
|
||||
const SERVER_PORT: u16 = 11233;
|
||||
SERVER_PORT
|
||||
crate::constants::network::ports::SINGLETON_SERVER
|
||||
}
|
||||
|
||||
/// 获取日志等级
|
||||
@@ -523,6 +547,8 @@ impl IVerge {
|
||||
#[derive(Debug, Clone, Serialize)]
|
||||
pub struct IVergeResponse {
|
||||
pub app_log_level: Option<String>,
|
||||
pub app_log_max_size: Option<u64>,
|
||||
pub app_log_max_count: Option<usize>,
|
||||
pub language: Option<String>,
|
||||
pub theme_mode: Option<String>,
|
||||
pub tray_event: Option<String>,
|
||||
@@ -536,6 +562,7 @@ pub struct IVergeResponse {
|
||||
#[cfg(target_os = "macos")]
|
||||
pub tray_icon: Option<String>,
|
||||
pub menu_icon: Option<String>,
|
||||
pub menu_order: Option<Vec<String>>,
|
||||
pub sysproxy_tray_icon: Option<bool>,
|
||||
pub tun_tray_icon: Option<bool>,
|
||||
pub enable_tun_mode: Option<bool>,
|
||||
@@ -558,11 +585,11 @@ pub struct IVergeResponse {
|
||||
pub auto_check_update: Option<bool>,
|
||||
pub default_latency_test: Option<String>,
|
||||
pub default_latency_timeout: Option<i32>,
|
||||
pub enable_auto_delay_detection: Option<bool>,
|
||||
pub enable_builtin_enhanced: Option<bool>,
|
||||
pub proxy_layout_column: Option<i32>,
|
||||
pub test_list: Option<Vec<IVergeTestItem>>,
|
||||
pub auto_log_clean: Option<i32>,
|
||||
pub enable_random_port: Option<bool>,
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
pub verge_redir_port: Option<u16>,
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
@@ -580,13 +607,15 @@ pub struct IVergeResponse {
|
||||
pub webdav_username: Option<String>,
|
||||
pub webdav_password: Option<String>,
|
||||
pub enable_tray_speed: Option<bool>,
|
||||
pub enable_tray_icon: Option<bool>,
|
||||
// pub enable_tray_icon: Option<bool>,
|
||||
pub tray_inline_proxy_groups: Option<bool>,
|
||||
pub enable_auto_light_weight_mode: Option<bool>,
|
||||
pub auto_light_weight_minutes: Option<u64>,
|
||||
pub enable_dns_settings: Option<bool>,
|
||||
pub home_cards: Option<serde_json::Value>,
|
||||
pub enable_hover_jump_navigator: Option<bool>,
|
||||
pub service_state: Option<crate::core::service::ServiceState>,
|
||||
pub hover_jump_navigator_delay: Option<u64>,
|
||||
pub enable_external_controller: Option<bool>,
|
||||
}
|
||||
|
||||
impl From<IVerge> for IVergeResponse {
|
||||
@@ -595,6 +624,8 @@ impl From<IVerge> for IVergeResponse {
|
||||
let valid_clash_core = verge.get_valid_clash_core();
|
||||
Self {
|
||||
app_log_level: verge.app_log_level,
|
||||
app_log_max_size: verge.app_log_max_size,
|
||||
app_log_max_count: verge.app_log_max_count,
|
||||
language: verge.language,
|
||||
theme_mode: verge.theme_mode,
|
||||
tray_event: verge.tray_event,
|
||||
@@ -608,6 +639,7 @@ impl From<IVerge> for IVergeResponse {
|
||||
#[cfg(target_os = "macos")]
|
||||
tray_icon: verge.tray_icon,
|
||||
menu_icon: verge.menu_icon,
|
||||
menu_order: verge.menu_order,
|
||||
sysproxy_tray_icon: verge.sysproxy_tray_icon,
|
||||
tun_tray_icon: verge.tun_tray_icon,
|
||||
enable_tun_mode: verge.enable_tun_mode,
|
||||
@@ -630,11 +662,11 @@ impl From<IVerge> for IVergeResponse {
|
||||
auto_check_update: verge.auto_check_update,
|
||||
default_latency_test: verge.default_latency_test,
|
||||
default_latency_timeout: verge.default_latency_timeout,
|
||||
enable_auto_delay_detection: verge.enable_auto_delay_detection,
|
||||
enable_builtin_enhanced: verge.enable_builtin_enhanced,
|
||||
proxy_layout_column: verge.proxy_layout_column,
|
||||
test_list: verge.test_list,
|
||||
auto_log_clean: verge.auto_log_clean,
|
||||
enable_random_port: verge.enable_random_port,
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
verge_redir_port: verge.verge_redir_port,
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
@@ -652,13 +684,21 @@ impl From<IVerge> for IVergeResponse {
|
||||
webdav_username: verge.webdav_username,
|
||||
webdav_password: verge.webdav_password,
|
||||
enable_tray_speed: verge.enable_tray_speed,
|
||||
enable_tray_icon: verge.enable_tray_icon,
|
||||
// enable_tray_icon: verge.enable_tray_icon,
|
||||
tray_inline_proxy_groups: verge.tray_inline_proxy_groups,
|
||||
enable_auto_light_weight_mode: verge.enable_auto_light_weight_mode,
|
||||
auto_light_weight_minutes: verge.auto_light_weight_minutes,
|
||||
enable_dns_settings: verge.enable_dns_settings,
|
||||
home_cards: verge.home_cards,
|
||||
enable_hover_jump_navigator: verge.enable_hover_jump_navigator,
|
||||
service_state: verge.service_state,
|
||||
hover_jump_navigator_delay: verge.hover_jump_navigator_delay,
|
||||
enable_external_controller: verge.enable_external_controller,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Box<IVerge>> for IVergeResponse {
|
||||
fn from(verge: Box<IVerge>) -> Self {
|
||||
IVergeResponse::from(*verge)
|
||||
}
|
||||
}
|
||||
|
||||
78
src-tauri/src/constants.rs
Normal file
78
src-tauri/src/constants.rs
Normal file
@@ -0,0 +1,78 @@
|
||||
use std::time::Duration;
|
||||
|
||||
pub mod network {
|
||||
pub const DEFAULT_PROXY_HOST: &str = "127.0.0.1";
|
||||
pub const DEFAULT_EXTERNAL_CONTROLLER: &str = "127.0.0.1:9097";
|
||||
|
||||
pub mod ports {
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
pub const DEFAULT_REDIR: u16 = 7895;
|
||||
#[cfg(target_os = "linux")]
|
||||
pub const DEFAULT_TPROXY: u16 = 7896;
|
||||
pub const DEFAULT_MIXED: u16 = 7897;
|
||||
pub const DEFAULT_SOCKS: u16 = 7898;
|
||||
pub const DEFAULT_HTTP: u16 = 7899;
|
||||
|
||||
#[cfg(not(feature = "verge-dev"))]
|
||||
pub const SINGLETON_SERVER: u16 = 33331;
|
||||
#[cfg(feature = "verge-dev")]
|
||||
pub const SINGLETON_SERVER: u16 = 11233;
|
||||
}
|
||||
}
|
||||
|
||||
pub mod bypass {
|
||||
#[cfg(target_os = "windows")]
|
||||
pub const DEFAULT: &str = "localhost;127.*;192.168.*;10.*;172.16.*;172.17.*;172.18.*;172.19.*;172.20.*;172.21.*;172.22.*;172.23.*;172.24.*;172.25.*;172.26.*;172.27.*;172.28.*;172.29.*;172.30.*;172.31.*;<local>";
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
pub const DEFAULT: &str =
|
||||
"localhost,127.0.0.1,192.168.0.0/16,10.0.0.0/8,172.16.0.0/12,172.29.0.0/16,::1";
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
pub const DEFAULT: &str = "127.0.0.1,192.168.0.0/16,10.0.0.0/8,172.16.0.0/12,172.29.0.0/16,localhost,*.local,*.crashlytics.com,<local>";
|
||||
}
|
||||
|
||||
pub mod timing {
|
||||
use super::Duration;
|
||||
|
||||
pub const CONFIG_UPDATE_DEBOUNCE: Duration = Duration::from_millis(500);
|
||||
pub const CONFIG_RELOAD_DELAY: Duration = Duration::from_millis(300);
|
||||
pub const EVENT_EMIT_DELAY: Duration = Duration::from_millis(20);
|
||||
pub const STARTUP_ERROR_DELAY: Duration = Duration::from_secs(2);
|
||||
pub const ERROR_BATCH_DELAY: Duration = Duration::from_millis(300);
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
pub const SERVICE_WAIT_MAX: Duration = Duration::from_millis(3000);
|
||||
#[cfg(target_os = "windows")]
|
||||
pub const SERVICE_WAIT_INTERVAL: Duration = Duration::from_millis(200);
|
||||
}
|
||||
|
||||
pub mod retry {
|
||||
pub const EVENT_EMIT_THRESHOLD: u64 = 10;
|
||||
}
|
||||
|
||||
pub mod files {
|
||||
pub const RUNTIME_CONFIG: &str = "clash-verge.yaml";
|
||||
pub const CHECK_CONFIG: &str = "clash-verge-check.yaml";
|
||||
pub const DNS_CONFIG: &str = "dns_config.yaml";
|
||||
pub const WINDOW_STATE: &str = "window_state.json";
|
||||
}
|
||||
|
||||
pub mod error_patterns {
|
||||
pub const CONNECTION_ERRORS: &[&str] = &[
|
||||
"Failed to create connection",
|
||||
"The system cannot find the file specified",
|
||||
"operation timed out",
|
||||
"connection refused",
|
||||
];
|
||||
}
|
||||
|
||||
pub mod tun {
|
||||
#[cfg(target_os = "linux")]
|
||||
pub const DEFAULT_STACK: &str = "mixed";
|
||||
|
||||
#[cfg(not(target_os = "linux"))]
|
||||
pub const DEFAULT_STACK: &str = "gvisor";
|
||||
|
||||
pub const DNS_HIJACK: &[&str] = &["any:53"];
|
||||
}
|
||||
@@ -1,6 +1,9 @@
|
||||
#[cfg(target_os = "windows")]
|
||||
use crate::process::AsyncHandler;
|
||||
use crate::{logging, utils::logging::Type};
|
||||
use anyhow::Result;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tokio::time::{timeout, Duration};
|
||||
use tokio::time::{Duration, timeout};
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
use anyhow::anyhow;
|
||||
@@ -25,8 +28,8 @@ impl Default for AsyncSysproxy {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
enable: false,
|
||||
host: "127.0.0.1".to_string(),
|
||||
port: 7890,
|
||||
host: "127.0.0.1".into(),
|
||||
port: 7897,
|
||||
bypass: String::new(),
|
||||
}
|
||||
}
|
||||
@@ -39,15 +42,21 @@ impl AsyncProxyQuery {
|
||||
pub async fn get_auto_proxy() -> AsyncAutoproxy {
|
||||
match timeout(Duration::from_secs(3), Self::get_auto_proxy_impl()).await {
|
||||
Ok(Ok(proxy)) => {
|
||||
log::debug!(target: "app", "异步获取自动代理成功: enable={}, url={}", proxy.enable, proxy.url);
|
||||
logging!(
|
||||
debug,
|
||||
Type::Network,
|
||||
"异步获取自动代理成功: enable={}, url={}",
|
||||
proxy.enable,
|
||||
proxy.url
|
||||
);
|
||||
proxy
|
||||
}
|
||||
Ok(Err(e)) => {
|
||||
log::warn!(target: "app", "异步获取自动代理失败: {e}");
|
||||
logging!(warn, Type::Network, "Warning: 异步获取自动代理失败: {e}");
|
||||
AsyncAutoproxy::default()
|
||||
}
|
||||
Err(_) => {
|
||||
log::warn!(target: "app", "异步获取自动代理超时");
|
||||
logging!(warn, Type::Network, "Warning: 异步获取自动代理超时");
|
||||
AsyncAutoproxy::default()
|
||||
}
|
||||
}
|
||||
@@ -57,15 +66,22 @@ impl AsyncProxyQuery {
|
||||
pub async fn get_system_proxy() -> AsyncSysproxy {
|
||||
match timeout(Duration::from_secs(3), Self::get_system_proxy_impl()).await {
|
||||
Ok(Ok(proxy)) => {
|
||||
log::debug!(target: "app", "异步获取系统代理成功: enable={}, {}:{}", proxy.enable, proxy.host, proxy.port);
|
||||
logging!(
|
||||
debug,
|
||||
Type::Network,
|
||||
"异步获取系统代理成功: enable={}, {}:{}",
|
||||
proxy.enable,
|
||||
proxy.host,
|
||||
proxy.port
|
||||
);
|
||||
proxy
|
||||
}
|
||||
Ok(Err(e)) => {
|
||||
log::warn!(target: "app", "异步获取系统代理失败: {e}");
|
||||
logging!(warn, Type::Network, "Warning: 异步获取系统代理失败: {e}");
|
||||
AsyncSysproxy::default()
|
||||
}
|
||||
Err(_) => {
|
||||
log::warn!(target: "app", "异步获取系统代理超时");
|
||||
logging!(warn, Type::Network, "Warning: 异步获取系统代理超时");
|
||||
AsyncSysproxy::default()
|
||||
}
|
||||
}
|
||||
@@ -74,7 +90,7 @@ impl AsyncProxyQuery {
|
||||
#[cfg(target_os = "windows")]
|
||||
async fn get_auto_proxy_impl() -> Result<AsyncAutoproxy> {
|
||||
// Windows: 从注册表读取PAC配置
|
||||
tokio::task::spawn_blocking(move || -> Result<AsyncAutoproxy> {
|
||||
AsyncHandler::spawn_blocking(move || -> Result<AsyncAutoproxy> {
|
||||
Self::get_pac_config_from_registry()
|
||||
})
|
||||
.await?
|
||||
@@ -85,7 +101,7 @@ impl AsyncProxyQuery {
|
||||
use std::ptr;
|
||||
use winapi::shared::minwindef::{DWORD, HKEY};
|
||||
use winapi::um::winnt::{KEY_READ, REG_DWORD, REG_SZ};
|
||||
use winapi::um::winreg::{RegCloseKey, RegOpenKeyExW, RegQueryValueExW, HKEY_CURRENT_USER};
|
||||
use winapi::um::winreg::{HKEY_CURRENT_USER, RegCloseKey, RegOpenKeyExW, RegQueryValueExW};
|
||||
|
||||
unsafe {
|
||||
let key_path = "Software\\Microsoft\\Windows\\CurrentVersion\\Internet Settings\0"
|
||||
@@ -97,7 +113,7 @@ impl AsyncProxyQuery {
|
||||
RegOpenKeyExW(HKEY_CURRENT_USER, key_path.as_ptr(), 0, KEY_READ, &mut hkey);
|
||||
|
||||
if result != 0 {
|
||||
log::debug!(target: "app", "无法打开注册表项");
|
||||
logging!(debug, Type::Network, "无法打开注册表项");
|
||||
return Ok(AsyncAutoproxy::default());
|
||||
}
|
||||
|
||||
@@ -123,7 +139,7 @@ impl AsyncProxyQuery {
|
||||
.position(|&x| x == 0)
|
||||
.unwrap_or(url_buffer.len());
|
||||
pac_url = String::from_utf16_lossy(&url_buffer[..end_pos]);
|
||||
log::debug!(target: "app", "从注册表读取到PAC URL: {}", pac_url);
|
||||
logging!(debug, Type::Network, "从注册表读取到PAC URL: {pac_url}");
|
||||
}
|
||||
|
||||
// 2. 检查自动检测设置是否启用
|
||||
@@ -148,10 +164,14 @@ impl AsyncProxyQuery {
|
||||
|| (detect_query_result == 0 && detect_value_type == REG_DWORD && auto_detect != 0);
|
||||
|
||||
if pac_enabled {
|
||||
log::debug!(target: "app", "PAC配置启用: URL={}, AutoDetect={}", pac_url, auto_detect);
|
||||
logging!(
|
||||
debug,
|
||||
Type::Network,
|
||||
"PAC配置启用: URL={pac_url}, AutoDetect={auto_detect}"
|
||||
);
|
||||
|
||||
if pac_url.is_empty() && auto_detect != 0 {
|
||||
pac_url = "auto-detect".to_string();
|
||||
pac_url = "auto-detect".into();
|
||||
}
|
||||
|
||||
Ok(AsyncAutoproxy {
|
||||
@@ -159,7 +179,7 @@ impl AsyncProxyQuery {
|
||||
url: pac_url,
|
||||
})
|
||||
} else {
|
||||
log::debug!(target: "app", "PAC配置未启用");
|
||||
logging!(debug, Type::Network, "PAC配置未启用");
|
||||
Ok(AsyncAutoproxy::default())
|
||||
}
|
||||
}
|
||||
@@ -175,7 +195,11 @@ impl AsyncProxyQuery {
|
||||
}
|
||||
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
log::debug!(target: "app", "scutil output: {stdout}");
|
||||
crate::logging!(
|
||||
debug,
|
||||
crate::utils::logging::Type::Network,
|
||||
"scutil output: {stdout}"
|
||||
);
|
||||
|
||||
let mut pac_enabled = false;
|
||||
let mut pac_url = String::new();
|
||||
@@ -189,12 +213,16 @@ impl AsyncProxyQuery {
|
||||
// 正确解析包含冒号的URL
|
||||
// 格式: "ProxyAutoConfigURLString : http://127.0.0.1:11233/commands/pac"
|
||||
if let Some(colon_pos) = line.find(" : ") {
|
||||
pac_url = line[colon_pos + 3..].trim().to_string();
|
||||
pac_url = line[colon_pos + 3..].trim().into();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
log::debug!(target: "app", "解析结果: pac_enabled={pac_enabled}, pac_url={pac_url}");
|
||||
crate::logging!(
|
||||
debug,
|
||||
crate::utils::logging::Type::Network,
|
||||
"解析结果: pac_enabled={pac_enabled}, pac_url={pac_url}"
|
||||
);
|
||||
|
||||
Ok(AsyncAutoproxy {
|
||||
enable: pac_enabled && !pac_url.is_empty(),
|
||||
@@ -207,14 +235,14 @@ impl AsyncProxyQuery {
|
||||
// Linux: 检查环境变量和GNOME设置
|
||||
|
||||
// 首先检查环境变量
|
||||
if let Ok(auto_proxy) = std::env::var("auto_proxy") {
|
||||
if !auto_proxy.is_empty() {
|
||||
if let Ok(auto_proxy) = std::env::var("auto_proxy")
|
||||
&& !auto_proxy.is_empty()
|
||||
{
|
||||
return Ok(AsyncAutoproxy {
|
||||
enable: true,
|
||||
url: auto_proxy,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// 尝试使用 gsettings 获取 GNOME 代理设置
|
||||
let output = Command::new("gsettings")
|
||||
@@ -222,9 +250,10 @@ impl AsyncProxyQuery {
|
||||
.output()
|
||||
.await;
|
||||
|
||||
if let Ok(output) = output {
|
||||
if output.status.success() {
|
||||
let mode = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||||
if let Ok(output) = output
|
||||
&& output.status.success()
|
||||
{
|
||||
let mode: String = String::from_utf8_lossy(&output.stdout).trim().into();
|
||||
if mode.contains("auto") {
|
||||
// 获取 PAC URL
|
||||
let pac_output = Command::new("gsettings")
|
||||
@@ -232,13 +261,14 @@ impl AsyncProxyQuery {
|
||||
.output()
|
||||
.await;
|
||||
|
||||
if let Ok(pac_output) = pac_output {
|
||||
if pac_output.status.success() {
|
||||
let pac_url = String::from_utf8_lossy(&pac_output.stdout)
|
||||
if let Ok(pac_output) = pac_output
|
||||
&& pac_output.status.success()
|
||||
{
|
||||
let pac_url: String = String::from_utf8_lossy(&pac_output.stdout)
|
||||
.trim()
|
||||
.trim_matches('\'')
|
||||
.trim_matches('"')
|
||||
.to_string();
|
||||
.into();
|
||||
|
||||
if !pac_url.is_empty() {
|
||||
return Ok(AsyncAutoproxy {
|
||||
@@ -249,8 +279,6 @@ impl AsyncProxyQuery {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(AsyncAutoproxy::default())
|
||||
}
|
||||
@@ -258,7 +286,7 @@ impl AsyncProxyQuery {
|
||||
#[cfg(target_os = "windows")]
|
||||
async fn get_system_proxy_impl() -> Result<AsyncSysproxy> {
|
||||
// Windows: 使用注册表直接读取代理设置
|
||||
tokio::task::spawn_blocking(move || -> Result<AsyncSysproxy> {
|
||||
AsyncHandler::spawn_blocking(move || -> Result<AsyncSysproxy> {
|
||||
Self::get_system_proxy_from_registry()
|
||||
})
|
||||
.await?
|
||||
@@ -269,7 +297,7 @@ impl AsyncProxyQuery {
|
||||
use std::ptr;
|
||||
use winapi::shared::minwindef::{DWORD, HKEY};
|
||||
use winapi::um::winnt::{KEY_READ, REG_DWORD, REG_SZ};
|
||||
use winapi::um::winreg::{RegCloseKey, RegOpenKeyExW, RegQueryValueExW, HKEY_CURRENT_USER};
|
||||
use winapi::um::winreg::{HKEY_CURRENT_USER, RegCloseKey, RegOpenKeyExW, RegQueryValueExW};
|
||||
|
||||
unsafe {
|
||||
let key_path = "Software\\Microsoft\\Windows\\CurrentVersion\\Internet Settings\0"
|
||||
@@ -354,14 +382,18 @@ impl AsyncProxyQuery {
|
||||
if !proxy_server.is_empty() {
|
||||
// 解析服务器地址和端口
|
||||
let (host, port) = if let Some(colon_pos) = proxy_server.rfind(':') {
|
||||
let host = proxy_server[..colon_pos].to_string();
|
||||
let host = proxy_server[..colon_pos].into();
|
||||
let port = proxy_server[colon_pos + 1..].parse::<u16>().unwrap_or(8080);
|
||||
(host, port)
|
||||
} else {
|
||||
(proxy_server, 8080)
|
||||
};
|
||||
|
||||
log::debug!(target: "app", "从注册表读取到代理设置: {}:{}, bypass: {}", host, port, bypass_list);
|
||||
logging!(
|
||||
debug,
|
||||
Type::Network,
|
||||
"从注册表读取到代理设置: {host}:{port}, bypass: {bypass_list}"
|
||||
);
|
||||
|
||||
Ok(AsyncSysproxy {
|
||||
enable: true,
|
||||
@@ -384,12 +416,12 @@ impl AsyncProxyQuery {
|
||||
}
|
||||
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
log::debug!(target: "app", "scutil proxy output: {stdout}");
|
||||
logging!(debug, Type::Network, "scutil proxy output: {stdout}");
|
||||
|
||||
let mut http_enabled = false;
|
||||
let mut http_host = String::new();
|
||||
let mut http_port = 8080u16;
|
||||
let mut exceptions = Vec::new();
|
||||
let mut exceptions: Vec<String> = Vec::new();
|
||||
|
||||
for line in stdout.lines() {
|
||||
let line = line.trim();
|
||||
@@ -397,20 +429,20 @@ impl AsyncProxyQuery {
|
||||
http_enabled = true;
|
||||
} else if line.contains("HTTPProxy") && !line.contains("Port") {
|
||||
if let Some(host_part) = line.split(':').nth(1) {
|
||||
http_host = host_part.trim().to_string();
|
||||
http_host = host_part.trim().into();
|
||||
}
|
||||
} else if line.contains("HTTPPort") {
|
||||
if let Some(port_part) = line.split(':').nth(1) {
|
||||
if let Ok(port) = port_part.trim().parse::<u16>() {
|
||||
if let Some(port_part) = line.split(':').nth(1)
|
||||
&& let Ok(port) = port_part.trim().parse::<u16>()
|
||||
{
|
||||
http_port = port;
|
||||
}
|
||||
}
|
||||
} else if line.contains("ExceptionsList") {
|
||||
// 解析异常列表
|
||||
if let Some(list_part) = line.split(':').nth(1) {
|
||||
let list = list_part.trim();
|
||||
if !list.is_empty() {
|
||||
exceptions.push(list.to_string());
|
||||
exceptions.push(list.into());
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -429,17 +461,17 @@ impl AsyncProxyQuery {
|
||||
// Linux: 检查环境变量和桌面环境设置
|
||||
|
||||
// 首先检查环境变量
|
||||
if let Ok(http_proxy) = std::env::var("http_proxy") {
|
||||
if let Ok(proxy_info) = Self::parse_proxy_url(&http_proxy) {
|
||||
if let Ok(http_proxy) = std::env::var("http_proxy")
|
||||
&& let Ok(proxy_info) = Self::parse_proxy_url(&http_proxy)
|
||||
{
|
||||
return Ok(proxy_info);
|
||||
}
|
||||
}
|
||||
|
||||
if let Ok(https_proxy) = std::env::var("https_proxy") {
|
||||
if let Ok(proxy_info) = Self::parse_proxy_url(&https_proxy) {
|
||||
if let Ok(https_proxy) = std::env::var("https_proxy")
|
||||
&& let Ok(proxy_info) = Self::parse_proxy_url(&https_proxy)
|
||||
{
|
||||
return Ok(proxy_info);
|
||||
}
|
||||
}
|
||||
|
||||
// 尝试使用 gsettings 获取 GNOME 代理设置
|
||||
let mode_output = Command::new("gsettings")
|
||||
@@ -447,11 +479,10 @@ impl AsyncProxyQuery {
|
||||
.output()
|
||||
.await;
|
||||
|
||||
if let Ok(mode_output) = mode_output {
|
||||
if mode_output.status.success() {
|
||||
let mode = String::from_utf8_lossy(&mode_output.stdout)
|
||||
.trim()
|
||||
.to_string();
|
||||
if let Ok(mode_output) = mode_output
|
||||
&& mode_output.status.success()
|
||||
{
|
||||
let mode: String = String::from_utf8_lossy(&mode_output.stdout).trim().into();
|
||||
if mode.contains("manual") {
|
||||
// 获取HTTP代理设置
|
||||
let host_result = Command::new("gsettings")
|
||||
@@ -464,13 +495,15 @@ impl AsyncProxyQuery {
|
||||
.output()
|
||||
.await;
|
||||
|
||||
if let (Ok(host_output), Ok(port_output)) = (host_result, port_result) {
|
||||
if host_output.status.success() && port_output.status.success() {
|
||||
let host = String::from_utf8_lossy(&host_output.stdout)
|
||||
if let (Ok(host_output), Ok(port_output)) = (host_result, port_result)
|
||||
&& host_output.status.success()
|
||||
&& port_output.status.success()
|
||||
{
|
||||
let host: String = String::from_utf8_lossy(&host_output.stdout)
|
||||
.trim()
|
||||
.trim_matches('\'')
|
||||
.trim_matches('"')
|
||||
.to_string();
|
||||
.into();
|
||||
|
||||
let port = String::from_utf8_lossy(&port_output.stdout)
|
||||
.trim()
|
||||
@@ -488,8 +521,6 @@ impl AsyncProxyQuery {
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(AsyncSysproxy::default())
|
||||
}
|
||||
@@ -510,11 +541,11 @@ impl AsyncProxyQuery {
|
||||
|
||||
// 解析主机和端口
|
||||
let (host, port) = if let Some(colon_pos) = url.rfind(':') {
|
||||
let host = url[..colon_pos].to_string();
|
||||
let host: String = url[..colon_pos].into();
|
||||
let port = url[colon_pos + 1..].parse::<u16>().unwrap_or(8080);
|
||||
(host, port)
|
||||
} else {
|
||||
(url.to_string(), 8080)
|
||||
(url.into(), 8080)
|
||||
};
|
||||
|
||||
if host.is_empty() {
|
||||
|
||||
@@ -1,18 +1,24 @@
|
||||
use crate::{config::Config, utils::dirs};
|
||||
use crate::constants::files::DNS_CONFIG;
|
||||
use crate::{
|
||||
config::Config,
|
||||
logging,
|
||||
process::AsyncHandler,
|
||||
utils::{dirs, logging::Type},
|
||||
};
|
||||
use anyhow::Error;
|
||||
use once_cell::sync::OnceCell;
|
||||
use parking_lot::Mutex;
|
||||
use reqwest_dav::list_cmd::{ListEntity, ListFile};
|
||||
use smartstring::alias::String;
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
env::{consts::OS, temp_dir},
|
||||
fs,
|
||||
io::Write,
|
||||
path::PathBuf,
|
||||
sync::Arc,
|
||||
time::Duration,
|
||||
};
|
||||
use tokio::time::timeout;
|
||||
use tokio::{fs, time::timeout};
|
||||
use zip::write::SimpleFileOptions;
|
||||
|
||||
// 应用版本常量,来自 tauri.conf.json
|
||||
@@ -74,16 +80,19 @@ impl WebDavClient {
|
||||
|
||||
// 获取或创建配置
|
||||
let config = {
|
||||
let mut lock = self.config.lock();
|
||||
if let Some(cfg) = lock.as_ref() {
|
||||
cfg.clone()
|
||||
// 首先检查是否已有配置
|
||||
let existing_config = self.config.lock().as_ref().cloned();
|
||||
|
||||
if let Some(cfg) = existing_config {
|
||||
cfg
|
||||
} else {
|
||||
let verge = Config::verge().latest().clone();
|
||||
// 释放锁后获取异步配置
|
||||
let verge = Config::verge().await.latest_ref().clone();
|
||||
if verge.webdav_url.is_none()
|
||||
|| verge.webdav_username.is_none()
|
||||
|| verge.webdav_password.is_none()
|
||||
{
|
||||
let msg = "Unable to create web dav client, please make sure the webdav config is correct".to_string();
|
||||
let msg: String = "Unable to create web dav client, please make sure the webdav config is correct".into();
|
||||
return Err(anyhow::Error::msg(msg));
|
||||
}
|
||||
|
||||
@@ -92,12 +101,13 @@ impl WebDavClient {
|
||||
.webdav_url
|
||||
.unwrap_or_default()
|
||||
.trim_end_matches('/')
|
||||
.to_string(),
|
||||
.into(),
|
||||
username: verge.webdav_username.unwrap_or_default(),
|
||||
password: verge.webdav_password.unwrap_or_default(),
|
||||
};
|
||||
|
||||
*lock = Some(config.clone());
|
||||
// 重新获取锁并存储配置
|
||||
*self.config.lock() = Some(config.clone());
|
||||
config
|
||||
}
|
||||
};
|
||||
@@ -117,20 +127,38 @@ impl WebDavClient {
|
||||
attempt.follow()
|
||||
}
|
||||
}))
|
||||
.build()
|
||||
.unwrap(),
|
||||
.build()?,
|
||||
)
|
||||
.set_host(config.url)
|
||||
.set_auth(reqwest_dav::Auth::Basic(config.username, config.password))
|
||||
.set_host(config.url.into())
|
||||
.set_auth(reqwest_dav::Auth::Basic(
|
||||
config.username.into(),
|
||||
config.password.into(),
|
||||
))
|
||||
.build()?;
|
||||
|
||||
// 尝试检查目录是否存在,如果不存在尝试创建,但创建失败不报错
|
||||
// 尝试检查目录是否存在,如果不存在尝试创建
|
||||
if client
|
||||
.list(dirs::BACKUP_DIR, reqwest_dav::Depth::Number(0))
|
||||
.await
|
||||
.is_err()
|
||||
{
|
||||
let _ = client.mkcol(dirs::BACKUP_DIR).await;
|
||||
match client.mkcol(dirs::BACKUP_DIR).await {
|
||||
Ok(_) => logging!(info, Type::Backup, "Successfully created backup directory"),
|
||||
Err(e) => {
|
||||
logging!(
|
||||
warn,
|
||||
Type::Backup,
|
||||
"Warning: Failed to create backup directory: {}",
|
||||
e
|
||||
);
|
||||
// 清除缓存,强制下次重新尝试
|
||||
self.reset();
|
||||
return Err(anyhow::Error::msg(format!(
|
||||
"Failed to create backup directory: {}",
|
||||
e
|
||||
)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 缓存客户端
|
||||
@@ -149,10 +177,10 @@ impl WebDavClient {
|
||||
|
||||
pub async fn upload(&self, file_path: PathBuf, file_name: String) -> Result<(), Error> {
|
||||
let client = self.get_client(Operation::Upload).await?;
|
||||
let webdav_path: String = format!("{}/{}", dirs::BACKUP_DIR, file_name);
|
||||
let webdav_path: String = format!("{}/{}", dirs::BACKUP_DIR, file_name).into();
|
||||
|
||||
// 读取文件并上传,如果失败尝试一次重试
|
||||
let file_content = fs::read(&file_path)?;
|
||||
let file_content = fs::read(&file_path).await?;
|
||||
|
||||
// 添加超时保护
|
||||
let upload_result = timeout(
|
||||
@@ -163,7 +191,11 @@ impl WebDavClient {
|
||||
|
||||
match upload_result {
|
||||
Err(_) => {
|
||||
log::warn!("Upload timed out, retrying once");
|
||||
logging!(
|
||||
warn,
|
||||
Type::Backup,
|
||||
"Warning: Upload timed out, retrying once"
|
||||
);
|
||||
tokio::time::sleep(Duration::from_millis(500)).await;
|
||||
timeout(
|
||||
Duration::from_secs(TIMEOUT_UPLOAD),
|
||||
@@ -174,7 +206,11 @@ impl WebDavClient {
|
||||
}
|
||||
|
||||
Ok(Err(e)) => {
|
||||
log::warn!("Upload failed, retrying once: {e}");
|
||||
logging!(
|
||||
warn,
|
||||
Type::Backup,
|
||||
"Warning: Upload failed, retrying once: {e}"
|
||||
);
|
||||
tokio::time::sleep(Duration::from_millis(500)).await;
|
||||
timeout(
|
||||
Duration::from_secs(TIMEOUT_UPLOAD),
|
||||
@@ -194,7 +230,7 @@ impl WebDavClient {
|
||||
let fut = async {
|
||||
let response = client.get(path.as_str()).await?;
|
||||
let content = response.bytes().await?;
|
||||
fs::write(&storage_path, &content)?;
|
||||
fs::write(&storage_path, &content).await?;
|
||||
Ok::<(), Error>(())
|
||||
};
|
||||
|
||||
@@ -232,41 +268,53 @@ impl WebDavClient {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn create_backup() -> Result<(String, PathBuf), Error> {
|
||||
pub async fn create_backup() -> Result<(String, PathBuf), Error> {
|
||||
let now = chrono::Local::now().format("%Y-%m-%d_%H-%M-%S").to_string();
|
||||
let zip_file_name = format!("{OS}-backup-{now}.zip");
|
||||
let zip_path = temp_dir().join(&zip_file_name);
|
||||
let zip_file_name: String = format!("{OS}-backup-{now}.zip").into();
|
||||
let zip_path = temp_dir().join(zip_file_name.as_str());
|
||||
|
||||
let file = fs::File::create(&zip_path)?;
|
||||
let value = zip_path.clone();
|
||||
let file = AsyncHandler::spawn_blocking(move || std::fs::File::create(&value)).await??;
|
||||
let mut zip = zip::ZipWriter::new(file);
|
||||
zip.add_directory("profiles/", SimpleFileOptions::default())?;
|
||||
let options = SimpleFileOptions::default().compression_method(zip::CompressionMethod::Stored);
|
||||
if let Ok(entries) = fs::read_dir(dirs::app_profiles_dir()?) {
|
||||
for entry in entries {
|
||||
let entry = entry.unwrap();
|
||||
|
||||
if let Ok(mut entries) = fs::read_dir(dirs::app_profiles_dir()?).await {
|
||||
while let Some(entry) = entries.next_entry().await? {
|
||||
let path = entry.path();
|
||||
if path.is_file() {
|
||||
let backup_path = format!("profiles/{}", entry.file_name().to_str().unwrap());
|
||||
let file_name_os = entry.file_name();
|
||||
let file_name = file_name_os
|
||||
.to_str()
|
||||
.ok_or_else(|| anyhow::Error::msg("Invalid file name encoding"))?;
|
||||
let backup_path = format!("profiles/{}", file_name);
|
||||
zip.start_file(backup_path, options)?;
|
||||
zip.write_all(fs::read(path).unwrap().as_slice())?;
|
||||
let file_content = fs::read(&path).await?;
|
||||
zip.write_all(&file_content)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
zip.start_file(dirs::CLASH_CONFIG, options)?;
|
||||
zip.write_all(fs::read(dirs::clash_path()?)?.as_slice())?;
|
||||
zip.write_all(fs::read(dirs::clash_path()?).await?.as_slice())?;
|
||||
|
||||
let mut verge_config: serde_json::Value =
|
||||
serde_yaml::from_str(&fs::read_to_string(dirs::verge_path()?)?)?;
|
||||
let verge_text = fs::read_to_string(dirs::verge_path()?).await?;
|
||||
let mut verge_config: serde_json::Value = serde_yaml_ng::from_str(&verge_text)?;
|
||||
if let Some(obj) = verge_config.as_object_mut() {
|
||||
obj.remove("webdav_username");
|
||||
obj.remove("webdav_password");
|
||||
obj.remove("webdav_url");
|
||||
}
|
||||
zip.start_file(dirs::VERGE_CONFIG, options)?;
|
||||
zip.write_all(serde_yaml::to_string(&verge_config)?.as_bytes())?;
|
||||
zip.write_all(serde_yaml_ng::to_string(&verge_config)?.as_bytes())?;
|
||||
|
||||
let dns_config_path = dirs::app_home_dir()?.join(DNS_CONFIG);
|
||||
if dns_config_path.exists() {
|
||||
zip.start_file(DNS_CONFIG, options)?;
|
||||
zip.write_all(fs::read(&dns_config_path).await?.as_slice())?;
|
||||
}
|
||||
|
||||
zip.start_file(dirs::PROFILE_YAML, options)?;
|
||||
zip.write_all(fs::read(dirs::profiles_path()?)?.as_slice())?;
|
||||
zip.write_all(fs::read(dirs::profiles_path()?).await?.as_slice())?;
|
||||
zip.finish()?;
|
||||
Ok((zip_file_name, zip_path))
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,38 +1,24 @@
|
||||
use parking_lot::RwLock;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::RwLock;
|
||||
use tokio::sync::{mpsc, oneshot};
|
||||
use tokio::time::{sleep, timeout, Duration};
|
||||
use tokio::time::{Duration, sleep, timeout};
|
||||
use tokio_stream::{StreamExt, wrappers::UnboundedReceiverStream};
|
||||
|
||||
use crate::config::{Config, IVerge};
|
||||
use crate::core::async_proxy_query::AsyncProxyQuery;
|
||||
use crate::logging_error;
|
||||
use crate::utils::logging::Type;
|
||||
use crate::core::{async_proxy_query::AsyncProxyQuery, handle};
|
||||
use crate::process::AsyncHandler;
|
||||
use crate::{logging, utils::logging::Type};
|
||||
use once_cell::sync::Lazy;
|
||||
use smartstring::alias::String;
|
||||
use sysproxy::{Autoproxy, Sysproxy};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum ProxyEvent {
|
||||
/// 配置变更事件
|
||||
ConfigChanged,
|
||||
/// 强制检查代理状态
|
||||
#[allow(dead_code)]
|
||||
ForceCheck,
|
||||
/// 启用系统代理
|
||||
#[allow(dead_code)]
|
||||
EnableProxy,
|
||||
/// 禁用系统代理
|
||||
#[allow(dead_code)]
|
||||
DisableProxy,
|
||||
/// 切换到PAC模式
|
||||
#[allow(dead_code)]
|
||||
SwitchToPac,
|
||||
/// 切换到HTTP代理模式
|
||||
#[allow(dead_code)]
|
||||
SwitchToHttp,
|
||||
/// 应用启动事件
|
||||
AppStarted,
|
||||
/// 应用关闭事件
|
||||
#[allow(dead_code)]
|
||||
AppStopping,
|
||||
}
|
||||
|
||||
@@ -53,13 +39,13 @@ impl Default for ProxyState {
|
||||
pac_enabled: false,
|
||||
auto_proxy: Autoproxy {
|
||||
enable: false,
|
||||
url: "".to_string(),
|
||||
url: "".into(),
|
||||
},
|
||||
sys_proxy: Sysproxy {
|
||||
enable: false,
|
||||
host: "127.0.0.1".to_string(),
|
||||
port: 7890,
|
||||
bypass: "".to_string(),
|
||||
host: "127.0.0.1".into(),
|
||||
port: 7897,
|
||||
bypass: "".into(),
|
||||
},
|
||||
last_updated: std::time::Instant::now(),
|
||||
is_healthy: true,
|
||||
@@ -74,7 +60,7 @@ pub struct EventDrivenProxyManager {
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct QueryRequest {
|
||||
pub struct QueryRequest {
|
||||
response_tx: oneshot::Sender<Autoproxy>,
|
||||
}
|
||||
|
||||
@@ -83,6 +69,7 @@ struct ProxyConfig {
|
||||
sys_enabled: bool,
|
||||
pac_enabled: bool,
|
||||
guard_enabled: bool,
|
||||
guard_duration: u64,
|
||||
}
|
||||
|
||||
static PROXY_MANAGER: Lazy<EventDrivenProxyManager> = Lazy::new(EventDrivenProxyManager::new);
|
||||
@@ -97,7 +84,8 @@ impl EventDrivenProxyManager {
|
||||
let (event_tx, event_rx) = mpsc::unbounded_channel();
|
||||
let (query_tx, query_rx) = mpsc::unbounded_channel();
|
||||
|
||||
Self::start_event_loop(state.clone(), event_rx, query_rx);
|
||||
let state_clone = Arc::clone(&state);
|
||||
AsyncHandler::spawn(move || Self::start_event_loop(state_clone, event_rx, query_rx));
|
||||
|
||||
Self {
|
||||
state,
|
||||
@@ -107,8 +95,8 @@ impl EventDrivenProxyManager {
|
||||
}
|
||||
|
||||
/// 获取自动代理配置(缓存)
|
||||
pub fn get_auto_proxy_cached(&self) -> Autoproxy {
|
||||
self.state.read().auto_proxy.clone()
|
||||
pub async fn get_auto_proxy_cached(&self) -> Autoproxy {
|
||||
self.state.read().await.auto_proxy.clone()
|
||||
}
|
||||
|
||||
/// 异步获取最新的自动代理配置
|
||||
@@ -117,15 +105,15 @@ impl EventDrivenProxyManager {
|
||||
let query = QueryRequest { response_tx: tx };
|
||||
|
||||
if self.query_sender.send(query).is_err() {
|
||||
log::error!(target: "app", "发送查询请求失败,返回缓存数据");
|
||||
return self.get_auto_proxy_cached();
|
||||
logging!(error, Type::Network, "发送查询请求失败,返回缓存数据");
|
||||
return self.get_auto_proxy_cached().await;
|
||||
}
|
||||
|
||||
match timeout(Duration::from_secs(5), rx).await {
|
||||
Ok(Ok(result)) => result,
|
||||
_ => {
|
||||
log::warn!(target: "app", "查询超时,返回缓存数据");
|
||||
self.get_auto_proxy_cached()
|
||||
logging!(warn, Type::Network, "Warning: 查询超时,返回缓存数据");
|
||||
self.get_auto_proxy_cached().await
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -141,96 +129,86 @@ impl EventDrivenProxyManager {
|
||||
}
|
||||
|
||||
/// 通知应用即将关闭
|
||||
#[allow(dead_code)]
|
||||
pub fn notify_app_stopping(&self) {
|
||||
self.send_event(ProxyEvent::AppStopping);
|
||||
}
|
||||
|
||||
/// 启用系统代理
|
||||
#[allow(dead_code)]
|
||||
pub fn enable_proxy(&self) {
|
||||
self.send_event(ProxyEvent::EnableProxy);
|
||||
}
|
||||
|
||||
/// 禁用系统代理
|
||||
#[allow(dead_code)]
|
||||
pub fn disable_proxy(&self) {
|
||||
self.send_event(ProxyEvent::DisableProxy);
|
||||
}
|
||||
|
||||
/// 强制检查代理状态
|
||||
#[allow(dead_code)]
|
||||
pub fn force_check(&self) {
|
||||
self.send_event(ProxyEvent::ForceCheck);
|
||||
}
|
||||
|
||||
fn send_event(&self, event: ProxyEvent) {
|
||||
if let Err(e) = self.event_sender.send(event) {
|
||||
log::error!(target: "app", "发送代理事件失败: {e}");
|
||||
logging!(error, Type::Network, "发送代理事件失败: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
fn start_event_loop(
|
||||
pub async fn start_event_loop(
|
||||
state: Arc<RwLock<ProxyState>>,
|
||||
mut event_rx: mpsc::UnboundedReceiver<ProxyEvent>,
|
||||
mut query_rx: mpsc::UnboundedReceiver<QueryRequest>,
|
||||
event_rx: mpsc::UnboundedReceiver<ProxyEvent>,
|
||||
query_rx: mpsc::UnboundedReceiver<QueryRequest>,
|
||||
) {
|
||||
tokio::spawn(async move {
|
||||
log::info!(target: "app", "事件驱动代理管理器启动");
|
||||
logging!(info, Type::Network, "事件驱动代理管理器启动");
|
||||
|
||||
// 将 mpsc 接收器包装成 Stream,避免每次循环创建 future
|
||||
let mut event_stream = UnboundedReceiverStream::new(event_rx);
|
||||
let mut query_stream = UnboundedReceiverStream::new(query_rx);
|
||||
|
||||
// 初始化定时器,用于周期性检查代理设置
|
||||
let config = Self::get_proxy_config().await;
|
||||
let mut guard_interval = tokio::time::interval(Duration::from_secs(config.guard_duration));
|
||||
// 防止首次立即触发
|
||||
guard_interval.tick().await;
|
||||
|
||||
loop {
|
||||
tokio::select! {
|
||||
event = event_rx.recv() => {
|
||||
match event {
|
||||
Some(event) => {
|
||||
log::debug!(target: "app", "处理代理事件: {event:?}");
|
||||
Some(event) = event_stream.next() => {
|
||||
logging!(debug, Type::Network, "处理代理事件: {event:?}");
|
||||
let event_clone = event.clone(); // 保存一份副本用于后续检查
|
||||
Self::handle_event(&state, event).await;
|
||||
}
|
||||
None => {
|
||||
log::info!(target: "app", "事件通道关闭,代理管理器停止");
|
||||
break;
|
||||
|
||||
// 检查是否是配置变更事件,如果是,则可能需要更新定时器
|
||||
if matches!(event_clone, ProxyEvent::ConfigChanged | ProxyEvent::AppStarted) {
|
||||
let new_config = Self::get_proxy_config().await;
|
||||
// 重新设置定时器间隔
|
||||
guard_interval = tokio::time::interval(Duration::from_secs(new_config.guard_duration));
|
||||
// 防止首次立即触发
|
||||
guard_interval.tick().await;
|
||||
}
|
||||
}
|
||||
}
|
||||
query = query_rx.recv() => {
|
||||
match query {
|
||||
Some(query) => {
|
||||
Some(query) = query_stream.next() => {
|
||||
let result = Self::handle_query(&state).await;
|
||||
let _ = query.response_tx.send(result);
|
||||
}
|
||||
None => {
|
||||
log::info!(target: "app", "查询通道关闭");
|
||||
_ = guard_interval.tick() => {
|
||||
// 定时检查代理设置
|
||||
let config = Self::get_proxy_config().await;
|
||||
if config.guard_enabled && config.sys_enabled {
|
||||
logging!(debug, Type::Network, "定时检查代理设置");
|
||||
Self::check_and_restore_proxy(&state).await;
|
||||
}
|
||||
}
|
||||
else => {
|
||||
// 两个通道都关闭时退出
|
||||
logging!(info, Type::Network, "事件或查询通道关闭,代理管理器停止");
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async fn handle_event(state: &Arc<RwLock<ProxyState>>, event: ProxyEvent) {
|
||||
match event {
|
||||
ProxyEvent::ConfigChanged | ProxyEvent::ForceCheck => {
|
||||
ProxyEvent::ConfigChanged => {
|
||||
Self::update_proxy_config(state).await;
|
||||
}
|
||||
ProxyEvent::EnableProxy => {
|
||||
Self::enable_system_proxy(state).await;
|
||||
}
|
||||
ProxyEvent::DisableProxy => {
|
||||
Self::disable_system_proxy(state).await;
|
||||
}
|
||||
ProxyEvent::SwitchToPac => {
|
||||
Self::switch_proxy_mode(state, true).await;
|
||||
}
|
||||
ProxyEvent::SwitchToHttp => {
|
||||
Self::switch_proxy_mode(state, false).await;
|
||||
}
|
||||
ProxyEvent::AppStarted => {
|
||||
Self::initialize_proxy_state(state).await;
|
||||
}
|
||||
ProxyEvent::AppStopping => {
|
||||
log::info!(target: "app", "清理代理状态");
|
||||
logging!(info, Type::Network, "清理代理状态");
|
||||
Self::update_state_timestamp(state, |s| {
|
||||
s.sys_enabled = false;
|
||||
s.pac_enabled = false;
|
||||
s.is_healthy = false;
|
||||
})
|
||||
.await;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -240,15 +218,16 @@ impl EventDrivenProxyManager {
|
||||
|
||||
Self::update_state_timestamp(state, |s| {
|
||||
s.auto_proxy = auto_proxy.clone();
|
||||
});
|
||||
})
|
||||
.await;
|
||||
|
||||
auto_proxy
|
||||
}
|
||||
|
||||
async fn initialize_proxy_state(state: &Arc<RwLock<ProxyState>>) {
|
||||
log::info!(target: "app", "初始化代理状态");
|
||||
logging!(info, Type::Network, "初始化代理状态");
|
||||
|
||||
let config = Self::get_proxy_config();
|
||||
let config = Self::get_proxy_config().await;
|
||||
let auto_proxy = Self::get_auto_proxy_with_timeout().await;
|
||||
let sys_proxy = Self::get_sys_proxy_with_timeout().await;
|
||||
|
||||
@@ -258,20 +237,28 @@ impl EventDrivenProxyManager {
|
||||
s.auto_proxy = auto_proxy;
|
||||
s.sys_proxy = sys_proxy;
|
||||
s.is_healthy = true;
|
||||
});
|
||||
})
|
||||
.await;
|
||||
|
||||
log::info!(target: "app", "代理状态初始化完成: sys={}, pac={}", config.sys_enabled, config.pac_enabled);
|
||||
logging!(
|
||||
info,
|
||||
Type::Network,
|
||||
"代理状态初始化完成: sys={}, pac={}",
|
||||
config.sys_enabled,
|
||||
config.pac_enabled
|
||||
);
|
||||
}
|
||||
|
||||
async fn update_proxy_config(state: &Arc<RwLock<ProxyState>>) {
|
||||
log::debug!(target: "app", "更新代理配置");
|
||||
logging!(debug, Type::Network, "更新代理配置");
|
||||
|
||||
let config = Self::get_proxy_config();
|
||||
let config = Self::get_proxy_config().await;
|
||||
|
||||
Self::update_state_timestamp(state, |s| {
|
||||
s.sys_enabled = config.sys_enabled;
|
||||
s.pac_enabled = config.pac_enabled;
|
||||
});
|
||||
})
|
||||
.await;
|
||||
|
||||
if config.guard_enabled && config.sys_enabled {
|
||||
Self::check_and_restore_proxy(state).await;
|
||||
@@ -279,8 +266,12 @@ impl EventDrivenProxyManager {
|
||||
}
|
||||
|
||||
async fn check_and_restore_proxy(state: &Arc<RwLock<ProxyState>>) {
|
||||
if handle::Handle::global().is_exiting() {
|
||||
logging!(debug, Type::Network, "应用正在退出,跳过系统代理守卫检查");
|
||||
return;
|
||||
}
|
||||
let (sys_enabled, pac_enabled) = {
|
||||
let s = state.read();
|
||||
let s = state.read().await;
|
||||
(s.sys_enabled, s.pac_enabled)
|
||||
};
|
||||
|
||||
@@ -288,7 +279,7 @@ impl EventDrivenProxyManager {
|
||||
return;
|
||||
}
|
||||
|
||||
log::debug!(target: "app", "检查代理状态");
|
||||
logging!(debug, Type::Network, "检查代理状态");
|
||||
|
||||
if pac_enabled {
|
||||
Self::check_and_restore_pac_proxy(state).await;
|
||||
@@ -298,16 +289,24 @@ impl EventDrivenProxyManager {
|
||||
}
|
||||
|
||||
async fn check_and_restore_pac_proxy(state: &Arc<RwLock<ProxyState>>) {
|
||||
if handle::Handle::global().is_exiting() {
|
||||
logging!(debug, Type::Network, "应用正在退出,跳过PAC代理恢复检查");
|
||||
return;
|
||||
}
|
||||
|
||||
let current = Self::get_auto_proxy_with_timeout().await;
|
||||
let expected = Self::get_expected_pac_config();
|
||||
let expected = Self::get_expected_pac_config().await;
|
||||
|
||||
Self::update_state_timestamp(state, |s| {
|
||||
s.auto_proxy = current.clone();
|
||||
});
|
||||
})
|
||||
.await;
|
||||
|
||||
if !current.enable || current.url != expected.url {
|
||||
log::info!(target: "app", "PAC代理设置异常,正在恢复...");
|
||||
Self::restore_pac_proxy(&expected.url).await;
|
||||
logging!(info, Type::Network, "PAC代理设置异常,正在恢复...");
|
||||
if let Err(e) = Self::restore_pac_proxy(&expected.url).await {
|
||||
logging!(error, Type::Network, "恢复PAC代理失败: {}", e);
|
||||
}
|
||||
|
||||
sleep(Duration::from_millis(500)).await;
|
||||
let restored = Self::get_auto_proxy_with_timeout().await;
|
||||
@@ -315,21 +314,30 @@ impl EventDrivenProxyManager {
|
||||
Self::update_state_timestamp(state, |s| {
|
||||
s.is_healthy = restored.enable && restored.url == expected.url;
|
||||
s.auto_proxy = restored;
|
||||
});
|
||||
})
|
||||
.await;
|
||||
}
|
||||
}
|
||||
|
||||
async fn check_and_restore_sys_proxy(state: &Arc<RwLock<ProxyState>>) {
|
||||
if handle::Handle::global().is_exiting() {
|
||||
logging!(debug, Type::Network, "应用正在退出,跳过系统代理恢复检查");
|
||||
return;
|
||||
}
|
||||
|
||||
let current = Self::get_sys_proxy_with_timeout().await;
|
||||
let expected = Self::get_expected_sys_proxy();
|
||||
let expected = Self::get_expected_sys_proxy().await;
|
||||
|
||||
Self::update_state_timestamp(state, |s| {
|
||||
s.sys_proxy = current.clone();
|
||||
});
|
||||
})
|
||||
.await;
|
||||
|
||||
if !current.enable || current.host != expected.host || current.port != expected.port {
|
||||
log::info!(target: "app", "系统代理设置异常,正在恢复...");
|
||||
Self::restore_sys_proxy(&expected).await;
|
||||
logging!(info, Type::Network, "系统代理设置异常,正在恢复...");
|
||||
if let Err(e) = Self::restore_sys_proxy(&expected).await {
|
||||
logging!(error, Type::Network, "恢复系统代理失败: {}", e);
|
||||
}
|
||||
|
||||
sleep(Duration::from_millis(500)).await;
|
||||
let restored = Self::get_sys_proxy_with_timeout().await;
|
||||
@@ -339,60 +347,11 @@ impl EventDrivenProxyManager {
|
||||
&& restored.host == expected.host
|
||||
&& restored.port == expected.port;
|
||||
s.sys_proxy = restored;
|
||||
});
|
||||
})
|
||||
.await;
|
||||
}
|
||||
}
|
||||
|
||||
async fn enable_system_proxy(state: &Arc<RwLock<ProxyState>>) {
|
||||
log::info!(target: "app", "启用系统代理");
|
||||
|
||||
let pac_enabled = state.read().pac_enabled;
|
||||
|
||||
if pac_enabled {
|
||||
let expected = Self::get_expected_pac_config();
|
||||
Self::restore_pac_proxy(&expected.url).await;
|
||||
} else {
|
||||
let expected = Self::get_expected_sys_proxy();
|
||||
Self::restore_sys_proxy(&expected).await;
|
||||
}
|
||||
|
||||
Self::check_and_restore_proxy(state).await;
|
||||
}
|
||||
|
||||
async fn disable_system_proxy(_state: &Arc<RwLock<ProxyState>>) {
|
||||
log::info!(target: "app", "禁用系统代理");
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
{
|
||||
let disabled_sys = Sysproxy::default();
|
||||
let disabled_auto = Autoproxy::default();
|
||||
|
||||
logging_error!(Type::System, true, disabled_auto.set_auto_proxy());
|
||||
logging_error!(Type::System, true, disabled_sys.set_system_proxy());
|
||||
}
|
||||
}
|
||||
|
||||
async fn switch_proxy_mode(state: &Arc<RwLock<ProxyState>>, to_pac: bool) {
|
||||
log::info!(target: "app", "切换到{}模式", if to_pac { "PAC" } else { "HTTP代理" });
|
||||
|
||||
if to_pac {
|
||||
let disabled_sys = Sysproxy::default();
|
||||
logging_error!(Type::System, true, disabled_sys.set_system_proxy());
|
||||
|
||||
let expected = Self::get_expected_pac_config();
|
||||
Self::restore_pac_proxy(&expected.url).await;
|
||||
} else {
|
||||
let disabled_auto = Autoproxy::default();
|
||||
logging_error!(Type::System, true, disabled_auto.set_auto_proxy());
|
||||
|
||||
let expected = Self::get_expected_sys_proxy();
|
||||
Self::restore_sys_proxy(&expected).await;
|
||||
}
|
||||
|
||||
Self::update_state_timestamp(state, |s| s.pac_enabled = to_pac);
|
||||
Self::check_and_restore_proxy(state).await;
|
||||
}
|
||||
|
||||
async fn get_auto_proxy_with_timeout() -> Autoproxy {
|
||||
let async_proxy = AsyncProxyQuery::get_auto_proxy().await;
|
||||
|
||||
@@ -416,116 +375,149 @@ impl EventDrivenProxyManager {
|
||||
}
|
||||
|
||||
// 统一的状态更新方法
|
||||
fn update_state_timestamp<F>(state: &Arc<RwLock<ProxyState>>, update_fn: F)
|
||||
async fn update_state_timestamp<F>(state: &Arc<RwLock<ProxyState>>, update_fn: F)
|
||||
where
|
||||
F: FnOnce(&mut ProxyState),
|
||||
{
|
||||
let mut state_guard = state.write();
|
||||
let mut state_guard = state.write().await;
|
||||
update_fn(&mut state_guard);
|
||||
state_guard.last_updated = std::time::Instant::now();
|
||||
}
|
||||
|
||||
fn get_proxy_config() -> ProxyConfig {
|
||||
let verge_config = Config::verge();
|
||||
let verge = verge_config.latest();
|
||||
async fn get_proxy_config() -> ProxyConfig {
|
||||
let (sys_enabled, pac_enabled, guard_enabled, guard_duration) = {
|
||||
let verge_config = Config::verge().await;
|
||||
let verge = verge_config.latest_ref();
|
||||
(
|
||||
verge.enable_system_proxy.unwrap_or(false),
|
||||
verge.proxy_auto_config.unwrap_or(false),
|
||||
verge.enable_proxy_guard.unwrap_or(false),
|
||||
verge.proxy_guard_duration.unwrap_or(30), // 默认30秒
|
||||
)
|
||||
};
|
||||
ProxyConfig {
|
||||
sys_enabled: verge.enable_system_proxy.unwrap_or(false),
|
||||
pac_enabled: verge.proxy_auto_config.unwrap_or(false),
|
||||
guard_enabled: verge.enable_proxy_guard.unwrap_or(false),
|
||||
sys_enabled,
|
||||
pac_enabled,
|
||||
guard_enabled,
|
||||
guard_duration,
|
||||
}
|
||||
}
|
||||
|
||||
fn get_expected_pac_config() -> Autoproxy {
|
||||
let verge_config = Config::verge();
|
||||
let verge = verge_config.latest();
|
||||
let (proxy_host, pac_port) = (
|
||||
async fn get_expected_pac_config() -> Autoproxy {
|
||||
let proxy_host = {
|
||||
let verge_config = Config::verge().await;
|
||||
let verge = verge_config.latest_ref();
|
||||
verge
|
||||
.proxy_host
|
||||
.clone()
|
||||
.unwrap_or_else(|| "127.0.0.1".to_string()),
|
||||
IVerge::get_singleton_port(),
|
||||
);
|
||||
.unwrap_or_else(|| "127.0.0.1".into())
|
||||
};
|
||||
let pac_port = IVerge::get_singleton_port();
|
||||
Autoproxy {
|
||||
enable: true,
|
||||
url: format!("http://{proxy_host}:{pac_port}/commands/pac"),
|
||||
}
|
||||
}
|
||||
|
||||
fn get_expected_sys_proxy() -> Sysproxy {
|
||||
let verge_config = Config::verge();
|
||||
let verge = verge_config.latest();
|
||||
let port = verge
|
||||
.verge_mixed_port
|
||||
.unwrap_or(Config::clash().data().get_mixed_port());
|
||||
let proxy_host = verge
|
||||
.proxy_host
|
||||
.clone()
|
||||
.unwrap_or_else(|| "127.0.0.1".to_string());
|
||||
async fn get_expected_sys_proxy() -> Sysproxy {
|
||||
use crate::constants::network;
|
||||
|
||||
let (verge_mixed_port, proxy_host) = {
|
||||
let verge_config = Config::verge().await;
|
||||
let verge_ref = verge_config.latest_ref();
|
||||
(verge_ref.verge_mixed_port, verge_ref.proxy_host.clone())
|
||||
};
|
||||
|
||||
let default_port = {
|
||||
let clash_config = Config::clash().await;
|
||||
clash_config.latest_ref().get_mixed_port()
|
||||
};
|
||||
|
||||
let port = verge_mixed_port.unwrap_or(default_port);
|
||||
let host = proxy_host
|
||||
.unwrap_or_else(|| network::DEFAULT_PROXY_HOST.into())
|
||||
.into();
|
||||
|
||||
Sysproxy {
|
||||
enable: true,
|
||||
host: proxy_host,
|
||||
host,
|
||||
port,
|
||||
bypass: Self::get_bypass_config(),
|
||||
bypass: Self::get_bypass_config().await.into(),
|
||||
}
|
||||
}
|
||||
|
||||
fn get_bypass_config() -> String {
|
||||
let verge_config = Config::verge();
|
||||
let verge = verge_config.latest();
|
||||
async fn get_bypass_config() -> String {
|
||||
use crate::constants::bypass;
|
||||
|
||||
let verge_config = Config::verge().await;
|
||||
let verge = verge_config.latest_ref();
|
||||
let use_default = verge.use_default_bypass.unwrap_or(true);
|
||||
let custom_bypass = verge.system_proxy_bypass.clone().unwrap_or_default();
|
||||
let custom = verge.system_proxy_bypass.as_deref().unwrap_or("");
|
||||
|
||||
match (use_default, custom.is_empty()) {
|
||||
(_, true) => bypass::DEFAULT.into(),
|
||||
(true, false) => format!("{},{}", bypass::DEFAULT, custom).into(),
|
||||
(false, false) => custom.into(),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
let default_bypass = "localhost;127.*;192.168.*;10.*;172.16.*;172.17.*;172.18.*;172.19.*;172.20.*;172.21.*;172.22.*;172.23.*;172.24.*;172.25.*;172.26.*;172.27.*;172.28.*;172.29.*;172.30.*;172.31.*;<local>";
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
let default_bypass =
|
||||
"localhost,127.0.0.1,192.168.0.0/16,10.0.0.0/8,172.16.0.0/12,172.29.0.0/16,::1";
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
let default_bypass = "127.0.0.1,192.168.0.0/16,10.0.0.0/8,172.16.0.0/12,172.29.0.0/16,localhost,*.local,*.crashlytics.com,<local>";
|
||||
|
||||
if custom_bypass.is_empty() {
|
||||
default_bypass.to_string()
|
||||
} else if use_default {
|
||||
format!("{default_bypass},{custom_bypass}")
|
||||
} else {
|
||||
custom_bypass
|
||||
async fn restore_pac_proxy(expected_url: &str) -> Result<(), anyhow::Error> {
|
||||
if handle::Handle::global().is_exiting() {
|
||||
logging!(debug, Type::Network, "应用正在退出,跳过PAC代理恢复");
|
||||
return Ok(());
|
||||
}
|
||||
Self::execute_sysproxy_command(&["pac", expected_url]).await
|
||||
}
|
||||
|
||||
async fn restore_pac_proxy(expected_url: &str) {
|
||||
#[allow(clippy::unused_async)]
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
async fn restore_pac_proxy(expected_url: &str) -> Result<(), anyhow::Error> {
|
||||
{
|
||||
let new_autoproxy = Autoproxy {
|
||||
enable: true,
|
||||
url: expected_url.to_string(),
|
||||
};
|
||||
logging_error!(Type::System, true, new_autoproxy.set_auto_proxy());
|
||||
// logging_error!(Type::System, true, new_autoproxy.set_auto_proxy());
|
||||
new_autoproxy
|
||||
.set_auto_proxy()
|
||||
.map_err(|e| anyhow::anyhow!("Failed to set auto proxy: {}", e))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
Self::execute_sysproxy_command(&["pac", expected_url]).await;
|
||||
async fn restore_sys_proxy(expected: &Sysproxy) -> Result<(), anyhow::Error> {
|
||||
if handle::Handle::global().is_exiting() {
|
||||
logging!(debug, Type::Network, "应用正在退出,跳过系统代理恢复");
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
|
||||
async fn restore_sys_proxy(expected: &Sysproxy) {
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
{
|
||||
logging_error!(Type::System, true, expected.set_system_proxy());
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
let address = format!("{}:{}", expected.host, expected.port);
|
||||
Self::execute_sysproxy_command(&["global", &address, &expected.bypass]).await;
|
||||
Self::execute_sysproxy_command(&["global", &address, &expected.bypass]).await
|
||||
}
|
||||
|
||||
#[allow(clippy::unused_async)]
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
async fn restore_sys_proxy(expected: &Sysproxy) -> Result<(), anyhow::Error> {
|
||||
{
|
||||
// logging_error!(Type::System, true, expected.set_system_proxy());
|
||||
expected
|
||||
.set_system_proxy()
|
||||
.map_err(|e| anyhow::anyhow!("Failed to set system proxy: {}", e))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
async fn execute_sysproxy_command(args: &[&str]) {
|
||||
async fn execute_sysproxy_command(args: &[&str]) -> Result<(), anyhow::Error> {
|
||||
if handle::Handle::global().is_exiting() {
|
||||
logging!(
|
||||
debug,
|
||||
Type::Network,
|
||||
"应用正在退出,取消调用 sysproxy.exe,参数: {:?}",
|
||||
args
|
||||
);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
use crate::utils::dirs;
|
||||
#[allow(unused_imports)] // creation_flags必须
|
||||
use std::os::windows::process::CommandExt;
|
||||
@@ -534,38 +526,23 @@ impl EventDrivenProxyManager {
|
||||
let binary_path = match dirs::service_path() {
|
||||
Ok(path) => path,
|
||||
Err(e) => {
|
||||
log::error!(target: "app", "获取服务路径失败: {}", e);
|
||||
return;
|
||||
logging!(error, Type::Network, "获取服务路径失败: {e}");
|
||||
return Err(e);
|
||||
}
|
||||
};
|
||||
|
||||
let sysproxy_exe = binary_path.with_file_name("sysproxy.exe");
|
||||
if !sysproxy_exe.exists() {
|
||||
log::error!(target: "app", "sysproxy.exe 不存在");
|
||||
return;
|
||||
logging!(error, Type::Network, "sysproxy.exe 不存在");
|
||||
}
|
||||
anyhow::ensure!(sysproxy_exe.exists(), "sysproxy.exe does not exist");
|
||||
|
||||
let output = Command::new(sysproxy_exe)
|
||||
let _output = Command::new(sysproxy_exe)
|
||||
.args(args)
|
||||
.creation_flags(0x08000000) // CREATE_NO_WINDOW - 隐藏窗口
|
||||
.output()
|
||||
.await;
|
||||
.await?;
|
||||
|
||||
match output {
|
||||
Ok(output) => {
|
||||
if !output.status.success() {
|
||||
log::error!(target: "app", "执行sysproxy命令失败: {:?}", args);
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
if !stderr.is_empty() {
|
||||
log::error!(target: "app", "sysproxy错误输出: {}", stderr);
|
||||
}
|
||||
} else {
|
||||
log::debug!(target: "app", "成功执行sysproxy命令: {:?}", args);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!(target: "app", "执行sysproxy命令出错: {}", e);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,269 +1,24 @@
|
||||
use once_cell::sync::OnceCell;
|
||||
use crate::{APP_HANDLE, constants::timing, singleton};
|
||||
use parking_lot::RwLock;
|
||||
use std::{
|
||||
sync::{
|
||||
atomic::{AtomicU64, Ordering},
|
||||
mpsc, Arc,
|
||||
},
|
||||
thread,
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
use tauri::{AppHandle, Emitter, Manager, WebviewWindow};
|
||||
use smartstring::alias::String;
|
||||
use std::{sync::Arc, thread};
|
||||
use tauri::{AppHandle, Manager, WebviewWindow};
|
||||
use tauri_plugin_mihomo::{Mihomo, MihomoExt};
|
||||
use tokio::sync::RwLockReadGuard;
|
||||
|
||||
use crate::{logging, utils::logging::Type};
|
||||
|
||||
/// 不同类型的前端通知
|
||||
#[derive(Debug, Clone)]
|
||||
enum FrontendEvent {
|
||||
RefreshClash,
|
||||
RefreshVerge,
|
||||
NoticeMessage { status: String, message: String },
|
||||
ProfileChanged { current_profile_id: String },
|
||||
TimerUpdated { profile_index: String },
|
||||
StartupCompleted,
|
||||
ProfileUpdateStarted { uid: String },
|
||||
ProfileUpdateCompleted { uid: String },
|
||||
}
|
||||
|
||||
/// 事件发送统计和监控
|
||||
#[derive(Debug, Default)]
|
||||
struct EventStats {
|
||||
total_sent: AtomicU64,
|
||||
total_errors: AtomicU64,
|
||||
last_error_time: RwLock<Option<Instant>>,
|
||||
}
|
||||
|
||||
/// 存储启动期间的错误消息
|
||||
#[derive(Debug, Clone)]
|
||||
struct ErrorMessage {
|
||||
status: String,
|
||||
message: String,
|
||||
}
|
||||
|
||||
/// 全局前端通知系统
|
||||
#[derive(Debug)]
|
||||
struct NotificationSystem {
|
||||
sender: Option<mpsc::Sender<FrontendEvent>>,
|
||||
worker_handle: Option<thread::JoinHandle<()>>,
|
||||
is_running: bool,
|
||||
stats: EventStats,
|
||||
last_emit_time: RwLock<Instant>,
|
||||
/// 当通知系统失败超过阈值时,进入紧急模式
|
||||
emergency_mode: RwLock<bool>,
|
||||
}
|
||||
|
||||
impl Default for NotificationSystem {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl NotificationSystem {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
sender: None,
|
||||
worker_handle: None,
|
||||
is_running: false,
|
||||
stats: EventStats::default(),
|
||||
last_emit_time: RwLock::new(Instant::now()),
|
||||
emergency_mode: RwLock::new(false),
|
||||
}
|
||||
}
|
||||
|
||||
/// 启动通知处理线程
|
||||
fn start(&mut self) {
|
||||
if self.is_running {
|
||||
return;
|
||||
}
|
||||
|
||||
let (tx, rx) = mpsc::channel();
|
||||
self.sender = Some(tx);
|
||||
self.is_running = true;
|
||||
|
||||
*self.last_emit_time.write() = Instant::now();
|
||||
|
||||
self.worker_handle = Some(
|
||||
thread::Builder::new()
|
||||
.name("frontend-notifier".into())
|
||||
.spawn(move || {
|
||||
let handle = Handle::global();
|
||||
|
||||
while !handle.is_exiting() {
|
||||
match rx.recv_timeout(Duration::from_millis(100)) {
|
||||
Ok(event) => {
|
||||
let system_guard = handle.notification_system.read();
|
||||
if system_guard.as_ref().is_none() {
|
||||
log::warn!("NotificationSystem not found in handle while processing event.");
|
||||
continue;
|
||||
}
|
||||
let system = system_guard.as_ref().unwrap();
|
||||
|
||||
let is_emergency = *system.emergency_mode.read();
|
||||
|
||||
if is_emergency {
|
||||
if let FrontendEvent::NoticeMessage { ref status, .. } = event {
|
||||
if status == "info" {
|
||||
log::warn!(
|
||||
"Emergency mode active, skipping info message"
|
||||
);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(window) = handle.get_window() {
|
||||
*system.last_emit_time.write() = Instant::now();
|
||||
|
||||
let (event_name_str, payload_result) = match event {
|
||||
FrontendEvent::RefreshClash => {
|
||||
("verge://refresh-clash-config", Ok(serde_json::json!("yes")))
|
||||
}
|
||||
FrontendEvent::RefreshVerge => {
|
||||
("verge://refresh-verge-config", Ok(serde_json::json!("yes")))
|
||||
}
|
||||
FrontendEvent::NoticeMessage { status, message } => {
|
||||
match serde_json::to_value((status, message)) {
|
||||
Ok(p) => ("verge://notice-message", Ok(p)),
|
||||
Err(e) => {
|
||||
log::error!("Failed to serialize NoticeMessage payload: {e}");
|
||||
("verge://notice-message", Err(e))
|
||||
}
|
||||
}
|
||||
}
|
||||
FrontendEvent::ProfileChanged { current_profile_id } => {
|
||||
("profile-changed", Ok(serde_json::json!(current_profile_id)))
|
||||
}
|
||||
FrontendEvent::TimerUpdated { profile_index } => {
|
||||
("verge://timer-updated", Ok(serde_json::json!(profile_index)))
|
||||
}
|
||||
FrontendEvent::StartupCompleted => {
|
||||
("verge://startup-completed", Ok(serde_json::json!(null)))
|
||||
}
|
||||
FrontendEvent::ProfileUpdateStarted { uid } => {
|
||||
("profile-update-started", Ok(serde_json::json!({ "uid": uid })))
|
||||
}
|
||||
FrontendEvent::ProfileUpdateCompleted { uid } => {
|
||||
("profile-update-completed", Ok(serde_json::json!({ "uid": uid })))
|
||||
}
|
||||
};
|
||||
|
||||
if let Ok(payload) = payload_result {
|
||||
match window.emit(event_name_str, payload) {
|
||||
Ok(_) => {
|
||||
system.stats.total_sent.fetch_add(1, Ordering::SeqCst);
|
||||
// 记录成功发送的事件
|
||||
if log::log_enabled!(log::Level::Debug) {
|
||||
log::debug!("Successfully emitted event: {event_name_str}");
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
log::warn!("Failed to emit event {event_name_str}: {e}");
|
||||
system.stats.total_errors.fetch_add(1, Ordering::SeqCst);
|
||||
*system.stats.last_error_time.write() = Some(Instant::now());
|
||||
|
||||
let errors = system.stats.total_errors.load(Ordering::SeqCst);
|
||||
const EMIT_ERROR_THRESHOLD: u64 = 10;
|
||||
if errors > EMIT_ERROR_THRESHOLD && !*system.emergency_mode.read() {
|
||||
log::warn!(
|
||||
"Reached {EMIT_ERROR_THRESHOLD} emit errors, entering emergency mode"
|
||||
);
|
||||
*system.emergency_mode.write() = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
system.stats.total_errors.fetch_add(1, Ordering::SeqCst);
|
||||
*system.stats.last_error_time.write() = Some(Instant::now());
|
||||
log::warn!("Skipped emitting event due to payload serialization error for {event_name_str}");
|
||||
}
|
||||
} else {
|
||||
log::warn!("No window found, skipping event emit.");
|
||||
}
|
||||
thread::sleep(Duration::from_millis(20));
|
||||
}
|
||||
Err(mpsc::RecvTimeoutError::Timeout) => {
|
||||
continue;
|
||||
}
|
||||
Err(mpsc::RecvTimeoutError::Disconnected) => {
|
||||
log::info!(
|
||||
"Notification channel disconnected, exiting worker thread"
|
||||
);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
log::info!("Notification worker thread exiting");
|
||||
})
|
||||
.expect("Failed to start notification worker thread"),
|
||||
);
|
||||
}
|
||||
|
||||
/// 发送事件到队列
|
||||
fn send_event(&self, event: FrontendEvent) -> bool {
|
||||
if *self.emergency_mode.read() {
|
||||
if let FrontendEvent::NoticeMessage { ref status, .. } = event {
|
||||
if status == "info" {
|
||||
log::info!("Skipping info message in emergency mode");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(sender) = &self.sender {
|
||||
match sender.send(event) {
|
||||
Ok(_) => true,
|
||||
Err(e) => {
|
||||
log::warn!("Failed to send event to notification queue: {e:?}");
|
||||
self.stats.total_errors.fetch_add(1, Ordering::SeqCst);
|
||||
*self.stats.last_error_time.write() = Some(Instant::now());
|
||||
false
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log::warn!("Notification system not started, can't send event");
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
fn shutdown(&mut self) {
|
||||
log::info!("NotificationSystem shutdown initiated");
|
||||
self.is_running = false;
|
||||
|
||||
// 先关闭发送端,让接收端知道不会再有新消息
|
||||
if let Some(sender) = self.sender.take() {
|
||||
drop(sender);
|
||||
}
|
||||
|
||||
// 设置超时避免无限等待
|
||||
if let Some(handle) = self.worker_handle.take() {
|
||||
match handle.join() {
|
||||
Ok(_) => {
|
||||
log::info!("NotificationSystem worker thread joined successfully");
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("NotificationSystem worker thread join failed: {e:?}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
log::info!("NotificationSystem shutdown completed");
|
||||
}
|
||||
}
|
||||
use super::notification::{ErrorMessage, FrontendEvent, NotificationSystem};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Handle {
|
||||
pub app_handle: Arc<RwLock<Option<AppHandle>>>,
|
||||
pub is_exiting: Arc<RwLock<bool>>,
|
||||
is_exiting: Arc<RwLock<bool>>,
|
||||
startup_errors: Arc<RwLock<Vec<ErrorMessage>>>,
|
||||
startup_completed: Arc<RwLock<bool>>,
|
||||
notification_system: Arc<RwLock<Option<NotificationSystem>>>,
|
||||
pub(crate) notification_system: Arc<RwLock<Option<NotificationSystem>>>,
|
||||
}
|
||||
|
||||
impl Default for Handle {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
app_handle: Arc::new(RwLock::new(None)),
|
||||
is_exiting: Arc::new(RwLock::new(false)),
|
||||
startup_errors: Arc::new(RwLock::new(Vec::new())),
|
||||
startup_completed: Arc::new(RwLock::new(false)),
|
||||
@@ -272,35 +27,37 @@ impl Default for Handle {
|
||||
}
|
||||
}
|
||||
|
||||
singleton!(Handle, HANDLE);
|
||||
|
||||
impl Handle {
|
||||
pub fn global() -> &'static Handle {
|
||||
static HANDLE: OnceCell<Handle> = OnceCell::new();
|
||||
HANDLE.get_or_init(Handle::default)
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
|
||||
pub fn init(&self, app_handle: &AppHandle) {
|
||||
{
|
||||
let mut handle = self.app_handle.write();
|
||||
*handle = Some(app_handle.clone());
|
||||
pub fn init(&self) {
|
||||
if self.is_exiting() {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut system_opt = self.notification_system.write();
|
||||
if let Some(system) = system_opt.as_mut() {
|
||||
if let Some(system) = system_opt.as_mut()
|
||||
&& !system.is_running
|
||||
{
|
||||
system.start();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn app_handle(&self) -> Option<AppHandle> {
|
||||
self.app_handle.read().clone()
|
||||
pub fn app_handle() -> &'static AppHandle {
|
||||
#[allow(clippy::expect_used)]
|
||||
APP_HANDLE.get().expect("App handle not initialized")
|
||||
}
|
||||
|
||||
pub fn get_window(&self) -> Option<WebviewWindow> {
|
||||
let app_handle = self.app_handle()?;
|
||||
let window: Option<WebviewWindow> = app_handle.get_webview_window("main");
|
||||
if window.is_none() {
|
||||
log::debug!(target:"app", "main window not found");
|
||||
pub async fn mihomo() -> RwLockReadGuard<'static, Mihomo> {
|
||||
Self::app_handle().mihomo().read().await
|
||||
}
|
||||
window
|
||||
|
||||
pub fn get_window() -> Option<WebviewWindow> {
|
||||
Self::app_handle().get_webview_window("main")
|
||||
}
|
||||
|
||||
pub fn refresh_clash() {
|
||||
@@ -328,99 +85,29 @@ impl Handle {
|
||||
}
|
||||
|
||||
pub fn notify_profile_changed(profile_id: String) {
|
||||
let handle = Self::global();
|
||||
if handle.is_exiting() {
|
||||
return;
|
||||
}
|
||||
|
||||
let system_opt = handle.notification_system.read();
|
||||
if let Some(system) = system_opt.as_ref() {
|
||||
system.send_event(FrontendEvent::ProfileChanged {
|
||||
Self::send_event(FrontendEvent::ProfileChanged {
|
||||
current_profile_id: profile_id,
|
||||
});
|
||||
} else {
|
||||
log::warn!(
|
||||
"Notification system not initialized when trying to send ProfileChanged event."
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn notify_timer_updated(profile_index: String) {
|
||||
let handle = Self::global();
|
||||
if handle.is_exiting() {
|
||||
return;
|
||||
}
|
||||
|
||||
let system_opt = handle.notification_system.read();
|
||||
if let Some(system) = system_opt.as_ref() {
|
||||
system.send_event(FrontendEvent::TimerUpdated { profile_index });
|
||||
} else {
|
||||
log::warn!(
|
||||
"Notification system not initialized when trying to send TimerUpdated event."
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn notify_startup_completed() {
|
||||
let handle = Self::global();
|
||||
if handle.is_exiting() {
|
||||
return;
|
||||
}
|
||||
|
||||
let system_opt = handle.notification_system.read();
|
||||
if let Some(system) = system_opt.as_ref() {
|
||||
system.send_event(FrontendEvent::StartupCompleted);
|
||||
} else {
|
||||
log::warn!(
|
||||
"Notification system not initialized when trying to send StartupCompleted event."
|
||||
);
|
||||
}
|
||||
Self::send_event(FrontendEvent::TimerUpdated { profile_index });
|
||||
}
|
||||
|
||||
pub fn notify_profile_update_started(uid: String) {
|
||||
let handle = Self::global();
|
||||
if handle.is_exiting() {
|
||||
return;
|
||||
}
|
||||
|
||||
let system_opt = handle.notification_system.read();
|
||||
if let Some(system) = system_opt.as_ref() {
|
||||
system.send_event(FrontendEvent::ProfileUpdateStarted { uid });
|
||||
} else {
|
||||
log::warn!("Notification system not initialized when trying to send ProfileUpdateStarted event.");
|
||||
}
|
||||
Self::send_event(FrontendEvent::ProfileUpdateStarted { uid });
|
||||
}
|
||||
|
||||
pub fn notify_profile_update_completed(uid: String) {
|
||||
let handle = Self::global();
|
||||
if handle.is_exiting() {
|
||||
return;
|
||||
Self::send_event(FrontendEvent::ProfileUpdateCompleted { uid });
|
||||
}
|
||||
|
||||
let system_opt = handle.notification_system.read();
|
||||
if let Some(system) = system_opt.as_ref() {
|
||||
system.send_event(FrontendEvent::ProfileUpdateCompleted { uid });
|
||||
} else {
|
||||
log::warn!("Notification system not initialized when trying to send ProfileUpdateCompleted event.");
|
||||
}
|
||||
}
|
||||
|
||||
/// 通知前端显示消息队列
|
||||
pub fn notice_message<S: Into<String>, M: Into<String>>(status: S, msg: M) {
|
||||
let handle = Self::global();
|
||||
let status_str = status.into();
|
||||
let msg_str = msg.into();
|
||||
|
||||
if !*handle.startup_completed.read() {
|
||||
logging!(
|
||||
info,
|
||||
Type::Frontend,
|
||||
true,
|
||||
"启动过程中发现错误,加入消息队列: {} - {}",
|
||||
status_str,
|
||||
msg_str
|
||||
);
|
||||
|
||||
let mut errors = handle.startup_errors.write();
|
||||
errors.push(ErrorMessage {
|
||||
status: status_str,
|
||||
@@ -433,25 +120,29 @@ impl Handle {
|
||||
return;
|
||||
}
|
||||
|
||||
let system_opt = handle.notification_system.read();
|
||||
if let Some(system) = system_opt.as_ref() {
|
||||
system.send_event(FrontendEvent::NoticeMessage {
|
||||
Self::send_event(FrontendEvent::NoticeMessage {
|
||||
status: status_str,
|
||||
message: msg_str,
|
||||
});
|
||||
}
|
||||
|
||||
fn send_event(event: FrontendEvent) {
|
||||
let handle = Self::global();
|
||||
if handle.is_exiting() {
|
||||
return;
|
||||
}
|
||||
|
||||
let system_opt = handle.notification_system.read();
|
||||
if let Some(system) = system_opt.as_ref() {
|
||||
system.send_event(event);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn mark_startup_completed(&self) {
|
||||
{
|
||||
let mut completed = self.startup_completed.write();
|
||||
*completed = true;
|
||||
}
|
||||
|
||||
*self.startup_completed.write() = true;
|
||||
self.send_startup_errors();
|
||||
}
|
||||
|
||||
/// 发送启动时累积的所有错误消息
|
||||
fn send_startup_errors(&self) {
|
||||
let errors = {
|
||||
let mut errors = self.startup_errors.write();
|
||||
@@ -462,19 +153,10 @@ impl Handle {
|
||||
return;
|
||||
}
|
||||
|
||||
logging!(
|
||||
info,
|
||||
Type::Frontend,
|
||||
true,
|
||||
"发送{}条启动时累积的错误消息",
|
||||
errors.len()
|
||||
);
|
||||
|
||||
// 启动单独线程处理启动错误,避免阻塞主线程
|
||||
let thread_result = thread::Builder::new()
|
||||
let _ = thread::Builder::new()
|
||||
.name("startup-errors-sender".into())
|
||||
.spawn(move || {
|
||||
thread::sleep(Duration::from_secs(2));
|
||||
thread::sleep(timing::STARTUP_ERROR_DELAY);
|
||||
|
||||
let handle = Handle::global();
|
||||
if handle.is_exiting() {
|
||||
@@ -493,19 +175,14 @@ impl Handle {
|
||||
message: error.message,
|
||||
});
|
||||
|
||||
thread::sleep(Duration::from_millis(300));
|
||||
thread::sleep(timing::ERROR_BATCH_DELAY);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if let Err(e) = thread_result {
|
||||
log::error!("Failed to spawn startup errors thread: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_is_exiting(&self) {
|
||||
let mut is_exiting = self.is_exiting.write();
|
||||
*is_exiting = true;
|
||||
*self.is_exiting.write() = true;
|
||||
|
||||
let mut system_opt = self.notification_system.write();
|
||||
if let Some(system) = system_opt.as_mut() {
|
||||
@@ -517,3 +194,20 @@ impl Handle {
|
||||
*self.is_exiting.read()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
impl Handle {
|
||||
pub fn set_activation_policy(&self, policy: tauri::ActivationPolicy) -> Result<(), String> {
|
||||
Self::app_handle()
|
||||
.set_activation_policy(policy)
|
||||
.map_err(|e| e.to_string().into())
|
||||
}
|
||||
|
||||
pub fn set_activation_policy_regular(&self) {
|
||||
let _ = self.set_activation_policy(tauri::ActivationPolicy::Regular);
|
||||
}
|
||||
|
||||
pub fn set_activation_policy_accessory(&self) {
|
||||
let _ = self.set_activation_policy(tauri::ActivationPolicy::Accessory);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,36 +1,284 @@
|
||||
use crate::utils::notification::{notify_event, NotificationEvent};
|
||||
use crate::process::AsyncHandler;
|
||||
use crate::utils::notification::{NotificationEvent, notify_event};
|
||||
use crate::{
|
||||
config::Config, core::handle, feat, logging, logging_error,
|
||||
module::lightweight::entry_lightweight_mode, utils::logging::Type,
|
||||
config::Config, core::handle, feat, logging, module::lightweight::entry_lightweight_mode,
|
||||
singleton_with_logging, utils::logging::Type,
|
||||
};
|
||||
use anyhow::{bail, Result};
|
||||
use once_cell::sync::OnceCell;
|
||||
use anyhow::{Result, bail};
|
||||
use parking_lot::Mutex;
|
||||
use std::{collections::HashMap, sync::Arc};
|
||||
use tauri::Manager;
|
||||
use smartstring::alias::String;
|
||||
use std::{collections::HashMap, fmt, str::FromStr, sync::Arc};
|
||||
use tauri_plugin_global_shortcut::{Code, GlobalShortcutExt, ShortcutState};
|
||||
|
||||
/// Enum representing all available hotkey functions
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub enum HotkeyFunction {
|
||||
OpenOrCloseDashboard,
|
||||
ClashModeRule,
|
||||
ClashModeGlobal,
|
||||
ClashModeDirect,
|
||||
ToggleSystemProxy,
|
||||
ToggleTunMode,
|
||||
EntryLightweightMode,
|
||||
Quit,
|
||||
#[cfg(target_os = "macos")]
|
||||
Hide,
|
||||
}
|
||||
|
||||
impl fmt::Display for HotkeyFunction {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let s = match self {
|
||||
HotkeyFunction::OpenOrCloseDashboard => "open_or_close_dashboard",
|
||||
HotkeyFunction::ClashModeRule => "clash_mode_rule",
|
||||
HotkeyFunction::ClashModeGlobal => "clash_mode_global",
|
||||
HotkeyFunction::ClashModeDirect => "clash_mode_direct",
|
||||
HotkeyFunction::ToggleSystemProxy => "toggle_system_proxy",
|
||||
HotkeyFunction::ToggleTunMode => "toggle_tun_mode",
|
||||
HotkeyFunction::EntryLightweightMode => "entry_lightweight_mode",
|
||||
HotkeyFunction::Quit => "quit",
|
||||
#[cfg(target_os = "macos")]
|
||||
HotkeyFunction::Hide => "hide",
|
||||
};
|
||||
write!(f, "{s}")
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for HotkeyFunction {
|
||||
type Err = anyhow::Error;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
match s.trim() {
|
||||
"open_or_close_dashboard" => Ok(HotkeyFunction::OpenOrCloseDashboard),
|
||||
"clash_mode_rule" => Ok(HotkeyFunction::ClashModeRule),
|
||||
"clash_mode_global" => Ok(HotkeyFunction::ClashModeGlobal),
|
||||
"clash_mode_direct" => Ok(HotkeyFunction::ClashModeDirect),
|
||||
"toggle_system_proxy" => Ok(HotkeyFunction::ToggleSystemProxy),
|
||||
"toggle_tun_mode" => Ok(HotkeyFunction::ToggleTunMode),
|
||||
"entry_lightweight_mode" => Ok(HotkeyFunction::EntryLightweightMode),
|
||||
"quit" => Ok(HotkeyFunction::Quit),
|
||||
#[cfg(target_os = "macos")]
|
||||
"hide" => Ok(HotkeyFunction::Hide),
|
||||
_ => bail!("invalid hotkey function: {}", s),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
/// Enum representing predefined system hotkeys
|
||||
pub enum SystemHotkey {
|
||||
CmdQ,
|
||||
CmdW,
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
impl fmt::Display for SystemHotkey {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let s = match self {
|
||||
SystemHotkey::CmdQ => "CMD+Q",
|
||||
SystemHotkey::CmdW => "CMD+W",
|
||||
};
|
||||
write!(f, "{s}")
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
impl SystemHotkey {
|
||||
pub fn function(self) -> HotkeyFunction {
|
||||
match self {
|
||||
SystemHotkey::CmdQ => HotkeyFunction::Quit,
|
||||
SystemHotkey::CmdW => HotkeyFunction::Hide,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Hotkey {
|
||||
current: Arc<Mutex<Vec<String>>>,
|
||||
}
|
||||
|
||||
impl Hotkey {
|
||||
pub fn global() -> &'static Hotkey {
|
||||
static HOTKEY: OnceCell<Hotkey> = OnceCell::new();
|
||||
|
||||
HOTKEY.get_or_init(|| Hotkey {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
current: Arc::new(Mutex::new(Vec::new())),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub fn init(&self) -> Result<()> {
|
||||
let verge = Config::verge();
|
||||
let enable_global_hotkey = verge.latest().enable_global_hotkey.unwrap_or(true);
|
||||
/// Execute the function associated with a hotkey function enum
|
||||
fn execute_function(function: HotkeyFunction) {
|
||||
match function {
|
||||
HotkeyFunction::OpenOrCloseDashboard => {
|
||||
AsyncHandler::spawn(async move || {
|
||||
crate::feat::open_or_close_dashboard().await;
|
||||
notify_event(NotificationEvent::DashboardToggled).await;
|
||||
});
|
||||
}
|
||||
HotkeyFunction::ClashModeRule => {
|
||||
AsyncHandler::spawn(async move || {
|
||||
feat::change_clash_mode("rule".into()).await;
|
||||
notify_event(NotificationEvent::ClashModeChanged { mode: "Rule" }).await;
|
||||
});
|
||||
}
|
||||
HotkeyFunction::ClashModeGlobal => {
|
||||
AsyncHandler::spawn(async move || {
|
||||
feat::change_clash_mode("global".into()).await;
|
||||
notify_event(NotificationEvent::ClashModeChanged { mode: "Global" }).await;
|
||||
});
|
||||
}
|
||||
HotkeyFunction::ClashModeDirect => {
|
||||
AsyncHandler::spawn(async move || {
|
||||
feat::change_clash_mode("direct".into()).await;
|
||||
notify_event(NotificationEvent::ClashModeChanged { mode: "Direct" }).await;
|
||||
});
|
||||
}
|
||||
HotkeyFunction::ToggleSystemProxy => {
|
||||
AsyncHandler::spawn(async move || {
|
||||
feat::toggle_system_proxy().await;
|
||||
notify_event(NotificationEvent::SystemProxyToggled).await;
|
||||
});
|
||||
}
|
||||
HotkeyFunction::ToggleTunMode => {
|
||||
AsyncHandler::spawn(async move || {
|
||||
feat::toggle_tun_mode(None).await;
|
||||
notify_event(NotificationEvent::TunModeToggled).await;
|
||||
});
|
||||
}
|
||||
HotkeyFunction::EntryLightweightMode => {
|
||||
AsyncHandler::spawn(async move || {
|
||||
entry_lightweight_mode().await;
|
||||
notify_event(NotificationEvent::LightweightModeEntered).await;
|
||||
});
|
||||
}
|
||||
HotkeyFunction::Quit => {
|
||||
AsyncHandler::spawn(async move || {
|
||||
notify_event(NotificationEvent::AppQuit).await;
|
||||
feat::quit().await;
|
||||
});
|
||||
}
|
||||
#[cfg(target_os = "macos")]
|
||||
HotkeyFunction::Hide => {
|
||||
AsyncHandler::spawn(async move || {
|
||||
feat::hide().await;
|
||||
notify_event(NotificationEvent::AppHidden).await;
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
/// Register a system hotkey using enum
|
||||
pub async fn register_system_hotkey(&self, hotkey: SystemHotkey) -> Result<()> {
|
||||
let hotkey_str = hotkey.to_string();
|
||||
let function = hotkey.function();
|
||||
self.register_hotkey_with_function(&hotkey_str, function)
|
||||
.await
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
/// Unregister a system hotkey using enum
|
||||
pub fn unregister_system_hotkey(&self, hotkey: SystemHotkey) -> Result<()> {
|
||||
let hotkey_str = hotkey.to_string();
|
||||
self.unregister(&hotkey_str)
|
||||
}
|
||||
|
||||
/// Register a hotkey with function enum
|
||||
#[allow(clippy::unused_async)]
|
||||
pub async fn register_hotkey_with_function(
|
||||
&self,
|
||||
hotkey: &str,
|
||||
function: HotkeyFunction,
|
||||
) -> Result<()> {
|
||||
let app_handle = handle::Handle::app_handle();
|
||||
let manager = app_handle.global_shortcut();
|
||||
|
||||
logging!(
|
||||
debug,
|
||||
Type::Hotkey,
|
||||
"Attempting to register hotkey: {} for function: {}",
|
||||
hotkey,
|
||||
function
|
||||
);
|
||||
|
||||
if manager.is_registered(hotkey) {
|
||||
logging!(
|
||||
debug,
|
||||
Type::Hotkey,
|
||||
"Hotkey {} was already registered, unregistering first",
|
||||
hotkey
|
||||
);
|
||||
manager.unregister(hotkey)?;
|
||||
}
|
||||
|
||||
let is_quit = matches!(function, HotkeyFunction::Quit);
|
||||
|
||||
manager.on_shortcut(hotkey, move |_app_handle, hotkey_event, event| {
|
||||
let hotkey_event_owned = *hotkey_event;
|
||||
let event_owned = event;
|
||||
let function_owned = function;
|
||||
let is_quit_owned = is_quit;
|
||||
|
||||
AsyncHandler::spawn(move || async move {
|
||||
if event_owned.state == ShortcutState::Pressed {
|
||||
logging!(
|
||||
debug,
|
||||
Type::Hotkey,
|
||||
"Hotkey pressed: {:?}",
|
||||
hotkey_event_owned
|
||||
);
|
||||
|
||||
if hotkey_event_owned.key == Code::KeyQ && is_quit_owned {
|
||||
if let Some(window) = handle::Handle::get_window()
|
||||
&& window.is_focused().unwrap_or(false)
|
||||
{
|
||||
logging!(debug, Type::Hotkey, "Executing quit function");
|
||||
Self::execute_function(function_owned);
|
||||
}
|
||||
} else {
|
||||
logging!(debug, Type::Hotkey, "Executing function directly");
|
||||
|
||||
let is_enable_global_hotkey = Config::verge()
|
||||
.await
|
||||
.latest_ref()
|
||||
.enable_global_hotkey
|
||||
.unwrap_or(true);
|
||||
|
||||
if is_enable_global_hotkey {
|
||||
Self::execute_function(function_owned);
|
||||
} else {
|
||||
use crate::utils::window_manager::WindowManager;
|
||||
let is_visible = WindowManager::is_main_window_visible();
|
||||
let is_focused = WindowManager::is_main_window_focused();
|
||||
|
||||
if is_focused && is_visible {
|
||||
Self::execute_function(function_owned);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
})?;
|
||||
|
||||
logging!(
|
||||
debug,
|
||||
Type::Hotkey,
|
||||
"Successfully registered hotkey {} for {}",
|
||||
hotkey,
|
||||
function
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
// Use unified singleton macro
|
||||
singleton_with_logging!(Hotkey, INSTANCE, "Hotkey");
|
||||
|
||||
impl Hotkey {
|
||||
pub async fn init(&self) -> Result<()> {
|
||||
let verge = Config::verge().await;
|
||||
let enable_global_hotkey = verge.latest_ref().enable_global_hotkey.unwrap_or(true);
|
||||
|
||||
logging!(
|
||||
debug,
|
||||
Type::Hotkey,
|
||||
true,
|
||||
"Initializing global hotkeys: {}",
|
||||
enable_global_hotkey
|
||||
);
|
||||
@@ -39,11 +287,13 @@ impl Hotkey {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if let Some(hotkeys) = verge.latest().hotkeys.as_ref() {
|
||||
// Extract hotkeys data before async operations
|
||||
let hotkeys = verge.latest_ref().hotkeys.as_ref().cloned();
|
||||
|
||||
if let Some(hotkeys) = hotkeys {
|
||||
logging!(
|
||||
debug,
|
||||
Type::Hotkey,
|
||||
true,
|
||||
"Has {} hotkeys need to register",
|
||||
hotkeys.len()
|
||||
);
|
||||
@@ -58,16 +308,14 @@ impl Hotkey {
|
||||
logging!(
|
||||
debug,
|
||||
Type::Hotkey,
|
||||
true,
|
||||
"Registering hotkey: {} -> {}",
|
||||
key,
|
||||
func
|
||||
);
|
||||
if let Err(e) = self.register(key, func) {
|
||||
if let Err(e) = self.register(key, func).await {
|
||||
logging!(
|
||||
error,
|
||||
Type::Hotkey,
|
||||
true,
|
||||
"Failed to register hotkey {} -> {}: {:?}",
|
||||
key,
|
||||
func,
|
||||
@@ -89,7 +337,6 @@ impl Hotkey {
|
||||
logging!(
|
||||
error,
|
||||
Type::Hotkey,
|
||||
true,
|
||||
"Invalid hotkey configuration: `{}`:`{}`",
|
||||
key,
|
||||
func
|
||||
@@ -97,7 +344,7 @@ impl Hotkey {
|
||||
}
|
||||
}
|
||||
}
|
||||
self.current.lock().clone_from(hotkeys);
|
||||
self.current.lock().clone_from(&hotkeys);
|
||||
} else {
|
||||
logging!(debug, Type::Hotkey, "No hotkeys configured");
|
||||
}
|
||||
@@ -106,192 +353,30 @@ impl Hotkey {
|
||||
}
|
||||
|
||||
pub fn reset(&self) -> Result<()> {
|
||||
let app_handle = handle::Handle::global().app_handle().unwrap();
|
||||
let app_handle = handle::Handle::app_handle();
|
||||
let manager = app_handle.global_shortcut();
|
||||
manager.unregister_all()?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn register(&self, hotkey: &str, func: &str) -> Result<()> {
|
||||
let app_handle = handle::Handle::global().app_handle().unwrap();
|
||||
let manager = app_handle.global_shortcut();
|
||||
|
||||
logging!(
|
||||
debug,
|
||||
Type::Hotkey,
|
||||
"Attempting to register hotkey: {} for function: {}",
|
||||
hotkey,
|
||||
func
|
||||
);
|
||||
|
||||
if manager.is_registered(hotkey) {
|
||||
logging!(
|
||||
debug,
|
||||
Type::Hotkey,
|
||||
"Hotkey {} was already registered, unregistering first",
|
||||
hotkey
|
||||
);
|
||||
manager.unregister(hotkey)?;
|
||||
}
|
||||
|
||||
let app_handle_clone = app_handle.clone();
|
||||
let f: Box<dyn Fn() + Send + Sync> = match func.trim() {
|
||||
"open_or_close_dashboard" => {
|
||||
let app_handle = app_handle_clone.clone();
|
||||
Box::new(move || {
|
||||
logging!(
|
||||
debug,
|
||||
Type::Hotkey,
|
||||
true,
|
||||
"=== Hotkey Dashboard Window Operation Start ==="
|
||||
);
|
||||
|
||||
logging!(
|
||||
info,
|
||||
Type::Hotkey,
|
||||
true,
|
||||
"Using unified WindowManager for hotkey operation (bypass debounce)"
|
||||
);
|
||||
|
||||
crate::feat::open_or_close_dashboard_hotkey();
|
||||
|
||||
logging!(
|
||||
debug,
|
||||
Type::Hotkey,
|
||||
"=== Hotkey Dashboard Window Operation End ==="
|
||||
);
|
||||
notify_event(&app_handle, NotificationEvent::DashboardToggled);
|
||||
})
|
||||
}
|
||||
"clash_mode_rule" => {
|
||||
let app_handle = app_handle_clone.clone();
|
||||
Box::new(move || {
|
||||
feat::change_clash_mode("rule".into());
|
||||
notify_event(
|
||||
&app_handle,
|
||||
NotificationEvent::ClashModeChanged { mode: "Rule" },
|
||||
);
|
||||
})
|
||||
}
|
||||
"clash_mode_global" => {
|
||||
let app_handle = app_handle_clone.clone();
|
||||
Box::new(move || {
|
||||
feat::change_clash_mode("global".into());
|
||||
notify_event(
|
||||
&app_handle,
|
||||
NotificationEvent::ClashModeChanged { mode: "Global" },
|
||||
);
|
||||
})
|
||||
}
|
||||
"clash_mode_direct" => {
|
||||
let app_handle = app_handle_clone.clone();
|
||||
Box::new(move || {
|
||||
feat::change_clash_mode("direct".into());
|
||||
notify_event(
|
||||
&app_handle,
|
||||
NotificationEvent::ClashModeChanged { mode: "Direct" },
|
||||
);
|
||||
})
|
||||
}
|
||||
"toggle_system_proxy" => {
|
||||
let app_handle = app_handle_clone.clone();
|
||||
Box::new(move || {
|
||||
feat::toggle_system_proxy();
|
||||
notify_event(&app_handle, NotificationEvent::SystemProxyToggled);
|
||||
})
|
||||
}
|
||||
"toggle_tun_mode" => {
|
||||
let app_handle = app_handle_clone.clone();
|
||||
Box::new(move || {
|
||||
feat::toggle_tun_mode(None);
|
||||
notify_event(&app_handle, NotificationEvent::TunModeToggled);
|
||||
})
|
||||
}
|
||||
"entry_lightweight_mode" => {
|
||||
let app_handle = app_handle_clone.clone();
|
||||
Box::new(move || {
|
||||
entry_lightweight_mode();
|
||||
notify_event(&app_handle, NotificationEvent::LightweightModeEntered);
|
||||
})
|
||||
}
|
||||
"quit" => {
|
||||
let app_handle = app_handle_clone.clone();
|
||||
Box::new(move || {
|
||||
feat::quit();
|
||||
notify_event(&app_handle, NotificationEvent::AppQuit);
|
||||
})
|
||||
}
|
||||
#[cfg(target_os = "macos")]
|
||||
"hide" => {
|
||||
let app_handle = app_handle_clone.clone();
|
||||
Box::new(move || {
|
||||
feat::hide();
|
||||
notify_event(&app_handle, NotificationEvent::AppHidden);
|
||||
})
|
||||
}
|
||||
_ => {
|
||||
logging!(error, Type::Hotkey, "Invalid function: {}", func);
|
||||
bail!("invalid function \"{func}\"");
|
||||
}
|
||||
};
|
||||
|
||||
let is_quit = func.trim() == "quit";
|
||||
|
||||
let _ = manager.on_shortcut(hotkey, move |app_handle, hotkey, event| {
|
||||
if event.state == ShortcutState::Pressed {
|
||||
logging!(debug, Type::Hotkey, "Hotkey pressed: {:?}", hotkey);
|
||||
|
||||
if hotkey.key == Code::KeyQ && is_quit {
|
||||
if let Some(window) = app_handle.get_webview_window("main") {
|
||||
if window.is_focused().unwrap_or(false) {
|
||||
logging!(debug, Type::Hotkey, "Executing quit function");
|
||||
f();
|
||||
}
|
||||
}
|
||||
} else {
|
||||
logging!(debug, Type::Hotkey, "Executing function directly");
|
||||
|
||||
let is_enable_global_hotkey = Config::verge()
|
||||
.latest()
|
||||
.enable_global_hotkey
|
||||
.unwrap_or(true);
|
||||
|
||||
if is_enable_global_hotkey {
|
||||
f();
|
||||
} else {
|
||||
use crate::utils::window_manager::WindowManager;
|
||||
let is_visible = WindowManager::is_main_window_visible();
|
||||
let is_focused = WindowManager::is_main_window_focused();
|
||||
|
||||
if is_focused && is_visible {
|
||||
f();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
logging!(
|
||||
debug,
|
||||
Type::Hotkey,
|
||||
"Successfully registered hotkey {} for {}",
|
||||
hotkey,
|
||||
func
|
||||
);
|
||||
Ok(())
|
||||
/// Register a hotkey with string-based function (backward compatibility)
|
||||
pub async fn register(&self, hotkey: &str, func: &str) -> Result<()> {
|
||||
let function = HotkeyFunction::from_str(func)?;
|
||||
self.register_hotkey_with_function(hotkey, function).await
|
||||
}
|
||||
|
||||
pub fn unregister(&self, hotkey: &str) -> Result<()> {
|
||||
let app_handle = handle::Handle::global().app_handle().unwrap();
|
||||
let app_handle = handle::Handle::app_handle();
|
||||
let manager = app_handle.global_shortcut();
|
||||
manager.unregister(hotkey)?;
|
||||
logging!(debug, Type::Hotkey, "Unregister hotkey {}", hotkey);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn update(&self, new_hotkeys: Vec<String>) -> Result<()> {
|
||||
let mut current = self.current.lock();
|
||||
let old_map = Self::get_map_from_vec(¤t);
|
||||
pub async fn update(&self, new_hotkeys: Vec<String>) -> Result<()> {
|
||||
// Extract current hotkeys before async operations
|
||||
let current_hotkeys = self.current.lock().clone();
|
||||
let old_map = Self::get_map_from_vec(¤t_hotkeys);
|
||||
let new_map = Self::get_map_from_vec(&new_hotkeys);
|
||||
|
||||
let (del, add) = Self::get_diff(old_map, new_map);
|
||||
@@ -300,11 +385,12 @@ impl Hotkey {
|
||||
let _ = self.unregister(key);
|
||||
});
|
||||
|
||||
add.iter().for_each(|(key, func)| {
|
||||
logging_error!(Type::Hotkey, self.register(key, func));
|
||||
});
|
||||
for (key, func) in add.iter() {
|
||||
self.register(key, func).await?;
|
||||
}
|
||||
|
||||
*current = new_hotkeys;
|
||||
// Update the current hotkeys after all async operations
|
||||
*self.current.lock() = new_hotkeys;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -356,12 +442,11 @@ impl Hotkey {
|
||||
|
||||
impl Drop for Hotkey {
|
||||
fn drop(&mut self) {
|
||||
let app_handle = handle::Handle::global().app_handle().unwrap();
|
||||
let app_handle = handle::Handle::app_handle();
|
||||
if let Err(e) = app_handle.global_shortcut().unregister_all() {
|
||||
logging!(
|
||||
error,
|
||||
Type::Hotkey,
|
||||
true,
|
||||
"Error unregistering all hotkeys: {:?}",
|
||||
e
|
||||
);
|
||||
|
||||
6
src-tauri/src/core/logger.rs
Normal file
6
src-tauri/src/core/logger.rs
Normal file
@@ -0,0 +1,6 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use clash_verge_logger::AsyncLogger;
|
||||
use once_cell::sync::Lazy;
|
||||
|
||||
pub static CLASH_LOGGER: Lazy<Arc<AsyncLogger>> = Lazy::new(|| Arc::new(AsyncLogger::new()));
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user