Mirror of https://github.com/zhom/donutbrowser.git (synced 2026-05-04 09:35:11 +02:00)
Compare commits
591 Commits
| SHA1 | Author | Date | |
|---|---|---|---|
| b24568043c | |||
| d201cc90d1 | |||
| a118ccc349 | |||
| effe229067 | |||
| 98a8369f60 | |||
| f7ae299771 | |||
| c43f141907 | |||
| cd33accb1a | |||
| ca89b917f4 | |||
| 6ad183ab89 | |||
| c83950bee7 | |||
| 0047c80967 | |||
| 3d7bd2b14c | |||
| 8899e58987 | |||
| acf8651bd1 | |||
| ef534ee779 | |||
| 75bb10cf61 | |||
| 6f9e0de633 | |||
| 39c2a9f6f0 | |||
| 4b6f08fca3 | |||
| 24eff75d4e | |||
| 11869855e9 | |||
| 0d1f1f1497 | |||
| e8026d817f | |||
| d1ca4273de | |||
| e8c382400c | |||
| c40f023d41 | |||
| e16512576c | |||
| f098128988 | |||
| cdba9aac33 | |||
| 01b3109dc1 | |||
| 8aa3885240 | |||
| 5947ec14e6 | |||
| 2c7c07c414 | |||
| 2e26b53db8 | |||
| 966a10c045 | |||
| f72e3066f3 | |||
| cd8e1dcf18 | |||
| dfc8cd4c9f | |||
| 5a1726d119 | |||
| 133ed98df1 | |||
| 4683410a2c | |||
| 44b5e71593 | |||
| a02c16126b | |||
| fd7edfc332 | |||
| 1e48caf129 | |||
| e39047bdfd | |||
| f3fe0fa0e7 | |||
| a7f523ac4c | |||
| 763d5a5a1b | |||
| 65d37d48e2 | |||
| eab5def6b1 | |||
| 8da0dae545 | |||
| 371abf33c1 | |||
| 9ddc63931f | |||
| d2f4988635 | |||
| 68228dcf3c | |||
| 0805c37d33 | |||
| 61dcbbc715 | |||
| 287b5a2190 | |||
| 23f5921eb6 | |||
| 035d36e387 | |||
| 131ef92370 | |||
| c8b259e6ae | |||
| 98b4e9d145 | |||
| 8af318b5ce | |||
| 13c6946798 | |||
| aed24c4df6 | |||
| a48d03a1e4 | |||
| 5d7ed0430e | |||
| 84af35c4f5 | |||
| cff69fbd11 | |||
| 2f639652c9 | |||
| 30a787e50d | |||
| 71e3f4a078 | |||
| 6e3dc6b657 | |||
| 8f0bb4a335 | |||
| 9b770dc2e3 | |||
| 955cf887a0 | |||
| 0acfa66e16 | |||
| 26099b3f7f | |||
| 0b63ad6556 | |||
| bab9301c31 | |||
| a4cb3c6b1d | |||
| e22838ca55 | |||
| 18bfb1ed5b | |||
| c7c910d1ca | |||
| 35ba7e2d96 | |||
| 689eeafc75 | |||
| 40886f2ded | |||
| e02f588a90 | |||
| 3690ceb734 | |||
| d90a333eb0 | |||
| 93b85e760e | |||
| 64328e91a2 | |||
| 8a1943f84e | |||
| cc22384c54 | |||
| a720f914b0 | |||
| b899af0983 | |||
| 43277a9579 | |||
| f4a36996db | |||
| 15e8a1029a | |||
| 43b9f405ca | |||
| f9a527637f | |||
| be0d3053e7 | |||
| 070e40ffe0 | |||
| 416bec77bc | |||
| d3a6c568dc | |||
| 0659d11ee7 | |||
| 3175ecccf0 | |||
| 7b641e9b41 | |||
| f438621bc8 | |||
| 4fc2cb7730 | |||
| c41a5d84b2 | |||
| fda2887aef | |||
| f58b790293 | |||
| 518a02f782 | |||
| 0999a265dc | |||
| 984f529505 | |||
| 3b030df37f | |||
| 03b8cae825 | |||
| 00e486cc85 | |||
| 640185ff2e | |||
| 22fa2cfef0 | |||
| a1db587314 | |||
| 8862630a09 | |||
| 5956daeb9a | |||
| dfde9df72e | |||
| 3cbbd75618 | |||
| 8a32d73a25 | |||
| 2007080d4b | |||
| feb604ffaa | |||
| 14659180d7 | |||
| 82ebd7dc18 | |||
| 1c995e676c | |||
| e5fd63d03d | |||
| 11200dbe09 | |||
| 2bd01376db | |||
| ba36956158 | |||
| ce3e27ca64 | |||
| fd0fb8c7ca | |||
| 701c8aefd3 | |||
| d4a7c347b6 | |||
| 3c3e6df3b2 | |||
| cd4b23bd27 | |||
| 042a348971 | |||
| b8f4e4adda | |||
| e8852a3caf | |||
| 6ed1adafc8 | |||
| 22e6b2762e | |||
| bc7c8d1a1e | |||
| b133f928d4 | |||
| 02185e0480 | |||
| 6402ff302a | |||
| ed830ed789 | |||
| d03f598567 | |||
| 6aedf58264 | |||
| 636f1ea4ba | |||
| adb253e103 | |||
| e12ac66c7a | |||
| e06a824438 | |||
| 4293b7eab5 | |||
| 68b138d5ff | |||
| b79bd94506 | |||
| 181c76980a | |||
| 274b275c03 | |||
| 821cce0986 | |||
| 716a028923 | |||
| 7c25bd3ba2 | |||
| 6d89098263 | |||
| a1a1a2202e | |||
| 485daae40e | |||
| 9f22c57b7a | |||
| 45d959e407 | |||
| d75a367f39 | |||
| a48eb5d631 | |||
| 0d79f385bd | |||
| 25bb1dccdc | |||
| 97044d58fe | |||
| 4748a31714 | |||
| d91c97dd85 | |||
| 8e299fddd4 | |||
| 6c3c9fb58a | |||
| f5066e866b | |||
| e12a5661b1 | |||
| f8a4ec3277 | |||
| 1e5664e3b2 | |||
| d0fea2fec1 | |||
| ce0627030d | |||
| d70ec16706 | |||
| 5863d5549e | |||
| 4df35515ae | |||
| 59f430ec43 | |||
| 9f68a21824 | |||
| 9bf7f39c0c | |||
| d1b45778c4 | |||
| 6d6527d812 | |||
| c30df278fb | |||
| 95592b4aa1 | |||
| 58b0067b37 | |||
| 6260d78901 | |||
| efab286dad | |||
| e51e31911b | |||
| 348a727da7 | |||
| 10f8061acf | |||
| 69348a101e | |||
| f7e116f345 | |||
| 2e6bb2498b | |||
| 178f07bec7 | |||
| c6caf0633e | |||
| 29fe20af09 | |||
| 1cb8e7236d | |||
| ab256cd695 | |||
| 96c42ae55e | |||
| c98e12900f | |||
| 7a0d14642a | |||
| a1f153f4fa | |||
| ff9ad0a5ad | |||
| 3b78fea62a | |||
| 74e1642aa2 | |||
| c9d37519f7 | |||
| da9e1d1b58 | |||
| 77f93cc122 | |||
| f7ccca0075 | |||
| d7c2f08133 | |||
| 8dffd86ab9 | |||
| f3b3207489 | |||
| d7a787586d | |||
| 4a98eedba0 | |||
| 95ee807f3b | |||
| fac99f4a51 | |||
| 88cb154fca | |||
| a6af568d9e | |||
| 7c2ed1e0fc | |||
| 334f894e68 | |||
| a77b733a31 | |||
| c10c3b0f95 | |||
| 4b16341401 | |||
| 016d423d2c | |||
| 0596cc4009 | |||
| 269db678b7 | |||
| f809b975f3 | |||
| e369214715 | |||
| 5f93841bb7 | |||
| 1d71729c9e | |||
| a14da3d2f0 | |||
| 59c69c44a1 | |||
| 025523d0d3 | |||
| 76d17df281 | |||
| 727fa51a64 | |||
| 80305ef903 | |||
| 4d98606f28 | |||
| c2d083a10d | |||
| 6d1d15d366 | |||
| 2b2c855679 | |||
| e80043167f | |||
| 2ee3a90e25 | |||
| 231ac3f26c | |||
| 41c02c539f | |||
| ec78787079 | |||
| 7fc6f985dd | |||
| 5814f00f3d | |||
| 621a2dd0a1 | |||
| 3564762872 | |||
| b12d3af3bd | |||
| 32e70a5943 | |||
| 8b8ba31cce | |||
| 201e0270c7 | |||
| ceb2eec80e | |||
| f2b3b2cc69 | |||
| 8ac077d81b | |||
| dab5ab5805 | |||
| 83a7c0e394 | |||
| f622c77a3e | |||
| 9af33efb08 | |||
| 90cdf34e2b | |||
| bc2cbffcf4 | |||
| 341a461abf | |||
| 69b7963dd4 | |||
| c1079cf7b1 | |||
| 1e3f1d4668 | |||
| bb62ca350c | |||
| 1281fb3955 | |||
| ac878aed48 | |||
| 140621dcbe | |||
| 55b8955a20 | |||
| 1611c8e536 | |||
| c17bb56fec | |||
| cea8030268 | |||
| d9e3e1f3ef | |||
| d48e26c7eb | |||
| b7b75ec3d8 | |||
| 4a8b0bd407 | |||
| 8f24410f11 | |||
| ff4aa572fe | |||
| 0abea50279 | |||
| 1f90b12fe5 | |||
| bc0c31f527 | |||
| 357499168f | |||
| 7b1311f2ca | |||
| d755978b34 | |||
| bb164ce743 | |||
| daa36f008b | |||
| 92ef2798d2 | |||
| a9720676ae | |||
| fbcec2cbc1 | |||
| 5d395f606e | |||
| 6963e07be5 | |||
| c28537d304 | |||
| c303de4a8a | |||
| 21a13fb217 | |||
| 90d8e782de | |||
| ad2d9b73f2 | |||
| e645e212f2 | |||
| 7df92ae8ee | |||
| 18a0254ca7 | |||
| b0a58c3131 | |||
| 467e82ca93 | |||
| 8d9654044a | |||
| 711d94c9c1 | |||
| 305051d03d | |||
| 8695884535 | |||
| bb96936550 | |||
| 730621e5a1 | |||
| 714102eb25 | |||
| e5378c1bb7 | |||
| b7c8d5672a | |||
| 3fb81e2ca0 | |||
| 510de96393 | |||
| 3688e88d67 | |||
| 9646a41788 | |||
| f7679d25ca | |||
| 2d42772718 | |||
| 3980f835d6 | |||
| d0185dd5ae | |||
| de39fa4555 | |||
| f773eb6f1c | |||
| 458c30433d | |||
| 1cb9ffa249 | |||
| 5c58b5c644 | |||
| f41311a7bb | |||
| c8c09c296e | |||
| ca0c2614f4 | |||
| dca5a2970e | |||
| e0a1dd5a8a | |||
| e48b681215 | |||
| 6796912606 | |||
| 7105f6544f | |||
| 3003f868e7 | |||
| b733d26f10 | |||
| 675c2417d7 | |||
| e10a7bf089 | |||
| c8e3cd39ff | |||
| 0103150dc7 | |||
| be57ac3219 | |||
| b4067b5e34 | |||
| 3fa8822139 | |||
| 1e0ef0b497 | |||
| 2da832f100 | |||
| 284dbc5a3b | |||
| f328ceeb4f | |||
| 1bfbb365c5 | |||
| 1a15af1ded | |||
| ca20ccb489 | |||
| 0daba2eb9b | |||
| 1dfdfc6a21 | |||
| ee2f728194 | |||
| 702c1545bf | |||
| 53f403b82c | |||
| 2cae2824d3 | |||
| ca790c74ce | |||
| 25d4b30975 | |||
| 687fc7817f | |||
| 28818bed77 | |||
| d1d953b3e2 | |||
| c2153d463c | |||
| fa142a8cb0 | |||
| cf291fb0d1 | |||
| 5fed6b7c3f | |||
| 9ba51cd4e3 | |||
| a507a3daed | |||
| aabae8d3d4 | |||
| b7e6c1eb84 | |||
| d05f2190e8 | |||
| 34a9418474 | |||
| 63e125738d | |||
| 58d82d12c4 | |||
| b1c86709b0 | |||
| 12651f9f85 | |||
| c1815fdfdc | |||
| 1662c1efba | |||
| 4a8e905a44 | |||
| e165e35f2c | |||
| cbeb099cc9 | |||
| fef0c963cb | |||
| cd28531588 | |||
| ed913309dd | |||
| cecb4579c7 | |||
| 9cfed6d73e | |||
| a461fd4798 | |||
| 5159f943df | |||
| 72af5f682f | |||
| 6d77c872f2 | |||
| 0baecbdb0c | |||
| eb2af5c10b | |||
| 76cef4757a | |||
| 00d74bddaf | |||
| b5b08a0196 | |||
| ff35717cb5 | |||
| 669611ec68 | |||
| 8f1b84f615 | |||
| 2bf6531767 | |||
| da0af075fc | |||
| 83f4c2c162 | |||
| e675441171 | |||
| cf77d96042 | |||
| a4706a7f9a | |||
| b088ae675b | |||
| 54fd9b7282 | |||
| 77a50c60d1 | |||
| 62b9768006 | |||
| 66d3420000 | |||
| 8f05c48594 | |||
| a5709d95c7 | |||
| eef3e19d2f | |||
| 2c57920d44 | |||
| 57a36a5fc2 | |||
| a2a980d203 | |||
| 2deacbacab | |||
| 7cd7d077ae | |||
| 8679d0ca62 | |||
| 20cf9de4fa | |||
| 4202d595f2 | |||
| 3086ea0085 | |||
| 1ddbc5228c | |||
| 2d02095d4d | |||
| 4e0d985996 | |||
| 5af751a9b2 | |||
| da7f791274 | |||
| f4c33ad96e | |||
| 5b31cfaf32 | |||
| 4997854577 | |||
| a43e41a020 | |||
| b22b4cacf9 | |||
| 7f0df6f943 | |||
| dccf843952 | |||
| fc6ddb7cbf | |||
| 63000c72bd | |||
| 2fd344b9bb | |||
| 44bd34d8f0 | |||
| d3822bdd88 | |||
| ed1132bdc3 | |||
| fcae0623c0 | |||
| fe843e14f1 | |||
| b071e971b3 | |||
| 0b7cf547b3 | |||
| f024ce19ae | |||
| e1d3ff9000 | |||
| e2a168b188 | |||
| af767da32c | |||
| b5dfe1233e | |||
| dddf8e2e39 | |||
| adcb20fab9 | |||
| ff9c633b07 | |||
| 7ca76b1f78 | |||
| 4887a3db4d | |||
| e38cd2e560 | |||
| 7e7b47cae3 | |||
| 13ae170166 | |||
| df78e22650 | |||
| 328e6f16ee | |||
| 40ad32af6d | |||
| f299eeaea5 | |||
| 84142caac9 | |||
| d06dbb6c70 | |||
| cf5b498bd6 | |||
| 3c28a169bd | |||
| 25653e166b | |||
| 0b4263140d | |||
| b500c28b96 | |||
| 7c2be81531 | |||
| b55ef469ed | |||
| 76a206093d | |||
| 3e88dbc30e | |||
| 031823587e | |||
| c7a1ac228c | |||
| 8ede335bed | |||
| b170b8846d | |||
| 632d90a022 | |||
| 3bec00a2cd | |||
| 3b78971df8 | |||
| 5f9a716f62 | |||
| 4d07984d99 | |||
| 188e14e5b5 | |||
| bc1b9e9757 | |||
| e742e5fdfa | |||
| 9ce7757cb2 | |||
| 3ca454a2c5 | |||
| 689ac8e3ca | |||
| 0e1c5dcfb6 | |||
| f22a9f3557 | |||
| 5a76fe3221 | |||
| 5edad9b97c | |||
| 38556fc504 | |||
| 703ca2c50b | |||
| 198046fca9 | |||
| fdcce5c86a | |||
| 1cd1c7b59d | |||
| d803361fca | |||
| 2f6f20eb29 | |||
| 59272e0cff | |||
| cac2273ad3 | |||
| 1691a7a06b | |||
| 5a4718fba6 | |||
| 336543d06e | |||
| 73cc6c2ac5 | |||
| f4c96ec0c6 | |||
| f84b3c2812 | |||
| 29603076f7 | |||
| 76bcb73b39 | |||
| 51983bf3a5 | |||
| eda83cf439 | |||
| 7b6ea00838 | |||
| d8f07ddb11 | |||
| 1b0ebbc666 | |||
| d377809c77 | |||
| fbf36b49df | |||
| 341751c9b2 | |||
| eea227d853 | |||
| 29b6aed475 | |||
| 050f8b5353 | |||
| 8793de8c87 | |||
| 7408ec876c | |||
| fc8c358088 | |||
| b11495e3b9 | |||
| 11567ca50e | |||
| 1c2d5b3774 | |||
| 852066ef41 | |||
| 9622d85e73 | |||
| 4e2b87c5f1 | |||
| 2099dadbc0 | |||
| 00e4eb2715 | |||
| 33bc4476a4 | |||
| 0ad8988f7e | |||
| 2b3aaf1e92 | |||
| 5a10e0b696 | |||
| 9e48ddbf3e | |||
| bcbb2c1d42 | |||
| 391bfdabdc | |||
| 7b2dc84b5b | |||
| ddc09726f4 | |||
| e1451d3fbb | |||
| b18df6499f | |||
| c5c2563a4e | |||
| 8475f42821 | |||
| f51aa9ed85 | |||
| 3d3a3b3816 | |||
| e090881917 | |||
| b46976f47d | |||
| 39a978682c | |||
| 38e58e604b | |||
| ffcff2ce7c | |||
| c8ea31f85d | |||
| 7ac6e21dbc | |||
| 7533993909 | |||
| 8176f45e41 | |||
| f55a3f7155 | |||
| 7d74ac09d9 | |||
| d314fa1f71 | |||
| 968969cf1e | |||
| a7a3d99881 | |||
| 80cd2e4e7f | |||
| 6361a039bc | |||
| 8005ec90b6 | |||
| cdf30b7baa | |||
| fadef414fe | |||
| e1c55233f7 | |||
| 801a2b5732 | |||
| abe5c691ce | |||
| 2f9a17c6e0 | |||
| fcdb80f75a | |||
| 7568e7998d | |||
| e0f4f93c30 | |||
| d142b7f79b | |||
| dc5553a5d3 | |||
| 07445ff95b | |||
| 6ecbc39e46 | |||
| 67849c00d5 | |||
| bdf71e4ef8 | |||
| 2d2ebba40e | |||
| 2caac5bf4c |
@@ -0,0 +1,6 @@
+---
+description:
+globs:
+alwaysApply: true
+---
+If you are modifying the UI, do not add random colors that are not controlled by src/lib/themes.ts file.
@@ -3,4 +3,4 @@ description:
 globs:
 alwaysApply: true
 ---
-Don't leave comments that don't add value
+Don't leave comments that don't add value.
@@ -3,4 +3,4 @@ description:
 globs:
 alwaysApply: true
 ---
-Do not duplicate code unless you have a very good reason to do so. It is important that the same logic is not duplicated multiple times
+Do not duplicate code unless you have a very good reason to do so. It is important that the same logic is not duplicated multiple times.
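The rule above is easier to follow with a concrete picture. The sketch below is a minimal, hypothetical Rust example (the function names are invented and are not taken from this repository): the shared normalization logic lives in one helper instead of being repeated at every call site.

```rust
// Hypothetical illustration of the "do not duplicate logic" rule.
// One helper owns the normalization rules; both commands reuse it.
fn normalize_profile_name(raw: &str) -> String {
    raw.trim().to_lowercase().replace(' ', "-")
}

fn create_profile(raw_name: &str) -> String {
    format!("profile created: {}", normalize_profile_name(raw_name))
}

fn rename_profile(raw_name: &str) -> String {
    format!("profile renamed: {}", normalize_profile_name(raw_name))
}

fn main() {
    println!("{}", create_profile("  My Profile "));
    println!("{}", rename_profile("Work Browser"));
}
```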
@@ -0,0 +1,6 @@
+---
+description:
+globs:
+alwaysApply: true
+---
+Anytime you change nodecar's code and try to test, recompile it with "cd nodecar && pnpm build".
@@ -0,0 +1,6 @@
+---
+description:
+globs:
+alwaysApply: true
+---
+If there is a global singleton of a struct, only use it inside a method while properly initializing it, unless I have explicitly specified in the request otherwise.
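As a rough illustration of the singleton rule above, here is a minimal Rust sketch (the Settings type and its field are invented for the example, not taken from this codebase): the global stays behind a OnceLock and is only touched inside an accessor method that initializes it on first use.

```rust
use std::sync::OnceLock;

#[derive(Debug)]
struct Settings {
    default_browser: String,
}

// The global is never read or written directly from other modules.
static SETTINGS: OnceLock<Settings> = OnceLock::new();

impl Settings {
    /// The only way to reach the singleton: it is initialized lazily here,
    /// so callers can never observe an uninitialized value.
    fn global() -> &'static Settings {
        SETTINGS.get_or_init(|| Settings {
            default_browser: "camoufox".to_string(),
        })
    }
}

fn main() {
    // Call sites go through the accessor method, not the static itself.
    println!("{:?}", Settings::global());
}
```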
@@ -1,4 +1,5 @@
 version: 2
+
 updates:
   # Frontend dependencies (root package.json)
   - package-ecosystem: "npm"
@@ -13,9 +14,6 @@ updates:
       frontend-dependencies:
         patterns:
           - "*"
-    ignore:
-      - dependency-name: "eslint"
-        versions: ">= 9"
     commit-message:
       prefix: "deps"
       include: "scope"
@@ -27,35 +27,74 @@ jobs:
|
||||
build-mode: none
|
||||
- language: javascript-typescript
|
||||
build-mode: none
|
||||
# - language: rust
|
||||
# build-mode: none
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
|
||||
|
||||
- name: Set up pnpm package manager
|
||||
uses: pnpm/action-setup@v4
|
||||
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 #v4.2.0
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@v4
|
||||
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 #v6.0.0
|
||||
with:
|
||||
node-version-file: .node-version
|
||||
cache: "pnpm"
|
||||
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 #master
|
||||
with:
|
||||
toolchain: stable
|
||||
targets: x86_64-unknown-linux-gnu
|
||||
|
||||
- name: Install system dependencies (Rust only)
|
||||
if: matrix.language == 'rust'
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libgtk-3-dev libayatana-appindicator3-dev librsvg2-dev pkg-config xdg-utils
|
||||
|
||||
- name: Rust cache
|
||||
uses: swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 #v2.8.1
|
||||
with:
|
||||
workdir: ./src-tauri
|
||||
|
||||
- name: Install banderole
|
||||
run: cargo install banderole
|
||||
|
||||
- name: Install dependencies from lockfile
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Install rust dependencies
|
||||
if: matrix.language == 'rust'
|
||||
working-directory: ./src-tauri
|
||||
run: |
|
||||
cargo build
|
||||
|
||||
- name: Build nodecar sidecar
|
||||
if: matrix.language == 'rust'
|
||||
shell: bash
|
||||
working-directory: ./nodecar
|
||||
run: |
|
||||
pnpm run build:linux-x64
|
||||
|
||||
- name: Copy nodecar binary to Tauri binaries
|
||||
if: matrix.language == 'rust'
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p src-tauri/binaries
|
||||
cp nodecar/nodecar-bin src-tauri/binaries/nodecar-x86_64-unknown-linux-gnu
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v3
|
||||
uses: github/codeql-action/init@b1e4dc3db58c9601794e22a9f6d28d45461b9dbf #v3.29.0
|
||||
with:
|
||||
queries: security-extended
|
||||
languages: ${{ matrix.language }}
|
||||
build-mode: ${{ matrix.build-mode }}
|
||||
|
||||
- if: matrix.build-mode == 'manual'
|
||||
shell: bash
|
||||
run: |
|
||||
pnpm run build
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v3
|
||||
uses: github/codeql-action/analyze@b1e4dc3db58c9601794e22a9f6d28d45461b9dbf #v3.29.0
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
||||
|
||||
@@ -0,0 +1,26 @@
+on:
+  push:
+    branches:
+      - main
+  release:
+    types:
+      - published
+
+permissions:
+  contents: write
+  pull-requests: write
+
+jobs:
+  contrib-readme-job:
+    runs-on: ubuntu-latest
+    name: Automatically update the contributors list in the README
+    permissions:
+      contents: write
+      pull-requests: write
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
+      - name: Contribute List
+        uses: akhilmhdh/contributors-readme-action@83ea0b4f1ac928fbfe88b9e8460a932a528eb79f #v2.3.11
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
@@ -0,0 +1,80 @@
|
||||
name: Dependabot Automerge
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [opened, synchronize, reopened]
|
||||
|
||||
permissions:
|
||||
pull-requests: write
|
||||
contents: write
|
||||
checks: read
|
||||
|
||||
jobs:
|
||||
security-scan:
|
||||
name: Security Vulnerability Scan
|
||||
if: ${{ github.actor == 'dependabot[bot]' }}
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@b77c075a1235514558f0eb88dbd31e22c45e0cd2" # v2.3.0
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
--skip-git
|
||||
--lockfile=pnpm-lock.yaml
|
||||
--lockfile=src-tauri/Cargo.lock
|
||||
--lockfile=nodecar/pnpm-lock.yaml
|
||||
./
|
||||
permissions:
|
||||
security-events: write
|
||||
contents: read
|
||||
actions: read
|
||||
|
||||
lint-js:
|
||||
name: Lint JavaScript/TypeScript
|
||||
if: ${{ github.actor == 'dependabot[bot]' }}
|
||||
uses: ./.github/workflows/lint-js.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
lint-rust:
|
||||
name: Lint Rust
|
||||
if: ${{ github.actor == 'dependabot[bot]' }}
|
||||
uses: ./.github/workflows/lint-rs.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
codeql:
|
||||
name: CodeQL
|
||||
uses: ./.github/workflows/codeql.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
security-events: write
|
||||
contents: read
|
||||
packages: read
|
||||
actions: read
|
||||
|
||||
spellcheck:
|
||||
name: Spell Check
|
||||
uses: ./.github/workflows/spellcheck.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
dependabot-automerge:
|
||||
name: Dependabot Automerge
|
||||
if: ${{ github.actor == 'dependabot[bot]' }}
|
||||
needs: [security-scan, lint-js, lint-rust, codeql, spellcheck]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Dependabot metadata
|
||||
id: metadata
|
||||
uses: dependabot/fetch-metadata@08eff52bf64351f401fb50d4972fa95b9f2c2d1b #v2.4.0
|
||||
with:
|
||||
github-token: "${{ secrets.GITHUB_TOKEN }}"
|
||||
- name: Enable auto-merge for minor and patch updates
|
||||
if: ${{ steps.metadata.outputs.update-type == 'version-update:semver-minor' || steps.metadata.outputs.update-type == 'version-update:semver-patch' }}
|
||||
run: gh pr merge --auto --squash "$PR_URL"
|
||||
env:
|
||||
PR_URL: ${{ github.event.pull_request.html_url }}
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
timeout-minutes: 10
|
||||
@@ -0,0 +1,16 @@
+name: Greetings
+
+on:
+  pull_request:
+    types: [opened]
+
+jobs:
+  greeting:
+    runs-on: ubuntu-latest
+    permissions:
+      pull-requests: write
+    steps:
+      - uses: actions/first-interaction@1c4688942c71f71d4f5502a26ea67c331730fa4d # v3.1.0
+        with:
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
+          pr-message: "Welcome to the community and thank you for your first contribution ❤️ A human will review your PR shortly. Make sure that the pipelines are green, so that the PR is considered ready for a review and could be merged."
@@ -0,0 +1,213 @@
|
||||
name: Issue Validation
|
||||
|
||||
on:
|
||||
issues:
|
||||
types: [opened]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
issues: write
|
||||
models: read
|
||||
|
||||
jobs:
|
||||
validate-issue:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
|
||||
|
||||
- name: Get issue templates
|
||||
id: get-templates
|
||||
run: |
|
||||
# Read the issue templates
|
||||
if [ -f ".github/ISSUE_TEMPLATE/01-bug-report.md" ]; then
|
||||
echo "bug-template-exists=true" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
if [ -f ".github/ISSUE_TEMPLATE/02-feature-request.md" ]; then
|
||||
echo "feature-template-exists=true" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Create issue analysis prompt
|
||||
id: create-prompt
|
||||
env:
|
||||
ISSUE_TITLE: ${{ github.event.issue.title }}
|
||||
ISSUE_BODY: ${{ github.event.issue.body }}
|
||||
ISSUE_LABELS: ${{ join(github.event.issue.labels.*.name, ', ') }}
|
||||
run: |
|
||||
cat > issue_analysis.txt << EOF
|
||||
## Issue Content to Analyze:
|
||||
|
||||
**Title:** $ISSUE_TITLE
|
||||
|
||||
**Body:**
|
||||
$ISSUE_BODY
|
||||
|
||||
**Labels:** $ISSUE_LABELS
|
||||
EOF
|
||||
|
||||
- name: Validate issue with AI
|
||||
id: validate
|
||||
uses: actions/ai-inference@a1c11829223a786afe3b5663db904a3aa1eac3a2 # v2.0.1
|
||||
with:
|
||||
prompt-file: issue_analysis.txt
|
||||
system-prompt: |
|
||||
You are an issue validation assistant for Donut Browser, an anti-detect browser.
|
||||
|
||||
Analyze the provided issue content and determine if it contains sufficient information based on these requirements:
|
||||
|
||||
**For Bug Reports, the issue should include:**
|
||||
1. Clear description of the problem
|
||||
2. Steps to reproduce the issue (numbered list preferred)
|
||||
3. Expected vs actual behavior
|
||||
4. Environment information (OS, browser version, etc.)
|
||||
5. Error messages, stack traces, or screenshots if applicable
|
||||
|
||||
**For Feature Requests, the issue should include:**
|
||||
1. Clear description of the requested feature
|
||||
2. Use case or problem it solves
|
||||
3. Proposed solution or how it should work
|
||||
4. Priority level or importance
|
||||
|
||||
**General Requirements for all issues:**
|
||||
1. Descriptive title
|
||||
2. Sufficient detail to understand and act upon
|
||||
3. Professional tone and clear communication
|
||||
|
||||
Respond in JSON format with the following structure:
|
||||
```json
|
||||
{
|
||||
"is_valid": true|false,
|
||||
"issue_type": "bug_report"|"feature_request"|"other",
|
||||
"missing_info": [
|
||||
"List of missing required information"
|
||||
],
|
||||
"suggestions": [
|
||||
"Specific suggestions for improvement"
|
||||
],
|
||||
"overall_assessment": "Brief assessment of the issue quality"
|
||||
}
|
||||
```
|
||||
|
||||
Be constructive and helpful in your feedback. If the issue is incomplete, provide specific guidance on what's needed.
|
||||
model: openai/gpt-4o
|
||||
|
||||
- name: Check if first-time contributor
|
||||
id: check-first-time
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
ISSUE_AUTHOR: ${{ github.event.issue.user.login }}
|
||||
run: |
|
||||
# Check if user has created issues before (excluding the current one)
|
||||
ISSUE_COUNT=$(gh api "/repos/${{ github.repository }}/issues" \
|
||||
--jq "map(select(.user.login == \"$ISSUE_AUTHOR\" and .number != ${{ github.event.issue.number }})) | length" \
|
||||
--paginate || echo "0")
|
||||
|
||||
if [ "$ISSUE_COUNT" = "0" ]; then
|
||||
echo "is_first_time=true" >> $GITHUB_OUTPUT
|
||||
echo "✅ First-time contributor detected"
|
||||
else
|
||||
echo "is_first_time=false" >> $GITHUB_OUTPUT
|
||||
echo "ℹ️ Returning contributor"
|
||||
fi
|
||||
|
||||
- name: Parse validation result and take action
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
# Prefer reading from the response file to avoid output truncation
|
||||
RESPONSE_FILE='${{ steps.validate.outputs.response-file }}'
|
||||
if [ -n "$RESPONSE_FILE" ] && [ -f "$RESPONSE_FILE" ]; then
|
||||
RAW_OUTPUT=$(cat "$RESPONSE_FILE")
|
||||
else
|
||||
RAW_OUTPUT='${{ steps.validate.outputs.response }}'
|
||||
fi
|
||||
|
||||
# Extract JSON if wrapped in markdown code fences; otherwise use raw
|
||||
JSON_RESULT=$(printf "%s" "$RAW_OUTPUT" | sed -n '/```json/,/```/p' | sed '1d;$d')
|
||||
if [ -z "$JSON_RESULT" ]; then
|
||||
JSON_RESULT="$RAW_OUTPUT"
|
||||
fi
|
||||
|
||||
# Parse JSON fields
|
||||
IS_VALID=$(echo "$JSON_RESULT" | jq -r '.is_valid // false')
|
||||
ISSUE_TYPE=$(echo "$JSON_RESULT" | jq -r '.issue_type // "other"')
|
||||
MISSING_INFO=$(echo "$JSON_RESULT" | jq -r '.missing_info[]? // empty' | sed 's/^/- /')
|
||||
SUGGESTIONS=$(echo "$JSON_RESULT" | jq -r '.suggestions[]? // empty' | sed 's/^/- /')
|
||||
ASSESSMENT=$(echo "$JSON_RESULT" | jq -r '.overall_assessment // "No assessment provided"')
|
||||
|
||||
echo "Issue validation result: $IS_VALID"
|
||||
echo "Issue type: $ISSUE_TYPE"
|
||||
|
||||
# Prepare greeting message for first-time contributors
|
||||
IS_FIRST_TIME="${{ steps.check-first-time.outputs.is_first_time }}"
|
||||
GREETING_SECTION=""
|
||||
if [ "$IS_FIRST_TIME" = "true" ]; then
|
||||
GREETING_SECTION="## 👋 Welcome!\n\nThank you for your first issue ❤️ If this is a feature request, please make sure it is clear what you want, why you want it, and how important it is to you. If you posted a bug report, please make sure it includes as much detail as possible.\n\n---\n\n"
|
||||
fi
|
||||
|
||||
if [ "$IS_VALID" = "false" ]; then
|
||||
# Create a comment asking for more information
|
||||
{
|
||||
printf "%b" "$GREETING_SECTION"
|
||||
printf "## 🤖 Issue Validation\n\n"
|
||||
printf "Thank you for submitting this issue! However, it appears that some required information might be missing to help us better understand and address your concern.\n\n"
|
||||
printf "**Issue Type Detected:** \`%s\`\n\n" "$ISSUE_TYPE"
|
||||
printf "**Assessment:** %s\n\n" "$ASSESSMENT"
|
||||
printf "### 📋 Missing Information:\n%s\n\n" "$MISSING_INFO"
|
||||
printf "### 💡 Suggestions for Improvement:\n%s\n\n" "$SUGGESTIONS"
|
||||
printf "### 📝 How to Provide Additional Information:\n\n"
|
||||
printf "Please edit your original issue description to include the missing information. Here are our issue templates for reference:\n\n"
|
||||
printf -- "- **Bug Report Template:** [View Template](.github/ISSUE_TEMPLATE/01-bug-report.md)\n"
|
||||
printf -- "- **Feature Request Template:** [View Template](.github/ISSUE_TEMPLATE/02-feature-request.md)\n\n"
|
||||
printf "### 🔧 Quick Tips:\n"
|
||||
printf -- "- For **bug reports**: Include step-by-step reproduction instructions, your environment details, and any error messages\n"
|
||||
printf -- "- For **feature requests**: Describe the use case, expected behavior, and why this feature would be valuable\n"
|
||||
printf -- "- Add **screenshots** or **logs** when applicable\n\n"
|
||||
printf "Once you have updated the issue with the missing information, feel free to remove this comment or reply to let us know you have made the updates.\n\n"
|
||||
printf -- "---\n*This validation was performed automatically to ensure we have all the information needed to help you effectively.*\n"
|
||||
} > comment.md
|
||||
|
||||
# Post the comment
|
||||
gh issue comment ${{ github.event.issue.number }} --body-file comment.md
|
||||
|
||||
# Add a label to indicate validation needed
|
||||
gh issue edit ${{ github.event.issue.number }} --add-label "needs-info"
|
||||
|
||||
echo "✅ Validation comment posted and 'needs-info' label added"
|
||||
else
|
||||
echo "✅ Issue contains sufficient information"
|
||||
|
||||
# Prepare a summary comment even when valid
|
||||
SUGGESTIONS_SECTION=""
|
||||
if [ -n "$SUGGESTIONS" ]; then
|
||||
SUGGESTIONS_SECTION=$(printf "### 💡 Suggestions:\n%s\n\n" "$SUGGESTIONS")
|
||||
fi
|
||||
|
||||
{
|
||||
printf "%b" "$GREETING_SECTION"
|
||||
printf "## 🤖 Issue Validation\n\n"
|
||||
printf "**Issue Type Detected:** \`%s\`\n\n" "$ISSUE_TYPE"
|
||||
printf "**Assessment:** %s\n\n" "$ASSESSMENT"
|
||||
printf "%b" "$SUGGESTIONS_SECTION"
|
||||
printf -- "---\n*This validation was performed automatically to help triage issues.*\n"
|
||||
} > comment.md
|
||||
|
||||
# Post the summary comment
|
||||
gh issue comment ${{ github.event.issue.number }} --body-file comment.md
|
||||
|
||||
# Add appropriate labels based on issue type
|
||||
case "$ISSUE_TYPE" in
|
||||
"bug_report")
|
||||
gh issue edit ${{ github.event.issue.number }} --add-label "bug"
|
||||
;;
|
||||
"feature_request")
|
||||
gh issue edit ${{ github.event.issue.number }} --add-label "enhancement"
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
|
||||
- name: Cleanup
|
||||
run: |
|
||||
rm -f issue_analysis.txt comment.md
|
||||
@@ -16,6 +16,9 @@ on:
|
||||
- ".github/workflows/lint-rs.yml"
|
||||
- ".github/workflows/osv.yml"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
@@ -31,13 +34,15 @@ jobs:
|
||||
run: git config --global core.autocrlf false
|
||||
|
||||
- name: Checkout repository code
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
|
||||
|
||||
- name: Set up pnpm package manager
|
||||
uses: pnpm/action-setup@v4
|
||||
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 #v4.2.0
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@v4
|
||||
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 #v6.0.0
|
||||
with:
|
||||
node-version-file: .node-version
|
||||
cache: "pnpm"
|
||||
@@ -45,10 +50,5 @@ jobs:
|
||||
- name: Install dependencies from lockfile
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Install nodecar dependencies
|
||||
working-directory: ./nodecar
|
||||
run: |
|
||||
pnpm install --frozen-lockfile
|
||||
|
||||
- name: Run lint step
|
||||
run: pnpm run lint:js
|
||||
|
||||
@@ -24,12 +24,14 @@ on:
|
||||
- "tsconfig.json"
|
||||
- "biome.json"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
fail-fast: true
|
||||
matrix:
|
||||
os: [macos-latest]
|
||||
os: [macos-latest, ubuntu-22.04]
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
@@ -39,27 +41,33 @@ jobs:
|
||||
run: git config --global core.autocrlf false
|
||||
|
||||
- name: Checkout repository code
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
|
||||
|
||||
- name: Set up pnpm package manager
|
||||
uses: pnpm/action-setup@v4
|
||||
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 #v4.2.0
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@v4
|
||||
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 #v6.0.0
|
||||
with:
|
||||
node-version-file: .node-version
|
||||
cache: "pnpm"
|
||||
|
||||
- name: Install Rust toolchain
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 #master
|
||||
with:
|
||||
toolchain: stable
|
||||
components: rustfmt, clippy
|
||||
|
||||
- name: Install cargo-audit
|
||||
run: cargo install cargo-audit
|
||||
|
||||
- name: Install banderole
|
||||
run: cargo install banderole
|
||||
|
||||
- name: Install dependencies (Ubuntu only)
|
||||
if: matrix.os == 'ubuntu-latest'
|
||||
if: matrix.os == 'ubuntu-22.04'
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt install libwebkit2gtk-4.1-dev build-essential curl wget file libxdo-dev libssl-dev libayatana-appindicator3-dev librsvg2-dev
|
||||
@@ -67,16 +75,11 @@ jobs:
|
||||
- name: Install frontend dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Install nodecar dependencies
|
||||
working-directory: ./nodecar
|
||||
run: |
|
||||
pnpm install --frozen-lockfile
|
||||
|
||||
- name: Build nodecar binary
|
||||
shell: bash
|
||||
working-directory: ./nodecar
|
||||
run: |
|
||||
if [[ "${{ matrix.os }}" == "ubuntu-latest" ]]; then
|
||||
if [[ "${{ matrix.os }}" == "ubuntu-22.04" ]]; then
|
||||
pnpm run build:linux-x64
|
||||
elif [[ "${{ matrix.os }}" == "macos-latest" ]]; then
|
||||
pnpm run build:mac-aarch64
|
||||
@@ -84,20 +87,50 @@ jobs:
|
||||
pnpm run build:win-x64
|
||||
fi
|
||||
|
||||
# TODO: replace with an integration test that fetches everything from rust
|
||||
# - name: Download Camoufox for testing
|
||||
# run: npx camoufox-js fetch
|
||||
# continue-on-error: true
|
||||
|
||||
- name: Copy nodecar binary to Tauri binaries
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p src-tauri/binaries
|
||||
if [[ "${{ matrix.os }}" == "ubuntu-latest" ]]; then
|
||||
cp nodecar/dist/nodecar src-tauri/binaries/nodecar-x86_64-unknown-linux-gnu
|
||||
if [[ "${{ matrix.os }}" == "ubuntu-22.04" ]]; then
|
||||
cp nodecar/nodecar-bin src-tauri/binaries/nodecar-x86_64-unknown-linux-gnu
|
||||
elif [[ "${{ matrix.os }}" == "macos-latest" ]]; then
|
||||
cp nodecar/dist/nodecar src-tauri/binaries/nodecar-aarch64-apple-darwin
|
||||
cp nodecar/nodecar-bin src-tauri/binaries/nodecar-aarch64-apple-darwin
|
||||
elif [[ "${{ matrix.os }}" == "windows-latest" ]]; then
|
||||
cp nodecar/dist/nodecar.exe src-tauri/binaries/nodecar-x86_64-pc-windows-msvc.exe
|
||||
cp nodecar/nodecar-bin.exe src-tauri/binaries/nodecar-x86_64-pc-windows-msvc.exe
|
||||
fi
|
||||
|
||||
- name: Create empty 'dist' directory
|
||||
run: mkdir dist
|
||||
- name: Build frontend
|
||||
run: pnpm next build
|
||||
|
||||
- name: Get host target
|
||||
id: host_target
|
||||
shell: bash
|
||||
run: |
|
||||
HOST_TARGET=$(rustc -vV | sed -n 's|host: ||p')
|
||||
echo "target=${HOST_TARGET}" >> $GITHUB_OUTPUT
|
||||
echo "Host target: ${HOST_TARGET}"
|
||||
|
||||
- name: Build donut-proxy sidecar
|
||||
shell: bash
|
||||
working-directory: ./src-tauri
|
||||
run: cargo build --bin donut-proxy
|
||||
|
||||
- name: Copy donut-proxy binary to Tauri binaries
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p src-tauri/binaries
|
||||
HOST_TARGET="${{ steps.host_target.outputs.target }}"
|
||||
if [[ "$HOST_TARGET" == *"windows"* ]]; then
|
||||
cp src-tauri/target/debug/donut-proxy.exe src-tauri/binaries/donut-proxy-${HOST_TARGET}.exe
|
||||
else
|
||||
cp src-tauri/target/debug/donut-proxy src-tauri/binaries/donut-proxy-${HOST_TARGET}
|
||||
chmod +x src-tauri/binaries/donut-proxy-${HOST_TARGET}
|
||||
fi
|
||||
|
||||
- name: Run rustfmt check
|
||||
run: cargo fmt --all -- --check
|
||||
@@ -107,7 +140,7 @@ jobs:
|
||||
run: cargo clippy --all-targets --all-features -- -D warnings -D clippy::all
|
||||
working-directory: src-tauri
|
||||
|
||||
- name: Run Rust unit tests
|
||||
- name: Run Rust tests
|
||||
run: cargo test
|
||||
working-directory: src-tauri
|
||||
|
||||
|
||||
@@ -50,7 +50,7 @@
   scan-scheduled:
     name: Scheduled Security Scan
    if: ${{ github.event_name == 'push' || github.event_name == 'schedule' }}
-    uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@e69cc6c86b31f1e7e23935bbe7031b50e51082de" # v2.0.2
+    uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@b77c075a1235514558f0eb88dbd31e22c45e0cd2" # v2.3.0
     with:
       scan-args: |-
         -r
@@ -63,7 +63,7 @@
   scan-pr:
     name: PR Security Scan
     if: ${{ github.event_name == 'pull_request' || github.event_name == 'merge_group' }}
-    uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@e69cc6c86b31f1e7e23935bbe7031b50e51082de" # v2.0.2
+    uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@b77c075a1235514558f0eb88dbd31e22c45e0cd2" # v2.3.0
     with:
       scan-args: |-
         -r
@@ -16,16 +16,20 @@
     name: Lint JavaScript/TypeScript
     uses: ./.github/workflows/lint-js.yml
     secrets: inherit
+    permissions:
+      contents: read

   lint-rust:
     name: Lint Rust
     uses: ./.github/workflows/lint-rs.yml
     secrets: inherit
+    permissions:
+      contents: read

   security-scan:
     name: Security Vulnerability Scan
     if: ${{ github.event_name == 'pull_request' || github.event_name == 'merge_group' }}
-    uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@e69cc6c86b31f1e7e23935bbe7031b50e51082de" # v2.0.2
+    uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@b77c075a1235514558f0eb88dbd31e22c45e0cd2" # v2.3.0
     with:
       scan-args: |-
         -r
@@ -0,0 +1,123 @@
|
||||
name: Generate Release Notes
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
models: read
|
||||
|
||||
jobs:
|
||||
generate-release-notes:
|
||||
runs-on: ubuntu-latest
|
||||
if: startsWith(github.event.release.tag_name, 'v') && !github.event.release.prerelease
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
|
||||
with:
|
||||
fetch-depth: 0 # Fetch full history to compare with previous release
|
||||
|
||||
- name: Get previous release tag
|
||||
id: get-previous-tag
|
||||
run: |
|
||||
# Get the previous release tag (excluding the current one)
|
||||
CURRENT_TAG="${{ github.event.release.tag_name }}"
|
||||
PREVIOUS_TAG=$(git tag --sort=-version:refname | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' | grep -v "$CURRENT_TAG" | head -n 1)
|
||||
|
||||
if [ -z "$PREVIOUS_TAG" ]; then
|
||||
echo "No previous release found, using initial commit"
|
||||
PREVIOUS_TAG=$(git rev-list --max-parents=0 HEAD)
|
||||
fi
|
||||
|
||||
echo "current-tag=$CURRENT_TAG" >> $GITHUB_OUTPUT
|
||||
echo "previous-tag=$PREVIOUS_TAG" >> $GITHUB_OUTPUT
|
||||
echo "Previous release: $PREVIOUS_TAG"
|
||||
echo "Current release: $CURRENT_TAG"
|
||||
|
||||
- name: Get commit messages between releases
|
||||
id: get-commits
|
||||
run: |
|
||||
# Get commit messages between previous and current release
|
||||
PREVIOUS_TAG="${{ steps.get-previous-tag.outputs.previous-tag }}"
|
||||
CURRENT_TAG="${{ steps.get-previous-tag.outputs.current-tag }}"
|
||||
|
||||
# Get commit log with detailed format
|
||||
COMMIT_LOG=$(git log --pretty=format:"- %s (%h by %an)" $PREVIOUS_TAG..$CURRENT_TAG --no-merges)
|
||||
|
||||
# Get changed files summary
|
||||
CHANGED_FILES=$(git diff --name-status $PREVIOUS_TAG..$CURRENT_TAG | head -20)
|
||||
|
||||
# Save to files for AI processing
|
||||
echo "$COMMIT_LOG" > commits.txt
|
||||
echo "$CHANGED_FILES" > changes.txt
|
||||
|
||||
echo "commits-file=commits.txt" >> $GITHUB_OUTPUT
|
||||
echo "changes-file=changes.txt" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Generate release notes with AI
|
||||
id: generate-notes
|
||||
uses: actions/ai-inference@a1c11829223a786afe3b5663db904a3aa1eac3a2 # v2.0.1
|
||||
with:
|
||||
prompt-file: commits.txt
|
||||
system-prompt: |
|
||||
You are an expert technical writer tasked with generating comprehensive release notes for Donut Browser, a powerful anti-detect browser.
|
||||
|
||||
Analyze the provided commit messages and generate well-structured release notes following this format:
|
||||
|
||||
## What's New in ${{ steps.get-previous-tag.outputs.current-tag }}
|
||||
|
||||
[Brief 1-2 sentence overview of the release]
|
||||
|
||||
### ✨ New Features
|
||||
[List new features with brief descriptions]
|
||||
|
||||
### 🐛 Bug Fixes
|
||||
[List bug fixes]
|
||||
|
||||
### 🔧 Improvements
|
||||
[List improvements and enhancements]
|
||||
|
||||
### 📚 Documentation
|
||||
[List documentation updates if any]
|
||||
|
||||
### 🔄 Dependencies
|
||||
[List dependency updates if any]
|
||||
|
||||
### 🛠️ Developer Experience
|
||||
[List development-related changes if any]
|
||||
|
||||
Guidelines:
|
||||
- Use clear, user-friendly language
|
||||
- Group related commits logically
|
||||
- Omit minor commits like formatting, typos unless significant
|
||||
- Focus on user-facing changes
|
||||
- Use emojis sparingly and consistently
|
||||
- Keep descriptions concise but informative
|
||||
- If commits are unclear, infer the purpose from the context
|
||||
|
||||
The application is a desktop app built with Tauri + Next.js that helps users manage multiple browser profiles with proxy support.
|
||||
model: gpt-4o
|
||||
|
||||
- name: Update release with generated notes
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
# Prefer reading from the response file to avoid output truncation
|
||||
RESPONSE_FILE='${{ steps.generate-notes.outputs.response-file }}'
|
||||
if [ -n "$RESPONSE_FILE" ] && [ -f "$RESPONSE_FILE" ]; then
|
||||
RELEASE_NOTES=$(cat "$RESPONSE_FILE")
|
||||
else
|
||||
RELEASE_NOTES='${{ steps.generate-notes.outputs.response }}'
|
||||
fi
|
||||
|
||||
# Update the release with the generated notes
|
||||
gh api --method PATCH /repos/${{ github.repository }}/releases/${{ github.event.release.id }} \
|
||||
--field body="$RELEASE_NOTES"
|
||||
|
||||
echo "✅ Release notes updated successfully!"
|
||||
|
||||
- name: Cleanup
|
||||
run: |
|
||||
rm -f commits.txt changes.txt
|
||||
@@ -13,7 +13,7 @@ env:
|
||||
jobs:
|
||||
security-scan:
|
||||
name: Security Vulnerability Scan
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@e69cc6c86b31f1e7e23935bbe7031b50e51082de" # v2.0.2
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@b77c075a1235514558f0eb88dbd31e22c45e0cd2" # v2.3.0
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
@@ -31,11 +31,15 @@ jobs:
|
||||
name: Lint JavaScript/TypeScript
|
||||
uses: ./.github/workflows/lint-js.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
lint-rust:
|
||||
name: Lint Rust
|
||||
uses: ./.github/workflows/lint-rs.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
codeql:
|
||||
name: CodeQL
|
||||
@@ -51,6 +55,8 @@ jobs:
|
||||
name: Spell Check
|
||||
uses: ./.github/workflows/spellcheck.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
release:
|
||||
needs: [security-scan, lint-js, lint-rust, codeql, spellcheck]
|
||||
@@ -61,37 +67,37 @@ jobs:
|
||||
matrix:
|
||||
include:
|
||||
- platform: "macos-latest"
|
||||
args: "--target aarch64-apple-darwin"
|
||||
args: "--target aarch64-apple-darwin --verbose"
|
||||
arch: "aarch64"
|
||||
target: "aarch64-apple-darwin"
|
||||
pkg_target: "latest-macos-arm64"
|
||||
nodecar_script: "build:mac-aarch64"
|
||||
- platform: "macos-latest"
|
||||
args: "--target x86_64-apple-darwin"
|
||||
args: "--target x86_64-apple-darwin --verbose"
|
||||
arch: "x86_64"
|
||||
target: "x86_64-apple-darwin"
|
||||
pkg_target: "latest-macos-x64"
|
||||
nodecar_script: "build:mac-x86_64"
|
||||
- platform: "ubuntu-22.04"
|
||||
args: "--target x86_64-unknown-linux-gnu"
|
||||
args: "--target x86_64-unknown-linux-gnu --verbose"
|
||||
arch: "x86_64"
|
||||
target: "x86_64-unknown-linux-gnu"
|
||||
pkg_target: "latest-linux-x64"
|
||||
nodecar_script: "build:linux-x64"
|
||||
- platform: "ubuntu-22.04-arm"
|
||||
args: "--target aarch64-unknown-linux-gnu"
|
||||
args: "--target aarch64-unknown-linux-gnu --verbose"
|
||||
arch: "aarch64"
|
||||
target: "aarch64-unknown-linux-gnu"
|
||||
pkg_target: "latest-linux-arm64"
|
||||
nodecar_script: "build:linux-arm64"
|
||||
# - platform: "windows-latest"
|
||||
# args: "--target x86_64-pc-windows-msvc"
|
||||
# args: "--target x86_64-pc-windows-msvc --verbose"
|
||||
# arch: "x86_64"
|
||||
# target: "x86_64-pc-windows-msvc"
|
||||
# pkg_target: "latest-win-x64"
|
||||
# nodecar_script: "build:win-x64"
|
||||
# - platform: "windows-11-arm"
|
||||
# args: "--target aarch64-pc-windows-msvc"
|
||||
# args: "--target aarch64-pc-windows-msvc --verbose"
|
||||
# arch: "aarch64"
|
||||
# target: "aarch64-pc-windows-msvc"
|
||||
# pkg_target: "latest-win-arm64"
|
||||
@@ -99,19 +105,23 @@ jobs:
|
||||
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: .node-version
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 #v4.2.0
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 #v6.0.0
|
||||
with:
|
||||
node-version-file: .node-version
|
||||
cache: "pnpm"
|
||||
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 #master
|
||||
with:
|
||||
toolchain: stable
|
||||
targets: ${{ matrix.target }}
|
||||
|
||||
- name: Install dependencies (Ubuntu only)
|
||||
@@ -121,18 +131,16 @@ jobs:
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libgtk-3-dev libayatana-appindicator3-dev librsvg2-dev pkg-config xdg-utils
|
||||
|
||||
- name: Rust cache
|
||||
uses: swatinem/rust-cache@v2
|
||||
uses: swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 #v2.8.1
|
||||
with:
|
||||
workdir: ./src-tauri
|
||||
|
||||
- name: Install banderole
|
||||
run: cargo install banderole
|
||||
|
||||
- name: Install frontend dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Install nodecar dependencies
|
||||
working-directory: ./nodecar
|
||||
run: |
|
||||
pnpm install --frozen-lockfile
|
||||
|
||||
- name: Build nodecar sidecar
|
||||
shell: bash
|
||||
working-directory: ./nodecar
|
||||
@@ -144,20 +152,53 @@ jobs:
|
||||
run: |
|
||||
mkdir -p src-tauri/binaries
|
||||
if [[ "${{ matrix.platform }}" == "windows-latest" ]]; then
|
||||
cp nodecar/dist/nodecar.exe src-tauri/binaries/nodecar-${{ matrix.target }}.exe
|
||||
cp nodecar/nodecar-bin src-tauri/binaries/nodecar-${{ matrix.target }}.exe
|
||||
else
|
||||
cp nodecar/dist/nodecar src-tauri/binaries/nodecar-${{ matrix.target }}
|
||||
cp nodecar/nodecar-bin src-tauri/binaries/nodecar-${{ matrix.target }}
|
||||
fi
|
||||
|
||||
# - name: Download Camoufox for testing
|
||||
# run: npx camoufox-js fetch
|
||||
# continue-on-error: true
|
||||
|
||||
- name: Build frontend
|
||||
run: pnpm build
|
||||
run: pnpm exec next build
|
||||
|
||||
- name: Verify frontend dist exists
|
||||
shell: bash
|
||||
run: |
|
||||
if [ ! -d "dist" ]; then
|
||||
echo "Error: dist directory not found after build"
|
||||
ls -la
|
||||
exit 1
|
||||
fi
|
||||
echo "Frontend dist directory verified at $(pwd)/dist"
|
||||
echo "Checking from src-tauri perspective:"
|
||||
ls -la src-tauri/../dist || echo "Warning: dist not accessible from src-tauri"
|
||||
|
||||
- name: Build donut-proxy sidecar
|
||||
shell: bash
|
||||
working-directory: ./src-tauri
|
||||
run: cargo build --bin donut-proxy --target ${{ matrix.target }} --release
|
||||
|
||||
- name: Copy donut-proxy binary to Tauri binaries
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p src-tauri/binaries
|
||||
if [[ "${{ matrix.platform }}" == "windows-latest" ]]; then
|
||||
cp src-tauri/target/${{ matrix.target }}/release/donut-proxy.exe src-tauri/binaries/donut-proxy-${{ matrix.target }}.exe
|
||||
else
|
||||
cp src-tauri/target/${{ matrix.target }}/release/donut-proxy src-tauri/binaries/donut-proxy-${{ matrix.target }}
|
||||
chmod +x src-tauri/binaries/donut-proxy-${{ matrix.target }}
|
||||
fi
|
||||
|
||||
- name: Build Tauri app
|
||||
uses: tauri-apps/tauri-action@v0
|
||||
uses: tauri-apps/tauri-action@19b93bb55601e3e373a93cfb6eb4242e45f5af20 #v0.6.0
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GITHUB_REF_NAME: ${{ github.ref_name }}
|
||||
with:
|
||||
projectPath: ./src-tauri
|
||||
tagName: ${{ github.ref_name }}
|
||||
releaseName: "Donut Browser ${{ github.ref_name }}"
|
||||
releaseBody: "See the assets to download this version and install."
|
||||
@@ -165,9 +206,8 @@ jobs:
|
||||
prerelease: false
|
||||
args: ${{ matrix.args }}
|
||||
|
||||
- name: Commit CHANGELOG.md
|
||||
uses: stefanzweifel/git-auto-commit-action@v6
|
||||
with:
|
||||
branch: main
|
||||
commit_message: "docs: update CHANGELOG.md for ${{ github.ref_name }} [skip ci]"
|
||||
file_pattern: CHANGELOG.md
|
||||
# - name: Commit CHANGELOG.md
|
||||
# uses: stefanzweifel/git-auto-commit-action@778341af668090896ca464160c2def5d1d1a3eb0 #v6.0.1
|
||||
# with:
|
||||
# branch: main
|
||||
# commit_message: "docs: update CHANGELOG.md for ${{ github.ref_name }} [skip ci]"
|
||||
|
||||
@@ -12,7 +12,7 @@ env:
|
||||
jobs:
|
||||
security-scan:
|
||||
name: Security Vulnerability Scan
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@e69cc6c86b31f1e7e23935bbe7031b50e51082de" # v2.0.2
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@b77c075a1235514558f0eb88dbd31e22c45e0cd2" # v2.3.0
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
@@ -30,11 +30,15 @@ jobs:
|
||||
name: Lint JavaScript/TypeScript
|
||||
uses: ./.github/workflows/lint-js.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
lint-rust:
|
||||
name: Lint Rust
|
||||
uses: ./.github/workflows/lint-rs.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
codeql:
|
||||
name: CodeQL
|
||||
@@ -50,6 +54,8 @@ jobs:
|
||||
name: Spell Check
|
||||
uses: ./.github/workflows/spellcheck.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
rolling-release:
|
||||
needs: [security-scan, lint-js, lint-rust, codeql, spellcheck]
|
||||
@@ -60,57 +66,61 @@ jobs:
|
||||
matrix:
|
||||
include:
|
||||
- platform: "macos-latest"
|
||||
args: "--target aarch64-apple-darwin"
|
||||
args: "--target aarch64-apple-darwin --verbose"
|
||||
arch: "aarch64"
|
||||
target: "aarch64-apple-darwin"
|
||||
pkg_target: "latest-macos-arm64"
|
||||
nodecar_script: "build:mac-aarch64"
|
||||
- platform: "macos-latest"
|
||||
args: "--target x86_64-apple-darwin"
|
||||
args: "--target x86_64-apple-darwin --verbose"
|
||||
arch: "x86_64"
|
||||
target: "x86_64-apple-darwin"
|
||||
pkg_target: "latest-macos-x64"
|
||||
nodecar_script: "build:mac-x86_64"
|
||||
- platform: "ubuntu-22.04"
|
||||
args: "--target x86_64-unknown-linux-gnu"
|
||||
args: "--target x86_64-unknown-linux-gnu --verbose"
|
||||
arch: "x86_64"
|
||||
target: "x86_64-unknown-linux-gnu"
|
||||
pkg_target: "latest-linux-x64"
|
||||
nodecar_script: "build:linux-x64"
|
||||
- platform: "ubuntu-22.04-arm"
|
||||
args: "--target aarch64-unknown-linux-gnu"
|
||||
args: "--target aarch64-unknown-linux-gnu --verbose"
|
||||
arch: "aarch64"
|
||||
target: "aarch64-unknown-linux-gnu"
|
||||
pkg_target: "latest-linux-arm64"
|
||||
nodecar_script: "build:linux-arm64"
|
||||
- platform: "windows-latest"
|
||||
args: "--target x86_64-pc-windows-msvc"
|
||||
args: "--target x86_64-pc-windows-msvc --verbose"
|
||||
arch: "x86_64"
|
||||
target: "x86_64-pc-windows-msvc"
|
||||
pkg_target: "latest-win-x64"
|
||||
nodecar_script: "build:win-x64"
|
||||
- platform: "windows-11-arm"
|
||||
args: "--target aarch64-pc-windows-msvc"
|
||||
arch: "aarch64"
|
||||
target: "aarch64-pc-windows-msvc"
|
||||
pkg_target: "latest-win-arm64"
|
||||
nodecar_script: "build:win-arm64"
|
||||
# - platform: "windows-11-arm"
|
||||
# args: "--target aarch64-pc-windows-msvc --verbose"
|
||||
# arch: "aarch64"
|
||||
# target: "aarch64-pc-windows-msvc"
|
||||
# pkg_target: "latest-win-arm64"
|
||||
# nodecar_script: "build:win-arm64"
|
||||
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version-file: .node-version
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 #v4.2.0
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 #v6.0.0
|
||||
with:
|
||||
node-version-file: .node-version
|
||||
cache: "pnpm"
|
||||
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 #master
|
||||
with:
|
||||
toolchain: stable
|
||||
targets: ${{ matrix.target }}
|
||||
|
||||
- name: Install dependencies (Ubuntu only)
|
||||
@@ -120,18 +130,16 @@ jobs:
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libgtk-3-dev libayatana-appindicator3-dev librsvg2-dev pkg-config xdg-utils
|
||||
|
||||
- name: Rust cache
|
||||
uses: swatinem/rust-cache@v2
|
||||
uses: swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 #v2.8.1
|
||||
with:
|
||||
workdir: ./src-tauri
|
||||
|
||||
- name: Install banderole
|
||||
run: cargo install banderole
|
||||
|
||||
- name: Install frontend dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Install nodecar dependencies
|
||||
working-directory: ./nodecar
|
||||
run: |
|
||||
pnpm install --frozen-lockfile
|
||||
|
||||
- name: Build nodecar sidecar
|
||||
shell: bash
|
||||
working-directory: ./nodecar
|
||||
@@ -143,13 +151,45 @@ jobs:
|
||||
run: |
|
||||
mkdir -p src-tauri/binaries
|
||||
if [[ "${{ matrix.platform }}" == "windows-latest" ]]; then
|
||||
cp nodecar/dist/nodecar.exe src-tauri/binaries/nodecar-${{ matrix.target }}.exe
|
||||
cp nodecar/nodecar-bin src-tauri/binaries/nodecar-${{ matrix.target }}.exe
|
||||
else
|
||||
cp nodecar/dist/nodecar src-tauri/binaries/nodecar-${{ matrix.target }}
|
||||
cp nodecar/nodecar-bin src-tauri/binaries/nodecar-${{ matrix.target }}
|
||||
fi
|
||||
|
||||
# - name: Download Camoufox for testing
|
||||
# run: npx camoufox-js fetch
|
||||
# continue-on-error: true
|
||||
|
||||
- name: Build frontend
|
||||
run: pnpm build
|
||||
run: pnpm exec next build
|
||||
|
||||
- name: Verify frontend dist exists
|
||||
shell: bash
|
||||
run: |
|
||||
if [ ! -d "dist" ]; then
|
||||
echo "Error: dist directory not found after build"
|
||||
ls -la
|
||||
exit 1
|
||||
fi
|
||||
echo "Frontend dist directory verified at $(pwd)/dist"
|
||||
echo "Checking from src-tauri perspective:"
|
||||
ls -la src-tauri/../dist || echo "Warning: dist not accessible from src-tauri"
|
||||
|
||||
- name: Build donut-proxy sidecar
|
||||
shell: bash
|
||||
working-directory: ./src-tauri
|
||||
run: cargo build --bin donut-proxy --target ${{ matrix.target }} --release
|
||||
|
||||
- name: Copy donut-proxy binary to Tauri binaries
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p src-tauri/binaries
|
||||
if [[ "${{ matrix.platform }}" == "windows-latest" ]]; then
|
||||
cp src-tauri/target/${{ matrix.target }}/release/donut-proxy.exe src-tauri/binaries/donut-proxy-${{ matrix.target }}.exe
|
||||
else
|
||||
cp src-tauri/target/${{ matrix.target }}/release/donut-proxy src-tauri/binaries/donut-proxy-${{ matrix.target }}
|
||||
chmod +x src-tauri/binaries/donut-proxy-${{ matrix.target }}
|
||||
fi
|
||||
|
||||
- name: Generate nightly timestamp
|
||||
id: timestamp
|
||||
@@ -161,13 +201,14 @@ jobs:
|
||||
echo "Generated timestamp: ${TIMESTAMP}-${COMMIT_HASH}"
|
||||
|
||||
- name: Build Tauri app
|
||||
uses: tauri-apps/tauri-action@v0
|
||||
uses: tauri-apps/tauri-action@19b93bb55601e3e373a93cfb6eb4242e45f5af20 #v0.6.0
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
BUILD_TAG: "nightly-${{ steps.timestamp.outputs.timestamp }}"
|
||||
GITHUB_REF_NAME: "nightly-${{ steps.timestamp.outputs.timestamp }}"
|
||||
GITHUB_SHA: ${{ github.sha }}
|
||||
with:
|
||||
projectPath: ./src-tauri
|
||||
tagName: "nightly-${{ steps.timestamp.outputs.timestamp }}"
|
||||
releaseName: "Donut Browser Nightly (Build ${{ steps.timestamp.outputs.timestamp }})"
|
||||
releaseBody: "⚠️ **Nightly Release** - This is an automatically generated pre-release build from the latest main branch. Use with caution.\n\nCommit: ${{ github.sha }}\nBuild: ${{ steps.timestamp.outputs.timestamp }}"
|
||||
|
||||
@@ -21,6 +21,6 @@
     runs-on: ubuntu-latest
     steps:
       - name: Checkout Actions Repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
       - name: Spell Check Repo
-        uses: crate-ci/typos@v1.33.1
+        uses: crate-ci/typos@626c4bedb751ce0b7f03262ca97ddda9a076ae1c #v1.39.2
@@ -0,0 +1,21 @@
+name: Mark stale issues and pull requests
+
+on:
+  schedule:
+    - cron: "35 23 * * *"
+
+jobs:
+  stale:
+    runs-on: ubuntu-latest
+    permissions:
+      issues: write
+      pull-requests: write
+
+    steps:
+      - uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
+        with:
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
+          stale-issue-message: "This issue has been inactive for 60 days. Please respond to keep it open."
+          stale-pr-message: "This pull request has been inactive for 60 days. Please respond to keep it open."
+          stale-issue-label: "stale"
+          stale-pr-label: "stale"
+3
-3
@@ -46,7 +46,7 @@ yarn-error.log*
|
||||
# typescript
|
||||
*.tsbuildinfo
|
||||
|
||||
# eslint
|
||||
.eslintcache
|
||||
!**/.gitkeep
|
||||
|
||||
!**/.gitkeep
|
||||
# nodecar
|
||||
nodecar/nodecar-bin
|
||||
Vendored
+135
-5
@@ -1,85 +1,215 @@
|
||||
{
|
||||
"cSpell.words": [
|
||||
"ABORTIFHUNG",
|
||||
"aboutwelcome",
|
||||
"adwaita",
|
||||
"ahooks",
|
||||
"akhilmhdh",
|
||||
"appimage",
|
||||
"appindicator",
|
||||
"applescript",
|
||||
"asyncio",
|
||||
"autoconfig",
|
||||
"autologin",
|
||||
"biomejs",
|
||||
"breezedark",
|
||||
"browserforge",
|
||||
"busctl",
|
||||
"CAMOU",
|
||||
"camoufox",
|
||||
"catppuccin",
|
||||
"cdylib",
|
||||
"certifi",
|
||||
"CFURL",
|
||||
"checkin",
|
||||
"chrono",
|
||||
"ciphertext",
|
||||
"cksum",
|
||||
"CLICOLOR",
|
||||
"clippy",
|
||||
"cmdk",
|
||||
"codegen",
|
||||
"codesign",
|
||||
"commitish",
|
||||
"CTYPE",
|
||||
"daijro",
|
||||
"dataclasses",
|
||||
"datareporting",
|
||||
"datas",
|
||||
"DBAPI",
|
||||
"dconf",
|
||||
"debuginfo",
|
||||
"devedition",
|
||||
"distro",
|
||||
"doctest",
|
||||
"doesn",
|
||||
"domcontentloaded",
|
||||
"donutbrowser",
|
||||
"doorhanger",
|
||||
"dpkg",
|
||||
"dtolnay",
|
||||
"dyld",
|
||||
"elif",
|
||||
"errorlevel",
|
||||
"esac",
|
||||
"esbuild",
|
||||
"eslintcache",
|
||||
"etree",
|
||||
"firstrun",
|
||||
"flate",
|
||||
"frontmost",
|
||||
"geoip",
|
||||
"getcwd",
|
||||
"gettimezone",
|
||||
"gifs",
|
||||
"gsettings",
|
||||
"healthreport",
|
||||
"hiddenimports",
|
||||
"hkcu",
|
||||
"hooksconfig",
|
||||
"hookspath",
|
||||
"Hoverable",
|
||||
"icns",
|
||||
"idlelib",
|
||||
"idletime",
|
||||
"idna",
|
||||
"infobars",
|
||||
"Inno",
|
||||
"kdeglobals",
|
||||
"keras",
|
||||
"KHTML",
|
||||
"killall",
|
||||
"Kolkata",
|
||||
"kreadconfig",
|
||||
"langpack",
|
||||
"launchservices",
|
||||
"letterboxing",
|
||||
"libatk",
|
||||
"libayatana",
|
||||
"libc",
|
||||
"libcairo",
|
||||
"libgdk",
|
||||
"libglib",
|
||||
"libpango",
|
||||
"librsvg",
|
||||
"libwebkit",
|
||||
"libxdo",
|
||||
"localtime",
|
||||
"lpdw",
|
||||
"lxml",
|
||||
"lzma",
|
||||
"macchiato",
|
||||
"Matchalk",
|
||||
"mmdb",
|
||||
"mountpoint",
|
||||
"msiexec",
|
||||
"mstone",
|
||||
"msvc",
|
||||
"msys",
|
||||
"Mullvad",
|
||||
"mullvadbrowser",
|
||||
"mypy",
|
||||
"noarchive",
|
||||
"nobrowse",
|
||||
"noconfirm",
|
||||
"nodecar",
|
||||
"NODELAY",
|
||||
"nodemon",
|
||||
"norestart",
|
||||
"NSIS",
|
||||
"ntlm",
|
||||
"numpy",
|
||||
"objc",
|
||||
"orhun",
|
||||
"orjson",
|
||||
"osascript",
|
||||
"oscpu",
|
||||
"outpath",
|
||||
"pathex",
|
||||
"pathlib",
|
||||
"peerconnection",
|
||||
"pids",
|
||||
"pixbuf",
|
||||
"pkexec",
|
||||
"pkill",
|
||||
"plasmohq",
|
||||
"platformdirs",
|
||||
"prefs",
|
||||
"PRIO",
|
||||
"propertylist",
|
||||
"psutil",
|
||||
"pycache",
|
||||
"pydantic",
|
||||
"pyee",
|
||||
"pyinstaller",
|
||||
"pyoxidizer",
|
||||
"pytest",
|
||||
"pyyaml",
|
||||
"reportingpolicy",
|
||||
"reqwest",
|
||||
"ridedott",
|
||||
"rlib",
|
||||
"rustc",
|
||||
"rwxr",
|
||||
"SARIF",
|
||||
"scipy",
|
||||
"screeninfo",
|
||||
"selectables",
|
||||
"serde",
|
||||
"sessionstore",
|
||||
"setpriority",
|
||||
"setsid",
|
||||
"SETTINGCHANGE",
|
||||
"setuptools",
|
||||
"shadcn",
|
||||
"showcursor",
|
||||
"shutil",
|
||||
"signon",
|
||||
"signum",
|
||||
"sklearn",
|
||||
"SMTO",
|
||||
"sonner",
|
||||
"splitn",
|
||||
"sspi",
|
||||
"staticlib",
|
||||
"stefanzweifel",
|
||||
"subdirs",
|
||||
"subkey",
|
||||
"SUPPRESSMSGBOXES",
|
||||
"swatinem",
|
||||
"sysinfo",
|
||||
"systempreferences",
|
||||
"systemsetup",
|
||||
"taskkill",
|
||||
"tasklist",
|
||||
"tauri",
|
||||
"TERX",
|
||||
"testpass",
|
||||
"testuser",
|
||||
"timedatectl",
|
||||
"titlebar",
|
||||
"Torbrowser",
|
||||
"tkinter",
|
||||
"tqdm",
|
||||
"trackingprotection",
|
||||
"trailhead",
|
||||
"turbopack",
|
||||
"turtledemo",
|
||||
"udeps",
|
||||
"unlisten",
|
||||
"unminimize",
|
||||
"unrs",
|
||||
"urlencoding",
|
||||
"urllib",
|
||||
"utoipa",
|
||||
"venv",
|
||||
"vercel",
|
||||
"VERYSILENT",
|
||||
"webgl",
|
||||
"webrtc",
|
||||
"winreg",
|
||||
"wiremock",
|
||||
"xattr",
|
||||
"zhom"
|
||||
"xfconf",
|
||||
"xsettings",
|
||||
"zhom",
|
||||
"zipball",
|
||||
"zoneinfo"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
# Instructions for AI Agents
|
||||
|
||||
- After your changes, instead of running specific tests or linting specific files, run "pnpm format && pnpm lint && pnpm test". This formats the code first, then lints it, then tests it, so that nothing is broken after your changes.
|
||||
- Don't leave comments that don't add value
|
||||
- Do not duplicate code unless you have a very good reason to do so. It is important that the same logic is not duplicated multiple times
|
||||
- Don't leave comments that don't add value.
|
||||
- Do not duplicate code unless you have a very good reason to do so. It is important that the same logic is not duplicated multiple times.
|
||||
- Before finishing the task and showing the summary, always run "pnpm format && pnpm lint && pnpm test" at the root of the project to ensure that you don't finish with a broken application.
|
||||
- Anytime you change nodecar's code and want to test it, recompile it with "cd nodecar && pnpm build".
|
||||
- If there is a global singleton of a struct, only use it inside a method and initialize it properly, unless I have explicitly specified otherwise in the request.
|
||||
- If you are modifying the UI, do not add arbitrary colors that are not controlled by the src/lib/themes.ts file.
|
||||
+6
-42
@@ -26,6 +26,7 @@ Ensure you have the following dependencies installed:
|
||||
- Node.js (see `.node-version` for exact version)
|
||||
- pnpm package manager
|
||||
- Latest Rust and Cargo toolchain
|
||||
- [Banderole](https://github.com/zhom/banderole)
|
||||
- [Tauri prerequisites guide](https://v2.tauri.app/start/prerequisites/).
|
||||
|
||||
## Run Locally
|
||||
@@ -46,12 +47,13 @@ After having the above dependencies installed, proceed through the following ste
|
||||
pnpm install
|
||||
```
|
||||
|
||||
4. **Install nodecar dependencies**
|
||||
4. **Build nodecar**
|
||||
|
||||
Building nodecar requires you to have `banderole` installed.
|
||||
|
||||
```bash
|
||||
cd nodecar
|
||||
pnpm install --frozen-lockfile
|
||||
cd ..
|
||||
pnpm build
|
||||
```
|
||||
|
||||
5. **Start the development server**
|
||||
@@ -105,7 +107,6 @@ Make sure the build completes successfully without errors.
|
||||
## Testing
|
||||
|
||||
- Always test your changes on the target platform
|
||||
- Test both development and production builds
|
||||
- Verify that existing functionality still works
|
||||
- Add tests for new features when possible
|
||||
|
||||
@@ -149,50 +150,13 @@ Refs #00000
|
||||
|
||||
- Ensure that the "Allow edits from maintainers" option is checked
|
||||
|
||||
## Types of Contributions
|
||||
|
||||
### Bug Reports
|
||||
|
||||
When filing bug reports, please include:
|
||||
|
||||
- Clear description of the issue
|
||||
- Steps to reproduce
|
||||
- Expected vs actual behavior
|
||||
- Environment details (OS, version, etc.)
|
||||
- Screenshots or error logs if applicable
|
||||
|
||||
### Feature Requests
|
||||
|
||||
When suggesting new features:
|
||||
|
||||
- Explain the use case and why it's valuable
|
||||
- Describe the desired behavior
|
||||
- Consider alternatives you've thought of
|
||||
- Check if it aligns with our roadmap
|
||||
|
||||
### Code Contributions
|
||||
|
||||
- Bug fixes
|
||||
- New features
|
||||
- Performance improvements
|
||||
- Documentation updates
|
||||
- Test coverage improvements
|
||||
|
||||
### Documentation
|
||||
|
||||
- README improvements
|
||||
- Code comments
|
||||
- API documentation
|
||||
- Tutorial content
|
||||
- Translation work
|
||||
|
||||
## Architecture Overview
|
||||
|
||||
Donut Browser is built with:
|
||||
|
||||
- **Frontend**: Next.js React application
|
||||
- **Backend**: Tauri (Rust) for native functionality
|
||||
- **Node.js Sidecar**: `nodecar` binary for proxy support
|
||||
- **Node.js Sidecar**: `nodecar` binary for access to the JavaScript ecosystem
|
||||
- **Build System**: GitHub Actions for CI/CD
|
||||
|
||||
Understanding this architecture will help you contribute more effectively.
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
<div align="center">
|
||||
<img src="assets/logo.png" alt="Donut Browser Logo" width="150">
|
||||
<h1>Donut Browser</h1>
|
||||
<strong>A powerful browser orchestrator that puts you in control of your browsing experience. 🍩</strong>
|
||||
<strong>A powerful anti-detect browser that puts you in control of your browsing experience. 🍩</strong>
|
||||
</div>
|
||||
<br>
|
||||
|
||||
@@ -25,10 +25,6 @@
|
||||
</a>
|
||||
</p>
|
||||
|
||||
## Donut Browser
|
||||
|
||||
> A free and open source browser orchestrator built with [Tauri](https://v2.tauri.app/).
|
||||
|
||||
<picture>
|
||||
<source media="(prefers-color-scheme: dark)" srcset="assets/preview-dark.png" />
|
||||
<source media="(prefers-color-scheme: light)" srcset="assets/preview.png" />
|
||||
@@ -38,9 +34,10 @@
|
||||
## Features
|
||||
|
||||
- Create an unlimited number of local browser profiles that are completely isolated from each other
|
||||
- Proxy support with basic auth for all browsers except for TOR Browser
|
||||
- Safely use multiple accounts on one device by using anti-detect browser profiles, powered by [Camoufox](https://camoufox.com)
|
||||
- Proxy support with basic auth for all browsers
|
||||
- Import profiles from your existing browsers
|
||||
- Automatic updates both for browsers and for the app itself
|
||||
- Automatic updates for browsers
|
||||
- Set Donut Browser as your default browser to control which profile links open in
|
||||
|
||||
## Download
|
||||
@@ -83,6 +80,24 @@ Have questions or want to contribute? We'd love to hear from you!
|
||||
</picture>
|
||||
</a>
|
||||
|
||||
## Contributors
|
||||
|
||||
<!-- readme: collaborators,contributors -start -->
|
||||
<table>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td align="center">
|
||||
<a href="https://github.com/zhom">
|
||||
<img src="https://avatars.githubusercontent.com/u/2717306?v=4" width="100;" alt="zhom"/>
|
||||
<br />
|
||||
<sub><b>zhom</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
</tr>
|
||||
<tbody>
|
||||
</table>
|
||||
<!-- readme: collaborators,contributors -end -->
|
||||
|
||||
## Contact
|
||||
|
||||
Have an urgent question or want to report a security vulnerability? Send an email to contact at donutbrowser dot com and we'll get back to you as fast as possible.
|
||||
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 523 KiB After Width: | Height: | Size: 111 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 540 KiB After Width: | Height: | Size: 114 KiB |
+8
-19
@@ -1,48 +1,37 @@
|
||||
{
|
||||
"$schema": "https://biomejs.dev/schemas/1.9.4/schema.json",
|
||||
"$schema": "https://biomejs.dev/schemas/2.2.0/schema.json",
|
||||
"vcs": {
|
||||
"enabled": false,
|
||||
"clientKind": "git",
|
||||
"useIgnoreFile": false
|
||||
},
|
||||
"files": {
|
||||
"ignoreUnknown": false,
|
||||
"ignore": []
|
||||
"ignoreUnknown": false
|
||||
},
|
||||
"formatter": {
|
||||
"enabled": true,
|
||||
"indentStyle": "space",
|
||||
"indentWidth": 2
|
||||
},
|
||||
"organizeImports": {
|
||||
"enabled": true
|
||||
},
|
||||
"linter": {
|
||||
"enabled": true,
|
||||
"rules": {
|
||||
"recommended": true,
|
||||
"correctness": {
|
||||
"useUniqueElementIds": "off",
|
||||
"useHookAtTopLevel": "error"
|
||||
},
|
||||
"nursery": {
|
||||
"useGoogleFontDisplay": "error",
|
||||
"noDocumentImportInPage": "error",
|
||||
"noHeadElement": "error",
|
||||
"noHeadImportInDocument": "error",
|
||||
"noImgElement": "off",
|
||||
"useComponentExportOnlyModules": {
|
||||
"level": "error",
|
||||
"options": {
|
||||
"allowExportNames": ["metadata", "badgeVariants", "buttonVariants"]
|
||||
}
|
||||
}
|
||||
},
|
||||
"nursery": "off",
|
||||
"suspicious": "off",
|
||||
"a11y": {
|
||||
"useSemanticElements": "off"
|
||||
}
|
||||
}
|
||||
},
|
||||
"css": {
|
||||
"parser": {
|
||||
"tailwindDirectives": true
|
||||
},
|
||||
"formatter": {
|
||||
"quoteStyle": "double"
|
||||
}
|
||||
|
||||
@@ -1,92 +0,0 @@
|
||||
import { FlatCompat } from "@eslint/eslintrc";
|
||||
import eslint from "@eslint/js";
|
||||
import tseslint from "typescript-eslint";
|
||||
|
||||
const compat = new FlatCompat({
|
||||
baseDirectory: import.meta.dirname,
|
||||
});
|
||||
|
||||
const eslintConfig = tseslint.config(
|
||||
eslint.configs.recommended,
|
||||
tseslint.configs.strictTypeChecked,
|
||||
tseslint.configs.stylisticTypeChecked,
|
||||
...compat.extends("next/core-web-vitals"),
|
||||
{
|
||||
// Disabled rules taken from https://biomejs.dev/linter/rules-sources for ones that
|
||||
// are already handled by Prettier and TypeScript or are not needed
|
||||
rules: {
|
||||
// eslint-plugin-jsx-a11y rules - some disabled for performance/specific project needs
|
||||
"jsx-a11y/alt-text": "off",
|
||||
"jsx-a11y/anchor-has-content": "off",
|
||||
"jsx-a11y/anchor-is-valid": "off",
|
||||
"jsx-a11y/aria-activedescendant-has-tabindex": "off",
|
||||
"jsx-a11y/aria-props": "off",
|
||||
"jsx-a11y/aria-proptypes": "off",
|
||||
"jsx-a11y/aria-role": "off",
|
||||
"jsx-a11y/aria-unsupported-elements": "off",
|
||||
"jsx-a11y/autocomplete-valid": "off",
|
||||
"jsx-a11y/click-events-have-key-events": "off",
|
||||
"jsx-a11y/heading-has-content": "off",
|
||||
"jsx-a11y/html-has-lang": "off",
|
||||
"jsx-a11y/iframe-has-title": "off",
|
||||
"jsx-a11y/img-redundant-alt": "off",
|
||||
"jsx-a11y/interactive-supports-focus": "off",
|
||||
"jsx-a11y/label-has-associated-control": "off",
|
||||
"jsx-a11y/lang": "off",
|
||||
"jsx-a11y/media-has-caption": "off",
|
||||
"jsx-a11y/mouse-events-have-key-events": "off",
|
||||
"jsx-a11y/no-access-key": "off",
|
||||
"jsx-a11y/no-aria-hidden-on-focusable": "off",
|
||||
"jsx-a11y/no-autofocus": "off",
|
||||
"jsx-a11y/no-distracting-elements": "off",
|
||||
"jsx-a11y/no-interactive-element-to-noninteractive-role": "off",
|
||||
"jsx-a11y/no-noninteractive-element-to-interactive-role": "off",
|
||||
"jsx-a11y/no-noninteractive-tabindex": "off",
|
||||
"jsx-a11y/no-redundant-roles": "off",
|
||||
"jsx-a11y/no-static-element-interactions": "off",
|
||||
"jsx-a11y/prefer-tag-over-role": "off",
|
||||
"jsx-a11y/role-has-required-aria-props": "off",
|
||||
"jsx-a11y/role-supports-aria-props": "off",
|
||||
"jsx-a11y/scope": "off",
|
||||
"jsx-a11y/tabindex-no-positive": "off",
|
||||
// eslint-plugin-react rules - some disabled for performance/specific project needs
|
||||
"react/button-has-type": "off",
|
||||
"react/jsx-boolean-value": "off",
|
||||
"react/jsx-curly-brace-presence": "off",
|
||||
"react/jsx-fragments": "off",
|
||||
"react/jsx-key": "off",
|
||||
"react/jsx-no-comment-textnodes": "off",
|
||||
"react/jsx-no-duplicate-props": "off",
|
||||
"react/jsx-no-target-blank": "off",
|
||||
"react/jsx-no-useless-fragment": "off",
|
||||
"react/no-array-index-key": "off",
|
||||
"react/no-children-prop": "off",
|
||||
"react/no-danger": "off",
|
||||
"react/no-danger-with-children": "off",
|
||||
"react/void-dom-elements-no-children": "off",
|
||||
// eslint-plugin-react-hooks rules - disabled for specific project needs
|
||||
"react-hooks/exhaustive-deps": "off",
|
||||
"react-hooks/rules-of-hooks": "off",
|
||||
// typescript-eslint rules - some handled by TypeScript compiler or disabled for project needs
|
||||
"@typescript-eslint/no-explicit-any": "off",
|
||||
"@typescript-eslint/require-await": "off",
|
||||
// Custom rules
|
||||
"@typescript-eslint/restrict-template-expressions": [
|
||||
"error",
|
||||
{
|
||||
allowNumber: true,
|
||||
allowBoolean: true,
|
||||
allowNever: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
languageOptions: {
|
||||
parserOptions: {
|
||||
projectService: true,
|
||||
tsconfigRootDir: import.meta.dirname,
|
||||
},
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
export default eslintConfig;
|
||||
Vendored
+1
@@ -1,5 +1,6 @@
|
||||
/// <reference types="next" />
|
||||
/// <reference types="next/image-types/global" />
|
||||
import "./dist/dev/types/routes.d.ts";
|
||||
|
||||
// NOTE: This file should not be edited
|
||||
// see https://nextjs.org/docs/app/api-reference/config/typescript for more information.
|
||||
|
||||
@@ -7,6 +7,9 @@ const nextConfig: NextConfig = {
|
||||
unoptimized: true,
|
||||
},
|
||||
distDir: "dist",
|
||||
compiler: {
|
||||
removeConsole: process.env.NODE_ENV === "production",
|
||||
},
|
||||
};
|
||||
|
||||
export default nextConfig;
|
||||
|
||||
@@ -22,7 +22,4 @@ if [ -z "$TARGET_TRIPLE" ]; then
|
||||
fi
|
||||
|
||||
# Copy the file with target triple suffix
|
||||
cp "dist/nodecar${EXT}" "../src-tauri/binaries/nodecar-${TARGET_TRIPLE}${EXT}"
|
||||
|
||||
# Also copy a generic version for Tauri to find
|
||||
cp "dist/nodecar${EXT}" "../src-tauri/binaries/nodecar${EXT}"
|
||||
cp "nodecar-bin" "../src-tauri/binaries/nodecar-${TARGET_TRIPLE}${EXT}"
|
||||
|
||||
@@ -1,90 +0,0 @@
|
||||
import { FlatCompat } from "@eslint/eslintrc";
|
||||
import eslint from "@eslint/js";
|
||||
import tseslint from "typescript-eslint";
|
||||
|
||||
const compat = new FlatCompat({
|
||||
baseDirectory: import.meta.dirname,
|
||||
});
|
||||
|
||||
const eslintConfig = tseslint.config(
|
||||
eslint.configs.recommended,
|
||||
...compat.extends("next/core-web-vitals"),
|
||||
{
|
||||
// Disabled rules taken from https://biomejs.dev/linter/rules-sources for ones that
|
||||
// are already handled by Prettier and TypeScript or are not needed
|
||||
rules: {
|
||||
// eslint-plugin-jsx-a11y rules - some disabled for performance/specific project needs
|
||||
"jsx-a11y/alt-text": "off",
|
||||
"jsx-a11y/anchor-has-content": "off",
|
||||
"jsx-a11y/anchor-is-valid": "off",
|
||||
"jsx-a11y/aria-activedescendant-has-tabindex": "off",
|
||||
"jsx-a11y/aria-props": "off",
|
||||
"jsx-a11y/aria-proptypes": "off",
|
||||
"jsx-a11y/aria-role": "off",
|
||||
"jsx-a11y/aria-unsupported-elements": "off",
|
||||
"jsx-a11y/autocomplete-valid": "off",
|
||||
"jsx-a11y/click-events-have-key-events": "off",
|
||||
"jsx-a11y/heading-has-content": "off",
|
||||
"jsx-a11y/html-has-lang": "off",
|
||||
"jsx-a11y/iframe-has-title": "off",
|
||||
"jsx-a11y/img-redundant-alt": "off",
|
||||
"jsx-a11y/interactive-supports-focus": "off",
|
||||
"jsx-a11y/label-has-associated-control": "off",
|
||||
"jsx-a11y/lang": "off",
|
||||
"jsx-a11y/media-has-caption": "off",
|
||||
"jsx-a11y/mouse-events-have-key-events": "off",
|
||||
"jsx-a11y/no-access-key": "off",
|
||||
"jsx-a11y/no-aria-hidden-on-focusable": "off",
|
||||
"jsx-a11y/no-autofocus": "off",
|
||||
"jsx-a11y/no-distracting-elements": "off",
|
||||
"jsx-a11y/no-interactive-element-to-noninteractive-role": "off",
|
||||
"jsx-a11y/no-noninteractive-element-to-interactive-role": "off",
|
||||
"jsx-a11y/no-noninteractive-tabindex": "off",
|
||||
"jsx-a11y/no-redundant-roles": "off",
|
||||
"jsx-a11y/no-static-element-interactions": "off",
|
||||
"jsx-a11y/prefer-tag-over-role": "off",
|
||||
"jsx-a11y/role-has-required-aria-props": "off",
|
||||
"jsx-a11y/role-supports-aria-props": "off",
|
||||
"jsx-a11y/scope": "off",
|
||||
"jsx-a11y/tabindex-no-positive": "off",
|
||||
// eslint-plugin-react rules - some disabled for performance/specific project needs
|
||||
"react/button-has-type": "off",
|
||||
"react/jsx-boolean-value": "off",
|
||||
"react/jsx-curly-brace-presence": "off",
|
||||
"react/jsx-fragments": "off",
|
||||
"react/jsx-key": "off",
|
||||
"react/jsx-no-comment-textnodes": "off",
|
||||
"react/jsx-no-duplicate-props": "off",
|
||||
"react/jsx-no-target-blank": "off",
|
||||
"react/jsx-no-useless-fragment": "off",
|
||||
"react/no-array-index-key": "off",
|
||||
"react/no-children-prop": "off",
|
||||
"react/no-danger": "off",
|
||||
"react/no-danger-with-children": "off",
|
||||
"react/void-dom-elements-no-children": "off",
|
||||
// eslint-plugin-react-hooks rules - disabled for specific project needs
|
||||
"react-hooks/exhaustive-deps": "off",
|
||||
"react-hooks/rules-of-hooks": "off",
|
||||
// typescript-eslint rules - some handled by TypeScript compiler or disabled for project needs
|
||||
"@typescript-eslint/no-explicit-any": "off",
|
||||
"@typescript-eslint/require-await": "off",
|
||||
// Custom rules
|
||||
"@typescript-eslint/restrict-template-expressions": [
|
||||
"error",
|
||||
{
|
||||
allowNumber: true,
|
||||
allowBoolean: true,
|
||||
allowNever: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
languageOptions: {
|
||||
parserOptions: {
|
||||
projectService: true,
|
||||
tsconfigRootDir: import.meta.dirname,
|
||||
},
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
export default eslintConfig;
|
||||
+21
-16
@@ -3,33 +3,38 @@
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "dist/index.js",
|
||||
"bin": "dist/index.js",
|
||||
"scripts": {
|
||||
"watch": "nodemon --exec ts-node --esm ./src/index.ts --watch src",
|
||||
"dev": "node --loader ts-node/esm ./src/index.ts",
|
||||
"start": "tsc && node ./dist/index.js",
|
||||
"test": "tsc && node ./dist/test-proxy.js",
|
||||
"rename-binary": "sh ./copy-binary.sh",
|
||||
"build": "tsc && pkg ./dist/index.js --targets latest-macos-arm64 --output dist/nodecar && pnpm rename-binary",
|
||||
"build:mac-aarch64": "tsc && pkg ./dist/index.js --targets latest-macos-arm64 --output dist/nodecar && pnpm rename-binary",
|
||||
"build:mac-x86_64": "tsc && pkg ./dist/index.js --targets latest-macos-x64 --output dist/nodecar && pnpm rename-binary",
|
||||
"build:linux-x64": "tsc && pkg ./dist/index.js --targets latest-linux-x64 --output dist/nodecar && pnpm rename-binary",
|
||||
"build:linux-arm64": "tsc && pkg ./dist/index.js --targets latest-linux-arm64 --output dist/nodecar && pnpm rename-binary",
|
||||
"build:win-x64": "tsc && pkg ./dist/index.js --targets latest-win-x64 --output dist/nodecar && pnpm rename-binary",
|
||||
"build:win-arm64": "tsc && pkg ./dist/index.js --targets latest-win-arm64 --output dist/nodecar && pnpm rename-binary"
|
||||
"build": "tsc && banderole bundle . --output nodecar-bin && pnpm rename-binary",
|
||||
"build:mac-aarch64": "tsc && banderole bundle . --output nodecar-bin && pnpm rename-binary",
|
||||
"build:mac-x86_64": "tsc && banderole bundle . --output nodecar-bin && pnpm rename-binary",
|
||||
"build:linux-x64": "tsc && banderole bundle . --output nodecar-bin && pnpm rename-binary",
|
||||
"build:linux-arm64": "tsc && banderole bundle . --output nodecar-bin && pnpm rename-binary",
|
||||
"build:win-x64": "tsc && banderole bundle . --output nodecar-bin && pnpm rename-binary",
|
||||
"build:win-arm64": "tsc && banderole bundle . --output nodecar-bin && pnpm rename-binary"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "AGPL-3.0",
|
||||
"dependencies": {
|
||||
"@types/node": "^24.0.1",
|
||||
"@yao-pkg/pkg": "^6.5.1",
|
||||
"commander": "^14.0.0",
|
||||
"dotenv": "^16.5.0",
|
||||
"@types/node": "^24.10.1",
|
||||
"commander": "^14.0.2",
|
||||
"donutbrowser-camoufox-js": "^0.7.0",
|
||||
"dotenv": "^17.2.3",
|
||||
"fingerprint-generator": "^2.1.77",
|
||||
"get-port": "^7.1.0",
|
||||
"nodemon": "^3.1.10",
|
||||
"proxy-chain": "^2.5.9",
|
||||
"nodemon": "^3.1.11",
|
||||
"playwright-core": "^1.57.0",
|
||||
"proxy-chain": "^2.6.0",
|
||||
"tmp": "^0.2.5",
|
||||
"ts-node": "^10.9.2",
|
||||
"typescript": "^5.8.3",
|
||||
"typescript-eslint": "^8.34.0"
|
||||
"typescript": "^5.9.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/tmp": "^0.2.6"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,519 @@
|
||||
import { spawn } from "node:child_process";
|
||||
import path from "node:path";
|
||||
import { launchOptions } from "donutbrowser-camoufox-js";
|
||||
import type { LaunchOptions } from "donutbrowser-camoufox-js/dist/utils.js";
|
||||
import {
|
||||
type CamoufoxConfig,
|
||||
deleteCamoufoxConfig,
|
||||
generateCamoufoxId,
|
||||
getCamoufoxConfig,
|
||||
listCamoufoxConfigs,
|
||||
saveCamoufoxConfig,
|
||||
} from "./camoufox-storage.js";
|
||||
|
||||
/**
|
||||
* Convert camoufox fingerprint format to fingerprint-generator format
|
||||
* @param camoufoxFingerprint The camoufox fingerprint object
|
||||
* @returns fingerprint-generator object
|
||||
*/
|
||||
function convertCamoufoxToFingerprintGenerator(
|
||||
camoufoxFingerprint: Record<string, any>,
|
||||
): any {
|
||||
const fingerprintObj: Record<string, any> = {
|
||||
navigator: {},
|
||||
screen: {},
|
||||
videoCard: {},
|
||||
headers: {},
|
||||
battery: {},
|
||||
};
|
||||
|
||||
// Mapping from camoufox keys to fingerprint-generator structure based on the YAML
|
||||
const mappings: Record<string, string> = {
|
||||
// Navigator properties
|
||||
"navigator.userAgent": "navigator.userAgent",
|
||||
"navigator.platform": "navigator.platform",
|
||||
"navigator.hardwareConcurrency": "navigator.hardwareConcurrency",
|
||||
"navigator.maxTouchPoints": "navigator.maxTouchPoints",
|
||||
"navigator.doNotTrack": "navigator.doNotTrack",
|
||||
"navigator.appCodeName": "navigator.appCodeName",
|
||||
"navigator.appName": "navigator.appName",
|
||||
"navigator.appVersion": "navigator.appVersion",
|
||||
"navigator.oscpu": "navigator.oscpu",
|
||||
"navigator.product": "navigator.product",
|
||||
"navigator.language": "navigator.language",
|
||||
"navigator.languages": "navigator.languages",
|
||||
"navigator.globalPrivacyControl": "navigator.globalPrivacyControl",
|
||||
|
||||
// Screen properties
|
||||
"screen.width": "screen.width",
|
||||
"screen.height": "screen.height",
|
||||
"screen.availWidth": "screen.availWidth",
|
||||
"screen.availHeight": "screen.availHeight",
|
||||
"screen.availTop": "screen.availTop",
|
||||
"screen.availLeft": "screen.availLeft",
|
||||
"screen.colorDepth": "screen.colorDepth",
|
||||
"screen.pixelDepth": "screen.pixelDepth",
|
||||
"window.outerWidth": "screen.outerWidth",
|
||||
"window.outerHeight": "screen.outerHeight",
|
||||
"window.innerWidth": "screen.innerWidth",
|
||||
"window.innerHeight": "screen.innerHeight",
|
||||
"window.screenX": "screen.screenX",
|
||||
"window.screenY": "screen.screenY",
|
||||
"screen.pageXOffset": "screen.pageXOffset",
|
||||
"screen.pageYOffset": "screen.pageYOffset",
|
||||
"window.devicePixelRatio": "screen.devicePixelRatio",
|
||||
"document.body.clientWidth": "screen.clientWidth",
|
||||
"document.body.clientHeight": "screen.clientHeight",
|
||||
|
||||
// WebGL properties
|
||||
"webGl:vendor": "videoCard.vendor",
|
||||
"webGl:renderer": "videoCard.renderer",
|
||||
|
||||
// Headers
|
||||
"headers.Accept-Encoding": "headers.Accept-Encoding",
|
||||
|
||||
// Battery
|
||||
"battery:charging": "battery.charging",
|
||||
"battery:chargingTime": "battery.chargingTime",
|
||||
"battery:dischargingTime": "battery.dischargingTime",
|
||||
};
|
||||
|
||||
// Apply mappings
|
||||
for (const [camoufoxKey, fingerprintPath] of Object.entries(mappings)) {
|
||||
if (camoufoxFingerprint[camoufoxKey] !== undefined) {
|
||||
const pathParts = fingerprintPath.split(".");
|
||||
let current = fingerprintObj;
|
||||
|
||||
// Navigate to the nested property, creating objects as needed
|
||||
for (let i = 0; i < pathParts.length - 1; i++) {
|
||||
const part = pathParts[i];
|
||||
if (!current[part]) {
|
||||
current[part] = {};
|
||||
}
|
||||
current = current[part];
|
||||
}
|
||||
|
||||
// Set the final value
|
||||
const finalKey = pathParts[pathParts.length - 1];
|
||||
current[finalKey] = camoufoxFingerprint[camoufoxKey];
|
||||
}
|
||||
}
|
||||
|
||||
// Handle fonts separately
|
||||
if (camoufoxFingerprint.fonts && Array.isArray(camoufoxFingerprint.fonts)) {
|
||||
fingerprintObj.fonts = camoufoxFingerprint.fonts;
|
||||
}
|
||||
|
||||
return { ...camoufoxFingerprint, ...fingerprintObj };
|
||||
}
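For reference, a minimal sketch of what the mapping above produces for a small input. This is illustrative only: `convertCamoufoxToFingerprintGenerator` is file-local (not exported) in the code shown here, and the fingerprint values below are made up.

```ts
// Illustrative input using keys from the mapping table above; values are examples only.
const camoufoxFingerprint: Record<string, any> = {
  "navigator.userAgent":
    "Mozilla/5.0 (X11; Linux x86_64; rv:132.0) Gecko/20100101 Firefox/132.0",
  "screen.width": 1920,
  "screen.height": 1080,
  "webGl:vendor": "Mozilla",
  fonts: ["Arial", "Helvetica"],
};

// Calling the (file-local) converter copies each flat camoufox key into the nested
// fingerprint-generator layout while keeping the original flat keys via the final spread:
const generated = convertCamoufoxToFingerprintGenerator(camoufoxFingerprint);
// generated.navigator.userAgent -> "Mozilla/5.0 ..."
// generated.screen.width        -> 1920
// generated.videoCard.vendor    -> "Mozilla"
// generated.fonts               -> ["Arial", "Helvetica"]
// generated["webGl:vendor"]     -> "Mozilla" (flat key preserved)
```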
|
||||
|
||||
/**
|
||||
* Start a Camoufox instance in a separate process
|
||||
* @param options Camoufox launch options
|
||||
* @param profilePath Profile directory path
|
||||
* @param url Optional URL to open
|
||||
* @returns Promise resolving to the Camoufox configuration
|
||||
*/
|
||||
export async function startCamoufoxProcess(
|
||||
options: LaunchOptions = {},
|
||||
profilePath?: string,
|
||||
url?: string,
|
||||
customConfig?: string,
|
||||
): Promise<CamoufoxConfig> {
|
||||
// Generate a unique ID for this instance
|
||||
const id = generateCamoufoxId();
|
||||
|
||||
// Ensure profile path is absolute if provided
|
||||
const absoluteProfilePath = profilePath
|
||||
? path.resolve(profilePath)
|
||||
: undefined;
|
||||
|
||||
// Create the Camoufox configuration
|
||||
const config: CamoufoxConfig = {
|
||||
id,
|
||||
options: JSON.parse(JSON.stringify(options)), // Deep clone to avoid reference sharing
|
||||
profilePath: absoluteProfilePath,
|
||||
url,
|
||||
customConfig,
|
||||
};
|
||||
|
||||
// Save the configuration before starting the process
|
||||
saveCamoufoxConfig(config);
|
||||
|
||||
// Build the command arguments
|
||||
const args = [
|
||||
path.join(__dirname, "index.js"),
|
||||
"camoufox-worker",
|
||||
"start",
|
||||
"--id",
|
||||
id,
|
||||
];
|
||||
|
||||
// Spawn the process with proper detachment - similar to proxy implementation
|
||||
const child = spawn(process.execPath, args, {
|
||||
detached: true,
|
||||
stdio: ["ignore", "pipe", "pipe"], // Capture stdout and stderr for startup feedback
|
||||
cwd: process.cwd(),
|
||||
env: {
|
||||
...process.env,
|
||||
NODE_ENV: "production",
|
||||
// Ensure Camoufox can find its dependencies
|
||||
NODE_PATH: process.env.NODE_PATH || "",
|
||||
},
|
||||
});
|
||||
|
||||
// Wait for the worker to start successfully or fail - with shorter timeout for quick response
|
||||
return new Promise<CamoufoxConfig>((resolve, reject) => {
|
||||
let resolved = false;
|
||||
let stdoutBuffer = "";
|
||||
let stderrBuffer = "";
|
||||
|
||||
// Shorter timeout for quick startup feedback
|
||||
const timeout = setTimeout(() => {
|
||||
if (!resolved) {
|
||||
resolved = true;
|
||||
child.kill("SIGKILL");
|
||||
reject(
|
||||
new Error(`Camoufox worker ${id} startup timeout after 5 seconds`),
|
||||
);
|
||||
}
|
||||
}, 5000);
|
||||
|
||||
// Handle stdout - look for success JSON
|
||||
if (child.stdout) {
|
||||
child.stdout.on("data", (data) => {
|
||||
const output = data.toString();
|
||||
stdoutBuffer += output;
|
||||
|
||||
// Look for success JSON message
|
||||
const lines = stdoutBuffer.split("\n");
|
||||
for (const line of lines) {
|
||||
if (line.trim()) {
|
||||
try {
|
||||
const parsed = JSON.parse(line.trim());
|
||||
if (parsed.success && parsed.id === id && parsed.processId) {
|
||||
if (!resolved) {
|
||||
resolved = true;
|
||||
clearTimeout(timeout);
|
||||
config.processId = parsed.processId;
|
||||
saveCamoufoxConfig(config);
|
||||
|
||||
// Unref immediately after success to detach properly
|
||||
child.unref();
|
||||
resolve(config);
|
||||
return;
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Not JSON, continue
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Handle stderr - look for error JSON
|
||||
if (child.stderr) {
|
||||
child.stderr.on("data", (data) => {
|
||||
const output = data.toString();
|
||||
stderrBuffer += output;
|
||||
|
||||
// Look for error JSON message
|
||||
const lines = stderrBuffer.split("\n");
|
||||
for (const line of lines) {
|
||||
if (line.trim()) {
|
||||
try {
|
||||
const parsed = JSON.parse(line.trim());
|
||||
if (parsed.error && parsed.id === id) {
|
||||
if (!resolved) {
|
||||
resolved = true;
|
||||
clearTimeout(timeout);
|
||||
reject(
|
||||
new Error(
|
||||
`Camoufox worker failed: ${parsed.message || parsed.error}`,
|
||||
),
|
||||
);
|
||||
return;
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Not JSON, continue
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
child.on("exit", (code, signal) => {
|
||||
if (!resolved) {
|
||||
resolved = true;
|
||||
clearTimeout(timeout);
|
||||
if (code !== 0) {
|
||||
reject(
|
||||
new Error(
|
||||
`Camoufox worker ${id} exited with code ${code} and signal ${signal}. Stderr: ${stderrBuffer}`,
|
||||
),
|
||||
);
|
||||
} else {
|
||||
// Process exited successfully but we didn't get success message
|
||||
reject(
|
||||
new Error(
|
||||
`Camoufox worker ${id} exited without success confirmation`,
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
}
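A hedged usage sketch for the process launcher above, together with `stopCamoufoxProcess` defined further down in the same file. The import path `./camoufox-launcher.js` is an assumption (file names are not visible in this compare view), and the profile path and URL are placeholders.

```ts
import {
  startCamoufoxProcess,
  stopCamoufoxProcess,
} from "./camoufox-launcher.js"; // assumed module path

async function demo(): Promise<void> {
  // Spawns a detached "camoufox-worker start --id <id>" child and resolves once the
  // worker prints its success JSON on stdout (or rejects after the 5-second timeout).
  const config = await startCamoufoxProcess(
    { headless: false }, // LaunchOptions forwarded to the worker
    "/tmp/profiles/example-profile", // placeholder profile directory
    "https://example.com", // optional URL the worker opens after launch
  );
  console.log(`Camoufox instance ${config.id} running as PID ${config.processId}`);

  // SIGTERM first, SIGKILL as a fallback, then the stored config file is deleted.
  const stopped = await stopCamoufoxProcess(config.id);
  console.log(stopped ? "Stopped cleanly" : "Instance not found or did not stop");
}

demo().catch((error) => {
  console.error(error);
});
```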
|
||||
|
||||
/**
|
||||
* Check if a process is running by PID
|
||||
*/
|
||||
function isProcessRunning(pid: number): boolean {
|
||||
try {
|
||||
process.kill(pid, 0);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop a Camoufox process
|
||||
* @param id The Camoufox ID to stop
|
||||
* @returns Promise resolving to true if stopped, false if not found
|
||||
*/
|
||||
export async function stopCamoufoxProcess(id: string): Promise<boolean> {
|
||||
const config = getCamoufoxConfig(id);
|
||||
|
||||
if (!config) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const pid = config.processId;
|
||||
|
||||
try {
|
||||
// Method 1: If we have a process ID, kill by PID with proper signal sequence
|
||||
if (pid && isProcessRunning(pid)) {
|
||||
try {
|
||||
// First try SIGTERM for graceful shutdown
|
||||
process.kill(pid, "SIGTERM");
|
||||
|
||||
// Wait up to 3 seconds for graceful shutdown
|
||||
for (let i = 0; i < 30; i++) {
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
if (!isProcessRunning(pid)) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// If still running, force kill
|
||||
if (isProcessRunning(pid)) {
|
||||
process.kill(pid, "SIGKILL");
|
||||
// Wait for SIGKILL to take effect
|
||||
for (let i = 0; i < 20; i++) {
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
if (!isProcessRunning(pid)) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Process might have already exited
|
||||
}
|
||||
}
|
||||
|
||||
// Method 2: Pattern-based kill as fallback (kills any child processes)
|
||||
await new Promise<void>((resolve) => {
|
||||
const killByPattern = spawn(
|
||||
"pkill",
|
||||
["-TERM", "-f", `camoufox-worker.*${id}`],
|
||||
{ stdio: "ignore" },
|
||||
);
|
||||
killByPattern.on("exit", () => resolve());
|
||||
setTimeout(() => resolve(), 1000);
|
||||
});
|
||||
|
||||
// Wait a moment then force kill any remaining
|
||||
await new Promise((resolve) => setTimeout(resolve, 500));
|
||||
|
||||
await new Promise<void>((resolve) => {
|
||||
const killByPatternForce = spawn(
|
||||
"pkill",
|
||||
["-KILL", "-f", `camoufox-worker.*${id}`],
|
||||
{ stdio: "ignore" },
|
||||
);
|
||||
killByPatternForce.on("exit", () => resolve());
|
||||
setTimeout(() => resolve(), 1000);
|
||||
});
|
||||
|
||||
// Also kill any Firefox processes associated with this profile
|
||||
if (config.profilePath) {
|
||||
await new Promise<void>((resolve) => {
|
||||
const killFirefox = spawn(
|
||||
"pkill",
|
||||
["-KILL", "-f", config.profilePath!],
|
||||
{ stdio: "ignore" },
|
||||
);
|
||||
killFirefox.on("exit", () => resolve());
|
||||
setTimeout(() => resolve(), 1000);
|
||||
});
|
||||
}
|
||||
|
||||
// Verify process is actually dead
|
||||
if (pid && isProcessRunning(pid)) {
|
||||
// Last resort: SIGKILL again
|
||||
try {
|
||||
process.kill(pid, "SIGKILL");
|
||||
} catch {
|
||||
// Ignore
|
||||
}
|
||||
}
|
||||
|
||||
// Delete the configuration
|
||||
deleteCamoufoxConfig(id);
|
||||
return true;
|
||||
} catch {
|
||||
// Delete the configuration even if stopping failed
|
||||
deleteCamoufoxConfig(id);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop all Camoufox processes
|
||||
* @returns Promise resolving when all instances are stopped
|
||||
*/
|
||||
export async function stopAllCamoufoxProcesses(): Promise<void> {
|
||||
const configs = listCamoufoxConfigs();
|
||||
|
||||
const stopPromises = configs.map((config) => stopCamoufoxProcess(config.id));
|
||||
await Promise.all(stopPromises);
|
||||
}
|
||||
|
||||
interface GenerateConfigOptions {
|
||||
proxy?: string;
|
||||
maxWidth?: number;
|
||||
maxHeight?: number;
|
||||
minWidth?: number;
|
||||
minHeight?: number;
|
||||
geoip?: string | boolean;
|
||||
blockImages?: boolean;
|
||||
blockWebrtc?: boolean;
|
||||
blockWebgl?: boolean;
|
||||
executablePath?: string;
|
||||
fingerprint?: string;
|
||||
os?: "windows" | "macos" | "linux";
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate Camoufox configuration using launchOptions
|
||||
* @param options Configuration options
|
||||
* @returns Promise resolving to the generated config JSON string
|
||||
*/
|
||||
export async function generateCamoufoxConfig(
|
||||
options: GenerateConfigOptions,
|
||||
): Promise<string> {
|
||||
try {
|
||||
const launchOpts: any = {
|
||||
headless: false,
|
||||
i_know_what_im_doing: true,
|
||||
config: {
|
||||
disableTheming: true,
|
||||
showcursor: false,
|
||||
},
|
||||
};
|
||||
|
||||
if (options.geoip) {
|
||||
launchOpts.geoip = true;
|
||||
}
|
||||
|
||||
if (options.blockImages) {
|
||||
launchOpts.block_images = true;
|
||||
}
|
||||
if (options.blockWebrtc) {
|
||||
launchOpts.block_webrtc = true;
|
||||
}
|
||||
if (options.blockWebgl) {
|
||||
launchOpts.block_webgl = true;
|
||||
}
|
||||
|
||||
if (options.executablePath) {
|
||||
launchOpts.executable_path = options.executablePath;
|
||||
}
|
||||
|
||||
if (options.proxy) {
|
||||
launchOpts.proxy = options.proxy;
|
||||
}
|
||||
|
||||
// If fingerprint is provided, use it and ignore other options except executable_path and block_*
|
||||
if (options.fingerprint) {
|
||||
try {
|
||||
const camoufoxFingerprint = JSON.parse(options.fingerprint);
|
||||
|
||||
if (camoufoxFingerprint.timezone) {
|
||||
launchOpts.config.timezone = camoufoxFingerprint.timezone;
|
||||
}
|
||||
|
||||
// Convert camoufox fingerprint format to fingerprint-generator format
|
||||
const fingerprintObj =
|
||||
convertCamoufoxToFingerprintGenerator(camoufoxFingerprint);
|
||||
launchOpts.fingerprint = fingerprintObj;
|
||||
} catch (error) {
|
||||
throw new Error(`Invalid fingerprint JSON: ${error}`);
|
||||
}
|
||||
} else {
|
||||
// Use individual options to build configuration
|
||||
|
||||
// Build screen configuration with min/max dimensions
|
||||
const screen: {
|
||||
minWidth?: number;
|
||||
maxWidth?: number;
|
||||
minHeight?: number;
|
||||
maxHeight?: number;
|
||||
} = {};
|
||||
|
||||
if (options.minWidth) screen.minWidth = options.minWidth;
|
||||
if (options.maxWidth) screen.maxWidth = options.maxWidth;
|
||||
if (options.minHeight) screen.minHeight = options.minHeight;
|
||||
if (options.maxHeight) screen.maxHeight = options.maxHeight;
|
||||
|
||||
if (Object.keys(screen).length > 0) {
|
||||
launchOpts.screen = screen;
|
||||
}
|
||||
}
|
||||
|
||||
launchOpts.allowAddonNewTab = true;
|
||||
|
||||
// Add OS option for fingerprint generation
|
||||
if (options.os) {
|
||||
launchOpts.os = options.os;
|
||||
}
|
||||
|
||||
// Generate the configuration using launchOptions
|
||||
const generatedOptions = await launchOptions(launchOpts);
|
||||
|
||||
// Extract the environment variables that contain the config
|
||||
const envVars = generatedOptions.env || {};
|
||||
|
||||
// Reconstruct the config from environment variables using getEnvVars utility
|
||||
let configStr = "";
|
||||
let chunkIndex = 1;
|
||||
|
||||
while (envVars[`CAMOU_CONFIG_${chunkIndex}`]) {
|
||||
configStr += envVars[`CAMOU_CONFIG_${chunkIndex}`];
|
||||
chunkIndex++;
|
||||
}
|
||||
|
||||
if (!configStr) {
|
||||
throw new Error("No configuration generated");
|
||||
}
|
||||
|
||||
// Parse and return the config as JSON string
|
||||
const config = JSON.parse(configStr);
|
||||
return JSON.stringify(config);
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to generate Camoufox config: ${error}`);
|
||||
}
|
||||
}
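A short sketch of calling the config generator above. All option fields come from the `GenerateConfigOptions` interface shown earlier; the specific values and the import path are assumptions.

```ts
import { generateCamoufoxConfig } from "./camoufox-launcher.js"; // assumed module path

async function printGeneratedConfig(): Promise<void> {
  // Every field of GenerateConfigOptions is optional; these values are examples only.
  const configJson = await generateCamoufoxConfig({
    os: "linux",
    proxy: "http://user:pass@127.0.0.1:8080", // placeholder proxy string
    blockWebrtc: true,
    maxWidth: 1920,
    maxHeight: 1080,
  });

  // The returned string is the fingerprint config reassembled from the
  // CAMOU_CONFIG_<n> environment variable chunks produced by launchOptions().
  console.log(JSON.parse(configJson));
}

printGeneratedConfig().catch((error) => {
  console.error(error);
});
```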
|
||||
@@ -0,0 +1,153 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import type { LaunchOptions } from "donutbrowser-camoufox-js/dist/utils.js";
|
||||
import tmp from "tmp";
|
||||
|
||||
export interface CamoufoxConfig {
|
||||
id: string;
|
||||
options: LaunchOptions;
|
||||
profilePath?: string;
|
||||
url?: string;
|
||||
processId?: number;
|
||||
customConfig?: string; // JSON string of the fingerprint config
|
||||
}
|
||||
|
||||
const STORAGE_DIR = path.join(tmp.tmpdir, "donutbrowser", "camoufox");
|
||||
|
||||
if (!fs.existsSync(STORAGE_DIR)) {
|
||||
fs.mkdirSync(STORAGE_DIR, { recursive: true });
|
||||
}
|
||||
|
||||
/**
|
||||
* Save a Camoufox configuration to disk
|
||||
* @param config The Camoufox configuration to save
|
||||
*/
|
||||
export function saveCamoufoxConfig(config: CamoufoxConfig): void {
|
||||
const filePath = path.join(STORAGE_DIR, `${config.id}.json`);
|
||||
fs.writeFileSync(filePath, JSON.stringify(config, null, 2));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a Camoufox configuration by ID
|
||||
* @param id The Camoufox ID
|
||||
* @returns The Camoufox configuration or null if not found
|
||||
*/
|
||||
export function getCamoufoxConfig(id: string): CamoufoxConfig | null {
|
||||
const filePath = path.join(STORAGE_DIR, `${id}.json`);
|
||||
|
||||
if (!fs.existsSync(filePath)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
const content = fs.readFileSync(filePath, "utf-8");
|
||||
return JSON.parse(content) as CamoufoxConfig;
|
||||
} catch (error) {
|
||||
console.error({
|
||||
message: `Error reading Camoufox config ${id}`,
|
||||
error: (error as Error).message,
|
||||
});
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a Camoufox configuration
|
||||
* @param id The Camoufox ID to delete
|
||||
* @returns True if deleted, false if not found
|
||||
*/
|
||||
export function deleteCamoufoxConfig(id: string): boolean {
|
||||
const filePath = path.join(STORAGE_DIR, `${id}.json`);
|
||||
|
||||
if (!fs.existsSync(filePath)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
fs.unlinkSync(filePath);
|
||||
return true;
|
||||
} catch (error) {
|
||||
console.error({
|
||||
message: `Error deleting Camoufox config ${id}`,
|
||||
error: (error as Error).message,
|
||||
});
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List all saved Camoufox configurations
|
||||
* @returns Array of Camoufox configurations
|
||||
*/
|
||||
export function listCamoufoxConfigs(): CamoufoxConfig[] {
|
||||
if (!fs.existsSync(STORAGE_DIR)) {
|
||||
return [];
|
||||
}
|
||||
|
||||
try {
|
||||
return fs
|
||||
.readdirSync(STORAGE_DIR)
|
||||
.filter((file) => file.endsWith(".json"))
|
||||
.map((file) => {
|
||||
try {
|
||||
const content = fs.readFileSync(
|
||||
path.join(STORAGE_DIR, file),
|
||||
"utf-8",
|
||||
);
|
||||
return JSON.parse(content) as CamoufoxConfig;
|
||||
} catch (error) {
|
||||
console.error({
|
||||
message: `Error reading Camoufox config ${file}`,
|
||||
error,
|
||||
});
|
||||
return null;
|
||||
}
|
||||
})
|
||||
.filter((config): config is CamoufoxConfig => config !== null)
|
||||
.map((config) => {
|
||||
config.options = "Removed for logging" as any;
|
||||
config.customConfig = "Removed for logging" as any;
|
||||
return config;
|
||||
});
|
||||
} catch (error) {
|
||||
console.error({ message: "Error listing Camoufox configs:", error });
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update a Camoufox configuration
|
||||
* @param config The Camoufox configuration to update
|
||||
* @returns True if updated, false if not found
|
||||
*/
|
||||
export function updateCamoufoxConfig(config: CamoufoxConfig): boolean {
|
||||
const filePath = path.join(STORAGE_DIR, `${config.id}.json`);
|
||||
|
||||
try {
|
||||
fs.readFileSync(filePath, "utf-8");
|
||||
fs.writeFileSync(filePath, JSON.stringify(config, null, 2));
|
||||
return true;
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code === "ENOENT") {
|
||||
console.error({
|
||||
message: `Config ${config.id} was deleted while the app was running`,
|
||||
});
|
||||
return false;
|
||||
}
|
||||
|
||||
console.error({
|
||||
message: `Error updating Camoufox config ${config.id}`,
|
||||
error,
|
||||
});
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a unique ID for a Camoufox instance
|
||||
* @returns A unique ID string
|
||||
*/
|
||||
export function generateCamoufoxId(): string {
|
||||
// Include process ID to ensure uniqueness across multiple processes
|
||||
return `camoufox_${Date.now()}_${process.pid}_${Math.floor(Math.random() * 10000)}`;
|
||||
}
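A small round-trip sketch for the storage helpers above. The import path is an assumption and the profile path is a placeholder; each config is persisted as `<tmpdir>/donutbrowser/camoufox/<id>.json`.

```ts
import {
  type CamoufoxConfig,
  deleteCamoufoxConfig,
  generateCamoufoxId,
  getCamoufoxConfig,
  saveCamoufoxConfig,
} from "./camoufox-storage.js"; // assumed module path

const config: CamoufoxConfig = {
  id: generateCamoufoxId(),
  options: { headless: true }, // minimal LaunchOptions example
  profilePath: "/tmp/profiles/demo", // placeholder
};

// Write, read back, then remove the JSON file backing this configuration.
saveCamoufoxConfig(config);

const loaded = getCamoufoxConfig(config.id);
console.log(loaded?.profilePath); // "/tmp/profiles/demo"

console.log(deleteCamoufoxConfig(config.id)); // true once the file is removed
```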
|
||||
@@ -0,0 +1,430 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { launchOptions } from "donutbrowser-camoufox-js";
|
||||
import type { LaunchOptions } from "donutbrowser-camoufox-js/dist/utils.js";
|
||||
import { type Browser, type BrowserContext, firefox } from "playwright-core";
|
||||
import tmp from "tmp";
|
||||
import { getCamoufoxConfig, saveCamoufoxConfig } from "./camoufox-storage.js";
|
||||
import { getEnvVars, parseProxyString } from "./utils.js";
|
||||
|
||||
// Set up debug logging to a file
|
||||
const LOG_DIR = path.join(tmp.tmpdir, "donutbrowser", "camoufox-logs");
|
||||
if (!fs.existsSync(LOG_DIR)) {
|
||||
fs.mkdirSync(LOG_DIR, { recursive: true });
|
||||
}
|
||||
|
||||
function debugLog(id: string, message: string, data?: any): void {
|
||||
const logFile = path.join(LOG_DIR, `${id}.log`);
|
||||
const timestamp = new Date().toISOString();
|
||||
const logMessage = data
|
||||
? `[${timestamp}] ${message}: ${JSON.stringify(data, null, 2)}\n`
|
||||
: `[${timestamp}] ${message}\n`;
|
||||
fs.appendFileSync(logFile, logMessage);
|
||||
}
|
||||
|
||||
/**
|
||||
* Run a Camoufox browser server as a worker process
|
||||
* @param id The Camoufox configuration ID
|
||||
*/
|
||||
export async function runCamoufoxWorker(id: string): Promise<void> {
|
||||
debugLog(id, "Worker starting", { pid: process.pid });
|
||||
|
||||
// Get the Camoufox configuration
|
||||
debugLog(id, "Loading Camoufox configuration");
|
||||
const config = getCamoufoxConfig(id);
|
||||
|
||||
if (!config) {
|
||||
debugLog(id, "Configuration not found");
|
||||
console.error(
|
||||
JSON.stringify({
|
||||
error: "Configuration not found",
|
||||
id: id,
|
||||
}),
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
debugLog(id, "Configuration loaded successfully", {
|
||||
profilePath: config.profilePath,
|
||||
hasOptions: !!config.options,
|
||||
hasCustomConfig: !!config.customConfig,
|
||||
hasUrl: !!config.url,
|
||||
});
|
||||
|
||||
config.processId = process.pid;
|
||||
saveCamoufoxConfig(config);
|
||||
|
||||
console.log(
|
||||
JSON.stringify({
|
||||
success: true,
|
||||
id: id,
|
||||
processId: process.pid,
|
||||
profilePath: config.profilePath,
|
||||
message: "Camoufox worker started successfully",
|
||||
}),
|
||||
);
|
||||
|
||||
// Launch browser in background - this can take time and may fail
|
||||
setImmediate(async () => {
|
||||
debugLog(id, "Starting browser launch in background");
|
||||
let browser: Browser | null = null;
|
||||
let context: BrowserContext | null = null;
|
||||
let windowCheckInterval: NodeJS.Timeout | null = null;
|
||||
|
||||
// Graceful shutdown handler with access to browser and server
|
||||
const gracefulShutdown = async () => {
|
||||
debugLog(id, "Graceful shutdown initiated");
|
||||
try {
|
||||
// Clear any intervals first
|
||||
if (windowCheckInterval) {
|
||||
clearInterval(windowCheckInterval);
|
||||
}
|
||||
|
||||
// Close browser context and server if they exist
|
||||
if (context && !context.pages) {
|
||||
// Context is already closed
|
||||
} else if (context) {
|
||||
await context.close();
|
||||
}
|
||||
|
||||
if (browser?.isConnected()) {
|
||||
await browser.close();
|
||||
}
|
||||
} catch {
|
||||
// Ignore cleanup errors during shutdown
|
||||
}
|
||||
process.exit(0);
|
||||
};
|
||||
|
||||
// Handle various quit signals for proper macOS Command+Q support
|
||||
process.on("SIGTERM", () => void gracefulShutdown());
|
||||
process.on("SIGINT", () => void gracefulShutdown());
|
||||
process.on("SIGHUP", () => void gracefulShutdown());
|
||||
process.on("SIGQUIT", () => void gracefulShutdown());
|
||||
|
||||
// Handle uncaught exceptions and unhandled rejections
|
||||
process.on("uncaughtException", () => void gracefulShutdown());
|
||||
process.on("unhandledRejection", () => void gracefulShutdown());
|
||||
|
||||
try {
|
||||
debugLog(id, "Preparing launch options");
|
||||
// Deep clone to avoid reference sharing and ensure fresh configuration for each instance
|
||||
const camoufoxOptions: LaunchOptions = JSON.parse(
|
||||
JSON.stringify(config.options || {}),
|
||||
);
|
||||
debugLog(id, "Base options cloned", {
|
||||
hasOptions: Object.keys(camoufoxOptions).length,
|
||||
});
|
||||
|
||||
// Add profile path if provided
|
||||
if (config.profilePath) {
|
||||
camoufoxOptions.user_data_dir = config.profilePath;
|
||||
debugLog(id, "Set user_data_dir", { profilePath: config.profilePath });
|
||||
}
|
||||
|
||||
// Ensure block options are properly set
|
||||
if (camoufoxOptions.block_images) {
|
||||
camoufoxOptions.block_images = true;
|
||||
}
|
||||
|
||||
if (camoufoxOptions.block_webgl) {
|
||||
camoufoxOptions.block_webgl = true;
|
||||
}
|
||||
|
||||
if (camoufoxOptions.block_webrtc) {
|
||||
camoufoxOptions.block_webrtc = true;
|
||||
}
|
||||
|
||||
// Check for headless mode from config (no environment variable check)
|
||||
if (camoufoxOptions.headless) {
|
||||
camoufoxOptions.headless = true;
|
||||
}
|
||||
|
||||
// Always set these defaults - ensure they are applied for each instance
|
||||
camoufoxOptions.i_know_what_im_doing = true;
|
||||
camoufoxOptions.config = {
|
||||
disableTheming: true,
|
||||
showcursor: false,
|
||||
...(camoufoxOptions.config || {}),
|
||||
};
|
||||
debugLog(id, "Set default options", {
|
||||
i_know_what_im_doing: true,
|
||||
disableTheming: true,
|
||||
showcursor: false,
|
||||
});
|
||||
|
||||
// Generate fresh options for this specific instance
|
||||
debugLog(id, "Generating launch options via launchOptions function");
|
||||
const generatedOptions = await launchOptions(camoufoxOptions);
|
||||
debugLog(id, "Launch options generated successfully", {
|
||||
hasEnv: !!generatedOptions.env,
|
||||
argsLength: generatedOptions.args?.length || 0,
|
||||
});
|
||||
|
||||
// Start with process environment to ensure proper inheritance
|
||||
let finalEnv = { ...process.env };
|
||||
debugLog(id, "Base environment variables set", {
|
||||
envVarCount: Object.keys(finalEnv).length,
|
||||
});
|
||||
|
||||
// Add generated options environment variables
|
||||
if (generatedOptions.env) {
|
||||
finalEnv = { ...finalEnv, ...generatedOptions.env };
|
||||
debugLog(id, "Added generated environment variables", {
|
||||
generatedEnvCount: Object.keys(generatedOptions.env).length,
|
||||
totalEnvCount: Object.keys(finalEnv).length,
|
||||
});
|
||||
}
|
||||
|
||||
// If we have a custom config from Rust, use it directly as environment variables
|
||||
if (config.customConfig) {
|
||||
debugLog(id, "Processing custom config", {
|
||||
customConfigLength: config.customConfig.length,
|
||||
});
|
||||
try {
|
||||
// Parse the custom config JSON string
|
||||
const customConfigObj = JSON.parse(config.customConfig);
|
||||
debugLog(id, "Custom config parsed successfully", {
|
||||
customConfigKeys: Object.keys(customConfigObj),
|
||||
});
|
||||
|
||||
// Ensure default config values are preserved even with custom config
|
||||
const mergedConfig = {
|
||||
...customConfigObj,
|
||||
disableTheming: true,
|
||||
showcursor: false,
|
||||
// allowAddonNewTab will be handled from the fingerprint config if present
|
||||
};
|
||||
|
||||
// Convert merged config to environment variables using getEnvVars
|
||||
const customEnvVars = getEnvVars(mergedConfig);
|
||||
debugLog(id, "Custom config converted to environment variables", {
|
||||
customEnvVarCount: Object.keys(customEnvVars).length,
|
||||
});
|
||||
|
||||
// Merge custom config with generated config (custom takes precedence)
|
||||
finalEnv = { ...finalEnv, ...customEnvVars };
|
||||
debugLog(id, "Custom config merged with final environment", {
|
||||
finalEnvCount: Object.keys(finalEnv).length,
|
||||
});
|
||||
} catch (error) {
|
||||
debugLog(id, "Failed to parse custom config", {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
});
|
||||
console.error(
|
||||
`Camoufox worker ${id}: Failed to parse custom config, using generated config:`,
|
||||
error,
|
||||
);
|
||||
await gracefulShutdown();
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
debugLog(id, "No custom config provided");
|
||||
}
|
||||
// Prepare profile path for persistent context
|
||||
const profilePath = config.profilePath || "";
|
||||
debugLog(id, "Profile path prepared", { profilePath });
|
||||
|
||||
// Launch persistent context with the final configuration
|
||||
const finalOptions: any = {
|
||||
...generatedOptions,
|
||||
env: finalEnv,
|
||||
};
|
||||
debugLog(id, "Final launch options prepared", {
|
||||
hasExecutablePath: !!finalOptions.executablePath,
|
||||
hasProxy: !!camoufoxOptions.proxy,
|
||||
profilePath,
|
||||
});
|
||||
|
||||
// If a custom executable path was provided, ensure Playwright uses it
|
||||
if (
|
||||
(camoufoxOptions as any).executable_path &&
|
||||
typeof (camoufoxOptions as any).executable_path === "string"
|
||||
) {
|
||||
finalOptions.executablePath = (camoufoxOptions as any)
|
||||
.executable_path as string;
|
||||
debugLog(id, "Custom executable path set", {
|
||||
executablePath: finalOptions.executablePath,
|
||||
});
|
||||
}
|
||||
|
||||
// Only add proxy if it exists and is valid
|
||||
if (camoufoxOptions.proxy) {
|
||||
debugLog(id, "Processing proxy configuration", {
|
||||
proxyString: camoufoxOptions.proxy,
|
||||
});
|
||||
try {
|
||||
finalOptions.proxy = parseProxyString(camoufoxOptions.proxy);
|
||||
debugLog(id, "Proxy parsed successfully");
|
||||
} catch (error) {
|
||||
debugLog(id, "Failed to parse proxy", {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
});
|
||||
console.error({
|
||||
message: "Failed to parse proxy, launching without proxy",
|
||||
error,
|
||||
});
|
||||
await gracefulShutdown();
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Use launchPersistentContext instead of launchServer
|
||||
debugLog(id, "Launching persistent context", { profilePath });
|
||||
context = await firefox.launchPersistentContext(
|
||||
profilePath,
|
||||
finalOptions,
|
||||
);
|
||||
debugLog(id, "Persistent context launched successfully");
|
||||
|
||||
// Get the browser instance from context
|
||||
browser = context.browser();
|
||||
debugLog(id, "Browser instance obtained from context", {
|
||||
browserConnected: browser?.isConnected(),
|
||||
});
|
||||
|
||||
// Handle browser disconnection for proper cleanup
|
||||
if (browser) {
|
||||
browser.on("disconnected", () => void gracefulShutdown());
|
||||
debugLog(id, "Browser disconnect handler registered");
|
||||
}
|
||||
|
||||
// Handle context close for proper cleanup
|
||||
context.on("close", () => void gracefulShutdown());
|
||||
debugLog(id, "Context close handler registered");
|
||||
|
||||
saveCamoufoxConfig(config);
|
||||
|
||||
// Monitor for window closure
|
||||
const startWindowMonitoring = () => {
|
||||
debugLog(id, "Starting window monitoring");
|
||||
windowCheckInterval = setInterval(async () => {
|
||||
try {
|
||||
// Check if context is still active
|
||||
if (!context?.pages || context.pages().length === 0) {
|
||||
debugLog(id, "No pages found in context, shutting down");
|
||||
if (windowCheckInterval) {
|
||||
clearInterval(windowCheckInterval);
|
||||
}
|
||||
await gracefulShutdown();
|
||||
return;
|
||||
}
|
||||
|
||||
// Check if browser is still connected (if available)
|
||||
if (browser && !browser.isConnected()) {
|
||||
debugLog(id, "Browser disconnected, shutting down");
|
||||
if (windowCheckInterval) {
|
||||
clearInterval(windowCheckInterval);
|
||||
}
|
||||
await gracefulShutdown();
|
||||
return;
|
||||
}
|
||||
|
||||
// Check pages in the persistent context
|
||||
const pages = context.pages();
|
||||
if (pages.length === 0) {
|
||||
debugLog(id, "No pages in context, shutting down");
|
||||
if (windowCheckInterval) {
|
||||
clearInterval(windowCheckInterval);
|
||||
}
|
||||
await gracefulShutdown();
|
||||
}
|
||||
} catch (error) {
|
||||
debugLog(id, "Error in window monitoring", {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
});
|
||||
// If we can't check windows, assume browser is closing
|
||||
if (windowCheckInterval) {
|
||||
clearInterval(windowCheckInterval);
|
||||
}
|
||||
await gracefulShutdown();
|
||||
}
|
||||
}, 1000); // Check every second
|
||||
};
|
||||
|
||||
// Handle URL opening if provided
|
||||
if (config.url) {
|
||||
debugLog(id, "Opening URL in browser", { url: config.url });
|
||||
try {
|
||||
const pages = await context.pages();
|
||||
if (pages.length) {
|
||||
const page = pages[0];
|
||||
debugLog(id, "Navigating to URL");
|
||||
await page.goto(config.url, {
|
||||
waitUntil: "domcontentloaded",
|
||||
timeout: 30000,
|
||||
});
|
||||
debugLog(id, "URL opened successfully");
|
||||
|
||||
// Start monitoring after page is created
|
||||
startWindowMonitoring();
|
||||
} else {
|
||||
debugLog(id, "No pages available to open URL");
|
||||
startWindowMonitoring();
|
||||
}
|
||||
} catch (urlError) {
|
||||
debugLog(id, "Failed to open URL", {
|
||||
error:
|
||||
urlError instanceof Error ? urlError.message : String(urlError),
|
||||
});
|
||||
console.error({
|
||||
message: "Failed to open URL",
|
||||
error: urlError,
|
||||
});
|
||||
// URL opening failure doesn't affect startup success
|
||||
// Still start monitoring
|
||||
startWindowMonitoring();
|
||||
}
|
||||
} else {
|
||||
debugLog(id, "No URL provided, starting monitoring");
|
||||
// Start monitoring after page is created
|
||||
startWindowMonitoring();
|
||||
}
|
||||
|
||||
// Monitor browser/context connection
|
||||
debugLog(id, "Starting keep-alive monitoring");
|
||||
const keepAlive = setInterval(async () => {
|
||||
try {
|
||||
// Check if context is still active
|
||||
if (!context?.pages) {
|
||||
debugLog(id, "Context not active in keep-alive, shutting down");
|
||||
clearInterval(keepAlive);
|
||||
await gracefulShutdown();
|
||||
return;
|
||||
}
|
||||
|
||||
// Check browser connection if available
|
||||
if (browser && !browser.isConnected()) {
|
||||
debugLog(id, "Browser not connected in keep-alive, shutting down");
|
||||
clearInterval(keepAlive);
|
||||
await gracefulShutdown();
|
||||
return;
|
||||
}
|
||||
} catch (error) {
|
||||
debugLog(id, "Error in keep-alive check", {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
});
|
||||
console.error({
|
||||
message: "Error in keepAlive check",
|
||||
error,
|
||||
});
|
||||
clearInterval(keepAlive);
|
||||
await gracefulShutdown();
|
||||
}
|
||||
}, 2000);
|
||||
} catch (error) {
|
||||
debugLog(id, "Failed to launch Camoufox", {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
});
|
||||
console.error({
|
||||
message: "Failed to launch Camoufox",
|
||||
error,
|
||||
});
|
||||
// Browser launch failed, attempt cleanup
|
||||
await gracefulShutdown();
|
||||
}
|
||||
});
|
||||
|
||||
// Keep process alive
|
||||
process.stdin.resume();
|
||||
}
|
||||
+284
-102
@@ -1,150 +1,332 @@
|
||||
import { program } from "commander";
|
||||
import type { LaunchOptions } from "donutbrowser-camoufox-js/dist/utils.js";
|
||||
import {
|
||||
startProxyProcess,
|
||||
stopAllProxyProcesses,
|
||||
stopProxyProcess,
|
||||
} from "./proxy-runner";
|
||||
import { listProxyConfigs } from "./proxy-storage";
|
||||
import { runProxyWorker } from "./proxy-worker";
|
||||
generateCamoufoxConfig,
|
||||
startCamoufoxProcess,
|
||||
stopAllCamoufoxProcesses,
|
||||
stopCamoufoxProcess,
|
||||
} from "./camoufox-launcher.js";
|
||||
import { listCamoufoxConfigs } from "./camoufox-storage.js";
|
||||
import { runCamoufoxWorker } from "./camoufox-worker.js";
|
||||
|
||||
// Command for proxy management
|
||||
// Command for Camoufox management
|
||||
program
|
||||
.command("proxy")
|
||||
.argument("<action>", "start, stop, or list proxies")
|
||||
.option("--host <host>", "upstream proxy host")
|
||||
.option("--proxy-port <port>", "upstream proxy port", Number.parseInt)
|
||||
.option("--type <type>", "proxy type (http, https, socks4, socks5)")
|
||||
.option("--username <username>", "proxy username")
|
||||
.option("--password <password>", "proxy password")
|
||||
.option(
|
||||
"-p, --port <number>",
|
||||
"local port to use (random if not specified)",
|
||||
Number.parseInt,
|
||||
.command("camoufox")
|
||||
.argument(
|
||||
"<action>",
|
||||
"start, stop, list, or generate-config Camoufox instances",
|
||||
)
|
||||
.option("--ignore-certificate", "ignore certificate errors for HTTPS proxies")
|
||||
.option("--id <id>", "proxy ID for stop command")
|
||||
.option("--id <id>", "Camoufox ID for stop command")
|
||||
.option("--profile-path <path>", "profile directory path")
|
||||
.option("--url <url>", "URL to open")
|
||||
|
||||
// Config generation options
|
||||
.option("--proxy <proxy>", "proxy URL for config generation")
|
||||
.option("--max-width <width>", "maximum screen width", parseInt)
|
||||
.option("--max-height <height>", "maximum screen height", parseInt)
|
||||
.option("--min-width <width>", "minimum screen width", parseInt)
|
||||
.option("--min-height <height>", "minimum screen height", parseInt)
|
||||
.option("--geoip", "enable geoip")
|
||||
.option("--block-images", "block images")
|
||||
.option("--block-webrtc", "block WebRTC")
|
||||
.option("--block-webgl", "block WebGL")
|
||||
.option("--executable-path <path>", "executable path")
|
||||
.option("--fingerprint <json>", "fingerprint JSON string")
|
||||
.option("--headless", "run in headless mode")
|
||||
.option("--custom-config <json>", "custom config JSON string")
|
||||
.option(
|
||||
"-u, --upstream <url>",
|
||||
"upstream proxy URL (protocol://[username:password@]host:port)",
|
||||
"--os <os>",
|
||||
"operating system for fingerprint: windows, macos, linux",
|
||||
)
|
||||
.description("manage proxy servers")
|
||||
|
||||
.description("manage Camoufox browser instances")
|
||||
.action(
|
||||
async (
|
||||
action: string,
|
||||
options: {
|
||||
host?: string;
|
||||
proxyPort?: number;
|
||||
type?: string;
|
||||
username?: string;
|
||||
password?: string;
|
||||
port?: number;
|
||||
ignoreCertificate?: boolean;
|
||||
id?: string;
|
||||
upstream?: string;
|
||||
},
|
||||
options: Record<string, string | number | boolean | undefined>,
|
||||
) => {
|
||||
if (action === "start") {
|
||||
let upstreamUrl: string;
|
||||
|
||||
// Build upstream URL from individual components if provided
|
||||
if (options.host && options.proxyPort && options.type) {
|
||||
const protocol =
|
||||
options.type === "socks4" || options.type === "socks5"
|
||||
? options.type
|
||||
: "http";
|
||||
const auth =
|
||||
options.username && options.password
|
||||
? `${encodeURIComponent(options.username)}:${encodeURIComponent(
|
||||
options.password,
|
||||
)}@`
|
||||
: "";
|
||||
upstreamUrl = `${protocol}://${auth}${options.host}:${options.proxyPort}`;
|
||||
} else if (options.upstream) {
|
||||
upstreamUrl = options.upstream;
|
||||
} else {
|
||||
console.error(
|
||||
"Error: Either --upstream URL or --host, --proxy-port, and --type are required",
|
||||
);
|
||||
console.log(
|
||||
"Example: proxy start --host datacenter.proxyempire.io --proxy-port 9000 --type http --username user --password pass",
|
||||
);
|
||||
process.exit(1);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const config = await startProxyProcess(upstreamUrl, {
|
||||
port: options.port,
|
||||
ignoreProxyCertificate: options.ignoreCertificate,
|
||||
});
|
||||
// Build Camoufox options in the format expected by camoufox-js
|
||||
const camoufoxOptions: LaunchOptions = {};
|
||||
|
||||
// OS fingerprinting
|
||||
if (options.os && typeof options.os === "string") {
|
||||
camoufoxOptions.os = options.os.includes(",")
|
||||
? (options.os.split(",") as ("windows" | "macos" | "linux")[])
|
||||
: (options.os as "windows" | "macos" | "linux");
|
||||
}
|
||||
|
||||
// Blocking options
|
||||
if (options.blockImages) camoufoxOptions.block_images = true;
|
||||
if (options.blockWebrtc) camoufoxOptions.block_webrtc = true;
|
||||
if (options.blockWebgl) camoufoxOptions.block_webgl = true;
|
||||
|
||||
// Security options
|
||||
if (options.disableCoop) camoufoxOptions.disable_coop = true;
|
||||
|
||||
if (options.geoip) {
|
||||
camoufoxOptions.geoip = true;
|
||||
}
|
||||
|
||||
if (options.latitude && options.longitude) {
|
||||
camoufoxOptions.geolocation = {
|
||||
latitude: options.latitude as number,
|
||||
longitude: options.longitude as number,
|
||||
accuracy: 100,
|
||||
};
|
||||
}
|
||||
if (options.country)
|
||||
camoufoxOptions.country = options.country as string;
|
||||
if (options.timezone)
|
||||
camoufoxOptions.timezone = options.timezone as string;
|
||||
|
||||
if (options.humanize)
|
||||
camoufoxOptions.humanize = options.humanize as boolean;
|
||||
if (options.headless) camoufoxOptions.headless = true;
|
||||
|
||||
// Localization
|
||||
if (options.locale && typeof options.locale === "string") {
|
||||
camoufoxOptions.locale = options.locale.includes(",")
|
||||
? options.locale.split(",")
|
||||
: options.locale;
|
||||
}
|
||||
|
||||
// Extensions and fonts
|
||||
if (options.addons && typeof options.addons === "string")
|
||||
camoufoxOptions.addons = options.addons.split(",");
|
||||
if (options.fonts && typeof options.fonts === "string")
|
||||
camoufoxOptions.fonts = options.fonts.split(",");
|
||||
if (options.customFontsOnly) camoufoxOptions.custom_fonts_only = true;
|
||||
if (
|
||||
options.excludeAddons &&
|
||||
typeof options.excludeAddons === "string"
|
||||
)
|
||||
camoufoxOptions.exclude_addons = options.excludeAddons.split(
|
||||
",",
|
||||
) as "UBO"[];
|
||||
|
||||
// Executable path: forward through to camoufox-js and ultimately Playwright
|
||||
if (
|
||||
options.executablePath &&
|
||||
typeof options.executablePath === "string"
|
||||
) {
|
||||
// camoufox-js uses snake_case for this option
|
||||
(camoufoxOptions as any).executable_path =
|
||||
options.executablePath as string;
|
||||
}
|
||||
|
||||
// Screen and window
|
||||
const screen: {
|
||||
minWidth?: number;
|
||||
maxWidth?: number;
|
||||
minHeight?: number;
|
||||
maxHeight?: number;
|
||||
} = {};
|
||||
if (options.screenMinWidth)
|
||||
screen.minWidth = options.screenMinWidth as number;
|
||||
if (options.screenMaxWidth)
|
||||
screen.maxWidth = options.screenMaxWidth as number;
|
||||
if (options.screenMinHeight)
|
||||
screen.minHeight = options.screenMinHeight as number;
|
||||
if (options.screenMaxHeight)
|
||||
screen.maxHeight = options.screenMaxHeight as number;
|
||||
if (Object.keys(screen).length > 0) camoufoxOptions.screen = screen;
|
||||
|
||||
if (options.windowWidth && options.windowHeight) {
|
||||
camoufoxOptions.window = [
|
||||
options.windowWidth as number,
|
||||
options.windowHeight as number,
|
||||
];
|
||||
}
|
||||
|
||||
// Advanced options
|
||||
if (options.ffVersion)
|
||||
camoufoxOptions.ff_version = options.ffVersion as number;
|
||||
if (options.mainWorldEval) camoufoxOptions.main_world_eval = true;
|
||||
if (options.webglVendor && options.webglRenderer) {
|
||||
camoufoxOptions.webgl_config = [
|
||||
options.webglVendor as string,
|
||||
options.webglRenderer as string,
|
||||
];
|
||||
}
|
||||
|
||||
// Proxy
|
||||
if (options.proxy) camoufoxOptions.proxy = options.proxy as string;
|
||||
|
||||
// Cache and performance - default to enabled
|
||||
camoufoxOptions.enable_cache = !options.disableCache;
|
||||
|
||||
// Environment and debugging
|
||||
if (options.virtualDisplay)
|
||||
camoufoxOptions.virtual_display = options.virtualDisplay as string;
|
||||
if (options.debug) camoufoxOptions.debug = true;
|
||||
|
||||
// Handle headless mode via flag instead of environment variable
|
||||
if (options.headless) {
|
||||
camoufoxOptions.headless = true;
|
||||
}
|
||||
if (options.args && typeof options.args === "string")
|
||||
camoufoxOptions.args = options.args.split(",");
|
||||
if (options.env && typeof options.env === "string") {
|
||||
try {
|
||||
camoufoxOptions.env = JSON.parse(options.env);
|
||||
} catch (e) {
|
||||
console.error(
|
||||
JSON.stringify({
|
||||
error: "Invalid JSON for --env option",
|
||||
message: String(e),
|
||||
}),
|
||||
);
|
||||
process.exit(1);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Firefox preferences
|
||||
if (
|
||||
options.firefoxPrefs &&
|
||||
typeof options.firefoxPrefs === "string"
|
||||
) {
|
||||
try {
|
||||
camoufoxOptions.firefox_user_prefs = JSON.parse(
|
||||
options.firefoxPrefs,
|
||||
);
|
||||
} catch (e) {
|
||||
console.error(
|
||||
JSON.stringify({
|
||||
error: "Invalid JSON for --firefox-prefs option",
|
||||
message: String(e),
|
||||
}),
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
const config = await startCamoufoxProcess(
|
||||
camoufoxOptions,
|
||||
typeof options.profilePath === "string"
|
||||
? options.profilePath
|
||||
: undefined,
|
||||
typeof options.url === "string" ? options.url : undefined,
|
||||
typeof options.customConfig === "string"
|
||||
? options.customConfig
|
||||
: undefined,
|
||||
);
|
||||
|
||||
// Output the configuration as JSON for the Rust side to parse
|
||||
console.log(
|
||||
JSON.stringify({
|
||||
id: config.id,
|
||||
localPort: config.localPort,
|
||||
localUrl: config.localUrl,
|
||||
upstreamUrl: config.upstreamUrl,
|
||||
processId: config.processId,
|
||||
profilePath: config.profilePath,
|
||||
url: config.url,
|
||||
}),
|
||||
);
|
||||
|
||||
// Exit successfully to allow the process to detach
|
||||
process.exit(0);
|
||||
} catch (error: unknown) {
|
||||
console.error(
|
||||
`Failed to start proxy: ${
|
||||
error instanceof Error ? error.message : JSON.stringify(error)
|
||||
}`,
|
||||
JSON.stringify({
|
||||
error: "Failed to start Camoufox",
|
||||
message: error instanceof Error ? error.message : String(error),
|
||||
}),
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
} else if (action === "stop") {
|
||||
if (options.id) {
|
||||
const stopped = await stopProxyProcess(options.id);
|
||||
if (options.id && typeof options.id === "string") {
|
||||
const stopped = await stopCamoufoxProcess(options.id);
|
||||
console.log(JSON.stringify({ success: stopped }));
|
||||
} else if (options.upstream) {
|
||||
// Find proxies with this upstream URL
|
||||
const configs = listProxyConfigs().filter(
|
||||
(config) => config.upstreamUrl === options.upstream,
|
||||
);
|
||||
|
||||
if (configs.length === 0) {
|
||||
console.error(`No proxies found for ${options.upstream}`);
|
||||
process.exit(1);
|
||||
return;
|
||||
}
|
||||
|
||||
for (const config of configs) {
|
||||
const stopped = await stopProxyProcess(config.id);
|
||||
console.log(JSON.stringify({ success: stopped }));
|
||||
}
|
||||
} else {
|
||||
await stopAllProxyProcesses();
|
||||
await stopAllCamoufoxProcesses();
|
||||
console.log(JSON.stringify({ success: true }));
|
||||
}
|
||||
process.exit(0);
|
||||
} else if (action === "list") {
|
||||
const configs = listProxyConfigs();
|
||||
const configs = listCamoufoxConfigs();
|
||||
console.log(JSON.stringify(configs));
|
||||
process.exit(0);
|
||||
} else if (action === "generate-config") {
|
||||
try {
|
||||
const config = await generateCamoufoxConfig({
|
||||
proxy:
|
||||
typeof options.proxy === "string" ? options.proxy : undefined,
|
||||
maxWidth:
|
||||
typeof options.maxWidth === "number"
|
||||
? options.maxWidth
|
||||
: undefined,
|
||||
maxHeight:
|
||||
typeof options.maxHeight === "number"
|
||||
? options.maxHeight
|
||||
: undefined,
|
||||
minWidth:
|
||||
typeof options.minWidth === "number"
|
||||
? options.minWidth
|
||||
: undefined,
|
||||
minHeight:
|
||||
typeof options.minHeight === "number"
|
||||
? options.minHeight
|
||||
: undefined,
|
||||
geoip: Boolean(options.geoip),
|
||||
blockImages:
|
||||
typeof options.blockImages === "boolean"
|
||||
? options.blockImages
|
||||
: undefined,
|
||||
blockWebrtc:
|
||||
typeof options.blockWebrtc === "boolean"
|
||||
? options.blockWebrtc
|
||||
: undefined,
|
||||
blockWebgl:
|
||||
typeof options.blockWebgl === "boolean"
|
||||
? options.blockWebgl
|
||||
: undefined,
|
||||
executablePath:
|
||||
typeof options.executablePath === "string"
|
||||
? options.executablePath
|
||||
: undefined,
|
||||
fingerprint:
|
||||
typeof options.fingerprint === "string"
|
||||
? options.fingerprint
|
||||
: undefined,
|
||||
os:
|
||||
typeof options.os === "string"
|
||||
? (options.os as "windows" | "macos" | "linux")
|
||||
: undefined,
|
||||
});
|
||||
console.log(config);
|
||||
process.exit(0);
|
||||
} catch (error: unknown) {
|
||||
console.error({
|
||||
error: "Failed to generate config",
|
||||
message:
|
||||
error instanceof Error ? error.message : JSON.stringify(error),
|
||||
});
|
||||
process.exit(1);
|
||||
}
|
||||
} else {
|
||||
console.error("Invalid action. Use 'start', 'stop', or 'list'");
|
||||
console.error({
|
||||
error: "Invalid action",
|
||||
message: "Use 'start', 'stop', 'list', or 'generate-config'",
|
||||
});
|
||||
process.exit(1);
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
// Command for proxy worker (internal use)
|
||||
// Command for Camoufox worker (internal use)
|
||||
program
|
||||
.command("proxy-worker")
|
||||
.argument("<action>", "start a proxy worker")
|
||||
.requiredOption("--id <id>", "proxy configuration ID")
|
||||
.description("run a proxy worker process")
|
||||
.command("camoufox-worker")
|
||||
.argument("<action>", "start a Camoufox worker")
|
||||
.requiredOption("--id <id>", "Camoufox configuration ID")
|
||||
.description("run a Camoufox worker process")
|
||||
.action(async (action: string, options: { id: string }) => {
|
||||
if (action === "start") {
|
||||
await runProxyWorker(options.id);
|
||||
await runCamoufoxWorker(options.id);
|
||||
} else {
|
||||
console.error("Invalid action for proxy-worker. Use 'start'");
|
||||
console.error({
|
||||
error: "Invalid action for camoufox-worker",
|
||||
message: "Use 'start'",
|
||||
});
|
||||
process.exit(1);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -1,124 +0,0 @@
|
||||
import { spawn } from "node:child_process";
|
||||
import path from "node:path";
|
||||
import getPort from "get-port";
|
||||
import {
|
||||
type ProxyConfig,
|
||||
deleteProxyConfig,
|
||||
generateProxyId,
|
||||
getProxyConfig,
|
||||
isProcessRunning,
|
||||
listProxyConfigs,
|
||||
saveProxyConfig,
|
||||
} from "./proxy-storage";
|
||||
|
||||
/**
|
||||
* Start a proxy in a separate process
|
||||
* @param upstreamUrl The upstream proxy URL
|
||||
* @param options Optional configuration
|
||||
* @returns Promise resolving to the proxy configuration
|
||||
*/
|
||||
export async function startProxyProcess(
|
||||
upstreamUrl: string,
|
||||
options: { port?: number; ignoreProxyCertificate?: boolean } = {},
|
||||
): Promise<ProxyConfig> {
|
||||
// Generate a unique ID for this proxy
|
||||
const id = generateProxyId();
|
||||
|
||||
// Get a random available port if not specified
|
||||
const port = options.port ?? (await getPort());
|
||||
|
||||
// Create the proxy configuration
|
||||
const config: ProxyConfig = {
|
||||
id,
|
||||
upstreamUrl,
|
||||
localPort: port,
|
||||
ignoreProxyCertificate: options.ignoreProxyCertificate ?? false,
|
||||
};
|
||||
|
||||
// Save the configuration before starting the process
|
||||
saveProxyConfig(config);
|
||||
|
||||
// Build the command arguments
|
||||
const args = [
|
||||
path.join(__dirname, "index.js"),
|
||||
"proxy-worker",
|
||||
"start",
|
||||
"--id",
|
||||
id,
|
||||
];
|
||||
|
||||
// Spawn the process with proper detachment
|
||||
const child = spawn(process.execPath, args, {
|
||||
detached: true,
|
||||
stdio: ["ignore", "ignore", "ignore"], // Completely ignore all stdio
|
||||
cwd: process.cwd(),
|
||||
});
|
||||
|
||||
// Unref the child to allow the parent to exit independently
|
||||
child.unref();
|
||||
|
||||
// Store the process ID and local URL
|
||||
config.pid = child.pid;
|
||||
config.localUrl = `http://127.0.0.1:${port}`;
|
||||
|
||||
// Update the configuration with the process ID
|
||||
saveProxyConfig(config);
|
||||
|
||||
// Give the worker a moment to start before returning
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
return config;
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop a proxy process
|
||||
* @param id The proxy ID to stop
|
||||
* @returns Promise resolving to true if stopped, false if not found
|
||||
*/
|
||||
export async function stopProxyProcess(id: string): Promise<boolean> {
|
||||
const config = getProxyConfig(id);
|
||||
|
||||
if (!config?.pid) {
|
||||
// Try to delete the config anyway in case it exists without a PID
|
||||
deleteProxyConfig(id);
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
// Check if the process is running
|
||||
if (isProcessRunning(config.pid)) {
|
||||
// Send SIGTERM to the process
|
||||
process.kill(config.pid, "SIGTERM");
|
||||
|
||||
// Wait a bit to ensure the process has terminated
|
||||
await new Promise((resolve) => setTimeout(resolve, 500));
|
||||
|
||||
// If still running, send SIGKILL
|
||||
if (isProcessRunning(config.pid)) {
|
||||
process.kill(config.pid, "SIGKILL");
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
}
|
||||
}
|
||||
|
||||
// Delete the configuration
|
||||
deleteProxyConfig(id);
|
||||
|
||||
return true;
|
||||
} catch (error) {
|
||||
console.error(`Error stopping proxy ${id}:`, error);
|
||||
// Delete the configuration even if stopping failed
|
||||
deleteProxyConfig(id);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop all proxy processes
|
||||
* @returns Promise resolving when all proxies are stopped
|
||||
*/
|
||||
export async function stopAllProxyProcesses(): Promise<void> {
|
||||
const configs = listProxyConfigs();
|
||||
|
||||
const stopPromises = configs.map((config) => stopProxyProcess(config.id));
|
||||
await Promise.all(stopPromises);
|
||||
}
|
||||
@@ -1,149 +0,0 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import os from "node:os";
|
||||
|
||||
// Define the proxy configuration type
|
||||
export interface ProxyConfig {
|
||||
id: string;
|
||||
upstreamUrl: string;
|
||||
localPort?: number;
|
||||
ignoreProxyCertificate?: boolean;
|
||||
localUrl?: string;
|
||||
pid?: number;
|
||||
}
|
||||
|
||||
// Path to store proxy configurations
|
||||
const STORAGE_DIR = path.join(os.tmpdir(), "donutbrowser", "proxies");
|
||||
|
||||
// Ensure storage directory exists
|
||||
if (!fs.existsSync(STORAGE_DIR)) {
|
||||
fs.mkdirSync(STORAGE_DIR, { recursive: true });
|
||||
}
|
||||
|
||||
/**
|
||||
* Save a proxy configuration to disk
|
||||
* @param config The proxy configuration to save
|
||||
*/
|
||||
export function saveProxyConfig(config: ProxyConfig): void {
|
||||
const filePath = path.join(STORAGE_DIR, `${config.id}.json`);
|
||||
fs.writeFileSync(filePath, JSON.stringify(config, null, 2));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a proxy configuration by ID
|
||||
* @param id The proxy ID
|
||||
* @returns The proxy configuration or null if not found
|
||||
*/
|
||||
export function getProxyConfig(id: string): ProxyConfig | null {
|
||||
const filePath = path.join(STORAGE_DIR, `${id}.json`);
|
||||
|
||||
if (!fs.existsSync(filePath)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
const content = fs.readFileSync(filePath, "utf-8");
|
||||
return JSON.parse(content) as ProxyConfig;
|
||||
} catch (error) {
|
||||
console.error(`Error reading proxy config ${id}:`, error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a proxy configuration
|
||||
* @param id The proxy ID to delete
|
||||
* @returns True if deleted, false if not found
|
||||
*/
|
||||
export function deleteProxyConfig(id: string): boolean {
|
||||
const filePath = path.join(STORAGE_DIR, `${id}.json`);
|
||||
|
||||
if (!fs.existsSync(filePath)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
fs.unlinkSync(filePath);
|
||||
return true;
|
||||
} catch (error) {
|
||||
console.error(`Error deleting proxy config ${id}:`, error);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List all saved proxy configurations
|
||||
* @returns Array of proxy configurations
|
||||
*/
|
||||
export function listProxyConfigs(): ProxyConfig[] {
|
||||
if (!fs.existsSync(STORAGE_DIR)) {
|
||||
return [];
|
||||
}
|
||||
|
||||
try {
|
||||
return fs
|
||||
.readdirSync(STORAGE_DIR)
|
||||
.filter((file) => file.endsWith(".json"))
|
||||
.map((file) => {
|
||||
try {
|
||||
const content = fs.readFileSync(
|
||||
path.join(STORAGE_DIR, file),
|
||||
"utf-8"
|
||||
);
|
||||
return JSON.parse(content) as ProxyConfig;
|
||||
} catch (error) {
|
||||
console.error(`Error reading proxy config ${file}:`, error);
|
||||
return null;
|
||||
}
|
||||
})
|
||||
.filter((config): config is ProxyConfig => config !== null);
|
||||
} catch (error) {
|
||||
console.error("Error listing proxy configs:", error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update a proxy configuration
|
||||
* @param config The proxy configuration to update
|
||||
* @returns True if updated, false if not found
|
||||
*/
|
||||
export function updateProxyConfig(config: ProxyConfig): boolean {
|
||||
const filePath = path.join(STORAGE_DIR, `${config.id}.json`);
|
||||
|
||||
if (!fs.existsSync(filePath)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
fs.writeFileSync(filePath, JSON.stringify(config, null, 2));
|
||||
return true;
|
||||
} catch (error) {
|
||||
console.error(`Error updating proxy config ${config.id}:`, error);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a proxy process is running
|
||||
* @param pid The process ID to check
|
||||
* @returns True if running, false otherwise
|
||||
*/
|
||||
export function isProcessRunning(pid: number): boolean {
|
||||
try {
|
||||
// The kill method with signal 0 doesn't actually kill the process
|
||||
// but checks if it exists
|
||||
process.kill(pid, 0);
|
||||
return true;
|
||||
} catch (error) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a unique ID for a proxy
|
||||
* @returns A unique ID string
|
||||
*/
|
||||
export function generateProxyId(): string {
|
||||
return `proxy_${Date.now()}_${Math.floor(Math.random() * 10000)}`;
|
||||
}
|
||||
@@ -1,75 +0,0 @@
|
||||
import { Server } from "proxy-chain";
|
||||
import { getProxyConfig, updateProxyConfig } from "./proxy-storage";
|
||||
|
||||
/**
|
||||
* Run a proxy server as a worker process
|
||||
* @param id The proxy configuration ID
|
||||
*/
|
||||
export async function runProxyWorker(id: string): Promise<void> {
|
||||
// Get the proxy configuration
|
||||
const config = getProxyConfig(id);
|
||||
|
||||
if (!config) {
|
||||
console.error(`Proxy configuration ${id} not found`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Create a new proxy server
|
||||
const server = new Server({
|
||||
port: config.localPort,
|
||||
host: "127.0.0.1",
|
||||
prepareRequestFunction: () => {
|
||||
return {
|
||||
upstreamProxyUrl: config.upstreamUrl,
|
||||
ignoreUpstreamProxyCertificate: config.ignoreProxyCertificate ?? false,
|
||||
};
|
||||
},
|
||||
});
|
||||
|
||||
// Handle process termination gracefully
|
||||
const gracefulShutdown = async (signal: string) => {
|
||||
console.log(`Proxy worker ${id} received ${signal}, shutting down...`);
|
||||
try {
|
||||
await server.close(true);
|
||||
console.log(`Proxy worker ${id} shut down successfully`);
|
||||
} catch (error) {
|
||||
console.error(`Error during shutdown for proxy ${id}:`, error);
|
||||
}
|
||||
process.exit(0);
|
||||
};
|
||||
|
||||
process.on("SIGTERM", () => void gracefulShutdown("SIGTERM"));
|
||||
process.on("SIGINT", () => void gracefulShutdown("SIGINT"));
|
||||
|
||||
// Handle uncaught exceptions
|
||||
process.on("uncaughtException", (error) => {
|
||||
console.error(`Uncaught exception in proxy worker ${id}:`, error);
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
process.on("unhandledRejection", (reason) => {
|
||||
console.error(`Unhandled rejection in proxy worker ${id}:`, reason);
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
// Start the server
|
||||
try {
|
||||
await server.listen();
|
||||
|
||||
// Update the config with the actual port (in case it was auto-assigned)
|
||||
config.localPort = server.port;
|
||||
config.localUrl = `http://127.0.0.1:${server.port}`;
|
||||
updateProxyConfig(config);
|
||||
|
||||
console.log(`Proxy worker ${id} started on port ${server.port}`);
|
||||
console.log(`Forwarding to upstream proxy: ${config.upstreamUrl}`);
|
||||
|
||||
// Keep the process alive
|
||||
setInterval(() => {
|
||||
// Do nothing, just keep the process alive
|
||||
}, 60000);
|
||||
} catch (error) {
|
||||
console.error(`Failed to start proxy worker ${id}:`, error);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,120 @@
|
||||
import type { LaunchOptions } from "playwright-core";
|
||||
|
||||
const OS_MAP: { [key: string]: "mac" | "win" | "lin" } = {
|
||||
darwin: "mac",
|
||||
linux: "lin",
|
||||
win32: "win",
|
||||
};
|
||||
|
||||
const OS_NAME: "mac" | "win" | "lin" = OS_MAP[process.platform];
|
||||
|
||||
export function getEnvVars(configMap: Record<string, string>) {
|
||||
const envVars: {
|
||||
[key: string]: string | undefined;
|
||||
} = {};
|
||||
let updatedConfigData: Uint8Array;
|
||||
|
||||
try {
|
||||
// Ensure we're working with a fresh copy of the config
|
||||
const configCopy = JSON.parse(JSON.stringify(configMap));
|
||||
updatedConfigData = new TextEncoder().encode(JSON.stringify(configCopy));
|
||||
} catch (e) {
|
||||
console.error(`Error updating config: ${e}`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const chunkSize = OS_NAME === "win" ? 2047 : 32767;
|
||||
const configStr = new TextDecoder().decode(updatedConfigData);
|
||||
|
||||
for (let i = 0; i < configStr.length; i += chunkSize) {
|
||||
const chunk = configStr.slice(i, i + chunkSize);
|
||||
const envName = `CAMOU_CONFIG_${Math.floor(i / chunkSize) + 1}`;
|
||||
try {
|
||||
envVars[envName] = chunk;
|
||||
} catch (e) {
|
||||
console.error(`Error setting ${envName}: ${e}`);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
return envVars;
|
||||
}
|
||||
|
||||
export function parseProxyString(proxyString: LaunchOptions["proxy"] | string) {
|
||||
if (typeof proxyString === "object") {
|
||||
return proxyString;
|
||||
}
|
||||
|
||||
if (!proxyString || typeof proxyString !== "string") {
|
||||
throw new Error("Invalid proxy string provided");
|
||||
}
|
||||
|
||||
// Remove any leading/trailing whitespace
|
||||
const trimmed = proxyString.trim();
|
||||
|
||||
// Handle different proxy string formats:
|
||||
// 1. http://username:password@host:port
|
||||
// 2. host:port
|
||||
// 3. protocol://host:port
|
||||
// 4. username:password@host:port
|
||||
|
||||
let server = "";
|
||||
let username: string | undefined;
|
||||
let password: string | undefined;
|
||||
|
||||
try {
|
||||
// Try parsing as URL first (handles protocol://username:password@host:port)
|
||||
if (trimmed.includes("://")) {
|
||||
const url = new URL(trimmed);
|
||||
// Playwright accepts short form "host:port" for HTTP proxies
|
||||
server = `${url.hostname}:${url.port}`;
|
||||
|
||||
if (url.username) {
|
||||
username = decodeURIComponent(url.username);
|
||||
}
|
||||
if (url.password) {
|
||||
password = decodeURIComponent(url.password);
|
||||
}
|
||||
} else {
|
||||
// Handle formats without protocol
|
||||
let workingString = trimmed;
|
||||
|
||||
// Check for username:password@ prefix
|
||||
const authMatch = workingString.match(/^([^:@]+):([^@]+)@(.+)$/);
|
||||
if (authMatch) {
|
||||
username = authMatch[1];
|
||||
password = authMatch[2];
|
||||
workingString = authMatch[3];
|
||||
}
|
||||
|
||||
// The remaining part should be host:port
|
||||
server = workingString;
|
||||
}
|
||||
|
||||
// Validate that we have a server
|
||||
if (!server) {
|
||||
throw new Error("Could not extract server information");
|
||||
}
|
||||
|
||||
// Basic validation for host:port format
|
||||
if (!server.includes(":") || server.split(":").length !== 2) {
|
||||
throw new Error("Server must be in host:port format");
|
||||
}
|
||||
|
||||
const result: LaunchOptions["proxy"] = { server };
|
||||
|
||||
if (username !== undefined) {
|
||||
result.username = username;
|
||||
}
|
||||
|
||||
if (password !== undefined) {
|
||||
result.password = password;
|
||||
}
|
||||
|
||||
return result;
|
||||
} catch (error) {
|
||||
throw new Error(
|
||||
`Failed to parse proxy string: ${error instanceof Error ? error.message : "Unknown error"}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
+53
-49
@@ -2,7 +2,7 @@
|
||||
"name": "donutbrowser",
|
||||
"private": true,
|
||||
"license": "AGPL-3.0",
|
||||
"version": "0.5.2",
|
||||
"version": "0.13.3",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "next dev --turbopack",
|
||||
@@ -11,77 +11,81 @@
|
||||
"test": "pnpm test:rust",
|
||||
"test:rust": "cd src-tauri && cargo test",
|
||||
"lint": "pnpm lint:js && pnpm lint:rust",
|
||||
"lint:js": "biome check src/ && tsc --noEmit && next lint",
|
||||
"lint:js": "biome check src/ && tsc --noEmit",
|
||||
"lint:rust": "cd src-tauri && cargo clippy --all-targets --all-features -- -D warnings -D clippy::all && cargo fmt --all",
|
||||
"tauri": "tauri",
|
||||
"shadcn:add": "pnpm dlx shadcn@latest add",
|
||||
"prepare": "husky && husky install",
|
||||
"format:rust": "cd src-tauri && cargo clippy --fix --allow-dirty --all-targets --all-features -- -D warnings -D clippy::all && cargo fmt --all",
|
||||
"format:js": "biome check src/ --fix",
|
||||
"format:js": "biome check src/ --write --unsafe",
|
||||
"format": "pnpm format:js && pnpm format:rust",
|
||||
"cargo": "cd src-tauri && cargo",
|
||||
"unused-exports:js": "ts-unused-exports tsconfig.json",
|
||||
"check-unused-commands": "cd src-tauri && cargo run --bin check_unused_commands"
|
||||
"check-unused-commands": "cd src-tauri && cargo test test_no_unused_tauri_commands",
|
||||
"copy-proxy-binary": "cd src-tauri && bash copy-proxy-binary.sh",
|
||||
"prebuild": "pnpm copy-proxy-binary",
|
||||
"pretauri:dev": "pnpm copy-proxy-binary",
|
||||
"precargo": "pnpm copy-proxy-binary"
|
||||
},
|
||||
"dependencies": {
|
||||
"@radix-ui/react-checkbox": "^1.3.2",
|
||||
"@radix-ui/react-dialog": "^1.1.14",
|
||||
"@radix-ui/react-dropdown-menu": "^2.1.15",
|
||||
"@radix-ui/react-label": "^2.1.7",
|
||||
"@radix-ui/react-popover": "^1.1.14",
|
||||
"@radix-ui/react-progress": "^1.1.7",
|
||||
"@radix-ui/react-scroll-area": "^1.2.9",
|
||||
"@radix-ui/react-select": "^2.2.5",
|
||||
"@radix-ui/react-slot": "^1.2.3",
|
||||
"@radix-ui/react-tooltip": "^1.2.7",
|
||||
"@radix-ui/react-checkbox": "^1.3.3",
|
||||
"@radix-ui/react-dialog": "^1.1.15",
|
||||
"@radix-ui/react-dropdown-menu": "^2.1.16",
|
||||
"@radix-ui/react-label": "^2.1.8",
|
||||
"@radix-ui/react-popover": "^1.1.15",
|
||||
"@radix-ui/react-progress": "^1.1.8",
|
||||
"@radix-ui/react-radio-group": "^1.3.8",
|
||||
"@radix-ui/react-scroll-area": "^1.2.10",
|
||||
"@radix-ui/react-select": "^2.2.6",
|
||||
"@radix-ui/react-slot": "^1.2.4",
|
||||
"@radix-ui/react-tabs": "^1.1.13",
|
||||
"@radix-ui/react-tooltip": "^1.2.8",
|
||||
"@tanstack/react-table": "^8.21.3",
|
||||
"@tauri-apps/api": "^2.5.0",
|
||||
"@tauri-apps/plugin-deep-link": "^2.3.0",
|
||||
"@tauri-apps/plugin-dialog": "^2.2.2",
|
||||
"@tauri-apps/plugin-fs": "~2.3.0",
|
||||
"@tauri-apps/plugin-opener": "^2.2.7",
|
||||
"ahooks": "^3.8.5",
|
||||
"@tauri-apps/api": "^2.9.1",
|
||||
"@tauri-apps/plugin-deep-link": "^2.4.5",
|
||||
"@tauri-apps/plugin-dialog": "^2.4.2",
|
||||
"@tauri-apps/plugin-fs": "~2.4.4",
|
||||
"@tauri-apps/plugin-log": "^2.7.1",
|
||||
"@tauri-apps/plugin-opener": "^2.5.2",
|
||||
"ahooks": "^3.9.6",
|
||||
"class-variance-authority": "^0.7.1",
|
||||
"clsx": "^2.1.1",
|
||||
"cmdk": "^1.1.1",
|
||||
"next": "^15.3.3",
|
||||
"color": "^5.0.3",
|
||||
"flag-icons": "^7.5.0",
|
||||
"lucide-react": "^0.555.0",
|
||||
"motion": "^12.23.24",
|
||||
"next": "^16.0.6",
|
||||
"next-themes": "^0.4.6",
|
||||
"react": "^19.1.0",
|
||||
"react-dom": "^19.1.0",
|
||||
"radix-ui": "^1.4.3",
|
||||
"react": "^19.2.0",
|
||||
"react-dom": "^19.2.0",
|
||||
"react-icons": "^5.5.0",
|
||||
"sonner": "^2.0.5",
|
||||
"tailwind-merge": "^3.3.1",
|
||||
"recharts": "3.5.1",
|
||||
"sonner": "^2.0.7",
|
||||
"tailwind-merge": "^3.4.0",
|
||||
"tauri-plugin-macos-permissions-api": "^2.3.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "1.9.4",
|
||||
"@eslint/eslintrc": "^3.3.1",
|
||||
"@eslint/js": "^9.29.0",
|
||||
"@next/eslint-plugin-next": "^15.3.3",
|
||||
"@tailwindcss/postcss": "^4.1.10",
|
||||
"@tauri-apps/cli": "^2.5.0",
|
||||
"@types/node": "^24.0.1",
|
||||
"@types/react": "^19.1.8",
|
||||
"@types/react-dom": "^19.1.6",
|
||||
"@typescript-eslint/eslint-plugin": "^8.34.0",
|
||||
"@typescript-eslint/parser": "^8.34.0",
|
||||
"@vitejs/plugin-react": "^4.5.2",
|
||||
"eslint": "^9.29.0",
|
||||
"eslint-config-next": "^15.3.3",
|
||||
"eslint-plugin-react-hooks": "^5.2.0",
|
||||
"@biomejs/biome": "2.3.8",
|
||||
"@tailwindcss/postcss": "^4.1.17",
|
||||
"@tauri-apps/cli": "^2.9.5",
|
||||
"@types/color": "^4.2.0",
|
||||
"@types/node": "^24.10.1",
|
||||
"@types/react": "^19.2.7",
|
||||
"@types/react-dom": "^19.2.3",
|
||||
"@vitejs/plugin-react": "^5.1.1",
|
||||
"husky": "^9.1.7",
|
||||
"lint-staged": "^16.1.1",
|
||||
"tailwindcss": "^4.1.10",
|
||||
"lint-staged": "^16.2.7",
|
||||
"tailwindcss": "^4.1.17",
|
||||
"ts-unused-exports": "^11.0.1",
|
||||
"tw-animate-css": "^1.3.4",
|
||||
"typescript": "~5.8.3",
|
||||
"typescript-eslint": "^8.34.0"
|
||||
"tw-animate-css": "^1.4.0",
|
||||
"typescript": "~5.9.3"
|
||||
},
|
||||
"packageManager": "pnpm@10.11.1",
|
||||
"packageManager": "pnpm@10.14.0+sha512.ad27a79641b49c3e481a16a805baa71817a04bbe06a38d17e60e2eaee83f6a146c6a688125f5792e48dd5ba30e7da52a5cda4c3992b9ccf333f9ce223af84748",
|
||||
"lint-staged": {
|
||||
"**/*.{js,jsx,ts,tsx,json,css,md}": [
|
||||
"biome check --fix",
|
||||
"eslint --cache --fix"
|
||||
"**/*.{js,jsx,ts,tsx,json,css}": [
|
||||
"biome check --fix"
|
||||
],
|
||||
"src-tauri/**/*.rs": [
|
||||
"cd src-tauri && cargo fmt --all",
|
||||
|
||||
Generated
+3054
-4411
File diff suppressed because it is too large
Load Diff
+5
-3
@@ -1,9 +1,11 @@
|
||||
packages:
|
||||
- "nodecar"
|
||||
- nodecar
|
||||
|
||||
onlyBuiltDependencies:
|
||||
- "@biomejs/biome"
|
||||
- "@tailwindcss/oxide"
|
||||
- '@biomejs/biome'
|
||||
- '@tailwindcss/oxide'
|
||||
- better-sqlite3
|
||||
- esbuild
|
||||
- sharp
|
||||
- sqlite3
|
||||
- unrs-resolver
|
||||
|
||||
@@ -0,0 +1,4 @@
|
||||
[build]
|
||||
# Omit jobs setting to use all cores
|
||||
|
||||
incremental = true
|
||||
Generated
+2075
-930
File diff suppressed because it is too large
Load Diff
+75
-14
@@ -1,7 +1,7 @@
|
||||
[package]
|
||||
name = "donutbrowser"
|
||||
version = "0.5.2"
|
||||
description = "Simple Yet Powerful Browser Orchestrator"
|
||||
version = "0.13.3"
|
||||
description = "Simple Yet Powerful Anti-Detect Browser"
|
||||
authors = ["zhom@github"]
|
||||
edition = "2021"
|
||||
default-run = "donutbrowser"
|
||||
@@ -12,8 +12,17 @@ default-run = "donutbrowser"
|
||||
# The `_lib` suffix may seem redundant but it is necessary
|
||||
# to make the lib name unique and wouldn't conflict with the bin name.
|
||||
# This seems to be only an issue on Windows, see https://github.com/rust-lang/cargo/issues/8519
|
||||
name = "donutbrowser"
|
||||
name = "donutbrowser_lib"
|
||||
crate-type = ["staticlib", "cdylib", "rlib"]
|
||||
doctest = false
|
||||
|
||||
[[bin]]
|
||||
name = "donutbrowser"
|
||||
path = "src/main.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "donut-proxy"
|
||||
path = "src/bin/proxy_server.rs"
|
||||
|
||||
[build-dependencies]
|
||||
tauri-build = { version = "2", features = [] }
|
||||
@@ -28,28 +37,55 @@ tauri-plugin-shell = "2"
|
||||
tauri-plugin-deep-link = "2"
|
||||
tauri-plugin-dialog = "2"
|
||||
tauri-plugin-macos-permissions = "2"
|
||||
tauri-plugin-log = "2"
|
||||
log = "0.4"
|
||||
env_logger = "0.11"
|
||||
|
||||
directories = "6"
|
||||
reqwest = { version = "0.12", features = ["json", "stream"] }
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
sysinfo = "0.35"
|
||||
reqwest = { version = "0.12", features = ["json", "stream", "socks"] }
|
||||
tokio = { version = "1", features = ["full", "sync"] }
|
||||
sysinfo = "0.37"
|
||||
lazy_static = "1.4"
|
||||
base64 = "0.22"
|
||||
zip = "4"
|
||||
libc = "0.2"
|
||||
async-trait = "0.1"
|
||||
futures-util = "0.3"
|
||||
zip = "6"
|
||||
tar = "0"
|
||||
bzip2 = "0"
|
||||
flate2 = "1"
|
||||
lzma-rs = "0"
|
||||
msi-extract = "0"
|
||||
|
||||
uuid = { version = "1.18", features = ["v4", "serde"] }
|
||||
url = "2.5"
|
||||
urlencoding = "2.1"
|
||||
chrono = { version = "0.4", features = ["serde"] }
|
||||
axum = "0.8.7"
|
||||
tower = "0.5"
|
||||
tower-http = { version = "0.6", features = ["cors"] }
|
||||
rand = "0.9.2"
|
||||
utoipa = { version = "5", features = ["axum_extras", "chrono"] }
|
||||
utoipa-axum = "0.2"
|
||||
argon2 = "0.5"
|
||||
aes-gcm = "0.10"
|
||||
hyper = { version = "1.8", features = ["full"] }
|
||||
hyper-util = { version = "0.1", features = ["full"] }
|
||||
http-body-util = "0.1"
|
||||
clap = { version = "4", features = ["derive"] }
|
||||
async-socks5 = "0.6"
|
||||
|
||||
[target."cfg(any(target_os = \"macos\", windows, target_os = \"linux\"))".dependencies]
|
||||
tauri-plugin-single-instance = { version = "2", features = ["deep-link"] }
|
||||
|
||||
[target.'cfg(target_os = "macos")'.dependencies]
|
||||
core-foundation="0.10"
|
||||
core-foundation = "0.10"
|
||||
objc2 = "0.6.1"
|
||||
objc2-app-kit = { version = "0.3.1", features = ["NSWindow"] }
|
||||
|
||||
[target.'cfg(target_os = "windows")'.dependencies]
|
||||
winreg = "0.55"
|
||||
windows = { version = "0.61", features = [
|
||||
windows = { version = "0.62", features = [
|
||||
"Win32_Foundation",
|
||||
"Win32_System_ProcessStatus",
|
||||
"Win32_System_Threading",
|
||||
@@ -62,19 +98,44 @@ windows = { version = "0.61", features = [
|
||||
] }
|
||||
|
||||
[dev-dependencies]
|
||||
tempfile = "3.13.0"
|
||||
tokio-test = "0.4.4"
|
||||
tempfile = "3.21.0"
|
||||
wiremock = "0.6"
|
||||
hyper = { version = "1.0", features = ["full"] }
|
||||
hyper = { version = "1.8", features = ["full"] }
|
||||
hyper-util = { version = "0.1", features = ["full"] }
|
||||
http-body-util = "0.1"
|
||||
tower = "0.5"
|
||||
tower-http = { version = "0.6", features = ["fs", "trace"] }
|
||||
futures-util = "0.3"
|
||||
serial_test = "3"
|
||||
|
||||
# Integration test configuration
|
||||
[[test]]
|
||||
name = "donut_proxy_integration"
|
||||
path = "tests/donut_proxy_integration.rs"
|
||||
|
||||
[profile.dev]
|
||||
codegen-units = 256
|
||||
incremental = true
|
||||
opt-level = 0
|
||||
# Split debuginfo on macOS for faster linking (ignored on other platforms)
|
||||
split-debuginfo = "unpacked"
|
||||
|
||||
[profile.release]
|
||||
codegen-units = 1
|
||||
opt-level = 3
|
||||
lto = "thin"
|
||||
# Split debuginfo on macOS for faster linking (ignored on other platforms)
|
||||
split-debuginfo = "unpacked"
|
||||
|
||||
[profile.test]
|
||||
# Optimize test builds for faster compilation
|
||||
codegen-units = 256
|
||||
incremental = true
|
||||
|
||||
[features]
|
||||
# by default Tauri runs in production mode
|
||||
# when `tauri dev` runs it is executed with `cargo run --no-default-features` if `devPath` points to the filesystem
|
||||
default = [ "custom-protocol" ]
|
||||
default = ["custom-protocol"]
|
||||
# this feature is used used for production builds where `devPath` points to the filesystem
|
||||
# DO NOT remove this
|
||||
custom-protocol = [ "tauri/custom-protocol" ]
|
||||
custom-protocol = ["tauri/custom-protocol"]
|
||||
|
||||
@@ -3,15 +3,15 @@
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>NSCameraUsageDescription</key>
|
||||
<string>Donut Browser needs camera access to enable camera functionality in web browsers. Each website will still ask for your permission individually.</string>
|
||||
<string>Donut needs camera access to enable camera functionality in web browsers. Each website will still ask for your permission individually.</string>
|
||||
<key>NSMicrophoneUsageDescription</key>
|
||||
<string>Donut Browser needs microphone access to enable microphone functionality in web browsers. Each website will still ask for your permission individually.</string>
|
||||
<string>Donut needs microphone access to enable microphone functionality in web browsers. Each website will still ask for your permission individually.</string>
|
||||
<key>NSLocalNetworkUsageDescription</key>
|
||||
<string>Donut Browser has proxy functionality that requires local network access. You can deny this functionality if you don't plan on setting proxies for browser profiles.</string>
|
||||
<string>Donut has proxy functionality that requires local network access. You can deny this functionality if you don't plan on setting proxies for browser profiles.</string>
|
||||
<key>CFBundleDisplayName</key>
|
||||
<string>Donut Browser</string>
|
||||
<string>Donut</string>
|
||||
<key>CFBundleName</key>
|
||||
<string>Donut Browser</string>
|
||||
<string>Donut</string>
|
||||
<key>CFBundleIdentifier</key>
|
||||
<string>com.donutbrowser</string>
|
||||
<key>CFBundleURLName</key>
|
||||
@@ -25,7 +25,7 @@
|
||||
<key>LSApplicationCategoryType</key>
|
||||
<string>public.app-category.productivity</string>
|
||||
<key>NSHumanReadableCopyright</key>
|
||||
<string>Copyright © 2025 Donut Browser</string>
|
||||
<string>Copyright © 2025 Donut</string>
|
||||
<key>CFBundleDocumentTypes</key>
|
||||
<array>
|
||||
<dict>
|
||||
|
||||
+58
-1
@@ -1,4 +1,6 @@
|
||||
fn main() {
|
||||
println!("cargo::rustc-check-cfg=cfg(mobile)");
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
println!("cargo:rustc-link-lib=framework=CoreFoundation");
|
||||
@@ -26,5 +28,60 @@ fn main() {
|
||||
println!("cargo:rustc-env=BUILD_VERSION=dev-{version}");
|
||||
}
|
||||
|
||||
tauri_build::build()
|
||||
// Inject vault password at build time
|
||||
if let Ok(vault_password) = std::env::var("DONUT_BROWSER_VAULT_PASSWORD") {
|
||||
println!("cargo:rustc-env=DONUT_BROWSER_VAULT_PASSWORD={vault_password}");
|
||||
} else {
|
||||
// Use default password if environment variable is not set
|
||||
println!("cargo:rustc-env=DONUT_BROWSER_VAULT_PASSWORD=donutbrowser-api-vault-password");
|
||||
}
|
||||
|
||||
// Tell Cargo to rebuild if the proxy binary source changes
|
||||
println!("cargo:rerun-if-changed=src/bin/proxy_server.rs");
|
||||
println!("cargo:rerun-if-changed=src/proxy_server.rs");
|
||||
println!("cargo:rerun-if-changed=src/proxy_runner.rs");
|
||||
println!("cargo:rerun-if-changed=src/proxy_storage.rs");
|
||||
|
||||
// Only run tauri_build if all external binaries exist
|
||||
// This allows building donut-proxy sidecar without the other binaries present
|
||||
if external_binaries_exist() {
|
||||
tauri_build::build()
|
||||
} else {
|
||||
println!("cargo:warning=Skipping tauri_build: external binaries not found. This is expected when building sidecar binaries.");
|
||||
}
|
||||
}
|
||||
|
||||
fn external_binaries_exist() -> bool {
|
||||
use std::env;
|
||||
use std::path::PathBuf;
|
||||
|
||||
let manifest_dir = match env::var("CARGO_MANIFEST_DIR") {
|
||||
Ok(dir) => dir,
|
||||
Err(_) => return false,
|
||||
};
|
||||
|
||||
let target = match env::var("TARGET") {
|
||||
Ok(t) => t,
|
||||
Err(_) => return false,
|
||||
};
|
||||
|
||||
let binaries_dir = PathBuf::from(&manifest_dir).join("binaries");
|
||||
|
||||
// Check for both required external binaries
|
||||
let nodecar_name = if target.contains("windows") {
|
||||
format!("nodecar-{}.exe", target)
|
||||
} else {
|
||||
format!("nodecar-{}", target)
|
||||
};
|
||||
|
||||
let donut_proxy_name = if target.contains("windows") {
|
||||
format!("donut-proxy-{}.exe", target)
|
||||
} else {
|
||||
format!("donut-proxy-{}", target)
|
||||
};
|
||||
|
||||
let nodecar_exists = binaries_dir.join(&nodecar_name).exists();
|
||||
let donut_proxy_exists = binaries_dir.join(&donut_proxy_name).exists();
|
||||
|
||||
nodecar_exists && donut_proxy_exists
|
||||
}
|
||||
|
||||
@@ -5,7 +5,15 @@
|
||||
"windows": ["main"],
|
||||
"permissions": [
|
||||
"core:default",
|
||||
"core:event:default",
|
||||
"core:event:allow-listen",
|
||||
"core:event:allow-emit",
|
||||
"core:event:allow-emit-to",
|
||||
"core:event:allow-unlisten",
|
||||
"core:image:default",
|
||||
"core:menu:default",
|
||||
"core:path:default",
|
||||
"core:tray:default",
|
||||
"core:webview:default",
|
||||
"core:window:default",
|
||||
"core:window:allow-start-dragging",
|
||||
"core:window:allow-close",
|
||||
@@ -29,6 +37,7 @@
|
||||
"macos-permissions:allow-request-microphone-permission",
|
||||
"macos-permissions:allow-request-camera-permission",
|
||||
"macos-permissions:allow-check-microphone-permission",
|
||||
"macos-permissions:allow-check-camera-permission"
|
||||
"macos-permissions:allow-check-camera-permission",
|
||||
"log:default"
|
||||
]
|
||||
}
|
||||
|
||||
Executable
+69
@@ -0,0 +1,69 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
# Get the target triple from environment or use default
|
||||
TARGET="${TARGET:-$(rustc -vV 2>/dev/null | sed -n 's|host: ||p' || echo "unknown")}"
|
||||
MANIFEST_DIR="$(dirname "$0")"
|
||||
|
||||
# Determine binary name based on target
|
||||
if [[ "$TARGET" == *"windows"* ]]; then
|
||||
BIN_NAME="donut-proxy.exe"
|
||||
else
|
||||
BIN_NAME="donut-proxy"
|
||||
fi
|
||||
|
||||
# Determine source path
|
||||
HOST_TARGET=$(rustc -vV 2>/dev/null | sed -n 's|host: ||p' || echo "$TARGET")
|
||||
if [[ "$TARGET" == "$HOST_TARGET" ]] || [[ "$TARGET" == "unknown" ]]; then
|
||||
# Native target - use debug or release based on profile
|
||||
if [[ "${PROFILE:-debug}" == "release" ]]; then
|
||||
SRC_DIR="$MANIFEST_DIR/target/release"
|
||||
else
|
||||
SRC_DIR="$MANIFEST_DIR/target/debug"
|
||||
fi
|
||||
else
|
||||
# Cross-compilation target
|
||||
if [[ "${PROFILE:-debug}" == "release" ]]; then
|
||||
SRC_DIR="$MANIFEST_DIR/target/$TARGET/release"
|
||||
else
|
||||
SRC_DIR="$MANIFEST_DIR/target/$TARGET/debug"
|
||||
fi
|
||||
fi
|
||||
|
||||
SOURCE="$SRC_DIR/$BIN_NAME"
|
||||
DEST_DIR="$MANIFEST_DIR/binaries"
|
||||
# Tauri expects the format: donut-proxy-{target} with hyphens (same as nodecar)
|
||||
DEST_NAME="donut-proxy-$TARGET"
|
||||
if [[ "$TARGET" == *"windows"* ]]; then
|
||||
DEST_NAME="$DEST_NAME.exe"
|
||||
fi
|
||||
DEST="$DEST_DIR/$DEST_NAME"
|
||||
|
||||
# Create binaries directory if it doesn't exist
|
||||
mkdir -p "$DEST_DIR"
|
||||
|
||||
# Copy the binary if it exists
|
||||
if [[ -f "$SOURCE" ]]; then
|
||||
cp "$SOURCE" "$DEST"
|
||||
echo "Copied $BIN_NAME to $DEST"
|
||||
else
|
||||
echo "Warning: Binary not found at $SOURCE"
|
||||
echo "Building donut-proxy binary..."
|
||||
cd "$MANIFEST_DIR"
|
||||
BUILD_ARGS=("build" "--bin" "donut-proxy")
|
||||
if [[ -n "$PROFILE" ]] && [[ "$PROFILE" == "release" ]]; then
|
||||
BUILD_ARGS+=("--release")
|
||||
fi
|
||||
if [[ -n "$TARGET" ]] && [[ "$TARGET" != "unknown" ]] && [[ "$TARGET" != "$HOST_TARGET" ]]; then
|
||||
BUILD_ARGS+=("--target" "$TARGET")
|
||||
fi
|
||||
cargo "${BUILD_ARGS[@]}"
|
||||
if [[ -f "$SOURCE" ]]; then
|
||||
cp "$SOURCE" "$DEST"
|
||||
echo "Built and copied $BIN_NAME to $DEST"
|
||||
else
|
||||
echo "Error: Failed to build donut-proxy binary"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
@@ -1,13 +1,16 @@
|
||||
[Desktop Entry]
|
||||
Version=1.0
|
||||
Type=Application
|
||||
Name=Donut Browser
|
||||
Comment=Simple Yet Powerful Browser Orchestrator
|
||||
Name=Donut
|
||||
Name[en]=Donut
|
||||
GenericName=Web Browser
|
||||
X-GNOME-FullName=Donut
|
||||
Comment=Simple Yet Powerful Anti-Detect Browser
|
||||
Exec=donutbrowser %u
|
||||
Icon=donutbrowser
|
||||
StartupNotify=true
|
||||
NoDisplay=false
|
||||
Categories=Network;WebBrowser;Productivity;
|
||||
Categories=Network;WebBrowser;
|
||||
MimeType=x-scheme-handler/http;x-scheme-handler/https;text/html;application/xhtml+xml;
|
||||
StartupWMClass=donutbrowser
|
||||
Keywords=browser;web;internet;productivity;
|
||||
@@ -28,5 +28,11 @@
|
||||
<true/>
|
||||
<key>com.apple.security.cs.disable-library-validation</key>
|
||||
<true/>
|
||||
<key>com.apple.security.automation.apple-events</key>
|
||||
<true/>
|
||||
<key>com.apple.security.device.usb</key>
|
||||
<true/>
|
||||
<key>com.apple.security.inherit</key>
|
||||
<true/>
|
||||
</dict>
|
||||
</plist>
|
||||
+434
-446
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
+1113
-135
File diff suppressed because it is too large
Load Diff
+235
-164
@@ -1,6 +1,5 @@
|
||||
use crate::api_client::is_browser_version_nightly;
|
||||
use crate::browser_runner::{BrowserProfile, BrowserRunner};
|
||||
use crate::browser_version_service::{BrowserVersionInfo, BrowserVersionService};
|
||||
use crate::browser_version_manager::{BrowserVersionInfo, BrowserVersionManager};
|
||||
use crate::profile::{BrowserProfile, ProfileManager};
|
||||
use crate::settings_manager::SettingsManager;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::{HashMap, HashSet};
|
||||
@@ -29,20 +28,24 @@ pub struct AutoUpdateState {
|
||||
}
|
||||
|
||||
pub struct AutoUpdater {
|
||||
version_service: BrowserVersionService,
|
||||
browser_runner: BrowserRunner,
|
||||
settings_manager: SettingsManager,
|
||||
browser_version_manager: &'static BrowserVersionManager,
|
||||
settings_manager: &'static SettingsManager,
|
||||
profile_manager: &'static ProfileManager,
|
||||
}
|
||||
|
||||
impl AutoUpdater {
|
||||
pub fn new() -> Self {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
version_service: BrowserVersionService::new(),
|
||||
browser_runner: BrowserRunner::new(),
|
||||
settings_manager: SettingsManager::new(),
|
||||
browser_version_manager: BrowserVersionManager::instance(),
|
||||
settings_manager: SettingsManager::instance(),
|
||||
profile_manager: ProfileManager::instance(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn instance() -> &'static AutoUpdater {
|
||||
&AUTO_UPDATER
|
||||
}
|
||||
|
||||
/// Check for updates for all profiles
|
||||
pub async fn check_for_updates(
|
||||
&self,
|
||||
@@ -52,7 +55,7 @@ impl AutoUpdater {
|
||||
|
||||
// Group profiles by browser
|
||||
let profiles = self
|
||||
.browser_runner
|
||||
.profile_manager
|
||||
.list_profiles()
|
||||
.map_err(|e| format!("Failed to list profiles: {e}"))?;
|
||||
let mut browser_profiles: HashMap<String, Vec<BrowserProfile>> = HashMap::new();
|
||||
@@ -60,7 +63,7 @@ impl AutoUpdater {
|
||||
for profile in profiles {
|
||||
// Only check supported browsers
|
||||
if !self
|
||||
.version_service
|
||||
.browser_version_manager
|
||||
.is_browser_supported(&profile.browser)
|
||||
.unwrap_or(false)
|
||||
{
|
||||
@@ -76,14 +79,14 @@ impl AutoUpdater {
|
||||
for (browser, profiles) in browser_profiles {
|
||||
// Get cached versions first, then try to fetch if needed
|
||||
let versions = if let Some(cached) = self
|
||||
.version_service
|
||||
.browser_version_manager
|
||||
.get_cached_browser_versions_detailed(&browser)
|
||||
{
|
||||
cached
|
||||
} else if self.version_service.should_update_cache(&browser) {
|
||||
} else if self.browser_version_manager.should_update_cache(&browser) {
|
||||
// Try to fetch fresh versions
|
||||
match self
|
||||
.version_service
|
||||
.browser_version_manager
|
||||
.fetch_browser_versions_detailed(&browser, false)
|
||||
.await
|
||||
{
|
||||
@@ -101,19 +104,19 @@ impl AutoUpdater {
if let Some(update) = self.check_profile_update(&profile, &versions)? {
// Apply chromium threshold logic
if browser == "chromium" {
// For chromium, only show notifications if there are 50+ new versions
// For chromium, only show notifications if there are 400+ new versions
let current_version = &profile.version.parse::<u32>().unwrap();
let new_version = &update.new_version.parse::<u32>().unwrap();

let result = new_version - current_version;
println!(
log::info!(
"Current version: {current_version}, New version: {new_version}, Result: {result}"
);
if result > 50 {
if result > 400 {
notifications.push(update);
} else {
println!(
"Skipping chromium update notification: only {result} new versions (need 50+)"
log::info!(
"Skipping chromium update notification: only {result} new versions (need 400+)"
);
}
} else {
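An illustrative aside (build numbers below are hypothetical): the profile version is parsed as a single integer here, so the 400-version gate reduces to plain integer arithmetic.

// Sketch only, not part of the diff above; the build numbers are made up.
let current_version: u32 = "1465000".parse().unwrap();
let new_version: u32 = "1465500".parse().unwrap();
assert!(new_version - current_version > 400); // 500 newer builds -> notify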
@@ -127,62 +130,66 @@ impl AutoUpdater {
|
||||
}
|
||||
|
||||
pub async fn check_for_updates_with_progress(&self, app_handle: &tauri::AppHandle) {
|
||||
println!("Starting auto-update check with progress...");
|
||||
log::info!("Starting auto-update check with progress...");
|
||||
|
||||
// Check for browser updates and trigger auto-downloads
|
||||
match self.check_for_updates().await {
|
||||
Ok(update_notifications) => {
|
||||
if !update_notifications.is_empty() {
|
||||
println!(
|
||||
log::info!(
|
||||
"Found {} browser updates to auto-download",
|
||||
update_notifications.len()
|
||||
);
|
||||
|
||||
// Trigger automatic downloads for each update
|
||||
for notification in update_notifications {
|
||||
println!(
|
||||
log::info!(
|
||||
"Auto-downloading {} version {}",
|
||||
notification.browser, notification.new_version
|
||||
notification.browser,
|
||||
notification.new_version
|
||||
);
|
||||
|
||||
// Clone app_handle for the async task
|
||||
let app_handle_clone = app_handle.clone();
|
||||
let browser = notification.browser.clone();
|
||||
let new_version = notification.new_version.clone();
|
||||
let notification_id = notification.id.clone();
|
||||
let affected_profiles = notification.affected_profiles.clone();
|
||||
let app_handle_clone = app_handle.clone();
|
||||
|
||||
// Spawn async task to handle the download and auto-update
|
||||
tokio::spawn(async move {
|
||||
// TODO: update the logic to use the downloaded browsers registry instance instead of the static method
|
||||
// First, check if browser already exists
|
||||
match crate::browser_runner::is_browser_downloaded(
|
||||
match crate::downloaded_browsers_registry::is_browser_downloaded(
|
||||
browser.clone(),
|
||||
new_version.clone(),
|
||||
) {
|
||||
true => {
|
||||
println!("Browser {browser} {new_version} already downloaded, proceeding to auto-update profiles");
|
||||
log::info!("Browser {browser} {new_version} already downloaded, proceeding to auto-update profiles");
|
||||
|
||||
// Browser already exists, go straight to profile update
|
||||
match crate::auto_updater::complete_browser_update_with_auto_update(
|
||||
browser.clone(),
|
||||
new_version.clone(),
|
||||
)
|
||||
.await
|
||||
match AutoUpdater::instance()
|
||||
.complete_browser_update_with_auto_update(
|
||||
&app_handle_clone,
|
||||
&browser.clone(),
|
||||
&new_version.clone(),
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(updated_profiles) => {
|
||||
println!(
|
||||
log::info!(
|
||||
"Auto-update completed for {} profiles: {:?}",
|
||||
updated_profiles.len(),
|
||||
updated_profiles
|
||||
);
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Failed to complete auto-update for {browser}: {e}");
|
||||
log::error!("Failed to complete auto-update for {browser}: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
false => {
|
||||
println!("Downloading browser {browser} version {new_version}...");
|
||||
log::info!("Downloading browser {browser} version {new_version}...");
|
||||
|
||||
// Emit the auto-update event to trigger frontend handling
|
||||
let auto_update_event = serde_json::json!({
|
||||
@@ -195,20 +202,20 @@ impl AutoUpdater {
|
||||
if let Err(e) =
|
||||
app_handle_clone.emit("browser-auto-update-available", &auto_update_event)
|
||||
{
|
||||
eprintln!("Failed to emit auto-update event for {browser}: {e}");
|
||||
log::error!("Failed to emit auto-update event for {browser}: {e}");
|
||||
} else {
|
||||
println!("Emitted auto-update event for {browser}");
|
||||
log::info!("Emitted auto-update event for {browser}");
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
} else {
|
||||
println!("No browser updates needed");
|
||||
log::info!("No browser updates needed");
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Failed to check for browser updates: {e}");
|
||||
log::error!("Failed to check for browser updates: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -220,7 +227,8 @@ impl AutoUpdater {
|
||||
available_versions: &[BrowserVersionInfo],
|
||||
) -> Result<Option<UpdateNotification>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let current_version = &profile.version;
|
||||
let is_current_nightly = is_browser_version_nightly(&profile.browser, current_version, None);
|
||||
let is_current_nightly =
|
||||
crate::api_client::is_browser_version_nightly(&profile.browser, current_version, None);
|
||||
|
||||
// Find the best available update
|
||||
let best_update = available_versions
|
||||
@@ -228,7 +236,8 @@ impl AutoUpdater {
|
||||
.filter(|v| {
|
||||
// Only consider versions newer than current
|
||||
self.is_version_newer(&v.version, current_version)
|
||||
&& is_browser_version_nightly(&profile.browser, &v.version, None) == is_current_nightly
|
||||
&& crate::api_client::is_browser_version_nightly(&profile.browser, &v.version, None)
|
||||
== is_current_nightly
|
||||
})
|
||||
.max_by(|a, b| self.compare_versions(&a.version, &b.version));
|
||||
|
||||
@@ -291,11 +300,12 @@ impl AutoUpdater {
|
||||
/// Automatically update all affected profile versions after browser download
|
||||
pub async fn auto_update_profile_versions(
|
||||
&self,
|
||||
app_handle: &tauri::AppHandle,
|
||||
browser: &str,
|
||||
new_version: &str,
|
||||
) -> Result<Vec<String>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let profiles = self
|
||||
.browser_runner
|
||||
.profile_manager
|
||||
.list_profiles()
|
||||
.map_err(|e| format!("Failed to list profiles: {e}"))?;
|
||||
|
||||
@@ -312,15 +322,16 @@ impl AutoUpdater {
|
||||
// Check if this is an update (newer version)
|
||||
if self.is_version_newer(new_version, &profile.version) {
|
||||
// Update the profile version
|
||||
match self
|
||||
.browser_runner
|
||||
.update_profile_version(&profile.name, new_version)
|
||||
{
|
||||
match self.profile_manager.update_profile_version(
|
||||
app_handle,
|
||||
&profile.id.to_string(),
|
||||
new_version,
|
||||
) {
|
||||
Ok(_) => {
|
||||
updated_profiles.push(profile.name);
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Failed to update profile {}: {}", profile.name, e);
|
||||
log::error!("Failed to update profile {}: {}", profile.name, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -333,12 +344,13 @@ impl AutoUpdater {
|
||||
/// Complete browser update process with auto-update of profile versions
|
||||
pub async fn complete_browser_update_with_auto_update(
|
||||
&self,
|
||||
app_handle: &tauri::AppHandle,
|
||||
browser: &str,
|
||||
new_version: &str,
|
||||
) -> Result<Vec<String>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Auto-update profile versions first
|
||||
let updated_profiles = self
|
||||
.auto_update_profile_versions(browser, new_version)
|
||||
.auto_update_profile_versions(app_handle, browser, new_version)
|
||||
.await?;
|
||||
|
||||
// Remove browser from disabled list and clean up auto-update tracking
|
||||
@@ -348,51 +360,9 @@ impl AutoUpdater {
|
||||
state.auto_update_downloads.remove(&download_key);
|
||||
self.save_auto_update_state(&state)?;
|
||||
|
||||
// Check if auto-delete of unused binaries is enabled and perform cleanup
|
||||
let settings = self
|
||||
.settings_manager
|
||||
.load_settings()
|
||||
.map_err(|e| format!("Failed to load settings: {e}"))?;
|
||||
if settings.auto_delete_unused_binaries {
|
||||
// Perform cleanup in the background - don't fail the update if cleanup fails
|
||||
if let Err(e) = self.cleanup_unused_binaries_internal() {
|
||||
eprintln!("Warning: Failed to cleanup unused binaries after auto-update: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
Ok(updated_profiles)
|
||||
}
|
||||
|
||||
/// Internal method to cleanup unused binaries (used by auto-cleanup)
|
||||
fn cleanup_unused_binaries_internal(
|
||||
&self,
|
||||
) -> Result<Vec<String>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Load current profiles
|
||||
let profiles = self
|
||||
.browser_runner
|
||||
.list_profiles()
|
||||
.map_err(|e| format!("Failed to load profiles: {e}"))?;
|
||||
|
||||
// Load registry
|
||||
let mut registry = crate::downloaded_browsers::DownloadedBrowsersRegistry::load()
|
||||
.map_err(|e| format!("Failed to load browser registry: {e}"))?;
|
||||
|
||||
// Get active browser versions
|
||||
let active_versions = registry.get_active_browser_versions(&profiles);
|
||||
|
||||
// Cleanup unused binaries
|
||||
let cleaned_up = registry
|
||||
.cleanup_unused_binaries(&active_versions)
|
||||
.map_err(|e| format!("Failed to cleanup unused binaries: {e}"))?;
|
||||
|
||||
// Save updated registry
|
||||
registry
|
||||
.save()
|
||||
.map_err(|e| format!("Failed to save registry: {e}"))?;
|
||||
|
||||
Ok(cleaned_up)
|
||||
}
|
||||
|
||||
/// Check if browser is disabled due to ongoing update
|
||||
pub fn is_browser_disabled(
|
||||
&self,
|
||||
@@ -414,22 +384,11 @@ impl AutoUpdater {
|
||||
}
|
||||
|
||||
fn is_version_newer(&self, version1: &str, version2: &str) -> bool {
|
||||
self.compare_versions(version1, version2) == std::cmp::Ordering::Greater
|
||||
crate::api_client::is_version_newer(version1, version2)
|
||||
}
|
||||
|
||||
fn compare_versions(&self, version1: &str, version2: &str) -> std::cmp::Ordering {
|
||||
// Basic semantic version comparison
|
||||
let v1_parts = self.parse_version(version1);
|
||||
let v2_parts = self.parse_version(version2);
|
||||
|
||||
v1_parts.cmp(&v2_parts)
|
||||
}
|
||||
|
||||
fn parse_version(&self, version: &str) -> Vec<u32> {
|
||||
version
|
||||
.split(&['.', 'a', 'b', '-', '_'][..])
|
||||
.filter_map(|part| part.parse::<u32>().ok())
|
||||
.collect()
|
||||
crate::api_client::compare_versions(version1, version2)
|
||||
}
|
||||
|
||||
fn get_auto_update_state_file(&self) -> PathBuf {
|
||||
@@ -439,7 +398,7 @@ impl AutoUpdater {
|
||||
.join("auto_update_state.json")
|
||||
}
|
||||
|
||||
fn load_auto_update_state(
|
||||
pub fn load_auto_update_state(
|
||||
&self,
|
||||
) -> Result<AutoUpdateState, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let state_file = self.get_auto_update_state_file();
|
||||
@@ -453,7 +412,7 @@ impl AutoUpdater {
|
||||
Ok(state)
|
||||
}
|
||||
|
||||
fn save_auto_update_state(
|
||||
pub fn save_auto_update_state(
|
||||
&self,
|
||||
state: &AutoUpdateState,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
@@ -466,13 +425,46 @@ impl AutoUpdater {
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get pending update versions for a specific browser
|
||||
/// Returns a set of (browser, version) pairs that have pending updates
|
||||
pub fn get_pending_update_versions(
|
||||
&self,
|
||||
) -> Result<std::collections::HashSet<(String, String)>, Box<dyn std::error::Error + Send + Sync>>
|
||||
{
|
||||
let state = self.load_auto_update_state()?;
|
||||
let mut pending_versions = std::collections::HashSet::new();
|
||||
|
||||
for update in &state.pending_updates {
|
||||
pending_versions.insert((update.browser.clone(), update.new_version.clone()));
|
||||
}
|
||||
|
||||
Ok(pending_versions)
|
||||
}
|
||||
|
||||
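For orientation, a plausible shape of auto_update_state.json, inferred from the fields exercised in the tests further down (key names assume serde's default field naming; the values are hypothetical):

// {
//   "disabled_browsers": ["firefox"],
//   "auto_update_downloads": ["firefox-1.1.0"],
//   "pending_updates": []
// }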
/// Get pending update for a specific browser version if it exists
|
||||
pub fn get_pending_update(
|
||||
&self,
|
||||
browser: &str,
|
||||
current_version: &str,
|
||||
) -> Result<Option<UpdateNotification>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let state = self.load_auto_update_state()?;
|
||||
|
||||
for update in &state.pending_updates {
|
||||
if update.browser == browser && update.current_version == current_version {
|
||||
return Ok(Some(update.clone()));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
// Tauri commands
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn check_for_browser_updates() -> Result<Vec<UpdateNotification>, String> {
|
||||
let updater = AutoUpdater::new();
|
||||
let updater = AutoUpdater::instance();
|
||||
let notifications = updater
|
||||
.check_for_updates()
|
||||
.await
|
||||
@@ -481,17 +473,9 @@ pub async fn check_for_browser_updates() -> Result<Vec<UpdateNotification>, Stri
|
||||
Ok(grouped)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn is_browser_disabled_for_update(browser: String) -> Result<bool, String> {
|
||||
let updater = AutoUpdater::new();
|
||||
updater
|
||||
.is_browser_disabled(&browser)
|
||||
.map_err(|e| format!("Failed to check browser status: {e}"))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn dismiss_update_notification(notification_id: String) -> Result<(), String> {
|
||||
let updater = AutoUpdater::new();
|
||||
let updater = AutoUpdater::instance();
|
||||
updater
|
||||
.dismiss_update_notification(¬ification_id)
|
||||
.map_err(|e| format!("Failed to dismiss notification: {e}"))
|
||||
@@ -499,19 +483,20 @@ pub async fn dismiss_update_notification(notification_id: String) -> Result<(),
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn complete_browser_update_with_auto_update(
|
||||
app_handle: tauri::AppHandle,
|
||||
browser: String,
|
||||
new_version: String,
|
||||
) -> Result<Vec<String>, String> {
|
||||
let updater = AutoUpdater::new();
|
||||
let updater = AutoUpdater::instance();
|
||||
updater
|
||||
.complete_browser_update_with_auto_update(&browser, &new_version)
|
||||
.complete_browser_update_with_auto_update(&app_handle, &browser, &new_version)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to complete browser update: {e}"))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn check_for_updates_with_progress(app_handle: tauri::AppHandle) {
|
||||
let updater = AutoUpdater::new();
|
||||
let updater = AutoUpdater::instance();
|
||||
updater.check_for_updates_with_progress(&app_handle).await;
|
||||
}
|
||||
|
||||
@@ -521,14 +506,18 @@ mod tests {
|
||||
|
||||
fn create_test_profile(name: &str, browser: &str, version: &str) -> BrowserProfile {
|
||||
BrowserProfile {
|
||||
id: uuid::Uuid::new_v4(),
|
||||
name: name.to_string(),
|
||||
browser: browser.to_string(),
|
||||
version: version.to_string(),
|
||||
profile_path: format!("/tmp/{name}"),
|
||||
process_id: None,
|
||||
proxy: None,
|
||||
proxy_id: None,
|
||||
last_launch: None,
|
||||
release_type: "stable".to_string(),
|
||||
camoufox_config: None,
|
||||
group_id: None,
|
||||
tags: Vec::new(),
|
||||
note: None,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -542,7 +531,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_compare_versions() {
|
||||
let updater = AutoUpdater::new();
|
||||
let updater = AutoUpdater::instance();
|
||||
|
||||
assert_eq!(
|
||||
updater.compare_versions("1.0.0", "1.0.0"),
|
||||
@@ -568,7 +557,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_is_version_newer() {
|
||||
let updater = AutoUpdater::new();
|
||||
let updater = AutoUpdater::instance();
|
||||
|
||||
assert!(updater.is_version_newer("1.0.1", "1.0.0"));
|
||||
assert!(updater.is_version_newer("2.0.0", "1.9.9"));
|
||||
@@ -576,9 +565,71 @@ mod tests {
|
||||
assert!(!updater.is_version_newer("1.0.0", "1.0.0"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_camoufox_beta_version_comparison() {
|
||||
let updater = AutoUpdater::instance();
|
||||
|
||||
// Test the exact user-reported scenario: 135.0.1beta24 vs 135.0beta22
|
||||
assert!(
|
||||
updater.is_version_newer("135.0.1beta24", "135.0beta22"),
|
||||
"135.0.1beta24 should be newer than 135.0beta22"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
updater.compare_versions("135.0.1beta24", "135.0beta22"),
|
||||
std::cmp::Ordering::Greater,
|
||||
"135.0.1beta24 should compare as greater than 135.0beta22"
|
||||
);
|
||||
|
||||
// Test other camoufox beta version combinations
|
||||
assert!(
|
||||
updater.is_version_newer("135.0.5beta24", "135.0.5beta22"),
|
||||
"135.0.5beta24 should be newer than 135.0.5beta22"
|
||||
);
|
||||
|
||||
assert!(
|
||||
updater.is_version_newer("135.0.1beta1", "135.0beta1"),
|
||||
"135.0.1beta1 should be newer than 135.0beta1 due to patch version"
|
||||
);
|
||||
|
||||
// Test that older versions are not considered newer
|
||||
assert!(
|
||||
!updater.is_version_newer("135.0beta22", "135.0.1beta24"),
|
||||
"135.0beta22 should NOT be newer than 135.0.1beta24"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_beta_version_ordering_comprehensive() {
|
||||
let updater = AutoUpdater::instance();
|
||||
|
||||
// Test various beta version patterns that could appear in camoufox
|
||||
let test_cases = vec![
|
||||
("135.0.1beta24", "135.0beta22", true), // User reported case
|
||||
("135.0.5beta24", "135.0.5beta22", true), // Same patch, different beta
|
||||
("135.1beta1", "135.0beta99", true), // Higher minor beats beta number
|
||||
("136.0beta1", "135.9.9beta99", true), // Higher major beats everything
|
||||
("135.0.1beta1", "135.0beta1", true), // Patch version matters
|
||||
("135.0beta22", "135.0.1beta24", false), // Reverse of user case
|
||||
];
|
||||
|
||||
for (newer, older, should_be_newer) in test_cases {
|
||||
let result = updater.is_version_newer(newer, older);
|
||||
assert_eq!(
|
||||
result,
|
||||
should_be_newer,
|
||||
"Expected {} {} {} but got {}",
|
||||
newer,
|
||||
if should_be_newer { ">" } else { "<=" },
|
||||
older,
|
||||
if result { "true" } else { "false" }
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_check_profile_update_stable_to_stable() {
|
||||
let updater = AutoUpdater::new();
|
||||
let updater = AutoUpdater::instance();
|
||||
let profile = create_test_profile("test", "firefox", "1.0.0");
|
||||
let versions = vec![
|
||||
create_test_version_info("1.0.1", false), // stable, newer
|
||||
@@ -596,7 +647,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_check_profile_update_alpha_to_alpha() {
|
||||
let updater = AutoUpdater::new();
|
||||
let updater = AutoUpdater::instance();
|
||||
let profile = create_test_profile("test", "firefox", "1.0.0-alpha");
|
||||
let versions = vec![
|
||||
create_test_version_info("1.0.1", false), // stable, should be included
|
||||
@@ -615,7 +666,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_check_profile_update_no_update_available() {
|
||||
let updater = AutoUpdater::new();
|
||||
let updater = AutoUpdater::instance();
|
||||
let profile = create_test_profile("test", "firefox", "1.0.0");
|
||||
let versions = vec![
|
||||
create_test_version_info("0.9.0", false), // older
|
||||
@@ -628,7 +679,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_group_update_notifications() {
|
||||
let updater = AutoUpdater::new();
|
||||
let updater = AutoUpdater::instance();
|
||||
let notifications = vec![
|
||||
UpdateNotification {
|
||||
id: "firefox_1.0.0_to_1.1.0_profile1".to_string(),
|
||||
@@ -726,13 +777,15 @@ mod tests {
|
||||
let state_file = test_settings_manager
|
||||
.get_settings_dir()
|
||||
.join("auto_update_state.json");
|
||||
std::fs::create_dir_all(test_settings_manager.get_settings_dir()).unwrap();
|
||||
let json = serde_json::to_string_pretty(&state).unwrap();
|
||||
std::fs::write(&state_file, json).unwrap();
|
||||
std::fs::create_dir_all(test_settings_manager.get_settings_dir())
|
||||
.expect("Failed to create settings directory");
|
||||
let json = serde_json::to_string_pretty(&state).expect("Failed to serialize state");
|
||||
std::fs::write(&state_file, json).expect("Failed to write state file");
|
||||
|
||||
// Load state
|
||||
let content = std::fs::read_to_string(&state_file).unwrap();
|
||||
let loaded_state: AutoUpdateState = serde_json::from_str(&content).unwrap();
|
||||
let content = std::fs::read_to_string(&state_file).expect("Failed to read state file");
|
||||
let loaded_state: AutoUpdateState =
|
||||
serde_json::from_str(&content).expect("Failed to deserialize state");
|
||||
|
||||
assert_eq!(loaded_state.disabled_browsers.len(), 1);
|
||||
assert!(loaded_state.disabled_browsers.contains("firefox"));
|
||||
@@ -770,11 +823,15 @@ mod tests {
|
||||
let state_file = test_settings_manager
|
||||
.get_settings_dir()
|
||||
.join("auto_update_state.json");
|
||||
std::fs::create_dir_all(test_settings_manager.get_settings_dir()).unwrap();
|
||||
std::fs::create_dir_all(test_settings_manager.get_settings_dir())
|
||||
.expect("Failed to create settings directory");
|
||||
|
||||
// Initially not disabled (empty state file means default state)
|
||||
let state = AutoUpdateState::default();
|
||||
assert!(!state.disabled_browsers.contains("firefox"));
|
||||
assert!(
|
||||
!state.disabled_browsers.contains("firefox"),
|
||||
"Firefox should not be disabled initially"
|
||||
);
|
||||
|
||||
// Start update (should disable)
|
||||
let mut state = AutoUpdateState::default();
|
||||
@@ -782,27 +839,41 @@ mod tests {
|
||||
state
|
||||
.auto_update_downloads
|
||||
.insert("firefox-1.1.0".to_string());
|
||||
let json = serde_json::to_string_pretty(&state).unwrap();
|
||||
std::fs::write(&state_file, json).unwrap();
|
||||
let json = serde_json::to_string_pretty(&state).expect("Failed to serialize state");
|
||||
std::fs::write(&state_file, json).expect("Failed to write state file");
|
||||
|
||||
// Check that it's disabled
|
||||
let content = std::fs::read_to_string(&state_file).unwrap();
|
||||
let loaded_state: AutoUpdateState = serde_json::from_str(&content).unwrap();
|
||||
assert!(loaded_state.disabled_browsers.contains("firefox"));
|
||||
assert!(loaded_state.auto_update_downloads.contains("firefox-1.1.0"));
|
||||
let content = std::fs::read_to_string(&state_file).expect("Failed to read state file");
|
||||
let loaded_state: AutoUpdateState =
|
||||
serde_json::from_str(&content).expect("Failed to deserialize state");
|
||||
assert!(
|
||||
loaded_state.disabled_browsers.contains("firefox"),
|
||||
"Firefox should be disabled"
|
||||
);
|
||||
assert!(
|
||||
loaded_state.auto_update_downloads.contains("firefox-1.1.0"),
|
||||
"Firefox download should be tracked"
|
||||
);
|
||||
|
||||
// Complete update (should enable)
|
||||
let mut state = loaded_state;
|
||||
state.disabled_browsers.remove("firefox");
|
||||
state.auto_update_downloads.remove("firefox-1.1.0");
|
||||
let json = serde_json::to_string_pretty(&state).unwrap();
|
||||
std::fs::write(&state_file, json).unwrap();
|
||||
let json = serde_json::to_string_pretty(&state).expect("Failed to serialize final state");
|
||||
std::fs::write(&state_file, json).expect("Failed to write final state file");
|
||||
|
||||
// Check that it's enabled again
|
||||
let content = std::fs::read_to_string(&state_file).unwrap();
|
||||
let final_state: AutoUpdateState = serde_json::from_str(&content).unwrap();
|
||||
assert!(!final_state.disabled_browsers.contains("firefox"));
|
||||
assert!(!final_state.auto_update_downloads.contains("firefox-1.1.0"));
|
||||
let content = std::fs::read_to_string(&state_file).expect("Failed to read final state file");
|
||||
let final_state: AutoUpdateState =
|
||||
serde_json::from_str(&content).expect("Failed to deserialize final state");
|
||||
assert!(
|
||||
!final_state.disabled_browsers.contains("firefox"),
|
||||
"Firefox should be enabled again"
|
||||
);
|
||||
assert!(
|
||||
!final_state.auto_update_downloads.contains("firefox-1.1.0"),
|
||||
"Firefox download should not be tracked anymore"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -844,31 +915,31 @@ mod tests {
|
||||
let state_file = test_settings_manager
|
||||
.get_settings_dir()
|
||||
.join("auto_update_state.json");
|
||||
std::fs::create_dir_all(test_settings_manager.get_settings_dir()).unwrap();
|
||||
let json = serde_json::to_string_pretty(&state).unwrap();
|
||||
std::fs::write(&state_file, json).unwrap();
|
||||
std::fs::create_dir_all(test_settings_manager.get_settings_dir())
|
||||
.expect("Failed to create settings directory");
|
||||
let json = serde_json::to_string_pretty(&state).expect("Failed to serialize initial state");
|
||||
std::fs::write(&state_file, json).expect("Failed to write initial state file");
|
||||
|
||||
// Dismiss notification (remove from pending updates)
|
||||
state
|
||||
.pending_updates
|
||||
.retain(|n| n.id != "test_notification");
|
||||
let json = serde_json::to_string_pretty(&state).unwrap();
|
||||
std::fs::write(&state_file, json).unwrap();
|
||||
let json = serde_json::to_string_pretty(&state).expect("Failed to serialize updated state");
|
||||
std::fs::write(&state_file, json).expect("Failed to write updated state file");
|
||||
|
||||
// Check that it's removed
|
||||
let content = std::fs::read_to_string(&state_file).unwrap();
|
||||
let loaded_state: AutoUpdateState = serde_json::from_str(&content).unwrap();
|
||||
assert_eq!(loaded_state.pending_updates.len(), 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_version() {
|
||||
let updater = AutoUpdater::new();
|
||||
|
||||
assert_eq!(updater.parse_version("1.2.3"), vec![1, 2, 3]);
|
||||
assert_eq!(updater.parse_version("1.2.3-alpha"), vec![1, 2, 3]);
|
||||
assert_eq!(updater.parse_version("1.2.3a1"), vec![1, 2, 3, 1]);
|
||||
assert_eq!(updater.parse_version("1.2.3b2"), vec![1, 2, 3, 2]);
|
||||
assert_eq!(updater.parse_version("10.0.0"), vec![10, 0, 0]);
|
||||
let content = std::fs::read_to_string(&state_file).expect("Failed to read updated state file");
|
||||
let loaded_state: AutoUpdateState =
|
||||
serde_json::from_str(&content).expect("Failed to deserialize updated state");
|
||||
assert_eq!(
|
||||
loaded_state.pending_updates.len(),
|
||||
0,
|
||||
"Pending updates should be empty after dismissal"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Global singleton instance
lazy_static::lazy_static! {
static ref AUTO_UPDATER: AutoUpdater = AutoUpdater::new();
}

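The refactor in this file replaces per-call AutoUpdater::new() construction with a process-wide singleton. A minimal sketch of the same pattern, assuming the lazy_static crate as used above (the Service type is illustrative):

// Callers share one value via Service::instance() instead of constructing new ones.
use lazy_static::lazy_static;

struct Service;

impl Service {
    fn new() -> Self {
        Service
    }

    fn instance() -> &'static Service {
        &SERVICE
    }
}

lazy_static! {
    static ref SERVICE: Service = Service::new();
}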
@@ -0,0 +1,328 @@
use clap::{Arg, Command};
use donutbrowser_lib::proxy_runner::{
start_proxy_process_with_profile, stop_all_proxy_processes, stop_proxy_process,
};
use donutbrowser_lib::proxy_server::run_proxy_server;
use donutbrowser_lib::proxy_storage::get_proxy_config;
use std::process;

fn set_high_priority() {
|
||||
#[cfg(unix)]
|
||||
{
|
||||
unsafe {
|
||||
// Set high priority (negative nice value = higher priority)
|
||||
// -10 is a reasonably high priority without being too aggressive
|
||||
// This may fail without elevated privileges, which is fine
|
||||
let result = libc::setpriority(libc::PRIO_PROCESS, 0, -10);
|
||||
if result == 0 {
|
||||
log::info!("Set process priority to -10 (high priority)");
|
||||
} else {
|
||||
// Try a less aggressive priority if -10 fails
|
||||
let result = libc::setpriority(libc::PRIO_PROCESS, 0, -5);
|
||||
if result == 0 {
|
||||
log::info!("Set process priority to -5 (above normal)");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
// Lower OOM score so this process is less likely to be killed under memory pressure
|
||||
// Valid range is -1000 to 1000, lower = less likely to be killed
|
||||
// -500 is a reasonable value that makes us less likely to be killed
|
||||
if let Err(e) = std::fs::write("/proc/self/oom_score_adj", "-500") {
|
||||
log::debug!("Could not set OOM score adjustment: {}", e);
|
||||
} else {
|
||||
log::info!("Set OOM score adjustment to -500");
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
{
|
||||
use windows::Win32::System::Threading::{
|
||||
GetCurrentProcess, SetPriorityClass, ABOVE_NORMAL_PRIORITY_CLASS,
|
||||
};
|
||||
|
||||
unsafe {
|
||||
let process = GetCurrentProcess();
|
||||
if SetPriorityClass(process, ABOVE_NORMAL_PRIORITY_CLASS).is_ok() {
|
||||
log::info!("Set process priority to ABOVE_NORMAL_PRIORITY_CLASS");
|
||||
} else {
|
||||
log::debug!("Could not set process priority class");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn build_proxy_url(
|
||||
proxy_type: &str,
|
||||
host: &str,
|
||||
port: u16,
|
||||
username: Option<&str>,
|
||||
password: Option<&str>,
|
||||
) -> String {
|
||||
let mut url = format!("{}://", proxy_type.to_lowercase());
|
||||
|
||||
if let (Some(user), Some(pass)) = (username, password) {
|
||||
let encoded_user = urlencoding::encode(user);
|
||||
let encoded_pass = urlencoding::encode(pass);
|
||||
url.push_str(&format!("{}:{}@", encoded_user, encoded_pass));
|
||||
} else if let Some(user) = username {
|
||||
let encoded_user = urlencoding::encode(user);
|
||||
url.push_str(&format!("{}@", encoded_user));
|
||||
}
|
||||
|
||||
url.push_str(host);
|
||||
url.push(':');
|
||||
url.push_str(&port.to_string());
|
||||
|
||||
url
|
||||
}
|
||||
|
||||
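A quick sketch of the URL shapes this helper produces (host, port, and credentials are made-up values; the percent-encoding comes from the urlencoding crate used above):

// Sketch only; the inputs are hypothetical.
assert_eq!(
    build_proxy_url("SOCKS5", "127.0.0.1", 1080, Some("user"), Some("p@ss")),
    "socks5://user:p%40ss@127.0.0.1:1080"
);
assert_eq!(
    build_proxy_url("http", "proxy.example.com", 8080, None, None),
    "http://proxy.example.com:8080"
);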
#[tokio::main(flavor = "multi_thread")]
|
||||
async fn main() {
|
||||
// Initialize logger to write to stderr (which will be redirected to file)
|
||||
env_logger::Builder::from_default_env()
|
||||
.filter_level(log::LevelFilter::Debug)
|
||||
.format_timestamp_millis()
|
||||
.init();
|
||||
|
||||
// Set up panic handler to log panics before process exits
|
||||
std::panic::set_hook(Box::new(|panic_info| {
|
||||
log::error!("PANIC in proxy worker: {:?}", panic_info);
|
||||
if let Some(location) = panic_info.location() {
|
||||
log::error!(
|
||||
"Location: {}:{}:{}",
|
||||
location.file(),
|
||||
location.line(),
|
||||
location.column()
|
||||
);
|
||||
}
|
||||
if let Some(s) = panic_info.payload().downcast_ref::<&str>() {
|
||||
log::error!("Message: {}", s);
|
||||
}
|
||||
}));
|
||||
|
||||
let matches = Command::new("donut-proxy")
|
||||
.subcommand(
|
||||
Command::new("proxy")
|
||||
.about("Manage proxy servers")
|
||||
.subcommand(
|
||||
Command::new("start")
|
||||
.about("Start a proxy server")
|
||||
.arg(Arg::new("host").long("host").help("Upstream proxy host"))
|
||||
.arg(
|
||||
Arg::new("proxy-port")
|
||||
.long("proxy-port")
|
||||
.value_parser(clap::value_parser!(u16))
|
||||
.help("Upstream proxy port"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("type")
|
||||
.long("type")
|
||||
.help("Proxy type (http, https, socks4, socks5)"),
|
||||
)
|
||||
.arg(Arg::new("username").long("username").help("Proxy username"))
|
||||
.arg(Arg::new("password").long("password").help("Proxy password"))
|
||||
.arg(
|
||||
Arg::new("port")
|
||||
.short('p')
|
||||
.long("port")
|
||||
.value_parser(clap::value_parser!(u16))
|
||||
.help("Local port to use (random if not specified)"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("ignore-certificate")
|
||||
.long("ignore-certificate")
|
||||
.help("Ignore certificate errors for HTTPS proxies"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("upstream")
|
||||
.short('u')
|
||||
.long("upstream")
|
||||
.help("Upstream proxy URL (protocol://[username:password@]host:port)"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("profile-id")
|
||||
.long("profile-id")
|
||||
.help("ID of the profile this proxy is associated with"),
|
||||
),
|
||||
)
|
||||
.subcommand(
|
||||
Command::new("stop")
|
||||
.about("Stop a proxy server")
|
||||
.arg(Arg::new("id").long("id").help("Proxy ID to stop"))
|
||||
.arg(
|
||||
Arg::new("upstream")
|
||||
.long("upstream")
|
||||
.help("Stop proxies with this upstream URL"),
|
||||
),
|
||||
)
|
||||
.subcommand(Command::new("list").about("List all proxy servers")),
|
||||
)
|
||||
.subcommand(
|
||||
Command::new("proxy-worker")
|
||||
.about("Run a proxy worker process (internal use)")
|
||||
.arg(
|
||||
Arg::new("id")
|
||||
.long("id")
|
||||
.required(true)
|
||||
.help("Proxy configuration ID"),
|
||||
)
|
||||
.arg(Arg::new("action").required(true).help("Action (start)")),
|
||||
)
|
||||
.get_matches();
|
||||
|
||||
if let Some(proxy_matches) = matches.subcommand_matches("proxy") {
|
||||
if let Some(start_matches) = proxy_matches.subcommand_matches("start") {
|
||||
let mut upstream_url: Option<String> = None;
|
||||
|
||||
// Build upstream URL from individual components if provided
|
||||
if let (Some(host), Some(port), Some(proxy_type)) = (
|
||||
start_matches.get_one::<String>("host"),
|
||||
start_matches.get_one::<u16>("proxy-port"),
|
||||
start_matches.get_one::<String>("type"),
|
||||
) {
|
||||
let username = start_matches.get_one::<String>("username");
|
||||
let password = start_matches.get_one::<String>("password");
|
||||
upstream_url = Some(build_proxy_url(
|
||||
proxy_type,
|
||||
host,
|
||||
*port,
|
||||
username.map(|s| s.as_str()),
|
||||
password.map(|s| s.as_str()),
|
||||
));
|
||||
} else if let Some(upstream) = start_matches.get_one::<String>("upstream") {
|
||||
upstream_url = Some(upstream.clone());
|
||||
}
|
||||
|
||||
let port = start_matches.get_one::<u16>("port").copied();
|
||||
let profile_id = start_matches.get_one::<String>("profile-id").cloned();
|
||||
|
||||
match start_proxy_process_with_profile(upstream_url, port, profile_id).await {
|
||||
Ok(config) => {
|
||||
// Output the configuration as JSON for the Rust side to parse
|
||||
// Use println! here because this needs to go to stdout for parsing
|
||||
println!(
|
||||
"{}",
|
||||
serde_json::json!({
|
||||
"id": config.id,
|
||||
"localPort": config.local_port,
|
||||
"localUrl": config.local_url,
|
||||
"upstreamUrl": config.upstream_url,
|
||||
})
|
||||
);
|
||||
process::exit(0);
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Failed to start proxy: {}", e);
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
} else if let Some(stop_matches) = proxy_matches.subcommand_matches("stop") {
|
||||
if let Some(id) = stop_matches.get_one::<String>("id") {
|
||||
match stop_proxy_process(id).await {
|
||||
Ok(success) => {
|
||||
// Use println! here because this needs to go to stdout for parsing
|
||||
println!("{}", serde_json::json!({ "success": success }));
|
||||
process::exit(0);
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Failed to stop proxy: {}", e);
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
} else if let Some(upstream) = stop_matches.get_one::<String>("upstream") {
|
||||
// Find proxies with this upstream URL
|
||||
let configs = donutbrowser_lib::proxy_storage::list_proxy_configs();
|
||||
let matching_configs: Vec<_> = configs
|
||||
.iter()
|
||||
.filter(|config| config.upstream_url == *upstream)
|
||||
.collect();
|
||||
|
||||
if matching_configs.is_empty() {
|
||||
eprintln!("No proxies found for {}", upstream);
|
||||
process::exit(1);
|
||||
}
|
||||
|
||||
for config in matching_configs {
|
||||
let _ = stop_proxy_process(&config.id).await;
|
||||
}
|
||||
|
||||
// Use println! here because this needs to go to stdout for parsing
|
||||
println!("{}", serde_json::json!({ "success": true }));
|
||||
process::exit(0);
|
||||
} else {
|
||||
// Stop all proxies
|
||||
match stop_all_proxy_processes().await {
|
||||
Ok(_) => {
|
||||
// Use println! here because this needs to go to stdout for parsing
|
||||
println!("{}", serde_json::json!({ "success": true }));
|
||||
process::exit(0);
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Failed to stop all proxies: {}", e);
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if proxy_matches.subcommand_matches("list").is_some() {
|
||||
let configs = donutbrowser_lib::proxy_storage::list_proxy_configs();
|
||||
// Use println! here because this needs to go to stdout for parsing
|
||||
println!("{}", serde_json::to_string(&configs).unwrap());
|
||||
process::exit(0);
|
||||
} else {
|
||||
log::error!("Invalid action. Use 'start', 'stop', or 'list'");
|
||||
process::exit(1);
|
||||
}
|
||||
} else if let Some(worker_matches) = matches.subcommand_matches("proxy-worker") {
|
||||
let id = worker_matches
|
||||
.get_one::<String>("id")
|
||||
.expect("id is required");
|
||||
let action = worker_matches
|
||||
.get_one::<String>("action")
|
||||
.expect("action is required");
|
||||
|
||||
if action == "start" {
|
||||
// Set high priority so this process is killed last under resource pressure
|
||||
set_high_priority();
|
||||
|
||||
log::error!("Proxy worker starting, looking for config id: {}", id);
|
||||
log::error!("Process PID: {}", std::process::id());
|
||||
|
||||
let config = match get_proxy_config(id) {
|
||||
Some(config) => {
|
||||
log::error!(
|
||||
"Found config: id={}, port={:?}, upstream={}",
|
||||
config.id,
|
||||
config.local_port,
|
||||
config.upstream_url
|
||||
);
|
||||
config
|
||||
}
|
||||
None => {
|
||||
log::error!("Proxy configuration {} not found", id);
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
// Run the proxy server - this should never return (infinite loop)
|
||||
log::error!("Starting proxy server for config id: {}", id);
|
||||
if let Err(e) = run_proxy_server(config).await {
|
||||
log::error!("Failed to run proxy server: {}", e);
|
||||
log::error!("Error details: {:?}", e);
|
||||
process::exit(1);
|
||||
}
|
||||
// This should never be reached - run_proxy_server has an infinite loop
|
||||
log::error!("ERROR: Proxy server returned unexpectedly (this should never happen)");
|
||||
process::exit(1);
|
||||
} else {
|
||||
log::error!("Invalid action for proxy-worker. Use 'start'");
|
||||
process::exit(1);
|
||||
}
|
||||
} else {
|
||||
log::error!("No command specified");
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
+513 −171 (file diff suppressed because it is too large)
+1536 −2681 (file diff suppressed because it is too large)
(file diff suppressed because it is too large)
(file diff suppressed because it is too large)
@@ -0,0 +1,601 @@
use crate::browser_runner::BrowserRunner;
use crate::profile::BrowserProfile;
use directories::BaseDirs;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Arc;
use tauri::AppHandle;
use tauri_plugin_shell::ShellExt;
use tokio::sync::Mutex as AsyncMutex;

#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct CamoufoxConfig {
|
||||
pub proxy: Option<String>,
|
||||
pub screen_max_width: Option<u32>,
|
||||
pub screen_max_height: Option<u32>,
|
||||
pub screen_min_width: Option<u32>,
|
||||
pub screen_min_height: Option<u32>,
|
||||
pub geoip: Option<serde_json::Value>, // Can be String or bool
|
||||
pub block_images: Option<bool>,
|
||||
pub block_webrtc: Option<bool>,
|
||||
pub block_webgl: Option<bool>,
|
||||
pub executable_path: Option<String>,
|
||||
pub fingerprint: Option<String>, // JSON string of the complete fingerprint config
|
||||
pub randomize_fingerprint_on_launch: Option<bool>, // Generate new fingerprint on every launch
|
||||
pub os: Option<String>, // Operating system for fingerprint generation: "windows", "macos", or "linux"
|
||||
}
|
||||
|
||||
impl Default for CamoufoxConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
proxy: None,
|
||||
screen_max_width: None,
|
||||
screen_max_height: None,
|
||||
screen_min_width: None,
|
||||
screen_min_height: None,
|
||||
geoip: Some(serde_json::Value::Bool(true)),
|
||||
block_images: None,
|
||||
block_webrtc: None,
|
||||
block_webgl: None,
|
||||
executable_path: None,
|
||||
fingerprint: None,
|
||||
randomize_fingerprint_on_launch: None,
|
||||
os: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
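Since only geoip defaults to true and every other field is None, callers can lean on struct-update syntax; a sketch with hypothetical values:

// Sketch only; the proxy URL and OS are made-up values.
let config = CamoufoxConfig {
    proxy: Some("socks5://127.0.0.1:1080".to_string()),
    block_webrtc: Some(true),
    os: Some("windows".to_string()),
    ..CamoufoxConfig::default()
};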
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[allow(non_snake_case)]
|
||||
pub struct CamoufoxLaunchResult {
|
||||
pub id: String,
|
||||
#[serde(alias = "process_id")]
|
||||
pub processId: Option<u32>,
|
||||
#[serde(alias = "profile_path")]
|
||||
pub profilePath: Option<String>,
|
||||
pub url: Option<String>,
|
||||
}
|
||||
|
||||
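The alias attributes let the nodecar output use either snake_case or camelCase keys; a small deserialization sketch with hypothetical values:

// Sketch only; the JSON payload is illustrative, not actual nodecar output.
let json = r#"{"id":"abc123","process_id":4242,"profile_path":"/tmp/profile","url":null}"#;
let parsed: CamoufoxLaunchResult = serde_json::from_str(json).expect("valid JSON");
assert_eq!(parsed.processId, Some(4242));
assert_eq!(parsed.profilePath.as_deref(), Some("/tmp/profile"));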
#[derive(Debug)]
|
||||
struct CamoufoxInstance {
|
||||
#[allow(dead_code)]
|
||||
id: String,
|
||||
process_id: Option<u32>,
|
||||
profile_path: Option<String>,
|
||||
url: Option<String>,
|
||||
}
|
||||
|
||||
struct CamoufoxManagerInner {
|
||||
instances: HashMap<String, CamoufoxInstance>,
|
||||
}
|
||||
|
||||
pub struct CamoufoxManager {
|
||||
inner: Arc<AsyncMutex<CamoufoxManagerInner>>,
|
||||
base_dirs: BaseDirs,
|
||||
}
|
||||
|
||||
impl CamoufoxManager {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
inner: Arc::new(AsyncMutex::new(CamoufoxManagerInner {
|
||||
instances: HashMap::new(),
|
||||
})),
|
||||
base_dirs: BaseDirs::new().expect("Failed to get base directories"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn instance() -> &'static CamoufoxManager {
|
||||
&CAMOUFOX_NODECAR_LAUNCHER
|
||||
}
|
||||
|
||||
pub fn get_profiles_dir(&self) -> PathBuf {
|
||||
let mut path = self.base_dirs.data_local_dir().to_path_buf();
|
||||
path.push(if cfg!(debug_assertions) {
|
||||
"DonutBrowserDev"
|
||||
} else {
|
||||
"DonutBrowser"
|
||||
});
|
||||
path.push("profiles");
|
||||
path
|
||||
}
|
||||
|
||||
/// Generate Camoufox fingerprint configuration during profile creation
|
||||
pub async fn generate_fingerprint_config(
|
||||
&self,
|
||||
app_handle: &AppHandle,
|
||||
profile: &crate::profile::BrowserProfile,
|
||||
config: &CamoufoxConfig,
|
||||
) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let mut config_args = vec!["camoufox".to_string(), "generate-config".to_string()];
|
||||
|
||||
// Always ensure executable_path is set to the user's binary location
|
||||
let executable_path = if let Some(path) = &config.executable_path {
|
||||
path.clone()
|
||||
} else {
|
||||
// Use the browser runner helper with the real profile
|
||||
// Use self.browser_runner instead of instance()
|
||||
BrowserRunner::instance()
|
||||
.get_browser_executable_path(profile)
|
||||
.map_err(|e| format!("Failed to get Camoufox executable path: {e}"))?
|
||||
.to_string_lossy()
|
||||
.to_string()
|
||||
};
|
||||
config_args.extend(["--executable-path".to_string(), executable_path]);
|
||||
|
||||
// Pass existing fingerprint if provided (for advanced form partial fingerprints)
|
||||
if let Some(fingerprint) = &config.fingerprint {
|
||||
config_args.extend(["--fingerprint".to_string(), fingerprint.clone()]);
|
||||
}
|
||||
|
||||
if let Some(serde_json::Value::Bool(true)) = &config.geoip {
|
||||
config_args.push("--geoip".to_string());
|
||||
}
|
||||
|
||||
// Add proxy if provided (can be passed directly during fingerprint generation)
|
||||
if let Some(proxy) = &config.proxy {
|
||||
config_args.extend(["--proxy".to_string(), proxy.clone()]);
|
||||
}
|
||||
|
||||
// Add screen dimensions if provided
|
||||
if let Some(max_width) = config.screen_max_width {
|
||||
config_args.extend(["--max-width".to_string(), max_width.to_string()]);
|
||||
}
|
||||
|
||||
if let Some(max_height) = config.screen_max_height {
|
||||
config_args.extend(["--max-height".to_string(), max_height.to_string()]);
|
||||
}
|
||||
|
||||
if let Some(min_width) = config.screen_min_width {
|
||||
config_args.extend(["--min-width".to_string(), min_width.to_string()]);
|
||||
}
|
||||
|
||||
if let Some(min_height) = config.screen_min_height {
|
||||
config_args.extend(["--min-height".to_string(), min_height.to_string()]);
|
||||
}
|
||||
|
||||
// Add block_* options
|
||||
if let Some(block_images) = config.block_images {
|
||||
if block_images {
|
||||
config_args.push("--block-images".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(block_webrtc) = config.block_webrtc {
|
||||
if block_webrtc {
|
||||
config_args.push("--block-webrtc".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(block_webgl) = config.block_webgl {
|
||||
if block_webgl {
|
||||
config_args.push("--block-webgl".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
// Add OS option for fingerprint generation
|
||||
if let Some(os) = &config.os {
|
||||
config_args.extend(["--os".to_string(), os.clone()]);
|
||||
}
|
||||
|
||||
// Execute config generation command
|
||||
let mut config_sidecar = self.get_nodecar_sidecar(app_handle)?;
|
||||
for arg in &config_args {
|
||||
config_sidecar = config_sidecar.arg(arg);
|
||||
}
|
||||
|
||||
let config_output = config_sidecar.output().await?;
|
||||
if !config_output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&config_output.stderr);
|
||||
return Err(format!("Failed to generate camoufox fingerprint config: {stderr}").into());
|
||||
}
|
||||
|
||||
Ok(String::from_utf8_lossy(&config_output.stdout).to_string())
|
||||
}
|
||||
|
||||
/// Get the nodecar sidecar command
|
||||
fn get_nodecar_sidecar(
|
||||
&self,
|
||||
app_handle: &AppHandle,
|
||||
) -> Result<tauri_plugin_shell::process::Command, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let shell = app_handle.shell();
|
||||
let sidecar_command = shell
|
||||
.sidecar("nodecar")
|
||||
.map_err(|e| format!("Failed to create nodecar sidecar: {e}"))?;
|
||||
Ok(sidecar_command)
|
||||
}
|
||||
|
||||
/// Launch Camoufox browser using nodecar sidecar
|
||||
pub async fn launch_camoufox(
|
||||
&self,
|
||||
app_handle: &AppHandle,
|
||||
profile: &crate::profile::BrowserProfile,
|
||||
profile_path: &str,
|
||||
config: &CamoufoxConfig,
|
||||
url: Option<&str>,
|
||||
) -> Result<CamoufoxLaunchResult, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let custom_config = if let Some(existing_fingerprint) = &config.fingerprint {
|
||||
log::info!("Using existing fingerprint from profile metadata");
|
||||
existing_fingerprint.clone()
|
||||
} else {
|
||||
return Err("No fingerprint provided".into());
|
||||
};
|
||||
|
||||
// Always ensure executable_path is set to the user's binary location
|
||||
let executable_path = if let Some(path) = &config.executable_path {
|
||||
path.clone()
|
||||
} else {
|
||||
// Use the browser runner helper with the real profile
|
||||
// Use self.browser_runner instead of instance()
|
||||
BrowserRunner::instance()
|
||||
.get_browser_executable_path(profile)
|
||||
.map_err(|e| format!("Failed to get Camoufox executable path: {e}"))?
|
||||
.to_string_lossy()
|
||||
.to_string()
|
||||
};
|
||||
|
||||
// Build nodecar command arguments
|
||||
let mut args = vec!["camoufox".to_string(), "start".to_string()];
|
||||
|
||||
// Add profile path - ensure it's an absolute path
|
||||
let absolute_profile_path = std::path::Path::new(profile_path)
|
||||
.canonicalize()
|
||||
.unwrap_or_else(|_| std::path::Path::new(profile_path).to_path_buf())
|
||||
.to_string_lossy()
|
||||
.to_string();
|
||||
args.extend(["--profile-path".to_string(), absolute_profile_path]);
|
||||
|
||||
// Add URL if provided
|
||||
if let Some(url) = url {
|
||||
args.extend(["--url".to_string(), url.to_string()]);
|
||||
}
|
||||
|
||||
// Always add the executable path
|
||||
args.extend(["--executable-path".to_string(), executable_path]);
|
||||
|
||||
// Always add the generated custom config
|
||||
args.extend(["--custom-config".to_string(), custom_config]);
|
||||
|
||||
// Add proxy if provided
|
||||
if let Some(proxy) = &config.proxy {
|
||||
args.extend(["--proxy".to_string(), proxy.clone()]);
|
||||
}
|
||||
|
||||
// Add headless flag for tests
|
||||
if std::env::var("CAMOUFOX_HEADLESS").is_ok() {
|
||||
args.push("--headless".to_string());
|
||||
}
|
||||
|
||||
// Get the nodecar sidecar command
|
||||
let mut sidecar_command = self.get_nodecar_sidecar(app_handle)?;
|
||||
|
||||
// Add all arguments to the sidecar command
|
||||
for arg in &args {
|
||||
sidecar_command = sidecar_command.arg(arg);
|
||||
}
|
||||
|
||||
// Execute nodecar sidecar command
|
||||
log::info!("Executing nodecar command with args: {args:?}");
|
||||
let output = sidecar_command.output().await?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
log::info!("nodecar camoufox failed - stdout: {stdout}, stderr: {stderr}");
|
||||
return Err(format!("nodecar camoufox failed: {stderr}").into());
|
||||
}
|
||||
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
log::info!("nodecar camoufox output: {stdout}");
|
||||
|
||||
// Parse the JSON output
|
||||
let launch_result: CamoufoxLaunchResult = serde_json::from_str(&stdout)
|
||||
.map_err(|e| format!("Failed to parse nodecar output as JSON: {e}\nOutput was: {stdout}"))?;
|
||||
|
||||
// Store the instance
|
||||
let instance = CamoufoxInstance {
|
||||
id: launch_result.id.clone(),
|
||||
process_id: launch_result.processId,
|
||||
profile_path: launch_result.profilePath.clone(),
|
||||
url: launch_result.url.clone(),
|
||||
};
|
||||
|
||||
{
|
||||
let mut inner = self.inner.lock().await;
|
||||
inner.instances.insert(launch_result.id.clone(), instance);
|
||||
}
|
||||
|
||||
Ok(launch_result)
|
||||
}
|
||||
|
||||
/// Stop a Camoufox process by ID
|
||||
pub async fn stop_camoufox(
|
||||
&self,
|
||||
app_handle: &AppHandle,
|
||||
id: &str,
|
||||
) -> Result<bool, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Get the nodecar sidecar command
|
||||
let sidecar_command = self
|
||||
.get_nodecar_sidecar(app_handle)?
|
||||
.arg("camoufox")
|
||||
.arg("stop")
|
||||
.arg("--id")
|
||||
.arg(id);
|
||||
|
||||
// Execute nodecar stop command
|
||||
let output = sidecar_command.output().await?;
|
||||
|
||||
if !output.status.success() {
|
||||
let _stderr = String::from_utf8_lossy(&output.stderr);
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
let result: serde_json::Value = serde_json::from_str(&stdout)
|
||||
.map_err(|e| format!("Failed to parse nodecar stop output: {e}"))?;
|
||||
|
||||
let success = result
|
||||
.get("success")
|
||||
.and_then(|v| v.as_bool())
|
||||
.unwrap_or(false);
|
||||
|
||||
if success {
|
||||
// Remove from our tracking
|
||||
let mut inner = self.inner.lock().await;
|
||||
inner.instances.remove(id);
|
||||
}
|
||||
|
||||
Ok(success)
|
||||
}
|
||||
|
||||
/// Find Camoufox server by profile path (for integration with browser_runner)
|
||||
/// This method first checks in-memory instances, then scans system processes
|
||||
/// to detect Camoufox instances that may have been started before the app restarted.
|
||||
pub async fn find_camoufox_by_profile(
|
||||
&self,
|
||||
profile_path: &str,
|
||||
) -> Result<Option<CamoufoxLaunchResult>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// First clean up any dead instances
|
||||
self.cleanup_dead_instances().await?;
|
||||
|
||||
// Convert paths to canonical form for comparison
|
||||
let target_path = std::path::Path::new(profile_path)
|
||||
.canonicalize()
|
||||
.unwrap_or_else(|_| std::path::Path::new(profile_path).to_path_buf());
|
||||
|
||||
// Check in-memory instances first
|
||||
{
|
||||
let inner = self.inner.lock().await;
|
||||
|
||||
for (id, instance) in inner.instances.iter() {
|
||||
if let Some(instance_profile_path) = &instance.profile_path {
|
||||
let instance_path = std::path::Path::new(instance_profile_path)
|
||||
.canonicalize()
|
||||
.unwrap_or_else(|_| std::path::Path::new(instance_profile_path).to_path_buf());
|
||||
|
||||
if instance_path == target_path {
|
||||
// Verify the server is actually running by checking the process
|
||||
if let Some(process_id) = instance.process_id {
|
||||
if self.is_server_running(process_id).await {
|
||||
// Found running Camoufox instance
|
||||
return Ok(Some(CamoufoxLaunchResult {
|
||||
id: id.clone(),
|
||||
processId: instance.process_id,
|
||||
profilePath: instance.profile_path.clone(),
|
||||
url: instance.url.clone(),
|
||||
}));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If not found in in-memory instances, scan system processes
|
||||
// This handles the case where the app was restarted but Camoufox is still running
|
||||
if let Some((pid, found_profile_path)) = self.find_camoufox_process_by_profile(&target_path) {
|
||||
log::info!(
|
||||
"Found running Camoufox process (PID: {}) for profile path via system scan",
|
||||
pid
|
||||
);
|
||||
|
||||
// Register this instance in our tracking
|
||||
let instance_id = format!("recovered_{}", pid);
|
||||
let mut inner = self.inner.lock().await;
|
||||
inner.instances.insert(
|
||||
instance_id.clone(),
|
||||
CamoufoxInstance {
|
||||
id: instance_id.clone(),
|
||||
process_id: Some(pid),
|
||||
profile_path: Some(found_profile_path.clone()),
|
||||
url: None,
|
||||
},
|
||||
);
|
||||
|
||||
return Ok(Some(CamoufoxLaunchResult {
|
||||
id: instance_id,
|
||||
processId: Some(pid),
|
||||
profilePath: Some(found_profile_path),
|
||||
url: None,
|
||||
}));
|
||||
}
|
||||
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
/// Scan system processes to find a Camoufox process using a specific profile path
|
||||
fn find_camoufox_process_by_profile(
|
||||
&self,
|
||||
target_path: &std::path::Path,
|
||||
) -> Option<(u32, String)> {
|
||||
use sysinfo::{ProcessRefreshKind, RefreshKind, System};
|
||||
|
||||
let system = System::new_with_specifics(
|
||||
RefreshKind::nothing().with_processes(ProcessRefreshKind::everything()),
|
||||
);
|
||||
|
||||
let target_path_str = target_path.to_string_lossy();
|
    for (pid, process) in system.processes() {
      let cmd = process.cmd();
      if cmd.is_empty() {
        continue;
      }

      // Check if this is a Camoufox/Firefox process
      let exe_name = process.name().to_string_lossy().to_lowercase();
      let is_firefox_like = exe_name.contains("firefox")
        || exe_name.contains("camoufox")
        || exe_name.contains("firefox-bin");

      if !is_firefox_like {
        continue;
      }

      // Check if the command line contains our profile path
      for (i, arg) in cmd.iter().enumerate() {
        if let Some(arg_str) = arg.to_str() {
          // Check for -profile argument followed by our path
          if arg_str == "-profile" && i + 1 < cmd.len() {
            if let Some(next_arg) = cmd.get(i + 1).and_then(|a| a.to_str()) {
              let cmd_path = std::path::Path::new(next_arg)
                .canonicalize()
                .unwrap_or_else(|_| std::path::Path::new(next_arg).to_path_buf());

              if cmd_path == target_path {
                return Some((pid.as_u32(), next_arg.to_string()));
              }
            }
          }

          // Also check if the argument contains the profile path directly
          if arg_str.contains(&*target_path_str) {
            return Some((pid.as_u32(), target_path_str.to_string()));
          }
        }
      }
    }

    None
  }

  /// Check if servers are still alive and clean up dead instances
  pub async fn cleanup_dead_instances(
    &self,
  ) -> Result<Vec<String>, Box<dyn std::error::Error + Send + Sync>> {
    let mut dead_instances = Vec::new();
    let mut instances_to_remove = Vec::new();

    {
      let inner = self.inner.lock().await;

      for (id, instance) in inner.instances.iter() {
        if let Some(process_id) = instance.process_id {
          // Check if the process is still alive
          if !self.is_server_running(process_id).await {
            // Process is dead
            // Camoufox instance is no longer running
            dead_instances.push(id.clone());
            instances_to_remove.push(id.clone());
          }
        } else {
          // No process_id means it's likely a dead instance
          // Camoufox instance has no PID, marking as dead
          dead_instances.push(id.clone());
          instances_to_remove.push(id.clone());
        }
      }
    }

    // Remove dead instances
    if !instances_to_remove.is_empty() {
      let mut inner = self.inner.lock().await;
      for id in &instances_to_remove {
        inner.instances.remove(id);
        // Removed dead Camoufox instance
      }
    }

    Ok(dead_instances)
  }

  /// Check if a Camoufox server is running with the given process ID
  async fn is_server_running(&self, process_id: u32) -> bool {
    // Check if the process is still running
    use sysinfo::{Pid, System};

    let system = System::new_all();
    if let Some(process) = system.process(Pid::from(process_id as usize)) {
      // Check if this is actually a Camoufox process by looking at the command line
      let cmd = process.cmd();
      let is_camoufox = cmd.iter().any(|arg| {
        let arg_str = arg.to_str().unwrap_or("");
        arg_str.contains("camoufox-worker") || arg_str.contains("camoufox")
      });

      if is_camoufox {
        // Found running Camoufox process
        return true;
      }
    }

    false
  }
}

impl CamoufoxManager {
  pub async fn launch_camoufox_profile_nodecar(
    &self,
    app_handle: AppHandle,
    profile: BrowserProfile,
    config: CamoufoxConfig,
    url: Option<String>,
  ) -> Result<CamoufoxLaunchResult, String> {
    // Get profile path
    let profiles_dir = self.get_profiles_dir();
    let profile_path = profile.get_profile_data_path(&profiles_dir);
    let profile_path_str = profile_path.to_string_lossy();

    // Check if there's already a running instance for this profile
    if let Ok(Some(existing)) = self.find_camoufox_by_profile(&profile_path_str).await {
      // If there's an existing instance, stop it first to avoid conflicts
      let _ = self.stop_camoufox(&app_handle, &existing.id).await;
    }

    // Clean up any dead instances before launching
    let _ = self.cleanup_dead_instances().await;

    self
      .launch_camoufox(
        &app_handle,
        &profile,
        &profile_path_str,
        &config,
        url.as_deref(),
      )
      .await
      .map_err(|e| format!("Failed to launch Camoufox via nodecar: {e}"))
  }
}

#[cfg(test)]
mod tests {
  use super::*;

  #[test]
  fn test_default_config() {
    let default_config = CamoufoxConfig::default();

    // Verify defaults
    assert_eq!(default_config.geoip, Some(serde_json::Value::Bool(true)));
    assert_eq!(default_config.proxy, None);
    assert_eq!(default_config.fingerprint, None);
    assert_eq!(default_config.randomize_fingerprint_on_launch, None);
    assert_eq!(default_config.os, None);
  }
}

// Global singleton instance
lazy_static::lazy_static! {
  static ref CAMOUFOX_NODECAR_LAUNCHER: CamoufoxManager = CamoufoxManager::new();
}
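// --- Illustrative usage (not part of the diff) ---
// A minimal sketch of how the singleton above could back a Tauri command.
// The `camoufox_launch` command name is an assumption for illustration; the
// actual command surface for this manager is not shown in this hunk.
#[tauri::command]
pub async fn camoufox_launch(
  app_handle: tauri::AppHandle,
  profile: BrowserProfile,
  config: CamoufoxConfig,
  url: Option<String>,
) -> Result<CamoufoxLaunchResult, String> {
  CAMOUFOX_NODECAR_LAUNCHER
    .launch_camoufox_profile_nodecar(app_handle, profile, config, url)
    .await
}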
@@ -1,5 +1,45 @@
use tauri::command;

pub struct DefaultBrowser {}

impl DefaultBrowser {
  fn new() -> Self {
    Self {}
  }

  pub fn instance() -> &'static DefaultBrowser {
    &DEFAULT_BROWSER
  }

  pub async fn is_default_browser(&self) -> Result<bool, String> {
    #[cfg(target_os = "macos")]
    return macos::is_default_browser();

    #[cfg(target_os = "windows")]
    return windows::is_default_browser();

    #[cfg(target_os = "linux")]
    return linux::is_default_browser();

    #[cfg(not(any(target_os = "macos", target_os = "windows", target_os = "linux")))]
    Err("Unsupported platform".to_string())
  }

  pub async fn set_as_default_browser(&self) -> Result<(), String> {
    #[cfg(target_os = "macos")]
    return macos::set_as_default_browser();

    #[cfg(target_os = "windows")]
    return windows::set_as_default_browser();

    #[cfg(target_os = "linux")]
    return linux::set_as_default_browser();

    #[cfg(not(any(target_os = "macos", target_os = "windows", target_os = "linux")))]
    Err("Unsupported platform".to_string())
  }
}

#[cfg(target_os = "macos")]
mod macos {
  use core_foundation::base::OSStatus;
@@ -158,7 +198,7 @@ mod windows {
      app_key
        .set_value(
          "ApplicationDescription",
          &"Donut Browser - Simple Yet Powerful Browser Orchestrator",
          &"Donut Browser - Simple Yet Powerful Anti-Detect Browser",
        )
        .map_err(|e| format!("Failed to set ApplicationDescription: {}", e))?;

@@ -174,7 +214,7 @@ mod windows {
      capabilities
        .set_value(
          "ApplicationDescription",
          &"Donut Browser - Simple Yet Powerful Browser Orchestrator",
          &"Donut Browser - Simple Yet Powerful Anti-Detect Browser",
        )
        .map_err(|e| format!("Failed to set Capabilities description: {}", e))?;
@@ -482,159 +522,19 @@ mod linux {
  }
}

// Global singleton instance
lazy_static::lazy_static! {
  static ref DEFAULT_BROWSER: DefaultBrowser = DefaultBrowser::new();
}

#[command]
pub async fn is_default_browser() -> Result<bool, String> {
  #[cfg(target_os = "macos")]
  return macos::is_default_browser();

  #[cfg(target_os = "windows")]
  return windows::is_default_browser();

  #[cfg(target_os = "linux")]
  return linux::is_default_browser();

  #[cfg(not(any(target_os = "macos", target_os = "windows", target_os = "linux")))]
  Err("Unsupported platform".to_string())
  let default_browser = DefaultBrowser::instance();
  default_browser.is_default_browser().await
}

#[command]
pub async fn set_as_default_browser() -> Result<(), String> {
  #[cfg(target_os = "macos")]
  return macos::set_as_default_browser();

  #[cfg(target_os = "windows")]
  return windows::set_as_default_browser();

  #[cfg(target_os = "linux")]
  return linux::set_as_default_browser();

  #[cfg(not(any(target_os = "macos", target_os = "windows", target_os = "linux")))]
  Err("Unsupported platform".to_string())
}

#[tauri::command]
pub async fn open_url_with_profile(
  app_handle: tauri::AppHandle,
  profile_name: String,
  url: String,
) -> Result<(), String> {
  use crate::browser_runner::BrowserRunner;

  let runner = BrowserRunner::new();

  // Get the profile by name
  let profiles = runner
    .list_profiles()
    .map_err(|e| format!("Failed to list profiles: {e}"))?;
  let profile = profiles
    .into_iter()
    .find(|p| p.name == profile_name)
    .ok_or_else(|| format!("Profile '{profile_name}' not found"))?;

  println!("Opening URL '{url}' with profile '{profile_name}'");

  // Use launch_or_open_url which handles both launching new instances and opening in existing ones
  runner
    .launch_or_open_url(app_handle, &profile, Some(url.clone()), None)
    .await
    .map_err(|e| {
      println!("Failed to open URL with profile '{profile_name}': {e}");
      format!("Failed to open URL with profile: {e}")
    })?;

  println!("Successfully opened URL '{url}' with profile '{profile_name}'");
  Ok(())
}

#[tauri::command]
pub async fn smart_open_url(
  app_handle: tauri::AppHandle,
  url: String,
  _is_startup: Option<bool>,
) -> Result<String, String> {
  use crate::browser_runner::BrowserRunner;

  let runner = BrowserRunner::new();

  // Get all profiles
  let profiles = runner
    .list_profiles()
    .map_err(|e| format!("Failed to list profiles: {e}"))?;

  if profiles.is_empty() {
    return Err("no_profiles".to_string());
  }

  println!(
    "URL opening - Total profiles: {}, checking for running profiles",
    profiles.len()
  );

  // Check for running profiles and find the first one that can handle URLs
  for profile in &profiles {
    // Check if this profile is running
    let is_running = runner
      .check_browser_status(app_handle.clone(), profile)
      .await
      .unwrap_or(false);

    if is_running {
      println!(
        "Found running profile '{}', attempting to open URL",
        profile.name
      );

      // For TOR browser: Check if any other TOR browser is running
      if profile.browser == "tor-browser" {
        let mut other_tor_running = false;
        for p in &profiles {
          if p.browser == "tor-browser"
            && p.name != profile.name
            && runner
              .check_browser_status(app_handle.clone(), p)
              .await
              .unwrap_or(false)
          {
            other_tor_running = true;
            break;
          }
        }

        if other_tor_running {
          continue; // Skip this one, can't have multiple TOR instances
        }
      }

      // For Mullvad browser: skip if running (can't open URLs in running Mullvad)
      if profile.browser == "mullvad-browser" {
        continue;
      }

      // Try to open the URL with this running profile
      match runner
        .launch_or_open_url(app_handle.clone(), profile, Some(url.clone()), None)
        .await
      {
        Ok(_) => {
          println!(
            "Successfully opened URL '{}' with running profile '{}'",
            url, profile.name
          );
          return Ok(format!("opened_with_profile:{}", profile.name));
        }
        Err(e) => {
          println!(
            "Failed to open URL with running profile '{}': {}",
            profile.name, e
          );
          // Continue to try other profiles or show selector
        }
      }
    }
  }

  println!("No suitable running profiles found, showing profile selector");

  // No suitable running profile found, show the profile selector
  Err("show_selector".to_string())
  let default_browser = DefaultBrowser::instance();
  default_browser.set_as_default_browser().await
}
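// --- Illustrative usage (not part of the diff) ---
// A minimal sketch, assuming the commands above are registered with the Tauri
// builder elsewhere in the app; the actual registration site is not part of
// this compare, and this helper name is hypothetical.
#[allow(dead_code)]
fn register_default_browser_commands(builder: tauri::Builder<tauri::Wry>) -> tauri::Builder<tauri::Wry> {
  builder.invoke_handler(tauri::generate_handler![
    is_default_browser,
    set_as_default_browser,
    open_url_with_profile,
    smart_open_url
  ])
}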
@@ -1,985 +0,0 @@
|
||||
use reqwest::Client;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fs::File;
|
||||
use std::io;
|
||||
use std::path::{Path, PathBuf};
|
||||
use tauri::Emitter;
|
||||
|
||||
use crate::api_client::ApiClient;
|
||||
use crate::browser::BrowserType;
|
||||
use crate::browser_version_service::DownloadInfo;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct DownloadProgress {
|
||||
pub browser: String,
|
||||
pub version: String,
|
||||
pub downloaded_bytes: u64,
|
||||
pub total_bytes: Option<u64>,
|
||||
pub percentage: f64,
|
||||
pub speed_bytes_per_sec: f64,
|
||||
pub eta_seconds: Option<f64>,
|
||||
pub stage: String, // "downloading", "extracting", "verifying"
|
||||
}
|
||||
|
||||
pub struct Downloader {
|
||||
client: Client,
|
||||
api_client: ApiClient,
|
||||
}
|
||||
|
||||
impl Downloader {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
client: Client::new(),
|
||||
api_client: ApiClient::new(),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn new_with_api_client(api_client: ApiClient) -> Self {
|
||||
Self {
|
||||
client: Client::new(),
|
||||
api_client,
|
||||
}
|
||||
}
|
||||
|
||||
/// Resolve the actual download URL for browsers that need dynamic asset resolution
|
||||
pub async fn resolve_download_url(
|
||||
&self,
|
||||
browser_type: BrowserType,
|
||||
version: &str,
|
||||
download_info: &DownloadInfo,
|
||||
) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
|
||||
match browser_type {
|
||||
BrowserType::Brave => {
|
||||
// For Brave, we need to find the actual platform-specific asset
|
||||
let releases = self
|
||||
.api_client
|
||||
.fetch_brave_releases_with_caching(true)
|
||||
.await?;
|
||||
|
||||
// Find the release with the matching version
|
||||
let release = releases
|
||||
.iter()
|
||||
.find(|r| {
|
||||
r.tag_name == version || r.tag_name == format!("v{}", version.trim_start_matches('v'))
|
||||
})
|
||||
.ok_or(format!("Brave version {version} not found"))?;
|
||||
|
||||
// Get platform and architecture info
|
||||
let (os, arch) = Self::get_platform_info();
|
||||
|
||||
// Find the appropriate asset based on platform and architecture
|
||||
let asset_url = self
|
||||
.find_brave_asset(&release.assets, &os, &arch)
|
||||
.ok_or(format!(
|
||||
"No compatible asset found for Brave version {version} on {os}/{arch}"
|
||||
))?;
|
||||
|
||||
Ok(asset_url)
|
||||
}
|
||||
BrowserType::Zen => {
|
||||
// For Zen, verify the asset exists and handle different naming patterns
|
||||
let releases = self
|
||||
.api_client
|
||||
.fetch_zen_releases_with_caching(true)
|
||||
.await?;
|
||||
|
||||
let release = releases
|
||||
.iter()
|
||||
.find(|r| r.tag_name == version)
|
||||
.ok_or(format!("Zen version {version} not found"))?;
|
||||
|
||||
// Get platform and architecture info
|
||||
let (os, arch) = Self::get_platform_info();
|
||||
|
||||
// Find the appropriate asset
|
||||
let asset_url = self
|
||||
.find_zen_asset(&release.assets, &os, &arch)
|
||||
.ok_or(format!(
|
||||
"No compatible asset found for Zen version {version} on {os}/{arch}"
|
||||
))?;
|
||||
|
||||
Ok(asset_url)
|
||||
}
|
||||
BrowserType::MullvadBrowser => {
|
||||
// For Mullvad, verify the asset exists
|
||||
let releases = self
|
||||
.api_client
|
||||
.fetch_mullvad_releases_with_caching(true)
|
||||
.await?;
|
||||
|
||||
let release = releases
|
||||
.iter()
|
||||
.find(|r| r.tag_name == version)
|
||||
.ok_or(format!("Mullvad version {version} not found"))?;
|
||||
|
||||
// Get platform and architecture info
|
||||
let (os, arch) = Self::get_platform_info();
|
||||
|
||||
// Find the appropriate asset
|
||||
let asset_url = self
|
||||
.find_mullvad_asset(&release.assets, &os, &arch)
|
||||
.ok_or(format!(
|
||||
"No compatible asset found for Mullvad version {version} on {os}/{arch}"
|
||||
))?;
|
||||
|
||||
Ok(asset_url)
|
||||
}
|
||||
_ => {
|
||||
// For other browsers, use the provided URL
|
||||
Ok(download_info.url.clone())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Get platform and architecture information
|
||||
fn get_platform_info() -> (String, String) {
|
||||
let os = if cfg!(target_os = "windows") {
|
||||
"windows"
|
||||
} else if cfg!(target_os = "linux") {
|
||||
"linux"
|
||||
} else if cfg!(target_os = "macos") {
|
||||
"macos"
|
||||
} else {
|
||||
"unknown"
|
||||
};
|
||||
|
||||
let arch = if cfg!(target_arch = "x86_64") {
|
||||
"x64"
|
||||
} else if cfg!(target_arch = "aarch64") {
|
||||
"arm64"
|
||||
} else {
|
||||
"unknown"
|
||||
};
|
||||
|
||||
(os.to_string(), arch.to_string())
|
||||
}
|
||||
|
||||
/// Find the appropriate Brave asset for the current platform and architecture
|
||||
fn find_brave_asset(
|
||||
&self,
|
||||
assets: &[crate::browser::GithubAsset],
|
||||
os: &str,
|
||||
arch: &str,
|
||||
) -> Option<String> {
|
||||
// Brave asset naming patterns:
|
||||
// Windows: BraveBrowserStandaloneNightlySetup.exe, BraveBrowserStandaloneSilentNightlySetup.exe
|
||||
// macOS: Brave-Browser-Nightly-universal.dmg, Brave-Browser-Nightly-universal.pkg
|
||||
// Linux: brave-browser-1.79.119-linux-arm64.zip, brave-browser-1.79.119-linux-amd64.zip
|
||||
|
||||
let asset = match os {
|
||||
"windows" => {
|
||||
// For Windows, look for standalone setup EXE (not the auto-updater one)
|
||||
assets
|
||||
.iter()
|
||||
.find(|asset| {
|
||||
let name = asset.name.to_lowercase();
|
||||
name.contains("standalone") && name.ends_with(".exe") && !name.contains("silent")
|
||||
})
|
||||
.or_else(|| {
|
||||
// Fallback to any EXE if standalone not found
|
||||
assets.iter().find(|asset| asset.name.ends_with(".exe"))
|
||||
})
|
||||
}
|
||||
"macos" => {
|
||||
// For macOS, prefer universal DMG
|
||||
assets
|
||||
.iter()
|
||||
.find(|asset| {
|
||||
let name = asset.name.to_lowercase();
|
||||
name.contains("universal") && name.ends_with(".dmg")
|
||||
})
|
||||
.or_else(|| {
|
||||
// Fallback to any DMG
|
||||
assets.iter().find(|asset| asset.name.ends_with(".dmg"))
|
||||
})
|
||||
}
|
||||
"linux" => {
|
||||
// For Linux, be strict about architecture matching - same logic as has_compatible_brave_asset
|
||||
let arch_pattern = if arch == "arm64" { "arm64" } else { "amd64" };
|
||||
|
||||
assets.iter().find(|asset| {
|
||||
let name = asset.name.to_lowercase();
|
||||
name.contains("linux") && name.contains(arch_pattern) && name.ends_with(".zip")
|
||||
})
|
||||
}
|
||||
_ => None,
|
||||
};
|
||||
|
||||
asset.map(|a| a.browser_download_url.clone())
|
||||
}
|
||||
|
||||
/// Find the appropriate Zen asset for the current platform and architecture
|
||||
fn find_zen_asset(
|
||||
&self,
|
||||
assets: &[crate::browser::GithubAsset],
|
||||
os: &str,
|
||||
arch: &str,
|
||||
) -> Option<String> {
|
||||
// Zen asset naming patterns:
|
||||
// Windows: zen.installer.exe, zen.installer-arm64.exe
|
||||
// macOS: zen.macos-universal.dmg
|
||||
// Linux: zen.linux-x86_64.tar.xz, zen.linux-aarch64.tar.xz, zen-x86_64.AppImage, zen-aarch64.AppImage
|
||||
|
||||
let asset = match (os, arch) {
|
||||
("windows", "x64") => assets
|
||||
.iter()
|
||||
.find(|asset| asset.name == "zen.installer.exe"),
|
||||
("windows", "arm64") => assets
|
||||
.iter()
|
||||
.find(|asset| asset.name == "zen.installer-arm64.exe"),
|
||||
("macos", _) => assets
|
||||
.iter()
|
||||
.find(|asset| asset.name == "zen.macos-universal.dmg"),
|
||||
("linux", "x64") => {
|
||||
// Prefer tar.xz, fallback to AppImage
|
||||
assets
|
||||
.iter()
|
||||
.find(|asset| asset.name == "zen.linux-x86_64.tar.xz")
|
||||
.or_else(|| {
|
||||
assets
|
||||
.iter()
|
||||
.find(|asset| asset.name == "zen-x86_64.AppImage")
|
||||
})
|
||||
}
|
||||
("linux", "arm64") => {
|
||||
// Prefer tar.xz, fallback to AppImage
|
||||
assets
|
||||
.iter()
|
||||
.find(|asset| asset.name == "zen.linux-aarch64.tar.xz")
|
||||
.or_else(|| {
|
||||
assets
|
||||
.iter()
|
||||
.find(|asset| asset.name == "zen-aarch64.AppImage")
|
||||
})
|
||||
}
|
||||
_ => None,
|
||||
};
|
||||
|
||||
asset.map(|a| a.browser_download_url.clone())
|
||||
}
|
||||
|
||||
/// Find the appropriate Mullvad asset for the current platform and architecture
|
||||
fn find_mullvad_asset(
|
||||
&self,
|
||||
assets: &[crate::browser::GithubAsset],
|
||||
os: &str,
|
||||
arch: &str,
|
||||
) -> Option<String> {
|
||||
// Mullvad asset naming patterns:
|
||||
// Windows: mullvad-browser-windows-x86_64-VERSION.exe
|
||||
// macOS: mullvad-browser-macos-VERSION.dmg
|
||||
// Linux: mullvad-browser-x86_64-VERSION.tar.xz
|
||||
|
||||
let asset = match (os, arch) {
|
||||
("windows", "x64") => assets.iter().find(|asset| {
|
||||
asset.name.contains("windows")
|
||||
&& asset.name.contains("x86_64")
|
||||
&& asset.name.ends_with(".exe")
|
||||
}),
|
||||
("windows", "arm64") => {
|
||||
// Mullvad doesn't support ARM64 on Windows
|
||||
None
|
||||
}
|
||||
("macos", _) => assets
|
||||
.iter()
|
||||
.find(|asset| asset.name.contains("macos") && asset.name.ends_with(".dmg")),
|
||||
("linux", "x64") => assets.iter().find(|asset| {
|
||||
asset.name.contains("x86_64")
|
||||
&& asset.name.ends_with(".tar.xz")
|
||||
&& !asset.name.contains("windows")
|
||||
}),
|
||||
("linux", "arm64") => {
|
||||
// Mullvad doesn't support ARM64 on Linux
|
||||
None
|
||||
}
|
||||
_ => None,
|
||||
};
|
||||
|
||||
asset.map(|a| a.browser_download_url.clone())
|
||||
}
|
||||
|
||||
pub async fn download_browser<R: tauri::Runtime>(
|
||||
&self,
|
||||
app_handle: &tauri::AppHandle<R>,
|
||||
browser_type: BrowserType,
|
||||
version: &str,
|
||||
download_info: &DownloadInfo,
|
||||
dest_path: &Path,
|
||||
) -> Result<PathBuf, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let file_path = dest_path.join(&download_info.filename);
|
||||
|
||||
// Resolve the actual download URL
|
||||
let download_url = self
|
||||
.resolve_download_url(browser_type.clone(), version, download_info)
|
||||
.await?;
|
||||
|
||||
// Check if this is a twilight release for special handling
|
||||
let is_twilight =
|
||||
browser_type == BrowserType::Zen && version.to_lowercase().contains("twilight");
|
||||
|
||||
// Emit initial progress
|
||||
let progress = DownloadProgress {
|
||||
browser: browser_type.as_str().to_string(),
|
||||
version: version.to_string(),
|
||||
downloaded_bytes: 0,
|
||||
total_bytes: None,
|
||||
percentage: 0.0,
|
||||
speed_bytes_per_sec: 0.0,
|
||||
eta_seconds: None,
|
||||
stage: if is_twilight {
|
||||
"downloading (twilight rolling release)".to_string()
|
||||
} else {
|
||||
"downloading".to_string()
|
||||
},
|
||||
};
|
||||
|
||||
let _ = app_handle.emit("download-progress", &progress);
|
||||
|
||||
// Start download
|
||||
let response = self
|
||||
.client
|
||||
.get(&download_url)
|
||||
.header("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36")
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
// Check if the response is successful
|
||||
if !response.status().is_success() {
|
||||
return Err(format!("Download failed with status: {}", response.status()).into());
|
||||
}
|
||||
|
||||
let total_size = response.content_length();
|
||||
let mut downloaded = 0u64;
|
||||
let start_time = std::time::Instant::now();
|
||||
let mut last_update = start_time;
|
||||
|
||||
let mut file = File::create(&file_path)?;
|
||||
let mut stream = response.bytes_stream();
|
||||
|
||||
use futures_util::StreamExt;
|
||||
while let Some(chunk) = stream.next().await {
|
||||
let chunk = chunk?;
|
||||
io::copy(&mut chunk.as_ref(), &mut file)?;
|
||||
downloaded += chunk.len() as u64;
|
||||
|
||||
let now = std::time::Instant::now();
|
||||
// Update progress every 100ms to avoid too many events
|
||||
if now.duration_since(last_update).as_millis() >= 100 {
|
||||
let elapsed = start_time.elapsed().as_secs_f64();
|
||||
let speed = if elapsed > 0.0 {
|
||||
downloaded as f64 / elapsed
|
||||
} else {
|
||||
0.0
|
||||
};
|
||||
let percentage = if let Some(total) = total_size {
|
||||
(downloaded as f64 / total as f64) * 100.0
|
||||
} else {
|
||||
0.0
|
||||
};
|
||||
let eta = if speed > 0.0 {
|
||||
total_size.map(|total| (total - downloaded) as f64 / speed)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let stage_description = if is_twilight {
|
||||
"downloading (twilight rolling release)".to_string()
|
||||
} else {
|
||||
"downloading".to_string()
|
||||
};
|
||||
|
||||
let progress = DownloadProgress {
|
||||
browser: browser_type.as_str().to_string(),
|
||||
version: version.to_string(),
|
||||
downloaded_bytes: downloaded,
|
||||
total_bytes: total_size,
|
||||
percentage,
|
||||
speed_bytes_per_sec: speed,
|
||||
eta_seconds: eta,
|
||||
stage: stage_description,
|
||||
};
|
||||
|
||||
let _ = app_handle.emit("download-progress", &progress);
|
||||
last_update = now;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(file_path)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::api_client::ApiClient;
|
||||
use crate::browser::BrowserType;
|
||||
use crate::browser_version_service::DownloadInfo;
|
||||
|
||||
use tempfile::TempDir;
|
||||
use wiremock::matchers::{method, path, query_param};
|
||||
use wiremock::{Mock, MockServer, ResponseTemplate};
|
||||
|
||||
async fn setup_mock_server() -> MockServer {
|
||||
MockServer::start().await
|
||||
}
|
||||
|
||||
fn create_test_api_client(server: &MockServer) -> ApiClient {
|
||||
let base_url = server.uri();
|
||||
ApiClient::new_with_base_urls(
|
||||
base_url.clone(), // firefox_api_base
|
||||
base_url.clone(), // firefox_dev_api_base
|
||||
base_url.clone(), // github_api_base
|
||||
base_url.clone(), // chromium_api_base
|
||||
base_url.clone(), // tor_archive_base
|
||||
)
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_resolve_brave_download_url() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let mock_response = r#"[
|
||||
{
|
||||
"tag_name": "v1.81.9",
|
||||
"name": "Brave Release 1.81.9",
|
||||
"prerelease": false,
|
||||
"published_at": "2024-01-15T10:00:00Z",
|
||||
"assets": [
|
||||
{
|
||||
"name": "brave-v1.81.9-universal.dmg",
|
||||
"browser_download_url": "https://example.com/brave-1.81.9-universal.dmg",
|
||||
"size": 200000000
|
||||
}
|
||||
]
|
||||
}
|
||||
]"#;
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/repos/brave/brave-browser/releases"))
|
||||
.and(query_param("per_page", "100"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(mock_response)
|
||||
.insert_header("content-type", "application/json"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "placeholder".to_string(),
|
||||
filename: "brave-test.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::Brave, "v1.81.9", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let url = result.unwrap();
|
||||
assert_eq!(url, "https://example.com/brave-1.81.9-universal.dmg");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_resolve_zen_download_url() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let mock_response = r#"[
|
||||
{
|
||||
"tag_name": "1.11b",
|
||||
"name": "Zen Browser 1.11b",
|
||||
"prerelease": false,
|
||||
"published_at": "2024-01-15T10:00:00Z",
|
||||
"assets": [
|
||||
{
|
||||
"name": "zen.macos-universal.dmg",
|
||||
"browser_download_url": "https://example.com/zen-1.11b-universal.dmg",
|
||||
"size": 120000000
|
||||
}
|
||||
]
|
||||
}
|
||||
]"#;
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/repos/zen-browser/desktop/releases"))
|
||||
.and(query_param("per_page", "100"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(mock_response)
|
||||
.insert_header("content-type", "application/json"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "placeholder".to_string(),
|
||||
filename: "zen-test.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::Zen, "1.11b", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let url = result.unwrap();
|
||||
assert_eq!(url, "https://example.com/zen-1.11b-universal.dmg");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_resolve_mullvad_download_url() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let mock_response = r#"[
|
||||
{
|
||||
"tag_name": "14.5a6",
|
||||
"name": "Mullvad Browser 14.5a6",
|
||||
"prerelease": true,
|
||||
"published_at": "2024-01-15T10:00:00Z",
|
||||
"assets": [
|
||||
{
|
||||
"name": "mullvad-browser-macos-14.5a6.dmg",
|
||||
"browser_download_url": "https://example.com/mullvad-14.5a6.dmg",
|
||||
"size": 100000000
|
||||
}
|
||||
]
|
||||
}
|
||||
]"#;
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/repos/mullvad/mullvad-browser/releases"))
|
||||
.and(query_param("per_page", "100"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(mock_response)
|
||||
.insert_header("content-type", "application/json"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "placeholder".to_string(),
|
||||
filename: "mullvad-test.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::MullvadBrowser, "14.5a6", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let url = result.unwrap();
|
||||
assert_eq!(url, "https://example.com/mullvad-14.5a6.dmg");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_resolve_firefox_download_url() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "https://download.mozilla.org/?product=firefox-139.0&os=osx&lang=en-US".to_string(),
|
||||
filename: "firefox-test.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::Firefox, "139.0", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let url = result.unwrap();
|
||||
assert_eq!(url, download_info.url);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_resolve_chromium_download_url() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "https://commondatastorage.googleapis.com/chromium-browser-snapshots/Mac/1465660/chrome-mac.zip".to_string(),
|
||||
filename: "chromium-test.zip".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::Chromium, "1465660", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let url = result.unwrap();
|
||||
assert_eq!(url, download_info.url);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_resolve_tor_download_url() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "https://archive.torproject.org/tor-package-archive/torbrowser/14.0.4/tor-browser-macos-14.0.4.dmg".to_string(),
|
||||
filename: "tor-test.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::TorBrowser, "14.0.4", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let url = result.unwrap();
|
||||
assert_eq!(url, download_info.url);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_resolve_brave_version_not_found() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let mock_response = r#"[
|
||||
{
|
||||
"tag_name": "v1.81.8",
|
||||
"name": "Brave Release 1.81.8",
|
||||
"prerelease": false,
|
||||
"published_at": "2024-01-15T10:00:00Z",
|
||||
"assets": [
|
||||
{
|
||||
"name": "brave-v1.81.8-universal.dmg",
|
||||
"browser_download_url": "https://example.com/brave-1.81.8-universal.dmg",
|
||||
"size": 200000000
|
||||
}
|
||||
]
|
||||
}
|
||||
]"#;
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/repos/brave/brave-browser/releases"))
|
||||
.and(query_param("per_page", "100"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(mock_response)
|
||||
.insert_header("content-type", "application/json"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "placeholder".to_string(),
|
||||
filename: "brave-test.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::Brave, "v1.81.9", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_err());
|
||||
assert!(result
|
||||
.unwrap_err()
|
||||
.to_string()
|
||||
.contains("Brave version v1.81.9 not found"));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_resolve_zen_asset_not_found() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let mock_response = r#"[
|
||||
{
|
||||
"tag_name": "1.11b",
|
||||
"name": "Zen Browser 1.11b",
|
||||
"prerelease": false,
|
||||
"published_at": "2024-01-15T10:00:00Z",
|
||||
"assets": [
|
||||
{
|
||||
"name": "zen.linux-universal.tar.bz2",
|
||||
"browser_download_url": "https://example.com/zen-1.11b-linux.tar.bz2",
|
||||
"size": 150000000
|
||||
}
|
||||
]
|
||||
}
|
||||
]"#;
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/repos/zen-browser/desktop/releases"))
|
||||
.and(query_param("per_page", "100"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(mock_response)
|
||||
.insert_header("content-type", "application/json"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "placeholder".to_string(),
|
||||
filename: "zen-test.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::Zen, "1.11b", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_err());
|
||||
assert!(result
|
||||
.unwrap_err()
|
||||
.to_string()
|
||||
.contains("No compatible asset found"));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_download_browser_with_progress() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
// Create a temporary directory for the test
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let dest_path = temp_dir.path();
|
||||
|
||||
// Create test file content (simulating a small download)
|
||||
let test_content = b"This is a test file content for download simulation";
|
||||
|
||||
// Mock the download endpoint
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/test-download"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_bytes(test_content)
|
||||
.insert_header("content-length", test_content.len().to_string())
|
||||
.insert_header("content-type", "application/octet-stream"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: format!("{}/test-download", server.uri()),
|
||||
filename: "test-file.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
// Create a mock app handle for testing
|
||||
let app = tauri::test::mock_app();
|
||||
let app_handle = app.handle().clone();
|
||||
|
||||
let result = downloader
|
||||
.download_browser(
|
||||
&app_handle,
|
||||
BrowserType::Firefox,
|
||||
"139.0",
|
||||
&download_info,
|
||||
dest_path,
|
||||
)
|
||||
.await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let downloaded_file = result.unwrap();
|
||||
assert!(downloaded_file.exists());
|
||||
|
||||
// Verify file content
|
||||
let downloaded_content = std::fs::read(&downloaded_file).unwrap();
|
||||
assert_eq!(downloaded_content, test_content);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_download_browser_network_error() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let dest_path = temp_dir.path();
|
||||
|
||||
// Mock a 404 response
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/missing-file"))
|
||||
.respond_with(ResponseTemplate::new(404))
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: format!("{}/missing-file", server.uri()),
|
||||
filename: "missing-file.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let app = tauri::test::mock_app();
|
||||
let app_handle = app.handle().clone();
|
||||
|
||||
let result = downloader
|
||||
.download_browser(
|
||||
&app_handle,
|
||||
BrowserType::Firefox,
|
||||
"139.0",
|
||||
&download_info,
|
||||
dest_path,
|
||||
)
|
||||
.await;
|
||||
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_resolve_mullvad_asset_not_found() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let mock_response = r#"[
|
||||
{
|
||||
"tag_name": "14.5a6",
|
||||
"name": "Mullvad Browser 14.5a6",
|
||||
"prerelease": true,
|
||||
"published_at": "2024-01-15T10:00:00Z",
|
||||
"assets": [
|
||||
{
|
||||
"name": "mullvad-browser-linux-14.5a6.tar.xz",
|
||||
"browser_download_url": "https://example.com/mullvad-14.5a6.tar.xz",
|
||||
"size": 80000000
|
||||
}
|
||||
]
|
||||
}
|
||||
]"#;
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/repos/mullvad/mullvad-browser/releases"))
|
||||
.and(query_param("per_page", "100"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(mock_response)
|
||||
.insert_header("content-type", "application/json"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "placeholder".to_string(),
|
||||
filename: "mullvad-test.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::MullvadBrowser, "14.5a6", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_err());
|
||||
assert!(result
|
||||
.unwrap_err()
|
||||
.to_string()
|
||||
.contains("No compatible asset found"));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_brave_version_with_v_prefix() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let mock_response = r#"[
|
||||
{
|
||||
"tag_name": "v1.81.9",
|
||||
"name": "Brave Release 1.81.9",
|
||||
"prerelease": false,
|
||||
"published_at": "2024-01-15T10:00:00Z",
|
||||
"assets": [
|
||||
{
|
||||
"name": "brave-v1.81.9-universal.dmg",
|
||||
"browser_download_url": "https://example.com/brave-1.81.9-universal.dmg",
|
||||
"size": 200000000
|
||||
}
|
||||
]
|
||||
}
|
||||
]"#;
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/repos/brave/brave-browser/releases"))
|
||||
.and(query_param("per_page", "100"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(mock_response)
|
||||
.insert_header("content-type", "application/json"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "placeholder".to_string(),
|
||||
filename: "brave-test.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
// Test with version without v prefix
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::Brave, "1.81.9", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let url = result.unwrap();
|
||||
assert_eq!(url, "https://example.com/brave-1.81.9-universal.dmg");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_download_browser_chunked_response() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let dest_path = temp_dir.path();
|
||||
|
||||
// Create larger test content to simulate chunked transfer
|
||||
let test_content = vec![42u8; 1024]; // 1KB of data
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/chunked-download"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_bytes(test_content.clone())
|
||||
.insert_header("content-length", test_content.len().to_string())
|
||||
.insert_header("content-type", "application/octet-stream"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: format!("{}/chunked-download", server.uri()),
|
||||
filename: "chunked-file.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let app = tauri::test::mock_app();
|
||||
let app_handle = app.handle().clone();
|
||||
|
||||
let result = downloader
|
||||
.download_browser(
|
||||
&app_handle,
|
||||
BrowserType::Chromium,
|
||||
"1465660",
|
||||
&download_info,
|
||||
dest_path,
|
||||
)
|
||||
.await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let downloaded_file = result.unwrap();
|
||||
assert!(downloaded_file.exists());
|
||||
|
||||
let downloaded_content = std::fs::read(&downloaded_file).unwrap();
|
||||
assert_eq!(downloaded_content.len(), test_content.len());
|
||||
}
|
||||
}
|
||||
@@ -1,354 +0,0 @@
|
||||
use directories::BaseDirs;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct DownloadedBrowserInfo {
|
||||
pub browser: String,
|
||||
pub version: String,
|
||||
pub download_date: u64,
|
||||
pub file_path: PathBuf,
|
||||
pub verified: bool,
|
||||
pub actual_version: Option<String>, // For browsers like Chromium where we track the actual version
|
||||
pub file_size: Option<u64>, // For tracking file size changes (useful for rolling releases)
|
||||
#[serde(default)] // Add default value (false) for backwards compatibility
|
||||
pub is_rolling_release: bool, // True for Zen's twilight releases and other rolling releases
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Default)]
|
||||
pub struct DownloadedBrowsersRegistry {
|
||||
pub browsers: HashMap<String, HashMap<String, DownloadedBrowserInfo>>, // browser -> version -> info
|
||||
}
|
||||
|
||||
impl DownloadedBrowsersRegistry {
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
|
||||
pub fn load() -> Result<Self, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let registry_path = Self::get_registry_path()?;
|
||||
|
||||
if !registry_path.exists() {
|
||||
return Ok(Self::new());
|
||||
}
|
||||
|
||||
let content = fs::read_to_string(®istry_path)?;
|
||||
let registry: DownloadedBrowsersRegistry = serde_json::from_str(&content)?;
|
||||
Ok(registry)
|
||||
}
|
||||
|
||||
pub fn save(&self) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let registry_path = Self::get_registry_path()?;
|
||||
|
||||
// Ensure parent directory exists
|
||||
if let Some(parent) = registry_path.parent() {
|
||||
fs::create_dir_all(parent)?;
|
||||
}
|
||||
|
||||
let content = serde_json::to_string_pretty(self)?;
|
||||
fs::write(®istry_path, content)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_registry_path() -> Result<PathBuf, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let base_dirs = BaseDirs::new().ok_or("Failed to get base directories")?;
|
||||
let mut path = base_dirs.data_local_dir().to_path_buf();
|
||||
path.push(if cfg!(debug_assertions) {
|
||||
"DonutBrowserDev"
|
||||
} else {
|
||||
"DonutBrowser"
|
||||
});
|
||||
path.push("data");
|
||||
path.push("downloaded_browsers.json");
|
||||
Ok(path)
|
||||
}
|
||||
|
||||
pub fn add_browser(&mut self, info: DownloadedBrowserInfo) {
|
||||
self
|
||||
.browsers
|
||||
.entry(info.browser.clone())
|
||||
.or_default()
|
||||
.insert(info.version.clone(), info);
|
||||
}
|
||||
|
||||
pub fn remove_browser(&mut self, browser: &str, version: &str) -> Option<DownloadedBrowserInfo> {
|
||||
self.browsers.get_mut(browser)?.remove(version)
|
||||
}
|
||||
|
||||
pub fn is_browser_downloaded(&self, browser: &str, version: &str) -> bool {
|
||||
self
|
||||
.browsers
|
||||
.get(browser)
|
||||
.and_then(|versions| versions.get(version))
|
||||
.map(|info| info.verified)
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
pub fn get_downloaded_versions(&self, browser: &str) -> Vec<String> {
|
||||
self
|
||||
.browsers
|
||||
.get(browser)
|
||||
.map(|versions| {
|
||||
versions
|
||||
.iter()
|
||||
.filter(|(_, info)| info.verified)
|
||||
.map(|(version, _)| version.clone())
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
pub fn mark_download_started(&mut self, browser: &str, version: &str, file_path: PathBuf) {
|
||||
let is_rolling = Self::is_rolling_release(browser, version);
|
||||
let info = DownloadedBrowserInfo {
|
||||
browser: browser.to_string(),
|
||||
version: version.to_string(),
|
||||
download_date: std::time::SystemTime::now()
|
||||
.duration_since(std::time::UNIX_EPOCH)
|
||||
.unwrap_or_default()
|
||||
.as_secs(),
|
||||
file_path,
|
||||
verified: false,
|
||||
actual_version: None,
|
||||
file_size: None,
|
||||
is_rolling_release: is_rolling,
|
||||
};
|
||||
self.add_browser(info);
|
||||
}
|
||||
|
||||
pub fn mark_download_completed_with_actual_version(
|
||||
&mut self,
|
||||
browser: &str,
|
||||
version: &str,
|
||||
actual_version: Option<String>,
|
||||
) -> Result<(), String> {
|
||||
if let Some(info) = self
|
||||
.browsers
|
||||
.get_mut(browser)
|
||||
.and_then(|versions| versions.get_mut(version))
|
||||
{
|
||||
info.verified = true;
|
||||
info.actual_version = actual_version;
|
||||
Ok(())
|
||||
} else {
|
||||
Err(format!("Browser {browser}:{version} not found in registry"))
|
||||
}
|
||||
}
|
||||
|
||||
fn is_rolling_release(browser: &str, version: &str) -> bool {
|
||||
// Check if this is a rolling release like twilight
|
||||
browser == "zen" && version.to_lowercase() == "twilight"
|
||||
}
|
||||
|
||||
pub fn cleanup_failed_download(
|
||||
&mut self,
|
||||
browser: &str,
|
||||
version: &str,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
if let Some(info) = self.remove_browser(browser, version) {
|
||||
// Clean up any files that might have been left behind
|
||||
if info.file_path.exists() {
|
||||
if info.file_path.is_dir() {
|
||||
fs::remove_dir_all(&info.file_path)?;
|
||||
} else {
|
||||
fs::remove_file(&info.file_path)?;
|
||||
}
|
||||
}
|
||||
|
||||
// Also clean up the browser directory if it exists
|
||||
let base_dirs = BaseDirs::new().ok_or("Failed to get base directories")?;
|
||||
let mut browser_dir = base_dirs.data_local_dir().to_path_buf();
|
||||
browser_dir.push(if cfg!(debug_assertions) {
|
||||
"DonutBrowserDev"
|
||||
} else {
|
||||
"DonutBrowser"
|
||||
});
|
||||
browser_dir.push("binaries");
|
||||
browser_dir.push(browser);
|
||||
browser_dir.push(version);
|
||||
|
||||
if browser_dir.exists() {
|
||||
fs::remove_dir_all(&browser_dir)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Find and remove unused browser binaries that are not referenced by any active profiles
|
||||
pub fn cleanup_unused_binaries(
|
||||
&mut self,
|
||||
active_profiles: &[(String, String)], // (browser, version) pairs
|
||||
) -> Result<Vec<String>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let active_set: std::collections::HashSet<(String, String)> =
|
||||
active_profiles.iter().cloned().collect();
|
||||
let mut cleaned_up = Vec::new();
|
||||
|
||||
// Collect all downloaded browsers that are not in active profiles
|
||||
let mut to_remove = Vec::new();
|
||||
for (browser, versions) in &self.browsers {
|
||||
for (version, info) in versions {
|
||||
if info.verified && !active_set.contains(&(browser.clone(), version.clone())) {
|
||||
to_remove.push((browser.clone(), version.clone()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Remove unused binaries
|
||||
for (browser, version) in to_remove {
|
||||
if let Err(e) = self.cleanup_failed_download(&browser, &version) {
|
||||
eprintln!("Failed to cleanup unused binary {browser}:{version}: {e}");
|
||||
} else {
|
||||
cleaned_up.push(format!("{browser} {version}"));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(cleaned_up)
|
||||
}
|
||||
|
||||
/// Get all browsers and versions referenced by active profiles
|
||||
pub fn get_active_browser_versions(
|
||||
&self,
|
||||
profiles: &[crate::browser_runner::BrowserProfile],
|
||||
) -> Vec<(String, String)> {
|
||||
profiles
|
||||
.iter()
|
||||
.map(|profile| (profile.browser.clone(), profile.version.clone()))
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_registry_creation() {
|
||||
let registry = DownloadedBrowsersRegistry::new();
|
||||
assert!(registry.browsers.is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_add_and_get_browser() {
|
||||
let mut registry = DownloadedBrowsersRegistry::new();
|
||||
let info = DownloadedBrowserInfo {
|
||||
browser: "firefox".to_string(),
|
||||
version: "139.0".to_string(),
|
||||
download_date: 1234567890,
|
||||
file_path: PathBuf::from("/test/path"),
|
||||
verified: true,
|
||||
actual_version: None,
|
||||
file_size: None,
|
||||
is_rolling_release: false,
|
||||
};
|
||||
|
||||
registry.add_browser(info.clone());
|
||||
|
||||
assert!(registry.is_browser_downloaded("firefox", "139.0"));
|
||||
assert!(!registry.is_browser_downloaded("firefox", "140.0"));
|
||||
assert!(!registry.is_browser_downloaded("chrome", "139.0"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_downloaded_versions() {
|
||||
let mut registry = DownloadedBrowsersRegistry::new();
|
||||
|
||||
let info1 = DownloadedBrowserInfo {
|
||||
browser: "firefox".to_string(),
|
||||
version: "139.0".to_string(),
|
||||
download_date: 1234567890,
|
||||
file_path: PathBuf::from("/test/path1"),
|
||||
verified: true,
|
||||
actual_version: None,
|
||||
file_size: None,
|
||||
is_rolling_release: false,
|
||||
};
|
||||
|
||||
let info2 = DownloadedBrowserInfo {
|
||||
browser: "firefox".to_string(),
|
||||
version: "140.0".to_string(),
|
||||
download_date: 1234567891,
|
||||
file_path: PathBuf::from("/test/path2"),
|
||||
verified: false, // Not verified, should not be included
|
||||
actual_version: None,
|
||||
file_size: None,
|
||||
is_rolling_release: false,
|
||||
};
|
||||
|
||||
let info3 = DownloadedBrowserInfo {
|
||||
browser: "firefox".to_string(),
|
||||
version: "141.0".to_string(),
|
||||
download_date: 1234567892,
|
||||
file_path: PathBuf::from("/test/path3"),
|
||||
verified: true,
|
||||
actual_version: None,
|
||||
file_size: None,
|
||||
is_rolling_release: false,
|
||||
};
|
||||
|
||||
registry.add_browser(info1);
|
||||
registry.add_browser(info2);
|
||||
registry.add_browser(info3);
|
||||
|
||||
let versions = registry.get_downloaded_versions("firefox");
|
||||
assert_eq!(versions.len(), 2);
|
||||
assert!(versions.contains(&"139.0".to_string()));
|
||||
assert!(versions.contains(&"141.0".to_string()));
|
||||
assert!(!versions.contains(&"140.0".to_string()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_mark_download_lifecycle() {
|
||||
let mut registry = DownloadedBrowsersRegistry::new();
|
||||
|
||||
// Mark download started
|
||||
registry.mark_download_started("firefox", "139.0", PathBuf::from("/test/path"));
|
||||
|
||||
// Should not be considered downloaded yet
|
||||
assert!(!registry.is_browser_downloaded("firefox", "139.0"));
|
||||
|
||||
// Mark as completed
|
||||
registry
|
||||
.mark_download_completed_with_actual_version("firefox", "139.0", Some("139.0".to_string()))
|
||||
.unwrap();
|
||||
|
||||
// Now should be considered downloaded
|
||||
assert!(registry.is_browser_downloaded("firefox", "139.0"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_remove_browser() {
|
||||
let mut registry = DownloadedBrowsersRegistry::new();
|
||||
let info = DownloadedBrowserInfo {
|
||||
browser: "firefox".to_string(),
|
||||
version: "139.0".to_string(),
|
||||
download_date: 1234567890,
|
||||
file_path: PathBuf::from("/test/path"),
|
||||
verified: true,
|
||||
actual_version: None,
|
||||
file_size: None,
|
||||
is_rolling_release: false,
|
||||
};
|
||||
|
||||
registry.add_browser(info);
|
||||
assert!(registry.is_browser_downloaded("firefox", "139.0"));
|
||||
|
||||
let removed = registry.remove_browser("firefox", "139.0");
|
||||
assert!(removed.is_some());
|
||||
assert!(!registry.is_browser_downloaded("firefox", "139.0"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_twilight_rolling_release() {
|
||||
let mut registry = DownloadedBrowsersRegistry::new();
|
||||
|
||||
// Mark twilight download started
|
||||
registry.mark_download_started("zen", "twilight", PathBuf::from("/test/zen-twilight"));
|
||||
|
||||
// Check that it's marked as rolling release
|
||||
let zen_versions = ®istry.browsers["zen"];
|
||||
let twilight_info = &zen_versions["twilight"];
|
||||
assert!(twilight_info.is_rolling_release);
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
File diff suppressed because it is too large
+829 -795
File diff suppressed because it is too large
@@ -0,0 +1,388 @@
|
||||
use crate::browser::GithubRelease;
|
||||
use crate::profile::manager::ProfileManager;
|
||||
use directories::BaseDirs;
|
||||
use reqwest::Client;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::PathBuf;
|
||||
use tauri::Emitter;
|
||||
use tokio::fs;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
|
||||
const MMDB_REPO: &str = "P3TERX/GeoLite.mmdb";
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct GeoIPDownloadProgress {
|
||||
pub stage: String, // "downloading", "extracting", "completed"
|
||||
pub percentage: f64,
|
||||
pub message: String,
|
||||
// Extra fields to mirror browser download progress payload
|
||||
pub downloaded_bytes: Option<u64>,
|
||||
pub total_bytes: Option<u64>,
|
||||
pub speed_bytes_per_sec: Option<f64>,
|
||||
pub eta_seconds: Option<f64>,
|
||||
}
|
||||
|
||||
pub struct GeoIPDownloader {
|
||||
client: Client,
|
||||
}
|
||||
|
||||
impl GeoIPDownloader {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
client: Client::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn instance() -> &'static GeoIPDownloader {
|
||||
&GEOIP_DOWNLOADER
|
||||
}
|
||||
|
||||
/// Create a new downloader with custom client (for testing)
|
||||
#[cfg(test)]
|
||||
pub fn new_with_client(client: Client) -> Self {
|
||||
Self { client }
|
||||
}
|
||||
|
||||
fn get_cache_dir() -> Result<PathBuf, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let base_dirs = BaseDirs::new().ok_or("Failed to determine base directories")?;
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
let cache_dir = base_dirs
|
||||
.data_local_dir()
|
||||
.join("camoufox")
|
||||
.join("camoufox")
|
||||
.join("Cache");
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
let cache_dir = base_dirs.cache_dir().join("camoufox");
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
let cache_dir = base_dirs.cache_dir().join("camoufox");
|
||||
|
||||
#[cfg(not(any(target_os = "windows", target_os = "macos", target_os = "linux")))]
|
||||
let cache_dir = base_dirs.cache_dir().join("camoufox");
|
||||
|
||||
Ok(cache_dir)
|
||||
}
|
||||
|
||||
fn get_mmdb_file_path() -> Result<PathBuf, Box<dyn std::error::Error + Send + Sync>> {
|
||||
Ok(Self::get_cache_dir()?.join("GeoLite2-City.mmdb"))
|
||||
}
|
||||
|
||||
pub fn is_geoip_database_available() -> bool {
|
||||
if let Ok(mmdb_path) = Self::get_mmdb_file_path() {
|
||||
mmdb_path.exists()
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
/// Check if GeoIP database is missing for Camoufox profiles
|
||||
pub fn check_missing_geoip_database(
|
||||
&self,
|
||||
) -> Result<bool, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Get all profiles
|
||||
let profiles = ProfileManager::instance()
|
||||
.list_profiles()
|
||||
.map_err(|e| format!("Failed to list profiles: {e}"))?;
|
||||
|
||||
// Check if there are any Camoufox profiles
|
||||
let has_camoufox_profiles = profiles.iter().any(|profile| profile.browser == "camoufox");
|
||||
|
||||
if has_camoufox_profiles {
|
||||
// Check if GeoIP database is available
|
||||
return Ok(!Self::is_geoip_database_available());
|
||||
}
|
||||
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
fn find_city_mmdb_asset(&self, release: &GithubRelease) -> Option<String> {
|
||||
for asset in &release.assets {
|
||||
if asset.name.ends_with("-City.mmdb") {
|
||||
return Some(asset.browser_download_url.clone());
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
pub async fn download_geoip_database(
|
||||
&self,
|
||||
app_handle: &tauri::AppHandle,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Emit initial progress
|
||||
let _ = app_handle.emit(
|
||||
"geoip-download-progress",
|
||||
GeoIPDownloadProgress {
|
||||
stage: "downloading".to_string(),
|
||||
percentage: 0.0,
|
||||
message: "Starting GeoIP database download".to_string(),
|
||||
downloaded_bytes: Some(0),
|
||||
total_bytes: None,
|
||||
speed_bytes_per_sec: Some(0.0),
|
||||
eta_seconds: None,
|
||||
},
|
||||
);
|
||||
|
||||
// Fetch latest release from GitHub
|
||||
let releases = self.fetch_geoip_releases().await?;
|
||||
let latest_release = releases.first().ok_or("No GeoIP database releases found")?;
|
||||
|
||||
let download_url = self
|
||||
.find_city_mmdb_asset(latest_release)
|
||||
.ok_or("No compatible GeoIP database asset found")?;
|
||||
|
||||
// Create cache directory
|
||||
let cache_dir = Self::get_cache_dir()?;
|
||||
fs::create_dir_all(&cache_dir).await?;
|
||||
|
||||
let mmdb_path = Self::get_mmdb_file_path()?;
|
||||
|
||||
// Download the file
|
||||
let response = self.client.get(&download_url).send().await?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
return Err(
|
||||
format!(
|
||||
"Failed to download GeoIP database: HTTP {}",
|
||||
response.status()
|
||||
)
|
||||
.into(),
|
||||
);
|
||||
}
|
||||
|
||||
let total_size = response.content_length().unwrap_or(0);
|
||||
let mut downloaded: u64 = 0;
|
||||
let mut file = fs::File::create(&mmdb_path).await?;
|
||||
let mut stream = response.bytes_stream();
|
||||
|
||||
use futures_util::StreamExt;
|
||||
use std::time::Instant;
|
||||
let start_time = Instant::now();
|
||||
let mut last_update = Instant::now();
|
||||
while let Some(chunk) = stream.next().await {
|
||||
let chunk = chunk?;
|
||||
downloaded += chunk.len() as u64;
|
||||
file.write_all(&chunk).await?;
|
||||
|
||||
let now = Instant::now();
|
||||
if now.duration_since(last_update).as_millis() >= 100 {
|
||||
let elapsed = start_time.elapsed().as_secs_f64();
|
||||
let speed = if elapsed > 0.0 {
|
||||
downloaded as f64 / elapsed
|
||||
} else {
|
||||
0.0
|
||||
};
|
||||
let percentage = if total_size > 0 {
|
||||
(downloaded as f64 / total_size as f64) * 100.0
|
||||
} else {
|
||||
0.0
|
||||
};
|
||||
let eta = if speed > 0.0 && total_size > 0 {
|
||||
Some((total_size.saturating_sub(downloaded)) as f64 / speed)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let _ = app_handle.emit(
|
||||
"geoip-download-progress",
|
||||
GeoIPDownloadProgress {
|
||||
stage: "downloading".to_string(),
|
||||
percentage,
|
||||
message: format!("Downloaded {downloaded} / {total_size} bytes"),
|
||||
downloaded_bytes: Some(downloaded),
|
||||
total_bytes: Some(total_size),
|
||||
speed_bytes_per_sec: Some(speed),
|
||||
eta_seconds: eta,
|
||||
},
|
||||
);
|
||||
last_update = now;
|
||||
}
|
||||
}
|
||||
|
||||
file.flush().await?;
|
||||
|
||||
// Emit completion
|
||||
let _ = app_handle.emit(
|
||||
"geoip-download-progress",
|
||||
GeoIPDownloadProgress {
|
||||
stage: "completed".to_string(),
|
||||
percentage: 100.0,
|
||||
message: "GeoIP database download completed".to_string(),
|
||||
downloaded_bytes: Some(downloaded),
|
||||
total_bytes: Some(total_size),
|
||||
speed_bytes_per_sec: Some(0.0),
|
||||
eta_seconds: Some(0.0),
|
||||
},
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
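// Illustrative sketch, not part of the original file: the throttled progress events
// emitted above compute speed as downloaded bytes over elapsed seconds, percentage
// against content_length when it is known, and ETA as remaining bytes over speed.
#[allow(dead_code)]
fn sketch_progress(downloaded: u64, total: u64, elapsed_secs: f64) -> (f64, f64, Option<f64>) {
  // Average speed since the download started (bytes per second)
  let speed = if elapsed_secs > 0.0 {
    downloaded as f64 / elapsed_secs
  } else {
    0.0
  };
  // Percentage is only meaningful when the server reported a total size
  let percentage = if total > 0 {
    (downloaded as f64 / total as f64) * 100.0
  } else {
    0.0
  };
  // Remaining bytes divided by speed gives the ETA in seconds
  let eta_seconds = if speed > 0.0 && total > 0 {
    Some((total.saturating_sub(downloaded)) as f64 / speed)
  } else {
    None
  };
  (percentage, speed, eta_seconds)
}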
|
||||
|
||||
async fn fetch_geoip_releases(
|
||||
&self,
|
||||
) -> Result<Vec<GithubRelease>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let url = format!("https://api.github.com/repos/{MMDB_REPO}/releases");
|
||||
let response = self
|
||||
.client
|
||||
.get(&url)
|
||||
.header("User-Agent", "Mozilla/5.0 (compatible; donutbrowser)")
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
return Err(format!("Failed to fetch releases: HTTP {}", response.status()).into());
|
||||
}
|
||||
|
||||
let releases: Vec<GithubRelease> = response.json().await?;
|
||||
Ok(releases)
|
||||
}
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub fn check_missing_geoip_database() -> Result<bool, String> {
|
||||
let geoip_downloader = GeoIPDownloader::instance();
|
||||
geoip_downloader
|
||||
.check_missing_geoip_database()
|
||||
.map_err(|e| format!("Failed to check missing GeoIP database: {e}"))
|
||||
}
|
||||
|
||||
// Global singleton instance
|
||||
lazy_static::lazy_static! {
|
||||
static ref GEOIP_DOWNLOADER: GeoIPDownloader = GeoIPDownloader::new();
|
||||
}
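// The Tauri command above calls `GeoIPDownloader::instance()`, whose definition is not
// shown in this hunk. A minimal accessor over the lazy_static singleton would look like
// the commented sketch below (an assumption, kept as a comment so it does not duplicate
// the real definition elsewhere in the file):
//
// impl GeoIPDownloader {
//   pub fn instance() -> &'static GeoIPDownloader {
//     &GEOIP_DOWNLOADER
//   }
// }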
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::browser::GithubRelease;
|
||||
use wiremock::matchers::{method, path};
|
||||
use wiremock::{Mock, MockServer, ResponseTemplate};
|
||||
|
||||
fn create_mock_release() -> GithubRelease {
|
||||
GithubRelease {
|
||||
tag_name: "v1.0.0".to_string(),
|
||||
name: "Test Release".to_string(),
|
||||
body: Some("Test release body".to_string()),
|
||||
published_at: "2023-01-01T00:00:00Z".to_string(),
|
||||
created_at: Some("2023-01-01T00:00:00Z".to_string()),
|
||||
html_url: Some("https://example.com/release".to_string()),
|
||||
tarball_url: Some("https://example.com/tarball".to_string()),
|
||||
zipball_url: Some("https://example.com/zipball".to_string()),
|
||||
draft: false,
|
||||
prerelease: false,
|
||||
is_nightly: false,
|
||||
id: Some(1),
|
||||
node_id: Some("test_node_id".to_string()),
|
||||
target_commitish: None,
|
||||
assets: vec![crate::browser::GithubAsset {
|
||||
id: Some(1),
|
||||
node_id: Some("test_asset_node_id".to_string()),
|
||||
name: "GeoLite2-City.mmdb".to_string(),
|
||||
label: None,
|
||||
content_type: Some("application/octet-stream".to_string()),
|
||||
state: Some("uploaded".to_string()),
|
||||
size: 1024,
|
||||
download_count: Some(0),
|
||||
created_at: Some("2023-01-01T00:00:00Z".to_string()),
|
||||
updated_at: Some("2023-01-01T00:00:00Z".to_string()),
|
||||
browser_download_url: "https://example.com/GeoLite2-City.mmdb".to_string(),
|
||||
}],
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_fetch_geoip_releases_success() {
|
||||
let mock_server = MockServer::start().await;
|
||||
let releases = vec![create_mock_release()];
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path(format!("/repos/{MMDB_REPO}/releases")))
|
||||
.respond_with(ResponseTemplate::new(200).set_body_json(&releases))
|
||||
.mount(&mock_server)
|
||||
.await;
|
||||
|
||||
let client = Client::builder()
|
||||
.build()
|
||||
.expect("Failed to create HTTP client");
|
||||
|
||||
let downloader = GeoIPDownloader::new_with_client(client);
|
||||
|
||||
// Override the URL for testing
|
||||
let url = format!("{}/repos/{}/releases", mock_server.uri(), MMDB_REPO);
|
||||
let response = downloader
|
||||
.client
|
||||
.get(&url)
|
||||
.header("User-Agent", "Mozilla/5.0 (compatible; donutbrowser)")
|
||||
.send()
|
||||
.await
|
||||
.expect("Request should succeed");
|
||||
|
||||
assert!(response.status().is_success());
|
||||
|
||||
let fetched_releases: Vec<GithubRelease> = response.json().await.expect("Should parse JSON");
|
||||
assert_eq!(fetched_releases.len(), 1);
|
||||
assert_eq!(fetched_releases[0].tag_name, "v1.0.0");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_find_city_mmdb_asset() {
|
||||
let downloader = GeoIPDownloader::new();
|
||||
let release = create_mock_release();
|
||||
|
||||
let asset_url = downloader.find_city_mmdb_asset(&release);
|
||||
assert!(asset_url.is_some());
|
||||
assert_eq!(asset_url.unwrap(), "https://example.com/GeoLite2-City.mmdb");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_find_city_mmdb_asset_not_found() {
|
||||
let downloader = GeoIPDownloader::new();
|
||||
let mut release = create_mock_release();
|
||||
release.assets[0].name = "wrong-file.txt".to_string();
|
||||
|
||||
let asset_url = downloader.find_city_mmdb_asset(&release);
|
||||
assert!(asset_url.is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_cache_dir() {
|
||||
let cache_dir = GeoIPDownloader::get_cache_dir();
|
||||
assert!(cache_dir.is_ok());
|
||||
|
||||
let path = cache_dir.unwrap();
|
||||
assert!(path.to_string_lossy().contains("camoufox"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_mmdb_file_path() {
|
||||
let mmdb_path = GeoIPDownloader::get_mmdb_file_path();
|
||||
assert!(mmdb_path.is_ok());
|
||||
|
||||
let path = mmdb_path.unwrap();
|
||||
assert!(path.to_string_lossy().ends_with("GeoLite2-City.mmdb"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_geoip_database_available() {
|
||||
// Test that the function works correctly regardless of file system state
|
||||
let is_available = GeoIPDownloader::is_geoip_database_available();
|
||||
|
||||
// The call must not panic, and its result must match the actual existence of the
// MMDB file, which we verify below against the resolved path.
|
||||
|
||||
// Verify the function logic by checking if the path resolution works
|
||||
let mmdb_path_result = GeoIPDownloader::get_mmdb_file_path();
|
||||
assert!(
|
||||
mmdb_path_result.is_ok(),
|
||||
"Should be able to get MMDB file path"
|
||||
);
|
||||
|
||||
let mmdb_path = mmdb_path_result.unwrap();
|
||||
let expected_available = mmdb_path.exists();
|
||||
assert_eq!(
|
||||
is_available, expected_available,
|
||||
"Function result should match actual file existence"
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,314 @@
|
||||
use directories::BaseDirs;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Mutex;
|
||||
use tauri::Emitter;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ProfileGroup {
|
||||
pub id: String,
|
||||
pub name: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct GroupWithCount {
|
||||
pub id: String,
|
||||
pub name: String,
|
||||
pub count: usize,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
struct GroupsData {
|
||||
groups: Vec<ProfileGroup>,
|
||||
}
|
||||
|
||||
pub struct GroupManager {
|
||||
base_dirs: BaseDirs,
|
||||
data_dir_override: Option<PathBuf>,
|
||||
}
|
||||
|
||||
impl GroupManager {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
base_dirs: BaseDirs::new().expect("Failed to get base directories"),
|
||||
data_dir_override: std::env::var("DONUTBROWSER_DATA_DIR")
|
||||
.ok()
|
||||
.map(PathBuf::from),
|
||||
}
|
||||
}
|
||||
|
||||
// Helper for tests to override data directory without global env var
|
||||
#[allow(dead_code)]
|
||||
pub fn with_data_dir_override(dir: &Path) -> Self {
|
||||
Self {
|
||||
base_dirs: BaseDirs::new().expect("Failed to get base directories"),
|
||||
data_dir_override: Some(dir.to_path_buf()),
|
||||
}
|
||||
}
|
||||
|
||||
fn get_groups_file_path(&self) -> PathBuf {
|
||||
if let Some(dir) = &self.data_dir_override {
|
||||
let mut override_path = dir.clone();
|
||||
// Ensure the directory exists before returning the path
|
||||
let _ = fs::create_dir_all(&override_path);
|
||||
override_path.push("groups.json");
|
||||
return override_path;
|
||||
}
|
||||
|
||||
let mut path = self.base_dirs.data_local_dir().to_path_buf();
|
||||
path.push(if cfg!(debug_assertions) {
|
||||
"DonutBrowserDev"
|
||||
} else {
|
||||
"DonutBrowser"
|
||||
});
|
||||
path.push("data");
|
||||
path.push("groups.json");
|
||||
path
|
||||
}
|
||||
|
||||
fn load_groups_data(&self) -> Result<GroupsData, Box<dyn std::error::Error>> {
|
||||
let groups_file = self.get_groups_file_path();
|
||||
|
||||
if !groups_file.exists() {
|
||||
return Ok(GroupsData { groups: Vec::new() });
|
||||
}
|
||||
|
||||
let content = fs::read_to_string(groups_file)?;
|
||||
let groups_data: GroupsData = serde_json::from_str(&content)?;
|
||||
Ok(groups_data)
|
||||
}
|
||||
|
||||
fn save_groups_data(&self, groups_data: &GroupsData) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let groups_file = self.get_groups_file_path();
|
||||
|
||||
// Ensure the parent directory exists
|
||||
if let Some(parent) = groups_file.parent() {
|
||||
fs::create_dir_all(parent)?;
|
||||
}
|
||||
|
||||
let json = serde_json::to_string_pretty(groups_data)?;
|
||||
fs::write(groups_file, json)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn get_all_groups(&self) -> Result<Vec<ProfileGroup>, Box<dyn std::error::Error>> {
|
||||
let groups_data = self.load_groups_data()?;
|
||||
Ok(groups_data.groups)
|
||||
}
|
||||
|
||||
pub fn create_group(
|
||||
&self,
|
||||
app_handle: &tauri::AppHandle,
|
||||
name: String,
|
||||
) -> Result<ProfileGroup, Box<dyn std::error::Error>> {
|
||||
let mut groups_data = self.load_groups_data()?;
|
||||
|
||||
// Check if group with this name already exists
|
||||
if groups_data.groups.iter().any(|g| g.name == name) {
|
||||
return Err(format!("Group with name '{name}' already exists").into());
|
||||
}
|
||||
|
||||
let group = ProfileGroup {
|
||||
id: uuid::Uuid::new_v4().to_string(),
|
||||
name,
|
||||
};
|
||||
|
||||
groups_data.groups.push(group.clone());
|
||||
self.save_groups_data(&groups_data)?;
|
||||
|
||||
// Emit event for reactive UI updates
|
||||
if let Err(e) = app_handle.emit("groups-changed", ()) {
|
||||
log::error!("Failed to emit groups-changed event: {e}");
|
||||
}
|
||||
|
||||
Ok(group)
|
||||
}
|
||||
|
||||
pub fn update_group(
|
||||
&self,
|
||||
app_handle: &tauri::AppHandle,
|
||||
id: String,
|
||||
name: String,
|
||||
) -> Result<ProfileGroup, Box<dyn std::error::Error>> {
|
||||
let mut groups_data = self.load_groups_data()?;
|
||||
|
||||
// Check if another group with this name already exists
|
||||
if groups_data
|
||||
.groups
|
||||
.iter()
|
||||
.any(|g| g.name == name && g.id != id)
|
||||
{
|
||||
return Err(format!("Group with name '{name}' already exists").into());
|
||||
}
|
||||
|
||||
let group = groups_data
|
||||
.groups
|
||||
.iter_mut()
|
||||
.find(|g| g.id == id)
|
||||
.ok_or_else(|| format!("Group with id '{id}' not found"))?;
|
||||
|
||||
group.name = name;
|
||||
let updated_group = group.clone();
|
||||
|
||||
self.save_groups_data(&groups_data)?;
|
||||
|
||||
// Emit event for reactive UI updates
|
||||
if let Err(e) = app_handle.emit("groups-changed", ()) {
|
||||
log::error!("Failed to emit groups-changed event: {e}");
|
||||
}
|
||||
|
||||
Ok(updated_group)
|
||||
}
|
||||
|
||||
pub fn delete_group(
|
||||
&self,
|
||||
app_handle: &tauri::AppHandle,
|
||||
id: String,
|
||||
) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let mut groups_data = self.load_groups_data()?;
|
||||
|
||||
let initial_len = groups_data.groups.len();
|
||||
groups_data.groups.retain(|g| g.id != id);
|
||||
|
||||
if groups_data.groups.len() == initial_len {
|
||||
return Err(format!("Group with id '{id}' not found").into());
|
||||
}
|
||||
|
||||
self.save_groups_data(&groups_data)?;
|
||||
|
||||
// Emit event for reactive UI updates
|
||||
if let Err(e) = app_handle.emit("groups-changed", ()) {
|
||||
log::error!("Failed to emit groups-changed event: {e}");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn get_groups_with_profile_counts(
|
||||
&self,
|
||||
profiles: &[crate::profile::BrowserProfile],
|
||||
) -> Result<Vec<GroupWithCount>, Box<dyn std::error::Error>> {
|
||||
let groups = self.get_all_groups()?;
|
||||
let mut group_counts = HashMap::new();
|
||||
|
||||
// Count profiles in each group
|
||||
for profile in profiles {
|
||||
if let Some(group_id) = &profile.group_id {
|
||||
*group_counts.entry(group_id.clone()).or_insert(0) += 1;
|
||||
}
|
||||
}
|
||||
|
||||
// Create result including all groups (even those with 0 count)
|
||||
let mut result = Vec::new();
|
||||
for group in groups {
|
||||
let count = group_counts.get(&group.id).copied().unwrap_or(0);
|
||||
result.push(GroupWithCount {
|
||||
id: group.id,
|
||||
name: group.name,
|
||||
count,
|
||||
});
|
||||
}
|
||||
|
||||
// Add default group count (profiles without group_id), always include even if 0
|
||||
let default_count = profiles.iter().filter(|p| p.group_id.is_none()).count();
|
||||
let default_group = GroupWithCount {
|
||||
id: "default".to_string(),
|
||||
name: "Default".to_string(),
|
||||
count: default_count,
|
||||
};
|
||||
// Insert at the beginning for consistent ordering with UI expectations
|
||||
result.insert(0, default_group);
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
}
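// Illustrative only, not part of the original file: save_groups_data() writes a
// pretty-printed groups.json of the shape { "groups": [ { "id": "<uuid>", "name": "Work" } ] }.
// The test-style sketch below round-trips that structure using only the types defined above.
#[cfg(test)]
mod groups_json_sketch {
  use super::*;

  #[test]
  fn groups_data_round_trips_through_json() {
    let data = GroupsData {
      groups: vec![ProfileGroup {
        id: "test-id".to_string(),
        name: "Work".to_string(),
      }],
    };
    let json = serde_json::to_string_pretty(&data).expect("serialize groups data");
    let parsed: GroupsData = serde_json::from_str(&json).expect("deserialize groups data");
    assert_eq!(parsed.groups.len(), 1);
    assert_eq!(parsed.groups[0].name, "Work");
  }
}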
|
||||
|
||||
// Global instance
|
||||
lazy_static::lazy_static! {
|
||||
pub static ref GROUP_MANAGER: Mutex<GroupManager> = Mutex::new(GroupManager::new());
|
||||
}
|
||||
|
||||
// Helper function to get groups with counts
|
||||
pub fn get_groups_with_counts(profiles: &[crate::profile::BrowserProfile]) -> Vec<GroupWithCount> {
|
||||
let group_manager = GROUP_MANAGER.lock().unwrap();
|
||||
group_manager
|
||||
.get_groups_with_profile_counts(profiles)
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
// Tauri commands
|
||||
#[tauri::command]
|
||||
pub async fn get_profile_groups() -> Result<Vec<ProfileGroup>, String> {
|
||||
let group_manager = GROUP_MANAGER.lock().unwrap();
|
||||
group_manager
|
||||
.get_all_groups()
|
||||
.map_err(|e| format!("Failed to get profile groups: {e}"))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_groups_with_profile_counts() -> Result<Vec<GroupWithCount>, String> {
|
||||
let profile_manager = crate::profile::ProfileManager::instance();
|
||||
let profiles = profile_manager
|
||||
.list_profiles()
|
||||
.map_err(|e| format!("Failed to list profiles: {e}"))?;
|
||||
Ok(get_groups_with_counts(&profiles))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn create_profile_group(
|
||||
app_handle: tauri::AppHandle,
|
||||
name: String,
|
||||
) -> Result<ProfileGroup, String> {
|
||||
let group_manager = GROUP_MANAGER.lock().unwrap();
|
||||
group_manager
|
||||
.create_group(&app_handle, name)
|
||||
.map_err(|e| format!("Failed to create group: {e}"))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn update_profile_group(
|
||||
app_handle: tauri::AppHandle,
|
||||
group_id: String,
|
||||
name: String,
|
||||
) -> Result<ProfileGroup, String> {
|
||||
let group_manager = GROUP_MANAGER.lock().unwrap();
|
||||
group_manager
|
||||
.update_group(&app_handle, group_id, name)
|
||||
.map_err(|e| format!("Failed to update group: {e}"))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn delete_profile_group(
|
||||
app_handle: tauri::AppHandle,
|
||||
group_id: String,
|
||||
) -> Result<(), String> {
|
||||
let group_manager = GROUP_MANAGER.lock().unwrap();
|
||||
group_manager
|
||||
.delete_group(&app_handle, group_id)
|
||||
.map_err(|e| format!("Failed to delete group: {e}"))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn assign_profiles_to_group(
|
||||
app_handle: tauri::AppHandle,
|
||||
profile_ids: Vec<String>,
|
||||
group_id: Option<String>,
|
||||
) -> Result<(), String> {
|
||||
let profile_manager = crate::profile::ProfileManager::instance();
|
||||
profile_manager
|
||||
.assign_profiles_to_group(&app_handle, profile_ids, group_id)
|
||||
.map_err(|e| format!("Failed to assign profiles to group: {e}"))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn delete_selected_profiles(
|
||||
app_handle: tauri::AppHandle,
|
||||
profile_ids: Vec<String>,
|
||||
) -> Result<(), String> {
|
||||
let profile_manager = crate::profile::ProfileManager::instance();
|
||||
profile_manager
|
||||
.delete_multiple_profiles(&app_handle, profile_ids)
|
||||
.map_err(|e| format!("Failed to delete profiles: {e}"))
|
||||
}
|
||||
@@ -3,53 +3,76 @@ use std::env;
|
||||
use std::sync::Mutex;
|
||||
use tauri::{Emitter, Manager, Runtime, WebviewUrl, WebviewWindow, WebviewWindowBuilder};
|
||||
use tauri_plugin_deep_link::DeepLinkExt;
|
||||
use tauri_plugin_log::{Target, TargetKind};
|
||||
|
||||
// Store pending URLs that need to be handled when the window is ready
|
||||
static PENDING_URLS: Mutex<Vec<String>> = Mutex::new(Vec::new());
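// Sketch, not part of the original source: the two operations performed on this queue
// further below are a push while the main window is not yet ready, and a drain of all
// pending URLs under a single lock once it is.
#[allow(dead_code)]
fn sketch_queue_pending_url(url: String) {
  PENDING_URLS.lock().unwrap().push(url);
}

#[allow(dead_code)]
fn sketch_drain_pending_urls() -> Vec<String> {
  let mut pending = PENDING_URLS.lock().unwrap();
  let urls = pending.clone();
  pending.clear();
  urls
}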
|
||||
|
||||
mod api_client;
|
||||
mod api_server;
|
||||
mod app_auto_updater;
|
||||
mod auto_updater;
|
||||
mod browser;
|
||||
mod browser_runner;
|
||||
mod browser_version_service;
|
||||
mod browser_version_manager;
|
||||
mod camoufox_manager;
|
||||
mod default_browser;
|
||||
mod download;
|
||||
mod downloaded_browsers;
|
||||
mod downloaded_browsers_registry;
|
||||
mod downloader;
|
||||
mod extraction;
|
||||
mod geoip_downloader;
|
||||
mod group_manager;
|
||||
mod platform_browser;
|
||||
mod profile;
|
||||
mod profile_importer;
|
||||
mod proxy_manager;
|
||||
pub mod proxy_runner;
|
||||
pub mod proxy_server;
|
||||
pub mod proxy_storage;
|
||||
mod settings_manager;
|
||||
mod theme_detector;
|
||||
pub mod traffic_stats;
|
||||
// mod theme_detector; // removed: theme detection handled in webview via CSS prefers-color-scheme
|
||||
mod tag_manager;
|
||||
mod version_updater;
|
||||
|
||||
extern crate lazy_static;
|
||||
|
||||
use browser_runner::{
|
||||
check_browser_exists, check_browser_status, create_browser_profile_new, delete_profile,
|
||||
download_browser, fetch_browser_versions_cached_first, fetch_browser_versions_with_count,
|
||||
fetch_browser_versions_with_count_cached_first, get_browser_release_types,
|
||||
get_downloaded_browser_versions, get_supported_browsers, is_browser_supported_on_platform,
|
||||
kill_browser_profile, launch_browser_profile, list_browser_profiles, rename_profile,
|
||||
update_profile_proxy, update_profile_version,
|
||||
check_browser_exists, kill_browser_profile, launch_browser_profile, open_url_with_profile,
|
||||
};
|
||||
|
||||
use profile::manager::{
|
||||
check_browser_status, create_browser_profile_new, delete_profile, list_browser_profiles,
|
||||
rename_profile, update_camoufox_config, update_profile_note, update_profile_proxy,
|
||||
update_profile_tags,
|
||||
};
|
||||
|
||||
use browser_version_manager::{
|
||||
fetch_browser_versions_cached_first, fetch_browser_versions_with_count,
|
||||
fetch_browser_versions_with_count_cached_first, get_supported_browsers,
|
||||
is_browser_supported_on_platform,
|
||||
};
|
||||
|
||||
use downloaded_browsers_registry::{
|
||||
check_missing_binaries, ensure_all_binaries_exist, get_downloaded_browser_versions,
|
||||
};
|
||||
|
||||
use downloader::download_browser;
|
||||
|
||||
use settings_manager::{
|
||||
clear_all_version_cache_and_refetch, get_app_settings, get_table_sorting_settings,
|
||||
save_app_settings, save_table_sorting_settings, should_show_settings_on_startup,
|
||||
get_app_settings, get_table_sorting_settings, save_app_settings, save_table_sorting_settings,
|
||||
should_show_settings_on_startup,
|
||||
};
|
||||
|
||||
use default_browser::{
|
||||
is_default_browser, open_url_with_profile, set_as_default_browser, smart_open_url,
|
||||
};
|
||||
use tag_manager::get_all_tags;
|
||||
|
||||
use default_browser::{is_default_browser, set_as_default_browser};
|
||||
|
||||
use version_updater::{
|
||||
get_version_update_status, get_version_updater, trigger_manual_version_update,
|
||||
clear_all_version_cache_and_refetch, get_version_update_status, get_version_updater,
|
||||
trigger_manual_version_update,
|
||||
};
|
||||
|
||||
use auto_updater::{
|
||||
check_for_browser_updates, complete_browser_update_with_auto_update, dismiss_update_notification,
|
||||
is_browser_disabled_for_update,
|
||||
};
|
||||
|
||||
use app_auto_updater::{
|
||||
@@ -58,7 +81,16 @@ use app_auto_updater::{
|
||||
|
||||
use profile_importer::{detect_existing_profiles, import_browser_profile};
|
||||
|
||||
use theme_detector::get_system_theme;
|
||||
use group_manager::{
|
||||
assign_profiles_to_group, create_profile_group, delete_profile_group, delete_selected_profiles,
|
||||
get_groups_with_profile_counts, get_profile_groups, update_profile_group,
|
||||
};
|
||||
|
||||
use geoip_downloader::{check_missing_geoip_database, GeoIPDownloader};
|
||||
|
||||
use browser_version_manager::get_browser_release_types;
|
||||
|
||||
use api_server::{get_api_server_status, start_api_server, stop_api_server};
|
||||
|
||||
// Trait to extend WebviewWindow with transparent titlebar functionality
|
||||
pub trait WindowExt {
|
||||
@@ -105,13 +137,42 @@ impl<R: Runtime> WindowExt for WebviewWindow<R> {
|
||||
}
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
async fn warm_up_nodecar(app: tauri::AppHandle) -> Result<(), String> {
|
||||
use tauri_plugin_shell::ShellExt;
|
||||
use tokio::time::{timeout, Duration};
|
||||
|
||||
let start_time = std::time::Instant::now();
|
||||
|
||||
// Use sidecar to execute a fast, harmless command that ensures the binary is loaded
|
||||
let cmd = app
|
||||
.shell()
|
||||
.sidecar("nodecar")
|
||||
.map_err(|e| format!("Failed to create nodecar sidecar: {e}"))?
|
||||
.arg("help");
|
||||
|
||||
let exec_future = async { cmd.output().await };
|
||||
match timeout(Duration::from_secs(120), exec_future).await {
|
||||
Ok(Ok(_output)) => {
|
||||
let duration = start_time.elapsed();
|
||||
log::info!(
|
||||
"Nodecar warm-up (frontend-triggered) completed in {:.2}s",
|
||||
duration.as_secs_f64()
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
Ok(Err(e)) => Err(format!("Failed to execute nodecar for warm-up: {e}")),
|
||||
Err(_) => Err("Nodecar warm-up timed out after 120s".to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
async fn handle_url_open(app: tauri::AppHandle, url: String) -> Result<(), String> {
|
||||
println!("handle_url_open called with URL: {url}");
|
||||
log::info!("handle_url_open called with URL: {url}");
|
||||
|
||||
// Check if the main window exists and is ready
|
||||
if let Some(window) = app.get_webview_window("main") {
|
||||
println!("Main window exists");
|
||||
log::debug!("Main window exists");
|
||||
|
||||
// Try to show and focus the window first
|
||||
let _ = window.show();
|
||||
@@ -123,7 +184,7 @@ async fn handle_url_open(app: tauri::AppHandle, url: String) -> Result<(), Strin
|
||||
.map_err(|e| format!("Failed to emit URL open event: {e}"))?;
|
||||
} else {
|
||||
// Window doesn't exist yet - add to pending URLs
|
||||
println!("Main window doesn't exist, adding URL to pending list");
|
||||
log::debug!("Main window doesn't exist, adding URL to pending list");
|
||||
let mut pending = PENDING_URLS.lock().unwrap();
|
||||
pending.push(url);
|
||||
}
|
||||
@@ -132,46 +193,94 @@ async fn handle_url_open(app: tauri::AppHandle, url: String) -> Result<(), Strin
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
async fn check_and_handle_startup_url(app_handle: tauri::AppHandle) -> Result<bool, String> {
|
||||
println!("check_and_handle_startup_url called");
|
||||
async fn create_stored_proxy(
|
||||
app_handle: tauri::AppHandle,
|
||||
name: String,
|
||||
proxy_settings: crate::browser::ProxySettings,
|
||||
) -> Result<crate::proxy_manager::StoredProxy, String> {
|
||||
crate::proxy_manager::PROXY_MANAGER
|
||||
.create_stored_proxy(&app_handle, name, proxy_settings)
|
||||
.map_err(|e| format!("Failed to create stored proxy: {e}"))
|
||||
}
|
||||
|
||||
let pending_urls = {
|
||||
let mut pending = PENDING_URLS.lock().unwrap();
|
||||
let urls = pending.clone();
|
||||
pending.clear(); // Clear after getting them
|
||||
urls
|
||||
};
|
||||
#[tauri::command]
|
||||
async fn get_stored_proxies() -> Result<Vec<crate::proxy_manager::StoredProxy>, String> {
|
||||
Ok(crate::proxy_manager::PROXY_MANAGER.get_stored_proxies())
|
||||
}
|
||||
|
||||
println!("Found {} pending URLs", pending_urls.len());
|
||||
#[tauri::command]
|
||||
async fn update_stored_proxy(
|
||||
app_handle: tauri::AppHandle,
|
||||
proxy_id: String,
|
||||
name: Option<String>,
|
||||
proxy_settings: Option<crate::browser::ProxySettings>,
|
||||
) -> Result<crate::proxy_manager::StoredProxy, String> {
|
||||
crate::proxy_manager::PROXY_MANAGER
|
||||
.update_stored_proxy(&app_handle, &proxy_id, name, proxy_settings)
|
||||
.map_err(|e| format!("Failed to update stored proxy: {e}"))
|
||||
}
|
||||
|
||||
if !pending_urls.is_empty() {
|
||||
println!(
|
||||
"Handling {} pending URLs from frontend request",
|
||||
pending_urls.len()
|
||||
);
|
||||
#[tauri::command]
|
||||
async fn delete_stored_proxy(app_handle: tauri::AppHandle, proxy_id: String) -> Result<(), String> {
|
||||
crate::proxy_manager::PROXY_MANAGER
|
||||
.delete_stored_proxy(&app_handle, &proxy_id)
|
||||
.map_err(|e| format!("Failed to delete stored proxy: {e}"))
|
||||
}
|
||||
|
||||
// Ensure the main window is visible and focused
|
||||
if let Some(window) = app_handle.get_webview_window("main") {
|
||||
let _ = window.show();
|
||||
let _ = window.set_focus();
|
||||
let _ = window.unminimize();
|
||||
#[tauri::command]
|
||||
async fn check_proxy_validity(
|
||||
proxy_id: String,
|
||||
proxy_settings: crate::browser::ProxySettings,
|
||||
) -> Result<crate::proxy_manager::ProxyCheckResult, String> {
|
||||
crate::proxy_manager::PROXY_MANAGER
|
||||
.check_proxy_validity(&proxy_id, &proxy_settings)
|
||||
.await
|
||||
}
|
||||
|
||||
// Give the window a moment to become visible
|
||||
tokio::time::sleep(tokio::time::Duration::from_millis(200)).await;
|
||||
}
|
||||
#[tauri::command]
|
||||
fn get_cached_proxy_check(proxy_id: String) -> Option<crate::proxy_manager::ProxyCheckResult> {
|
||||
crate::proxy_manager::PROXY_MANAGER.get_cached_proxy_check(&proxy_id)
|
||||
}
|
||||
|
||||
for url in pending_urls {
|
||||
println!("Emitting show-profile-selector event for URL: {url}");
|
||||
if let Err(e) = app_handle.emit("show-profile-selector", url.clone()) {
|
||||
eprintln!("Failed to emit URL event: {e}");
|
||||
return Err(format!("Failed to emit URL event: {e}"));
|
||||
}
|
||||
}
|
||||
#[tauri::command]
|
||||
async fn is_geoip_database_available() -> Result<bool, String> {
|
||||
Ok(GeoIPDownloader::is_geoip_database_available())
|
||||
}
|
||||
|
||||
return Ok(true);
|
||||
}
|
||||
#[tauri::command]
|
||||
async fn get_all_traffic_snapshots() -> Result<Vec<crate::traffic_stats::TrafficSnapshot>, String> {
|
||||
Ok(
|
||||
crate::traffic_stats::list_traffic_stats()
|
||||
.into_iter()
|
||||
.map(|s| s.to_snapshot())
|
||||
.collect(),
|
||||
)
|
||||
}
|
||||
|
||||
Ok(false)
|
||||
#[tauri::command]
|
||||
async fn clear_all_traffic_stats() -> Result<(), String> {
|
||||
crate::traffic_stats::clear_all_traffic_stats()
|
||||
.map_err(|e| format!("Failed to clear traffic stats: {e}"))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
async fn get_traffic_stats_for_period(
|
||||
profile_id: String,
|
||||
seconds: u64,
|
||||
) -> Result<Option<crate::traffic_stats::FilteredTrafficStats>, String> {
|
||||
Ok(crate::traffic_stats::get_traffic_stats_for_period(
|
||||
&profile_id,
|
||||
seconds,
|
||||
))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
async fn download_geoip_database(app_handle: tauri::AppHandle) -> Result<(), String> {
|
||||
let downloader = GeoIPDownloader::instance();
|
||||
downloader
|
||||
.download_geoip_database(&app_handle)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to download GeoIP database: {e}"))
|
||||
}
|
||||
|
||||
#[cfg_attr(mobile, tauri::mobile_entry_point)]
|
||||
@@ -180,14 +289,49 @@ pub fn run() {
|
||||
let startup_url = args.iter().find(|arg| arg.starts_with("http")).cloned();
|
||||
|
||||
if let Some(url) = startup_url.clone() {
|
||||
println!("Found startup URL in command line: {url}");
|
||||
log::info!("Found startup URL in command line: {url}");
|
||||
let mut pending = PENDING_URLS.lock().unwrap();
|
||||
pending.push(url.clone());
|
||||
}
|
||||
|
||||
// Configure logging plugin with separate logs for dev and production
|
||||
let log_file_name = if cfg!(debug_assertions) {
|
||||
"DonutBrowserDev"
|
||||
} else {
|
||||
"DonutBrowser"
|
||||
};
|
||||
|
||||
tauri::Builder::default()
|
||||
.plugin(
|
||||
tauri_plugin_log::Builder::new()
|
||||
.clear_targets() // Clear default targets to avoid duplicates
|
||||
.target(Target::new(TargetKind::Stdout))
|
||||
.target(Target::new(TargetKind::Webview))
|
||||
.target(Target::new(TargetKind::LogDir {
|
||||
file_name: Some(log_file_name.to_string()),
|
||||
}))
|
||||
.max_file_size(100_000) // 100KB
|
||||
.level(log::LevelFilter::Info)
|
||||
.format(|out, message, record| {
|
||||
use chrono::Local;
|
||||
let now = Local::now();
|
||||
let timestamp = format!(
|
||||
"{}.{:03}",
|
||||
now.format("%Y-%m-%d %H:%M:%S"),
|
||||
now.timestamp_subsec_millis()
|
||||
);
|
||||
out.finish(format_args!(
|
||||
"[{}][{}][{}] {}",
|
||||
timestamp,
|
||||
record.target(),
|
||||
record.level(),
|
||||
message
|
||||
))
|
||||
})
|
||||
.build(),
|
||||
)
|
||||
.plugin(tauri_plugin_single_instance::init(|_, args, _cwd| {
|
||||
println!("Single instance triggered with args: {args:?}");
|
||||
log::info!("Single instance triggered with args: {args:?}");
|
||||
}))
|
||||
.plugin(tauri_plugin_deep_link::init())
|
||||
.plugin(tauri_plugin_fs::init())
|
||||
@@ -200,7 +344,7 @@ pub fn run() {
|
||||
#[allow(unused_variables)]
|
||||
let win_builder = WebviewWindowBuilder::new(app, "main", WebviewUrl::default())
|
||||
.title("Donut Browser")
|
||||
.inner_size(900.0, 600.0)
|
||||
.inner_size(800.0, 500.0)
|
||||
.resizable(false)
|
||||
.fullscreen(false)
|
||||
.center()
|
||||
@@ -214,18 +358,18 @@ pub fn run() {
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
if let Err(e) = window.set_transparent_titlebar(true) {
|
||||
eprintln!("Failed to set transparent titlebar: {e}");
|
||||
log::warn!("Failed to set transparent titlebar: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
// Set up deep link handler
|
||||
let handle = app.handle().clone();
|
||||
|
||||
#[cfg(any(windows, target_os = "linux"))]
|
||||
#[cfg(windows)]
|
||||
{
|
||||
// For Windows and Linux, register all deep links at runtime for development
|
||||
// For Windows, register all deep links at runtime
|
||||
if let Err(e) = app.deep_link().register_all() {
|
||||
eprintln!("Failed to register deep links: {e}");
|
||||
log::warn!("Failed to register deep links: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -233,7 +377,7 @@ pub fn run() {
|
||||
{
|
||||
// On macOS, try to register deep links for development builds
|
||||
if let Err(e) = app.deep_link().register_all() {
|
||||
eprintln!(
|
||||
log::debug!(
|
||||
"Note: Deep link registration failed on macOS (this is normal for production): {e}"
|
||||
);
|
||||
}
|
||||
@@ -243,11 +387,11 @@ pub fn run() {
|
||||
let handle = handle.clone();
|
||||
move |event| {
|
||||
let urls = event.urls();
|
||||
println!("Deep link event received with {} URLs", urls.len());
|
||||
log::info!("Deep link event received with {} URLs", urls.len());
|
||||
|
||||
for url in urls {
|
||||
let url_string = url.to_string();
|
||||
println!("Deep link received: {url_string}");
|
||||
log::info!("Deep link received: {url_string}");
|
||||
|
||||
// Clone the handle for each async task
|
||||
let handle_clone = handle.clone();
|
||||
@@ -255,7 +399,7 @@ pub fn run() {
|
||||
// Handle the URL asynchronously
|
||||
tauri::async_runtime::spawn(async move {
|
||||
if let Err(e) = handle_url_open(handle_clone, url_string.clone()).await {
|
||||
eprintln!("Failed to handle deep link URL: {e}");
|
||||
log::error!("Failed to handle deep link URL: {e}");
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -265,9 +409,9 @@ pub fn run() {
|
||||
if let Some(startup_url) = startup_url {
|
||||
let handle_clone = handle.clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
println!("Processing startup URL from command line: {startup_url}");
|
||||
log::info!("Processing startup URL from command line: {startup_url}");
|
||||
if let Err(e) = handle_url_open(handle_clone, startup_url.clone()).await {
|
||||
eprintln!("Failed to handle startup URL: {e}");
|
||||
log::error!("Failed to handle startup URL: {e}");
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -287,7 +431,7 @@ pub fn run() {
|
||||
{
|
||||
let updater_guard = version_updater.lock().await;
|
||||
if let Err(e) = updater_guard.start_background_updates().await {
|
||||
eprintln!("Failed to start background updates: {e}");
|
||||
log::error!("Failed to start background updates: {e}");
|
||||
}
|
||||
}
|
||||
});
|
||||
@@ -304,28 +448,274 @@ pub fn run() {
|
||||
auto_updater::check_for_updates_with_progress(app_handle_auto_updater).await;
|
||||
});
|
||||
|
||||
// Handle any pending URLs that were received before the window was ready
|
||||
let handle_pending = handle.clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
// Wait a bit for the window to be fully ready
|
||||
tokio::time::sleep(tokio::time::Duration::from_millis(1000)).await;
|
||||
|
||||
let pending_urls = {
|
||||
let mut pending = PENDING_URLS.lock().unwrap();
|
||||
let urls = pending.clone();
|
||||
pending.clear();
|
||||
urls
|
||||
};
|
||||
|
||||
for url in pending_urls {
|
||||
log::info!("Processing pending URL: {url}");
|
||||
if let Err(e) = handle_url_open(handle_pending.clone(), url).await {
|
||||
log::error!("Failed to handle pending URL: {e}");
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Start periodic cleanup task for unused binaries
|
||||
tauri::async_runtime::spawn(async move {
|
||||
let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(43200)); // Every 12 hours
|
||||
|
||||
loop {
|
||||
interval.tick().await;
|
||||
|
||||
let registry =
|
||||
crate::downloaded_browsers_registry::DownloadedBrowsersRegistry::instance();
|
||||
if let Err(e) = registry.cleanup_unused_binaries() {
|
||||
log::error!("Periodic cleanup failed: {e}");
|
||||
} else {
|
||||
log::debug!("Periodic cleanup completed successfully");
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
let app_handle_update = app.handle().clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
println!("Starting app update check at startup...");
|
||||
let updater = app_auto_updater::AppAutoUpdater::new();
|
||||
log::info!("Starting app update check at startup...");
|
||||
let updater = app_auto_updater::AppAutoUpdater::instance();
|
||||
match updater.check_for_updates().await {
|
||||
Ok(Some(update_info)) => {
|
||||
println!(
|
||||
log::info!(
|
||||
"App update available: {} -> {}",
|
||||
update_info.current_version, update_info.new_version
|
||||
update_info.current_version,
|
||||
update_info.new_version
|
||||
);
|
||||
// Emit update available event to the frontend
|
||||
if let Err(e) = app_handle_update.emit("app-update-available", &update_info) {
|
||||
eprintln!("Failed to emit app update event: {e}");
|
||||
log::error!("Failed to emit app update event: {e}");
|
||||
} else {
|
||||
println!("App update event emitted successfully");
|
||||
log::debug!("App update event emitted successfully");
|
||||
}
|
||||
}
|
||||
Ok(None) => {
|
||||
println!("No app updates available");
|
||||
log::debug!("No app updates available");
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Failed to check for app updates: {e}");
|
||||
log::error!("Failed to check for app updates: {e}");
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Start Camoufox cleanup task
|
||||
let _app_handle_cleanup = app.handle().clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
let camoufox_manager = crate::camoufox_manager::CamoufoxManager::instance();
|
||||
let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(5));
|
||||
|
||||
loop {
|
||||
interval.tick().await;
|
||||
|
||||
match camoufox_manager.cleanup_dead_instances().await {
|
||||
Ok(_) => {
|
||||
// Cleanup completed silently
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Error during Camoufox cleanup: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Check and download GeoIP database at startup if needed
|
||||
let app_handle_geoip = app.handle().clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
// Wait a bit for the app to fully initialize
|
||||
tokio::time::sleep(tokio::time::Duration::from_secs(2)).await;
|
||||
|
||||
let geoip_downloader = crate::geoip_downloader::GeoIPDownloader::instance();
|
||||
match geoip_downloader.check_missing_geoip_database() {
|
||||
Ok(true) => {
|
||||
log::info!(
|
||||
"GeoIP database is missing for Camoufox profiles, downloading at startup..."
|
||||
);
|
||||
let geoip_downloader = GeoIPDownloader::instance();
|
||||
if let Err(e) = geoip_downloader
|
||||
.download_geoip_database(&app_handle_geoip)
|
||||
.await
|
||||
{
|
||||
log::error!("Failed to download GeoIP database at startup: {e}");
|
||||
} else {
|
||||
log::info!("GeoIP database downloaded successfully at startup");
|
||||
}
|
||||
}
|
||||
Ok(false) => {
|
||||
// No Camoufox profiles or GeoIP database already available
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Failed to check GeoIP database status at startup: {e}");
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Start proxy cleanup task for dead browser processes
|
||||
let app_handle_proxy_cleanup = app.handle().clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(30));
|
||||
|
||||
loop {
|
||||
interval.tick().await;
|
||||
|
||||
match crate::proxy_manager::PROXY_MANAGER
|
||||
.cleanup_dead_proxies(app_handle_proxy_cleanup.clone())
|
||||
.await
|
||||
{
|
||||
Ok(dead_pids) => {
|
||||
if !dead_pids.is_empty() {
|
||||
log::info!(
|
||||
"Cleaned up proxies for {} dead browser processes",
|
||||
dead_pids.len()
|
||||
);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Error during proxy cleanup: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Periodically broadcast browser running status to the frontend
|
||||
let app_handle_status = app.handle().clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
let mut interval = tokio::time::interval(tokio::time::Duration::from_millis(500));
|
||||
let mut last_running_states: std::collections::HashMap<String, bool> =
|
||||
std::collections::HashMap::new();
|
||||
|
||||
loop {
|
||||
interval.tick().await;
|
||||
|
||||
let runner = crate::browser_runner::BrowserRunner::instance();
|
||||
// If listing profiles fails, skip this tick
|
||||
let profiles = match runner.profile_manager.list_profiles() {
|
||||
Ok(p) => p,
|
||||
Err(e) => {
|
||||
log::warn!("Failed to list profiles in status checker: {e}");
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
for profile in profiles {
|
||||
// Check browser status and track changes
|
||||
match runner
|
||||
.check_browser_status(app_handle_status.clone(), &profile)
|
||||
.await
|
||||
{
|
||||
Ok(is_running) => {
|
||||
let profile_id = profile.id.to_string();
|
||||
let last_state = last_running_states
|
||||
.get(&profile_id)
|
||||
.copied()
|
||||
.unwrap_or(false);
|
||||
|
||||
// Only emit event if state actually changed
|
||||
if last_state != is_running {
|
||||
log::debug!(
|
||||
"Status checker detected change for profile {}: {} -> {}",
|
||||
profile.name,
|
||||
last_state,
|
||||
is_running
|
||||
);
|
||||
|
||||
#[derive(serde::Serialize)]
|
||||
struct RunningChangedPayload {
|
||||
id: String,
|
||||
is_running: bool,
|
||||
}
|
||||
|
||||
let payload = RunningChangedPayload {
|
||||
id: profile_id.clone(),
|
||||
is_running,
|
||||
};
|
||||
|
||||
if let Err(e) = app_handle_status.emit("profile-running-changed", &payload) {
|
||||
log::warn!("Failed to emit profile running changed event: {e}");
|
||||
} else {
|
||||
log::debug!(
|
||||
"Status checker emitted profile-running-changed event for {}: running={}",
|
||||
profile.name,
|
||||
is_running
|
||||
);
|
||||
}
|
||||
|
||||
last_running_states.insert(profile_id, is_running);
|
||||
} else {
|
||||
// Update the state even if unchanged to ensure we have it tracked
|
||||
last_running_states.insert(profile_id, is_running);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
log::warn!("Status check failed for profile {}: {}", profile.name, e);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Nodecar warm-up is now triggered from the frontend to allow UI blocking overlay
|
||||
|
||||
// Start API server if enabled in settings
|
||||
let app_handle_api = app.handle().clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
match crate::settings_manager::get_app_settings(app_handle_api.clone()).await {
|
||||
Ok(settings) => {
|
||||
if settings.api_enabled {
|
||||
log::info!("API is enabled in settings, starting API server...");
|
||||
match crate::api_server::start_api_server_internal(settings.api_port, &app_handle_api)
|
||||
.await
|
||||
{
|
||||
Ok(port) => {
|
||||
log::info!("API server started successfully on port {port}");
|
||||
// Emit success toast to frontend
|
||||
if let Err(e) = app_handle_api.emit(
|
||||
"show-toast",
|
||||
crate::api_server::ToastPayload {
|
||||
message: "API server started successfully".to_string(),
|
||||
variant: "success".to_string(),
|
||||
title: "Local API Started".to_string(),
|
||||
description: Some(format!("API server running on port {port}")),
|
||||
},
|
||||
) {
|
||||
log::error!("Failed to emit API start toast: {e}");
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Failed to start API server at startup: {e}");
|
||||
// Emit error toast to frontend
|
||||
if let Err(toast_err) = app_handle_api.emit(
|
||||
"show-toast",
|
||||
crate::api_server::ToastPayload {
|
||||
message: "Failed to start API server".to_string(),
|
||||
variant: "error".to_string(),
|
||||
title: "Failed to Start Local API".to_string(),
|
||||
description: Some(format!("Error: {e}")),
|
||||
},
|
||||
) {
|
||||
log::error!("Failed to emit API error toast: {toast_err}");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Failed to load app settings for API startup: {e}");
|
||||
}
|
||||
}
|
||||
});
|
||||
@@ -345,9 +735,11 @@ pub fn run() {
|
||||
fetch_browser_versions_cached_first,
|
||||
fetch_browser_versions_with_count_cached_first,
|
||||
get_downloaded_browser_versions,
|
||||
get_all_tags,
|
||||
get_browser_release_types,
|
||||
update_profile_proxy,
|
||||
update_profile_version,
|
||||
update_profile_tags,
|
||||
update_profile_note,
|
||||
check_browser_status,
|
||||
kill_browser_profile,
|
||||
rename_profile,
|
||||
@@ -360,20 +752,42 @@ pub fn run() {
|
||||
is_default_browser,
|
||||
open_url_with_profile,
|
||||
set_as_default_browser,
|
||||
smart_open_url,
|
||||
check_and_handle_startup_url,
|
||||
trigger_manual_version_update,
|
||||
get_version_update_status,
|
||||
check_for_browser_updates,
|
||||
is_browser_disabled_for_update,
|
||||
dismiss_update_notification,
|
||||
complete_browser_update_with_auto_update,
|
||||
check_for_app_updates,
|
||||
check_for_app_updates_manual,
|
||||
download_and_install_app_update,
|
||||
get_system_theme,
|
||||
detect_existing_profiles,
|
||||
import_browser_profile,
|
||||
check_missing_binaries,
|
||||
check_missing_geoip_database,
|
||||
ensure_all_binaries_exist,
|
||||
create_stored_proxy,
|
||||
get_stored_proxies,
|
||||
update_stored_proxy,
|
||||
delete_stored_proxy,
|
||||
check_proxy_validity,
|
||||
get_cached_proxy_check,
|
||||
update_camoufox_config,
|
||||
get_profile_groups,
|
||||
get_groups_with_profile_counts,
|
||||
create_profile_group,
|
||||
update_profile_group,
|
||||
delete_profile_group,
|
||||
assign_profiles_to_group,
|
||||
delete_selected_profiles,
|
||||
is_geoip_database_available,
|
||||
download_geoip_database,
|
||||
warm_up_nodecar,
|
||||
start_api_server,
|
||||
stop_api_server,
|
||||
get_api_server_status,
|
||||
get_all_traffic_snapshots,
|
||||
clear_all_traffic_stats,
|
||||
get_traffic_stats_for_period
|
||||
])
|
||||
.run(tauri::generate_context!())
|
||||
.expect("error while running tauri application");
|
||||
|
||||
@@ -2,5 +2,5 @@
|
||||
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]
|
||||
|
||||
fn main() {
|
||||
donutbrowser::run()
|
||||
donutbrowser_lib::run()
|
||||
}
|
||||
|
||||
@@ -0,0 +1,888 @@
|
||||
use crate::browser::{create_browser, BrowserType};
|
||||
use crate::profile::BrowserProfile;
|
||||
use std::path::Path;
|
||||
use std::process::Command;
|
||||
|
||||
// Platform-specific modules
|
||||
#[cfg(target_os = "macos")]
|
||||
pub mod macos {
|
||||
use super::*;
|
||||
use sysinfo::{Pid, System};
|
||||
|
||||
pub async fn launch_browser_process(
|
||||
executable_path: &std::path::Path,
|
||||
args: &[String],
|
||||
) -> Result<std::process::Child, Box<dyn std::error::Error + Send + Sync>> {
|
||||
log::info!("Launching browser on macOS: {executable_path:?} with args: {args:?}");
|
||||
// If the executable is inside an app bundle, launch via Launch Services so
|
||||
// macOS recognizes the real application for privacy permissions (e.g. Screen Recording).
|
||||
// This ensures TCC prompts are attributed to the browser app, not our launcher.
|
||||
let mut current = Some(executable_path);
|
||||
let mut app_bundle: Option<std::path::PathBuf> = None;
|
||||
while let Some(path) = current {
|
||||
if let Some(file_name) = path.file_name().and_then(|s| s.to_str()) {
|
||||
if file_name.ends_with(".app") {
|
||||
app_bundle = Some(path.to_path_buf());
|
||||
break;
|
||||
}
|
||||
}
|
||||
current = path.parent();
|
||||
}
|
||||
|
||||
if let Some(app_path) = app_bundle {
|
||||
// Use `open -n -a <App>.app --args ...` to launch the app bundle.
|
||||
// Note: The returned child PID will belong to `open`, not the browser.
|
||||
// The caller should resolve the actual browser PID after launch.
|
||||
let mut cmd = Command::new("open");
|
||||
cmd.arg("-n");
|
||||
cmd.arg("-a");
|
||||
cmd.arg(app_path);
|
||||
cmd.arg("--args");
|
||||
for a in args {
|
||||
cmd.arg(a);
|
||||
}
|
||||
Ok(cmd.spawn()?)
|
||||
} else {
|
||||
// Fallback: direct spawn if this is not an app bundle
|
||||
Ok(Command::new(executable_path).args(args).spawn()?)
|
||||
}
|
||||
}
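// Illustrative sketch, not called anywhere in the original source: after an `open`-based
// launch the spawned child is `open` itself, so a caller could resolve the real browser
// PID by scanning for a process whose command line references the profile directory.
// The module's own find_processes_by_profile_path below does this more thoroughly.
#[allow(dead_code)]
pub fn sketch_resolve_browser_pid(profile_dir: &str) -> Option<u32> {
  let system = System::new_all();
  system
    .processes()
    .iter()
    .find(|(_, process)| {
      process.cmd().iter().any(|arg| {
        arg.to_str().map(|s| s.contains(profile_dir)).unwrap_or(false)
      })
    })
    .map(|(pid, _)| pid.as_u32())
}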
|
||||
|
||||
pub async fn open_url_in_existing_browser_firefox_like(
|
||||
profile: &BrowserProfile,
|
||||
url: &str,
|
||||
browser_type: BrowserType,
|
||||
browser_dir: &Path,
|
||||
profiles_dir: &Path,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let pid = profile.process_id.unwrap();
|
||||
let profile_data_path = profile.get_profile_data_path(profiles_dir);
|
||||
|
||||
// First try: Use Firefox remote command
|
||||
log::info!("Trying Firefox remote command for PID: {pid}");
|
||||
let browser = create_browser(browser_type);
|
||||
if let Ok(executable_path) = browser.get_executable_path(browser_dir) {
|
||||
let remote_args = vec![
|
||||
"-profile".to_string(),
|
||||
profile_data_path.to_string_lossy().to_string(),
|
||||
"-new-tab".to_string(),
|
||||
url.to_string(),
|
||||
];
|
||||
|
||||
let remote_output = Command::new(executable_path).args(&remote_args).output();
|
||||
|
||||
match remote_output {
|
||||
Ok(output) if output.status.success() => {
|
||||
log::info!("Firefox remote command succeeded");
|
||||
return Ok(());
|
||||
}
|
||||
Ok(output) => {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
log::info!(
|
||||
"Firefox remote command failed with stderr: {stderr}, trying AppleScript fallback"
|
||||
);
|
||||
}
|
||||
Err(e) => {
|
||||
log::info!("Firefox remote command error: {e}, trying AppleScript fallback");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: Use AppleScript
|
||||
// Escape backslashes first so the quote escaping added next is not double-escaped
let escaped_url = url
.replace("\\", "\\\\")
.replace("\"", "\\\"")
.replace("'", "\\'");
|
||||
|
||||
let script = format!(
|
||||
r#"
|
||||
try
|
||||
tell application "System Events"
|
||||
-- Find the exact process by PID
|
||||
set targetProcess to (first application process whose unix id is {pid})
|
||||
|
||||
-- Verify the process exists
|
||||
if not (exists targetProcess) then
|
||||
error "No process found with PID {pid}"
|
||||
end if
|
||||
|
||||
-- Get the process name for verification
|
||||
set processName to name of targetProcess
|
||||
|
||||
-- Bring the process to the front first
|
||||
set frontmost of targetProcess to true
|
||||
delay 1.0
|
||||
|
||||
-- Check if the process has any visible windows
|
||||
set windowList to windows of targetProcess
|
||||
set hasVisibleWindow to false
|
||||
repeat with w in windowList
|
||||
if visible of w is true then
|
||||
set hasVisibleWindow to true
|
||||
exit repeat
|
||||
end if
|
||||
end repeat
|
||||
|
||||
if not hasVisibleWindow then
|
||||
-- No visible windows, create a new one
|
||||
tell targetProcess
|
||||
keystroke "n" using command down
|
||||
delay 2.0
|
||||
end tell
|
||||
end if
|
||||
|
||||
-- Ensure the process is frontmost again
|
||||
set frontmost of targetProcess to true
|
||||
delay 0.5
|
||||
|
||||
-- Focus on the address bar and open URL
|
||||
tell targetProcess
|
||||
-- Open a new tab
|
||||
keystroke "t" using command down
|
||||
delay 1.5
|
||||
|
||||
-- Focus address bar (Cmd+L)
|
||||
keystroke "l" using command down
|
||||
delay 0.5
|
||||
|
||||
-- Type the URL
|
||||
keystroke "{escaped_url}"
|
||||
delay 0.5
|
||||
|
||||
-- Press Enter to navigate
|
||||
keystroke return
|
||||
end tell
|
||||
|
||||
return "Successfully opened URL in " & processName & " (PID: {pid})"
|
||||
end tell
|
||||
on error errMsg number errNum
|
||||
return "AppleScript failed: " & errMsg & " (Error " & errNum & ")"
|
||||
end try
|
||||
"#
|
||||
);
|
||||
|
||||
log::info!("Executing AppleScript fallback for Firefox-based browser (PID: {pid})...");
|
||||
let output = Command::new("osascript").args(["-e", &script]).output()?;
|
||||
|
||||
if !output.status.success() {
|
||||
let error_msg = String::from_utf8_lossy(&output.stderr);
|
||||
log::info!("AppleScript failed: {error_msg}");
|
||||
return Err(
|
||||
format!(
|
||||
"Both Firefox remote command and AppleScript failed. AppleScript error: {error_msg}"
|
||||
)
|
||||
.into(),
|
||||
);
|
||||
} else {
|
||||
log::info!("AppleScript succeeded");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
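// Sketch of a shared helper, not present in the original source: both AppleScript
// fallbacks in this module duplicate the same URL escaping, and the order matters,
// backslashes must be escaped before the quote escapes that introduce new backslashes.
#[allow(dead_code)]
fn sketch_escape_for_applescript(url: &str) -> String {
  url
    .replace('\\', "\\\\")
    .replace('"', "\\\"")
    .replace('\'', "\\'")
}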
|
||||
|
||||
pub async fn kill_browser_process_impl(
|
||||
pid: u32,
|
||||
profile_data_path: Option<&str>,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
log::info!("Attempting to kill browser process with PID: {pid}");
|
||||
|
||||
let mut pids_to_kill = vec![pid];
|
||||
|
||||
let descendants = get_all_descendant_pids(pid).await;
|
||||
pids_to_kill.extend(descendants);
|
||||
|
||||
if let Some(profile_path) = profile_data_path {
|
||||
let additional_pids = find_processes_by_profile_path(profile_path).await;
|
||||
for p in additional_pids {
|
||||
if !pids_to_kill.contains(&p) {
|
||||
log::info!("Found additional process {} using profile path", p);
|
||||
pids_to_kill.push(p);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
log::info!("Total processes to kill: {:?}", pids_to_kill);
|
||||
|
||||
for &p in &pids_to_kill {
|
||||
log::info!("Sending SIGKILL to PID: {p}");
|
||||
let _ = Command::new("kill")
|
||||
.args(["-KILL", &p.to_string()])
|
||||
.output();
|
||||
}
|
||||
|
||||
let pid_str = pid.to_string();
|
||||
|
||||
let _ = Command::new("pkill")
|
||||
.args(["-KILL", "-P", &pid_str])
|
||||
.output();
|
||||
|
||||
let _ = Command::new("pkill")
|
||||
.args(["-KILL", "-g", &pid_str])
|
||||
.output();
|
||||
|
||||
for &p in &pids_to_kill {
|
||||
let system = System::new_all();
|
||||
if system.process(Pid::from(p as usize)).is_some() {
|
||||
log::info!("Process {p} still running, retrying kill");
|
||||
let _ = Command::new("kill")
|
||||
.args(["-KILL", &p.to_string()])
|
||||
.output();
|
||||
}
|
||||
}
|
||||
|
||||
tokio::time::sleep(tokio::time::Duration::from_millis(500)).await;
|
||||
|
||||
let system = System::new_all();
|
||||
let mut still_running = Vec::new();
|
||||
for &p in &pids_to_kill {
|
||||
if system.process(Pid::from(p as usize)).is_some() {
|
||||
still_running.push(p);
|
||||
}
|
||||
}
|
||||
|
||||
if !still_running.is_empty() {
|
||||
log::info!(
|
||||
"Processes {:?} still running, trying final termination",
|
||||
still_running
|
||||
);
|
||||
|
||||
for p in &still_running {
|
||||
let _ = Command::new("/bin/kill")
|
||||
.args(["-KILL", &p.to_string()])
|
||||
.output();
|
||||
}
|
||||
|
||||
tokio::time::sleep(tokio::time::Duration::from_millis(1000)).await;
|
||||
|
||||
let system = System::new_all();
|
||||
let mut final_still_running = Vec::new();
|
||||
for &p in &pids_to_kill {
|
||||
if system.process(Pid::from(p as usize)).is_some() {
|
||||
final_still_running.push(p);
|
||||
}
|
||||
}
|
||||
|
||||
if !final_still_running.is_empty() {
|
||||
log::error!(
|
||||
"ERROR: Processes {:?} could not be terminated despite aggressive attempts",
|
||||
final_still_running
|
||||
);
|
||||
return Err(
|
||||
format!(
|
||||
"Failed to terminate browser processes {:?} - still running",
|
||||
final_still_running
|
||||
)
|
||||
.into(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
log::info!("Browser termination completed for PID: {pid}");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn find_processes_by_profile_path(profile_path: &str) -> Vec<u32> {
|
||||
use sysinfo::System;
|
||||
|
||||
let mut pids = Vec::new();
|
||||
let system = System::new_all();
|
||||
|
||||
for (pid, process) in system.processes() {
|
||||
let cmd = process.cmd();
|
||||
if cmd.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if any command line argument contains the profile path
|
||||
let has_profile = cmd.iter().any(|arg| {
|
||||
if let Some(arg_str) = arg.to_str() {
|
||||
arg_str.contains(profile_path)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
});
|
||||
|
||||
if has_profile {
|
||||
pids.push(pid.as_u32());
|
||||
}
|
||||
}
|
||||
|
||||
pids
|
||||
}
|
||||
|
||||
// Recursively find all descendant processes
|
||||
async fn get_all_descendant_pids(parent_pid: u32) -> Vec<u32> {
|
||||
use sysinfo::System;
|
||||
|
||||
let system = System::new_all();
|
||||
let mut descendants = Vec::new();
|
||||
let mut to_check = vec![parent_pid];
|
||||
let mut checked = std::collections::HashSet::new();
|
||||
|
||||
while let Some(current_pid) = to_check.pop() {
|
||||
if checked.contains(¤t_pid) {
|
||||
continue;
|
||||
}
|
||||
checked.insert(current_pid);
|
||||
|
||||
// Find direct children of current_pid
|
||||
for (pid, process) in system.processes() {
|
||||
let pid_u32 = pid.as_u32();
|
||||
if let Some(parent) = process.parent() {
|
||||
if parent.as_u32() == current_pid && !checked.contains(&pid_u32) {
|
||||
descendants.push(pid_u32);
|
||||
to_check.push(pid_u32);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
descendants
|
||||
}
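// A minimal usage sketch (hypothetical caller, assuming a Tokio runtime): combine the
// descendant walk above with the profile-path scan to build the full kill list before
// any signals are sent.
#[allow(dead_code)]
async fn collect_pids_for_profile(root_pid: u32, profile_path: &str) -> Vec<u32> {
  let mut pids = vec![root_pid];
  pids.extend(get_all_descendant_pids(root_pid).await);
  for p in find_processes_by_profile_path(profile_path).await {
    if !pids.contains(&p) {
      pids.push(p);
    }
  }
  pids
}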
|
||||
|
||||
pub async fn open_url_in_existing_browser_chromium(
|
||||
profile: &BrowserProfile,
|
||||
url: &str,
|
||||
browser_type: BrowserType,
|
||||
browser_dir: &Path,
|
||||
_profiles_dir: &Path,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let pid = profile
.process_id
.ok_or("Profile has no running browser process")?;
|
||||
|
||||
// First, try using the browser's built-in URL opening capability
|
||||
log::info!("Trying Chromium URL opening for PID: {pid}");
|
||||
|
||||
let browser = create_browser(browser_type);
|
||||
if let Ok(executable_path) = browser.get_executable_path(browser_dir) {
|
||||
let profile_data_path = profile.get_profile_data_path(_profiles_dir);
|
||||
let remote_output = Command::new(executable_path)
|
||||
.args([
|
||||
&format!("--user-data-dir={}", profile_data_path.to_string_lossy()),
|
||||
url,
|
||||
])
|
||||
.output();
|
||||
|
||||
match remote_output {
|
||||
Ok(output) if output.status.success() => {
|
||||
log::info!("Chromium URL opening succeeded");
|
||||
return Ok(());
|
||||
}
|
||||
Ok(output) => {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
log::info!("Chromium URL opening failed: {stderr}, trying AppleScript");
|
||||
}
|
||||
Err(e) => {
|
||||
log::info!("Chromium URL opening error: {e}, trying AppleScript");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback to AppleScript
|
||||
// Escape backslashes before double quotes so the AppleScript string literal stays valid;
// these are the only two characters that need escaping inside a double-quoted AppleScript string.
let escaped_url = url.replace('\\', "\\\\").replace('"', "\\\"");
|
||||
|
||||
let script = format!(
|
||||
r#"
|
||||
try
|
||||
tell application "System Events"
|
||||
-- Find the exact process by PID
|
||||
set targetProcess to (first application process whose unix id is {pid})
|
||||
|
||||
-- Verify the process exists
|
||||
if not (exists targetProcess) then
|
||||
error "No process found with PID {pid}"
|
||||
end if
|
||||
|
||||
-- Get the process name for verification
|
||||
set processName to name of targetProcess
|
||||
|
||||
-- Bring the process to the front first
|
||||
set frontmost of targetProcess to true
|
||||
delay 1.0
|
||||
|
||||
-- Check if the process has any visible windows
|
||||
set windowList to windows of targetProcess
|
||||
set hasVisibleWindow to false
|
||||
repeat with w in windowList
|
||||
if visible of w is true then
|
||||
set hasVisibleWindow to true
|
||||
exit repeat
|
||||
end if
|
||||
end repeat
|
||||
|
||||
if not hasVisibleWindow then
|
||||
-- No visible windows, create a new one
|
||||
tell targetProcess
|
||||
keystroke "n" using command down
|
||||
delay 2.0
|
||||
end tell
|
||||
end if
|
||||
|
||||
-- Ensure the process is frontmost again
|
||||
set frontmost of targetProcess to true
|
||||
delay 0.5
|
||||
|
||||
-- Focus on the address bar and open URL
|
||||
tell targetProcess
|
||||
-- Open a new tab
|
||||
keystroke "t" using command down
|
||||
delay 1.5
|
||||
|
||||
-- Focus address bar (Cmd+L)
|
||||
keystroke "l" using command down
|
||||
delay 0.5
|
||||
|
||||
-- Type the URL
|
||||
keystroke "{escaped_url}"
|
||||
delay 0.5
|
||||
|
||||
-- Press Enter to navigate
|
||||
keystroke return
|
||||
end tell
|
||||
|
||||
return "Successfully opened URL in " & processName & " (PID: {pid})"
|
||||
end tell
|
||||
on error errMsg number errNum
|
||||
return "AppleScript failed: " & errMsg & " (Error " & errNum & ")"
|
||||
end try
|
||||
"#
|
||||
);
|
||||
|
||||
log::info!("Executing AppleScript for Chromium-based browser (PID: {pid})...");
|
||||
let output = Command::new("osascript").args(["-e", &script]).output()?;
|
||||
|
||||
if !output.status.success() {
|
||||
let error_msg = String::from_utf8_lossy(&output.stderr);
|
||||
log::info!("AppleScript failed: {error_msg}");
|
||||
return Err(
|
||||
format!("Failed to open URL in existing Chromium-based browser: {error_msg}").into(),
|
||||
);
|
||||
} else {
|
||||
log::info!("AppleScript succeeded");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
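// A small illustration of the escaping rule used above (hypothetical helper): backslashes are
// escaped before double quotes so the AppleScript string literal cannot be broken by the URL.
#[allow(dead_code)]
fn escape_for_applescript(s: &str) -> String {
  s.replace('\\', "\\\\").replace('"', "\\\"")
}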
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
pub mod windows {
|
||||
use super::*;
|
||||
|
||||
pub async fn launch_browser_process(
|
||||
executable_path: &std::path::Path,
|
||||
args: &[String],
|
||||
) -> Result<std::process::Child, Box<dyn std::error::Error + Send + Sync>> {
|
||||
log::info!(
|
||||
"Launching browser on Windows: {:?} with args: {:?}",
|
||||
executable_path,
|
||||
args
|
||||
);
|
||||
|
||||
// Check if the executable exists
|
||||
if !executable_path.exists() {
|
||||
return Err(format!("Browser executable not found: {:?}", executable_path).into());
|
||||
}
|
||||
|
||||
// On Windows, set up the command with proper working directory
|
||||
let mut cmd = Command::new(executable_path);
|
||||
cmd.args(args);
|
||||
|
||||
// Set working directory to the executable's directory for better compatibility
|
||||
if let Some(parent_dir) = executable_path.parent() {
|
||||
cmd.current_dir(parent_dir);
|
||||
}
|
||||
|
||||
// For Windows 7 compatibility, set some environment variables
|
||||
cmd.env(
|
||||
"PROCESSOR_ARCHITECTURE",
|
||||
std::env::var("PROCESSOR_ARCHITECTURE").unwrap_or_else(|_| "x86".to_string()),
|
||||
);
|
||||
|
||||
// Ensure proper PATH for DLL loading
|
||||
if let Some(exe_dir) = executable_path.parent() {
|
||||
let mut path_var = std::env::var("PATH").unwrap_or_default();
|
||||
if !path_var.is_empty() {
|
||||
path_var = format!("{};{}", exe_dir.display(), path_var);
|
||||
} else {
|
||||
path_var = exe_dir.display().to_string();
|
||||
}
|
||||
cmd.env("PATH", path_var);
|
||||
}
|
||||
|
||||
// Launch the process
|
||||
let child = cmd
|
||||
.spawn()
|
||||
.map_err(|e| format!("Failed to launch browser process: {}", e))?;
|
||||
|
||||
log::info!(
|
||||
"Successfully launched browser process with PID: {}",
|
||||
child.id()
|
||||
);
|
||||
Ok(child)
|
||||
}
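// A minimal caller sketch (hypothetical path and arguments, assuming a Tokio runtime):
// launch the browser with a dedicated profile directory and keep the PID for later cleanup.
#[allow(dead_code)]
async fn example_launch(
  exe: &std::path::Path,
) -> Result<u32, Box<dyn std::error::Error + Send + Sync>> {
  let args = vec![r"--user-data-dir=C:\profiles\example".to_string()];
  let child = launch_browser_process(exe, &args).await?;
  Ok(child.id())
}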
|
||||
|
||||
pub async fn open_url_in_existing_browser_firefox_like(
|
||||
profile: &BrowserProfile,
|
||||
url: &str,
|
||||
browser_type: BrowserType,
|
||||
browser_dir: &Path,
|
||||
profiles_dir: &Path,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let browser = create_browser(browser_type);
|
||||
let executable_path = browser
|
||||
.get_executable_path(browser_dir)
|
||||
.map_err(|e| format!("Failed to get executable path: {}", e))?;
|
||||
|
||||
let profile_data_path = profile.get_profile_data_path(profiles_dir);
|
||||
|
||||
// For Windows, try using the -requestPending approach for Firefox
|
||||
let mut cmd = Command::new(executable_path);
|
||||
cmd.args([
|
||||
"-profile",
|
||||
&profile_data_path.to_string_lossy(),
|
||||
"-requestPending",
|
||||
"-new-tab",
|
||||
url,
|
||||
]);
|
||||
|
||||
// Set working directory
|
||||
if let Some(parent_dir) = browser_dir
|
||||
.parent()
|
||||
.or_else(|| browser_dir.ancestors().nth(1))
|
||||
{
|
||||
cmd.current_dir(parent_dir);
|
||||
}
|
||||
|
||||
let output = cmd.output()?;
|
||||
|
||||
if !output.status.success() {
|
||||
// Fallback: try without -requestPending
|
||||
let executable_path = browser
|
||||
.get_executable_path(browser_dir)
|
||||
.map_err(|e| format!("Failed to get executable path: {}", e))?;
|
||||
let mut fallback_cmd = Command::new(executable_path);
|
||||
let profile_data_path = profile.get_profile_data_path(profiles_dir);
|
||||
fallback_cmd.args([
|
||||
"-profile",
|
||||
&profile_data_path.to_string_lossy(),
|
||||
"-new-tab",
|
||||
url,
|
||||
]);
|
||||
|
||||
if let Some(parent_dir) = browser_dir
|
||||
.parent()
|
||||
.or_else(|| browser_dir.ancestors().nth(1))
|
||||
{
|
||||
fallback_cmd.current_dir(parent_dir);
|
||||
}
|
||||
|
||||
let fallback_output = fallback_cmd.output()?;
|
||||
|
||||
if !fallback_output.status.success() {
|
||||
return Err(
|
||||
format!(
|
||||
"Failed to open URL in existing browser: {}",
|
||||
String::from_utf8_lossy(&fallback_output.stderr)
|
||||
)
|
||||
.into(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn open_url_in_existing_browser_chromium(
|
||||
profile: &BrowserProfile,
|
||||
url: &str,
|
||||
browser_type: BrowserType,
|
||||
browser_dir: &Path,
|
||||
profiles_dir: &Path,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let browser = create_browser(browser_type.clone());
|
||||
let executable_path = browser
|
||||
.get_executable_path(browser_dir)
|
||||
.map_err(|e| format!("Failed to get executable path: {}", e))?;
|
||||
|
||||
let mut cmd = Command::new(&executable_path);
|
||||
cmd.args([
|
||||
&format!(
|
||||
"--user-data-dir={}",
|
||||
profile
|
||||
.get_profile_data_path(profiles_dir)
|
||||
.to_string_lossy()
|
||||
),
|
||||
"--new-window",
|
||||
url,
|
||||
]);
|
||||
|
||||
// Set working directory
|
||||
if let Some(parent_dir) = browser_dir
|
||||
.parent()
|
||||
.or_else(|| browser_dir.ancestors().nth(1))
|
||||
{
|
||||
cmd.current_dir(parent_dir);
|
||||
}
|
||||
|
||||
// Do not call output() to avoid blocking the UI thread while the browser processes the request.
|
||||
// Spawn the helper process and return immediately. This applies to Chromium-based browsers
|
||||
// including Brave to prevent UI freezes observed in production.
|
||||
let _child = cmd.spawn()?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn kill_browser_process_impl(
|
||||
pid: u32,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
// First try using sysinfo (cross-platform approach)
|
||||
use sysinfo::{Pid, System};
|
||||
let system = System::new_all();
|
||||
if let Some(process) = system.process(Pid::from(pid as usize)) {
|
||||
if process.kill() {
|
||||
log::info!("Successfully killed browser process with PID: {pid}");
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback to Windows-specific process termination
|
||||
use std::process::Command;
|
||||
|
||||
// Try taskkill command as fallback
|
||||
let output = Command::new("taskkill")
|
||||
.args(["/F", "/PID", &pid.to_string()])
|
||||
.output();
|
||||
|
||||
match output {
|
||||
Ok(result) => {
|
||||
if result.status.success() {
|
||||
log::info!("Successfully killed browser process with PID: {pid} using taskkill");
|
||||
Ok(())
|
||||
} else {
|
||||
Err(
|
||||
format!(
|
||||
"Failed to kill process {} with taskkill: {}",
|
||||
pid,
|
||||
String::from_utf8_lossy(&result.stderr)
|
||||
)
|
||||
.into(),
|
||||
)
|
||||
}
|
||||
}
|
||||
Err(e) => Err(format!("Failed to execute taskkill for process {}: {}", pid, e).into()),
|
||||
}
|
||||
}
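// A compact usage sketch (hypothetical PID): sysinfo is tried first, then taskkill /F as the
// fallback; the caller only sees the combined result.
#[allow(dead_code)]
async fn example_force_kill(pid: u32) {
  if let Err(e) = kill_browser_process_impl(pid).await {
    log::warn!("could not terminate process {pid}: {e}");
  }
}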
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
pub mod linux {
|
||||
use super::*;
|
||||
|
||||
pub async fn launch_browser_process(
|
||||
executable_path: &std::path::Path,
|
||||
args: &[String],
|
||||
) -> Result<std::process::Child, Box<dyn std::error::Error + Send + Sync>> {
|
||||
log::info!(
|
||||
"Launching browser on Linux: {:?} with args: {:?}",
|
||||
executable_path,
|
||||
args
|
||||
);
|
||||
|
||||
// Check if the executable exists and is executable
|
||||
if !executable_path.exists() {
|
||||
return Err(format!("Browser executable not found: {:?}", executable_path).into());
|
||||
}
|
||||
|
||||
// Check if we can read the executable to detect architecture issues early
|
||||
if let Err(e) = std::fs::File::open(executable_path) {
|
||||
return Err(format!("Cannot access browser executable: {}", e).into());
|
||||
}
|
||||
|
||||
// Ensure the executable has proper permissions
|
||||
if let Err(e) = std::fs::metadata(executable_path) {
|
||||
return Err(format!("Cannot get executable metadata: {}", e).into());
|
||||
}
|
||||
|
||||
// On Linux, we might need to set LD_LIBRARY_PATH for some browsers
|
||||
let mut cmd = Command::new(executable_path);
|
||||
cmd.args(args);
|
||||
|
||||
// For Firefox-based browsers, ensure library path includes the installation directory
|
||||
if let Some(install_dir) = executable_path.parent() {
|
||||
let mut ld_library_path = Vec::new();
|
||||
|
||||
// Add multiple potential library directories
|
||||
let lib_dirs = [
|
||||
install_dir.join("lib"),
|
||||
install_dir.join("../lib"), // Parent directory lib
|
||||
install_dir.join("../../lib"), // Grandparent directory lib
|
||||
install_dir.to_path_buf(), // Installation directory itself
|
||||
];
|
||||
|
||||
for lib_dir in &lib_dirs {
|
||||
if lib_dir.exists() {
|
||||
ld_library_path.push(lib_dir.to_string_lossy().to_string());
|
||||
}
|
||||
}
|
||||
|
||||
// For Firefox specifically, add common system library paths that might be needed
|
||||
let firefox_lib_paths = [
|
||||
"/usr/lib/firefox",
|
||||
"/usr/lib/x86_64-linux-gnu",
|
||||
"/usr/lib/aarch64-linux-gnu",
|
||||
"/lib/x86_64-linux-gnu",
|
||||
"/lib/aarch64-linux-gnu",
|
||||
];
|
||||
|
||||
for lib_path in &firefox_lib_paths {
|
||||
let path = std::path::Path::new(lib_path);
|
||||
if path.exists() {
|
||||
ld_library_path.push(lib_path.to_string());
|
||||
}
|
||||
}
|
||||
|
||||
// Preserve existing LD_LIBRARY_PATH
|
||||
if let Ok(existing_path) = std::env::var("LD_LIBRARY_PATH") {
|
||||
ld_library_path.push(existing_path);
|
||||
}
|
||||
|
||||
// Set the combined LD_LIBRARY_PATH
|
||||
if !ld_library_path.is_empty() {
|
||||
cmd.env("LD_LIBRARY_PATH", ld_library_path.join(":"));
|
||||
log::info!("Set LD_LIBRARY_PATH to: {}", ld_library_path.join(":"));
|
||||
}
|
||||
}
|
||||
|
||||
// Additional Linux-specific environment variables for better compatibility
|
||||
cmd.env(
|
||||
"DISPLAY",
|
||||
std::env::var("DISPLAY").unwrap_or(":0".to_string()),
|
||||
);
|
||||
|
||||
// Set MOZ_ENABLE_WAYLAND for better Wayland support
|
||||
if std::env::var("WAYLAND_DISPLAY").is_ok() {
|
||||
cmd.env("MOZ_ENABLE_WAYLAND", "1");
|
||||
}
|
||||
|
||||
// Warn when neither X11 nor Wayland display is available (likely a headless environment)
if std::env::var("DISPLAY").is_err() && std::env::var("WAYLAND_DISPLAY").is_err() {
  log::info!("No display detected, browser may fail to start");
}
|
||||
|
||||
// Attempt to spawn with better error handling for architecture issues
|
||||
match cmd.spawn() {
|
||||
Ok(child) => Ok(child),
|
||||
Err(e) => {
|
||||
// Inspect the spawn error message to report architecture mismatches and missing
// dependencies with actionable hints (the io::ErrorKind alone is not reliable for these cases)
let error_msg = e.to_string();
|
||||
if error_msg.contains("Exec format error") {
|
||||
return Err(format!(
|
||||
"Architecture mismatch: The browser executable is not compatible with your system architecture ({}). \
|
||||
This typically happens when trying to run x86_64 binaries on ARM64 systems. \
|
||||
Please use a browser that supports your architecture, such as Zen Browser or Brave. \
|
||||
Executable: {:?}",
|
||||
std::env::consts::ARCH,
|
||||
executable_path
|
||||
).into());
|
||||
} else if error_msg.contains("No such file or directory") {
|
||||
return Err(format!(
|
||||
"Executable or required library not found. This might be due to missing dependencies or incorrect executable path. \
|
||||
Try installing missing libraries or verify the browser installation. \
|
||||
Executable: {:?}, Error: {}",
|
||||
executable_path, error_msg
|
||||
).into());
|
||||
}
|
||||
Err(format!("Failed to launch browser: {}", e).into())
|
||||
}
|
||||
}
|
||||
}
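// A compact sketch of the LD_LIBRARY_PATH composition performed above (hypothetical inputs):
// candidate directories are kept only if they exist, the inherited value is appended last,
// and everything is joined with ':'.
#[allow(dead_code)]
fn compose_ld_library_path(candidates: &[std::path::PathBuf], inherited: Option<String>) -> String {
  let mut parts: Vec<String> = candidates
    .iter()
    .filter(|p| p.exists())
    .map(|p| p.to_string_lossy().to_string())
    .collect();
  if let Some(existing) = inherited {
    parts.push(existing);
  }
  parts.join(":")
}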
|
||||
|
||||
pub async fn open_url_in_existing_browser_firefox_like(
|
||||
profile: &BrowserProfile,
|
||||
url: &str,
|
||||
browser_type: BrowserType,
|
||||
browser_dir: &Path,
|
||||
profiles_dir: &Path,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let browser = create_browser(browser_type);
|
||||
let executable_path = browser
|
||||
.get_executable_path(browser_dir)
|
||||
.map_err(|e| format!("Failed to get executable path: {}", e))?;
|
||||
|
||||
let profile_data_path = profile.get_profile_data_path(profiles_dir);
|
||||
let output = Command::new(executable_path)
|
||||
.args([
|
||||
"-profile",
|
||||
&profile_data_path.to_string_lossy(),
|
||||
"-new-tab",
|
||||
url,
|
||||
])
|
||||
.output()?;
|
||||
|
||||
if !output.status.success() {
|
||||
return Err(
|
||||
format!(
|
||||
"Failed to open URL in existing browser: {}",
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
)
|
||||
.into(),
|
||||
);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn open_url_in_existing_browser_chromium(
|
||||
profile: &BrowserProfile,
|
||||
url: &str,
|
||||
browser_type: BrowserType,
|
||||
browser_dir: &Path,
|
||||
profiles_dir: &Path,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let browser = create_browser(browser_type);
|
||||
let executable_path = browser
|
||||
.get_executable_path(browser_dir)
|
||||
.map_err(|e| format!("Failed to get executable path: {}", e))?;
|
||||
|
||||
let profile_data_path = profile.get_profile_data_path(profiles_dir);
|
||||
let output = Command::new(executable_path)
|
||||
.args([
|
||||
&format!("--user-data-dir={}", profile_data_path.to_string_lossy()),
|
||||
url,
|
||||
])
|
||||
.output()?;
|
||||
|
||||
if !output.status.success() {
|
||||
return Err(
|
||||
format!(
|
||||
"Failed to open URL in existing Chromium-based browser: {}",
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
)
|
||||
.into(),
|
||||
);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn kill_browser_process_impl(
|
||||
pid: u32,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
use sysinfo::{Pid, System};
|
||||
let system = System::new_all();
|
||||
if let Some(process) = system.process(Pid::from(pid as usize)) {
|
||||
if !process.kill() {
|
||||
return Err(format!("Failed to kill process {}", pid).into());
|
||||
}
|
||||
} else {
|
||||
return Err(format!("Process {} not found", pid).into());
|
||||
}
|
||||
|
||||
log::info!("Successfully killed browser process with PID: {pid}");
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
@@ -0,0 +1,5 @@
pub mod manager;
pub mod types;

pub use manager::ProfileManager;
pub use types::BrowserProfile;
@@ -0,0 +1,38 @@
use crate::camoufox_manager::CamoufoxConfig;
use serde::{Deserialize, Serialize};
use std::path::{Path, PathBuf};

#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct BrowserProfile {
  pub id: uuid::Uuid,
  pub name: String,
  pub browser: String,
  pub version: String,
  #[serde(default)]
  pub proxy_id: Option<String>, // Reference to stored proxy
  #[serde(default)]
  pub process_id: Option<u32>,
  #[serde(default)]
  pub last_launch: Option<u64>,
  #[serde(default = "default_release_type")]
  pub release_type: String, // "stable" or "nightly"
  #[serde(default)]
  pub camoufox_config: Option<CamoufoxConfig>, // Camoufox configuration
  #[serde(default)]
  pub group_id: Option<String>, // Reference to profile group
  #[serde(default)]
  pub tags: Vec<String>, // Free-form tags
  #[serde(default)]
  pub note: Option<String>, // User note
}

pub fn default_release_type() -> String {
  "stable".to_string()
}

impl BrowserProfile {
  /// Get the path to the profile data directory (profiles/{uuid}/profile)
  pub fn get_profile_data_path(&self, profiles_dir: &Path) -> PathBuf {
    profiles_dir.join(self.id.to_string()).join("profile")
  }
}
|
||||
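// A tiny usage sketch (the profiles_dir below is illustrative only): the data directory is
// always {profiles_dir}/{uuid}/profile, independent of the human-readable profile name.
#[allow(dead_code)]
fn example_data_path(profile: &BrowserProfile) -> PathBuf {
  profile.get_profile_data_path(Path::new("/home/user/.donutbrowser/profiles"))
}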
+242 -182
@@ -5,7 +5,8 @@ use std::fs::{self, create_dir_all};
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use crate::browser::BrowserType;
|
||||
use crate::browser_runner::BrowserRunner;
|
||||
use crate::downloaded_browsers_registry::DownloadedBrowsersRegistry;
|
||||
use crate::profile::ProfileManager;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct DetectedProfile {
|
||||
@@ -17,17 +18,23 @@ pub struct DetectedProfile {
|
||||
|
||||
pub struct ProfileImporter {
|
||||
base_dirs: BaseDirs,
|
||||
browser_runner: BrowserRunner,
|
||||
downloaded_browsers_registry: &'static DownloadedBrowsersRegistry,
|
||||
profile_manager: &'static ProfileManager,
|
||||
}
|
||||
|
||||
impl ProfileImporter {
|
||||
pub fn new() -> Self {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
base_dirs: BaseDirs::new().expect("Failed to get base directories"),
|
||||
browser_runner: BrowserRunner::new(),
|
||||
downloaded_browsers_registry: DownloadedBrowsersRegistry::instance(),
|
||||
profile_manager: ProfileManager::instance(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn instance() -> &'static ProfileImporter {
|
||||
&PROFILE_IMPORTER
|
||||
}
|
||||
|
||||
/// Detect existing browser profiles on the system
|
||||
pub fn detect_existing_profiles(
|
||||
&self,
|
||||
@@ -49,15 +56,9 @@ impl ProfileImporter {
|
||||
// Detect Chromium profiles
|
||||
detected_profiles.extend(self.detect_chromium_profiles()?);
|
||||
|
||||
// Detect Mullvad Browser profiles
|
||||
detected_profiles.extend(self.detect_mullvad_browser_profiles()?);
|
||||
|
||||
// Detect Zen Browser profiles
|
||||
detected_profiles.extend(self.detect_zen_browser_profiles()?);
|
||||
|
||||
// Detect TOR Browser profiles
|
||||
detected_profiles.extend(self.detect_tor_browser_profiles()?);
|
||||
|
||||
// Remove duplicates based on path
|
||||
let mut seen_paths = HashSet::new();
|
||||
let unique_profiles: Vec<DetectedProfile> = detected_profiles
|
||||
@@ -240,45 +241,6 @@ impl ProfileImporter {
|
||||
Ok(profiles)
|
||||
}
|
||||
|
||||
/// Detect Mullvad Browser profiles
|
||||
fn detect_mullvad_browser_profiles(
|
||||
&self,
|
||||
) -> Result<Vec<DetectedProfile>, Box<dyn std::error::Error>> {
|
||||
let mut profiles = Vec::new();
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
let mullvad_dir = self
|
||||
.base_dirs
|
||||
.home_dir()
|
||||
.join("Library/Application Support/MullvadBrowser/Profiles");
|
||||
profiles.extend(self.scan_firefox_profiles_dir(&mullvad_dir, "mullvad-browser")?);
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
// Primary location in AppData\Roaming
|
||||
let app_data = self.base_dirs.data_dir();
|
||||
let mullvad_dir = app_data.join("MullvadBrowser/Profiles");
|
||||
profiles.extend(self.scan_firefox_profiles_dir(&mullvad_dir, "mullvad-browser")?);
|
||||
|
||||
// Also check common installation locations
|
||||
let local_app_data = self.base_dirs.data_local_dir();
|
||||
let mullvad_local_dir = local_app_data.join("MullvadBrowser/Profiles");
|
||||
if mullvad_local_dir.exists() {
|
||||
profiles.extend(self.scan_firefox_profiles_dir(&mullvad_local_dir, "mullvad-browser")?);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
let mullvad_dir = self.base_dirs.home_dir().join(".mullvad-browser");
|
||||
profiles.extend(self.scan_firefox_profiles_dir(&mullvad_dir, "mullvad-browser")?);
|
||||
}
|
||||
|
||||
Ok(profiles)
|
||||
}
|
||||
|
||||
/// Detect Zen Browser profiles
|
||||
fn detect_zen_browser_profiles(
|
||||
&self,
|
||||
@@ -310,107 +272,6 @@ impl ProfileImporter {
|
||||
Ok(profiles)
|
||||
}
|
||||
|
||||
/// Detect TOR Browser profiles
|
||||
fn detect_tor_browser_profiles(
|
||||
&self,
|
||||
) -> Result<Vec<DetectedProfile>, Box<dyn std::error::Error>> {
|
||||
let mut profiles = Vec::new();
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
// TOR Browser on macOS is typically in Applications
|
||||
let tor_dir = self
|
||||
.base_dirs
|
||||
.home_dir()
|
||||
.join("Library/Application Support/TorBrowser-Data/Browser/profile.default");
|
||||
|
||||
if tor_dir.exists() {
|
||||
profiles.push(DetectedProfile {
|
||||
browser: "tor-browser".to_string(),
|
||||
name: "TOR Browser - Default Profile".to_string(),
|
||||
path: tor_dir.to_string_lossy().to_string(),
|
||||
description: "Default TOR Browser profile".to_string(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
// Check common TOR Browser installation locations on Windows
|
||||
let possible_paths = [
|
||||
// Default installation in user directory
|
||||
(
|
||||
"Desktop",
|
||||
"Desktop/Tor Browser/Browser/TorBrowser/Data/Browser/profile.default",
|
||||
),
|
||||
// AppData locations
|
||||
(
|
||||
"AppData/Roaming",
|
||||
"TorBrowser/Browser/TorBrowser/Data/Browser/profile.default",
|
||||
),
|
||||
(
|
||||
"AppData/Local",
|
||||
"TorBrowser/Browser/TorBrowser/Data/Browser/profile.default",
|
||||
),
|
||||
];
|
||||
|
||||
let home_dir = self.base_dirs.home_dir();
|
||||
|
||||
for (location_name, relative_path) in &possible_paths {
|
||||
let tor_dir = home_dir.join(relative_path);
|
||||
if tor_dir.exists() {
|
||||
profiles.push(DetectedProfile {
|
||||
browser: "tor-browser".to_string(),
|
||||
name: format!("TOR Browser - {} Profile", location_name),
|
||||
path: tor_dir.to_string_lossy().to_string(),
|
||||
description: format!("TOR Browser profile from {}", location_name),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Also check AppData directories if available
|
||||
let app_data = self.base_dirs.data_dir();
|
||||
let tor_app_data =
|
||||
app_data.join("TorBrowser/Browser/TorBrowser/Data/Browser/profile.default");
|
||||
if tor_app_data.exists() {
|
||||
profiles.push(DetectedProfile {
|
||||
browser: "tor-browser".to_string(),
|
||||
name: "TOR Browser - AppData Profile".to_string(),
|
||||
path: tor_app_data.to_string_lossy().to_string(),
|
||||
description: "TOR Browser profile from AppData".to_string(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
// Common TOR Browser locations on Linux
|
||||
let possible_paths = [
|
||||
".local/share/torbrowser/tbb/x86_64/tor-browser_en-US/Browser/TorBrowser/Data/Browser/profile.default",
|
||||
"tor-browser_en-US/Browser/TorBrowser/Data/Browser/profile.default",
|
||||
".tor-browser/Browser/TorBrowser/Data/Browser/profile.default",
|
||||
"Downloads/tor-browser_en-US/Browser/TorBrowser/Data/Browser/profile.default",
|
||||
];
|
||||
|
||||
let home_dir = self.base_dirs.home_dir();
|
||||
|
||||
for relative_path in &possible_paths {
|
||||
let tor_dir = home_dir.join(relative_path);
|
||||
if tor_dir.exists() {
|
||||
profiles.push(DetectedProfile {
|
||||
browser: "tor-browser".to_string(),
|
||||
name: "TOR Browser - Default Profile".to_string(),
|
||||
path: tor_dir.to_string_lossy().to_string(),
|
||||
description: "TOR Browser profile".to_string(),
|
||||
});
|
||||
break; // Only add the first one found to avoid duplicates
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(profiles)
|
||||
}
|
||||
|
||||
/// Scan Firefox-style profiles directory
|
||||
fn scan_firefox_profiles_dir(
|
||||
&self,
|
||||
@@ -631,9 +492,7 @@ impl ProfileImporter {
|
||||
"firefox-developer" => "Firefox Developer",
|
||||
"chromium" => "Chrome/Chromium",
|
||||
"brave" => "Brave",
|
||||
"mullvad-browser" => "Mullvad Browser",
|
||||
"zen" => "Zen Browser",
|
||||
"tor-browser" => "Tor Browser",
|
||||
_ => "Unknown Browser",
|
||||
}
|
||||
}
|
||||
@@ -656,7 +515,7 @@ impl ProfileImporter {
|
||||
.map_err(|_| format!("Invalid browser type: {browser_type}"))?;
|
||||
|
||||
// Check if a profile with this name already exists
|
||||
let existing_profiles = self.browser_runner.list_profiles()?;
|
||||
let existing_profiles = self.profile_manager.list_profiles()?;
|
||||
if existing_profiles
|
||||
.iter()
|
||||
.any(|p| p.name.to_lowercase() == new_profile_name.to_lowercase())
|
||||
@@ -664,35 +523,41 @@ impl ProfileImporter {
|
||||
return Err(format!("Profile with name '{new_profile_name}' already exists").into());
|
||||
}
|
||||
|
||||
// Create the new profile directory
|
||||
let snake_case_name = new_profile_name.to_lowercase().replace(' ', "_");
|
||||
let profiles_dir = self.browser_runner.get_profiles_dir();
|
||||
let new_profile_path = profiles_dir.join(&snake_case_name);
|
||||
// Generate UUID for new profile and create the directory structure
|
||||
let profile_id = uuid::Uuid::new_v4();
|
||||
let profiles_dir = self.profile_manager.get_profiles_dir();
|
||||
let new_profile_uuid_dir = profiles_dir.join(profile_id.to_string());
|
||||
let new_profile_data_dir = new_profile_uuid_dir.join("profile");
|
||||
|
||||
create_dir_all(&new_profile_path)?;
|
||||
create_dir_all(&new_profile_uuid_dir)?;
|
||||
create_dir_all(&new_profile_data_dir)?;
|
||||
|
||||
// Copy all files from source to destination
|
||||
Self::copy_directory_recursive(source_path, &new_profile_path)?;
|
||||
// Copy all files from source to destination profile subdirectory
|
||||
Self::copy_directory_recursive(source_path, &new_profile_data_dir)?;
|
||||
|
||||
// Create the profile metadata without overwriting the imported data
|
||||
// We need to find a suitable version for this browser type
|
||||
let available_versions = self.get_default_version_for_browser(browser_type)?;
|
||||
|
||||
let profile = crate::browser_runner::BrowserProfile {
|
||||
let profile = crate::profile::BrowserProfile {
|
||||
id: profile_id,
|
||||
name: new_profile_name.to_string(),
|
||||
browser: browser_type.to_string(),
|
||||
version: available_versions,
|
||||
profile_path: new_profile_path.to_string_lossy().to_string(),
|
||||
proxy: None,
|
||||
proxy_id: None,
|
||||
process_id: None,
|
||||
last_launch: None,
|
||||
release_type: "stable".to_string(),
|
||||
camoufox_config: None,
|
||||
group_id: None,
|
||||
tags: Vec::new(),
|
||||
note: None,
|
||||
};
|
||||
|
||||
// Save the profile metadata
|
||||
self.browser_runner.save_profile(&profile)?;
|
||||
self.profile_manager.save_profile(&profile)?;
|
||||
|
||||
println!(
|
||||
log::info!(
|
||||
"Successfully imported profile '{}' from '{}'",
|
||||
new_profile_name,
|
||||
source_path.display()
|
||||
@@ -706,26 +571,21 @@ impl ProfileImporter {
|
||||
&self,
|
||||
browser_type: &str,
|
||||
) -> Result<String, Box<dyn std::error::Error>> {
|
||||
// Try to get a downloaded version first, fallback to a reasonable default
|
||||
let registry =
|
||||
crate::downloaded_browsers::DownloadedBrowsersRegistry::load().unwrap_or_default();
|
||||
let downloaded_versions = registry.get_downloaded_versions(browser_type);
|
||||
// Check if any version of the browser is downloaded
|
||||
let downloaded_versions = self
|
||||
.downloaded_browsers_registry
|
||||
.get_downloaded_versions(browser_type);
|
||||
|
||||
if let Some(version) = downloaded_versions.first() {
|
||||
return Ok(version.clone());
|
||||
}
|
||||
|
||||
// If no downloaded versions, return a sensible default
|
||||
match browser_type {
|
||||
"firefox" => Ok("latest".to_string()),
|
||||
"firefox-developer" => Ok("latest".to_string()),
|
||||
"chromium" => Ok("latest".to_string()),
|
||||
"brave" => Ok("latest".to_string()),
|
||||
"zen" => Ok("latest".to_string()),
|
||||
"mullvad-browser" => Ok("13.5.16".to_string()), // Mullvad Browser common version
|
||||
"tor-browser" => Ok("latest".to_string()),
|
||||
_ => Ok("latest".to_string()),
|
||||
}
|
||||
// If no downloaded versions found, return an error
|
||||
Err(format!(
|
||||
"No downloaded versions found for browser '{}'. Please download a version of {} first before importing profiles.",
|
||||
browser_type,
|
||||
self.get_browser_display_name(browser_type)
|
||||
).into())
|
||||
}
|
||||
|
||||
/// Recursively copy directory contents
|
||||
@@ -756,7 +616,7 @@ impl ProfileImporter {
|
||||
// Tauri commands
|
||||
#[tauri::command]
|
||||
pub async fn detect_existing_profiles() -> Result<Vec<DetectedProfile>, String> {
|
||||
let importer = ProfileImporter::new();
|
||||
let importer = ProfileImporter::instance();
|
||||
importer
|
||||
.detect_existing_profiles()
|
||||
.map_err(|e| format!("Failed to detect existing profiles: {e}"))
|
||||
@@ -768,8 +628,208 @@ pub async fn import_browser_profile(
|
||||
browser_type: String,
|
||||
new_profile_name: String,
|
||||
) -> Result<(), String> {
|
||||
let importer = ProfileImporter::new();
|
||||
let importer = ProfileImporter::instance();
|
||||
importer
|
||||
.import_profile(&source_path, &browser_type, &new_profile_name)
|
||||
.map_err(|e| format!("Failed to import profile: {e}"))
|
||||
}
|
||||
|
||||
// Global singleton instance
|
||||
lazy_static::lazy_static! {
|
||||
static ref PROFILE_IMPORTER: ProfileImporter = ProfileImporter::new();
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::env;
|
||||
use tempfile::TempDir;
|
||||
|
||||
fn create_test_profile_importer() -> (ProfileImporter, TempDir) {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp directory");
|
||||
|
||||
// Set up a temporary home directory for testing
|
||||
env::set_var("HOME", temp_dir.path());
|
||||
|
||||
let importer = ProfileImporter::new();
|
||||
(importer, temp_dir)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_profile_importer_creation() {
|
||||
let (_importer, _temp_dir) = create_test_profile_importer();
|
||||
// Test passes if no panic occurs
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_browser_display_name() {
|
||||
let (importer, _temp_dir) = create_test_profile_importer();
|
||||
|
||||
assert_eq!(importer.get_browser_display_name("firefox"), "Firefox");
|
||||
assert_eq!(
|
||||
importer.get_browser_display_name("firefox-developer"),
|
||||
"Firefox Developer"
|
||||
);
|
||||
assert_eq!(
|
||||
importer.get_browser_display_name("chromium"),
|
||||
"Chrome/Chromium"
|
||||
);
|
||||
assert_eq!(importer.get_browser_display_name("brave"), "Brave");
|
||||
assert_eq!(importer.get_browser_display_name("zen"), "Zen Browser");
|
||||
assert_eq!(
|
||||
importer.get_browser_display_name("unknown"),
|
||||
"Unknown Browser"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_detect_existing_profiles_no_panic() {
|
||||
let (importer, _temp_dir) = create_test_profile_importer();
|
||||
|
||||
// This should not panic even if no browser profiles exist
|
||||
let result = importer.detect_existing_profiles();
|
||||
assert!(result.is_ok(), "detect_existing_profiles should not fail");
|
||||
|
||||
let _profiles = result.unwrap();
|
||||
// We can't assert specific profiles since they depend on the system
|
||||
// but we can verify the result is a valid Vec
|
||||
// We can't assert specific profiles since they depend on the system
|
||||
// but we can verify the result is a valid Vec (length check is always true for Vec, but shows intent)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_scan_firefox_profiles_dir_nonexistent() {
|
||||
let (importer, temp_dir) = create_test_profile_importer();
|
||||
|
||||
let nonexistent_dir = temp_dir.path().join("nonexistent");
|
||||
let result = importer.scan_firefox_profiles_dir(&nonexistent_dir, "firefox");
|
||||
|
||||
assert!(
|
||||
result.is_ok(),
|
||||
"Should handle nonexistent directory gracefully"
|
||||
);
|
||||
let profiles = result.unwrap();
|
||||
assert!(
|
||||
profiles.is_empty(),
|
||||
"Should return empty vector for nonexistent directory"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_scan_chrome_profiles_dir_nonexistent() {
|
||||
let (importer, temp_dir) = create_test_profile_importer();
|
||||
|
||||
let nonexistent_dir = temp_dir.path().join("nonexistent");
|
||||
let result = importer.scan_chrome_profiles_dir(&nonexistent_dir, "chromium");
|
||||
|
||||
assert!(
|
||||
result.is_ok(),
|
||||
"Should handle nonexistent directory gracefully"
|
||||
);
|
||||
let profiles = result.unwrap();
|
||||
assert!(
|
||||
profiles.is_empty(),
|
||||
"Should return empty vector for nonexistent directory"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_firefox_profiles_ini_empty() {
|
||||
let (importer, _temp_dir) = create_test_profile_importer();
|
||||
|
||||
let empty_content = "";
|
||||
let profiles_dir = Path::new("/tmp");
|
||||
let result = importer.parse_firefox_profiles_ini(empty_content, profiles_dir, "firefox");
|
||||
|
||||
assert!(result.is_ok(), "Should handle empty profiles.ini");
|
||||
let profiles = result.unwrap();
|
||||
assert!(
|
||||
profiles.is_empty(),
|
||||
"Should return empty vector for empty content"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_firefox_profiles_ini_valid() {
|
||||
let (importer, temp_dir) = create_test_profile_importer();
|
||||
|
||||
// Create a mock profile directory
|
||||
let profiles_dir = temp_dir.path().join("profiles");
|
||||
let profile_dir = profiles_dir.join("test.profile");
|
||||
fs::create_dir_all(&profile_dir).expect("Should create profile directory");
|
||||
|
||||
// Create a prefs.js file to make it look like a valid profile
|
||||
let prefs_file = profile_dir.join("prefs.js");
|
||||
fs::write(&prefs_file, "// Firefox preferences").expect("Should create prefs.js");
|
||||
|
||||
let profiles_ini_content = r#"
|
||||
[Profile0]
|
||||
Name=Test Profile
|
||||
IsRelative=1
|
||||
Path=test.profile
|
||||
"#;
|
||||
|
||||
let result =
|
||||
importer.parse_firefox_profiles_ini(profiles_ini_content, &profiles_dir, "firefox");
|
||||
|
||||
assert!(result.is_ok(), "Should parse valid profiles.ini");
|
||||
let profiles = result.unwrap();
|
||||
assert_eq!(profiles.len(), 1, "Should find one profile");
|
||||
assert_eq!(profiles[0].name, "Firefox - Test Profile");
|
||||
assert_eq!(profiles[0].browser, "firefox");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_copy_directory_recursive() {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp directory");
|
||||
|
||||
// Create source directory structure
|
||||
let source_dir = temp_dir.path().join("source");
|
||||
let source_subdir = source_dir.join("subdir");
|
||||
fs::create_dir_all(&source_subdir).expect("Should create source directories");
|
||||
|
||||
// Create some test files
|
||||
let source_file1 = source_dir.join("file1.txt");
|
||||
let source_file2 = source_subdir.join("file2.txt");
|
||||
fs::write(&source_file1, "content1").expect("Should create file1");
|
||||
fs::write(&source_file2, "content2").expect("Should create file2");
|
||||
|
||||
// Create destination directory
|
||||
let dest_dir = temp_dir.path().join("dest");
|
||||
|
||||
// Copy recursively
|
||||
let result = ProfileImporter::copy_directory_recursive(&source_dir, &dest_dir);
|
||||
assert!(result.is_ok(), "Should copy directory successfully");
|
||||
|
||||
// Verify files were copied
|
||||
let dest_file1 = dest_dir.join("file1.txt");
|
||||
let dest_file2 = dest_dir.join("subdir").join("file2.txt");
|
||||
|
||||
assert!(dest_file1.exists(), "file1.txt should be copied");
|
||||
assert!(dest_file2.exists(), "file2.txt should be copied");
|
||||
|
||||
let content1 = fs::read_to_string(&dest_file1).expect("Should read file1");
|
||||
let content2 = fs::read_to_string(&dest_file2).expect("Should read file2");
|
||||
|
||||
assert_eq!(content1, "content1", "file1 content should match");
|
||||
assert_eq!(content2, "content2", "file2 content should match");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_default_version_for_browser_no_versions() {
|
||||
let (importer, _temp_dir) = create_test_profile_importer();
|
||||
|
||||
// This should fail since no versions are downloaded in test environment
|
||||
let result = importer.get_default_version_for_browser("firefox");
|
||||
assert!(
|
||||
result.is_err(),
|
||||
"Should fail when no versions are available"
|
||||
);
|
||||
|
||||
let error_msg = result.unwrap_err().to_string();
|
||||
assert!(
|
||||
error_msg.contains("No downloaded versions found"),
|
||||
"Error should mention no versions found"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
+991 -311
File diff suppressed because it is too large
@@ -0,0 +1,263 @@
|
||||
use crate::proxy_storage::{
|
||||
delete_proxy_config, generate_proxy_id, get_proxy_config, is_process_running, list_proxy_configs,
|
||||
save_proxy_config, ProxyConfig,
|
||||
};
|
||||
use std::process::Stdio;
|
||||
lazy_static::lazy_static! {
|
||||
static ref PROXY_PROCESSES: std::sync::Mutex<std::collections::HashMap<String, u32>> =
|
||||
std::sync::Mutex::new(std::collections::HashMap::new());
|
||||
}
|
||||
|
||||
pub async fn start_proxy_process(
|
||||
upstream_url: Option<String>,
|
||||
port: Option<u16>,
|
||||
) -> Result<ProxyConfig, Box<dyn std::error::Error>> {
|
||||
start_proxy_process_with_profile(upstream_url, port, None).await
|
||||
}
|
||||
|
||||
pub async fn start_proxy_process_with_profile(
|
||||
upstream_url: Option<String>,
|
||||
port: Option<u16>,
|
||||
profile_id: Option<String>,
|
||||
) -> Result<ProxyConfig, Box<dyn std::error::Error>> {
|
||||
let id = generate_proxy_id();
|
||||
let upstream = upstream_url.unwrap_or_else(|| "DIRECT".to_string());
|
||||
|
||||
// Get available port if not specified
|
||||
let local_port = port.unwrap_or_else(|| {
|
||||
// Find an available port
|
||||
let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap();
|
||||
listener.local_addr().unwrap().port()
|
||||
});
|
||||
|
||||
let config =
|
||||
ProxyConfig::new(id.clone(), upstream, Some(local_port)).with_profile_id(profile_id.clone());
|
||||
save_proxy_config(&config)?;
|
||||
|
||||
// Log profile_id for debugging
|
||||
if let Some(ref pid) = profile_id {
|
||||
log::info!("Saved proxy config {} with profile_id: {}", id, pid);
|
||||
} else {
|
||||
log::info!("Saved proxy config {} without profile_id", id);
|
||||
}
|
||||
|
||||
// Spawn proxy worker process in the background using std::process::Command
|
||||
// This ensures proper process detachment on Unix systems
|
||||
let exe = std::env::current_exe()?;
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
use std::os::unix::process::CommandExt;
|
||||
use std::process::Command as StdCommand;
|
||||
|
||||
let mut cmd = StdCommand::new(&exe);
|
||||
cmd.arg("proxy-worker");
|
||||
cmd.arg("start");
|
||||
cmd.arg("--id");
|
||||
cmd.arg(&id);
|
||||
|
||||
cmd.stdin(Stdio::null());
|
||||
cmd.stdout(Stdio::null());
|
||||
|
||||
// Always log to file for diagnostics (both debug and release builds)
|
||||
let log_path = std::path::PathBuf::from("/tmp").join(format!("donut-proxy-{}.log", id));
|
||||
if let Ok(file) = std::fs::File::create(&log_path) {
|
||||
log::info!("Proxy worker stderr will be logged to: {:?}", log_path);
|
||||
cmd.stderr(Stdio::from(file));
|
||||
} else {
|
||||
cmd.stderr(Stdio::null());
|
||||
}
|
||||
|
||||
// Properly detach the process on Unix by creating a new session
|
||||
unsafe {
|
||||
cmd.pre_exec(|| {
|
||||
// Create a new process group so the process survives parent exit
|
||||
libc::setsid();
|
||||
|
||||
// Set high priority so the proxy is killed last under resource pressure
|
||||
// Negative nice value = higher priority. Try -10, fall back to -5 if it fails.
|
||||
if libc::setpriority(libc::PRIO_PROCESS, 0, -10) != 0 {
|
||||
let _ = libc::setpriority(libc::PRIO_PROCESS, 0, -5);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
});
|
||||
}
|
||||
|
||||
// Spawn detached process
|
||||
let child = cmd.spawn()?;
|
||||
let pid = child.id();
|
||||
|
||||
// Store PID
|
||||
{
|
||||
let mut processes = PROXY_PROCESSES.lock().unwrap();
|
||||
processes.insert(id.clone(), pid);
|
||||
}
|
||||
|
||||
// Update config with PID
|
||||
let mut config_with_pid = config.clone();
|
||||
config_with_pid.pid = Some(pid);
|
||||
save_proxy_config(&config_with_pid)?;
|
||||
|
||||
// Don't wait for the child - it's detached
|
||||
drop(child);
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
{
|
||||
use std::os::windows::process::CommandExt;
|
||||
use std::process::Command as StdCommand;
|
||||
use windows::Win32::Foundation::CloseHandle;
|
||||
use windows::Win32::System::Threading::{
|
||||
OpenProcess, SetPriorityClass, ABOVE_NORMAL_PRIORITY_CLASS, PROCESS_SET_INFORMATION,
|
||||
};
|
||||
|
||||
let mut cmd = StdCommand::new(&exe);
|
||||
cmd.arg("proxy-worker");
|
||||
cmd.arg("start");
|
||||
cmd.arg("--id");
|
||||
cmd.arg(&id);
|
||||
|
||||
cmd.stdin(Stdio::null());
|
||||
cmd.stdout(Stdio::null());
|
||||
cmd.stderr(Stdio::null());
|
||||
|
||||
// On Windows, use CREATE_NEW_PROCESS_GROUP flag for proper detachment
|
||||
const CREATE_NEW_PROCESS_GROUP: u32 = 0x00000200;
|
||||
cmd.creation_flags(CREATE_NEW_PROCESS_GROUP);
|
||||
|
||||
let child = cmd.spawn()?;
|
||||
let pid = child.id();
|
||||
|
||||
// Set high priority so the proxy is killed last under resource pressure
|
||||
unsafe {
|
||||
if let Ok(handle) = OpenProcess(PROCESS_SET_INFORMATION, false, pid) {
|
||||
let _ = SetPriorityClass(handle, ABOVE_NORMAL_PRIORITY_CLASS);
|
||||
let _ = CloseHandle(handle);
|
||||
}
|
||||
}
|
||||
|
||||
// Store PID
|
||||
{
|
||||
let mut processes = PROXY_PROCESSES.lock().unwrap();
|
||||
processes.insert(id.clone(), pid);
|
||||
}
|
||||
|
||||
// Update config with PID
|
||||
let mut config_with_pid = config.clone();
|
||||
config_with_pid.pid = Some(pid);
|
||||
save_proxy_config(&config_with_pid)?;
|
||||
|
||||
drop(child);
|
||||
}
|
||||
|
||||
// Give the process a moment to start up before checking
|
||||
tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
|
||||
|
||||
// Wait for the worker to bind to the port and update config
|
||||
// Since we pre-allocated the port, the worker should bind immediately
|
||||
// We check quickly with short intervals to make startup fast
|
||||
let mut attempts = 0;
|
||||
let max_attempts = 40; // 4 seconds max (40 * 100ms) - give it more time to start
|
||||
|
||||
loop {
|
||||
// Use shorter sleep for faster startup
|
||||
tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
|
||||
|
||||
if let Some(updated_config) = get_proxy_config(&id) {
|
||||
// Check if local_url is set (worker has bound and updated config)
|
||||
if let Some(ref local_url) = updated_config.local_url {
|
||||
if !local_url.is_empty() {
|
||||
if let Some(port) = updated_config.local_port {
|
||||
// Try to connect immediately - port should be ready since we pre-allocated it
|
||||
match tokio::time::timeout(
|
||||
tokio::time::Duration::from_millis(100),
|
||||
tokio::net::TcpStream::connect(("127.0.0.1", port)),
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(Ok(_stream)) => {
|
||||
// Port is listening and accepting connections!
|
||||
return Ok(updated_config);
|
||||
}
|
||||
Ok(Err(_)) | Err(_) => {
|
||||
// Port not ready yet, continue waiting
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
attempts += 1;
|
||||
if attempts >= max_attempts {
|
||||
// Try to get the config one more time for better error message
|
||||
if let Some(config) = get_proxy_config(&id) {
|
||||
// Check if process is still running
|
||||
let process_running = config.pid.map(is_process_running).unwrap_or(false);
|
||||
return Err(
|
||||
format!(
|
||||
"Proxy worker failed to start in time. Config: id={}, local_url={:?}, local_port={:?}, pid={:?}, process_running={}",
|
||||
config.id, config.local_url, config.local_port, config.pid, process_running
|
||||
)
|
||||
.into(),
|
||||
);
|
||||
}
|
||||
return Err(
|
||||
format!(
|
||||
"Proxy worker failed to start in time. Config not found for id: {}",
|
||||
id
|
||||
)
|
||||
.into(),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
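// A minimal sketch of the readiness probe used in the loop above (assumes a Tokio runtime):
// the port is considered ready once a TCP connection succeeds within a short timeout.
#[allow(dead_code)]
async fn port_is_ready(port: u16) -> bool {
  matches!(
    tokio::time::timeout(
      tokio::time::Duration::from_millis(100),
      tokio::net::TcpStream::connect(("127.0.0.1", port)),
    )
    .await,
    Ok(Ok(_))
  )
}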
|
||||
|
||||
pub async fn stop_proxy_process(id: &str) -> Result<bool, Box<dyn std::error::Error>> {
|
||||
let config = get_proxy_config(id);
|
||||
|
||||
if let Some(config) = config {
|
||||
if let Some(pid) = config.pid {
|
||||
// Kill the process
|
||||
#[cfg(unix)]
|
||||
{
|
||||
use std::process::Command;
|
||||
let _ = Command::new("kill")
|
||||
.arg("-TERM")
|
||||
.arg(pid.to_string())
|
||||
.output();
|
||||
}
|
||||
#[cfg(windows)]
|
||||
{
|
||||
use std::process::Command;
|
||||
let _ = Command::new("taskkill")
|
||||
.args(["/F", "/PID", &pid.to_string()])
|
||||
.output();
|
||||
}
|
||||
|
||||
// Wait a bit for the process to exit
|
||||
tokio::time::sleep(tokio::time::Duration::from_millis(500)).await;
|
||||
|
||||
// Remove from tracking
|
||||
{
|
||||
let mut processes = PROXY_PROCESSES.lock().unwrap();
|
||||
processes.remove(id);
|
||||
}
|
||||
|
||||
// Delete the config file
|
||||
delete_proxy_config(id);
|
||||
return Ok(true);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
pub async fn stop_all_proxy_processes() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let configs = list_proxy_configs();
|
||||
for config in configs {
|
||||
let _ = stop_proxy_process(&config.id).await;
|
||||
}
|
||||
Ok(())
|
||||
}
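// A hypothetical end-to-end usage sketch (assumes a Tokio runtime; the upstream SOCKS5
// address shown is illustrative only):
#[allow(dead_code)]
async fn example_proxy_lifecycle() -> Result<(), Box<dyn std::error::Error>> {
  let config = start_proxy_process(Some("socks5://127.0.0.1:1080".to_string()), None).await?;
  log::info!("local proxy listening on port {:?}", config.local_port);
  stop_proxy_process(&config.id).await?;
  Ok(())
}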
|
||||
@@ -0,0 +1,907 @@
|
||||
use crate::proxy_storage::ProxyConfig;
|
||||
use crate::traffic_stats::{get_traffic_tracker, init_traffic_tracker};
|
||||
use http_body_util::{BodyExt, Full};
|
||||
use hyper::body::Bytes;
|
||||
use hyper::server::conn::http1;
|
||||
use hyper::service::service_fn;
|
||||
use hyper::{Method, Request, Response, StatusCode};
|
||||
use hyper_util::rt::TokioIo;
|
||||
use std::convert::Infallible;
|
||||
use std::io;
|
||||
use std::net::SocketAddr;
|
||||
use std::pin::Pin;
|
||||
use std::sync::atomic::{AtomicU64, Ordering};
|
||||
use std::sync::Arc;
|
||||
use std::task::{Context, Poll};
|
||||
use tokio::io::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt, ReadBuf};
|
||||
use tokio::net::TcpListener;
|
||||
use tokio::net::TcpStream;
|
||||
use url::Url;
|
||||
|
||||
/// Wrapper stream that counts bytes read and written
|
||||
struct CountingStream<S> {
|
||||
inner: S,
|
||||
bytes_read: Arc<AtomicU64>,
|
||||
bytes_written: Arc<AtomicU64>,
|
||||
}
|
||||
|
||||
impl<S> CountingStream<S> {
|
||||
fn new(inner: S) -> Self {
|
||||
Self {
|
||||
inner,
|
||||
bytes_read: Arc::new(AtomicU64::new(0)),
|
||||
bytes_written: Arc::new(AtomicU64::new(0)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<S: AsyncRead + Unpin> AsyncRead for CountingStream<S> {
|
||||
fn poll_read(
|
||||
mut self: Pin<&mut Self>,
|
||||
cx: &mut Context<'_>,
|
||||
buf: &mut ReadBuf<'_>,
|
||||
) -> Poll<io::Result<()>> {
|
||||
let filled_before = buf.filled().len();
|
||||
let result = Pin::new(&mut self.inner).poll_read(cx, buf);
|
||||
if let Poll::Ready(Ok(())) = &result {
|
||||
let bytes_read = buf.filled().len() - filled_before;
|
||||
if bytes_read > 0 {
|
||||
self
|
||||
.bytes_read
|
||||
.fetch_add(bytes_read as u64, Ordering::Relaxed);
|
||||
// Update global tracker - count as received (data coming into proxy)
|
||||
if let Some(tracker) = get_traffic_tracker() {
|
||||
tracker.add_bytes_received(bytes_read as u64);
|
||||
}
|
||||
}
|
||||
}
|
||||
result
|
||||
}
|
||||
}
|
||||
|
||||
impl<S: AsyncWrite + Unpin> AsyncWrite for CountingStream<S> {
|
||||
fn poll_write(
|
||||
mut self: Pin<&mut Self>,
|
||||
cx: &mut Context<'_>,
|
||||
buf: &[u8],
|
||||
) -> Poll<io::Result<usize>> {
|
||||
let result = Pin::new(&mut self.inner).poll_write(cx, buf);
|
||||
if let Poll::Ready(Ok(n)) = &result {
|
||||
self.bytes_written.fetch_add(*n as u64, Ordering::Relaxed);
|
||||
// Update global tracker - count as sent (data going out of proxy)
|
||||
if let Some(tracker) = get_traffic_tracker() {
|
||||
tracker.add_bytes_sent(*n as u64);
|
||||
}
|
||||
}
|
||||
result
|
||||
}
|
||||
|
||||
fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> {
|
||||
Pin::new(&mut self.inner).poll_flush(cx)
|
||||
}
|
||||
|
||||
fn poll_shutdown(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> {
|
||||
Pin::new(&mut self.inner).poll_shutdown(cx)
|
||||
}
|
||||
}
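// A small usage sketch (hypothetical): wrap an accepted TCP stream so every byte flowing
// through the proxy is counted; the shared counters remain readable after the wrapper is dropped.
#[allow(dead_code)]
fn example_counting(stream: TcpStream) -> (Arc<AtomicU64>, Arc<AtomicU64>) {
  let counted = CountingStream::new(stream);
  (counted.bytes_read.clone(), counted.bytes_written.clone())
}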
|
||||
|
||||
// Wrapper to prepend consumed bytes to a stream
|
||||
struct PrependReader {
|
||||
prepended: Vec<u8>,
|
||||
prepended_pos: usize,
|
||||
inner: TcpStream,
|
||||
}
|
||||
|
||||
impl AsyncRead for PrependReader {
|
||||
fn poll_read(
|
||||
mut self: Pin<&mut Self>,
|
||||
cx: &mut Context<'_>,
|
||||
buf: &mut ReadBuf<'_>,
|
||||
) -> Poll<io::Result<()>> {
|
||||
// First, read from prepended bytes if any
|
||||
if self.prepended_pos < self.prepended.len() {
|
||||
let available = self.prepended.len() - self.prepended_pos;
|
||||
let to_copy = available.min(buf.remaining());
|
||||
buf.put_slice(&self.prepended[self.prepended_pos..self.prepended_pos + to_copy]);
|
||||
self.prepended_pos += to_copy;
|
||||
return Poll::Ready(Ok(()));
|
||||
}
|
||||
|
||||
// Then read from inner stream
|
||||
Pin::new(&mut self.inner).poll_read(cx, buf)
|
||||
}
|
||||
}
|
||||
|
||||
impl AsyncWrite for PrependReader {
|
||||
fn poll_write(
|
||||
mut self: Pin<&mut Self>,
|
||||
cx: &mut Context<'_>,
|
||||
buf: &[u8],
|
||||
) -> Poll<io::Result<usize>> {
|
||||
Pin::new(&mut self.inner).poll_write(cx, buf)
|
||||
}
|
||||
|
||||
fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> {
|
||||
Pin::new(&mut self.inner).poll_flush(cx)
|
||||
}
|
||||
|
||||
fn poll_shutdown(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<io::Result<()>> {
|
||||
Pin::new(&mut self.inner).poll_shutdown(cx)
|
||||
}
|
||||
}
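// A minimal sketch of the replay idea (hypothetical values): bytes that were already consumed
// from the client, e.g. while parsing the request head, are served first and later reads fall
// through to the underlying socket.
#[allow(dead_code)]
fn make_prepend_reader(already_read: Vec<u8>, socket: TcpStream) -> PrependReader {
  PrependReader {
    prepended: already_read,
    prepended_pos: 0,
    inner: socket,
  }
}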
|
||||
|
||||
async fn handle_request(
  req: Request<hyper::body::Incoming>,
  upstream_url: Option<String>,
) -> Result<Response<Full<Bytes>>, Infallible> {
  // Handle CONNECT method for HTTPS tunneling
  if req.method() == Method::CONNECT {
    return handle_connect(req, upstream_url).await;
  }

  // Handle regular HTTP requests
  handle_http(req, upstream_url).await
}

async fn handle_connect(
  req: Request<hyper::body::Incoming>,
  upstream_url: Option<String>,
) -> Result<Response<Full<Bytes>>, Infallible> {
  let authority = req.uri().authority().cloned();

  if let Some(authority) = authority {
    let target_addr = format!("{}", authority);

    // Parse target host and port
    let (target_host, target_port) = if let Some(colon_pos) = target_addr.find(':') {
      let host = &target_addr[..colon_pos];
      let port: u16 = target_addr[colon_pos + 1..].parse().unwrap_or(443);
      (host, port)
    } else {
      (&target_addr[..], 443)
    };

    // If no upstream proxy, connect directly
    if upstream_url.is_none()
      || upstream_url
        .as_ref()
        .map(|s| s == "DIRECT")
        .unwrap_or(false)
    {
      match TcpStream::connect(&target_addr).await {
        Ok(_stream) => {
          let mut response = Response::new(Full::new(Bytes::from("")));
          *response.status_mut() = StatusCode::from_u16(200).unwrap();
          return Ok(response);
        }
        Err(e) => {
          log::error!("Failed to connect to {}: {}", target_addr, e);
          let mut response =
            Response::new(Full::new(Bytes::from(format!("Connection failed: {}", e))));
          *response.status_mut() = StatusCode::BAD_GATEWAY;
          return Ok(response);
        }
      }
    }

    // Connect through upstream proxy
    let upstream = match upstream_url.as_ref().and_then(|u| Url::parse(u).ok()) {
      Some(url) => url,
      None => {
        let mut response = Response::new(Full::new(Bytes::from("Invalid upstream URL")));
        *response.status_mut() = StatusCode::BAD_GATEWAY;
        return Ok(response);
      }
    };

    let scheme = upstream.scheme();
    match scheme {
      "http" | "https" => {
        // Use manual CONNECT for HTTP/HTTPS proxies
        match connect_via_http_proxy(&upstream, target_host, target_port).await {
          Ok(_) => {
            let mut response = Response::new(Full::new(Bytes::from("")));
            *response.status_mut() = StatusCode::from_u16(200).unwrap();
            Ok(response)
          }
          Err(e) => {
            log::error!("HTTP proxy CONNECT failed: {}", e);
            let mut response = Response::new(Full::new(Bytes::from(format!(
              "Proxy connection failed: {}",
              e
            ))));
            *response.status_mut() = StatusCode::BAD_GATEWAY;
            Ok(response)
          }
        }
      }
      "socks4" | "socks5" => {
        // Use async-socks5 for SOCKS proxies
        let host = upstream.host_str().unwrap_or("127.0.0.1");
        let port = upstream.port().unwrap_or(1080);
        let socks_addr = format!("{}:{}", host, port);

        let username = upstream.username();
        let password = upstream.password().unwrap_or("");

        match connect_via_socks(
          &socks_addr,
          target_host,
          target_port,
          scheme == "socks5",
          if !username.is_empty() {
            Some((username, password))
          } else {
            None
          },
        )
        .await
        {
          Ok(_stream) => {
            let mut response = Response::new(Full::new(Bytes::from("")));
            *response.status_mut() = StatusCode::from_u16(200).unwrap();
            Ok(response)
          }
          Err(e) => {
            log::error!("SOCKS connection failed: {}", e);
            let mut response = Response::new(Full::new(Bytes::from(format!(
              "SOCKS connection failed: {}",
              e
            ))));
            *response.status_mut() = StatusCode::BAD_GATEWAY;
            Ok(response)
          }
        }
      }
      _ => {
        let mut response = Response::new(Full::new(Bytes::from("Unsupported upstream scheme")));
        *response.status_mut() = StatusCode::BAD_GATEWAY;
        Ok(response)
      }
    }
  } else {
    let mut response = Response::new(Full::new(Bytes::from("Bad Request")));
    *response.status_mut() = StatusCode::BAD_REQUEST;
    Ok(response)
  }
}

async fn connect_via_http_proxy(
  upstream: &Url,
  target_host: &str,
  target_port: u16,
) -> Result<TcpStream, Box<dyn std::error::Error>> {
  let proxy_host = upstream.host_str().unwrap_or("127.0.0.1");
  let proxy_port = upstream.port().unwrap_or(8080);
  let mut stream = TcpStream::connect((proxy_host, proxy_port)).await?;

  // Add proxy authentication if provided
  let mut connect_req = format!(
    "CONNECT {}:{} HTTP/1.1\r\nHost: {}:{}\r\n",
    target_host, target_port, target_host, target_port
  );

  if !upstream.username().is_empty() {
    use base64::{engine::general_purpose, Engine as _};
    let username = upstream.username();
    let password = upstream.password().unwrap_or("");
    let auth = general_purpose::STANDARD.encode(format!("{}:{}", username, password));
    connect_req.push_str(&format!("Proxy-Authorization: Basic {}\r\n", auth));
  }

  connect_req.push_str("\r\n");

  stream.write_all(connect_req.as_bytes()).await?;

  let mut buffer = [0u8; 4096];
  let n = stream.read(&mut buffer).await?;
  let response = String::from_utf8_lossy(&buffer[..n]);

  if response.starts_with("HTTP/1.1 200") || response.starts_with("HTTP/1.0 200") {
    Ok(stream)
  } else {
    Err(format!("Upstream proxy CONNECT failed: {}", response).into())
  }
}

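// Illustrative sketch (not part of the original file): how the helper above can
// be driven on its own. The proxy address and credentials are made-up
// placeholder values; the target is only tunneled to, not fetched, so a real
// caller would still speak TLS/HTTP itself over the returned stream.
#[allow(dead_code)]
async fn demo_http_connect() -> Result<(), Box<dyn std::error::Error>> {
  let upstream = Url::parse("http://user:secret@127.0.0.1:3128")?;
  let tunnel: TcpStream = connect_via_http_proxy(&upstream, "example.com", 443).await?;
  // `tunnel` is now a raw TCP pipe to example.com:443 through the proxy.
  drop(tunnel);
  Ok(())
}
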
async fn connect_via_socks(
  socks_addr: &str,
  target_host: &str,
  target_port: u16,
  is_socks5: bool,
  auth: Option<(&str, &str)>,
) -> Result<TcpStream, Box<dyn std::error::Error>> {
  let mut stream = TcpStream::connect(socks_addr).await?;

  if is_socks5 {
    // SOCKS5 connection using async_socks5
    use async_socks5::{connect, AddrKind, Auth};

    let target = if let Ok(ip) = target_host.parse::<std::net::IpAddr>() {
      AddrKind::Ip(std::net::SocketAddr::new(ip, target_port))
    } else {
      AddrKind::Domain(target_host.to_string(), target_port)
    };

    let auth_info: Option<Auth> = auth.map(|(user, pass)| Auth {
      username: user.to_string(),
      password: pass.to_string(),
    });

    connect(&mut stream, target, auth_info).await?;
    Ok(stream)
  } else {
    // SOCKS4 - simplified implementation
    let ip: std::net::IpAddr = target_host.parse()?;

    let mut request = vec![0x04, 0x01]; // SOCKS4, CONNECT
    request.extend_from_slice(&target_port.to_be_bytes());
    match ip {
      std::net::IpAddr::V4(ipv4) => {
        request.extend_from_slice(&ipv4.octets());
      }
      std::net::IpAddr::V6(_) => {
        return Err("SOCKS4 does not support IPv6".into());
      }
    }
    request.push(0); // NULL terminator for userid

    stream.write_all(&request).await?;

    let mut response = [0u8; 8];
    stream.read_exact(&mut response).await?;

    if response[1] != 0x5A {
      return Err("SOCKS4 connection failed".into());
    }

    Ok(stream)
  }
}

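// Illustrative sketch (not part of the original file): exercising the SOCKS
// helper above. Addresses and credentials are placeholders. Note that the
// SOCKS4 branch above requires `target_host` to be an IP literal and ignores
// credentials, while the SOCKS5 branch also accepts domain names.
#[allow(dead_code)]
async fn demo_socks_connect() -> Result<(), Box<dyn std::error::Error>> {
  // SOCKS5 with username/password auth and a domain target.
  let s5 = connect_via_socks(
    "127.0.0.1:1080",
    "example.com",
    443,
    true,
    Some(("user", "secret")),
  )
  .await?;
  drop(s5);

  // SOCKS4: target must be an IPv4 literal (192.0.2.10 is a documentation address).
  let s4 = connect_via_socks("127.0.0.1:1080", "192.0.2.10", 80, false, None).await?;
  drop(s4);
  Ok(())
}
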
async fn handle_http(
  req: Request<hyper::body::Incoming>,
  upstream_url: Option<String>,
) -> Result<Response<Full<Bytes>>, Infallible> {
  // Use reqwest for all HTTP requests as it handles proxies better
  // This is faster and more reliable than trying to use hyper-proxy with version conflicts
  use reqwest::Client;

  log::error!(
    "DEBUG: Handling HTTP request: {} {} (host: {:?})",
    req.method(),
    req.uri(),
    req.uri().host()
  );

  // Extract domain for traffic tracking
  let domain = req
    .uri()
    .host()
    .map(|h| h.to_string())
    .unwrap_or_else(|| "unknown".to_string());

  let client_builder = Client::builder();
  let client = if let Some(ref upstream) = upstream_url {
    if upstream == "DIRECT" {
      client_builder.build().unwrap_or_default()
    } else {
      // Build reqwest client with proxy
      match build_reqwest_client_with_proxy(upstream) {
        Ok(c) => c,
        Err(e) => {
          log::error!("Failed to create proxy client: {}", e);
          let mut response = Response::new(Full::new(Bytes::from(format!(
            "Proxy configuration error: {}",
            e
          ))));
          *response.status_mut() = StatusCode::BAD_GATEWAY;
          return Ok(response);
        }
      }
    }
  } else {
    client_builder.build().unwrap_or_default()
  };

  // Convert hyper request to reqwest request
  let uri = req.uri().to_string();
  let method = req.method().clone();
  let headers = req.headers().clone();

  let mut request_builder = match method.as_str() {
    "GET" => client.get(&uri),
    "POST" => client.post(&uri),
    "PUT" => client.put(&uri),
    "DELETE" => client.delete(&uri),
    "PATCH" => client.patch(&uri),
    "HEAD" => client.head(&uri),
    _ => {
      let mut response = Response::new(Full::new(Bytes::from("Unsupported method")));
      *response.status_mut() = StatusCode::METHOD_NOT_ALLOWED;
      return Ok(response);
    }
  };

  // Copy headers, but skip proxy-specific headers that shouldn't be forwarded
  for (name, value) in headers.iter() {
    // Skip proxy-specific headers - these are for the local proxy, not the upstream
    if name.as_str().eq_ignore_ascii_case("proxy-authorization")
      || name.as_str().eq_ignore_ascii_case("proxy-connection")
      || name.as_str().eq_ignore_ascii_case("proxy-authenticate")
    {
      continue;
    }
    if let Ok(val) = value.to_str() {
      request_builder = request_builder.header(name.as_str(), val);
    }
  }

  // Get body
  let body_bytes = match req.collect().await {
    Ok(collected) => collected.to_bytes(),
    Err(_) => Bytes::new(),
  };

  if !body_bytes.is_empty() {
    request_builder = request_builder.body(body_bytes.to_vec());
  }

  // Execute request
  match request_builder.send().await {
    Ok(response) => {
      let status = response.status();
      let headers = response.headers().clone();
      let body = response.bytes().await.unwrap_or_default();

      // Record request in traffic tracker
      let response_size = body.len() as u64;
      if let Some(tracker) = get_traffic_tracker() {
        tracker.record_request(&domain, body_bytes.len() as u64, response_size);
      }

      let mut hyper_response = Response::new(Full::new(body));
      *hyper_response.status_mut() = StatusCode::from_u16(status.as_u16()).unwrap();

      // Copy response headers
      for (name, value) in headers.iter() {
        if let Ok(val) = value.to_str() {
          hyper_response
            .headers_mut()
            .insert(name, val.parse().unwrap());
        }
      }

      Ok(hyper_response)
    }
    Err(e) => {
      log::error!("Request failed: {}", e);
      let mut response = Response::new(Full::new(Bytes::from(format!("Request failed: {}", e))));
      *response.status_mut() = StatusCode::BAD_GATEWAY;
      Ok(response)
    }
  }
}

fn build_reqwest_client_with_proxy(
  upstream_url: &str,
) -> Result<reqwest::Client, Box<dyn std::error::Error>> {
  use reqwest::Proxy;

  let client_builder = reqwest::Client::builder();

  // Parse the upstream URL
  let url = Url::parse(upstream_url)?;
  let scheme = url.scheme();

  let proxy = match scheme {
    "http" | "https" => {
      // For HTTP/HTTPS proxies, reqwest handles them directly
      Proxy::http(upstream_url)?
    }
    "socks5" => {
      // For SOCKS5, reqwest supports it directly
      Proxy::all(upstream_url)?
    }
    "socks4" => {
      // SOCKS4 is not directly supported by reqwest, would need custom handling
      return Err("SOCKS4 not supported for HTTP requests via reqwest".into());
    }
    _ => {
      return Err(format!("Unsupported proxy scheme: {}", scheme).into());
    }
  };

  Ok(client_builder.proxy(proxy).build()?)
}

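// Illustrative sketch (not part of the original file): building a proxied
// client with the helper above and issuing one request. The upstream URL is a
// placeholder; routing SOCKS5 through reqwest additionally assumes the crate's
// `socks` feature is enabled in Cargo.toml.
#[allow(dead_code)]
async fn demo_proxied_get() -> Result<(), Box<dyn std::error::Error>> {
  let client = build_reqwest_client_with_proxy("socks5://127.0.0.1:1080")?;
  let status = client.get("https://example.com/").send().await?.status();
  log::info!("proxied request returned {}", status);
  Ok(())
}
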
pub async fn run_proxy_server(config: ProxyConfig) -> Result<(), Box<dyn std::error::Error>> {
  log::error!(
    "Proxy worker starting, looking for config id: {}",
    config.id
  );

  // Load the config from disk to get the latest state
  let config = match crate::proxy_storage::get_proxy_config(&config.id) {
    Some(c) => c,
    None => {
      log::error!("Config not found for id: {}", config.id);
      return Err("Config not found".into());
    }
  };

  log::error!(
    "Found config: id={}, port={:?}, upstream={}, profile_id={:?}",
    config.id,
    config.local_port,
    config.upstream_url,
    config.profile_id
  );

  log::error!("Starting proxy server for config id: {}", config.id);

  // Initialize traffic tracker with profile ID if available
  // This can now be called multiple times to update the tracker
  init_traffic_tracker(config.id.clone(), config.profile_id.clone());
  log::error!(
    "Traffic tracker initialized for proxy: {} (profile_id: {:?})",
    config.id,
    config.profile_id
  );

  // Verify tracker was initialized correctly
  if let Some(tracker) = crate::traffic_stats::get_traffic_tracker() {
    log::error!(
      "Tracker verified: proxy_id={}, profile_id={:?}",
      tracker.proxy_id,
      tracker.profile_id
    );
  } else {
    log::error!("WARNING: Tracker was not initialized!");
  }

  // Determine the bind address
  let bind_addr = SocketAddr::from(([127, 0, 0, 1], config.local_port.unwrap_or(0)));

  log::error!("Attempting to bind proxy server to {}", bind_addr);

  // Bind to the port
  let listener = TcpListener::bind(bind_addr).await?;
  let actual_port = listener.local_addr()?.port();

  log::error!("Successfully bound to port {}", actual_port);

  // Update config with actual port and local_url
  let mut updated_config = config.clone();
  updated_config.local_port = Some(actual_port);
  updated_config.local_url = Some(format!("http://127.0.0.1:{}", actual_port));

  // Save the updated config
  log::error!(
    "Saving updated config with local_url={:?}",
    updated_config.local_url
  );
  if !crate::proxy_storage::update_proxy_config(&updated_config) {
    log::error!("Failed to update proxy config");
    return Err("Failed to update proxy config".into());
  }

  let upstream_url = if updated_config.upstream_url == "DIRECT" {
    None
  } else {
    Some(updated_config.upstream_url.clone())
  };

  log::error!("Proxy server bound to 127.0.0.1:{}", actual_port);
  log::error!(
    "Proxy server listening on 127.0.0.1:{} (ready to accept connections)",
    actual_port
  );
  log::error!("Proxy server entering accept loop - process should stay alive");

  // Start a background task to periodically flush traffic stats to disk
  tokio::spawn(async move {
    let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(1));
    loop {
      interval.tick().await;
      if let Some(tracker) = get_traffic_tracker() {
        if let Err(e) = tracker.flush_to_disk() {
          log::error!("Failed to flush traffic stats: {}", e);
        }
      }
    }
  });

  // Keep the runtime alive with an infinite loop
  // This ensures the process doesn't exit even if there are no active connections
  loop {
    match listener.accept().await {
      Ok((mut stream, peer_addr)) => {
        // Enable TCP_NODELAY to ensure small packets are sent immediately
        // This is critical for CONNECT responses to be sent before tunneling begins
        let _ = stream.set_nodelay(true);
        log::error!("DEBUG: Accepted connection from {:?}", peer_addr);

        let upstream = upstream_url.clone();

        tokio::task::spawn(async move {
          // Read first bytes to detect CONNECT requests
          // CONNECT requests need special handling for tunneling
          let mut peek_buffer = [0u8; 8];
          match stream.read(&mut peek_buffer).await {
            Ok(0) => {
              log::error!("DEBUG: Connection closed immediately (0 bytes read)");
            }
            Ok(n) => {
              let request_start = String::from_utf8_lossy(&peek_buffer[..n.min(7)]);
              log::error!("DEBUG: Read {} bytes, starts with: {:?}", n, request_start);
              if n >= 7 && request_start.starts_with("CONNECT") {
                // Handle CONNECT request manually for tunneling
                let mut full_request = Vec::with_capacity(4096);
                full_request.extend_from_slice(&peek_buffer[..n]);

                // Read the rest of the CONNECT request
                let mut remaining = [0u8; 4096];
                loop {
                  match stream.read(&mut remaining).await {
                    Ok(0) => break,
                    Ok(m) => {
                      full_request.extend_from_slice(&remaining[..m]);
                      if full_request.ends_with(b"\r\n\r\n") || full_request.ends_with(b"\n\n") {
                        break;
                      }
                    }
                    Err(_) => break,
                  }
                }

                // Handle CONNECT manually
                log::error!(
                  "DEBUG: Handling CONNECT manually for: {}",
                  String::from_utf8_lossy(&full_request[..full_request.len().min(100)])
                );
                if let Err(e) = handle_connect_from_buffer(stream, full_request, upstream).await {
                  log::error!("Error handling CONNECT request: {:?}", e);
                } else {
                  log::error!("DEBUG: CONNECT handled successfully");
                }
                return;
              }

              // Not CONNECT (or partial read) - reconstruct stream with consumed bytes prepended
              // This is critical: we MUST prepend any bytes we consumed, even if < 7 bytes
              log::error!(
                "DEBUG: Non-CONNECT request, first {} bytes: {:?}",
                n,
                String::from_utf8_lossy(&peek_buffer[..n])
              );
              let prepended_bytes = peek_buffer[..n].to_vec();
              let prepended_reader = PrependReader {
                prepended: prepended_bytes,
                prepended_pos: 0,
                inner: stream,
              };
              let io = TokioIo::new(prepended_reader);
              let service = service_fn(move |req| handle_request(req, upstream.clone()));

              if let Err(err) = http1::Builder::new().serve_connection(io, service).await {
                log::error!("Error serving connection: {:?}", err);
              }
            }
            Err(e) => {
              log::error!("Error reading from connection: {:?}", e);
            }
          }
        });
      }
      Err(e) => {
        log::error!("Error accepting connection: {:?}", e);
        // Continue accepting connections even if one fails
        // Add a small delay to avoid busy-waiting on errors
        tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
      }
    }
  }
}

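// Illustrative sketch (not part of the original file): roughly what the
// `PrependReader` constructed in the accept loop above has to provide. It
// first replays the bytes that were consumed while peeking for "CONNECT",
// then hands off to the underlying socket, so hyper still sees a complete
// request. The real type lives elsewhere in this module; the field names
// follow the usage above, and the trait impls here are an assumption.
#[allow(dead_code)]
struct PrependReaderSketch {
  prepended: Vec<u8>,
  prepended_pos: usize,
  inner: TcpStream,
}

impl tokio::io::AsyncRead for PrependReaderSketch {
  fn poll_read(
    mut self: std::pin::Pin<&mut Self>,
    cx: &mut std::task::Context<'_>,
    buf: &mut tokio::io::ReadBuf<'_>,
  ) -> std::task::Poll<std::io::Result<()>> {
    // Serve the peeked bytes first.
    if self.prepended_pos < self.prepended.len() {
      let n = (self.prepended.len() - self.prepended_pos).min(buf.remaining());
      let start = self.prepended_pos;
      buf.put_slice(&self.prepended[start..start + n]);
      self.prepended_pos += n;
      return std::task::Poll::Ready(Ok(()));
    }
    // Then read from the socket as usual.
    std::pin::Pin::new(&mut self.inner).poll_read(cx, buf)
  }
}

impl tokio::io::AsyncWrite for PrependReaderSketch {
  fn poll_write(
    mut self: std::pin::Pin<&mut Self>,
    cx: &mut std::task::Context<'_>,
    buf: &[u8],
  ) -> std::task::Poll<std::io::Result<usize>> {
    // Writes are passed straight through to the socket.
    std::pin::Pin::new(&mut self.inner).poll_write(cx, buf)
  }

  fn poll_flush(
    mut self: std::pin::Pin<&mut Self>,
    cx: &mut std::task::Context<'_>,
  ) -> std::task::Poll<std::io::Result<()>> {
    std::pin::Pin::new(&mut self.inner).poll_flush(cx)
  }

  fn poll_shutdown(
    mut self: std::pin::Pin<&mut Self>,
    cx: &mut std::task::Context<'_>,
  ) -> std::task::Poll<std::io::Result<()>> {
    std::pin::Pin::new(&mut self.inner).poll_shutdown(cx)
  }
}
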
async fn handle_connect_from_buffer(
  mut client_stream: TcpStream,
  request_buffer: Vec<u8>,
  upstream_url: Option<String>,
) -> Result<(), Box<dyn std::error::Error>> {
  // Parse the CONNECT request from the buffer
  let request_str = String::from_utf8_lossy(&request_buffer);
  let lines: Vec<&str> = request_str.lines().collect();

  if lines.is_empty() {
    let _ = client_stream
      .write_all(b"HTTP/1.1 400 Bad Request\r\n\r\n")
      .await;
    return Err("Empty CONNECT request".into());
  }

  // Parse CONNECT request: "CONNECT host:port HTTP/1.1"
  let parts: Vec<&str> = lines[0].split_whitespace().collect();
  if parts.len() < 2 || parts[0] != "CONNECT" {
    let _ = client_stream
      .write_all(b"HTTP/1.1 400 Bad Request\r\n\r\n")
      .await;
    return Err("Invalid CONNECT request".into());
  }

  let target = parts[1];
  let (target_host, target_port) = if let Some(colon_pos) = target.find(':') {
    let host = &target[..colon_pos];
    let port: u16 = target[colon_pos + 1..].parse().unwrap_or(443);
    (host, port)
  } else {
    (target, 443)
  };

  // Record domain access in traffic tracker
  let domain = target_host.to_string();
  if let Some(tracker) = get_traffic_tracker() {
    tracker.record_request(&domain, 0, 0);
  }

  // Connect to target (directly or via upstream proxy)
  let target_stream = if upstream_url.is_none()
    || upstream_url
      .as_ref()
      .map(|s| s == "DIRECT")
      .unwrap_or(false)
  {
    // Direct connection
    TcpStream::connect((target_host, target_port)).await?
  } else {
    // Connect via upstream proxy
    let upstream = Url::parse(upstream_url.as_ref().unwrap())?;
    let scheme = upstream.scheme();

    match scheme {
      "http" | "https" => {
        // Connect via HTTP proxy CONNECT
        let proxy_host = upstream.host_str().unwrap_or("127.0.0.1");
        let proxy_port = upstream.port().unwrap_or(8080);
        let mut proxy_stream = TcpStream::connect((proxy_host, proxy_port)).await?;

        // Add authentication if provided
        let mut connect_req = format!(
          "CONNECT {}:{} HTTP/1.1\r\nHost: {}:{}\r\n",
          target_host, target_port, target_host, target_port
        );

        if !upstream.username().is_empty() {
          use base64::{engine::general_purpose, Engine as _};
          let username = upstream.username();
          let password = upstream.password().unwrap_or("");
          let auth = general_purpose::STANDARD.encode(format!("{}:{}", username, password));
          connect_req.push_str(&format!("Proxy-Authorization: Basic {}\r\n", auth));
        }

        connect_req.push_str("\r\n");

        // Send CONNECT request to upstream proxy
        proxy_stream.write_all(connect_req.as_bytes()).await?;

        // Read response
        let mut buffer = [0u8; 4096];
        let n = proxy_stream.read(&mut buffer).await?;
        let response = String::from_utf8_lossy(&buffer[..n]);

        if !response.starts_with("HTTP/1.1 200") && !response.starts_with("HTTP/1.0 200") {
          return Err(format!("Upstream proxy CONNECT failed: {}", response).into());
        }

        proxy_stream
      }
      "socks4" | "socks5" => {
        // Connect via SOCKS proxy
        let socks_host = upstream.host_str().unwrap_or("127.0.0.1");
        let socks_port = upstream.port().unwrap_or(1080);
        let socks_addr = format!("{}:{}", socks_host, socks_port);

        let username = upstream.username();
        let password = upstream.password().unwrap_or("");

        connect_via_socks(
          &socks_addr,
          target_host,
          target_port,
          scheme == "socks5",
          if !username.is_empty() {
            Some((username, password))
          } else {
            None
          },
        )
        .await?
      }
      _ => {
        return Err(format!("Unsupported upstream proxy scheme: {}", scheme).into());
      }
    }
  };

  // Enable TCP_NODELAY on target stream for immediate data transfer
  let _ = target_stream.set_nodelay(true);

  // Send 200 Connection Established response to client
  // CRITICAL: Must flush after writing to ensure response is sent before tunneling
  client_stream
    .write_all(b"HTTP/1.1 200 Connection Established\r\n\r\n")
    .await?;
  client_stream.flush().await?;

  log::error!("DEBUG: Sent 200 Connection Established response, starting tunnel");

  // Now tunnel data bidirectionally with counting
  // Wrap streams to count bytes transferred
  let counting_client = CountingStream::new(client_stream);
  let counting_target = CountingStream::new(target_stream);

  // Get references for final stats
  let client_read_counter = counting_client.bytes_read.clone();
  let client_write_counter = counting_client.bytes_written.clone();
  let target_read_counter = counting_target.bytes_read.clone();
  let target_write_counter = counting_target.bytes_written.clone();

  // Split streams for bidirectional copying
  let (mut client_read, mut client_write) = tokio::io::split(counting_client);
  let (mut target_read, mut target_write) = tokio::io::split(counting_target);

  log::error!("DEBUG: Starting bidirectional tunnel");

  // Spawn two tasks to forward data in both directions
  let client_to_target = tokio::spawn(async move {
    let result = tokio::io::copy(&mut client_read, &mut target_write).await;
    match result {
      Ok(bytes) => {
        log::error!("DEBUG: Tunneled {} bytes from client->target", bytes);
      }
      Err(e) => {
        log::error!("Error forwarding client->target: {:?}", e);
      }
    }
  });

  let target_to_client = tokio::spawn(async move {
    let result = tokio::io::copy(&mut target_read, &mut client_write).await;
    match result {
      Ok(bytes) => {
        log::error!("DEBUG: Tunneled {} bytes from target->client", bytes);
      }
      Err(e) => {
        log::error!("Error forwarding target->client: {:?}", e);
      }
    }
  });

  // Wait for either direction to finish (connection closed)
  tokio::select! {
    _ = client_to_target => {
      log::error!("DEBUG: Client->target tunnel closed");
    }
    _ = target_to_client => {
      log::error!("DEBUG: Target->client tunnel closed");
    }
  }

  // Log final byte counts and update domain stats
  let final_sent =
    client_read_counter.load(Ordering::Relaxed) + target_write_counter.load(Ordering::Relaxed);
  let final_recv =
    target_read_counter.load(Ordering::Relaxed) + client_write_counter.load(Ordering::Relaxed);
  log::error!(
    "DEBUG: Tunnel closed - sent: {} bytes, received: {} bytes",
    final_sent,
    final_recv
  );

  // Update domain-specific byte counts now that tunnel is complete
  if let Some(tracker) = get_traffic_tracker() {
    tracker.update_domain_bytes(&domain, final_sent, final_recv);
  }

  Ok(())
}

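// Illustrative sketch (not part of the original file): the read half of a
// byte-counting wrapper like the `CountingStream` used in the tunnel above.
// The real type lives elsewhere in this module and also wraps the write side;
// only the shape is shown here: shared atomic counters that the tunnel code
// can still read after the streams have been split and moved into copy tasks.
#[allow(dead_code)]
struct CountingStreamSketch {
  inner: TcpStream,
  bytes_read: std::sync::Arc<std::sync::atomic::AtomicU64>,
}

impl CountingStreamSketch {
  #[allow(dead_code)]
  fn new(inner: TcpStream) -> Self {
    Self {
      inner,
      bytes_read: std::sync::Arc::new(std::sync::atomic::AtomicU64::new(0)),
    }
  }
}

impl tokio::io::AsyncRead for CountingStreamSketch {
  fn poll_read(
    mut self: std::pin::Pin<&mut Self>,
    cx: &mut std::task::Context<'_>,
    buf: &mut tokio::io::ReadBuf<'_>,
  ) -> std::task::Poll<std::io::Result<()>> {
    let before = buf.filled().len();
    let poll = std::pin::Pin::new(&mut self.inner).poll_read(cx, buf);
    if let std::task::Poll::Ready(Ok(())) = &poll {
      // Whatever the inner socket appended to the buffer counts as received.
      let n = (buf.filled().len() - before) as u64;
      self
        .bytes_read
        .fetch_add(n, std::sync::atomic::Ordering::Relaxed);
    }
    poll
  }
}
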
@@ -0,0 +1,125 @@
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::proxy_runner::{start_proxy_process, stop_proxy_process};
|
||||
use crate::proxy_storage::{delete_proxy_config, generate_proxy_id, list_proxy_configs};
|
||||
use std::process::Command;
|
||||
use std::time::Duration;
|
||||
use tokio::net::TcpStream;
|
||||
use tokio::time::sleep;
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_proxy_storage() {
|
||||
// Test proxy config storage
|
||||
let id = generate_proxy_id();
|
||||
let config = crate::proxy_storage::ProxyConfig::new(id.clone(), "DIRECT".to_string(), Some(8080));
|
||||
|
||||
// Save config
|
||||
crate::proxy_storage::save_proxy_config(&config).unwrap();
|
||||
|
||||
// Load config
|
||||
let loaded = crate::proxy_storage::get_proxy_config(&id).unwrap();
|
||||
assert_eq!(loaded.id, id);
|
||||
assert_eq!(loaded.upstream_url, "DIRECT");
|
||||
assert_eq!(loaded.local_port, Some(8080));
|
||||
|
||||
// Delete config
|
||||
assert!(crate::proxy_storage::delete_proxy_config(&id));
|
||||
assert!(crate::proxy_storage::get_proxy_config(&id).is_none());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_proxy_id_generation() {
|
||||
let id1 = generate_proxy_id();
|
||||
let id2 = generate_proxy_id();
|
||||
assert_ne!(id1, id2);
|
||||
assert!(id1.starts_with("proxy_"));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_proxy_process_lifecycle() {
|
||||
// Start a direct proxy
|
||||
let config = start_proxy_process(None, Some(0)).await.unwrap();
|
||||
let id = config.id.clone();
|
||||
|
||||
// Verify config was saved
|
||||
let loaded = crate::proxy_storage::get_proxy_config(&id).unwrap();
|
||||
assert_eq!(loaded.id, id);
|
||||
|
||||
// Wait a bit for the proxy to start
|
||||
sleep(Duration::from_millis(500)).await;
|
||||
|
||||
// Stop the proxy
|
||||
let stopped = stop_proxy_process(&id).await.unwrap();
|
||||
assert!(stopped);
|
||||
|
||||
// Verify config was deleted
|
||||
assert!(crate::proxy_storage::get_proxy_config(&id).is_none());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_proxy_with_upstream_http() {
|
||||
// Start a proxy with HTTP upstream (using a non-existent proxy for testing)
|
||||
let upstream_url = "http://127.0.0.1:9999";
|
||||
let config = start_proxy_process(Some(upstream_url.to_string()), Some(0))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let id = config.id.clone();
|
||||
|
||||
// Wait a bit
|
||||
sleep(Duration::from_millis(500)).await;
|
||||
|
||||
// Clean up
|
||||
let _ = stop_proxy_process(&id).await;
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_proxy_with_upstream_socks5() {
|
||||
// Start a proxy with SOCKS5 upstream
|
||||
let upstream_url = "socks5://127.0.0.1:1080";
|
||||
let config = start_proxy_process(Some(upstream_url.to_string()), Some(0))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let id = config.id.clone();
|
||||
|
||||
// Wait a bit
|
||||
sleep(Duration::from_millis(500)).await;
|
||||
|
||||
// Clean up
|
||||
let _ = stop_proxy_process(&id).await;
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_proxy_port_assignment() {
|
||||
// Start multiple proxies and verify they get different ports
|
||||
let config1 = start_proxy_process(None, None).await.unwrap();
|
||||
sleep(Duration::from_millis(100)).await;
|
||||
let config2 = start_proxy_process(None, None).await.unwrap();
|
||||
|
||||
// They should have different IDs
|
||||
assert_ne!(config1.id, config2.id);
|
||||
|
||||
// Clean up
|
||||
let _ = stop_proxy_process(&config1.id).await;
|
||||
let _ = stop_proxy_process(&config2.id).await;
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_proxy_list() {
|
||||
// Start a few proxies
|
||||
let config1 = start_proxy_process(None, None).await.unwrap();
|
||||
sleep(Duration::from_millis(100)).await;
|
||||
let config2 = start_proxy_process(None, None).await.unwrap();
|
||||
|
||||
// List all proxies
|
||||
let configs = list_proxy_configs();
|
||||
assert!(configs.len() >= 2);
|
||||
|
||||
// Clean up
|
||||
let _ = stop_proxy_process(&config1.id).await;
|
||||
let _ = stop_proxy_process(&config2.id).await;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,138 @@
|
||||
use directories::BaseDirs;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ProxyConfig {
|
||||
pub id: String,
|
||||
pub upstream_url: String, // Can be "DIRECT" for direct proxy
|
||||
pub local_port: Option<u16>,
|
||||
pub ignore_proxy_certificate: Option<bool>,
|
||||
pub local_url: Option<String>,
|
||||
pub pid: Option<u32>,
|
||||
#[serde(default)]
|
||||
pub profile_id: Option<String>,
|
||||
}
|
||||
|
||||
impl ProxyConfig {
|
||||
pub fn new(id: String, upstream_url: String, local_port: Option<u16>) -> Self {
|
||||
Self {
|
||||
id,
|
||||
upstream_url,
|
||||
local_port,
|
||||
ignore_proxy_certificate: None,
|
||||
local_url: None,
|
||||
pid: None,
|
||||
profile_id: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with_profile_id(mut self, profile_id: Option<String>) -> Self {
|
||||
self.profile_id = profile_id;
|
||||
self
|
||||
}
|
||||
}
|
||||
|
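// Illustrative example (not from the original sources) of what a config file
// produced by `save_proxy_config` below could look like on disk, e.g.
// <data dir>/proxies/proxy_1700000000_42.json. All values here are made up.
//
// {
//   "id": "proxy_1700000000_42",
//   "upstream_url": "socks5://127.0.0.1:1080",
//   "local_port": 54321,
//   "ignore_proxy_certificate": null,
//   "local_url": "http://127.0.0.1:54321",
//   "pid": 12345,
//   "profile_id": null
// }
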
||||
pub fn get_storage_dir() -> PathBuf {
|
||||
let base_dirs = BaseDirs::new().expect("Failed to get base directories");
|
||||
let mut path = base_dirs.data_local_dir().to_path_buf();
|
||||
path.push(if cfg!(debug_assertions) {
|
||||
"DonutBrowserDev"
|
||||
} else {
|
||||
"DonutBrowser"
|
||||
});
|
||||
path.push("proxies");
|
||||
path
|
||||
}
|
||||
|
||||
pub fn save_proxy_config(config: &ProxyConfig) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let storage_dir = get_storage_dir();
|
||||
fs::create_dir_all(&storage_dir)?;
|
||||
|
||||
let file_path = storage_dir.join(format!("{}.json", config.id));
|
||||
let content = serde_json::to_string_pretty(config)?;
|
||||
fs::write(&file_path, content)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn get_proxy_config(id: &str) -> Option<ProxyConfig> {
|
||||
let storage_dir = get_storage_dir();
|
||||
let file_path = storage_dir.join(format!("{}.json", id));
|
||||
|
||||
if !file_path.exists() {
|
||||
return None;
|
||||
}
|
||||
|
||||
match fs::read_to_string(&file_path) {
|
||||
Ok(content) => serde_json::from_str(&content).ok(),
|
||||
Err(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn delete_proxy_config(id: &str) -> bool {
|
||||
let storage_dir = get_storage_dir();
|
||||
let file_path = storage_dir.join(format!("{}.json", id));
|
||||
|
||||
if !file_path.exists() {
|
||||
return false;
|
||||
}
|
||||
|
||||
fs::remove_file(&file_path).is_ok()
|
||||
}
|
||||
|
||||
pub fn list_proxy_configs() -> Vec<ProxyConfig> {
|
||||
let storage_dir = get_storage_dir();
|
||||
|
||||
if !storage_dir.exists() {
|
||||
return Vec::new();
|
||||
}
|
||||
|
||||
let mut configs = Vec::new();
|
||||
if let Ok(entries) = fs::read_dir(&storage_dir) {
|
||||
for entry in entries.flatten() {
|
||||
let path = entry.path();
|
||||
if path.extension().is_some_and(|ext| ext == "json") {
|
||||
if let Ok(content) = fs::read_to_string(&path) {
|
||||
if let Ok(config) = serde_json::from_str::<ProxyConfig>(&content) {
|
||||
configs.push(config);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
configs
|
||||
}
|
||||
|
||||
pub fn update_proxy_config(config: &ProxyConfig) -> bool {
|
||||
let storage_dir = get_storage_dir();
|
||||
let file_path = storage_dir.join(format!("{}.json", config.id));
|
||||
|
||||
if !file_path.exists() {
|
||||
return false;
|
||||
}
|
||||
|
||||
match serde_json::to_string_pretty(config) {
|
||||
Ok(content) => fs::write(&file_path, content).is_ok(),
|
||||
Err(_) => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn generate_proxy_id() -> String {
|
||||
format!(
|
||||
"proxy_{}_{}",
|
||||
std::time::SystemTime::now()
|
||||
.duration_since(std::time::UNIX_EPOCH)
|
||||
.unwrap()
|
||||
.as_secs(),
|
||||
rand::random::<u32>()
|
||||
)
|
||||
}
|
||||
|
||||
pub fn is_process_running(pid: u32) -> bool {
|
||||
use sysinfo::{Pid, System};
|
||||
let system = System::new_all();
|
||||
system.process(Pid::from(pid as usize)).is_some()
|
||||
}
|
||||
@@ -3,8 +3,11 @@ use serde::{Deserialize, Serialize};
|
||||
use std::fs::{self, create_dir_all};
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::api_client::ApiClient;
|
||||
use crate::version_updater;
|
||||
use aes_gcm::{
|
||||
aead::{Aead, AeadCore, KeyInit, OsRng},
|
||||
Aes256Gcm, Key, Nonce,
|
||||
};
|
||||
use argon2::{password_hash::SaltString, Argon2, PasswordHasher};
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct TableSortingSettings {
|
||||
@@ -25,25 +28,35 @@ impl Default for TableSortingSettings {
|
||||
pub struct AppSettings {
|
||||
#[serde(default)]
|
||||
pub set_as_default_browser: bool,
|
||||
#[serde(default)]
|
||||
pub show_settings_on_startup: bool,
|
||||
#[serde(default = "default_theme")]
|
||||
pub theme: String, // "light", "dark", or "system"
|
||||
#[serde(default)]
|
||||
pub auto_delete_unused_binaries: bool,
|
||||
pub custom_theme: Option<std::collections::HashMap<String, String>>, // CSS var name -> value (e.g., "--background": "#1a1b26")
|
||||
#[serde(default)]
|
||||
pub api_enabled: bool,
|
||||
#[serde(default = "default_api_port")]
|
||||
pub api_port: u16,
|
||||
#[serde(default)]
|
||||
pub api_token: Option<String>, // Displayed token for user to copy
|
||||
}
|
||||
|
||||
fn default_theme() -> String {
|
||||
"system".to_string()
|
||||
}
|
||||
|
||||
fn default_api_port() -> u16 {
|
||||
10108
|
||||
}
|
||||
|
||||
impl Default for AppSettings {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
set_as_default_browser: false,
|
||||
show_settings_on_startup: true,
|
||||
theme: "system".to_string(),
|
||||
auto_delete_unused_binaries: true,
|
||||
custom_theme: None,
|
||||
api_enabled: false,
|
||||
api_port: 10108,
|
||||
api_token: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -53,12 +66,16 @@ pub struct SettingsManager {
|
||||
}
|
||||
|
||||
impl SettingsManager {
|
||||
pub fn new() -> Self {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
base_dirs: BaseDirs::new().expect("Failed to get base directories"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn instance() -> &'static SettingsManager {
|
||||
&SETTINGS_MANAGER
|
||||
}
|
||||
|
||||
pub fn get_settings_dir(&self) -> PathBuf {
|
||||
let mut path = self.base_dirs.data_local_dir().to_path_buf();
|
||||
path.push(if cfg!(debug_assertions) {
|
||||
@@ -93,17 +110,17 @@ impl SettingsManager {
|
||||
Ok(settings) => {
|
||||
// Save the settings back to ensure any missing fields are written with defaults
|
||||
if let Err(e) = self.save_settings(&settings) {
|
||||
eprintln!("Warning: Failed to update settings file with defaults: {e}");
|
||||
log::warn!("Warning: Failed to update settings file with defaults: {e}");
|
||||
}
|
||||
Ok(settings)
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Warning: Failed to parse settings file, using defaults: {e}");
|
||||
log::warn!("Warning: Failed to parse settings file, using defaults: {e}");
|
||||
let default_settings = AppSettings::default();
|
||||
|
||||
// Try to save default settings to fix the corrupted file
|
||||
if let Err(save_error) = self.save_settings(&default_settings) {
|
||||
eprintln!("Warning: Failed to save default settings: {save_error}");
|
||||
log::warn!("Warning: Failed to save default settings: {save_error}");
|
||||
}
|
||||
|
||||
Ok(default_settings)
|
||||
@@ -150,35 +167,265 @@ impl SettingsManager {
|
||||
}
|
||||
|
||||
pub fn should_show_settings_on_startup(&self) -> Result<bool, Box<dyn std::error::Error>> {
|
||||
let settings = self.load_settings()?;
|
||||
// Always return false - we don't show settings on startup anymore
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
// Show prompt if:
|
||||
// 1. User wants to see the prompt
|
||||
// 2. Donut Browser is not set as default
|
||||
// 3. User hasn't explicitly disabled the default browser setting
|
||||
Ok(settings.show_settings_on_startup && !settings.set_as_default_browser)
|
||||
fn get_vault_password() -> String {
|
||||
env!("DONUT_BROWSER_VAULT_PASSWORD").to_string()
|
||||
}
|
||||
|
||||
pub async fn generate_api_token(
|
||||
&self,
|
||||
app_handle: &tauri::AppHandle,
|
||||
) -> Result<String, Box<dyn std::error::Error>> {
|
||||
// Generate a secure random token (base64 encoded for URL safety)
|
||||
let token_bytes: [u8; 32] = {
|
||||
use rand::RngCore;
|
||||
let mut rng = rand::rng();
|
||||
let mut bytes = [0u8; 32];
|
||||
rng.fill_bytes(&mut bytes);
|
||||
bytes
|
||||
};
|
||||
use base64::{engine::general_purpose, Engine as _};
|
||||
let token = general_purpose::URL_SAFE_NO_PAD.encode(token_bytes);
|
||||
|
||||
// Store token securely
|
||||
self.store_api_token(app_handle, &token).await?;
|
||||
|
||||
Ok(token)
|
||||
}
|
||||
|
||||
pub async fn store_api_token(
|
||||
&self,
|
||||
_app_handle: &tauri::AppHandle,
|
||||
token: &str,
|
||||
) -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Store token in an encrypted file using Argon2 + AES-GCM
|
||||
let token_file = self.get_settings_dir().join("api_token.dat");
|
||||
|
||||
// Create directory if it doesn't exist
|
||||
if let Some(parent) = token_file.parent() {
|
||||
std::fs::create_dir_all(parent)?;
|
||||
}
|
||||
|
||||
let vault_password = Self::get_vault_password();
|
||||
|
||||
// Generate a random salt for Argon2
|
||||
let salt = SaltString::generate(&mut OsRng);
|
||||
|
||||
// Use Argon2 to derive a 32-byte key from the vault password
|
||||
let argon2 = Argon2::default();
|
||||
let password_hash = argon2
|
||||
.hash_password(vault_password.as_bytes(), &salt)
|
||||
.map_err(|e| format!("Argon2 key derivation failed: {e}"))?;
|
||||
let hash_value = password_hash.hash.unwrap();
|
||||
let hash_bytes = hash_value.as_bytes();
|
||||
|
||||
// Take first 32 bytes for AES-256 key
|
||||
let key_bytes: [u8; 32] = hash_bytes[..32]
|
||||
.try_into()
|
||||
.map_err(|_| "Invalid key length")?;
|
||||
let key = Key::<Aes256Gcm>::from(key_bytes);
|
||||
let cipher = Aes256Gcm::new(&key);
|
||||
|
||||
// Generate a random nonce
|
||||
let nonce = Aes256Gcm::generate_nonce(&mut OsRng);
|
||||
|
||||
// Encrypt the token
|
||||
let ciphertext = cipher
|
||||
.encrypt(&nonce, token.as_bytes())
|
||||
.map_err(|e| format!("Encryption failed: {e}"))?;
|
||||
|
||||
// Create file data with header, salt, nonce, and encrypted data
|
||||
let mut file_data = Vec::new();
|
||||
file_data.extend_from_slice(b"DBAPI"); // 5-byte header
|
||||
file_data.push(2u8); // Version 2 (Argon2 + AES-GCM)
|
||||
|
||||
// Store salt length and salt
|
||||
let salt_str = salt.as_str();
|
||||
file_data.push(salt_str.len() as u8);
|
||||
file_data.extend_from_slice(salt_str.as_bytes());
|
||||
|
||||
// Store nonce (12 bytes for AES-GCM)
|
||||
file_data.extend_from_slice(&nonce);
|
||||
|
||||
// Store ciphertext length and ciphertext
|
||||
file_data.extend_from_slice(&(ciphertext.len() as u32).to_le_bytes());
|
||||
file_data.extend_from_slice(&ciphertext);
|
||||
|
||||
std::fs::write(token_file, file_data)?;
|
||||
Ok(())
|
||||
}
|
||||
|
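// Byte layout written by `store_api_token` above (and parsed back by
// `get_api_token` below), summarized for reference; offsets are relative to
// the start of api_token.dat.
//
//   [0..5)          b"DBAPI"  magic header
//   [5]             0x02      format version (Argon2 + AES-GCM)
//   [6]             salt_len  length of the Argon2 salt string
//   [7..7+salt_len) salt      base64 salt text as produced by SaltString
//   next 12 bytes   nonce     AES-GCM nonce
//   next 4 bytes    len       little-endian u32 ciphertext length
//   remaining       data      AES-256-GCM ciphertext of the raw token
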
||||
pub async fn get_api_token(
|
||||
&self,
|
||||
_app_handle: &tauri::AppHandle,
|
||||
) -> Result<Option<String>, Box<dyn std::error::Error>> {
|
||||
let token_file = self.get_settings_dir().join("api_token.dat");
|
||||
|
||||
if !token_file.exists() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let file_data = std::fs::read(token_file)?;
|
||||
|
||||
// Validate header
|
||||
if file_data.len() < 6 || &file_data[0..5] != b"DBAPI" {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let version = file_data[5];
|
||||
|
||||
// Only support Argon2 + AES-GCM (version 2)
|
||||
if version != 2 {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
// Argon2 + AES-GCM decryption
|
||||
let mut offset = 6;
|
||||
|
||||
// Read salt
|
||||
if offset >= file_data.len() {
|
||||
return Ok(None);
|
||||
}
|
||||
let salt_len = file_data[offset] as usize;
|
||||
offset += 1;
|
||||
|
||||
if offset + salt_len > file_data.len() {
|
||||
return Ok(None);
|
||||
}
|
||||
let salt_bytes = &file_data[offset..offset + salt_len];
|
||||
let salt_str = std::str::from_utf8(salt_bytes).map_err(|_| "Invalid salt encoding")?;
|
||||
let salt = SaltString::from_b64(salt_str).map_err(|_| "Invalid salt format")?;
|
||||
offset += salt_len;
|
||||
|
||||
// Read nonce (12 bytes)
|
||||
if offset + 12 > file_data.len() {
|
||||
return Ok(None);
|
||||
}
|
||||
let nonce_bytes: [u8; 12] = file_data[offset..offset + 12]
|
||||
.try_into()
|
||||
.map_err(|_| "Invalid nonce length")?;
|
||||
let nonce = Nonce::from(nonce_bytes);
|
||||
offset += 12;
|
||||
|
||||
// Read ciphertext
|
||||
if offset + 4 > file_data.len() {
|
||||
return Ok(None);
|
||||
}
|
||||
let ciphertext_len = u32::from_le_bytes([
|
||||
file_data[offset],
|
||||
file_data[offset + 1],
|
||||
file_data[offset + 2],
|
||||
file_data[offset + 3],
|
||||
]) as usize;
|
||||
offset += 4;
|
||||
|
||||
if offset + ciphertext_len > file_data.len() {
|
||||
return Ok(None);
|
||||
}
|
||||
let ciphertext = &file_data[offset..offset + ciphertext_len];
|
||||
|
||||
// Derive key using Argon2
|
||||
let vault_password = Self::get_vault_password();
|
||||
let argon2 = Argon2::default();
|
||||
let password_hash = argon2
|
||||
.hash_password(vault_password.as_bytes(), &salt)
|
||||
.map_err(|e| format!("Argon2 key derivation failed: {e}"))?;
|
||||
let hash_value = password_hash.hash.unwrap();
|
||||
let hash_bytes = hash_value.as_bytes();
|
||||
|
||||
let key_bytes: [u8; 32] = hash_bytes[..32]
|
||||
.try_into()
|
||||
.map_err(|_| "Invalid key length")?;
|
||||
let key = Key::<Aes256Gcm>::from(key_bytes);
|
||||
let cipher = Aes256Gcm::new(&key);
|
||||
|
||||
// Decrypt the token
|
||||
let plaintext = cipher
|
||||
.decrypt(&nonce, ciphertext)
|
||||
.map_err(|_| "Decryption failed")?;
|
||||
|
||||
match String::from_utf8(plaintext) {
|
||||
Ok(token) => Ok(Some(token)),
|
||||
Err(_) => Ok(None),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn remove_api_token(
|
||||
&self,
|
||||
_app_handle: &tauri::AppHandle,
|
||||
) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let token_file = self.get_settings_dir().join("api_token.dat");
|
||||
|
||||
if token_file.exists() {
|
||||
std::fs::remove_file(token_file)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_app_settings() -> Result<AppSettings, String> {
|
||||
let manager = SettingsManager::new();
|
||||
manager
|
||||
pub async fn get_app_settings(app_handle: tauri::AppHandle) -> Result<AppSettings, String> {
|
||||
let manager = SettingsManager::instance();
|
||||
let mut settings = manager
|
||||
.load_settings()
|
||||
.map_err(|e| format!("Failed to load settings: {e}"))
|
||||
.map_err(|e| format!("Failed to load settings: {e}"))?;
|
||||
|
||||
// Always load token for display purposes if it exists
|
||||
settings.api_token = manager
|
||||
.get_api_token(&app_handle)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to load API token: {e}"))?;
|
||||
|
||||
Ok(settings)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn save_app_settings(settings: AppSettings) -> Result<(), String> {
|
||||
let manager = SettingsManager::new();
|
||||
pub async fn save_app_settings(
|
||||
app_handle: tauri::AppHandle,
|
||||
mut settings: AppSettings,
|
||||
) -> Result<AppSettings, String> {
|
||||
let manager = SettingsManager::instance();
|
||||
|
||||
if settings.api_enabled {
|
||||
if let Some(ref token) = settings.api_token {
|
||||
manager
|
||||
.store_api_token(&app_handle, token)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to store API token: {e}"))?;
|
||||
} else {
|
||||
let token = manager
|
||||
.generate_api_token(&app_handle)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to generate API token: {e}"))?;
|
||||
settings.api_token = Some(token);
|
||||
}
|
||||
}
|
||||
|
||||
// If API is being disabled, remove the token
|
||||
if !settings.api_enabled {
|
||||
manager
|
||||
.remove_api_token(&app_handle)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to remove API token: {e}"))?;
|
||||
settings.api_token = None;
|
||||
}
|
||||
|
||||
let mut persist_settings = settings.clone();
|
||||
persist_settings.api_token = None;
|
||||
manager
|
||||
.save_settings(&settings)
|
||||
.map_err(|e| format!("Failed to save settings: {e}"))
|
||||
.save_settings(&persist_settings)
|
||||
.map_err(|e| format!("Failed to save settings: {e}"))?;
|
||||
|
||||
Ok(settings)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn should_show_settings_on_startup() -> Result<bool, String> {
|
||||
let manager = SettingsManager::new();
|
||||
let manager = SettingsManager::instance();
|
||||
manager
|
||||
.should_show_settings_on_startup()
|
||||
.map_err(|e| format!("Failed to check prompt setting: {e}"))
|
||||
@@ -186,7 +433,7 @@ pub async fn should_show_settings_on_startup() -> Result<bool, String> {
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_table_sorting_settings() -> Result<TableSortingSettings, String> {
|
||||
let manager = SettingsManager::new();
|
||||
let manager = SettingsManager::instance();
|
||||
manager
|
||||
.load_table_sorting()
|
||||
.map_err(|e| format!("Failed to load table sorting settings: {e}"))
|
||||
@@ -194,30 +441,227 @@ pub async fn get_table_sorting_settings() -> Result<TableSortingSettings, String
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn save_table_sorting_settings(sorting: TableSortingSettings) -> Result<(), String> {
|
||||
let manager = SettingsManager::new();
|
||||
let manager = SettingsManager::instance();
|
||||
manager
|
||||
.save_table_sorting(&sorting)
|
||||
.map_err(|e| format!("Failed to save table sorting settings: {e}"))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn clear_all_version_cache_and_refetch(
|
||||
app_handle: tauri::AppHandle,
|
||||
) -> Result<(), String> {
|
||||
let api_client = ApiClient::new();
|
||||
|
||||
// Clear all cache first
|
||||
api_client
|
||||
.clear_all_cache()
|
||||
.map_err(|e| format!("Failed to clear version cache: {e}"))?;
|
||||
|
||||
let updater = version_updater::get_version_updater();
|
||||
let updater_guard = updater.lock().await;
|
||||
|
||||
updater_guard
|
||||
.trigger_manual_update(&app_handle)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to trigger version update: {e}"))?;
|
||||
|
||||
Ok(())
|
||||
// Global singleton instance
|
||||
lazy_static::lazy_static! {
|
||||
static ref SETTINGS_MANAGER: SettingsManager = SettingsManager::new();
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::env;
|
||||
use tempfile::TempDir;
|
||||
|
||||
fn create_test_settings_manager() -> (SettingsManager, TempDir) {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp directory");
|
||||
|
||||
// Set up a temporary home directory for testing
|
||||
env::set_var("HOME", temp_dir.path());
|
||||
|
||||
let manager = SettingsManager::new();
|
||||
(manager, temp_dir)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_settings_manager_creation() {
|
||||
let (_manager, _temp_dir) = create_test_settings_manager();
|
||||
// Test passes if no panic occurs
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_default_app_settings() {
|
||||
let default_settings = AppSettings::default();
|
||||
|
||||
assert!(
|
||||
!default_settings.set_as_default_browser,
|
||||
"Default should not set as default browser"
|
||||
);
|
||||
assert_eq!(
|
||||
default_settings.theme, "system",
|
||||
"Default theme should be system"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_default_table_sorting_settings() {
|
||||
let default_sorting = TableSortingSettings::default();
|
||||
|
||||
assert_eq!(
|
||||
default_sorting.column, "name",
|
||||
"Default sort column should be name"
|
||||
);
|
||||
assert_eq!(
|
||||
default_sorting.direction, "asc",
|
||||
"Default sort direction should be asc"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_load_settings_nonexistent_file() {
|
||||
let (manager, _temp_dir) = create_test_settings_manager();
|
||||
|
||||
let result = manager.load_settings();
|
||||
assert!(
|
||||
result.is_ok(),
|
||||
"Should handle nonexistent settings file gracefully"
|
||||
);
|
||||
|
||||
let settings = result.unwrap();
|
||||
assert!(
|
||||
!settings.set_as_default_browser,
|
||||
"Should return default settings"
|
||||
);
|
||||
assert_eq!(settings.theme, "system", "Should return default theme");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_save_and_load_settings() {
|
||||
let (manager, _temp_dir) = create_test_settings_manager();
|
||||
|
||||
let test_settings = AppSettings {
|
||||
set_as_default_browser: true,
|
||||
theme: "dark".to_string(),
|
||||
custom_theme: None,
|
||||
api_enabled: false,
|
||||
api_port: 10108,
|
||||
api_token: None,
|
||||
};
|
||||
|
||||
// Save settings
|
||||
let save_result = manager.save_settings(&test_settings);
|
||||
assert!(save_result.is_ok(), "Should save settings successfully");
|
||||
|
||||
// Load settings back
|
||||
let load_result = manager.load_settings();
|
||||
assert!(load_result.is_ok(), "Should load settings successfully");
|
||||
|
||||
let loaded_settings = load_result.unwrap();
|
||||
assert!(
|
||||
loaded_settings.set_as_default_browser,
|
||||
"Loaded settings should match saved"
|
||||
);
|
||||
assert_eq!(
|
||||
loaded_settings.theme, "dark",
|
||||
"Loaded theme should match saved"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_load_table_sorting_nonexistent_file() {
|
||||
let (manager, _temp_dir) = create_test_settings_manager();
|
||||
|
||||
let result = manager.load_table_sorting();
|
||||
assert!(
|
||||
result.is_ok(),
|
||||
"Should handle nonexistent sorting file gracefully"
|
||||
);
|
||||
|
||||
let sorting = result.unwrap();
|
||||
assert_eq!(sorting.column, "name", "Should return default sorting");
|
||||
assert_eq!(sorting.direction, "asc", "Should return default direction");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_save_and_load_table_sorting() {
|
||||
let (manager, _temp_dir) = create_test_settings_manager();
|
||||
|
||||
let test_sorting = TableSortingSettings {
|
||||
column: "browser".to_string(),
|
||||
direction: "desc".to_string(),
|
||||
};
|
||||
|
||||
// Save sorting
|
||||
let save_result = manager.save_table_sorting(&test_sorting);
|
||||
assert!(save_result.is_ok(), "Should save sorting successfully");
|
||||
|
||||
// Load sorting back
|
||||
let load_result = manager.load_table_sorting();
|
||||
assert!(load_result.is_ok(), "Should load sorting successfully");
|
||||
|
||||
let loaded_sorting = load_result.unwrap();
|
||||
assert_eq!(
|
||||
loaded_sorting.column, "browser",
|
||||
"Loaded column should match saved"
|
||||
);
|
||||
assert_eq!(
|
||||
loaded_sorting.direction, "desc",
|
||||
"Loaded direction should match saved"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_should_show_settings_on_startup() {
|
||||
let (manager, _temp_dir) = create_test_settings_manager();
|
||||
|
||||
let result = manager.should_show_settings_on_startup();
|
||||
assert!(result.is_ok(), "Should not fail");
|
||||
|
||||
let should_show = result.unwrap();
|
||||
assert!(
|
||||
!should_show,
|
||||
"Should always return false as per implementation"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_load_corrupted_settings_file() {
|
||||
let (manager, _temp_dir) = create_test_settings_manager();
|
||||
|
||||
// Create settings directory
|
||||
let settings_dir = manager.get_settings_dir();
|
||||
fs::create_dir_all(&settings_dir).expect("Should create settings directory");
|
||||
|
||||
// Write corrupted JSON
|
||||
let settings_file = manager.get_settings_file();
|
||||
fs::write(&settings_file, "{ invalid json }").expect("Should write corrupted file");
|
||||
|
||||
// Should handle corrupted file gracefully
|
||||
let result = manager.load_settings();
|
||||
assert!(
|
||||
result.is_ok(),
|
||||
"Should handle corrupted settings file gracefully"
|
||||
);
|
||||
|
||||
let settings = result.unwrap();
|
||||
assert!(
|
||||
!settings.set_as_default_browser,
|
||||
"Should return default settings for corrupted file"
|
||||
);
|
||||
assert_eq!(
|
||||
settings.theme, "system",
|
||||
"Should return default theme for corrupted file"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_settings_file_paths() {
|
||||
let (manager, _temp_dir) = create_test_settings_manager();
|
||||
|
||||
let settings_dir = manager.get_settings_dir();
|
||||
let settings_file = manager.get_settings_file();
|
||||
let sorting_file = manager.get_table_sorting_file();
|
||||
|
||||
assert!(
|
||||
settings_dir.to_string_lossy().contains("settings"),
|
||||
"Settings dir should contain 'settings'"
|
||||
);
|
||||
assert!(
|
||||
settings_file
|
||||
.to_string_lossy()
|
||||
.ends_with("app_settings.json"),
|
||||
"Settings file should end with app_settings.json"
|
||||
);
|
||||
assert!(
|
||||
sorting_file
|
||||
.to_string_lossy()
|
||||
.ends_with("table_sorting.json"),
|
||||
"Sorting file should end with table_sorting.json"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,114 @@
|
||||
use crate::profile::BrowserProfile;
|
||||
use directories::BaseDirs;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::BTreeSet;
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Default, Clone)]
|
||||
struct TagsData {
|
||||
tags: Vec<String>,
|
||||
}
|
||||
|
||||
pub struct TagManager {
|
||||
base_dirs: BaseDirs,
|
||||
data_dir_override: Option<PathBuf>,
|
||||
}
|
||||
|
||||
impl TagManager {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
base_dirs: BaseDirs::new().expect("Failed to get base directories"),
|
||||
data_dir_override: std::env::var("DONUTBROWSER_DATA_DIR")
|
||||
.ok()
|
||||
.map(PathBuf::from),
|
||||
}
|
||||
}
|
||||
|
||||
// Helper for tests to override data directory without global env var
|
||||
#[allow(dead_code)]
|
||||
pub fn with_data_dir_override(dir: &Path) -> Self {
|
||||
Self {
|
||||
base_dirs: BaseDirs::new().expect("Failed to get base directories"),
|
||||
data_dir_override: Some(dir.to_path_buf()),
|
||||
}
|
||||
}
|
||||
|
||||
fn get_tags_file_path(&self) -> PathBuf {
|
||||
if let Some(dir) = &self.data_dir_override {
|
||||
let mut override_path = dir.clone();
|
||||
let _ = fs::create_dir_all(&override_path);
|
||||
override_path.push("tags.json");
|
||||
return override_path;
|
||||
}
|
||||
|
||||
let mut path = self.base_dirs.data_local_dir().to_path_buf();
|
||||
path.push(if cfg!(debug_assertions) {
|
||||
"DonutBrowserDev"
|
||||
} else {
|
||||
"DonutBrowser"
|
||||
});
|
||||
path.push("data");
|
||||
path.push("tags.json");
|
||||
path
|
||||
}
|
||||
|
||||
fn load_tags_data(&self) -> Result<TagsData, Box<dyn std::error::Error>> {
|
||||
let file_path = self.get_tags_file_path();
|
||||
if !file_path.exists() {
|
||||
return Ok(TagsData::default());
|
||||
}
|
||||
let content = fs::read_to_string(file_path)?;
|
||||
let data: TagsData = serde_json::from_str(&content)?;
|
||||
Ok(data)
|
||||
}
|
||||
|
||||
fn save_tags_data(&self, data: &TagsData) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let file_path = self.get_tags_file_path();
|
||||
if let Some(parent) = file_path.parent() {
|
||||
fs::create_dir_all(parent)?;
|
||||
}
|
||||
let json = serde_json::to_string_pretty(data)?;
|
||||
fs::write(file_path, json)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn get_all_tags(&self) -> Result<Vec<String>, Box<dyn std::error::Error>> {
|
||||
let mut all = self.load_tags_data()?.tags;
|
||||
// Ensure deterministic order
|
||||
all.sort();
|
||||
all.dedup();
|
||||
Ok(all)
|
||||
}
|
||||
|
||||
pub fn rebuild_from_profiles(
|
||||
&self,
|
||||
profiles: &[BrowserProfile],
|
||||
) -> Result<Vec<String>, Box<dyn std::error::Error>> {
|
||||
// Build a set of all tags currently used by any profile
|
||||
let mut set: BTreeSet<String> = BTreeSet::new();
|
||||
for profile in profiles {
|
||||
for tag in &profile.tags {
|
||||
// Store exactly as provided (no normalization) to preserve characters
|
||||
set.insert(tag.clone());
|
||||
}
|
||||
}
|
||||
let combined: Vec<String> = set.into_iter().collect();
|
||||
self.save_tags_data(&TagsData {
|
||||
tags: combined.clone(),
|
||||
})?;
|
||||
Ok(combined)
|
||||
}
|
||||
}
|
||||
|
||||
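// Illustrative sketch (not part of the original file): exercising the manager
// above with the test-only directory override. The path is a placeholder.
#[allow(dead_code)]
fn demo_rebuild_tags(profiles: &[BrowserProfile]) -> Result<(), Box<dyn std::error::Error>> {
  let dir = std::env::temp_dir().join("donutbrowser-tags-demo");
  let manager = TagManager::with_data_dir_override(&dir);
  // Persist the union of every profile's tags and get them back deduplicated.
  let tags = manager.rebuild_from_profiles(profiles)?;
  log::info!("known tags: {:?}", tags);
  Ok(())
}
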
#[tauri::command]
|
||||
pub fn get_all_tags() -> Result<Vec<String>, String> {
|
||||
let tag_manager = crate::tag_manager::TAG_MANAGER.lock().unwrap();
|
||||
tag_manager
|
||||
.get_all_tags()
|
||||
.map_err(|e| format!("Failed to get tags: {e}"))
|
||||
}
|
||||
|
||||
lazy_static::lazy_static! {
|
||||
pub static ref TAG_MANAGER: std::sync::Mutex<TagManager> = std::sync::Mutex::new(TagManager::new());
|
||||
}
|
||||
@@ -1,539 +0,0 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::process::Command;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct SystemTheme {
|
||||
pub theme: String, // "light", "dark", or "unknown"
|
||||
}
|
||||
|
||||
pub struct ThemeDetector;
|
||||
|
||||
impl ThemeDetector {
|
||||
pub fn new() -> Self {
|
||||
Self
|
||||
}
|
||||
|
||||
/// Detect the system theme preference
|
||||
pub fn detect_system_theme(&self) -> SystemTheme {
|
||||
#[cfg(target_os = "linux")]
|
||||
return linux::detect_system_theme();
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
return macos::detect_system_theme();
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
return windows::detect_system_theme();
|
||||
|
||||
#[cfg(not(any(target_os = "linux", target_os = "macos", target_os = "windows")))]
|
||||
return SystemTheme {
|
||||
theme: "unknown".to_string(),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
mod linux {
|
||||
use super::*;
|
||||
|
||||
pub fn detect_system_theme() -> SystemTheme {
|
||||
// Try multiple methods in order of preference
|
||||
|
||||
// 1. Try GNOME/GTK settings via gsettings
|
||||
if let Ok(theme) = detect_gnome_theme() {
|
||||
return SystemTheme { theme };
|
||||
}
|
||||
|
||||
// 2. Try KDE Plasma settings via kreadconfig5/kreadconfig6
|
||||
if let Ok(theme) = detect_kde_theme() {
|
||||
return SystemTheme { theme };
|
||||
}
|
||||
|
||||
// 3. Try XFCE settings via xfconf-query
|
||||
if let Ok(theme) = detect_xfce_theme() {
|
||||
return SystemTheme { theme };
|
||||
}
|
||||
|
||||
// 4. Try looking at current GTK theme name
|
||||
if let Ok(theme) = detect_gtk_theme() {
|
||||
return SystemTheme { theme };
|
||||
}
|
||||
|
||||
// 5. Try dconf directly (fallback for GNOME-based systems)
|
||||
if let Ok(theme) = detect_dconf_theme() {
|
||||
return SystemTheme { theme };
|
||||
}
|
||||
|
||||
// 6. Try environment variables
|
||||
if let Ok(theme) = detect_env_theme() {
|
||||
return SystemTheme { theme };
|
||||
}
|
||||
|
||||
// 7. Try freedesktop portal
|
||||
if let Ok(theme) = detect_portal_theme() {
|
||||
return SystemTheme { theme };
|
||||
}
|
||||
|
||||
// 8. Try looking at system color scheme files
|
||||
if let Ok(theme) = detect_system_files_theme() {
|
||||
return SystemTheme { theme };
|
||||
}
|
||||
|
||||
// Fallback to unknown
|
||||
SystemTheme {
|
||||
theme: "unknown".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
fn detect_gnome_theme() -> Result<String, Box<dyn std::error::Error>> {
|
||||
// Check if gsettings is available
|
||||
if !is_command_available("gsettings") {
|
||||
return Err("gsettings not available".into());
|
||||
}
|
||||
|
||||
// Try GNOME color scheme first (modern way)
|
||||
if let Ok(output) = Command::new("gsettings")
|
||||
.args(["get", "org.gnome.desktop.interface", "color-scheme"])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let scheme = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||||
match scheme.as_str() {
|
||||
"'prefer-dark'" => return Ok("dark".to_string()),
|
||||
"'prefer-light'" => return Ok("light".to_string()),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback to GTK theme name detection
|
||||
if let Ok(output) = Command::new("gsettings")
|
||||
.args(["get", "org.gnome.desktop.interface", "gtk-theme"])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let theme_name = String::from_utf8_lossy(&output.stdout)
|
||||
.trim()
|
||||
.trim_matches('\'')
|
||||
.to_lowercase();
|
||||
|
||||
if theme_name.contains("dark") || theme_name.contains("night") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme_name.contains("light") || theme_name.contains("adwaita") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err("Could not detect GNOME theme".into())
|
||||
}
|
||||
|
||||
fn detect_kde_theme() -> Result<String, Box<dyn std::error::Error>> {
|
||||
// Try KDE Plasma 6 first
|
||||
if is_command_available("kreadconfig6") {
|
||||
if let Ok(output) = Command::new("kreadconfig6")
|
||||
.args([
|
||||
"--file",
|
||||
"kdeglobals",
|
||||
"--group",
|
||||
"KDE",
|
||||
"--key",
|
||||
"LookAndFeelPackage",
|
||||
])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let theme = String::from_utf8_lossy(&output.stdout)
|
||||
.trim()
|
||||
.to_lowercase();
|
||||
if theme.contains("dark") || theme.contains("breezedark") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme.contains("light") || theme.contains("breeze") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Try color scheme as well
|
||||
if let Ok(output) = Command::new("kreadconfig6")
|
||||
.args([
|
||||
"--file",
|
||||
"kdeglobals",
|
||||
"--group",
|
||||
"General",
|
||||
"--key",
|
||||
"ColorScheme",
|
||||
])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let scheme = String::from_utf8_lossy(&output.stdout)
|
||||
.trim()
|
||||
.to_lowercase();
|
||||
if scheme.contains("dark") || scheme.contains("breezedark") {
|
||||
return Ok("dark".to_string());
|
||||
} else if scheme.contains("light") || scheme.contains("breeze") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Try KDE Plasma 5 as fallback
|
||||
if is_command_available("kreadconfig5") {
|
||||
if let Ok(output) = Command::new("kreadconfig5")
|
||||
.args([
|
||||
"--file",
|
||||
"kdeglobals",
|
||||
"--group",
|
||||
"KDE",
|
||||
"--key",
|
||||
"LookAndFeelPackage",
|
||||
])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let theme = String::from_utf8_lossy(&output.stdout)
|
||||
.trim()
|
||||
.to_lowercase();
|
||||
if theme.contains("dark") || theme.contains("breezedark") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme.contains("light") || theme.contains("breeze") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err("Could not detect KDE theme".into())
|
||||
}
|
||||
|
||||
fn detect_xfce_theme() -> Result<String, Box<dyn std::error::Error>> {
|
||||
if !is_command_available("xfconf-query") {
|
||||
return Err("xfconf-query not available".into());
|
||||
}
|
||||
|
||||
// Check XFCE theme
|
||||
if let Ok(output) = Command::new("xfconf-query")
|
||||
.args(["-c", "xsettings", "-p", "/Net/ThemeName"])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let theme = String::from_utf8_lossy(&output.stdout)
|
||||
.trim()
|
||||
.to_lowercase();
|
||||
if theme.contains("dark") || theme.contains("night") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme.contains("light") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check XFCE window manager theme as backup
|
||||
if let Ok(output) = Command::new("xfconf-query")
|
||||
.args(["-c", "xfwm4", "-p", "/general/theme"])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let theme = String::from_utf8_lossy(&output.stdout)
|
||||
.trim()
|
||||
.to_lowercase();
|
||||
if theme.contains("dark") || theme.contains("night") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme.contains("light") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err("Could not detect XFCE theme".into())
|
||||
}
|
||||
|
||||
fn detect_gtk_theme() -> Result<String, Box<dyn std::error::Error>> {
|
||||
// Try to read GTK3 settings file
|
||||
if let Ok(home) = std::env::var("HOME") {
|
||||
let gtk3_settings = std::path::Path::new(&home).join(".config/gtk-3.0/settings.ini");
|
||||
if gtk3_settings.exists() {
|
||||
if let Ok(content) = std::fs::read_to_string(gtk3_settings) {
|
||||
for line in content.lines() {
|
||||
if line.starts_with("gtk-theme-name=") {
|
||||
let theme_name = line.split('=').nth(1).unwrap_or("").trim().to_lowercase();
|
||||
if theme_name.contains("dark") || theme_name.contains("night") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme_name.contains("light") || theme_name.contains("adwaita") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Try GTK4 settings
|
||||
let gtk4_settings = std::path::Path::new(&home).join(".config/gtk-4.0/settings.ini");
|
||||
if gtk4_settings.exists() {
|
||||
if let Ok(content) = std::fs::read_to_string(gtk4_settings) {
|
||||
for line in content.lines() {
|
||||
if line.starts_with("gtk-theme-name=") {
|
||||
let theme_name = line.split('=').nth(1).unwrap_or("").trim().to_lowercase();
|
||||
if theme_name.contains("dark") || theme_name.contains("night") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme_name.contains("light") || theme_name.contains("adwaita") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err("Could not detect GTK theme".into())
|
||||
}
|
||||
|
||||
fn detect_dconf_theme() -> Result<String, Box<dyn std::error::Error>> {
|
||||
if !is_command_available("dconf") {
|
||||
return Err("dconf not available".into());
|
||||
}
|
||||
|
||||
// Try reading color scheme directly from dconf
|
||||
if let Ok(output) = Command::new("dconf")
|
||||
.args(["read", "/org/gnome/desktop/interface/color-scheme"])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let scheme = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||||
match scheme.as_str() {
|
||||
"'prefer-dark'" => return Ok("dark".to_string()),
|
||||
"'prefer-light'" => return Ok("light".to_string()),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Try reading GTK theme from dconf
|
||||
if let Ok(output) = Command::new("dconf")
|
||||
.args(["read", "/org/gnome/desktop/interface/gtk-theme"])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let theme_name = String::from_utf8_lossy(&output.stdout)
|
||||
.trim()
|
||||
.trim_matches('\'')
|
||||
.to_lowercase();
|
||||
|
||||
if theme_name.contains("dark") || theme_name.contains("night") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme_name.contains("light") || theme_name.contains("adwaita") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err("Could not detect dconf theme".into())
|
||||
}
|
||||
|
||||
fn detect_env_theme() -> Result<String, Box<dyn std::error::Error>> {
|
||||
// Check common environment variables
|
||||
if let Ok(theme) = std::env::var("GTK_THEME") {
|
||||
let theme_lower = theme.to_lowercase();
|
||||
if theme_lower.contains("dark") || theme_lower.contains("night") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme_lower.contains("light") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
if let Ok(theme) = std::env::var("QT_STYLE_OVERRIDE") {
|
||||
let theme_lower = theme.to_lowercase();
|
||||
if theme_lower.contains("dark") || theme_lower.contains("night") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme_lower.contains("light") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
Err("Could not detect theme from environment".into())
|
||||
}
|
||||
|
||||
fn detect_portal_theme() -> Result<String, Box<dyn std::error::Error>> {
|
||||
if !is_command_available("busctl") {
|
||||
return Err("busctl not available".into());
|
||||
}
|
||||
|
||||
// Try to query the color scheme via org.freedesktop.portal.Settings
|
||||
if let Ok(output) = Command::new("busctl")
|
||||
.args([
|
||||
"--user",
|
||||
"call",
|
||||
"org.freedesktop.portal.Desktop",
|
||||
"/org/freedesktop/portal/desktop",
|
||||
"org.freedesktop.portal.Settings",
|
||||
"Read",
|
||||
"ss",
|
||||
"org.freedesktop.appearance",
|
||||
"color-scheme",
|
||||
])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let response = String::from_utf8_lossy(&output.stdout);
|
||||
// Parse DBus response - look for preference values
|
||||
if response.contains(" 1 ") {
|
||||
return Ok("dark".to_string());
|
||||
} else if response.contains(" 2 ") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err("Could not detect portal theme".into())
|
||||
}
|
||||
|
||||
fn detect_system_files_theme() -> Result<String, Box<dyn std::error::Error>> {
|
||||
// Check if we're in a dark terminal (heuristic)
|
||||
if let Ok(term) = std::env::var("TERM") {
|
||||
let term_lower = term.to_lowercase();
|
||||
if term_lower.contains("dark") || term_lower.contains("night") {
|
||||
return Ok("dark".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
// Check if we can determine from desktop session
|
||||
if let Ok(desktop) = std::env::var("XDG_CURRENT_DESKTOP") {
|
||||
let desktop_lower = desktop.to_lowercase();
|
||||
// Some desktops default to dark
|
||||
if desktop_lower.contains("i3") || desktop_lower.contains("sway") {
|
||||
// Window managers often use dark themes by default
|
||||
return Ok("dark".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
Err("Could not detect theme from system files".into())
|
||||
}
|
||||
|
||||
fn is_command_available(command: &str) -> bool {
|
||||
Command::new("which")
|
||||
.arg(command)
|
||||
.output()
|
||||
.map(|output| output.status.success())
|
||||
.unwrap_or(false)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
mod macos {
|
||||
use super::*;
|
||||
|
||||
pub fn detect_system_theme() -> SystemTheme {
|
||||
// macOS theme detection using osascript
|
||||
if let Ok(output) = Command::new("osascript")
|
||||
.args([
|
||||
"-e",
|
||||
"tell application \"System Events\" to tell appearance preferences to get dark mode",
|
||||
])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let result = String::from_utf8_lossy(&output.stdout).to_string();
|
||||
let result = result.trim();
|
||||
match result {
|
||||
"true" => {
|
||||
return SystemTheme {
|
||||
theme: "dark".to_string(),
|
||||
}
|
||||
}
|
||||
"false" => {
|
||||
return SystemTheme {
|
||||
theme: "light".to_string(),
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback method using defaults
|
||||
if let Ok(output) = Command::new("defaults")
|
||||
.args(["read", "-g", "AppleInterfaceStyle"])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let style = String::from_utf8_lossy(&output.stdout).to_string();
|
||||
let style = style.trim();
|
||||
if style.to_lowercase() == "dark" {
|
||||
return SystemTheme {
|
||||
theme: "dark".to_string(),
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Default to light if we can't determine
|
||||
SystemTheme {
|
||||
theme: "light".to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
mod windows {
|
||||
use super::*;
|
||||
|
||||
pub fn detect_system_theme() -> SystemTheme {
|
||||
// Windows theme detection via registry
|
||||
// This is a simplified implementation - you might want to use winreg crate for better registry access
|
||||
if let Ok(output) = Command::new("reg")
|
||||
.args([
|
||||
"query",
|
||||
"HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\Themes\\Personalize",
|
||||
"/v",
|
||||
"AppsUseLightTheme",
|
||||
])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let result = String::from_utf8_lossy(&output.stdout);
|
||||
if result.contains("0x0") {
|
||||
return SystemTheme {
|
||||
theme: "dark".to_string(),
|
||||
};
|
||||
} else if result.contains("0x1") {
|
||||
return SystemTheme {
|
||||
theme: "light".to_string(),
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Default to light if we can't determine
|
||||
SystemTheme {
|
||||
theme: "light".to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Command to expose this functionality to the frontend
|
||||
#[tauri::command]
|
||||
pub fn get_system_theme() -> SystemTheme {
|
||||
let detector = ThemeDetector::new();
|
||||
detector.detect_system_theme()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_theme_detector_creation() {
|
||||
let detector = ThemeDetector::new();
|
||||
let theme = detector.detect_system_theme();
|
||||
|
||||
// Should return a valid theme string
|
||||
assert!(matches!(theme.theme.as_str(), "light" | "dark" | "unknown"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_system_theme_command() {
|
||||
let theme = get_system_theme();
|
||||
assert!(matches!(theme.theme.as_str(), "light" | "dark" | "unknown"));
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,740 @@
|
||||
use directories::BaseDirs;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::atomic::{AtomicU64, Ordering};
|
||||
use std::sync::{Arc, RwLock};
|
||||
|
||||
/// Individual bandwidth data point for time-series tracking
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct BandwidthDataPoint {
|
||||
/// Unix timestamp in seconds
|
||||
pub timestamp: u64,
|
||||
/// Bytes sent in this interval
|
||||
pub bytes_sent: u64,
|
||||
/// Bytes received in this interval
|
||||
pub bytes_received: u64,
|
||||
}
|
||||
|
||||
/// Individual domain access data point for time-series tracking
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct DomainAccessPoint {
|
||||
/// Unix timestamp in seconds
|
||||
pub timestamp: u64,
|
||||
/// Domain name
|
||||
pub domain: String,
|
||||
/// Bytes sent in this request
|
||||
pub bytes_sent: u64,
|
||||
/// Bytes received in this request
|
||||
pub bytes_received: u64,
|
||||
}
|
||||
|
||||
/// Domain access information
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct DomainAccess {
|
||||
/// Domain name
|
||||
pub domain: String,
|
||||
/// Number of requests to this domain
|
||||
pub request_count: u64,
|
||||
/// Total bytes sent to this domain
|
||||
pub bytes_sent: u64,
|
||||
/// Total bytes received from this domain
|
||||
pub bytes_received: u64,
|
||||
/// First access timestamp
|
||||
pub first_access: u64,
|
||||
/// Last access timestamp
|
||||
pub last_access: u64,
|
||||
}
|
||||
|
||||
/// Lightweight snapshot for real-time updates (sent via events)
|
||||
/// Contains only the data needed for the mini chart and summary display
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct TrafficSnapshot {
|
||||
/// Profile ID (for matching)
|
||||
pub profile_id: Option<String>,
|
||||
/// Session start timestamp
|
||||
pub session_start: u64,
|
||||
/// Last update timestamp
|
||||
pub last_update: u64,
|
||||
/// Total bytes sent across all time
|
||||
pub total_bytes_sent: u64,
|
||||
/// Total bytes received across all time
|
||||
pub total_bytes_received: u64,
|
||||
/// Total requests made
|
||||
pub total_requests: u64,
|
||||
/// Current bandwidth (bytes per second) sent
|
||||
pub current_bytes_sent: u64,
|
||||
/// Current bandwidth (bytes per second) received
|
||||
pub current_bytes_received: u64,
|
||||
/// Recent bandwidth history (last 60 seconds only, for mini chart)
|
||||
pub recent_bandwidth: Vec<BandwidthDataPoint>,
|
||||
}
|
||||
|
||||
/// Traffic statistics for a profile/proxy session
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct TrafficStats {
|
||||
/// Proxy ID these stats belong to (kept for backwards compatibility)
|
||||
pub proxy_id: String,
|
||||
/// Profile ID (if associated) - this is now the primary key for storage
|
||||
pub profile_id: Option<String>,
|
||||
/// Session start timestamp
|
||||
pub session_start: u64,
|
||||
/// Last update timestamp
|
||||
pub last_update: u64,
|
||||
/// Total bytes sent across all time
|
||||
pub total_bytes_sent: u64,
|
||||
/// Total bytes received across all time
|
||||
pub total_bytes_received: u64,
|
||||
/// Total requests made
|
||||
pub total_requests: u64,
|
||||
/// Bandwidth data points (time-series, 1 point per second, stored indefinitely)
|
||||
#[serde(default)]
|
||||
pub bandwidth_history: Vec<BandwidthDataPoint>,
|
||||
/// Domain access statistics (aggregated all-time)
|
||||
#[serde(default)]
|
||||
pub domains: HashMap<String, DomainAccess>,
|
||||
/// Domain access history (time-series for filtering by period)
|
||||
#[serde(default)]
|
||||
pub domain_access_history: Vec<DomainAccessPoint>,
|
||||
/// Unique IPs accessed
|
||||
#[serde(default)]
|
||||
pub unique_ips: Vec<String>,
|
||||
}
|
||||
|
||||
impl TrafficStats {
|
||||
pub fn new(proxy_id: String, profile_id: Option<String>) -> Self {
|
||||
let now = current_timestamp();
|
||||
Self {
|
||||
proxy_id,
|
||||
profile_id,
|
||||
session_start: now,
|
||||
last_update: now,
|
||||
total_bytes_sent: 0,
|
||||
total_bytes_received: 0,
|
||||
total_requests: 0,
|
||||
bandwidth_history: Vec::new(),
|
||||
domains: HashMap::new(),
|
||||
domain_access_history: Vec::new(),
|
||||
unique_ips: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a lightweight snapshot for real-time UI updates
|
||||
pub fn to_snapshot(&self) -> TrafficSnapshot {
|
||||
let now = current_timestamp();
|
||||
let cutoff = now.saturating_sub(60); // Last 60 seconds for mini chart
|
||||
|
||||
// Get current bandwidth from last data point
|
||||
let (current_sent, current_recv) = self
|
||||
.bandwidth_history
|
||||
.last()
|
||||
.filter(|dp| dp.timestamp >= now.saturating_sub(2)) // Within last 2 seconds
|
||||
.map(|dp| (dp.bytes_sent, dp.bytes_received))
|
||||
.unwrap_or((0, 0));
|
||||
|
||||
TrafficSnapshot {
|
||||
profile_id: self.profile_id.clone(),
|
||||
session_start: self.session_start,
|
||||
last_update: self.last_update,
|
||||
total_bytes_sent: self.total_bytes_sent,
|
||||
total_bytes_received: self.total_bytes_received,
|
||||
total_requests: self.total_requests,
|
||||
current_bytes_sent: current_sent,
|
||||
current_bytes_received: current_recv,
|
||||
recent_bandwidth: self
|
||||
.bandwidth_history
|
||||
.iter()
|
||||
.filter(|dp| dp.timestamp >= cutoff)
|
||||
.cloned()
|
||||
.collect(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Record bandwidth for the current second (data is stored indefinitely)
|
||||
pub fn record_bandwidth(&mut self, bytes_sent: u64, bytes_received: u64) {
|
||||
let now = current_timestamp();
|
||||
self.last_update = now;
|
||||
self.total_bytes_sent += bytes_sent;
|
||||
self.total_bytes_received += bytes_received;
|
||||
|
||||
// Find or create data point for this second
|
||||
if let Some(last) = self.bandwidth_history.last_mut() {
|
||||
if last.timestamp == now {
|
||||
last.bytes_sent += bytes_sent;
|
||||
last.bytes_received += bytes_received;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Add new data point (even if bytes are zero, to ensure chart has continuous data)
|
||||
self.bandwidth_history.push(BandwidthDataPoint {
|
||||
timestamp: now,
|
||||
bytes_sent,
|
||||
bytes_received,
|
||||
});
|
||||
}
|
||||
|
||||
/// Record a request to a domain
|
||||
pub fn record_request(&mut self, domain: &str, bytes_sent: u64, bytes_received: u64) {
|
||||
let now = current_timestamp();
|
||||
self.total_requests += 1;
|
||||
|
||||
// Update aggregated domain stats
|
||||
let entry = self
|
||||
.domains
|
||||
.entry(domain.to_string())
|
||||
.or_insert(DomainAccess {
|
||||
domain: domain.to_string(),
|
||||
request_count: 0,
|
||||
bytes_sent: 0,
|
||||
bytes_received: 0,
|
||||
first_access: now,
|
||||
last_access: now,
|
||||
});
|
||||
|
||||
entry.request_count += 1;
|
||||
entry.bytes_sent += bytes_sent;
|
||||
entry.bytes_received += bytes_received;
|
||||
entry.last_access = now;
|
||||
|
||||
// Add to domain access history for time-period filtering
|
||||
self.domain_access_history.push(DomainAccessPoint {
|
||||
timestamp: now,
|
||||
domain: domain.to_string(),
|
||||
bytes_sent,
|
||||
bytes_received,
|
||||
});
|
||||
}
|
||||
|
||||
/// Record an IP address access
|
||||
pub fn record_ip(&mut self, ip: &str) {
|
||||
if !self.unique_ips.contains(&ip.to_string()) {
|
||||
self.unique_ips.push(ip.to_string());
|
||||
}
|
||||
}
|
||||
|
||||
/// Get bandwidth data for the last N seconds
|
||||
pub fn get_recent_bandwidth(&self, seconds: u64) -> Vec<BandwidthDataPoint> {
|
||||
let now = current_timestamp();
|
||||
let cutoff = now.saturating_sub(seconds);
|
||||
self
|
||||
.bandwidth_history
|
||||
.iter()
|
||||
.filter(|dp| dp.timestamp >= cutoff)
|
||||
.cloned()
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
/// Get current Unix timestamp in seconds
|
||||
fn current_timestamp() -> u64 {
|
||||
std::time::SystemTime::now()
|
||||
.duration_since(std::time::UNIX_EPOCH)
|
||||
.unwrap_or_default()
|
||||
.as_secs()
|
||||
}
|
||||
|
||||
/// Get the traffic stats storage directory
|
||||
pub fn get_traffic_stats_dir() -> PathBuf {
|
||||
let base_dirs = BaseDirs::new().expect("Failed to get base directories");
|
||||
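// With the `directories` crate, cache_dir() typically maps to ~/.cache on
// Linux, ~/Library/Caches on macOS and %LOCALAPPDATA% on Windows, so release
// builds keep per-profile JSON files under <cache_dir>/DonutBrowser/traffic_stats/.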
let mut path = base_dirs.cache_dir().to_path_buf();
|
||||
path.push(if cfg!(debug_assertions) {
|
||||
"DonutBrowserDev"
|
||||
} else {
|
||||
"DonutBrowser"
|
||||
});
|
||||
path.push("traffic_stats");
|
||||
path
|
||||
}
|
||||
|
||||
/// Get the storage key for traffic stats (profile_id if available, otherwise proxy_id)
|
||||
fn get_stats_storage_key(stats: &TrafficStats) -> String {
|
||||
stats
|
||||
.profile_id
|
||||
.clone()
|
||||
.unwrap_or_else(|| stats.proxy_id.clone())
|
||||
}
|
||||
|
||||
/// Save traffic stats to disk using profile_id as the key
|
||||
pub fn save_traffic_stats(stats: &TrafficStats) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let storage_dir = get_traffic_stats_dir();
|
||||
fs::create_dir_all(&storage_dir)?;
|
||||
|
||||
let key = get_stats_storage_key(stats);
|
||||
let file_path = storage_dir.join(format!("{key}.json"));
|
||||
let content = serde_json::to_string(stats)?;
|
||||
fs::write(&file_path, content)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Load traffic stats from disk by profile_id or proxy_id
|
||||
pub fn load_traffic_stats(id: &str) -> Option<TrafficStats> {
|
||||
let storage_dir = get_traffic_stats_dir();
|
||||
let file_path = storage_dir.join(format!("{id}.json"));
|
||||
|
||||
if !file_path.exists() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let content = fs::read_to_string(&file_path).ok()?;
|
||||
serde_json::from_str(&content).ok()
|
||||
}
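// Illustrative round trip (a sketch): persist a fresh stats record and load it
// back by profile id. The identifiers below are placeholders.
#[allow(dead_code)]
fn stats_round_trip_sketch() -> Result<(), Box<dyn std::error::Error>> {
  let stats = TrafficStats::new("proxy_sketch".to_string(), Some("profile-sketch".to_string()));
  save_traffic_stats(&stats)?;
  debug_assert!(load_traffic_stats("profile-sketch").is_some());
  Ok(())
}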
|
||||
|
||||
/// Load traffic stats by profile_id
|
||||
pub fn load_traffic_stats_by_profile(profile_id: &str) -> Option<TrafficStats> {
|
||||
load_traffic_stats(profile_id)
|
||||
}
|
||||
|
||||
/// List all traffic stats files and migrate old proxy-id based files to profile-id based
|
||||
pub fn list_traffic_stats() -> Vec<TrafficStats> {
|
||||
let storage_dir = get_traffic_stats_dir();
|
||||
|
||||
if !storage_dir.exists() {
|
||||
return Vec::new();
|
||||
}
|
||||
|
||||
let mut stats_map: HashMap<String, TrafficStats> = HashMap::new();
|
||||
let mut files_to_delete: Vec<std::path::PathBuf> = Vec::new();
|
||||
|
||||
if let Ok(entries) = fs::read_dir(&storage_dir) {
|
||||
for entry in entries.flatten() {
|
||||
let path = entry.path();
|
||||
if path.extension().is_some_and(|ext| ext == "json") {
|
||||
if let Ok(content) = fs::read_to_string(&path) {
|
||||
if let Ok(s) = serde_json::from_str::<TrafficStats>(&content) {
|
||||
// Determine the key for this stats entry
|
||||
let key = s.profile_id.clone().unwrap_or_else(|| s.proxy_id.clone());
|
||||
|
||||
// Check if this is an old proxy-id based file that should be migrated
|
||||
let file_stem = path.file_stem().and_then(|s| s.to_str()).unwrap_or("");
|
||||
let is_old_proxy_file = file_stem.starts_with("proxy_")
|
||||
&& s.profile_id.is_some()
|
||||
&& file_stem != s.profile_id.as_ref().unwrap();
|
||||
|
||||
if let Some(existing) = stats_map.get_mut(&key) {
|
||||
// Merge stats from this file into existing
|
||||
merge_traffic_stats(existing, &s);
|
||||
if is_old_proxy_file {
|
||||
files_to_delete.push(path.clone());
|
||||
}
|
||||
} else {
|
||||
stats_map.insert(key.clone(), s);
|
||||
if is_old_proxy_file {
|
||||
files_to_delete.push(path.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Save merged stats and delete old files
|
||||
for stats in stats_map.values() {
|
||||
if let Err(e) = save_traffic_stats(stats) {
|
||||
log::warn!("Failed to save merged traffic stats: {}", e);
|
||||
}
|
||||
}
|
||||
|
||||
for path in files_to_delete {
|
||||
if let Err(e) = fs::remove_file(&path) {
|
||||
log::warn!("Failed to delete old traffic stats file {:?}: {}", path, e);
|
||||
}
|
||||
}
|
||||
|
||||
stats_map.into_values().collect()
|
||||
}
|
||||
|
||||
/// Merge traffic stats from source into destination
|
||||
fn merge_traffic_stats(dest: &mut TrafficStats, src: &TrafficStats) {
|
||||
// Update totals
|
||||
dest.total_bytes_sent += src.total_bytes_sent;
|
||||
dest.total_bytes_received += src.total_bytes_received;
|
||||
dest.total_requests += src.total_requests;
|
||||
|
||||
// Update timestamps
|
||||
dest.session_start = dest.session_start.min(src.session_start);
|
||||
dest.last_update = dest.last_update.max(src.last_update);
|
||||
|
||||
// Merge bandwidth history (keep all data, sorted by timestamp)
|
||||
let mut combined_history: Vec<BandwidthDataPoint> = dest.bandwidth_history.clone();
|
||||
for point in &src.bandwidth_history {
|
||||
if !combined_history
|
||||
.iter()
|
||||
.any(|p| p.timestamp == point.timestamp)
|
||||
{
|
||||
combined_history.push(point.clone());
|
||||
}
|
||||
}
|
||||
combined_history.sort_by_key(|p| p.timestamp);
|
||||
dest.bandwidth_history = combined_history;
|
||||
|
||||
// Merge domains
|
||||
for (domain, access) in &src.domains {
|
||||
let entry = dest.domains.entry(domain.clone()).or_insert(DomainAccess {
|
||||
domain: domain.clone(),
|
||||
request_count: 0,
|
||||
bytes_sent: 0,
|
||||
bytes_received: 0,
|
||||
first_access: access.first_access,
|
||||
last_access: access.last_access,
|
||||
});
|
||||
entry.request_count += access.request_count;
|
||||
entry.bytes_sent += access.bytes_sent;
|
||||
entry.bytes_received += access.bytes_received;
|
||||
entry.first_access = entry.first_access.min(access.first_access);
|
||||
entry.last_access = entry.last_access.max(access.last_access);
|
||||
}
|
||||
|
||||
// Merge domain access history
|
||||
let mut combined_domain_history: Vec<DomainAccessPoint> = dest.domain_access_history.clone();
|
||||
for point in &src.domain_access_history {
|
||||
combined_domain_history.push(point.clone());
|
||||
}
|
||||
combined_domain_history.sort_by_key(|p| p.timestamp);
|
||||
dest.domain_access_history = combined_domain_history;
|
||||
|
||||
// Merge unique IPs
|
||||
for ip in &src.unique_ips {
|
||||
if !dest.unique_ips.contains(ip) {
|
||||
dest.unique_ips.push(ip.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Delete traffic stats by id (profile_id or proxy_id)
|
||||
pub fn delete_traffic_stats(id: &str) -> bool {
|
||||
let storage_dir = get_traffic_stats_dir();
|
||||
let file_path = storage_dir.join(format!("{id}.json"));
|
||||
|
||||
if file_path.exists() {
|
||||
fs::remove_file(&file_path).is_ok()
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
/// Clear all traffic stats (used when clearing cache)
|
||||
pub fn clear_all_traffic_stats() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let storage_dir = get_traffic_stats_dir();
|
||||
|
||||
if storage_dir.exists() {
|
||||
for entry in fs::read_dir(&storage_dir)?.flatten() {
|
||||
let path = entry.path();
|
||||
if path.extension().is_some_and(|ext| ext == "json") {
|
||||
let _ = fs::remove_file(&path);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Live bandwidth tracker for real-time stats collection in the proxy
|
||||
/// This is designed to be used from within the proxy server
|
||||
pub struct LiveTrafficTracker {
|
||||
pub proxy_id: String,
|
||||
pub profile_id: Option<String>,
|
||||
bytes_sent: AtomicU64,
|
||||
bytes_received: AtomicU64,
|
||||
requests: AtomicU64,
|
||||
domain_stats: RwLock<HashMap<String, (u64, u64, u64)>>, // domain -> (count, sent, recv)
|
||||
ips: RwLock<Vec<String>>,
|
||||
#[allow(dead_code)]
|
||||
session_start: u64,
|
||||
}
|
||||
|
||||
impl LiveTrafficTracker {
|
||||
pub fn new(proxy_id: String, profile_id: Option<String>) -> Self {
|
||||
Self {
|
||||
proxy_id,
|
||||
profile_id,
|
||||
bytes_sent: AtomicU64::new(0),
|
||||
bytes_received: AtomicU64::new(0),
|
||||
requests: AtomicU64::new(0),
|
||||
domain_stats: RwLock::new(HashMap::new()),
|
||||
ips: RwLock::new(Vec::new()),
|
||||
session_start: current_timestamp(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_bytes_sent(&self, bytes: u64) {
|
||||
self.bytes_sent.fetch_add(bytes, Ordering::Relaxed);
|
||||
}
|
||||
|
||||
pub fn add_bytes_received(&self, bytes: u64) {
|
||||
self.bytes_received.fetch_add(bytes, Ordering::Relaxed);
|
||||
}
|
||||
|
||||
pub fn record_request(&self, domain: &str, bytes_sent: u64, bytes_received: u64) {
|
||||
self.requests.fetch_add(1, Ordering::Relaxed);
|
||||
// Also update total byte counters for HTTP requests (not tunneled)
|
||||
self.bytes_sent.fetch_add(bytes_sent, Ordering::Relaxed);
|
||||
self
|
||||
.bytes_received
|
||||
.fetch_add(bytes_received, Ordering::Relaxed);
|
||||
if let Ok(mut stats) = self.domain_stats.write() {
|
||||
let entry = stats.entry(domain.to_string()).or_insert((0, 0, 0));
|
||||
entry.0 += 1;
|
||||
entry.1 += bytes_sent;
|
||||
entry.2 += bytes_received;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn record_ip(&self, ip: &str) {
|
||||
if let Ok(mut ips) = self.ips.write() {
|
||||
if !ips.contains(&ip.to_string()) {
|
||||
ips.push(ip.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Update domain-specific byte counts (called when CONNECT tunnel closes)
|
||||
pub fn update_domain_bytes(&self, domain: &str, bytes_sent: u64, bytes_received: u64) {
|
||||
if let Ok(mut stats) = self.domain_stats.write() {
|
||||
let entry = stats.entry(domain.to_string()).or_insert((0, 0, 0));
|
||||
entry.1 += bytes_sent;
|
||||
entry.2 += bytes_received;
|
||||
}
|
||||
}
|
||||
|
||||
/// Get current stats snapshot
|
||||
pub fn get_snapshot(&self) -> (u64, u64, u64) {
|
||||
(
|
||||
self.bytes_sent.load(Ordering::Relaxed),
|
||||
self.bytes_received.load(Ordering::Relaxed),
|
||||
self.requests.load(Ordering::Relaxed),
|
||||
)
|
||||
}
|
||||
|
||||
/// Flush current stats to disk and return the delta
|
||||
pub fn flush_to_disk(&self) -> Result<(u64, u64), Box<dyn std::error::Error>> {
|
||||
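// swap(0) atomically takes the bytes accumulated since the last flush and
// resets the counters, so each flush records only the delta for that interval.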
let bytes_sent = self.bytes_sent.swap(0, Ordering::Relaxed);
|
||||
let bytes_received = self.bytes_received.swap(0, Ordering::Relaxed);
|
||||
|
||||
// Use profile_id as storage key if available, otherwise fall back to proxy_id
|
||||
let storage_key = self
|
||||
.profile_id
|
||||
.clone()
|
||||
.unwrap_or_else(|| self.proxy_id.clone());
|
||||
|
||||
// Load or create stats using the storage key
|
||||
let mut stats = load_traffic_stats(&storage_key)
|
||||
.unwrap_or_else(|| TrafficStats::new(self.proxy_id.clone(), self.profile_id.clone()));
|
||||
|
||||
// Ensure profile_id is set (in case stats were loaded from disk without it)
|
||||
if stats.profile_id.is_none() && self.profile_id.is_some() {
|
||||
stats.profile_id = self.profile_id.clone();
|
||||
}
|
||||
|
||||
// Update the proxy_id to current session (for debugging/tracking)
|
||||
stats.proxy_id = self.proxy_id.clone();
|
||||
|
||||
// Update bandwidth history
|
||||
stats.record_bandwidth(bytes_sent, bytes_received);
|
||||
|
||||
// Update domain stats
|
||||
if let Ok(mut domain_map) = self.domain_stats.write() {
|
||||
for (domain, (count, sent, recv)) in domain_map.drain() {
|
||||
stats.record_request(&domain, sent, recv);
|
||||
// record_request bumps total_requests by 1 per call, but this entry batches `count` requests, so swap that single increment for the real count
|
||||
stats.total_requests = stats.total_requests.saturating_sub(1) + count;
|
||||
}
|
||||
}
|
||||
|
||||
// Update IPs
|
||||
if let Ok(ips) = self.ips.read() {
|
||||
for ip in ips.iter() {
|
||||
stats.record_ip(ip);
|
||||
}
|
||||
}
|
||||
|
||||
// Save to disk
|
||||
save_traffic_stats(&stats)?;
|
||||
|
||||
Ok((bytes_sent, bytes_received))
|
||||
}
|
||||
}
|
||||
|
||||
/// Global traffic tracker that can be accessed from connection handlers
|
||||
/// Using RwLock to allow reinitialization when proxy config changes
|
||||
static TRAFFIC_TRACKER: std::sync::RwLock<Option<Arc<LiveTrafficTracker>>> =
|
||||
std::sync::RwLock::new(None);
|
||||
|
||||
/// Initialize the global traffic tracker
|
||||
/// This can be called multiple times to update the tracker when proxy config changes
|
||||
pub fn init_traffic_tracker(proxy_id: String, profile_id: Option<String>) {
|
||||
let tracker = Arc::new(LiveTrafficTracker::new(proxy_id, profile_id));
|
||||
if let Ok(mut guard) = TRAFFIC_TRACKER.write() {
|
||||
*guard = Some(tracker);
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the global traffic tracker
|
||||
pub fn get_traffic_tracker() -> Option<Arc<LiveTrafficTracker>> {
|
||||
TRAFFIC_TRACKER.read().ok().and_then(|guard| guard.clone())
|
||||
}
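// Illustrative usage (a sketch): how a connection handler might feed the global
// tracker once a response finishes. The domain, IP and byte counts below are
// placeholders.
#[allow(dead_code)]
fn record_example_request() {
  if let Some(tracker) = get_traffic_tracker() {
    tracker.record_request("example.com", 512, 2048);
    tracker.record_ip("203.0.113.7");
  }
}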
|
||||
|
||||
/// Filtered traffic stats for client display (only contains data for requested period)
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct FilteredTrafficStats {
|
||||
pub profile_id: Option<String>,
|
||||
pub session_start: u64,
|
||||
pub last_update: u64,
|
||||
pub total_bytes_sent: u64,
|
||||
pub total_bytes_received: u64,
|
||||
pub total_requests: u64,
|
||||
/// Bandwidth history filtered to requested time period
|
||||
pub bandwidth_history: Vec<BandwidthDataPoint>,
|
||||
/// Period stats: bytes sent/received within the requested period
|
||||
pub period_bytes_sent: u64,
|
||||
pub period_bytes_received: u64,
|
||||
/// Period requests within the requested period
|
||||
pub period_requests: u64,
|
||||
/// Domain access statistics filtered to requested time period
|
||||
pub domains: HashMap<String, DomainAccess>,
|
||||
/// Unique IPs accessed
|
||||
pub unique_ips: Vec<String>,
|
||||
}
|
||||
|
||||
/// Get traffic stats for a profile, filtered to a specific time period
|
||||
/// seconds: number of seconds to include (0 = all time)
|
||||
pub fn get_traffic_stats_for_period(
|
||||
profile_id: &str,
|
||||
seconds: u64,
|
||||
) -> Option<FilteredTrafficStats> {
|
||||
let stats = load_traffic_stats(profile_id)?;
|
||||
|
||||
let now = current_timestamp();
|
||||
let cutoff = if seconds == 0 {
|
||||
0 // All time
|
||||
} else {
|
||||
now.saturating_sub(seconds)
|
||||
};
|
||||
|
||||
// Filter bandwidth history to requested period
|
||||
let filtered_history: Vec<BandwidthDataPoint> = stats
|
||||
.bandwidth_history
|
||||
.iter()
|
||||
.filter(|dp| dp.timestamp >= cutoff)
|
||||
.cloned()
|
||||
.collect();
|
||||
|
||||
// Calculate period totals for bandwidth
|
||||
let period_bytes_sent: u64 = filtered_history.iter().map(|dp| dp.bytes_sent).sum();
|
||||
let period_bytes_received: u64 = filtered_history.iter().map(|dp| dp.bytes_received).sum();
|
||||
|
||||
// Filter and aggregate domain stats for the period
|
||||
let mut filtered_domains: HashMap<String, DomainAccess> = HashMap::new();
|
||||
let mut period_requests: u64 = 0;
|
||||
|
||||
for access in stats
|
||||
.domain_access_history
|
||||
.iter()
|
||||
.filter(|a| a.timestamp >= cutoff)
|
||||
{
|
||||
period_requests += 1;
|
||||
let entry = filtered_domains
|
||||
.entry(access.domain.clone())
|
||||
.or_insert(DomainAccess {
|
||||
domain: access.domain.clone(),
|
||||
request_count: 0,
|
||||
bytes_sent: 0,
|
||||
bytes_received: 0,
|
||||
first_access: access.timestamp,
|
||||
last_access: access.timestamp,
|
||||
});
|
||||
|
||||
entry.request_count += 1;
|
||||
entry.bytes_sent += access.bytes_sent;
|
||||
entry.bytes_received += access.bytes_received;
|
||||
entry.first_access = entry.first_access.min(access.timestamp);
|
||||
entry.last_access = entry.last_access.max(access.timestamp);
|
||||
}
|
||||
|
||||
// If no domain_access_history exists (old data), fall back to all-time domains
|
||||
let domains = if stats.domain_access_history.is_empty() {
|
||||
stats.domains
|
||||
} else {
|
||||
filtered_domains
|
||||
};
|
||||
|
||||
Some(FilteredTrafficStats {
|
||||
profile_id: stats.profile_id,
|
||||
session_start: stats.session_start,
|
||||
last_update: stats.last_update,
|
||||
total_bytes_sent: stats.total_bytes_sent,
|
||||
total_bytes_received: stats.total_bytes_received,
|
||||
total_requests: stats.total_requests,
|
||||
bandwidth_history: filtered_history,
|
||||
period_bytes_sent,
|
||||
period_bytes_received,
|
||||
period_requests,
|
||||
domains,
|
||||
unique_ips: stats.unique_ips,
|
||||
})
|
||||
}
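// Illustrative usage (a sketch): pull the last hour of traffic for a profile and
// log the period totals. The profile id is whatever the caller already holds.
#[allow(dead_code)]
fn log_last_hour(profile_id: &str) {
  if let Some(stats) = get_traffic_stats_for_period(profile_id, 3600) {
    log::info!(
      "last hour: sent={} B, received={} B, requests={}",
      stats.period_bytes_sent,
      stats.period_bytes_received,
      stats.period_requests
    );
  }
}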
|
||||
|
||||
/// Get lightweight traffic snapshot for a profile (for mini charts, only recent 60 seconds)
|
||||
pub fn get_traffic_snapshot_for_profile(profile_id: &str) -> Option<TrafficSnapshot> {
|
||||
let stats = load_traffic_stats(profile_id)?;
|
||||
Some(stats.to_snapshot())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_traffic_stats_creation() {
|
||||
let stats = TrafficStats::new(
|
||||
"test_proxy".to_string(),
|
||||
Some("test-profile-id".to_string()),
|
||||
);
|
||||
assert_eq!(stats.proxy_id, "test_proxy");
|
||||
assert_eq!(stats.profile_id, Some("test-profile-id".to_string()));
|
||||
assert_eq!(stats.total_bytes_sent, 0);
|
||||
assert_eq!(stats.total_bytes_received, 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_bandwidth_recording() {
|
||||
let mut stats = TrafficStats::new("test_proxy".to_string(), None);
|
||||
|
||||
stats.record_bandwidth(1000, 2000);
|
||||
assert_eq!(stats.total_bytes_sent, 1000);
|
||||
assert_eq!(stats.total_bytes_received, 2000);
|
||||
assert_eq!(stats.bandwidth_history.len(), 1);
|
||||
|
||||
stats.record_bandwidth(500, 1000);
|
||||
assert_eq!(stats.total_bytes_sent, 1500);
|
||||
assert_eq!(stats.total_bytes_received, 3000);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_domain_recording() {
|
||||
let mut stats = TrafficStats::new("test_proxy".to_string(), None);
|
||||
|
||||
stats.record_request("example.com", 100, 500);
|
||||
stats.record_request("example.com", 200, 1000);
|
||||
stats.record_request("google.com", 50, 200);
|
||||
|
||||
assert_eq!(stats.domains.len(), 2);
|
||||
assert_eq!(stats.domains["example.com"].request_count, 2);
|
||||
assert_eq!(stats.domains["example.com"].bytes_sent, 300);
|
||||
assert_eq!(stats.domains["google.com"].request_count, 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_ip_recording() {
|
||||
let mut stats = TrafficStats::new("test_proxy".to_string(), None);
|
||||
|
||||
stats.record_ip("192.168.1.1");
|
||||
stats.record_ip("192.168.1.1"); // Duplicate
|
||||
stats.record_ip("10.0.0.1");
|
||||
|
||||
assert_eq!(stats.unique_ips.len(), 2);
|
||||
}
|
||||
}
|
||||
@@ -10,7 +10,7 @@ use tokio::sync::Mutex;
|
||||
use tokio::time::interval;
|
||||
|
||||
use crate::auto_updater::AutoUpdater;
|
||||
use crate::browser_version_service::BrowserVersionService;
|
||||
use crate::browser_version_manager::BrowserVersionManager;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct VersionUpdateProgress {
|
||||
@@ -41,22 +41,23 @@ impl Default for BackgroundUpdateState {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
last_update_time: 0,
|
||||
update_interval_hours: 3,
|
||||
update_interval_hours: 12,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Extension of auto_updater.rs for background updates
|
||||
pub struct VersionUpdater {
|
||||
version_service: BrowserVersionService,
|
||||
auto_updater: AutoUpdater,
|
||||
browser_version_manager: &'static BrowserVersionManager,
|
||||
auto_updater: &'static AutoUpdater,
|
||||
app_handle: Option<tauri::AppHandle>,
|
||||
}
|
||||
|
||||
impl VersionUpdater {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
version_service: BrowserVersionService::new(),
|
||||
auto_updater: AutoUpdater::new(),
|
||||
browser_version_manager: BrowserVersionManager::instance(),
|
||||
auto_updater: AutoUpdater::instance(),
|
||||
app_handle: None,
|
||||
}
|
||||
}
|
||||
@@ -128,14 +129,14 @@ impl VersionUpdater {
|
||||
let should_update = state.last_update_time == 0 || elapsed_secs >= update_interval_secs;
|
||||
|
||||
if should_update {
|
||||
println!(
|
||||
log::debug!(
|
||||
"Background update needed: last_update={}, elapsed={}h, required={}h",
|
||||
state.last_update_time,
|
||||
elapsed_secs / 3600,
|
||||
state.update_interval_hours
|
||||
);
|
||||
} else {
|
||||
println!(
|
||||
log::debug!(
|
||||
"Background update not needed: last_update={}, elapsed={}h, required={}h",
|
||||
state.last_update_time,
|
||||
elapsed_secs / 3600,
|
||||
@@ -151,12 +152,12 @@ impl VersionUpdater {
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Only run if an update is actually needed
|
||||
if !Self::should_run_background_update() {
|
||||
println!("No startup version update needed");
|
||||
log::debug!("No startup version update needed");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
if let Some(ref app_handle) = self.app_handle {
|
||||
println!("Running startup version update...");
|
||||
log::info!("Running startup version update...");
|
||||
|
||||
match self.update_all_browser_versions(app_handle).await {
|
||||
Ok(_) => {
|
||||
@@ -167,13 +168,13 @@ impl VersionUpdater {
|
||||
};
|
||||
|
||||
if let Err(e) = Self::save_background_update_state(&state) {
|
||||
eprintln!("Failed to save background update state: {e}");
|
||||
log::error!("Failed to save background update state: {e}");
|
||||
} else {
|
||||
println!("Startup version update completed successfully");
|
||||
log::info!("Startup version update completed successfully");
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Startup version update failed: {e}");
|
||||
log::error!("Startup version update failed: {e}");
|
||||
return Err(e);
|
||||
}
|
||||
}
|
||||
@@ -263,7 +264,7 @@ impl VersionUpdater {
|
||||
&self,
|
||||
app_handle: &tauri::AppHandle,
|
||||
) -> Result<Vec<BackgroundUpdateResult>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let supported_browsers = self.version_service.get_supported_browsers();
|
||||
let supported_browsers = self.browser_version_manager.get_supported_browsers();
|
||||
let total_browsers = supported_browsers.len();
|
||||
let mut results = Vec::new();
|
||||
let mut total_new_versions = 0;
|
||||
@@ -279,11 +280,11 @@ impl VersionUpdater {
|
||||
};
|
||||
|
||||
if let Err(e) = app_handle.emit("version-update-progress", &initial_progress) {
|
||||
eprintln!("Failed to emit initial progress: {e}");
|
||||
log::error!("Failed to emit initial progress: {e}");
|
||||
}
|
||||
|
||||
for (index, browser) in supported_browsers.iter().enumerate() {
|
||||
println!("Updating browser versions for: {browser}");
|
||||
log::debug!("Updating browser versions for: {browser}");
|
||||
|
||||
// Emit progress update for current browser
|
||||
let progress = VersionUpdateProgress {
|
||||
@@ -296,7 +297,7 @@ impl VersionUpdater {
|
||||
};
|
||||
|
||||
if let Err(e) = app_handle.emit("version-update-progress", &progress) {
|
||||
eprintln!("Failed to emit progress for {browser}: {e}");
|
||||
log::error!("Failed to emit progress for {browser}: {e}");
|
||||
}
|
||||
|
||||
match self.update_browser_versions(browser).await {
|
||||
@@ -322,7 +323,7 @@ impl VersionUpdater {
|
||||
};
|
||||
|
||||
if let Err(e) = app_handle.emit("version-update-progress", &progress) {
|
||||
eprintln!("Failed to emit progress with versions for {browser}: {e}");
|
||||
log::error!("Failed to emit progress with versions for {browser}: {e}");
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
@@ -374,7 +375,7 @@ impl VersionUpdater {
|
||||
browser: &str,
|
||||
) -> Result<usize, Box<dyn std::error::Error + Send + Sync>> {
|
||||
self
|
||||
.version_service
|
||||
.browser_version_manager
|
||||
.update_browser_versions_incrementally(browser)
|
||||
.await
|
||||
}
|
||||
@@ -392,7 +393,7 @@ impl VersionUpdater {
|
||||
};
|
||||
|
||||
if let Err(e) = Self::save_background_update_state(&state) {
|
||||
eprintln!("Failed to save background update state after manual update: {e}");
|
||||
log::error!("Failed to save background update state after manual update: {e}");
|
||||
}
|
||||
|
||||
Ok(results)
|
||||
@@ -455,6 +456,63 @@ pub async fn get_version_update_status() -> Result<(Option<u64>, u64), String> {
|
||||
Ok((last_update, time_until_next))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn clear_all_version_cache_and_refetch(
|
||||
app_handle: tauri::AppHandle,
|
||||
) -> Result<(), String> {
|
||||
let api_client = crate::api_client::ApiClient::instance();
|
||||
let version_updater = VersionUpdater::new();
|
||||
|
||||
// Clear all cache first
|
||||
api_client
|
||||
.clear_all_cache()
|
||||
.map_err(|e| format!("Failed to clear version cache: {e}"))?;
|
||||
|
||||
// Disable all browsers during the update process
|
||||
let supported_browsers = version_updater
|
||||
.browser_version_manager
|
||||
.get_supported_browsers();
|
||||
|
||||
// Load current state and disable all browsers
|
||||
let mut state = version_updater
|
||||
.auto_updater
|
||||
.load_auto_update_state()
|
||||
.map_err(|e| format!("Failed to load auto update state: {e}"))?;
|
||||
for browser in &supported_browsers {
|
||||
state.disabled_browsers.insert(browser.clone());
|
||||
}
|
||||
version_updater
|
||||
.auto_updater
|
||||
.save_auto_update_state(&state)
|
||||
.map_err(|e| format!("Failed to save auto update state: {e}"))?;
|
||||
|
||||
let updater = get_version_updater();
|
||||
let updater_guard = updater.lock().await;
|
||||
|
||||
let result = updater_guard
|
||||
.trigger_manual_update(&app_handle)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to trigger version update: {e}"));
|
||||
|
||||
// Re-enable all browsers after the update completes (regardless of success/failure)
|
||||
let mut final_state = version_updater
|
||||
.auto_updater
|
||||
.load_auto_update_state()
|
||||
.unwrap_or_default();
|
||||
for browser in &supported_browsers {
|
||||
final_state.disabled_browsers.remove(browser);
|
||||
}
|
||||
if let Err(e) = version_updater
|
||||
.auto_updater
|
||||
.save_auto_update_state(&final_state)
|
||||
{
|
||||
log::warn!("Failed to re-enable browsers after cache clear: {e}");
|
||||
}
|
||||
|
||||
result?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
@@ -487,13 +545,22 @@ mod tests {
|
||||
Err(_) => return BackgroundUpdateState::default(),
|
||||
};
|
||||
|
||||
serde_json::from_str(&content).unwrap_or_default()
|
||||
match serde_json::from_str(&content) {
|
||||
Ok(state) => state,
|
||||
Err(e) => {
|
||||
eprintln!("Failed to parse test state file {:?}: {}", state_file, e);
|
||||
BackgroundUpdateState::default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_background_update_state_persistence() {
|
||||
let test_name = "persistence";
|
||||
|
||||
// Clean up any existing test file first
|
||||
let _ = fs::remove_file(get_test_state_file(test_name));
|
||||
|
||||
// Create a test state
|
||||
let test_state = BackgroundUpdateState {
|
||||
last_update_time: 1609459200, // 2021-01-01 00:00:00 UTC
|
||||
@@ -503,14 +570,22 @@ mod tests {
|
||||
// Save the state
|
||||
save_test_state(test_name, &test_state).unwrap();
|
||||
|
||||
// Verify file was created
|
||||
let state_file = get_test_state_file(test_name);
|
||||
assert!(state_file.exists(), "State file should exist after saving");
|
||||
|
||||
// Load the state back
|
||||
let loaded_state = load_test_state(test_name);
|
||||
|
||||
// Verify the values match
|
||||
assert_eq!(loaded_state.last_update_time, test_state.last_update_time);
|
||||
assert_eq!(
|
||||
loaded_state.update_interval_hours,
|
||||
test_state.update_interval_hours
|
||||
loaded_state.last_update_time, test_state.last_update_time,
|
||||
"last_update_time should match. Expected: {}, Got: {}",
|
||||
test_state.last_update_time, loaded_state.last_update_time
|
||||
);
|
||||
assert_eq!(
|
||||
loaded_state.update_interval_hours, test_state.update_interval_hours,
|
||||
"update_interval_hours should match"
|
||||
);
|
||||
|
||||
// Clean up
|
||||
@@ -519,31 +594,110 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_should_run_background_update_logic() {
|
||||
// Note: This test uses the shared state file, so results may vary
|
||||
// depending on previous test runs. This is expected behavior.
|
||||
// Create isolated test states to avoid interference
|
||||
let current_time = VersionUpdater::get_current_timestamp();
|
||||
|
||||
// Test with recent update (should not update)
|
||||
let recent_state = BackgroundUpdateState {
|
||||
last_update_time: VersionUpdater::get_current_timestamp() - 60, // 1 minute ago
|
||||
last_update_time: current_time - 60, // 1 minute ago
|
||||
update_interval_hours: 3,
|
||||
};
|
||||
VersionUpdater::save_background_update_state(&recent_state).unwrap();
|
||||
assert!(!VersionUpdater::should_run_background_update());
|
||||
|
||||
// Save and test recent state
|
||||
let save_result = VersionUpdater::save_background_update_state(&recent_state);
|
||||
assert!(save_result.is_ok(), "Should save recent state successfully");
|
||||
|
||||
let should_update_recent = VersionUpdater::should_run_background_update();
|
||||
assert!(
|
||||
!should_update_recent,
|
||||
"Should not update when last update was recent"
|
||||
);
|
||||
|
||||
// Test with old update (should update)
|
||||
let old_state = BackgroundUpdateState {
|
||||
last_update_time: VersionUpdater::get_current_timestamp() - (4 * 60 * 60), // 4 hours ago
|
||||
last_update_time: current_time - (4 * 60 * 60), // 4 hours ago
|
||||
update_interval_hours: 3,
|
||||
};
|
||||
VersionUpdater::save_background_update_state(&old_state).unwrap();
|
||||
assert!(VersionUpdater::should_run_background_update());
|
||||
|
||||
// Save and test old state
|
||||
let save_result = VersionUpdater::save_background_update_state(&old_state);
|
||||
assert!(save_result.is_ok(), "Should save old state successfully");
|
||||
|
||||
let should_update_old = VersionUpdater::should_run_background_update();
|
||||
assert!(should_update_old, "Should update when last update was old");
|
||||
|
||||
// Test with never updated (should update)
|
||||
let never_updated_state = BackgroundUpdateState {
|
||||
last_update_time: 0,
|
||||
update_interval_hours: 3,
|
||||
};
|
||||
|
||||
let save_result = VersionUpdater::save_background_update_state(&never_updated_state);
|
||||
assert!(
|
||||
save_result.is_ok(),
|
||||
"Should save never updated state successfully"
|
||||
);
|
||||
|
||||
let should_update_never = VersionUpdater::should_run_background_update();
|
||||
assert!(
|
||||
should_update_never,
|
||||
"Should update when never updated before"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_cache_dir_creation() {
|
||||
// This should not panic and should create the directory if it doesn't exist
|
||||
let cache_dir = VersionUpdater::get_cache_dir().unwrap();
|
||||
assert!(cache_dir.exists());
|
||||
assert!(cache_dir.is_dir());
|
||||
fn test_version_updater_creation() {
|
||||
let updater = VersionUpdater::new();
|
||||
|
||||
// Should have valid references to services
|
||||
assert!(
|
||||
!std::ptr::eq(
|
||||
updater.browser_version_manager as *const _,
|
||||
std::ptr::null()
|
||||
),
|
||||
"Version service should not be null"
|
||||
);
|
||||
assert!(
|
||||
!std::ptr::eq(updater.auto_updater as *const _, std::ptr::null()),
|
||||
"Auto updater should not be null"
|
||||
);
|
||||
assert!(
|
||||
updater.app_handle.is_none(),
|
||||
"App handle should initially be None"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_current_timestamp() {
|
||||
let timestamp1 = VersionUpdater::get_current_timestamp();
|
||||
|
||||
// Should be a reasonable timestamp (after year 2020)
|
||||
assert!(
|
||||
timestamp1 > 1577836800,
|
||||
"Timestamp should be after 2020-01-01"
|
||||
); // 2020-01-01 00:00:00 UTC
|
||||
|
||||
// Should be before year 2100
|
||||
assert!(
|
||||
timestamp1 < 4102444800,
|
||||
"Timestamp should be before 2100-01-01"
|
||||
); // 2100-01-01 00:00:00 UTC
|
||||
|
||||
// Wait a tiny bit and check it increases
|
||||
std::thread::sleep(std::time::Duration::from_millis(1));
|
||||
let timestamp2 = VersionUpdater::get_current_timestamp();
|
||||
assert!(timestamp2 >= timestamp1, "Timestamp should not decrease");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_version_updater_singleton() {
|
||||
let updater1 = get_version_updater();
|
||||
let updater2 = get_version_updater();
|
||||
|
||||
// Should return the same Arc instance
|
||||
assert!(
|
||||
Arc::ptr_eq(&updater1, &updater2),
|
||||
"Should return same singleton instance"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"$schema": "https://schema.tauri.app/config/2",
|
||||
"productName": "Donut Browser",
|
||||
"version": "0.5.2",
|
||||
"productName": "Donut",
|
||||
"version": "0.13.3",
|
||||
"identifier": "com.donutbrowser",
|
||||
"build": {
|
||||
"beforeDevCommand": "pnpm dev",
|
||||
"beforeDevCommand": "pnpm copy-proxy-binary && pnpm dev",
|
||||
"devUrl": "http://localhost:3000",
|
||||
"beforeBuildCommand": "pnpm build",
|
||||
"beforeBuildCommand": "pnpm copy-proxy-binary && (test -d ../dist || pnpm build)",
|
||||
"frontendDist": "../dist"
|
||||
},
|
||||
"app": {
|
||||
@@ -17,9 +17,9 @@
|
||||
},
|
||||
"bundle": {
|
||||
"active": true,
|
||||
"targets": "all",
|
||||
"targets": ["app", "dmg", "nsis", "deb", "rpm", "appimage"],
|
||||
"category": "Productivity",
|
||||
"externalBin": ["binaries/nodecar"],
|
||||
"externalBin": ["binaries/nodecar", "binaries/donut-proxy"],
|
||||
"icon": [
|
||||
"icons/32x32.png",
|
||||
"icons/128x128.png",
|
||||
@@ -41,22 +41,23 @@
|
||||
},
|
||||
"linux": {
|
||||
"deb": {
|
||||
"depends": ["xdg-utils"],
|
||||
"files": {
|
||||
"/usr/share/applications/donutbrowser.desktop": "donutbrowser.desktop"
|
||||
}
|
||||
"desktopTemplate": "donutbrowser.desktop",
|
||||
"depends": ["xdg-utils"]
|
||||
},
|
||||
"rpm": {
|
||||
"depends": ["xdg-utils"],
|
||||
"files": {
|
||||
"/usr/share/applications/donutbrowser.desktop": "donutbrowser.desktop"
|
||||
}
|
||||
"desktopTemplate": "donutbrowser.desktop",
|
||||
"depends": ["xdg-utils"]
|
||||
},
|
||||
"appimage": {
|
||||
"files": {
|
||||
"usr/share/applications/donutbrowser.desktop": "donutbrowser.desktop"
|
||||
}
|
||||
}
|
||||
},
|
||||
"windows": {
|
||||
"certificateThumbprint": null,
|
||||
"digestAlgorithm": "sha256",
|
||||
"timestampUrl": ""
|
||||
}
|
||||
},
|
||||
"plugins": {
|
||||
|
||||
@@ -0,0 +1 @@
Hello, World!
@@ -0,0 +1,21 @@
use std::path::PathBuf;
use std::process::Command;

/// Utility functions for integration tests
pub struct TestUtils;

impl TestUtils {
  /// Execute a command (generic, for donut-proxy tests)
  #[allow(dead_code)]
  pub async fn execute_command(
    binary_path: &PathBuf,
    args: &[&str],
  ) -> Result<std::process::Output, Box<dyn std::error::Error + Send + Sync>> {
    let mut cmd = Command::new(binary_path);
    cmd.args(args);

    let output = tokio::process::Command::from(cmd).output().await?;

    Ok(output)
  }
}

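A minimal usage sketch for this helper, not part of the diff: the binary path and test name below are illustrative assumptions, while the "proxy list" subcommand and its JSON output mirror what the integration tests in the next file rely on.

mod common;
use common::TestUtils;
use std::path::PathBuf;

#[tokio::test]
async fn example_list_proxies() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
  // Assumed location of an already-built debug binary; the real tests resolve
  // this from CARGO_MANIFEST_DIR and build it on demand (see setup_test below).
  let binary = PathBuf::from("src-tauri/target/debug/donut-proxy");

  // Run "donut-proxy proxy list" and parse the JSON array it prints to stdout.
  let output = TestUtils::execute_command(&binary, &["proxy", "list"]).await?;
  assert!(output.status.success());
  let proxies: Vec<serde_json::Value> = serde_json::from_str(&String::from_utf8(output.stdout)?)?;
  println!("{} proxies currently running", proxies.len());
  Ok(())
}
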
@@ -0,0 +1,640 @@
mod common;
use common::TestUtils;
use serde_json::Value;
use serial_test::serial;
use std::time::Duration;
use tokio::io::{AsyncReadExt, AsyncWriteExt};
use tokio::net::TcpStream;
use tokio::time::sleep;

/// Setup function to ensure donut-proxy binary exists and cleanup stale proxies
async fn setup_test() -> Result<std::path::PathBuf, Box<dyn std::error::Error + Send + Sync>> {
  let cargo_manifest_dir = std::env::var("CARGO_MANIFEST_DIR")?;
  let project_root = std::path::PathBuf::from(cargo_manifest_dir)
    .parent()
    .unwrap()
    .to_path_buf();

  // Build donut-proxy binary if it doesn't exist
  let proxy_binary = project_root
    .join("src-tauri")
    .join("target")
    .join("debug")
    .join("donut-proxy");

  if !proxy_binary.exists() {
    println!("Building donut-proxy binary for integration tests...");
    let build_status = std::process::Command::new("cargo")
      .args(["build", "--bin", "donut-proxy"])
      .current_dir(project_root.join("src-tauri"))
      .status()?;

    if !build_status.success() {
      return Err("Failed to build donut-proxy binary".into());
    }
  }

  if !proxy_binary.exists() {
    return Err("donut-proxy binary was not created successfully".into());
  }

  // Clean up any stale proxies from previous test runs
  let _ = TestUtils::execute_command(&proxy_binary, &["proxy", "stop"]).await;

  Ok(proxy_binary)
}

/// Helper to track and cleanup proxy processes
struct ProxyTestTracker {
  proxy_ids: Vec<String>,
  binary_path: std::path::PathBuf,
}

impl ProxyTestTracker {
  fn new(binary_path: std::path::PathBuf) -> Self {
    Self {
      proxy_ids: Vec::new(),
      binary_path,
    }
  }

  fn track_proxy(&mut self, proxy_id: String) {
    self.proxy_ids.push(proxy_id);
  }

  async fn cleanup_all(&self) {
    for proxy_id in &self.proxy_ids {
      let _ =
        TestUtils::execute_command(&self.binary_path, &["proxy", "stop", "--id", proxy_id]).await;
    }
  }
}

impl Drop for ProxyTestTracker {
  fn drop(&mut self) {
    let proxy_ids = self.proxy_ids.clone();
    let binary_path = self.binary_path.clone();
    tokio::spawn(async move {
      for proxy_id in &proxy_ids {
        let _ =
          TestUtils::execute_command(&binary_path, &["proxy", "stop", "--id", proxy_id]).await;
      }
    });
  }
}

/// Test starting a local proxy without upstream proxy (DIRECT)
#[tokio::test]
#[serial]
async fn test_local_proxy_direct() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
  let binary_path = setup_test().await?;
  let mut tracker = ProxyTestTracker::new(binary_path.clone());

  println!("Starting local proxy without upstream (DIRECT)...");

  let output = TestUtils::execute_command(&binary_path, &["proxy", "start"]).await?;

  if !output.status.success() {
    let stderr = String::from_utf8_lossy(&output.stderr);
    let stdout = String::from_utf8_lossy(&output.stdout);
    return Err(format!("Proxy start failed - stdout: {stdout}, stderr: {stderr}").into());
  }

  let stdout = String::from_utf8(output.stdout)?;
  let config: Value = serde_json::from_str(&stdout)?;

  let proxy_id = config["id"].as_str().unwrap().to_string();
  let local_port = config["localPort"].as_u64().unwrap() as u16;
  let local_url = config["localUrl"].as_str().unwrap();
  let upstream_url = config["upstreamUrl"].as_str().unwrap();

  tracker.track_proxy(proxy_id.clone());

  println!(
    "Proxy started: id={}, port={}, url={}, upstream={}",
    proxy_id, local_port, local_url, upstream_url
  );

  // Verify proxy is listening
  sleep(Duration::from_millis(500)).await;
  match TcpStream::connect(("127.0.0.1", local_port)).await {
    Ok(_) => {
      println!("Proxy is listening on port {local_port}");
    }
    Err(e) => {
      return Err(format!("Proxy port {local_port} is not listening: {e}").into());
    }
  }

  // Test making an HTTP request through the proxy
  let mut stream = TcpStream::connect(("127.0.0.1", local_port)).await?;
  let request =
    b"GET http://httpbin.org/ip HTTP/1.1\r\nHost: httpbin.org\r\nConnection: close\r\n\r\n";
  stream.write_all(request).await?;

  let mut response = Vec::new();
  stream.read_to_end(&mut response).await?;
  let response_str = String::from_utf8_lossy(&response);

  if response_str.contains("200 OK") || response_str.contains("origin") {
    println!("Proxy successfully forwarded HTTP request");
  } else {
    println!(
      "Warning: Proxy response may be unexpected: {}",
      &response_str[..response_str.len().min(200)]
    );
  }

  // Cleanup
  tracker.cleanup_all().await;

  Ok(())
}

/// Test chaining local proxies (local proxy -> local proxy -> internet)
#[tokio::test]
#[serial]
async fn test_chained_local_proxies() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
  let binary_path = setup_test().await?;
  let mut tracker = ProxyTestTracker::new(binary_path.clone());

  println!("Testing chained local proxies...");

  // Start first proxy (DIRECT - connects to internet)
  let output1 = TestUtils::execute_command(&binary_path, &["proxy", "start"]).await?;
  if !output1.status.success() {
    let stderr = String::from_utf8_lossy(&output1.stderr);
    let stdout = String::from_utf8_lossy(&output1.stdout);
    return Err(format!("Failed to start first proxy - stdout: {stdout}, stderr: {stderr}").into());
  }

  let config1: Value = serde_json::from_str(&String::from_utf8(output1.stdout)?)?;
  let proxy1_id = config1["id"].as_str().unwrap().to_string();
  let proxy1_port = config1["localPort"].as_u64().unwrap() as u16;
  tracker.track_proxy(proxy1_id.clone());

  println!("First proxy started on port {}", proxy1_port);

  // Wait for first proxy to be ready
  sleep(Duration::from_millis(500)).await;
  match TcpStream::connect(("127.0.0.1", proxy1_port)).await {
    Ok(_) => println!("First proxy is ready"),
    Err(e) => return Err(format!("First proxy not ready: {e}").into()),
  }

  // Start second proxy chained to first proxy
  let output2 = TestUtils::execute_command(
    &binary_path,
    &[
      "proxy",
      "start",
      "--host",
      "127.0.0.1",
      "--proxy-port",
      &proxy1_port.to_string(),
      "--type",
      "http",
    ],
  )
  .await?;

  if !output2.status.success() {
    let stderr = String::from_utf8_lossy(&output2.stderr);
    let stdout = String::from_utf8_lossy(&output2.stdout);
    return Err(
      format!("Failed to start second proxy - stdout: {stdout}, stderr: {stderr}").into(),
    );
  }

  let config2: Value = serde_json::from_str(&String::from_utf8(output2.stdout)?)?;
  let proxy2_id = config2["id"].as_str().unwrap().to_string();
  let proxy2_port = config2["localPort"].as_u64().unwrap() as u16;
  tracker.track_proxy(proxy2_id.clone());

  println!(
    "Second proxy started on port {} (chained to proxy on port {})",
    proxy2_port, proxy1_port
  );

  // Wait for second proxy to be ready
  sleep(Duration::from_millis(500)).await;
  match TcpStream::connect(("127.0.0.1", proxy2_port)).await {
    Ok(_) => println!("Second proxy is ready"),
    Err(e) => return Err(format!("Second proxy not ready: {e}").into()),
  }

  // Test making an HTTP request through the chained proxy
  let mut stream = TcpStream::connect(("127.0.0.1", proxy2_port)).await?;
  let request =
    b"GET http://httpbin.org/ip HTTP/1.1\r\nHost: httpbin.org\r\nConnection: close\r\n\r\n";
  stream.write_all(request).await?;

  let mut response = Vec::new();
  stream.read_to_end(&mut response).await?;
  let response_str = String::from_utf8_lossy(&response);

  if response_str.contains("200 OK") || response_str.contains("origin") {
    println!("Chained proxy successfully forwarded HTTP request");
  } else {
    println!(
      "Warning: Chained proxy response may be unexpected: {}",
      &response_str[..response_str.len().min(200)]
    );
  }

  // Cleanup
  tracker.cleanup_all().await;

  Ok(())
}

/// Test starting a local proxy with HTTP upstream proxy
#[tokio::test]
#[serial]
async fn test_local_proxy_with_http_upstream(
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
  let binary_path = setup_test().await?;
  let mut tracker = ProxyTestTracker::new(binary_path.clone());

  // Start a mock HTTP upstream proxy server
  let upstream_listener = tokio::net::TcpListener::bind("127.0.0.1:0").await?;
  let upstream_addr = upstream_listener.local_addr()?;
  let upstream_port = upstream_addr.port();

  let upstream_handle = tokio::spawn(async move {
    use http_body_util::Full;
    use hyper::body::Bytes;
    use hyper::server::conn::http1;
    use hyper::service::service_fn;
    use hyper::{Response, StatusCode};
    use hyper_util::rt::TokioIo;

    while let Ok((stream, _)) = upstream_listener.accept().await {
      let io = TokioIo::new(stream);
      tokio::task::spawn(async move {
        let service = service_fn(|_req| async {
          Ok::<_, hyper::Error>(
            Response::builder()
              .status(StatusCode::OK)
              .body(Full::new(Bytes::from("Upstream Proxy Response")))
              .unwrap(),
          )
        });
        let _ = http1::Builder::new().serve_connection(io, service).await;
      });
    }
  });

  sleep(Duration::from_millis(200)).await;

  println!("Starting local proxy with HTTP upstream proxy...");

  let output = TestUtils::execute_command(
    &binary_path,
    &[
      "proxy",
      "start",
      "--host",
      "127.0.0.1",
      "--proxy-port",
      &upstream_port.to_string(),
      "--type",
      "http",
    ],
  )
  .await?;

  if !output.status.success() {
    let stderr = String::from_utf8_lossy(&output.stderr);
    let stdout = String::from_utf8_lossy(&output.stdout);
    upstream_handle.abort();
    return Err(format!("Proxy start failed - stdout: {stdout}, stderr: {stderr}").into());
  }

  let stdout = String::from_utf8(output.stdout)?;
  let config: Value = serde_json::from_str(&stdout)?;

  let proxy_id = config["id"].as_str().unwrap().to_string();
  let local_port = config["localPort"].as_u64().unwrap() as u16;
  tracker.track_proxy(proxy_id.clone());

  println!("Proxy started: id={}, port={}", proxy_id, local_port);

  // Verify proxy is listening
  sleep(Duration::from_millis(500)).await;
  match TcpStream::connect(("127.0.0.1", local_port)).await {
    Ok(_) => {
      println!("Proxy is listening on port {local_port}");
    }
    Err(e) => {
      upstream_handle.abort();
      return Err(format!("Proxy port {local_port} is not listening: {e}").into());
    }
  }

  // Cleanup
  tracker.cleanup_all().await;
  upstream_handle.abort();

  Ok(())
}

/// Test multiple proxies running simultaneously
#[tokio::test]
#[serial]
async fn test_multiple_proxies_simultaneously(
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
  let binary_path = setup_test().await?;
  let mut tracker = ProxyTestTracker::new(binary_path.clone());

  println!("Starting multiple proxies simultaneously...");

  let mut proxy_ports = Vec::new();

  // Start 3 proxies, waiting for each to be ready before starting the next
  // This avoids race conditions on macOS where processes need time to initialize
  for i in 0..3 {
    let output = TestUtils::execute_command(&binary_path, &["proxy", "start"]).await?;
    if !output.status.success() {
      let stderr = String::from_utf8_lossy(&output.stderr);
      let stdout = String::from_utf8_lossy(&output.stdout);
      return Err(
        format!(
          "Failed to start proxy {} - stdout: {}, stderr: {}",
          i + 1,
          stdout,
          stderr
        )
        .into(),
      );
    }

    let config: Value = serde_json::from_str(&String::from_utf8(output.stdout)?)?;
    let proxy_id = config["id"].as_str().unwrap().to_string();
    let local_port = config["localPort"].as_u64().unwrap() as u16;
    tracker.track_proxy(proxy_id);
    proxy_ports.push(local_port);

    println!("Proxy {} started on port {}", i + 1, local_port);

    // Wait for this proxy to be ready before starting the next one
    // This prevents race conditions on macOS where processes need time to initialize
    let mut attempts = 0;
    let max_attempts = 50; // 5 seconds max (50 * 100ms)
    loop {
      sleep(Duration::from_millis(100)).await;
      match TcpStream::connect(("127.0.0.1", local_port)).await {
        Ok(_) => {
          println!("Proxy {} is ready on port {}", i + 1, local_port);
          break;
        }
        Err(_) => {
          attempts += 1;
          if attempts >= max_attempts {
            return Err(
              format!(
                "Proxy {} on port {} failed to become ready after {} attempts",
                i + 1,
                local_port,
                max_attempts
              )
              .into(),
            );
          }
        }
      }
    }
  }

  // Verify all proxies are still listening
  for (i, port) in proxy_ports.iter().enumerate() {
    match TcpStream::connect(("127.0.0.1", *port)).await {
      Ok(_) => {
        println!("Proxy {} is listening on port {}", i + 1, port);
      }
      Err(e) => {
        return Err(format!("Proxy {} on port {} is not listening: {e}", i + 1, port).into());
      }
    }
  }

  // Cleanup
  tracker.cleanup_all().await;

  Ok(())
}

/// Test proxy listing
#[tokio::test]
#[serial]
async fn test_proxy_list() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
  let binary_path = setup_test().await?;
  let mut tracker = ProxyTestTracker::new(binary_path.clone());

  // Start a proxy
  let output = TestUtils::execute_command(&binary_path, &["proxy", "start"]).await?;
  if !output.status.success() {
    let stderr = String::from_utf8_lossy(&output.stderr);
    let stdout = String::from_utf8_lossy(&output.stdout);
    return Err(format!("Failed to start proxy - stdout: {stdout}, stderr: {stderr}").into());
  }

  let config: Value = serde_json::from_str(&String::from_utf8(output.stdout)?)?;
  let proxy_id = config["id"].as_str().unwrap().to_string();
  tracker.track_proxy(proxy_id.clone());

  // List proxies
  let list_output = TestUtils::execute_command(&binary_path, &["proxy", "list"]).await?;
  if !list_output.status.success() {
    return Err("Failed to list proxies".into());
  }

  let list_stdout = String::from_utf8(list_output.stdout)?;
  let proxies: Vec<Value> = serde_json::from_str(&list_stdout)?;

  // Verify our proxy is in the list
  let found = proxies.iter().any(|p| p["id"].as_str() == Some(&proxy_id));
  assert!(found, "Proxy should be in the list");

  // Cleanup
  tracker.cleanup_all().await;

  Ok(())
}

/// Test traffic tracking through proxy
#[tokio::test]
#[serial]
async fn test_traffic_tracking() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
  let binary_path = setup_test().await?;
  let mut tracker = ProxyTestTracker::new(binary_path.clone());

  println!("Testing traffic tracking through proxy...");

  // Start a proxy
  let output = TestUtils::execute_command(&binary_path, &["proxy", "start"]).await?;
  if !output.status.success() {
    let stderr = String::from_utf8_lossy(&output.stderr);
    let stdout = String::from_utf8_lossy(&output.stdout);
    return Err(format!("Failed to start proxy - stdout: {stdout}, stderr: {stderr}").into());
  }

  let config: Value = serde_json::from_str(&String::from_utf8(output.stdout)?)?;
  let proxy_id = config["id"].as_str().unwrap().to_string();
  let local_port = config["localPort"].as_u64().unwrap() as u16;
  tracker.track_proxy(proxy_id.clone());

  println!("Proxy started on port {}", local_port);

  // Wait for proxy to be ready
  sleep(Duration::from_millis(500)).await;

  // Make an HTTP request through the proxy
  let mut stream = TcpStream::connect(("127.0.0.1", local_port)).await?;
  let request =
    b"GET http://httpbin.org/ip HTTP/1.1\r\nHost: httpbin.org\r\nConnection: close\r\n\r\n";

  // Track bytes sent
  let bytes_sent = request.len();
  stream.write_all(request).await?;

  // Read response
  let mut response = Vec::new();
  stream.read_to_end(&mut response).await?;
  let bytes_received = response.len();

  println!(
    "HTTP request completed: sent {} bytes, received {} bytes",
    bytes_sent, bytes_received
  );

  // Wait for traffic stats to be flushed (happens every second)
  sleep(Duration::from_secs(2)).await;

  // Verify traffic was tracked by checking traffic stats file exists
  // Note: Traffic stats are stored in the cache directory
  let cache_dir = directories::BaseDirs::new()
    .expect("Failed to get base directories")
    .cache_dir()
    .to_path_buf();
  let traffic_stats_dir = cache_dir.join("DonutBrowserDev").join("traffic_stats");
  let stats_file = traffic_stats_dir.join(format!("{}.json", proxy_id));

  if stats_file.exists() {
    let content = std::fs::read_to_string(&stats_file)?;
    let stats: Value = serde_json::from_str(&content)?;

    let total_sent = stats["total_bytes_sent"].as_u64().unwrap_or(0);
    let total_received = stats["total_bytes_received"].as_u64().unwrap_or(0);
    let total_requests = stats["total_requests"].as_u64().unwrap_or(0);

    println!(
      "Traffic stats recorded: sent {} bytes, received {} bytes, {} requests",
      total_sent, total_received, total_requests
    );

    // Check if domains are being tracked
    let mut domain_traffic = false;
    if let Some(domains) = stats.get("domains") {
      if let Some(domain_map) = domains.as_object() {
        println!("Domains tracked: {}", domain_map.len());
        for (domain, domain_stats) in domain_map {
          println!(" - {}", domain);
          // Check if any domain has traffic
          if let Some(domain_obj) = domain_stats.as_object() {
            let domain_sent = domain_obj
              .get("bytes_sent")
              .and_then(|v| v.as_u64())
              .unwrap_or(0);
            let domain_recv = domain_obj
              .get("bytes_received")
              .and_then(|v| v.as_u64())
              .unwrap_or(0);
            let domain_reqs = domain_obj
              .get("request_count")
              .and_then(|v| v.as_u64())
              .unwrap_or(0);
            println!(
              "    sent: {}, received: {}, requests: {}",
              domain_sent, domain_recv, domain_reqs
            );
            if domain_sent > 0 || domain_recv > 0 || domain_reqs > 0 {
              domain_traffic = true;
            }
          }
        }
      }
    }

    // Verify that some traffic was recorded - check either total bytes or domain traffic
    assert!(
      total_sent > 0 || total_received > 0 || total_requests > 0 || domain_traffic,
      "Traffic stats should record some activity (sent: {}, received: {}, requests: {})",
      total_sent,
      total_received,
      total_requests
    );

    println!("Traffic tracking test passed!");
  } else {
    println!("Warning: Traffic stats file not found at {:?}", stats_file);
    // This is not necessarily a failure - the file may not have been created yet
    // The important thing is that the proxy is working
  }

  // Cleanup
  tracker.cleanup_all().await;

  // Clean up the traffic stats file
  if stats_file.exists() {
    let _ = std::fs::remove_file(&stats_file);
  }

  Ok(())
}

/// Test proxy stop
#[tokio::test]
#[serial]
async fn test_proxy_stop() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
  let binary_path = setup_test().await?;
  let _tracker = ProxyTestTracker::new(binary_path.clone());

  // Start a proxy
  let output = TestUtils::execute_command(&binary_path, &["proxy", "start"]).await?;
  if !output.status.success() {
    let stderr = String::from_utf8_lossy(&output.stderr);
    let stdout = String::from_utf8_lossy(&output.stdout);
    return Err(format!("Failed to start proxy - stdout: {stdout}, stderr: {stderr}").into());
  }

  let config: Value = serde_json::from_str(&String::from_utf8(output.stdout)?)?;
  let proxy_id = config["id"].as_str().unwrap().to_string();
  let local_port = config["localPort"].as_u64().unwrap() as u16;

  // Verify proxy is running
  sleep(Duration::from_millis(500)).await;
  match TcpStream::connect(("127.0.0.1", local_port)).await {
    Ok(_) => println!("Proxy is running"),
    Err(_) => return Err("Proxy is not running".into()),
  }

  // Stop the proxy
  let stop_output =
    TestUtils::execute_command(&binary_path, &["proxy", "stop", "--id", &proxy_id]).await?;

  if !stop_output.status.success() {
    return Err("Failed to stop proxy".into());
  }

  // Wait a bit for the process to exit
  sleep(Duration::from_millis(500)).await;

  // Verify proxy is stopped (connection should fail)
  match TcpStream::connect(("127.0.0.1", local_port)).await {
    Ok(_) => return Err("Proxy should be stopped but is still listening".into()),
    Err(_) => println!("Proxy successfully stopped"),
  }

  Ok(())
}

+8 -1
@@ -1,10 +1,13 @@
"use client";
import { Geist, Geist_Mono } from "next/font/google";
import "@/styles/globals.css";
import "flag-icons/css/flag-icons.min.css";
import { useEffect } from "react";
import { CustomThemeProvider } from "@/components/theme-provider";
import { Toaster } from "@/components/ui/sonner";
import { TooltipProvider } from "@/components/ui/tooltip";
import { WindowDragArea } from "@/components/window-drag-area";
import { setupLogging } from "@/lib/logger";

const geistSans = Geist({
  variable: "--font-geist-sans",
@@ -21,10 +24,14 @@ export default function RootLayout({
}: Readonly<{
  children: React.ReactNode;
}>) {
  useEffect(() => {
    void setupLogging();
  }, []);

  return (
    <html lang="en" suppressHydrationWarning>
      <body
        className={`${geistSans.variable} ${geistMono.variable} antialiased`}
        className={`${geistSans.variable} ${geistMono.variable} antialiased overflow-hidden bg-background`}
      >
        <CustomThemeProvider>
          <WindowDragArea />

+647 -382
File diff suppressed because it is too large
@@ -1,26 +1,52 @@
"use client";

import { FaDownload, FaExternalLinkAlt, FaTimes } from "react-icons/fa";
import { LuCheckCheck, LuCog, LuRefreshCw } from "react-icons/lu";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
import React from "react";
import { FaDownload, FaTimes } from "react-icons/fa";
import { LuRefreshCw } from "react-icons/lu";

interface AppUpdateInfo {
  current_version: string;
  new_version: string;
  release_notes: string;
  download_url: string;
  is_nightly: boolean;
  published_at: string;
}
import type { AppUpdateInfo, AppUpdateProgress } from "@/types";
import { RippleButton } from "./ui/ripple";

interface AppUpdateToastProps {
  updateInfo: AppUpdateInfo;
  onUpdate: (updateInfo: AppUpdateInfo) => Promise<void>;
  onDismiss: () => void;
  isUpdating?: boolean;
  updateProgress?: string;
  updateProgress?: AppUpdateProgress | null;
}

function getStageIcon(stage?: string, isUpdating?: boolean) {
  if (!isUpdating) {
    return <FaDownload className="flex-shrink-0 w-5 h-5" />;
  }

  switch (stage) {
    case "downloading":
      return <FaDownload className="flex-shrink-0 w-5 h-5" />;
    case "extracting":
      return <LuRefreshCw className="flex-shrink-0 w-5 h-5 animate-spin" />;
    case "installing":
      return <LuCog className="flex-shrink-0 w-5 h-5 animate-spin" />;
    case "completed":
      return <LuCheckCheck className="flex-shrink-0 w-5 h-5" />;
    default:
      return <LuRefreshCw className="flex-shrink-0 w-5 h-5 animate-spin" />;
  }
}

function getStageDisplayName(stage?: string) {
  switch (stage) {
    case "downloading":
      return "Downloading";
    case "extracting":
      return "Extracting";
    case "installing":
      return "Installing";
    case "completed":
      return "Completed";
    default:
      return "Updating";
  }
}

export function AppUpdateToast({
@@ -34,22 +60,42 @@ export function AppUpdateToast({
    await onUpdate(updateInfo);
  };

  const handleViewRelease = () => {
    if (updateInfo.release_page_url) {
      // Trigger the same URL handling logic as if the URL came from the system
      const event = new CustomEvent("url-open-request", {
        detail: updateInfo.release_page_url,
      });
      window.dispatchEvent(event);
    }
  };

  const showDownloadProgress =
    isUpdating &&
    updateProgress?.stage === "downloading" &&
    updateProgress.percentage !== undefined;

  const showOtherStageProgress =
    isUpdating &&
    updateProgress &&
    (updateProgress.stage === "extracting" ||
      updateProgress.stage === "installing" ||
      updateProgress.stage === "completed");

  return (
    <div className="flex items-start w-full bg-white dark:bg-gray-800 border border-gray-200 dark:border-gray-700 rounded-lg p-4 shadow-lg max-w-md">
    <div className="flex items-start p-4 w-full max-w-md rounded-lg border shadow-lg bg-card border-border text-card-foreground">
      <div className="mr-3 mt-0.5">
        {isUpdating ? (
          <LuRefreshCw className="h-5 w-5 text-blue-500 animate-spin flex-shrink-0" />
        ) : (
          <FaDownload className="h-5 w-5 text-blue-500 flex-shrink-0" />
        )}
        {getStageIcon(updateProgress?.stage, isUpdating)}
      </div>

      <div className="flex-1 min-w-0">
        <div className="flex items-start justify-between gap-2">
        <div className="flex gap-2 justify-between items-start">
          <div className="flex flex-col gap-1">
            <div className="flex items-center gap-2">
              <span className="font-semibold text-foreground text-sm">
                Donut Browser Update Available
            <div className="flex gap-2 items-center">
              <span className="text-sm font-semibold text-foreground">
                {isUpdating
                  ? `${getStageDisplayName(updateProgress?.stage)} Donut Browser Update`
                  : "Donut Browser Update Available"}
              </span>
              <Badge
                variant={updateInfo.is_nightly ? "secondary" : "default"}
@@ -59,8 +105,19 @@ export function AppUpdateToast({
              </Badge>
            </div>
            <div className="text-xs text-muted-foreground">
              Update from {updateInfo.current_version} to{" "}
              <span className="font-medium">{updateInfo.new_version}</span>
              {isUpdating ? (
                updateProgress?.message || "Updating..."
              ) : (
                <>
                  Update from {updateInfo.current_version} to{" "}
                  <span className="font-medium">{updateInfo.new_version}</span>
                  {updateInfo.manual_update_required && (
                    <span className="block mt-1 text-muted-foreground/80">
                      Manual download required on Linux
                    </span>
                  )}
                </>
              )}
            </div>
          </div>

@@ -69,52 +126,77 @@
              variant="ghost"
              size="sm"
              onClick={onDismiss}
              className="h-6 w-6 p-0 shrink-0"
              className="p-0 w-6 h-6 shrink-0"
            >
              <FaTimes className="h-3 w-3" />
              <FaTimes className="w-3 h-3" />
            </Button>
          )}
        </div>

        {isUpdating && updateProgress && (
          <div className="mt-2">
            <p className="text-xs text-muted-foreground">{updateProgress}</p>
        {/* Download progress */}
        {showDownloadProgress && updateProgress && (
          <div className="mt-2 space-y-1">
            <div className="flex justify-between items-center">
              <p className="flex-1 min-w-0 text-xs text-muted-foreground">
                {updateProgress.percentage?.toFixed(1)}%
                {updateProgress.speed && ` • ${updateProgress.speed} MB/s`}
                {updateProgress.eta && ` • ${updateProgress.eta} remaining`}
              </p>
            </div>
            <div className="w-full bg-muted rounded-full h-1.5">
              <div
                className="bg-primary h-1.5 rounded-full transition-all duration-300"
                style={{ width: `${updateProgress.percentage}%` }}
              />
            </div>
          </div>
        )}

        {/* Other stage progress (with visual indicators) */}
        {showOtherStageProgress && (
          <div className="mt-2 space-y-1">
            {/* Progress indicator for non-downloading stages */}
            <div className="w-full bg-muted rounded-full h-1.5">
              <div
                className={`h-1.5 rounded-full transition-all duration-500 ${
                  updateProgress.stage === "completed"
                    ? "bg-green-500 w-full"
                    : "bg-primary w-full animate-pulse"
                }`}
              />
            </div>
          </div>
        )}

        {!isUpdating && (
          <div className="flex items-center gap-2 mt-3">
            <Button
              onClick={() => void handleUpdateClick()}
              size="sm"
              className="flex items-center gap-2 text-xs"
            >
              <FaDownload className="h-3 w-3" />
              Update Now
            </Button>
            <Button
          <div className="flex gap-2 items-center mt-3">
            {updateInfo.manual_update_required ? (
              <RippleButton
                onClick={handleViewRelease}
                size="sm"
                className="flex gap-2 items-center text-xs"
              >
                <FaExternalLinkAlt className="w-3 h-3" />
                View Release
              </RippleButton>
            ) : (
              <RippleButton
                onClick={() => void handleUpdateClick()}
                size="sm"
                className="flex gap-2 items-center text-xs"
              >
                <FaDownload className="w-3 h-3" />
                Update Now
              </RippleButton>
            )}
            <RippleButton
              variant="outline"
              onClick={onDismiss}
              size="sm"
              className="text-xs"
            >
              Later
            </Button>
          </div>
        )}

        {updateInfo.release_notes && !isUpdating && (
          <div className="mt-2">
            <details className="text-xs">
              <summary className="cursor-pointer text-muted-foreground hover:text-foreground">
                Release Notes
              </summary>
              <div className="mt-1 text-muted-foreground whitespace-pre-wrap max-h-32 overflow-y-auto">
                {updateInfo.release_notes.length > 200
                  ? `${updateInfo.release_notes.substring(0, 200)}...`
                  : updateInfo.release_notes}
              </div>
            </details>
            </RippleButton>
          </div>
        )}
      </div>

Some files were not shown because too many files have changed in this diff.