mirror of
https://github.com/zhom/donutbrowser.git
synced 2026-05-04 09:35:11 +02:00
Compare commits
508 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| ce3e27ca64 | |||
| fd0fb8c7ca | |||
| 701c8aefd3 | |||
| d4a7c347b6 | |||
| 3c3e6df3b2 | |||
| cd4b23bd27 | |||
| 042a348971 | |||
| b8f4e4adda | |||
| e8852a3caf | |||
| 6ed1adafc8 | |||
| 22e6b2762e | |||
| bc7c8d1a1e | |||
| b133f928d4 | |||
| 02185e0480 | |||
| 6402ff302a | |||
| ed830ed789 | |||
| d03f598567 | |||
| 6aedf58264 | |||
| 636f1ea4ba | |||
| adb253e103 | |||
| e12ac66c7a | |||
| e06a824438 | |||
| 4293b7eab5 | |||
| 68b138d5ff | |||
| b79bd94506 | |||
| 181c76980a | |||
| 274b275c03 | |||
| 821cce0986 | |||
| 716a028923 | |||
| 7c25bd3ba2 | |||
| 6d89098263 | |||
| a1a1a2202e | |||
| 485daae40e | |||
| 9f22c57b7a | |||
| 45d959e407 | |||
| d75a367f39 | |||
| a48eb5d631 | |||
| 0d79f385bd | |||
| 25bb1dccdc | |||
| 97044d58fe | |||
| 4748a31714 | |||
| d91c97dd85 | |||
| 8e299fddd4 | |||
| 6c3c9fb58a | |||
| f5066e866b | |||
| e12a5661b1 | |||
| f8a4ec3277 | |||
| 1e5664e3b2 | |||
| d0fea2fec1 | |||
| ce0627030d | |||
| d70ec16706 | |||
| 5863d5549e | |||
| 4df35515ae | |||
| 59f430ec43 | |||
| 9f68a21824 | |||
| 9bf7f39c0c | |||
| d1b45778c4 | |||
| 6d6527d812 | |||
| c30df278fb | |||
| 95592b4aa1 | |||
| 58b0067b37 | |||
| 6260d78901 | |||
| efab286dad | |||
| e51e31911b | |||
| 348a727da7 | |||
| 10f8061acf | |||
| 69348a101e | |||
| f7e116f345 | |||
| 2e6bb2498b | |||
| 178f07bec7 | |||
| c6caf0633e | |||
| 29fe20af09 | |||
| 1cb8e7236d | |||
| ab256cd695 | |||
| 96c42ae55e | |||
| c98e12900f | |||
| 7a0d14642a | |||
| a1f153f4fa | |||
| ff9ad0a5ad | |||
| 3b78fea62a | |||
| 74e1642aa2 | |||
| c9d37519f7 | |||
| da9e1d1b58 | |||
| 77f93cc122 | |||
| f7ccca0075 | |||
| d7c2f08133 | |||
| 8dffd86ab9 | |||
| f3b3207489 | |||
| d7a787586d | |||
| 4a98eedba0 | |||
| 95ee807f3b | |||
| fac99f4a51 | |||
| 88cb154fca | |||
| a6af568d9e | |||
| 7c2ed1e0fc | |||
| 334f894e68 | |||
| a77b733a31 | |||
| c10c3b0f95 | |||
| 4b16341401 | |||
| 016d423d2c | |||
| 0596cc4009 | |||
| 269db678b7 | |||
| f809b975f3 | |||
| e369214715 | |||
| 5f93841bb7 | |||
| 1d71729c9e | |||
| a14da3d2f0 | |||
| 59c69c44a1 | |||
| 025523d0d3 | |||
| 76d17df281 | |||
| 727fa51a64 | |||
| 80305ef903 | |||
| 4d98606f28 | |||
| c2d083a10d | |||
| 6d1d15d366 | |||
| 2b2c855679 | |||
| e80043167f | |||
| 2ee3a90e25 | |||
| 231ac3f26c | |||
| 41c02c539f | |||
| ec78787079 | |||
| 7fc6f985dd | |||
| 5814f00f3d | |||
| 621a2dd0a1 | |||
| 3564762872 | |||
| b12d3af3bd | |||
| 32e70a5943 | |||
| 8b8ba31cce | |||
| 201e0270c7 | |||
| ceb2eec80e | |||
| f2b3b2cc69 | |||
| 8ac077d81b | |||
| dab5ab5805 | |||
| 83a7c0e394 | |||
| f622c77a3e | |||
| 9af33efb08 | |||
| 90cdf34e2b | |||
| bc2cbffcf4 | |||
| 341a461abf | |||
| 69b7963dd4 | |||
| c1079cf7b1 | |||
| 1e3f1d4668 | |||
| bb62ca350c | |||
| 1281fb3955 | |||
| ac878aed48 | |||
| 140621dcbe | |||
| 55b8955a20 | |||
| 1611c8e536 | |||
| c17bb56fec | |||
| cea8030268 | |||
| d9e3e1f3ef | |||
| d48e26c7eb | |||
| b7b75ec3d8 | |||
| 4a8b0bd407 | |||
| 8f24410f11 | |||
| ff4aa572fe | |||
| 0abea50279 | |||
| 1f90b12fe5 | |||
| bc0c31f527 | |||
| 357499168f | |||
| 7b1311f2ca | |||
| d755978b34 | |||
| bb164ce743 | |||
| daa36f008b | |||
| 92ef2798d2 | |||
| a9720676ae | |||
| fbcec2cbc1 | |||
| 5d395f606e | |||
| 6963e07be5 | |||
| c28537d304 | |||
| c303de4a8a | |||
| 21a13fb217 | |||
| 90d8e782de | |||
| ad2d9b73f2 | |||
| e645e212f2 | |||
| 7df92ae8ee | |||
| 18a0254ca7 | |||
| b0a58c3131 | |||
| 467e82ca93 | |||
| 8d9654044a | |||
| 711d94c9c1 | |||
| 305051d03d | |||
| 8695884535 | |||
| bb96936550 | |||
| 730621e5a1 | |||
| 714102eb25 | |||
| e5378c1bb7 | |||
| b7c8d5672a | |||
| 3fb81e2ca0 | |||
| 510de96393 | |||
| 3688e88d67 | |||
| 9646a41788 | |||
| f7679d25ca | |||
| 2d42772718 | |||
| 3980f835d6 | |||
| d0185dd5ae | |||
| de39fa4555 | |||
| f773eb6f1c | |||
| 458c30433d | |||
| 1cb9ffa249 | |||
| 5c58b5c644 | |||
| f41311a7bb | |||
| c8c09c296e | |||
| ca0c2614f4 | |||
| dca5a2970e | |||
| e0a1dd5a8a | |||
| e48b681215 | |||
| 6796912606 | |||
| 7105f6544f | |||
| 3003f868e7 | |||
| b733d26f10 | |||
| 675c2417d7 | |||
| e10a7bf089 | |||
| c8e3cd39ff | |||
| 0103150dc7 | |||
| be57ac3219 | |||
| b4067b5e34 | |||
| 3fa8822139 | |||
| 1e0ef0b497 | |||
| 2da832f100 | |||
| 284dbc5a3b | |||
| f328ceeb4f | |||
| 1bfbb365c5 | |||
| 1a15af1ded | |||
| ca20ccb489 | |||
| 0daba2eb9b | |||
| 1dfdfc6a21 | |||
| ee2f728194 | |||
| 702c1545bf | |||
| 53f403b82c | |||
| 2cae2824d3 | |||
| ca790c74ce | |||
| 25d4b30975 | |||
| 687fc7817f | |||
| 28818bed77 | |||
| d1d953b3e2 | |||
| c2153d463c | |||
| fa142a8cb0 | |||
| cf291fb0d1 | |||
| 5fed6b7c3f | |||
| 9ba51cd4e3 | |||
| a507a3daed | |||
| aabae8d3d4 | |||
| b7e6c1eb84 | |||
| d05f2190e8 | |||
| 34a9418474 | |||
| 63e125738d | |||
| 58d82d12c4 | |||
| b1c86709b0 | |||
| 12651f9f85 | |||
| c1815fdfdc | |||
| 1662c1efba | |||
| 4a8e905a44 | |||
| e165e35f2c | |||
| cbeb099cc9 | |||
| fef0c963cb | |||
| cd28531588 | |||
| ed913309dd | |||
| cecb4579c7 | |||
| 9cfed6d73e | |||
| a461fd4798 | |||
| 5159f943df | |||
| 72af5f682f | |||
| 6d77c872f2 | |||
| 0baecbdb0c | |||
| eb2af5c10b | |||
| 76cef4757a | |||
| 00d74bddaf | |||
| b5b08a0196 | |||
| ff35717cb5 | |||
| 669611ec68 | |||
| 8f1b84f615 | |||
| 2bf6531767 | |||
| da0af075fc | |||
| 83f4c2c162 | |||
| e675441171 | |||
| cf77d96042 | |||
| a4706a7f9a | |||
| b088ae675b | |||
| 54fd9b7282 | |||
| 77a50c60d1 | |||
| 62b9768006 | |||
| 66d3420000 | |||
| 8f05c48594 | |||
| a5709d95c7 | |||
| eef3e19d2f | |||
| 2c57920d44 | |||
| 57a36a5fc2 | |||
| a2a980d203 | |||
| 2deacbacab | |||
| 7cd7d077ae | |||
| 8679d0ca62 | |||
| 20cf9de4fa | |||
| 4202d595f2 | |||
| 3086ea0085 | |||
| 1ddbc5228c | |||
| 2d02095d4d | |||
| 4e0d985996 | |||
| 5af751a9b2 | |||
| da7f791274 | |||
| f4c33ad96e | |||
| 5b31cfaf32 | |||
| 4997854577 | |||
| a43e41a020 | |||
| b22b4cacf9 | |||
| 7f0df6f943 | |||
| dccf843952 | |||
| fc6ddb7cbf | |||
| 63000c72bd | |||
| 2fd344b9bb | |||
| 44bd34d8f0 | |||
| d3822bdd88 | |||
| ed1132bdc3 | |||
| fcae0623c0 | |||
| fe843e14f1 | |||
| b071e971b3 | |||
| 0b7cf547b3 | |||
| f024ce19ae | |||
| e1d3ff9000 | |||
| e2a168b188 | |||
| af767da32c | |||
| b5dfe1233e | |||
| dddf8e2e39 | |||
| adcb20fab9 | |||
| ff9c633b07 | |||
| 7ca76b1f78 | |||
| 4887a3db4d | |||
| e38cd2e560 | |||
| 7e7b47cae3 | |||
| 13ae170166 | |||
| df78e22650 | |||
| 328e6f16ee | |||
| 40ad32af6d | |||
| f299eeaea5 | |||
| 84142caac9 | |||
| d06dbb6c70 | |||
| cf5b498bd6 | |||
| 3c28a169bd | |||
| 25653e166b | |||
| 0b4263140d | |||
| b500c28b96 | |||
| 7c2be81531 | |||
| b55ef469ed | |||
| 76a206093d | |||
| 3e88dbc30e | |||
| 031823587e | |||
| c7a1ac228c | |||
| 8ede335bed | |||
| b170b8846d | |||
| 632d90a022 | |||
| 3bec00a2cd | |||
| 3b78971df8 | |||
| 5f9a716f62 | |||
| 4d07984d99 | |||
| 188e14e5b5 | |||
| bc1b9e9757 | |||
| e742e5fdfa | |||
| 9ce7757cb2 | |||
| 3ca454a2c5 | |||
| 689ac8e3ca | |||
| 0e1c5dcfb6 | |||
| f22a9f3557 | |||
| 5a76fe3221 | |||
| 5edad9b97c | |||
| 38556fc504 | |||
| 703ca2c50b | |||
| 198046fca9 | |||
| fdcce5c86a | |||
| 1cd1c7b59d | |||
| d803361fca | |||
| 2f6f20eb29 | |||
| 59272e0cff | |||
| cac2273ad3 | |||
| 1691a7a06b | |||
| 5a4718fba6 | |||
| 336543d06e | |||
| 73cc6c2ac5 | |||
| f4c96ec0c6 | |||
| f84b3c2812 | |||
| 29603076f7 | |||
| 76bcb73b39 | |||
| 51983bf3a5 | |||
| eda83cf439 | |||
| 7b6ea00838 | |||
| d8f07ddb11 | |||
| 1b0ebbc666 | |||
| d377809c77 | |||
| fbf36b49df | |||
| 341751c9b2 | |||
| eea227d853 | |||
| 29b6aed475 | |||
| 050f8b5353 | |||
| 8793de8c87 | |||
| 7408ec876c | |||
| fc8c358088 | |||
| b11495e3b9 | |||
| 11567ca50e | |||
| 1c2d5b3774 | |||
| 852066ef41 | |||
| 9622d85e73 | |||
| 4e2b87c5f1 | |||
| 2099dadbc0 | |||
| 00e4eb2715 | |||
| 33bc4476a4 | |||
| 0ad8988f7e | |||
| 2b3aaf1e92 | |||
| 5a10e0b696 | |||
| 9e48ddbf3e | |||
| bcbb2c1d42 | |||
| 391bfdabdc | |||
| 7b2dc84b5b | |||
| ddc09726f4 | |||
| e1451d3fbb | |||
| b18df6499f | |||
| c5c2563a4e | |||
| 8475f42821 | |||
| f51aa9ed85 | |||
| 3d3a3b3816 | |||
| e090881917 | |||
| b46976f47d | |||
| 39a978682c | |||
| 38e58e604b | |||
| ffcff2ce7c | |||
| c8ea31f85d | |||
| 7ac6e21dbc | |||
| 7533993909 | |||
| 8176f45e41 | |||
| f55a3f7155 | |||
| 7d74ac09d9 | |||
| d314fa1f71 | |||
| 968969cf1e | |||
| a7a3d99881 | |||
| 80cd2e4e7f | |||
| 6361a039bc | |||
| 8005ec90b6 | |||
| cdf30b7baa | |||
| fadef414fe | |||
| e1c55233f7 | |||
| 801a2b5732 | |||
| abe5c691ce | |||
| 2f9a17c6e0 | |||
| fcdb80f75a | |||
| 7568e7998d | |||
| e0f4f93c30 | |||
| d142b7f79b | |||
| dc5553a5d3 | |||
| 07445ff95b | |||
| 6ecbc39e46 | |||
| 67849c00d5 | |||
| bdf71e4ef8 | |||
| 2d2ebba40e | |||
| 2caac5bf4c | |||
| a816fbb140 | |||
| c954668ed1 | |||
| 2db27b5ffd | |||
| 845e9f28ad | |||
| ee8c6dcc85 | |||
| 08453fe9a6 | |||
| b486f00875 | |||
| 703154b30f | |||
| 130f8b86d1 | |||
| 607ed66e29 | |||
| 9570b6d605 | |||
| 2d92cbb0e5 | |||
| 251016609f | |||
| bddf796946 | |||
| 8d793a6868 | |||
| 469f161293 | |||
| 9756e64319 | |||
| 800544ede9 | |||
| aa2228a8aa | |||
| 432e5bff90 | |||
| f4b60eb6c7 | |||
| 30122c5781 | |||
| b71d84fda4 | |||
| 859af72724 | |||
| 0360a89ceb | |||
| cb6f744d6b | |||
| 575d7f80b1 | |||
| d05b69ff3d | |||
| 54abb11129 | |||
| 04c690c750 | |||
| 9a4be86e95 | |||
| 6d013d86aa | |||
| 769fbf9d75 | |||
| 6e62abc601 | |||
| 8848fa8130 | |||
| 1f0ecbe36e | |||
| f83f2033fe | |||
| 821cd4ea82 | |||
| d3a63c37bf | |||
| 95cd2426c3 | |||
| 5a3fb7b2b0 | |||
| 767a0701ce | |||
| ec61d51c07 | |||
| 545c518a55 | |||
| c99eee2c21 | |||
| 7f3683cc2e | |||
| baac3a533a | |||
| 5cd1774ffc | |||
| cb87641890 | |||
| 3df5ac671b | |||
| 390f79f97b | |||
| c4dc2ed50c | |||
| 3b7315cc0d | |||
| bbd0f5df0c | |||
| 8e7982bdf8 | |||
| 9ac662aee8 |
@@ -0,0 +1,6 @@
|
||||
---
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
---
|
||||
If you are modifying the UI, do not add random colors that are not controlled by src/lib/themes.ts file.
|
||||
@@ -3,4 +3,4 @@ description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
---
|
||||
Don't leave comments that don't add value
|
||||
Don't leave comments that don't add value.
|
||||
@@ -3,4 +3,4 @@ description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
---
|
||||
Do not duplicate code unless you have a very good reason to do so. It is important that the same logic is not duplicated multiple times
|
||||
Do not duplicate code unless you have a very good reason to do so. It is important that the same logic is not duplicated multiple times.
|
||||
@@ -0,0 +1,6 @@
|
||||
---
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
---
|
||||
Anytime you change nodecar's code and try to test, recompile it with "cd nodecar && pnpm build".
|
||||
@@ -0,0 +1,6 @@
|
||||
---
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
---
|
||||
If there is a global singleton of a struct, only use it inside a method while properly initializing it, unless I have explicitly specified in the request otherwise.
|
||||
+1
-20
@@ -1,4 +1,5 @@
|
||||
version: 2
|
||||
|
||||
updates:
|
||||
# Frontend dependencies (root package.json)
|
||||
- package-ecosystem: "npm"
|
||||
@@ -13,30 +14,10 @@ updates:
|
||||
frontend-dependencies:
|
||||
patterns:
|
||||
- "*"
|
||||
ignore:
|
||||
- dependency-name: "eslint"
|
||||
versions: ">= 9"
|
||||
commit-message:
|
||||
prefix: "deps"
|
||||
include: "scope"
|
||||
|
||||
# Nodecar dependencies
|
||||
- package-ecosystem: "npm"
|
||||
directory: "/nodecar"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
day: "saturday"
|
||||
time: "09:00"
|
||||
allow:
|
||||
- dependency-type: "all"
|
||||
groups:
|
||||
nodecar-dependencies:
|
||||
patterns:
|
||||
- "*"
|
||||
commit-message:
|
||||
prefix: "deps(nodecar)"
|
||||
include: "scope"
|
||||
|
||||
# Rust dependencies
|
||||
- package-ecosystem: "cargo"
|
||||
directory: "/src-tauri"
|
||||
|
||||
@@ -1,25 +0,0 @@
|
||||
name: Generate changelog
|
||||
on:
|
||||
release:
|
||||
types: [created, edited]
|
||||
|
||||
jobs:
|
||||
changelog:
|
||||
name: Generate changelog
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Generate a changelog
|
||||
uses: orhun/git-cliff-action@v4
|
||||
id: git-cliff
|
||||
with:
|
||||
args: --verbose
|
||||
env:
|
||||
OUTPUT: CHANGELOG.md
|
||||
|
||||
- name: Print the changelog
|
||||
run: cat "${{ steps.git-cliff.outputs.changelog }}"
|
||||
@@ -0,0 +1,98 @@
|
||||
name: "CodeQL"
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
push:
|
||||
branches: ["main"]
|
||||
pull_request:
|
||||
branches: ["main"]
|
||||
schedule:
|
||||
- cron: "16 13 * * 5"
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze (${{ matrix.language }})
|
||||
runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
|
||||
permissions:
|
||||
security-events: write
|
||||
packages: read
|
||||
actions: read
|
||||
contents: read
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- language: actions
|
||||
build-mode: none
|
||||
- language: javascript-typescript
|
||||
build-mode: none
|
||||
# - language: rust
|
||||
# build-mode: none
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
|
||||
|
||||
- name: Set up pnpm package manager
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda #v4.1.0
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 #v4.4.0
|
||||
with:
|
||||
node-version-file: .node-version
|
||||
cache: "pnpm"
|
||||
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 #master
|
||||
with:
|
||||
toolchain: stable
|
||||
targets: x86_64-unknown-linux-gnu
|
||||
|
||||
- name: Install system dependencies (Rust only)
|
||||
if: matrix.language == 'rust'
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libgtk-3-dev libayatana-appindicator3-dev librsvg2-dev pkg-config xdg-utils
|
||||
|
||||
- name: Rust cache
|
||||
uses: swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 #v2.8.0
|
||||
with:
|
||||
workdir: ./src-tauri
|
||||
|
||||
- name: Install banderole
|
||||
run: cargo install banderole
|
||||
|
||||
- name: Install dependencies from lockfile
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Install rust dependencies
|
||||
if: matrix.language == 'rust'
|
||||
working-directory: ./src-tauri
|
||||
run: |
|
||||
cargo build
|
||||
|
||||
- name: Build nodecar sidecar
|
||||
if: matrix.language == 'rust'
|
||||
shell: bash
|
||||
working-directory: ./nodecar
|
||||
run: |
|
||||
pnpm run build:linux-x64
|
||||
|
||||
- name: Copy nodecar binary to Tauri binaries
|
||||
if: matrix.language == 'rust'
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p src-tauri/binaries
|
||||
cp nodecar/nodecar-bin src-tauri/binaries/nodecar-x86_64-unknown-linux-gnu
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@b1e4dc3db58c9601794e22a9f6d28d45461b9dbf #v3.29.0
|
||||
with:
|
||||
queries: security-extended
|
||||
languages: ${{ matrix.language }}
|
||||
build-mode: ${{ matrix.build-mode }}
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@b1e4dc3db58c9601794e22a9f6d28d45461b9dbf #v3.29.0
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
||||
@@ -0,0 +1,21 @@
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
contrib-readme-job:
|
||||
runs-on: ubuntu-latest
|
||||
name: Automatically update the contributors list in the README
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Contribute List
|
||||
uses: akhilmhdh/contributors-readme-action@83ea0b4f1ac928fbfe88b9e8460a932a528eb79f #v2.3.11
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
@@ -13,7 +13,7 @@ jobs:
|
||||
security-scan:
|
||||
name: Security Vulnerability Scan
|
||||
if: ${{ github.actor == 'dependabot[bot]' }}
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@e69cc6c86b31f1e7e23935bbe7031b50e51082de" # v2.0.2
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@90b209d0ea55cea1da9fc0c4e65782cc6acb6e2e" # v2.2.2
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
@@ -32,29 +32,51 @@ jobs:
|
||||
if: ${{ github.actor == 'dependabot[bot]' }}
|
||||
uses: ./.github/workflows/lint-js.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
lint-rust:
|
||||
name: Lint Rust
|
||||
if: ${{ github.actor == 'dependabot[bot]' }}
|
||||
uses: ./.github/workflows/lint-rs.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
codeql:
|
||||
name: CodeQL
|
||||
uses: ./.github/workflows/codeql.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
security-events: write
|
||||
contents: read
|
||||
packages: read
|
||||
actions: read
|
||||
|
||||
spellcheck:
|
||||
name: Spell Check
|
||||
uses: ./.github/workflows/spellcheck.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
dependabot-automerge:
|
||||
name: Dependabot Automerge
|
||||
if: ${{ github.actor == 'dependabot[bot]' }}
|
||||
needs: [security-scan, lint-js, lint-rust]
|
||||
needs: [security-scan, lint-js, lint-rust, codeql, spellcheck]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Dependabot metadata
|
||||
id: metadata
|
||||
uses: dependabot/fetch-metadata@v2
|
||||
uses: dependabot/fetch-metadata@08eff52bf64351f401fb50d4972fa95b9f2c2d1b #v2.4.0
|
||||
with:
|
||||
compat-lookup: true
|
||||
github-token: "${{ secrets.GITHUB_TOKEN }}"
|
||||
|
||||
- name: Auto-merge minor and patch updates
|
||||
uses: ridedott/merge-me-action@v2
|
||||
uses: ridedott/merge-me-action@ad649157c69da4d34e601ee360de7a74ce4e2090 #v2.10.126
|
||||
with:
|
||||
GITHUB_TOKEN: ${{ secrets.SECRET_DEPENDABOT_GITHUB_TOKEN }}
|
||||
PRESET: DEPENDABOT_MINOR
|
||||
MERGE_METHOD: SQUASH
|
||||
PRESET: DEPENDABOT_MINOR
|
||||
MAXIMUM_RETRIES: 5
|
||||
timeout-minutes: 10
|
||||
|
||||
@@ -0,0 +1,19 @@
|
||||
name: Greetings
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened]
|
||||
issues:
|
||||
types: [opened]
|
||||
|
||||
jobs:
|
||||
greeting:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
issues: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: actions/first-interaction@753c925c8d1ac6fede23781875376600628d9b5d # v3.0.0
|
||||
with:
|
||||
issue-message: "Thank you for your first issue ❤️ If it's a feature request, please make sure it's clear what you want, why you want it, and how important it is to you. If you posted a bug report, please make sure it includes as much detail as possible."
|
||||
pr-message: "Welcome to the community and thank you for your first contribution ❤️ A human will review your PR shortly. Make sure that the pipelines are green, so that the PR is considered ready for a review and could be merged."
|
||||
@@ -0,0 +1,199 @@
|
||||
name: Issue Validation
|
||||
|
||||
on:
|
||||
issues:
|
||||
types: [opened]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
issues: write
|
||||
models: read
|
||||
|
||||
jobs:
|
||||
validate-issue:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
|
||||
|
||||
- name: Get issue templates
|
||||
id: get-templates
|
||||
run: |
|
||||
# Read the issue templates
|
||||
if [ -f ".github/ISSUE_TEMPLATE/01-bug-report.md" ]; then
|
||||
echo "bug-template-exists=true" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
if [ -f ".github/ISSUE_TEMPLATE/02-feature-request.md" ]; then
|
||||
echo "feature-template-exists=true" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Create issue analysis prompt
|
||||
id: create-prompt
|
||||
env:
|
||||
ISSUE_TITLE: ${{ github.event.issue.title }}
|
||||
ISSUE_BODY: ${{ github.event.issue.body }}
|
||||
ISSUE_LABELS: ${{ join(github.event.issue.labels.*.name, ', ') }}
|
||||
run: |
|
||||
cat > issue_analysis.txt << EOF
|
||||
## Issue Content to Analyze:
|
||||
|
||||
**Title:** $ISSUE_TITLE
|
||||
|
||||
**Body:**
|
||||
$ISSUE_BODY
|
||||
|
||||
**Labels:** $ISSUE_LABELS
|
||||
EOF
|
||||
|
||||
- name: Validate issue with AI
|
||||
id: validate
|
||||
uses: actions/ai-inference@a1c11829223a786afe3b5663db904a3aa1eac3a2 # v2.0.1
|
||||
with:
|
||||
prompt-file: issue_analysis.txt
|
||||
system-prompt: |
|
||||
You are an issue validation assistant for Donut Browser, an anti-detect browser.
|
||||
|
||||
Analyze the provided issue content and determine if it contains sufficient information based on these requirements:
|
||||
|
||||
**For Bug Reports, the issue should include:**
|
||||
1. Clear description of the problem
|
||||
2. Steps to reproduce the issue (numbered list preferred)
|
||||
3. Expected vs actual behavior
|
||||
4. Environment information (OS, browser version, etc.)
|
||||
5. Error messages, stack traces, or screenshots if applicable
|
||||
|
||||
**For Feature Requests, the issue should include:**
|
||||
1. Clear description of the requested feature
|
||||
2. Use case or problem it solves
|
||||
3. Proposed solution or how it should work
|
||||
4. Priority level or importance
|
||||
|
||||
**General Requirements for all issues:**
|
||||
1. Descriptive title
|
||||
2. Sufficient detail to understand and act upon
|
||||
3. Professional tone and clear communication
|
||||
|
||||
Respond in JSON format with the following structure:
|
||||
```json
|
||||
{
|
||||
"is_valid": true|false,
|
||||
"issue_type": "bug_report"|"feature_request"|"other",
|
||||
"missing_info": [
|
||||
"List of missing required information"
|
||||
],
|
||||
"suggestions": [
|
||||
"Specific suggestions for improvement"
|
||||
],
|
||||
"overall_assessment": "Brief assessment of the issue quality"
|
||||
}
|
||||
```
|
||||
|
||||
Be constructive and helpful in your feedback. If the issue is incomplete, provide specific guidance on what's needed.
|
||||
model: openai/gpt-4o
|
||||
|
||||
- name: Parse validation result and take action
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
# Prefer reading from the response file to avoid output truncation
|
||||
RESPONSE_FILE='${{ steps.validate.outputs.response-file }}'
|
||||
if [ -n "$RESPONSE_FILE" ] && [ -f "$RESPONSE_FILE" ]; then
|
||||
RAW_OUTPUT=$(cat "$RESPONSE_FILE")
|
||||
else
|
||||
RAW_OUTPUT='${{ steps.validate.outputs.response }}'
|
||||
fi
|
||||
|
||||
# Extract JSON if wrapped in markdown code fences; otherwise use raw
|
||||
JSON_RESULT=$(printf "%s" "$RAW_OUTPUT" | sed -n '/```json/,/```/p' | sed '1d;$d')
|
||||
if [ -z "$JSON_RESULT" ]; then
|
||||
JSON_RESULT="$RAW_OUTPUT"
|
||||
fi
|
||||
|
||||
# Parse JSON fields
|
||||
IS_VALID=$(echo "$JSON_RESULT" | jq -r '.is_valid // false')
|
||||
ISSUE_TYPE=$(echo "$JSON_RESULT" | jq -r '.issue_type // "other"')
|
||||
MISSING_INFO=$(echo "$JSON_RESULT" | jq -r '.missing_info[]? // empty' | sed 's/^/- /')
|
||||
SUGGESTIONS=$(echo "$JSON_RESULT" | jq -r '.suggestions[]? // empty' | sed 's/^/- /')
|
||||
ASSESSMENT=$(echo "$JSON_RESULT" | jq -r '.overall_assessment // "No assessment provided"')
|
||||
|
||||
echo "Issue validation result: $IS_VALID"
|
||||
echo "Issue type: $ISSUE_TYPE"
|
||||
|
||||
if [ "$IS_VALID" = "false" ]; then
|
||||
# Create a comment asking for more information
|
||||
cat > comment.md << EOF
|
||||
## 🤖 Issue Validation
|
||||
|
||||
Thank you for submitting this issue! However, it appears that some required information might be missing to help us better understand and address your concern.
|
||||
|
||||
**Issue Type Detected:** \`$ISSUE_TYPE\`
|
||||
|
||||
**Assessment:** $ASSESSMENT
|
||||
|
||||
### 📋 Missing Information:
|
||||
$MISSING_INFO
|
||||
|
||||
### 💡 Suggestions for Improvement:
|
||||
$SUGGESTIONS
|
||||
|
||||
### 📝 How to Provide Additional Information:
|
||||
|
||||
Please edit your original issue description to include the missing information. Here are our issue templates for reference:
|
||||
|
||||
- **Bug Report Template:** [View Template](.github/ISSUE_TEMPLATE/01-bug-report.md)
|
||||
- **Feature Request Template:** [View Template](.github/ISSUE_TEMPLATE/02-feature-request.md)
|
||||
|
||||
### 🔧 Quick Tips:
|
||||
- For **bug reports**: Include step-by-step reproduction instructions, your environment details, and any error messages
|
||||
- For **feature requests**: Describe the use case, expected behavior, and why this feature would be valuable
|
||||
- Add **screenshots** or **logs** when applicable
|
||||
|
||||
Once you've updated the issue with the missing information, feel free to remove this comment or reply to let us know you've made the updates.
|
||||
|
||||
---
|
||||
*This validation was performed automatically to ensure we have all the information needed to help you effectively.*
|
||||
EOF
|
||||
|
||||
# Post the comment
|
||||
gh issue comment ${{ github.event.issue.number }} --body-file comment.md
|
||||
|
||||
# Add a label to indicate validation needed
|
||||
gh issue edit ${{ github.event.issue.number }} --add-label "needs-info"
|
||||
|
||||
echo "✅ Validation comment posted and 'needs-info' label added"
|
||||
else
|
||||
echo "✅ Issue contains sufficient information"
|
||||
|
||||
# Prepare a summary comment even when valid
|
||||
cat > comment.md << EOF
|
||||
## 🤖 Issue Validation
|
||||
|
||||
**Issue Type Detected:** \`$ISSUE_TYPE\`
|
||||
|
||||
**Assessment:** $ASSESSMENT
|
||||
|
||||
$( [ -n "$SUGGESTIONS" ] && echo "### 💡 Suggestions:" && echo "$SUGGESTIONS" )
|
||||
|
||||
---
|
||||
*This validation was performed automatically to help triage issues.*
|
||||
EOF
|
||||
|
||||
# Post the summary comment
|
||||
gh issue comment ${{ github.event.issue.number }} --body-file comment.md
|
||||
|
||||
# Add appropriate labels based on issue type
|
||||
case "$ISSUE_TYPE" in
|
||||
"bug_report")
|
||||
gh issue edit ${{ github.event.issue.number }} --add-label "bug"
|
||||
;;
|
||||
"feature_request")
|
||||
gh issue edit ${{ github.event.issue.number }} --add-label "enhancement"
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
|
||||
- name: Cleanup
|
||||
run: |
|
||||
rm -f issue_analysis.txt comment.md
|
||||
@@ -16,6 +16,9 @@ on:
|
||||
- ".github/workflows/lint-rs.yml"
|
||||
- ".github/workflows/osv.yml"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
@@ -31,13 +34,13 @@ jobs:
|
||||
run: git config --global core.autocrlf false
|
||||
|
||||
- name: Checkout repository code
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
|
||||
|
||||
- name: Set up pnpm package manager
|
||||
uses: pnpm/action-setup@v4
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda #v4.1.0
|
||||
|
||||
- name: Set up Node.js v22
|
||||
uses: actions/setup-node@v4
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 #v4.4.0
|
||||
with:
|
||||
node-version-file: .node-version
|
||||
cache: "pnpm"
|
||||
@@ -45,10 +48,5 @@ jobs:
|
||||
- name: Install dependencies from lockfile
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Install nodecar dependencies
|
||||
working-directory: ./nodecar
|
||||
run: |
|
||||
pnpm install --frozen-lockfile
|
||||
|
||||
- name: Run lint step
|
||||
run: pnpm run lint:js
|
||||
|
||||
@@ -24,12 +24,14 @@ on:
|
||||
- "tsconfig.json"
|
||||
- "biome.json"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
fail-fast: true
|
||||
matrix:
|
||||
os: [macos-latest]
|
||||
os: [macos-latest, ubuntu-22.04]
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
@@ -39,27 +41,31 @@ jobs:
|
||||
run: git config --global core.autocrlf false
|
||||
|
||||
- name: Checkout repository code
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
|
||||
|
||||
- name: Set up pnpm package manager
|
||||
uses: pnpm/action-setup@v4
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda #v4.1.0
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 #v4.4.0
|
||||
with:
|
||||
node-version-file: .node-version
|
||||
cache: "pnpm"
|
||||
|
||||
- name: Install Rust toolchain
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 #master
|
||||
with:
|
||||
toolchain: stable
|
||||
components: rustfmt, clippy
|
||||
|
||||
- name: Install cargo-audit
|
||||
run: cargo install cargo-audit
|
||||
|
||||
- name: Install banderole
|
||||
run: cargo install banderole
|
||||
|
||||
- name: Install dependencies (Ubuntu only)
|
||||
if: matrix.os == 'ubuntu-latest'
|
||||
if: matrix.os == 'ubuntu-22.04'
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt install libwebkit2gtk-4.1-dev build-essential curl wget file libxdo-dev libssl-dev libayatana-appindicator3-dev librsvg2-dev
|
||||
@@ -67,16 +73,11 @@ jobs:
|
||||
- name: Install frontend dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Install nodecar dependencies
|
||||
working-directory: ./nodecar
|
||||
run: |
|
||||
pnpm install --frozen-lockfile
|
||||
|
||||
- name: Build nodecar binary
|
||||
shell: bash
|
||||
working-directory: ./nodecar
|
||||
run: |
|
||||
if [[ "${{ matrix.os }}" == "ubuntu-latest" ]]; then
|
||||
if [[ "${{ matrix.os }}" == "ubuntu-22.04" ]]; then
|
||||
pnpm run build:linux-x64
|
||||
elif [[ "${{ matrix.os }}" == "macos-latest" ]]; then
|
||||
pnpm run build:mac-aarch64
|
||||
@@ -84,16 +85,21 @@ jobs:
|
||||
pnpm run build:win-x64
|
||||
fi
|
||||
|
||||
# TODO: replace with an integration test that fetches everything from rust
|
||||
# - name: Download Camoufox for testing
|
||||
# run: npx camoufox-js fetch
|
||||
# continue-on-error: true
|
||||
|
||||
- name: Copy nodecar binary to Tauri binaries
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p src-tauri/binaries
|
||||
if [[ "${{ matrix.os }}" == "ubuntu-latest" ]]; then
|
||||
cp nodecar/dist/nodecar src-tauri/binaries/nodecar-x86_64-unknown-linux-gnu
|
||||
if [[ "${{ matrix.os }}" == "ubuntu-22.04" ]]; then
|
||||
cp nodecar/nodecar-bin src-tauri/binaries/nodecar-x86_64-unknown-linux-gnu
|
||||
elif [[ "${{ matrix.os }}" == "macos-latest" ]]; then
|
||||
cp nodecar/dist/nodecar src-tauri/binaries/nodecar-aarch64-apple-darwin
|
||||
cp nodecar/nodecar-bin src-tauri/binaries/nodecar-aarch64-apple-darwin
|
||||
elif [[ "${{ matrix.os }}" == "windows-latest" ]]; then
|
||||
cp nodecar/dist/nodecar.exe src-tauri/binaries/nodecar-x86_64-pc-windows-msvc.exe
|
||||
cp nodecar/nodecar-bin.exe src-tauri/binaries/nodecar-x86_64-pc-windows-msvc.exe
|
||||
fi
|
||||
|
||||
- name: Create empty 'dist' directory
|
||||
@@ -107,7 +113,7 @@ jobs:
|
||||
run: cargo clippy --all-targets --all-features -- -D warnings -D clippy::all
|
||||
working-directory: src-tauri
|
||||
|
||||
- name: Run Rust unit tests
|
||||
- name: Run Rust tests
|
||||
run: cargo test
|
||||
working-directory: src-tauri
|
||||
|
||||
|
||||
@@ -50,7 +50,7 @@ jobs:
|
||||
scan-scheduled:
|
||||
name: Scheduled Security Scan
|
||||
if: ${{ github.event_name == 'push' || github.event_name == 'schedule' }}
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@e69cc6c86b31f1e7e23935bbe7031b50e51082de" # v2.0.2
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@90b209d0ea55cea1da9fc0c4e65782cc6acb6e2e" # v2.2.2
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
@@ -63,7 +63,7 @@ jobs:
|
||||
scan-pr:
|
||||
name: PR Security Scan
|
||||
if: ${{ github.event_name == 'pull_request' || github.event_name == 'merge_group' }}
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@e69cc6c86b31f1e7e23935bbe7031b50e51082de" # v2.0.2
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@90b209d0ea55cea1da9fc0c4e65782cc6acb6e2e" # v2.2.2
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
|
||||
@@ -16,16 +16,20 @@ jobs:
|
||||
name: Lint JavaScript/TypeScript
|
||||
uses: ./.github/workflows/lint-js.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
lint-rust:
|
||||
name: Lint Rust
|
||||
uses: ./.github/workflows/lint-rs.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
security-scan:
|
||||
name: Security Vulnerability Scan
|
||||
if: ${{ github.event_name == 'pull_request' || github.event_name == 'merge_group' }}
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@e69cc6c86b31f1e7e23935bbe7031b50e51082de" # v2.0.2
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@90b209d0ea55cea1da9fc0c4e65782cc6acb6e2e" # v2.2.2
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
|
||||
@@ -0,0 +1,118 @@
|
||||
name: Generate Release Notes
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
models: read
|
||||
|
||||
jobs:
|
||||
generate-release-notes:
|
||||
runs-on: ubuntu-latest
|
||||
if: startsWith(github.ref, 'refs/tags/v') && !github.event.release.prerelease
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
|
||||
with:
|
||||
fetch-depth: 0 # Fetch full history to compare with previous release
|
||||
|
||||
- name: Get previous release tag
|
||||
id: get-previous-tag
|
||||
run: |
|
||||
# Get the previous release tag (excluding the current one)
|
||||
CURRENT_TAG="${{ github.ref_name }}"
|
||||
PREVIOUS_TAG=$(git tag --sort=-version:refname | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' | grep -v "$CURRENT_TAG" | head -n 1)
|
||||
|
||||
if [ -z "$PREVIOUS_TAG" ]; then
|
||||
echo "No previous release found, using initial commit"
|
||||
PREVIOUS_TAG=$(git rev-list --max-parents=0 HEAD)
|
||||
fi
|
||||
|
||||
echo "current-tag=$CURRENT_TAG" >> $GITHUB_OUTPUT
|
||||
echo "previous-tag=$PREVIOUS_TAG" >> $GITHUB_OUTPUT
|
||||
echo "Previous release: $PREVIOUS_TAG"
|
||||
echo "Current release: $CURRENT_TAG"
|
||||
|
||||
- name: Get commit messages between releases
|
||||
id: get-commits
|
||||
run: |
|
||||
# Get commit messages between previous and current release
|
||||
PREVIOUS_TAG="${{ steps.get-previous-tag.outputs.previous-tag }}"
|
||||
CURRENT_TAG="${{ steps.get-previous-tag.outputs.current-tag }}"
|
||||
|
||||
# Get commit log with detailed format
|
||||
COMMIT_LOG=$(git log --pretty=format:"- %s (%h by %an)" $PREVIOUS_TAG..$CURRENT_TAG --no-merges)
|
||||
|
||||
# Get changed files summary
|
||||
CHANGED_FILES=$(git diff --name-status $PREVIOUS_TAG..$CURRENT_TAG | head -20)
|
||||
|
||||
# Save to files for AI processing
|
||||
echo "$COMMIT_LOG" > commits.txt
|
||||
echo "$CHANGED_FILES" > changes.txt
|
||||
|
||||
echo "commits-file=commits.txt" >> $GITHUB_OUTPUT
|
||||
echo "changes-file=changes.txt" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Generate release notes with AI
|
||||
id: generate-notes
|
||||
uses: actions/ai-inference@a1c11829223a786afe3b5663db904a3aa1eac3a2 # v2.0.1
|
||||
with:
|
||||
prompt-file: commits.txt
|
||||
system-prompt: |
|
||||
You are an expert technical writer tasked with generating comprehensive release notes for Donut Browser, a powerful anti-detect browser.
|
||||
|
||||
Analyze the provided commit messages and generate well-structured release notes following this format:
|
||||
|
||||
## What's New in ${{ steps.get-previous-tag.outputs.current-tag }}
|
||||
|
||||
[Brief 1-2 sentence overview of the release]
|
||||
|
||||
### ✨ New Features
|
||||
[List new features with brief descriptions]
|
||||
|
||||
### 🐛 Bug Fixes
|
||||
[List bug fixes]
|
||||
|
||||
### 🔧 Improvements
|
||||
[List improvements and enhancements]
|
||||
|
||||
### 📚 Documentation
|
||||
[List documentation updates if any]
|
||||
|
||||
### 🔄 Dependencies
|
||||
[List dependency updates if any]
|
||||
|
||||
### 🛠️ Developer Experience
|
||||
[List development-related changes if any]
|
||||
|
||||
Guidelines:
|
||||
- Use clear, user-friendly language
|
||||
- Group related commits logically
|
||||
- Omit minor commits like formatting, typos unless significant
|
||||
- Focus on user-facing changes
|
||||
- Use emojis sparingly and consistently
|
||||
- Keep descriptions concise but informative
|
||||
- If commits are unclear, infer the purpose from the context
|
||||
|
||||
The application is a desktop app built with Tauri + Next.js that helps users manage multiple browser profiles with proxy support.
|
||||
model: gpt-4o
|
||||
|
||||
- name: Update release with generated notes
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
# Get the generated release notes
|
||||
RELEASE_NOTES="${{ steps.generate-notes.outputs.response }}"
|
||||
|
||||
# Update the release with the generated notes
|
||||
gh api --method PATCH /repos/${{ github.repository }}/releases/${{ github.event.release.id }} \
|
||||
--field body="$RELEASE_NOTES"
|
||||
|
||||
echo "✅ Release notes updated successfully!"
|
||||
|
||||
- name: Cleanup
|
||||
run: |
|
||||
rm -f commits.txt changes.txt
|
||||
@@ -13,7 +13,7 @@ env:
|
||||
jobs:
|
||||
security-scan:
|
||||
name: Security Vulnerability Scan
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@e69cc6c86b31f1e7e23935bbe7031b50e51082de" # v2.0.2
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@90b209d0ea55cea1da9fc0c4e65782cc6acb6e2e" # v2.2.2
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
@@ -31,14 +31,35 @@ jobs:
|
||||
name: Lint JavaScript/TypeScript
|
||||
uses: ./.github/workflows/lint-js.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
lint-rust:
|
||||
name: Lint Rust
|
||||
uses: ./.github/workflows/lint-rs.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
codeql:
|
||||
name: CodeQL
|
||||
uses: ./.github/workflows/codeql.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
security-events: write
|
||||
contents: read
|
||||
packages: read
|
||||
actions: read
|
||||
|
||||
spellcheck:
|
||||
name: Spell Check
|
||||
uses: ./.github/workflows/spellcheck.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
release:
|
||||
needs: [security-scan, lint-js, lint-rust]
|
||||
needs: [security-scan, lint-js, lint-rust, codeql, spellcheck]
|
||||
permissions:
|
||||
contents: write
|
||||
strategy:
|
||||
@@ -46,37 +67,37 @@ jobs:
|
||||
matrix:
|
||||
include:
|
||||
- platform: "macos-latest"
|
||||
args: "--target aarch64-apple-darwin"
|
||||
args: "--target aarch64-apple-darwin --verbose"
|
||||
arch: "aarch64"
|
||||
target: "aarch64-apple-darwin"
|
||||
pkg_target: "latest-macos-arm64"
|
||||
nodecar_script: "build:mac-aarch64"
|
||||
- platform: "macos-latest"
|
||||
args: "--target x86_64-apple-darwin"
|
||||
args: "--target x86_64-apple-darwin --verbose"
|
||||
arch: "x86_64"
|
||||
target: "x86_64-apple-darwin"
|
||||
pkg_target: "latest-macos-x64"
|
||||
nodecar_script: "build:mac-x86_64"
|
||||
- platform: "ubuntu-22.04"
|
||||
args: "--target x86_64-unknown-linux-gnu"
|
||||
args: "--target x86_64-unknown-linux-gnu --verbose"
|
||||
arch: "x86_64"
|
||||
target: "x86_64-unknown-linux-gnu"
|
||||
pkg_target: "latest-linux-x64"
|
||||
nodecar_script: "build:linux-x64"
|
||||
- platform: "ubuntu-22.04-arm"
|
||||
args: "--target aarch64-unknown-linux-gnu"
|
||||
args: "--target aarch64-unknown-linux-gnu --verbose"
|
||||
arch: "aarch64"
|
||||
target: "aarch64-unknown-linux-gnu"
|
||||
pkg_target: "latest-linux-arm64"
|
||||
nodecar_script: "build:linux-arm64"
|
||||
# - platform: "windows-latest"
|
||||
# args: "--target x86_64-pc-windows-msvc"
|
||||
# args: "--target x86_64-pc-windows-msvc --verbose"
|
||||
# arch: "x86_64"
|
||||
# target: "x86_64-pc-windows-msvc"
|
||||
# pkg_target: "latest-win-x64"
|
||||
# nodecar_script: "build:win-x64"
|
||||
# - platform: "windows-11-arm"
|
||||
# args: "--target aarch64-pc-windows-msvc"
|
||||
# args: "--target aarch64-pc-windows-msvc --verbose"
|
||||
# arch: "aarch64"
|
||||
# target: "aarch64-pc-windows-msvc"
|
||||
# pkg_target: "latest-win-arm64"
|
||||
@@ -84,19 +105,20 @@ jobs:
|
||||
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 #v4.4.0
|
||||
with:
|
||||
node-version-file: .node-version
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda #v4.1.0
|
||||
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 #master
|
||||
with:
|
||||
toolchain: stable
|
||||
targets: ${{ matrix.target }}
|
||||
|
||||
- name: Install dependencies (Ubuntu only)
|
||||
@@ -106,18 +128,16 @@ jobs:
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libgtk-3-dev libayatana-appindicator3-dev librsvg2-dev pkg-config xdg-utils
|
||||
|
||||
- name: Rust cache
|
||||
uses: swatinem/rust-cache@v2
|
||||
uses: swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 #v2.8.0
|
||||
with:
|
||||
workdir: ./src-tauri
|
||||
|
||||
- name: Install banderole
|
||||
run: cargo install banderole
|
||||
|
||||
- name: Install frontend dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Install nodecar dependencies
|
||||
working-directory: ./nodecar
|
||||
run: |
|
||||
pnpm install --frozen-lockfile
|
||||
|
||||
- name: Build nodecar sidecar
|
||||
shell: bash
|
||||
working-directory: ./nodecar
|
||||
@@ -129,16 +149,20 @@ jobs:
|
||||
run: |
|
||||
mkdir -p src-tauri/binaries
|
||||
if [[ "${{ matrix.platform }}" == "windows-latest" ]]; then
|
||||
cp nodecar/dist/nodecar.exe src-tauri/binaries/nodecar-${{ matrix.target }}.exe
|
||||
cp nodecar/nodecar-bin src-tauri/binaries/nodecar-${{ matrix.target }}.exe
|
||||
else
|
||||
cp nodecar/dist/nodecar src-tauri/binaries/nodecar-${{ matrix.target }}
|
||||
cp nodecar/nodecar-bin src-tauri/binaries/nodecar-${{ matrix.target }}
|
||||
fi
|
||||
|
||||
# - name: Download Camoufox for testing
|
||||
# run: npx camoufox-js fetch
|
||||
# continue-on-error: true
|
||||
|
||||
- name: Build frontend
|
||||
run: pnpm build
|
||||
|
||||
- name: Build Tauri app
|
||||
uses: tauri-apps/tauri-action@v0
|
||||
uses: tauri-apps/tauri-action@e834788a94591d81e3ae0bd9ec06366f5afb8994 #v0.5.23
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GITHUB_REF_NAME: ${{ github.ref_name }}
|
||||
@@ -149,3 +173,9 @@ jobs:
|
||||
releaseDraft: false
|
||||
prerelease: false
|
||||
args: ${{ matrix.args }}
|
||||
|
||||
# - name: Commit CHANGELOG.md
|
||||
# uses: stefanzweifel/git-auto-commit-action@778341af668090896ca464160c2def5d1d1a3eb0 #v6.0.1
|
||||
# with:
|
||||
# branch: main
|
||||
# commit_message: "docs: update CHANGELOG.md for ${{ github.ref_name }} [skip ci]"
|
||||
|
||||
@@ -12,7 +12,7 @@ env:
|
||||
jobs:
|
||||
security-scan:
|
||||
name: Security Vulnerability Scan
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@e69cc6c86b31f1e7e23935bbe7031b50e51082de" # v2.0.2
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@90b209d0ea55cea1da9fc0c4e65782cc6acb6e2e" # v2.2.2
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
@@ -30,14 +30,35 @@ jobs:
|
||||
name: Lint JavaScript/TypeScript
|
||||
uses: ./.github/workflows/lint-js.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
lint-rust:
|
||||
name: Lint Rust
|
||||
uses: ./.github/workflows/lint-rs.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
codeql:
|
||||
name: CodeQL
|
||||
uses: ./.github/workflows/codeql.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
security-events: write
|
||||
contents: read
|
||||
packages: read
|
||||
actions: read
|
||||
|
||||
spellcheck:
|
||||
name: Spell Check
|
||||
uses: ./.github/workflows/spellcheck.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
rolling-release:
|
||||
needs: [security-scan, lint-js, lint-rust]
|
||||
needs: [security-scan, lint-js, lint-rust, codeql, spellcheck]
|
||||
permissions:
|
||||
contents: write
|
||||
strategy:
|
||||
@@ -45,57 +66,58 @@ jobs:
|
||||
matrix:
|
||||
include:
|
||||
- platform: "macos-latest"
|
||||
args: "--target aarch64-apple-darwin"
|
||||
args: "--target aarch64-apple-darwin --verbose"
|
||||
arch: "aarch64"
|
||||
target: "aarch64-apple-darwin"
|
||||
pkg_target: "latest-macos-arm64"
|
||||
nodecar_script: "build:mac-aarch64"
|
||||
- platform: "macos-latest"
|
||||
args: "--target x86_64-apple-darwin"
|
||||
args: "--target x86_64-apple-darwin --verbose"
|
||||
arch: "x86_64"
|
||||
target: "x86_64-apple-darwin"
|
||||
pkg_target: "latest-macos-x64"
|
||||
nodecar_script: "build:mac-x86_64"
|
||||
- platform: "ubuntu-22.04"
|
||||
args: "--target x86_64-unknown-linux-gnu"
|
||||
args: "--target x86_64-unknown-linux-gnu --verbose"
|
||||
arch: "x86_64"
|
||||
target: "x86_64-unknown-linux-gnu"
|
||||
pkg_target: "latest-linux-x64"
|
||||
nodecar_script: "build:linux-x64"
|
||||
- platform: "ubuntu-22.04-arm"
|
||||
args: "--target aarch64-unknown-linux-gnu"
|
||||
args: "--target aarch64-unknown-linux-gnu --verbose"
|
||||
arch: "aarch64"
|
||||
target: "aarch64-unknown-linux-gnu"
|
||||
pkg_target: "latest-linux-arm64"
|
||||
nodecar_script: "build:linux-arm64"
|
||||
- platform: "windows-latest"
|
||||
args: "--target x86_64-pc-windows-msvc"
|
||||
args: "--target x86_64-pc-windows-msvc --verbose"
|
||||
arch: "x86_64"
|
||||
target: "x86_64-pc-windows-msvc"
|
||||
pkg_target: "latest-win-x64"
|
||||
nodecar_script: "build:win-x64"
|
||||
- platform: "windows-11-arm"
|
||||
args: "--target aarch64-pc-windows-msvc"
|
||||
arch: "aarch64"
|
||||
target: "aarch64-pc-windows-msvc"
|
||||
pkg_target: "latest-win-arm64"
|
||||
nodecar_script: "build:win-arm64"
|
||||
# - platform: "windows-11-arm"
|
||||
# args: "--target aarch64-pc-windows-msvc --verbose"
|
||||
# arch: "aarch64"
|
||||
# target: "aarch64-pc-windows-msvc"
|
||||
# pkg_target: "latest-win-arm64"
|
||||
# nodecar_script: "build:win-arm64"
|
||||
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 #v4.4.0
|
||||
with:
|
||||
node-version-file: .node-version
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda #v4.1.0
|
||||
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
uses: dtolnay/rust-toolchain@e97e2d8cc328f1b50210efc529dca0028893a2d9 #master
|
||||
with:
|
||||
toolchain: stable
|
||||
targets: ${{ matrix.target }}
|
||||
|
||||
- name: Install dependencies (Ubuntu only)
|
||||
@@ -105,18 +127,16 @@ jobs:
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libgtk-3-dev libayatana-appindicator3-dev librsvg2-dev pkg-config xdg-utils
|
||||
|
||||
- name: Rust cache
|
||||
uses: swatinem/rust-cache@v2
|
||||
uses: swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 #v2.8.0
|
||||
with:
|
||||
workdir: ./src-tauri
|
||||
|
||||
- name: Install banderole
|
||||
run: cargo install banderole
|
||||
|
||||
- name: Install frontend dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Install nodecar dependencies
|
||||
working-directory: ./nodecar
|
||||
run: |
|
||||
pnpm install --frozen-lockfile
|
||||
|
||||
- name: Build nodecar sidecar
|
||||
shell: bash
|
||||
working-directory: ./nodecar
|
||||
@@ -128,11 +148,15 @@ jobs:
|
||||
run: |
|
||||
mkdir -p src-tauri/binaries
|
||||
if [[ "${{ matrix.platform }}" == "windows-latest" ]]; then
|
||||
cp nodecar/dist/nodecar.exe src-tauri/binaries/nodecar-${{ matrix.target }}.exe
|
||||
cp nodecar/nodecar-bin src-tauri/binaries/nodecar-${{ matrix.target }}.exe
|
||||
else
|
||||
cp nodecar/dist/nodecar src-tauri/binaries/nodecar-${{ matrix.target }}
|
||||
cp nodecar/nodecar-bin src-tauri/binaries/nodecar-${{ matrix.target }}
|
||||
fi
|
||||
|
||||
# - name: Download Camoufox for testing
|
||||
# run: npx camoufox-js fetch
|
||||
# continue-on-error: true
|
||||
|
||||
- name: Build frontend
|
||||
run: pnpm build
|
||||
|
||||
@@ -146,7 +170,7 @@ jobs:
|
||||
echo "Generated timestamp: ${TIMESTAMP}-${COMMIT_HASH}"
|
||||
|
||||
- name: Build Tauri app
|
||||
uses: tauri-apps/tauri-action@v0
|
||||
uses: tauri-apps/tauri-action@e834788a94591d81e3ae0bd9ec06366f5afb8994 #v0.5.23
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
BUILD_TAG: "nightly-${{ steps.timestamp.outputs.timestamp }}"
|
||||
|
||||
@@ -0,0 +1,26 @@
|
||||
name: Spell Check
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
push:
|
||||
branches: ["main"]
|
||||
pull_request:
|
||||
branches: ["main"]
|
||||
|
||||
env:
|
||||
RUST_BACKTRACE: 1
|
||||
CARGO_TERM_COLOR: always
|
||||
CLICOLOR: 1
|
||||
|
||||
jobs:
|
||||
spelling:
|
||||
name: Spell Check with Typos
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout Actions Repository
|
||||
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 #v5.0.0
|
||||
- name: Spell Check Repo
|
||||
uses: crate-ci/typos@65f69f021b736bdbe548ce72200500752d42b40e #v1.35.7
|
||||
@@ -0,0 +1,21 @@
|
||||
name: Mark stale issues and pull requests
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "35 23 * * *"
|
||||
|
||||
jobs:
|
||||
stale:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
issues: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/stale@5bef64f19d7facfb25b37b414482c7164d639639 # v9.1.0
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
stale-issue-message: "This issue has been inactive for 60 days. Please respond to keep it open."
|
||||
stale-pr-message: "This pull request has been inactive for 60 days. Please respond to keep it open."
|
||||
stale-issue-label: "stale"
|
||||
stale-pr-label: "stale"
|
||||
+3
-3
@@ -46,7 +46,7 @@ yarn-error.log*
|
||||
# typescript
|
||||
*.tsbuildinfo
|
||||
|
||||
# eslint
|
||||
.eslintcache
|
||||
!**/.gitkeep
|
||||
|
||||
!**/.gitkeep
|
||||
# nodecar
|
||||
nodecar/nodecar-bin
|
||||
Vendored
+120
-2
@@ -1,34 +1,79 @@
|
||||
{
|
||||
"cSpell.words": [
|
||||
"ABORTIFHUNG",
|
||||
"adwaita",
|
||||
"ahooks",
|
||||
"akhilmhdh",
|
||||
"appimage",
|
||||
"appindicator",
|
||||
"applescript",
|
||||
"asyncio",
|
||||
"autoconfig",
|
||||
"autologin",
|
||||
"biomejs",
|
||||
"breezedark",
|
||||
"browserforge",
|
||||
"busctl",
|
||||
"CAMOU",
|
||||
"camoufox",
|
||||
"cdylib",
|
||||
"certifi",
|
||||
"CFURL",
|
||||
"checkin",
|
||||
"chrono",
|
||||
"ciphertext",
|
||||
"CLICOLOR",
|
||||
"clippy",
|
||||
"cmdk",
|
||||
"codegen",
|
||||
"codesign",
|
||||
"commitish",
|
||||
"CTYPE",
|
||||
"daijro",
|
||||
"dataclasses",
|
||||
"datareporting",
|
||||
"datas",
|
||||
"DBAPI",
|
||||
"dconf",
|
||||
"devedition",
|
||||
"distro",
|
||||
"doctest",
|
||||
"doesn",
|
||||
"domcontentloaded",
|
||||
"donutbrowser",
|
||||
"doorhanger",
|
||||
"dpkg",
|
||||
"dtolnay",
|
||||
"dyld",
|
||||
"elif",
|
||||
"errorlevel",
|
||||
"esac",
|
||||
"esbuild",
|
||||
"eslintcache",
|
||||
"etree",
|
||||
"flate",
|
||||
"frontmost",
|
||||
"geoip",
|
||||
"getcwd",
|
||||
"gettimezone",
|
||||
"gifs",
|
||||
"gsettings",
|
||||
"healthreport",
|
||||
"hiddenimports",
|
||||
"hkcu",
|
||||
"hooksconfig",
|
||||
"hookspath",
|
||||
"icns",
|
||||
"idlelib",
|
||||
"idletime",
|
||||
"idna",
|
||||
"Inno",
|
||||
"kdeglobals",
|
||||
"keras",
|
||||
"KHTML",
|
||||
"Kolkata",
|
||||
"kreadconfig",
|
||||
"launchservices",
|
||||
"letterboxing",
|
||||
"libatk",
|
||||
"libayatana",
|
||||
"libcairo",
|
||||
@@ -37,43 +82,116 @@
|
||||
"libpango",
|
||||
"librsvg",
|
||||
"libwebkit",
|
||||
"libxdo",
|
||||
"localtime",
|
||||
"lpdw",
|
||||
"lxml",
|
||||
"lzma",
|
||||
"Matchalk",
|
||||
"mmdb",
|
||||
"mountpoint",
|
||||
"msiexec",
|
||||
"msvc",
|
||||
"msys",
|
||||
"Mullvad",
|
||||
"mullvadbrowser",
|
||||
"mypy",
|
||||
"noarchive",
|
||||
"nobrowse",
|
||||
"noconfirm",
|
||||
"nodecar",
|
||||
"nodemon",
|
||||
"norestart",
|
||||
"NSIS",
|
||||
"ntlm",
|
||||
"numpy",
|
||||
"objc",
|
||||
"orhun",
|
||||
"orjson",
|
||||
"osascript",
|
||||
"oscpu",
|
||||
"outpath",
|
||||
"pathex",
|
||||
"pathlib",
|
||||
"peerconnection",
|
||||
"pids",
|
||||
"pixbuf",
|
||||
"pkexec",
|
||||
"pkill",
|
||||
"plasmohq",
|
||||
"platformdirs",
|
||||
"prefs",
|
||||
"propertylist",
|
||||
"psutil",
|
||||
"pycache",
|
||||
"pydantic",
|
||||
"pyee",
|
||||
"pyinstaller",
|
||||
"pyoxidizer",
|
||||
"pytest",
|
||||
"pyyaml",
|
||||
"reqwest",
|
||||
"ridedott",
|
||||
"rlib",
|
||||
"rustc",
|
||||
"SARIF",
|
||||
"scipy",
|
||||
"screeninfo",
|
||||
"selectables",
|
||||
"serde",
|
||||
"SETTINGCHANGE",
|
||||
"setuptools",
|
||||
"shadcn",
|
||||
"showcursor",
|
||||
"shutil",
|
||||
"signon",
|
||||
"signum",
|
||||
"sklearn",
|
||||
"SMTO",
|
||||
"sonner",
|
||||
"splitn",
|
||||
"sspi",
|
||||
"staticlib",
|
||||
"stefanzweifel",
|
||||
"subdirs",
|
||||
"subkey",
|
||||
"SUPPRESSMSGBOXES",
|
||||
"swatinem",
|
||||
"sysinfo",
|
||||
"systempreferences",
|
||||
"systemsetup",
|
||||
"taskkill",
|
||||
"tasklist",
|
||||
"tauri",
|
||||
"TERX",
|
||||
"testpass",
|
||||
"testuser",
|
||||
"timedatectl",
|
||||
"titlebar",
|
||||
"tkinter",
|
||||
"Torbrowser",
|
||||
"tqdm",
|
||||
"trackingprotection",
|
||||
"turbopack",
|
||||
"turtledemo",
|
||||
"udeps",
|
||||
"unlisten",
|
||||
"unminimize",
|
||||
"unrs",
|
||||
"urlencoding",
|
||||
"urllib",
|
||||
"venv",
|
||||
"vercel",
|
||||
"VERYSILENT",
|
||||
"webgl",
|
||||
"webrtc",
|
||||
"winreg",
|
||||
"wiremock",
|
||||
"xattr",
|
||||
"zhom"
|
||||
"xfconf",
|
||||
"xsettings",
|
||||
"zhom",
|
||||
"zipball",
|
||||
"zoneinfo"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
# Instructions for AI Agents
|
||||
|
||||
- After your changes, instead of running specific tests or linting specific files, run "pnpm format && pnpm lint && pnpm test". It means that you first format the code, then lint it, then test it, so that no part is broken after your changes.
|
||||
- Don't leave comments that don't add value
|
||||
- Do not duplicate code unless you have a very good reason to do so. It is important that the same logic is not duplicated multiple times
|
||||
- Don't leave comments that don't add value.
|
||||
- Do not duplicate code unless you have a very good reason to do so. It is important that the same logic is not duplicated multiple times.
|
||||
- Before finishing the task and showing summary, always run "pnpm format && pnpm lint && pnpm test" at the root of the project to ensure that you don't finish with broken application.
|
||||
- Anytime you change nodecar's code and try to test, recompile it with "cd nodecar && pnpm build".
|
||||
- If there is a global singleton of a struct, only use it inside a method while properly initializing it, unless I have explicitly specified in the request otherwise.
|
||||
- If you are modifying the UI, do not add random colors that are not controlled by src/lib/themes.ts file.
|
||||
+1
-1
@@ -23,6 +23,6 @@ Examples of unacceptable behavior by participants include:
|
||||
|
||||
## Enforcement
|
||||
|
||||
Violations of the Code of Conduct may be reported by pinging @zhom on Github. All reports will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. Further details of specific enforcement policies may be posted separately.
|
||||
Violations of the Code of Conduct may be reported to contact at donutbrowser dot com. All reports will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. Further details of specific enforcement policies may be posted separately.
|
||||
|
||||
We hold the right and responsibility to remove comments or other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any members for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
|
||||
|
||||
+6
-42
@@ -26,6 +26,7 @@ Ensure you have the following dependencies installed:
|
||||
- Node.js (see `.node-version` for exact version)
|
||||
- pnpm package manager
|
||||
- Latest Rust and Cargo toolchain
|
||||
- [Banderole](https://github.com/zhom/banderole)
|
||||
- [Tauri prerequisites guide](https://v2.tauri.app/start/prerequisites/).
|
||||
|
||||
## Run Locally
|
||||
@@ -46,12 +47,13 @@ After having the above dependencies installed, proceed through the following ste
|
||||
pnpm install
|
||||
```
|
||||
|
||||
4. **Install nodecar dependencies**
|
||||
4. **Build nodecar**
|
||||
|
||||
Building nodecar requires you to have `banderole` installed.
|
||||
|
||||
```bash
|
||||
cd nodecar
|
||||
pnpm install --frozen-lockfile
|
||||
cd ..
|
||||
pnpm build
|
||||
```
|
||||
|
||||
5. **Start the development server**
|
||||
@@ -105,7 +107,6 @@ Make sure the build completes successfully without errors.
|
||||
## Testing
|
||||
|
||||
- Always test your changes on the target platform
|
||||
- Test both development and production builds
|
||||
- Verify that existing functionality still works
|
||||
- Add tests for new features when possible
|
||||
|
||||
@@ -149,50 +150,13 @@ Refs #00000
|
||||
|
||||
- Ensure that "Allow edits from maintainers" option is checked
|
||||
|
||||
## Types of Contributions
|
||||
|
||||
### Bug Reports
|
||||
|
||||
When filing bug reports, please include:
|
||||
|
||||
- Clear description of the issue
|
||||
- Steps to reproduce
|
||||
- Expected vs actual behavior
|
||||
- Environment details (OS, version, etc.)
|
||||
- Screenshots or error logs if applicable
|
||||
|
||||
### Feature Requests
|
||||
|
||||
When suggesting new features:
|
||||
|
||||
- Explain the use case and why it's valuable
|
||||
- Describe the desired behavior
|
||||
- Consider alternatives you've thought of
|
||||
- Check if it aligns with our roadmap
|
||||
|
||||
### Code Contributions
|
||||
|
||||
- Bug fixes
|
||||
- New features
|
||||
- Performance improvements
|
||||
- Documentation updates
|
||||
- Test coverage improvements
|
||||
|
||||
### Documentation
|
||||
|
||||
- README improvements
|
||||
- Code comments
|
||||
- API documentation
|
||||
- Tutorial content
|
||||
- Translation work
|
||||
|
||||
## Architecture Overview
|
||||
|
||||
Donut Browser is built with:
|
||||
|
||||
- **Frontend**: Next.js React application
|
||||
- **Backend**: Tauri (Rust) for native functionality
|
||||
- **Node.js Sidecar**: `nodecar` binary for proxy support
|
||||
- **Node.js Sidecar**: `nodecar` binary for access to JavaScript ecosystem
|
||||
- **Build System**: GitHub Actions for CI/CD
|
||||
|
||||
Understanding this architecture will help you contribute more effectively.
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
<div align="center">
|
||||
<img src="assets/logo.png" alt="Donut Browser Logo" width="150">
|
||||
<h1>Donut Browser</h1>
|
||||
<strong>A powerful browser orchestrator that puts you in control of your browsing experience. 🍩</strong>
|
||||
<strong>A powerful anti-detect browser that puts you in control of your browsing experience. 🍩</strong>
|
||||
</div>
|
||||
<br>
|
||||
|
||||
@@ -17,23 +17,27 @@
|
||||
<a href="https://app.codacy.com/gh/zhom/donutbrowser/dashboard?utm_source=gh&utm_medium=referral&utm_content=&utm_campaign=Badge_grade">
|
||||
<img src="https://app.codacy.com/project/badge/Grade/b9c9beafc92d4bc8bc7c5b42c6c4ba81"/>
|
||||
</a>
|
||||
<a href="https://app.fossa.com/projects/git%2Bgithub.com%2Fzhom%2Fdonutbrowser?ref=badge_shield&issueType=security" alt="FOSSA Status">
|
||||
<img src="https://app.fossa.com/api/projects/git%2Bgithub.com%2Fzhom%2Fdonutbrowser.svg?type=shield&issueType=security"/>
|
||||
</a>
|
||||
<a style="text-decoration: none;" href="https://github.com/zhom/donutbrowser/stargazers" target="_blank">
|
||||
<img src="https://img.shields.io/github/stars/zhom/donutbrowser?style=social" alt="GitHub stars">
|
||||
</a>
|
||||
</p>
|
||||
|
||||
## Donut Browser
|
||||
|
||||
> A free and open source browser orchestrator built with [Tauri](https://v2.tauri.app/).
|
||||
|
||||

|
||||
<picture>
|
||||
<source media="(prefers-color-scheme: dark)" srcset="assets/preview-dark.png" />
|
||||
<source media="(prefers-color-scheme: light)" srcset="assets/preview.png" />
|
||||
<img alt="Preview" src="assets/preview.png" />
|
||||
</picture>
|
||||
|
||||
## Features
|
||||
|
||||
- Create unlimited number of local browser profiles completely isolated from each other
|
||||
- Proxy support with basic auth for all browsers except for TOR Browser
|
||||
- Safely use multiple accounts on one device by using anti-detect browser profiles, powered by [Camoufox](https://camoufox.com)
|
||||
- Proxy support with basic auth for all browsers
|
||||
- Import profiles from your existing browsers
|
||||
- Automatic updates both for browsers and for the app itself
|
||||
- Automatic updates for browsers
|
||||
- Set Donut Browser as your default browser to control in which profile to open links
|
||||
|
||||
## Download
|
||||
@@ -76,6 +80,24 @@ Have questions or want to contribute? We'd love to hear from you!
|
||||
</picture>
|
||||
</a>
|
||||
|
||||
## Contributors
|
||||
|
||||
<!-- readme: collaborators,contributors -start -->
|
||||
<table>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td align="center">
|
||||
<a href="https://github.com/zhom">
|
||||
<img src="https://avatars.githubusercontent.com/u/2717306?v=4" width="100;" alt="zhom"/>
|
||||
<br />
|
||||
<sub><b>zhom</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
</tr>
|
||||
<tbody>
|
||||
</table>
|
||||
<!-- readme: collaborators,contributors -end -->
|
||||
|
||||
## Contact
|
||||
|
||||
Have an urgent question or want to report a security vulnerability? Send an email to contact at donutbrowser dot com and we'll get back to you as fast as possible.
|
||||
|
||||
Binary file not shown.
|
After Width: | Height: | Size: 111 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 163 KiB After Width: | Height: | Size: 114 KiB |
+5
-19
@@ -1,42 +1,28 @@
|
||||
{
|
||||
"$schema": "https://biomejs.dev/schemas/1.9.4/schema.json",
|
||||
"$schema": "https://biomejs.dev/schemas/2.2.0/schema.json",
|
||||
"vcs": {
|
||||
"enabled": false,
|
||||
"clientKind": "git",
|
||||
"useIgnoreFile": false
|
||||
},
|
||||
"files": {
|
||||
"ignoreUnknown": false,
|
||||
"ignore": []
|
||||
"ignoreUnknown": false
|
||||
},
|
||||
"formatter": {
|
||||
"enabled": true,
|
||||
"indentStyle": "space",
|
||||
"indentWidth": 2
|
||||
},
|
||||
"organizeImports": {
|
||||
"enabled": true
|
||||
},
|
||||
"linter": {
|
||||
"enabled": true,
|
||||
"rules": {
|
||||
"recommended": true,
|
||||
"correctness": {
|
||||
"useUniqueElementIds": "off",
|
||||
"useHookAtTopLevel": "error"
|
||||
},
|
||||
"nursery": {
|
||||
"useGoogleFontDisplay": "error",
|
||||
"noDocumentImportInPage": "error",
|
||||
"noHeadElement": "error",
|
||||
"noHeadImportInDocument": "error",
|
||||
"noImgElement": "off",
|
||||
"useComponentExportOnlyModules": {
|
||||
"level": "error",
|
||||
"options": {
|
||||
"allowExportNames": ["metadata", "badgeVariants", "buttonVariants"]
|
||||
}
|
||||
}
|
||||
},
|
||||
"nursery": "off",
|
||||
"suspicious": "off",
|
||||
"a11y": {
|
||||
"useSemanticElements": "off"
|
||||
}
|
||||
|
||||
@@ -1,92 +0,0 @@
|
||||
import { FlatCompat } from "@eslint/eslintrc";
|
||||
import eslint from "@eslint/js";
|
||||
import tseslint from "typescript-eslint";
|
||||
|
||||
const compat = new FlatCompat({
|
||||
baseDirectory: import.meta.dirname,
|
||||
});
|
||||
|
||||
const eslintConfig = tseslint.config(
|
||||
eslint.configs.recommended,
|
||||
tseslint.configs.strictTypeChecked,
|
||||
tseslint.configs.stylisticTypeChecked,
|
||||
...compat.extends("next/core-web-vitals"),
|
||||
{
|
||||
// Disabled rules taken from https://biomejs.dev/linter/rules-sources for ones that
|
||||
// are already handled by Prettier and TypeScript or are not needed
|
||||
rules: {
|
||||
// eslint-plugin-jsx-a11y rules - some disabled for performance/specific project needs
|
||||
"jsx-a11y/alt-text": "off",
|
||||
"jsx-a11y/anchor-has-content": "off",
|
||||
"jsx-a11y/anchor-is-valid": "off",
|
||||
"jsx-a11y/aria-activedescendant-has-tabindex": "off",
|
||||
"jsx-a11y/aria-props": "off",
|
||||
"jsx-a11y/aria-proptypes": "off",
|
||||
"jsx-a11y/aria-role": "off",
|
||||
"jsx-a11y/aria-unsupported-elements": "off",
|
||||
"jsx-a11y/autocomplete-valid": "off",
|
||||
"jsx-a11y/click-events-have-key-events": "off",
|
||||
"jsx-a11y/heading-has-content": "off",
|
||||
"jsx-a11y/html-has-lang": "off",
|
||||
"jsx-a11y/iframe-has-title": "off",
|
||||
"jsx-a11y/img-redundant-alt": "off",
|
||||
"jsx-a11y/interactive-supports-focus": "off",
|
||||
"jsx-a11y/label-has-associated-control": "off",
|
||||
"jsx-a11y/lang": "off",
|
||||
"jsx-a11y/media-has-caption": "off",
|
||||
"jsx-a11y/mouse-events-have-key-events": "off",
|
||||
"jsx-a11y/no-access-key": "off",
|
||||
"jsx-a11y/no-aria-hidden-on-focusable": "off",
|
||||
"jsx-a11y/no-autofocus": "off",
|
||||
"jsx-a11y/no-distracting-elements": "off",
|
||||
"jsx-a11y/no-interactive-element-to-noninteractive-role": "off",
|
||||
"jsx-a11y/no-noninteractive-element-to-interactive-role": "off",
|
||||
"jsx-a11y/no-noninteractive-tabindex": "off",
|
||||
"jsx-a11y/no-redundant-roles": "off",
|
||||
"jsx-a11y/no-static-element-interactions": "off",
|
||||
"jsx-a11y/prefer-tag-over-role": "off",
|
||||
"jsx-a11y/role-has-required-aria-props": "off",
|
||||
"jsx-a11y/role-supports-aria-props": "off",
|
||||
"jsx-a11y/scope": "off",
|
||||
"jsx-a11y/tabindex-no-positive": "off",
|
||||
// eslint-plugin-react rules - some disabled for performance/specific project needs
|
||||
"react/button-has-type": "off",
|
||||
"react/jsx-boolean-value": "off",
|
||||
"react/jsx-curly-brace-presence": "off",
|
||||
"react/jsx-fragments": "off",
|
||||
"react/jsx-key": "off",
|
||||
"react/jsx-no-comment-textnodes": "off",
|
||||
"react/jsx-no-duplicate-props": "off",
|
||||
"react/jsx-no-target-blank": "off",
|
||||
"react/jsx-no-useless-fragment": "off",
|
||||
"react/no-array-index-key": "off",
|
||||
"react/no-children-prop": "off",
|
||||
"react/no-danger": "off",
|
||||
"react/no-danger-with-children": "off",
|
||||
"react/void-dom-elements-no-children": "off",
|
||||
// eslint-plugin-react-hooks rules - disabled for specific project needs
|
||||
"react-hooks/exhaustive-deps": "off",
|
||||
"react-hooks/rules-of-hooks": "off",
|
||||
// typescript-eslint rules - some handled by TypeScript compiler or disabled for project needs
|
||||
"@typescript-eslint/no-explicit-any": "off",
|
||||
"@typescript-eslint/require-await": "off",
|
||||
// Custom rules
|
||||
"@typescript-eslint/restrict-template-expressions": [
|
||||
"error",
|
||||
{
|
||||
allowNumber: true,
|
||||
allowBoolean: true,
|
||||
allowNever: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
languageOptions: {
|
||||
parserOptions: {
|
||||
projectService: true,
|
||||
tsconfigRootDir: import.meta.dirname,
|
||||
},
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
export default eslintConfig;
|
||||
Vendored
+1
@@ -1,5 +1,6 @@
|
||||
/// <reference types="next" />
|
||||
/// <reference types="next/image-types/global" />
|
||||
/// <reference path="./.next/types/routes.d.ts" />
|
||||
|
||||
// NOTE: This file should not be edited
|
||||
// see https://nextjs.org/docs/app/api-reference/config/typescript for more information.
|
||||
|
||||
@@ -7,6 +7,9 @@ const nextConfig: NextConfig = {
|
||||
unoptimized: true,
|
||||
},
|
||||
distDir: "dist",
|
||||
compiler: {
|
||||
removeConsole: process.env.NODE_ENV === "production",
|
||||
},
|
||||
};
|
||||
|
||||
export default nextConfig;
|
||||
|
||||
@@ -22,7 +22,7 @@ if [ -z "$TARGET_TRIPLE" ]; then
|
||||
fi
|
||||
|
||||
# Copy the file with target triple suffix
|
||||
cp "dist/nodecar${EXT}" "../src-tauri/binaries/nodecar-${TARGET_TRIPLE}${EXT}"
|
||||
cp "nodecar-bin" "../src-tauri/binaries/nodecar-${TARGET_TRIPLE}${EXT}"
|
||||
|
||||
# Also copy a generic version for Tauri to find
|
||||
cp "dist/nodecar${EXT}" "../src-tauri/binaries/nodecar${EXT}"
|
||||
cp "nodecar-bin" "../src-tauri/binaries/nodecar${EXT}"
|
||||
@@ -1,90 +0,0 @@
|
||||
import { FlatCompat } from "@eslint/eslintrc";
|
||||
import eslint from "@eslint/js";
|
||||
import tseslint from "typescript-eslint";
|
||||
|
||||
const compat = new FlatCompat({
|
||||
baseDirectory: import.meta.dirname,
|
||||
});
|
||||
|
||||
const eslintConfig = tseslint.config(
|
||||
eslint.configs.recommended,
|
||||
...compat.extends("next/core-web-vitals"),
|
||||
{
|
||||
// Disabled rules taken from https://biomejs.dev/linter/rules-sources for ones that
|
||||
// are already handled by Prettier and TypeScript or are not needed
|
||||
rules: {
|
||||
// eslint-plugin-jsx-a11y rules - some disabled for performance/specific project needs
|
||||
"jsx-a11y/alt-text": "off",
|
||||
"jsx-a11y/anchor-has-content": "off",
|
||||
"jsx-a11y/anchor-is-valid": "off",
|
||||
"jsx-a11y/aria-activedescendant-has-tabindex": "off",
|
||||
"jsx-a11y/aria-props": "off",
|
||||
"jsx-a11y/aria-proptypes": "off",
|
||||
"jsx-a11y/aria-role": "off",
|
||||
"jsx-a11y/aria-unsupported-elements": "off",
|
||||
"jsx-a11y/autocomplete-valid": "off",
|
||||
"jsx-a11y/click-events-have-key-events": "off",
|
||||
"jsx-a11y/heading-has-content": "off",
|
||||
"jsx-a11y/html-has-lang": "off",
|
||||
"jsx-a11y/iframe-has-title": "off",
|
||||
"jsx-a11y/img-redundant-alt": "off",
|
||||
"jsx-a11y/interactive-supports-focus": "off",
|
||||
"jsx-a11y/label-has-associated-control": "off",
|
||||
"jsx-a11y/lang": "off",
|
||||
"jsx-a11y/media-has-caption": "off",
|
||||
"jsx-a11y/mouse-events-have-key-events": "off",
|
||||
"jsx-a11y/no-access-key": "off",
|
||||
"jsx-a11y/no-aria-hidden-on-focusable": "off",
|
||||
"jsx-a11y/no-autofocus": "off",
|
||||
"jsx-a11y/no-distracting-elements": "off",
|
||||
"jsx-a11y/no-interactive-element-to-noninteractive-role": "off",
|
||||
"jsx-a11y/no-noninteractive-element-to-interactive-role": "off",
|
||||
"jsx-a11y/no-noninteractive-tabindex": "off",
|
||||
"jsx-a11y/no-redundant-roles": "off",
|
||||
"jsx-a11y/no-static-element-interactions": "off",
|
||||
"jsx-a11y/prefer-tag-over-role": "off",
|
||||
"jsx-a11y/role-has-required-aria-props": "off",
|
||||
"jsx-a11y/role-supports-aria-props": "off",
|
||||
"jsx-a11y/scope": "off",
|
||||
"jsx-a11y/tabindex-no-positive": "off",
|
||||
// eslint-plugin-react rules - some disabled for performance/specific project needs
|
||||
"react/button-has-type": "off",
|
||||
"react/jsx-boolean-value": "off",
|
||||
"react/jsx-curly-brace-presence": "off",
|
||||
"react/jsx-fragments": "off",
|
||||
"react/jsx-key": "off",
|
||||
"react/jsx-no-comment-textnodes": "off",
|
||||
"react/jsx-no-duplicate-props": "off",
|
||||
"react/jsx-no-target-blank": "off",
|
||||
"react/jsx-no-useless-fragment": "off",
|
||||
"react/no-array-index-key": "off",
|
||||
"react/no-children-prop": "off",
|
||||
"react/no-danger": "off",
|
||||
"react/no-danger-with-children": "off",
|
||||
"react/void-dom-elements-no-children": "off",
|
||||
// eslint-plugin-react-hooks rules - disabled for specific project needs
|
||||
"react-hooks/exhaustive-deps": "off",
|
||||
"react-hooks/rules-of-hooks": "off",
|
||||
// typescript-eslint rules - some handled by TypeScript compiler or disabled for project needs
|
||||
"@typescript-eslint/no-explicit-any": "off",
|
||||
"@typescript-eslint/require-await": "off",
|
||||
// Custom rules
|
||||
"@typescript-eslint/restrict-template-expressions": [
|
||||
"error",
|
||||
{
|
||||
allowNumber: true,
|
||||
allowBoolean: true,
|
||||
allowNever: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
languageOptions: {
|
||||
parserOptions: {
|
||||
projectService: true,
|
||||
tsconfigRootDir: import.meta.dirname,
|
||||
},
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
export default eslintConfig;
|
||||
+18
-13
@@ -3,33 +3,38 @@
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "dist/index.js",
|
||||
"bin": "dist/index.js",
|
||||
"scripts": {
|
||||
"watch": "nodemon --exec ts-node --esm ./src/index.ts --watch src",
|
||||
"dev": "node --loader ts-node/esm ./src/index.ts",
|
||||
"start": "tsc && node ./dist/index.js",
|
||||
"test": "tsc && node ./dist/test-proxy.js",
|
||||
"rename-binary": "sh ./copy-binary.sh",
|
||||
"build": "tsc && pkg ./dist/index.js --targets latest-macos-arm64 --output dist/nodecar && pnpm rename-binary",
|
||||
"build:mac-aarch64": "tsc && pkg ./dist/index.js --targets latest-macos-arm64 --output dist/nodecar && pnpm rename-binary",
|
||||
"build:mac-x86_64": "tsc && pkg ./dist/index.js --targets latest-macos-x64 --output dist/nodecar && pnpm rename-binary",
|
||||
"build:linux-x64": "tsc && pkg ./dist/index.js --targets latest-linux-x64 --output dist/nodecar && pnpm rename-binary",
|
||||
"build:linux-arm64": "tsc && pkg ./dist/index.js --targets latest-linux-arm64 --output dist/nodecar && pnpm rename-binary",
|
||||
"build:win-x64": "tsc && pkg ./dist/index.js --targets latest-win-x64 --output dist/nodecar && pnpm rename-binary",
|
||||
"build:win-arm64": "tsc && pkg ./dist/index.js --targets latest-win-arm64 --output dist/nodecar && pnpm rename-binary"
|
||||
"build": "tsc && banderole bundle . --output nodecar-bin && pnpm rename-binary",
|
||||
"build:mac-aarch64": "tsc && banderole bundle . --output nodecar-bin && pnpm rename-binary",
|
||||
"build:mac-x86_64": "tsc && banderole bundle . --output nodecar-bin && pnpm rename-binary",
|
||||
"build:linux-x64": "tsc && banderole bundle . --output nodecar-bin && pnpm rename-binary",
|
||||
"build:linux-arm64": "tsc && banderole bundle . --output nodecar-bin && pnpm rename-binary",
|
||||
"build:win-x64": "tsc && banderole bundle . --output nodecar-bin && pnpm rename-binary",
|
||||
"build:win-arm64": "tsc && banderole bundle . --output nodecar-bin && pnpm rename-binary"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "AGPL-3.0",
|
||||
"dependencies": {
|
||||
"@types/node": "^22.15.30",
|
||||
"@yao-pkg/pkg": "^6.5.1",
|
||||
"@types/node": "^24.3.0",
|
||||
"commander": "^14.0.0",
|
||||
"dotenv": "^16.5.0",
|
||||
"donutbrowser-camoufox-js": "^0.6.6",
|
||||
"dotenv": "^17.2.1",
|
||||
"fingerprint-generator": "^2.1.70",
|
||||
"get-port": "^7.1.0",
|
||||
"nodemon": "^3.1.10",
|
||||
"playwright-core": "^1.55.0",
|
||||
"proxy-chain": "^2.5.9",
|
||||
"tmp": "^0.2.5",
|
||||
"ts-node": "^10.9.2",
|
||||
"typescript": "^5.8.3",
|
||||
"typescript-eslint": "^8.33.1"
|
||||
"typescript": "^5.9.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/tmp": "^0.2.6"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,459 @@
|
||||
import { spawn } from "node:child_process";
|
||||
import path from "node:path";
|
||||
import { launchOptions } from "donutbrowser-camoufox-js";
|
||||
import type { LaunchOptions } from "donutbrowser-camoufox-js/dist/utils.js";
|
||||
import {
|
||||
type CamoufoxConfig,
|
||||
deleteCamoufoxConfig,
|
||||
generateCamoufoxId,
|
||||
getCamoufoxConfig,
|
||||
listCamoufoxConfigs,
|
||||
saveCamoufoxConfig,
|
||||
} from "./camoufox-storage.js";
|
||||
|
||||
/**
|
||||
* Convert camoufox fingerprint format to fingerprint-generator format
|
||||
* @param camoufoxFingerprint The camoufox fingerprint object
|
||||
* @returns fingerprint-generator object
|
||||
*/
|
||||
function convertCamoufoxToFingerprintGenerator(
|
||||
camoufoxFingerprint: Record<string, any>,
|
||||
): any {
|
||||
const fingerprintObj: Record<string, any> = {
|
||||
navigator: {},
|
||||
screen: {},
|
||||
videoCard: {},
|
||||
headers: {},
|
||||
battery: {},
|
||||
};
|
||||
|
||||
// Mapping from camoufox keys to fingerprint-generator structure based on the YAML
|
||||
const mappings: Record<string, string> = {
|
||||
// Navigator properties
|
||||
"navigator.userAgent": "navigator.userAgent",
|
||||
"navigator.platform": "navigator.platform",
|
||||
"navigator.hardwareConcurrency": "navigator.hardwareConcurrency",
|
||||
"navigator.maxTouchPoints": "navigator.maxTouchPoints",
|
||||
"navigator.doNotTrack": "navigator.doNotTrack",
|
||||
"navigator.appCodeName": "navigator.appCodeName",
|
||||
"navigator.appName": "navigator.appName",
|
||||
"navigator.appVersion": "navigator.appVersion",
|
||||
"navigator.oscpu": "navigator.oscpu",
|
||||
"navigator.product": "navigator.product",
|
||||
"navigator.language": "navigator.language",
|
||||
"navigator.languages": "navigator.languages",
|
||||
"navigator.globalPrivacyControl": "navigator.globalPrivacyControl",
|
||||
|
||||
// Screen properties
|
||||
"screen.width": "screen.width",
|
||||
"screen.height": "screen.height",
|
||||
"screen.availWidth": "screen.availWidth",
|
||||
"screen.availHeight": "screen.availHeight",
|
||||
"screen.availTop": "screen.availTop",
|
||||
"screen.availLeft": "screen.availLeft",
|
||||
"screen.colorDepth": "screen.colorDepth",
|
||||
"screen.pixelDepth": "screen.pixelDepth",
|
||||
"window.outerWidth": "screen.outerWidth",
|
||||
"window.outerHeight": "screen.outerHeight",
|
||||
"window.innerWidth": "screen.innerWidth",
|
||||
"window.innerHeight": "screen.innerHeight",
|
||||
"window.screenX": "screen.screenX",
|
||||
"window.screenY": "screen.screenY",
|
||||
"screen.pageXOffset": "screen.pageXOffset",
|
||||
"screen.pageYOffset": "screen.pageYOffset",
|
||||
"window.devicePixelRatio": "screen.devicePixelRatio",
|
||||
"document.body.clientWidth": "screen.clientWidth",
|
||||
"document.body.clientHeight": "screen.clientHeight",
|
||||
|
||||
// WebGL properties
|
||||
"webGl:vendor": "videoCard.vendor",
|
||||
"webGl:renderer": "videoCard.renderer",
|
||||
|
||||
// Headers
|
||||
"headers.Accept-Encoding": "headers.Accept-Encoding",
|
||||
|
||||
// Battery
|
||||
"battery:charging": "battery.charging",
|
||||
"battery:chargingTime": "battery.chargingTime",
|
||||
"battery:dischargingTime": "battery.dischargingTime",
|
||||
};
|
||||
|
||||
// Apply mappings
|
||||
for (const [camoufoxKey, fingerprintPath] of Object.entries(mappings)) {
|
||||
if (camoufoxFingerprint[camoufoxKey] !== undefined) {
|
||||
const pathParts = fingerprintPath.split(".");
|
||||
let current = fingerprintObj;
|
||||
|
||||
// Navigate to the nested property, creating objects as needed
|
||||
for (let i = 0; i < pathParts.length - 1; i++) {
|
||||
const part = pathParts[i];
|
||||
if (!current[part]) {
|
||||
current[part] = {};
|
||||
}
|
||||
current = current[part];
|
||||
}
|
||||
|
||||
// Set the final value
|
||||
const finalKey = pathParts[pathParts.length - 1];
|
||||
current[finalKey] = camoufoxFingerprint[camoufoxKey];
|
||||
}
|
||||
}
|
||||
|
||||
// Handle fonts separately
|
||||
if (camoufoxFingerprint.fonts && Array.isArray(camoufoxFingerprint.fonts)) {
|
||||
fingerprintObj.fonts = camoufoxFingerprint.fonts;
|
||||
}
|
||||
|
||||
return { ...camoufoxFingerprint, ...fingerprintObj };
|
||||
}
|
||||
|
||||
/**
|
||||
* Start a Camoufox instance in a separate process
|
||||
* @param options Camoufox launch options
|
||||
* @param profilePath Profile directory path
|
||||
* @param url Optional URL to open
|
||||
* @returns Promise resolving to the Camoufox configuration
|
||||
*/
|
||||
export async function startCamoufoxProcess(
|
||||
options: LaunchOptions = {},
|
||||
profilePath?: string,
|
||||
url?: string,
|
||||
customConfig?: string,
|
||||
): Promise<CamoufoxConfig> {
|
||||
// Generate a unique ID for this instance
|
||||
const id = generateCamoufoxId();
|
||||
|
||||
// Ensure profile path is absolute if provided
|
||||
const absoluteProfilePath = profilePath
|
||||
? path.resolve(profilePath)
|
||||
: undefined;
|
||||
|
||||
// Create the Camoufox configuration
|
||||
const config: CamoufoxConfig = {
|
||||
id,
|
||||
options: JSON.parse(JSON.stringify(options)), // Deep clone to avoid reference sharing
|
||||
profilePath: absoluteProfilePath,
|
||||
url,
|
||||
customConfig,
|
||||
};
|
||||
|
||||
// Save the configuration before starting the process
|
||||
saveCamoufoxConfig(config);
|
||||
|
||||
// Build the command arguments
|
||||
const args = [
|
||||
path.join(__dirname, "index.js"),
|
||||
"camoufox-worker",
|
||||
"start",
|
||||
"--id",
|
||||
id,
|
||||
];
|
||||
|
||||
// Spawn the process with proper detachment - similar to proxy implementation
|
||||
const child = spawn(process.execPath, args, {
|
||||
detached: true,
|
||||
stdio: ["ignore", "pipe", "pipe"], // Capture stdout and stderr for startup feedback
|
||||
cwd: process.cwd(),
|
||||
env: {
|
||||
...process.env,
|
||||
NODE_ENV: "production",
|
||||
// Ensure Camoufox can find its dependencies
|
||||
NODE_PATH: process.env.NODE_PATH || "",
|
||||
},
|
||||
});
|
||||
|
||||
// Wait for the worker to start successfully or fail - with shorter timeout for quick response
|
||||
return new Promise<CamoufoxConfig>((resolve, reject) => {
|
||||
let resolved = false;
|
||||
let stdoutBuffer = "";
|
||||
let stderrBuffer = "";
|
||||
|
||||
// Shorter timeout for quick startup feedback
|
||||
const timeout = setTimeout(() => {
|
||||
if (!resolved) {
|
||||
resolved = true;
|
||||
child.kill("SIGKILL");
|
||||
reject(
|
||||
new Error(`Camoufox worker ${id} startup timeout after 5 seconds`),
|
||||
);
|
||||
}
|
||||
}, 5000);
|
||||
|
||||
// Handle stdout - look for success JSON
|
||||
if (child.stdout) {
|
||||
child.stdout.on("data", (data) => {
|
||||
const output = data.toString();
|
||||
stdoutBuffer += output;
|
||||
|
||||
// Look for success JSON message
|
||||
const lines = stdoutBuffer.split("\n");
|
||||
for (const line of lines) {
|
||||
if (line.trim()) {
|
||||
try {
|
||||
const parsed = JSON.parse(line.trim());
|
||||
if (parsed.success && parsed.id === id && parsed.processId) {
|
||||
if (!resolved) {
|
||||
resolved = true;
|
||||
clearTimeout(timeout);
|
||||
config.processId = parsed.processId;
|
||||
saveCamoufoxConfig(config);
|
||||
|
||||
// Unref immediately after success to detach properly
|
||||
child.unref();
|
||||
resolve(config);
|
||||
return;
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Not JSON, continue
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Handle stderr - look for error JSON
|
||||
if (child.stderr) {
|
||||
child.stderr.on("data", (data) => {
|
||||
const output = data.toString();
|
||||
stderrBuffer += output;
|
||||
|
||||
// Look for error JSON message
|
||||
const lines = stderrBuffer.split("\n");
|
||||
for (const line of lines) {
|
||||
if (line.trim()) {
|
||||
try {
|
||||
const parsed = JSON.parse(line.trim());
|
||||
if (parsed.error && parsed.id === id) {
|
||||
if (!resolved) {
|
||||
resolved = true;
|
||||
clearTimeout(timeout);
|
||||
reject(
|
||||
new Error(
|
||||
`Camoufox worker failed: ${parsed.message || parsed.error}`,
|
||||
),
|
||||
);
|
||||
return;
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Not JSON, continue
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
child.on("exit", (code, signal) => {
|
||||
if (!resolved) {
|
||||
resolved = true;
|
||||
clearTimeout(timeout);
|
||||
if (code !== 0) {
|
||||
reject(
|
||||
new Error(
|
||||
`Camoufox worker ${id} exited with code ${code} and signal ${signal}. Stderr: ${stderrBuffer}`,
|
||||
),
|
||||
);
|
||||
} else {
|
||||
// Process exited successfully but we didn't get success message
|
||||
reject(
|
||||
new Error(
|
||||
`Camoufox worker ${id} exited without success confirmation`,
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop a Camoufox process
|
||||
* @param id The Camoufox ID to stop
|
||||
* @returns Promise resolving to true if stopped, false if not found
|
||||
*/
|
||||
export async function stopCamoufoxProcess(id: string): Promise<boolean> {
|
||||
const config = getCamoufoxConfig(id);
|
||||
|
||||
if (!config) {
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
// Method 1: If we have a process ID, kill by PID with proper signal sequence
|
||||
if (config.processId) {
|
||||
try {
|
||||
// First try SIGTERM for graceful shutdown
|
||||
process.kill(config.processId, "SIGTERM");
|
||||
// Give it more time to terminate gracefully (increased from 2s to 5s)
|
||||
await new Promise((resolve) => setTimeout(resolve, 5000));
|
||||
|
||||
// Check if process is still running
|
||||
try {
|
||||
process.kill(config.processId, 0); // Signal 0 checks if process exists
|
||||
process.kill(config.processId, "SIGKILL");
|
||||
} catch {}
|
||||
} catch {}
|
||||
}
|
||||
|
||||
// Method 2: Pattern-based kill as fallback
|
||||
const killByPattern = spawn(
|
||||
"pkill",
|
||||
["-TERM", "-f", `camoufox-worker.*${id}`],
|
||||
{
|
||||
stdio: "ignore",
|
||||
},
|
||||
);
|
||||
|
||||
// Wait for pattern-based kill command to complete
|
||||
await new Promise<void>((resolve) => {
|
||||
killByPattern.on("exit", () => resolve());
|
||||
// Timeout after 3 seconds
|
||||
setTimeout(() => resolve(), 3000);
|
||||
});
|
||||
|
||||
// Final cleanup with SIGKILL if needed
|
||||
setTimeout(() => {
|
||||
spawn("pkill", ["-KILL", "-f", `camoufox-worker.*${id}`], {
|
||||
stdio: "ignore",
|
||||
});
|
||||
}, 1000);
|
||||
|
||||
// Delete the configuration
|
||||
deleteCamoufoxConfig(id);
|
||||
return true;
|
||||
} catch {
|
||||
// Delete the configuration even if stopping failed
|
||||
deleteCamoufoxConfig(id);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop all Camoufox processes
|
||||
* @returns Promise resolving when all instances are stopped
|
||||
*/
|
||||
export async function stopAllCamoufoxProcesses(): Promise<void> {
|
||||
const configs = listCamoufoxConfigs();
|
||||
|
||||
const stopPromises = configs.map((config) => stopCamoufoxProcess(config.id));
|
||||
await Promise.all(stopPromises);
|
||||
}
|
||||
|
||||
interface GenerateConfigOptions {
|
||||
proxy?: string;
|
||||
maxWidth?: number;
|
||||
maxHeight?: number;
|
||||
minWidth?: number;
|
||||
minHeight?: number;
|
||||
geoip?: string | boolean;
|
||||
blockImages?: boolean;
|
||||
blockWebrtc?: boolean;
|
||||
blockWebgl?: boolean;
|
||||
executablePath?: string;
|
||||
fingerprint?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate Camoufox configuration using launchOptions
|
||||
* @param options Configuration options
|
||||
* @returns Promise resolving to the generated config JSON string
|
||||
*/
|
||||
export async function generateCamoufoxConfig(
|
||||
options: GenerateConfigOptions,
|
||||
): Promise<string> {
|
||||
try {
|
||||
const launchOpts: any = {
|
||||
headless: false,
|
||||
i_know_what_im_doing: true,
|
||||
config: {
|
||||
disableTheming: true,
|
||||
showcursor: false,
|
||||
},
|
||||
};
|
||||
|
||||
if (options.geoip) {
|
||||
launchOpts.geoip = true;
|
||||
}
|
||||
|
||||
if (options.blockImages) {
|
||||
launchOpts.block_images = true;
|
||||
}
|
||||
if (options.blockWebrtc) {
|
||||
launchOpts.block_webrtc = true;
|
||||
}
|
||||
if (options.blockWebgl) {
|
||||
launchOpts.block_webgl = true;
|
||||
}
|
||||
|
||||
if (options.executablePath) {
|
||||
launchOpts.executable_path = options.executablePath;
|
||||
}
|
||||
|
||||
if (options.proxy) {
|
||||
launchOpts.proxy = options.proxy;
|
||||
}
|
||||
|
||||
// If fingerprint is provided, use it and ignore other options except executable_path and block_*
|
||||
if (options.fingerprint) {
|
||||
try {
|
||||
const camoufoxFingerprint = JSON.parse(options.fingerprint);
|
||||
|
||||
if (camoufoxFingerprint.timezone) {
|
||||
launchOpts.config.timezone = camoufoxFingerprint.timezone;
|
||||
}
|
||||
|
||||
// Convert camoufox fingerprint format to fingerprint-generator format
|
||||
const fingerprintObj =
|
||||
convertCamoufoxToFingerprintGenerator(camoufoxFingerprint);
|
||||
launchOpts.fingerprint = fingerprintObj;
|
||||
} catch (error) {
|
||||
throw new Error(`Invalid fingerprint JSON: ${error}`);
|
||||
}
|
||||
} else {
|
||||
// Use individual options to build configuration
|
||||
|
||||
// Build screen configuration with min/max dimensions
|
||||
const screen: {
|
||||
minWidth?: number;
|
||||
maxWidth?: number;
|
||||
minHeight?: number;
|
||||
maxHeight?: number;
|
||||
} = {};
|
||||
|
||||
if (options.minWidth) screen.minWidth = options.minWidth;
|
||||
if (options.maxWidth) screen.maxWidth = options.maxWidth;
|
||||
if (options.minHeight) screen.minHeight = options.minHeight;
|
||||
if (options.maxHeight) screen.maxHeight = options.maxHeight;
|
||||
|
||||
if (Object.keys(screen).length > 0) {
|
||||
launchOpts.screen = screen;
|
||||
}
|
||||
}
|
||||
|
||||
// Generate the configuration using launchOptions
|
||||
const generatedOptions = await launchOptions(launchOpts);
|
||||
|
||||
// Extract the environment variables that contain the config
|
||||
const envVars = generatedOptions.env || {};
|
||||
|
||||
// Reconstruct the config from environment variables using getEnvVars utility
|
||||
let configStr = "";
|
||||
let chunkIndex = 1;
|
||||
|
||||
while (envVars[`CAMOU_CONFIG_${chunkIndex}`]) {
|
||||
configStr += envVars[`CAMOU_CONFIG_${chunkIndex}`];
|
||||
chunkIndex++;
|
||||
}
|
||||
|
||||
if (!configStr) {
|
||||
throw new Error("No configuration generated");
|
||||
}
|
||||
|
||||
// Parse and return the config as JSON string
|
||||
const config = JSON.parse(configStr);
|
||||
return JSON.stringify(config);
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to generate Camoufox config: ${error}`);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,153 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import type { LaunchOptions } from "donutbrowser-camoufox-js/dist/utils.js";
|
||||
import tmp from "tmp";
|
||||
|
||||
export interface CamoufoxConfig {
|
||||
id: string;
|
||||
options: LaunchOptions;
|
||||
profilePath?: string;
|
||||
url?: string;
|
||||
processId?: number;
|
||||
customConfig?: string; // JSON string of the fingerprint config
|
||||
}
|
||||
|
||||
const STORAGE_DIR = path.join(tmp.tmpdir, "donutbrowser", "camoufox");
|
||||
|
||||
if (!fs.existsSync(STORAGE_DIR)) {
|
||||
fs.mkdirSync(STORAGE_DIR, { recursive: true });
|
||||
}
|
||||
|
||||
/**
|
||||
* Save a Camoufox configuration to disk
|
||||
* @param config The Camoufox configuration to save
|
||||
*/
|
||||
export function saveCamoufoxConfig(config: CamoufoxConfig): void {
|
||||
const filePath = path.join(STORAGE_DIR, `${config.id}.json`);
|
||||
fs.writeFileSync(filePath, JSON.stringify(config, null, 2));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a Camoufox configuration by ID
|
||||
* @param id The Camoufox ID
|
||||
* @returns The Camoufox configuration or null if not found
|
||||
*/
|
||||
export function getCamoufoxConfig(id: string): CamoufoxConfig | null {
|
||||
const filePath = path.join(STORAGE_DIR, `${id}.json`);
|
||||
|
||||
if (!fs.existsSync(filePath)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
const content = fs.readFileSync(filePath, "utf-8");
|
||||
return JSON.parse(content) as CamoufoxConfig;
|
||||
} catch (error) {
|
||||
console.error({
|
||||
message: `Error reading Camoufox config ${id}`,
|
||||
error: (error as Error).message,
|
||||
});
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a Camoufox configuration
|
||||
* @param id The Camoufox ID to delete
|
||||
* @returns True if deleted, false if not found
|
||||
*/
|
||||
export function deleteCamoufoxConfig(id: string): boolean {
|
||||
const filePath = path.join(STORAGE_DIR, `${id}.json`);
|
||||
|
||||
if (!fs.existsSync(filePath)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
fs.unlinkSync(filePath);
|
||||
return true;
|
||||
} catch (error) {
|
||||
console.error({
|
||||
message: `Error deleting Camoufox config ${id}`,
|
||||
error: (error as Error).message,
|
||||
});
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List all saved Camoufox configurations
|
||||
* @returns Array of Camoufox configurations
|
||||
*/
|
||||
export function listCamoufoxConfigs(): CamoufoxConfig[] {
|
||||
if (!fs.existsSync(STORAGE_DIR)) {
|
||||
return [];
|
||||
}
|
||||
|
||||
try {
|
||||
return fs
|
||||
.readdirSync(STORAGE_DIR)
|
||||
.filter((file) => file.endsWith(".json"))
|
||||
.map((file) => {
|
||||
try {
|
||||
const content = fs.readFileSync(
|
||||
path.join(STORAGE_DIR, file),
|
||||
"utf-8",
|
||||
);
|
||||
return JSON.parse(content) as CamoufoxConfig;
|
||||
} catch (error) {
|
||||
console.error({
|
||||
message: `Error reading Camoufox config ${file}`,
|
||||
error,
|
||||
});
|
||||
return null;
|
||||
}
|
||||
})
|
||||
.filter((config): config is CamoufoxConfig => config !== null)
|
||||
.map((config) => {
|
||||
config.options = "Removed for logging" as any;
|
||||
config.customConfig = "Removed for logging" as any;
|
||||
return config;
|
||||
});
|
||||
} catch (error) {
|
||||
console.error({ message: "Error listing Camoufox configs:", error });
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update a Camoufox configuration
|
||||
* @param config The Camoufox configuration to update
|
||||
* @returns True if updated, false if not found
|
||||
*/
|
||||
export function updateCamoufoxConfig(config: CamoufoxConfig): boolean {
|
||||
const filePath = path.join(STORAGE_DIR, `${config.id}.json`);
|
||||
|
||||
try {
|
||||
fs.readFileSync(filePath, "utf-8");
|
||||
fs.writeFileSync(filePath, JSON.stringify(config, null, 2));
|
||||
return true;
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code === "ENOENT") {
|
||||
console.error({
|
||||
message: `Config ${config.id} was deleted while the app was running`,
|
||||
});
|
||||
return false;
|
||||
}
|
||||
|
||||
console.error({
|
||||
message: `Error updating Camoufox config ${config.id}`,
|
||||
error,
|
||||
});
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a unique ID for a Camoufox instance
|
||||
* @returns A unique ID string
|
||||
*/
|
||||
export function generateCamoufoxId(): string {
|
||||
// Include process ID to ensure uniqueness across multiple processes
|
||||
return `camoufox_${Date.now()}_${process.pid}_${Math.floor(Math.random() * 10000)}`;
|
||||
}
|
||||
@@ -0,0 +1,307 @@
|
||||
import { launchOptions } from "donutbrowser-camoufox-js";
|
||||
import type { LaunchOptions } from "donutbrowser-camoufox-js/dist/utils.js";
|
||||
import { type Browser, type BrowserContext, firefox } from "playwright-core";
|
||||
import { getCamoufoxConfig, saveCamoufoxConfig } from "./camoufox-storage.js";
|
||||
import { getEnvVars, parseProxyString } from "./utils.js";
|
||||
|
||||
/**
|
||||
* Run a Camoufox browser server as a worker process
|
||||
* @param id The Camoufox configuration ID
|
||||
*/
|
||||
export async function runCamoufoxWorker(id: string): Promise<void> {
|
||||
// Get the Camoufox configuration
|
||||
const config = getCamoufoxConfig(id);
|
||||
|
||||
if (!config) {
|
||||
console.error(
|
||||
JSON.stringify({
|
||||
error: "Configuration not found",
|
||||
id: id,
|
||||
}),
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
config.processId = process.pid;
|
||||
saveCamoufoxConfig(config);
|
||||
|
||||
console.log(
|
||||
JSON.stringify({
|
||||
success: true,
|
||||
id: id,
|
||||
processId: process.pid,
|
||||
profilePath: config.profilePath,
|
||||
message: "Camoufox worker started successfully",
|
||||
}),
|
||||
);
|
||||
|
||||
// Launch browser in background - this can take time and may fail
|
||||
setImmediate(async () => {
|
||||
let browser: Browser | null = null;
|
||||
let context: BrowserContext | null = null;
|
||||
let windowCheckInterval: NodeJS.Timeout | null = null;
|
||||
|
||||
// Graceful shutdown handler with access to browser and server
|
||||
const gracefulShutdown = async () => {
|
||||
try {
|
||||
// Clear any intervals first
|
||||
if (windowCheckInterval) {
|
||||
clearInterval(windowCheckInterval);
|
||||
}
|
||||
|
||||
// Close browser context and server if they exist
|
||||
if (context && !context.pages) {
|
||||
// Context is already closed
|
||||
} else if (context) {
|
||||
await context.close();
|
||||
}
|
||||
|
||||
if (browser?.isConnected()) {
|
||||
await browser.close();
|
||||
}
|
||||
} catch {
|
||||
// Ignore cleanup errors during shutdown
|
||||
}
|
||||
process.exit(0);
|
||||
};
|
||||
|
||||
// Handle various quit signals for proper macOS Command+Q support
|
||||
process.on("SIGTERM", () => void gracefulShutdown());
|
||||
process.on("SIGINT", () => void gracefulShutdown());
|
||||
process.on("SIGHUP", () => void gracefulShutdown());
|
||||
process.on("SIGQUIT", () => void gracefulShutdown());
|
||||
|
||||
// Handle uncaught exceptions and unhandled rejections
|
||||
process.on("uncaughtException", () => void gracefulShutdown());
|
||||
process.on("unhandledRejection", () => void gracefulShutdown());
|
||||
|
||||
try {
|
||||
// Deep clone to avoid reference sharing and ensure fresh configuration for each instance
|
||||
const camoufoxOptions: LaunchOptions = JSON.parse(
|
||||
JSON.stringify(config.options || {}),
|
||||
);
|
||||
|
||||
// Add profile path if provided
|
||||
if (config.profilePath) {
|
||||
camoufoxOptions.user_data_dir = config.profilePath;
|
||||
}
|
||||
|
||||
// Ensure block options are properly set
|
||||
if (camoufoxOptions.block_images) {
|
||||
camoufoxOptions.block_images = true;
|
||||
}
|
||||
|
||||
if (camoufoxOptions.block_webgl) {
|
||||
camoufoxOptions.block_webgl = true;
|
||||
}
|
||||
|
||||
if (camoufoxOptions.block_webrtc) {
|
||||
camoufoxOptions.block_webrtc = true;
|
||||
}
|
||||
|
||||
// Check for headless mode from config (no environment variable check)
|
||||
if (camoufoxOptions.headless) {
|
||||
camoufoxOptions.headless = true;
|
||||
}
|
||||
|
||||
// Always set these defaults - ensure they are applied for each instance
|
||||
camoufoxOptions.i_know_what_im_doing = true;
|
||||
camoufoxOptions.config = {
|
||||
disableTheming: true,
|
||||
showcursor: false,
|
||||
...(camoufoxOptions.config || {}),
|
||||
};
|
||||
|
||||
// Generate fresh options for this specific instance
|
||||
const generatedOptions = await launchOptions(camoufoxOptions);
|
||||
|
||||
// Start with process environment to ensure proper inheritance
|
||||
let finalEnv = { ...process.env };
|
||||
|
||||
// Add generated options environment variables
|
||||
if (generatedOptions.env) {
|
||||
finalEnv = { ...finalEnv, ...generatedOptions.env };
|
||||
}
|
||||
|
||||
// If we have a custom config from Rust, use it directly as environment variables
|
||||
if (config.customConfig) {
|
||||
try {
|
||||
// Parse the custom config JSON string
|
||||
const customConfigObj = JSON.parse(config.customConfig);
|
||||
|
||||
// Ensure default config values are preserved even with custom config
|
||||
const mergedConfig = {
|
||||
...customConfigObj,
|
||||
disableTheming: true,
|
||||
showcursor: false,
|
||||
};
|
||||
|
||||
// Convert merged config to environment variables using getEnvVars
|
||||
const customEnvVars = getEnvVars(mergedConfig);
|
||||
|
||||
// Merge custom config with generated config (custom takes precedence)
|
||||
finalEnv = { ...finalEnv, ...customEnvVars };
|
||||
} catch (error) {
|
||||
console.error(
|
||||
`Camoufox worker ${id}: Failed to parse custom config, using generated config:`,
|
||||
error,
|
||||
);
|
||||
return;
|
||||
}
|
||||
}
|
||||
// Prepare profile path for persistent context
|
||||
const profilePath = config.profilePath || "";
|
||||
|
||||
// Launch persistent context with the final configuration
|
||||
const finalOptions: any = {
|
||||
...generatedOptions,
|
||||
env: finalEnv,
|
||||
};
|
||||
|
||||
// If a custom executable path was provided, ensure Playwright uses it
|
||||
if (
|
||||
(camoufoxOptions as any).executable_path &&
|
||||
typeof (camoufoxOptions as any).executable_path === "string"
|
||||
) {
|
||||
finalOptions.executablePath = (camoufoxOptions as any)
|
||||
.executable_path as string;
|
||||
}
|
||||
|
||||
// Only add proxy if it exists and is valid
|
||||
if (camoufoxOptions.proxy) {
|
||||
try {
|
||||
finalOptions.proxy = parseProxyString(camoufoxOptions.proxy);
|
||||
} catch (error) {
|
||||
console.error({
|
||||
message: "Failed to parse proxy, launching without proxy",
|
||||
error,
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Use launchPersistentContext instead of launchServer
|
||||
context = await firefox.launchPersistentContext(
|
||||
profilePath,
|
||||
finalOptions,
|
||||
);
|
||||
|
||||
// Get the browser instance from context
|
||||
browser = context.browser();
|
||||
|
||||
// Handle browser disconnection for proper cleanup
|
||||
if (browser) {
|
||||
browser.on("disconnected", () => void gracefulShutdown());
|
||||
}
|
||||
|
||||
// Handle context close for proper cleanup
|
||||
context.on("close", () => void gracefulShutdown());
|
||||
|
||||
saveCamoufoxConfig(config);
|
||||
|
||||
// Monitor for window closure
|
||||
const startWindowMonitoring = () => {
|
||||
windowCheckInterval = setInterval(async () => {
|
||||
try {
|
||||
// Check if context is still active
|
||||
if (!context?.pages || context.pages().length === 0) {
|
||||
if (windowCheckInterval) {
|
||||
clearInterval(windowCheckInterval);
|
||||
}
|
||||
await gracefulShutdown();
|
||||
return;
|
||||
}
|
||||
|
||||
// Check if browser is still connected (if available)
|
||||
if (browser && !browser.isConnected()) {
|
||||
if (windowCheckInterval) {
|
||||
clearInterval(windowCheckInterval);
|
||||
}
|
||||
await gracefulShutdown();
|
||||
return;
|
||||
}
|
||||
|
||||
// Check pages in the persistent context
|
||||
const pages = context.pages();
|
||||
if (pages.length === 0) {
|
||||
if (windowCheckInterval) {
|
||||
clearInterval(windowCheckInterval);
|
||||
}
|
||||
await gracefulShutdown();
|
||||
}
|
||||
} catch {
|
||||
// If we can't check windows, assume browser is closing
|
||||
if (windowCheckInterval) {
|
||||
clearInterval(windowCheckInterval);
|
||||
}
|
||||
await gracefulShutdown();
|
||||
}
|
||||
}, 1000); // Check every second
|
||||
};
|
||||
|
||||
// Handle URL opening if provided
|
||||
if (config.url) {
|
||||
try {
|
||||
const pages = await context.pages();
|
||||
if (pages.length) {
|
||||
const page = pages[0];
|
||||
await page.goto(config.url, {
|
||||
waitUntil: "domcontentloaded",
|
||||
timeout: 30000,
|
||||
});
|
||||
|
||||
// Start monitoring after page is created
|
||||
startWindowMonitoring();
|
||||
}
|
||||
} catch (urlError) {
|
||||
console.error({
|
||||
message: "Failed to open URL",
|
||||
error: urlError,
|
||||
});
|
||||
// URL opening failure doesn't affect startup success
|
||||
// Still start monitoring
|
||||
startWindowMonitoring();
|
||||
}
|
||||
} else {
|
||||
// Start monitoring after page is created
|
||||
startWindowMonitoring();
|
||||
}
|
||||
|
||||
// Monitor browser/context connection
|
||||
const keepAlive = setInterval(async () => {
|
||||
try {
|
||||
// Check if context is still active
|
||||
if (!context?.pages) {
|
||||
clearInterval(keepAlive);
|
||||
await gracefulShutdown();
|
||||
return;
|
||||
}
|
||||
|
||||
// Check browser connection if available
|
||||
if (browser && !browser.isConnected()) {
|
||||
clearInterval(keepAlive);
|
||||
await gracefulShutdown();
|
||||
return;
|
||||
}
|
||||
} catch (error) {
|
||||
console.error({
|
||||
message: "Error in keepAlive check",
|
||||
error,
|
||||
});
|
||||
clearInterval(keepAlive);
|
||||
await gracefulShutdown();
|
||||
}
|
||||
}, 2000);
|
||||
} catch (error) {
|
||||
console.error({
|
||||
message: "Failed to launch Camoufox",
|
||||
error,
|
||||
});
|
||||
// Browser launch failed, attempt cleanup
|
||||
await gracefulShutdown();
|
||||
}
|
||||
});
|
||||
|
||||
// Keep process alive
|
||||
process.stdin.resume();
|
||||
}
|
||||
+327
-14
@@ -1,4 +1,13 @@
|
||||
import { program } from "commander";
|
||||
import type { LaunchOptions } from "donutbrowser-camoufox-js/dist/utils.js";
|
||||
import {
|
||||
generateCamoufoxConfig,
|
||||
startCamoufoxProcess,
|
||||
stopAllCamoufoxProcesses,
|
||||
stopCamoufoxProcess,
|
||||
} from "./camoufox-launcher.js";
|
||||
import { listCamoufoxConfigs } from "./camoufox-storage.js";
|
||||
import { runCamoufoxWorker } from "./camoufox-worker.js";
|
||||
import {
|
||||
startProxyProcess,
|
||||
stopAllProxyProcesses,
|
||||
@@ -44,14 +53,12 @@ program
|
||||
},
|
||||
) => {
|
||||
if (action === "start") {
|
||||
let upstreamUrl: string;
|
||||
let upstreamUrl: string | undefined;
|
||||
|
||||
// Build upstream URL from individual components if provided
|
||||
if (options.host && options.proxyPort && options.type) {
|
||||
const protocol =
|
||||
options.type === "socks4" || options.type === "socks5"
|
||||
? options.type
|
||||
: "http";
|
||||
// Preserve provided scheme (http, https, socks4, socks5)
|
||||
const protocol = String(options.type).toLowerCase();
|
||||
const auth =
|
||||
options.username && options.password
|
||||
? `${encodeURIComponent(options.username)}:${encodeURIComponent(
|
||||
@@ -61,16 +68,8 @@ program
|
||||
upstreamUrl = `${protocol}://${auth}${options.host}:${options.proxyPort}`;
|
||||
} else if (options.upstream) {
|
||||
upstreamUrl = options.upstream;
|
||||
} else {
|
||||
console.error(
|
||||
"Error: Either --upstream URL or --host, --proxy-port, and --type are required",
|
||||
);
|
||||
console.log(
|
||||
"Example: proxy start --host datacenter.proxyempire.io --proxy-port 9000 --type http --username user --password pass",
|
||||
);
|
||||
process.exit(1);
|
||||
return;
|
||||
}
|
||||
// If no upstream is provided, create a direct proxy
|
||||
|
||||
try {
|
||||
const config = await startProxyProcess(upstreamUrl, {
|
||||
@@ -149,4 +148,318 @@ program
|
||||
}
|
||||
});
|
||||
|
||||
// Command for Camoufox management
//
// <action> selects the subcommand:
//   start            launch a new Camoufox instance from the flags below
//   stop             stop one instance (--id) or every running instance
//   list             print all stored configurations as JSON
//   generate-config  print a generated fingerprint configuration
//
// Output is machine-readable JSON on stdout; errors go to stderr and the
// process exits non-zero.
program
  .command("camoufox")
  .argument(
    "<action>",
    "start, stop, list, or generate-config Camoufox instances",
  )
  .option("--id <id>", "Camoufox ID for stop command")
  .option("--profile-path <path>", "profile directory path")
  .option("--url <url>", "URL to open")

  // Config generation options
  .option("--proxy <proxy>", "proxy URL for config generation")
  .option("--max-width <width>", "maximum screen width", parseInt)
  .option("--max-height <height>", "maximum screen height", parseInt)
  .option("--min-width <width>", "minimum screen width", parseInt)
  .option("--min-height <height>", "minimum screen height", parseInt)
  .option("--geoip", "enable geoip")
  .option("--block-images", "block images")
  .option("--block-webrtc", "block WebRTC")
  .option("--block-webgl", "block WebGL")
  .option("--executable-path <path>", "executable path")
  .option("--fingerprint <json>", "fingerprint JSON string")
  .option("--headless", "run in headless mode")
  .option("--custom-config <json>", "custom config JSON string")

  .description("manage Camoufox browser instances")
  .action(
    async (
      action: string,
      options: Record<string, string | number | boolean | undefined>,
    ) => {
      if (action === "start") {
        try {
          // Build Camoufox options in the format expected by camoufox-js
          // (snake_case keys).
          const camoufoxOptions: LaunchOptions = {};

          // NOTE(review): many flags read below (--os, --disable-coop,
          // --latitude/--longitude, --country, --timezone, --humanize,
          // --locale, --addons, --fonts, --custom-fonts-only,
          // --exclude-addons, --screen-*, --window-*, --ff-version,
          // --main-world-eval, --webgl-*, --disable-cache,
          // --virtual-display, --debug, --args, --env, --firefox-prefs)
          // are never declared with .option() above, so commander will not
          // populate them — confirm the intended option list.

          // OS fingerprinting ("windows", "macos", "linux", or a
          // comma-separated list).
          if (options.os && typeof options.os === "string") {
            camoufoxOptions.os = options.os.includes(",")
              ? (options.os.split(",") as ("windows" | "macos" | "linux")[])
              : (options.os as "windows" | "macos" | "linux");
          }

          // Blocking options
          if (options.blockImages) camoufoxOptions.block_images = true;
          if (options.blockWebrtc) camoufoxOptions.block_webrtc = true;
          if (options.blockWebgl) camoufoxOptions.block_webgl = true;

          // Security options
          if (options.disableCoop) camoufoxOptions.disable_coop = true;

          if (options.geoip) {
            camoufoxOptions.geoip = true;
          }

          // Explicit coordinates; accuracy is fixed at 100.
          if (options.latitude && options.longitude) {
            camoufoxOptions.geolocation = {
              latitude: options.latitude as number,
              longitude: options.longitude as number,
              accuracy: 100,
            };
          }
          if (options.country)
            camoufoxOptions.country = options.country as string;
          if (options.timezone)
            camoufoxOptions.timezone = options.timezone as string;

          if (options.humanize)
            camoufoxOptions.humanize = options.humanize as boolean;
          if (options.headless) camoufoxOptions.headless = true;

          // Localization (single locale or comma-separated list)
          if (options.locale && typeof options.locale === "string") {
            camoufoxOptions.locale = options.locale.includes(",")
              ? options.locale.split(",")
              : options.locale;
          }

          // Extensions and fonts
          if (options.addons && typeof options.addons === "string")
            camoufoxOptions.addons = options.addons.split(",");
          if (options.fonts && typeof options.fonts === "string")
            camoufoxOptions.fonts = options.fonts.split(",");
          if (options.customFontsOnly) camoufoxOptions.custom_fonts_only = true;
          if (
            options.excludeAddons &&
            typeof options.excludeAddons === "string"
          )
            camoufoxOptions.exclude_addons = options.excludeAddons.split(
              ",",
            ) as "UBO"[];

          // Executable path: forward through to camoufox-js and ultimately Playwright
          if (
            options.executablePath &&
            typeof options.executablePath === "string"
          ) {
            // camoufox-js uses snake_case for this option
            (camoufoxOptions as any).executable_path =
              options.executablePath as string;
          }

          // Screen constraints; only attached when at least one bound is set.
          const screen: {
            minWidth?: number;
            maxWidth?: number;
            minHeight?: number;
            maxHeight?: number;
          } = {};
          if (options.screenMinWidth)
            screen.minWidth = options.screenMinWidth as number;
          if (options.screenMaxWidth)
            screen.maxWidth = options.screenMaxWidth as number;
          if (options.screenMinHeight)
            screen.minHeight = options.screenMinHeight as number;
          if (options.screenMaxHeight)
            screen.maxHeight = options.screenMaxHeight as number;
          if (Object.keys(screen).length > 0) camoufoxOptions.screen = screen;

          // Fixed window size requires both dimensions.
          if (options.windowWidth && options.windowHeight) {
            camoufoxOptions.window = [
              options.windowWidth as number,
              options.windowHeight as number,
            ];
          }

          // Advanced options
          if (options.ffVersion)
            camoufoxOptions.ff_version = options.ffVersion as number;
          if (options.mainWorldEval) camoufoxOptions.main_world_eval = true;
          if (options.webglVendor && options.webglRenderer) {
            camoufoxOptions.webgl_config = [
              options.webglVendor as string,
              options.webglRenderer as string,
            ];
          }

          // Proxy
          if (options.proxy) camoufoxOptions.proxy = options.proxy as string;

          // Cache and performance - default to enabled
          camoufoxOptions.enable_cache = !options.disableCache;

          // Environment and debugging
          if (options.virtualDisplay)
            camoufoxOptions.virtual_display = options.virtualDisplay as string;
          if (options.debug) camoufoxOptions.debug = true;

          // Handle headless mode via flag instead of environment variable
          // (redundant with the headless assignment above; kept for safety).
          if (options.headless) {
            camoufoxOptions.headless = true;
          }
          if (options.args && typeof options.args === "string")
            camoufoxOptions.args = options.args.split(",");
          if (options.env && typeof options.env === "string") {
            try {
              camoufoxOptions.env = JSON.parse(options.env);
            } catch (e) {
              console.error(
                JSON.stringify({
                  error: "Invalid JSON for --env option",
                  message: String(e),
                }),
              );
              process.exit(1);
              return;
            }
          }

          // Firefox preferences
          if (
            options.firefoxPrefs &&
            typeof options.firefoxPrefs === "string"
          ) {
            try {
              camoufoxOptions.firefox_user_prefs = JSON.parse(
                options.firefoxPrefs,
              );
            } catch (e) {
              console.error(
                JSON.stringify({
                  error: "Invalid JSON for --firefox-prefs option",
                  message: String(e),
                }),
              );
              process.exit(1);
            }
          }

          // Spawn the detached worker process; returns its stored config.
          const config = await startCamoufoxProcess(
            camoufoxOptions,
            typeof options.profilePath === "string"
              ? options.profilePath
              : undefined,
            typeof options.url === "string" ? options.url : undefined,
            typeof options.customConfig === "string"
              ? options.customConfig
              : undefined,
          );

          // Machine-readable result for the calling application.
          console.log(
            JSON.stringify({
              id: config.id,
              processId: config.processId,
              profilePath: config.profilePath,
              url: config.url,
            }),
          );

          process.exit(0);
        } catch (error: unknown) {
          console.error(
            JSON.stringify({
              error: "Failed to start Camoufox",
              message: error instanceof Error ? error.message : String(error),
            }),
          );
          process.exit(1);
        }
      } else if (action === "stop") {
        // Stop a single instance when --id is given, otherwise all of them.
        if (options.id && typeof options.id === "string") {
          const stopped = await stopCamoufoxProcess(options.id);
          console.log(JSON.stringify({ success: stopped }));
        } else {
          await stopAllCamoufoxProcesses();
          console.log(JSON.stringify({ success: true }));
        }
        process.exit(0);
      } else if (action === "list") {
        // Dump all stored configurations as a JSON array.
        const configs = listCamoufoxConfigs();
        console.log(JSON.stringify(configs));
        process.exit(0);
      } else if (action === "generate-config") {
        // Generate and print a fingerprint config without launching a browser.
        try {
          const config = await generateCamoufoxConfig({
            proxy:
              typeof options.proxy === "string" ? options.proxy : undefined,
            maxWidth:
              typeof options.maxWidth === "number"
                ? options.maxWidth
                : undefined,
            maxHeight:
              typeof options.maxHeight === "number"
                ? options.maxHeight
                : undefined,
            minWidth:
              typeof options.minWidth === "number"
                ? options.minWidth
                : undefined,
            minHeight:
              typeof options.minHeight === "number"
                ? options.minHeight
                : undefined,
            geoip: Boolean(options.geoip),
            blockImages:
              typeof options.blockImages === "boolean"
                ? options.blockImages
                : undefined,
            blockWebrtc:
              typeof options.blockWebrtc === "boolean"
                ? options.blockWebrtc
                : undefined,
            blockWebgl:
              typeof options.blockWebgl === "boolean"
                ? options.blockWebgl
                : undefined,
            executablePath:
              typeof options.executablePath === "string"
                ? options.executablePath
                : undefined,
            fingerprint:
              typeof options.fingerprint === "string"
                ? options.fingerprint
                : undefined,
          });
          console.log(config);
          process.exit(0);
        } catch (error: unknown) {
          console.error({
            error: "Failed to generate config",
            message:
              error instanceof Error ? error.message : JSON.stringify(error),
          });
          process.exit(1);
        }
      } else {
        console.error({
          error: "Invalid action",
          message: "Use 'start', 'stop', 'list', or 'generate-config'",
        });
        process.exit(1);
      }
    },
  );
|
||||
|
||||
// Command for Camoufox worker (internal use)
//
// Spawned by the launcher as a child process; not intended for end users.
// The worker takes over the configuration identified by --id and keeps
// running until its browser closes (see runCamoufoxWorker).
program
  .command("camoufox-worker")
  .argument("<action>", "start a Camoufox worker")
  .requiredOption("--id <id>", "Camoufox configuration ID")
  .description("run a Camoufox worker process")
  .action(async (action: string, options: { id: string }) => {
    if (action === "start") {
      // runCamoufoxWorker keeps the process alive; no exit call here.
      await runCamoufoxWorker(options.id);
    } else {
      console.error({
        error: "Invalid action for camoufox-worker",
        message: "Use 'start'",
      });
      process.exit(1);
    }
  });

// Parse argv and dispatch to the matching command.
program.parse();
|
||||
|
||||
@@ -2,23 +2,23 @@ import { spawn } from "node:child_process";
|
||||
import path from "node:path";
|
||||
import getPort from "get-port";
|
||||
import {
|
||||
type ProxyConfig,
|
||||
deleteProxyConfig,
|
||||
generateProxyId,
|
||||
getProxyConfig,
|
||||
isProcessRunning,
|
||||
listProxyConfigs,
|
||||
type ProxyConfig,
|
||||
saveProxyConfig,
|
||||
} from "./proxy-storage";
|
||||
|
||||
/**
|
||||
* Start a proxy in a separate process
|
||||
* @param upstreamUrl The upstream proxy URL
|
||||
* @param upstreamUrl The upstream proxy URL (optional for direct proxy)
|
||||
* @param options Optional configuration
|
||||
* @returns Promise resolving to the proxy configuration
|
||||
*/
|
||||
export async function startProxyProcess(
|
||||
upstreamUrl: string,
|
||||
upstreamUrl?: string,
|
||||
options: { port?: number; ignoreProxyCertificate?: boolean } = {},
|
||||
): Promise<ProxyConfig> {
|
||||
// Generate a unique ID for this proxy
|
||||
@@ -30,7 +30,7 @@ export async function startProxyProcess(
|
||||
// Create the proxy configuration
|
||||
const config: ProxyConfig = {
|
||||
id,
|
||||
upstreamUrl,
|
||||
upstreamUrl: upstreamUrl || "DIRECT",
|
||||
localPort: port,
|
||||
ignoreProxyCertificate: options.ignoreProxyCertificate ?? false,
|
||||
};
|
||||
|
||||
@@ -1,21 +1,18 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import os from "node:os";
|
||||
import tmp from "tmp";
|
||||
|
||||
// Define the proxy configuration type
|
||||
export interface ProxyConfig {
|
||||
id: string;
|
||||
upstreamUrl: string;
|
||||
upstreamUrl: string; // Can be "DIRECT" for direct proxy
|
||||
localPort?: number;
|
||||
ignoreProxyCertificate?: boolean;
|
||||
localUrl?: string;
|
||||
pid?: number;
|
||||
}
|
||||
|
||||
// Path to store proxy configurations
|
||||
const STORAGE_DIR = path.join(os.tmpdir(), "donutbrowser", "proxies");
|
||||
const STORAGE_DIR = path.join(tmp.tmpdir, "donutbrowser", "proxies");
|
||||
|
||||
// Ensure storage directory exists
|
||||
if (!fs.existsSync(STORAGE_DIR)) {
|
||||
fs.mkdirSync(STORAGE_DIR, { recursive: true });
|
||||
}
|
||||
@@ -88,7 +85,7 @@ export function listProxyConfigs(): ProxyConfig[] {
|
||||
try {
|
||||
const content = fs.readFileSync(
|
||||
path.join(STORAGE_DIR, file),
|
||||
"utf-8"
|
||||
"utf-8",
|
||||
);
|
||||
return JSON.parse(content) as ProxyConfig;
|
||||
} catch (error) {
|
||||
@@ -111,14 +108,18 @@ export function listProxyConfigs(): ProxyConfig[] {
|
||||
export function updateProxyConfig(config: ProxyConfig): boolean {
|
||||
const filePath = path.join(STORAGE_DIR, `${config.id}.json`);
|
||||
|
||||
if (!fs.existsSync(filePath)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
fs.readFileSync(filePath, "utf-8");
|
||||
fs.writeFileSync(filePath, JSON.stringify(config, null, 2));
|
||||
return true;
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code === "ENOENT") {
|
||||
console.error(
|
||||
`Config ${config.id} was deleted while the app was running`,
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
console.error(`Error updating proxy config ${config.id}:`, error);
|
||||
return false;
|
||||
}
|
||||
@@ -135,7 +136,7 @@ export function isProcessRunning(pid: number): boolean {
|
||||
// but checks if it exists
|
||||
process.kill(pid, 0);
|
||||
return true;
|
||||
} catch (error) {
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
+10
-15
@@ -19,6 +19,10 @@ export async function runProxyWorker(id: string): Promise<void> {
|
||||
port: config.localPort,
|
||||
host: "127.0.0.1",
|
||||
prepareRequestFunction: () => {
|
||||
// If upstreamUrl is "DIRECT", don't use upstream proxy
|
||||
if (config.upstreamUrl === "DIRECT") {
|
||||
return {};
|
||||
}
|
||||
return {
|
||||
upstreamProxyUrl: config.upstreamUrl,
|
||||
ignoreUpstreamProxyCertificate: config.ignoreProxyCertificate ?? false,
|
||||
@@ -27,28 +31,22 @@ export async function runProxyWorker(id: string): Promise<void> {
|
||||
});
|
||||
|
||||
// Handle process termination gracefully
|
||||
const gracefulShutdown = async (signal: string) => {
|
||||
console.log(`Proxy worker ${id} received ${signal}, shutting down...`);
|
||||
const gracefulShutdown = async () => {
|
||||
try {
|
||||
await server.close(true);
|
||||
console.log(`Proxy worker ${id} shut down successfully`);
|
||||
} catch (error) {
|
||||
console.error(`Error during shutdown for proxy ${id}:`, error);
|
||||
}
|
||||
} catch {}
|
||||
process.exit(0);
|
||||
};
|
||||
|
||||
process.on("SIGTERM", () => void gracefulShutdown("SIGTERM"));
|
||||
process.on("SIGINT", () => void gracefulShutdown("SIGINT"));
|
||||
process.on("SIGTERM", () => void gracefulShutdown());
|
||||
process.on("SIGINT", () => void gracefulShutdown());
|
||||
|
||||
// Handle uncaught exceptions
|
||||
process.on("uncaughtException", (error) => {
|
||||
console.error(`Uncaught exception in proxy worker ${id}:`, error);
|
||||
process.on("uncaughtException", () => {
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
process.on("unhandledRejection", (reason) => {
|
||||
console.error(`Unhandled rejection in proxy worker ${id}:`, reason);
|
||||
process.on("unhandledRejection", () => {
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
@@ -61,9 +59,6 @@ export async function runProxyWorker(id: string): Promise<void> {
|
||||
config.localUrl = `http://127.0.0.1:${server.port}`;
|
||||
updateProxyConfig(config);
|
||||
|
||||
console.log(`Proxy worker ${id} started on port ${server.port}`);
|
||||
console.log(`Forwarding to upstream proxy: ${config.upstreamUrl}`);
|
||||
|
||||
// Keep the process alive
|
||||
setInterval(() => {
|
||||
// Do nothing, just keep the process alive
|
||||
|
||||
@@ -0,0 +1,119 @@
|
||||
import type { LaunchOptions } from "playwright-core";
|
||||
|
||||
const OS_MAP: { [key: string]: "mac" | "win" | "lin" } = {
|
||||
darwin: "mac",
|
||||
linux: "lin",
|
||||
win32: "win",
|
||||
};
|
||||
|
||||
const OS_NAME: "mac" | "win" | "lin" = OS_MAP[process.platform];
|
||||
|
||||
export function getEnvVars(configMap: Record<string, string>) {
|
||||
const envVars: {
|
||||
[key: string]: string | undefined;
|
||||
} = {};
|
||||
let updatedConfigData: Uint8Array;
|
||||
|
||||
try {
|
||||
// Ensure we're working with a fresh copy of the config
|
||||
const configCopy = JSON.parse(JSON.stringify(configMap));
|
||||
updatedConfigData = new TextEncoder().encode(JSON.stringify(configCopy));
|
||||
} catch (e) {
|
||||
console.error(`Error updating config: ${e}`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const chunkSize = OS_NAME === "win" ? 2047 : 32767;
|
||||
const configStr = new TextDecoder().decode(updatedConfigData);
|
||||
|
||||
for (let i = 0; i < configStr.length; i += chunkSize) {
|
||||
const chunk = configStr.slice(i, i + chunkSize);
|
||||
const envName = `CAMOU_CONFIG_${Math.floor(i / chunkSize) + 1}`;
|
||||
try {
|
||||
envVars[envName] = chunk;
|
||||
} catch (e) {
|
||||
console.error(`Error setting ${envName}: ${e}`);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
return envVars;
|
||||
}
|
||||
|
||||
export function parseProxyString(proxyString: LaunchOptions["proxy"] | string) {
|
||||
if (typeof proxyString === "object") {
|
||||
return proxyString;
|
||||
}
|
||||
|
||||
if (!proxyString || typeof proxyString !== "string") {
|
||||
throw new Error("Invalid proxy string provided");
|
||||
}
|
||||
|
||||
// Remove any leading/trailing whitespace
|
||||
const trimmed = proxyString.trim();
|
||||
|
||||
// Handle different proxy string formats:
|
||||
// 1. http://username:password@host:port
|
||||
// 2. host:port
|
||||
// 3. protocol://host:port
|
||||
// 4. username:password@host:port
|
||||
|
||||
let server = "";
|
||||
let username: string | undefined;
|
||||
let password: string | undefined;
|
||||
|
||||
try {
|
||||
// Try parsing as URL first (handles protocol://username:password@host:port)
|
||||
if (trimmed.includes("://")) {
|
||||
const url = new URL(trimmed);
|
||||
server = `${url.hostname}:${url.port}`;
|
||||
|
||||
if (url.username) {
|
||||
username = decodeURIComponent(url.username);
|
||||
}
|
||||
if (url.password) {
|
||||
password = decodeURIComponent(url.password);
|
||||
}
|
||||
} else {
|
||||
// Handle formats without protocol
|
||||
let workingString = trimmed;
|
||||
|
||||
// Check for username:password@ prefix
|
||||
const authMatch = workingString.match(/^([^:@]+):([^@]+)@(.+)$/);
|
||||
if (authMatch) {
|
||||
username = authMatch[1];
|
||||
password = authMatch[2];
|
||||
workingString = authMatch[3];
|
||||
}
|
||||
|
||||
// The remaining part should be host:port
|
||||
server = workingString;
|
||||
}
|
||||
|
||||
// Validate that we have a server
|
||||
if (!server) {
|
||||
throw new Error("Could not extract server information");
|
||||
}
|
||||
|
||||
// Basic validation for host:port format
|
||||
if (!server.includes(":") || server.split(":").length !== 2) {
|
||||
throw new Error("Server must be in host:port format");
|
||||
}
|
||||
|
||||
const result: LaunchOptions["proxy"] = { server };
|
||||
|
||||
if (username !== undefined) {
|
||||
result.username = username;
|
||||
}
|
||||
|
||||
if (password !== undefined) {
|
||||
result.password = password;
|
||||
}
|
||||
|
||||
return result;
|
||||
} catch (error) {
|
||||
throw new Error(
|
||||
`Failed to parse proxy string: ${error instanceof Error ? error.message : "Unknown error"}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
+45
-45
@@ -2,7 +2,7 @@
|
||||
"name": "donutbrowser",
|
||||
"private": true,
|
||||
"license": "AGPL-3.0",
|
||||
"version": "0.4.0",
|
||||
"version": "0.12.2",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "next dev --turbopack",
|
||||
@@ -11,74 +11,74 @@
|
||||
"test": "pnpm test:rust",
|
||||
"test:rust": "cd src-tauri && cargo test",
|
||||
"lint": "pnpm lint:js && pnpm lint:rust",
|
||||
"lint:js": "biome check src/ && tsc --noEmit && next lint",
|
||||
"lint:js": "biome check src/ && tsc --noEmit",
|
||||
"lint:rust": "cd src-tauri && cargo clippy --all-targets --all-features -- -D warnings -D clippy::all && cargo fmt --all",
|
||||
"tauri": "tauri",
|
||||
"shadcn:add": "pnpm dlx shadcn@latest add",
|
||||
"prepare": "husky && husky install",
|
||||
"format:rust": "cd src-tauri && cargo clippy --fix --allow-dirty --all-targets --all-features -- -D warnings -D clippy::all && cargo fmt --all",
|
||||
"format:js": "biome check src/ --fix",
|
||||
"format:js": "biome check src/ --write --unsafe",
|
||||
"format": "pnpm format:js && pnpm format:rust",
|
||||
"cargo": "cd src-tauri && cargo",
|
||||
"check-unused-commands": "cd src-tauri && cargo run --bin check_unused_commands"
|
||||
"unused-exports:js": "ts-unused-exports tsconfig.json",
|
||||
"check-unused-commands": "cd src-tauri && cargo test test_no_unused_tauri_commands"
|
||||
},
|
||||
"dependencies": {
|
||||
"@radix-ui/react-checkbox": "^1.3.2",
|
||||
"@radix-ui/react-dialog": "^1.1.14",
|
||||
"@radix-ui/react-dropdown-menu": "^2.1.15",
|
||||
"@radix-ui/react-checkbox": "^1.3.3",
|
||||
"@radix-ui/react-dialog": "^1.1.15",
|
||||
"@radix-ui/react-dropdown-menu": "^2.1.16",
|
||||
"@radix-ui/react-label": "^2.1.7",
|
||||
"@radix-ui/react-popover": "^1.1.14",
|
||||
"@radix-ui/react-popover": "^1.1.15",
|
||||
"@radix-ui/react-progress": "^1.1.7",
|
||||
"@radix-ui/react-scroll-area": "^1.2.9",
|
||||
"@radix-ui/react-select": "^2.2.5",
|
||||
"@radix-ui/react-radio-group": "^1.3.8",
|
||||
"@radix-ui/react-scroll-area": "^1.2.10",
|
||||
"@radix-ui/react-select": "^2.2.6",
|
||||
"@radix-ui/react-slot": "^1.2.3",
|
||||
"@radix-ui/react-tooltip": "^1.2.7",
|
||||
"@radix-ui/react-tabs": "^1.1.13",
|
||||
"@radix-ui/react-tooltip": "^1.2.8",
|
||||
"@tanstack/react-table": "^8.21.3",
|
||||
"@tauri-apps/api": "^2.5.0",
|
||||
"@tauri-apps/plugin-dialog": "^2.2.2",
|
||||
"@tauri-apps/plugin-fs": "~2.3.0",
|
||||
"@tauri-apps/plugin-opener": "^2.2.7",
|
||||
"ahooks": "^3.8.5",
|
||||
"@tauri-apps/api": "^2.8.0",
|
||||
"@tauri-apps/plugin-deep-link": "^2.4.2",
|
||||
"@tauri-apps/plugin-dialog": "^2.3.3",
|
||||
"@tauri-apps/plugin-fs": "~2.4.2",
|
||||
"@tauri-apps/plugin-opener": "^2.5.0",
|
||||
"ahooks": "^3.9.4",
|
||||
"class-variance-authority": "^0.7.1",
|
||||
"clsx": "^2.1.1",
|
||||
"cmdk": "^1.1.1",
|
||||
"next": "^15.3.3",
|
||||
"color": "^5.0.0",
|
||||
"lucide-react": "^0.542.0",
|
||||
"motion": "^12.23.12",
|
||||
"next": "^15.5.2",
|
||||
"next-themes": "^0.4.6",
|
||||
"react": "^19.1.0",
|
||||
"react-dom": "^19.1.0",
|
||||
"radix-ui": "^1.4.3",
|
||||
"react": "^19.1.1",
|
||||
"react-dom": "^19.1.1",
|
||||
"react-icons": "^5.5.0",
|
||||
"sonner": "^2.0.5",
|
||||
"tailwind-merge": "^3.3.0",
|
||||
"sonner": "^2.0.7",
|
||||
"tailwind-merge": "^3.3.1",
|
||||
"tauri-plugin-macos-permissions-api": "^2.3.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "1.9.4",
|
||||
"@eslint/eslintrc": "^3.3.1",
|
||||
"@eslint/js": "^9.28.0",
|
||||
"@next/eslint-plugin-next": "^15.3.3",
|
||||
"@tailwindcss/postcss": "^4.1.8",
|
||||
"@tauri-apps/cli": "^2.5.0",
|
||||
"@types/node": "^22.15.30",
|
||||
"@types/react": "^19.1.6",
|
||||
"@types/react-dom": "^19.1.6",
|
||||
"@typescript-eslint/eslint-plugin": "^8.33.1",
|
||||
"@typescript-eslint/parser": "^8.33.1",
|
||||
"@vitejs/plugin-react": "^4.5.1",
|
||||
"eslint": "^9.28.0",
|
||||
"eslint-config-next": "^15.3.3",
|
||||
"eslint-plugin-react-hooks": "^5.2.0",
|
||||
"@biomejs/biome": "2.2.2",
|
||||
"@tailwindcss/postcss": "^4.1.12",
|
||||
"@tauri-apps/cli": "^2.8.3",
|
||||
"@types/color": "^4.2.0",
|
||||
"@types/node": "^24.3.0",
|
||||
"@types/react": "^19.1.12",
|
||||
"@types/react-dom": "^19.1.9",
|
||||
"@vitejs/plugin-react": "^5.0.2",
|
||||
"husky": "^9.1.7",
|
||||
"lint-staged": "^16.1.0",
|
||||
"tailwindcss": "^4.1.8",
|
||||
"tw-animate-css": "^1.3.4",
|
||||
"typescript": "~5.8.3",
|
||||
"typescript-eslint": "^8.33.1"
|
||||
"lint-staged": "^16.1.5",
|
||||
"tailwindcss": "^4.1.12",
|
||||
"ts-unused-exports": "^11.0.1",
|
||||
"tw-animate-css": "^1.3.7",
|
||||
"typescript": "~5.9.2"
|
||||
},
|
||||
"packageManager": "pnpm@10.11.1",
|
||||
"packageManager": "pnpm@10.14.0+sha512.ad27a79641b49c3e481a16a805baa71817a04bbe06a38d17e60e2eaee83f6a146c6a688125f5792e48dd5ba30e7da52a5cda4c3992b9ccf333f9ce223af84748",
|
||||
"lint-staged": {
|
||||
"**/*.{js,jsx,ts,tsx,json,css,md}": [
|
||||
"biome check --fix",
|
||||
"eslint --cache --fix"
|
||||
"**/*.{js,jsx,ts,tsx,json,css}": [
|
||||
"biome check --fix"
|
||||
],
|
||||
"src-tauri/**/*.rs": [
|
||||
"cd src-tauri && cargo fmt --all",
|
||||
|
||||
Generated
+3078
-3822
File diff suppressed because it is too large
Load Diff
+4
-4
@@ -1,9 +1,9 @@
|
||||
packages:
|
||||
- "nodecar"
|
||||
|
||||
- nodecar
|
||||
onlyBuiltDependencies:
|
||||
- "@biomejs/biome"
|
||||
- "@tailwindcss/oxide"
|
||||
- '@biomejs/biome'
|
||||
- '@tailwindcss/oxide'
|
||||
- esbuild
|
||||
- sharp
|
||||
- sqlite3
|
||||
- unrs-resolver
|
||||
|
||||
Generated
+1427
-674
File diff suppressed because it is too large
Load Diff
+37
-12
@@ -1,7 +1,7 @@
|
||||
[package]
|
||||
name = "donutbrowser"
|
||||
version = "0.4.0"
|
||||
description = "Simple Yet Powerful Browser Orchestrator"
|
||||
version = "0.12.2"
|
||||
description = "Simple Yet Powerful Anti-Detect Browser"
|
||||
authors = ["zhom@github"]
|
||||
edition = "2021"
|
||||
default-run = "donutbrowser"
|
||||
@@ -14,6 +14,7 @@ default-run = "donutbrowser"
|
||||
# This seems to be only an issue on Windows, see https://github.com/rust-lang/cargo/issues/8519
|
||||
name = "donutbrowser"
|
||||
crate-type = ["staticlib", "cdylib", "rlib"]
|
||||
doctest = false
|
||||
|
||||
[build-dependencies]
|
||||
tauri-build = { version = "2", features = [] }
|
||||
@@ -28,19 +29,38 @@ tauri-plugin-shell = "2"
|
||||
tauri-plugin-deep-link = "2"
|
||||
tauri-plugin-dialog = "2"
|
||||
tauri-plugin-macos-permissions = "2"
|
||||
|
||||
|
||||
directories = "6"
|
||||
reqwest = { version = "0.12", features = ["json", "stream"] }
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
sysinfo = "0.35"
|
||||
tokio = { version = "1", features = ["full", "sync"] }
|
||||
sysinfo = "0.37"
|
||||
lazy_static = "1.4"
|
||||
base64 = "0.22"
|
||||
zip = "4"
|
||||
async-trait = "0.1"
|
||||
futures-util = "0.3"
|
||||
urlencoding = "2.1"
|
||||
zip = "4"
|
||||
tar = "0"
|
||||
bzip2 = "0"
|
||||
flate2 = "1"
|
||||
lzma-rs = "0"
|
||||
msi-extract = "0"
|
||||
|
||||
uuid = { version = "1.0", features = ["v4", "serde"] }
|
||||
url = "2.5"
|
||||
chrono = { version = "0.4", features = ["serde"] }
|
||||
axum = "0.8.4"
|
||||
tower = "0.5"
|
||||
tower-http = { version = "0.6", features = ["cors"] }
|
||||
rand = "0.9.2"
|
||||
argon2 = "0.5"
|
||||
aes-gcm = "0.10"
|
||||
|
||||
[target."cfg(any(target_os = \"macos\", windows, target_os = \"linux\"))".dependencies]
|
||||
tauri-plugin-single-instance = { version = "2", features = ["deep-link"] }
|
||||
|
||||
[target.'cfg(target_os = "macos")'.dependencies]
|
||||
core-foundation="0.10"
|
||||
core-foundation = "0.10"
|
||||
objc2 = "0.6.1"
|
||||
objc2-app-kit = { version = "0.3.1", features = ["NSWindow"] }
|
||||
|
||||
@@ -59,19 +79,24 @@ windows = { version = "0.61", features = [
|
||||
] }
|
||||
|
||||
[dev-dependencies]
|
||||
tempfile = "3.13.0"
|
||||
tokio-test = "0.4.4"
|
||||
tempfile = "3.21.0"
|
||||
wiremock = "0.6"
|
||||
hyper = { version = "1.0", features = ["full"] }
|
||||
hyper = { version = "1.7", features = ["full"] }
|
||||
hyper-util = { version = "0.1", features = ["full"] }
|
||||
http-body-util = "0.1"
|
||||
tower = "0.5"
|
||||
tower-http = { version = "0.6", features = ["fs", "trace"] }
|
||||
futures-util = "0.3"
|
||||
|
||||
# Integration test configuration
|
||||
[[test]]
|
||||
name = "nodecar_integration"
|
||||
path = "tests/nodecar_integration.rs"
|
||||
|
||||
[features]
|
||||
# by default Tauri runs in production mode
|
||||
# when `tauri dev` runs it is executed with `cargo run --no-default-features` if `devPath` points to the filesystem
|
||||
default = [ "custom-protocol" ]
|
||||
default = ["custom-protocol"]
|
||||
# this feature is used used for production builds where `devPath` points to the filesystem
|
||||
# DO NOT remove this
|
||||
custom-protocol = [ "tauri/custom-protocol" ]
|
||||
custom-protocol = ["tauri/custom-protocol"]
|
||||
|
||||
@@ -26,5 +26,33 @@
|
||||
<string>public.app-category.productivity</string>
|
||||
<key>NSHumanReadableCopyright</key>
|
||||
<string>Copyright © 2025 Donut Browser</string>
|
||||
<key>CFBundleDocumentTypes</key>
|
||||
<array>
|
||||
<dict>
|
||||
<key>CFBundleTypeName</key>
|
||||
<string>HTML document</string>
|
||||
<key>CFBundleTypeRole</key>
|
||||
<string>Viewer</string>
|
||||
<key>LSHandlerRank</key>
|
||||
<string>Default</string>
|
||||
<key>LSItemContentTypes</key>
|
||||
<array>
|
||||
<string>public.html</string>
|
||||
<string>public.xhtml</string>
|
||||
</array>
|
||||
</dict>
|
||||
</array>
|
||||
<key>CFBundleURLTypes</key>
|
||||
<array>
|
||||
<dict>
|
||||
<key>CFBundleURLName</key>
|
||||
<string>Web site URL</string>
|
||||
<key>CFBundleURLSchemes</key>
|
||||
<array>
|
||||
<string>http</string>
|
||||
<string>https</string>
|
||||
</array>
|
||||
</dict>
|
||||
</array>
|
||||
</dict>
|
||||
</plist>
|
||||
@@ -26,5 +26,13 @@ fn main() {
|
||||
println!("cargo:rustc-env=BUILD_VERSION=dev-{version}");
|
||||
}
|
||||
|
||||
// Inject vault password at build time
|
||||
if let Ok(vault_password) = std::env::var("DONUT_BROWSER_VAULT_PASSWORD") {
|
||||
println!("cargo:rustc-env=DONUT_BROWSER_VAULT_PASSWORD={vault_password}");
|
||||
} else {
|
||||
// Use default password if environment variable is not set
|
||||
println!("cargo:rustc-env=DONUT_BROWSER_VAULT_PASSWORD=donutbrowser-api-vault-password");
|
||||
}
|
||||
|
||||
tauri_build::build()
|
||||
}
|
||||
|
||||
@@ -19,6 +19,10 @@
|
||||
"shell:allow-spawn",
|
||||
"shell:allow-stdin-write",
|
||||
"deep-link:default",
|
||||
"deep-link:allow-register",
|
||||
"deep-link:allow-unregister",
|
||||
"deep-link:allow-is-registered",
|
||||
"deep-link:allow-get-current",
|
||||
"dialog:default",
|
||||
"dialog:allow-open",
|
||||
"macos-permissions:default",
|
||||
|
||||
@@ -2,12 +2,12 @@
|
||||
Version=1.0
|
||||
Type=Application
|
||||
Name=Donut Browser
|
||||
Comment=Simple Yet Powerful Browser Orchestrator
|
||||
Comment=Simple Yet Powerful Anti-Detect Browser
|
||||
Exec=donutbrowser %u
|
||||
Icon=donutbrowser
|
||||
StartupNotify=true
|
||||
NoDisplay=false
|
||||
Categories=Network;WebBrowser;Productivity;
|
||||
Categories=Network;WebBrowser;
|
||||
MimeType=x-scheme-handler/http;x-scheme-handler/https;text/html;application/xhtml+xml;
|
||||
StartupWMClass=donutbrowser
|
||||
Keywords=browser;web;internet;productivity;
|
||||
@@ -28,5 +28,11 @@
|
||||
<true/>
|
||||
<key>com.apple.security.cs.disable-library-validation</key>
|
||||
<true/>
|
||||
<key>com.apple.security.automation.apple-events</key>
|
||||
<true/>
|
||||
<key>com.apple.security.device.usb</key>
|
||||
<true/>
|
||||
<key>com.apple.security.inherit</key>
|
||||
<true/>
|
||||
</dict>
|
||||
</plist>
|
||||
+535
-165
@@ -9,21 +9,21 @@ use std::time::{SystemTime, UNIX_EPOCH};
|
||||
use crate::browser::GithubRelease;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
struct VersionComponent {
|
||||
major: u32,
|
||||
minor: u32,
|
||||
patch: u32,
|
||||
pre_release: Option<PreRelease>,
|
||||
pub struct VersionComponent {
|
||||
pub major: u32,
|
||||
pub minor: u32,
|
||||
pub patch: u32,
|
||||
pub pre_release: Option<PreRelease>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
struct PreRelease {
|
||||
kind: PreReleaseKind,
|
||||
number: Option<u32>,
|
||||
pub struct PreRelease {
|
||||
pub kind: PreReleaseKind,
|
||||
pub number: Option<u32>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
|
||||
enum PreReleaseKind {
|
||||
pub enum PreReleaseKind {
|
||||
Alpha,
|
||||
Beta,
|
||||
RC,
|
||||
@@ -32,8 +32,14 @@ enum PreReleaseKind {
|
||||
}
|
||||
|
||||
impl VersionComponent {
|
||||
fn parse(version: &str) -> Self {
|
||||
pub fn parse(version: &str) -> Self {
|
||||
let version = version.trim();
|
||||
// Normalize common tag prefixes like 'v1.2.3' -> '1.2.3'
|
||||
let version = if version.starts_with('v') || version.starts_with('V') {
|
||||
&version[1..]
|
||||
} else {
|
||||
version
|
||||
};
|
||||
|
||||
// Handle special case for Zen Browser twilight releases
|
||||
if version.to_lowercase() == "twilight" {
|
||||
@@ -215,11 +221,28 @@ pub fn sort_versions(versions: &mut [String]) {
|
||||
});
|
||||
}
|
||||
|
||||
// Helper function to compare two versions
|
||||
pub fn compare_versions(version1: &str, version2: &str) -> std::cmp::Ordering {
|
||||
let version_a = VersionComponent::parse(version1);
|
||||
let version_b = VersionComponent::parse(version2);
|
||||
version_a.cmp(&version_b)
|
||||
}
|
||||
|
||||
pub fn is_version_newer(version1: &str, version2: &str) -> bool {
|
||||
// Use the proper VersionComponent comparison from api_client.rs
|
||||
let version_a = VersionComponent::parse(version1);
|
||||
let version_b = VersionComponent::parse(version2);
|
||||
version_a > version_b
|
||||
}
|
||||
|
||||
// Helper function to sort GitHub releases
|
||||
pub fn sort_github_releases(releases: &mut [GithubRelease]) {
|
||||
releases.sort_by(|a, b| {
|
||||
let version_a = VersionComponent::parse(&a.tag_name);
|
||||
let version_b = VersionComponent::parse(&b.tag_name);
|
||||
// Normalize tags like "v1.81.9" -> "1.81.9" for correct ordering
|
||||
let tag_a = a.tag_name.trim_start_matches('v');
|
||||
let tag_b = b.tag_name.trim_start_matches('v');
|
||||
let version_a = VersionComponent::parse(tag_a);
|
||||
let version_b = VersionComponent::parse(tag_b);
|
||||
version_b.cmp(&version_a) // Descending order (newest first)
|
||||
});
|
||||
}
|
||||
@@ -242,14 +265,29 @@ pub fn is_browser_version_nightly(
|
||||
version.to_lowercase() == "twilight"
|
||||
}
|
||||
"brave" => {
|
||||
// For Brave Browser, only releases titled "Release" are stable, everything else is nightly
|
||||
// For Brave Browser, only releases whose name starts with "Release" (case-insensitive) are stable.
|
||||
if let Some(name) = release_name {
|
||||
!name.starts_with("Release")
|
||||
} else {
|
||||
true
|
||||
let normalized = name.trim_start().to_ascii_lowercase();
|
||||
return !normalized.starts_with("release");
|
||||
}
|
||||
|
||||
// Fallback: try cached GitHub releases
|
||||
if let Some(releases) = ApiClient::instance().get_cached_github_releases("brave") {
|
||||
if let Some(found) = releases.iter().find(|r| r.tag_name == version) {
|
||||
let normalized = found.name.trim_start().to_ascii_lowercase();
|
||||
return !normalized.starts_with("release");
|
||||
}
|
||||
}
|
||||
|
||||
// Last resort: when no name available, treat as nightly (non-Release)
|
||||
true
|
||||
}
|
||||
"firefox" | "firefox-developer" => {
|
||||
"firefox-developer" => {
|
||||
// For Firefox Developer Edition, always treat as nightly/prerelease
|
||||
// This ensures consistent behavior regardless of cache state or API response parsing
|
||||
true
|
||||
}
|
||||
"firefox" => {
|
||||
// For Firefox, use the category from the API response to determine stability
|
||||
// This will be handled in the API parsing, so this fallback is for cached versions
|
||||
is_nightly_version(version)
|
||||
@@ -259,6 +297,10 @@ pub fn is_browser_version_nightly(
|
||||
// Chromium builds are generally stable snapshots
|
||||
false
|
||||
}
|
||||
"camoufox" => {
|
||||
// For Camoufox, beta versions are actually the stable releases
|
||||
false
|
||||
}
|
||||
_ => {
|
||||
// Default fallback
|
||||
is_nightly_version(version)
|
||||
@@ -291,7 +333,7 @@ pub struct BrowserRelease {
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
struct CachedVersionData {
|
||||
versions: Vec<String>,
|
||||
releases: Vec<BrowserRelease>,
|
||||
timestamp: u64,
|
||||
}
|
||||
|
||||
@@ -312,8 +354,13 @@ pub struct ApiClient {
|
||||
|
||||
impl ApiClient {
|
||||
pub fn new() -> Self {
|
||||
let client = Client::builder()
|
||||
.timeout(std::time::Duration::from_secs(30))
|
||||
.build()
|
||||
.unwrap_or_else(|_| Client::new());
|
||||
|
||||
Self {
|
||||
client: Client::new(),
|
||||
client,
|
||||
firefox_api_base: "https://product-details.mozilla.org/1.0".to_string(),
|
||||
firefox_dev_api_base: "https://product-details.mozilla.org/1.0".to_string(),
|
||||
github_api_base: "https://api.github.com".to_string(),
|
||||
@@ -323,6 +370,69 @@ impl ApiClient {
|
||||
}
|
||||
}
|
||||
|
||||
async fn fetch_github_releases_multiple_pages(
|
||||
&self,
|
||||
base_releases_url: &str,
|
||||
) -> Result<Vec<GithubRelease>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let mut all_releases: Vec<GithubRelease> = Vec::new();
|
||||
|
||||
// For now, only fetch 1 page
|
||||
for page in 1..=1 {
|
||||
let url = format!("{base_releases_url}?per_page=100&page={page}");
|
||||
let response = self
|
||||
.client
|
||||
.get(&url)
|
||||
.header(
|
||||
"User-Agent",
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36",
|
||||
)
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
// If the first page fails, propagate error; otherwise stop pagination
|
||||
if page == 1 {
|
||||
return Err(
|
||||
format!(
|
||||
"GitHub API returned status for page {}: {}",
|
||||
page,
|
||||
response.status()
|
||||
)
|
||||
.into(),
|
||||
);
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
let text = response.text().await?;
|
||||
let mut page_releases: Vec<GithubRelease> = serde_json::from_str(&text).map_err(|e| {
|
||||
eprintln!("Failed to parse GitHub API response (page {page}): {e}");
|
||||
eprintln!(
|
||||
"Response text (first 500 chars): {}",
|
||||
if text.len() > 500 {
|
||||
&text[..500]
|
||||
} else {
|
||||
&text
|
||||
}
|
||||
);
|
||||
format!("Failed to parse GitHub API response: {e}")
|
||||
})?;
|
||||
|
||||
if page_releases.is_empty() {
|
||||
break;
|
||||
}
|
||||
|
||||
all_releases.append(&mut page_releases);
|
||||
}
|
||||
|
||||
Ok(all_releases)
|
||||
}
|
||||
|
||||
pub fn instance() -> &'static ApiClient {
|
||||
&API_CLIENT
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn new_with_base_urls(
|
||||
firefox_api_base: String,
|
||||
@@ -366,7 +476,7 @@ impl ApiClient {
|
||||
current_time - timestamp < cache_duration
|
||||
}
|
||||
|
||||
pub fn load_cached_versions(&self, browser: &str) -> Option<Vec<String>> {
|
||||
pub fn load_cached_versions(&self, browser: &str) -> Option<Vec<BrowserRelease>> {
|
||||
let cache_dir = Self::get_cache_dir().ok()?;
|
||||
let cache_file = cache_dir.join(format!("{browser}_versions.json"));
|
||||
|
||||
@@ -375,11 +485,27 @@ impl ApiClient {
|
||||
}
|
||||
|
||||
let content = fs::read_to_string(&cache_file).ok()?;
|
||||
let cached_data: CachedVersionData = serde_json::from_str(&content).ok()?;
|
||||
if let Ok(cached) = serde_json::from_str::<CachedVersionData>(&content) {
|
||||
// Always return cached releases regardless of age - they're always valid
|
||||
println!("Using cached versions for {browser}");
|
||||
return Some(cached.releases);
|
||||
}
|
||||
|
||||
// Always return cached versions regardless of age - they're always valid
|
||||
println!("Using cached versions for {browser}");
|
||||
Some(cached_data.versions)
|
||||
// Backward compatibility: legacy caches stored just an array of version strings
|
||||
if let Ok(legacy_versions) = serde_json::from_str::<Vec<String>>(&content) {
|
||||
println!("Using legacy cached versions for {browser}; upgrading in-memory");
|
||||
let releases: Vec<BrowserRelease> = legacy_versions
|
||||
.into_iter()
|
||||
.map(|version| BrowserRelease {
|
||||
is_prerelease: is_browser_version_nightly(browser, &version, None),
|
||||
version,
|
||||
date: "".to_string(),
|
||||
})
|
||||
.collect();
|
||||
return Some(releases);
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub fn is_cache_expired(&self, browser: &str) -> bool {
|
||||
@@ -410,19 +536,19 @@ impl ApiClient {
|
||||
pub fn save_cached_versions(
|
||||
&self,
|
||||
browser: &str,
|
||||
versions: &[String],
|
||||
releases: &[BrowserRelease],
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let cache_dir = Self::get_cache_dir()?;
|
||||
let cache_file = cache_dir.join(format!("{browser}_versions.json"));
|
||||
|
||||
let cached_data = CachedVersionData {
|
||||
versions: versions.to_vec(),
|
||||
releases: releases.to_vec(),
|
||||
timestamp: Self::get_current_timestamp(),
|
||||
};
|
||||
|
||||
let content = serde_json::to_string_pretty(&cached_data)?;
|
||||
fs::write(&cache_file, content)?;
|
||||
println!("Cached {} versions for {}", versions.len(), browser);
|
||||
println!("Cached {} versions for {}", releases.len(), browser);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -442,6 +568,11 @@ impl ApiClient {
|
||||
Some(cached_data.releases)
|
||||
}
|
||||
|
||||
/// Public accessor for cached GitHub releases (used by other modules for classification)
|
||||
pub fn get_cached_github_releases(&self, browser: &str) -> Option<Vec<GithubRelease>> {
|
||||
self.load_cached_github_releases(browser)
|
||||
}
|
||||
|
||||
fn save_cached_github_releases(
|
||||
&self,
|
||||
browser: &str,
|
||||
@@ -467,19 +598,8 @@ impl ApiClient {
|
||||
) -> Result<Vec<BrowserRelease>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Check cache first (unless bypassing)
|
||||
if !no_caching {
|
||||
if let Some(cached_versions) = self.load_cached_versions("firefox") {
|
||||
return Ok(
|
||||
cached_versions
|
||||
.into_iter()
|
||||
.map(|version| {
|
||||
BrowserRelease {
|
||||
version: version.clone(),
|
||||
date: "".to_string(), // Cache doesn't store dates
|
||||
is_prerelease: is_browser_version_nightly("firefox", &version, None),
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
);
|
||||
if let Some(cached_releases) = self.load_cached_versions("firefox") {
|
||||
return Ok(cached_releases);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -525,12 +645,9 @@ impl ApiClient {
|
||||
version_b.cmp(&version_a)
|
||||
});
|
||||
|
||||
// Extract versions for caching
|
||||
let versions: Vec<String> = releases.iter().map(|r| r.version.clone()).collect();
|
||||
|
||||
// Cache the results (unless bypassing cache)
|
||||
if !no_caching {
|
||||
if let Err(e) = self.save_cached_versions("firefox", &versions) {
|
||||
if let Err(e) = self.save_cached_versions("firefox", &releases) {
|
||||
eprintln!("Failed to cache Firefox versions: {e}");
|
||||
}
|
||||
}
|
||||
@@ -544,19 +661,8 @@ impl ApiClient {
|
||||
) -> Result<Vec<BrowserRelease>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Check cache first (unless bypassing)
|
||||
if !no_caching {
|
||||
if let Some(cached_versions) = self.load_cached_versions("firefox-developer") {
|
||||
return Ok(
|
||||
cached_versions
|
||||
.into_iter()
|
||||
.map(|version| {
|
||||
BrowserRelease {
|
||||
version: version.clone(),
|
||||
date: "".to_string(), // Cache doesn't store dates
|
||||
is_prerelease: is_browser_version_nightly("firefox-developer", &version, None),
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
);
|
||||
if let Some(cached_releases) = self.load_cached_versions("firefox-developer") {
|
||||
return Ok(cached_releases);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -565,19 +671,19 @@ impl ApiClient {
|
||||
|
||||
let response = self
|
||||
.client
|
||||
.get(url)
|
||||
.get(&url)
|
||||
.header("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36")
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
return Err(
|
||||
format!(
|
||||
"Failed to fetch Firefox Developer Edition versions: {}",
|
||||
response.status()
|
||||
)
|
||||
.into(),
|
||||
let error_msg = format!(
|
||||
"Failed to fetch Firefox Developer Edition versions: {} - URL: {}",
|
||||
response.status(),
|
||||
url
|
||||
);
|
||||
eprintln!("{error_msg}");
|
||||
return Err(error_msg.into());
|
||||
}
|
||||
|
||||
let firefox_response: FirefoxApiResponse = response.json().await?;
|
||||
@@ -608,12 +714,9 @@ impl ApiClient {
|
||||
version_b.cmp(&version_a)
|
||||
});
|
||||
|
||||
// Extract versions for caching
|
||||
let versions: Vec<String> = releases.iter().map(|r| r.version.clone()).collect();
|
||||
|
||||
// Cache the results (unless bypassing cache)
|
||||
if !no_caching {
|
||||
if let Err(e) = self.save_cached_versions("firefox-developer", &versions) {
|
||||
if let Err(e) = self.save_cached_versions("firefox-developer", &releases) {
|
||||
eprintln!("Failed to cache Firefox Developer versions: {e}");
|
||||
}
|
||||
}
|
||||
@@ -632,19 +735,12 @@ impl ApiClient {
|
||||
}
|
||||
}
|
||||
|
||||
println!("Fetching Mullvad releases from GitHub API...");
|
||||
let url = format!(
|
||||
"{}/repos/mullvad/mullvad-browser/releases?per_page=100",
|
||||
println!("Fetching Mullvad releases from GitHub API");
|
||||
let base_url = format!(
|
||||
"{}/repos/mullvad/mullvad-browser/releases",
|
||||
self.github_api_base
|
||||
);
|
||||
let releases = self
|
||||
.client
|
||||
.get(url)
|
||||
.header("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36")
|
||||
.send()
|
||||
.await?
|
||||
.json::<Vec<GithubRelease>>()
|
||||
.await?;
|
||||
let releases = self.fetch_github_releases_multiple_pages(&base_url).await?;
|
||||
|
||||
let mut releases: Vec<GithubRelease> = releases
|
||||
.into_iter()
|
||||
@@ -678,19 +774,13 @@ impl ApiClient {
|
||||
}
|
||||
}
|
||||
|
||||
println!("Fetching Zen releases from GitHub API...");
|
||||
let url = format!(
|
||||
"{}/repos/zen-browser/desktop/releases?per_page=100",
|
||||
println!("Fetching Zen releases from GitHub API");
|
||||
let base_url = format!(
|
||||
"{}/repos/zen-browser/desktop/releases",
|
||||
self.github_api_base
|
||||
);
|
||||
let mut releases = self
|
||||
.client
|
||||
.get(url)
|
||||
.header("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36")
|
||||
.send()
|
||||
.await?
|
||||
.json::<Vec<GithubRelease>>()
|
||||
.await?;
|
||||
let mut releases: Vec<GithubRelease> =
|
||||
self.fetch_github_releases_multiple_pages(&base_url).await?;
|
||||
|
||||
// Check for twilight updates and mark alpha releases
|
||||
for release in &mut releases {
|
||||
@@ -735,31 +825,25 @@ impl ApiClient {
|
||||
}
|
||||
}
|
||||
|
||||
println!("Fetching Brave releases from GitHub API...");
|
||||
let url = format!(
|
||||
"{}/repos/brave/brave-browser/releases?per_page=100",
|
||||
println!("Fetching Brave releases from GitHub API");
|
||||
let base_url = format!(
|
||||
"{}/repos/brave/brave-browser/releases",
|
||||
self.github_api_base
|
||||
);
|
||||
let releases = self
|
||||
.client
|
||||
.get(url)
|
||||
.header("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36")
|
||||
.send()
|
||||
.await?
|
||||
.json::<Vec<GithubRelease>>()
|
||||
.await?;
|
||||
let releases: Vec<GithubRelease> = self.fetch_github_releases_multiple_pages(&base_url).await?;
|
||||
|
||||
// Get platform info to filter appropriate releases
|
||||
let (os, arch) = Self::get_platform_info();
|
||||
let (os, _) = Self::get_platform_info();
|
||||
|
||||
// Filter releases that have assets compatible with the current platform
|
||||
let mut filtered_releases: Vec<GithubRelease> = releases
|
||||
.into_iter()
|
||||
.filter_map(|mut release| {
|
||||
// Check if this release has compatible assets for the current platform
|
||||
let has_compatible_asset = Self::has_compatible_brave_asset(&release.assets, &os, &arch);
|
||||
let has_compatible_asset = Self::has_compatible_brave_asset(&release.assets, &os);
|
||||
|
||||
if has_compatible_asset {
|
||||
println!("release.name: {:?}", release.name);
|
||||
// Use the centralized nightly detection function
|
||||
release.is_nightly =
|
||||
is_browser_version_nightly("brave", &release.tag_name, Some(&release.name));
|
||||
@@ -773,22 +857,40 @@ impl ApiClient {
|
||||
// Sort releases using the new version sorting system
|
||||
sort_github_releases(&mut filtered_releases);
|
||||
|
||||
// Cache the results (unless bypassing cache)
|
||||
if !no_caching {
|
||||
if let Err(e) = self.save_cached_github_releases("brave", &filtered_releases) {
|
||||
eprintln!("Failed to cache Brave releases: {e}");
|
||||
}
|
||||
if let Err(e) = self.save_cached_github_releases("brave", &filtered_releases) {
|
||||
eprintln!("Failed to cache Brave releases: {e}");
|
||||
}
|
||||
|
||||
Ok(filtered_releases)
|
||||
}
|
||||
|
||||
/// Check if a Brave release has compatible assets for the given platform and architecture
|
||||
fn has_compatible_brave_asset(
|
||||
fn has_compatible_camoufox_asset(
|
||||
&self,
|
||||
assets: &[crate::browser::GithubAsset],
|
||||
os: &str,
|
||||
arch: &str,
|
||||
) -> bool {
|
||||
let (os_name, arch_name) = match (os, arch) {
|
||||
("windows", "x64") => ("win", "x86_64"),
|
||||
("windows", "arm64") => ("win", "arm64"),
|
||||
("linux", "x64") => ("lin", "x86_64"),
|
||||
("linux", "arm64") => ("lin", "arm64"),
|
||||
("macos", "x64") => ("mac", "x86_64"),
|
||||
("macos", "arm64") => ("mac", "arm64"),
|
||||
_ => return false,
|
||||
};
|
||||
|
||||
// Look for assets matching the pattern: camoufox-{version}-{release}-{os}.{arch}.zip
|
||||
assets.iter().any(|asset| {
|
||||
let name = asset.name.to_lowercase();
|
||||
name.starts_with("camoufox-")
|
||||
&& name.contains(&format!("-{os_name}.{arch_name}.zip"))
|
||||
&& name.ends_with(".zip")
|
||||
})
|
||||
}
|
||||
|
||||
fn has_compatible_brave_asset(assets: &[crate::browser::GithubAsset], os: &str) -> bool {
|
||||
match os {
|
||||
"windows" => {
|
||||
// For Windows, look for standalone setup EXE (not the auto-updater one)
|
||||
@@ -805,12 +907,9 @@ impl ApiClient {
|
||||
}) || assets.iter().any(|asset| asset.name.ends_with(".dmg"))
|
||||
}
|
||||
"linux" => {
|
||||
// For Linux, be strict about architecture matching - only allow assets that explicitly match the current architecture
|
||||
let arch_pattern = if arch == "arm64" { "arm64" } else { "amd64" };
|
||||
|
||||
if assets.iter().any(|asset| {
|
||||
let name = asset.name.to_lowercase();
|
||||
name.contains("linux") && name.contains(arch_pattern) && name.ends_with(".zip")
|
||||
name.contains("lin")
|
||||
}) {
|
||||
return true;
|
||||
}
|
||||
@@ -874,19 +973,8 @@ impl ApiClient {
|
||||
) -> Result<Vec<BrowserRelease>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Check cache first (unless bypassing)
|
||||
if !no_caching {
|
||||
if let Some(cached_versions) = self.load_cached_versions("chromium") {
|
||||
return Ok(
|
||||
cached_versions
|
||||
.into_iter()
|
||||
.map(|version| {
|
||||
BrowserRelease {
|
||||
version: version.clone(),
|
||||
date: "".to_string(), // Cache doesn't store dates
|
||||
is_prerelease: false, // Chromium versions are generally stable builds
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
);
|
||||
if let Some(cached_releases) = self.load_cached_versions("chromium") {
|
||||
return Ok(cached_releases);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -905,23 +993,112 @@ impl ApiClient {
|
||||
}
|
||||
}
|
||||
|
||||
// Convert to BrowserRelease objects
|
||||
let releases: Vec<BrowserRelease> = versions
|
||||
.into_iter()
|
||||
.map(|version| BrowserRelease {
|
||||
version: version.clone(),
|
||||
date: "".to_string(),
|
||||
is_prerelease: false,
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Cache the results (unless bypassing cache)
|
||||
if !no_caching {
|
||||
if let Err(e) = self.save_cached_versions("chromium", &versions) {
|
||||
if let Err(e) = self.save_cached_versions("chromium", &releases) {
|
||||
eprintln!("Failed to cache Chromium versions: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
Ok(
|
||||
versions
|
||||
.into_iter()
|
||||
.map(|version| BrowserRelease {
|
||||
version: version.clone(),
|
||||
date: "".to_string(),
|
||||
is_prerelease: false,
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
Ok(releases)
|
||||
}
|
||||
|
||||
pub async fn fetch_camoufox_releases_with_caching(
|
||||
&self,
|
||||
no_caching: bool,
|
||||
) -> Result<Vec<GithubRelease>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Check cache first (unless bypassing)
|
||||
if !no_caching {
|
||||
if let Some(cached_releases) = self.load_cached_github_releases("camoufox") {
|
||||
println!(
|
||||
"Using cached Camoufox releases, count: {}",
|
||||
cached_releases.len()
|
||||
);
|
||||
return Ok(cached_releases);
|
||||
}
|
||||
}
|
||||
|
||||
println!("Fetching Camoufox releases from GitHub API");
|
||||
let base_url = format!("{}/repos/daijro/camoufox/releases", self.github_api_base);
|
||||
let releases: Vec<GithubRelease> = self.fetch_github_releases_multiple_pages(&base_url).await?;
|
||||
|
||||
println!(
|
||||
"Fetched {} total Camoufox releases from GitHub",
|
||||
releases.len()
|
||||
);
|
||||
|
||||
// Get platform info to filter appropriate releases
|
||||
let (os, arch) = Self::get_platform_info();
|
||||
println!("Filtering for platform: {os}/{arch}");
|
||||
|
||||
// Filter releases that have assets compatible with the current platform
|
||||
let mut compatible_releases: Vec<GithubRelease> = releases
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.filter_map(|(i, release)| {
|
||||
let has_compatible = self.has_compatible_camoufox_asset(&release.assets, &os, &arch);
|
||||
if !has_compatible {
|
||||
println!(
|
||||
"Release {} ({}) has no compatible assets for {}/{}",
|
||||
i, release.tag_name, os, arch
|
||||
);
|
||||
println!(
|
||||
" Available assets: {:?}",
|
||||
release.assets.iter().map(|a| &a.name).collect::<Vec<_>>()
|
||||
);
|
||||
}
|
||||
if has_compatible {
|
||||
Some(release)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
println!(
|
||||
"After platform filtering: {} compatible releases",
|
||||
compatible_releases.len()
|
||||
);
|
||||
|
||||
// Sort by version (latest first) with debugging
|
||||
println!(
|
||||
"Before sorting: {:?}",
|
||||
compatible_releases
|
||||
.iter()
|
||||
.map(|r| &r.tag_name)
|
||||
.take(10)
|
||||
.collect::<Vec<_>>()
|
||||
);
|
||||
sort_github_releases(&mut compatible_releases);
|
||||
println!(
|
||||
"After sorting: {:?}",
|
||||
compatible_releases
|
||||
.iter()
|
||||
.map(|r| &r.tag_name)
|
||||
.take(10)
|
||||
.collect::<Vec<_>>()
|
||||
);
|
||||
|
||||
// Cache the results (unless bypassing cache)
|
||||
if !no_caching {
|
||||
if let Err(e) = self.save_cached_github_releases("camoufox", &compatible_releases) {
|
||||
eprintln!("Failed to cache Camoufox releases: {e}");
|
||||
} else {
|
||||
println!("Cached {} Camoufox releases", compatible_releases.len());
|
||||
}
|
||||
}
|
||||
|
||||
Ok(compatible_releases)
|
||||
}
|
||||
|
||||
pub async fn fetch_tor_releases_with_caching(
|
||||
@@ -930,19 +1107,8 @@ impl ApiClient {
|
||||
) -> Result<Vec<BrowserRelease>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Check cache first (unless bypassing)
|
||||
if !no_caching {
|
||||
if let Some(cached_versions) = self.load_cached_versions("tor-browser") {
|
||||
return Ok(
|
||||
cached_versions
|
||||
.into_iter()
|
||||
.map(|version| {
|
||||
BrowserRelease {
|
||||
version: version.clone(),
|
||||
date: "".to_string(), // Cache doesn't store dates
|
||||
is_prerelease: is_browser_version_nightly("tor-browser", &version, None),
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
);
|
||||
if let Some(cached_releases) = self.load_cached_versions("tor-browser") {
|
||||
return Ok(cached_releases);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -998,25 +1164,24 @@ impl ApiClient {
|
||||
tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
|
||||
}
|
||||
|
||||
// Convert to BrowserRelease objects
|
||||
let releases: Vec<BrowserRelease> = version_strings
|
||||
.into_iter()
|
||||
.map(|version| BrowserRelease {
|
||||
version: version.clone(),
|
||||
date: "".to_string(), // TOR archive doesn't provide structured dates
|
||||
is_prerelease: false, // Assume all archived versions are stable
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Cache the results (unless bypassing cache)
|
||||
if !no_caching {
|
||||
if let Err(e) = self.save_cached_versions("tor-browser", &version_strings) {
|
||||
if let Err(e) = self.save_cached_versions("tor-browser", &releases) {
|
||||
eprintln!("Failed to cache TOR versions: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
Ok(
|
||||
version_strings
|
||||
.into_iter()
|
||||
.map(|version| {
|
||||
BrowserRelease {
|
||||
version: version.clone(),
|
||||
date: "".to_string(), // TOR archive doesn't provide structured dates
|
||||
is_prerelease: false, // Assume all archived versions are stable
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
Ok(releases)
|
||||
}
|
||||
|
||||
async fn check_tor_version_has_macos(
|
||||
@@ -1113,6 +1278,11 @@ impl ApiClient {
|
||||
}
|
||||
}
|
||||
|
||||
// Global singleton instance
|
||||
lazy_static::lazy_static! {
|
||||
static ref API_CLIENT: ApiClient = ApiClient::new();
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
@@ -1443,14 +1613,25 @@ mod tests {
|
||||
let mock_response = r#"[
|
||||
{
|
||||
"tag_name": "v1.81.9",
|
||||
"name": "Brave Release 1.81.9",
|
||||
"name": "Release v1.81.9 (Chromium 137.0.7151.104)",
|
||||
"prerelease": false,
|
||||
"published_at": "2024-01-15T10:00:00Z",
|
||||
"draft": false,
|
||||
"assets": [
|
||||
{
|
||||
"name": "brave-v1.81.9-universal.dmg",
|
||||
"browser_download_url": "https://example.com/brave-1.81.9-universal.dmg",
|
||||
"size": 200000000
|
||||
},
|
||||
{
|
||||
"name": "brave-browser-1.81.9-linux-amd64.zip",
|
||||
"browser_download_url": "https://example.com/brave-1.81.9-linux-amd64.zip",
|
||||
"size": 180000000
|
||||
},
|
||||
{
|
||||
"name": "BraveBrowserStandaloneSetup.exe",
|
||||
"browser_download_url": "https://example.com/brave-1.81.9-setup.exe",
|
||||
"size": 150000000
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1476,7 +1657,7 @@ mod tests {
|
||||
let releases = result.unwrap();
|
||||
assert!(!releases.is_empty());
|
||||
assert_eq!(releases[0].tag_name, "v1.81.9");
|
||||
assert!(releases[0].is_nightly);
|
||||
assert!(!releases[0].is_nightly); // "Release v1.81.9 (Chromium 137.0.7151.104)" starts with "Release" so it should be stable
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
@@ -1726,4 +1907,193 @@ mod tests {
|
||||
let result = client.fetch_zen_releases_with_caching(true).await;
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_mullvad_pagination_two_pages() {
|
||||
let server = setup_mock_server().await;
|
||||
let client = create_test_client(&server);
|
||||
|
||||
// Page 1 response with Link: rel="next" header
|
||||
let mock_page1 = r#"[
|
||||
{
|
||||
"tag_name": "100.0",
|
||||
"name": "Mullvad Browser 100.0",
|
||||
"prerelease": false,
|
||||
"published_at": "2024-07-01T00:00:00Z",
|
||||
"assets": [
|
||||
{ "name": "mullvad-browser-macos-100.0.dmg", "browser_download_url": "https://example.com/100.0.dmg", "size": 1 }
|
||||
]
|
||||
}
|
||||
]"#;
|
||||
|
||||
// Page 2 response
|
||||
let mock_page2 = r#"[
|
||||
{
|
||||
"tag_name": "99.0",
|
||||
"name": "Mullvad Browser 99.0",
|
||||
"prerelease": false,
|
||||
"published_at": "2024-06-01T00:00:00Z",
|
||||
"assets": [
|
||||
{ "name": "mullvad-browser-macos-99.0.dmg", "browser_download_url": "https://example.com/99.0.dmg", "size": 1 }
|
||||
]
|
||||
}
|
||||
]"#;
|
||||
|
||||
// Mock page 1
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/repos/mullvad/mullvad-browser/releases"))
|
||||
.and(query_param("per_page", "100"))
|
||||
.and(query_param("page", "1"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(mock_page1)
|
||||
.insert_header("content-type", "application/json")
|
||||
.insert_header(
|
||||
"link",
|
||||
format!(
|
||||
"<{}?per_page=100&page=2>; rel=\"next\", <{}?per_page=100&page=2>; rel=\"last\"",
|
||||
server.uri().to_string() + "/repos/mullvad/mullvad-browser/releases",
|
||||
server.uri().to_string() + "/repos/mullvad/mullvad-browser/releases"
|
||||
),
|
||||
),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
// Mock page 2
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/repos/mullvad/mullvad-browser/releases"))
|
||||
.and(query_param("per_page", "100"))
|
||||
.and(query_param("page", "2"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(mock_page2)
|
||||
.insert_header("content-type", "application/json"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let result = client.fetch_mullvad_releases_with_caching(true).await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let releases = result.unwrap();
|
||||
// We currently only fetch 1 page intentionally; ensure we at least got page 1
|
||||
assert_eq!(
|
||||
releases.len(),
|
||||
1,
|
||||
"Should fetch only the first page of results"
|
||||
);
|
||||
assert_eq!(releases[0].tag_name, "100.0");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_camoufox_beta_version_parsing() {
|
||||
// Test specific Camoufox beta versions that are causing issues
|
||||
let v22 = VersionComponent::parse("135.0.5beta22");
|
||||
let v24 = VersionComponent::parse("135.0.5beta24");
|
||||
|
||||
println!("v22: {v22:?}");
|
||||
println!("v24: {v24:?}");
|
||||
|
||||
// v24 should be greater than v22
|
||||
assert!(
|
||||
v24 > v22,
|
||||
"135.0.5beta24 should be greater than 135.0.5beta22"
|
||||
);
|
||||
|
||||
// Test other beta version combinations
|
||||
let v1 = VersionComponent::parse("135.0.5beta1");
|
||||
let v2 = VersionComponent::parse("135.0.5beta2");
|
||||
assert!(v2 > v1, "135.0.5beta2 should be greater than 135.0.5beta1");
|
||||
|
||||
// Test sorting of multiple versions
|
||||
let mut versions = vec![
|
||||
"135.0.5beta22".to_string(),
|
||||
"135.0.5beta24".to_string(),
|
||||
"135.0.5beta23".to_string(),
|
||||
"135.0.5beta21".to_string(),
|
||||
];
|
||||
|
||||
sort_versions(&mut versions);
|
||||
|
||||
println!("Sorted versions: {versions:?}");
|
||||
|
||||
// Should be sorted from newest to oldest
|
||||
assert_eq!(versions[0], "135.0.5beta24");
|
||||
assert_eq!(versions[1], "135.0.5beta23");
|
||||
assert_eq!(versions[2], "135.0.5beta22");
|
||||
assert_eq!(versions[3], "135.0.5beta21");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_camoufox_user_reported_versions() {
|
||||
// Test the exact versions reported by the user: 135.0.1beta24 vs 135.0beta22
|
||||
let v22 = VersionComponent::parse("135.0beta22");
|
||||
let v24 = VersionComponent::parse("135.0.1beta24");
|
||||
|
||||
println!("User reported v22: {v22:?}");
|
||||
println!("User reported v24: {v24:?}");
|
||||
|
||||
// 135.0.1beta24 should be greater than 135.0beta22 (newer patch version)
|
||||
assert!(
|
||||
v24 > v22,
|
||||
"135.0.1beta24 should be greater than 135.0beta22, but got: v24={v24:?} vs v22={v22:?}"
|
||||
);
|
||||
|
||||
// Test sorting of the exact user-reported versions
|
||||
let mut versions = vec!["135.0beta22".to_string(), "135.0.1beta24".to_string()];
|
||||
|
||||
sort_versions(&mut versions);
|
||||
|
||||
println!("User reported sorted versions: {versions:?}");
|
||||
|
||||
// Should be sorted from newest to oldest
|
||||
assert_eq!(
|
||||
versions[0], "135.0.1beta24",
|
||||
"135.0.1beta24 should be first (newest)"
|
||||
);
|
||||
assert_eq!(
|
||||
versions[1], "135.0beta22",
|
||||
"135.0beta22 should be second (older)"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_camoufox_version_classification() {
|
||||
// Test that Camoufox beta versions are now correctly classified as stable (not nightly)
|
||||
assert!(
|
||||
!is_browser_version_nightly("camoufox", "135.0beta22", None),
|
||||
"135.0beta22 should be classified as stable for Camoufox"
|
||||
);
|
||||
assert!(
|
||||
!is_browser_version_nightly("camoufox", "135.0.1beta24", None),
|
||||
"135.0.1beta24 should be classified as stable for Camoufox"
|
||||
);
|
||||
|
||||
// Test with release names too - beta releases should be stable
|
||||
assert!(
|
||||
!is_browser_version_nightly("camoufox", "135.0beta22", Some("Release Beta 22")),
|
||||
"Release with 'Beta' in name should be classified as stable for Camoufox"
|
||||
);
|
||||
|
||||
// Test that stable versions are not classified as nightly
|
||||
assert!(
|
||||
!is_browser_version_nightly("camoufox", "135.0", None),
|
||||
"135.0 should be classified as stable"
|
||||
);
|
||||
assert!(
|
||||
!is_browser_version_nightly("camoufox", "135.0.1", None),
|
||||
"135.0.1 should be classified as stable"
|
||||
);
|
||||
|
||||
// Test alpha and RC versions are still considered nightly
|
||||
assert!(
|
||||
!is_browser_version_nightly("camoufox", "136.0alpha1", None),
|
||||
"136.0alpha1 should not be classified as nightly/prerelease"
|
||||
);
|
||||
assert!(
|
||||
!is_browser_version_nightly("camoufox", "136.0rc1", None),
|
||||
"136.0rc1 should not be classified as nightly/prerelease"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,872 @@
|
||||
use crate::camoufox_manager::CamoufoxConfig;
|
||||
use crate::group_manager::GROUP_MANAGER;
|
||||
use crate::profile::manager::ProfileManager;
|
||||
use crate::proxy_manager::PROXY_MANAGER;
|
||||
use crate::tag_manager::TAG_MANAGER;
|
||||
use axum::{
|
||||
extract::{Path, Query, State},
|
||||
http::{HeaderMap, StatusCode},
|
||||
middleware::{self, Next},
|
||||
response::{Json, Response},
|
||||
routing::{delete, get, post, put},
|
||||
Router,
|
||||
};
|
||||
use lazy_static::lazy_static;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
use tauri::Emitter;
|
||||
use tokio::net::TcpListener;
|
||||
use tokio::sync::{mpsc, Mutex};
|
||||
use tower_http::cors::CorsLayer;
|
||||
|
||||
// API Types
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct ApiProfile {
|
||||
pub id: String,
|
||||
pub name: String,
|
||||
pub browser: String,
|
||||
pub version: String,
|
||||
pub proxy_id: Option<String>,
|
||||
pub process_id: Option<u32>,
|
||||
pub last_launch: Option<u64>,
|
||||
pub release_type: String,
|
||||
pub camoufox_config: Option<serde_json::Value>,
|
||||
pub group_id: Option<String>,
|
||||
pub tags: Vec<String>,
|
||||
pub is_running: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct ApiProfilesResponse {
|
||||
pub profiles: Vec<ApiProfile>,
|
||||
pub total: usize,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct ApiProfileResponse {
|
||||
pub profile: ApiProfile,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct CreateProfileRequest {
|
||||
pub name: String,
|
||||
pub browser: String,
|
||||
pub version: String,
|
||||
pub proxy_id: Option<String>,
|
||||
pub release_type: Option<String>,
|
||||
pub camoufox_config: Option<serde_json::Value>,
|
||||
pub group_id: Option<String>,
|
||||
pub tags: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct UpdateProfileRequest {
|
||||
pub name: Option<String>,
|
||||
pub browser: Option<String>,
|
||||
pub version: Option<String>,
|
||||
pub proxy_id: Option<String>,
|
||||
pub release_type: Option<String>,
|
||||
pub camoufox_config: Option<serde_json::Value>,
|
||||
pub group_id: Option<String>,
|
||||
pub tags: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct ApiServerState {
|
||||
app_handle: tauri::AppHandle,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
struct ApiGroupResponse {
|
||||
id: String,
|
||||
name: String,
|
||||
profile_count: usize,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct CreateGroupRequest {
|
||||
name: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct UpdateGroupRequest {
|
||||
name: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
struct ApiProxyResponse {
|
||||
id: String,
|
||||
name: String,
|
||||
proxy_settings: serde_json::Value,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct CreateProxyRequest {
|
||||
name: String,
|
||||
proxy_settings: serde_json::Value,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct UpdateProxyRequest {
|
||||
name: Option<String>,
|
||||
proxy_settings: Option<serde_json::Value>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct DownloadBrowserRequest {
|
||||
browser: String,
|
||||
version: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct DownloadBrowserResponse {
|
||||
browser: String,
|
||||
version: String,
|
||||
status: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ToastPayload {
|
||||
pub message: String,
|
||||
pub variant: String,
|
||||
pub title: String,
|
||||
pub description: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct RunProfileResponse {
|
||||
profile_id: String,
|
||||
remote_debugging_port: u16,
|
||||
headless: bool,
|
||||
}
|
||||
|
||||
pub struct ApiServer {
|
||||
port: Option<u16>,
|
||||
shutdown_tx: Option<mpsc::Sender<()>>,
|
||||
task_handle: Option<tokio::task::JoinHandle<()>>,
|
||||
}
|
||||
|
||||
impl ApiServer {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
port: None,
|
||||
shutdown_tx: None,
|
||||
task_handle: None,
|
||||
}
|
||||
}
|
||||
|
||||
fn get_port(&self) -> Option<u16> {
|
||||
self.port
|
||||
}
|
||||
|
||||
async fn start(
|
||||
&mut self,
|
||||
app_handle: tauri::AppHandle,
|
||||
preferred_port: u16,
|
||||
) -> Result<u16, String> {
|
||||
// Stop existing server if running
|
||||
self.stop().await.ok();
|
||||
|
||||
let (shutdown_tx, mut shutdown_rx) = mpsc::channel(1);
|
||||
let state = ApiServerState {
|
||||
app_handle: app_handle.clone(),
|
||||
};
|
||||
|
||||
// Try preferred port first, then random port
|
||||
let listener = match TcpListener::bind(format!("127.0.0.1:{preferred_port}")).await {
|
||||
Ok(listener) => listener,
|
||||
Err(_) => {
|
||||
// Port conflict, try random port
|
||||
let random_port = rand::random::<u16>().saturating_add(10000);
|
||||
match TcpListener::bind(format!("127.0.0.1:{random_port}")).await {
|
||||
Ok(listener) => {
|
||||
let _ = app_handle.emit(
|
||||
"api-port-conflict",
|
||||
format!("API server using fallback port {random_port}"),
|
||||
);
|
||||
listener
|
||||
}
|
||||
Err(e) => return Err(format!("Failed to bind to any port: {e}")),
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let actual_port = listener
|
||||
.local_addr()
|
||||
.map_err(|e| format!("Failed to get local address: {e}"))?
|
||||
.port();
|
||||
|
||||
// Create router with CORS, authentication, and versioning
|
||||
let v1_routes = Router::new()
|
||||
.route("/profiles", get(get_profiles))
|
||||
.route("/profiles", post(create_profile))
|
||||
.route("/profiles/{id}", get(get_profile))
|
||||
.route("/profiles/{id}", put(update_profile))
|
||||
.route("/profiles/{id}", delete(delete_profile))
|
||||
.route("/profiles/{id}/run", post(run_profile))
|
||||
.route("/groups", get(get_groups).post(create_group))
|
||||
.route(
|
||||
"/groups/{id}",
|
||||
get(get_group).put(update_group).delete(delete_group),
|
||||
)
|
||||
.route("/tags", get(get_tags))
|
||||
.route("/proxies", get(get_proxies).post(create_proxy))
|
||||
.route(
|
||||
"/proxies/{id}",
|
||||
get(get_proxy).put(update_proxy).delete(delete_proxy),
|
||||
)
|
||||
.route("/browsers/download", post(download_browser_api))
|
||||
.route("/browsers/{browser}/versions", get(get_browser_versions))
|
||||
.route(
|
||||
"/browsers/{browser}/versions/{version}/downloaded",
|
||||
get(check_browser_downloaded),
|
||||
)
|
||||
.layer(middleware::from_fn_with_state(
|
||||
state.clone(),
|
||||
auth_middleware,
|
||||
));
|
||||
|
||||
let app = Router::new()
|
||||
.nest("/v1", v1_routes)
|
||||
.layer(CorsLayer::permissive())
|
||||
.with_state(state);
|
||||
|
||||
// Start server task
|
||||
let task_handle = tokio::spawn(async move {
|
||||
let server = axum::serve(listener, app);
|
||||
tokio::select! {
|
||||
_ = server => {},
|
||||
_ = shutdown_rx.recv() => {},
|
||||
}
|
||||
});
|
||||
|
||||
self.port = Some(actual_port);
|
||||
self.shutdown_tx = Some(shutdown_tx);
|
||||
self.task_handle = Some(task_handle);
|
||||
|
||||
Ok(actual_port)
|
||||
}
|
||||
|
||||
async fn stop(&mut self) -> Result<(), String> {
|
||||
if let Some(shutdown_tx) = self.shutdown_tx.take() {
|
||||
let _ = shutdown_tx.send(()).await;
|
||||
}
|
||||
|
||||
if let Some(handle) = self.task_handle.take() {
|
||||
handle.abort();
|
||||
}
|
||||
|
||||
self.port = None;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
// Authentication middleware
|
||||
async fn auth_middleware(
|
||||
State(state): State<ApiServerState>,
|
||||
headers: HeaderMap,
|
||||
request: axum::extract::Request,
|
||||
next: Next,
|
||||
) -> Result<Response, StatusCode> {
|
||||
// Get the Authorization header
|
||||
let auth_header = headers
|
||||
.get("Authorization")
|
||||
.and_then(|h| h.to_str().ok())
|
||||
.and_then(|h| h.strip_prefix("Bearer "));
|
||||
|
||||
let token = match auth_header {
|
||||
Some(token) => token,
|
||||
None => return Err(StatusCode::UNAUTHORIZED),
|
||||
};
|
||||
|
||||
// Get the stored token
|
||||
let settings_manager = crate::settings_manager::SettingsManager::instance();
|
||||
let stored_token = match settings_manager.get_api_token(&state.app_handle).await {
|
||||
Ok(Some(stored_token)) => stored_token,
|
||||
Ok(None) => return Err(StatusCode::UNAUTHORIZED),
|
||||
Err(_) => return Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||
};
|
||||
|
||||
// Compare tokens
|
||||
if token != stored_token {
|
||||
return Err(StatusCode::UNAUTHORIZED);
|
||||
}
|
||||
|
||||
// Token is valid, continue with the request
|
||||
Ok(next.run(request).await)
|
||||
}
|
||||
|
||||
// Global API server instance
|
||||
lazy_static! {
|
||||
pub static ref API_SERVER: Arc<Mutex<ApiServer>> = Arc::new(Mutex::new(ApiServer::new()));
|
||||
}
|
||||
|
||||
// Tauri commands
|
||||
#[tauri::command]
|
||||
pub async fn start_api_server_internal(
|
||||
port: u16,
|
||||
app_handle: &tauri::AppHandle,
|
||||
) -> Result<u16, String> {
|
||||
let mut server_guard = API_SERVER.lock().await;
|
||||
server_guard.start(app_handle.clone(), port).await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn stop_api_server() -> Result<(), String> {
|
||||
let mut server_guard = API_SERVER.lock().await;
|
||||
server_guard.stop().await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn start_api_server(
|
||||
port: Option<u16>,
|
||||
app_handle: tauri::AppHandle,
|
||||
) -> Result<u16, String> {
|
||||
let actual_port = port.unwrap_or(10108);
|
||||
start_api_server_internal(actual_port, &app_handle).await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_api_server_status() -> Result<Option<u16>, String> {
|
||||
let server_guard = API_SERVER.lock().await;
|
||||
Ok(server_guard.get_port())
|
||||
}
|
||||
|
||||
// API Handlers - Profiles
|
||||
async fn get_profiles() -> Result<Json<ApiProfilesResponse>, StatusCode> {
|
||||
let profile_manager = ProfileManager::instance();
|
||||
match profile_manager.list_profiles() {
|
||||
Ok(profiles) => {
|
||||
let api_profiles: Vec<ApiProfile> = profiles
|
||||
.iter()
|
||||
.map(|profile| ApiProfile {
|
||||
id: profile.id.to_string(),
|
||||
name: profile.name.clone(),
|
||||
browser: profile.browser.clone(),
|
||||
version: profile.version.clone(),
|
||||
proxy_id: profile.proxy_id.clone(),
|
||||
process_id: profile.process_id,
|
||||
last_launch: profile.last_launch,
|
||||
release_type: profile.release_type.clone(),
|
||||
camoufox_config: profile
|
||||
.camoufox_config
|
||||
.as_ref()
|
||||
.and_then(|c| serde_json::to_value(c).ok()),
|
||||
group_id: profile.group_id.clone(),
|
||||
tags: profile.tags.clone(),
|
||||
is_running: profile.process_id.is_some(), // Simple check based on process_id
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(Json(ApiProfilesResponse {
|
||||
profiles: api_profiles,
|
||||
total: profiles.len(),
|
||||
}))
|
||||
}
|
||||
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||
}
|
||||
}
|
||||
|
||||
async fn get_profile(
|
||||
Path(id): Path<String>,
|
||||
State(_state): State<ApiServerState>,
|
||||
) -> Result<Json<ApiProfileResponse>, StatusCode> {
|
||||
let profile_manager = ProfileManager::instance();
|
||||
match profile_manager.list_profiles() {
|
||||
Ok(profiles) => {
|
||||
if let Some(profile) = profiles.iter().find(|p| p.id.to_string() == id) {
|
||||
Ok(Json(ApiProfileResponse {
|
||||
profile: ApiProfile {
|
||||
id: profile.id.to_string(),
|
||||
name: profile.name.clone(),
|
||||
browser: profile.browser.clone(),
|
||||
version: profile.version.clone(),
|
||||
proxy_id: profile.proxy_id.clone(),
|
||||
process_id: profile.process_id,
|
||||
last_launch: profile.last_launch,
|
||||
release_type: profile.release_type.clone(),
|
||||
camoufox_config: profile
|
||||
.camoufox_config
|
||||
.as_ref()
|
||||
.and_then(|c| serde_json::to_value(c).ok()),
|
||||
group_id: profile.group_id.clone(),
|
||||
tags: profile.tags.clone(),
|
||||
is_running: profile.process_id.is_some(), // Simple check based on process_id
|
||||
},
|
||||
}))
|
||||
} else {
|
||||
Err(StatusCode::NOT_FOUND)
|
||||
}
|
||||
}
|
||||
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||
}
|
||||
}
|
||||
|
||||
async fn create_profile(
|
||||
State(state): State<ApiServerState>,
|
||||
Json(request): Json<CreateProfileRequest>,
|
||||
) -> Result<Json<ApiProfileResponse>, StatusCode> {
|
||||
let profile_manager = ProfileManager::instance();
|
||||
|
||||
// Parse camoufox config if provided
|
||||
let camoufox_config = if let Some(config) = &request.camoufox_config {
|
||||
serde_json::from_value(config.clone()).ok()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// Create profile using the async create_profile_with_group method
|
||||
match profile_manager
|
||||
.create_profile_with_group(
|
||||
&state.app_handle,
|
||||
&request.name,
|
||||
&request.browser,
|
||||
&request.version,
|
||||
request.release_type.as_deref().unwrap_or("stable"),
|
||||
request.proxy_id.clone(),
|
||||
camoufox_config,
|
||||
request.group_id.clone(),
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(mut profile) => {
|
||||
// Apply tags if provided
|
||||
if let Some(tags) = &request.tags {
|
||||
if profile_manager
|
||||
.update_profile_tags(&state.app_handle, &profile.name, tags.clone())
|
||||
.is_err()
|
||||
{
|
||||
return Err(StatusCode::INTERNAL_SERVER_ERROR);
|
||||
}
|
||||
profile.tags = tags.clone();
|
||||
}
|
||||
|
||||
// Update tag manager with new tags
|
||||
if let Ok(profiles) = profile_manager.list_profiles() {
|
||||
let _ = crate::tag_manager::TAG_MANAGER
|
||||
.lock()
|
||||
.map(|manager| manager.rebuild_from_profiles(&profiles));
|
||||
}
|
||||
|
||||
Ok(Json(ApiProfileResponse {
|
||||
profile: ApiProfile {
|
||||
id: profile.id.to_string(),
|
||||
name: profile.name,
|
||||
browser: profile.browser,
|
||||
version: profile.version,
|
||||
proxy_id: profile.proxy_id,
|
||||
process_id: profile.process_id,
|
||||
last_launch: profile.last_launch,
|
||||
release_type: profile.release_type,
|
||||
camoufox_config: profile
|
||||
.camoufox_config
|
||||
.as_ref()
|
||||
.and_then(|c| serde_json::to_value(c).ok()),
|
||||
group_id: profile.group_id,
|
||||
tags: profile.tags,
|
||||
is_running: false,
|
||||
},
|
||||
}))
|
||||
}
|
||||
Err(_) => Err(StatusCode::BAD_REQUEST),
|
||||
}
|
||||
}
|
||||
|
||||
async fn update_profile(
|
||||
Path(id): Path<String>,
|
||||
State(state): State<ApiServerState>,
|
||||
Json(request): Json<UpdateProfileRequest>,
|
||||
) -> Result<Json<ApiProfileResponse>, StatusCode> {
|
||||
let profile_manager = ProfileManager::instance();
|
||||
|
||||
// Update profile fields
|
||||
if let Some(new_name) = request.name {
|
||||
if profile_manager
|
||||
.rename_profile(&state.app_handle, &id, &new_name)
|
||||
.is_err()
|
||||
{
|
||||
return Err(StatusCode::BAD_REQUEST);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(version) = request.version {
|
||||
if profile_manager
|
||||
.update_profile_version(&state.app_handle, &id, &version)
|
||||
.is_err()
|
||||
{
|
||||
return Err(StatusCode::BAD_REQUEST);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(proxy_id) = request.proxy_id {
|
||||
if profile_manager
|
||||
.update_profile_proxy(state.app_handle.clone(), &id, Some(proxy_id))
|
||||
.await
|
||||
.is_err()
|
||||
{
|
||||
return Err(StatusCode::BAD_REQUEST);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(camoufox_config) = request.camoufox_config {
|
||||
let config: Result<CamoufoxConfig, _> = serde_json::from_value(camoufox_config);
|
||||
match config {
|
||||
Ok(config) => {
|
||||
if profile_manager
|
||||
.update_camoufox_config(state.app_handle.clone(), &id, config)
|
||||
.await
|
||||
.is_err()
|
||||
{
|
||||
return Err(StatusCode::BAD_REQUEST);
|
||||
}
|
||||
}
|
||||
Err(_) => return Err(StatusCode::BAD_REQUEST),
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(group_id) = request.group_id {
|
||||
if profile_manager
|
||||
.assign_profiles_to_group(&state.app_handle, vec![id.clone()], Some(group_id))
|
||||
.is_err()
|
||||
{
|
||||
return Err(StatusCode::BAD_REQUEST);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(tags) = request.tags {
|
||||
if profile_manager
|
||||
.update_profile_tags(&state.app_handle, &id, tags)
|
||||
.is_err()
|
||||
{
|
||||
return Err(StatusCode::BAD_REQUEST);
|
||||
}
|
||||
|
||||
// Update tag manager with new tags from all profiles
|
||||
if let Ok(profiles) = profile_manager.list_profiles() {
|
||||
let _ = crate::tag_manager::TAG_MANAGER
|
||||
.lock()
|
||||
.map(|manager| manager.rebuild_from_profiles(&profiles));
|
||||
}
|
||||
}
|
||||
|
||||
// Return updated profile
|
||||
get_profile(Path(id), State(state)).await
|
||||
}
|
||||
|
||||
async fn delete_profile(
|
||||
Path(id): Path<String>,
|
||||
State(state): State<ApiServerState>,
|
||||
) -> Result<StatusCode, StatusCode> {
|
||||
let profile_manager = ProfileManager::instance();
|
||||
match profile_manager.delete_profile(&state.app_handle, &id) {
|
||||
Ok(_) => Ok(StatusCode::NO_CONTENT),
|
||||
Err(_) => Err(StatusCode::BAD_REQUEST),
|
||||
}
|
||||
}
|
||||
|
||||
// API Handlers - Groups
|
||||
async fn get_groups(
|
||||
State(_state): State<ApiServerState>,
|
||||
) -> Result<Json<Vec<ApiGroupResponse>>, StatusCode> {
|
||||
match GROUP_MANAGER.lock() {
|
||||
Ok(manager) => {
|
||||
match manager.get_all_groups() {
|
||||
Ok(groups) => {
|
||||
let api_groups = groups
|
||||
.into_iter()
|
||||
.map(|group| ApiGroupResponse {
|
||||
id: group.id,
|
||||
name: group.name,
|
||||
profile_count: 0, // Would need profile list to calculate this
|
||||
})
|
||||
.collect();
|
||||
Ok(Json(api_groups))
|
||||
}
|
||||
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||
}
|
||||
}
|
||||
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||
}
|
||||
}
|
||||
|
||||
async fn get_group(
|
||||
Path(id): Path<String>,
|
||||
State(_state): State<ApiServerState>,
|
||||
) -> Result<Json<ApiGroupResponse>, StatusCode> {
|
||||
match GROUP_MANAGER.lock() {
|
||||
Ok(manager) => match manager.get_all_groups() {
|
||||
Ok(groups) => {
|
||||
if let Some(group) = groups.into_iter().find(|g| g.id == id) {
|
||||
Ok(Json(ApiGroupResponse {
|
||||
id: group.id,
|
||||
name: group.name,
|
||||
profile_count: 0,
|
||||
}))
|
||||
} else {
|
||||
Err(StatusCode::NOT_FOUND)
|
||||
}
|
||||
}
|
||||
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||
},
|
||||
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||
}
|
||||
}
|
||||
|
||||
async fn create_group(
|
||||
State(state): State<ApiServerState>,
|
||||
Json(request): Json<CreateGroupRequest>,
|
||||
) -> Result<Json<ApiGroupResponse>, StatusCode> {
|
||||
match GROUP_MANAGER.lock() {
|
||||
Ok(manager) => match manager.create_group(&state.app_handle, request.name) {
|
||||
Ok(group) => Ok(Json(ApiGroupResponse {
|
||||
id: group.id,
|
||||
name: group.name,
|
||||
profile_count: 0,
|
||||
})),
|
||||
Err(_) => Err(StatusCode::BAD_REQUEST),
|
||||
},
|
||||
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||
}
|
||||
}
|
||||
|
||||
async fn update_group(
|
||||
Path(id): Path<String>,
|
||||
State(state): State<ApiServerState>,
|
||||
Json(request): Json<UpdateGroupRequest>,
|
||||
) -> Result<Json<ApiGroupResponse>, StatusCode> {
|
||||
match GROUP_MANAGER.lock() {
|
||||
Ok(manager) => match manager.update_group(&state.app_handle, id.clone(), request.name) {
|
||||
Ok(group) => Ok(Json(ApiGroupResponse {
|
||||
id: group.id,
|
||||
name: group.name,
|
||||
profile_count: 0,
|
||||
})),
|
||||
Err(_) => Err(StatusCode::BAD_REQUEST),
|
||||
},
|
||||
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||
}
|
||||
}
|
||||
|
||||
async fn delete_group(
|
||||
Path(id): Path<String>,
|
||||
State(state): State<ApiServerState>,
|
||||
) -> Result<StatusCode, StatusCode> {
|
||||
match GROUP_MANAGER.lock() {
|
||||
Ok(manager) => match manager.delete_group(&state.app_handle, id.clone()) {
|
||||
Ok(_) => Ok(StatusCode::NO_CONTENT),
|
||||
Err(_) => Err(StatusCode::BAD_REQUEST),
|
||||
},
|
||||
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||
}
|
||||
}
|
||||
|
||||
// API Handlers - Tags
|
||||
async fn get_tags(State(_state): State<ApiServerState>) -> Result<Json<Vec<String>>, StatusCode> {
|
||||
match TAG_MANAGER.lock() {
|
||||
Ok(manager) => match manager.get_all_tags() {
|
||||
Ok(tags) => Ok(Json(tags)),
|
||||
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||
},
|
||||
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||
}
|
||||
}
|
||||
|
||||
// API Handlers - Proxies
|
||||
async fn get_proxies(
|
||||
State(_state): State<ApiServerState>,
|
||||
) -> Result<Json<Vec<ApiProxyResponse>>, StatusCode> {
|
||||
let proxies = PROXY_MANAGER.get_stored_proxies();
|
||||
Ok(Json(
|
||||
proxies
|
||||
.into_iter()
|
||||
.map(|p| ApiProxyResponse {
|
||||
id: p.id,
|
||||
name: p.name,
|
||||
proxy_settings: serde_json::to_value(p.proxy_settings).unwrap_or_default(),
|
||||
})
|
||||
.collect(),
|
||||
))
|
||||
}
|
||||
|
||||
async fn get_proxy(
|
||||
Path(id): Path<String>,
|
||||
State(_state): State<ApiServerState>,
|
||||
) -> Result<Json<ApiProxyResponse>, StatusCode> {
|
||||
let proxies = PROXY_MANAGER.get_stored_proxies();
|
||||
if let Some(proxy) = proxies.into_iter().find(|p| p.id == id) {
|
||||
Ok(Json(ApiProxyResponse {
|
||||
id: proxy.id,
|
||||
name: proxy.name,
|
||||
proxy_settings: serde_json::to_value(proxy.proxy_settings).unwrap_or_default(),
|
||||
}))
|
||||
} else {
|
||||
Err(StatusCode::NOT_FOUND)
|
||||
}
|
||||
}
|
||||
|
||||
async fn create_proxy(
|
||||
State(state): State<ApiServerState>,
|
||||
Json(request): Json<CreateProxyRequest>,
|
||||
) -> Result<Json<ApiProxyResponse>, StatusCode> {
|
||||
// Convert JSON value to ProxySettings
|
||||
match serde_json::from_value(request.proxy_settings.clone()) {
|
||||
Ok(proxy_settings) => {
|
||||
match PROXY_MANAGER.create_stored_proxy(
|
||||
&state.app_handle,
|
||||
request.name.clone(),
|
||||
proxy_settings,
|
||||
) {
|
||||
Ok(_) => {
|
||||
// Find the created proxy to return it
|
||||
let proxies = PROXY_MANAGER.get_stored_proxies();
|
||||
if let Some(proxy) = proxies.into_iter().find(|p| p.name == request.name) {
|
||||
Ok(Json(ApiProxyResponse {
|
||||
id: proxy.id,
|
||||
name: proxy.name,
|
||||
proxy_settings: request.proxy_settings,
|
||||
}))
|
||||
} else {
|
||||
Err(StatusCode::INTERNAL_SERVER_ERROR)
|
||||
}
|
||||
}
|
||||
Err(_) => Err(StatusCode::BAD_REQUEST),
|
||||
}
|
||||
}
|
||||
Err(_) => Err(StatusCode::BAD_REQUEST),
|
||||
}
|
||||
}
|
||||
|
||||
async fn update_proxy(
|
||||
Path(id): Path<String>,
|
||||
State(state): State<ApiServerState>,
|
||||
Json(request): Json<UpdateProxyRequest>,
|
||||
) -> Result<Json<ApiProxyResponse>, StatusCode> {
|
||||
let proxies = PROXY_MANAGER.get_stored_proxies();
|
||||
if let Some(proxy) = proxies.into_iter().find(|p| p.id == id) {
|
||||
let new_name = request.name.unwrap_or(proxy.name.clone());
|
||||
let new_proxy_settings = if let Some(settings_json) = request.proxy_settings {
|
||||
match serde_json::from_value(settings_json) {
|
||||
Ok(settings) => settings,
|
||||
Err(_) => return Err(StatusCode::BAD_REQUEST),
|
||||
}
|
||||
} else {
|
||||
proxy.proxy_settings.clone()
|
||||
};
|
||||
|
||||
match PROXY_MANAGER.update_stored_proxy(
|
||||
&state.app_handle,
|
||||
&id,
|
||||
Some(new_name.clone()),
|
||||
Some(new_proxy_settings.clone()),
|
||||
) {
|
||||
Ok(_) => Ok(Json(ApiProxyResponse {
|
||||
id,
|
||||
name: new_name,
|
||||
proxy_settings: serde_json::to_value(new_proxy_settings).unwrap_or_default(),
|
||||
})),
|
||||
Err(_) => Err(StatusCode::BAD_REQUEST),
|
||||
}
|
||||
} else {
|
||||
Err(StatusCode::NOT_FOUND)
|
||||
}
|
||||
}
|
||||
|
||||
async fn delete_proxy(
|
||||
Path(id): Path<String>,
|
||||
State(state): State<ApiServerState>,
|
||||
) -> Result<StatusCode, StatusCode> {
|
||||
match PROXY_MANAGER.delete_stored_proxy(&state.app_handle, &id) {
|
||||
Ok(_) => Ok(StatusCode::NO_CONTENT),
|
||||
Err(_) => Err(StatusCode::BAD_REQUEST),
|
||||
}
|
||||
}
|
||||
|
||||
// API Handler - Run Profile with Remote Debugging
|
||||
async fn run_profile(
|
||||
Path(id): Path<String>,
|
||||
Query(params): Query<HashMap<String, String>>,
|
||||
State(state): State<ApiServerState>,
|
||||
) -> Result<Json<RunProfileResponse>, StatusCode> {
|
||||
let headless = params
|
||||
.get("headless")
|
||||
.and_then(|v| v.parse::<bool>().ok())
|
||||
.unwrap_or(false);
|
||||
|
||||
let profile_manager = ProfileManager::instance();
|
||||
let profiles = profile_manager
|
||||
.list_profiles()
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||
|
||||
let profile = profiles
|
||||
.iter()
|
||||
.find(|p| p.id.to_string() == id)
|
||||
.ok_or(StatusCode::NOT_FOUND)?;
|
||||
|
||||
// Generate a random port for remote debugging
|
||||
let remote_debugging_port = rand::random::<u16>().saturating_add(9000).max(9000);
|
||||
|
||||
// Use the same launch method as the main app, but with remote debugging enabled
|
||||
match crate::browser_runner::launch_browser_profile_with_debugging(
|
||||
state.app_handle.clone(),
|
||||
profile.clone(),
|
||||
None,
|
||||
Some(remote_debugging_port),
|
||||
headless,
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(updated_profile) => Ok(Json(RunProfileResponse {
|
||||
profile_id: updated_profile.id.to_string(),
|
||||
remote_debugging_port,
|
||||
headless,
|
||||
})),
|
||||
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||
}
|
||||
}
|
||||
|
||||
// API Handler - Download Browser
|
||||
async fn download_browser_api(
|
||||
State(state): State<ApiServerState>,
|
||||
Json(request): Json<DownloadBrowserRequest>,
|
||||
) -> Result<Json<DownloadBrowserResponse>, StatusCode> {
|
||||
match crate::downloader::download_browser(
|
||||
state.app_handle.clone(),
|
||||
request.browser.clone(),
|
||||
request.version.clone(),
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(_) => Ok(Json(DownloadBrowserResponse {
|
||||
browser: request.browser,
|
||||
version: request.version,
|
||||
status: "downloaded".to_string(),
|
||||
})),
|
||||
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||
}
|
||||
}
|
||||
|
||||
// API Handler - Get Browser Versions
|
||||
async fn get_browser_versions(
|
||||
Path(browser): Path<String>,
|
||||
State(_state): State<ApiServerState>,
|
||||
) -> Result<Json<Vec<String>>, StatusCode> {
|
||||
let version_manager = crate::browser_version_manager::BrowserVersionManager::instance();
|
||||
|
||||
match version_manager
|
||||
.fetch_browser_versions_with_count(&browser, false)
|
||||
.await
|
||||
{
|
||||
Ok(result) => Ok(Json(result.versions)),
|
||||
Err(_) => Err(StatusCode::INTERNAL_SERVER_ERROR),
|
||||
}
|
||||
}
|
||||
|
||||
// API Handler - Check if Browser is Downloaded
|
||||
async fn check_browser_downloaded(
|
||||
Path((browser, version)): Path<(String, String)>,
|
||||
State(_state): State<ApiServerState>,
|
||||
) -> Result<Json<bool>, StatusCode> {
|
||||
let is_downloaded = crate::downloaded_browsers_registry::is_browser_downloaded(browser, version);
|
||||
Ok(Json(is_downloaded))
|
||||
}
|
||||
+1020
-93
File diff suppressed because it is too large
Load Diff
+333
-234
@@ -1,10 +1,11 @@
|
||||
use crate::browser_runner::{BrowserProfile, BrowserRunner};
|
||||
use crate::browser_version_service::{BrowserVersionInfo, BrowserVersionService};
|
||||
use crate::browser_version_manager::{BrowserVersionInfo, BrowserVersionManager};
|
||||
use crate::profile::{BrowserProfile, ProfileManager};
|
||||
use crate::settings_manager::SettingsManager;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
use tauri::Emitter;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct UpdateNotification {
|
||||
@@ -27,61 +28,65 @@ pub struct AutoUpdateState {
|
||||
}
|
||||
|
||||
pub struct AutoUpdater {
|
||||
version_service: BrowserVersionService,
|
||||
browser_runner: BrowserRunner,
|
||||
settings_manager: SettingsManager,
|
||||
browser_version_manager: &'static BrowserVersionManager,
|
||||
settings_manager: &'static SettingsManager,
|
||||
profile_manager: &'static ProfileManager,
|
||||
}
|
||||
|
||||
impl AutoUpdater {
|
||||
pub fn new() -> Self {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
version_service: BrowserVersionService::new(),
|
||||
browser_runner: BrowserRunner::new(),
|
||||
settings_manager: SettingsManager::new(),
|
||||
browser_version_manager: BrowserVersionManager::instance(),
|
||||
settings_manager: SettingsManager::instance(),
|
||||
profile_manager: ProfileManager::instance(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn instance() -> &'static AutoUpdater {
|
||||
&AUTO_UPDATER
|
||||
}
|
||||
|
||||
/// Check for updates for all profiles
|
||||
pub async fn check_for_updates(
|
||||
&self,
|
||||
) -> Result<Vec<UpdateNotification>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Check if auto-updates are enabled
|
||||
let settings = self
|
||||
.settings_manager
|
||||
.load_settings()
|
||||
.map_err(|e| format!("Failed to load settings: {e}"))?;
|
||||
if !settings.auto_updates_enabled {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
|
||||
let profiles = self
|
||||
.browser_runner
|
||||
.list_profiles()
|
||||
.map_err(|e| format!("Failed to list profiles: {e}"))?;
|
||||
let mut notifications = Vec::new();
|
||||
let mut browser_versions: HashMap<String, Vec<BrowserVersionInfo>> = HashMap::new();
|
||||
|
||||
// Group profiles by browser type
|
||||
// Group profiles by browser
|
||||
let profiles = self
|
||||
.profile_manager
|
||||
.list_profiles()
|
||||
.map_err(|e| format!("Failed to list profiles: {e}"))?;
|
||||
let mut browser_profiles: HashMap<String, Vec<BrowserProfile>> = HashMap::new();
|
||||
|
||||
for profile in profiles {
|
||||
// Only check supported browsers
|
||||
if !self
|
||||
.browser_version_manager
|
||||
.is_browser_supported(&profile.browser)
|
||||
.unwrap_or(false)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
browser_profiles
|
||||
.entry(profile.browser.clone())
|
||||
.or_default()
|
||||
.push(profile);
|
||||
}
|
||||
|
||||
// Check each browser type
|
||||
for (browser, profiles) in browser_profiles {
|
||||
// Get cached versions first, then try to fetch if needed
|
||||
let versions = if let Some(cached) = self
|
||||
.version_service
|
||||
.browser_version_manager
|
||||
.get_cached_browser_versions_detailed(&browser)
|
||||
{
|
||||
cached
|
||||
} else if self.version_service.should_update_cache(&browser) {
|
||||
} else if self.browser_version_manager.should_update_cache(&browser) {
|
||||
// Try to fetch fresh versions
|
||||
match self
|
||||
.version_service
|
||||
.browser_version_manager
|
||||
.fetch_browser_versions_detailed(&browser, false)
|
||||
.await
|
||||
{
|
||||
@@ -97,7 +102,26 @@ impl AutoUpdater {
|
||||
// Check each profile for updates
|
||||
for profile in profiles {
|
||||
if let Some(update) = self.check_profile_update(&profile, &versions)? {
|
||||
notifications.push(update);
|
||||
// Apply chromium threshold logic
|
||||
if browser == "chromium" {
|
||||
// For chromium, only show notifications if there are 400+ new versions
|
||||
let current_version = &profile.version.parse::<u32>().unwrap();
|
||||
let new_version = &update.new_version.parse::<u32>().unwrap();
|
||||
|
||||
let result = new_version - current_version;
|
||||
println!(
|
||||
"Current version: {current_version}, New version: {new_version}, Result: {result}"
|
||||
);
|
||||
if result > 400 {
|
||||
notifications.push(update);
|
||||
} else {
|
||||
println!(
|
||||
"Skipping chromium update notification: only {result} new versions (need 400+)"
|
||||
);
|
||||
}
|
||||
} else {
|
||||
notifications.push(update);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -105,6 +129,96 @@ impl AutoUpdater {
|
||||
Ok(notifications)
|
||||
}
|
||||
|
||||
pub async fn check_for_updates_with_progress(&self, app_handle: &tauri::AppHandle) {
|
||||
println!("Starting auto-update check with progress...");
|
||||
|
||||
// Check for browser updates and trigger auto-downloads
|
||||
match self.check_for_updates().await {
|
||||
Ok(update_notifications) => {
|
||||
if !update_notifications.is_empty() {
|
||||
println!(
|
||||
"Found {} browser updates to auto-download",
|
||||
update_notifications.len()
|
||||
);
|
||||
|
||||
// Trigger automatic downloads for each update
|
||||
for notification in update_notifications {
|
||||
println!(
|
||||
"Auto-downloading {} version {}",
|
||||
notification.browser, notification.new_version
|
||||
);
|
||||
|
||||
// Clone app_handle for the async task
|
||||
let browser = notification.browser.clone();
|
||||
let new_version = notification.new_version.clone();
|
||||
let notification_id = notification.id.clone();
|
||||
let affected_profiles = notification.affected_profiles.clone();
|
||||
let app_handle_clone = app_handle.clone();
|
||||
|
||||
// Spawn async task to handle the download and auto-update
|
||||
tokio::spawn(async move {
|
||||
// TODO: update the logic to use the downloaded browsers registry instance instead of the static method
|
||||
// First, check if browser already exists
|
||||
match crate::downloaded_browsers_registry::is_browser_downloaded(
|
||||
browser.clone(),
|
||||
new_version.clone(),
|
||||
) {
|
||||
true => {
|
||||
println!("Browser {browser} {new_version} already downloaded, proceeding to auto-update profiles");
|
||||
|
||||
// Browser already exists, go straight to profile update
|
||||
match AutoUpdater::instance()
|
||||
.complete_browser_update_with_auto_update(
|
||||
&app_handle_clone,
|
||||
&browser.clone(),
|
||||
&new_version.clone(),
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(updated_profiles) => {
|
||||
println!(
|
||||
"Auto-update completed for {} profiles: {:?}",
|
||||
updated_profiles.len(),
|
||||
updated_profiles
|
||||
);
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Failed to complete auto-update for {browser}: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
false => {
|
||||
println!("Downloading browser {browser} version {new_version}...");
|
||||
|
||||
// Emit the auto-update event to trigger frontend handling
|
||||
let auto_update_event = serde_json::json!({
|
||||
"browser": browser,
|
||||
"new_version": new_version,
|
||||
"notification_id": notification_id,
|
||||
"affected_profiles": affected_profiles
|
||||
});
|
||||
|
||||
if let Err(e) =
|
||||
app_handle_clone.emit("browser-auto-update-available", &auto_update_event)
|
||||
{
|
||||
eprintln!("Failed to emit auto-update event for {browser}: {e}");
|
||||
} else {
|
||||
println!("Emitted auto-update event for {browser}");
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
} else {
|
||||
println!("No browser updates needed");
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Failed to check for browser updates: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if a specific profile has an available update
|
||||
fn check_profile_update(
|
||||
&self,
|
||||
@@ -112,16 +226,17 @@ impl AutoUpdater {
|
||||
available_versions: &[BrowserVersionInfo],
|
||||
) -> Result<Option<UpdateNotification>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let current_version = &profile.version;
|
||||
let is_current_stable = !self.is_nightly_version(current_version);
|
||||
let is_current_nightly =
|
||||
crate::api_client::is_browser_version_nightly(&profile.browser, current_version, None);
|
||||
|
||||
// Find the best available update
|
||||
let best_update = available_versions
|
||||
.iter()
|
||||
.filter(|v| {
|
||||
// Only consider versions newer than current
|
||||
self.is_version_newer(&v.version, current_version) &&
|
||||
// Respect version type preference
|
||||
is_current_stable != v.is_prerelease
|
||||
self.is_version_newer(&v.version, current_version)
|
||||
&& crate::api_client::is_browser_version_nightly(&profile.browser, &v.version, None)
|
||||
== is_current_nightly
|
||||
})
|
||||
.max_by(|a, b| self.compare_versions(&a.version, &b.version));
|
||||
|
||||
@@ -181,51 +296,15 @@ impl AutoUpdater {
|
||||
result
|
||||
}
|
||||
|
||||
/// Mark download as auto-update
|
||||
pub fn mark_auto_update_download(
|
||||
&self,
|
||||
browser: &str,
|
||||
version: &str,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let mut state = self.load_auto_update_state()?;
|
||||
let download_key = format!("{browser}-{version}");
|
||||
state.auto_update_downloads.insert(download_key);
|
||||
self.save_auto_update_state(&state)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Remove auto-update download tracking
|
||||
pub fn remove_auto_update_download(
|
||||
&self,
|
||||
browser: &str,
|
||||
version: &str,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let mut state = self.load_auto_update_state()?;
|
||||
let download_key = format!("{browser}-{version}");
|
||||
state.auto_update_downloads.remove(&download_key);
|
||||
self.save_auto_update_state(&state)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Check if download is marked as auto-update
|
||||
pub fn is_auto_update_download(
|
||||
&self,
|
||||
browser: &str,
|
||||
version: &str,
|
||||
) -> Result<bool, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let state = self.load_auto_update_state()?;
|
||||
let download_key = format!("{browser}-{version}");
|
||||
Ok(state.auto_update_downloads.contains(&download_key))
|
||||
}
|
||||
|
||||
/// Automatically update all affected profile versions after browser download
|
||||
pub async fn auto_update_profile_versions(
|
||||
&self,
|
||||
app_handle: &tauri::AppHandle,
|
||||
browser: &str,
|
||||
new_version: &str,
|
||||
) -> Result<Vec<String>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let profiles = self
|
||||
.browser_runner
|
||||
.profile_manager
|
||||
.list_profiles()
|
||||
.map_err(|e| format!("Failed to list profiles: {e}"))?;
|
||||
|
||||
@@ -242,10 +321,11 @@ impl AutoUpdater {
|
||||
// Check if this is an update (newer version)
|
||||
if self.is_version_newer(new_version, &profile.version) {
|
||||
// Update the profile version
|
||||
match self
|
||||
.browser_runner
|
||||
.update_profile_version(&profile.name, new_version)
|
||||
{
|
||||
match self.profile_manager.update_profile_version(
|
||||
app_handle,
|
||||
&profile.id.to_string(),
|
||||
new_version,
|
||||
) {
|
||||
Ok(_) => {
|
||||
updated_profiles.push(profile.name);
|
||||
}
|
||||
@@ -263,12 +343,13 @@ impl AutoUpdater {
|
||||
/// Complete browser update process with auto-update of profile versions
|
||||
pub async fn complete_browser_update_with_auto_update(
|
||||
&self,
|
||||
app_handle: &tauri::AppHandle,
|
||||
browser: &str,
|
||||
new_version: &str,
|
||||
) -> Result<Vec<String>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Auto-update profile versions first
|
||||
let updated_profiles = self
|
||||
.auto_update_profile_versions(browser, new_version)
|
||||
.auto_update_profile_versions(app_handle, browser, new_version)
|
||||
.await?;
|
||||
|
||||
// Remove browser from disabled list and clean up auto-update tracking
|
||||
@@ -278,51 +359,9 @@ impl AutoUpdater {
|
||||
state.auto_update_downloads.remove(&download_key);
|
||||
self.save_auto_update_state(&state)?;
|
||||
|
||||
// Check if auto-delete of unused binaries is enabled and perform cleanup
|
||||
let settings = self
|
||||
.settings_manager
|
||||
.load_settings()
|
||||
.map_err(|e| format!("Failed to load settings: {e}"))?;
|
||||
if settings.auto_delete_unused_binaries {
|
||||
// Perform cleanup in the background - don't fail the update if cleanup fails
|
||||
if let Err(e) = self.cleanup_unused_binaries_internal() {
|
||||
eprintln!("Warning: Failed to cleanup unused binaries after auto-update: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
Ok(updated_profiles)
|
||||
}
|
||||
|
||||
/// Internal method to cleanup unused binaries (used by auto-cleanup)
|
||||
fn cleanup_unused_binaries_internal(
|
||||
&self,
|
||||
) -> Result<Vec<String>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Load current profiles
|
||||
let profiles = self
|
||||
.browser_runner
|
||||
.list_profiles()
|
||||
.map_err(|e| format!("Failed to load profiles: {e}"))?;
|
||||
|
||||
// Load registry
|
||||
let mut registry = crate::downloaded_browsers::DownloadedBrowsersRegistry::load()
|
||||
.map_err(|e| format!("Failed to load browser registry: {e}"))?;
|
||||
|
||||
// Get active browser versions
|
||||
let active_versions = registry.get_active_browser_versions(&profiles);
|
||||
|
||||
// Cleanup unused binaries
|
||||
let cleaned_up = registry
|
||||
.cleanup_unused_binaries(&active_versions)
|
||||
.map_err(|e| format!("Failed to cleanup unused binaries: {e}"))?;
|
||||
|
||||
// Save updated registry
|
||||
registry
|
||||
.save()
|
||||
.map_err(|e| format!("Failed to save registry: {e}"))?;
|
||||
|
||||
Ok(cleaned_up)
|
||||
}
|
||||
|
||||
/// Check if browser is disabled due to ongoing update
|
||||
pub fn is_browser_disabled(
|
||||
&self,
|
||||
@@ -343,31 +382,12 @@ impl AutoUpdater {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Helper methods
|
||||
|
||||
fn is_nightly_version(&self, version: &str) -> bool {
|
||||
// Use the centralized nightly detection function
|
||||
// Since we don't have browser context here, use the general fallback
|
||||
crate::api_client::is_nightly_version(version)
|
||||
}
|
||||
|
||||
fn is_version_newer(&self, version1: &str, version2: &str) -> bool {
|
||||
self.compare_versions(version1, version2) == std::cmp::Ordering::Greater
|
||||
crate::api_client::is_version_newer(version1, version2)
|
||||
}
|
||||
|
||||
fn compare_versions(&self, version1: &str, version2: &str) -> std::cmp::Ordering {
|
||||
// Basic semantic version comparison
|
||||
let v1_parts = self.parse_version(version1);
|
||||
let v2_parts = self.parse_version(version2);
|
||||
|
||||
v1_parts.cmp(&v2_parts)
|
||||
}
|
||||
|
||||
fn parse_version(&self, version: &str) -> Vec<u32> {
|
||||
version
|
||||
.split(&['.', 'a', 'b', '-', '_'][..])
|
||||
.filter_map(|part| part.parse::<u32>().ok())
|
||||
.collect()
|
||||
crate::api_client::compare_versions(version1, version2)
|
||||
}
|
||||
|
||||
fn get_auto_update_state_file(&self) -> PathBuf {
|
||||
@@ -377,7 +397,7 @@ impl AutoUpdater {
|
||||
.join("auto_update_state.json")
|
||||
}
|
||||
|
||||
fn load_auto_update_state(
|
||||
pub fn load_auto_update_state(
|
||||
&self,
|
||||
) -> Result<AutoUpdateState, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let state_file = self.get_auto_update_state_file();
|
||||
@@ -391,7 +411,7 @@ impl AutoUpdater {
|
||||
Ok(state)
|
||||
}
|
||||
|
||||
fn save_auto_update_state(
|
||||
pub fn save_auto_update_state(
|
||||
&self,
|
||||
state: &AutoUpdateState,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
@@ -404,13 +424,46 @@ impl AutoUpdater {
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get pending update versions for a specific browser
|
||||
/// Returns a set of (browser, version) pairs that have pending updates
|
||||
pub fn get_pending_update_versions(
|
||||
&self,
|
||||
) -> Result<std::collections::HashSet<(String, String)>, Box<dyn std::error::Error + Send + Sync>>
|
||||
{
|
||||
let state = self.load_auto_update_state()?;
|
||||
let mut pending_versions = std::collections::HashSet::new();
|
||||
|
||||
for update in &state.pending_updates {
|
||||
pending_versions.insert((update.browser.clone(), update.new_version.clone()));
|
||||
}
|
||||
|
||||
Ok(pending_versions)
|
||||
}
|
||||
|
||||
/// Get pending update for a specific browser version if it exists
|
||||
pub fn get_pending_update(
|
||||
&self,
|
||||
browser: &str,
|
||||
current_version: &str,
|
||||
) -> Result<Option<UpdateNotification>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let state = self.load_auto_update_state()?;
|
||||
|
||||
for update in &state.pending_updates {
|
||||
if update.browser == browser && update.current_version == current_version {
|
||||
return Ok(Some(update.clone()));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
// Tauri commands
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn check_for_browser_updates() -> Result<Vec<UpdateNotification>, String> {
|
||||
let updater = AutoUpdater::new();
|
||||
let updater = AutoUpdater::instance();
|
||||
let notifications = updater
|
||||
.check_for_updates()
|
||||
.await
|
||||
@@ -419,17 +472,9 @@ pub async fn check_for_browser_updates() -> Result<Vec<UpdateNotification>, Stri
|
||||
Ok(grouped)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn is_browser_disabled_for_update(browser: String) -> Result<bool, String> {
|
||||
let updater = AutoUpdater::new();
|
||||
updater
|
||||
.is_browser_disabled(&browser)
|
||||
.map_err(|e| format!("Failed to check browser status: {e}"))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn dismiss_update_notification(notification_id: String) -> Result<(), String> {
|
||||
let updater = AutoUpdater::new();
|
||||
let updater = AutoUpdater::instance();
|
||||
updater
|
||||
.dismiss_update_notification(¬ification_id)
|
||||
.map_err(|e| format!("Failed to dismiss notification: {e}"))
|
||||
@@ -437,38 +482,21 @@ pub async fn dismiss_update_notification(notification_id: String) -> Result<(),
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn complete_browser_update_with_auto_update(
|
||||
app_handle: tauri::AppHandle,
|
||||
browser: String,
|
||||
new_version: String,
|
||||
) -> Result<Vec<String>, String> {
|
||||
let updater = AutoUpdater::new();
|
||||
let updater = AutoUpdater::instance();
|
||||
updater
|
||||
.complete_browser_update_with_auto_update(&browser, &new_version)
|
||||
.complete_browser_update_with_auto_update(&app_handle, &browser, &new_version)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to complete browser update: {e}"))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn mark_auto_update_download(browser: String, version: String) -> Result<(), String> {
|
||||
let updater = AutoUpdater::new();
|
||||
updater
|
||||
.mark_auto_update_download(&browser, &version)
|
||||
.map_err(|e| format!("Failed to mark auto-update download: {e}"))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn remove_auto_update_download(browser: String, version: String) -> Result<(), String> {
|
||||
let updater = AutoUpdater::new();
|
||||
updater
|
||||
.remove_auto_update_download(&browser, &version)
|
||||
.map_err(|e| format!("Failed to remove auto-update download: {e}"))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn is_auto_update_download(browser: String, version: String) -> Result<bool, String> {
|
||||
let updater = AutoUpdater::new();
|
||||
updater
|
||||
.is_auto_update_download(&browser, &version)
|
||||
.map_err(|e| format!("Failed to check auto-update download: {e}"))
|
||||
pub async fn check_for_updates_with_progress(app_handle: tauri::AppHandle) {
|
||||
let updater = AutoUpdater::instance();
|
||||
updater.check_for_updates_with_progress(&app_handle).await;
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -477,13 +505,17 @@ mod tests {
|
||||
|
||||
fn create_test_profile(name: &str, browser: &str, version: &str) -> BrowserProfile {
|
||||
BrowserProfile {
|
||||
id: uuid::Uuid::new_v4(),
|
||||
name: name.to_string(),
|
||||
browser: browser.to_string(),
|
||||
version: version.to_string(),
|
||||
profile_path: format!("/tmp/{name}"),
|
||||
process_id: None,
|
||||
proxy: None,
|
||||
proxy_id: None,
|
||||
last_launch: None,
|
||||
release_type: "stable".to_string(),
|
||||
camoufox_config: None,
|
||||
group_id: None,
|
||||
tags: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -495,24 +527,9 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_nightly_version() {
|
||||
let updater = AutoUpdater::new();
|
||||
|
||||
assert!(updater.is_nightly_version("1.0.0-alpha"));
|
||||
assert!(updater.is_nightly_version("1.0.0-beta"));
|
||||
assert!(updater.is_nightly_version("1.0.0-rc"));
|
||||
assert!(updater.is_nightly_version("1.0.0a1"));
|
||||
assert!(updater.is_nightly_version("1.0.0b1"));
|
||||
assert!(updater.is_nightly_version("1.0.0-dev"));
|
||||
|
||||
assert!(!updater.is_nightly_version("1.0.0"));
|
||||
assert!(!updater.is_nightly_version("1.2.3"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_compare_versions() {
|
||||
let updater = AutoUpdater::new();
|
||||
let updater = AutoUpdater::instance();
|
||||
|
||||
assert_eq!(
|
||||
updater.compare_versions("1.0.0", "1.0.0"),
|
||||
@@ -538,7 +555,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_is_version_newer() {
|
||||
let updater = AutoUpdater::new();
|
||||
let updater = AutoUpdater::instance();
|
||||
|
||||
assert!(updater.is_version_newer("1.0.1", "1.0.0"));
|
||||
assert!(updater.is_version_newer("2.0.0", "1.9.9"));
|
||||
@@ -546,9 +563,71 @@ mod tests {
|
||||
assert!(!updater.is_version_newer("1.0.0", "1.0.0"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_camoufox_beta_version_comparison() {
|
||||
let updater = AutoUpdater::instance();
|
||||
|
||||
// Test the exact user-reported scenario: 135.0.1beta24 vs 135.0beta22
|
||||
assert!(
|
||||
updater.is_version_newer("135.0.1beta24", "135.0beta22"),
|
||||
"135.0.1beta24 should be newer than 135.0beta22"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
updater.compare_versions("135.0.1beta24", "135.0beta22"),
|
||||
std::cmp::Ordering::Greater,
|
||||
"135.0.1beta24 should compare as greater than 135.0beta22"
|
||||
);
|
||||
|
||||
// Test other camoufox beta version combinations
|
||||
assert!(
|
||||
updater.is_version_newer("135.0.5beta24", "135.0.5beta22"),
|
||||
"135.0.5beta24 should be newer than 135.0.5beta22"
|
||||
);
|
||||
|
||||
assert!(
|
||||
updater.is_version_newer("135.0.1beta1", "135.0beta1"),
|
||||
"135.0.1beta1 should be newer than 135.0beta1 due to patch version"
|
||||
);
|
||||
|
||||
// Test that older versions are not considered newer
|
||||
assert!(
|
||||
!updater.is_version_newer("135.0beta22", "135.0.1beta24"),
|
||||
"135.0beta22 should NOT be newer than 135.0.1beta24"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_beta_version_ordering_comprehensive() {
|
||||
let updater = AutoUpdater::instance();
|
||||
|
||||
// Test various beta version patterns that could appear in camoufox
|
||||
let test_cases = vec![
|
||||
("135.0.1beta24", "135.0beta22", true), // User reported case
|
||||
("135.0.5beta24", "135.0.5beta22", true), // Same patch, different beta
|
||||
("135.1beta1", "135.0beta99", true), // Higher minor beats beta number
|
||||
("136.0beta1", "135.9.9beta99", true), // Higher major beats everything
|
||||
("135.0.1beta1", "135.0beta1", true), // Patch version matters
|
||||
("135.0beta22", "135.0.1beta24", false), // Reverse of user case
|
||||
];
|
||||
|
||||
for (newer, older, should_be_newer) in test_cases {
|
||||
let result = updater.is_version_newer(newer, older);
|
||||
assert_eq!(
|
||||
result,
|
||||
should_be_newer,
|
||||
"Expected {} {} {} but got {}",
|
||||
newer,
|
||||
if should_be_newer { ">" } else { "<=" },
|
||||
older,
|
||||
if result { "true" } else { "false" }
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_check_profile_update_stable_to_stable() {
|
||||
let updater = AutoUpdater::new();
|
||||
let updater = AutoUpdater::instance();
|
||||
let profile = create_test_profile("test", "firefox", "1.0.0");
|
||||
let versions = vec![
|
||||
create_test_version_info("1.0.1", false), // stable, newer
|
||||
@@ -566,7 +645,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_check_profile_update_alpha_to_alpha() {
|
||||
let updater = AutoUpdater::new();
|
||||
let updater = AutoUpdater::instance();
|
||||
let profile = create_test_profile("test", "firefox", "1.0.0-alpha");
|
||||
let versions = vec![
|
||||
create_test_version_info("1.0.1", false), // stable, should be included
|
||||
@@ -585,7 +664,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_check_profile_update_no_update_available() {
|
||||
let updater = AutoUpdater::new();
|
||||
let updater = AutoUpdater::instance();
|
||||
let profile = create_test_profile("test", "firefox", "1.0.0");
|
||||
let versions = vec![
|
||||
create_test_version_info("0.9.0", false), // older
|
||||
@@ -598,7 +677,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_group_update_notifications() {
|
||||
let updater = AutoUpdater::new();
|
||||
let updater = AutoUpdater::instance();
|
||||
let notifications = vec![
|
||||
UpdateNotification {
|
||||
id: "firefox_1.0.0_to_1.1.0_profile1".to_string(),
|
||||
@@ -696,13 +775,15 @@ mod tests {
|
||||
let state_file = test_settings_manager
|
||||
.get_settings_dir()
|
||||
.join("auto_update_state.json");
|
||||
std::fs::create_dir_all(test_settings_manager.get_settings_dir()).unwrap();
|
||||
let json = serde_json::to_string_pretty(&state).unwrap();
|
||||
std::fs::write(&state_file, json).unwrap();
|
||||
std::fs::create_dir_all(test_settings_manager.get_settings_dir())
|
||||
.expect("Failed to create settings directory");
|
||||
let json = serde_json::to_string_pretty(&state).expect("Failed to serialize state");
|
||||
std::fs::write(&state_file, json).expect("Failed to write state file");
|
||||
|
||||
// Load state
|
||||
let content = std::fs::read_to_string(&state_file).unwrap();
|
||||
let loaded_state: AutoUpdateState = serde_json::from_str(&content).unwrap();
|
||||
let content = std::fs::read_to_string(&state_file).expect("Failed to read state file");
|
||||
let loaded_state: AutoUpdateState =
|
||||
serde_json::from_str(&content).expect("Failed to deserialize state");
|
||||
|
||||
assert_eq!(loaded_state.disabled_browsers.len(), 1);
|
||||
assert!(loaded_state.disabled_browsers.contains("firefox"));
|
||||
@@ -740,11 +821,15 @@ mod tests {
|
||||
let state_file = test_settings_manager
|
||||
.get_settings_dir()
|
||||
.join("auto_update_state.json");
|
||||
std::fs::create_dir_all(test_settings_manager.get_settings_dir()).unwrap();
|
||||
std::fs::create_dir_all(test_settings_manager.get_settings_dir())
|
||||
.expect("Failed to create settings directory");
|
||||
|
||||
// Initially not disabled (empty state file means default state)
|
||||
let state = AutoUpdateState::default();
|
||||
assert!(!state.disabled_browsers.contains("firefox"));
|
||||
assert!(
|
||||
!state.disabled_browsers.contains("firefox"),
|
||||
"Firefox should not be disabled initially"
|
||||
);
|
||||
|
||||
// Start update (should disable)
|
||||
let mut state = AutoUpdateState::default();
|
||||
@@ -752,27 +837,41 @@ mod tests {
|
||||
state
|
||||
.auto_update_downloads
|
||||
.insert("firefox-1.1.0".to_string());
|
||||
let json = serde_json::to_string_pretty(&state).unwrap();
|
||||
std::fs::write(&state_file, json).unwrap();
|
||||
let json = serde_json::to_string_pretty(&state).expect("Failed to serialize state");
|
||||
std::fs::write(&state_file, json).expect("Failed to write state file");
|
||||
|
||||
// Check that it's disabled
|
||||
let content = std::fs::read_to_string(&state_file).unwrap();
|
||||
let loaded_state: AutoUpdateState = serde_json::from_str(&content).unwrap();
|
||||
assert!(loaded_state.disabled_browsers.contains("firefox"));
|
||||
assert!(loaded_state.auto_update_downloads.contains("firefox-1.1.0"));
|
||||
let content = std::fs::read_to_string(&state_file).expect("Failed to read state file");
|
||||
let loaded_state: AutoUpdateState =
|
||||
serde_json::from_str(&content).expect("Failed to deserialize state");
|
||||
assert!(
|
||||
loaded_state.disabled_browsers.contains("firefox"),
|
||||
"Firefox should be disabled"
|
||||
);
|
||||
assert!(
|
||||
loaded_state.auto_update_downloads.contains("firefox-1.1.0"),
|
||||
"Firefox download should be tracked"
|
||||
);
|
||||
|
||||
// Complete update (should enable)
|
||||
let mut state = loaded_state;
|
||||
state.disabled_browsers.remove("firefox");
|
||||
state.auto_update_downloads.remove("firefox-1.1.0");
|
||||
let json = serde_json::to_string_pretty(&state).unwrap();
|
||||
std::fs::write(&state_file, json).unwrap();
|
||||
let json = serde_json::to_string_pretty(&state).expect("Failed to serialize final state");
|
||||
std::fs::write(&state_file, json).expect("Failed to write final state file");
|
||||
|
||||
// Check that it's enabled again
|
||||
let content = std::fs::read_to_string(&state_file).unwrap();
|
||||
let final_state: AutoUpdateState = serde_json::from_str(&content).unwrap();
|
||||
assert!(!final_state.disabled_browsers.contains("firefox"));
|
||||
assert!(!final_state.auto_update_downloads.contains("firefox-1.1.0"));
|
||||
let content = std::fs::read_to_string(&state_file).expect("Failed to read final state file");
|
||||
let final_state: AutoUpdateState =
|
||||
serde_json::from_str(&content).expect("Failed to deserialize final state");
|
||||
assert!(
|
||||
!final_state.disabled_browsers.contains("firefox"),
|
||||
"Firefox should be enabled again"
|
||||
);
|
||||
assert!(
|
||||
!final_state.auto_update_downloads.contains("firefox-1.1.0"),
|
||||
"Firefox download should not be tracked anymore"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -814,31 +913,31 @@ mod tests {
|
||||
let state_file = test_settings_manager
|
||||
.get_settings_dir()
|
||||
.join("auto_update_state.json");
|
||||
std::fs::create_dir_all(test_settings_manager.get_settings_dir()).unwrap();
|
||||
let json = serde_json::to_string_pretty(&state).unwrap();
|
||||
std::fs::write(&state_file, json).unwrap();
|
||||
std::fs::create_dir_all(test_settings_manager.get_settings_dir())
|
||||
.expect("Failed to create settings directory");
|
||||
let json = serde_json::to_string_pretty(&state).expect("Failed to serialize initial state");
|
||||
std::fs::write(&state_file, json).expect("Failed to write initial state file");
|
||||
|
||||
// Dismiss notification (remove from pending updates)
|
||||
state
|
||||
.pending_updates
|
||||
.retain(|n| n.id != "test_notification");
|
||||
let json = serde_json::to_string_pretty(&state).unwrap();
|
||||
std::fs::write(&state_file, json).unwrap();
|
||||
let json = serde_json::to_string_pretty(&state).expect("Failed to serialize updated state");
|
||||
std::fs::write(&state_file, json).expect("Failed to write updated state file");
|
||||
|
||||
// Check that it's removed
|
||||
let content = std::fs::read_to_string(&state_file).unwrap();
|
||||
let loaded_state: AutoUpdateState = serde_json::from_str(&content).unwrap();
|
||||
assert_eq!(loaded_state.pending_updates.len(), 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_version() {
|
||||
let updater = AutoUpdater::new();
|
||||
|
||||
assert_eq!(updater.parse_version("1.2.3"), vec![1, 2, 3]);
|
||||
assert_eq!(updater.parse_version("1.2.3-alpha"), vec![1, 2, 3]);
|
||||
assert_eq!(updater.parse_version("1.2.3a1"), vec![1, 2, 3, 1]);
|
||||
assert_eq!(updater.parse_version("1.2.3b2"), vec![1, 2, 3, 2]);
|
||||
assert_eq!(updater.parse_version("10.0.0"), vec![10, 0, 0]);
|
||||
let content = std::fs::read_to_string(&state_file).expect("Failed to read updated state file");
|
||||
let loaded_state: AutoUpdateState =
|
||||
serde_json::from_str(&content).expect("Failed to deserialize updated state");
|
||||
assert_eq!(
|
||||
loaded_state.pending_updates.len(),
|
||||
0,
|
||||
"Pending updates should be empty after dismissal"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Global singleton instance
|
||||
lazy_static::lazy_static! {
|
||||
static ref AUTO_UPDATER: AutoUpdater = AutoUpdater::new();
|
||||
}
|
||||
|
||||
+535
-117
@@ -1,11 +1,8 @@
|
||||
use base64::{engine::general_purpose, Engine as _};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ProxySettings {
|
||||
pub enabled: bool,
|
||||
pub proxy_type: String, // "http", "https", "socks4", or "socks5"
|
||||
pub host: String,
|
||||
pub port: u16,
|
||||
@@ -22,6 +19,7 @@ pub enum BrowserType {
|
||||
Brave,
|
||||
Zen,
|
||||
TorBrowser,
|
||||
Camoufox,
|
||||
}
|
||||
|
||||
impl BrowserType {
|
||||
@@ -34,6 +32,7 @@ impl BrowserType {
|
||||
BrowserType::Brave => "brave",
|
||||
BrowserType::Zen => "zen",
|
||||
BrowserType::TorBrowser => "tor-browser",
|
||||
BrowserType::Camoufox => "camoufox",
|
||||
}
|
||||
}
|
||||
|
||||
@@ -46,6 +45,7 @@ impl BrowserType {
|
||||
"brave" => Ok(BrowserType::Brave),
|
||||
"zen" => Ok(BrowserType::Zen),
|
||||
"tor-browser" => Ok(BrowserType::TorBrowser),
|
||||
"camoufox" => Ok(BrowserType::Camoufox),
|
||||
_ => Err(format!("Unknown browser type: {s}")),
|
||||
}
|
||||
}
|
||||
@@ -56,8 +56,10 @@ pub trait Browser: Send + Sync {
|
||||
fn create_launch_args(
|
||||
&self,
|
||||
profile_path: &str,
|
||||
_proxy_settings: Option<&ProxySettings>,
|
||||
proxy_settings: Option<&ProxySettings>,
|
||||
url: Option<String>,
|
||||
remote_debugging_port: Option<u16>,
|
||||
headless: bool,
|
||||
) -> Result<Vec<String>, Box<dyn std::error::Error>>;
|
||||
fn is_version_downloaded(&self, version: &str, binaries_dir: &Path) -> bool;
|
||||
fn prepare_executable(&self, executable_path: &Path) -> Result<(), Box<dyn std::error::Error>>;
|
||||
@@ -92,6 +94,7 @@ mod macos {
|
||||
|| name.starts_with("mullvad")
|
||||
|| name.starts_with("zen")
|
||||
|| name.starts_with("tor")
|
||||
|| name.starts_with("camoufox")
|
||||
|| name.contains("Browser")
|
||||
})
|
||||
.map(|entry| entry.path())
|
||||
@@ -167,17 +170,25 @@ mod linux {
|
||||
install_dir: &Path,
|
||||
browser_type: &BrowserType,
|
||||
) -> Result<PathBuf, Box<dyn std::error::Error>> {
|
||||
// Expected structure: install_dir/<browser>/<binary>
|
||||
// Expected structure examples:
|
||||
// - Firefox/Firefox Developer on Linux often extract to: install_dir/firefox/firefox
|
||||
// - Some archives may extract directly under: install_dir/firefox or install_dir/firefox-bin
|
||||
// - For some flavors we may have: install_dir/<browser_type>/<binary>
|
||||
let browser_subdir = install_dir.join(browser_type.as_str());
|
||||
|
||||
// Try firefox first (preferred), then firefox-bin
|
||||
// Try common firefox executable locations (nested and flat)
|
||||
let possible_executables = match browser_type {
|
||||
BrowserType::Firefox | BrowserType::FirefoxDeveloper => {
|
||||
vec![
|
||||
browser_subdir.join("firefox"),
|
||||
browser_subdir.join("firefox-bin"),
|
||||
]
|
||||
}
|
||||
BrowserType::Firefox | BrowserType::FirefoxDeveloper => vec![
|
||||
// Nested "firefox/firefox" or "firefox/firefox-bin"
|
||||
install_dir.join("firefox").join("firefox"),
|
||||
install_dir.join("firefox").join("firefox-bin"),
|
||||
// Flat under version directory
|
||||
install_dir.join("firefox"),
|
||||
install_dir.join("firefox-bin"),
|
||||
// Under a subdirectory matching the browser type
|
||||
browser_subdir.join("firefox"),
|
||||
browser_subdir.join("firefox-bin"),
|
||||
],
|
||||
BrowserType::MullvadBrowser => {
|
||||
vec![
|
||||
browser_subdir.join("firefox"),
|
||||
@@ -190,9 +201,20 @@ mod linux {
|
||||
}
|
||||
BrowserType::TorBrowser => {
|
||||
vec![
|
||||
browser_subdir.join("firefox"),
|
||||
// Common Tor Browser launchers
|
||||
browser_subdir.join("tor-browser"),
|
||||
// Firefox-based binaries
|
||||
browser_subdir.join("firefox"),
|
||||
browser_subdir.join("firefox-bin"),
|
||||
// Sometimes packaged similarly to Firefox
|
||||
install_dir.join("firefox").join("firefox"),
|
||||
install_dir.join("firefox").join("firefox-bin"),
|
||||
]
|
||||
}
|
||||
BrowserType::Camoufox => {
|
||||
vec![
|
||||
install_dir.join("camoufox-bin"),
|
||||
install_dir.join("camoufox"),
|
||||
]
|
||||
}
|
||||
_ => vec![],
|
||||
@@ -206,9 +228,9 @@ mod linux {
|
||||
|
||||
Err(
|
||||
format!(
|
||||
"Firefox executable not found in {}/{}",
|
||||
"Executable not found for {} in {}",
|
||||
browser_type.as_str(),
|
||||
install_dir.display(),
|
||||
browser_type.as_str()
|
||||
)
|
||||
.into(),
|
||||
)
|
||||
@@ -219,12 +241,29 @@ mod linux {
|
||||
browser_type: &BrowserType,
|
||||
) -> Result<PathBuf, Box<dyn std::error::Error>> {
|
||||
let possible_executables = match browser_type {
|
||||
BrowserType::Chromium => vec![install_dir.join("chromium"), install_dir.join("chrome")],
|
||||
BrowserType::Chromium => vec![
|
||||
// Direct paths (for manual installations)
|
||||
install_dir.join("chromium"),
|
||||
install_dir.join("chrome"),
|
||||
install_dir.join("chromium-browser"),
|
||||
// Subdirectory paths (for downloaded archives)
|
||||
install_dir.join("chrome-linux").join("chrome"),
|
||||
install_dir.join("chrome-linux").join("chromium"),
|
||||
install_dir.join("chromium").join("chromium"),
|
||||
install_dir.join("chromium").join("chrome"),
|
||||
// Binary subdirectory
|
||||
install_dir.join("bin").join("chromium"),
|
||||
install_dir.join("bin").join("chrome"),
|
||||
],
|
||||
BrowserType::Brave => vec![
|
||||
install_dir.join("brave"),
|
||||
install_dir.join("brave-browser"),
|
||||
install_dir.join("brave-browser-nightly"),
|
||||
install_dir.join("brave-browser-beta"),
|
||||
// Subdirectory paths
|
||||
install_dir.join("brave").join("brave"),
|
||||
install_dir.join("brave-browser").join("brave"),
|
||||
install_dir.join("bin").join("brave"),
|
||||
],
|
||||
_ => vec![],
|
||||
};
|
||||
@@ -246,18 +285,21 @@ mod linux {
|
||||
}
|
||||
|
||||
pub fn is_firefox_version_downloaded(install_dir: &Path, browser_type: &BrowserType) -> bool {
|
||||
// Expected structure: install_dir/<browser>/<binary>
|
||||
// Expected structure (most common):
|
||||
// install_dir/<browser>/<binary>
|
||||
// However, Firefox Developer tarballs often extract to a "firefox" subfolder
|
||||
// rather than "firefox-developer". Support both layouts.
|
||||
let browser_subdir = install_dir.join(browser_type.as_str());
|
||||
|
||||
if !browser_subdir.exists() || !browser_subdir.is_dir() {
|
||||
return false;
|
||||
}
|
||||
|
||||
let possible_executables = match browser_type {
|
||||
BrowserType::Firefox | BrowserType::FirefoxDeveloper => {
|
||||
vec![
|
||||
// Preferred: executable inside a subdirectory named after the browser type
|
||||
browser_subdir.join("firefox-bin"),
|
||||
browser_subdir.join("firefox"),
|
||||
// Fallback: executable inside a generic "firefox" subdirectory
|
||||
install_dir.join("firefox").join("firefox-bin"),
|
||||
install_dir.join("firefox").join("firefox"),
|
||||
]
|
||||
}
|
||||
BrowserType::MullvadBrowser => {
|
||||
@@ -277,6 +319,12 @@ mod linux {
|
||||
browser_subdir.join("firefox"),
|
||||
]
|
||||
}
|
||||
BrowserType::Camoufox => {
|
||||
vec![
|
||||
install_dir.join("camoufox-bin"),
|
||||
install_dir.join("camoufox"),
|
||||
]
|
||||
}
|
||||
_ => vec![],
|
||||
};
|
||||
|
||||
@@ -291,12 +339,29 @@ mod linux {
|
||||
|
||||
pub fn is_chromium_version_downloaded(install_dir: &Path, browser_type: &BrowserType) -> bool {
|
||||
let possible_executables = match browser_type {
|
||||
BrowserType::Chromium => vec![install_dir.join("chromium"), install_dir.join("chrome")],
|
||||
BrowserType::Chromium => vec![
|
||||
// Direct paths (for manual installations)
|
||||
install_dir.join("chromium"),
|
||||
install_dir.join("chrome"),
|
||||
install_dir.join("chromium-browser"),
|
||||
// Subdirectory paths (for downloaded archives)
|
||||
install_dir.join("chrome-linux").join("chrome"),
|
||||
install_dir.join("chrome-linux").join("chromium"),
|
||||
install_dir.join("chromium").join("chromium"),
|
||||
install_dir.join("chromium").join("chrome"),
|
||||
// Binary subdirectory
|
||||
install_dir.join("bin").join("chromium"),
|
||||
install_dir.join("bin").join("chrome"),
|
||||
],
|
||||
BrowserType::Brave => vec![
|
||||
install_dir.join("brave"),
|
||||
install_dir.join("brave-browser"),
|
||||
install_dir.join("brave-browser-nightly"),
|
||||
install_dir.join("brave-browser-beta"),
|
||||
// Subdirectory paths
|
||||
install_dir.join("brave").join("brave"),
|
||||
install_dir.join("brave-browser").join("brave"),
|
||||
install_dir.join("bin").join("brave"),
|
||||
],
|
||||
_ => vec![],
|
||||
};
|
||||
@@ -361,6 +426,7 @@ mod windows {
|
||||
|| name.starts_with("mullvad")
|
||||
|| name.starts_with("zen")
|
||||
|| name.starts_with("tor")
|
||||
|| name.starts_with("camoufox")
|
||||
|| name.contains("browser")
|
||||
{
|
||||
return Ok(path);
|
||||
@@ -383,11 +449,18 @@ mod windows {
|
||||
install_dir.join("chrome.exe"),
|
||||
install_dir.join("chromium-browser.exe"),
|
||||
install_dir.join("bin").join("chromium.exe"),
|
||||
// Common archive extraction patterns
|
||||
install_dir.join("chrome-win").join("chrome.exe"),
|
||||
install_dir.join("chromium").join("chromium.exe"),
|
||||
install_dir.join("chromium").join("chrome.exe"),
|
||||
],
|
||||
BrowserType::Brave => vec![
|
||||
install_dir.join("brave.exe"),
|
||||
install_dir.join("brave-browser.exe"),
|
||||
install_dir.join("bin").join("brave.exe"),
|
||||
// Subdirectory patterns
|
||||
install_dir.join("brave").join("brave.exe"),
|
||||
install_dir.join("brave-browser").join("brave.exe"),
|
||||
],
|
||||
_ => vec![],
|
||||
};
|
||||
@@ -439,6 +512,7 @@ mod windows {
|
||||
|| name.starts_with("mullvad")
|
||||
|| name.starts_with("zen")
|
||||
|| name.starts_with("tor")
|
||||
|| name.starts_with("camoufox")
|
||||
|| name.contains("browser")
|
||||
{
|
||||
return true;
|
||||
@@ -458,11 +532,18 @@ mod windows {
|
||||
install_dir.join("chrome.exe"),
|
||||
install_dir.join("chromium-browser.exe"),
|
||||
install_dir.join("bin").join("chromium.exe"),
|
||||
// Common archive extraction patterns
|
||||
install_dir.join("chrome-win").join("chrome.exe"),
|
||||
install_dir.join("chromium").join("chromium.exe"),
|
||||
install_dir.join("chromium").join("chrome.exe"),
|
||||
],
|
||||
BrowserType::Brave => vec![
|
||||
install_dir.join("brave.exe"),
|
||||
install_dir.join("brave-browser.exe"),
|
||||
install_dir.join("bin").join("brave.exe"),
|
||||
// Subdirectory patterns
|
||||
install_dir.join("brave").join("brave.exe"),
|
||||
install_dir.join("brave-browser").join("brave.exe"),
|
||||
],
|
||||
_ => vec![],
|
||||
};
|
||||
@@ -526,17 +607,36 @@ impl Browser for FirefoxBrowser {
|
||||
profile_path: &str,
|
||||
_proxy_settings: Option<&ProxySettings>,
|
||||
url: Option<String>,
|
||||
remote_debugging_port: Option<u16>,
|
||||
headless: bool,
|
||||
) -> Result<Vec<String>, Box<dyn std::error::Error>> {
|
||||
let mut args = vec!["-profile".to_string(), profile_path.to_string()];
|
||||
|
||||
// Only use -no-remote for browsers that require it for security (Mullvad, Tor)
|
||||
// Regular Firefox browsers can use remote commands for better URL handling
|
||||
// Add remote debugging if requested
|
||||
if let Some(port) = remote_debugging_port {
|
||||
args.push("--start-debugger-server".to_string());
|
||||
args.push(port.to_string());
|
||||
}
|
||||
|
||||
// Add headless mode if requested
|
||||
if headless {
|
||||
args.push("--headless".to_string());
|
||||
}
|
||||
|
||||
// Use -no-remote for browsers that require it for security (Mullvad, Tor) or when remote debugging
|
||||
match self.browser_type {
|
||||
BrowserType::MullvadBrowser | BrowserType::TorBrowser => {
|
||||
args.push("-no-remote".to_string());
|
||||
}
|
||||
BrowserType::Firefox | BrowserType::FirefoxDeveloper | BrowserType::Zen => {
|
||||
// Don't use -no-remote so we can communicate with existing instances
|
||||
BrowserType::Firefox
|
||||
| BrowserType::FirefoxDeveloper
|
||||
| BrowserType::Zen
|
||||
| BrowserType::Camoufox => {
|
||||
// Use -no-remote when remote debugging to avoid conflicts
|
||||
if remote_debugging_port.is_some() {
|
||||
args.push("-no-remote".to_string());
|
||||
}
|
||||
// Don't use -no-remote for normal launches so we can communicate with existing instances
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
@@ -625,6 +725,8 @@ impl Browser for ChromiumBrowser {
|
||||
profile_path: &str,
|
||||
proxy_settings: Option<&ProxySettings>,
|
||||
url: Option<String>,
|
||||
remote_debugging_port: Option<u16>,
|
||||
headless: bool,
|
||||
) -> Result<Vec<String>, Box<dyn std::error::Error>> {
|
||||
let mut args = vec![
|
||||
format!("--user-data-dir={}", profile_path),
|
||||
@@ -633,20 +735,26 @@ impl Browser for ChromiumBrowser {
|
||||
"--disable-component-update".to_string(),
|
||||
"--disable-background-timer-throttling".to_string(),
|
||||
"--crash-server-url=".to_string(),
|
||||
"--disable-updater".to_string(),
|
||||
];
|
||||
|
||||
// Add remote debugging if requested
|
||||
if let Some(port) = remote_debugging_port {
|
||||
args.push("--remote-debugging-address=0.0.0.0".to_string());
|
||||
args.push(format!("--remote-debugging-port={port}"));
|
||||
}
|
||||
|
||||
// Add headless mode if requested
|
||||
if headless {
|
||||
args.push("--headless".to_string());
|
||||
}
|
||||
|
||||
// Add proxy configuration if provided
|
||||
if let Some(proxy) = proxy_settings {
|
||||
if proxy.enabled {
|
||||
let pac_path = Path::new(profile_path).join("proxy.pac");
|
||||
if pac_path.exists() {
|
||||
let pac_content = fs::read(&pac_path)?;
|
||||
let pac_base64 = general_purpose::STANDARD.encode(&pac_content);
|
||||
args.push(format!(
|
||||
"--proxy-pac-url=data:application/x-javascript-config;base64,{pac_base64}"
|
||||
));
|
||||
}
|
||||
}
|
||||
args.push(format!(
|
||||
"--proxy-server=http://{}:{}",
|
||||
proxy.host, proxy.port
|
||||
));
|
||||
}
|
||||
|
||||
if let Some(url) = url {
|
||||
@@ -700,18 +808,123 @@ impl Browser for ChromiumBrowser {
|
||||
}
|
||||
}
|
||||
|
||||
// Factory function to create browser instances
|
||||
pub fn create_browser(browser_type: BrowserType) -> Box<dyn Browser> {
|
||||
match browser_type {
|
||||
BrowserType::MullvadBrowser
|
||||
| BrowserType::Firefox
|
||||
| BrowserType::FirefoxDeveloper
|
||||
| BrowserType::Zen
|
||||
| BrowserType::TorBrowser => Box::new(FirefoxBrowser::new(browser_type)),
|
||||
BrowserType::Chromium | BrowserType::Brave => Box::new(ChromiumBrowser::new(browser_type)),
|
||||
pub struct CamoufoxBrowser;
|
||||
|
||||
impl CamoufoxBrowser {
|
||||
pub fn new() -> Self {
|
||||
Self
|
||||
}
|
||||
}
|
||||
|
||||
impl Browser for CamoufoxBrowser {
|
||||
fn get_executable_path(&self, install_dir: &Path) -> Result<PathBuf, Box<dyn std::error::Error>> {
|
||||
#[cfg(target_os = "macos")]
|
||||
return macos::get_firefox_executable_path(install_dir);
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
return linux::get_firefox_executable_path(install_dir, &BrowserType::Camoufox);
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
return windows::get_firefox_executable_path(install_dir);
|
||||
|
||||
#[cfg(not(any(target_os = "macos", target_os = "linux", target_os = "windows")))]
|
||||
Err("Unsupported platform".into())
|
||||
}
|
||||
|
||||
fn create_launch_args(
|
||||
&self,
|
||||
profile_path: &str,
|
||||
_proxy_settings: Option<&ProxySettings>,
|
||||
url: Option<String>,
|
||||
remote_debugging_port: Option<u16>,
|
||||
headless: bool,
|
||||
) -> Result<Vec<String>, Box<dyn std::error::Error>> {
|
||||
// For Camoufox, we handle launching through the camoufox launcher
|
||||
// This method won't be used directly, but we provide basic Firefox args as fallback
|
||||
let mut args = vec![
|
||||
"-profile".to_string(),
|
||||
profile_path.to_string(),
|
||||
"-no-remote".to_string(),
|
||||
];
|
||||
|
||||
// Add remote debugging if requested
|
||||
if let Some(port) = remote_debugging_port {
|
||||
args.push("--start-debugger-server".to_string());
|
||||
args.push(port.to_string());
|
||||
}
|
||||
|
||||
// Add headless mode if requested
|
||||
if headless {
|
||||
args.push("--headless".to_string());
|
||||
}
|
||||
|
||||
if let Some(url) = url {
|
||||
args.push(url);
|
||||
}
|
||||
|
||||
Ok(args)
|
||||
}
|
||||
|
||||
fn is_version_downloaded(&self, version: &str, binaries_dir: &Path) -> bool {
|
||||
let install_dir = binaries_dir.join("camoufox").join(version);
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
return macos::is_firefox_version_downloaded(&install_dir);
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
return linux::is_firefox_version_downloaded(&install_dir, &BrowserType::Camoufox);
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
return windows::is_firefox_version_downloaded(&install_dir);
|
||||
|
||||
#[cfg(not(any(target_os = "macos", target_os = "linux", target_os = "windows")))]
|
||||
false
|
||||
}
|
||||
|
||||
fn prepare_executable(&self, executable_path: &Path) -> Result<(), Box<dyn std::error::Error>> {
|
||||
#[cfg(target_os = "macos")]
|
||||
return macos::prepare_executable(executable_path);
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
return linux::prepare_executable(executable_path);
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
return windows::prepare_executable(executable_path);
|
||||
|
||||
#[cfg(not(any(target_os = "macos", target_os = "linux", target_os = "windows")))]
|
||||
Err("Unsupported platform".into())
|
||||
}
|
||||
}
|
||||
|
||||
pub struct BrowserFactory;
|
||||
|
||||
impl BrowserFactory {
|
||||
fn new() -> Self {
|
||||
Self
|
||||
}
|
||||
|
||||
pub fn instance() -> &'static BrowserFactory {
|
||||
&BROWSER_FACTORY
|
||||
}
|
||||
|
||||
pub fn create_browser(&self, browser_type: BrowserType) -> Box<dyn Browser> {
|
||||
match browser_type {
|
||||
BrowserType::MullvadBrowser
|
||||
| BrowserType::Firefox
|
||||
| BrowserType::FirefoxDeveloper
|
||||
| BrowserType::Zen
|
||||
| BrowserType::TorBrowser => Box::new(FirefoxBrowser::new(browser_type)),
|
||||
BrowserType::Chromium | BrowserType::Brave => Box::new(ChromiumBrowser::new(browser_type)),
|
||||
BrowserType::Camoufox => Box::new(CamoufoxBrowser::new()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Factory function to create browser instances (kept for backward compatibility)
|
||||
pub fn create_browser(browser_type: BrowserType) -> Box<dyn Browser> {
|
||||
BrowserFactory::instance().create_browser(browser_type)
|
||||
}
|
||||
|
||||
// Add GithubRelease and GithubAsset structs to browser.rs if they don't already exist
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct GithubRelease {
|
||||
@@ -725,6 +938,24 @@ pub struct GithubRelease {
|
||||
pub is_nightly: bool,
|
||||
#[serde(default)]
|
||||
pub prerelease: bool,
|
||||
#[serde(default)]
|
||||
pub draft: bool,
|
||||
#[serde(default)]
|
||||
pub body: Option<String>,
|
||||
#[serde(default)]
|
||||
pub html_url: Option<String>,
|
||||
#[serde(default)]
|
||||
pub id: Option<u64>,
|
||||
#[serde(default)]
|
||||
pub node_id: Option<String>,
|
||||
#[serde(default)]
|
||||
pub target_commitish: Option<String>,
|
||||
#[serde(default)]
|
||||
pub created_at: Option<String>,
|
||||
#[serde(default)]
|
||||
pub tarball_url: Option<String>,
|
||||
#[serde(default)]
|
||||
pub zipball_url: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
@@ -733,6 +964,22 @@ pub struct GithubAsset {
|
||||
pub browser_download_url: String,
|
||||
#[serde(default)]
|
||||
pub size: u64,
|
||||
#[serde(default)]
|
||||
pub download_count: Option<u64>,
|
||||
#[serde(default)]
|
||||
pub id: Option<u64>,
|
||||
#[serde(default)]
|
||||
pub node_id: Option<String>,
|
||||
#[serde(default)]
|
||||
pub label: Option<String>,
|
||||
#[serde(default)]
|
||||
pub content_type: Option<String>,
|
||||
#[serde(default)]
|
||||
pub state: Option<String>,
|
||||
#[serde(default)]
|
||||
pub created_at: Option<String>,
|
||||
#[serde(default)]
|
||||
pub updated_at: Option<String>,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -751,114 +998,211 @@ mod tests {
|
||||
assert_eq!(BrowserType::Brave.as_str(), "brave");
|
||||
assert_eq!(BrowserType::Zen.as_str(), "zen");
|
||||
assert_eq!(BrowserType::TorBrowser.as_str(), "tor-browser");
|
||||
assert_eq!(BrowserType::Camoufox.as_str(), "camoufox");
|
||||
|
||||
// Test from_str
|
||||
// Test from_str - use expect with descriptive messages instead of unwrap
|
||||
assert_eq!(
|
||||
BrowserType::from_str("mullvad-browser").unwrap(),
|
||||
BrowserType::from_str("mullvad-browser").expect("mullvad-browser should be valid"),
|
||||
BrowserType::MullvadBrowser
|
||||
);
|
||||
assert_eq!(
|
||||
BrowserType::from_str("firefox").unwrap(),
|
||||
BrowserType::from_str("firefox").expect("firefox should be valid"),
|
||||
BrowserType::Firefox
|
||||
);
|
||||
assert_eq!(
|
||||
BrowserType::from_str("firefox-developer").unwrap(),
|
||||
BrowserType::from_str("firefox-developer").expect("firefox-developer should be valid"),
|
||||
BrowserType::FirefoxDeveloper
|
||||
);
|
||||
assert_eq!(
|
||||
BrowserType::from_str("chromium").unwrap(),
|
||||
BrowserType::from_str("chromium").expect("chromium should be valid"),
|
||||
BrowserType::Chromium
|
||||
);
|
||||
assert_eq!(BrowserType::from_str("brave").unwrap(), BrowserType::Brave);
|
||||
assert_eq!(BrowserType::from_str("zen").unwrap(), BrowserType::Zen);
|
||||
assert_eq!(
|
||||
BrowserType::from_str("tor-browser").unwrap(),
|
||||
BrowserType::from_str("brave").expect("brave should be valid"),
|
||||
BrowserType::Brave
|
||||
);
|
||||
assert_eq!(
|
||||
BrowserType::from_str("zen").expect("zen should be valid"),
|
||||
BrowserType::Zen
|
||||
);
|
||||
assert_eq!(
|
||||
BrowserType::from_str("tor-browser").expect("tor-browser should be valid"),
|
||||
BrowserType::TorBrowser
|
||||
);
|
||||
assert_eq!(
|
||||
BrowserType::from_str("camoufox").expect("camoufox should be valid"),
|
||||
BrowserType::Camoufox
|
||||
);
|
||||
|
||||
// Test invalid browser type
|
||||
assert!(BrowserType::from_str("invalid").is_err());
|
||||
assert!(BrowserType::from_str("").is_err());
|
||||
assert!(BrowserType::from_str("Firefox").is_err()); // Case sensitive
|
||||
// Test invalid browser type - these should properly fail
|
||||
let invalid_result = BrowserType::from_str("invalid");
|
||||
assert!(
|
||||
invalid_result.is_err(),
|
||||
"Invalid browser type should return error"
|
||||
);
|
||||
|
||||
let empty_result = BrowserType::from_str("");
|
||||
assert!(empty_result.is_err(), "Empty string should return error");
|
||||
|
||||
let case_sensitive_result = BrowserType::from_str("Firefox");
|
||||
assert!(
|
||||
case_sensitive_result.is_err(),
|
||||
"Case sensitive check should fail"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_firefox_launch_args() {
|
||||
// Test regular Firefox (should not use -no-remote)
|
||||
// Test regular Firefox (should not use -no-remote for normal launch)
|
||||
let browser = FirefoxBrowser::new(BrowserType::Firefox);
|
||||
let args = browser
|
||||
.create_launch_args("/path/to/profile", None, None)
|
||||
.unwrap();
|
||||
.create_launch_args("/path/to/profile", None, None, None, false)
|
||||
.expect("Failed to create launch args for Firefox");
|
||||
assert_eq!(args, vec!["-profile", "/path/to/profile"]);
|
||||
assert!(!args.contains(&"-no-remote".to_string()));
|
||||
assert!(
|
||||
!args.contains(&"-no-remote".to_string()),
|
||||
"Firefox should not use -no-remote for normal launch"
|
||||
);
|
||||
|
||||
let args = browser
|
||||
.create_launch_args(
|
||||
"/path/to/profile",
|
||||
None,
|
||||
Some("https://example.com".to_string()),
|
||||
None,
|
||||
false,
|
||||
)
|
||||
.unwrap();
|
||||
.expect("Failed to create launch args for Firefox with URL");
|
||||
assert_eq!(
|
||||
args,
|
||||
vec!["-profile", "/path/to/profile", "https://example.com"]
|
||||
);
|
||||
|
||||
// Test Mullvad Browser (should use -no-remote)
|
||||
// Test Firefox with remote debugging (should use -no-remote)
|
||||
let args = browser
|
||||
.create_launch_args("/path/to/profile", None, None, Some(9222), false)
|
||||
.expect("Failed to create launch args for Firefox with remote debugging");
|
||||
assert!(
|
||||
args.contains(&"-no-remote".to_string()),
|
||||
"Firefox should use -no-remote for remote debugging"
|
||||
);
|
||||
assert!(
|
||||
args.contains(&"--start-debugger-server".to_string()),
|
||||
"Firefox should include debugger server arg"
|
||||
);
|
||||
assert!(
|
||||
args.contains(&"9222".to_string()),
|
||||
"Firefox should include debugging port"
|
||||
);
|
||||
|
||||
// Test Mullvad Browser (should always use -no-remote)
|
||||
let browser = FirefoxBrowser::new(BrowserType::MullvadBrowser);
|
||||
let args = browser
|
||||
.create_launch_args("/path/to/profile", None, None)
|
||||
.unwrap();
|
||||
.create_launch_args("/path/to/profile", None, None, None, false)
|
||||
.expect("Failed to create launch args for Mullvad Browser");
|
||||
assert_eq!(args, vec!["-profile", "/path/to/profile", "-no-remote"]);
|
||||
|
||||
// Test Tor Browser (should use -no-remote)
|
||||
// Test Tor Browser (should always use -no-remote)
|
||||
let browser = FirefoxBrowser::new(BrowserType::TorBrowser);
|
||||
let args = browser
|
||||
.create_launch_args("/path/to/profile", None, None)
|
||||
.unwrap();
|
||||
.create_launch_args("/path/to/profile", None, None, None, false)
|
||||
.expect("Failed to create launch args for Tor Browser");
|
||||
assert_eq!(args, vec!["-profile", "/path/to/profile", "-no-remote"]);
|
||||
|
||||
// Test Zen Browser (should not use -no-remote)
|
||||
// Test Zen Browser (should not use -no-remote for normal launch)
|
||||
let browser = FirefoxBrowser::new(BrowserType::Zen);
|
||||
let args = browser
|
||||
.create_launch_args("/path/to/profile", None, None)
|
||||
.unwrap();
|
||||
.create_launch_args("/path/to/profile", None, None, None, false)
|
||||
.expect("Failed to create launch args for Zen Browser");
|
||||
assert_eq!(args, vec!["-profile", "/path/to/profile"]);
|
||||
assert!(!args.contains(&"-no-remote".to_string()));
|
||||
assert!(
|
||||
!args.contains(&"-no-remote".to_string()),
|
||||
"Zen Browser should not use -no-remote for normal launch"
|
||||
);
|
||||
|
||||
// Test headless mode
|
||||
let args = browser
|
||||
.create_launch_args("/path/to/profile", None, None, None, true)
|
||||
.expect("Failed to create launch args for Zen Browser headless");
|
||||
assert!(
|
||||
args.contains(&"--headless".to_string()),
|
||||
"Browser should include headless flag when requested"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_chromium_launch_args() {
|
||||
let browser = ChromiumBrowser::new(BrowserType::Chromium);
|
||||
let args = browser
|
||||
.create_launch_args("/path/to/profile", None, None)
|
||||
.unwrap();
|
||||
.create_launch_args("/path/to/profile", None, None, None, false)
|
||||
.expect("Failed to create launch args for Chromium");
|
||||
|
||||
// Test that basic required arguments are present
|
||||
assert!(args.contains(&"--user-data-dir=/path/to/profile".to_string()));
|
||||
assert!(args.contains(&"--no-default-browser-check".to_string()));
|
||||
assert!(
|
||||
args.contains(&"--user-data-dir=/path/to/profile".to_string()),
|
||||
"Chromium args should contain user-data-dir"
|
||||
);
|
||||
assert!(
|
||||
args.contains(&"--no-default-browser-check".to_string()),
|
||||
"Chromium args should contain no-default-browser-check"
|
||||
);
|
||||
|
||||
// Test that automatic update disabling arguments are present
|
||||
assert!(args.contains(&"--disable-background-mode".to_string()));
|
||||
assert!(args.contains(&"--disable-component-update".to_string()));
|
||||
assert!(
|
||||
args.contains(&"--disable-background-mode".to_string()),
|
||||
"Chromium args should contain disable-background-mode"
|
||||
);
|
||||
assert!(
|
||||
args.contains(&"--disable-component-update".to_string()),
|
||||
"Chromium args should contain disable-component-update"
|
||||
);
|
||||
|
||||
let args_with_url = browser
|
||||
.create_launch_args(
|
||||
"/path/to/profile",
|
||||
None,
|
||||
Some("https://example.com".to_string()),
|
||||
None,
|
||||
false,
|
||||
)
|
||||
.unwrap();
|
||||
assert!(args_with_url.contains(&"https://example.com".to_string()));
|
||||
.expect("Failed to create launch args for Chromium with URL");
|
||||
assert!(
|
||||
args_with_url.contains(&"https://example.com".to_string()),
|
||||
"Chromium args should contain the URL"
|
||||
);
|
||||
|
||||
// Verify URL is at the end
|
||||
assert_eq!(args_with_url.last().unwrap(), "https://example.com");
|
||||
assert_eq!(
|
||||
args_with_url.last().expect("Args should not be empty"),
|
||||
"https://example.com"
|
||||
);
|
||||
|
||||
// Test remote debugging
|
||||
let args_with_debug = browser
|
||||
.create_launch_args("/path/to/profile", None, None, Some(9222), false)
|
||||
.expect("Failed to create launch args for Chromium with remote debugging");
|
||||
assert!(
|
||||
args_with_debug.contains(&"--remote-debugging-port=9222".to_string()),
|
||||
"Chromium args should contain remote debugging port"
|
||||
);
|
||||
assert!(
|
||||
args_with_debug.contains(&"--remote-debugging-address=0.0.0.0".to_string()),
|
||||
"Chromium args should contain remote debugging address"
|
||||
);
|
||||
|
||||
// Test headless mode
|
||||
let args_headless = browser
|
||||
.create_launch_args("/path/to/profile", None, None, None, true)
|
||||
.expect("Failed to create launch args for Chromium headless");
|
||||
assert!(
|
||||
args_headless.contains(&"--headless".to_string()),
|
||||
"Chromium args should contain headless flag when requested"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_proxy_settings_creation() {
|
||||
let proxy = ProxySettings {
|
||||
enabled: true,
|
||||
proxy_type: "http".to_string(),
|
||||
host: "127.0.0.1".to_string(),
|
||||
port: 8080,
|
||||
@@ -866,14 +1210,12 @@ mod tests {
|
||||
password: None,
|
||||
};
|
||||
|
||||
assert!(proxy.enabled);
|
||||
assert_eq!(proxy.proxy_type, "http");
|
||||
assert_eq!(proxy.host, "127.0.0.1");
|
||||
assert_eq!(proxy.port, 8080);
|
||||
|
||||
// Test different proxy types
|
||||
let socks_proxy = ProxySettings {
|
||||
enabled: true,
|
||||
proxy_type: "socks5".to_string(),
|
||||
host: "proxy.example.com".to_string(),
|
||||
port: 1080,
|
||||
@@ -888,16 +1230,45 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_version_downloaded_check() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp directory");
|
||||
let binaries_dir = temp_dir.path();
|
||||
|
||||
// Create a mock Firefox browser installation with new path structure: binaries/<browser>/<version>/
|
||||
let browser_dir = binaries_dir.join("firefox").join("139.0");
|
||||
fs::create_dir_all(&browser_dir).unwrap();
|
||||
fs::create_dir_all(&browser_dir).expect("Failed to create browser directory");
|
||||
|
||||
// Create a mock .app directory
|
||||
let app_dir = browser_dir.join("Firefox.app");
|
||||
fs::create_dir_all(&app_dir).unwrap();
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
// Create a mock .app directory for macOS
|
||||
let app_dir = browser_dir.join("Firefox.app");
|
||||
fs::create_dir_all(&app_dir).expect("Failed to create Firefox.app directory");
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
// Create a mock firefox subdirectory and executable for Linux
|
||||
let firefox_subdir = browser_dir.join("firefox");
|
||||
fs::create_dir_all(&firefox_subdir).expect("Failed to create firefox subdirectory");
|
||||
let executable_path = firefox_subdir.join("firefox");
|
||||
fs::write(&executable_path, "mock executable").expect("Failed to write mock executable");
|
||||
|
||||
// Set executable permissions on Linux
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
let mut permissions = executable_path
|
||||
.metadata()
|
||||
.expect("Failed to get file metadata")
|
||||
.permissions();
|
||||
permissions.set_mode(0o755);
|
||||
fs::set_permissions(&executable_path, permissions)
|
||||
.expect("Failed to set executable permissions");
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
// Create a mock firefox.exe for Windows
|
||||
let executable_path = browser_dir.join("firefox.exe");
|
||||
fs::write(&executable_path, "mock executable").expect("Failed to write mock executable");
|
||||
}
|
||||
|
||||
let browser = FirefoxBrowser::new(BrowserType::Firefox);
|
||||
assert!(browser.is_version_downloaded("139.0", binaries_dir));
|
||||
@@ -905,36 +1276,76 @@ mod tests {
|
||||
|
||||
// Test with Chromium browser with new path structure
|
||||
let chromium_dir = binaries_dir.join("chromium").join("1465660");
|
||||
fs::create_dir_all(&chromium_dir).unwrap();
|
||||
let chromium_app_dir = chromium_dir.join("Chromium.app");
|
||||
fs::create_dir_all(chromium_app_dir.join("Contents").join("MacOS")).unwrap();
|
||||
fs::create_dir_all(&chromium_dir).expect("Failed to create chromium directory");
|
||||
|
||||
// Create a mock executable
|
||||
let executable_path = chromium_app_dir
|
||||
.join("Contents")
|
||||
.join("MacOS")
|
||||
.join("Chromium");
|
||||
fs::write(&executable_path, "mock executable").unwrap();
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
let chromium_app_dir = chromium_dir.join("Chromium.app");
|
||||
fs::create_dir_all(chromium_app_dir.join("Contents").join("MacOS"))
|
||||
.expect("Failed to create Chromium.app structure");
|
||||
|
||||
// Create a mock executable
|
||||
let executable_path = chromium_app_dir
|
||||
.join("Contents")
|
||||
.join("MacOS")
|
||||
.join("Chromium");
|
||||
fs::write(&executable_path, "mock executable")
|
||||
.expect("Failed to write mock Chromium executable");
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
// Create a mock chromium executable for Linux
|
||||
let executable_path = chromium_dir.join("chromium");
|
||||
fs::write(&executable_path, "mock executable")
|
||||
.expect("Failed to write mock chromium executable");
|
||||
|
||||
// Set executable permissions on Linux
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
let mut permissions = executable_path
|
||||
.metadata()
|
||||
.expect("Failed to get chromium metadata")
|
||||
.permissions();
|
||||
permissions.set_mode(0o755);
|
||||
fs::set_permissions(&executable_path, permissions)
|
||||
.expect("Failed to set chromium permissions");
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
// Create a mock chromium.exe for Windows
|
||||
let executable_path = chromium_dir.join("chromium.exe");
|
||||
fs::write(&executable_path, "mock executable").expect("Failed to write mock chromium.exe");
|
||||
}
|
||||
|
||||
let chromium_browser = ChromiumBrowser::new(BrowserType::Chromium);
|
||||
assert!(chromium_browser.is_version_downloaded("1465660", binaries_dir));
|
||||
assert!(!chromium_browser.is_version_downloaded("1465661", binaries_dir));
|
||||
assert!(
|
||||
chromium_browser.is_version_downloaded("1465660", binaries_dir),
|
||||
"Chromium version should be detected as downloaded"
|
||||
);
|
||||
assert!(
|
||||
!chromium_browser.is_version_downloaded("1465661", binaries_dir),
|
||||
"Non-existent Chromium version should not be detected as downloaded"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_version_downloaded_no_app_directory() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp directory");
|
||||
let binaries_dir = temp_dir.path();
|
||||
|
||||
// Create browser directory but no .app directory with new path structure
|
||||
// Create browser directory but no proper executable structure
|
||||
let browser_dir = binaries_dir.join("firefox").join("139.0");
|
||||
fs::create_dir_all(&browser_dir).unwrap();
|
||||
fs::create_dir_all(&browser_dir).expect("Failed to create browser directory");
|
||||
|
||||
// Create some other files but no .app
|
||||
fs::write(browser_dir.join("readme.txt"), "Some content").unwrap();
|
||||
// Create some other files but no proper executable structure
|
||||
fs::write(browser_dir.join("readme.txt"), "Some content").expect("Failed to write readme file");
|
||||
|
||||
let browser = FirefoxBrowser::new(BrowserType::Firefox);
|
||||
assert!(!browser.is_version_downloaded("139.0", binaries_dir));
|
||||
assert!(
|
||||
!browser.is_version_downloaded("139.0", binaries_dir),
|
||||
"Firefox version should not be detected without proper executable structure"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -951,7 +1362,6 @@ mod tests {
|
||||
#[test]
|
||||
fn test_proxy_settings_serialization() {
|
||||
let proxy = ProxySettings {
|
||||
enabled: true,
|
||||
proxy_type: "http".to_string(),
|
||||
host: "127.0.0.1".to_string(),
|
||||
port: 8080,
|
||||
@@ -960,16 +1370,24 @@ mod tests {
|
||||
};
|
||||
|
||||
// Test that it can be serialized (implements Serialize)
|
||||
let json = serde_json::to_string(&proxy).unwrap();
|
||||
assert!(json.contains("127.0.0.1"));
|
||||
assert!(json.contains("8080"));
|
||||
assert!(json.contains("http"));
|
||||
let json = serde_json::to_string(&proxy).expect("Failed to serialize proxy settings");
|
||||
assert!(json.contains("127.0.0.1"), "JSON should contain host IP");
|
||||
assert!(json.contains("8080"), "JSON should contain port number");
|
||||
assert!(json.contains("http"), "JSON should contain proxy type");
|
||||
|
||||
// Test that it can be deserialized (implements Deserialize)
|
||||
let deserialized: ProxySettings = serde_json::from_str(&json).unwrap();
|
||||
assert_eq!(deserialized.enabled, proxy.enabled);
|
||||
assert_eq!(deserialized.proxy_type, proxy.proxy_type);
|
||||
assert_eq!(deserialized.host, proxy.host);
|
||||
assert_eq!(deserialized.port, proxy.port);
|
||||
let deserialized: ProxySettings =
|
||||
serde_json::from_str(&json).expect("Failed to deserialize proxy settings");
|
||||
assert_eq!(
|
||||
deserialized.proxy_type, proxy.proxy_type,
|
||||
"Proxy type should match"
|
||||
);
|
||||
assert_eq!(deserialized.host, proxy.host, "Host should match");
|
||||
assert_eq!(deserialized.port, proxy.port, "Port should match");
|
||||
}
|
||||
}
|
||||
|
||||
// Global singleton instance
|
||||
lazy_static::lazy_static! {
|
||||
static ref BROWSER_FACTORY: BrowserFactory = BrowserFactory::new();
|
||||
}
|
||||
|
||||
+1376
-2552
File diff suppressed because it is too large
Load Diff
+496
-654
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,502 @@
|
||||
use crate::browser_runner::BrowserRunner;
|
||||
use crate::profile::BrowserProfile;
|
||||
use directories::BaseDirs;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use tauri::AppHandle;
|
||||
use tauri_plugin_shell::ShellExt;
|
||||
use tokio::sync::Mutex as AsyncMutex;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct CamoufoxConfig {
|
||||
pub proxy: Option<String>,
|
||||
pub screen_max_width: Option<u32>,
|
||||
pub screen_max_height: Option<u32>,
|
||||
pub screen_min_width: Option<u32>,
|
||||
pub screen_min_height: Option<u32>,
|
||||
pub geoip: Option<serde_json::Value>, // Can be String or bool
|
||||
pub block_images: Option<bool>,
|
||||
pub block_webrtc: Option<bool>,
|
||||
pub block_webgl: Option<bool>,
|
||||
pub executable_path: Option<String>,
|
||||
pub fingerprint: Option<String>, // JSON string of the complete fingerprint config
|
||||
}
|
||||
|
||||
impl Default for CamoufoxConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
proxy: None,
|
||||
screen_max_width: None,
|
||||
screen_max_height: None,
|
||||
screen_min_width: None,
|
||||
screen_min_height: None,
|
||||
geoip: Some(serde_json::Value::Bool(true)),
|
||||
block_images: None,
|
||||
block_webrtc: None,
|
||||
block_webgl: None,
|
||||
executable_path: None,
|
||||
fingerprint: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[allow(non_snake_case)]
|
||||
pub struct CamoufoxLaunchResult {
|
||||
pub id: String,
|
||||
#[serde(alias = "process_id")]
|
||||
pub processId: Option<u32>,
|
||||
#[serde(alias = "profile_path")]
|
||||
pub profilePath: Option<String>,
|
||||
pub url: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct CamoufoxInstance {
|
||||
#[allow(dead_code)]
|
||||
id: String,
|
||||
process_id: Option<u32>,
|
||||
profile_path: Option<String>,
|
||||
url: Option<String>,
|
||||
}
|
||||
|
||||
struct CamoufoxManagerInner {
|
||||
instances: HashMap<String, CamoufoxInstance>,
|
||||
}
|
||||
|
||||
pub struct CamoufoxManager {
|
||||
inner: Arc<AsyncMutex<CamoufoxManagerInner>>,
|
||||
base_dirs: BaseDirs,
|
||||
}
|
||||
|
||||
impl CamoufoxManager {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
inner: Arc::new(AsyncMutex::new(CamoufoxManagerInner {
|
||||
instances: HashMap::new(),
|
||||
})),
|
||||
base_dirs: BaseDirs::new().expect("Failed to get base directories"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn instance() -> &'static CamoufoxManager {
|
||||
&CAMOUFOX_NODECAR_LAUNCHER
|
||||
}
|
||||
|
||||
pub fn get_profiles_dir(&self) -> PathBuf {
|
||||
let mut path = self.base_dirs.data_local_dir().to_path_buf();
|
||||
path.push(if cfg!(debug_assertions) {
|
||||
"DonutBrowserDev"
|
||||
} else {
|
||||
"DonutBrowser"
|
||||
});
|
||||
path.push("profiles");
|
||||
path
|
||||
}
|
||||
|
||||
/// Generate Camoufox fingerprint configuration during profile creation
|
||||
pub async fn generate_fingerprint_config(
|
||||
&self,
|
||||
app_handle: &AppHandle,
|
||||
profile: &crate::profile::BrowserProfile,
|
||||
config: &CamoufoxConfig,
|
||||
) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let mut config_args = vec!["camoufox".to_string(), "generate-config".to_string()];
|
||||
|
||||
// Always ensure executable_path is set to the user's binary location
|
||||
let executable_path = if let Some(path) = &config.executable_path {
|
||||
path.clone()
|
||||
} else {
|
||||
// Use the browser runner helper with the real profile
|
||||
// Use self.browser_runner instead of instance()
|
||||
BrowserRunner::instance()
|
||||
.get_browser_executable_path(profile)
|
||||
.map_err(|e| format!("Failed to get Camoufox executable path: {e}"))?
|
||||
.to_string_lossy()
|
||||
.to_string()
|
||||
};
|
||||
config_args.extend(["--executable-path".to_string(), executable_path]);
|
||||
|
||||
// Pass existing fingerprint if provided (for advanced form partial fingerprints)
|
||||
if let Some(fingerprint) = &config.fingerprint {
|
||||
config_args.extend(["--fingerprint".to_string(), fingerprint.clone()]);
|
||||
}
|
||||
|
||||
if let Some(serde_json::Value::Bool(true)) = &config.geoip {
|
||||
config_args.push("--geoip".to_string());
|
||||
}
|
||||
|
||||
// Add proxy if provided (can be passed directly during fingerprint generation)
|
||||
if let Some(proxy) = &config.proxy {
|
||||
config_args.extend(["--proxy".to_string(), proxy.clone()]);
|
||||
}
|
||||
|
||||
// Add screen dimensions if provided
|
||||
if let Some(max_width) = config.screen_max_width {
|
||||
config_args.extend(["--max-width".to_string(), max_width.to_string()]);
|
||||
}
|
||||
|
||||
if let Some(max_height) = config.screen_max_height {
|
||||
config_args.extend(["--max-height".to_string(), max_height.to_string()]);
|
||||
}
|
||||
|
||||
if let Some(min_width) = config.screen_min_width {
|
||||
config_args.extend(["--min-width".to_string(), min_width.to_string()]);
|
||||
}
|
||||
|
||||
if let Some(min_height) = config.screen_min_height {
|
||||
config_args.extend(["--min-height".to_string(), min_height.to_string()]);
|
||||
}
|
||||
|
||||
// Add block_* options
|
||||
if let Some(block_images) = config.block_images {
|
||||
if block_images {
|
||||
config_args.push("--block-images".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(block_webrtc) = config.block_webrtc {
|
||||
if block_webrtc {
|
||||
config_args.push("--block-webrtc".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(block_webgl) = config.block_webgl {
|
||||
if block_webgl {
|
||||
config_args.push("--block-webgl".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
// Execute config generation command
|
||||
let mut config_sidecar = self.get_nodecar_sidecar(app_handle)?;
|
||||
for arg in &config_args {
|
||||
config_sidecar = config_sidecar.arg(arg);
|
||||
}
|
||||
|
||||
let config_output = config_sidecar.output().await?;
|
||||
if !config_output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&config_output.stderr);
|
||||
return Err(format!("Failed to generate camoufox fingerprint config: {stderr}").into());
|
||||
}
|
||||
|
||||
Ok(String::from_utf8_lossy(&config_output.stdout).to_string())
|
||||
}
|
||||
|
||||
/// Get the nodecar sidecar command
|
||||
fn get_nodecar_sidecar(
|
||||
&self,
|
||||
app_handle: &AppHandle,
|
||||
) -> Result<tauri_plugin_shell::process::Command, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let shell = app_handle.shell();
|
||||
let sidecar_command = shell
|
||||
.sidecar("nodecar")
|
||||
.map_err(|e| format!("Failed to create nodecar sidecar: {e}"))?;
|
||||
Ok(sidecar_command)
|
||||
}
|
||||
|
||||
/// Launch Camoufox browser using nodecar sidecar
|
||||
pub async fn launch_camoufox(
|
||||
&self,
|
||||
app_handle: &AppHandle,
|
||||
profile: &crate::profile::BrowserProfile,
|
||||
profile_path: &str,
|
||||
config: &CamoufoxConfig,
|
||||
url: Option<&str>,
|
||||
) -> Result<CamoufoxLaunchResult, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let custom_config = if let Some(existing_fingerprint) = &config.fingerprint {
|
||||
println!("Using existing fingerprint from profile metadata");
|
||||
existing_fingerprint.clone()
|
||||
} else {
|
||||
return Err("No fingerprint provided".into());
|
||||
};
|
||||
|
||||
// Always ensure executable_path is set to the user's binary location
|
||||
let executable_path = if let Some(path) = &config.executable_path {
|
||||
path.clone()
|
||||
} else {
|
||||
// Use the browser runner helper with the real profile
|
||||
// Use self.browser_runner instead of instance()
|
||||
BrowserRunner::instance()
|
||||
.get_browser_executable_path(profile)
|
||||
.map_err(|e| format!("Failed to get Camoufox executable path: {e}"))?
|
||||
.to_string_lossy()
|
||||
.to_string()
|
||||
};
|
||||
|
||||
// Build nodecar command arguments
|
||||
let mut args = vec!["camoufox".to_string(), "start".to_string()];
|
||||
|
||||
// Add profile path - ensure it's an absolute path
|
||||
let absolute_profile_path = std::path::Path::new(profile_path)
|
||||
.canonicalize()
|
||||
.unwrap_or_else(|_| std::path::Path::new(profile_path).to_path_buf())
|
||||
.to_string_lossy()
|
||||
.to_string();
|
||||
args.extend(["--profile-path".to_string(), absolute_profile_path]);
|
||||
|
||||
// Add URL if provided
|
||||
if let Some(url) = url {
|
||||
args.extend(["--url".to_string(), url.to_string()]);
|
||||
}
|
||||
|
||||
// Always add the executable path
|
||||
args.extend(["--executable-path".to_string(), executable_path]);
|
||||
|
||||
// Always add the generated custom config
|
||||
args.extend(["--custom-config".to_string(), custom_config]);
|
||||
|
||||
// Add proxy if provided
|
||||
if let Some(proxy) = &config.proxy {
|
||||
args.extend(["--proxy".to_string(), proxy.clone()]);
|
||||
}
|
||||
|
||||
// Add headless flag for tests
|
||||
if std::env::var("CAMOUFOX_HEADLESS").is_ok() {
|
||||
args.push("--headless".to_string());
|
||||
}
|
||||
|
||||
// Get the nodecar sidecar command
|
||||
let mut sidecar_command = self.get_nodecar_sidecar(app_handle)?;
|
||||
|
||||
// Add all arguments to the sidecar command
|
||||
for arg in &args {
|
||||
sidecar_command = sidecar_command.arg(arg);
|
||||
}
|
||||
|
||||
// Execute nodecar sidecar command
|
||||
println!("Executing nodecar command with args: {args:?}");
|
||||
let output = sidecar_command.output().await?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
println!("nodecar camoufox failed - stdout: {stdout}, stderr: {stderr}");
|
||||
return Err(format!("nodecar camoufox failed: {stderr}").into());
|
||||
}
|
||||
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
println!("nodecar camoufox output: {stdout}");
|
||||
|
||||
// Parse the JSON output
|
||||
let launch_result: CamoufoxLaunchResult = serde_json::from_str(&stdout)
|
||||
.map_err(|e| format!("Failed to parse nodecar output as JSON: {e}\nOutput was: {stdout}"))?;
|
||||
|
||||
// Store the instance
|
||||
let instance = CamoufoxInstance {
|
||||
id: launch_result.id.clone(),
|
||||
process_id: launch_result.processId,
|
||||
profile_path: launch_result.profilePath.clone(),
|
||||
url: launch_result.url.clone(),
|
||||
};
|
||||
|
||||
{
|
||||
let mut inner = self.inner.lock().await;
|
||||
inner.instances.insert(launch_result.id.clone(), instance);
|
||||
}
|
||||
|
||||
Ok(launch_result)
|
||||
}
|
||||
|
||||
/// Stop a Camoufox process by ID
|
||||
pub async fn stop_camoufox(
|
||||
&self,
|
||||
app_handle: &AppHandle,
|
||||
id: &str,
|
||||
) -> Result<bool, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Get the nodecar sidecar command
|
||||
let sidecar_command = self
|
||||
.get_nodecar_sidecar(app_handle)?
|
||||
.arg("camoufox")
|
||||
.arg("stop")
|
||||
.arg("--id")
|
||||
.arg(id);
|
||||
|
||||
// Execute nodecar stop command
|
||||
let output = sidecar_command.output().await?;
|
||||
|
||||
if !output.status.success() {
|
||||
let _stderr = String::from_utf8_lossy(&output.stderr);
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
let result: serde_json::Value = serde_json::from_str(&stdout)
|
||||
.map_err(|e| format!("Failed to parse nodecar stop output: {e}"))?;
|
||||
|
||||
let success = result
|
||||
.get("success")
|
||||
.and_then(|v| v.as_bool())
|
||||
.unwrap_or(false);
|
||||
|
||||
if success {
|
||||
// Remove from our tracking
|
||||
let mut inner = self.inner.lock().await;
|
||||
inner.instances.remove(id);
|
||||
}
|
||||
|
||||
Ok(success)
|
||||
}
|
||||
|
||||
/// Find Camoufox server by profile path (for integration with browser_runner)
|
||||
pub async fn find_camoufox_by_profile(
|
||||
&self,
|
||||
profile_path: &str,
|
||||
) -> Result<Option<CamoufoxLaunchResult>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// First clean up any dead instances
|
||||
self.cleanup_dead_instances().await?;
|
||||
|
||||
let inner = self.inner.lock().await;
|
||||
|
||||
// Convert paths to canonical form for comparison
|
||||
let target_path = std::path::Path::new(profile_path)
|
||||
.canonicalize()
|
||||
.unwrap_or_else(|_| std::path::Path::new(profile_path).to_path_buf());
|
||||
|
||||
for (id, instance) in inner.instances.iter() {
|
||||
if let Some(instance_profile_path) = &instance.profile_path {
|
||||
let instance_path = std::path::Path::new(instance_profile_path)
|
||||
.canonicalize()
|
||||
.unwrap_or_else(|_| std::path::Path::new(instance_profile_path).to_path_buf());
|
||||
|
||||
if instance_path == target_path {
|
||||
// Verify the server is actually running by checking the process
|
||||
if let Some(process_id) = instance.process_id {
|
||||
if self.is_server_running(process_id).await {
|
||||
// Found running Camoufox instance
|
||||
return Ok(Some(CamoufoxLaunchResult {
|
||||
id: id.clone(),
|
||||
processId: instance.process_id,
|
||||
profilePath: instance.profile_path.clone(),
|
||||
url: instance.url.clone(),
|
||||
}));
|
||||
} else {
|
||||
// Camoufox instance found but process is not running
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
/// Check if servers are still alive and clean up dead instances
|
||||
pub async fn cleanup_dead_instances(
|
||||
&self,
|
||||
) -> Result<Vec<String>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let mut dead_instances = Vec::new();
|
||||
let mut instances_to_remove = Vec::new();
|
||||
|
||||
{
|
||||
let inner = self.inner.lock().await;
|
||||
|
||||
for (id, instance) in inner.instances.iter() {
|
||||
if let Some(process_id) = instance.process_id {
|
||||
// Check if the process is still alive
|
||||
if !self.is_server_running(process_id).await {
|
||||
// Process is dead
|
||||
// Camoufox instance is no longer running
|
||||
dead_instances.push(id.clone());
|
||||
instances_to_remove.push(id.clone());
|
||||
}
|
||||
} else {
|
||||
// No process_id means it's likely a dead instance
|
||||
// Camoufox instance has no PID, marking as dead
|
||||
dead_instances.push(id.clone());
|
||||
instances_to_remove.push(id.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Remove dead instances
|
||||
if !instances_to_remove.is_empty() {
|
||||
let mut inner = self.inner.lock().await;
|
||||
for id in &instances_to_remove {
|
||||
inner.instances.remove(id);
|
||||
// Removed dead Camoufox instance
|
||||
}
|
||||
}
|
||||
|
||||
Ok(dead_instances)
|
||||
}
|
||||
|
||||
/// Check if a Camoufox server is running with the given process ID
|
||||
async fn is_server_running(&self, process_id: u32) -> bool {
|
||||
// Check if the process is still running
|
||||
use sysinfo::{Pid, System};
|
||||
|
||||
let system = System::new_all();
|
||||
if let Some(process) = system.process(Pid::from(process_id as usize)) {
|
||||
// Check if this is actually a Camoufox process by looking at the command line
|
||||
let cmd = process.cmd();
|
||||
let is_camoufox = cmd.iter().any(|arg| {
|
||||
let arg_str = arg.to_str().unwrap_or("");
|
||||
arg_str.contains("camoufox-worker") || arg_str.contains("camoufox")
|
||||
});
|
||||
|
||||
if is_camoufox {
|
||||
// Found running Camoufox process
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
impl CamoufoxManager {
|
||||
pub async fn launch_camoufox_profile_nodecar(
|
||||
&self,
|
||||
app_handle: AppHandle,
|
||||
profile: BrowserProfile,
|
||||
config: CamoufoxConfig,
|
||||
url: Option<String>,
|
||||
) -> Result<CamoufoxLaunchResult, String> {
|
||||
// Get profile path
|
||||
let profiles_dir = self.get_profiles_dir();
|
||||
let profile_path = profile.get_profile_data_path(&profiles_dir);
|
||||
let profile_path_str = profile_path.to_string_lossy();
|
||||
|
||||
// Check if there's already a running instance for this profile
|
||||
if let Ok(Some(existing)) = self.find_camoufox_by_profile(&profile_path_str).await {
|
||||
// If there's an existing instance, stop it first to avoid conflicts
|
||||
let _ = self.stop_camoufox(&app_handle, &existing.id).await;
|
||||
}
|
||||
|
||||
// Clean up any dead instances before launching
|
||||
let _ = self.cleanup_dead_instances().await;
|
||||
|
||||
self
|
||||
.launch_camoufox(
|
||||
&app_handle,
|
||||
&profile,
|
||||
&profile_path_str,
|
||||
&config,
|
||||
url.as_deref(),
|
||||
)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to launch Camoufox via nodecar: {e}"))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
  use super::*;

  /// A default `CamoufoxConfig` enables geoip and leaves proxy and
  /// fingerprint unset.
  #[test]
  fn test_default_config() {
    let cfg = CamoufoxConfig::default();

    assert_eq!(cfg.geoip, Some(serde_json::Value::Bool(true)));
    assert!(cfg.proxy.is_none());
    assert!(cfg.fingerprint.is_none());
  }
}
|
||||
|
||||
// Global singleton instance
// Process-wide Camoufox manager shared by every Tauri command handler.
lazy_static::lazy_static! {
  static ref CAMOUFOX_NODECAR_LAUNCHER: CamoufoxManager = CamoufoxManager::new();
}
|
||||
@@ -1,5 +1,45 @@
|
||||
use tauri::command;
|
||||
|
||||
/// Facade over the platform-specific default-browser integration.
/// Obtain it via `DefaultBrowser::instance()`; construction is private.
pub struct DefaultBrowser {}

impl DefaultBrowser {
  // Private: callers go through the `DEFAULT_BROWSER` singleton below.
  fn new() -> Self {
    Self {}
  }

  /// Access the process-wide singleton.
  pub fn instance() -> &'static DefaultBrowser {
    &DEFAULT_BROWSER
  }

  /// Report whether this app is currently the OS default browser.
  ///
  /// Dispatches at compile time to the macOS / Windows / Linux module;
  /// any other target yields an "Unsupported platform" error.
  pub async fn is_default_browser(&self) -> Result<bool, String> {
    #[cfg(target_os = "macos")]
    return macos::is_default_browser();

    #[cfg(target_os = "windows")]
    return windows::is_default_browser();

    #[cfg(target_os = "linux")]
    return linux::is_default_browser();

    #[cfg(not(any(target_os = "macos", target_os = "windows", target_os = "linux")))]
    Err("Unsupported platform".to_string())
  }

  /// Register this app as the OS default browser (same dispatch scheme
  /// as `is_default_browser`).
  pub async fn set_as_default_browser(&self) -> Result<(), String> {
    #[cfg(target_os = "macos")]
    return macos::set_as_default_browser();

    #[cfg(target_os = "windows")]
    return windows::set_as_default_browser();

    #[cfg(target_os = "linux")]
    return linux::set_as_default_browser();

    #[cfg(not(any(target_os = "macos", target_os = "windows", target_os = "linux")))]
    Err("Unsupported platform".to_string())
  }
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
mod macos {
|
||||
use core_foundation::base::OSStatus;
|
||||
@@ -158,7 +198,7 @@ mod windows {
|
||||
app_key
|
||||
.set_value(
|
||||
"ApplicationDescription",
|
||||
&"Donut Browser - Simple Yet Powerful Browser Orchestrator",
|
||||
&"Donut Browser - Simple Yet Powerful Anti-Detect Browser",
|
||||
)
|
||||
.map_err(|e| format!("Failed to set ApplicationDescription: {}", e))?;
|
||||
|
||||
@@ -174,7 +214,7 @@ mod windows {
|
||||
capabilities
|
||||
.set_value(
|
||||
"ApplicationDescription",
|
||||
&"Donut Browser - Simple Yet Powerful Browser Orchestrator",
|
||||
&"Donut Browser - Simple Yet Powerful Anti-Detect Browser",
|
||||
)
|
||||
.map_err(|e| format!("Failed to set Capabilities description: {}", e))?;
|
||||
|
||||
@@ -482,159 +522,19 @@ mod linux {
|
||||
}
|
||||
}
|
||||
|
||||
// Global singleton instance
// Backing storage for `DefaultBrowser::instance()`.
lazy_static::lazy_static! {
  static ref DEFAULT_BROWSER: DefaultBrowser = DefaultBrowser::new();
}
|
||||
|
||||
#[command]
|
||||
pub async fn is_default_browser() -> Result<bool, String> {
|
||||
#[cfg(target_os = "macos")]
|
||||
return macos::is_default_browser();
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
return windows::is_default_browser();
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
return linux::is_default_browser();
|
||||
|
||||
#[cfg(not(any(target_os = "macos", target_os = "windows", target_os = "linux")))]
|
||||
Err("Unsupported platform".to_string())
|
||||
let default_browser = DefaultBrowser::instance();
|
||||
default_browser.is_default_browser().await
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn set_as_default_browser() -> Result<(), String> {
|
||||
#[cfg(target_os = "macos")]
|
||||
return macos::set_as_default_browser();
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
return windows::set_as_default_browser();
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
return linux::set_as_default_browser();
|
||||
|
||||
#[cfg(not(any(target_os = "macos", target_os = "windows", target_os = "linux")))]
|
||||
Err("Unsupported platform".to_string())
|
||||
}
|
||||
|
||||
/// Tauri command: open `url` in the profile named `profile_name`.
///
/// Fails with a descriptive string if the profile does not exist or the
/// launch/open step errors.
#[tauri::command]
pub async fn open_url_with_profile(
  app_handle: tauri::AppHandle,
  profile_name: String,
  url: String,
) -> Result<(), String> {
  use crate::browser_runner::BrowserRunner;

  let runner = BrowserRunner::new();

  // Get the profile by name.
  let profiles = runner
    .list_profiles()
    .map_err(|e| format!("Failed to list profiles: {e}"))?;
  let profile = profiles
    .into_iter()
    .find(|p| p.name == profile_name)
    .ok_or_else(|| format!("Profile '{profile_name}' not found"))?;

  println!("Opening URL '{url}' with profile '{profile_name}'");

  // Use launch_or_open_url which handles both launching new instances and opening in existing ones
  runner
    .launch_or_open_url(app_handle, &profile, Some(url.clone()))
    .await
    .map_err(|e| {
      println!("Failed to open URL with profile '{profile_name}': {e}");
      format!("Failed to open URL with profile: {e}")
    })?;

  println!("Successfully opened URL '{url}' with profile '{profile_name}'");
  Ok(())
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn smart_open_url(
|
||||
app_handle: tauri::AppHandle,
|
||||
url: String,
|
||||
_is_startup: Option<bool>,
|
||||
) -> Result<String, String> {
|
||||
use crate::browser_runner::BrowserRunner;
|
||||
|
||||
let runner = BrowserRunner::new();
|
||||
|
||||
// Get all profiles
|
||||
let profiles = runner
|
||||
.list_profiles()
|
||||
.map_err(|e| format!("Failed to list profiles: {e}"))?;
|
||||
|
||||
if profiles.is_empty() {
|
||||
return Err("no_profiles".to_string());
|
||||
}
|
||||
|
||||
println!(
|
||||
"URL opening - Total profiles: {}, checking for running profiles",
|
||||
profiles.len()
|
||||
);
|
||||
|
||||
// Check for running profiles and find the first one that can handle URLs
|
||||
for profile in &profiles {
|
||||
// Check if this profile is running
|
||||
let is_running = runner
|
||||
.check_browser_status(app_handle.clone(), profile)
|
||||
.await
|
||||
.unwrap_or(false);
|
||||
|
||||
if is_running {
|
||||
println!(
|
||||
"Found running profile '{}', attempting to open URL",
|
||||
profile.name
|
||||
);
|
||||
|
||||
// For TOR browser: Check if any other TOR browser is running
|
||||
if profile.browser == "tor-browser" {
|
||||
let mut other_tor_running = false;
|
||||
for p in &profiles {
|
||||
if p.browser == "tor-browser"
|
||||
&& p.name != profile.name
|
||||
&& runner
|
||||
.check_browser_status(app_handle.clone(), p)
|
||||
.await
|
||||
.unwrap_or(false)
|
||||
{
|
||||
other_tor_running = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if other_tor_running {
|
||||
continue; // Skip this one, can't have multiple TOR instances
|
||||
}
|
||||
}
|
||||
|
||||
// For Mullvad browser: skip if running (can't open URLs in running Mullvad)
|
||||
if profile.browser == "mullvad-browser" {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Try to open the URL with this running profile
|
||||
match runner
|
||||
.launch_or_open_url(app_handle.clone(), profile, Some(url.clone()))
|
||||
.await
|
||||
{
|
||||
Ok(_) => {
|
||||
println!(
|
||||
"Successfully opened URL '{}' with running profile '{}'",
|
||||
url, profile.name
|
||||
);
|
||||
return Ok(format!("opened_with_profile:{}", profile.name));
|
||||
}
|
||||
Err(e) => {
|
||||
println!(
|
||||
"Failed to open URL with running profile '{}': {}",
|
||||
profile.name, e
|
||||
);
|
||||
// Continue to try other profiles or show selector
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
println!("No suitable running profiles found, showing profile selector");
|
||||
|
||||
// No suitable running profile found, show the profile selector
|
||||
Err("show_selector".to_string())
|
||||
let default_browser = DefaultBrowser::instance();
|
||||
default_browser.set_as_default_browser().await
|
||||
}
|
||||
|
||||
@@ -1,985 +0,0 @@
|
||||
use reqwest::Client;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fs::File;
|
||||
use std::io;
|
||||
use std::path::{Path, PathBuf};
|
||||
use tauri::Emitter;
|
||||
|
||||
use crate::api_client::ApiClient;
|
||||
use crate::browser::BrowserType;
|
||||
use crate::browser_version_service::DownloadInfo;
|
||||
|
||||
/// Progress snapshot emitted to the frontend as the "download-progress"
/// event while a browser artifact is being fetched.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct DownloadProgress {
  pub browser: String,
  pub version: String,
  pub downloaded_bytes: u64,
  // None when the server sent no Content-Length header.
  pub total_bytes: Option<u64>,
  // 0.0 when the total size is unknown.
  pub percentage: f64,
  pub speed_bytes_per_sec: f64,
  // None until a positive transfer speed has been measured.
  pub eta_seconds: Option<f64>,
  pub stage: String, // "downloading", "extracting", "verifying"
}
|
||||
|
||||
/// Downloads browser release artifacts, resolving platform-specific
/// asset URLs through the API client where needed.
pub struct Downloader {
  client: Client,        // HTTP client used for the actual file transfer
  api_client: ApiClient, // release-metadata client (asset URL resolution)
}
|
||||
|
||||
impl Downloader {
  /// Create a downloader with fresh HTTP and API clients.
  pub fn new() -> Self {
    Self {
      client: Client::new(),
      api_client: ApiClient::new(),
    }
  }

  /// Test-only constructor that injects a pre-configured (e.g. mocked)
  /// API client.
  #[cfg(test)]
  pub fn new_with_api_client(api_client: ApiClient) -> Self {
    Self {
      client: Client::new(),
      api_client,
    }
  }
|
||||
|
||||
  /// Resolve the actual download URL for browsers that need dynamic asset resolution.
  ///
  /// Brave, Zen and Mullvad releases are looked up through the API client
  /// so the platform/architecture-specific asset can be chosen; every
  /// other browser type uses `download_info.url` unchanged.
  ///
  /// Errors when the requested version has no release or no compatible
  /// asset for the current platform.
  pub async fn resolve_download_url(
    &self,
    browser_type: BrowserType,
    version: &str,
    download_info: &DownloadInfo,
  ) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
    match browser_type {
      BrowserType::Brave => {
        // For Brave, we need to find the actual platform-specific asset
        let releases = self
          .api_client
          .fetch_brave_releases_with_caching(true)
          .await?;

        // Find the release with the matching version; the tag may or may
        // not carry a leading "v", so both spellings are accepted.
        let release = releases
          .iter()
          .find(|r| {
            r.tag_name == version || r.tag_name == format!("v{}", version.trim_start_matches('v'))
          })
          .ok_or(format!("Brave version {version} not found"))?;

        // Get platform and architecture info
        let (os, arch) = Self::get_platform_info();

        // Find the appropriate asset based on platform and architecture
        let asset_url = self
          .find_brave_asset(&release.assets, &os, &arch)
          .ok_or(format!(
            "No compatible asset found for Brave version {version} on {os}/{arch}"
          ))?;

        Ok(asset_url)
      }
      BrowserType::Zen => {
        // For Zen, verify the asset exists and handle different naming patterns
        let releases = self
          .api_client
          .fetch_zen_releases_with_caching(true)
          .await?;

        let release = releases
          .iter()
          .find(|r| r.tag_name == version)
          .ok_or(format!("Zen version {version} not found"))?;

        // Get platform and architecture info
        let (os, arch) = Self::get_platform_info();

        // Find the appropriate asset
        let asset_url = self
          .find_zen_asset(&release.assets, &os, &arch)
          .ok_or(format!(
            "No compatible asset found for Zen version {version} on {os}/{arch}"
          ))?;

        Ok(asset_url)
      }
      BrowserType::MullvadBrowser => {
        // For Mullvad, verify the asset exists
        let releases = self
          .api_client
          .fetch_mullvad_releases_with_caching(true)
          .await?;

        let release = releases
          .iter()
          .find(|r| r.tag_name == version)
          .ok_or(format!("Mullvad version {version} not found"))?;

        // Get platform and architecture info
        let (os, arch) = Self::get_platform_info();

        // Find the appropriate asset
        let asset_url = self
          .find_mullvad_asset(&release.assets, &os, &arch)
          .ok_or(format!(
            "No compatible asset found for Mullvad version {version} on {os}/{arch}"
          ))?;

        Ok(asset_url)
      }
      _ => {
        // For other browsers, use the provided URL
        Ok(download_info.url.clone())
      }
    }
  }
|
||||
|
||||
/// Get platform and architecture information
|
||||
fn get_platform_info() -> (String, String) {
|
||||
let os = if cfg!(target_os = "windows") {
|
||||
"windows"
|
||||
} else if cfg!(target_os = "linux") {
|
||||
"linux"
|
||||
} else if cfg!(target_os = "macos") {
|
||||
"macos"
|
||||
} else {
|
||||
"unknown"
|
||||
};
|
||||
|
||||
let arch = if cfg!(target_arch = "x86_64") {
|
||||
"x64"
|
||||
} else if cfg!(target_arch = "aarch64") {
|
||||
"arm64"
|
||||
} else {
|
||||
"unknown"
|
||||
};
|
||||
|
||||
(os.to_string(), arch.to_string())
|
||||
}
|
||||
|
||||
/// Find the appropriate Brave asset for the current platform and architecture
|
||||
fn find_brave_asset(
|
||||
&self,
|
||||
assets: &[crate::browser::GithubAsset],
|
||||
os: &str,
|
||||
arch: &str,
|
||||
) -> Option<String> {
|
||||
// Brave asset naming patterns:
|
||||
// Windows: BraveBrowserStandaloneNightlySetup.exe, BraveBrowserStandaloneSilentNightlySetup.exe
|
||||
// macOS: Brave-Browser-Nightly-universal.dmg, Brave-Browser-Nightly-universal.pkg
|
||||
// Linux: brave-browser-1.79.119-linux-arm64.zip, brave-browser-1.79.119-linux-amd64.zip
|
||||
|
||||
let asset = match os {
|
||||
"windows" => {
|
||||
// For Windows, look for standalone setup EXE (not the auto-updater one)
|
||||
assets
|
||||
.iter()
|
||||
.find(|asset| {
|
||||
let name = asset.name.to_lowercase();
|
||||
name.contains("standalone") && name.ends_with(".exe") && !name.contains("silent")
|
||||
})
|
||||
.or_else(|| {
|
||||
// Fallback to any EXE if standalone not found
|
||||
assets.iter().find(|asset| asset.name.ends_with(".exe"))
|
||||
})
|
||||
}
|
||||
"macos" => {
|
||||
// For macOS, prefer universal DMG
|
||||
assets
|
||||
.iter()
|
||||
.find(|asset| {
|
||||
let name = asset.name.to_lowercase();
|
||||
name.contains("universal") && name.ends_with(".dmg")
|
||||
})
|
||||
.or_else(|| {
|
||||
// Fallback to any DMG
|
||||
assets.iter().find(|asset| asset.name.ends_with(".dmg"))
|
||||
})
|
||||
}
|
||||
"linux" => {
|
||||
// For Linux, be strict about architecture matching - same logic as has_compatible_brave_asset
|
||||
let arch_pattern = if arch == "arm64" { "arm64" } else { "amd64" };
|
||||
|
||||
assets.iter().find(|asset| {
|
||||
let name = asset.name.to_lowercase();
|
||||
name.contains("linux") && name.contains(arch_pattern) && name.ends_with(".zip")
|
||||
})
|
||||
}
|
||||
_ => None,
|
||||
};
|
||||
|
||||
asset.map(|a| a.browser_download_url.clone())
|
||||
}
|
||||
|
||||
  /// Find the appropriate Zen asset for the current platform and architecture.
  ///
  /// Returns the asset's download URL, or `None` when no asset matches.
  /// Zen assets are matched by exact filename (unlike Brave's substring
  /// matching).
  fn find_zen_asset(
    &self,
    assets: &[crate::browser::GithubAsset],
    os: &str,
    arch: &str,
  ) -> Option<String> {
    // Zen asset naming patterns:
    // Windows: zen.installer.exe, zen.installer-arm64.exe
    // macOS: zen.macos-universal.dmg
    // Linux: zen.linux-x86_64.tar.xz, zen.linux-aarch64.tar.xz, zen-x86_64.AppImage, zen-aarch64.AppImage

    let asset = match (os, arch) {
      ("windows", "x64") => assets
        .iter()
        .find(|asset| asset.name == "zen.installer.exe"),
      ("windows", "arm64") => assets
        .iter()
        .find(|asset| asset.name == "zen.installer-arm64.exe"),
      // macOS ships a single universal binary for both architectures.
      ("macos", _) => assets
        .iter()
        .find(|asset| asset.name == "zen.macos-universal.dmg"),
      ("linux", "x64") => {
        // Prefer tar.xz, fallback to AppImage
        assets
          .iter()
          .find(|asset| asset.name == "zen.linux-x86_64.tar.xz")
          .or_else(|| {
            assets
              .iter()
              .find(|asset| asset.name == "zen-x86_64.AppImage")
          })
      }
      ("linux", "arm64") => {
        // Prefer tar.xz, fallback to AppImage
        assets
          .iter()
          .find(|asset| asset.name == "zen.linux-aarch64.tar.xz")
          .or_else(|| {
            assets
              .iter()
              .find(|asset| asset.name == "zen-aarch64.AppImage")
          })
      }
      _ => None,
    };

    asset.map(|a| a.browser_download_url.clone())
  }
|
||||
|
||||
  /// Find the appropriate Mullvad asset for the current platform and architecture.
  ///
  /// Returns the asset's download URL, or `None` when the platform/arch
  /// combination is unsupported (Mullvad publishes no ARM64 builds for
  /// Windows or Linux).
  fn find_mullvad_asset(
    &self,
    assets: &[crate::browser::GithubAsset],
    os: &str,
    arch: &str,
  ) -> Option<String> {
    // Mullvad asset naming patterns:
    // Windows: mullvad-browser-windows-x86_64-VERSION.exe
    // macOS: mullvad-browser-macos-VERSION.dmg
    // Linux: mullvad-browser-x86_64-VERSION.tar.xz

    let asset = match (os, arch) {
      ("windows", "x64") => assets.iter().find(|asset| {
        asset.name.contains("windows")
          && asset.name.contains("x86_64")
          && asset.name.ends_with(".exe")
      }),
      ("windows", "arm64") => {
        // Mullvad doesn't support ARM64 on Windows
        None
      }
      ("macos", _) => assets
        .iter()
        .find(|asset| asset.name.contains("macos") && asset.name.ends_with(".dmg")),
      // The "windows" exclusion keeps the Windows x86_64 installer from
      // matching the Linux pattern.
      ("linux", "x64") => assets.iter().find(|asset| {
        asset.name.contains("x86_64")
          && asset.name.ends_with(".tar.xz")
          && !asset.name.contains("windows")
      }),
      ("linux", "arm64") => {
        // Mullvad doesn't support ARM64 on Linux
        None
      }
      _ => None,
    };

    asset.map(|a| a.browser_download_url.clone())
  }
|
||||
|
||||
pub async fn download_browser<R: tauri::Runtime>(
|
||||
&self,
|
||||
app_handle: &tauri::AppHandle<R>,
|
||||
browser_type: BrowserType,
|
||||
version: &str,
|
||||
download_info: &DownloadInfo,
|
||||
dest_path: &Path,
|
||||
) -> Result<PathBuf, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let file_path = dest_path.join(&download_info.filename);
|
||||
|
||||
// Resolve the actual download URL
|
||||
let download_url = self
|
||||
.resolve_download_url(browser_type.clone(), version, download_info)
|
||||
.await?;
|
||||
|
||||
// Check if this is a twilight release for special handling
|
||||
let is_twilight =
|
||||
browser_type == BrowserType::Zen && version.to_lowercase().contains("twilight");
|
||||
|
||||
// Emit initial progress
|
||||
let progress = DownloadProgress {
|
||||
browser: browser_type.as_str().to_string(),
|
||||
version: version.to_string(),
|
||||
downloaded_bytes: 0,
|
||||
total_bytes: None,
|
||||
percentage: 0.0,
|
||||
speed_bytes_per_sec: 0.0,
|
||||
eta_seconds: None,
|
||||
stage: if is_twilight {
|
||||
"downloading (twilight rolling release)".to_string()
|
||||
} else {
|
||||
"downloading".to_string()
|
||||
},
|
||||
};
|
||||
|
||||
let _ = app_handle.emit("download-progress", &progress);
|
||||
|
||||
// Start download
|
||||
let response = self
|
||||
.client
|
||||
.get(&download_url)
|
||||
.header("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36")
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
// Check if the response is successful
|
||||
if !response.status().is_success() {
|
||||
return Err(format!("Download failed with status: {}", response.status()).into());
|
||||
}
|
||||
|
||||
let total_size = response.content_length();
|
||||
let mut downloaded = 0u64;
|
||||
let start_time = std::time::Instant::now();
|
||||
let mut last_update = start_time;
|
||||
|
||||
let mut file = File::create(&file_path)?;
|
||||
let mut stream = response.bytes_stream();
|
||||
|
||||
use futures_util::StreamExt;
|
||||
while let Some(chunk) = stream.next().await {
|
||||
let chunk = chunk?;
|
||||
io::copy(&mut chunk.as_ref(), &mut file)?;
|
||||
downloaded += chunk.len() as u64;
|
||||
|
||||
let now = std::time::Instant::now();
|
||||
// Update progress every 100ms to avoid too many events
|
||||
if now.duration_since(last_update).as_millis() >= 100 {
|
||||
let elapsed = start_time.elapsed().as_secs_f64();
|
||||
let speed = if elapsed > 0.0 {
|
||||
downloaded as f64 / elapsed
|
||||
} else {
|
||||
0.0
|
||||
};
|
||||
let percentage = if let Some(total) = total_size {
|
||||
(downloaded as f64 / total as f64) * 100.0
|
||||
} else {
|
||||
0.0
|
||||
};
|
||||
let eta = if speed > 0.0 {
|
||||
total_size.map(|total| (total - downloaded) as f64 / speed)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let stage_description = if is_twilight {
|
||||
"downloading (twilight rolling release)".to_string()
|
||||
} else {
|
||||
"downloading".to_string()
|
||||
};
|
||||
|
||||
let progress = DownloadProgress {
|
||||
browser: browser_type.as_str().to_string(),
|
||||
version: version.to_string(),
|
||||
downloaded_bytes: downloaded,
|
||||
total_bytes: total_size,
|
||||
percentage,
|
||||
speed_bytes_per_sec: speed,
|
||||
eta_seconds: eta,
|
||||
stage: stage_description,
|
||||
};
|
||||
|
||||
let _ = app_handle.emit("download-progress", &progress);
|
||||
last_update = now;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(file_path)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::api_client::ApiClient;
|
||||
use crate::browser::BrowserType;
|
||||
use crate::browser_version_service::DownloadInfo;
|
||||
|
||||
use tempfile::TempDir;
|
||||
use wiremock::matchers::{method, path, query_param};
|
||||
use wiremock::{Mock, MockServer, ResponseTemplate};
|
||||
|
||||
async fn setup_mock_server() -> MockServer {
|
||||
MockServer::start().await
|
||||
}
|
||||
|
||||
fn create_test_api_client(server: &MockServer) -> ApiClient {
|
||||
let base_url = server.uri();
|
||||
ApiClient::new_with_base_urls(
|
||||
base_url.clone(), // firefox_api_base
|
||||
base_url.clone(), // firefox_dev_api_base
|
||||
base_url.clone(), // github_api_base
|
||||
base_url.clone(), // chromium_api_base
|
||||
base_url.clone(), // tor_archive_base
|
||||
)
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_resolve_brave_download_url() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let mock_response = r#"[
|
||||
{
|
||||
"tag_name": "v1.81.9",
|
||||
"name": "Brave Release 1.81.9",
|
||||
"prerelease": false,
|
||||
"published_at": "2024-01-15T10:00:00Z",
|
||||
"assets": [
|
||||
{
|
||||
"name": "brave-v1.81.9-universal.dmg",
|
||||
"browser_download_url": "https://example.com/brave-1.81.9-universal.dmg",
|
||||
"size": 200000000
|
||||
}
|
||||
]
|
||||
}
|
||||
]"#;
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/repos/brave/brave-browser/releases"))
|
||||
.and(query_param("per_page", "100"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(mock_response)
|
||||
.insert_header("content-type", "application/json"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "placeholder".to_string(),
|
||||
filename: "brave-test.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::Brave, "v1.81.9", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let url = result.unwrap();
|
||||
assert_eq!(url, "https://example.com/brave-1.81.9-universal.dmg");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_resolve_zen_download_url() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let mock_response = r#"[
|
||||
{
|
||||
"tag_name": "1.11b",
|
||||
"name": "Zen Browser 1.11b",
|
||||
"prerelease": false,
|
||||
"published_at": "2024-01-15T10:00:00Z",
|
||||
"assets": [
|
||||
{
|
||||
"name": "zen.macos-universal.dmg",
|
||||
"browser_download_url": "https://example.com/zen-1.11b-universal.dmg",
|
||||
"size": 120000000
|
||||
}
|
||||
]
|
||||
}
|
||||
]"#;
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/repos/zen-browser/desktop/releases"))
|
||||
.and(query_param("per_page", "100"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(mock_response)
|
||||
.insert_header("content-type", "application/json"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "placeholder".to_string(),
|
||||
filename: "zen-test.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::Zen, "1.11b", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let url = result.unwrap();
|
||||
assert_eq!(url, "https://example.com/zen-1.11b-universal.dmg");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_resolve_mullvad_download_url() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let mock_response = r#"[
|
||||
{
|
||||
"tag_name": "14.5a6",
|
||||
"name": "Mullvad Browser 14.5a6",
|
||||
"prerelease": true,
|
||||
"published_at": "2024-01-15T10:00:00Z",
|
||||
"assets": [
|
||||
{
|
||||
"name": "mullvad-browser-macos-14.5a6.dmg",
|
||||
"browser_download_url": "https://example.com/mullvad-14.5a6.dmg",
|
||||
"size": 100000000
|
||||
}
|
||||
]
|
||||
}
|
||||
]"#;
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/repos/mullvad/mullvad-browser/releases"))
|
||||
.and(query_param("per_page", "100"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(mock_response)
|
||||
.insert_header("content-type", "application/json"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "placeholder".to_string(),
|
||||
filename: "mullvad-test.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::MullvadBrowser, "14.5a6", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let url = result.unwrap();
|
||||
assert_eq!(url, "https://example.com/mullvad-14.5a6.dmg");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_resolve_firefox_download_url() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "https://download.mozilla.org/?product=firefox-139.0&os=osx&lang=en-US".to_string(),
|
||||
filename: "firefox-test.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::Firefox, "139.0", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let url = result.unwrap();
|
||||
assert_eq!(url, download_info.url);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_resolve_chromium_download_url() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "https://commondatastorage.googleapis.com/chromium-browser-snapshots/Mac/1465660/chrome-mac.zip".to_string(),
|
||||
filename: "chromium-test.zip".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::Chromium, "1465660", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let url = result.unwrap();
|
||||
assert_eq!(url, download_info.url);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_resolve_tor_download_url() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "https://archive.torproject.org/tor-package-archive/torbrowser/14.0.4/tor-browser-macos-14.0.4.dmg".to_string(),
|
||||
filename: "tor-test.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::TorBrowser, "14.0.4", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let url = result.unwrap();
|
||||
assert_eq!(url, download_info.url);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_resolve_brave_version_not_found() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let mock_response = r#"[
|
||||
{
|
||||
"tag_name": "v1.81.8",
|
||||
"name": "Brave Release 1.81.8",
|
||||
"prerelease": false,
|
||||
"published_at": "2024-01-15T10:00:00Z",
|
||||
"assets": [
|
||||
{
|
||||
"name": "brave-v1.81.8-universal.dmg",
|
||||
"browser_download_url": "https://example.com/brave-1.81.8-universal.dmg",
|
||||
"size": 200000000
|
||||
}
|
||||
]
|
||||
}
|
||||
]"#;
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/repos/brave/brave-browser/releases"))
|
||||
.and(query_param("per_page", "100"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(mock_response)
|
||||
.insert_header("content-type", "application/json"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "placeholder".to_string(),
|
||||
filename: "brave-test.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::Brave, "v1.81.9", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_err());
|
||||
assert!(result
|
||||
.unwrap_err()
|
||||
.to_string()
|
||||
.contains("Brave version v1.81.9 not found"));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_resolve_zen_asset_not_found() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let mock_response = r#"[
|
||||
{
|
||||
"tag_name": "1.11b",
|
||||
"name": "Zen Browser 1.11b",
|
||||
"prerelease": false,
|
||||
"published_at": "2024-01-15T10:00:00Z",
|
||||
"assets": [
|
||||
{
|
||||
"name": "zen.linux-universal.tar.bz2",
|
||||
"browser_download_url": "https://example.com/zen-1.11b-linux.tar.bz2",
|
||||
"size": 150000000
|
||||
}
|
||||
]
|
||||
}
|
||||
]"#;
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/repos/zen-browser/desktop/releases"))
|
||||
.and(query_param("per_page", "100"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(mock_response)
|
||||
.insert_header("content-type", "application/json"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "placeholder".to_string(),
|
||||
filename: "zen-test.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::Zen, "1.11b", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_err());
|
||||
assert!(result
|
||||
.unwrap_err()
|
||||
.to_string()
|
||||
.contains("No compatible asset found"));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_download_browser_with_progress() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
// Create a temporary directory for the test
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let dest_path = temp_dir.path();
|
||||
|
||||
// Create test file content (simulating a small download)
|
||||
let test_content = b"This is a test file content for download simulation";
|
||||
|
||||
// Mock the download endpoint
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/test-download"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_bytes(test_content)
|
||||
.insert_header("content-length", test_content.len().to_string())
|
||||
.insert_header("content-type", "application/octet-stream"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: format!("{}/test-download", server.uri()),
|
||||
filename: "test-file.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
// Create a mock app handle for testing
|
||||
let app = tauri::test::mock_app();
|
||||
let app_handle = app.handle().clone();
|
||||
|
||||
let result = downloader
|
||||
.download_browser(
|
||||
&app_handle,
|
||||
BrowserType::Firefox,
|
||||
"139.0",
|
||||
&download_info,
|
||||
dest_path,
|
||||
)
|
||||
.await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let downloaded_file = result.unwrap();
|
||||
assert!(downloaded_file.exists());
|
||||
|
||||
// Verify file content
|
||||
let downloaded_content = std::fs::read(&downloaded_file).unwrap();
|
||||
assert_eq!(downloaded_content, test_content);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_download_browser_network_error() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let dest_path = temp_dir.path();
|
||||
|
||||
// Mock a 404 response
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/missing-file"))
|
||||
.respond_with(ResponseTemplate::new(404))
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: format!("{}/missing-file", server.uri()),
|
||||
filename: "missing-file.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let app = tauri::test::mock_app();
|
||||
let app_handle = app.handle().clone();
|
||||
|
||||
let result = downloader
|
||||
.download_browser(
|
||||
&app_handle,
|
||||
BrowserType::Firefox,
|
||||
"139.0",
|
||||
&download_info,
|
||||
dest_path,
|
||||
)
|
||||
.await;
|
||||
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_resolve_mullvad_asset_not_found() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let mock_response = r#"[
|
||||
{
|
||||
"tag_name": "14.5a6",
|
||||
"name": "Mullvad Browser 14.5a6",
|
||||
"prerelease": true,
|
||||
"published_at": "2024-01-15T10:00:00Z",
|
||||
"assets": [
|
||||
{
|
||||
"name": "mullvad-browser-linux-14.5a6.tar.xz",
|
||||
"browser_download_url": "https://example.com/mullvad-14.5a6.tar.xz",
|
||||
"size": 80000000
|
||||
}
|
||||
]
|
||||
}
|
||||
]"#;
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/repos/mullvad/mullvad-browser/releases"))
|
||||
.and(query_param("per_page", "100"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(mock_response)
|
||||
.insert_header("content-type", "application/json"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "placeholder".to_string(),
|
||||
filename: "mullvad-test.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::MullvadBrowser, "14.5a6", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_err());
|
||||
assert!(result
|
||||
.unwrap_err()
|
||||
.to_string()
|
||||
.contains("No compatible asset found"));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_brave_version_with_v_prefix() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let mock_response = r#"[
|
||||
{
|
||||
"tag_name": "v1.81.9",
|
||||
"name": "Brave Release 1.81.9",
|
||||
"prerelease": false,
|
||||
"published_at": "2024-01-15T10:00:00Z",
|
||||
"assets": [
|
||||
{
|
||||
"name": "brave-v1.81.9-universal.dmg",
|
||||
"browser_download_url": "https://example.com/brave-1.81.9-universal.dmg",
|
||||
"size": 200000000
|
||||
}
|
||||
]
|
||||
}
|
||||
]"#;
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/repos/brave/brave-browser/releases"))
|
||||
.and(query_param("per_page", "100"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(mock_response)
|
||||
.insert_header("content-type", "application/json"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "placeholder".to_string(),
|
||||
filename: "brave-test.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
// Test with version without v prefix
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::Brave, "1.81.9", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let url = result.unwrap();
|
||||
assert_eq!(url, "https://example.com/brave-1.81.9-universal.dmg");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_download_browser_chunked_response() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let dest_path = temp_dir.path();
|
||||
|
||||
// Create larger test content to simulate chunked transfer
|
||||
let test_content = vec![42u8; 1024]; // 1KB of data
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/chunked-download"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_bytes(test_content.clone())
|
||||
.insert_header("content-length", test_content.len().to_string())
|
||||
.insert_header("content-type", "application/octet-stream"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: format!("{}/chunked-download", server.uri()),
|
||||
filename: "chunked-file.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let app = tauri::test::mock_app();
|
||||
let app_handle = app.handle().clone();
|
||||
|
||||
let result = downloader
|
||||
.download_browser(
|
||||
&app_handle,
|
||||
BrowserType::Chromium,
|
||||
"1465660",
|
||||
&download_info,
|
||||
dest_path,
|
||||
)
|
||||
.await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let downloaded_file = result.unwrap();
|
||||
assert!(downloaded_file.exists());
|
||||
|
||||
let downloaded_content = std::fs::read(&downloaded_file).unwrap();
|
||||
assert_eq!(downloaded_content.len(), test_content.len());
|
||||
}
|
||||
}
|
||||
@@ -1,354 +0,0 @@
|
||||
use directories::BaseDirs;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct DownloadedBrowserInfo {
|
||||
pub browser: String,
|
||||
pub version: String,
|
||||
pub download_date: u64,
|
||||
pub file_path: PathBuf,
|
||||
pub verified: bool,
|
||||
pub actual_version: Option<String>, // For browsers like Chromium where we track the actual version
|
||||
pub file_size: Option<u64>, // For tracking file size changes (useful for rolling releases)
|
||||
#[serde(default)] // Add default value (false) for backwards compatibility
|
||||
pub is_rolling_release: bool, // True for Zen's twilight releases and other rolling releases
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Default)]
|
||||
pub struct DownloadedBrowsersRegistry {
|
||||
pub browsers: HashMap<String, HashMap<String, DownloadedBrowserInfo>>, // browser -> version -> info
|
||||
}
|
||||
|
||||
impl DownloadedBrowsersRegistry {
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
|
||||
pub fn load() -> Result<Self, Box<dyn std::error::Error>> {
|
||||
let registry_path = Self::get_registry_path()?;
|
||||
|
||||
if !registry_path.exists() {
|
||||
return Ok(Self::new());
|
||||
}
|
||||
|
||||
let content = fs::read_to_string(®istry_path)?;
|
||||
let registry: DownloadedBrowsersRegistry = serde_json::from_str(&content)?;
|
||||
Ok(registry)
|
||||
}
|
||||
|
||||
pub fn save(&self) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let registry_path = Self::get_registry_path()?;
|
||||
|
||||
// Ensure parent directory exists
|
||||
if let Some(parent) = registry_path.parent() {
|
||||
fs::create_dir_all(parent)?;
|
||||
}
|
||||
|
||||
let content = serde_json::to_string_pretty(self)?;
|
||||
fs::write(®istry_path, content)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_registry_path() -> Result<PathBuf, Box<dyn std::error::Error>> {
|
||||
let base_dirs = BaseDirs::new().ok_or("Failed to get base directories")?;
|
||||
let mut path = base_dirs.data_local_dir().to_path_buf();
|
||||
path.push(if cfg!(debug_assertions) {
|
||||
"DonutBrowserDev"
|
||||
} else {
|
||||
"DonutBrowser"
|
||||
});
|
||||
path.push("data");
|
||||
path.push("downloaded_browsers.json");
|
||||
Ok(path)
|
||||
}
|
||||
|
||||
pub fn add_browser(&mut self, info: DownloadedBrowserInfo) {
|
||||
self
|
||||
.browsers
|
||||
.entry(info.browser.clone())
|
||||
.or_default()
|
||||
.insert(info.version.clone(), info);
|
||||
}
|
||||
|
||||
pub fn remove_browser(&mut self, browser: &str, version: &str) -> Option<DownloadedBrowserInfo> {
|
||||
self.browsers.get_mut(browser)?.remove(version)
|
||||
}
|
||||
|
||||
pub fn is_browser_downloaded(&self, browser: &str, version: &str) -> bool {
|
||||
self
|
||||
.browsers
|
||||
.get(browser)
|
||||
.and_then(|versions| versions.get(version))
|
||||
.map(|info| info.verified)
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
pub fn get_downloaded_versions(&self, browser: &str) -> Vec<String> {
|
||||
self
|
||||
.browsers
|
||||
.get(browser)
|
||||
.map(|versions| {
|
||||
versions
|
||||
.iter()
|
||||
.filter(|(_, info)| info.verified)
|
||||
.map(|(version, _)| version.clone())
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
pub fn mark_download_started(&mut self, browser: &str, version: &str, file_path: PathBuf) {
|
||||
let is_rolling = Self::is_rolling_release(browser, version);
|
||||
let info = DownloadedBrowserInfo {
|
||||
browser: browser.to_string(),
|
||||
version: version.to_string(),
|
||||
download_date: std::time::SystemTime::now()
|
||||
.duration_since(std::time::UNIX_EPOCH)
|
||||
.unwrap_or_default()
|
||||
.as_secs(),
|
||||
file_path,
|
||||
verified: false,
|
||||
actual_version: None,
|
||||
file_size: None,
|
||||
is_rolling_release: is_rolling,
|
||||
};
|
||||
self.add_browser(info);
|
||||
}
|
||||
|
||||
pub fn mark_download_completed_with_actual_version(
|
||||
&mut self,
|
||||
browser: &str,
|
||||
version: &str,
|
||||
actual_version: Option<String>,
|
||||
) -> Result<(), String> {
|
||||
if let Some(info) = self
|
||||
.browsers
|
||||
.get_mut(browser)
|
||||
.and_then(|versions| versions.get_mut(version))
|
||||
{
|
||||
info.verified = true;
|
||||
info.actual_version = actual_version;
|
||||
Ok(())
|
||||
} else {
|
||||
Err(format!("Browser {browser}:{version} not found in registry"))
|
||||
}
|
||||
}
|
||||
|
||||
fn is_rolling_release(browser: &str, version: &str) -> bool {
|
||||
// Check if this is a rolling release like twilight
|
||||
browser == "zen" && version.to_lowercase() == "twilight"
|
||||
}
|
||||
|
||||
pub fn cleanup_failed_download(
|
||||
&mut self,
|
||||
browser: &str,
|
||||
version: &str,
|
||||
) -> Result<(), Box<dyn std::error::Error>> {
|
||||
if let Some(info) = self.remove_browser(browser, version) {
|
||||
// Clean up any files that might have been left behind
|
||||
if info.file_path.exists() {
|
||||
if info.file_path.is_dir() {
|
||||
fs::remove_dir_all(&info.file_path)?;
|
||||
} else {
|
||||
fs::remove_file(&info.file_path)?;
|
||||
}
|
||||
}
|
||||
|
||||
// Also clean up the browser directory if it exists
|
||||
let base_dirs = BaseDirs::new().ok_or("Failed to get base directories")?;
|
||||
let mut browser_dir = base_dirs.data_local_dir().to_path_buf();
|
||||
browser_dir.push(if cfg!(debug_assertions) {
|
||||
"DonutBrowserDev"
|
||||
} else {
|
||||
"DonutBrowser"
|
||||
});
|
||||
browser_dir.push("binaries");
|
||||
browser_dir.push(browser);
|
||||
browser_dir.push(version);
|
||||
|
||||
if browser_dir.exists() {
|
||||
fs::remove_dir_all(&browser_dir)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Find and remove unused browser binaries that are not referenced by any active profiles
|
||||
pub fn cleanup_unused_binaries(
|
||||
&mut self,
|
||||
active_profiles: &[(String, String)], // (browser, version) pairs
|
||||
) -> Result<Vec<String>, Box<dyn std::error::Error>> {
|
||||
let active_set: std::collections::HashSet<(String, String)> =
|
||||
active_profiles.iter().cloned().collect();
|
||||
let mut cleaned_up = Vec::new();
|
||||
|
||||
// Collect all downloaded browsers that are not in active profiles
|
||||
let mut to_remove = Vec::new();
|
||||
for (browser, versions) in &self.browsers {
|
||||
for (version, info) in versions {
|
||||
if info.verified && !active_set.contains(&(browser.clone(), version.clone())) {
|
||||
to_remove.push((browser.clone(), version.clone()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Remove unused binaries
|
||||
for (browser, version) in to_remove {
|
||||
if let Err(e) = self.cleanup_failed_download(&browser, &version) {
|
||||
eprintln!("Failed to cleanup unused binary {browser}:{version}: {e}");
|
||||
} else {
|
||||
cleaned_up.push(format!("{browser} {version}"));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(cleaned_up)
|
||||
}
|
||||
|
||||
/// Get all browsers and versions referenced by active profiles
|
||||
pub fn get_active_browser_versions(
|
||||
&self,
|
||||
profiles: &[crate::browser_runner::BrowserProfile],
|
||||
) -> Vec<(String, String)> {
|
||||
profiles
|
||||
.iter()
|
||||
.map(|profile| (profile.browser.clone(), profile.version.clone()))
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_registry_creation() {
|
||||
let registry = DownloadedBrowsersRegistry::new();
|
||||
assert!(registry.browsers.is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_add_and_get_browser() {
|
||||
let mut registry = DownloadedBrowsersRegistry::new();
|
||||
let info = DownloadedBrowserInfo {
|
||||
browser: "firefox".to_string(),
|
||||
version: "139.0".to_string(),
|
||||
download_date: 1234567890,
|
||||
file_path: PathBuf::from("/test/path"),
|
||||
verified: true,
|
||||
actual_version: None,
|
||||
file_size: None,
|
||||
is_rolling_release: false,
|
||||
};
|
||||
|
||||
registry.add_browser(info.clone());
|
||||
|
||||
assert!(registry.is_browser_downloaded("firefox", "139.0"));
|
||||
assert!(!registry.is_browser_downloaded("firefox", "140.0"));
|
||||
assert!(!registry.is_browser_downloaded("chrome", "139.0"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_downloaded_versions() {
|
||||
let mut registry = DownloadedBrowsersRegistry::new();
|
||||
|
||||
let info1 = DownloadedBrowserInfo {
|
||||
browser: "firefox".to_string(),
|
||||
version: "139.0".to_string(),
|
||||
download_date: 1234567890,
|
||||
file_path: PathBuf::from("/test/path1"),
|
||||
verified: true,
|
||||
actual_version: None,
|
||||
file_size: None,
|
||||
is_rolling_release: false,
|
||||
};
|
||||
|
||||
let info2 = DownloadedBrowserInfo {
|
||||
browser: "firefox".to_string(),
|
||||
version: "140.0".to_string(),
|
||||
download_date: 1234567891,
|
||||
file_path: PathBuf::from("/test/path2"),
|
||||
verified: false, // Not verified, should not be included
|
||||
actual_version: None,
|
||||
file_size: None,
|
||||
is_rolling_release: false,
|
||||
};
|
||||
|
||||
let info3 = DownloadedBrowserInfo {
|
||||
browser: "firefox".to_string(),
|
||||
version: "141.0".to_string(),
|
||||
download_date: 1234567892,
|
||||
file_path: PathBuf::from("/test/path3"),
|
||||
verified: true,
|
||||
actual_version: None,
|
||||
file_size: None,
|
||||
is_rolling_release: false,
|
||||
};
|
||||
|
||||
registry.add_browser(info1);
|
||||
registry.add_browser(info2);
|
||||
registry.add_browser(info3);
|
||||
|
||||
let versions = registry.get_downloaded_versions("firefox");
|
||||
assert_eq!(versions.len(), 2);
|
||||
assert!(versions.contains(&"139.0".to_string()));
|
||||
assert!(versions.contains(&"141.0".to_string()));
|
||||
assert!(!versions.contains(&"140.0".to_string()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_mark_download_lifecycle() {
|
||||
let mut registry = DownloadedBrowsersRegistry::new();
|
||||
|
||||
// Mark download started
|
||||
registry.mark_download_started("firefox", "139.0", PathBuf::from("/test/path"));
|
||||
|
||||
// Should not be considered downloaded yet
|
||||
assert!(!registry.is_browser_downloaded("firefox", "139.0"));
|
||||
|
||||
// Mark as completed
|
||||
registry
|
||||
.mark_download_completed_with_actual_version("firefox", "139.0", Some("139.0".to_string()))
|
||||
.unwrap();
|
||||
|
||||
// Now should be considered downloaded
|
||||
assert!(registry.is_browser_downloaded("firefox", "139.0"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_remove_browser() {
|
||||
let mut registry = DownloadedBrowsersRegistry::new();
|
||||
let info = DownloadedBrowserInfo {
|
||||
browser: "firefox".to_string(),
|
||||
version: "139.0".to_string(),
|
||||
download_date: 1234567890,
|
||||
file_path: PathBuf::from("/test/path"),
|
||||
verified: true,
|
||||
actual_version: None,
|
||||
file_size: None,
|
||||
is_rolling_release: false,
|
||||
};
|
||||
|
||||
registry.add_browser(info);
|
||||
assert!(registry.is_browser_downloaded("firefox", "139.0"));
|
||||
|
||||
let removed = registry.remove_browser("firefox", "139.0");
|
||||
assert!(removed.is_some());
|
||||
assert!(!registry.is_browser_downloaded("firefox", "139.0"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_twilight_rolling_release() {
|
||||
let mut registry = DownloadedBrowsersRegistry::new();
|
||||
|
||||
// Mark twilight download started
|
||||
registry.mark_download_started("zen", "twilight", PathBuf::from("/test/zen-twilight"));
|
||||
|
||||
// Check that it's marked as rolling release
|
||||
let zen_versions = ®istry.browsers["zen"];
|
||||
let twilight_info = &zen_versions["twilight"];
|
||||
assert!(twilight_info.is_rolling_release);
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
+806
-750
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,388 @@
|
||||
use crate::browser::GithubRelease;
|
||||
use crate::profile::manager::ProfileManager;
|
||||
use directories::BaseDirs;
|
||||
use reqwest::Client;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::PathBuf;
|
||||
use tauri::Emitter;
|
||||
use tokio::fs;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
|
||||
const MMDB_REPO: &str = "P3TERX/GeoLite.mmdb";
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct GeoIPDownloadProgress {
|
||||
pub stage: String, // "downloading", "extracting", "completed"
|
||||
pub percentage: f64,
|
||||
pub message: String,
|
||||
// Extra fields to mirror browser download progress payload
|
||||
pub downloaded_bytes: Option<u64>,
|
||||
pub total_bytes: Option<u64>,
|
||||
pub speed_bytes_per_sec: Option<f64>,
|
||||
pub eta_seconds: Option<f64>,
|
||||
}
|
||||
|
||||
pub struct GeoIPDownloader {
|
||||
client: Client,
|
||||
}
|
||||
|
||||
impl GeoIPDownloader {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
client: Client::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn instance() -> &'static GeoIPDownloader {
|
||||
&GEOIP_DOWNLOADER
|
||||
}
|
||||
|
||||
/// Create a new downloader with custom client (for testing)
|
||||
#[cfg(test)]
|
||||
pub fn new_with_client(client: Client) -> Self {
|
||||
Self { client }
|
||||
}
|
||||
|
||||
fn get_cache_dir() -> Result<PathBuf, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let base_dirs = BaseDirs::new().ok_or("Failed to determine base directories")?;
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
let cache_dir = base_dirs
|
||||
.data_local_dir()
|
||||
.join("camoufox")
|
||||
.join("camoufox")
|
||||
.join("Cache");
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
let cache_dir = base_dirs.cache_dir().join("camoufox");
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
let cache_dir = base_dirs.cache_dir().join("camoufox");
|
||||
|
||||
#[cfg(not(any(target_os = "windows", target_os = "macos", target_os = "linux")))]
|
||||
let cache_dir = base_dirs.cache_dir().join("camoufox");
|
||||
|
||||
Ok(cache_dir)
|
||||
}
|
||||
|
||||
fn get_mmdb_file_path() -> Result<PathBuf, Box<dyn std::error::Error + Send + Sync>> {
|
||||
Ok(Self::get_cache_dir()?.join("GeoLite2-City.mmdb"))
|
||||
}
|
||||
|
||||
pub fn is_geoip_database_available() -> bool {
|
||||
if let Ok(mmdb_path) = Self::get_mmdb_file_path() {
|
||||
mmdb_path.exists()
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
/// Check if GeoIP database is missing for Camoufox profiles
|
||||
pub fn check_missing_geoip_database(
|
||||
&self,
|
||||
) -> Result<bool, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Get all profiles
|
||||
let profiles = ProfileManager::instance()
|
||||
.list_profiles()
|
||||
.map_err(|e| format!("Failed to list profiles: {e}"))?;
|
||||
|
||||
// Check if there are any Camoufox profiles
|
||||
let has_camoufox_profiles = profiles.iter().any(|profile| profile.browser == "camoufox");
|
||||
|
||||
if has_camoufox_profiles {
|
||||
// Check if GeoIP database is available
|
||||
return Ok(!Self::is_geoip_database_available());
|
||||
}
|
||||
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
fn find_city_mmdb_asset(&self, release: &GithubRelease) -> Option<String> {
|
||||
for asset in &release.assets {
|
||||
if asset.name.ends_with("-City.mmdb") {
|
||||
return Some(asset.browser_download_url.clone());
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
pub async fn download_geoip_database(
|
||||
&self,
|
||||
app_handle: &tauri::AppHandle,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Emit initial progress
|
||||
let _ = app_handle.emit(
|
||||
"geoip-download-progress",
|
||||
GeoIPDownloadProgress {
|
||||
stage: "downloading".to_string(),
|
||||
percentage: 0.0,
|
||||
message: "Starting GeoIP database download".to_string(),
|
||||
downloaded_bytes: Some(0),
|
||||
total_bytes: None,
|
||||
speed_bytes_per_sec: Some(0.0),
|
||||
eta_seconds: None,
|
||||
},
|
||||
);
|
||||
|
||||
// Fetch latest release from GitHub
|
||||
let releases = self.fetch_geoip_releases().await?;
|
||||
let latest_release = releases.first().ok_or("No GeoIP database releases found")?;
|
||||
|
||||
let download_url = self
|
||||
.find_city_mmdb_asset(latest_release)
|
||||
.ok_or("No compatible GeoIP database asset found")?;
|
||||
|
||||
// Create cache directory
|
||||
let cache_dir = Self::get_cache_dir()?;
|
||||
fs::create_dir_all(&cache_dir).await?;
|
||||
|
||||
let mmdb_path = Self::get_mmdb_file_path()?;
|
||||
|
||||
// Download the file
|
||||
let response = self.client.get(&download_url).send().await?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
return Err(
|
||||
format!(
|
||||
"Failed to download GeoIP database: HTTP {}",
|
||||
response.status()
|
||||
)
|
||||
.into(),
|
||||
);
|
||||
}
|
||||
|
||||
let total_size = response.content_length().unwrap_or(0);
|
||||
let mut downloaded: u64 = 0;
|
||||
let mut file = fs::File::create(&mmdb_path).await?;
|
||||
let mut stream = response.bytes_stream();
|
||||
|
||||
use futures_util::StreamExt;
|
||||
use std::time::Instant;
|
||||
let start_time = Instant::now();
|
||||
let mut last_update = Instant::now();
|
||||
while let Some(chunk) = stream.next().await {
|
||||
let chunk = chunk?;
|
||||
downloaded += chunk.len() as u64;
|
||||
file.write_all(&chunk).await?;
|
||||
|
||||
let now = Instant::now();
|
||||
if now.duration_since(last_update).as_millis() >= 100 {
|
||||
let elapsed = start_time.elapsed().as_secs_f64();
|
||||
let speed = if elapsed > 0.0 {
|
||||
downloaded as f64 / elapsed
|
||||
} else {
|
||||
0.0
|
||||
};
|
||||
let percentage = if total_size > 0 {
|
||||
(downloaded as f64 / total_size as f64) * 100.0
|
||||
} else {
|
||||
0.0
|
||||
};
|
||||
let eta = if speed > 0.0 && total_size > 0 {
|
||||
Some((total_size.saturating_sub(downloaded)) as f64 / speed)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let _ = app_handle.emit(
|
||||
"geoip-download-progress",
|
||||
GeoIPDownloadProgress {
|
||||
stage: "downloading".to_string(),
|
||||
percentage,
|
||||
message: format!("Downloaded {downloaded} / {total_size} bytes"),
|
||||
downloaded_bytes: Some(downloaded),
|
||||
total_bytes: Some(total_size),
|
||||
speed_bytes_per_sec: Some(speed),
|
||||
eta_seconds: eta,
|
||||
},
|
||||
);
|
||||
last_update = now;
|
||||
}
|
||||
}
|
||||
|
||||
file.flush().await?;
|
||||
|
||||
// Emit completion
|
||||
let _ = app_handle.emit(
|
||||
"geoip-download-progress",
|
||||
GeoIPDownloadProgress {
|
||||
stage: "completed".to_string(),
|
||||
percentage: 100.0,
|
||||
message: "GeoIP database download completed".to_string(),
|
||||
downloaded_bytes: Some(downloaded),
|
||||
total_bytes: Some(total_size),
|
||||
speed_bytes_per_sec: Some(0.0),
|
||||
eta_seconds: Some(0.0),
|
||||
},
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn fetch_geoip_releases(
|
||||
&self,
|
||||
) -> Result<Vec<GithubRelease>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let url = format!("https://api.github.com/repos/{MMDB_REPO}/releases");
|
||||
let response = self
|
||||
.client
|
||||
.get(&url)
|
||||
.header("User-Agent", "Mozilla/5.0 (compatible; donutbrowser)")
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
return Err(format!("Failed to fetch releases: HTTP {}", response.status()).into());
|
||||
}
|
||||
|
||||
let releases: Vec<GithubRelease> = response.json().await?;
|
||||
Ok(releases)
|
||||
}
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub fn check_missing_geoip_database() -> Result<bool, String> {
|
||||
let geoip_downloader = GeoIPDownloader::instance();
|
||||
geoip_downloader
|
||||
.check_missing_geoip_database()
|
||||
.map_err(|e| format!("Failed to check missing GeoIP database: {e}"))
|
||||
}
|
||||
|
||||
// Global singleton instance
|
||||
lazy_static::lazy_static! {
|
||||
static ref GEOIP_DOWNLOADER: GeoIPDownloader = GeoIPDownloader::new();
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::browser::GithubRelease;
|
||||
use wiremock::matchers::{method, path};
|
||||
use wiremock::{Mock, MockServer, ResponseTemplate};
|
||||
|
||||
fn create_mock_release() -> GithubRelease {
|
||||
GithubRelease {
|
||||
tag_name: "v1.0.0".to_string(),
|
||||
name: "Test Release".to_string(),
|
||||
body: Some("Test release body".to_string()),
|
||||
published_at: "2023-01-01T00:00:00Z".to_string(),
|
||||
created_at: Some("2023-01-01T00:00:00Z".to_string()),
|
||||
html_url: Some("https://example.com/release".to_string()),
|
||||
tarball_url: Some("https://example.com/tarball".to_string()),
|
||||
zipball_url: Some("https://example.com/zipball".to_string()),
|
||||
draft: false,
|
||||
prerelease: false,
|
||||
is_nightly: false,
|
||||
id: Some(1),
|
||||
node_id: Some("test_node_id".to_string()),
|
||||
target_commitish: None,
|
||||
assets: vec![crate::browser::GithubAsset {
|
||||
id: Some(1),
|
||||
node_id: Some("test_asset_node_id".to_string()),
|
||||
name: "GeoLite2-City.mmdb".to_string(),
|
||||
label: None,
|
||||
content_type: Some("application/octet-stream".to_string()),
|
||||
state: Some("uploaded".to_string()),
|
||||
size: 1024,
|
||||
download_count: Some(0),
|
||||
created_at: Some("2023-01-01T00:00:00Z".to_string()),
|
||||
updated_at: Some("2023-01-01T00:00:00Z".to_string()),
|
||||
browser_download_url: "https://example.com/GeoLite2-City.mmdb".to_string(),
|
||||
}],
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_fetch_geoip_releases_success() {
|
||||
let mock_server = MockServer::start().await;
|
||||
let releases = vec![create_mock_release()];
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path(format!("/repos/{MMDB_REPO}/releases")))
|
||||
.respond_with(ResponseTemplate::new(200).set_body_json(&releases))
|
||||
.mount(&mock_server)
|
||||
.await;
|
||||
|
||||
let client = Client::builder()
|
||||
.build()
|
||||
.expect("Failed to create HTTP client");
|
||||
|
||||
let downloader = GeoIPDownloader::new_with_client(client);
|
||||
|
||||
// Override the URL for testing
|
||||
let url = format!("{}/repos/{}/releases", mock_server.uri(), MMDB_REPO);
|
||||
let response = downloader
|
||||
.client
|
||||
.get(&url)
|
||||
.header("User-Agent", "Mozilla/5.0 (compatible; donutbrowser)")
|
||||
.send()
|
||||
.await
|
||||
.expect("Request should succeed");
|
||||
|
||||
assert!(response.status().is_success());
|
||||
|
||||
let fetched_releases: Vec<GithubRelease> = response.json().await.expect("Should parse JSON");
|
||||
assert_eq!(fetched_releases.len(), 1);
|
||||
assert_eq!(fetched_releases[0].tag_name, "v1.0.0");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_find_city_mmdb_asset() {
|
||||
let downloader = GeoIPDownloader::new();
|
||||
let release = create_mock_release();
|
||||
|
||||
let asset_url = downloader.find_city_mmdb_asset(&release);
|
||||
assert!(asset_url.is_some());
|
||||
assert_eq!(asset_url.unwrap(), "https://example.com/GeoLite2-City.mmdb");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_find_city_mmdb_asset_not_found() {
|
||||
let downloader = GeoIPDownloader::new();
|
||||
let mut release = create_mock_release();
|
||||
release.assets[0].name = "wrong-file.txt".to_string();
|
||||
|
||||
let asset_url = downloader.find_city_mmdb_asset(&release);
|
||||
assert!(asset_url.is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_cache_dir() {
|
||||
let cache_dir = GeoIPDownloader::get_cache_dir();
|
||||
assert!(cache_dir.is_ok());
|
||||
|
||||
let path = cache_dir.unwrap();
|
||||
assert!(path.to_string_lossy().contains("camoufox"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_mmdb_file_path() {
|
||||
let mmdb_path = GeoIPDownloader::get_mmdb_file_path();
|
||||
assert!(mmdb_path.is_ok());
|
||||
|
||||
let path = mmdb_path.unwrap();
|
||||
assert!(path.to_string_lossy().ends_with("GeoLite2-City.mmdb"));
|
||||
}
|
||||
|
||||
#[test]
fn test_is_geoip_database_available() {
  // The availability check must not panic and must agree with the actual
  // on-disk state, regardless of whether the database file exists.
  let is_available = GeoIPDownloader::is_geoip_database_available();

  let mmdb_path =
    GeoIPDownloader::get_mmdb_file_path().expect("Should be able to get MMDB file path");

  assert_eq!(
    is_available,
    mmdb_path.exists(),
    "Function result should match actual file existence"
  );
}
|
||||
}
|
||||
@@ -0,0 +1,314 @@
|
||||
use directories::BaseDirs;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Mutex;
|
||||
use tauri::Emitter;
|
||||
|
||||
/// A named collection of browser profiles, persisted in `groups.json`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ProfileGroup {
  /// Stable identifier (a UUID in string form, assigned at creation).
  pub id: String,
  /// Human-readable group name; uniqueness is enforced on create/update.
  pub name: String,
}
|
||||
|
||||
/// A group together with the number of profiles assigned to it,
/// as produced by `get_groups_with_profile_counts` for UI display.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GroupWithCount {
  pub id: String,
  pub name: String,
  /// Number of profiles whose `group_id` matches this group.
  pub count: usize,
}
|
||||
|
||||
/// On-disk JSON schema of `groups.json`.
#[derive(Debug, Serialize, Deserialize)]
struct GroupsData {
  groups: Vec<ProfileGroup>,
}
|
||||
|
||||
/// Loads, mutates, and persists profile groups on disk.
pub struct GroupManager {
  // Platform base directories used to locate the local data dir.
  base_dirs: BaseDirs,
  // Optional data-directory override (from env var or test constructor);
  // when set, groups.json is stored directly inside this directory.
  data_dir_override: Option<PathBuf>,
}
|
||||
|
||||
impl GroupManager {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
base_dirs: BaseDirs::new().expect("Failed to get base directories"),
|
||||
data_dir_override: std::env::var("DONUTBROWSER_DATA_DIR")
|
||||
.ok()
|
||||
.map(PathBuf::from),
|
||||
}
|
||||
}
|
||||
|
||||
// Helper for tests to override data directory without global env var
|
||||
#[allow(dead_code)]
|
||||
pub fn with_data_dir_override(dir: &Path) -> Self {
|
||||
Self {
|
||||
base_dirs: BaseDirs::new().expect("Failed to get base directories"),
|
||||
data_dir_override: Some(dir.to_path_buf()),
|
||||
}
|
||||
}
|
||||
|
||||
fn get_groups_file_path(&self) -> PathBuf {
|
||||
if let Some(dir) = &self.data_dir_override {
|
||||
let mut override_path = dir.clone();
|
||||
// Ensure the directory exists before returning the path
|
||||
let _ = fs::create_dir_all(&override_path);
|
||||
override_path.push("groups.json");
|
||||
return override_path;
|
||||
}
|
||||
|
||||
let mut path = self.base_dirs.data_local_dir().to_path_buf();
|
||||
path.push(if cfg!(debug_assertions) {
|
||||
"DonutBrowserDev"
|
||||
} else {
|
||||
"DonutBrowser"
|
||||
});
|
||||
path.push("data");
|
||||
path.push("groups.json");
|
||||
path
|
||||
}
|
||||
|
||||
fn load_groups_data(&self) -> Result<GroupsData, Box<dyn std::error::Error>> {
|
||||
let groups_file = self.get_groups_file_path();
|
||||
|
||||
if !groups_file.exists() {
|
||||
return Ok(GroupsData { groups: Vec::new() });
|
||||
}
|
||||
|
||||
let content = fs::read_to_string(groups_file)?;
|
||||
let groups_data: GroupsData = serde_json::from_str(&content)?;
|
||||
Ok(groups_data)
|
||||
}
|
||||
|
||||
fn save_groups_data(&self, groups_data: &GroupsData) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let groups_file = self.get_groups_file_path();
|
||||
|
||||
// Ensure the parent directory exists
|
||||
if let Some(parent) = groups_file.parent() {
|
||||
fs::create_dir_all(parent)?;
|
||||
}
|
||||
|
||||
let json = serde_json::to_string_pretty(groups_data)?;
|
||||
fs::write(groups_file, json)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn get_all_groups(&self) -> Result<Vec<ProfileGroup>, Box<dyn std::error::Error>> {
|
||||
let groups_data = self.load_groups_data()?;
|
||||
Ok(groups_data.groups)
|
||||
}
|
||||
|
||||
pub fn create_group(
|
||||
&self,
|
||||
app_handle: &tauri::AppHandle,
|
||||
name: String,
|
||||
) -> Result<ProfileGroup, Box<dyn std::error::Error>> {
|
||||
let mut groups_data = self.load_groups_data()?;
|
||||
|
||||
// Check if group with this name already exists
|
||||
if groups_data.groups.iter().any(|g| g.name == name) {
|
||||
return Err(format!("Group with name '{name}' already exists").into());
|
||||
}
|
||||
|
||||
let group = ProfileGroup {
|
||||
id: uuid::Uuid::new_v4().to_string(),
|
||||
name,
|
||||
};
|
||||
|
||||
groups_data.groups.push(group.clone());
|
||||
self.save_groups_data(&groups_data)?;
|
||||
|
||||
// Emit event for reactive UI updates
|
||||
if let Err(e) = app_handle.emit("groups-changed", ()) {
|
||||
eprintln!("Failed to emit groups-changed event: {e}");
|
||||
}
|
||||
|
||||
Ok(group)
|
||||
}
|
||||
|
||||
pub fn update_group(
|
||||
&self,
|
||||
app_handle: &tauri::AppHandle,
|
||||
id: String,
|
||||
name: String,
|
||||
) -> Result<ProfileGroup, Box<dyn std::error::Error>> {
|
||||
let mut groups_data = self.load_groups_data()?;
|
||||
|
||||
// Check if another group with this name already exists
|
||||
if groups_data
|
||||
.groups
|
||||
.iter()
|
||||
.any(|g| g.name == name && g.id != id)
|
||||
{
|
||||
return Err(format!("Group with name '{name}' already exists").into());
|
||||
}
|
||||
|
||||
let group = groups_data
|
||||
.groups
|
||||
.iter_mut()
|
||||
.find(|g| g.id == id)
|
||||
.ok_or_else(|| format!("Group with id '{id}' not found"))?;
|
||||
|
||||
group.name = name;
|
||||
let updated_group = group.clone();
|
||||
|
||||
self.save_groups_data(&groups_data)?;
|
||||
|
||||
// Emit event for reactive UI updates
|
||||
if let Err(e) = app_handle.emit("groups-changed", ()) {
|
||||
eprintln!("Failed to emit groups-changed event: {e}");
|
||||
}
|
||||
|
||||
Ok(updated_group)
|
||||
}
|
||||
|
||||
pub fn delete_group(
|
||||
&self,
|
||||
app_handle: &tauri::AppHandle,
|
||||
id: String,
|
||||
) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let mut groups_data = self.load_groups_data()?;
|
||||
|
||||
let initial_len = groups_data.groups.len();
|
||||
groups_data.groups.retain(|g| g.id != id);
|
||||
|
||||
if groups_data.groups.len() == initial_len {
|
||||
return Err(format!("Group with id '{id}' not found").into());
|
||||
}
|
||||
|
||||
self.save_groups_data(&groups_data)?;
|
||||
|
||||
// Emit event for reactive UI updates
|
||||
if let Err(e) = app_handle.emit("groups-changed", ()) {
|
||||
eprintln!("Failed to emit groups-changed event: {e}");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn get_groups_with_profile_counts(
|
||||
&self,
|
||||
profiles: &[crate::profile::BrowserProfile],
|
||||
) -> Result<Vec<GroupWithCount>, Box<dyn std::error::Error>> {
|
||||
let groups = self.get_all_groups()?;
|
||||
let mut group_counts = HashMap::new();
|
||||
|
||||
// Count profiles in each group
|
||||
for profile in profiles {
|
||||
if let Some(group_id) = &profile.group_id {
|
||||
*group_counts.entry(group_id.clone()).or_insert(0) += 1;
|
||||
}
|
||||
}
|
||||
|
||||
// Create result including all groups (even those with 0 count)
|
||||
let mut result = Vec::new();
|
||||
for group in groups {
|
||||
let count = group_counts.get(&group.id).copied().unwrap_or(0);
|
||||
result.push(GroupWithCount {
|
||||
id: group.id,
|
||||
name: group.name,
|
||||
count,
|
||||
});
|
||||
}
|
||||
|
||||
// Add default group count (profiles without group_id), always include even if 0
|
||||
let default_count = profiles.iter().filter(|p| p.group_id.is_none()).count();
|
||||
let default_group = GroupWithCount {
|
||||
id: "default".to_string(),
|
||||
name: "Default".to_string(),
|
||||
count: default_count,
|
||||
};
|
||||
// Insert at the beginning for consistent ordering with UI expectations
|
||||
result.insert(0, default_group);
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
}
|
||||
|
||||
// Global instance
// Process-wide singleton; the Mutex serializes all group mutations.
lazy_static::lazy_static! {
  pub static ref GROUP_MANAGER: Mutex<GroupManager> = Mutex::new(GroupManager::new());
}
|
||||
|
||||
// Helper function to get groups with counts
|
||||
pub fn get_groups_with_counts(profiles: &[crate::profile::BrowserProfile]) -> Vec<GroupWithCount> {
|
||||
let group_manager = GROUP_MANAGER.lock().unwrap();
|
||||
group_manager
|
||||
.get_groups_with_profile_counts(profiles)
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
// Tauri commands
|
||||
#[tauri::command]
|
||||
pub async fn get_profile_groups() -> Result<Vec<ProfileGroup>, String> {
|
||||
let group_manager = GROUP_MANAGER.lock().unwrap();
|
||||
group_manager
|
||||
.get_all_groups()
|
||||
.map_err(|e| format!("Failed to get profile groups: {e}"))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_groups_with_profile_counts() -> Result<Vec<GroupWithCount>, String> {
|
||||
let profile_manager = crate::profile::ProfileManager::instance();
|
||||
let profiles = profile_manager
|
||||
.list_profiles()
|
||||
.map_err(|e| format!("Failed to list profiles: {e}"))?;
|
||||
Ok(get_groups_with_counts(&profiles))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn create_profile_group(
|
||||
app_handle: tauri::AppHandle,
|
||||
name: String,
|
||||
) -> Result<ProfileGroup, String> {
|
||||
let group_manager = GROUP_MANAGER.lock().unwrap();
|
||||
group_manager
|
||||
.create_group(&app_handle, name)
|
||||
.map_err(|e| format!("Failed to create group: {e}"))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn update_profile_group(
|
||||
app_handle: tauri::AppHandle,
|
||||
group_id: String,
|
||||
name: String,
|
||||
) -> Result<ProfileGroup, String> {
|
||||
let group_manager = GROUP_MANAGER.lock().unwrap();
|
||||
group_manager
|
||||
.update_group(&app_handle, group_id, name)
|
||||
.map_err(|e| format!("Failed to update group: {e}"))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn delete_profile_group(
|
||||
app_handle: tauri::AppHandle,
|
||||
group_id: String,
|
||||
) -> Result<(), String> {
|
||||
let group_manager = GROUP_MANAGER.lock().unwrap();
|
||||
group_manager
|
||||
.delete_group(&app_handle, group_id)
|
||||
.map_err(|e| format!("Failed to delete group: {e}"))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn assign_profiles_to_group(
|
||||
app_handle: tauri::AppHandle,
|
||||
profile_ids: Vec<String>,
|
||||
group_id: Option<String>,
|
||||
) -> Result<(), String> {
|
||||
let profile_manager = crate::profile::ProfileManager::instance();
|
||||
profile_manager
|
||||
.assign_profiles_to_group(&app_handle, profile_ids, group_id)
|
||||
.map_err(|e| format!("Failed to assign profiles to group: {e}"))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn delete_selected_profiles(
|
||||
app_handle: tauri::AppHandle,
|
||||
profile_ids: Vec<String>,
|
||||
) -> Result<(), String> {
|
||||
let profile_manager = crate::profile::ProfileManager::instance();
|
||||
profile_manager
|
||||
.delete_multiple_profiles(&app_handle, profile_ids)
|
||||
.map_err(|e| format!("Failed to delete profiles: {e}"))
|
||||
}
|
||||
+461
-80
@@ -1,4 +1,5 @@
|
||||
// Learn more about Tauri commands at https://tauri.app/develop/calling-rust/
|
||||
use std::env;
|
||||
use std::sync::Mutex;
|
||||
use tauri::{Emitter, Manager, Runtime, WebviewUrl, WebviewWindow, WebviewWindowBuilder};
|
||||
use tauri_plugin_deep_link::DeepLinkExt;
|
||||
@@ -7,49 +8,65 @@ use tauri_plugin_deep_link::DeepLinkExt;
|
||||
static PENDING_URLS: Mutex<Vec<String>> = Mutex::new(Vec::new());
|
||||
|
||||
mod api_client;
|
||||
mod api_server;
|
||||
mod app_auto_updater;
|
||||
mod auto_updater;
|
||||
mod browser;
|
||||
mod browser_runner;
|
||||
mod browser_version_service;
|
||||
mod browser_version_manager;
|
||||
mod camoufox_manager;
|
||||
mod default_browser;
|
||||
mod download;
|
||||
mod downloaded_browsers;
|
||||
mod downloaded_browsers_registry;
|
||||
mod downloader;
|
||||
mod extraction;
|
||||
mod geoip_downloader;
|
||||
mod group_manager;
|
||||
mod platform_browser;
|
||||
mod profile;
|
||||
mod profile_importer;
|
||||
mod proxy_manager;
|
||||
mod settings_manager;
|
||||
mod theme_detector;
|
||||
// mod theme_detector; // removed: theme detection handled in webview via CSS prefers-color-scheme
|
||||
mod tag_manager;
|
||||
mod version_updater;
|
||||
|
||||
extern crate lazy_static;
|
||||
|
||||
use browser_runner::{
|
||||
check_browser_exists, check_browser_status, create_browser_profile_new, delete_profile,
|
||||
download_browser, fetch_browser_versions_cached_first, fetch_browser_versions_with_count,
|
||||
fetch_browser_versions_with_count_cached_first, get_downloaded_browser_versions,
|
||||
get_supported_browsers, is_browser_supported_on_platform, kill_browser_profile,
|
||||
launch_browser_profile, list_browser_profiles, rename_profile, update_profile_proxy,
|
||||
update_profile_version,
|
||||
check_browser_exists, kill_browser_profile, launch_browser_profile, open_url_with_profile,
|
||||
};
|
||||
|
||||
use profile::manager::{
|
||||
check_browser_status, create_browser_profile_new, delete_profile, list_browser_profiles,
|
||||
rename_profile, update_camoufox_config, update_profile_proxy, update_profile_tags,
|
||||
};
|
||||
|
||||
use browser_version_manager::{
|
||||
fetch_browser_versions_cached_first, fetch_browser_versions_with_count,
|
||||
fetch_browser_versions_with_count_cached_first, get_supported_browsers,
|
||||
is_browser_supported_on_platform,
|
||||
};
|
||||
|
||||
use downloaded_browsers_registry::{
|
||||
check_missing_binaries, ensure_all_binaries_exist, get_downloaded_browser_versions,
|
||||
};
|
||||
|
||||
use downloader::download_browser;
|
||||
|
||||
use settings_manager::{
|
||||
clear_all_version_cache_and_refetch, get_app_settings, get_table_sorting_settings,
|
||||
save_app_settings, save_table_sorting_settings, should_show_settings_on_startup,
|
||||
get_app_settings, get_table_sorting_settings, save_app_settings, save_table_sorting_settings,
|
||||
should_show_settings_on_startup,
|
||||
};
|
||||
|
||||
use default_browser::{
|
||||
is_default_browser, open_url_with_profile, set_as_default_browser, smart_open_url,
|
||||
};
|
||||
use tag_manager::get_all_tags;
|
||||
|
||||
use default_browser::{is_default_browser, set_as_default_browser};
|
||||
|
||||
use version_updater::{
|
||||
get_version_update_status, get_version_updater, trigger_manual_version_update,
|
||||
clear_all_version_cache_and_refetch, get_version_update_status, get_version_updater,
|
||||
trigger_manual_version_update,
|
||||
};
|
||||
|
||||
use auto_updater::{
|
||||
check_for_browser_updates, complete_browser_update_with_auto_update, dismiss_update_notification,
|
||||
is_auto_update_download, is_browser_disabled_for_update, mark_auto_update_download,
|
||||
remove_auto_update_download,
|
||||
};
|
||||
|
||||
use app_auto_updater::{
|
||||
@@ -58,7 +75,16 @@ use app_auto_updater::{
|
||||
|
||||
use profile_importer::{detect_existing_profiles, import_browser_profile};
|
||||
|
||||
use theme_detector::get_system_theme;
|
||||
use group_manager::{
|
||||
assign_profiles_to_group, create_profile_group, delete_profile_group, delete_selected_profiles,
|
||||
get_groups_with_profile_counts, get_profile_groups, update_profile_group,
|
||||
};
|
||||
|
||||
use geoip_downloader::{check_missing_geoip_database, GeoIPDownloader};
|
||||
|
||||
use browser_version_manager::get_browser_release_types;
|
||||
|
||||
use api_server::{get_api_server_status, start_api_server, stop_api_server};
|
||||
|
||||
// Trait to extend WebviewWindow with transparent titlebar functionality
|
||||
pub trait WindowExt {
|
||||
@@ -105,26 +131,51 @@ impl<R: Runtime> WindowExt for WebviewWindow<R> {
|
||||
}
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
async fn warm_up_nodecar(app: tauri::AppHandle) -> Result<(), String> {
|
||||
use tauri_plugin_shell::ShellExt;
|
||||
use tokio::time::{timeout, Duration};
|
||||
|
||||
let start_time = std::time::Instant::now();
|
||||
|
||||
// Use sidecar to execute a fast, harmless command that ensures the binary is loaded
|
||||
let cmd = app
|
||||
.shell()
|
||||
.sidecar("nodecar")
|
||||
.map_err(|e| format!("Failed to create nodecar sidecar: {e}"))?
|
||||
.arg("help");
|
||||
|
||||
let exec_future = async { cmd.output().await };
|
||||
match timeout(Duration::from_secs(120), exec_future).await {
|
||||
Ok(Ok(_output)) => {
|
||||
let duration = start_time.elapsed();
|
||||
println!(
|
||||
"Nodecar warm-up (frontend-triggered) completed in {:.2}s",
|
||||
duration.as_secs_f64()
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
Ok(Err(e)) => Err(format!("Failed to execute nodecar for warm-up: {e}")),
|
||||
Err(_) => Err("Nodecar warm-up timed out after 120s".to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
async fn handle_url_open(app: tauri::AppHandle, url: String) -> Result<(), String> {
|
||||
println!("handle_url_open called with URL: {url}");
|
||||
|
||||
// Check if the main window exists and is ready
|
||||
if let Some(window) = app.get_webview_window("main") {
|
||||
if window.is_visible().unwrap_or(false) {
|
||||
// Window is visible, emit event directly
|
||||
println!("Main window is visible, emitting show-profile-selector event");
|
||||
app
|
||||
.emit("show-profile-selector", url.clone())
|
||||
.map_err(|e| format!("Failed to emit URL open event: {e}"))?;
|
||||
let _ = window.show();
|
||||
let _ = window.set_focus();
|
||||
} else {
|
||||
// Window not visible yet - add to pending URLs
|
||||
println!("Main window not visible, adding URL to pending list");
|
||||
let mut pending = PENDING_URLS.lock().unwrap();
|
||||
pending.push(url);
|
||||
}
|
||||
println!("Main window exists");
|
||||
|
||||
// Try to show and focus the window first
|
||||
let _ = window.show();
|
||||
let _ = window.set_focus();
|
||||
let _ = window.unminimize();
|
||||
|
||||
app
|
||||
.emit("show-profile-selector", url.clone())
|
||||
.map_err(|e| format!("Failed to emit URL open event: {e}"))?;
|
||||
} else {
|
||||
// Window doesn't exist yet - add to pending URLs
|
||||
println!("Main window doesn't exist, adding URL to pending list");
|
||||
@@ -136,41 +187,73 @@ async fn handle_url_open(app: tauri::AppHandle, url: String) -> Result<(), Strin
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
async fn check_and_handle_startup_url(app_handle: tauri::AppHandle) -> Result<bool, String> {
|
||||
let pending_urls = {
|
||||
let mut pending = PENDING_URLS.lock().unwrap();
|
||||
let urls = pending.clone();
|
||||
pending.clear(); // Clear after getting them
|
||||
urls
|
||||
};
|
||||
async fn create_stored_proxy(
|
||||
app_handle: tauri::AppHandle,
|
||||
name: String,
|
||||
proxy_settings: crate::browser::ProxySettings,
|
||||
) -> Result<crate::proxy_manager::StoredProxy, String> {
|
||||
crate::proxy_manager::PROXY_MANAGER
|
||||
.create_stored_proxy(&app_handle, name, proxy_settings)
|
||||
.map_err(|e| format!("Failed to create stored proxy: {e}"))
|
||||
}
|
||||
|
||||
if !pending_urls.is_empty() {
|
||||
println!(
|
||||
"Handling {} pending URLs from frontend request",
|
||||
pending_urls.len()
|
||||
);
|
||||
#[tauri::command]
|
||||
async fn get_stored_proxies() -> Result<Vec<crate::proxy_manager::StoredProxy>, String> {
|
||||
Ok(crate::proxy_manager::PROXY_MANAGER.get_stored_proxies())
|
||||
}
|
||||
|
||||
for url in pending_urls {
|
||||
println!("Emitting show-profile-selector event for URL: {url}");
|
||||
if let Err(e) = app_handle.emit("show-profile-selector", url.clone()) {
|
||||
eprintln!("Failed to emit URL event: {e}");
|
||||
return Err(format!("Failed to emit URL event: {e}"));
|
||||
}
|
||||
}
|
||||
#[tauri::command]
|
||||
async fn update_stored_proxy(
|
||||
app_handle: tauri::AppHandle,
|
||||
proxy_id: String,
|
||||
name: Option<String>,
|
||||
proxy_settings: Option<crate::browser::ProxySettings>,
|
||||
) -> Result<crate::proxy_manager::StoredProxy, String> {
|
||||
crate::proxy_manager::PROXY_MANAGER
|
||||
.update_stored_proxy(&app_handle, &proxy_id, name, proxy_settings)
|
||||
.map_err(|e| format!("Failed to update stored proxy: {e}"))
|
||||
}
|
||||
|
||||
return Ok(true);
|
||||
}
|
||||
#[tauri::command]
|
||||
async fn delete_stored_proxy(app_handle: tauri::AppHandle, proxy_id: String) -> Result<(), String> {
|
||||
crate::proxy_manager::PROXY_MANAGER
|
||||
.delete_stored_proxy(&app_handle, &proxy_id)
|
||||
.map_err(|e| format!("Failed to delete stored proxy: {e}"))
|
||||
}
|
||||
|
||||
Ok(false)
|
||||
#[tauri::command]
|
||||
async fn is_geoip_database_available() -> Result<bool, String> {
|
||||
Ok(GeoIPDownloader::is_geoip_database_available())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
async fn download_geoip_database(app_handle: tauri::AppHandle) -> Result<(), String> {
|
||||
let downloader = GeoIPDownloader::instance();
|
||||
downloader
|
||||
.download_geoip_database(&app_handle)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to download GeoIP database: {e}"))
|
||||
}
|
||||
|
||||
#[cfg_attr(mobile, tauri::mobile_entry_point)]
|
||||
pub fn run() {
|
||||
let args: Vec<String> = env::args().collect();
|
||||
let startup_url = args.iter().find(|arg| arg.starts_with("http")).cloned();
|
||||
|
||||
if let Some(url) = startup_url.clone() {
|
||||
println!("Found startup URL in command line: {url}");
|
||||
let mut pending = PENDING_URLS.lock().unwrap();
|
||||
pending.push(url.clone());
|
||||
}
|
||||
|
||||
tauri::Builder::default()
|
||||
.plugin(tauri_plugin_single_instance::init(|_, args, _cwd| {
|
||||
println!("Single instance triggered with args: {args:?}");
|
||||
}))
|
||||
.plugin(tauri_plugin_deep_link::init())
|
||||
.plugin(tauri_plugin_fs::init())
|
||||
.plugin(tauri_plugin_opener::init())
|
||||
.plugin(tauri_plugin_shell::init())
|
||||
.plugin(tauri_plugin_deep_link::init())
|
||||
.plugin(tauri_plugin_dialog::init())
|
||||
.plugin(tauri_plugin_macos_permissions::init())
|
||||
.setup(|app| {
|
||||
@@ -180,7 +263,10 @@ pub fn run() {
|
||||
.title("Donut Browser")
|
||||
.inner_size(900.0, 600.0)
|
||||
.resizable(false)
|
||||
.fullscreen(false);
|
||||
.fullscreen(false)
|
||||
.center()
|
||||
.focused(true)
|
||||
.visible(true);
|
||||
|
||||
#[allow(unused_variables)]
|
||||
let window = win_builder.build().unwrap();
|
||||
@@ -199,16 +285,27 @@ pub fn run() {
|
||||
#[cfg(any(windows, target_os = "linux"))]
|
||||
{
|
||||
// For Windows and Linux, register all deep links at runtime for development
|
||||
app.deep_link().register_all()?;
|
||||
if let Err(e) = app.deep_link().register_all() {
|
||||
eprintln!("Failed to register deep links: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
// On macOS, try to register deep links for development builds
|
||||
if let Err(e) = app.deep_link().register_all() {
|
||||
eprintln!(
|
||||
"Note: Deep link registration failed on macOS (this is normal for production): {e}"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Handle deep links - this works for both scenarios:
|
||||
// 1. App is running and URL is opened
|
||||
// 2. App is not running and URL causes app to launch
|
||||
app.deep_link().on_open_url({
|
||||
let handle = handle.clone();
|
||||
move |event| {
|
||||
let urls = event.urls();
|
||||
println!("Deep link event received with {} URLs", urls.len());
|
||||
|
||||
for url in urls {
|
||||
let url_string = url.to_string();
|
||||
println!("Deep link received: {url_string}");
|
||||
@@ -226,27 +323,90 @@ pub fn run() {
|
||||
}
|
||||
});
|
||||
|
||||
if let Some(startup_url) = startup_url {
|
||||
let handle_clone = handle.clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
println!("Processing startup URL from command line: {startup_url}");
|
||||
if let Err(e) = handle_url_open(handle_clone, startup_url.clone()).await {
|
||||
eprintln!("Failed to handle startup URL: {e}");
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Initialize and start background version updater
|
||||
let app_handle = app.handle().clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
let version_updater = get_version_updater();
|
||||
let mut updater_guard = version_updater.lock().await;
|
||||
|
||||
// Set the app handle
|
||||
updater_guard.set_app_handle(app_handle).await;
|
||||
{
|
||||
let mut updater_guard = version_updater.lock().await;
|
||||
updater_guard.set_app_handle(app_handle);
|
||||
}
|
||||
|
||||
// Start the background updates
|
||||
updater_guard.start_background_updates().await;
|
||||
// Run startup check without holding the lock
|
||||
{
|
||||
let updater_guard = version_updater.lock().await;
|
||||
if let Err(e) = updater_guard.start_background_updates().await {
|
||||
eprintln!("Failed to start background updates: {e}");
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Start the background update task separately
|
||||
tauri::async_runtime::spawn(async move {
|
||||
version_updater::VersionUpdater::run_background_task().await;
|
||||
});
|
||||
|
||||
let app_handle_auto_updater = app.handle().clone();
|
||||
|
||||
// Start the auto-update check task separately
|
||||
tauri::async_runtime::spawn(async move {
|
||||
auto_updater::check_for_updates_with_progress(app_handle_auto_updater).await;
|
||||
});
|
||||
|
||||
// Handle any pending URLs that were received before the window was ready
|
||||
let handle_pending = handle.clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
// Wait a bit for the window to be fully ready
|
||||
tokio::time::sleep(tokio::time::Duration::from_millis(1000)).await;
|
||||
|
||||
let pending_urls = {
|
||||
let mut pending = PENDING_URLS.lock().unwrap();
|
||||
let urls = pending.clone();
|
||||
pending.clear();
|
||||
urls
|
||||
};
|
||||
|
||||
for url in pending_urls {
|
||||
println!("Processing pending URL: {url}");
|
||||
if let Err(e) = handle_url_open(handle_pending.clone(), url).await {
|
||||
eprintln!("Failed to handle pending URL: {e}");
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Start periodic cleanup task for unused binaries
|
||||
tauri::async_runtime::spawn(async move {
|
||||
let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(43200)); // Every 12 hours
|
||||
|
||||
loop {
|
||||
interval.tick().await;
|
||||
|
||||
let registry =
|
||||
crate::downloaded_browsers_registry::DownloadedBrowsersRegistry::instance();
|
||||
if let Err(e) = registry.cleanup_unused_binaries() {
|
||||
eprintln!("Periodic cleanup failed: {e}");
|
||||
} else {
|
||||
println!("Periodic cleanup completed successfully");
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Check for app updates at startup
|
||||
let app_handle_update = app.handle().clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
// Add a small delay to ensure the app is fully loaded
|
||||
tokio::time::sleep(tokio::time::Duration::from_secs(3)).await;
|
||||
|
||||
println!("Starting app update check at startup...");
|
||||
let updater = app_auto_updater::AppAutoUpdater::new();
|
||||
let updater = app_auto_updater::AppAutoUpdater::instance();
|
||||
match updater.check_for_updates().await {
|
||||
Ok(Some(update_info)) => {
|
||||
println!(
|
||||
@@ -269,6 +429,211 @@ pub fn run() {
|
||||
}
|
||||
});
|
||||
|
||||
// Start Camoufox cleanup task
|
||||
let _app_handle_cleanup = app.handle().clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
let camoufox_manager = crate::camoufox_manager::CamoufoxManager::instance();
|
||||
let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(5));
|
||||
|
||||
loop {
|
||||
interval.tick().await;
|
||||
|
||||
match camoufox_manager.cleanup_dead_instances().await {
|
||||
Ok(_) => {
|
||||
// Cleanup completed silently
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Error during Camoufox cleanup: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Check and download GeoIP database at startup if needed
|
||||
let app_handle_geoip = app.handle().clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
// Wait a bit for the app to fully initialize
|
||||
tokio::time::sleep(tokio::time::Duration::from_secs(2)).await;
|
||||
|
||||
let geoip_downloader = crate::geoip_downloader::GeoIPDownloader::instance();
|
||||
match geoip_downloader.check_missing_geoip_database() {
|
||||
Ok(true) => {
|
||||
println!("GeoIP database is missing for Camoufox profiles, downloading at startup...");
|
||||
let geoip_downloader = GeoIPDownloader::instance();
|
||||
if let Err(e) = geoip_downloader
|
||||
.download_geoip_database(&app_handle_geoip)
|
||||
.await
|
||||
{
|
||||
eprintln!("Failed to download GeoIP database at startup: {e}");
|
||||
} else {
|
||||
println!("GeoIP database downloaded successfully at startup");
|
||||
}
|
||||
}
|
||||
Ok(false) => {
|
||||
// No Camoufox profiles or GeoIP database already available
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Failed to check GeoIP database status at startup: {e}");
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Start proxy cleanup task for dead browser processes
|
||||
let app_handle_proxy_cleanup = app.handle().clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(30));
|
||||
|
||||
loop {
|
||||
interval.tick().await;
|
||||
|
||||
match crate::proxy_manager::PROXY_MANAGER
|
||||
.cleanup_dead_proxies(app_handle_proxy_cleanup.clone())
|
||||
.await
|
||||
{
|
||||
Ok(dead_pids) => {
|
||||
if !dead_pids.is_empty() {
|
||||
println!(
|
||||
"Cleaned up proxies for {} dead browser processes",
|
||||
dead_pids.len()
|
||||
);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Error during proxy cleanup: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Periodically broadcast browser running status to the frontend
|
||||
let app_handle_status = app.handle().clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
let mut interval = tokio::time::interval(tokio::time::Duration::from_millis(500));
|
||||
let mut last_running_states: std::collections::HashMap<String, bool> =
|
||||
std::collections::HashMap::new();
|
||||
|
||||
loop {
|
||||
interval.tick().await;
|
||||
|
||||
let runner = crate::browser_runner::BrowserRunner::instance();
|
||||
// If listing profiles fails, skip this tick
|
||||
let profiles = match runner.profile_manager.list_profiles() {
|
||||
Ok(p) => p,
|
||||
Err(e) => {
|
||||
println!("Warning: Failed to list profiles in status checker: {e}");
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
for profile in profiles {
|
||||
// Check browser status and track changes
|
||||
match runner
|
||||
.check_browser_status(app_handle_status.clone(), &profile)
|
||||
.await
|
||||
{
|
||||
Ok(is_running) => {
|
||||
let profile_id = profile.id.to_string();
|
||||
let last_state = last_running_states
|
||||
.get(&profile_id)
|
||||
.copied()
|
||||
.unwrap_or(false);
|
||||
|
||||
// Only emit event if state actually changed
|
||||
if last_state != is_running {
|
||||
println!(
|
||||
"Status checker detected change for profile {}: {} -> {}",
|
||||
profile.name, last_state, is_running
|
||||
);
|
||||
|
||||
#[derive(serde::Serialize)]
|
||||
struct RunningChangedPayload {
|
||||
id: String,
|
||||
is_running: bool,
|
||||
}
|
||||
|
||||
let payload = RunningChangedPayload {
|
||||
id: profile_id.clone(),
|
||||
is_running,
|
||||
};
|
||||
|
||||
if let Err(e) = app_handle_status.emit("profile-running-changed", &payload) {
|
||||
println!("Warning: Failed to emit profile running changed event: {e}");
|
||||
} else {
|
||||
println!(
|
||||
"Status checker emitted profile-running-changed event for {}: running={}",
|
||||
profile.name, is_running
|
||||
);
|
||||
}
|
||||
|
||||
last_running_states.insert(profile_id, is_running);
|
||||
} else {
|
||||
// Update the state even if unchanged to ensure we have it tracked
|
||||
last_running_states.insert(profile_id, is_running);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
println!(
|
||||
"Warning: Status check failed for profile {}: {}",
|
||||
profile.name, e
|
||||
);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Nodecar warm-up is now triggered from the frontend to allow UI blocking overlay
|
||||
|
||||
// Start API server if enabled in settings
|
||||
let app_handle_api = app.handle().clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
match crate::settings_manager::get_app_settings(app_handle_api.clone()).await {
|
||||
Ok(settings) => {
|
||||
if settings.api_enabled {
|
||||
println!("API is enabled in settings, starting API server...");
|
||||
match crate::api_server::start_api_server_internal(settings.api_port, &app_handle_api)
|
||||
.await
|
||||
{
|
||||
Ok(port) => {
|
||||
println!("API server started successfully on port {port}");
|
||||
// Emit success toast to frontend
|
||||
if let Err(e) = app_handle_api.emit(
|
||||
"show-toast",
|
||||
crate::api_server::ToastPayload {
|
||||
message: "API server started successfully".to_string(),
|
||||
variant: "success".to_string(),
|
||||
title: "Local API Started".to_string(),
|
||||
description: Some(format!("API server running on port {port}")),
|
||||
},
|
||||
) {
|
||||
eprintln!("Failed to emit API start toast: {e}");
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Failed to start API server at startup: {e}");
|
||||
// Emit error toast to frontend
|
||||
if let Err(toast_err) = app_handle_api.emit(
|
||||
"show-toast",
|
||||
crate::api_server::ToastPayload {
|
||||
message: "Failed to start API server".to_string(),
|
||||
variant: "error".to_string(),
|
||||
title: "Failed to Start Local API".to_string(),
|
||||
description: Some(format!("Error: {e}")),
|
||||
},
|
||||
) {
|
||||
eprintln!("Failed to emit API error toast: {toast_err}");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Failed to load app settings for API startup: {e}");
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
Ok(())
|
||||
})
|
||||
.invoke_handler(tauri::generate_handler![
|
||||
@@ -284,8 +649,10 @@ pub fn run() {
|
||||
fetch_browser_versions_cached_first,
|
||||
fetch_browser_versions_with_count_cached_first,
|
||||
get_downloaded_browser_versions,
|
||||
get_all_tags,
|
||||
get_browser_release_types,
|
||||
update_profile_proxy,
|
||||
update_profile_version,
|
||||
update_profile_tags,
|
||||
check_browser_status,
|
||||
kill_browser_profile,
|
||||
rename_profile,
|
||||
@@ -298,23 +665,37 @@ pub fn run() {
|
||||
is_default_browser,
|
||||
open_url_with_profile,
|
||||
set_as_default_browser,
|
||||
smart_open_url,
|
||||
check_and_handle_startup_url,
|
||||
trigger_manual_version_update,
|
||||
get_version_update_status,
|
||||
check_for_browser_updates,
|
||||
is_browser_disabled_for_update,
|
||||
dismiss_update_notification,
|
||||
complete_browser_update_with_auto_update,
|
||||
mark_auto_update_download,
|
||||
remove_auto_update_download,
|
||||
is_auto_update_download,
|
||||
check_for_app_updates,
|
||||
check_for_app_updates_manual,
|
||||
download_and_install_app_update,
|
||||
get_system_theme,
|
||||
detect_existing_profiles,
|
||||
import_browser_profile,
|
||||
check_missing_binaries,
|
||||
check_missing_geoip_database,
|
||||
ensure_all_binaries_exist,
|
||||
create_stored_proxy,
|
||||
get_stored_proxies,
|
||||
update_stored_proxy,
|
||||
delete_stored_proxy,
|
||||
update_camoufox_config,
|
||||
get_profile_groups,
|
||||
get_groups_with_profile_counts,
|
||||
create_profile_group,
|
||||
update_profile_group,
|
||||
delete_profile_group,
|
||||
assign_profiles_to_group,
|
||||
delete_selected_profiles,
|
||||
is_geoip_database_available,
|
||||
download_geoip_database,
|
||||
warm_up_nodecar,
|
||||
start_api_server,
|
||||
stop_api_server,
|
||||
get_api_server_status
|
||||
])
|
||||
.run(tauri::generate_context!())
|
||||
.expect("error while running tauri application");
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,5 @@
|
||||
pub mod manager;
|
||||
pub mod types;
|
||||
|
||||
pub use manager::ProfileManager;
|
||||
pub use types::BrowserProfile;
|
||||
@@ -0,0 +1,36 @@
|
||||
use crate::camoufox_manager::CamoufoxConfig;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct BrowserProfile {
|
||||
pub id: uuid::Uuid,
|
||||
pub name: String,
|
||||
pub browser: String,
|
||||
pub version: String,
|
||||
#[serde(default)]
|
||||
pub proxy_id: Option<String>, // Reference to stored proxy
|
||||
#[serde(default)]
|
||||
pub process_id: Option<u32>,
|
||||
#[serde(default)]
|
||||
pub last_launch: Option<u64>,
|
||||
#[serde(default = "default_release_type")]
|
||||
pub release_type: String, // "stable" or "nightly"
|
||||
#[serde(default)]
|
||||
pub camoufox_config: Option<CamoufoxConfig>, // Camoufox configuration
|
||||
#[serde(default)]
|
||||
pub group_id: Option<String>, // Reference to profile group
|
||||
#[serde(default)]
|
||||
pub tags: Vec<String>, // Free-form tags
|
||||
}
|
||||
|
||||
pub fn default_release_type() -> String {
|
||||
"stable".to_string()
|
||||
}
|
||||
|
||||
impl BrowserProfile {
|
||||
/// Get the path to the profile data directory (profiles/{uuid}/profile)
|
||||
pub fn get_profile_data_path(&self, profiles_dir: &Path) -> PathBuf {
|
||||
profiles_dir.join(self.id.to_string()).join("profile")
|
||||
}
|
||||
}
|
||||
+256
-178
@@ -5,7 +5,8 @@ use std::fs::{self, create_dir_all};
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use crate::browser::BrowserType;
|
||||
use crate::browser_runner::BrowserRunner;
|
||||
use crate::downloaded_browsers_registry::DownloadedBrowsersRegistry;
|
||||
use crate::profile::ProfileManager;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct DetectedProfile {
|
||||
@@ -17,17 +18,23 @@ pub struct DetectedProfile {
|
||||
|
||||
pub struct ProfileImporter {
|
||||
base_dirs: BaseDirs,
|
||||
browser_runner: BrowserRunner,
|
||||
downloaded_browsers_registry: &'static DownloadedBrowsersRegistry,
|
||||
profile_manager: &'static ProfileManager,
|
||||
}
|
||||
|
||||
impl ProfileImporter {
|
||||
pub fn new() -> Self {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
base_dirs: BaseDirs::new().expect("Failed to get base directories"),
|
||||
browser_runner: BrowserRunner::new(),
|
||||
downloaded_browsers_registry: DownloadedBrowsersRegistry::instance(),
|
||||
profile_manager: ProfileManager::instance(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn instance() -> &'static ProfileImporter {
|
||||
&PROFILE_IMPORTER
|
||||
}
|
||||
|
||||
/// Detect existing browser profiles on the system
|
||||
pub fn detect_existing_profiles(
|
||||
&self,
|
||||
@@ -49,14 +56,11 @@ impl ProfileImporter {
|
||||
// Detect Chromium profiles
|
||||
detected_profiles.extend(self.detect_chromium_profiles()?);
|
||||
|
||||
// Detect Mullvad Browser profiles
|
||||
detected_profiles.extend(self.detect_mullvad_browser_profiles()?);
|
||||
|
||||
// Detect Zen Browser profiles
|
||||
detected_profiles.extend(self.detect_zen_browser_profiles()?);
|
||||
|
||||
// Detect TOR Browser profiles
|
||||
detected_profiles.extend(self.detect_tor_browser_profiles()?);
|
||||
// NOTE: Mullvad and Tor Browser profile imports are no longer supported.
|
||||
// We intentionally do not detect these profiles to avoid offering them in the UI.
|
||||
|
||||
// Remove duplicates based on path
|
||||
let mut seen_paths = HashSet::new();
|
||||
@@ -240,45 +244,6 @@ impl ProfileImporter {
|
||||
Ok(profiles)
|
||||
}
|
||||
|
||||
/// Detect Mullvad Browser profiles
|
||||
fn detect_mullvad_browser_profiles(
|
||||
&self,
|
||||
) -> Result<Vec<DetectedProfile>, Box<dyn std::error::Error>> {
|
||||
let mut profiles = Vec::new();
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
let mullvad_dir = self
|
||||
.base_dirs
|
||||
.home_dir()
|
||||
.join("Library/Application Support/MullvadBrowser/Profiles");
|
||||
profiles.extend(self.scan_firefox_profiles_dir(&mullvad_dir, "mullvad-browser")?);
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
// Primary location in AppData\Roaming
|
||||
let app_data = self.base_dirs.data_dir();
|
||||
let mullvad_dir = app_data.join("MullvadBrowser/Profiles");
|
||||
profiles.extend(self.scan_firefox_profiles_dir(&mullvad_dir, "mullvad-browser")?);
|
||||
|
||||
// Also check common installation locations
|
||||
let local_app_data = self.base_dirs.data_local_dir();
|
||||
let mullvad_local_dir = local_app_data.join("MullvadBrowser/Profiles");
|
||||
if mullvad_local_dir.exists() {
|
||||
profiles.extend(self.scan_firefox_profiles_dir(&mullvad_local_dir, "mullvad-browser")?);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
let mullvad_dir = self.base_dirs.home_dir().join(".mullvad-browser");
|
||||
profiles.extend(self.scan_firefox_profiles_dir(&mullvad_dir, "mullvad-browser")?);
|
||||
}
|
||||
|
||||
Ok(profiles)
|
||||
}
|
||||
|
||||
/// Detect Zen Browser profiles
|
||||
fn detect_zen_browser_profiles(
|
||||
&self,
|
||||
@@ -310,107 +275,6 @@ impl ProfileImporter {
|
||||
Ok(profiles)
|
||||
}
|
||||
|
||||
/// Detect TOR Browser profiles
|
||||
fn detect_tor_browser_profiles(
|
||||
&self,
|
||||
) -> Result<Vec<DetectedProfile>, Box<dyn std::error::Error>> {
|
||||
let mut profiles = Vec::new();
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
// TOR Browser on macOS is typically in Applications
|
||||
let tor_dir = self
|
||||
.base_dirs
|
||||
.home_dir()
|
||||
.join("Library/Application Support/TorBrowser-Data/Browser/profile.default");
|
||||
|
||||
if tor_dir.exists() {
|
||||
profiles.push(DetectedProfile {
|
||||
browser: "tor-browser".to_string(),
|
||||
name: "TOR Browser - Default Profile".to_string(),
|
||||
path: tor_dir.to_string_lossy().to_string(),
|
||||
description: "Default TOR Browser profile".to_string(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
// Check common TOR Browser installation locations on Windows
|
||||
let possible_paths = [
|
||||
// Default installation in user directory
|
||||
(
|
||||
"Desktop",
|
||||
"Desktop/Tor Browser/Browser/TorBrowser/Data/Browser/profile.default",
|
||||
),
|
||||
// AppData locations
|
||||
(
|
||||
"AppData/Roaming",
|
||||
"TorBrowser/Browser/TorBrowser/Data/Browser/profile.default",
|
||||
),
|
||||
(
|
||||
"AppData/Local",
|
||||
"TorBrowser/Browser/TorBrowser/Data/Browser/profile.default",
|
||||
),
|
||||
];
|
||||
|
||||
let home_dir = self.base_dirs.home_dir();
|
||||
|
||||
for (location_name, relative_path) in &possible_paths {
|
||||
let tor_dir = home_dir.join(relative_path);
|
||||
if tor_dir.exists() {
|
||||
profiles.push(DetectedProfile {
|
||||
browser: "tor-browser".to_string(),
|
||||
name: format!("TOR Browser - {} Profile", location_name),
|
||||
path: tor_dir.to_string_lossy().to_string(),
|
||||
description: format!("TOR Browser profile from {}", location_name),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Also check AppData directories if available
|
||||
let app_data = self.base_dirs.data_dir();
|
||||
let tor_app_data =
|
||||
app_data.join("TorBrowser/Browser/TorBrowser/Data/Browser/profile.default");
|
||||
if tor_app_data.exists() {
|
||||
profiles.push(DetectedProfile {
|
||||
browser: "tor-browser".to_string(),
|
||||
name: "TOR Browser - AppData Profile".to_string(),
|
||||
path: tor_app_data.to_string_lossy().to_string(),
|
||||
description: "TOR Browser profile from AppData".to_string(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
// Common TOR Browser locations on Linux
|
||||
let possible_paths = [
|
||||
".local/share/torbrowser/tbb/x86_64/tor-browser_en-US/Browser/TorBrowser/Data/Browser/profile.default",
|
||||
"tor-browser_en-US/Browser/TorBrowser/Data/Browser/profile.default",
|
||||
".tor-browser/Browser/TorBrowser/Data/Browser/profile.default",
|
||||
"Downloads/tor-browser_en-US/Browser/TorBrowser/Data/Browser/profile.default",
|
||||
];
|
||||
|
||||
let home_dir = self.base_dirs.home_dir();
|
||||
|
||||
for relative_path in &possible_paths {
|
||||
let tor_dir = home_dir.join(relative_path);
|
||||
if tor_dir.exists() {
|
||||
profiles.push(DetectedProfile {
|
||||
browser: "tor-browser".to_string(),
|
||||
name: "TOR Browser - Default Profile".to_string(),
|
||||
path: tor_dir.to_string_lossy().to_string(),
|
||||
description: "TOR Browser profile".to_string(),
|
||||
});
|
||||
break; // Only add the first one found to avoid duplicates
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(profiles)
|
||||
}
|
||||
|
||||
/// Scan Firefox-style profiles directory
|
||||
fn scan_firefox_profiles_dir(
|
||||
&self,
|
||||
@@ -645,6 +509,11 @@ impl ProfileImporter {
|
||||
browser_type: &str,
|
||||
new_profile_name: &str,
|
||||
) -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Disable imports for Mullvad and Tor browsers
|
||||
if browser_type == "mullvad-browser" || browser_type == "tor-browser" {
|
||||
return Err("Importing Mullvad Browser or Tor Browser profiles is not supported".into());
|
||||
}
|
||||
|
||||
// Validate that source path exists
|
||||
let source_path = Path::new(source_path);
|
||||
if !source_path.exists() {
|
||||
@@ -656,7 +525,7 @@ impl ProfileImporter {
|
||||
.map_err(|_| format!("Invalid browser type: {browser_type}"))?;
|
||||
|
||||
// Check if a profile with this name already exists
|
||||
let existing_profiles = self.browser_runner.list_profiles()?;
|
||||
let existing_profiles = self.profile_manager.list_profiles()?;
|
||||
if existing_profiles
|
||||
.iter()
|
||||
.any(|p| p.name.to_lowercase() == new_profile_name.to_lowercase())
|
||||
@@ -664,32 +533,38 @@ impl ProfileImporter {
|
||||
return Err(format!("Profile with name '{new_profile_name}' already exists").into());
|
||||
}
|
||||
|
||||
// Create the new profile directory
|
||||
let snake_case_name = new_profile_name.to_lowercase().replace(' ', "_");
|
||||
let profiles_dir = self.browser_runner.get_profiles_dir();
|
||||
let new_profile_path = profiles_dir.join(&snake_case_name);
|
||||
// Generate UUID for new profile and create the directory structure
|
||||
let profile_id = uuid::Uuid::new_v4();
|
||||
let profiles_dir = self.profile_manager.get_profiles_dir();
|
||||
let new_profile_uuid_dir = profiles_dir.join(profile_id.to_string());
|
||||
let new_profile_data_dir = new_profile_uuid_dir.join("profile");
|
||||
|
||||
create_dir_all(&new_profile_path)?;
|
||||
create_dir_all(&new_profile_uuid_dir)?;
|
||||
create_dir_all(&new_profile_data_dir)?;
|
||||
|
||||
// Copy all files from source to destination
|
||||
Self::copy_directory_recursive(source_path, &new_profile_path)?;
|
||||
// Copy all files from source to destination profile subdirectory
|
||||
Self::copy_directory_recursive(source_path, &new_profile_data_dir)?;
|
||||
|
||||
// Create the profile metadata without overwriting the imported data
|
||||
// We need to find a suitable version for this browser type
|
||||
let available_versions = self.get_default_version_for_browser(browser_type)?;
|
||||
|
||||
let profile = crate::browser_runner::BrowserProfile {
|
||||
let profile = crate::profile::BrowserProfile {
|
||||
id: profile_id,
|
||||
name: new_profile_name.to_string(),
|
||||
browser: browser_type.to_string(),
|
||||
version: available_versions,
|
||||
profile_path: new_profile_path.to_string_lossy().to_string(),
|
||||
proxy: None,
|
||||
proxy_id: None,
|
||||
process_id: None,
|
||||
last_launch: None,
|
||||
release_type: "stable".to_string(),
|
||||
camoufox_config: None,
|
||||
group_id: None,
|
||||
tags: Vec::new(),
|
||||
};
|
||||
|
||||
// Save the profile metadata
|
||||
self.browser_runner.save_profile(&profile)?;
|
||||
self.profile_manager.save_profile(&profile)?;
|
||||
|
||||
println!(
|
||||
"Successfully imported profile '{}' from '{}'",
|
||||
@@ -705,26 +580,21 @@ impl ProfileImporter {
|
||||
&self,
|
||||
browser_type: &str,
|
||||
) -> Result<String, Box<dyn std::error::Error>> {
|
||||
// Try to get a downloaded version first, fallback to a reasonable default
|
||||
let registry =
|
||||
crate::downloaded_browsers::DownloadedBrowsersRegistry::load().unwrap_or_default();
|
||||
let downloaded_versions = registry.get_downloaded_versions(browser_type);
|
||||
// Check if any version of the browser is downloaded
|
||||
let downloaded_versions = self
|
||||
.downloaded_browsers_registry
|
||||
.get_downloaded_versions(browser_type);
|
||||
|
||||
if let Some(version) = downloaded_versions.first() {
|
||||
return Ok(version.clone());
|
||||
}
|
||||
|
||||
// If no downloaded versions, return a sensible default
|
||||
match browser_type {
|
||||
"firefox" => Ok("latest".to_string()),
|
||||
"firefox-developer" => Ok("latest".to_string()),
|
||||
"chromium" => Ok("latest".to_string()),
|
||||
"brave" => Ok("latest".to_string()),
|
||||
"zen" => Ok("latest".to_string()),
|
||||
"mullvad-browser" => Ok("13.5.16".to_string()), // Mullvad Browser common version
|
||||
"tor-browser" => Ok("latest".to_string()),
|
||||
_ => Ok("latest".to_string()),
|
||||
}
|
||||
// If no downloaded versions found, return an error
|
||||
Err(format!(
|
||||
"No downloaded versions found for browser '{}'. Please download a version of {} first before importing profiles.",
|
||||
browser_type,
|
||||
self.get_browser_display_name(browser_type)
|
||||
).into())
|
||||
}
|
||||
|
||||
/// Recursively copy directory contents
|
||||
@@ -755,7 +625,7 @@ impl ProfileImporter {
|
||||
// Tauri commands
|
||||
#[tauri::command]
|
||||
pub async fn detect_existing_profiles() -> Result<Vec<DetectedProfile>, String> {
|
||||
let importer = ProfileImporter::new();
|
||||
let importer = ProfileImporter::instance();
|
||||
importer
|
||||
.detect_existing_profiles()
|
||||
.map_err(|e| format!("Failed to detect existing profiles: {e}"))
|
||||
@@ -767,8 +637,216 @@ pub async fn import_browser_profile(
|
||||
browser_type: String,
|
||||
new_profile_name: String,
|
||||
) -> Result<(), String> {
|
||||
let importer = ProfileImporter::new();
|
||||
let importer = ProfileImporter::instance();
|
||||
importer
|
||||
.import_profile(&source_path, &browser_type, &new_profile_name)
|
||||
.map_err(|e| format!("Failed to import profile: {e}"))
|
||||
}
|
||||
|
||||
// Global singleton instance
|
||||
lazy_static::lazy_static! {
|
||||
static ref PROFILE_IMPORTER: ProfileImporter = ProfileImporter::new();
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::env;
|
||||
use tempfile::TempDir;
|
||||
|
||||
fn create_test_profile_importer() -> (ProfileImporter, TempDir) {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp directory");
|
||||
|
||||
// Set up a temporary home directory for testing
|
||||
env::set_var("HOME", temp_dir.path());
|
||||
|
||||
let importer = ProfileImporter::new();
|
||||
(importer, temp_dir)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_profile_importer_creation() {
|
||||
let (_importer, _temp_dir) = create_test_profile_importer();
|
||||
// Test passes if no panic occurs
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_browser_display_name() {
|
||||
let (importer, _temp_dir) = create_test_profile_importer();
|
||||
|
||||
assert_eq!(importer.get_browser_display_name("firefox"), "Firefox");
|
||||
assert_eq!(
|
||||
importer.get_browser_display_name("firefox-developer"),
|
||||
"Firefox Developer"
|
||||
);
|
||||
assert_eq!(
|
||||
importer.get_browser_display_name("chromium"),
|
||||
"Chrome/Chromium"
|
||||
);
|
||||
assert_eq!(importer.get_browser_display_name("brave"), "Brave");
|
||||
assert_eq!(
|
||||
importer.get_browser_display_name("mullvad-browser"),
|
||||
"Mullvad Browser"
|
||||
);
|
||||
assert_eq!(importer.get_browser_display_name("zen"), "Zen Browser");
|
||||
assert_eq!(
|
||||
importer.get_browser_display_name("tor-browser"),
|
||||
"Tor Browser"
|
||||
);
|
||||
assert_eq!(
|
||||
importer.get_browser_display_name("unknown"),
|
||||
"Unknown Browser"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_detect_existing_profiles_no_panic() {
|
||||
let (importer, _temp_dir) = create_test_profile_importer();
|
||||
|
||||
// This should not panic even if no browser profiles exist
|
||||
let result = importer.detect_existing_profiles();
|
||||
assert!(result.is_ok(), "detect_existing_profiles should not fail");
|
||||
|
||||
let _profiles = result.unwrap();
|
||||
// We can't assert specific profiles since they depend on the system
|
||||
// but we can verify the result is a valid Vec
|
||||
// We can't assert specific profiles since they depend on the system
|
||||
// but we can verify the result is a valid Vec (length check is always true for Vec, but shows intent)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_scan_firefox_profiles_dir_nonexistent() {
|
||||
let (importer, temp_dir) = create_test_profile_importer();
|
||||
|
||||
let nonexistent_dir = temp_dir.path().join("nonexistent");
|
||||
let result = importer.scan_firefox_profiles_dir(&nonexistent_dir, "firefox");
|
||||
|
||||
assert!(
|
||||
result.is_ok(),
|
||||
"Should handle nonexistent directory gracefully"
|
||||
);
|
||||
let profiles = result.unwrap();
|
||||
assert!(
|
||||
profiles.is_empty(),
|
||||
"Should return empty vector for nonexistent directory"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_scan_chrome_profiles_dir_nonexistent() {
|
||||
let (importer, temp_dir) = create_test_profile_importer();
|
||||
|
||||
let nonexistent_dir = temp_dir.path().join("nonexistent");
|
||||
let result = importer.scan_chrome_profiles_dir(&nonexistent_dir, "chromium");
|
||||
|
||||
assert!(
|
||||
result.is_ok(),
|
||||
"Should handle nonexistent directory gracefully"
|
||||
);
|
||||
let profiles = result.unwrap();
|
||||
assert!(
|
||||
profiles.is_empty(),
|
||||
"Should return empty vector for nonexistent directory"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_firefox_profiles_ini_empty() {
|
||||
let (importer, _temp_dir) = create_test_profile_importer();
|
||||
|
||||
let empty_content = "";
|
||||
let profiles_dir = Path::new("/tmp");
|
||||
let result = importer.parse_firefox_profiles_ini(empty_content, profiles_dir, "firefox");
|
||||
|
||||
assert!(result.is_ok(), "Should handle empty profiles.ini");
|
||||
let profiles = result.unwrap();
|
||||
assert!(
|
||||
profiles.is_empty(),
|
||||
"Should return empty vector for empty content"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_firefox_profiles_ini_valid() {
|
||||
let (importer, temp_dir) = create_test_profile_importer();
|
||||
|
||||
// Create a mock profile directory
|
||||
let profiles_dir = temp_dir.path().join("profiles");
|
||||
let profile_dir = profiles_dir.join("test.profile");
|
||||
fs::create_dir_all(&profile_dir).expect("Should create profile directory");
|
||||
|
||||
// Create a prefs.js file to make it look like a valid profile
|
||||
let prefs_file = profile_dir.join("prefs.js");
|
||||
fs::write(&prefs_file, "// Firefox preferences").expect("Should create prefs.js");
|
||||
|
||||
let profiles_ini_content = r#"
|
||||
[Profile0]
|
||||
Name=Test Profile
|
||||
IsRelative=1
|
||||
Path=test.profile
|
||||
"#;
|
||||
|
||||
let result =
|
||||
importer.parse_firefox_profiles_ini(profiles_ini_content, &profiles_dir, "firefox");
|
||||
|
||||
assert!(result.is_ok(), "Should parse valid profiles.ini");
|
||||
let profiles = result.unwrap();
|
||||
assert_eq!(profiles.len(), 1, "Should find one profile");
|
||||
assert_eq!(profiles[0].name, "Firefox - Test Profile");
|
||||
assert_eq!(profiles[0].browser, "firefox");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_copy_directory_recursive() {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp directory");
|
||||
|
||||
// Create source directory structure
|
||||
let source_dir = temp_dir.path().join("source");
|
||||
let source_subdir = source_dir.join("subdir");
|
||||
fs::create_dir_all(&source_subdir).expect("Should create source directories");
|
||||
|
||||
// Create some test files
|
||||
let source_file1 = source_dir.join("file1.txt");
|
||||
let source_file2 = source_subdir.join("file2.txt");
|
||||
fs::write(&source_file1, "content1").expect("Should create file1");
|
||||
fs::write(&source_file2, "content2").expect("Should create file2");
|
||||
|
||||
// Create destination directory
|
||||
let dest_dir = temp_dir.path().join("dest");
|
||||
|
||||
// Copy recursively
|
||||
let result = ProfileImporter::copy_directory_recursive(&source_dir, &dest_dir);
|
||||
assert!(result.is_ok(), "Should copy directory successfully");
|
||||
|
||||
// Verify files were copied
|
||||
let dest_file1 = dest_dir.join("file1.txt");
|
||||
let dest_file2 = dest_dir.join("subdir").join("file2.txt");
|
||||
|
||||
assert!(dest_file1.exists(), "file1.txt should be copied");
|
||||
assert!(dest_file2.exists(), "file2.txt should be copied");
|
||||
|
||||
let content1 = fs::read_to_string(&dest_file1).expect("Should read file1");
|
||||
let content2 = fs::read_to_string(&dest_file2).expect("Should read file2");
|
||||
|
||||
assert_eq!(content1, "content1", "file1 content should match");
|
||||
assert_eq!(content2, "content2", "file2 content should match");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_default_version_for_browser_no_versions() {
|
||||
let (importer, _temp_dir) = create_test_profile_importer();
|
||||
|
||||
// This should fail since no versions are downloaded in test environment
|
||||
let result = importer.get_default_version_for_browser("firefox");
|
||||
assert!(
|
||||
result.is_err(),
|
||||
"Should fail when no versions are available"
|
||||
);
|
||||
|
||||
let error_msg = result.unwrap_err().to_string();
|
||||
assert!(
|
||||
error_msg.contains("No downloaded versions found"),
|
||||
"Error should mention no versions found"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
+515
-193
@@ -1,7 +1,11 @@
|
||||
use directories::BaseDirs;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Mutex;
|
||||
use tauri::Emitter;
|
||||
use tauri_plugin_shell::ShellExt;
|
||||
|
||||
use crate::browser::ProxySettings;
|
||||
@@ -15,90 +19,387 @@ pub struct ProxyInfo {
|
||||
pub upstream_port: u16,
|
||||
pub upstream_type: String,
|
||||
pub local_port: u16,
|
||||
// Optional profile name to which this proxy instance is logically tied
|
||||
pub profile_name: Option<String>,
|
||||
}
|
||||
|
||||
// Global proxy manager to track active proxies
|
||||
// Stored proxy configuration with name and ID for reuse
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct StoredProxy {
|
||||
pub id: String,
|
||||
pub name: String,
|
||||
pub proxy_settings: ProxySettings,
|
||||
}
|
||||
|
||||
impl StoredProxy {
|
||||
pub fn new(name: String, proxy_settings: ProxySettings) -> Self {
|
||||
Self {
|
||||
id: uuid::Uuid::new_v4().to_string(),
|
||||
name,
|
||||
proxy_settings,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn update_settings(&mut self, proxy_settings: ProxySettings) {
|
||||
self.proxy_settings = proxy_settings;
|
||||
}
|
||||
|
||||
pub fn update_name(&mut self, name: String) {
|
||||
self.name = name;
|
||||
}
|
||||
}
|
||||
|
||||
// Global proxy manager to track active proxies and stored proxy configurations
|
||||
pub struct ProxyManager {
|
||||
active_proxies: Mutex<HashMap<u32, ProxyInfo>>, // Maps browser process ID to proxy info
|
||||
// Store proxy info by profile name for persistence across browser restarts
|
||||
profile_proxies: Mutex<HashMap<String, ProxySettings>>, // Maps profile name to proxy settings
|
||||
// Track active proxy IDs by profile name for targeted cleanup
|
||||
profile_active_proxy_ids: Mutex<HashMap<String, String>>, // Maps profile name to proxy id
|
||||
stored_proxies: Mutex<HashMap<String, StoredProxy>>, // Maps proxy ID to stored proxy
|
||||
base_dirs: BaseDirs,
|
||||
}
|
||||
|
||||
impl ProxyManager {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
let base_dirs = BaseDirs::new().expect("Failed to get base directories");
|
||||
let manager = Self {
|
||||
active_proxies: Mutex::new(HashMap::new()),
|
||||
profile_proxies: Mutex::new(HashMap::new()),
|
||||
profile_active_proxy_ids: Mutex::new(HashMap::new()),
|
||||
stored_proxies: Mutex::new(HashMap::new()),
|
||||
base_dirs,
|
||||
};
|
||||
|
||||
// Load stored proxies on initialization
|
||||
if let Err(e) = manager.load_stored_proxies() {
|
||||
eprintln!("Warning: Failed to load stored proxies: {e}");
|
||||
}
|
||||
|
||||
manager
|
||||
}
|
||||
|
||||
// Start a proxy for given proxy settings and associate it with a browser process ID
|
||||
pub async fn start_proxy(
|
||||
&self,
|
||||
app_handle: tauri::AppHandle,
|
||||
proxy_settings: &ProxySettings,
|
||||
browser_pid: u32,
|
||||
profile_name: Option<&str>,
|
||||
) -> Result<ProxySettings, String> {
|
||||
// Check if we already have a proxy for this browser
|
||||
{
|
||||
let proxies = self.active_proxies.lock().unwrap();
|
||||
if let Some(proxy) = proxies.get(&browser_pid) {
|
||||
return Ok(ProxySettings {
|
||||
enabled: true,
|
||||
proxy_type: proxy.upstream_type.clone(),
|
||||
host: "127.0.0.1".to_string(), // Use 127.0.0.1 instead of localhost for better compatibility
|
||||
port: proxy.local_port,
|
||||
username: None,
|
||||
password: None,
|
||||
});
|
||||
// Get the path to the proxies directory
|
||||
fn get_proxies_dir(&self) -> PathBuf {
|
||||
let mut path = self.base_dirs.data_local_dir().to_path_buf();
|
||||
path.push(if cfg!(debug_assertions) {
|
||||
"DonutBrowserDev"
|
||||
} else {
|
||||
"DonutBrowser"
|
||||
});
|
||||
path.push("proxies");
|
||||
path
|
||||
}
|
||||
|
||||
// Get the path to a specific proxy file
|
||||
fn get_proxy_file_path(&self, proxy_id: &str) -> PathBuf {
|
||||
self.get_proxies_dir().join(format!("{proxy_id}.json"))
|
||||
}
|
||||
|
||||
// Load stored proxies from disk
|
||||
fn load_stored_proxies(&self) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let proxies_dir = self.get_proxies_dir();
|
||||
|
||||
if !proxies_dir.exists() {
|
||||
return Ok(()); // No proxies directory yet
|
||||
}
|
||||
|
||||
let mut stored_proxies = self.stored_proxies.lock().unwrap();
|
||||
|
||||
// Read all JSON files from the proxies directory
|
||||
for entry in fs::read_dir(&proxies_dir)? {
|
||||
let entry = entry?;
|
||||
let path = entry.path();
|
||||
|
||||
if path.extension().is_some_and(|ext| ext == "json") {
|
||||
let content = fs::read_to_string(&path)?;
|
||||
let proxy: StoredProxy = serde_json::from_str(&content)?;
|
||||
stored_proxies.insert(proxy.id.clone(), proxy);
|
||||
}
|
||||
}
|
||||
|
||||
// Check if we have a preferred port for this profile
|
||||
let preferred_port = if let Some(name) = profile_name {
|
||||
let profile_proxies = self.profile_proxies.lock().unwrap();
|
||||
profile_proxies.get(name).and_then(|settings| {
|
||||
// Find existing proxy with same settings to reuse port
|
||||
let active_proxies = self.active_proxies.lock().unwrap();
|
||||
active_proxies
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Save a single proxy to disk
|
||||
fn save_proxy(&self, proxy: &StoredProxy) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let proxies_dir = self.get_proxies_dir();
|
||||
|
||||
// Ensure directory exists
|
||||
fs::create_dir_all(&proxies_dir)?;
|
||||
|
||||
let proxy_file = self.get_proxy_file_path(&proxy.id);
|
||||
let content = serde_json::to_string_pretty(proxy)?;
|
||||
fs::write(&proxy_file, content)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Delete a proxy file from disk
|
||||
fn delete_proxy_file(&self, proxy_id: &str) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let proxy_file = self.get_proxy_file_path(proxy_id);
|
||||
if proxy_file.exists() {
|
||||
fs::remove_file(proxy_file)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Create a new stored proxy
|
||||
pub fn create_stored_proxy(
|
||||
&self,
|
||||
app_handle: &tauri::AppHandle,
|
||||
name: String,
|
||||
proxy_settings: ProxySettings,
|
||||
) -> Result<StoredProxy, String> {
|
||||
// Check if name already exists
|
||||
{
|
||||
let stored_proxies = self.stored_proxies.lock().unwrap();
|
||||
if stored_proxies.values().any(|p| p.name == name) {
|
||||
return Err(format!("Proxy with name '{name}' already exists"));
|
||||
}
|
||||
}
|
||||
|
||||
let stored_proxy = StoredProxy::new(name, proxy_settings);
|
||||
|
||||
{
|
||||
let mut stored_proxies = self.stored_proxies.lock().unwrap();
|
||||
stored_proxies.insert(stored_proxy.id.clone(), stored_proxy.clone());
|
||||
}
|
||||
|
||||
if let Err(e) = self.save_proxy(&stored_proxy) {
|
||||
eprintln!("Warning: Failed to save proxy: {e}");
|
||||
}
|
||||
|
||||
// Emit event for reactive UI updates
|
||||
if let Err(e) = app_handle.emit("proxies-changed", ()) {
|
||||
eprintln!("Failed to emit proxies-changed event: {e}");
|
||||
}
|
||||
|
||||
Ok(stored_proxy)
|
||||
}
|
||||
|
||||
// Get all stored proxies
|
||||
// Return all stored proxies, sorted case-insensitively by name so UI and
// API consumers see a stable ordering.
pub fn get_stored_proxies(&self) -> Vec<StoredProxy> {
  let guard = self.stored_proxies.lock().unwrap();
  let mut proxies: Vec<StoredProxy> = guard.values().cloned().collect();
  proxies.sort_by(|a, b| a.name.to_lowercase().cmp(&b.name.to_lowercase()));
  proxies
}
|
||||
|
||||
// Get a stored proxy by ID
|
||||
|
||||
// Update a stored proxy
|
||||
pub fn update_stored_proxy(
|
||||
&self,
|
||||
app_handle: &tauri::AppHandle,
|
||||
proxy_id: &str,
|
||||
name: Option<String>,
|
||||
proxy_settings: Option<ProxySettings>,
|
||||
) -> Result<StoredProxy, String> {
|
||||
// First, check for conflicts without holding a mutable reference
|
||||
{
|
||||
let stored_proxies = self.stored_proxies.lock().unwrap();
|
||||
|
||||
// Check if proxy exists
|
||||
if !stored_proxies.contains_key(proxy_id) {
|
||||
return Err(format!("Proxy with ID '{proxy_id}' not found"));
|
||||
}
|
||||
|
||||
// Check if new name conflicts with existing proxies
|
||||
if let Some(ref new_name) = name {
|
||||
if stored_proxies
|
||||
.values()
|
||||
.find(|p| {
|
||||
p.upstream_host == settings.host
|
||||
&& p.upstream_port == settings.port
|
||||
&& p.upstream_type == settings.proxy_type
|
||||
})
|
||||
.map(|p| p.local_port)
|
||||
})
|
||||
} else {
|
||||
None
|
||||
.any(|p| p.id != proxy_id && p.name == *new_name)
|
||||
{
|
||||
return Err(format!("Proxy with name '{new_name}' already exists"));
|
||||
}
|
||||
}
|
||||
} // Release the lock here
|
||||
|
||||
// Now get mutable access for updates
|
||||
let updated_proxy = {
|
||||
let mut stored_proxies = self.stored_proxies.lock().unwrap();
|
||||
let stored_proxy = stored_proxies.get_mut(proxy_id).unwrap(); // Safe because we checked above
|
||||
|
||||
if let Some(new_name) = name {
|
||||
stored_proxy.update_name(new_name);
|
||||
}
|
||||
|
||||
if let Some(new_settings) = proxy_settings {
|
||||
stored_proxy.update_settings(new_settings);
|
||||
}
|
||||
|
||||
stored_proxy.clone()
|
||||
};
|
||||
|
||||
if let Err(e) = self.save_proxy(&updated_proxy) {
|
||||
eprintln!("Warning: Failed to save proxy: {e}");
|
||||
}
|
||||
|
||||
// Emit event for reactive UI updates
|
||||
if let Err(e) = app_handle.emit("proxies-changed", ()) {
|
||||
eprintln!("Failed to emit proxies-changed event: {e}");
|
||||
}
|
||||
|
||||
Ok(updated_proxy)
|
||||
}
|
||||
|
||||
// Delete a stored proxy
|
||||
pub fn delete_stored_proxy(
|
||||
&self,
|
||||
app_handle: &tauri::AppHandle,
|
||||
proxy_id: &str,
|
||||
) -> Result<(), String> {
|
||||
{
|
||||
let mut stored_proxies = self.stored_proxies.lock().unwrap();
|
||||
if stored_proxies.remove(proxy_id).is_none() {
|
||||
return Err(format!("Proxy with ID '{proxy_id}' not found"));
|
||||
}
|
||||
}
|
||||
|
||||
if let Err(e) = self.delete_proxy_file(proxy_id) {
|
||||
eprintln!("Warning: Failed to delete proxy file: {e}");
|
||||
}
|
||||
|
||||
// Emit event for reactive UI updates
|
||||
if let Err(e) = app_handle.emit("proxies-changed", ()) {
|
||||
eprintln!("Failed to emit proxies-changed event: {e}");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Get proxy settings for a stored proxy ID
|
||||
// Look up the settings of a stored proxy by its id, if one exists.
pub fn get_proxy_settings_by_id(&self, proxy_id: &str) -> Option<ProxySettings> {
  let guard = self.stored_proxies.lock().unwrap();
  let settings = guard.get(proxy_id)?.proxy_settings.clone();
  Some(settings)
}
|
||||
|
||||
// Start a proxy for given proxy settings and associate it with a browser process ID
|
||||
// If proxy_settings is None, starts a direct proxy for traffic monitoring
|
||||
pub async fn start_proxy(
|
||||
&self,
|
||||
app_handle: tauri::AppHandle,
|
||||
proxy_settings: Option<&ProxySettings>,
|
||||
browser_pid: u32,
|
||||
profile_name: Option<&str>,
|
||||
) -> Result<ProxySettings, String> {
|
||||
// First, proactively cleanup any dead proxies so we don't accidentally reuse stale ones
|
||||
let _ = self.cleanup_dead_proxies(app_handle.clone()).await;
|
||||
|
||||
// If we have a previous proxy tied to this profile, and the upstream settings are changing,
|
||||
// stop it before starting a new one so the change takes effect immediately.
|
||||
if let Some(name) = profile_name {
|
||||
// Check if we have an active proxy recorded for this profile
|
||||
let maybe_existing_id = {
|
||||
let map = self.profile_active_proxy_ids.lock().unwrap();
|
||||
map.get(name).cloned()
|
||||
};
|
||||
|
||||
if let Some(existing_id) = maybe_existing_id {
|
||||
// Find the existing proxy info
|
||||
let existing_info = {
|
||||
let proxies = self.active_proxies.lock().unwrap();
|
||||
proxies.values().find(|p| p.id == existing_id).cloned()
|
||||
};
|
||||
|
||||
if let Some(existing) = existing_info {
|
||||
let desired_type = proxy_settings
|
||||
.map(|p| p.proxy_type.as_str())
|
||||
.unwrap_or("DIRECT");
|
||||
let desired_host = proxy_settings.map(|p| p.host.as_str()).unwrap_or("DIRECT");
|
||||
let desired_port = proxy_settings.map(|p| p.port).unwrap_or(0);
|
||||
|
||||
let is_same_upstream = existing.upstream_type == desired_type
|
||||
&& existing.upstream_host == desired_host
|
||||
&& existing.upstream_port == desired_port;
|
||||
|
||||
if !is_same_upstream {
|
||||
// Stop the previous proxy tied to this profile (best effort)
|
||||
// We don't know the original PID mapping that created it; iterate to find its key
|
||||
let pid_to_stop = {
|
||||
let proxies = self.active_proxies.lock().unwrap();
|
||||
proxies.iter().find_map(|(pid, info)| {
|
||||
if info.id == existing_id {
|
||||
Some(*pid)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
};
|
||||
if let Some(pid) = pid_to_stop {
|
||||
let _ = self.stop_proxy(app_handle.clone(), pid).await;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// Check if we already have a proxy for this browser PID. If it exists but the upstream
|
||||
// settings don't match the newly requested ones, stop it and create a new proxy so that
|
||||
// changes take effect immediately.
|
||||
let mut needs_restart = false;
|
||||
{
|
||||
let proxies = self.active_proxies.lock().unwrap();
|
||||
if let Some(existing) = proxies.get(&browser_pid) {
|
||||
let desired_type = proxy_settings
|
||||
.map(|p| p.proxy_type.as_str())
|
||||
.unwrap_or("DIRECT");
|
||||
let desired_host = proxy_settings.map(|p| p.host.as_str()).unwrap_or("DIRECT");
|
||||
let desired_port = proxy_settings.map(|p| p.port).unwrap_or(0);
|
||||
|
||||
let is_same_upstream = existing.upstream_type == desired_type
|
||||
&& existing.upstream_host == desired_host
|
||||
&& existing.upstream_port == desired_port;
|
||||
|
||||
if is_same_upstream {
|
||||
// Reuse existing local proxy
|
||||
return Ok(ProxySettings {
|
||||
proxy_type: "http".to_string(),
|
||||
host: "127.0.0.1".to_string(),
|
||||
port: existing.local_port,
|
||||
username: None,
|
||||
password: None,
|
||||
});
|
||||
} else {
|
||||
// Upstream changed; we must restart the local proxy so that traffic is routed correctly
|
||||
needs_restart = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if needs_restart {
|
||||
// Best-effort stop of the old proxy for this PID before starting a new one
|
||||
let _ = self.stop_proxy(app_handle.clone(), browser_pid).await;
|
||||
}
|
||||
|
||||
// Start a new proxy using the nodecar binary with the correct CLI interface
|
||||
let mut nodecar = app_handle
|
||||
.shell()
|
||||
.sidecar("nodecar")
|
||||
.map_err(|e| format!("Failed to create sidecar: {e}"))?
|
||||
.arg("proxy")
|
||||
.arg("start")
|
||||
.arg("--host")
|
||||
.arg(&proxy_settings.host)
|
||||
.arg("--proxy-port")
|
||||
.arg(proxy_settings.port.to_string())
|
||||
.arg("--type")
|
||||
.arg(&proxy_settings.proxy_type);
|
||||
.arg("start");
|
||||
|
||||
// Add credentials if provided
|
||||
if let Some(username) = &proxy_settings.username {
|
||||
nodecar = nodecar.arg("--username").arg(username);
|
||||
}
|
||||
if let Some(password) = &proxy_settings.password {
|
||||
nodecar = nodecar.arg("--password").arg(password);
|
||||
}
|
||||
// Add upstream proxy settings if provided, otherwise create direct proxy
|
||||
if let Some(proxy_settings) = proxy_settings {
|
||||
nodecar = nodecar
|
||||
.arg("--host")
|
||||
.arg(&proxy_settings.host)
|
||||
.arg("--proxy-port")
|
||||
.arg(proxy_settings.port.to_string())
|
||||
.arg("--type")
|
||||
.arg(&proxy_settings.proxy_type);
|
||||
|
||||
// If we have a preferred port, use it
|
||||
if let Some(port) = preferred_port {
|
||||
nodecar = nodecar.arg("--port").arg(port.to_string());
|
||||
// Add credentials if provided
|
||||
if let Some(username) = &proxy_settings.username {
|
||||
nodecar = nodecar.arg("--username").arg(username);
|
||||
}
|
||||
if let Some(password) = &proxy_settings.password {
|
||||
nodecar = nodecar.arg("--password").arg(password);
|
||||
}
|
||||
}
|
||||
|
||||
// Execute the command and wait for it to complete
|
||||
@@ -134,12 +435,41 @@ impl ProxyManager {
|
||||
let proxy_info = ProxyInfo {
|
||||
id: id.to_string(),
|
||||
local_url,
|
||||
upstream_host: proxy_settings.host.clone(),
|
||||
upstream_port: proxy_settings.port,
|
||||
upstream_type: proxy_settings.proxy_type.clone(),
|
||||
upstream_host: proxy_settings
|
||||
.map(|p| p.host.clone())
|
||||
.unwrap_or_else(|| "DIRECT".to_string()),
|
||||
upstream_port: proxy_settings.map(|p| p.port).unwrap_or(0),
|
||||
upstream_type: proxy_settings
|
||||
.map(|p| p.proxy_type.clone())
|
||||
.unwrap_or_else(|| "DIRECT".to_string()),
|
||||
local_port,
|
||||
profile_name: profile_name.map(|s| s.to_string()),
|
||||
};
|
||||
|
||||
// Wait for the local proxy port to be ready to accept connections
|
||||
{
|
||||
use tokio::net::TcpStream;
|
||||
use tokio::time::{sleep, Duration};
|
||||
let mut ready = false;
|
||||
for _ in 0..50 {
|
||||
match TcpStream::connect((std::net::Ipv4Addr::LOCALHOST, proxy_info.local_port)).await {
|
||||
Ok(_stream) => {
|
||||
ready = true;
|
||||
break;
|
||||
}
|
||||
Err(_) => {
|
||||
sleep(Duration::from_millis(100)).await;
|
||||
}
|
||||
}
|
||||
}
|
||||
if !ready {
|
||||
return Err(format!(
|
||||
"Local proxy on 127.0.0.1:{} did not become ready in time",
|
||||
proxy_info.local_port
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
// Store the proxy info
|
||||
{
|
||||
let mut proxies = self.active_proxies.lock().unwrap();
|
||||
@@ -148,13 +478,17 @@ impl ProxyManager {
|
||||
|
||||
// Store the profile proxy info for persistence
|
||||
if let Some(name) = profile_name {
|
||||
let mut profile_proxies = self.profile_proxies.lock().unwrap();
|
||||
profile_proxies.insert(name.to_string(), proxy_settings.clone());
|
||||
if let Some(proxy_settings) = proxy_settings {
|
||||
let mut profile_proxies = self.profile_proxies.lock().unwrap();
|
||||
profile_proxies.insert(name.to_string(), proxy_settings.clone());
|
||||
}
|
||||
// Also record the active proxy id for this profile for quick cleanup on changes
|
||||
let mut map = self.profile_active_proxy_ids.lock().unwrap();
|
||||
map.insert(name.to_string(), proxy_info.id.clone());
|
||||
}
|
||||
|
||||
// Return proxy settings for the browser
|
||||
Ok(ProxySettings {
|
||||
enabled: true,
|
||||
proxy_type: "http".to_string(),
|
||||
host: "127.0.0.1".to_string(), // Use 127.0.0.1 instead of localhost for better compatibility
|
||||
port: proxy_info.local_port,
|
||||
@@ -169,10 +503,10 @@ impl ProxyManager {
|
||||
app_handle: tauri::AppHandle,
|
||||
browser_pid: u32,
|
||||
) -> Result<(), String> {
|
||||
let proxy_id = {
|
||||
let (proxy_id, profile_name): (String, Option<String>) = {
|
||||
let mut proxies = self.active_proxies.lock().unwrap();
|
||||
match proxies.remove(&browser_pid) {
|
||||
Some(proxy) => proxy.id,
|
||||
Some(proxy) => (proxy.id, proxy.profile_name.clone()),
|
||||
None => return Ok(()), // No proxy to stop
|
||||
}
|
||||
};
|
||||
@@ -185,7 +519,7 @@ impl ProxyManager {
|
||||
.arg("proxy")
|
||||
.arg("stop")
|
||||
.arg("--id")
|
||||
.arg(proxy_id);
|
||||
.arg(&proxy_id);
|
||||
|
||||
let output = nodecar.output().await.unwrap();
|
||||
|
||||
@@ -195,28 +529,24 @@ impl ProxyManager {
|
||||
// We still return Ok since we've already removed the proxy from our tracking
|
||||
}
|
||||
|
||||
// Clear profile-to-proxy mapping if it references this proxy
|
||||
if let Some(name) = profile_name {
|
||||
let mut map = self.profile_active_proxy_ids.lock().unwrap();
|
||||
if let Some(current_id) = map.get(&name) {
|
||||
if current_id == &proxy_id {
|
||||
map.remove(&name);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Emit event for reactive UI updates
|
||||
if let Err(e) = app_handle.emit("proxies-changed", ()) {
|
||||
eprintln!("Failed to emit proxies-changed event: {e}");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Get proxy settings for a browser process ID
|
||||
pub fn get_proxy_settings(&self, browser_pid: u32) -> Option<ProxySettings> {
|
||||
let proxies = self.active_proxies.lock().unwrap();
|
||||
proxies.get(&browser_pid).map(|proxy| ProxySettings {
|
||||
enabled: true,
|
||||
proxy_type: "http".to_string(),
|
||||
host: "127.0.0.1".to_string(), // Use 127.0.0.1 instead of localhost for better compatibility
|
||||
port: proxy.local_port,
|
||||
username: None,
|
||||
password: None,
|
||||
})
|
||||
}
|
||||
|
||||
// Get stored proxy info for a profile
|
||||
// Return the upstream proxy settings remembered for a profile, if any.
pub fn get_profile_proxy_info(&self, profile_name: &str) -> Option<ProxySettings> {
  self
    .profile_proxies
    .lock()
    .unwrap()
    .get(profile_name)
    .cloned()
}
|
||||
|
||||
// Update the PID mapping for an existing proxy
|
||||
pub fn update_proxy_pid(&self, old_pid: u32, new_pid: u32) -> Result<(), String> {
|
||||
let mut proxies = self.active_proxies.lock().unwrap();
|
||||
@@ -227,6 +557,40 @@ impl ProxyManager {
|
||||
Err(format!("No proxy found for PID {old_pid}"))
|
||||
}
|
||||
}
|
||||
|
||||
// Check if a process is still running
|
||||
fn is_process_running(&self, pid: u32) -> bool {
|
||||
use sysinfo::{Pid, System};
|
||||
let system = System::new_all();
|
||||
system.process(Pid::from(pid as usize)).is_some()
|
||||
}
|
||||
|
||||
// Clean up proxies for dead browser processes
|
||||
pub async fn cleanup_dead_proxies(
|
||||
&self,
|
||||
app_handle: tauri::AppHandle,
|
||||
) -> Result<Vec<u32>, String> {
|
||||
let dead_pids = {
|
||||
let proxies = self.active_proxies.lock().unwrap();
|
||||
proxies
|
||||
.keys()
|
||||
.filter(|&&pid| pid != 0 && !self.is_process_running(pid)) // Skip temporary PID 0
|
||||
.copied()
|
||||
.collect::<Vec<u32>>()
|
||||
};
|
||||
|
||||
for dead_pid in &dead_pids {
|
||||
println!("Cleaning up proxy for dead browser process PID: {dead_pid}");
|
||||
let _ = self.stop_proxy(app_handle.clone(), *dead_pid).await;
|
||||
}
|
||||
|
||||
// Emit event for reactive UI updates
|
||||
if let Err(e) = app_handle.emit("proxies-changed", ()) {
|
||||
eprintln!("Failed to emit proxies-changed event: {e}");
|
||||
}
|
||||
|
||||
Ok(dead_pids)
|
||||
}
|
||||
}
|
||||
|
||||
// Create a singleton instance of the proxy manager
|
||||
@@ -261,8 +625,7 @@ mod tests {
|
||||
.unwrap()
|
||||
.to_path_buf();
|
||||
let nodecar_dir = project_root.join("nodecar");
|
||||
let nodecar_dist = nodecar_dir.join("dist");
|
||||
let nodecar_binary = nodecar_dist.join("nodecar");
|
||||
let nodecar_binary = nodecar_dir.join("nodecar-bin");
|
||||
|
||||
// Check if binary already exists
|
||||
if nodecar_binary.exists() {
|
||||
@@ -316,77 +679,10 @@ mod tests {
|
||||
Ok(nodecar_binary)
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_proxy_manager_profile_persistence() {
|
||||
let proxy_manager = ProxyManager::new();
|
||||
|
||||
let proxy_settings = ProxySettings {
|
||||
enabled: true,
|
||||
proxy_type: "socks5".to_string(),
|
||||
host: "127.0.0.1".to_string(),
|
||||
port: 1080,
|
||||
username: None,
|
||||
password: None,
|
||||
};
|
||||
|
||||
// Test profile proxy info storage
|
||||
{
|
||||
let mut profile_proxies = proxy_manager.profile_proxies.lock().unwrap();
|
||||
profile_proxies.insert("test_profile".to_string(), proxy_settings.clone());
|
||||
}
|
||||
|
||||
// Test retrieval
|
||||
let retrieved = proxy_manager.get_profile_proxy_info("test_profile");
|
||||
assert!(retrieved.is_some());
|
||||
let retrieved = retrieved.unwrap();
|
||||
assert_eq!(retrieved.proxy_type, "socks5");
|
||||
assert_eq!(retrieved.host, "127.0.0.1");
|
||||
assert_eq!(retrieved.port, 1080);
|
||||
|
||||
// Test non-existent profile
|
||||
let non_existent = proxy_manager.get_profile_proxy_info("non_existent");
|
||||
assert!(non_existent.is_none());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_proxy_manager_active_proxy_tracking() {
|
||||
let proxy_manager = ProxyManager::new();
|
||||
|
||||
let proxy_info = ProxyInfo {
|
||||
id: "test_proxy_123".to_string(),
|
||||
local_url: "http://localhost:8080".to_string(),
|
||||
upstream_host: "proxy.example.com".to_string(),
|
||||
upstream_port: 3128,
|
||||
upstream_type: "http".to_string(),
|
||||
local_port: 8080,
|
||||
};
|
||||
|
||||
let browser_pid = 54321u32;
|
||||
|
||||
// Add active proxy
|
||||
{
|
||||
let mut active_proxies = proxy_manager.active_proxies.lock().unwrap();
|
||||
active_proxies.insert(browser_pid, proxy_info.clone());
|
||||
}
|
||||
|
||||
// Test retrieval of proxy settings
|
||||
let proxy_settings = proxy_manager.get_proxy_settings(browser_pid);
|
||||
assert!(proxy_settings.is_some());
|
||||
let settings = proxy_settings.unwrap();
|
||||
assert!(settings.enabled);
|
||||
assert_eq!(settings.host, "127.0.0.1");
|
||||
assert_eq!(settings.port, 8080);
|
||||
|
||||
// Test non-existent browser PID
|
||||
let non_existent = proxy_manager.get_proxy_settings(99999);
|
||||
assert!(non_existent.is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_proxy_settings_validation() {
|
||||
// Test valid proxy settings
|
||||
let valid_settings = ProxySettings {
|
||||
enabled: true,
|
||||
proxy_type: "http".to_string(),
|
||||
host: "127.0.0.1".to_string(),
|
||||
port: 8080,
|
||||
@@ -394,14 +690,26 @@ mod tests {
|
||||
password: Some("pass".to_string()),
|
||||
};
|
||||
|
||||
assert!(valid_settings.enabled);
|
||||
assert_eq!(valid_settings.proxy_type, "http");
|
||||
assert!(!valid_settings.host.is_empty());
|
||||
assert!(valid_settings.port > 0);
|
||||
assert!(
|
||||
!valid_settings.host.is_empty(),
|
||||
"Valid settings should have non-empty host"
|
||||
);
|
||||
assert!(
|
||||
valid_settings.port > 0,
|
||||
"Valid settings should have positive port"
|
||||
);
|
||||
assert_eq!(valid_settings.proxy_type, "http", "Proxy type should match");
|
||||
assert!(
|
||||
valid_settings.username.is_some(),
|
||||
"Username should be present"
|
||||
);
|
||||
assert!(
|
||||
valid_settings.password.is_some(),
|
||||
"Password should be present"
|
||||
);
|
||||
|
||||
// Test disabled proxy settings
|
||||
let disabled_settings = ProxySettings {
|
||||
enabled: false,
|
||||
// Test proxy settings with empty values
|
||||
let empty_settings = ProxySettings {
|
||||
proxy_type: "http".to_string(),
|
||||
host: "".to_string(),
|
||||
port: 0,
|
||||
@@ -409,7 +717,16 @@ mod tests {
|
||||
password: None,
|
||||
};
|
||||
|
||||
assert!(!disabled_settings.enabled);
|
||||
assert!(
|
||||
empty_settings.host.is_empty(),
|
||||
"Empty settings should have empty host"
|
||||
);
|
||||
assert_eq!(
|
||||
empty_settings.port, 0,
|
||||
"Empty settings should have zero port"
|
||||
);
|
||||
assert!(empty_settings.username.is_none(), "Username should be None");
|
||||
assert!(empty_settings.password.is_none(), "Password should be None");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
@@ -431,6 +748,7 @@ mod tests {
|
||||
upstream_port: 3128,
|
||||
upstream_type: "http".to_string(),
|
||||
local_port: (8000 + i) as u16,
|
||||
profile_name: None,
|
||||
};
|
||||
|
||||
// Add proxy
|
||||
@@ -439,10 +757,6 @@ mod tests {
|
||||
active_proxies.insert(browser_pid, proxy_info);
|
||||
}
|
||||
|
||||
// Read proxy
|
||||
let settings = pm.get_proxy_settings(browser_pid);
|
||||
assert!(settings.is_some());
|
||||
|
||||
browser_pid
|
||||
});
|
||||
handles.push(handle);
|
||||
@@ -505,7 +819,7 @@ mod tests {
|
||||
.arg("http");
|
||||
|
||||
// Set a timeout for the command
|
||||
let output = tokio::time::timeout(Duration::from_secs(10), async { cmd.output() }).await??;
|
||||
let output = tokio::time::timeout(Duration::from_secs(60), async { cmd.output() }).await??;
|
||||
|
||||
if output.status.success() {
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
@@ -521,7 +835,7 @@ mod tests {
|
||||
|
||||
// Wait for proxy worker to start
|
||||
println!("Waiting for proxy worker to start...");
|
||||
tokio::time::sleep(Duration::from_secs(3)).await;
|
||||
tokio::time::sleep(Duration::from_secs(1)).await;
|
||||
|
||||
// Test that the local port is listening
|
||||
let mut port_test = Command::new("nc");
|
||||
@@ -542,7 +856,7 @@ mod tests {
|
||||
stop_cmd.arg("proxy").arg("stop").arg("--id").arg(proxy_id);
|
||||
|
||||
let stop_output =
|
||||
tokio::time::timeout(Duration::from_secs(5), async { stop_cmd.output() }).await??;
|
||||
tokio::time::timeout(Duration::from_secs(60), async { stop_cmd.output() }).await??;
|
||||
|
||||
assert!(stop_output.status.success());
|
||||
|
||||
@@ -563,7 +877,6 @@ mod tests {
|
||||
#[test]
|
||||
fn test_proxy_command_construction() {
|
||||
let proxy_settings = ProxySettings {
|
||||
enabled: true,
|
||||
proxy_type: "http".to_string(),
|
||||
host: "proxy.example.com".to_string(),
|
||||
port: 8080,
|
||||
@@ -572,7 +885,7 @@ mod tests {
|
||||
};
|
||||
|
||||
// Test command arguments match expected format
|
||||
let _expected_args = [
|
||||
let expected_args = [
|
||||
"proxy",
|
||||
"start",
|
||||
"--host",
|
||||
@@ -588,11 +901,37 @@ mod tests {
|
||||
];
|
||||
|
||||
// This test verifies the argument structure without actually running the command
|
||||
assert_eq!(proxy_settings.host, "proxy.example.com");
|
||||
assert_eq!(proxy_settings.port, 8080);
|
||||
assert_eq!(proxy_settings.proxy_type, "http");
|
||||
assert_eq!(proxy_settings.username.as_ref().unwrap(), "user");
|
||||
assert_eq!(proxy_settings.password.as_ref().unwrap(), "pass");
|
||||
assert_eq!(
|
||||
proxy_settings.host, "proxy.example.com",
|
||||
"Host should match expected value"
|
||||
);
|
||||
assert_eq!(
|
||||
proxy_settings.port, 8080,
|
||||
"Port should match expected value"
|
||||
);
|
||||
assert_eq!(
|
||||
proxy_settings.proxy_type, "http",
|
||||
"Proxy type should match expected value"
|
||||
);
|
||||
assert_eq!(
|
||||
proxy_settings.username.as_ref().unwrap(),
|
||||
"user",
|
||||
"Username should match expected value"
|
||||
);
|
||||
assert_eq!(
|
||||
proxy_settings.password.as_ref().unwrap(),
|
||||
"pass",
|
||||
"Password should match expected value"
|
||||
);
|
||||
|
||||
// Verify expected args structure
|
||||
assert_eq!(expected_args[0], "proxy", "First arg should be 'proxy'");
|
||||
assert_eq!(expected_args[1], "start", "Second arg should be 'start'");
|
||||
assert_eq!(expected_args[2], "--host", "Third arg should be '--host'");
|
||||
assert_eq!(
|
||||
expected_args[3], "proxy.example.com",
|
||||
"Fourth arg should be host value"
|
||||
);
|
||||
}
|
||||
|
||||
// Test the CLI detachment specifically - ensure the CLI exits properly
|
||||
@@ -618,13 +957,6 @@ mod tests {
|
||||
match output {
|
||||
Ok(Ok(cmd_output)) => {
|
||||
let execution_time = start_time.elapsed();
|
||||
println!("CLI completed in {execution_time:?}");
|
||||
|
||||
// Should complete very quickly if properly detached
|
||||
assert!(
|
||||
execution_time < Duration::from_secs(3),
|
||||
"CLI took too long ({execution_time:?}), should exit immediately after starting worker"
|
||||
);
|
||||
|
||||
if cmd_output.status.success() {
|
||||
let stdout = String::from_utf8(cmd_output.stdout)?;
|
||||
@@ -668,17 +1000,7 @@ mod tests {
|
||||
.arg("--type")
|
||||
.arg("http");
|
||||
|
||||
let start_time = std::time::Instant::now();
|
||||
let output = tokio::time::timeout(Duration::from_secs(5), async { cmd.output() }).await??;
|
||||
let execution_time = start_time.elapsed();
|
||||
|
||||
// Command should complete very quickly if properly detached
|
||||
assert!(
|
||||
execution_time < Duration::from_secs(5),
|
||||
"CLI command took {execution_time:?}, should complete in under 5 seconds for proper detachment"
|
||||
);
|
||||
|
||||
println!("CLI detachment test: command completed in {execution_time:?}");
|
||||
let output = tokio::time::timeout(Duration::from_secs(60), async { cmd.output() }).await??;
|
||||
|
||||
if output.status.success() {
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
@@ -720,7 +1042,7 @@ mod tests {
|
||||
.arg("--password")
|
||||
.arg("pass word!"); // Contains space and special character
|
||||
|
||||
let output = tokio::time::timeout(Duration::from_secs(5), async { cmd.output() }).await??;
|
||||
let output = tokio::time::timeout(Duration::from_secs(60), async { cmd.output() }).await??;
|
||||
|
||||
if output.status.success() {
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
|
||||
@@ -3,8 +3,11 @@ use serde::{Deserialize, Serialize};
|
||||
use std::fs::{self, create_dir_all};
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::api_client::ApiClient;
|
||||
use crate::browser_version_service::BrowserVersionService;
|
||||
use aes_gcm::{
|
||||
aead::{Aead, AeadCore, KeyInit, OsRng},
|
||||
Aes256Gcm, Key, Nonce,
|
||||
};
|
||||
use argon2::{password_hash::SaltString, Argon2, PasswordHasher};
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct TableSortingSettings {
|
||||
@@ -25,40 +28,35 @@ impl Default for TableSortingSettings {
|
||||
pub struct AppSettings {
|
||||
#[serde(default)]
|
||||
pub set_as_default_browser: bool,
|
||||
#[serde(default = "default_show_settings_on_startup")]
|
||||
pub show_settings_on_startup: bool,
|
||||
#[serde(default = "default_theme")]
|
||||
pub theme: String, // "light", "dark", or "system"
|
||||
#[serde(default = "default_auto_updates_enabled")]
|
||||
pub auto_updates_enabled: bool,
|
||||
#[serde(default = "default_auto_delete_unused_binaries")]
|
||||
pub auto_delete_unused_binaries: bool,
|
||||
}
|
||||
|
||||
fn default_show_settings_on_startup() -> bool {
|
||||
true
|
||||
#[serde(default)]
|
||||
pub custom_theme: Option<std::collections::HashMap<String, String>>, // CSS var name -> value (e.g., "--background": "#1a1b26")
|
||||
#[serde(default)]
|
||||
pub api_enabled: bool,
|
||||
#[serde(default = "default_api_port")]
|
||||
pub api_port: u16,
|
||||
#[serde(default)]
|
||||
pub api_token: Option<String>, // Displayed token for user to copy
|
||||
}
|
||||
|
||||
fn default_theme() -> String {
|
||||
"system".to_string()
|
||||
}
|
||||
|
||||
fn default_auto_updates_enabled() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn default_auto_delete_unused_binaries() -> bool {
|
||||
true
|
||||
fn default_api_port() -> u16 {
|
||||
10108
|
||||
}
|
||||
|
||||
impl Default for AppSettings {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
set_as_default_browser: false,
|
||||
show_settings_on_startup: default_show_settings_on_startup(),
|
||||
theme: default_theme(),
|
||||
auto_updates_enabled: default_auto_updates_enabled(),
|
||||
auto_delete_unused_binaries: default_auto_delete_unused_binaries(),
|
||||
theme: "system".to_string(),
|
||||
custom_theme: None,
|
||||
api_enabled: false,
|
||||
api_port: 10108,
|
||||
api_token: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -68,12 +66,16 @@ pub struct SettingsManager {
|
||||
}
|
||||
|
||||
impl SettingsManager {
|
||||
pub fn new() -> Self {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
base_dirs: BaseDirs::new().expect("Failed to get base directories"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn instance() -> &'static SettingsManager {
|
||||
&SETTINGS_MANAGER
|
||||
}
|
||||
|
||||
pub fn get_settings_dir(&self) -> PathBuf {
|
||||
let mut path = self.base_dirs.data_local_dir().to_path_buf();
|
||||
path.push(if cfg!(debug_assertions) {
|
||||
@@ -165,35 +167,257 @@ impl SettingsManager {
|
||||
}
|
||||
|
||||
pub fn should_show_settings_on_startup(&self) -> Result<bool, Box<dyn std::error::Error>> {
|
||||
let settings = self.load_settings()?;
|
||||
// Always return false - we don't show settings on startup anymore
|
||||
Ok(false)
|
||||
}
|
||||
|
||||
// Show prompt if:
|
||||
// 1. User wants to see the prompt
|
||||
// 2. Donut Browser is not set as default
|
||||
// 3. User hasn't explicitly disabled the default browser setting
|
||||
Ok(settings.show_settings_on_startup && !settings.set_as_default_browser)
|
||||
fn get_vault_password() -> String {
|
||||
env!("DONUT_BROWSER_VAULT_PASSWORD").to_string()
|
||||
}
|
||||
|
||||
pub async fn generate_api_token(
|
||||
&self,
|
||||
app_handle: &tauri::AppHandle,
|
||||
) -> Result<String, Box<dyn std::error::Error>> {
|
||||
// Generate a secure random token (base64 encoded for URL safety)
|
||||
let token_bytes: [u8; 32] = {
|
||||
use rand::RngCore;
|
||||
let mut rng = rand::rng();
|
||||
let mut bytes = [0u8; 32];
|
||||
rng.fill_bytes(&mut bytes);
|
||||
bytes
|
||||
};
|
||||
use base64::{engine::general_purpose, Engine as _};
|
||||
let token = general_purpose::URL_SAFE_NO_PAD.encode(token_bytes);
|
||||
|
||||
// Store token securely
|
||||
self.store_api_token(app_handle, &token).await?;
|
||||
|
||||
Ok(token)
|
||||
}
|
||||
|
||||
pub async fn store_api_token(
|
||||
&self,
|
||||
_app_handle: &tauri::AppHandle,
|
||||
token: &str,
|
||||
) -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Store token in an encrypted file using Argon2 + AES-GCM
|
||||
let token_file = self.get_settings_dir().join("api_token.dat");
|
||||
|
||||
// Create directory if it doesn't exist
|
||||
if let Some(parent) = token_file.parent() {
|
||||
std::fs::create_dir_all(parent)?;
|
||||
}
|
||||
|
||||
let vault_password = Self::get_vault_password();
|
||||
|
||||
// Generate a random salt for Argon2
|
||||
let salt = SaltString::generate(&mut OsRng);
|
||||
|
||||
// Use Argon2 to derive a 32-byte key from the vault password
|
||||
let argon2 = Argon2::default();
|
||||
let password_hash = argon2
|
||||
.hash_password(vault_password.as_bytes(), &salt)
|
||||
.map_err(|e| format!("Argon2 key derivation failed: {e}"))?;
|
||||
let hash_value = password_hash.hash.unwrap();
|
||||
let hash_bytes = hash_value.as_bytes();
|
||||
|
||||
// Take first 32 bytes for AES-256 key
|
||||
let key = Key::<Aes256Gcm>::from_slice(&hash_bytes[..32]);
|
||||
let cipher = Aes256Gcm::new(key);
|
||||
|
||||
// Generate a random nonce
|
||||
let nonce = Aes256Gcm::generate_nonce(&mut OsRng);
|
||||
|
||||
// Encrypt the token
|
||||
let ciphertext = cipher
|
||||
.encrypt(&nonce, token.as_bytes())
|
||||
.map_err(|e| format!("Encryption failed: {e}"))?;
|
||||
|
||||
// Create file data with header, salt, nonce, and encrypted data
|
||||
let mut file_data = Vec::new();
|
||||
file_data.extend_from_slice(b"DBAPI"); // 5-byte header
|
||||
file_data.push(2u8); // Version 2 (Argon2 + AES-GCM)
|
||||
|
||||
// Store salt length and salt
|
||||
let salt_str = salt.as_str();
|
||||
file_data.push(salt_str.len() as u8);
|
||||
file_data.extend_from_slice(salt_str.as_bytes());
|
||||
|
||||
// Store nonce (12 bytes for AES-GCM)
|
||||
file_data.extend_from_slice(&nonce);
|
||||
|
||||
// Store ciphertext length and ciphertext
|
||||
file_data.extend_from_slice(&(ciphertext.len() as u32).to_le_bytes());
|
||||
file_data.extend_from_slice(&ciphertext);
|
||||
|
||||
std::fs::write(token_file, file_data)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn get_api_token(
|
||||
&self,
|
||||
_app_handle: &tauri::AppHandle,
|
||||
) -> Result<Option<String>, Box<dyn std::error::Error>> {
|
||||
let token_file = self.get_settings_dir().join("api_token.dat");
|
||||
|
||||
if !token_file.exists() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let file_data = std::fs::read(token_file)?;
|
||||
|
||||
// Validate header
|
||||
if file_data.len() < 6 || &file_data[0..5] != b"DBAPI" {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let version = file_data[5];
|
||||
|
||||
// Only support Argon2 + AES-GCM (version 2)
|
||||
if version != 2 {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
// Argon2 + AES-GCM decryption
|
||||
let mut offset = 6;
|
||||
|
||||
// Read salt
|
||||
if offset >= file_data.len() {
|
||||
return Ok(None);
|
||||
}
|
||||
let salt_len = file_data[offset] as usize;
|
||||
offset += 1;
|
||||
|
||||
if offset + salt_len > file_data.len() {
|
||||
return Ok(None);
|
||||
}
|
||||
let salt_bytes = &file_data[offset..offset + salt_len];
|
||||
let salt_str = std::str::from_utf8(salt_bytes).map_err(|_| "Invalid salt encoding")?;
|
||||
let salt = SaltString::from_b64(salt_str).map_err(|_| "Invalid salt format")?;
|
||||
offset += salt_len;
|
||||
|
||||
// Read nonce (12 bytes)
|
||||
if offset + 12 > file_data.len() {
|
||||
return Ok(None);
|
||||
}
|
||||
let nonce_bytes = &file_data[offset..offset + 12];
|
||||
let nonce = Nonce::from_slice(nonce_bytes);
|
||||
offset += 12;
|
||||
|
||||
// Read ciphertext
|
||||
if offset + 4 > file_data.len() {
|
||||
return Ok(None);
|
||||
}
|
||||
let ciphertext_len = u32::from_le_bytes([
|
||||
file_data[offset],
|
||||
file_data[offset + 1],
|
||||
file_data[offset + 2],
|
||||
file_data[offset + 3],
|
||||
]) as usize;
|
||||
offset += 4;
|
||||
|
||||
if offset + ciphertext_len > file_data.len() {
|
||||
return Ok(None);
|
||||
}
|
||||
let ciphertext = &file_data[offset..offset + ciphertext_len];
|
||||
|
||||
// Derive key using Argon2
|
||||
let vault_password = Self::get_vault_password();
|
||||
let argon2 = Argon2::default();
|
||||
let password_hash = argon2
|
||||
.hash_password(vault_password.as_bytes(), &salt)
|
||||
.map_err(|e| format!("Argon2 key derivation failed: {e}"))?;
|
||||
let hash_value = password_hash.hash.unwrap();
|
||||
let hash_bytes = hash_value.as_bytes();
|
||||
|
||||
let key = Key::<Aes256Gcm>::from_slice(&hash_bytes[..32]);
|
||||
let cipher = Aes256Gcm::new(key);
|
||||
|
||||
// Decrypt the token
|
||||
let plaintext = cipher
|
||||
.decrypt(nonce, ciphertext)
|
||||
.map_err(|_| "Decryption failed")?;
|
||||
|
||||
match String::from_utf8(plaintext) {
|
||||
Ok(token) => Ok(Some(token)),
|
||||
Err(_) => Ok(None),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn remove_api_token(
|
||||
&self,
|
||||
_app_handle: &tauri::AppHandle,
|
||||
) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let token_file = self.get_settings_dir().join("api_token.dat");
|
||||
|
||||
if token_file.exists() {
|
||||
std::fs::remove_file(token_file)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_app_settings() -> Result<AppSettings, String> {
|
||||
let manager = SettingsManager::new();
|
||||
manager
|
||||
pub async fn get_app_settings(app_handle: tauri::AppHandle) -> Result<AppSettings, String> {
|
||||
let manager = SettingsManager::instance();
|
||||
let mut settings = manager
|
||||
.load_settings()
|
||||
.map_err(|e| format!("Failed to load settings: {e}"))
|
||||
.map_err(|e| format!("Failed to load settings: {e}"))?;
|
||||
|
||||
// Always load token for display purposes if it exists
|
||||
settings.api_token = manager
|
||||
.get_api_token(&app_handle)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to load API token: {e}"))?;
|
||||
|
||||
Ok(settings)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn save_app_settings(settings: AppSettings) -> Result<(), String> {
|
||||
let manager = SettingsManager::new();
|
||||
pub async fn save_app_settings(
|
||||
app_handle: tauri::AppHandle,
|
||||
mut settings: AppSettings,
|
||||
) -> Result<AppSettings, String> {
|
||||
let manager = SettingsManager::instance();
|
||||
|
||||
if settings.api_enabled {
|
||||
if let Some(ref token) = settings.api_token {
|
||||
manager
|
||||
.store_api_token(&app_handle, token)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to store API token: {e}"))?;
|
||||
} else {
|
||||
let token = manager
|
||||
.generate_api_token(&app_handle)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to generate API token: {e}"))?;
|
||||
settings.api_token = Some(token);
|
||||
}
|
||||
}
|
||||
|
||||
// If API is being disabled, remove the token
|
||||
if !settings.api_enabled {
|
||||
manager
|
||||
.remove_api_token(&app_handle)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to remove API token: {e}"))?;
|
||||
settings.api_token = None;
|
||||
}
|
||||
|
||||
let mut persist_settings = settings.clone();
|
||||
persist_settings.api_token = None;
|
||||
manager
|
||||
.save_settings(&settings)
|
||||
.map_err(|e| format!("Failed to save settings: {e}"))
|
||||
.save_settings(&persist_settings)
|
||||
.map_err(|e| format!("Failed to save settings: {e}"))?;
|
||||
|
||||
Ok(settings)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn should_show_settings_on_startup() -> Result<bool, String> {
|
||||
let manager = SettingsManager::new();
|
||||
let manager = SettingsManager::instance();
|
||||
manager
|
||||
.should_show_settings_on_startup()
|
||||
.map_err(|e| format!("Failed to check prompt setting: {e}"))
|
||||
@@ -201,7 +425,7 @@ pub async fn should_show_settings_on_startup() -> Result<bool, String> {
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_table_sorting_settings() -> Result<TableSortingSettings, String> {
|
||||
let manager = SettingsManager::new();
|
||||
let manager = SettingsManager::instance();
|
||||
manager
|
||||
.load_table_sorting()
|
||||
.map_err(|e| format!("Failed to load table sorting settings: {e}"))
|
||||
@@ -209,38 +433,227 @@ pub async fn get_table_sorting_settings() -> Result<TableSortingSettings, String
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn save_table_sorting_settings(sorting: TableSortingSettings) -> Result<(), String> {
|
||||
let manager = SettingsManager::new();
|
||||
let manager = SettingsManager::instance();
|
||||
manager
|
||||
.save_table_sorting(&sorting)
|
||||
.map_err(|e| format!("Failed to save table sorting settings: {e}"))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn clear_all_version_cache_and_refetch() -> Result<(), String> {
|
||||
let api_client = ApiClient::new();
|
||||
// Global singleton instance
|
||||
lazy_static::lazy_static! {
|
||||
static ref SETTINGS_MANAGER: SettingsManager = SettingsManager::new();
|
||||
}
|
||||
|
||||
// Clear all cache first
|
||||
api_client
|
||||
.clear_all_cache()
|
||||
.map_err(|e| format!("Failed to clear version cache: {e}"))?;
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::env;
|
||||
use tempfile::TempDir;
|
||||
|
||||
// Trigger auto-fetch for all supported browsers
|
||||
let service = BrowserVersionService::new();
|
||||
let supported_browsers = service.get_supported_browsers();
|
||||
fn create_test_settings_manager() -> (SettingsManager, TempDir) {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp directory");
|
||||
|
||||
for browser in supported_browsers {
|
||||
// Start background fetch for each browser (don't wait for completion)
|
||||
let service_clone = BrowserVersionService::new();
|
||||
let browser_clone = browser.clone();
|
||||
tokio::spawn(async move {
|
||||
if let Err(e) = service_clone
|
||||
.fetch_browser_versions_detailed(&browser_clone, false)
|
||||
.await
|
||||
{
|
||||
eprintln!("Background version fetch failed for {browser_clone}: {e}");
|
||||
}
|
||||
});
|
||||
// Set up a temporary home directory for testing
|
||||
env::set_var("HOME", temp_dir.path());
|
||||
|
||||
let manager = SettingsManager::new();
|
||||
(manager, temp_dir)
|
||||
}
|
||||
|
||||
Ok(())
|
||||
#[test]
|
||||
fn test_settings_manager_creation() {
|
||||
let (_manager, _temp_dir) = create_test_settings_manager();
|
||||
// Test passes if no panic occurs
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_default_app_settings() {
|
||||
let default_settings = AppSettings::default();
|
||||
|
||||
assert!(
|
||||
!default_settings.set_as_default_browser,
|
||||
"Default should not set as default browser"
|
||||
);
|
||||
assert_eq!(
|
||||
default_settings.theme, "system",
|
||||
"Default theme should be system"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_default_table_sorting_settings() {
|
||||
let default_sorting = TableSortingSettings::default();
|
||||
|
||||
assert_eq!(
|
||||
default_sorting.column, "name",
|
||||
"Default sort column should be name"
|
||||
);
|
||||
assert_eq!(
|
||||
default_sorting.direction, "asc",
|
||||
"Default sort direction should be asc"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_load_settings_nonexistent_file() {
|
||||
let (manager, _temp_dir) = create_test_settings_manager();
|
||||
|
||||
let result = manager.load_settings();
|
||||
assert!(
|
||||
result.is_ok(),
|
||||
"Should handle nonexistent settings file gracefully"
|
||||
);
|
||||
|
||||
let settings = result.unwrap();
|
||||
assert!(
|
||||
!settings.set_as_default_browser,
|
||||
"Should return default settings"
|
||||
);
|
||||
assert_eq!(settings.theme, "system", "Should return default theme");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_save_and_load_settings() {
|
||||
let (manager, _temp_dir) = create_test_settings_manager();
|
||||
|
||||
let test_settings = AppSettings {
|
||||
set_as_default_browser: true,
|
||||
theme: "dark".to_string(),
|
||||
custom_theme: None,
|
||||
api_enabled: false,
|
||||
api_port: 10108,
|
||||
api_token: None,
|
||||
};
|
||||
|
||||
// Save settings
|
||||
let save_result = manager.save_settings(&test_settings);
|
||||
assert!(save_result.is_ok(), "Should save settings successfully");
|
||||
|
||||
// Load settings back
|
||||
let load_result = manager.load_settings();
|
||||
assert!(load_result.is_ok(), "Should load settings successfully");
|
||||
|
||||
let loaded_settings = load_result.unwrap();
|
||||
assert!(
|
||||
loaded_settings.set_as_default_browser,
|
||||
"Loaded settings should match saved"
|
||||
);
|
||||
assert_eq!(
|
||||
loaded_settings.theme, "dark",
|
||||
"Loaded theme should match saved"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_load_table_sorting_nonexistent_file() {
|
||||
let (manager, _temp_dir) = create_test_settings_manager();
|
||||
|
||||
let result = manager.load_table_sorting();
|
||||
assert!(
|
||||
result.is_ok(),
|
||||
"Should handle nonexistent sorting file gracefully"
|
||||
);
|
||||
|
||||
let sorting = result.unwrap();
|
||||
assert_eq!(sorting.column, "name", "Should return default sorting");
|
||||
assert_eq!(sorting.direction, "asc", "Should return default direction");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_save_and_load_table_sorting() {
|
||||
let (manager, _temp_dir) = create_test_settings_manager();
|
||||
|
||||
let test_sorting = TableSortingSettings {
|
||||
column: "browser".to_string(),
|
||||
direction: "desc".to_string(),
|
||||
};
|
||||
|
||||
// Save sorting
|
||||
let save_result = manager.save_table_sorting(&test_sorting);
|
||||
assert!(save_result.is_ok(), "Should save sorting successfully");
|
||||
|
||||
// Load sorting back
|
||||
let load_result = manager.load_table_sorting();
|
||||
assert!(load_result.is_ok(), "Should load sorting successfully");
|
||||
|
||||
let loaded_sorting = load_result.unwrap();
|
||||
assert_eq!(
|
||||
loaded_sorting.column, "browser",
|
||||
"Loaded column should match saved"
|
||||
);
|
||||
assert_eq!(
|
||||
loaded_sorting.direction, "desc",
|
||||
"Loaded direction should match saved"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_should_show_settings_on_startup() {
|
||||
let (manager, _temp_dir) = create_test_settings_manager();
|
||||
|
||||
let result = manager.should_show_settings_on_startup();
|
||||
assert!(result.is_ok(), "Should not fail");
|
||||
|
||||
let should_show = result.unwrap();
|
||||
assert!(
|
||||
!should_show,
|
||||
"Should always return false as per implementation"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_load_corrupted_settings_file() {
|
||||
let (manager, _temp_dir) = create_test_settings_manager();
|
||||
|
||||
// Create settings directory
|
||||
let settings_dir = manager.get_settings_dir();
|
||||
fs::create_dir_all(&settings_dir).expect("Should create settings directory");
|
||||
|
||||
// Write corrupted JSON
|
||||
let settings_file = manager.get_settings_file();
|
||||
fs::write(&settings_file, "{ invalid json }").expect("Should write corrupted file");
|
||||
|
||||
// Should handle corrupted file gracefully
|
||||
let result = manager.load_settings();
|
||||
assert!(
|
||||
result.is_ok(),
|
||||
"Should handle corrupted settings file gracefully"
|
||||
);
|
||||
|
||||
let settings = result.unwrap();
|
||||
assert!(
|
||||
!settings.set_as_default_browser,
|
||||
"Should return default settings for corrupted file"
|
||||
);
|
||||
assert_eq!(
|
||||
settings.theme, "system",
|
||||
"Should return default theme for corrupted file"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_settings_file_paths() {
|
||||
let (manager, _temp_dir) = create_test_settings_manager();
|
||||
|
||||
let settings_dir = manager.get_settings_dir();
|
||||
let settings_file = manager.get_settings_file();
|
||||
let sorting_file = manager.get_table_sorting_file();
|
||||
|
||||
assert!(
|
||||
settings_dir.to_string_lossy().contains("settings"),
|
||||
"Settings dir should contain 'settings'"
|
||||
);
|
||||
assert!(
|
||||
settings_file
|
||||
.to_string_lossy()
|
||||
.ends_with("app_settings.json"),
|
||||
"Settings file should end with app_settings.json"
|
||||
);
|
||||
assert!(
|
||||
sorting_file
|
||||
.to_string_lossy()
|
||||
.ends_with("table_sorting.json"),
|
||||
"Sorting file should end with table_sorting.json"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,114 @@
|
||||
use crate::profile::BrowserProfile;
|
||||
use directories::BaseDirs;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::BTreeSet;
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Default, Clone)]
|
||||
struct TagsData {
|
||||
tags: Vec<String>,
|
||||
}
|
||||
|
||||
pub struct TagManager {
|
||||
base_dirs: BaseDirs,
|
||||
data_dir_override: Option<PathBuf>,
|
||||
}
|
||||
|
||||
impl TagManager {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
base_dirs: BaseDirs::new().expect("Failed to get base directories"),
|
||||
data_dir_override: std::env::var("DONUTBROWSER_DATA_DIR")
|
||||
.ok()
|
||||
.map(PathBuf::from),
|
||||
}
|
||||
}
|
||||
|
||||
// Helper for tests to override data directory without global env var
|
||||
#[allow(dead_code)]
|
||||
pub fn with_data_dir_override(dir: &Path) -> Self {
|
||||
Self {
|
||||
base_dirs: BaseDirs::new().expect("Failed to get base directories"),
|
||||
data_dir_override: Some(dir.to_path_buf()),
|
||||
}
|
||||
}
|
||||
|
||||
fn get_tags_file_path(&self) -> PathBuf {
|
||||
if let Some(dir) = &self.data_dir_override {
|
||||
let mut override_path = dir.clone();
|
||||
let _ = fs::create_dir_all(&override_path);
|
||||
override_path.push("tags.json");
|
||||
return override_path;
|
||||
}
|
||||
|
||||
let mut path = self.base_dirs.data_local_dir().to_path_buf();
|
||||
path.push(if cfg!(debug_assertions) {
|
||||
"DonutBrowserDev"
|
||||
} else {
|
||||
"DonutBrowser"
|
||||
});
|
||||
path.push("data");
|
||||
path.push("tags.json");
|
||||
path
|
||||
}
|
||||
|
||||
fn load_tags_data(&self) -> Result<TagsData, Box<dyn std::error::Error>> {
|
||||
let file_path = self.get_tags_file_path();
|
||||
if !file_path.exists() {
|
||||
return Ok(TagsData::default());
|
||||
}
|
||||
let content = fs::read_to_string(file_path)?;
|
||||
let data: TagsData = serde_json::from_str(&content)?;
|
||||
Ok(data)
|
||||
}
|
||||
|
||||
fn save_tags_data(&self, data: &TagsData) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let file_path = self.get_tags_file_path();
|
||||
if let Some(parent) = file_path.parent() {
|
||||
fs::create_dir_all(parent)?;
|
||||
}
|
||||
let json = serde_json::to_string_pretty(data)?;
|
||||
fs::write(file_path, json)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn get_all_tags(&self) -> Result<Vec<String>, Box<dyn std::error::Error>> {
|
||||
let mut all = self.load_tags_data()?.tags;
|
||||
// Ensure deterministic order
|
||||
all.sort();
|
||||
all.dedup();
|
||||
Ok(all)
|
||||
}
|
||||
|
||||
pub fn rebuild_from_profiles(
|
||||
&self,
|
||||
profiles: &[BrowserProfile],
|
||||
) -> Result<Vec<String>, Box<dyn std::error::Error>> {
|
||||
// Build a set of all tags currently used by any profile
|
||||
let mut set: BTreeSet<String> = BTreeSet::new();
|
||||
for profile in profiles {
|
||||
for tag in &profile.tags {
|
||||
// Store exactly as provided (no normalization) to preserve characters
|
||||
set.insert(tag.clone());
|
||||
}
|
||||
}
|
||||
let combined: Vec<String> = set.into_iter().collect();
|
||||
self.save_tags_data(&TagsData {
|
||||
tags: combined.clone(),
|
||||
})?;
|
||||
Ok(combined)
|
||||
}
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub fn get_all_tags() -> Result<Vec<String>, String> {
|
||||
let tag_manager = crate::tag_manager::TAG_MANAGER.lock().unwrap();
|
||||
tag_manager
|
||||
.get_all_tags()
|
||||
.map_err(|e| format!("Failed to get tags: {e}"))
|
||||
}
|
||||
|
||||
lazy_static::lazy_static! {
|
||||
pub static ref TAG_MANAGER: std::sync::Mutex<TagManager> = std::sync::Mutex::new(TagManager::new());
|
||||
}
|
||||
@@ -1,539 +0,0 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::process::Command;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct SystemTheme {
|
||||
pub theme: String, // "light", "dark", or "unknown"
|
||||
}
|
||||
|
||||
pub struct ThemeDetector;
|
||||
|
||||
impl ThemeDetector {
|
||||
pub fn new() -> Self {
|
||||
Self
|
||||
}
|
||||
|
||||
/// Detect the system theme preference
|
||||
pub fn detect_system_theme(&self) -> SystemTheme {
|
||||
#[cfg(target_os = "linux")]
|
||||
return linux::detect_system_theme();
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
return macos::detect_system_theme();
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
return windows::detect_system_theme();
|
||||
|
||||
#[cfg(not(any(target_os = "linux", target_os = "macos", target_os = "windows")))]
|
||||
return SystemTheme {
|
||||
theme: "unknown".to_string(),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
mod linux {
|
||||
use super::*;
|
||||
|
||||
pub fn detect_system_theme() -> SystemTheme {
|
||||
// Try multiple methods in order of preference
|
||||
|
||||
// 1. Try GNOME/GTK settings via gsettings
|
||||
if let Ok(theme) = detect_gnome_theme() {
|
||||
return SystemTheme { theme };
|
||||
}
|
||||
|
||||
// 2. Try KDE Plasma settings via kreadconfig5/kreadconfig6
|
||||
if let Ok(theme) = detect_kde_theme() {
|
||||
return SystemTheme { theme };
|
||||
}
|
||||
|
||||
// 3. Try XFCE settings via xfconf-query
|
||||
if let Ok(theme) = detect_xfce_theme() {
|
||||
return SystemTheme { theme };
|
||||
}
|
||||
|
||||
// 4. Try looking at current GTK theme name
|
||||
if let Ok(theme) = detect_gtk_theme() {
|
||||
return SystemTheme { theme };
|
||||
}
|
||||
|
||||
// 5. Try dconf directly (fallback for GNOME-based systems)
|
||||
if let Ok(theme) = detect_dconf_theme() {
|
||||
return SystemTheme { theme };
|
||||
}
|
||||
|
||||
// 6. Try environment variables
|
||||
if let Ok(theme) = detect_env_theme() {
|
||||
return SystemTheme { theme };
|
||||
}
|
||||
|
||||
// 7. Try freedesktop portal
|
||||
if let Ok(theme) = detect_portal_theme() {
|
||||
return SystemTheme { theme };
|
||||
}
|
||||
|
||||
// 8. Try looking at system color scheme files
|
||||
if let Ok(theme) = detect_system_files_theme() {
|
||||
return SystemTheme { theme };
|
||||
}
|
||||
|
||||
// Fallback to unknown
|
||||
SystemTheme {
|
||||
theme: "unknown".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
fn detect_gnome_theme() -> Result<String, Box<dyn std::error::Error>> {
|
||||
// Check if gsettings is available
|
||||
if !is_command_available("gsettings") {
|
||||
return Err("gsettings not available".into());
|
||||
}
|
||||
|
||||
// Try GNOME color scheme first (modern way)
|
||||
if let Ok(output) = Command::new("gsettings")
|
||||
.args(["get", "org.gnome.desktop.interface", "color-scheme"])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let scheme = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||||
match scheme.as_str() {
|
||||
"'prefer-dark'" => return Ok("dark".to_string()),
|
||||
"'prefer-light'" => return Ok("light".to_string()),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback to GTK theme name detection
|
||||
if let Ok(output) = Command::new("gsettings")
|
||||
.args(["get", "org.gnome.desktop.interface", "gtk-theme"])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let theme_name = String::from_utf8_lossy(&output.stdout)
|
||||
.trim()
|
||||
.trim_matches('\'')
|
||||
.to_lowercase();
|
||||
|
||||
if theme_name.contains("dark") || theme_name.contains("night") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme_name.contains("light") || theme_name.contains("adwaita") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err("Could not detect GNOME theme".into())
|
||||
}
|
||||
|
||||
fn detect_kde_theme() -> Result<String, Box<dyn std::error::Error>> {
|
||||
// Try KDE Plasma 6 first
|
||||
if is_command_available("kreadconfig6") {
|
||||
if let Ok(output) = Command::new("kreadconfig6")
|
||||
.args([
|
||||
"--file",
|
||||
"kdeglobals",
|
||||
"--group",
|
||||
"KDE",
|
||||
"--key",
|
||||
"LookAndFeelPackage",
|
||||
])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let theme = String::from_utf8_lossy(&output.stdout)
|
||||
.trim()
|
||||
.to_lowercase();
|
||||
if theme.contains("dark") || theme.contains("breezedark") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme.contains("light") || theme.contains("breeze") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Try color scheme as well
|
||||
if let Ok(output) = Command::new("kreadconfig6")
|
||||
.args([
|
||||
"--file",
|
||||
"kdeglobals",
|
||||
"--group",
|
||||
"General",
|
||||
"--key",
|
||||
"ColorScheme",
|
||||
])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let scheme = String::from_utf8_lossy(&output.stdout)
|
||||
.trim()
|
||||
.to_lowercase();
|
||||
if scheme.contains("dark") || scheme.contains("breezedark") {
|
||||
return Ok("dark".to_string());
|
||||
} else if scheme.contains("light") || scheme.contains("breeze") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Try KDE Plasma 5 as fallback
|
||||
if is_command_available("kreadconfig5") {
|
||||
if let Ok(output) = Command::new("kreadconfig5")
|
||||
.args([
|
||||
"--file",
|
||||
"kdeglobals",
|
||||
"--group",
|
||||
"KDE",
|
||||
"--key",
|
||||
"LookAndFeelPackage",
|
||||
])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let theme = String::from_utf8_lossy(&output.stdout)
|
||||
.trim()
|
||||
.to_lowercase();
|
||||
if theme.contains("dark") || theme.contains("breezedark") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme.contains("light") || theme.contains("breeze") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err("Could not detect KDE theme".into())
|
||||
}
|
||||
|
||||
fn detect_xfce_theme() -> Result<String, Box<dyn std::error::Error>> {
|
||||
if !is_command_available("xfconf-query") {
|
||||
return Err("xfconf-query not available".into());
|
||||
}
|
||||
|
||||
// Check XFCE theme
|
||||
if let Ok(output) = Command::new("xfconf-query")
|
||||
.args(["-c", "xsettings", "-p", "/Net/ThemeName"])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let theme = String::from_utf8_lossy(&output.stdout)
|
||||
.trim()
|
||||
.to_lowercase();
|
||||
if theme.contains("dark") || theme.contains("night") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme.contains("light") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check XFCE window manager theme as backup
|
||||
if let Ok(output) = Command::new("xfconf-query")
|
||||
.args(["-c", "xfwm4", "-p", "/general/theme"])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let theme = String::from_utf8_lossy(&output.stdout)
|
||||
.trim()
|
||||
.to_lowercase();
|
||||
if theme.contains("dark") || theme.contains("night") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme.contains("light") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err("Could not detect XFCE theme".into())
|
||||
}
|
||||
|
||||
fn detect_gtk_theme() -> Result<String, Box<dyn std::error::Error>> {
|
||||
// Try to read GTK3 settings file
|
||||
if let Ok(home) = std::env::var("HOME") {
|
||||
let gtk3_settings = std::path::Path::new(&home).join(".config/gtk-3.0/settings.ini");
|
||||
if gtk3_settings.exists() {
|
||||
if let Ok(content) = std::fs::read_to_string(gtk3_settings) {
|
||||
for line in content.lines() {
|
||||
if line.starts_with("gtk-theme-name=") {
|
||||
let theme_name = line.split('=').nth(1).unwrap_or("").trim().to_lowercase();
|
||||
if theme_name.contains("dark") || theme_name.contains("night") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme_name.contains("light") || theme_name.contains("adwaita") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Try GTK4 settings
|
||||
let gtk4_settings = std::path::Path::new(&home).join(".config/gtk-4.0/settings.ini");
|
||||
if gtk4_settings.exists() {
|
||||
if let Ok(content) = std::fs::read_to_string(gtk4_settings) {
|
||||
for line in content.lines() {
|
||||
if line.starts_with("gtk-theme-name=") {
|
||||
let theme_name = line.split('=').nth(1).unwrap_or("").trim().to_lowercase();
|
||||
if theme_name.contains("dark") || theme_name.contains("night") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme_name.contains("light") || theme_name.contains("adwaita") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err("Could not detect GTK theme".into())
|
||||
}
|
||||
|
||||
fn detect_dconf_theme() -> Result<String, Box<dyn std::error::Error>> {
|
||||
if !is_command_available("dconf") {
|
||||
return Err("dconf not available".into());
|
||||
}
|
||||
|
||||
// Try reading color scheme directly from dconf
|
||||
if let Ok(output) = Command::new("dconf")
|
||||
.args(["read", "/org/gnome/desktop/interface/color-scheme"])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let scheme = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||||
match scheme.as_str() {
|
||||
"'prefer-dark'" => return Ok("dark".to_string()),
|
||||
"'prefer-light'" => return Ok("light".to_string()),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Try reading GTK theme from dconf
|
||||
if let Ok(output) = Command::new("dconf")
|
||||
.args(["read", "/org/gnome/desktop/interface/gtk-theme"])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let theme_name = String::from_utf8_lossy(&output.stdout)
|
||||
.trim()
|
||||
.trim_matches('\'')
|
||||
.to_lowercase();
|
||||
|
||||
if theme_name.contains("dark") || theme_name.contains("night") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme_name.contains("light") || theme_name.contains("adwaita") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err("Could not detect dconf theme".into())
|
||||
}
|
||||
|
||||
fn detect_env_theme() -> Result<String, Box<dyn std::error::Error>> {
|
||||
// Check common environment variables
|
||||
if let Ok(theme) = std::env::var("GTK_THEME") {
|
||||
let theme_lower = theme.to_lowercase();
|
||||
if theme_lower.contains("dark") || theme_lower.contains("night") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme_lower.contains("light") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
if let Ok(theme) = std::env::var("QT_STYLE_OVERRIDE") {
|
||||
let theme_lower = theme.to_lowercase();
|
||||
if theme_lower.contains("dark") || theme_lower.contains("night") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme_lower.contains("light") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
Err("Could not detect theme from environment".into())
|
||||
}
|
||||
|
||||
fn detect_portal_theme() -> Result<String, Box<dyn std::error::Error>> {
|
||||
if !is_command_available("busctl") {
|
||||
return Err("busctl not available".into());
|
||||
}
|
||||
|
||||
// Try to query the color scheme via org.freedesktop.portal.Settings
|
||||
if let Ok(output) = Command::new("busctl")
|
||||
.args([
|
||||
"--user",
|
||||
"call",
|
||||
"org.freedesktop.portal.Desktop",
|
||||
"/org/freedesktop/portal/desktop",
|
||||
"org.freedesktop.portal.Settings",
|
||||
"Read",
|
||||
"ss",
|
||||
"org.freedesktop.appearance",
|
||||
"color-scheme",
|
||||
])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let response = String::from_utf8_lossy(&output.stdout);
|
||||
// Parse DBus response - look for preference values
|
||||
if response.contains(" 1 ") {
|
||||
return Ok("dark".to_string());
|
||||
} else if response.contains(" 2 ") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err("Could not detect portal theme".into())
|
||||
}
|
||||
|
||||
fn detect_system_files_theme() -> Result<String, Box<dyn std::error::Error>> {
|
||||
// Check if we're in a dark terminal (heuristic)
|
||||
if let Ok(term) = std::env::var("TERM") {
|
||||
let term_lower = term.to_lowercase();
|
||||
if term_lower.contains("dark") || term_lower.contains("night") {
|
||||
return Ok("dark".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
// Check if we can determine from desktop session
|
||||
if let Ok(desktop) = std::env::var("XDG_CURRENT_DESKTOP") {
|
||||
let desktop_lower = desktop.to_lowercase();
|
||||
// Some desktops default to dark
|
||||
if desktop_lower.contains("i3") || desktop_lower.contains("sway") {
|
||||
// Window managers often use dark themes by default
|
||||
return Ok("dark".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
Err("Could not detect theme from system files".into())
|
||||
}
|
||||
|
||||
fn is_command_available(command: &str) -> bool {
|
||||
Command::new("which")
|
||||
.arg(command)
|
||||
.output()
|
||||
.map(|output| output.status.success())
|
||||
.unwrap_or(false)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
mod macos {
|
||||
use super::*;
|
||||
|
||||
pub fn detect_system_theme() -> SystemTheme {
|
||||
// macOS theme detection using osascript
|
||||
if let Ok(output) = Command::new("osascript")
|
||||
.args([
|
||||
"-e",
|
||||
"tell application \"System Events\" to tell appearance preferences to get dark mode",
|
||||
])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let result = String::from_utf8_lossy(&output.stdout).to_string();
|
||||
let result = result.trim();
|
||||
match result {
|
||||
"true" => {
|
||||
return SystemTheme {
|
||||
theme: "dark".to_string(),
|
||||
}
|
||||
}
|
||||
"false" => {
|
||||
return SystemTheme {
|
||||
theme: "light".to_string(),
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback method using defaults
|
||||
if let Ok(output) = Command::new("defaults")
|
||||
.args(["read", "-g", "AppleInterfaceStyle"])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let style = String::from_utf8_lossy(&output.stdout).to_string();
|
||||
let style = style.trim();
|
||||
if style.to_lowercase() == "dark" {
|
||||
return SystemTheme {
|
||||
theme: "dark".to_string(),
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Default to light if we can't determine
|
||||
SystemTheme {
|
||||
theme: "light".to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
mod windows {
|
||||
use super::*;
|
||||
|
||||
pub fn detect_system_theme() -> SystemTheme {
|
||||
// Windows theme detection via registry
|
||||
// This is a simplified implementation - you might want to use winreg crate for better registry access
|
||||
if let Ok(output) = Command::new("reg")
|
||||
.args([
|
||||
"query",
|
||||
"HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\Themes\\Personalize",
|
||||
"/v",
|
||||
"AppsUseLightTheme",
|
||||
])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let result = String::from_utf8_lossy(&output.stdout);
|
||||
if result.contains("0x0") {
|
||||
return SystemTheme {
|
||||
theme: "dark".to_string(),
|
||||
};
|
||||
} else if result.contains("0x1") {
|
||||
return SystemTheme {
|
||||
theme: "light".to_string(),
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Default to light if we can't determine
|
||||
SystemTheme {
|
||||
theme: "light".to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Command to expose this functionality to the frontend
|
||||
#[tauri::command]
|
||||
pub fn get_system_theme() -> SystemTheme {
|
||||
let detector = ThemeDetector::new();
|
||||
detector.detect_system_theme()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_theme_detector_creation() {
|
||||
let detector = ThemeDetector::new();
|
||||
let theme = detector.detect_system_theme();
|
||||
|
||||
// Should return a valid theme string
|
||||
assert!(matches!(theme.theme.as_str(), "light" | "dark" | "unknown"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_system_theme_command() {
|
||||
let theme = get_system_theme();
|
||||
assert!(matches!(theme.theme.as_str(), "light" | "dark" | "unknown"));
|
||||
}
|
||||
}
|
||||
+298
-123
@@ -1,4 +1,3 @@
|
||||
use crate::browser_version_service::BrowserVersionService;
|
||||
use directories::BaseDirs;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fs;
|
||||
@@ -8,7 +7,10 @@ use std::sync::OnceLock;
|
||||
use std::time::{Duration, SystemTime, UNIX_EPOCH};
|
||||
use tauri::Emitter;
|
||||
use tokio::sync::Mutex;
|
||||
use tokio::time::{interval, Interval};
|
||||
use tokio::time::interval;
|
||||
|
||||
use crate::auto_updater::AutoUpdater;
|
||||
use crate::browser_version_manager::BrowserVersionManager;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct VersionUpdateProgress {
|
||||
@@ -39,29 +41,31 @@ impl Default for BackgroundUpdateState {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
last_update_time: 0,
|
||||
update_interval_hours: 3,
|
||||
update_interval_hours: 12,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Extension of auto_updater.rs for background updates
|
||||
pub struct VersionUpdater {
|
||||
version_service: BrowserVersionService,
|
||||
app_handle: Arc<Mutex<Option<tauri::AppHandle>>>,
|
||||
update_interval: Interval,
|
||||
browser_version_manager: &'static BrowserVersionManager,
|
||||
auto_updater: &'static AutoUpdater,
|
||||
app_handle: Option<tauri::AppHandle>,
|
||||
}
|
||||
|
||||
impl VersionUpdater {
|
||||
pub fn new() -> Self {
|
||||
let mut update_interval = interval(Duration::from_secs(5 * 60)); // Check every 5 minutes
|
||||
update_interval.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip);
|
||||
|
||||
Self {
|
||||
version_service: BrowserVersionService::new(),
|
||||
app_handle: Arc::new(Mutex::new(None)),
|
||||
update_interval,
|
||||
browser_version_manager: BrowserVersionManager::instance(),
|
||||
auto_updater: AutoUpdater::instance(),
|
||||
app_handle: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_app_handle(&mut self, app_handle: tauri::AppHandle) {
|
||||
self.app_handle = Some(app_handle);
|
||||
}
|
||||
|
||||
fn get_cache_dir() -> Result<PathBuf, Box<dyn std::error::Error>> {
|
||||
let base_dirs = BaseDirs::new().ok_or("Failed to get base directories")?;
|
||||
let app_name = if cfg!(debug_assertions) {
|
||||
@@ -143,11 +147,6 @@ impl VersionUpdater {
|
||||
should_update
|
||||
}
|
||||
|
||||
pub async fn set_app_handle(&self, app_handle: tauri::AppHandle) {
|
||||
let mut handle = self.app_handle.lock().await;
|
||||
*handle = Some(app_handle);
|
||||
}
|
||||
|
||||
pub async fn check_and_run_startup_update(
|
||||
&self,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
@@ -157,15 +156,10 @@ impl VersionUpdater {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let app_handle = {
|
||||
let handle_guard = self.app_handle.lock().await;
|
||||
handle_guard.clone()
|
||||
};
|
||||
|
||||
if let Some(handle) = app_handle {
|
||||
if let Some(ref app_handle) = self.app_handle {
|
||||
println!("Running startup version update...");
|
||||
|
||||
match self.update_all_browser_versions(&handle).await {
|
||||
match self.update_all_browser_versions(app_handle).await {
|
||||
Ok(_) => {
|
||||
// Update the persistent state after successful update
|
||||
let state = BackgroundUpdateState {
|
||||
@@ -191,7 +185,9 @@ impl VersionUpdater {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn start_background_updates(&mut self) {
|
||||
pub async fn start_background_updates(
|
||||
&self,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
println!(
|
||||
"Starting background version update service (checking every 5 minutes for 3-hour intervals)"
|
||||
);
|
||||
@@ -201,41 +197,54 @@ impl VersionUpdater {
|
||||
eprintln!("Startup version update failed: {e}");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn run_background_task() {
|
||||
let mut update_interval = interval(Duration::from_secs(5 * 60)); // Check every 5 minutes
|
||||
update_interval.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip);
|
||||
|
||||
loop {
|
||||
self.update_interval.tick().await;
|
||||
update_interval.tick().await;
|
||||
|
||||
// Check if we should run an update based on persistent state
|
||||
if !Self::should_run_background_update() {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if we have an app handle
|
||||
let app_handle = {
|
||||
let handle_guard = self.app_handle.lock().await;
|
||||
handle_guard.clone()
|
||||
};
|
||||
println!("Starting background version update...");
|
||||
|
||||
if let Some(handle) = app_handle {
|
||||
println!("Starting background version update...");
|
||||
// Get the updater instance for this update cycle
|
||||
let updater = get_version_updater();
|
||||
let result = {
|
||||
let updater_guard = updater.lock().await;
|
||||
if let Some(ref app_handle) = updater_guard.app_handle {
|
||||
updater_guard.update_all_browser_versions(app_handle).await
|
||||
} else {
|
||||
Err("App handle not available for background update".into())
|
||||
}
|
||||
}; // Release the lock here
|
||||
|
||||
match self.update_all_browser_versions(&handle).await {
|
||||
Ok(_) => {
|
||||
// Update the persistent state after successful update
|
||||
let state = BackgroundUpdateState {
|
||||
last_update_time: Self::get_current_timestamp(),
|
||||
update_interval_hours: 3,
|
||||
};
|
||||
match result {
|
||||
Ok(_) => {
|
||||
// Update the persistent state after successful update
|
||||
let state = BackgroundUpdateState {
|
||||
last_update_time: Self::get_current_timestamp(),
|
||||
update_interval_hours: 3,
|
||||
};
|
||||
|
||||
if let Err(e) = Self::save_background_update_state(&state) {
|
||||
eprintln!("Failed to save background update state: {e}");
|
||||
} else {
|
||||
println!("Background version update completed successfully");
|
||||
}
|
||||
if let Err(e) = Self::save_background_update_state(&state) {
|
||||
eprintln!("Failed to save background update state: {e}");
|
||||
} else {
|
||||
println!("Background version update completed successfully");
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Background version update failed: {e}");
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Background version update failed: {e}");
|
||||
|
||||
// Emit error event
|
||||
// Try to emit error event if we have an app handle
|
||||
let updater_guard = updater.lock().await;
|
||||
if let Some(ref app_handle) = updater_guard.app_handle {
|
||||
let progress = VersionUpdateProgress {
|
||||
current_browser: "".to_string(),
|
||||
total_browsers: 0,
|
||||
@@ -244,11 +253,9 @@ impl VersionUpdater {
|
||||
browser_new_versions: 0,
|
||||
status: "error".to_string(),
|
||||
};
|
||||
let _ = handle.emit("version-update-progress", &progress);
|
||||
let _ = app_handle.emit("version-update-progress", &progress);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
println!("App handle not available, skipping background update");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -257,107 +264,108 @@ impl VersionUpdater {
|
||||
&self,
|
||||
app_handle: &tauri::AppHandle,
|
||||
) -> Result<Vec<BackgroundUpdateResult>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
println!("Starting background version update for all browsers");
|
||||
|
||||
let browsers = [
|
||||
"firefox",
|
||||
"firefox-developer",
|
||||
"mullvad-browser",
|
||||
"zen",
|
||||
"brave",
|
||||
"chromium",
|
||||
"tor-browser",
|
||||
];
|
||||
|
||||
let total_browsers = browsers.len();
|
||||
let supported_browsers = self.browser_version_manager.get_supported_browsers();
|
||||
let total_browsers = supported_browsers.len();
|
||||
let mut results = Vec::new();
|
||||
let mut total_new_versions = 0;
|
||||
|
||||
// Emit start event
|
||||
let progress = VersionUpdateProgress {
|
||||
current_browser: "".to_string(),
|
||||
// Emit initial progress
|
||||
let initial_progress = VersionUpdateProgress {
|
||||
current_browser: String::new(),
|
||||
total_browsers,
|
||||
completed_browsers: 0,
|
||||
new_versions_found: 0,
|
||||
browser_new_versions: 0,
|
||||
status: "updating".to_string(),
|
||||
};
|
||||
let _ = app_handle.emit("version-update-progress", &progress);
|
||||
|
||||
for (index, browser) in browsers.iter().enumerate() {
|
||||
// Check if individual browser cache is expired before updating
|
||||
if !self.version_service.should_update_cache(browser) {
|
||||
println!("Skipping {browser} - cache is still fresh");
|
||||
if let Err(e) = app_handle.emit("version-update-progress", &initial_progress) {
|
||||
eprintln!("Failed to emit initial progress: {e}");
|
||||
}
|
||||
|
||||
let browser_result = BackgroundUpdateResult {
|
||||
browser: browser.to_string(),
|
||||
new_versions_count: 0,
|
||||
total_versions_count: 0,
|
||||
updated_successfully: true,
|
||||
error: None,
|
||||
};
|
||||
results.push(browser_result);
|
||||
continue;
|
||||
}
|
||||
for (index, browser) in supported_browsers.iter().enumerate() {
|
||||
println!("Updating browser versions for: {browser}");
|
||||
|
||||
println!("Updating versions for browser: {browser}");
|
||||
|
||||
// Emit progress for current browser
|
||||
// Emit progress update for current browser
|
||||
let progress = VersionUpdateProgress {
|
||||
current_browser: browser.to_string(),
|
||||
current_browser: browser.clone(),
|
||||
total_browsers,
|
||||
completed_browsers: index,
|
||||
new_versions_found: total_new_versions,
|
||||
browser_new_versions: 0,
|
||||
status: "updating".to_string(),
|
||||
};
|
||||
let _ = app_handle.emit("version-update-progress", &progress);
|
||||
|
||||
let result = self.update_browser_versions(browser).await;
|
||||
if let Err(e) = app_handle.emit("version-update-progress", &progress) {
|
||||
eprintln!("Failed to emit progress for {browser}: {e}");
|
||||
}
|
||||
|
||||
match result {
|
||||
Ok(new_count) => {
|
||||
total_new_versions += new_count;
|
||||
let browser_result = BackgroundUpdateResult {
|
||||
browser: browser.to_string(),
|
||||
new_versions_count: new_count,
|
||||
total_versions_count: 0, // We'll update this if needed
|
||||
match self.update_browser_versions(browser).await {
|
||||
Ok(new_versions_count) => {
|
||||
results.push(BackgroundUpdateResult {
|
||||
browser: browser.clone(),
|
||||
new_versions_count,
|
||||
total_versions_count: 0, // We don't track total for background updates
|
||||
updated_successfully: true,
|
||||
error: None,
|
||||
};
|
||||
results.push(browser_result);
|
||||
});
|
||||
|
||||
println!("Found {new_count} new versions for {browser}");
|
||||
total_new_versions += new_versions_count;
|
||||
|
||||
// Emit progress update with new versions found
|
||||
let progress = VersionUpdateProgress {
|
||||
current_browser: browser.clone(),
|
||||
total_browsers,
|
||||
completed_browsers: index,
|
||||
new_versions_found: total_new_versions,
|
||||
browser_new_versions: new_versions_count,
|
||||
status: "updating".to_string(),
|
||||
};
|
||||
|
||||
if let Err(e) = app_handle.emit("version-update-progress", &progress) {
|
||||
eprintln!("Failed to emit progress with versions for {browser}: {e}");
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Failed to update versions for {browser}: {e}");
|
||||
let browser_result = BackgroundUpdateResult {
|
||||
browser: browser.to_string(),
|
||||
results.push(BackgroundUpdateResult {
|
||||
browser: browser.clone(),
|
||||
new_versions_count: 0,
|
||||
total_versions_count: 0,
|
||||
updated_successfully: false,
|
||||
error: Some(e.to_string()),
|
||||
};
|
||||
results.push(browser_result);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Small delay between browsers to avoid overwhelming APIs
|
||||
tokio::time::sleep(Duration::from_millis(500)).await;
|
||||
}
|
||||
|
||||
// Emit completion event
|
||||
let progress = VersionUpdateProgress {
|
||||
current_browser: "".to_string(),
|
||||
// Emit completion
|
||||
let final_progress = VersionUpdateProgress {
|
||||
current_browser: String::new(),
|
||||
total_browsers,
|
||||
completed_browsers: total_browsers,
|
||||
new_versions_found: total_new_versions,
|
||||
browser_new_versions: 0,
|
||||
status: "completed".to_string(),
|
||||
};
|
||||
let _ = app_handle.emit("version-update-progress", &progress);
|
||||
|
||||
println!("Background version update completed. Found {total_new_versions} new versions total");
|
||||
if let Err(e) = app_handle.emit("version-update-progress", &final_progress) {
|
||||
eprintln!("Failed to emit completion progress: {e}");
|
||||
}
|
||||
|
||||
// After all version updates are complete, trigger auto-update check
|
||||
if total_new_versions > 0 {
|
||||
println!(
|
||||
"Found {total_new_versions} new versions across all browsers. Checking for auto-updates..."
|
||||
);
|
||||
|
||||
// Trigger auto-update check which will automatically download browsers
|
||||
self
|
||||
.auto_updater
|
||||
.check_for_updates_with_progress(app_handle)
|
||||
.await;
|
||||
} else {
|
||||
println!("No new versions found, skipping auto-update check");
|
||||
}
|
||||
|
||||
Ok(results)
|
||||
}
|
||||
@@ -367,7 +375,7 @@ impl VersionUpdater {
|
||||
browser: &str,
|
||||
) -> Result<usize, Box<dyn std::error::Error + Send + Sync>> {
|
||||
self
|
||||
.version_service
|
||||
.browser_version_manager
|
||||
.update_browser_versions_incrementally(browser)
|
||||
.await
|
||||
}
|
||||
@@ -448,6 +456,63 @@ pub async fn get_version_update_status() -> Result<(Option<u64>, u64), String> {
|
||||
Ok((last_update, time_until_next))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn clear_all_version_cache_and_refetch(
|
||||
app_handle: tauri::AppHandle,
|
||||
) -> Result<(), String> {
|
||||
let api_client = crate::api_client::ApiClient::instance();
|
||||
let version_updater = VersionUpdater::new();
|
||||
|
||||
// Clear all cache first
|
||||
api_client
|
||||
.clear_all_cache()
|
||||
.map_err(|e| format!("Failed to clear version cache: {e}"))?;
|
||||
|
||||
// Disable all browsers during the update process
|
||||
let supported_browsers = version_updater
|
||||
.browser_version_manager
|
||||
.get_supported_browsers();
|
||||
|
||||
// Load current state and disable all browsers
|
||||
let mut state = version_updater
|
||||
.auto_updater
|
||||
.load_auto_update_state()
|
||||
.map_err(|e| format!("Failed to load auto update state: {e}"))?;
|
||||
for browser in &supported_browsers {
|
||||
state.disabled_browsers.insert(browser.clone());
|
||||
}
|
||||
version_updater
|
||||
.auto_updater
|
||||
.save_auto_update_state(&state)
|
||||
.map_err(|e| format!("Failed to save auto update state: {e}"))?;
|
||||
|
||||
let updater = get_version_updater();
|
||||
let updater_guard = updater.lock().await;
|
||||
|
||||
let result = updater_guard
|
||||
.trigger_manual_update(&app_handle)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to trigger version update: {e}"));
|
||||
|
||||
// Re-enable all browsers after the update completes (regardless of success/failure)
|
||||
let mut final_state = version_updater
|
||||
.auto_updater
|
||||
.load_auto_update_state()
|
||||
.unwrap_or_default();
|
||||
for browser in &supported_browsers {
|
||||
final_state.disabled_browsers.remove(browser);
|
||||
}
|
||||
if let Err(e) = version_updater
|
||||
.auto_updater
|
||||
.save_auto_update_state(&final_state)
|
||||
{
|
||||
eprintln!("Warning: Failed to re-enable browsers after cache clear: {e}");
|
||||
}
|
||||
|
||||
result?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
@@ -512,31 +577,141 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_should_run_background_update_logic() {
|
||||
// Note: This test uses the shared state file, so results may vary
|
||||
// depending on previous test runs. This is expected behavior.
|
||||
// Create isolated test states to avoid interference
|
||||
let current_time = VersionUpdater::get_current_timestamp();
|
||||
|
||||
// Test with recent update (should not update)
|
||||
let recent_state = BackgroundUpdateState {
|
||||
last_update_time: VersionUpdater::get_current_timestamp() - 60, // 1 minute ago
|
||||
last_update_time: current_time - 60, // 1 minute ago
|
||||
update_interval_hours: 3,
|
||||
};
|
||||
VersionUpdater::save_background_update_state(&recent_state).unwrap();
|
||||
assert!(!VersionUpdater::should_run_background_update());
|
||||
|
||||
// Save and test recent state
|
||||
let save_result = VersionUpdater::save_background_update_state(&recent_state);
|
||||
assert!(save_result.is_ok(), "Should save recent state successfully");
|
||||
|
||||
let should_update_recent = VersionUpdater::should_run_background_update();
|
||||
assert!(
|
||||
!should_update_recent,
|
||||
"Should not update when last update was recent"
|
||||
);
|
||||
|
||||
// Test with old update (should update)
|
||||
let old_state = BackgroundUpdateState {
|
||||
last_update_time: VersionUpdater::get_current_timestamp() - (4 * 60 * 60), // 4 hours ago
|
||||
last_update_time: current_time - (4 * 60 * 60), // 4 hours ago
|
||||
update_interval_hours: 3,
|
||||
};
|
||||
VersionUpdater::save_background_update_state(&old_state).unwrap();
|
||||
assert!(VersionUpdater::should_run_background_update());
|
||||
|
||||
// Save and test old state
|
||||
let save_result = VersionUpdater::save_background_update_state(&old_state);
|
||||
assert!(save_result.is_ok(), "Should save old state successfully");
|
||||
|
||||
let should_update_old = VersionUpdater::should_run_background_update();
|
||||
assert!(should_update_old, "Should update when last update was old");
|
||||
|
||||
// Test with never updated (should update)
|
||||
let never_updated_state = BackgroundUpdateState {
|
||||
last_update_time: 0,
|
||||
update_interval_hours: 3,
|
||||
};
|
||||
|
||||
let save_result = VersionUpdater::save_background_update_state(&never_updated_state);
|
||||
assert!(
|
||||
save_result.is_ok(),
|
||||
"Should save never updated state successfully"
|
||||
);
|
||||
|
||||
let should_update_never = VersionUpdater::should_run_background_update();
|
||||
assert!(
|
||||
should_update_never,
|
||||
"Should update when never updated before"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_cache_dir_creation() {
|
||||
// This should not panic and should create the directory if it doesn't exist
|
||||
let cache_dir = VersionUpdater::get_cache_dir().unwrap();
|
||||
assert!(cache_dir.exists());
|
||||
assert!(cache_dir.is_dir());
|
||||
let cache_dir_result = VersionUpdater::get_cache_dir();
|
||||
assert!(
|
||||
cache_dir_result.is_ok(),
|
||||
"Should successfully get cache directory"
|
||||
);
|
||||
|
||||
let cache_dir = cache_dir_result.unwrap();
|
||||
assert!(
|
||||
cache_dir.exists(),
|
||||
"Cache directory should exist after creation"
|
||||
);
|
||||
assert!(cache_dir.is_dir(), "Cache directory should be a directory");
|
||||
|
||||
// Verify the path contains expected components
|
||||
let path_str = cache_dir.to_string_lossy();
|
||||
assert!(
|
||||
path_str.contains("version_cache"),
|
||||
"Path should contain version_cache"
|
||||
);
|
||||
|
||||
// Test that calling it again returns the same directory
|
||||
let cache_dir2 = VersionUpdater::get_cache_dir().unwrap();
|
||||
assert_eq!(
|
||||
cache_dir, cache_dir2,
|
||||
"Multiple calls should return same directory"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_version_updater_creation() {
|
||||
let updater = VersionUpdater::new();
|
||||
|
||||
// Should have valid references to services
|
||||
assert!(
|
||||
!std::ptr::eq(
|
||||
updater.browser_version_manager as *const _,
|
||||
std::ptr::null()
|
||||
),
|
||||
"Version service should not be null"
|
||||
);
|
||||
assert!(
|
||||
!std::ptr::eq(updater.auto_updater as *const _, std::ptr::null()),
|
||||
"Auto updater should not be null"
|
||||
);
|
||||
assert!(
|
||||
updater.app_handle.is_none(),
|
||||
"App handle should initially be None"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_current_timestamp() {
|
||||
let timestamp1 = VersionUpdater::get_current_timestamp();
|
||||
|
||||
// Should be a reasonable timestamp (after year 2020)
|
||||
assert!(
|
||||
timestamp1 > 1577836800,
|
||||
"Timestamp should be after 2020-01-01"
|
||||
); // 2020-01-01 00:00:00 UTC
|
||||
|
||||
// Should be before year 2100
|
||||
assert!(
|
||||
timestamp1 < 4102444800,
|
||||
"Timestamp should be before 2100-01-01"
|
||||
); // 2100-01-01 00:00:00 UTC
|
||||
|
||||
// Wait a tiny bit and check it increases
|
||||
std::thread::sleep(std::time::Duration::from_millis(1));
|
||||
let timestamp2 = VersionUpdater::get_current_timestamp();
|
||||
assert!(timestamp2 >= timestamp1, "Timestamp should not decrease");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_version_updater_singleton() {
|
||||
let updater1 = get_version_updater();
|
||||
let updater2 = get_version_updater();
|
||||
|
||||
// Should return the same Arc instance
|
||||
assert!(
|
||||
Arc::ptr_eq(&updater1, &updater2),
|
||||
"Should return same singleton instance"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"$schema": "https://schema.tauri.app/config/2",
|
||||
"productName": "Donut Browser",
|
||||
"version": "0.4.0",
|
||||
"version": "0.12.2",
|
||||
"identifier": "com.donutbrowser",
|
||||
"build": {
|
||||
"beforeDevCommand": "pnpm dev",
|
||||
@@ -61,8 +61,9 @@
|
||||
},
|
||||
"plugins": {
|
||||
"deep-link": {
|
||||
"schemes": ["http", "https"],
|
||||
"domains": []
|
||||
"desktop": {
|
||||
"schemes": ["http", "https"]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1 @@
|
||||
Hello, World!
|
||||
Binary file not shown.
@@ -0,0 +1,133 @@
|
||||
use std::env;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
use std::time::Duration;
|
||||
|
||||
/// Utility functions for integration tests
|
||||
pub struct TestUtils;
|
||||
|
||||
impl TestUtils {
|
||||
/// Build the nodecar binary if it doesn't exist
|
||||
pub async fn ensure_nodecar_binary() -> Result<PathBuf, Box<dyn std::error::Error + Send + Sync>>
|
||||
{
|
||||
let cargo_manifest_dir = env::var("CARGO_MANIFEST_DIR")?;
|
||||
let project_root = PathBuf::from(cargo_manifest_dir)
|
||||
.parent()
|
||||
.unwrap()
|
||||
.to_path_buf();
|
||||
let nodecar_dir = project_root.join("nodecar");
|
||||
let nodecar_binary = nodecar_dir.join("nodecar-bin");
|
||||
|
||||
// Check if binary already exists
|
||||
if nodecar_binary.exists() {
|
||||
return Ok(nodecar_binary);
|
||||
}
|
||||
|
||||
println!("Building nodecar binary for integration tests...");
|
||||
|
||||
// Install dependencies
|
||||
let install_status = Command::new("pnpm")
|
||||
.args(["install", "--frozen-lockfile"])
|
||||
.current_dir(&nodecar_dir)
|
||||
.status()?;
|
||||
|
||||
if !install_status.success() {
|
||||
return Err("Failed to install nodecar dependencies".into());
|
||||
}
|
||||
|
||||
// Build the binary
|
||||
let build_status = Command::new("pnpm")
|
||||
.args(["run", "build"])
|
||||
.current_dir(&nodecar_dir)
|
||||
.status()?;
|
||||
|
||||
if !build_status.success() {
|
||||
return Err("Failed to build nodecar binary".into());
|
||||
}
|
||||
|
||||
if !nodecar_binary.exists() {
|
||||
return Err("Nodecar binary was not created successfully".into());
|
||||
}
|
||||
|
||||
Ok(nodecar_binary)
|
||||
}
|
||||
|
||||
/// Execute a nodecar command with timeout
|
||||
pub async fn execute_nodecar_command(
|
||||
binary_path: &PathBuf,
|
||||
args: &[&str],
|
||||
) -> Result<std::process::Output, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let mut cmd = Command::new(binary_path);
|
||||
cmd.args(args);
|
||||
|
||||
let output = tokio::process::Command::from(cmd).output().await?;
|
||||
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
/// Check if a port is available
|
||||
pub async fn is_port_available(port: u16) -> bool {
|
||||
tokio::net::TcpListener::bind(format!("127.0.0.1:{port}"))
|
||||
.await
|
||||
.is_ok()
|
||||
}
|
||||
|
||||
/// Wait for a port to become available or occupied
|
||||
pub async fn wait_for_port_state(port: u16, should_be_occupied: bool, timeout_secs: u64) -> bool {
|
||||
let start = std::time::Instant::now();
|
||||
|
||||
while start.elapsed().as_secs() < timeout_secs {
|
||||
let is_available = Self::is_port_available(port).await;
|
||||
|
||||
if should_be_occupied && !is_available {
|
||||
return true; // Port is occupied as expected
|
||||
} else if !should_be_occupied && is_available {
|
||||
return true; // Port is available as expected
|
||||
}
|
||||
|
||||
tokio::time::sleep(Duration::from_millis(100)).await;
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
/// Create a temporary directory for test files
|
||||
pub fn create_temp_dir() -> Result<tempfile::TempDir, Box<dyn std::error::Error + Send + Sync>> {
|
||||
Ok(tempfile::tempdir()?)
|
||||
}
|
||||
|
||||
/// Clean up specific nodecar processes by IDs (for targeted test cleanup)
|
||||
pub async fn cleanup_specific_processes(
|
||||
nodecar_path: &PathBuf,
|
||||
proxy_ids: &[String],
|
||||
camoufox_ids: &[String],
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
println!("Cleaning up specific test processes...");
|
||||
|
||||
// Stop specific proxies
|
||||
for proxy_id in proxy_ids {
|
||||
let stop_args = ["proxy", "stop", "--id", proxy_id];
|
||||
if let Ok(output) = Self::execute_nodecar_command(nodecar_path, &stop_args).await {
|
||||
if output.status.success() {
|
||||
println!("Stopped test proxy: {proxy_id}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Stop specific camoufox instances
|
||||
for camoufox_id in camoufox_ids {
|
||||
let stop_args = ["camoufox", "stop", "--id", camoufox_id];
|
||||
if let Ok(output) = Self::execute_nodecar_command(nodecar_path, &stop_args).await {
|
||||
if output.status.success() {
|
||||
println!("Stopped test camoufox instance: {camoufox_id}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Give processes time to clean up
|
||||
tokio::time::sleep(Duration::from_millis(500)).await;
|
||||
|
||||
println!("Test process cleanup completed");
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,999 @@
|
||||
mod common;
|
||||
use common::TestUtils;
|
||||
use serde_json::Value;
|
||||
|
||||
/// Setup function to ensure clean state before tests
|
||||
async fn setup_test() -> Result<std::path::PathBuf, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = TestUtils::ensure_nodecar_binary().await?;
|
||||
|
||||
// Only clean up test-specific processes, not all processes
|
||||
// This prevents interfering with actual app usage during testing
|
||||
println!("Setting up test environment...");
|
||||
|
||||
Ok(nodecar_path)
|
||||
}
|
||||
|
||||
/// Helper to track and cleanup specific test resources
|
||||
struct TestResourceTracker {
|
||||
proxy_ids: Vec<String>,
|
||||
camoufox_ids: Vec<String>,
|
||||
nodecar_path: std::path::PathBuf,
|
||||
}
|
||||
|
||||
impl TestResourceTracker {
|
||||
fn new(nodecar_path: std::path::PathBuf) -> Self {
|
||||
Self {
|
||||
proxy_ids: Vec::new(),
|
||||
camoufox_ids: Vec::new(),
|
||||
nodecar_path,
|
||||
}
|
||||
}
|
||||
|
||||
fn track_proxy(&mut self, proxy_id: String) {
|
||||
self.proxy_ids.push(proxy_id);
|
||||
}
|
||||
|
||||
fn track_camoufox(&mut self, camoufox_id: String) {
|
||||
self.camoufox_ids.push(camoufox_id);
|
||||
}
|
||||
|
||||
async fn cleanup_all(&self) {
|
||||
// Use targeted cleanup to only stop test-specific processes
|
||||
let _ = TestUtils::cleanup_specific_processes(
|
||||
&self.nodecar_path,
|
||||
&self.proxy_ids,
|
||||
&self.camoufox_ids,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for TestResourceTracker {
|
||||
fn drop(&mut self) {
|
||||
// Ensure cleanup happens even if test panics
|
||||
let proxy_ids = self.proxy_ids.clone();
|
||||
let camoufox_ids = self.camoufox_ids.clone();
|
||||
let nodecar_path = self.nodecar_path.clone();
|
||||
|
||||
tokio::spawn(async move {
|
||||
let _ = TestUtils::cleanup_specific_processes(&nodecar_path, &proxy_ids, &camoufox_ids).await;
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/// Integration tests for nodecar proxy functionality
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_proxy_lifecycle() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = setup_test().await?;
|
||||
let mut tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
// Test proxy start with a known working upstream
|
||||
let args = [
|
||||
"proxy",
|
||||
"start",
|
||||
"--host",
|
||||
"httpbin.org",
|
||||
"--proxy-port",
|
||||
"80",
|
||||
"--type",
|
||||
"http",
|
||||
];
|
||||
|
||||
println!("Starting proxy with nodecar...");
|
||||
let output = TestUtils::execute_nodecar_command(&nodecar_path, &args).await?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
tracker.cleanup_all().await;
|
||||
return Err(format!("Proxy start failed - stdout: {stdout}, stderr: {stderr}").into());
|
||||
}
|
||||
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
let config: Value = serde_json::from_str(&stdout)?;
|
||||
|
||||
// Verify proxy configuration structure
|
||||
assert!(config["id"].is_string(), "Proxy ID should be a string");
|
||||
assert!(
|
||||
config["localPort"].is_number(),
|
||||
"Local port should be a number"
|
||||
);
|
||||
assert!(
|
||||
config["localUrl"].is_string(),
|
||||
"Local URL should be a string"
|
||||
);
|
||||
|
||||
let proxy_id = config["id"].as_str().unwrap().to_string();
|
||||
let local_port = config["localPort"].as_u64().unwrap() as u16;
|
||||
tracker.track_proxy(proxy_id.clone());
|
||||
|
||||
println!("Proxy started with ID: {proxy_id} on port: {local_port}");
|
||||
|
||||
// Wait for the proxy to start listening
|
||||
let is_listening = TestUtils::wait_for_port_state(local_port, true, 10).await;
|
||||
assert!(
|
||||
is_listening,
|
||||
"Proxy should be listening on the assigned port"
|
||||
);
|
||||
|
||||
// Test stopping the proxy
|
||||
let stop_args = ["proxy", "stop", "--id", &proxy_id];
|
||||
let stop_output = TestUtils::execute_nodecar_command(&nodecar_path, &stop_args).await?;
|
||||
|
||||
assert!(stop_output.status.success(), "Proxy stop should succeed");
|
||||
|
||||
let port_available = TestUtils::wait_for_port_state(local_port, false, 5).await;
|
||||
assert!(
|
||||
port_available,
|
||||
"Port should be available after stopping proxy"
|
||||
);
|
||||
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test proxy with authentication
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_proxy_with_auth() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = setup_test().await?;
|
||||
let mut tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
let args = [
|
||||
"proxy",
|
||||
"start",
|
||||
"--host",
|
||||
"httpbin.org",
|
||||
"--proxy-port",
|
||||
"80",
|
||||
"--type",
|
||||
"http",
|
||||
"--username",
|
||||
"testuser",
|
||||
"--password",
|
||||
"testpass",
|
||||
];
|
||||
|
||||
let output = TestUtils::execute_nodecar_command(&nodecar_path, &args).await?;
|
||||
|
||||
if output.status.success() {
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
let config: Value = serde_json::from_str(&stdout)?;
|
||||
|
||||
let proxy_id = config["id"].as_str().unwrap().to_string();
|
||||
tracker.track_proxy(proxy_id.clone());
|
||||
|
||||
// Verify upstream URL contains encoded credentials
|
||||
if let Some(upstream_url) = config["upstreamUrl"].as_str() {
|
||||
assert!(
|
||||
upstream_url.contains("testuser"),
|
||||
"Upstream URL should contain username"
|
||||
);
|
||||
// Password might be encoded, so we check for the presence of auth info
|
||||
assert!(
|
||||
upstream_url.contains("@"),
|
||||
"Upstream URL should contain auth separator"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test proxy list functionality
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_proxy_list() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = setup_test().await?;
|
||||
let mut tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
// Start a proxy first
|
||||
let start_args = [
|
||||
"proxy",
|
||||
"start",
|
||||
"--host",
|
||||
"httpbin.org",
|
||||
"--proxy-port",
|
||||
"80",
|
||||
"--type",
|
||||
"http",
|
||||
];
|
||||
|
||||
let start_output = TestUtils::execute_nodecar_command(&nodecar_path, &start_args).await?;
|
||||
|
||||
if start_output.status.success() {
|
||||
let stdout = String::from_utf8(start_output.stdout)?;
|
||||
let config: Value = serde_json::from_str(&stdout)?;
|
||||
let proxy_id = config["id"].as_str().unwrap().to_string();
|
||||
tracker.track_proxy(proxy_id.clone());
|
||||
|
||||
// Test list command
|
||||
let list_args = ["proxy", "list"];
|
||||
let list_output = TestUtils::execute_nodecar_command(&nodecar_path, &list_args).await?;
|
||||
|
||||
assert!(list_output.status.success(), "Proxy list should succeed");
|
||||
|
||||
let list_stdout = String::from_utf8(list_output.stdout)?;
|
||||
let proxy_list: Value = serde_json::from_str(&list_stdout)?;
|
||||
|
||||
assert!(proxy_list.is_array(), "Proxy list should be an array");
|
||||
|
||||
let proxies = proxy_list.as_array().unwrap();
|
||||
assert!(
|
||||
!proxies.is_empty(),
|
||||
"Should have at least one proxy in the list"
|
||||
);
|
||||
|
||||
// Find our proxy in the list
|
||||
let found_proxy = proxies.iter().find(|p| p["id"].as_str() == Some(&proxy_id));
|
||||
assert!(found_proxy.is_some(), "Started proxy should be in the list");
|
||||
}
|
||||
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test Camoufox functionality
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_camoufox_lifecycle() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = setup_test().await?;
|
||||
let mut tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
let temp_dir = TestUtils::create_temp_dir()?;
|
||||
let profile_path = temp_dir.path().join("test_profile");
|
||||
|
||||
let args = [
|
||||
"camoufox",
|
||||
"start",
|
||||
"--profile-path",
|
||||
profile_path.to_str().unwrap(),
|
||||
"--headless",
|
||||
];
|
||||
|
||||
println!("Starting Camoufox with nodecar...");
|
||||
let output = TestUtils::execute_nodecar_command(&nodecar_path, &args).await?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
|
||||
// If Camoufox is not installed or times out, skip the test
|
||||
if stderr.contains("not installed")
|
||||
|| stderr.contains("not found")
|
||||
|| stderr.contains("timeout")
|
||||
|| stdout.contains("timeout")
|
||||
{
|
||||
println!("Skipping Camoufox test - Camoufox not available or timed out");
|
||||
tracker.cleanup_all().await;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
tracker.cleanup_all().await;
|
||||
return Err(format!("Camoufox start failed - stdout: {stdout}, stderr: {stderr}").into());
|
||||
}
|
||||
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
let config: Value = serde_json::from_str(&stdout)?;
|
||||
|
||||
// Verify Camoufox configuration structure
|
||||
assert!(config["id"].is_string(), "Camoufox ID should be a string");
|
||||
|
||||
let camoufox_id = config["id"].as_str().unwrap().to_string();
|
||||
tracker.track_camoufox(camoufox_id.clone());
|
||||
println!("Camoufox started with ID: {camoufox_id}");
|
||||
|
||||
// Test stopping Camoufox
|
||||
let stop_args = ["camoufox", "stop", "--id", &camoufox_id];
|
||||
let stop_output = TestUtils::execute_nodecar_command(&nodecar_path, &stop_args).await?;
|
||||
|
||||
assert!(stop_output.status.success(), "Camoufox stop should succeed");
|
||||
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test Camoufox with URL opening
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_camoufox_with_url() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = setup_test().await?;
|
||||
let mut tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
let temp_dir = TestUtils::create_temp_dir()?;
|
||||
let profile_path = temp_dir.path().join("test_profile_url");
|
||||
|
||||
let args = [
|
||||
"camoufox",
|
||||
"start",
|
||||
"--profile-path",
|
||||
profile_path.to_str().unwrap(),
|
||||
"--url",
|
||||
"https://httpbin.org/get",
|
||||
"--headless",
|
||||
];
|
||||
|
||||
let output = TestUtils::execute_nodecar_command(&nodecar_path, &args).await?;
|
||||
|
||||
if output.status.success() {
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
let config: Value = serde_json::from_str(&stdout)?;
|
||||
|
||||
let camoufox_id = config["id"].as_str().unwrap().to_string();
|
||||
tracker.track_camoufox(camoufox_id.clone());
|
||||
|
||||
// Verify URL is set
|
||||
if let Some(url) = config["url"].as_str() {
|
||||
assert_eq!(
|
||||
url, "https://httpbin.org/get",
|
||||
"URL should match what was provided"
|
||||
);
|
||||
}
|
||||
|
||||
// Test stopping Camoufox explicitly
|
||||
let stop_args = ["camoufox", "stop", "--id", &camoufox_id];
|
||||
let stop_output = TestUtils::execute_nodecar_command(&nodecar_path, &stop_args).await?;
|
||||
assert!(stop_output.status.success(), "Camoufox stop should succeed");
|
||||
} else {
|
||||
println!("Skipping Camoufox URL test - likely not installed");
|
||||
tracker.cleanup_all().await;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test Camoufox list functionality
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_camoufox_list() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = setup_test().await?;
|
||||
let tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
// Test list command (should work even without Camoufox installed)
|
||||
let list_args = ["camoufox", "list"];
|
||||
let list_output = TestUtils::execute_nodecar_command(&nodecar_path, &list_args).await?;
|
||||
|
||||
assert!(list_output.status.success(), "Camoufox list should succeed");
|
||||
|
||||
let list_stdout = String::from_utf8(list_output.stdout)?;
|
||||
let camoufox_list: Value = serde_json::from_str(&list_stdout)?;
|
||||
|
||||
assert!(camoufox_list.is_array(), "Camoufox list should be an array");
|
||||
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test Camoufox process tracking and management
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_camoufox_process_tracking(
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = setup_test().await?;
|
||||
let mut tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
let temp_dir = TestUtils::create_temp_dir()?;
|
||||
let profile_path = temp_dir.path().join("test_profile_tracking");
|
||||
|
||||
// Start multiple Camoufox instances
|
||||
let mut instance_ids: Vec<String> = Vec::new();
|
||||
|
||||
for i in 0..2 {
|
||||
let instance_profile_path = format!("{}_instance_{}", profile_path.to_str().unwrap(), i);
|
||||
let args = [
|
||||
"camoufox",
|
||||
"start",
|
||||
"--profile-path",
|
||||
&instance_profile_path,
|
||||
"--headless",
|
||||
];
|
||||
|
||||
println!("Starting Camoufox instance {i}...");
|
||||
let output = TestUtils::execute_nodecar_command(&nodecar_path, &args).await?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
|
||||
// If Camoufox is not installed, skip the test
|
||||
if stderr.contains("not installed") || stderr.contains("not found") {
|
||||
println!("Skipping Camoufox process tracking test - Camoufox not installed");
|
||||
tracker.cleanup_all().await;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
tracker.cleanup_all().await;
|
||||
return Err(
|
||||
format!("Camoufox instance {i} start failed - stdout: {stdout}, stderr: {stderr}").into(),
|
||||
);
|
||||
}
|
||||
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
let config: Value = serde_json::from_str(&stdout)?;
|
||||
|
||||
let camoufox_id = config["id"].as_str().unwrap().to_string();
|
||||
instance_ids.push(camoufox_id.clone());
|
||||
tracker.track_camoufox(camoufox_id.clone());
|
||||
println!("Camoufox instance {i} started with ID: {camoufox_id}");
|
||||
}
|
||||
|
||||
// Verify all instances are tracked
|
||||
let list_args = ["camoufox", "list"];
|
||||
let list_output = TestUtils::execute_nodecar_command(&nodecar_path, &list_args).await?;
|
||||
|
||||
assert!(list_output.status.success(), "Camoufox list should succeed");
|
||||
|
||||
let list_stdout = String::from_utf8(list_output.stdout)?;
|
||||
println!("Camoufox list output: {list_stdout}");
|
||||
let instances: Value = serde_json::from_str(&list_stdout)?;
|
||||
|
||||
let instances_array = instances.as_array().unwrap();
|
||||
println!("Found {} instances in list", instances_array.len());
|
||||
|
||||
// Verify our instances are in the list
|
||||
for instance_id in &instance_ids {
|
||||
let instance_found = instances_array
|
||||
.iter()
|
||||
.any(|i| i["id"].as_str() == Some(instance_id));
|
||||
if !instance_found {
|
||||
println!("Instance {instance_id} not found in list. Available instances:");
|
||||
for instance in instances_array {
|
||||
if let Some(id) = instance["id"].as_str() {
|
||||
println!(" - {id}");
|
||||
}
|
||||
}
|
||||
}
|
||||
assert!(
|
||||
instance_found,
|
||||
"Camoufox instance {instance_id} should be found in list"
|
||||
);
|
||||
}
|
||||
|
||||
// Stop all instances individually
|
||||
for instance_id in &instance_ids {
|
||||
println!("Stopping Camoufox instance: {instance_id}");
|
||||
let stop_args = ["camoufox", "stop", "--id", instance_id];
|
||||
let stop_output = TestUtils::execute_nodecar_command(&nodecar_path, &stop_args).await?;
|
||||
|
||||
if stop_output.status.success() {
|
||||
let stop_stdout = String::from_utf8(stop_output.stdout)?;
|
||||
if let Ok(stop_result) = serde_json::from_str::<Value>(&stop_stdout) {
|
||||
let success = stop_result["success"].as_bool().unwrap_or(false);
|
||||
if !success {
|
||||
println!("Warning: Stop command returned success=false for instance {instance_id}");
|
||||
}
|
||||
} else {
|
||||
println!("Warning: Could not parse stop result for instance {instance_id}");
|
||||
}
|
||||
} else {
|
||||
println!("Warning: Stop command failed for instance {instance_id}");
|
||||
}
|
||||
}
|
||||
|
||||
// Verify all instances are removed
|
||||
let list_output_after = TestUtils::execute_nodecar_command(&nodecar_path, &list_args).await?;
|
||||
|
||||
let instances_after: Value = serde_json::from_str(&String::from_utf8(list_output_after.stdout)?)?;
|
||||
let instances_after_array = instances_after.as_array().unwrap();
|
||||
|
||||
for instance_id in &instance_ids {
|
||||
let instance_still_exists = instances_after_array
|
||||
.iter()
|
||||
.any(|i| i["id"].as_str() == Some(instance_id));
|
||||
assert!(
|
||||
!instance_still_exists,
|
||||
"Stopped Camoufox instance {instance_id} should not be found in list"
|
||||
);
|
||||
}
|
||||
|
||||
println!("Camoufox process tracking test completed successfully");
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test Camoufox with various configuration options
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_camoufox_configuration_options(
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = setup_test().await?;
|
||||
let mut tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
let temp_dir = TestUtils::create_temp_dir()?;
|
||||
let profile_path = temp_dir.path().join("test_profile_config");
|
||||
|
||||
let args = [
|
||||
"camoufox",
|
||||
"start",
|
||||
"--profile-path",
|
||||
profile_path.to_str().unwrap(),
|
||||
"--block-images",
|
||||
"--max-width",
|
||||
"1920",
|
||||
"--max-height",
|
||||
"1080",
|
||||
"--headless",
|
||||
];
|
||||
|
||||
println!("Starting Camoufox with configuration options...");
|
||||
let output = TestUtils::execute_nodecar_command(&nodecar_path, &args).await?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
|
||||
// If Camoufox is not installed, skip the test
|
||||
if stderr.contains("not installed") || stderr.contains("not found") {
|
||||
println!("Skipping Camoufox configuration test - Camoufox not installed");
|
||||
tracker.cleanup_all().await;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
tracker.cleanup_all().await;
|
||||
return Err(
|
||||
format!("Camoufox with config start failed - stdout: {stdout}, stderr: {stderr}").into(),
|
||||
);
|
||||
}
|
||||
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
let config: Value = serde_json::from_str(&stdout)?;
|
||||
|
||||
let camoufox_id = config["id"].as_str().unwrap().to_string();
|
||||
tracker.track_camoufox(camoufox_id.clone());
|
||||
println!("Camoufox with configuration started with ID: {camoufox_id}");
|
||||
|
||||
// Verify configuration was applied by checking the profile path
|
||||
if let Some(returned_profile_path) = config["profilePath"].as_str() {
|
||||
assert!(
|
||||
returned_profile_path.contains("test_profile_config"),
|
||||
"Profile path should match what was provided"
|
||||
);
|
||||
}
|
||||
|
||||
// Test stopping Camoufox explicitly
|
||||
let stop_args = ["camoufox", "stop", "--id", &camoufox_id];
|
||||
let stop_output = TestUtils::execute_nodecar_command(&nodecar_path, &stop_args).await?;
|
||||
|
||||
assert!(stop_output.status.success(), "Camoufox stop should succeed");
|
||||
|
||||
println!("Camoufox configuration test completed successfully");
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test Camoufox generate-config command with basic options
|
||||
#[ignore = "CI is rate limited for camoufox download"]
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_camoufox_generate_config_basic(
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = setup_test().await?;
|
||||
let tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
let args = [
|
||||
"camoufox",
|
||||
"generate-config",
|
||||
"--max-width",
|
||||
"1920",
|
||||
"--max-height",
|
||||
"1080",
|
||||
"--block-images",
|
||||
];
|
||||
|
||||
println!("Testing Camoufox config generation with basic options...");
|
||||
let output = TestUtils::execute_nodecar_command(&nodecar_path, &args).await?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
|
||||
tracker.cleanup_all().await;
|
||||
return Err(
|
||||
format!("Camoufox generate-config failed - stdout: {stdout}, stderr: {stderr}").into(),
|
||||
);
|
||||
}
|
||||
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
println!("Generated config output: {stdout}");
|
||||
|
||||
// Parse the generated config as JSON
|
||||
let config: Value = serde_json::from_str(&stdout)?;
|
||||
|
||||
// Verify the config contains expected properties
|
||||
assert!(
|
||||
config.is_object(),
|
||||
"Generated config should be a JSON object"
|
||||
);
|
||||
|
||||
// Check for some expected fingerprint properties
|
||||
assert!(
|
||||
config.get("screen.width").is_some(),
|
||||
"Config should contain screen.width"
|
||||
);
|
||||
assert!(
|
||||
config.get("screen.height").is_some(),
|
||||
"Config should contain screen.height"
|
||||
);
|
||||
assert!(
|
||||
config.get("navigator.userAgent").is_some(),
|
||||
"Config should contain navigator.userAgent"
|
||||
);
|
||||
|
||||
println!("Camoufox generate-config basic test completed successfully");
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test Camoufox generate-config command with custom fingerprint
|
||||
#[ignore = "CI is rate limited for camoufox download"]
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_camoufox_generate_config_custom_fingerprint(
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = setup_test().await?;
|
||||
let tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
// Create a custom fingerprint JSON
|
||||
let custom_fingerprint = r#"{
|
||||
"screen.width": 1440,
|
||||
"screen.height": 900,
|
||||
"navigator.userAgent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:135.0) Gecko/20100101 Firefox/140.0",
|
||||
"navigator.platform": "TestPlatform",
|
||||
"timezone": "America/New_York",
|
||||
"locale:language": "en",
|
||||
"locale:region": "US"
|
||||
}"#;
|
||||
|
||||
let args = [
|
||||
"camoufox",
|
||||
"generate-config",
|
||||
"--fingerprint",
|
||||
custom_fingerprint,
|
||||
"--block-webrtc",
|
||||
];
|
||||
|
||||
println!("Testing Camoufox config generation with custom fingerprint...");
|
||||
let output = TestUtils::execute_nodecar_command(&nodecar_path, &args).await?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
|
||||
tracker.cleanup_all().await;
|
||||
return Err(
|
||||
format!("Camoufox generate-config with custom fingerprint failed - stdout: {stdout}, stderr: {stderr}").into(),
|
||||
);
|
||||
}
|
||||
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
|
||||
// Parse the generated config as JSON
|
||||
let config: Value = serde_json::from_str(&stdout)?;
|
||||
|
||||
// Verify the config contains expected properties
|
||||
assert!(
|
||||
config.is_object(),
|
||||
"Generated config should be a JSON object"
|
||||
);
|
||||
|
||||
// Check that our custom values are preserved
|
||||
assert_eq!(
|
||||
config.get("screen.width").and_then(|v| v.as_u64()),
|
||||
Some(1440),
|
||||
"Custom screen width should be preserved"
|
||||
);
|
||||
assert_eq!(
|
||||
config.get("screen.height").and_then(|v| v.as_u64()),
|
||||
Some(900),
|
||||
"Custom screen height should be preserved"
|
||||
);
|
||||
assert_eq!(
|
||||
config.get("navigator.userAgent").and_then(|v| v.as_str()),
|
||||
Some("Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:135.0) Gecko/20100101 Firefox/140.0"),
|
||||
"Custom user agent should be preserved"
|
||||
);
|
||||
assert_eq!(
|
||||
config.get("timezone").and_then(|v| v.as_str()),
|
||||
Some("America/New_York"),
|
||||
"Custom timezone should be preserved"
|
||||
);
|
||||
|
||||
println!("Camoufox generate-config custom fingerprint test completed successfully");
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test nodecar command validation
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_command_validation() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = setup_test().await?;
|
||||
let tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
// Test invalid command
|
||||
let invalid_args = ["invalid", "command"];
|
||||
let output = TestUtils::execute_nodecar_command(&nodecar_path, &invalid_args).await?;
|
||||
|
||||
assert!(!output.status.success(), "Invalid command should fail");
|
||||
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test concurrent proxy operations
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_concurrent_proxies() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = setup_test().await?;
|
||||
let mut tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
// Start multiple proxies concurrently
|
||||
let mut handles = vec![];
|
||||
|
||||
for i in 0..3 {
|
||||
let nodecar_path_clone = nodecar_path.clone();
|
||||
let handle = tokio::spawn(async move {
|
||||
let args = [
|
||||
"proxy",
|
||||
"start",
|
||||
"--host",
|
||||
"httpbin.org",
|
||||
"--proxy-port",
|
||||
"80",
|
||||
"--type",
|
||||
"http",
|
||||
];
|
||||
|
||||
TestUtils::execute_nodecar_command(&nodecar_path_clone, &args).await
|
||||
});
|
||||
handles.push((i, handle));
|
||||
}
|
||||
|
||||
// Wait for all proxies to start
|
||||
for (i, handle) in handles {
|
||||
match handle.await.map_err(|e| format!("Join error: {e}"))? {
|
||||
Ok(output) if output.status.success() => {
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
let config: Value = serde_json::from_str(&stdout)?;
|
||||
let proxy_id = config["id"].as_str().unwrap().to_string();
|
||||
tracker.track_proxy(proxy_id.clone());
|
||||
println!("Proxy {i} started successfully");
|
||||
}
|
||||
Ok(output) => {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
println!("Proxy {i} failed to start: {stderr}");
|
||||
}
|
||||
Err(e) => {
|
||||
println!("Proxy {i} error: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test proxy with different upstream types
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_proxy_types() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = setup_test().await?;
|
||||
let mut tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
let test_cases = vec![
|
||||
("http", "httpbin.org", "80"),
|
||||
("https", "httpbin.org", "443"),
|
||||
];
|
||||
|
||||
for (proxy_type, host, port) in test_cases {
|
||||
println!("Testing {proxy_type} proxy to {host}:{port}");
|
||||
|
||||
let args = [
|
||||
"proxy",
|
||||
"start",
|
||||
"--host",
|
||||
host,
|
||||
"--proxy-port",
|
||||
port,
|
||||
"--type",
|
||||
proxy_type,
|
||||
];
|
||||
|
||||
let output = TestUtils::execute_nodecar_command(&nodecar_path, &args).await?;
|
||||
|
||||
if output.status.success() {
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
let config: Value = serde_json::from_str(&stdout)?;
|
||||
let proxy_id = config["id"].as_str().unwrap().to_string();
|
||||
tracker.track_proxy(proxy_id.clone());
|
||||
|
||||
println!("{proxy_type} proxy test passed");
|
||||
} else {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
println!("{proxy_type} proxy test failed: {stderr}");
|
||||
}
|
||||
}
|
||||
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test direct proxy (no upstream) functionality
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_direct_proxy() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = setup_test().await?;
|
||||
let mut tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
// Test starting a direct proxy (no upstream)
|
||||
let args = ["proxy", "start"];
|
||||
|
||||
println!("Starting direct proxy with nodecar...");
|
||||
let output = TestUtils::execute_nodecar_command(&nodecar_path, &args).await?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
tracker.cleanup_all().await;
|
||||
return Err(format!("Direct proxy start failed - stdout: {stdout}, stderr: {stderr}").into());
|
||||
}
|
||||
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
let config: Value = serde_json::from_str(&stdout)?;
|
||||
|
||||
// Verify proxy configuration structure
|
||||
assert!(config["id"].is_string(), "Proxy ID should be a string");
|
||||
assert!(
|
||||
config["localPort"].is_number(),
|
||||
"Local port should be a number"
|
||||
);
|
||||
assert!(
|
||||
config["localUrl"].is_string(),
|
||||
"Local URL should be a string"
|
||||
);
|
||||
assert_eq!(
|
||||
config["upstreamUrl"].as_str().unwrap(),
|
||||
"DIRECT",
|
||||
"Upstream URL should be DIRECT"
|
||||
);
|
||||
|
||||
let proxy_id = config["id"].as_str().unwrap().to_string();
|
||||
let local_port = config["localPort"].as_u64().unwrap() as u16;
|
||||
tracker.track_proxy(proxy_id.clone());
|
||||
|
||||
println!("Direct proxy started with ID: {proxy_id} on port: {local_port}");
|
||||
|
||||
// Wait for the proxy to start listening
|
||||
let is_listening = TestUtils::wait_for_port_state(local_port, true, 10).await;
|
||||
assert!(
|
||||
is_listening,
|
||||
"Direct proxy should be listening on the assigned port"
|
||||
);
|
||||
|
||||
// Test stopping the proxy
|
||||
let stop_args = ["proxy", "stop", "--id", &proxy_id];
|
||||
let stop_output = TestUtils::execute_nodecar_command(&nodecar_path, &stop_args).await?;
|
||||
|
||||
assert!(
|
||||
stop_output.status.success(),
|
||||
"Direct proxy stop should succeed"
|
||||
);
|
||||
|
||||
let port_available = TestUtils::wait_for_port_state(local_port, false, 5).await;
|
||||
assert!(
|
||||
port_available,
|
||||
"Port should be available after stopping direct proxy"
|
||||
);
|
||||
|
||||
println!("Direct proxy test completed successfully");
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test SOCKS5 proxy chaining - create two proxies where the second uses the first as upstream
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_socks5_proxy_chaining() -> Result<(), Box<dyn std::error::Error + Send + Sync>>
|
||||
{
|
||||
let nodecar_path = setup_test().await?;
|
||||
let mut tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
// Step 1: Start a SOCKS5 proxy with a known working upstream (httpbin.org)
|
||||
let socks5_args = [
|
||||
"proxy",
|
||||
"start",
|
||||
"--host",
|
||||
"httpbin.org",
|
||||
"--proxy-port",
|
||||
"80",
|
||||
"--type",
|
||||
"http", // Use HTTP upstream for the first proxy
|
||||
];
|
||||
|
||||
println!("Starting first proxy with HTTP upstream...");
|
||||
let socks5_output = TestUtils::execute_nodecar_command(&nodecar_path, &socks5_args).await?;
|
||||
|
||||
if !socks5_output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&socks5_output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&socks5_output.stdout);
|
||||
tracker.cleanup_all().await;
|
||||
return Err(format!("First proxy start failed - stdout: {stdout}, stderr: {stderr}").into());
|
||||
}
|
||||
|
||||
let socks5_stdout = String::from_utf8(socks5_output.stdout)?;
|
||||
let socks5_config: Value = serde_json::from_str(&socks5_stdout)?;
|
||||
|
||||
let socks5_proxy_id = socks5_config["id"].as_str().unwrap().to_string();
|
||||
let socks5_local_port = socks5_config["localPort"].as_u64().unwrap() as u16;
|
||||
tracker.track_proxy(socks5_proxy_id.clone());
|
||||
|
||||
println!("First proxy started with ID: {socks5_proxy_id} on port: {socks5_local_port}");
|
||||
|
||||
// Step 2: Start a second proxy that uses the first proxy as upstream
|
||||
let http_proxy_args = [
|
||||
"proxy",
|
||||
"start",
|
||||
"--upstream",
|
||||
&format!("http://127.0.0.1:{socks5_local_port}"),
|
||||
];
|
||||
|
||||
println!("Starting second proxy with first proxy as upstream...");
|
||||
let http_output = TestUtils::execute_nodecar_command(&nodecar_path, &http_proxy_args).await?;
|
||||
|
||||
if !http_output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&http_output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&http_output.stdout);
|
||||
tracker.cleanup_all().await;
|
||||
return Err(
|
||||
format!("Second proxy with chained upstream failed - stdout: {stdout}, stderr: {stderr}")
|
||||
.into(),
|
||||
);
|
||||
}
|
||||
|
||||
let http_stdout = String::from_utf8(http_output.stdout)?;
|
||||
let http_config: Value = serde_json::from_str(&http_stdout)?;
|
||||
|
||||
let http_proxy_id = http_config["id"].as_str().unwrap().to_string();
|
||||
let http_local_port = http_config["localPort"].as_u64().unwrap() as u16;
|
||||
tracker.track_proxy(http_proxy_id.clone());
|
||||
|
||||
println!(
|
||||
"Second proxy started with ID: {http_proxy_id} on port: {http_local_port} (chained through first proxy)"
|
||||
);
|
||||
|
||||
// Verify both proxies are listening by waiting for them to be occupied
|
||||
let socks5_listening = TestUtils::wait_for_port_state(socks5_local_port, true, 5).await;
|
||||
let http_listening = TestUtils::wait_for_port_state(http_local_port, true, 5).await;
|
||||
|
||||
assert!(
|
||||
socks5_listening,
|
||||
"First proxy should be listening on port {socks5_local_port}"
|
||||
);
|
||||
assert!(
|
||||
http_listening,
|
||||
"Second proxy should be listening on port {http_local_port}"
|
||||
);
|
||||
|
||||
// Clean up both proxies
|
||||
let stop_http_args = ["proxy", "stop", "--id", &http_proxy_id];
|
||||
let stop_socks5_args = ["proxy", "stop", "--id", &socks5_proxy_id];
|
||||
|
||||
let http_stop_result = TestUtils::execute_nodecar_command(&nodecar_path, &stop_http_args).await;
|
||||
let socks5_stop_result =
|
||||
TestUtils::execute_nodecar_command(&nodecar_path, &stop_socks5_args).await;
|
||||
|
||||
// Verify cleanup
|
||||
assert!(
|
||||
http_stop_result.is_ok() && http_stop_result.unwrap().status.success(),
|
||||
"Second proxy stop should succeed"
|
||||
);
|
||||
assert!(
|
||||
socks5_stop_result.is_ok() && socks5_stop_result.unwrap().status.success(),
|
||||
"First proxy stop should succeed"
|
||||
);
|
||||
|
||||
let http_port_available = TestUtils::wait_for_port_state(http_local_port, false, 5).await;
|
||||
let socks5_port_available = TestUtils::wait_for_port_state(socks5_local_port, false, 5).await;
|
||||
|
||||
assert!(
|
||||
http_port_available,
|
||||
"Second proxy port should be available after stopping"
|
||||
);
|
||||
assert!(
|
||||
socks5_port_available,
|
||||
"First proxy port should be available after stopping"
|
||||
);
|
||||
|
||||
println!("Proxy chaining test completed successfully");
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
+1
-1
@@ -24,7 +24,7 @@ export default function RootLayout({
|
||||
return (
|
||||
<html lang="en" suppressHydrationWarning>
|
||||
<body
|
||||
className={`${geistSans.variable} ${geistMono.variable} antialiased`}
|
||||
className={`${geistSans.variable} ${geistMono.variable} antialiased overflow-hidden bg-background`}
|
||||
>
|
||||
<CustomThemeProvider>
|
||||
<WindowDragArea />
|
||||
|
||||
+618
-372
File diff suppressed because it is too large
Load Diff
@@ -1,26 +1,52 @@
|
||||
"use client";
|
||||
|
||||
import { FaDownload, FaTimes } from "react-icons/fa";
|
||||
import { LuCheckCheck, LuCog, LuRefreshCw } from "react-icons/lu";
|
||||
import { Badge } from "@/components/ui/badge";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import React from "react";
|
||||
import { FaDownload, FaTimes } from "react-icons/fa";
|
||||
import { LuRefreshCw } from "react-icons/lu";
|
||||
|
||||
interface AppUpdateInfo {
|
||||
current_version: string;
|
||||
new_version: string;
|
||||
release_notes: string;
|
||||
download_url: string;
|
||||
is_nightly: boolean;
|
||||
published_at: string;
|
||||
}
|
||||
import type { AppUpdateInfo, AppUpdateProgress } from "@/types";
|
||||
import { RippleButton } from "./ui/ripple";
|
||||
|
||||
interface AppUpdateToastProps {
|
||||
updateInfo: AppUpdateInfo;
|
||||
onUpdate: (updateInfo: AppUpdateInfo) => Promise<void>;
|
||||
onDismiss: () => void;
|
||||
isUpdating?: boolean;
|
||||
updateProgress?: string;
|
||||
updateProgress?: AppUpdateProgress | null;
|
||||
}
|
||||
|
||||
function getStageIcon(stage?: string, isUpdating?: boolean) {
|
||||
if (!isUpdating) {
|
||||
return <FaDownload className="flex-shrink-0 w-5 h-5" />;
|
||||
}
|
||||
|
||||
switch (stage) {
|
||||
case "downloading":
|
||||
return <FaDownload className="flex-shrink-0 w-5 h-5" />;
|
||||
case "extracting":
|
||||
return <LuRefreshCw className="flex-shrink-0 w-5 h-5 animate-spin" />;
|
||||
case "installing":
|
||||
return <LuCog className="flex-shrink-0 w-5 h-5 animate-spin" />;
|
||||
case "completed":
|
||||
return <LuCheckCheck className="flex-shrink-0 w-5 h-5" />;
|
||||
default:
|
||||
return <LuRefreshCw className="flex-shrink-0 w-5 h-5 animate-spin" />;
|
||||
}
|
||||
}
|
||||
|
||||
function getStageDisplayName(stage?: string) {
|
||||
switch (stage) {
|
||||
case "downloading":
|
||||
return "Downloading";
|
||||
case "extracting":
|
||||
return "Extracting";
|
||||
case "installing":
|
||||
return "Installing";
|
||||
case "completed":
|
||||
return "Completed";
|
||||
default:
|
||||
return "Updating";
|
||||
}
|
||||
}
|
||||
|
||||
export function AppUpdateToast({
|
||||
@@ -34,22 +60,32 @@ export function AppUpdateToast({
|
||||
await onUpdate(updateInfo);
|
||||
};
|
||||
|
||||
const showDownloadProgress =
|
||||
isUpdating &&
|
||||
updateProgress?.stage === "downloading" &&
|
||||
updateProgress.percentage !== undefined;
|
||||
|
||||
const showOtherStageProgress =
|
||||
isUpdating &&
|
||||
updateProgress &&
|
||||
(updateProgress.stage === "extracting" ||
|
||||
updateProgress.stage === "installing" ||
|
||||
updateProgress.stage === "completed");
|
||||
|
||||
return (
|
||||
<div className="flex items-start w-full bg-white dark:bg-gray-800 border border-gray-200 dark:border-gray-700 rounded-lg p-4 shadow-lg max-w-md">
|
||||
<div className="flex items-start p-4 w-full max-w-md rounded-lg border shadow-lg bg-card border-border text-card-foreground">
|
||||
<div className="mr-3 mt-0.5">
|
||||
{isUpdating ? (
|
||||
<LuRefreshCw className="h-5 w-5 text-blue-500 animate-spin flex-shrink-0" />
|
||||
) : (
|
||||
<FaDownload className="h-5 w-5 text-blue-500 flex-shrink-0" />
|
||||
)}
|
||||
{getStageIcon(updateProgress?.stage, isUpdating)}
|
||||
</div>
|
||||
|
||||
<div className="flex-1 min-w-0">
|
||||
<div className="flex items-start justify-between gap-2">
|
||||
<div className="flex gap-2 justify-between items-start">
|
||||
<div className="flex flex-col gap-1">
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="font-semibold text-foreground text-sm">
|
||||
Donut Browser Update Available
|
||||
<div className="flex gap-2 items-center">
|
||||
<span className="text-sm font-semibold text-foreground">
|
||||
{isUpdating
|
||||
? `${getStageDisplayName(updateProgress?.stage)} Donut Browser Update`
|
||||
: "Donut Browser Update Available"}
|
||||
</span>
|
||||
<Badge
|
||||
variant={updateInfo.is_nightly ? "secondary" : "default"}
|
||||
@@ -59,8 +95,14 @@ export function AppUpdateToast({
|
||||
</Badge>
|
||||
</div>
|
||||
<div className="text-xs text-muted-foreground">
|
||||
Update from {updateInfo.current_version} to{" "}
|
||||
<span className="font-medium">{updateInfo.new_version}</span>
|
||||
{isUpdating ? (
|
||||
updateProgress?.message || "Updating..."
|
||||
) : (
|
||||
<>
|
||||
Update from {updateInfo.current_version} to{" "}
|
||||
<span className="font-medium">{updateInfo.new_version}</span>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -69,52 +111,66 @@ export function AppUpdateToast({
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
onClick={onDismiss}
|
||||
className="h-6 w-6 p-0 shrink-0"
|
||||
className="p-0 w-6 h-6 shrink-0"
|
||||
>
|
||||
<FaTimes className="h-3 w-3" />
|
||||
<FaTimes className="w-3 h-3" />
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{isUpdating && updateProgress && (
|
||||
<div className="mt-2">
|
||||
<p className="text-xs text-muted-foreground">{updateProgress}</p>
|
||||
{/* Download progress */}
|
||||
{showDownloadProgress && updateProgress && (
|
||||
<div className="mt-2 space-y-1">
|
||||
<div className="flex justify-between items-center">
|
||||
<p className="flex-1 min-w-0 text-xs text-muted-foreground">
|
||||
{updateProgress.percentage?.toFixed(1)}%
|
||||
{updateProgress.speed && ` • ${updateProgress.speed} MB/s`}
|
||||
{updateProgress.eta && ` • ${updateProgress.eta} remaining`}
|
||||
</p>
|
||||
</div>
|
||||
<div className="w-full bg-muted rounded-full h-1.5">
|
||||
<div
|
||||
className="bg-primary h-1.5 rounded-full transition-all duration-300"
|
||||
style={{ width: `${updateProgress.percentage}%` }}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Other stage progress (with visual indicators) */}
|
||||
{showOtherStageProgress && (
|
||||
<div className="mt-2 space-y-1">
|
||||
{/* Progress indicator for non-downloading stages */}
|
||||
<div className="w-full bg-muted rounded-full h-1.5">
|
||||
<div
|
||||
className={`h-1.5 rounded-full transition-all duration-500 ${
|
||||
updateProgress.stage === "completed"
|
||||
? "bg-green-500 w-full"
|
||||
: "bg-primary w-full animate-pulse"
|
||||
}`}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{!isUpdating && (
|
||||
<div className="flex items-center gap-2 mt-3">
|
||||
<Button
|
||||
<div className="flex gap-2 items-center mt-3">
|
||||
<RippleButton
|
||||
onClick={() => void handleUpdateClick()}
|
||||
size="sm"
|
||||
className="flex items-center gap-2 text-xs"
|
||||
className="flex gap-2 items-center text-xs"
|
||||
>
|
||||
<FaDownload className="h-3 w-3" />
|
||||
<FaDownload className="w-3 h-3" />
|
||||
Update Now
|
||||
</Button>
|
||||
<Button
|
||||
</RippleButton>
|
||||
<RippleButton
|
||||
variant="outline"
|
||||
onClick={onDismiss}
|
||||
size="sm"
|
||||
className="text-xs"
|
||||
>
|
||||
Later
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{updateInfo.release_notes && !isUpdating && (
|
||||
<div className="mt-2">
|
||||
<details className="text-xs">
|
||||
<summary className="cursor-pointer text-muted-foreground hover:text-foreground">
|
||||
Release Notes
|
||||
</summary>
|
||||
<div className="mt-1 text-muted-foreground whitespace-pre-wrap max-h-32 overflow-y-auto">
|
||||
{updateInfo.release_notes.length > 200
|
||||
? `${updateInfo.release_notes.substring(0, 200)}...`
|
||||
: updateInfo.release_notes}
|
||||
</div>
|
||||
</details>
|
||||
</RippleButton>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@@ -0,0 +1,135 @@
|
||||
"use client";
|
||||
|
||||
import { useEffect, useState } from "react";
|
||||
import { SharedCamoufoxConfigForm } from "@/components/shared-camoufox-config-form";
|
||||
import {
|
||||
Dialog,
|
||||
DialogContent,
|
||||
DialogFooter,
|
||||
DialogHeader,
|
||||
DialogTitle,
|
||||
} from "@/components/ui/dialog";
|
||||
import { ScrollArea } from "@/components/ui/scroll-area";
|
||||
import type { BrowserProfile, CamoufoxConfig } from "@/types";
|
||||
import { LoadingButton } from "./loading-button";
|
||||
import { RippleButton } from "./ui/ripple";
|
||||
|
||||
interface CamoufoxConfigDialogProps {
|
||||
isOpen: boolean;
|
||||
onClose: () => void;
|
||||
profile: BrowserProfile | null;
|
||||
onSave: (profile: BrowserProfile, config: CamoufoxConfig) => Promise<void>;
|
||||
}
|
||||
|
||||
export function CamoufoxConfigDialog({
|
||||
isOpen,
|
||||
onClose,
|
||||
profile,
|
||||
onSave,
|
||||
}: CamoufoxConfigDialogProps) {
|
||||
const [config, setConfig] = useState<CamoufoxConfig>({
|
||||
geoip: true,
|
||||
});
|
||||
const [isSaving, setIsSaving] = useState(false);
|
||||
|
||||
// Initialize config when profile changes
|
||||
useEffect(() => {
|
||||
if (profile && profile.browser === "camoufox") {
|
||||
setConfig(
|
||||
profile.camoufox_config || {
|
||||
geoip: true,
|
||||
},
|
||||
);
|
||||
}
|
||||
}, [profile]);
|
||||
|
||||
const updateConfig = (key: keyof CamoufoxConfig, value: unknown) => {
|
||||
setConfig((prev) => ({ ...prev, [key]: value }));
|
||||
};
|
||||
|
||||
const handleSave = async () => {
|
||||
if (!profile) return;
|
||||
|
||||
// Validate fingerprint JSON if it exists
|
||||
if (config.fingerprint) {
|
||||
try {
|
||||
JSON.parse(config.fingerprint);
|
||||
} catch (_error) {
|
||||
const { toast } = await import("sonner");
|
||||
toast.error("Invalid fingerprint configuration", {
|
||||
description:
|
||||
"The fingerprint configuration contains invalid JSON. Please check your advanced settings.",
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
setIsSaving(true);
|
||||
try {
|
||||
await onSave(profile, config);
|
||||
onClose();
|
||||
} catch (error) {
|
||||
console.error("Failed to save camoufox config:", error);
|
||||
const { toast } = await import("sonner");
|
||||
toast.error("Failed to save configuration", {
|
||||
description:
|
||||
error instanceof Error ? error.message : "Unknown error occurred",
|
||||
});
|
||||
} finally {
|
||||
setIsSaving(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleClose = () => {
|
||||
// Reset config to original when closing without saving
|
||||
if (profile && profile.browser === "camoufox") {
|
||||
setConfig(
|
||||
profile.camoufox_config || {
|
||||
geoip: true,
|
||||
},
|
||||
);
|
||||
}
|
||||
onClose();
|
||||
};
|
||||
|
||||
if (!profile || profile.browser !== "camoufox") {
|
||||
return null;
|
||||
}
|
||||
|
||||
// No OS warning needed anymore since we removed OS selection
|
||||
|
||||
return (
|
||||
<Dialog open={isOpen} onOpenChange={handleClose}>
|
||||
<DialogContent className="max-w-3xl max-h-[90vh] flex flex-col">
|
||||
<DialogHeader className="flex-shrink-0">
|
||||
<DialogTitle>
|
||||
Configure Fingerprint Settings - {profile.name}
|
||||
</DialogTitle>
|
||||
</DialogHeader>
|
||||
|
||||
<ScrollArea className="flex-1 h-[320px]">
|
||||
<div className="py-4">
|
||||
<SharedCamoufoxConfigForm
|
||||
config={config}
|
||||
onConfigChange={updateConfig}
|
||||
forceAdvanced={true}
|
||||
/>
|
||||
</div>
|
||||
</ScrollArea>
|
||||
|
||||
<DialogFooter className="flex-shrink-0 pt-4 border-t">
|
||||
<RippleButton variant="outline" onClick={handleClose}>
|
||||
Cancel
|
||||
</RippleButton>
|
||||
<LoadingButton
|
||||
isLoading={isSaving}
|
||||
onClick={handleSave}
|
||||
disabled={isSaving}
|
||||
>
|
||||
Save
|
||||
</LoadingButton>
|
||||
</DialogFooter>
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
);
|
||||
}
|
||||
@@ -1,196 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import { LoadingButton } from "@/components/loading-button";
|
||||
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Checkbox } from "@/components/ui/checkbox";
|
||||
import {
|
||||
Dialog,
|
||||
DialogContent,
|
||||
DialogFooter,
|
||||
DialogHeader,
|
||||
DialogTitle,
|
||||
} from "@/components/ui/dialog";
|
||||
import { Label } from "@/components/ui/label";
|
||||
import { VersionSelector } from "@/components/version-selector";
|
||||
import { useBrowserDownload } from "@/hooks/use-browser-download";
|
||||
import type { BrowserProfile } from "@/types";
|
||||
import { invoke } from "@tauri-apps/api/core";
|
||||
import { useEffect, useState } from "react";
|
||||
import { LuTriangleAlert } from "react-icons/lu";
|
||||
|
||||
interface ChangeVersionDialogProps {
|
||||
isOpen: boolean;
|
||||
onClose: () => void;
|
||||
profile: BrowserProfile | null;
|
||||
onVersionChanged: () => void;
|
||||
}
|
||||
|
||||
export function ChangeVersionDialog({
|
||||
isOpen,
|
||||
onClose,
|
||||
profile,
|
||||
onVersionChanged,
|
||||
}: ChangeVersionDialogProps) {
|
||||
const [selectedVersion, setSelectedVersion] = useState<string | null>(null);
|
||||
const [isUpdating, setIsUpdating] = useState(false);
|
||||
const [showDowngradeWarning, setShowDowngradeWarning] = useState(false);
|
||||
const [acknowledgeDowngrade, setAcknowledgeDowngrade] = useState(false);
|
||||
|
||||
const {
|
||||
availableVersions,
|
||||
downloadedVersions,
|
||||
isDownloading,
|
||||
loadVersions,
|
||||
loadDownloadedVersions,
|
||||
downloadBrowser,
|
||||
isVersionDownloaded,
|
||||
} = useBrowserDownload();
|
||||
|
||||
useEffect(() => {
|
||||
if (isOpen && profile) {
|
||||
setSelectedVersion(profile.version);
|
||||
setAcknowledgeDowngrade(false);
|
||||
void loadVersions(profile.browser);
|
||||
void loadDownloadedVersions(profile.browser);
|
||||
}
|
||||
}, [isOpen, profile, loadVersions, loadDownloadedVersions]);
|
||||
|
||||
useEffect(() => {
|
||||
if (profile && selectedVersion) {
|
||||
// Check if this is a downgrade
|
||||
const currentVersionIndex = availableVersions.findIndex(
|
||||
(v) => v.tag_name === profile.version,
|
||||
);
|
||||
const selectedVersionIndex = availableVersions.findIndex(
|
||||
(v) => v.tag_name === selectedVersion,
|
||||
);
|
||||
|
||||
// If selected version has a higher index, it's older (downgrade)
|
||||
const isDowngrade =
|
||||
currentVersionIndex !== -1 &&
|
||||
selectedVersionIndex !== -1 &&
|
||||
selectedVersionIndex > currentVersionIndex;
|
||||
setShowDowngradeWarning(isDowngrade);
|
||||
|
||||
if (!isDowngrade) {
|
||||
setAcknowledgeDowngrade(false);
|
||||
}
|
||||
}
|
||||
}, [selectedVersion, profile, availableVersions]);
|
||||
|
||||
const handleDownload = async () => {
|
||||
if (!profile || !selectedVersion) return;
|
||||
await downloadBrowser(profile.browser, selectedVersion);
|
||||
};
|
||||
|
||||
const handleVersionChange = async () => {
|
||||
if (!profile || !selectedVersion) return;
|
||||
|
||||
setIsUpdating(true);
|
||||
try {
|
||||
await invoke("update_profile_version", {
|
||||
profileName: profile.name,
|
||||
version: selectedVersion,
|
||||
});
|
||||
onVersionChanged();
|
||||
onClose();
|
||||
} catch (error) {
|
||||
console.error("Failed to update profile version:", error);
|
||||
} finally {
|
||||
setIsUpdating(false);
|
||||
}
|
||||
};
|
||||
|
||||
const canUpdate =
|
||||
profile &&
|
||||
selectedVersion &&
|
||||
selectedVersion !== profile.version &&
|
||||
selectedVersion &&
|
||||
isVersionDownloaded(selectedVersion) &&
|
||||
(!showDowngradeWarning || acknowledgeDowngrade);
|
||||
|
||||
if (!profile) return null;
|
||||
|
||||
return (
|
||||
<Dialog open={isOpen} onOpenChange={onClose}>
|
||||
<DialogContent className="max-w-md">
|
||||
<DialogHeader>
|
||||
<DialogTitle>Change Browser Version</DialogTitle>
|
||||
</DialogHeader>
|
||||
|
||||
<div className="grid gap-4 py-4">
|
||||
<div className="space-y-2">
|
||||
<Label className="text-sm font-medium">Profile:</Label>
|
||||
<div className="p-2 bg-muted rounded text-sm">{profile.name}</div>
|
||||
</div>
|
||||
|
||||
<div className="space-y-2">
|
||||
<Label className="text-sm font-medium">Current Version:</Label>
|
||||
<div className="p-2 bg-muted rounded text-sm">
|
||||
{profile.version}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Version Selection */}
|
||||
<div className="grid gap-2">
|
||||
<Label>New Version</Label>
|
||||
<VersionSelector
|
||||
selectedVersion={selectedVersion}
|
||||
onVersionSelect={setSelectedVersion}
|
||||
availableVersions={availableVersions}
|
||||
downloadedVersions={downloadedVersions}
|
||||
isDownloading={isDownloading}
|
||||
onDownload={() => {
|
||||
void handleDownload();
|
||||
}}
|
||||
placeholder="Select version..."
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Downgrade Warning */}
|
||||
{showDowngradeWarning && (
|
||||
<Alert className="border-orange-700">
|
||||
<LuTriangleAlert className="h-4 w-4 text-orange-700" />
|
||||
<AlertTitle className="text-orange-700">
|
||||
Downgrade Warning
|
||||
</AlertTitle>
|
||||
<AlertDescription className="text-orange-700">
|
||||
You are about to downgrade from version {profile.version} to{" "}
|
||||
{selectedVersion}. This may lead to compatibility issues, data
|
||||
loss, or unexpected behavior.
|
||||
<div className="flex items-center space-x-2 mt-3">
|
||||
<Checkbox
|
||||
id="acknowledge-downgrade"
|
||||
checked={acknowledgeDowngrade}
|
||||
onCheckedChange={(checked) => {
|
||||
setAcknowledgeDowngrade(checked as boolean);
|
||||
}}
|
||||
/>
|
||||
<Label htmlFor="acknowledge-downgrade" className="text-sm">
|
||||
I understand the risks and want to proceed
|
||||
</Label>
|
||||
</div>
|
||||
</AlertDescription>
|
||||
</Alert>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<DialogFooter>
|
||||
<Button variant="outline" onClick={onClose}>
|
||||
Cancel
|
||||
</Button>
|
||||
<LoadingButton
|
||||
isLoading={isUpdating}
|
||||
onClick={() => {
|
||||
void handleVersionChange();
|
||||
}}
|
||||
disabled={!canUpdate}
|
||||
>
|
||||
{isUpdating ? "Updating..." : "Update Version"}
|
||||
</LoadingButton>
|
||||
</DialogFooter>
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,119 @@
|
||||
"use client";
|
||||
|
||||
import { invoke } from "@tauri-apps/api/core";
|
||||
import { useCallback, useState } from "react";
|
||||
import { toast } from "sonner";
|
||||
import { LoadingButton } from "@/components/loading-button";
|
||||
import {
|
||||
Dialog,
|
||||
DialogContent,
|
||||
DialogDescription,
|
||||
DialogFooter,
|
||||
DialogHeader,
|
||||
DialogTitle,
|
||||
} from "@/components/ui/dialog";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import { Label } from "@/components/ui/label";
|
||||
import type { ProfileGroup } from "@/types";
|
||||
import { RippleButton } from "./ui/ripple";
|
||||
|
||||
interface CreateGroupDialogProps {
|
||||
isOpen: boolean;
|
||||
onClose: () => void;
|
||||
onGroupCreated: (group: ProfileGroup) => void;
|
||||
}
|
||||
|
||||
export function CreateGroupDialog({
|
||||
isOpen,
|
||||
onClose,
|
||||
onGroupCreated,
|
||||
}: CreateGroupDialogProps) {
|
||||
const [groupName, setGroupName] = useState("");
|
||||
const [isCreating, setIsCreating] = useState(false);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
const handleCreate = useCallback(async () => {
|
||||
if (!groupName.trim()) return;
|
||||
|
||||
setIsCreating(true);
|
||||
setError(null);
|
||||
try {
|
||||
const newGroup = await invoke<ProfileGroup>("create_profile_group", {
|
||||
name: groupName.trim(),
|
||||
});
|
||||
|
||||
toast.success("Group created successfully");
|
||||
onGroupCreated(newGroup);
|
||||
setGroupName("");
|
||||
onClose();
|
||||
} catch (err) {
|
||||
console.error("Failed to create group:", err);
|
||||
const errorMessage =
|
||||
err instanceof Error ? err.message : "Failed to create group";
|
||||
setError(errorMessage);
|
||||
toast.error(errorMessage);
|
||||
} finally {
|
||||
setIsCreating(false);
|
||||
}
|
||||
}, [groupName, onGroupCreated, onClose]);
|
||||
|
||||
const handleClose = useCallback(() => {
|
||||
setGroupName("");
|
||||
setError(null);
|
||||
onClose();
|
||||
}, [onClose]);
|
||||
|
||||
return (
|
||||
<Dialog open={isOpen} onOpenChange={handleClose}>
|
||||
<DialogContent className="max-w-md">
|
||||
<DialogHeader>
|
||||
<DialogTitle>Create New Group</DialogTitle>
|
||||
<DialogDescription>
|
||||
Create a new group to organize your browser profiles.
|
||||
</DialogDescription>
|
||||
</DialogHeader>
|
||||
|
||||
<div className="space-y-4">
|
||||
<div className="space-y-2">
|
||||
<Label htmlFor="group-name">Group Name</Label>
|
||||
<Input
|
||||
id="group-name"
|
||||
placeholder="Enter group name..."
|
||||
value={groupName}
|
||||
onChange={(e) => setGroupName(e.target.value)}
|
||||
onKeyDown={(e) => {
|
||||
if (e.key === "Enter" && groupName.trim()) {
|
||||
void handleCreate();
|
||||
}
|
||||
}}
|
||||
disabled={isCreating}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{error && (
|
||||
<div className="p-3 text-sm text-red-600 bg-red-50 rounded-md dark:bg-red-900/20 dark:text-red-400">
|
||||
{error}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<DialogFooter>
|
||||
<RippleButton
|
||||
variant="outline"
|
||||
onClick={handleClose}
|
||||
disabled={isCreating}
|
||||
>
|
||||
Cancel
|
||||
</RippleButton>
|
||||
<LoadingButton
|
||||
isLoading={isCreating}
|
||||
onClick={() => void handleCreate()}
|
||||
disabled={!groupName.trim()}
|
||||
>
|
||||
Create
|
||||
</LoadingButton>
|
||||
</DialogFooter>
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
);
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -10,6 +10,7 @@
|
||||
* - Progress bars for downloads/updates
|
||||
* - Success/error states
|
||||
* - Customizable icons and content
|
||||
* - Auto-update notifications
|
||||
*
|
||||
* Usage Examples:
|
||||
*
|
||||
@@ -23,6 +24,11 @@
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* Auto-update toast:
|
||||
* ```
|
||||
* showAutoUpdateToast("Firefox", "125.0.1");
|
||||
* ```
|
||||
*
|
||||
* Download progress toast:
|
||||
* ```
|
||||
* showToast({
|
||||
@@ -41,20 +47,23 @@
|
||||
* });
|
||||
* ```
|
||||
*/
|
||||
/** biome-ignore-all lint/suspicious/noExplicitAny: TODO */
|
||||
|
||||
import React from "react";
|
||||
import {
|
||||
LuCheckCheck,
|
||||
LuDownload,
|
||||
LuRefreshCw,
|
||||
LuTriangleAlert,
|
||||
} from "react-icons/lu";
|
||||
import type { ExternalToast } from "sonner";
|
||||
import { RippleButton } from "./ui/ripple";
|
||||
|
||||
interface BaseToastProps {
|
||||
id?: string;
|
||||
title: string;
|
||||
description?: string;
|
||||
duration?: number;
|
||||
action?: ExternalToast["action"];
|
||||
}
|
||||
|
||||
interface LoadingToastProps extends BaseToastProps {
|
||||
@@ -90,6 +99,7 @@ interface VersionUpdateToastProps extends BaseToastProps {
|
||||
current: number;
|
||||
total: number;
|
||||
found: number;
|
||||
current_browser?: string;
|
||||
};
|
||||
}
|
||||
|
||||
@@ -116,45 +126,52 @@ type ToastProps =
|
||||
function getToastIcon(type: ToastProps["type"], stage?: string) {
|
||||
switch (type) {
|
||||
case "success":
|
||||
return <LuCheckCheck className="flex-shrink-0 w-4 h-4 text-green-500" />;
|
||||
return <LuCheckCheck className="flex-shrink-0 w-4 h-4 text-foreground" />;
|
||||
case "error":
|
||||
return <LuTriangleAlert className="flex-shrink-0 w-4 h-4 text-red-500" />;
|
||||
return (
|
||||
<LuTriangleAlert className="flex-shrink-0 w-4 h-4 text-foreground" />
|
||||
);
|
||||
case "download":
|
||||
if (stage === "completed") {
|
||||
return (
|
||||
<LuCheckCheck className="flex-shrink-0 w-4 h-4 text-green-500" />
|
||||
<LuCheckCheck className="flex-shrink-0 w-4 h-4 text-foreground" />
|
||||
);
|
||||
}
|
||||
return <LuDownload className="flex-shrink-0 w-4 h-4 text-blue-500" />;
|
||||
return <LuDownload className="flex-shrink-0 w-4 h-4 text-foreground" />;
|
||||
|
||||
case "version-update":
|
||||
return (
|
||||
<LuRefreshCw className="flex-shrink-0 w-4 h-4 text-blue-500 animate-spin" />
|
||||
<LuRefreshCw className="flex-shrink-0 w-4 h-4 animate-spin text-foreground" />
|
||||
);
|
||||
case "fetching":
|
||||
return (
|
||||
<LuRefreshCw className="flex-shrink-0 w-4 h-4 text-blue-500 animate-spin" />
|
||||
<LuRefreshCw className="flex-shrink-0 w-4 h-4 animate-spin text-foreground" />
|
||||
);
|
||||
case "twilight-update":
|
||||
return (
|
||||
<LuRefreshCw className="flex-shrink-0 w-4 h-4 text-purple-500 animate-spin" />
|
||||
<LuRefreshCw className="flex-shrink-0 w-4 h-4 animate-spin text-foreground" />
|
||||
);
|
||||
case "loading":
|
||||
return (
|
||||
<div className="flex-shrink-0 w-4 h-4 rounded-full border-2 animate-spin border-foreground border-t-transparent" />
|
||||
);
|
||||
default:
|
||||
return (
|
||||
<div className="flex-shrink-0 w-4 h-4 rounded-full border-2 border-blue-500 animate-spin border-t-transparent" />
|
||||
<div className="flex-shrink-0 w-4 h-4 rounded-full border-2 animate-spin border-foreground border-t-transparent" />
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export function UnifiedToast(props: ToastProps) {
|
||||
const { title, description, type } = props;
|
||||
const { title, description, type, action } = props;
|
||||
const stage = "stage" in props ? props.stage : undefined;
|
||||
const progress = "progress" in props ? props.progress : undefined;
|
||||
|
||||
return (
|
||||
<div className="flex items-start p-3 w-full bg-white rounded-lg border border-gray-200 shadow-lg dark:bg-gray-800 dark:border-gray-700">
|
||||
<div className="flex items-start p-3 w-96 rounded-lg border shadow-lg bg-card border-border text-card-foreground">
|
||||
<div className="mr-3 mt-0.5">{getToastIcon(type, stage)}</div>
|
||||
<div className="flex-1 min-w-0">
|
||||
<p className="text-sm font-medium leading-tight text-gray-900 dark:text-white">
|
||||
<p className="text-sm font-semibold leading-tight text-foreground">
|
||||
{title}
|
||||
</p>
|
||||
|
||||
@@ -165,15 +182,15 @@ export function UnifiedToast(props: ToastProps) {
|
||||
stage === "downloading" && (
|
||||
<div className="mt-2 space-y-1">
|
||||
<div className="flex justify-between items-center">
|
||||
<p className="flex-1 min-w-0 text-xs text-gray-600 dark:text-gray-300">
|
||||
<p className="flex-1 min-w-0 text-xs text-muted-foreground">
|
||||
{progress.percentage.toFixed(1)}%
|
||||
{progress.speed && ` • ${progress.speed} MB/s`}
|
||||
{progress.eta && ` • ${progress.eta} remaining`}
|
||||
</p>
|
||||
</div>
|
||||
<div className="w-full bg-gray-200 dark:bg-gray-700 rounded-full h-1.5">
|
||||
<div className="w-full bg-muted rounded-full h-1.5">
|
||||
<div
|
||||
className="bg-blue-500 h-1.5 rounded-full transition-all duration-300"
|
||||
className="bg-foreground h-1.5 rounded-full transition-all duration-300"
|
||||
style={{ width: `${progress.percentage}%` }}
|
||||
/>
|
||||
</div>
|
||||
@@ -181,37 +198,41 @@ export function UnifiedToast(props: ToastProps) {
|
||||
)}
|
||||
|
||||
{/* Version update progress */}
|
||||
{type === "version-update" && progress && "found" in progress && (
|
||||
<div className="mt-2 space-y-1">
|
||||
<p className="text-xs text-gray-600 dark:text-gray-300">
|
||||
{progress.found} new versions found so far
|
||||
</p>
|
||||
<div className="flex items-center space-x-2">
|
||||
<div className="flex-1 bg-gray-200 dark:bg-gray-700 rounded-full h-1.5 min-w-0">
|
||||
<div
|
||||
className="bg-blue-500 h-1.5 rounded-full transition-all duration-300"
|
||||
style={{
|
||||
width: `${(progress.current / progress.total) * 100}%`,
|
||||
}}
|
||||
/>
|
||||
{type === "version-update" &&
|
||||
progress &&
|
||||
"current_browser" in progress && (
|
||||
<div className="mt-2 space-y-1">
|
||||
<p className="text-xs text-muted-foreground">
|
||||
{progress.current_browser && (
|
||||
<>Looking for updates for {progress.current_browser}</>
|
||||
)}
|
||||
</p>
|
||||
<div className="flex items-center space-x-2">
|
||||
<div className="flex-1 bg-muted rounded-full h-1.5 min-w-0">
|
||||
<div
|
||||
className="bg-foreground h-1.5 rounded-full transition-all duration-300"
|
||||
style={{
|
||||
width: `${(progress.current / progress.total) * 100}%`,
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
<span className="w-8 text-xs text-right whitespace-nowrap text-muted-foreground shrink-0">
|
||||
{progress.current}/{progress.total}
|
||||
</span>
|
||||
</div>
|
||||
<span className="w-8 text-xs text-right text-gray-500 whitespace-nowrap dark:text-gray-400 shrink-0">
|
||||
{progress.current}/{progress.total}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
)}
|
||||
|
||||
{/* Twilight update progress */}
|
||||
{type === "twilight-update" && (
|
||||
<div className="mt-2">
|
||||
<p className="text-xs text-gray-600 dark:text-gray-300">
|
||||
<p className="text-xs text-muted-foreground">
|
||||
{"hasUpdate" in props && props.hasUpdate
|
||||
? "New twilight build available for download"
|
||||
: "Checking for twilight updates..."}
|
||||
</p>
|
||||
{props.browserName && (
|
||||
<p className="mt-1 text-xs text-purple-600 dark:text-purple-400">
|
||||
<p className="mt-1 text-xs text-muted-foreground">
|
||||
{props.browserName} • Rolling Release
|
||||
</p>
|
||||
)}
|
||||
@@ -220,7 +241,7 @@ export function UnifiedToast(props: ToastProps) {
|
||||
|
||||
{/* Description */}
|
||||
{description && (
|
||||
<p className="mt-1 text-xs leading-tight text-gray-600 dark:text-gray-300">
|
||||
<p className="mt-1 text-xs leading-tight text-muted-foreground">
|
||||
{description}
|
||||
</p>
|
||||
)}
|
||||
@@ -229,22 +250,35 @@ export function UnifiedToast(props: ToastProps) {
|
||||
{type === "download" && !description && (
|
||||
<>
|
||||
{stage === "extracting" && (
|
||||
<p className="mt-1 text-xs text-gray-600 dark:text-gray-300">
|
||||
<p className="mt-1 text-xs text-muted-foreground">
|
||||
Extracting browser files...
|
||||
</p>
|
||||
)}
|
||||
{stage === "verifying" && (
|
||||
<p className="mt-1 text-xs text-gray-600 dark:text-gray-300">
|
||||
<p className="mt-1 text-xs text-muted-foreground">
|
||||
Verifying browser files...
|
||||
</p>
|
||||
)}
|
||||
{stage === "downloading (twilight rolling release)" && (
|
||||
<p className="mt-1 text-xs text-purple-600 dark:text-purple-400">
|
||||
<p className="mt-1 text-xs text-muted-foreground">
|
||||
Downloading rolling release build...
|
||||
</p>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
{action &&
|
||||
"onClick" in (action as any) &&
|
||||
"label" in (action as any) && (
|
||||
<div className="mt-2 w-full">
|
||||
<RippleButton
|
||||
size="sm"
|
||||
className="ml-auto"
|
||||
onClick={(action as any).onClick}
|
||||
>
|
||||
{(action as any).label}
|
||||
</RippleButton>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
@@ -0,0 +1,87 @@
|
||||
"use client";

import {
  Dialog,
  DialogContent,
  DialogDescription,
  DialogFooter,
  DialogHeader,
  DialogTitle,
} from "@/components/ui/dialog";
import { LoadingButton } from "./loading-button";
import { RippleButton } from "./ui/ripple";

interface DeleteConfirmationDialogProps {
  isOpen: boolean;
  onClose: () => void;
  onConfirm: () => void | Promise<void>;
  title: string;
  description: string;
  confirmButtonText?: string;
  isLoading?: boolean;
  profileIds?: string[];
  profiles?: { id: string; name: string }[];
}

/**
 * Generic confirmation dialog for destructive actions.
 *
 * When `profileIds` is provided, the affected profiles are listed inside the
 * dialog so the user can review exactly what will be deleted before
 * confirming. Names are resolved via `profiles`; unknown ids are shown as-is.
 * The loading state is owned by the caller through `isLoading`.
 */
export function DeleteConfirmationDialog({
  isOpen,
  onClose,
  onConfirm,
  title,
  description,
  confirmButtonText = "Delete",
  isLoading = false,
  profileIds,
  profiles = [],
}: DeleteConfirmationDialogProps) {
  // Map a profile id to its human-readable name, falling back to the raw id
  // when the profile record is not available in `profiles`.
  const resolveName = (id: string) =>
    profiles.find((candidate) => candidate.id === id)?.name ?? id;

  return (
    <Dialog open={isOpen} onOpenChange={onClose}>
      <DialogContent>
        <DialogHeader>
          <DialogTitle>{title}</DialogTitle>
          <DialogDescription>{description}</DialogDescription>
          {profileIds && profileIds.length > 0 && (
            <div className="mt-4">
              <p className="text-sm font-medium mb-2">
                Profiles to be deleted:
              </p>
              <div className="bg-muted rounded-md p-3 max-h-32 overflow-y-auto">
                <ul className="space-y-1">
                  {profileIds.map((id) => (
                    <li key={id} className="text-sm text-muted-foreground">
                      • {resolveName(id)}
                    </li>
                  ))}
                </ul>
              </div>
            </div>
          )}
        </DialogHeader>
        <DialogFooter>
          <RippleButton
            variant="outline"
            onClick={onClose}
            disabled={isLoading}
          >
            Cancel
          </RippleButton>
          <LoadingButton
            variant="destructive"
            onClick={() => void onConfirm()}
            isLoading={isLoading}
          >
            {confirmButtonText}
          </LoadingButton>
        </DialogFooter>
      </DialogContent>
    </Dialog>
  );
}
|
||||
@@ -0,0 +1,213 @@
|
||||
"use client";

import { invoke } from "@tauri-apps/api/core";
import { useCallback, useEffect, useState } from "react";
import { toast } from "sonner";
import { LoadingButton } from "@/components/loading-button";
import {
  Dialog,
  DialogContent,
  DialogDescription,
  DialogFooter,
  DialogHeader,
  DialogTitle,
} from "@/components/ui/dialog";
import { Label } from "@/components/ui/label";
import { RadioGroup, RadioGroupItem } from "@/components/ui/radio-group";
import { ScrollArea } from "@/components/ui/scroll-area";
import type { BrowserProfile, ProfileGroup } from "@/types";
import { RippleButton } from "./ui/ripple";

interface DeleteGroupDialogProps {
  isOpen: boolean;
  onClose: () => void;
  group: ProfileGroup | null;
  onGroupDeleted: () => void;
}

/**
 * Dialog for deleting a profile group.
 *
 * Loads the profiles currently assigned to the group and lets the user choose
 * whether those profiles are moved to the default group or deleted together
 * with the group. The dialog cannot be dismissed (Escape / overlay click)
 * while the delete operation is in flight.
 */
export function DeleteGroupDialog({
  isOpen,
  onClose,
  group,
  onGroupDeleted,
}: DeleteGroupDialogProps) {
  const [associatedProfiles, setAssociatedProfiles] = useState<
    BrowserProfile[]
  >([]);
  const [deleteAction, setDeleteAction] = useState<"move" | "delete">("move");
  const [isDeleting, setIsDeleting] = useState(false);
  const [isLoading, setIsLoading] = useState(false);
  const [error, setError] = useState<string | null>(null);

  // Fetch the profiles that belong to the group being deleted so we can show
  // them and act on them (move or delete) before removing the group itself.
  const loadAssociatedProfiles = useCallback(async () => {
    if (!group) return;

    setIsLoading(true);
    setError(null);
    try {
      const allProfiles = await invoke<BrowserProfile[]>(
        "list_browser_profiles",
      );
      const groupProfiles = allProfiles.filter(
        (profile) => profile.group_id === group.id,
      );
      setAssociatedProfiles(groupProfiles);
    } catch (err) {
      console.error("Failed to load associated profiles:", err);
      setError(err instanceof Error ? err.message : "Failed to load profiles");
    } finally {
      setIsLoading(false);
    }
  }, [group]);

  useEffect(() => {
    if (isOpen && group) {
      void loadAssociatedProfiles();
    }
  }, [isOpen, group, loadAssociatedProfiles]);

  // Perform the chosen profile handling first, then delete the group.
  const handleDelete = useCallback(async () => {
    if (!group) return;

    setIsDeleting(true);
    setError(null);
    try {
      if (deleteAction === "delete" && associatedProfiles.length > 0) {
        // Delete all associated profiles first
        const profileIds = associatedProfiles.map((p) => p.id);
        await invoke("delete_selected_profiles", { profileIds });
      } else if (deleteAction === "move" && associatedProfiles.length > 0) {
        // Move profiles to default group (null group_id)
        const profileIds = associatedProfiles.map((p) => p.id);
        await invoke("assign_profiles_to_group", {
          profileIds,
          groupId: null,
        });
      }

      // Delete the group
      await invoke("delete_profile_group", { groupId: group.id });

      toast.success("Group deleted successfully");
      onGroupDeleted();
      onClose();
    } catch (err) {
      console.error("Failed to delete group:", err);
      const errorMessage =
        err instanceof Error ? err.message : "Failed to delete group";
      setError(errorMessage);
      toast.error(errorMessage);
    } finally {
      setIsDeleting(false);
    }
  }, [group, deleteAction, associatedProfiles, onGroupDeleted, onClose]);

  // Reset transient dialog state before notifying the parent that we closed.
  const handleClose = useCallback(() => {
    setError(null);
    setDeleteAction("move");
    setAssociatedProfiles([]);
    onClose();
  }, [onClose]);

  // Radix passes the next `open` value here. Only react to close requests,
  // and refuse to dismiss while a delete is in flight so the user cannot
  // lose the dialog (and its state) mid-operation via Escape/overlay click.
  const handleOpenChange = useCallback(
    (open: boolean) => {
      if (!open && !isDeleting) {
        handleClose();
      }
    },
    [isDeleting, handleClose],
  );

  return (
    <Dialog open={isOpen} onOpenChange={handleOpenChange}>
      <DialogContent className="max-w-md">
        <DialogHeader>
          <DialogTitle>Delete Group</DialogTitle>
          <DialogDescription>
            This action cannot be undone. This will permanently delete the group
            "{group?.name}".
          </DialogDescription>
        </DialogHeader>

        <div className="space-y-4">
          {isLoading ? (
            <div className="text-sm text-muted-foreground">
              Loading associated profiles...
            </div>
          ) : (
            <>
              {associatedProfiles.length > 0 && (
                <div className="space-y-3">
                  <div className="space-y-2">
                    <Label>
                      Associated Profiles ({associatedProfiles.length})
                    </Label>
                    <ScrollArea className="h-32 w-full border rounded-md p-3">
                      <div className="space-y-1">
                        {associatedProfiles.map((profile) => (
                          <div key={profile.id} className="text-sm">
                            • {profile.name}
                          </div>
                        ))}
                      </div>
                    </ScrollArea>
                  </div>

                  <div className="space-y-3">
                    <Label>What should happen to these profiles?</Label>
                    <RadioGroup
                      value={deleteAction}
                      onValueChange={(value) =>
                        setDeleteAction(value as "move" | "delete")
                      }
                    >
                      <div className="flex items-center space-x-2">
                        <RadioGroupItem value="move" id="move" />
                        <Label htmlFor="move" className="text-sm">
                          Move profiles to Default group
                        </Label>
                      </div>
                      <div className="flex items-center space-x-2">
                        <RadioGroupItem value="delete" id="delete" />
                        <Label
                          htmlFor="delete"
                          className="text-sm text-red-600"
                        >
                          Delete profiles along with the group
                        </Label>
                      </div>
                    </RadioGroup>
                  </div>
                </div>
              )}

              {associatedProfiles.length === 0 && !isLoading && (
                <div className="text-sm text-muted-foreground">
                  This group has no associated profiles.
                </div>
              )}
            </>
          )}

          {error && (
            <div className="p-3 text-sm text-red-600 bg-red-50 rounded-md dark:bg-red-900/20 dark:text-red-400">
              {error}
            </div>
          )}
        </div>

        <DialogFooter>
          <RippleButton
            variant="outline"
            onClick={handleClose}
            disabled={isDeleting}
          >
            Cancel
          </RippleButton>
          <LoadingButton
            variant="destructive"
            isLoading={isDeleting}
            onClick={() => void handleDelete()}
            disabled={isLoading}
          >
            Delete Group
            {deleteAction === "delete" &&
              associatedProfiles.length > 0 &&
              " & Profiles"}
          </LoadingButton>
        </DialogFooter>
      </DialogContent>
    </Dialog>
  );
}
|
||||
@@ -0,0 +1,129 @@
|
||||
"use client";

import { invoke } from "@tauri-apps/api/core";
import { useCallback, useEffect, useState } from "react";
import { toast } from "sonner";
import { LoadingButton } from "@/components/loading-button";
import {
  Dialog,
  DialogContent,
  DialogDescription,
  DialogFooter,
  DialogHeader,
  DialogTitle,
} from "@/components/ui/dialog";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import type { ProfileGroup } from "@/types";
import { RippleButton } from "./ui/ripple";

interface EditGroupDialogProps {
  isOpen: boolean;
  onClose: () => void;
  group: ProfileGroup | null;
  onGroupUpdated: (group: ProfileGroup) => void;
}

/**
 * Dialog for renaming a profile group.
 *
 * Submitting (button or Enter key) is allowed only when the trimmed name is
 * non-empty and differs from the current name; both input paths share the
 * same guard so Enter cannot trigger a no-op backend update. The dialog
 * cannot be dismissed (Escape / overlay click) while an update is in flight.
 */
export function EditGroupDialog({
  isOpen,
  onClose,
  group,
  onGroupUpdated,
}: EditGroupDialogProps) {
  const [groupName, setGroupName] = useState("");
  const [isUpdating, setIsUpdating] = useState(false);
  const [error, setError] = useState<string | null>(null);

  // Re-seed the input whenever a different group is targeted.
  useEffect(() => {
    if (group) {
      setGroupName(group.name);
    } else {
      setGroupName("");
    }
    setError(null);
  }, [group]);

  const handleUpdate = useCallback(async () => {
    // Centralized guard: no group, blank name, or unchanged name → no-op.
    // This also covers the Enter-key path, which previously bypassed the
    // "unchanged name" check that the submit button enforces via `disabled`.
    if (!group || !groupName.trim() || groupName === group.name) return;

    setIsUpdating(true);
    setError(null);
    try {
      const updatedGroup = await invoke<ProfileGroup>("update_profile_group", {
        groupId: group.id,
        name: groupName.trim(),
      });

      toast.success("Group updated successfully");
      onGroupUpdated(updatedGroup);
      onClose();
    } catch (err) {
      console.error("Failed to update group:", err);
      const errorMessage =
        err instanceof Error ? err.message : "Failed to update group";
      setError(errorMessage);
      toast.error(errorMessage);
    } finally {
      setIsUpdating(false);
    }
  }, [group, groupName, onGroupUpdated, onClose]);

  const handleClose = useCallback(() => {
    setError(null);
    onClose();
  }, [onClose]);

  // Radix passes the next `open` value here. Only react to close requests,
  // and refuse to dismiss while the update is in flight.
  const handleOpenChange = useCallback(
    (open: boolean) => {
      if (!open && !isUpdating) {
        handleClose();
      }
    },
    [isUpdating, handleClose],
  );

  return (
    <Dialog open={isOpen} onOpenChange={handleOpenChange}>
      <DialogContent className="max-w-md">
        <DialogHeader>
          <DialogTitle>Edit Group</DialogTitle>
          <DialogDescription>
            Update the name of the group "{group?.name}".
          </DialogDescription>
        </DialogHeader>

        <div className="space-y-4">
          <div className="space-y-2">
            <Label htmlFor="group-name">Group Name</Label>
            <Input
              id="group-name"
              placeholder="Enter group name..."
              value={groupName}
              onChange={(e) => setGroupName(e.target.value)}
              onKeyDown={(e) => {
                if (e.key === "Enter") {
                  // handleUpdate validates name emptiness/equality itself.
                  void handleUpdate();
                }
              }}
              disabled={isUpdating}
            />
          </div>

          {error && (
            <div className="p-3 text-sm text-red-600 bg-red-50 rounded-md dark:bg-red-900/20 dark:text-red-400">
              {error}
            </div>
          )}
        </div>

        <DialogFooter>
          <RippleButton
            variant="outline"
            onClick={handleClose}
            disabled={isUpdating}
          >
            Cancel
          </RippleButton>
          <LoadingButton
            isLoading={isUpdating}
            onClick={() => void handleUpdate()}
            disabled={!groupName.trim() || groupName === group?.name}
          >
            Update Group
          </LoadingButton>
        </DialogFooter>
      </DialogContent>
    </Dialog>
  );
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user