mirror of
https://github.com/zhom/donutbrowser.git
synced 2026-05-04 01:25:12 +02:00
Compare commits
403 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| c1079cf7b1 | |||
| 1e3f1d4668 | |||
| bb62ca350c | |||
| 1281fb3955 | |||
| ac878aed48 | |||
| 140621dcbe | |||
| 55b8955a20 | |||
| 1611c8e536 | |||
| c17bb56fec | |||
| cea8030268 | |||
| d9e3e1f3ef | |||
| d48e26c7eb | |||
| b7b75ec3d8 | |||
| 4a8b0bd407 | |||
| 8f24410f11 | |||
| ff4aa572fe | |||
| 0abea50279 | |||
| 1f90b12fe5 | |||
| bc0c31f527 | |||
| 357499168f | |||
| 7b1311f2ca | |||
| d755978b34 | |||
| bb164ce743 | |||
| daa36f008b | |||
| 92ef2798d2 | |||
| a9720676ae | |||
| fbcec2cbc1 | |||
| 5d395f606e | |||
| 6963e07be5 | |||
| c28537d304 | |||
| c303de4a8a | |||
| 21a13fb217 | |||
| 90d8e782de | |||
| ad2d9b73f2 | |||
| e645e212f2 | |||
| 7df92ae8ee | |||
| 18a0254ca7 | |||
| b0a58c3131 | |||
| 467e82ca93 | |||
| 8d9654044a | |||
| 711d94c9c1 | |||
| 305051d03d | |||
| 8695884535 | |||
| bb96936550 | |||
| 730621e5a1 | |||
| 714102eb25 | |||
| e5378c1bb7 | |||
| b7c8d5672a | |||
| 3fb81e2ca0 | |||
| 510de96393 | |||
| 3688e88d67 | |||
| 9646a41788 | |||
| f7679d25ca | |||
| 2d42772718 | |||
| 3980f835d6 | |||
| d0185dd5ae | |||
| de39fa4555 | |||
| f773eb6f1c | |||
| 458c30433d | |||
| 1cb9ffa249 | |||
| 5c58b5c644 | |||
| f41311a7bb | |||
| c8c09c296e | |||
| ca0c2614f4 | |||
| dca5a2970e | |||
| e0a1dd5a8a | |||
| e48b681215 | |||
| 6796912606 | |||
| 7105f6544f | |||
| 3003f868e7 | |||
| b733d26f10 | |||
| 675c2417d7 | |||
| e10a7bf089 | |||
| c8e3cd39ff | |||
| 0103150dc7 | |||
| be57ac3219 | |||
| b4067b5e34 | |||
| 3fa8822139 | |||
| 1e0ef0b497 | |||
| 2da832f100 | |||
| 284dbc5a3b | |||
| f328ceeb4f | |||
| 1bfbb365c5 | |||
| 1a15af1ded | |||
| ca20ccb489 | |||
| 0daba2eb9b | |||
| 1dfdfc6a21 | |||
| ee2f728194 | |||
| 702c1545bf | |||
| 53f403b82c | |||
| 2cae2824d3 | |||
| ca790c74ce | |||
| 25d4b30975 | |||
| 687fc7817f | |||
| 28818bed77 | |||
| d1d953b3e2 | |||
| c2153d463c | |||
| fa142a8cb0 | |||
| cf291fb0d1 | |||
| 5fed6b7c3f | |||
| 9ba51cd4e3 | |||
| a507a3daed | |||
| aabae8d3d4 | |||
| b7e6c1eb84 | |||
| d05f2190e8 | |||
| 34a9418474 | |||
| 63e125738d | |||
| 58d82d12c4 | |||
| b1c86709b0 | |||
| 12651f9f85 | |||
| c1815fdfdc | |||
| 1662c1efba | |||
| 4a8e905a44 | |||
| e165e35f2c | |||
| cbeb099cc9 | |||
| fef0c963cb | |||
| cd28531588 | |||
| ed913309dd | |||
| cecb4579c7 | |||
| 9cfed6d73e | |||
| a461fd4798 | |||
| 5159f943df | |||
| 72af5f682f | |||
| 6d77c872f2 | |||
| 0baecbdb0c | |||
| eb2af5c10b | |||
| 76cef4757a | |||
| 00d74bddaf | |||
| b5b08a0196 | |||
| ff35717cb5 | |||
| 669611ec68 | |||
| 8f1b84f615 | |||
| 2bf6531767 | |||
| da0af075fc | |||
| 83f4c2c162 | |||
| e675441171 | |||
| cf77d96042 | |||
| a4706a7f9a | |||
| b088ae675b | |||
| 54fd9b7282 | |||
| 77a50c60d1 | |||
| 62b9768006 | |||
| 66d3420000 | |||
| 8f05c48594 | |||
| a5709d95c7 | |||
| eef3e19d2f | |||
| 2c57920d44 | |||
| 57a36a5fc2 | |||
| a2a980d203 | |||
| 2deacbacab | |||
| 7cd7d077ae | |||
| 8679d0ca62 | |||
| 20cf9de4fa | |||
| 4202d595f2 | |||
| 3086ea0085 | |||
| 1ddbc5228c | |||
| 2d02095d4d | |||
| 4e0d985996 | |||
| 5af751a9b2 | |||
| da7f791274 | |||
| f4c33ad96e | |||
| 5b31cfaf32 | |||
| 4997854577 | |||
| a43e41a020 | |||
| b22b4cacf9 | |||
| 7f0df6f943 | |||
| dccf843952 | |||
| fc6ddb7cbf | |||
| 63000c72bd | |||
| 2fd344b9bb | |||
| 44bd34d8f0 | |||
| d3822bdd88 | |||
| ed1132bdc3 | |||
| fcae0623c0 | |||
| fe843e14f1 | |||
| b071e971b3 | |||
| 0b7cf547b3 | |||
| f024ce19ae | |||
| e1d3ff9000 | |||
| e2a168b188 | |||
| af767da32c | |||
| b5dfe1233e | |||
| dddf8e2e39 | |||
| adcb20fab9 | |||
| ff9c633b07 | |||
| 7ca76b1f78 | |||
| 4887a3db4d | |||
| e38cd2e560 | |||
| 7e7b47cae3 | |||
| 13ae170166 | |||
| df78e22650 | |||
| 328e6f16ee | |||
| 40ad32af6d | |||
| f299eeaea5 | |||
| 84142caac9 | |||
| d06dbb6c70 | |||
| cf5b498bd6 | |||
| 3c28a169bd | |||
| 25653e166b | |||
| 0b4263140d | |||
| b500c28b96 | |||
| 7c2be81531 | |||
| b55ef469ed | |||
| 76a206093d | |||
| 3e88dbc30e | |||
| 031823587e | |||
| c7a1ac228c | |||
| 8ede335bed | |||
| b170b8846d | |||
| 632d90a022 | |||
| 3bec00a2cd | |||
| 3b78971df8 | |||
| 5f9a716f62 | |||
| 4d07984d99 | |||
| 188e14e5b5 | |||
| bc1b9e9757 | |||
| e742e5fdfa | |||
| 9ce7757cb2 | |||
| 3ca454a2c5 | |||
| 689ac8e3ca | |||
| 0e1c5dcfb6 | |||
| f22a9f3557 | |||
| 5a76fe3221 | |||
| 5edad9b97c | |||
| 38556fc504 | |||
| 703ca2c50b | |||
| 198046fca9 | |||
| fdcce5c86a | |||
| 1cd1c7b59d | |||
| d803361fca | |||
| 2f6f20eb29 | |||
| 59272e0cff | |||
| cac2273ad3 | |||
| 1691a7a06b | |||
| 5a4718fba6 | |||
| 336543d06e | |||
| 73cc6c2ac5 | |||
| f4c96ec0c6 | |||
| f84b3c2812 | |||
| 29603076f7 | |||
| 76bcb73b39 | |||
| 51983bf3a5 | |||
| eda83cf439 | |||
| 7b6ea00838 | |||
| d8f07ddb11 | |||
| 1b0ebbc666 | |||
| d377809c77 | |||
| fbf36b49df | |||
| 341751c9b2 | |||
| eea227d853 | |||
| 29b6aed475 | |||
| 050f8b5353 | |||
| 8793de8c87 | |||
| 7408ec876c | |||
| fc8c358088 | |||
| b11495e3b9 | |||
| 11567ca50e | |||
| 1c2d5b3774 | |||
| 852066ef41 | |||
| 9622d85e73 | |||
| 4e2b87c5f1 | |||
| 2099dadbc0 | |||
| 00e4eb2715 | |||
| 33bc4476a4 | |||
| 0ad8988f7e | |||
| 2b3aaf1e92 | |||
| 5a10e0b696 | |||
| 9e48ddbf3e | |||
| bcbb2c1d42 | |||
| 391bfdabdc | |||
| 7b2dc84b5b | |||
| ddc09726f4 | |||
| e1451d3fbb | |||
| b18df6499f | |||
| c5c2563a4e | |||
| 8475f42821 | |||
| f51aa9ed85 | |||
| 3d3a3b3816 | |||
| e090881917 | |||
| b46976f47d | |||
| 39a978682c | |||
| 38e58e604b | |||
| ffcff2ce7c | |||
| c8ea31f85d | |||
| 7ac6e21dbc | |||
| 7533993909 | |||
| 8176f45e41 | |||
| f55a3f7155 | |||
| 7d74ac09d9 | |||
| d314fa1f71 | |||
| 968969cf1e | |||
| a7a3d99881 | |||
| 80cd2e4e7f | |||
| 6361a039bc | |||
| 8005ec90b6 | |||
| cdf30b7baa | |||
| fadef414fe | |||
| e1c55233f7 | |||
| 801a2b5732 | |||
| abe5c691ce | |||
| 2f9a17c6e0 | |||
| fcdb80f75a | |||
| 7568e7998d | |||
| e0f4f93c30 | |||
| d142b7f79b | |||
| dc5553a5d3 | |||
| 07445ff95b | |||
| 6ecbc39e46 | |||
| 67849c00d5 | |||
| bdf71e4ef8 | |||
| 2d2ebba40e | |||
| 2caac5bf4c | |||
| a816fbb140 | |||
| c954668ed1 | |||
| 2db27b5ffd | |||
| 845e9f28ad | |||
| ee8c6dcc85 | |||
| 08453fe9a6 | |||
| b486f00875 | |||
| 703154b30f | |||
| 130f8b86d1 | |||
| 607ed66e29 | |||
| 9570b6d605 | |||
| 2d92cbb0e5 | |||
| 251016609f | |||
| bddf796946 | |||
| 8d793a6868 | |||
| 469f161293 | |||
| 9756e64319 | |||
| 800544ede9 | |||
| aa2228a8aa | |||
| 432e5bff90 | |||
| f4b60eb6c7 | |||
| 30122c5781 | |||
| b71d84fda4 | |||
| 859af72724 | |||
| 0360a89ceb | |||
| cb6f744d6b | |||
| 575d7f80b1 | |||
| d05b69ff3d | |||
| 54abb11129 | |||
| 04c690c750 | |||
| 9a4be86e95 | |||
| 6d013d86aa | |||
| 769fbf9d75 | |||
| 6e62abc601 | |||
| 8848fa8130 | |||
| 1f0ecbe36e | |||
| f83f2033fe | |||
| 821cd4ea82 | |||
| d3a63c37bf | |||
| 95cd2426c3 | |||
| 5a3fb7b2b0 | |||
| 767a0701ce | |||
| ec61d51c07 | |||
| 545c518a55 | |||
| c99eee2c21 | |||
| 7f3683cc2e | |||
| baac3a533a | |||
| 5cd1774ffc | |||
| cb87641890 | |||
| 3df5ac671b | |||
| 390f79f97b | |||
| c4dc2ed50c | |||
| 3b7315cc0d | |||
| bbd0f5df0c | |||
| 8e7982bdf8 | |||
| 9ac662aee8 | |||
| 2394716ea3 | |||
| 06992f8b9a | |||
| 5a454e3647 | |||
| 3f91d92d8b | |||
| c586046542 | |||
| 42f63172fb | |||
| f717600fcb | |||
| c807ea5596 | |||
| df2c1316d4 | |||
| 1fdc552dc7 | |||
| ab563f81fa | |||
| d17545bd05 | |||
| 29c329b432 | |||
| 4d7bbe719f | |||
| 5b869a6115 | |||
| c4b1745a0f | |||
| ac293f6204 | |||
| 7f3a3287d6 | |||
| dd9347d429 | |||
| 615d7b8c8a | |||
| 5c26ab5c33 | |||
| dccaf6c7de | |||
| bf5b2886f5 | |||
| bbc12bcc03 | |||
| 6d437f30e1 | |||
| 4b0ab6b732 | |||
| a802895491 | |||
| a57f90899b | |||
| 3ebc714b23 | |||
| 1acd4781b5 | |||
| a5b9afafcb | |||
| 0c8dd5ace5 | |||
| e8c3188657 | |||
| e6f0b2b9e9 | |||
| 394406e134 |
@@ -0,0 +1,6 @@
|
||||
---
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
---
|
||||
Before finishing the task and showing summary, always run "pnpm format && pnpm lint && pnpm test" at the root of the project to ensure that you don't finish with broken application.
|
||||
@@ -3,4 +3,4 @@ description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
---
|
||||
Don't leave comments that don't add value
|
||||
Don't leave comments that don't add value.
|
||||
@@ -3,4 +3,4 @@ description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
---
|
||||
Do not duplicate code unless you have a very good reason to do so. It is important that the same logic is not duplicated multiple times
|
||||
Do not duplicate code unless you have a very good reason to do so. It is important that the same logic is not duplicated multiple times.
|
||||
@@ -0,0 +1,6 @@
|
||||
---
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
---
|
||||
Anytime you change nodecar's code and try to test, recompile it with "cd nodecar && pnpm build".
|
||||
@@ -3,4 +3,4 @@ description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
---
|
||||
After your changes, instead of running specific tests or linting specific files, run "pnpm format && pnpm lint && pnpm test"
|
||||
After your changes, instead of running specific tests or linting specific files, run "pnpm format && pnpm lint && pnpm test". It means that you first format the code, then lint it, then test it, so that no part is broken after your changes.
|
||||
@@ -0,0 +1,6 @@
|
||||
---
|
||||
description:
|
||||
globs:
|
||||
alwaysApply: true
|
||||
---
|
||||
If there is a global singleton of a struct, only use it inside a method while properly initializing it, unless I have explicitly specified in the request otherwise.
|
||||
+1
-20
@@ -1,4 +1,5 @@
|
||||
version: 2
|
||||
|
||||
updates:
|
||||
# Frontend dependencies (root package.json)
|
||||
- package-ecosystem: "npm"
|
||||
@@ -13,30 +14,10 @@ updates:
|
||||
frontend-dependencies:
|
||||
patterns:
|
||||
- "*"
|
||||
ignore:
|
||||
- dependency-name: "eslint"
|
||||
versions: ">= 9"
|
||||
commit-message:
|
||||
prefix: "deps"
|
||||
include: "scope"
|
||||
|
||||
# Nodecar dependencies
|
||||
- package-ecosystem: "npm"
|
||||
directory: "/nodecar"
|
||||
schedule:
|
||||
interval: "weekly"
|
||||
day: "saturday"
|
||||
time: "09:00"
|
||||
allow:
|
||||
- dependency-type: "all"
|
||||
groups:
|
||||
nodecar-dependencies:
|
||||
patterns:
|
||||
- "*"
|
||||
commit-message:
|
||||
prefix: "deps(nodecar)"
|
||||
include: "scope"
|
||||
|
||||
# Rust dependencies
|
||||
- package-ecosystem: "cargo"
|
||||
directory: "/src-tauri"
|
||||
|
||||
@@ -0,0 +1,98 @@
|
||||
name: "CodeQL"
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
push:
|
||||
branches: ["main"]
|
||||
pull_request:
|
||||
branches: ["main"]
|
||||
schedule:
|
||||
- cron: "16 13 * * 5"
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze (${{ matrix.language }})
|
||||
runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
|
||||
permissions:
|
||||
security-events: write
|
||||
packages: read
|
||||
actions: read
|
||||
contents: read
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- language: actions
|
||||
build-mode: none
|
||||
- language: javascript-typescript
|
||||
build-mode: none
|
||||
# - language: rust
|
||||
# build-mode: none
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
|
||||
|
||||
- name: Set up pnpm package manager
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda #v4.1.0
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 #v4.4.0
|
||||
with:
|
||||
node-version-file: .node-version
|
||||
cache: "pnpm"
|
||||
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b #master
|
||||
with:
|
||||
toolchain: stable
|
||||
targets: x86_64-unknown-linux-gnu
|
||||
|
||||
- name: Install system dependencies (Rust only)
|
||||
if: matrix.language == 'rust'
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libgtk-3-dev libayatana-appindicator3-dev librsvg2-dev pkg-config xdg-utils
|
||||
|
||||
- name: Rust cache
|
||||
uses: swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 #v2.8.0
|
||||
with:
|
||||
workdir: ./src-tauri
|
||||
|
||||
- name: Install banderole
|
||||
run: cargo install banderole
|
||||
|
||||
- name: Install dependencies from lockfile
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Install rust dependencies
|
||||
if: matrix.language == 'rust'
|
||||
working-directory: ./src-tauri
|
||||
run: |
|
||||
cargo build
|
||||
|
||||
- name: Build nodecar sidecar
|
||||
if: matrix.language == 'rust'
|
||||
shell: bash
|
||||
working-directory: ./nodecar
|
||||
run: |
|
||||
pnpm run build:linux-x64
|
||||
|
||||
- name: Copy nodecar binary to Tauri binaries
|
||||
if: matrix.language == 'rust'
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p src-tauri/binaries
|
||||
cp nodecar/nodecar-bin src-tauri/binaries/nodecar-x86_64-unknown-linux-gnu
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@b1e4dc3db58c9601794e22a9f6d28d45461b9dbf #v3.29.0
|
||||
with:
|
||||
queries: security-extended
|
||||
languages: ${{ matrix.language }}
|
||||
build-mode: ${{ matrix.build-mode }}
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@b1e4dc3db58c9601794e22a9f6d28d45461b9dbf #v3.29.0
|
||||
with:
|
||||
category: "/language:${{matrix.language}}"
|
||||
@@ -0,0 +1,21 @@
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
contrib-readme-job:
|
||||
runs-on: ubuntu-latest
|
||||
name: Automatically update the contributors list in the README
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Contribute List
|
||||
uses: akhilmhdh/contributors-readme-action@83ea0b4f1ac928fbfe88b9e8460a932a528eb79f #v2.3.11
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
@@ -13,7 +13,7 @@ jobs:
|
||||
security-scan:
|
||||
name: Security Vulnerability Scan
|
||||
if: ${{ github.actor == 'dependabot[bot]' }}
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@e69cc6c86b31f1e7e23935bbe7031b50e51082de" # v2.0.2
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@b00f71e051ddddc6e46a193c31c8c0bf283bf9e6" # v2.1.0
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
@@ -32,29 +32,51 @@ jobs:
|
||||
if: ${{ github.actor == 'dependabot[bot]' }}
|
||||
uses: ./.github/workflows/lint-js.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
lint-rust:
|
||||
name: Lint Rust
|
||||
if: ${{ github.actor == 'dependabot[bot]' }}
|
||||
uses: ./.github/workflows/lint-rs.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
codeql:
|
||||
name: CodeQL
|
||||
uses: ./.github/workflows/codeql.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
security-events: write
|
||||
contents: read
|
||||
packages: read
|
||||
actions: read
|
||||
|
||||
spellcheck:
|
||||
name: Spell Check
|
||||
uses: ./.github/workflows/spellcheck.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
dependabot-automerge:
|
||||
name: Dependabot Automerge
|
||||
if: ${{ github.actor == 'dependabot[bot]' }}
|
||||
needs: [security-scan, lint-js, lint-rust]
|
||||
needs: [security-scan, lint-js, lint-rust, codeql, spellcheck]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Dependabot metadata
|
||||
id: metadata
|
||||
uses: dependabot/fetch-metadata@v2
|
||||
uses: dependabot/fetch-metadata@08eff52bf64351f401fb50d4972fa95b9f2c2d1b #v2.4.0
|
||||
with:
|
||||
compat-lookup: true
|
||||
github-token: "${{ secrets.GITHUB_TOKEN }}"
|
||||
|
||||
- name: Auto-merge minor and patch updates
|
||||
uses: ridedott/merge-me-action@v2
|
||||
uses: ridedott/merge-me-action@f96a67511b4be051e77760230e6a3fb9cb7b1903 #v2.10.124
|
||||
with:
|
||||
GITHUB_TOKEN: ${{ secrets.SECRET_DEPENDABOT_GITHUB_TOKEN }}
|
||||
PRESET: DEPENDABOT_MINOR
|
||||
MERGE_METHOD: SQUASH
|
||||
PRESET: DEPENDABOT_MINOR
|
||||
MAXIMUM_RETRIES: 5
|
||||
timeout-minutes: 10
|
||||
|
||||
@@ -0,0 +1,19 @@
|
||||
name: Greetings
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened]
|
||||
issues:
|
||||
types: [opened]
|
||||
|
||||
jobs:
|
||||
greeting:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
issues: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: actions/first-interaction@2d4393e6bc0e2efb2e48fba7e06819c3bf61ffc9 # v2.0.0
|
||||
with:
|
||||
issue-message: "Thank you for your first issue ❤️ If it's a feature request, please make sure it's clear what you want, why you want it, and how important it is to you. If you posted a bug report, please make sure it includes as much detail as possible."
|
||||
pr-message: "Welcome to the community and thank you for your first contribution ❤️ A human will review your PR shortly. Make sure that the pipelines are green, so that the PR is considered ready for a review and could be merged."
|
||||
@@ -0,0 +1,199 @@
|
||||
name: Issue Validation
|
||||
|
||||
on:
|
||||
issues:
|
||||
types: [opened]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
issues: write
|
||||
models: read
|
||||
|
||||
jobs:
|
||||
validate-issue:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
|
||||
|
||||
- name: Get issue templates
|
||||
id: get-templates
|
||||
run: |
|
||||
# Read the issue templates
|
||||
if [ -f ".github/ISSUE_TEMPLATE/01-bug-report.md" ]; then
|
||||
echo "bug-template-exists=true" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
if [ -f ".github/ISSUE_TEMPLATE/02-feature-request.md" ]; then
|
||||
echo "feature-template-exists=true" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Create issue analysis prompt
|
||||
id: create-prompt
|
||||
env:
|
||||
ISSUE_TITLE: ${{ github.event.issue.title }}
|
||||
ISSUE_BODY: ${{ github.event.issue.body }}
|
||||
ISSUE_LABELS: ${{ join(github.event.issue.labels.*.name, ', ') }}
|
||||
run: |
|
||||
cat > issue_analysis.txt << EOF
|
||||
## Issue Content to Analyze:
|
||||
|
||||
**Title:** $ISSUE_TITLE
|
||||
|
||||
**Body:**
|
||||
$ISSUE_BODY
|
||||
|
||||
**Labels:** $ISSUE_LABELS
|
||||
EOF
|
||||
|
||||
- name: Validate issue with AI
|
||||
id: validate
|
||||
uses: actions/ai-inference@b81b2afb8390ee6839b494a404766bef6493c7d9 # v1.2.8
|
||||
with:
|
||||
prompt-file: issue_analysis.txt
|
||||
system-prompt: |
|
||||
You are an issue validation assistant for Donut Browser, an anti-detect browser.
|
||||
|
||||
Analyze the provided issue content and determine if it contains sufficient information based on these requirements:
|
||||
|
||||
**For Bug Reports, the issue should include:**
|
||||
1. Clear description of the problem
|
||||
2. Steps to reproduce the issue (numbered list preferred)
|
||||
3. Expected vs actual behavior
|
||||
4. Environment information (OS, browser version, etc.)
|
||||
5. Error messages, stack traces, or screenshots if applicable
|
||||
|
||||
**For Feature Requests, the issue should include:**
|
||||
1. Clear description of the requested feature
|
||||
2. Use case or problem it solves
|
||||
3. Proposed solution or how it should work
|
||||
4. Priority level or importance
|
||||
|
||||
**General Requirements for all issues:**
|
||||
1. Descriptive title
|
||||
2. Sufficient detail to understand and act upon
|
||||
3. Professional tone and clear communication
|
||||
|
||||
Respond in JSON format with the following structure:
|
||||
```json
|
||||
{
|
||||
"is_valid": true|false,
|
||||
"issue_type": "bug_report"|"feature_request"|"other",
|
||||
"missing_info": [
|
||||
"List of missing required information"
|
||||
],
|
||||
"suggestions": [
|
||||
"Specific suggestions for improvement"
|
||||
],
|
||||
"overall_assessment": "Brief assessment of the issue quality"
|
||||
}
|
||||
```
|
||||
|
||||
Be constructive and helpful in your feedback. If the issue is incomplete, provide specific guidance on what's needed.
|
||||
model: openai/gpt-4o
|
||||
|
||||
- name: Parse validation result and take action
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
# Prefer reading from the response file to avoid output truncation
|
||||
RESPONSE_FILE='${{ steps.validate.outputs.response-file }}'
|
||||
if [ -n "$RESPONSE_FILE" ] && [ -f "$RESPONSE_FILE" ]; then
|
||||
RAW_OUTPUT=$(cat "$RESPONSE_FILE")
|
||||
else
|
||||
RAW_OUTPUT='${{ steps.validate.outputs.response }}'
|
||||
fi
|
||||
|
||||
# Extract JSON if wrapped in markdown code fences; otherwise use raw
|
||||
JSON_RESULT=$(printf "%s" "$RAW_OUTPUT" | sed -n '/```json/,/```/p' | sed '1d;$d')
|
||||
if [ -z "$JSON_RESULT" ]; then
|
||||
JSON_RESULT="$RAW_OUTPUT"
|
||||
fi
|
||||
|
||||
# Parse JSON fields
|
||||
IS_VALID=$(echo "$JSON_RESULT" | jq -r '.is_valid // false')
|
||||
ISSUE_TYPE=$(echo "$JSON_RESULT" | jq -r '.issue_type // "other"')
|
||||
MISSING_INFO=$(echo "$JSON_RESULT" | jq -r '.missing_info[]? // empty' | sed 's/^/- /')
|
||||
SUGGESTIONS=$(echo "$JSON_RESULT" | jq -r '.suggestions[]? // empty' | sed 's/^/- /')
|
||||
ASSESSMENT=$(echo "$JSON_RESULT" | jq -r '.overall_assessment // "No assessment provided"')
|
||||
|
||||
echo "Issue validation result: $IS_VALID"
|
||||
echo "Issue type: $ISSUE_TYPE"
|
||||
|
||||
if [ "$IS_VALID" = "false" ]; then
|
||||
# Create a comment asking for more information
|
||||
cat > comment.md << EOF
|
||||
## 🤖 Issue Validation
|
||||
|
||||
Thank you for submitting this issue! However, it appears that some required information might be missing to help us better understand and address your concern.
|
||||
|
||||
**Issue Type Detected:** \`$ISSUE_TYPE\`
|
||||
|
||||
**Assessment:** $ASSESSMENT
|
||||
|
||||
### 📋 Missing Information:
|
||||
$MISSING_INFO
|
||||
|
||||
### 💡 Suggestions for Improvement:
|
||||
$SUGGESTIONS
|
||||
|
||||
### 📝 How to Provide Additional Information:
|
||||
|
||||
Please edit your original issue description to include the missing information. Here are our issue templates for reference:
|
||||
|
||||
- **Bug Report Template:** [View Template](.github/ISSUE_TEMPLATE/01-bug-report.md)
|
||||
- **Feature Request Template:** [View Template](.github/ISSUE_TEMPLATE/02-feature-request.md)
|
||||
|
||||
### 🔧 Quick Tips:
|
||||
- For **bug reports**: Include step-by-step reproduction instructions, your environment details, and any error messages
|
||||
- For **feature requests**: Describe the use case, expected behavior, and why this feature would be valuable
|
||||
- Add **screenshots** or **logs** when applicable
|
||||
|
||||
Once you've updated the issue with the missing information, feel free to remove this comment or reply to let us know you've made the updates.
|
||||
|
||||
---
|
||||
*This validation was performed automatically to ensure we have all the information needed to help you effectively.*
|
||||
EOF
|
||||
|
||||
# Post the comment
|
||||
gh issue comment ${{ github.event.issue.number }} --body-file comment.md
|
||||
|
||||
# Add a label to indicate validation needed
|
||||
gh issue edit ${{ github.event.issue.number }} --add-label "needs-info"
|
||||
|
||||
echo "✅ Validation comment posted and 'needs-info' label added"
|
||||
else
|
||||
echo "✅ Issue contains sufficient information"
|
||||
|
||||
# Prepare a summary comment even when valid
|
||||
cat > comment.md << EOF
|
||||
## 🤖 Issue Validation
|
||||
|
||||
**Issue Type Detected:** \`$ISSUE_TYPE\`
|
||||
|
||||
**Assessment:** $ASSESSMENT
|
||||
|
||||
$( [ -n "$SUGGESTIONS" ] && echo "### 💡 Suggestions:" && echo "$SUGGESTIONS" )
|
||||
|
||||
---
|
||||
*This validation was performed automatically to help triage issues.*
|
||||
EOF
|
||||
|
||||
# Post the summary comment
|
||||
gh issue comment ${{ github.event.issue.number }} --body-file comment.md
|
||||
|
||||
# Add appropriate labels based on issue type
|
||||
case "$ISSUE_TYPE" in
|
||||
"bug_report")
|
||||
gh issue edit ${{ github.event.issue.number }} --add-label "bug"
|
||||
;;
|
||||
"feature_request")
|
||||
gh issue edit ${{ github.event.issue.number }} --add-label "enhancement"
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
|
||||
- name: Cleanup
|
||||
run: |
|
||||
rm -f issue_analysis.txt comment.md
|
||||
@@ -16,6 +16,9 @@ on:
|
||||
- ".github/workflows/lint-rs.yml"
|
||||
- ".github/workflows/osv.yml"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
@@ -31,13 +34,13 @@ jobs:
|
||||
run: git config --global core.autocrlf false
|
||||
|
||||
- name: Checkout repository code
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
|
||||
|
||||
- name: Set up pnpm package manager
|
||||
uses: pnpm/action-setup@v4
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda #v4.1.0
|
||||
|
||||
- name: Set up Node.js v22
|
||||
uses: actions/setup-node@v4
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 #v4.4.0
|
||||
with:
|
||||
node-version-file: .node-version
|
||||
cache: "pnpm"
|
||||
@@ -45,10 +48,5 @@ jobs:
|
||||
- name: Install dependencies from lockfile
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Install nodecar dependencies
|
||||
working-directory: ./nodecar
|
||||
run: |
|
||||
pnpm install --frozen-lockfile
|
||||
|
||||
- name: Run lint step
|
||||
run: pnpm run lint:js
|
||||
|
||||
@@ -24,12 +24,14 @@ on:
|
||||
- "tsconfig.json"
|
||||
- "biome.json"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
fail-fast: true
|
||||
matrix:
|
||||
os: [macos-latest]
|
||||
os: [macos-latest, ubuntu-latest]
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
@@ -39,25 +41,29 @@ jobs:
|
||||
run: git config --global core.autocrlf false
|
||||
|
||||
- name: Checkout repository code
|
||||
uses: actions/checkout@v4
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
|
||||
|
||||
- name: Set up pnpm package manager
|
||||
uses: pnpm/action-setup@v4
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda #v4.1.0
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 #v4.4.0
|
||||
with:
|
||||
node-version-file: .node-version
|
||||
cache: "pnpm"
|
||||
|
||||
- name: Install Rust toolchain
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b #master
|
||||
with:
|
||||
toolchain: stable
|
||||
components: rustfmt, clippy
|
||||
|
||||
- name: Install cargo-audit
|
||||
run: cargo install cargo-audit
|
||||
|
||||
- name: Install banderole
|
||||
run: cargo install banderole
|
||||
|
||||
- name: Install dependencies (Ubuntu only)
|
||||
if: matrix.os == 'ubuntu-latest'
|
||||
run: |
|
||||
@@ -67,11 +73,6 @@ jobs:
|
||||
- name: Install frontend dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Install nodecar dependencies
|
||||
working-directory: ./nodecar
|
||||
run: |
|
||||
pnpm install --frozen-lockfile
|
||||
|
||||
- name: Build nodecar binary
|
||||
shell: bash
|
||||
working-directory: ./nodecar
|
||||
@@ -84,16 +85,21 @@ jobs:
|
||||
pnpm run build:win-x64
|
||||
fi
|
||||
|
||||
# TODO: replace with an integration test that fetches everything from rust
|
||||
# - name: Download Camoufox for testing
|
||||
# run: npx camoufox-js fetch
|
||||
# continue-on-error: true
|
||||
|
||||
- name: Copy nodecar binary to Tauri binaries
|
||||
shell: bash
|
||||
run: |
|
||||
mkdir -p src-tauri/binaries
|
||||
if [[ "${{ matrix.os }}" == "ubuntu-latest" ]]; then
|
||||
cp nodecar/dist/nodecar src-tauri/binaries/nodecar-x86_64-unknown-linux-gnu
|
||||
cp nodecar/nodecar-bin src-tauri/binaries/nodecar-x86_64-unknown-linux-gnu
|
||||
elif [[ "${{ matrix.os }}" == "macos-latest" ]]; then
|
||||
cp nodecar/dist/nodecar src-tauri/binaries/nodecar-aarch64-apple-darwin
|
||||
cp nodecar/nodecar-bin src-tauri/binaries/nodecar-aarch64-apple-darwin
|
||||
elif [[ "${{ matrix.os }}" == "windows-latest" ]]; then
|
||||
cp nodecar/dist/nodecar.exe src-tauri/binaries/nodecar-x86_64-pc-windows-msvc.exe
|
||||
cp nodecar/nodecar-bin.exe src-tauri/binaries/nodecar-x86_64-pc-windows-msvc.exe
|
||||
fi
|
||||
|
||||
- name: Create empty 'dist' directory
|
||||
@@ -107,7 +113,7 @@ jobs:
|
||||
run: cargo clippy --all-targets --all-features -- -D warnings -D clippy::all
|
||||
working-directory: src-tauri
|
||||
|
||||
- name: Run Rust unit tests
|
||||
- name: Run Rust tests
|
||||
run: cargo test
|
||||
working-directory: src-tauri
|
||||
|
||||
|
||||
@@ -50,7 +50,7 @@ jobs:
|
||||
scan-scheduled:
|
||||
name: Scheduled Security Scan
|
||||
if: ${{ github.event_name == 'push' || github.event_name == 'schedule' }}
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@e69cc6c86b31f1e7e23935bbe7031b50e51082de" # v2.0.2
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@b00f71e051ddddc6e46a193c31c8c0bf283bf9e6" # v2.1.0
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
@@ -63,7 +63,7 @@ jobs:
|
||||
scan-pr:
|
||||
name: PR Security Scan
|
||||
if: ${{ github.event_name == 'pull_request' || github.event_name == 'merge_group' }}
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@e69cc6c86b31f1e7e23935bbe7031b50e51082de" # v2.0.2
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@b00f71e051ddddc6e46a193c31c8c0bf283bf9e6" # v2.1.0
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
|
||||
@@ -16,16 +16,20 @@ jobs:
|
||||
name: Lint JavaScript/TypeScript
|
||||
uses: ./.github/workflows/lint-js.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
lint-rust:
|
||||
name: Lint Rust
|
||||
uses: ./.github/workflows/lint-rs.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
security-scan:
|
||||
name: Security Vulnerability Scan
|
||||
if: ${{ github.event_name == 'pull_request' || github.event_name == 'merge_group' }}
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@e69cc6c86b31f1e7e23935bbe7031b50e51082de" # v2.0.2
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@b00f71e051ddddc6e46a193c31c8c0bf283bf9e6" # v2.1.0
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
|
||||
@@ -0,0 +1,118 @@
|
||||
name: Generate Release Notes
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
models: read
|
||||
|
||||
jobs:
|
||||
generate-release-notes:
|
||||
runs-on: ubuntu-latest
|
||||
if: startsWith(github.ref, 'refs/tags/v') && !github.event.release.prerelease
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
|
||||
with:
|
||||
fetch-depth: 0 # Fetch full history to compare with previous release
|
||||
|
||||
- name: Get previous release tag
|
||||
id: get-previous-tag
|
||||
run: |
|
||||
# Get the previous release tag (excluding the current one)
|
||||
CURRENT_TAG="${{ github.ref_name }}"
|
||||
PREVIOUS_TAG=$(git tag --sort=-version:refname | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' | grep -v "$CURRENT_TAG" | head -n 1)
|
||||
|
||||
if [ -z "$PREVIOUS_TAG" ]; then
|
||||
echo "No previous release found, using initial commit"
|
||||
PREVIOUS_TAG=$(git rev-list --max-parents=0 HEAD)
|
||||
fi
|
||||
|
||||
echo "current-tag=$CURRENT_TAG" >> $GITHUB_OUTPUT
|
||||
echo "previous-tag=$PREVIOUS_TAG" >> $GITHUB_OUTPUT
|
||||
echo "Previous release: $PREVIOUS_TAG"
|
||||
echo "Current release: $CURRENT_TAG"
|
||||
|
||||
- name: Get commit messages between releases
|
||||
id: get-commits
|
||||
run: |
|
||||
# Get commit messages between previous and current release
|
||||
PREVIOUS_TAG="${{ steps.get-previous-tag.outputs.previous-tag }}"
|
||||
CURRENT_TAG="${{ steps.get-previous-tag.outputs.current-tag }}"
|
||||
|
||||
# Get commit log with detailed format
|
||||
COMMIT_LOG=$(git log --pretty=format:"- %s (%h by %an)" $PREVIOUS_TAG..$CURRENT_TAG --no-merges)
|
||||
|
||||
# Get changed files summary
|
||||
CHANGED_FILES=$(git diff --name-status $PREVIOUS_TAG..$CURRENT_TAG | head -20)
|
||||
|
||||
# Save to files for AI processing
|
||||
echo "$COMMIT_LOG" > commits.txt
|
||||
echo "$CHANGED_FILES" > changes.txt
|
||||
|
||||
echo "commits-file=commits.txt" >> $GITHUB_OUTPUT
|
||||
echo "changes-file=changes.txt" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Generate release notes with AI
|
||||
id: generate-notes
|
||||
uses: actions/ai-inference@b81b2afb8390ee6839b494a404766bef6493c7d9 # v1.2.8
|
||||
with:
|
||||
prompt-file: commits.txt
|
||||
system-prompt: |
|
||||
You are an expert technical writer tasked with generating comprehensive release notes for Donut Browser, a powerful anti-detect browser.
|
||||
|
||||
Analyze the provided commit messages and generate well-structured release notes following this format:
|
||||
|
||||
## What's New in ${{ steps.get-previous-tag.outputs.current-tag }}
|
||||
|
||||
[Brief 1-2 sentence overview of the release]
|
||||
|
||||
### ✨ New Features
|
||||
[List new features with brief descriptions]
|
||||
|
||||
### 🐛 Bug Fixes
|
||||
[List bug fixes]
|
||||
|
||||
### 🔧 Improvements
|
||||
[List improvements and enhancements]
|
||||
|
||||
### 📚 Documentation
|
||||
[List documentation updates if any]
|
||||
|
||||
### 🔄 Dependencies
|
||||
[List dependency updates if any]
|
||||
|
||||
### 🛠️ Developer Experience
|
||||
[List development-related changes if any]
|
||||
|
||||
Guidelines:
|
||||
- Use clear, user-friendly language
|
||||
- Group related commits logically
|
||||
- Omit minor commits like formatting, typos unless significant
|
||||
- Focus on user-facing changes
|
||||
- Use emojis sparingly and consistently
|
||||
- Keep descriptions concise but informative
|
||||
- If commits are unclear, infer the purpose from the context
|
||||
|
||||
The application is a desktop app built with Tauri + Next.js that helps users manage multiple browser profiles with proxy support.
|
||||
model: gpt-4o
|
||||
|
||||
- name: Update release with generated notes
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
# Get the generated release notes
|
||||
RELEASE_NOTES="${{ steps.generate-notes.outputs.response }}"
|
||||
|
||||
# Update the release with the generated notes
|
||||
gh api --method PATCH /repos/${{ github.repository }}/releases/${{ github.event.release.id }} \
|
||||
--field body="$RELEASE_NOTES"
|
||||
|
||||
echo "✅ Release notes updated successfully!"
|
||||
|
||||
- name: Cleanup
|
||||
run: |
|
||||
rm -f commits.txt changes.txt
|
||||
@@ -13,7 +13,7 @@ env:
|
||||
jobs:
|
||||
security-scan:
|
||||
name: Security Vulnerability Scan
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@e69cc6c86b31f1e7e23935bbe7031b50e51082de" # v2.0.2
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@b00f71e051ddddc6e46a193c31c8c0bf283bf9e6" # v2.1.0
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
@@ -31,14 +31,35 @@ jobs:
|
||||
name: Lint JavaScript/TypeScript
|
||||
uses: ./.github/workflows/lint-js.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
lint-rust:
|
||||
name: Lint Rust
|
||||
uses: ./.github/workflows/lint-rs.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
codeql:
|
||||
name: CodeQL
|
||||
uses: ./.github/workflows/codeql.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
security-events: write
|
||||
contents: read
|
||||
packages: read
|
||||
actions: read
|
||||
|
||||
spellcheck:
|
||||
name: Spell Check
|
||||
uses: ./.github/workflows/spellcheck.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
release:
|
||||
needs: [security-scan, lint-js, lint-rust]
|
||||
needs: [security-scan, lint-js, lint-rust, codeql, spellcheck]
|
||||
permissions:
|
||||
contents: write
|
||||
strategy:
|
||||
@@ -46,38 +67,37 @@ jobs:
|
||||
matrix:
|
||||
include:
|
||||
- platform: "macos-latest"
|
||||
args: "--target aarch64-apple-darwin"
|
||||
args: "--target aarch64-apple-darwin --verbose"
|
||||
arch: "aarch64"
|
||||
target: "aarch64-apple-darwin"
|
||||
pkg_target: "latest-macos-arm64"
|
||||
nodecar_script: "build:mac-aarch64"
|
||||
- platform: "macos-latest"
|
||||
args: "--target x86_64-apple-darwin"
|
||||
args: "--target x86_64-apple-darwin --verbose"
|
||||
arch: "x86_64"
|
||||
target: "x86_64-apple-darwin"
|
||||
pkg_target: "latest-macos-x64"
|
||||
nodecar_script: "build:mac-x86_64"
|
||||
- platform: "ubuntu-22.04"
|
||||
args: "--target x86_64-unknown-linux-gnu"
|
||||
- platform: "ubuntu-latest"
|
||||
args: "--target x86_64-unknown-linux-gnu --verbose"
|
||||
arch: "x86_64"
|
||||
target: "x86_64-unknown-linux-gnu"
|
||||
pkg_target: "latest-linux-x64"
|
||||
nodecar_script: "build:linux-x64"
|
||||
- platform: "ubuntu-22.04-arm"
|
||||
args: "--target aarch64-unknown-linux-gnu"
|
||||
args: "--target aarch64-unknown-linux-gnu --verbose"
|
||||
arch: "aarch64"
|
||||
target: "aarch64-unknown-linux-gnu"
|
||||
pkg_target: "latest-linux-arm64"
|
||||
nodecar_script: "build:linux-arm64"
|
||||
# Future platforms can be added here:
|
||||
# - platform: "windows-latest"
|
||||
# args: "--target x86_64-pc-windows-msvc"
|
||||
# args: "--target x86_64-pc-windows-msvc --verbose"
|
||||
# arch: "x86_64"
|
||||
# target: "x86_64-pc-windows-msvc"
|
||||
# pkg_target: "latest-win-x64"
|
||||
# nodecar_script: "build:win-x64"
|
||||
# - platform: "windows-latest"
|
||||
# args: "--target aarch64-pc-windows-msvc"
|
||||
# - platform: "windows-11-arm"
|
||||
# args: "--target aarch64-pc-windows-msvc --verbose"
|
||||
# arch: "aarch64"
|
||||
# target: "aarch64-pc-windows-msvc"
|
||||
# pkg_target: "latest-win-arm64"
|
||||
@@ -85,40 +105,39 @@ jobs:
|
||||
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 #v4.4.0
|
||||
with:
|
||||
node-version-file: .node-version
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda #v4.1.0
|
||||
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b #master
|
||||
with:
|
||||
toolchain: stable
|
||||
targets: ${{ matrix.target }}
|
||||
|
||||
- name: Install dependencies (Ubuntu only)
|
||||
if: matrix.platform == 'ubuntu-22.04' || matrix.platform == 'ubuntu-22.04-arm'
|
||||
if: matrix.platform == 'ubuntu-latest' || matrix.platform == 'ubuntu-22.04-arm'
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libgtk-3-dev libayatana-appindicator3-dev librsvg2-dev pkg-config xdg-utils
|
||||
|
||||
- name: Rust cache
|
||||
uses: swatinem/rust-cache@v2
|
||||
uses: swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 #v2.8.0
|
||||
with:
|
||||
workdir: ./src-tauri
|
||||
|
||||
- name: Install banderole
|
||||
run: cargo install banderole
|
||||
|
||||
- name: Install frontend dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Install nodecar dependencies
|
||||
working-directory: ./nodecar
|
||||
run: |
|
||||
pnpm install --frozen-lockfile
|
||||
|
||||
- name: Build nodecar sidecar
|
||||
shell: bash
|
||||
working-directory: ./nodecar
|
||||
@@ -130,16 +149,20 @@ jobs:
|
||||
run: |
|
||||
mkdir -p src-tauri/binaries
|
||||
if [[ "${{ matrix.platform }}" == "windows-latest" ]]; then
|
||||
cp nodecar/dist/nodecar.exe src-tauri/binaries/nodecar-${{ matrix.target }}.exe
|
||||
cp nodecar/nodecar-bin src-tauri/binaries/nodecar-${{ matrix.target }}.exe
|
||||
else
|
||||
cp nodecar/dist/nodecar src-tauri/binaries/nodecar-${{ matrix.target }}
|
||||
cp nodecar/nodecar-bin src-tauri/binaries/nodecar-${{ matrix.target }}
|
||||
fi
|
||||
|
||||
# - name: Download Camoufox for testing
|
||||
# run: npx camoufox-js fetch
|
||||
# continue-on-error: true
|
||||
|
||||
- name: Build frontend
|
||||
run: pnpm build
|
||||
|
||||
- name: Build Tauri app
|
||||
uses: tauri-apps/tauri-action@v0
|
||||
uses: tauri-apps/tauri-action@564aea5a8075c7a54c167bb0cf5b3255314a7f9d #v0.5.22
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GITHUB_REF_NAME: ${{ github.ref_name }}
|
||||
@@ -150,3 +173,9 @@ jobs:
|
||||
releaseDraft: false
|
||||
prerelease: false
|
||||
args: ${{ matrix.args }}
|
||||
|
||||
- name: Commit CHANGELOG.md
|
||||
uses: stefanzweifel/git-auto-commit-action@778341af668090896ca464160c2def5d1d1a3eb0 #v6.0.1
|
||||
with:
|
||||
branch: main
|
||||
commit_message: "docs: update CHANGELOG.md for ${{ github.ref_name }} [skip ci]"
|
||||
|
||||
@@ -12,7 +12,7 @@ env:
|
||||
jobs:
|
||||
security-scan:
|
||||
name: Security Vulnerability Scan
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@e69cc6c86b31f1e7e23935bbe7031b50e51082de" # v2.0.2
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@b00f71e051ddddc6e46a193c31c8c0bf283bf9e6" # v2.1.0
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
@@ -30,14 +30,35 @@ jobs:
|
||||
name: Lint JavaScript/TypeScript
|
||||
uses: ./.github/workflows/lint-js.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
lint-rust:
|
||||
name: Lint Rust
|
||||
uses: ./.github/workflows/lint-rs.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
codeql:
|
||||
name: CodeQL
|
||||
uses: ./.github/workflows/codeql.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
security-events: write
|
||||
contents: read
|
||||
packages: read
|
||||
actions: read
|
||||
|
||||
spellcheck:
|
||||
name: Spell Check
|
||||
uses: ./.github/workflows/spellcheck.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
rolling-release:
|
||||
needs: [security-scan, lint-js, lint-rust]
|
||||
needs: [security-scan, lint-js, lint-rust, codeql, spellcheck]
|
||||
permissions:
|
||||
contents: write
|
||||
strategy:
|
||||
@@ -45,66 +66,77 @@ jobs:
|
||||
matrix:
|
||||
include:
|
||||
- platform: "macos-latest"
|
||||
args: "--target aarch64-apple-darwin"
|
||||
args: "--target aarch64-apple-darwin --verbose"
|
||||
arch: "aarch64"
|
||||
target: "aarch64-apple-darwin"
|
||||
pkg_target: "latest-macos-arm64"
|
||||
nodecar_script: "build:mac-aarch64"
|
||||
- platform: "macos-latest"
|
||||
args: "--target x86_64-apple-darwin"
|
||||
args: "--target x86_64-apple-darwin --verbose"
|
||||
arch: "x86_64"
|
||||
target: "x86_64-apple-darwin"
|
||||
pkg_target: "latest-macos-x64"
|
||||
nodecar_script: "build:mac-x86_64"
|
||||
- platform: "ubuntu-22.04"
|
||||
args: "--target x86_64-unknown-linux-gnu"
|
||||
- platform: "ubuntu-latest"
|
||||
args: "--target x86_64-unknown-linux-gnu --verbose"
|
||||
arch: "x86_64"
|
||||
target: "x86_64-unknown-linux-gnu"
|
||||
pkg_target: "latest-linux-x64"
|
||||
nodecar_script: "build:linux-x64"
|
||||
- platform: "ubuntu-22.04-arm"
|
||||
args: "--target aarch64-unknown-linux-gnu"
|
||||
args: "--target aarch64-unknown-linux-gnu --verbose"
|
||||
arch: "aarch64"
|
||||
target: "aarch64-unknown-linux-gnu"
|
||||
pkg_target: "latest-linux-arm64"
|
||||
nodecar_script: "build:linux-arm64"
|
||||
- platform: "windows-latest"
|
||||
args: "--target x86_64-pc-windows-msvc --verbose"
|
||||
arch: "x86_64"
|
||||
target: "x86_64-pc-windows-msvc"
|
||||
pkg_target: "latest-win-x64"
|
||||
nodecar_script: "build:win-x64"
|
||||
# - platform: "windows-11-arm"
|
||||
# args: "--target aarch64-pc-windows-msvc --verbose"
|
||||
# arch: "aarch64"
|
||||
# target: "aarch64-pc-windows-msvc"
|
||||
# pkg_target: "latest-win-arm64"
|
||||
# nodecar_script: "build:win-arm64"
|
||||
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 #v4.4.0
|
||||
with:
|
||||
node-version-file: .node-version
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@v4
|
||||
uses: pnpm/action-setup@a7487c7e89a18df4991f7f222e4898a00d66ddda #v4.1.0
|
||||
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
uses: dtolnay/rust-toolchain@b3b07ba8b418998c39fb20f53e8b695cdcc8de1b #master
|
||||
with:
|
||||
toolchain: stable
|
||||
targets: ${{ matrix.target }}
|
||||
|
||||
- name: Install dependencies (Ubuntu only)
|
||||
if: matrix.platform == 'ubuntu-22.04' || matrix.platform == 'ubuntu-22.04-arm'
|
||||
if: matrix.platform == 'ubuntu-latest' || matrix.platform == 'ubuntu-22.04-arm'
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libgtk-3-dev libayatana-appindicator3-dev librsvg2-dev pkg-config xdg-utils
|
||||
|
||||
- name: Rust cache
|
||||
uses: swatinem/rust-cache@v2
|
||||
uses: swatinem/rust-cache@98c8021b550208e191a6a3145459bfc9fb29c4c0 #v2.8.0
|
||||
with:
|
||||
workdir: ./src-tauri
|
||||
|
||||
- name: Install banderole
|
||||
run: cargo install banderole
|
||||
|
||||
- name: Install frontend dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Install nodecar dependencies
|
||||
working-directory: ./nodecar
|
||||
run: |
|
||||
pnpm install --frozen-lockfile
|
||||
|
||||
- name: Build nodecar sidecar
|
||||
shell: bash
|
||||
working-directory: ./nodecar
|
||||
@@ -116,29 +148,38 @@ jobs:
|
||||
run: |
|
||||
mkdir -p src-tauri/binaries
|
||||
if [[ "${{ matrix.platform }}" == "windows-latest" ]]; then
|
||||
cp nodecar/dist/nodecar.exe src-tauri/binaries/nodecar-${{ matrix.target }}.exe
|
||||
cp nodecar/nodecar-bin src-tauri/binaries/nodecar-${{ matrix.target }}.exe
|
||||
else
|
||||
cp nodecar/dist/nodecar src-tauri/binaries/nodecar-${{ matrix.target }}
|
||||
cp nodecar/nodecar-bin src-tauri/binaries/nodecar-${{ matrix.target }}
|
||||
fi
|
||||
|
||||
# - name: Download Camoufox for testing
|
||||
# run: npx camoufox-js fetch
|
||||
# continue-on-error: true
|
||||
|
||||
- name: Build frontend
|
||||
run: pnpm build
|
||||
|
||||
- name: Get commit hash
|
||||
id: commit
|
||||
run: echo "hash=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
|
||||
- name: Generate nightly timestamp
|
||||
id: timestamp
|
||||
shell: bash
|
||||
run: |
|
||||
TIMESTAMP=$(date -u +"%Y-%m-%d")
|
||||
COMMIT_HASH=$(echo "${GITHUB_SHA}" | cut -c1-7)
|
||||
echo "timestamp=${TIMESTAMP}-${COMMIT_HASH}" >> $GITHUB_OUTPUT
|
||||
echo "Generated timestamp: ${TIMESTAMP}-${COMMIT_HASH}"
|
||||
|
||||
- name: Build Tauri app
|
||||
uses: tauri-apps/tauri-action@v0
|
||||
uses: tauri-apps/tauri-action@564aea5a8075c7a54c167bb0cf5b3255314a7f9d #v0.5.22
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
BUILD_TAG: "nightly-${{ steps.commit.outputs.hash }}"
|
||||
GITHUB_REF_NAME: "nightly-${{ steps.commit.outputs.hash }}"
|
||||
BUILD_TAG: "nightly-${{ steps.timestamp.outputs.timestamp }}"
|
||||
GITHUB_REF_NAME: "nightly-${{ steps.timestamp.outputs.timestamp }}"
|
||||
GITHUB_SHA: ${{ github.sha }}
|
||||
with:
|
||||
tagName: "nightly-${{ steps.commit.outputs.hash }}"
|
||||
releaseName: "Donut Browser Nightly (Build ${{ steps.commit.outputs.hash }})"
|
||||
releaseBody: "⚠️ **Nightly Release** - This is an automatically generated pre-release build from the latest main branch. Use with caution.\n\nCommit: ${{ github.sha }}\nBuild: ${{ steps.commit.outputs.hash }}"
|
||||
tagName: "nightly-${{ steps.timestamp.outputs.timestamp }}"
|
||||
releaseName: "Donut Browser Nightly (Build ${{ steps.timestamp.outputs.timestamp }})"
|
||||
releaseBody: "⚠️ **Nightly Release** - This is an automatically generated pre-release build from the latest main branch. Use with caution.\n\nCommit: ${{ github.sha }}\nBuild: ${{ steps.timestamp.outputs.timestamp }}"
|
||||
releaseDraft: false
|
||||
prerelease: true
|
||||
args: ${{ matrix.args }}
|
||||
|
||||
@@ -0,0 +1,26 @@
|
||||
name: Spell Check
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
push:
|
||||
branches: ["main"]
|
||||
pull_request:
|
||||
branches: ["main"]
|
||||
|
||||
env:
|
||||
RUST_BACKTRACE: 1
|
||||
CARGO_TERM_COLOR: always
|
||||
CLICOLOR: 1
|
||||
|
||||
jobs:
|
||||
spelling:
|
||||
name: Spell Check with Typos
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout Actions Repository
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 #v4.2.2
|
||||
- name: Spell Check Repo
|
||||
uses: crate-ci/typos@52bd719c2c91f9d676e2aa359fc8e0db8925e6d8 #v1.35.3
|
||||
@@ -0,0 +1,21 @@
|
||||
name: Mark stale issues and pull requests
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: "35 23 * * *"
|
||||
|
||||
jobs:
|
||||
stale:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
issues: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/stale@5bef64f19d7facfb25b37b414482c7164d639639 # v9.1.0
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
stale-issue-message: "This issue has been inactive for 60 days. Please respond to keep it open."
|
||||
stale-pr-message: "This pull request has been inactive for 60 days. Please respond to keep it open."
|
||||
stale-issue-label: "stale"
|
||||
stale-pr-label: "stale"
|
||||
+3
-3
@@ -46,7 +46,7 @@ yarn-error.log*
|
||||
# typescript
|
||||
*.tsbuildinfo
|
||||
|
||||
# eslint
|
||||
.eslintcache
|
||||
!**/.gitkeep
|
||||
|
||||
!**/.gitkeep
|
||||
# nodecar
|
||||
nodecar/nodecar-bin
|
||||
Vendored
+115
-2
@@ -1,30 +1,75 @@
|
||||
{
|
||||
"cSpell.words": [
|
||||
"adwaita",
|
||||
"ahooks",
|
||||
"akhilmhdh",
|
||||
"appimage",
|
||||
"appindicator",
|
||||
"applescript",
|
||||
"asyncio",
|
||||
"autoconfig",
|
||||
"autologin",
|
||||
"biomejs",
|
||||
"breezedark",
|
||||
"browserforge",
|
||||
"busctl",
|
||||
"CAMOU",
|
||||
"camoufox",
|
||||
"cdylib",
|
||||
"certifi",
|
||||
"CFURL",
|
||||
"checkin",
|
||||
"chrono",
|
||||
"CLICOLOR",
|
||||
"clippy",
|
||||
"cmdk",
|
||||
"codegen",
|
||||
"codesign",
|
||||
"commitish",
|
||||
"CTYPE",
|
||||
"daijro",
|
||||
"dataclasses",
|
||||
"datareporting",
|
||||
"datas",
|
||||
"dconf",
|
||||
"devedition",
|
||||
"distro",
|
||||
"doctest",
|
||||
"doesn",
|
||||
"domcontentloaded",
|
||||
"donutbrowser",
|
||||
"dpkg",
|
||||
"dtolnay",
|
||||
"dyld",
|
||||
"elif",
|
||||
"errorlevel",
|
||||
"esac",
|
||||
"esbuild",
|
||||
"eslintcache",
|
||||
"etree",
|
||||
"flate",
|
||||
"frontmost",
|
||||
"geoip",
|
||||
"getcwd",
|
||||
"gettimezone",
|
||||
"gifs",
|
||||
"gsettings",
|
||||
"healthreport",
|
||||
"hiddenimports",
|
||||
"hkcu",
|
||||
"hooksconfig",
|
||||
"hookspath",
|
||||
"icns",
|
||||
"idlelib",
|
||||
"idletime",
|
||||
"idna",
|
||||
"Inno",
|
||||
"kdeglobals",
|
||||
"keras",
|
||||
"KHTML",
|
||||
"Kolkata",
|
||||
"kreadconfig",
|
||||
"launchservices",
|
||||
"letterboxing",
|
||||
"libatk",
|
||||
"libayatana",
|
||||
"libcairo",
|
||||
@@ -33,41 +78,109 @@
|
||||
"libpango",
|
||||
"librsvg",
|
||||
"libwebkit",
|
||||
"libxdo",
|
||||
"localtime",
|
||||
"lxml",
|
||||
"lzma",
|
||||
"mmdb",
|
||||
"mountpoint",
|
||||
"msiexec",
|
||||
"msvc",
|
||||
"msys",
|
||||
"Mullvad",
|
||||
"mullvadbrowser",
|
||||
"mypy",
|
||||
"noarchive",
|
||||
"nobrowse",
|
||||
"noconfirm",
|
||||
"nodecar",
|
||||
"nodemon",
|
||||
"norestart",
|
||||
"NSIS",
|
||||
"ntlm",
|
||||
"numpy",
|
||||
"objc",
|
||||
"orhun",
|
||||
"orjson",
|
||||
"osascript",
|
||||
"oscpu",
|
||||
"outpath",
|
||||
"pathex",
|
||||
"pathlib",
|
||||
"peerconnection",
|
||||
"pids",
|
||||
"pixbuf",
|
||||
"plasmohq",
|
||||
"platformdirs",
|
||||
"prefs",
|
||||
"propertylist",
|
||||
"psutil",
|
||||
"pycache",
|
||||
"pydantic",
|
||||
"pyee",
|
||||
"pyinstaller",
|
||||
"pyoxidizer",
|
||||
"pytest",
|
||||
"pyyaml",
|
||||
"reqwest",
|
||||
"ridedott",
|
||||
"rlib",
|
||||
"rustc",
|
||||
"SARIF",
|
||||
"scipy",
|
||||
"screeninfo",
|
||||
"serde",
|
||||
"setuptools",
|
||||
"shadcn",
|
||||
"showcursor",
|
||||
"shutil",
|
||||
"signon",
|
||||
"signum",
|
||||
"sklearn",
|
||||
"sonner",
|
||||
"splitn",
|
||||
"sspi",
|
||||
"staticlib",
|
||||
"stefanzweifel",
|
||||
"subdirs",
|
||||
"subkey",
|
||||
"SUPPRESSMSGBOXES",
|
||||
"swatinem",
|
||||
"sysinfo",
|
||||
"systempreferences",
|
||||
"systemsetup",
|
||||
"taskkill",
|
||||
"tasklist",
|
||||
"tauri",
|
||||
"TERX",
|
||||
"testpass",
|
||||
"testuser",
|
||||
"timedatectl",
|
||||
"titlebar",
|
||||
"tkinter",
|
||||
"Torbrowser",
|
||||
"tqdm",
|
||||
"trackingprotection",
|
||||
"turbopack",
|
||||
"turtledemo",
|
||||
"udeps",
|
||||
"unlisten",
|
||||
"unminimize",
|
||||
"unrs",
|
||||
"urlencoding",
|
||||
"urllib",
|
||||
"venv",
|
||||
"vercel",
|
||||
"VERYSILENT",
|
||||
"webgl",
|
||||
"webrtc",
|
||||
"winreg",
|
||||
"wiremock",
|
||||
"xattr",
|
||||
"zhom"
|
||||
"xfconf",
|
||||
"xsettings",
|
||||
"zhom",
|
||||
"zipball",
|
||||
"zoneinfo"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -0,0 +1,8 @@
|
||||
# Instructions for AI Agents
|
||||
|
||||
- After your changes, instead of running specific tests or linting specific files, run "pnpm format && pnpm lint && pnpm test". It means that you first format the code, then lint it, then test it, so that no part is broken after your changes.
|
||||
- Don't leave comments that don't add value.
|
||||
- Do not duplicate code unless you have a very good reason to do so. It is important that the same logic is not duplicated multiple times.
|
||||
- Before finishing the task and showing summary, always run "pnpm format && pnpm lint && pnpm test" at the root of the project to ensure that you don't finish with broken application.
|
||||
- Anytime you change nodecar's code and try to test, recompile it with "cd nodecar && pnpm build".
|
||||
- If there is a global singleton of a struct, only use it inside a method while properly initializing it, unless I have explicitly specified in the request otherwise.
|
||||
@@ -1,5 +0,0 @@
|
||||
# Instructions for AI Agents
|
||||
|
||||
- If you want to run tests, only ever run them as "pnpm format && pnpm lint && pnpm test".
|
||||
- Don't leave comments that don't add value
|
||||
- Do not duplicate code unless you have a very good reason to do so. It is important that the same logic is not duplicated multiple times
|
||||
+1
-1
@@ -23,6 +23,6 @@ Examples of unacceptable behavior by participants include:
|
||||
|
||||
## Enforcement
|
||||
|
||||
Violations of the Code of Conduct may be reported by pinging @zhom on Github. All reports will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. Further details of specific enforcement policies may be posted separately.
|
||||
Violations of the Code of Conduct may be reported to contact at donutbrowser dot com. All reports will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. Further details of specific enforcement policies may be posted separately.
|
||||
|
||||
We hold the right and responsibility to remove comments or other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any members for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
|
||||
|
||||
+6
-42
@@ -26,6 +26,7 @@ Ensure you have the following dependencies installed:
|
||||
- Node.js (see `.node-version` for exact version)
|
||||
- pnpm package manager
|
||||
- Latest Rust and Cargo toolchain
|
||||
- [Banderole](https://github.com/zhom/banderole)
|
||||
- [Tauri prerequisites guide](https://v2.tauri.app/start/prerequisites/).
|
||||
|
||||
## Run Locally
|
||||
@@ -46,12 +47,13 @@ After having the above dependencies installed, proceed through the following ste
|
||||
pnpm install
|
||||
```
|
||||
|
||||
4. **Install nodecar dependencies**
|
||||
4. **Build nodecar**
|
||||
|
||||
Building nodecar requires you to have `banderole` installed.
|
||||
|
||||
```bash
|
||||
cd nodecar
|
||||
pnpm install --frozen-lockfile
|
||||
cd ..
|
||||
pnpm build
|
||||
```
|
||||
|
||||
5. **Start the development server**
|
||||
@@ -105,7 +107,6 @@ Make sure the build completes successfully without errors.
|
||||
## Testing
|
||||
|
||||
- Always test your changes on the target platform
|
||||
- Test both development and production builds
|
||||
- Verify that existing functionality still works
|
||||
- Add tests for new features when possible
|
||||
|
||||
@@ -149,50 +150,13 @@ Refs #00000
|
||||
|
||||
- Ensure that "Allow edits from maintainers" option is checked
|
||||
|
||||
## Types of Contributions
|
||||
|
||||
### Bug Reports
|
||||
|
||||
When filing bug reports, please include:
|
||||
|
||||
- Clear description of the issue
|
||||
- Steps to reproduce
|
||||
- Expected vs actual behavior
|
||||
- Environment details (OS, version, etc.)
|
||||
- Screenshots or error logs if applicable
|
||||
|
||||
### Feature Requests
|
||||
|
||||
When suggesting new features:
|
||||
|
||||
- Explain the use case and why it's valuable
|
||||
- Describe the desired behavior
|
||||
- Consider alternatives you've thought of
|
||||
- Check if it aligns with our roadmap
|
||||
|
||||
### Code Contributions
|
||||
|
||||
- Bug fixes
|
||||
- New features
|
||||
- Performance improvements
|
||||
- Documentation updates
|
||||
- Test coverage improvements
|
||||
|
||||
### Documentation
|
||||
|
||||
- README improvements
|
||||
- Code comments
|
||||
- API documentation
|
||||
- Tutorial content
|
||||
- Translation work
|
||||
|
||||
## Architecture Overview
|
||||
|
||||
Donut Browser is built with:
|
||||
|
||||
- **Frontend**: Next.js React application
|
||||
- **Backend**: Tauri (Rust) for native functionality
|
||||
- **Node.js Sidecar**: `nodecar` binary for proxy support
|
||||
- **Node.js Sidecar**: `nodecar` binary for access to JavaScript ecosystem
|
||||
- **Build System**: GitHub Actions for CI/CD
|
||||
|
||||
Understanding this architecture will help you contribute more effectively.
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
<div align="center">
|
||||
<img src="assets/logo.png" alt="Donut Browser Logo" width="150">
|
||||
<h1>Donut Browser</h1>
|
||||
<strong>A powerful browser orchestrator that puts you in control of your browsing experience. 🍩</strong>
|
||||
<strong>A powerful anti-detect browser that puts you in control of your browsing experience. 🍩</strong>
|
||||
</div>
|
||||
<br>
|
||||
|
||||
@@ -14,23 +14,30 @@
|
||||
<a style="text-decoration: none;" href="https://github.com/zhom/donutbrowser/blob/main/LICENSE" target="_blank">
|
||||
<img src="https://img.shields.io/badge/license-AGPL--3.0-blue.svg" alt="License">
|
||||
</a>
|
||||
<a href="https://app.codacy.com/gh/zhom/donutbrowser/dashboard?utm_source=gh&utm_medium=referral&utm_content=&utm_campaign=Badge_grade">
|
||||
<img src="https://app.codacy.com/project/badge/Grade/b9c9beafc92d4bc8bc7c5b42c6c4ba81"/>
|
||||
</a>
|
||||
<a href="https://app.fossa.com/projects/git%2Bgithub.com%2Fzhom%2Fdonutbrowser?ref=badge_shield&issueType=security" alt="FOSSA Status">
|
||||
<img src="https://app.fossa.com/api/projects/git%2Bgithub.com%2Fzhom%2Fdonutbrowser.svg?type=shield&issueType=security"/>
|
||||
</a>
|
||||
<a style="text-decoration: none;" href="https://github.com/zhom/donutbrowser/stargazers" target="_blank">
|
||||
<img src="https://img.shields.io/github/stars/zhom/donutbrowser?style=social" alt="GitHub stars">
|
||||
</a>
|
||||
</p>
|
||||
|
||||
## Donut Browser
|
||||
|
||||
> A free and open source browser orchestrator built with [Tauri](https://v2.tauri.app/).
|
||||
|
||||

|
||||
<picture>
|
||||
<source media="(prefers-color-scheme: dark)" srcset="assets/preview-dark.png" />
|
||||
<source media="(prefers-color-scheme: light)" srcset="assets/preview.png" />
|
||||
<img alt="Preview" src="assets/preview.png" />
|
||||
</picture>
|
||||
|
||||
## Features
|
||||
|
||||
- Create unlimited number of local browser profiles completely isolated from each other
|
||||
- Safely use multiple accounts on one device by using anti-detect browser profiles, powered by [Camoufox](https://camoufox.com)
|
||||
- Proxy support with basic auth for all browsers except for TOR Browser
|
||||
- Import profiles from your existing browsers
|
||||
- Automatic updates both for browsers and for the app itself
|
||||
- Automatic updates for browsers
|
||||
- Set Donut Browser as your default browser to control in which profile to open links
|
||||
|
||||
## Download
|
||||
@@ -73,6 +80,24 @@ Have questions or want to contribute? We'd love to hear from you!
|
||||
</picture>
|
||||
</a>
|
||||
|
||||
## Contributors
|
||||
|
||||
<!-- readme: collaborators,contributors -start -->
|
||||
<table>
|
||||
<tbody>
|
||||
<tr>
|
||||
<td align="center">
|
||||
<a href="https://github.com/zhom">
|
||||
<img src="https://avatars.githubusercontent.com/u/2717306?v=4" width="100;" alt="zhom"/>
|
||||
<br />
|
||||
<sub><b>zhom</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
</tr>
|
||||
<tbody>
|
||||
</table>
|
||||
<!-- readme: collaborators,contributors -end -->
|
||||
|
||||
## Contact
|
||||
|
||||
Have an urgent question or want to report a security vulnerability? Send an email to contact at donutbrowser dot com and we'll get back to you as fast as possible.
|
||||
|
||||
Binary file not shown.
|
After Width: | Height: | Size: 111 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 163 KiB After Width: | Height: | Size: 114 KiB |
+3
-17
@@ -1,22 +1,18 @@
|
||||
{
|
||||
"$schema": "https://biomejs.dev/schemas/1.9.4/schema.json",
|
||||
"$schema": "https://biomejs.dev/schemas/2.0.6/schema.json",
|
||||
"vcs": {
|
||||
"enabled": false,
|
||||
"clientKind": "git",
|
||||
"useIgnoreFile": false
|
||||
},
|
||||
"files": {
|
||||
"ignoreUnknown": false,
|
||||
"ignore": []
|
||||
"ignoreUnknown": false
|
||||
},
|
||||
"formatter": {
|
||||
"enabled": true,
|
||||
"indentStyle": "space",
|
||||
"indentWidth": 2
|
||||
},
|
||||
"organizeImports": {
|
||||
"enabled": true
|
||||
},
|
||||
"linter": {
|
||||
"enabled": true,
|
||||
"rules": {
|
||||
@@ -25,17 +21,7 @@
|
||||
"useHookAtTopLevel": "error"
|
||||
},
|
||||
"nursery": {
|
||||
"useGoogleFontDisplay": "error",
|
||||
"noDocumentImportInPage": "error",
|
||||
"noHeadElement": "error",
|
||||
"noHeadImportInDocument": "error",
|
||||
"noImgElement": "off",
|
||||
"useComponentExportOnlyModules": {
|
||||
"level": "error",
|
||||
"options": {
|
||||
"allowExportNames": ["metadata", "badgeVariants", "buttonVariants"]
|
||||
}
|
||||
}
|
||||
"useUniqueElementIds": "off"
|
||||
},
|
||||
"a11y": {
|
||||
"useSemanticElements": "off"
|
||||
|
||||
@@ -1,133 +0,0 @@
|
||||
import { FlatCompat } from "@eslint/eslintrc";
|
||||
import eslint from "@eslint/js";
|
||||
import tseslint from "typescript-eslint";
|
||||
|
||||
const compat = new FlatCompat({
|
||||
baseDirectory: import.meta.dirname,
|
||||
});
|
||||
|
||||
const eslintConfig = tseslint.config(
|
||||
eslint.configs.recommended,
|
||||
tseslint.configs.strictTypeChecked,
|
||||
tseslint.configs.stylisticTypeChecked,
|
||||
...compat.extends("next/core-web-vitals"),
|
||||
{
|
||||
// Disabled rules taken from https://biomejs.dev/linter/rules-sources for ones that
|
||||
// are already handled by Prettier and TypeScript or are not needed
|
||||
rules: {
|
||||
// eslint-plugin-jsx-a11y rules - some disabled for performance/specific project needs
|
||||
"jsx-a11y/alt-text": "off",
|
||||
"jsx-a11y/anchor-has-content": "off",
|
||||
"jsx-a11y/anchor-is-valid": "off",
|
||||
"jsx-a11y/aria-activedescendant-has-tabindex": "off",
|
||||
"jsx-a11y/aria-props": "off",
|
||||
"jsx-a11y/aria-proptypes": "off",
|
||||
"jsx-a11y/aria-role": "off",
|
||||
"jsx-a11y/aria-unsupported-elements": "off",
|
||||
"jsx-a11y/autocomplete-valid": "off",
|
||||
"jsx-a11y/click-events-have-key-events": "off",
|
||||
"jsx-a11y/heading-has-content": "off",
|
||||
"jsx-a11y/html-has-lang": "off",
|
||||
"jsx-a11y/iframe-has-title": "off",
|
||||
"jsx-a11y/img-redundant-alt": "off",
|
||||
"jsx-a11y/interactive-supports-focus": "off",
|
||||
"jsx-a11y/label-has-associated-control": "off",
|
||||
"jsx-a11y/lang": "off",
|
||||
"jsx-a11y/media-has-caption": "off",
|
||||
"jsx-a11y/mouse-events-have-key-events": "off",
|
||||
"jsx-a11y/no-access-key": "off",
|
||||
"jsx-a11y/no-aria-hidden-on-focusable": "off",
|
||||
"jsx-a11y/no-autofocus": "off",
|
||||
"jsx-a11y/no-distracting-elements": "off",
|
||||
"jsx-a11y/no-interactive-element-to-noninteractive-role": "off",
|
||||
"jsx-a11y/no-noninteractive-element-to-interactive-role": "off",
|
||||
"jsx-a11y/no-noninteractive-tabindex": "off",
|
||||
"jsx-a11y/no-redundant-roles": "off",
|
||||
"jsx-a11y/no-static-element-interactions": "off",
|
||||
"jsx-a11y/prefer-tag-over-role": "off",
|
||||
"jsx-a11y/role-has-required-aria-props": "off",
|
||||
"jsx-a11y/role-supports-aria-props": "off",
|
||||
"jsx-a11y/scope": "off",
|
||||
"jsx-a11y/tabindex-no-positive": "off",
|
||||
// eslint-plugin-react rules - some disabled for performance/specific project needs
|
||||
"react/button-has-type": "off",
|
||||
"react/jsx-boolean-value": "off",
|
||||
"react/jsx-curly-brace-presence": "off",
|
||||
"react/jsx-fragments": "off",
|
||||
"react/jsx-key": "off",
|
||||
"react/jsx-no-comment-textnodes": "off",
|
||||
"react/jsx-no-duplicate-props": "off",
|
||||
"react/jsx-no-target-blank": "off",
|
||||
"react/jsx-no-useless-fragment": "off",
|
||||
"react/no-array-index-key": "off",
|
||||
"react/no-children-prop": "off",
|
||||
"react/no-danger": "off",
|
||||
"react/no-danger-with-children": "off",
|
||||
"react/void-dom-elements-no-children": "off",
|
||||
// eslint-plugin-react-hooks rules - disabled for specific project needs
|
||||
"react-hooks/exhaustive-deps": "off",
|
||||
"react-hooks/rules-of-hooks": "off",
|
||||
// typescript-eslint rules - some handled by TypeScript compiler or disabled for project needs
|
||||
"@typescript-eslint/adjacent-overload-signatures": "off",
|
||||
"@typescript-eslint/array-type": "off",
|
||||
"@typescript-eslint/ban-types": "off",
|
||||
"@typescript-eslint/consistent-type-exports": "off",
|
||||
"@typescript-eslint/consistent-type-imports": "off",
|
||||
"@typescript-eslint/default-param-last": "off",
|
||||
"@typescript-eslint/dot-notation": "off",
|
||||
"@typescript-eslint/explicit-function-return-type": "off",
|
||||
"@typescript-eslint/explicit-member-accessibility": "off",
|
||||
"@typescript-eslint/naming-convention": "off",
|
||||
"@typescript-eslint/no-dupe-class-members": "off",
|
||||
"@typescript-eslint/no-empty-function": "off",
|
||||
"@typescript-eslint/no-empty-interface": "off",
|
||||
"@typescript-eslint/no-explicit-any": "off",
|
||||
"@typescript-eslint/no-extra-non-null-assertion": "off",
|
||||
"@typescript-eslint/no-extraneous-class": "off",
|
||||
"@typescript-eslint/no-inferrable-types": "off",
|
||||
"@typescript-eslint/no-invalid-void-type": "off",
|
||||
"@typescript-eslint/no-loss-of-precision": "off",
|
||||
"@typescript-eslint/no-misused-new": "off",
|
||||
"@typescript-eslint/no-namespace": "off",
|
||||
"@typescript-eslint/no-non-null-assertion": "off",
|
||||
"@typescript-eslint/no-redeclare": "off",
|
||||
"@typescript-eslint/no-require-imports": "off",
|
||||
"@typescript-eslint/no-restricted-imports": "off",
|
||||
"@typescript-eslint/no-restricted-types": "off",
|
||||
"@typescript-eslint/no-this-alias": "off",
|
||||
"@typescript-eslint/no-unnecessary-type-constraint": "off",
|
||||
"@typescript-eslint/no-unsafe-declaration-merging": "off",
|
||||
"@typescript-eslint/no-unused-vars": "off",
|
||||
"@typescript-eslint/no-use-before-define": "off",
|
||||
"@typescript-eslint/no-useless-constructor": "off",
|
||||
"@typescript-eslint/no-useless-empty-export": "off",
|
||||
"@typescript-eslint/only-throw-error": "off",
|
||||
"@typescript-eslint/parameter-properties": "off",
|
||||
"@typescript-eslint/prefer-as-const": "off",
|
||||
"@typescript-eslint/prefer-enum-initializers": "off",
|
||||
"@typescript-eslint/prefer-for-of": "off",
|
||||
"@typescript-eslint/prefer-function-type": "off",
|
||||
"@typescript-eslint/prefer-literal-enum-member": "off",
|
||||
"@typescript-eslint/prefer-namespace-keyword": "off",
|
||||
"@typescript-eslint/prefer-optional-chain": "off",
|
||||
"@typescript-eslint/require-await": "off",
|
||||
// Custom rules
|
||||
"@typescript-eslint/restrict-template-expressions": [
|
||||
"error",
|
||||
{
|
||||
allowNumber: true,
|
||||
allowBoolean: true,
|
||||
allowNever: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
languageOptions: {
|
||||
parserOptions: {
|
||||
projectService: true,
|
||||
tsconfigRootDir: import.meta.dirname,
|
||||
},
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
export default eslintConfig;
|
||||
@@ -21,5 +21,8 @@ if [ -z "$TARGET_TRIPLE" ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Copy the file
|
||||
cp "dist/nodecar${EXT}" "../src-tauri/binaries/nodecar-${TARGET_TRIPLE}${EXT}"
|
||||
# Copy the file with target triple suffix
|
||||
cp "nodecar-bin" "../src-tauri/binaries/nodecar-${TARGET_TRIPLE}${EXT}"
|
||||
|
||||
# Also copy a generic version for Tauri to find
|
||||
cp "nodecar-bin" "../src-tauri/binaries/nodecar${EXT}"
|
||||
@@ -1,131 +0,0 @@
|
||||
import { FlatCompat } from "@eslint/eslintrc";
|
||||
import eslint from "@eslint/js";
|
||||
import tseslint from "typescript-eslint";
|
||||
|
||||
const compat = new FlatCompat({
|
||||
baseDirectory: import.meta.dirname,
|
||||
});
|
||||
|
||||
const eslintConfig = tseslint.config(
|
||||
eslint.configs.recommended,
|
||||
...compat.extends("next/core-web-vitals"),
|
||||
{
|
||||
// Disabled rules taken from https://biomejs.dev/linter/rules-sources for ones that
|
||||
// are already handled by Prettier and TypeScript or are not needed
|
||||
rules: {
|
||||
// eslint-plugin-jsx-a11y rules - some disabled for performance/specific project needs
|
||||
"jsx-a11y/alt-text": "off",
|
||||
"jsx-a11y/anchor-has-content": "off",
|
||||
"jsx-a11y/anchor-is-valid": "off",
|
||||
"jsx-a11y/aria-activedescendant-has-tabindex": "off",
|
||||
"jsx-a11y/aria-props": "off",
|
||||
"jsx-a11y/aria-proptypes": "off",
|
||||
"jsx-a11y/aria-role": "off",
|
||||
"jsx-a11y/aria-unsupported-elements": "off",
|
||||
"jsx-a11y/autocomplete-valid": "off",
|
||||
"jsx-a11y/click-events-have-key-events": "off",
|
||||
"jsx-a11y/heading-has-content": "off",
|
||||
"jsx-a11y/html-has-lang": "off",
|
||||
"jsx-a11y/iframe-has-title": "off",
|
||||
"jsx-a11y/img-redundant-alt": "off",
|
||||
"jsx-a11y/interactive-supports-focus": "off",
|
||||
"jsx-a11y/label-has-associated-control": "off",
|
||||
"jsx-a11y/lang": "off",
|
||||
"jsx-a11y/media-has-caption": "off",
|
||||
"jsx-a11y/mouse-events-have-key-events": "off",
|
||||
"jsx-a11y/no-access-key": "off",
|
||||
"jsx-a11y/no-aria-hidden-on-focusable": "off",
|
||||
"jsx-a11y/no-autofocus": "off",
|
||||
"jsx-a11y/no-distracting-elements": "off",
|
||||
"jsx-a11y/no-interactive-element-to-noninteractive-role": "off",
|
||||
"jsx-a11y/no-noninteractive-element-to-interactive-role": "off",
|
||||
"jsx-a11y/no-noninteractive-tabindex": "off",
|
||||
"jsx-a11y/no-redundant-roles": "off",
|
||||
"jsx-a11y/no-static-element-interactions": "off",
|
||||
"jsx-a11y/prefer-tag-over-role": "off",
|
||||
"jsx-a11y/role-has-required-aria-props": "off",
|
||||
"jsx-a11y/role-supports-aria-props": "off",
|
||||
"jsx-a11y/scope": "off",
|
||||
"jsx-a11y/tabindex-no-positive": "off",
|
||||
// eslint-plugin-react rules - some disabled for performance/specific project needs
|
||||
"react/button-has-type": "off",
|
||||
"react/jsx-boolean-value": "off",
|
||||
"react/jsx-curly-brace-presence": "off",
|
||||
"react/jsx-fragments": "off",
|
||||
"react/jsx-key": "off",
|
||||
"react/jsx-no-comment-textnodes": "off",
|
||||
"react/jsx-no-duplicate-props": "off",
|
||||
"react/jsx-no-target-blank": "off",
|
||||
"react/jsx-no-useless-fragment": "off",
|
||||
"react/no-array-index-key": "off",
|
||||
"react/no-children-prop": "off",
|
||||
"react/no-danger": "off",
|
||||
"react/no-danger-with-children": "off",
|
||||
"react/void-dom-elements-no-children": "off",
|
||||
// eslint-plugin-react-hooks rules - disabled for specific project needs
|
||||
"react-hooks/exhaustive-deps": "off",
|
||||
"react-hooks/rules-of-hooks": "off",
|
||||
// typescript-eslint rules - some handled by TypeScript compiler or disabled for project needs
|
||||
"@typescript-eslint/adjacent-overload-signatures": "off",
|
||||
"@typescript-eslint/array-type": "off",
|
||||
"@typescript-eslint/ban-types": "off",
|
||||
"@typescript-eslint/consistent-type-exports": "off",
|
||||
"@typescript-eslint/consistent-type-imports": "off",
|
||||
"@typescript-eslint/default-param-last": "off",
|
||||
"@typescript-eslint/dot-notation": "off",
|
||||
"@typescript-eslint/explicit-function-return-type": "off",
|
||||
"@typescript-eslint/explicit-member-accessibility": "off",
|
||||
"@typescript-eslint/naming-convention": "off",
|
||||
"@typescript-eslint/no-dupe-class-members": "off",
|
||||
"@typescript-eslint/no-empty-function": "off",
|
||||
"@typescript-eslint/no-empty-interface": "off",
|
||||
"@typescript-eslint/no-explicit-any": "off",
|
||||
"@typescript-eslint/no-extra-non-null-assertion": "off",
|
||||
"@typescript-eslint/no-extraneous-class": "off",
|
||||
"@typescript-eslint/no-inferrable-types": "off",
|
||||
"@typescript-eslint/no-invalid-void-type": "off",
|
||||
"@typescript-eslint/no-loss-of-precision": "off",
|
||||
"@typescript-eslint/no-misused-new": "off",
|
||||
"@typescript-eslint/no-namespace": "off",
|
||||
"@typescript-eslint/no-non-null-assertion": "off",
|
||||
"@typescript-eslint/no-redeclare": "off",
|
||||
"@typescript-eslint/no-require-imports": "off",
|
||||
"@typescript-eslint/no-restricted-imports": "off",
|
||||
"@typescript-eslint/no-restricted-types": "off",
|
||||
"@typescript-eslint/no-this-alias": "off",
|
||||
"@typescript-eslint/no-unnecessary-type-constraint": "off",
|
||||
"@typescript-eslint/no-unsafe-declaration-merging": "off",
|
||||
"@typescript-eslint/no-unused-vars": "off",
|
||||
"@typescript-eslint/no-use-before-define": "off",
|
||||
"@typescript-eslint/no-useless-constructor": "off",
|
||||
"@typescript-eslint/no-useless-empty-export": "off",
|
||||
"@typescript-eslint/only-throw-error": "off",
|
||||
"@typescript-eslint/parameter-properties": "off",
|
||||
"@typescript-eslint/prefer-as-const": "off",
|
||||
"@typescript-eslint/prefer-enum-initializers": "off",
|
||||
"@typescript-eslint/prefer-for-of": "off",
|
||||
"@typescript-eslint/prefer-function-type": "off",
|
||||
"@typescript-eslint/prefer-literal-enum-member": "off",
|
||||
"@typescript-eslint/prefer-namespace-keyword": "off",
|
||||
"@typescript-eslint/prefer-optional-chain": "off",
|
||||
"@typescript-eslint/require-await": "off",
|
||||
// Custom rules
|
||||
"@typescript-eslint/restrict-template-expressions": [
|
||||
"error",
|
||||
{
|
||||
allowNumber: true,
|
||||
allowBoolean: true,
|
||||
allowNever: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
languageOptions: {
|
||||
parserOptions: {
|
||||
projectService: true,
|
||||
tsconfigRootDir: import.meta.dirname,
|
||||
},
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
export default eslintConfig;
|
||||
+20
-13
@@ -2,32 +2,39 @@
|
||||
"name": "nodecar",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "src/index.ts",
|
||||
"main": "dist/index.js",
|
||||
"bin": "dist/index.js",
|
||||
"scripts": {
|
||||
"watch": "nodemon --exec ts-node --esm ./src/index.ts --watch src",
|
||||
"dev": "node --loader ts-node/esm ./src/index.ts",
|
||||
"start": "node --loader ts-node/esm ./src/index.ts",
|
||||
"start": "tsc && node ./dist/index.js",
|
||||
"rename-binary": "sh ./copy-binary.sh",
|
||||
"build": "tsc && pkg ./dist/index.js --targets latest-macos-arm64 --output dist/nodecar && pnpm rename-binary",
|
||||
"build:mac-aarch64": "tsc && pkg ./dist/index.js --targets latest-macos-arm64 --output dist/nodecar && pnpm rename-binary",
|
||||
"build:mac-x86_64": "tsc && pkg ./dist/index.js --targets latest-macos-x64 --output dist/nodecar && pnpm rename-binary",
|
||||
"build:linux-x64": "tsc && pkg ./dist/index.js --targets latest-linux-x64 --output dist/nodecar && pnpm rename-binary",
|
||||
"build:linux-arm64": "tsc && pkg ./dist/index.js --targets latest-linux-arm64 --output dist/nodecar && pnpm rename-binary",
|
||||
"build:win-x64": "tsc && pkg ./dist/index.js --targets latest-win-x64 --output dist/nodecar && pnpm rename-binary",
|
||||
"build:win-arm64": "tsc && pkg ./dist/index.js --targets latest-win-arm64 --output dist/nodecar && pnpm rename-binary"
|
||||
"build": "tsc && banderole bundle . --output nodecar-bin && pnpm rename-binary",
|
||||
"build:mac-aarch64": "tsc && banderole bundle . --output nodecar-bin && pnpm rename-binary",
|
||||
"build:mac-x86_64": "tsc && banderole bundle . --output nodecar-bin && pnpm rename-binary",
|
||||
"build:linux-x64": "tsc && banderole bundle . --output nodecar-bin && pnpm rename-binary",
|
||||
"build:linux-arm64": "tsc && banderole bundle . --output nodecar-bin && pnpm rename-binary",
|
||||
"build:win-x64": "tsc && banderole bundle . --output nodecar-bin && pnpm rename-binary",
|
||||
"build:win-arm64": "tsc && banderole bundle . --output nodecar-bin && pnpm rename-binary"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "AGPL-3.0",
|
||||
"dependencies": {
|
||||
"@types/node": "^22.15.30",
|
||||
"@yao-pkg/pkg": "^6.5.1",
|
||||
"@types/node": "^24.2.1",
|
||||
"commander": "^14.0.0",
|
||||
"dotenv": "^16.5.0",
|
||||
"donutbrowser-camoufox-js": "^0.6.6",
|
||||
"dotenv": "^17.2.1",
|
||||
"fingerprint-generator": "^2.1.69",
|
||||
"get-port": "^7.1.0",
|
||||
"nodemon": "^3.1.10",
|
||||
"playwright-core": "^1.54.2",
|
||||
"proxy-chain": "^2.5.9",
|
||||
"tmp": "^0.2.5",
|
||||
"ts-node": "^10.9.2",
|
||||
"typescript": "^5.8.3"
|
||||
"typescript": "^5.9.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/tmp": "^0.2.6"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,459 @@
|
||||
import { spawn } from "node:child_process";
|
||||
import path from "node:path";
|
||||
import { launchOptions } from "donutbrowser-camoufox-js";
|
||||
import type { LaunchOptions } from "donutbrowser-camoufox-js/dist/utils.js";
|
||||
import {
|
||||
type CamoufoxConfig,
|
||||
deleteCamoufoxConfig,
|
||||
generateCamoufoxId,
|
||||
getCamoufoxConfig,
|
||||
listCamoufoxConfigs,
|
||||
saveCamoufoxConfig,
|
||||
} from "./camoufox-storage.js";
|
||||
|
||||
/**
|
||||
* Convert camoufox fingerprint format to fingerprint-generator format
|
||||
* @param camoufoxFingerprint The camoufox fingerprint object
|
||||
* @returns fingerprint-generator object
|
||||
*/
|
||||
function convertCamoufoxToFingerprintGenerator(
|
||||
camoufoxFingerprint: Record<string, any>,
|
||||
): any {
|
||||
const fingerprintObj: Record<string, any> = {
|
||||
navigator: {},
|
||||
screen: {},
|
||||
videoCard: {},
|
||||
headers: {},
|
||||
battery: {},
|
||||
};
|
||||
|
||||
// Mapping from camoufox keys to fingerprint-generator structure based on the YAML
|
||||
const mappings: Record<string, string> = {
|
||||
// Navigator properties
|
||||
"navigator.userAgent": "navigator.userAgent",
|
||||
"navigator.platform": "navigator.platform",
|
||||
"navigator.hardwareConcurrency": "navigator.hardwareConcurrency",
|
||||
"navigator.maxTouchPoints": "navigator.maxTouchPoints",
|
||||
"navigator.doNotTrack": "navigator.doNotTrack",
|
||||
"navigator.appCodeName": "navigator.appCodeName",
|
||||
"navigator.appName": "navigator.appName",
|
||||
"navigator.appVersion": "navigator.appVersion",
|
||||
"navigator.oscpu": "navigator.oscpu",
|
||||
"navigator.product": "navigator.product",
|
||||
"navigator.language": "navigator.language",
|
||||
"navigator.languages": "navigator.languages",
|
||||
"navigator.globalPrivacyControl": "navigator.globalPrivacyControl",
|
||||
|
||||
// Screen properties
|
||||
"screen.width": "screen.width",
|
||||
"screen.height": "screen.height",
|
||||
"screen.availWidth": "screen.availWidth",
|
||||
"screen.availHeight": "screen.availHeight",
|
||||
"screen.availTop": "screen.availTop",
|
||||
"screen.availLeft": "screen.availLeft",
|
||||
"screen.colorDepth": "screen.colorDepth",
|
||||
"screen.pixelDepth": "screen.pixelDepth",
|
||||
"window.outerWidth": "screen.outerWidth",
|
||||
"window.outerHeight": "screen.outerHeight",
|
||||
"window.innerWidth": "screen.innerWidth",
|
||||
"window.innerHeight": "screen.innerHeight",
|
||||
"window.screenX": "screen.screenX",
|
||||
"window.screenY": "screen.screenY",
|
||||
"screen.pageXOffset": "screen.pageXOffset",
|
||||
"screen.pageYOffset": "screen.pageYOffset",
|
||||
"window.devicePixelRatio": "screen.devicePixelRatio",
|
||||
"document.body.clientWidth": "screen.clientWidth",
|
||||
"document.body.clientHeight": "screen.clientHeight",
|
||||
|
||||
// WebGL properties
|
||||
"webGl:vendor": "videoCard.vendor",
|
||||
"webGl:renderer": "videoCard.renderer",
|
||||
|
||||
// Headers
|
||||
"headers.Accept-Encoding": "headers.Accept-Encoding",
|
||||
|
||||
// Battery
|
||||
"battery:charging": "battery.charging",
|
||||
"battery:chargingTime": "battery.chargingTime",
|
||||
"battery:dischargingTime": "battery.dischargingTime",
|
||||
};
|
||||
|
||||
// Apply mappings
|
||||
for (const [camoufoxKey, fingerprintPath] of Object.entries(mappings)) {
|
||||
if (camoufoxFingerprint[camoufoxKey] !== undefined) {
|
||||
const pathParts = fingerprintPath.split(".");
|
||||
let current = fingerprintObj;
|
||||
|
||||
// Navigate to the nested property, creating objects as needed
|
||||
for (let i = 0; i < pathParts.length - 1; i++) {
|
||||
const part = pathParts[i];
|
||||
if (!current[part]) {
|
||||
current[part] = {};
|
||||
}
|
||||
current = current[part];
|
||||
}
|
||||
|
||||
// Set the final value
|
||||
const finalKey = pathParts[pathParts.length - 1];
|
||||
current[finalKey] = camoufoxFingerprint[camoufoxKey];
|
||||
}
|
||||
}
|
||||
|
||||
// Handle fonts separately
|
||||
if (camoufoxFingerprint.fonts && Array.isArray(camoufoxFingerprint.fonts)) {
|
||||
fingerprintObj.fonts = camoufoxFingerprint.fonts;
|
||||
}
|
||||
|
||||
return { ...camoufoxFingerprint, ...fingerprintObj };
|
||||
}
|
||||
|
||||
/**
|
||||
* Start a Camoufox instance in a separate process
|
||||
* @param options Camoufox launch options
|
||||
* @param profilePath Profile directory path
|
||||
* @param url Optional URL to open
|
||||
* @returns Promise resolving to the Camoufox configuration
|
||||
*/
|
||||
export async function startCamoufoxProcess(
|
||||
options: LaunchOptions = {},
|
||||
profilePath?: string,
|
||||
url?: string,
|
||||
customConfig?: string,
|
||||
): Promise<CamoufoxConfig> {
|
||||
// Generate a unique ID for this instance
|
||||
const id = generateCamoufoxId();
|
||||
|
||||
// Ensure profile path is absolute if provided
|
||||
const absoluteProfilePath = profilePath
|
||||
? path.resolve(profilePath)
|
||||
: undefined;
|
||||
|
||||
// Create the Camoufox configuration
|
||||
const config: CamoufoxConfig = {
|
||||
id,
|
||||
options: JSON.parse(JSON.stringify(options)), // Deep clone to avoid reference sharing
|
||||
profilePath: absoluteProfilePath,
|
||||
url,
|
||||
customConfig,
|
||||
};
|
||||
|
||||
// Save the configuration before starting the process
|
||||
saveCamoufoxConfig(config);
|
||||
|
||||
// Build the command arguments
|
||||
const args = [
|
||||
path.join(__dirname, "index.js"),
|
||||
"camoufox-worker",
|
||||
"start",
|
||||
"--id",
|
||||
id,
|
||||
];
|
||||
|
||||
// Spawn the process with proper detachment - similar to proxy implementation
|
||||
const child = spawn(process.execPath, args, {
|
||||
detached: true,
|
||||
stdio: ["ignore", "pipe", "pipe"], // Capture stdout and stderr for startup feedback
|
||||
cwd: process.cwd(),
|
||||
env: {
|
||||
...process.env,
|
||||
NODE_ENV: "production",
|
||||
// Ensure Camoufox can find its dependencies
|
||||
NODE_PATH: process.env.NODE_PATH || "",
|
||||
},
|
||||
});
|
||||
|
||||
// Wait for the worker to start successfully or fail - with shorter timeout for quick response
|
||||
return new Promise<CamoufoxConfig>((resolve, reject) => {
|
||||
let resolved = false;
|
||||
let stdoutBuffer = "";
|
||||
let stderrBuffer = "";
|
||||
|
||||
// Shorter timeout for quick startup feedback
|
||||
const timeout = setTimeout(() => {
|
||||
if (!resolved) {
|
||||
resolved = true;
|
||||
child.kill("SIGKILL");
|
||||
reject(
|
||||
new Error(`Camoufox worker ${id} startup timeout after 5 seconds`),
|
||||
);
|
||||
}
|
||||
}, 5000);
|
||||
|
||||
// Handle stdout - look for success JSON
|
||||
if (child.stdout) {
|
||||
child.stdout.on("data", (data) => {
|
||||
const output = data.toString();
|
||||
stdoutBuffer += output;
|
||||
|
||||
// Look for success JSON message
|
||||
const lines = stdoutBuffer.split("\n");
|
||||
for (const line of lines) {
|
||||
if (line.trim()) {
|
||||
try {
|
||||
const parsed = JSON.parse(line.trim());
|
||||
if (parsed.success && parsed.id === id && parsed.processId) {
|
||||
if (!resolved) {
|
||||
resolved = true;
|
||||
clearTimeout(timeout);
|
||||
config.processId = parsed.processId;
|
||||
saveCamoufoxConfig(config);
|
||||
|
||||
// Unref immediately after success to detach properly
|
||||
child.unref();
|
||||
resolve(config);
|
||||
return;
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Not JSON, continue
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Handle stderr - look for error JSON
|
||||
if (child.stderr) {
|
||||
child.stderr.on("data", (data) => {
|
||||
const output = data.toString();
|
||||
stderrBuffer += output;
|
||||
|
||||
// Look for error JSON message
|
||||
const lines = stderrBuffer.split("\n");
|
||||
for (const line of lines) {
|
||||
if (line.trim()) {
|
||||
try {
|
||||
const parsed = JSON.parse(line.trim());
|
||||
if (parsed.error && parsed.id === id) {
|
||||
if (!resolved) {
|
||||
resolved = true;
|
||||
clearTimeout(timeout);
|
||||
reject(
|
||||
new Error(
|
||||
`Camoufox worker failed: ${parsed.message || parsed.error}`,
|
||||
),
|
||||
);
|
||||
return;
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Not JSON, continue
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
child.on("exit", (code, signal) => {
|
||||
if (!resolved) {
|
||||
resolved = true;
|
||||
clearTimeout(timeout);
|
||||
if (code !== 0) {
|
||||
reject(
|
||||
new Error(
|
||||
`Camoufox worker ${id} exited with code ${code} and signal ${signal}. Stderr: ${stderrBuffer}`,
|
||||
),
|
||||
);
|
||||
} else {
|
||||
// Process exited successfully but we didn't get success message
|
||||
reject(
|
||||
new Error(
|
||||
`Camoufox worker ${id} exited without success confirmation`,
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop a Camoufox process
|
||||
* @param id The Camoufox ID to stop
|
||||
* @returns Promise resolving to true if stopped, false if not found
|
||||
*/
|
||||
export async function stopCamoufoxProcess(id: string): Promise<boolean> {
|
||||
const config = getCamoufoxConfig(id);
|
||||
|
||||
if (!config) {
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
// Method 1: If we have a process ID, kill by PID with proper signal sequence
|
||||
if (config.processId) {
|
||||
try {
|
||||
// First try SIGTERM for graceful shutdown
|
||||
process.kill(config.processId, "SIGTERM");
|
||||
// Give it more time to terminate gracefully (increased from 2s to 5s)
|
||||
await new Promise((resolve) => setTimeout(resolve, 5000));
|
||||
|
||||
// Check if process is still running
|
||||
try {
|
||||
process.kill(config.processId, 0); // Signal 0 checks if process exists
|
||||
process.kill(config.processId, "SIGKILL");
|
||||
} catch {}
|
||||
} catch {}
|
||||
}
|
||||
|
||||
// Method 2: Pattern-based kill as fallback
|
||||
const killByPattern = spawn(
|
||||
"pkill",
|
||||
["-TERM", "-f", `camoufox-worker.*${id}`],
|
||||
{
|
||||
stdio: "ignore",
|
||||
},
|
||||
);
|
||||
|
||||
// Wait for pattern-based kill command to complete
|
||||
await new Promise<void>((resolve) => {
|
||||
killByPattern.on("exit", () => resolve());
|
||||
// Timeout after 3 seconds
|
||||
setTimeout(() => resolve(), 3000);
|
||||
});
|
||||
|
||||
// Final cleanup with SIGKILL if needed
|
||||
setTimeout(() => {
|
||||
spawn("pkill", ["-KILL", "-f", `camoufox-worker.*${id}`], {
|
||||
stdio: "ignore",
|
||||
});
|
||||
}, 1000);
|
||||
|
||||
// Delete the configuration
|
||||
deleteCamoufoxConfig(id);
|
||||
return true;
|
||||
} catch {
|
||||
// Delete the configuration even if stopping failed
|
||||
deleteCamoufoxConfig(id);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop all Camoufox processes
|
||||
* @returns Promise resolving when all instances are stopped
|
||||
*/
|
||||
export async function stopAllCamoufoxProcesses(): Promise<void> {
|
||||
const configs = listCamoufoxConfigs();
|
||||
|
||||
const stopPromises = configs.map((config) => stopCamoufoxProcess(config.id));
|
||||
await Promise.all(stopPromises);
|
||||
}
|
||||
|
||||
interface GenerateConfigOptions {
|
||||
proxy?: string;
|
||||
maxWidth?: number;
|
||||
maxHeight?: number;
|
||||
minWidth?: number;
|
||||
minHeight?: number;
|
||||
geoip?: string | boolean;
|
||||
blockImages?: boolean;
|
||||
blockWebrtc?: boolean;
|
||||
blockWebgl?: boolean;
|
||||
executablePath?: string;
|
||||
fingerprint?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate Camoufox configuration using launchOptions
|
||||
* @param options Configuration options
|
||||
* @returns Promise resolving to the generated config JSON string
|
||||
*/
|
||||
export async function generateCamoufoxConfig(
|
||||
options: GenerateConfigOptions,
|
||||
): Promise<string> {
|
||||
try {
|
||||
const launchOpts: any = {
|
||||
headless: false,
|
||||
i_know_what_im_doing: true,
|
||||
config: {
|
||||
disableTheming: true,
|
||||
showcursor: false,
|
||||
},
|
||||
};
|
||||
|
||||
if (options.geoip) {
|
||||
launchOpts.geoip = true;
|
||||
}
|
||||
|
||||
if (options.blockImages) {
|
||||
launchOpts.block_images = true;
|
||||
}
|
||||
if (options.blockWebrtc) {
|
||||
launchOpts.block_webrtc = true;
|
||||
}
|
||||
if (options.blockWebgl) {
|
||||
launchOpts.block_webgl = true;
|
||||
}
|
||||
|
||||
if (options.executablePath) {
|
||||
launchOpts.executable_path = options.executablePath;
|
||||
}
|
||||
|
||||
if (options.proxy) {
|
||||
launchOpts.proxy = options.proxy;
|
||||
}
|
||||
|
||||
// If fingerprint is provided, use it and ignore other options except executable_path and block_*
|
||||
if (options.fingerprint) {
|
||||
try {
|
||||
const camoufoxFingerprint = JSON.parse(options.fingerprint);
|
||||
|
||||
if (camoufoxFingerprint.timezone) {
|
||||
launchOpts.config.timezone = camoufoxFingerprint.timezone;
|
||||
}
|
||||
|
||||
// Convert camoufox fingerprint format to fingerprint-generator format
|
||||
const fingerprintObj =
|
||||
convertCamoufoxToFingerprintGenerator(camoufoxFingerprint);
|
||||
launchOpts.fingerprint = fingerprintObj;
|
||||
} catch (error) {
|
||||
throw new Error(`Invalid fingerprint JSON: ${error}`);
|
||||
}
|
||||
} else {
|
||||
// Use individual options to build configuration
|
||||
|
||||
// Build screen configuration with min/max dimensions
|
||||
const screen: {
|
||||
minWidth?: number;
|
||||
maxWidth?: number;
|
||||
minHeight?: number;
|
||||
maxHeight?: number;
|
||||
} = {};
|
||||
|
||||
if (options.minWidth) screen.minWidth = options.minWidth;
|
||||
if (options.maxWidth) screen.maxWidth = options.maxWidth;
|
||||
if (options.minHeight) screen.minHeight = options.minHeight;
|
||||
if (options.maxHeight) screen.maxHeight = options.maxHeight;
|
||||
|
||||
if (Object.keys(screen).length > 0) {
|
||||
launchOpts.screen = screen;
|
||||
}
|
||||
}
|
||||
|
||||
// Generate the configuration using launchOptions
|
||||
const generatedOptions = await launchOptions(launchOpts);
|
||||
|
||||
// Extract the environment variables that contain the config
|
||||
const envVars = generatedOptions.env || {};
|
||||
|
||||
// Reconstruct the config from environment variables using getEnvVars utility
|
||||
let configStr = "";
|
||||
let chunkIndex = 1;
|
||||
|
||||
while (envVars[`CAMOU_CONFIG_${chunkIndex}`]) {
|
||||
configStr += envVars[`CAMOU_CONFIG_${chunkIndex}`];
|
||||
chunkIndex++;
|
||||
}
|
||||
|
||||
if (!configStr) {
|
||||
throw new Error("No configuration generated");
|
||||
}
|
||||
|
||||
// Parse and return the config as JSON string
|
||||
const config = JSON.parse(configStr);
|
||||
return JSON.stringify(config);
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to generate Camoufox config: ${error}`);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,153 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import type { LaunchOptions } from "donutbrowser-camoufox-js/dist/utils.js";
|
||||
import tmp from "tmp";
|
||||
|
||||
export interface CamoufoxConfig {
|
||||
id: string;
|
||||
options: LaunchOptions;
|
||||
profilePath?: string;
|
||||
url?: string;
|
||||
processId?: number;
|
||||
customConfig?: string; // JSON string of the fingerprint config
|
||||
}
|
||||
|
||||
// Per-user temp location where Camoufox configs are persisted as <id>.json.
const STORAGE_DIR = path.join(tmp.tmpdir, "donutbrowser", "camoufox");

// Ensure the storage directory exists at module load time so the
// read/write helpers below can assume it is present.
if (!fs.existsSync(STORAGE_DIR)) {
  fs.mkdirSync(STORAGE_DIR, { recursive: true });
}
|
||||
|
||||
/**
|
||||
* Save a Camoufox configuration to disk
|
||||
* @param config The Camoufox configuration to save
|
||||
*/
|
||||
export function saveCamoufoxConfig(config: CamoufoxConfig): void {
|
||||
const filePath = path.join(STORAGE_DIR, `${config.id}.json`);
|
||||
fs.writeFileSync(filePath, JSON.stringify(config, null, 2));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a Camoufox configuration by ID
|
||||
* @param id The Camoufox ID
|
||||
* @returns The Camoufox configuration or null if not found
|
||||
*/
|
||||
export function getCamoufoxConfig(id: string): CamoufoxConfig | null {
|
||||
const filePath = path.join(STORAGE_DIR, `${id}.json`);
|
||||
|
||||
if (!fs.existsSync(filePath)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
const content = fs.readFileSync(filePath, "utf-8");
|
||||
return JSON.parse(content) as CamoufoxConfig;
|
||||
} catch (error) {
|
||||
console.error({
|
||||
message: `Error reading Camoufox config ${id}`,
|
||||
error: (error as Error).message,
|
||||
});
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a Camoufox configuration
|
||||
* @param id The Camoufox ID to delete
|
||||
* @returns True if deleted, false if not found
|
||||
*/
|
||||
export function deleteCamoufoxConfig(id: string): boolean {
|
||||
const filePath = path.join(STORAGE_DIR, `${id}.json`);
|
||||
|
||||
if (!fs.existsSync(filePath)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
fs.unlinkSync(filePath);
|
||||
return true;
|
||||
} catch (error) {
|
||||
console.error({
|
||||
message: `Error deleting Camoufox config ${id}`,
|
||||
error: (error as Error).message,
|
||||
});
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List all saved Camoufox configurations
|
||||
* @returns Array of Camoufox configurations
|
||||
*/
|
||||
export function listCamoufoxConfigs(): CamoufoxConfig[] {
|
||||
if (!fs.existsSync(STORAGE_DIR)) {
|
||||
return [];
|
||||
}
|
||||
|
||||
try {
|
||||
return fs
|
||||
.readdirSync(STORAGE_DIR)
|
||||
.filter((file) => file.endsWith(".json"))
|
||||
.map((file) => {
|
||||
try {
|
||||
const content = fs.readFileSync(
|
||||
path.join(STORAGE_DIR, file),
|
||||
"utf-8",
|
||||
);
|
||||
return JSON.parse(content) as CamoufoxConfig;
|
||||
} catch (error) {
|
||||
console.error({
|
||||
message: `Error reading Camoufox config ${file}`,
|
||||
error,
|
||||
});
|
||||
return null;
|
||||
}
|
||||
})
|
||||
.filter((config): config is CamoufoxConfig => config !== null)
|
||||
.map((config) => {
|
||||
config.options = "Removed for logging" as any;
|
||||
config.customConfig = "Removed for logging" as any;
|
||||
return config;
|
||||
});
|
||||
} catch (error) {
|
||||
console.error({ message: "Error listing Camoufox configs:", error });
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update a Camoufox configuration
|
||||
* @param config The Camoufox configuration to update
|
||||
* @returns True if updated, false if not found
|
||||
*/
|
||||
export function updateCamoufoxConfig(config: CamoufoxConfig): boolean {
|
||||
const filePath = path.join(STORAGE_DIR, `${config.id}.json`);
|
||||
|
||||
try {
|
||||
fs.readFileSync(filePath, "utf-8");
|
||||
fs.writeFileSync(filePath, JSON.stringify(config, null, 2));
|
||||
return true;
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code === "ENOENT") {
|
||||
console.error({
|
||||
message: `Config ${config.id} was deleted while the app was running`,
|
||||
});
|
||||
return false;
|
||||
}
|
||||
|
||||
console.error({
|
||||
message: `Error updating Camoufox config ${config.id}`,
|
||||
error,
|
||||
});
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate a unique ID for a Camoufox instance
|
||||
* @returns A unique ID string
|
||||
*/
|
||||
export function generateCamoufoxId(): string {
|
||||
// Include process ID to ensure uniqueness across multiple processes
|
||||
return `camoufox_${Date.now()}_${process.pid}_${Math.floor(Math.random() * 10000)}`;
|
||||
}
|
||||
@@ -0,0 +1,307 @@
|
||||
import { launchOptions } from "donutbrowser-camoufox-js";
|
||||
import type { LaunchOptions } from "donutbrowser-camoufox-js/dist/utils.js";
|
||||
import { type Browser, type BrowserContext, firefox } from "playwright-core";
|
||||
import { getCamoufoxConfig, saveCamoufoxConfig } from "./camoufox-storage.js";
|
||||
import { getEnvVars, parseProxyString } from "./utils.js";
|
||||
|
||||
/**
 * Run a Camoufox browser server as a worker process.
 *
 * Loads the persisted config for `id`, records this worker's PID, prints a
 * JSON startup line for the parent (Rust) process to parse, then launches a
 * Playwright persistent Firefox context in the background. The worker exits
 * when the browser window closes, the browser disconnects, or a quit signal
 * is received.
 * @param id The Camoufox configuration ID (see camoufox-storage)
 */
export async function runCamoufoxWorker(id: string): Promise<void> {
  // Get the Camoufox configuration
  const config = getCamoufoxConfig(id);

  if (!config) {
    // Parent parses stderr as JSON, so keep this machine-readable.
    console.error(
      JSON.stringify({
        error: "Configuration not found",
        id: id,
      }),
    );
    process.exit(1);
  }

  // Record our PID so the launcher can stop this worker later.
  config.processId = process.pid;
  saveCamoufoxConfig(config);

  // Report success BEFORE the (slow, possibly failing) browser launch so
  // the parent process can return promptly.
  console.log(
    JSON.stringify({
      success: true,
      id: id,
      processId: process.pid,
      profilePath: config.profilePath,
      message: "Camoufox worker started successfully",
    }),
  );

  // Launch browser in background - this can take time and may fail
  setImmediate(async () => {
    let browser: Browser | null = null;
    let context: BrowserContext | null = null;
    let windowCheckInterval: NodeJS.Timeout | null = null;

    // Graceful shutdown handler with access to browser and server.
    // Always exits the process; cleanup failures are deliberately ignored.
    const gracefulShutdown = async () => {
      try {
        // Clear any intervals first
        if (windowCheckInterval) {
          clearInterval(windowCheckInterval);
        }

        // Close browser context and server if they exist
        // NOTE(review): `context.pages` is a method and therefore always
        // truthy, so the first branch never fires — confirm intent.
        if (context && !context.pages) {
          // Context is already closed
        } else if (context) {
          await context.close();
        }

        if (browser?.isConnected()) {
          await browser.close();
        }
      } catch {
        // Ignore cleanup errors during shutdown
      }
      process.exit(0);
    };

    // Handle various quit signals for proper macOS Command+Q support
    process.on("SIGTERM", () => void gracefulShutdown());
    process.on("SIGINT", () => void gracefulShutdown());
    process.on("SIGHUP", () => void gracefulShutdown());
    process.on("SIGQUIT", () => void gracefulShutdown());

    // Handle uncaught exceptions and unhandled rejections
    process.on("uncaughtException", () => void gracefulShutdown());
    process.on("unhandledRejection", () => void gracefulShutdown());

    try {
      // Deep clone to avoid reference sharing and ensure fresh configuration for each instance
      const camoufoxOptions: LaunchOptions = JSON.parse(
        JSON.stringify(config.options || {}),
      );

      // Add profile path if provided
      if (config.profilePath) {
        camoufoxOptions.user_data_dir = config.profilePath;
      }

      // Ensure block options are properly set.
      // NOTE(review): these `if (x) x = true` assignments only normalize
      // truthy values to the boolean literal; they never enable anything.
      if (camoufoxOptions.block_images) {
        camoufoxOptions.block_images = true;
      }

      if (camoufoxOptions.block_webgl) {
        camoufoxOptions.block_webgl = true;
      }

      if (camoufoxOptions.block_webrtc) {
        camoufoxOptions.block_webrtc = true;
      }

      // Check for headless mode from config (no environment variable check)
      if (camoufoxOptions.headless) {
        camoufoxOptions.headless = true;
      }

      // Always set these defaults - ensure they are applied for each instance.
      // Spread order means any existing config keys override the defaults.
      camoufoxOptions.i_know_what_im_doing = true;
      camoufoxOptions.config = {
        disableTheming: true,
        showcursor: false,
        ...(camoufoxOptions.config || {}),
      };

      // Generate fresh options for this specific instance
      const generatedOptions = await launchOptions(camoufoxOptions);

      // Start with process environment to ensure proper inheritance
      let finalEnv = { ...process.env };

      // Add generated options environment variables
      if (generatedOptions.env) {
        finalEnv = { ...finalEnv, ...generatedOptions.env };
      }

      // If we have a custom config from Rust, use it directly as environment variables
      if (config.customConfig) {
        try {
          // Parse the custom config JSON string
          const customConfigObj = JSON.parse(config.customConfig);

          // Ensure default config values are preserved even with custom config
          const mergedConfig = {
            ...customConfigObj,
            disableTheming: true,
            showcursor: false,
          };

          // Convert merged config to environment variables using getEnvVars
          const customEnvVars = getEnvVars(mergedConfig);

          // Merge custom config with generated config (custom takes precedence)
          finalEnv = { ...finalEnv, ...customEnvVars };
        } catch (error) {
          // NOTE(review): the message says "using generated config" but the
          // early return aborts the launch entirely — confirm which is intended.
          console.error(
            `Camoufox worker ${id}: Failed to parse custom config, using generated config:`,
            error,
          );
          return;
        }
      }
      // Prepare profile path for persistent context
      const profilePath = config.profilePath || "";

      // Launch persistent context with the final configuration
      const finalOptions: any = {
        ...generatedOptions,
        env: finalEnv,
      };

      // If a custom executable path was provided, ensure Playwright uses it
      // (camoufox-js uses snake_case; Playwright expects camelCase).
      if (
        (camoufoxOptions as any).executable_path &&
        typeof (camoufoxOptions as any).executable_path === "string"
      ) {
        finalOptions.executablePath = (camoufoxOptions as any)
          .executable_path as string;
      }

      // Only add proxy if it exists and is valid; an unparseable proxy
      // aborts the launch rather than silently leaking traffic.
      if (camoufoxOptions.proxy) {
        try {
          finalOptions.proxy = parseProxyString(camoufoxOptions.proxy);
        } catch (error) {
          console.error({
            message: "Failed to parse proxy, launching without proxy",
            error,
          });
          return;
        }
      }

      // Use launchPersistentContext instead of launchServer
      context = await firefox.launchPersistentContext(
        profilePath,
        finalOptions,
      );

      // Get the browser instance from context
      browser = context.browser();

      // Handle browser disconnection for proper cleanup
      if (browser) {
        browser.on("disconnected", () => void gracefulShutdown());
      }

      // Handle context close for proper cleanup
      context.on("close", () => void gracefulShutdown());

      saveCamoufoxConfig(config);

      // Monitor for window closure: poll the context every second and shut
      // the worker down once no pages remain or the browser disconnects.
      const startWindowMonitoring = () => {
        windowCheckInterval = setInterval(async () => {
          try {
            // Check if context is still active
            if (!context?.pages || context.pages().length === 0) {
              if (windowCheckInterval) {
                clearInterval(windowCheckInterval);
              }
              await gracefulShutdown();
              return;
            }

            // Check if browser is still connected (if available)
            if (browser && !browser.isConnected()) {
              if (windowCheckInterval) {
                clearInterval(windowCheckInterval);
              }
              await gracefulShutdown();
              return;
            }

            // Check pages in the persistent context
            // NOTE(review): redundant with the first guard above — confirm.
            const pages = context.pages();
            if (pages.length === 0) {
              if (windowCheckInterval) {
                clearInterval(windowCheckInterval);
              }
              await gracefulShutdown();
            }
          } catch {
            // If we can't check windows, assume browser is closing
            if (windowCheckInterval) {
              clearInterval(windowCheckInterval);
            }
            await gracefulShutdown();
          }
        }, 1000); // Check every second
      };

      // Handle URL opening if provided
      if (config.url) {
        try {
          // NOTE(review): BrowserContext.pages() is synchronous in
          // Playwright; the await is harmless but misleading — confirm.
          const pages = await context.pages();
          if (pages.length) {
            const page = pages[0];
            await page.goto(config.url, {
              waitUntil: "domcontentloaded",
              timeout: 30000,
            });

            // Start monitoring after page is created
            startWindowMonitoring();
          }
        } catch (urlError) {
          console.error({
            message: "Failed to open URL",
            error: urlError,
          });
          // URL opening failure doesn't affect startup success
          // Still start monitoring
          startWindowMonitoring();
        }
      } else {
        // Start monitoring after page is created
        startWindowMonitoring();
      }

      // Monitor browser/context connection on a slower cadence as a backup
      // to the per-second window monitor above.
      const keepAlive = setInterval(async () => {
        try {
          // Check if context is still active
          if (!context?.pages) {
            clearInterval(keepAlive);
            await gracefulShutdown();
            return;
          }

          // Check browser connection if available
          if (browser && !browser.isConnected()) {
            clearInterval(keepAlive);
            await gracefulShutdown();
            return;
          }
        } catch (error) {
          console.error({
            message: "Error in keepAlive check",
            error,
          });
          clearInterval(keepAlive);
          await gracefulShutdown();
        }
      }, 2000);
    } catch (error) {
      console.error({
        message: "Failed to launch Camoufox",
        error,
      });
      // Browser launch failed, attempt cleanup
      await gracefulShutdown();
    }
  });

  // Keep process alive until a shutdown path calls process.exit.
  process.stdin.resume();
}
|
||||
+388
-25
@@ -1,8 +1,17 @@
|
||||
import { program } from "commander";
|
||||
import type { LaunchOptions } from "donutbrowser-camoufox-js/dist/utils.js";
|
||||
import {
|
||||
generateCamoufoxConfig,
|
||||
startCamoufoxProcess,
|
||||
stopAllCamoufoxProcesses,
|
||||
stopCamoufoxProcess,
|
||||
} from "./camoufox-launcher.js";
|
||||
import { listCamoufoxConfigs } from "./camoufox-storage.js";
|
||||
import { runCamoufoxWorker } from "./camoufox-worker.js";
|
||||
import {
|
||||
startProxyProcess,
|
||||
stopProxyProcess,
|
||||
stopAllProxyProcesses,
|
||||
stopProxyProcess,
|
||||
} from "./proxy-runner";
|
||||
import { listProxyConfigs } from "./proxy-storage";
|
||||
import { runProxyWorker } from "./proxy-worker";
|
||||
@@ -11,79 +20,119 @@ import { runProxyWorker } from "./proxy-worker";
|
||||
program
|
||||
.command("proxy")
|
||||
.argument("<action>", "start, stop, or list proxies")
|
||||
.option(
|
||||
"-u, --upstream <url>",
|
||||
"upstream proxy URL (protocol://[username:password@]host:port)"
|
||||
)
|
||||
.option("--host <host>", "upstream proxy host")
|
||||
.option("--proxy-port <port>", "upstream proxy port", Number.parseInt)
|
||||
.option("--type <type>", "proxy type (http, https, socks4, socks5)")
|
||||
.option("--username <username>", "proxy username")
|
||||
.option("--password <password>", "proxy password")
|
||||
.option(
|
||||
"-p, --port <number>",
|
||||
"local port to use (random if not specified)",
|
||||
Number.parseInt
|
||||
Number.parseInt,
|
||||
)
|
||||
.option("--ignore-certificate", "ignore certificate errors for HTTPS proxies")
|
||||
.option("--id <id>", "proxy ID for stop command")
|
||||
.option(
|
||||
"-u, --upstream <url>",
|
||||
"upstream proxy URL (protocol://[username:password@]host:port)",
|
||||
)
|
||||
.description("manage proxy servers")
|
||||
.action(
|
||||
async (
|
||||
action: string,
|
||||
options: {
|
||||
upstream?: string;
|
||||
host?: string;
|
||||
proxyPort?: number;
|
||||
type?: string;
|
||||
username?: string;
|
||||
password?: string;
|
||||
port?: number;
|
||||
ignoreCertificate?: boolean;
|
||||
id?: string;
|
||||
}
|
||||
upstream?: string;
|
||||
},
|
||||
) => {
|
||||
if (action === "start") {
|
||||
if (!options.upstream) {
|
||||
console.error("Error: Upstream proxy URL is required");
|
||||
console.log(
|
||||
"Example: proxy start -u http://username:password@proxy.example.com:8080"
|
||||
);
|
||||
return;
|
||||
let upstreamUrl: string | undefined;
|
||||
|
||||
// Build upstream URL from individual components if provided
|
||||
if (options.host && options.proxyPort && options.type) {
|
||||
const protocol =
|
||||
options.type === "socks4" || options.type === "socks5"
|
||||
? options.type
|
||||
: "http";
|
||||
const auth =
|
||||
options.username && options.password
|
||||
? `${encodeURIComponent(options.username)}:${encodeURIComponent(
|
||||
options.password,
|
||||
)}@`
|
||||
: "";
|
||||
upstreamUrl = `${protocol}://${auth}${options.host}:${options.proxyPort}`;
|
||||
} else if (options.upstream) {
|
||||
upstreamUrl = options.upstream;
|
||||
}
|
||||
// If no upstream is provided, create a direct proxy
|
||||
|
||||
try {
|
||||
const config = await startProxyProcess(options.upstream, {
|
||||
const config = await startProxyProcess(upstreamUrl, {
|
||||
port: options.port,
|
||||
ignoreProxyCertificate: options.ignoreCertificate,
|
||||
});
|
||||
console.log(JSON.stringify(config));
|
||||
|
||||
// Output the configuration as JSON for the Rust side to parse
|
||||
console.log(
|
||||
JSON.stringify({
|
||||
id: config.id,
|
||||
localPort: config.localPort,
|
||||
localUrl: config.localUrl,
|
||||
upstreamUrl: config.upstreamUrl,
|
||||
}),
|
||||
);
|
||||
|
||||
// Exit successfully to allow the process to detach
|
||||
process.exit(0);
|
||||
} catch (error: unknown) {
|
||||
console.error(`Failed to start proxy: ${JSON.stringify(error)}`);
|
||||
console.error(
|
||||
`Failed to start proxy: ${
|
||||
error instanceof Error ? error.message : JSON.stringify(error)
|
||||
}`,
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
} else if (action === "stop") {
|
||||
if (options.id) {
|
||||
const stopped = await stopProxyProcess(options.id);
|
||||
console.log(`{
|
||||
"success": ${stopped}}`);
|
||||
console.log(JSON.stringify({ success: stopped }));
|
||||
} else if (options.upstream) {
|
||||
// Find proxies with this upstream URL
|
||||
const configs = listProxyConfigs().filter(
|
||||
(config) => config.upstreamUrl === options.upstream
|
||||
(config) => config.upstreamUrl === options.upstream,
|
||||
);
|
||||
|
||||
if (configs.length === 0) {
|
||||
console.error(`No proxies found for ${options.upstream}`);
|
||||
process.exit(1);
|
||||
return;
|
||||
}
|
||||
|
||||
for (const config of configs) {
|
||||
const stopped = await stopProxyProcess(config.id);
|
||||
console.log(`{
|
||||
"success": ${stopped}}`);
|
||||
console.log(JSON.stringify({ success: stopped }));
|
||||
}
|
||||
} else {
|
||||
await stopAllProxyProcesses();
|
||||
console.log(`{
|
||||
"success": true}`);
|
||||
console.log(JSON.stringify({ success: true }));
|
||||
}
|
||||
process.exit(0);
|
||||
} else if (action === "list") {
|
||||
const configs = listProxyConfigs();
|
||||
console.log(JSON.stringify(configs));
|
||||
process.exit(0);
|
||||
} else {
|
||||
console.error("Invalid action. Use 'start', 'stop', or 'list'");
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
// Command for proxy worker (internal use)
|
||||
@@ -101,4 +150,318 @@ program
|
||||
}
|
||||
});
|
||||
|
||||
// Command for Camoufox management
|
||||
program
|
||||
.command("camoufox")
|
||||
.argument(
|
||||
"<action>",
|
||||
"start, stop, list, or generate-config Camoufox instances",
|
||||
)
|
||||
.option("--id <id>", "Camoufox ID for stop command")
|
||||
.option("--profile-path <path>", "profile directory path")
|
||||
.option("--url <url>", "URL to open")
|
||||
|
||||
// Config generation options
|
||||
.option("--proxy <proxy>", "proxy URL for config generation")
|
||||
.option("--max-width <width>", "maximum screen width", parseInt)
|
||||
.option("--max-height <height>", "maximum screen height", parseInt)
|
||||
.option("--min-width <width>", "minimum screen width", parseInt)
|
||||
.option("--min-height <height>", "minimum screen height", parseInt)
|
||||
.option("--geoip", "enable geoip")
|
||||
.option("--block-images", "block images")
|
||||
.option("--block-webrtc", "block WebRTC")
|
||||
.option("--block-webgl", "block WebGL")
|
||||
.option("--executable-path <path>", "executable path")
|
||||
.option("--fingerprint <json>", "fingerprint JSON string")
|
||||
.option("--headless", "run in headless mode")
|
||||
.option("--custom-config <json>", "custom config JSON string")
|
||||
|
||||
.description("manage Camoufox browser instances")
|
||||
.action(
|
||||
async (
|
||||
action: string,
|
||||
options: Record<string, string | number | boolean | undefined>,
|
||||
) => {
|
||||
if (action === "start") {
|
||||
try {
|
||||
// Build Camoufox options in the format expected by camoufox-js
|
||||
const camoufoxOptions: LaunchOptions = {};
|
||||
|
||||
// OS fingerprinting
|
||||
if (options.os && typeof options.os === "string") {
|
||||
camoufoxOptions.os = options.os.includes(",")
|
||||
? (options.os.split(",") as ("windows" | "macos" | "linux")[])
|
||||
: (options.os as "windows" | "macos" | "linux");
|
||||
}
|
||||
|
||||
// Blocking options
|
||||
if (options.blockImages) camoufoxOptions.block_images = true;
|
||||
if (options.blockWebrtc) camoufoxOptions.block_webrtc = true;
|
||||
if (options.blockWebgl) camoufoxOptions.block_webgl = true;
|
||||
|
||||
// Security options
|
||||
if (options.disableCoop) camoufoxOptions.disable_coop = true;
|
||||
|
||||
if (options.geoip) {
|
||||
camoufoxOptions.geoip = true;
|
||||
}
|
||||
|
||||
if (options.latitude && options.longitude) {
|
||||
camoufoxOptions.geolocation = {
|
||||
latitude: options.latitude as number,
|
||||
longitude: options.longitude as number,
|
||||
accuracy: 100,
|
||||
};
|
||||
}
|
||||
if (options.country)
|
||||
camoufoxOptions.country = options.country as string;
|
||||
if (options.timezone)
|
||||
camoufoxOptions.timezone = options.timezone as string;
|
||||
|
||||
if (options.humanize)
|
||||
camoufoxOptions.humanize = options.humanize as boolean;
|
||||
if (options.headless) camoufoxOptions.headless = true;
|
||||
|
||||
// Localization
|
||||
if (options.locale && typeof options.locale === "string") {
|
||||
camoufoxOptions.locale = options.locale.includes(",")
|
||||
? options.locale.split(",")
|
||||
: options.locale;
|
||||
}
|
||||
|
||||
// Extensions and fonts
|
||||
if (options.addons && typeof options.addons === "string")
|
||||
camoufoxOptions.addons = options.addons.split(",");
|
||||
if (options.fonts && typeof options.fonts === "string")
|
||||
camoufoxOptions.fonts = options.fonts.split(",");
|
||||
if (options.customFontsOnly) camoufoxOptions.custom_fonts_only = true;
|
||||
if (
|
||||
options.excludeAddons &&
|
||||
typeof options.excludeAddons === "string"
|
||||
)
|
||||
camoufoxOptions.exclude_addons = options.excludeAddons.split(
|
||||
",",
|
||||
) as "UBO"[];
|
||||
|
||||
// Executable path: forward through to camoufox-js and ultimately Playwright
|
||||
if (
|
||||
options.executablePath &&
|
||||
typeof options.executablePath === "string"
|
||||
) {
|
||||
// camoufox-js uses snake_case for this option
|
||||
(camoufoxOptions as any).executable_path =
|
||||
options.executablePath as string;
|
||||
}
|
||||
|
||||
// Screen and window
|
||||
const screen: {
|
||||
minWidth?: number;
|
||||
maxWidth?: number;
|
||||
minHeight?: number;
|
||||
maxHeight?: number;
|
||||
} = {};
|
||||
if (options.screenMinWidth)
|
||||
screen.minWidth = options.screenMinWidth as number;
|
||||
if (options.screenMaxWidth)
|
||||
screen.maxWidth = options.screenMaxWidth as number;
|
||||
if (options.screenMinHeight)
|
||||
screen.minHeight = options.screenMinHeight as number;
|
||||
if (options.screenMaxHeight)
|
||||
screen.maxHeight = options.screenMaxHeight as number;
|
||||
if (Object.keys(screen).length > 0) camoufoxOptions.screen = screen;
|
||||
|
||||
if (options.windowWidth && options.windowHeight) {
|
||||
camoufoxOptions.window = [
|
||||
options.windowWidth as number,
|
||||
options.windowHeight as number,
|
||||
];
|
||||
}
|
||||
|
||||
// Advanced options
|
||||
if (options.ffVersion)
|
||||
camoufoxOptions.ff_version = options.ffVersion as number;
|
||||
if (options.mainWorldEval) camoufoxOptions.main_world_eval = true;
|
||||
if (options.webglVendor && options.webglRenderer) {
|
||||
camoufoxOptions.webgl_config = [
|
||||
options.webglVendor as string,
|
||||
options.webglRenderer as string,
|
||||
];
|
||||
}
|
||||
|
||||
// Proxy
|
||||
if (options.proxy) camoufoxOptions.proxy = options.proxy as string;
|
||||
|
||||
// Cache and performance - default to enabled
|
||||
camoufoxOptions.enable_cache = !options.disableCache;
|
||||
|
||||
// Environment and debugging
|
||||
if (options.virtualDisplay)
|
||||
camoufoxOptions.virtual_display = options.virtualDisplay as string;
|
||||
if (options.debug) camoufoxOptions.debug = true;
|
||||
|
||||
// Handle headless mode via flag instead of environment variable
|
||||
if (options.headless) {
|
||||
camoufoxOptions.headless = true;
|
||||
}
|
||||
if (options.args && typeof options.args === "string")
|
||||
camoufoxOptions.args = options.args.split(",");
|
||||
if (options.env && typeof options.env === "string") {
|
||||
try {
|
||||
camoufoxOptions.env = JSON.parse(options.env);
|
||||
} catch (e) {
|
||||
console.error(
|
||||
JSON.stringify({
|
||||
error: "Invalid JSON for --env option",
|
||||
message: String(e),
|
||||
}),
|
||||
);
|
||||
process.exit(1);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Firefox preferences
|
||||
if (
|
||||
options.firefoxPrefs &&
|
||||
typeof options.firefoxPrefs === "string"
|
||||
) {
|
||||
try {
|
||||
camoufoxOptions.firefox_user_prefs = JSON.parse(
|
||||
options.firefoxPrefs,
|
||||
);
|
||||
} catch (e) {
|
||||
console.error(
|
||||
JSON.stringify({
|
||||
error: "Invalid JSON for --firefox-prefs option",
|
||||
message: String(e),
|
||||
}),
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
const config = await startCamoufoxProcess(
|
||||
camoufoxOptions,
|
||||
typeof options.profilePath === "string"
|
||||
? options.profilePath
|
||||
: undefined,
|
||||
typeof options.url === "string" ? options.url : undefined,
|
||||
typeof options.customConfig === "string"
|
||||
? options.customConfig
|
||||
: undefined,
|
||||
);
|
||||
|
||||
console.log(
|
||||
JSON.stringify({
|
||||
id: config.id,
|
||||
processId: config.processId,
|
||||
profilePath: config.profilePath,
|
||||
url: config.url,
|
||||
}),
|
||||
);
|
||||
|
||||
process.exit(0);
|
||||
} catch (error: unknown) {
|
||||
console.error(
|
||||
JSON.stringify({
|
||||
error: "Failed to start Camoufox",
|
||||
message: error instanceof Error ? error.message : String(error),
|
||||
}),
|
||||
);
|
||||
process.exit(1);
|
||||
}
|
||||
} else if (action === "stop") {
|
||||
if (options.id && typeof options.id === "string") {
|
||||
const stopped = await stopCamoufoxProcess(options.id);
|
||||
console.log(JSON.stringify({ success: stopped }));
|
||||
} else {
|
||||
await stopAllCamoufoxProcesses();
|
||||
console.log(JSON.stringify({ success: true }));
|
||||
}
|
||||
process.exit(0);
|
||||
} else if (action === "list") {
|
||||
const configs = listCamoufoxConfigs();
|
||||
console.log(JSON.stringify(configs));
|
||||
process.exit(0);
|
||||
} else if (action === "generate-config") {
|
||||
try {
|
||||
const config = await generateCamoufoxConfig({
|
||||
proxy:
|
||||
typeof options.proxy === "string" ? options.proxy : undefined,
|
||||
maxWidth:
|
||||
typeof options.maxWidth === "number"
|
||||
? options.maxWidth
|
||||
: undefined,
|
||||
maxHeight:
|
||||
typeof options.maxHeight === "number"
|
||||
? options.maxHeight
|
||||
: undefined,
|
||||
minWidth:
|
||||
typeof options.minWidth === "number"
|
||||
? options.minWidth
|
||||
: undefined,
|
||||
minHeight:
|
||||
typeof options.minHeight === "number"
|
||||
? options.minHeight
|
||||
: undefined,
|
||||
geoip: Boolean(options.geoip),
|
||||
blockImages:
|
||||
typeof options.blockImages === "boolean"
|
||||
? options.blockImages
|
||||
: undefined,
|
||||
blockWebrtc:
|
||||
typeof options.blockWebrtc === "boolean"
|
||||
? options.blockWebrtc
|
||||
: undefined,
|
||||
blockWebgl:
|
||||
typeof options.blockWebgl === "boolean"
|
||||
? options.blockWebgl
|
||||
: undefined,
|
||||
executablePath:
|
||||
typeof options.executablePath === "string"
|
||||
? options.executablePath
|
||||
: undefined,
|
||||
fingerprint:
|
||||
typeof options.fingerprint === "string"
|
||||
? options.fingerprint
|
||||
: undefined,
|
||||
});
|
||||
console.log(config);
|
||||
process.exit(0);
|
||||
} catch (error: unknown) {
|
||||
console.error({
|
||||
error: "Failed to generate config",
|
||||
message:
|
||||
error instanceof Error ? error.message : JSON.stringify(error),
|
||||
});
|
||||
process.exit(1);
|
||||
}
|
||||
} else {
|
||||
console.error({
|
||||
error: "Invalid action",
|
||||
message: "Use 'start', 'stop', 'list', or 'generate-config'",
|
||||
});
|
||||
process.exit(1);
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
// Command for Camoufox worker (internal use)
|
||||
program
|
||||
.command("camoufox-worker")
|
||||
.argument("<action>", "start a Camoufox worker")
|
||||
.requiredOption("--id <id>", "Camoufox configuration ID")
|
||||
.description("run a Camoufox worker process")
|
||||
.action(async (action: string, options: { id: string }) => {
|
||||
if (action === "start") {
|
||||
await runCamoufoxWorker(options.id);
|
||||
} else {
|
||||
console.error({
|
||||
error: "Invalid action for camoufox-worker",
|
||||
message: "Use 'start'",
|
||||
});
|
||||
process.exit(1);
|
||||
}
|
||||
});
|
||||
|
||||
program.parse();
|
||||
|
||||
+43
-31
@@ -2,67 +2,70 @@ import { spawn } from "node:child_process";
|
||||
import path from "node:path";
|
||||
import getPort from "get-port";
|
||||
import {
|
||||
deleteProxyConfig,
|
||||
generateProxyId,
|
||||
getProxyConfig,
|
||||
isProcessRunning,
|
||||
listProxyConfigs,
|
||||
type ProxyConfig,
|
||||
saveProxyConfig,
|
||||
getProxyConfig,
|
||||
deleteProxyConfig,
|
||||
isProcessRunning,
|
||||
generateProxyId,
|
||||
listProxyConfigs,
|
||||
} from "./proxy-storage";
|
||||
|
||||
/**
|
||||
* Start a proxy in a separate process
|
||||
* @param upstreamUrl The upstream proxy URL
|
||||
* @param upstreamUrl The upstream proxy URL (optional for direct proxy)
|
||||
* @param options Optional configuration
|
||||
* @returns Promise resolving to the proxy configuration
|
||||
*/
|
||||
export async function startProxyProcess(
|
||||
upstreamUrl: string,
|
||||
options: { port?: number; ignoreProxyCertificate?: boolean } = {}
|
||||
upstreamUrl?: string,
|
||||
options: { port?: number; ignoreProxyCertificate?: boolean } = {},
|
||||
): Promise<ProxyConfig> {
|
||||
// Generate a unique ID for this proxy
|
||||
const id = generateProxyId();
|
||||
|
||||
// Get a random available port if not specified
|
||||
const port = options.port || (await getPort());
|
||||
const port = options.port ?? (await getPort());
|
||||
|
||||
// Create the proxy configuration
|
||||
const config: ProxyConfig = {
|
||||
id,
|
||||
upstreamUrl,
|
||||
upstreamUrl: upstreamUrl || "DIRECT",
|
||||
localPort: port,
|
||||
ignoreProxyCertificate: options.ignoreProxyCertificate || false,
|
||||
ignoreProxyCertificate: options.ignoreProxyCertificate ?? false,
|
||||
};
|
||||
|
||||
// Save the configuration before starting the process
|
||||
saveProxyConfig(config);
|
||||
|
||||
// Build the command arguments
|
||||
const args = ["proxy-worker", "start", "--id", id];
|
||||
const args = [
|
||||
path.join(__dirname, "index.js"),
|
||||
"proxy-worker",
|
||||
"start",
|
||||
"--id",
|
||||
id,
|
||||
];
|
||||
|
||||
// Spawn the process
|
||||
const child = spawn(
|
||||
process.execPath,
|
||||
[path.join(__dirname, "index.js"), ...args],
|
||||
{
|
||||
detached: true,
|
||||
stdio: "ignore",
|
||||
}
|
||||
);
|
||||
// Spawn the process with proper detachment
|
||||
const child = spawn(process.execPath, args, {
|
||||
detached: true,
|
||||
stdio: ["ignore", "ignore", "ignore"], // Completely ignore all stdio
|
||||
cwd: process.cwd(),
|
||||
});
|
||||
|
||||
// Unref the child to allow the parent to exit independently
|
||||
child.unref();
|
||||
|
||||
// Store the process ID
|
||||
// Store the process ID and local URL
|
||||
config.pid = child.pid;
|
||||
config.localUrl = `http://localhost:${port}`;
|
||||
config.localUrl = `http://127.0.0.1:${port}`;
|
||||
|
||||
// Update the configuration with the process ID
|
||||
saveProxyConfig(config);
|
||||
|
||||
// Wait a bit to ensure the proxy has started
|
||||
await new Promise((resolve) => setTimeout(resolve, 500));
|
||||
// Give the worker a moment to start before returning
|
||||
await new Promise((resolve) => setTimeout(resolve, 100));
|
||||
|
||||
return config;
|
||||
}
|
||||
@@ -75,7 +78,9 @@ export async function startProxyProcess(
|
||||
export async function stopProxyProcess(id: string): Promise<boolean> {
|
||||
const config = getProxyConfig(id);
|
||||
|
||||
if (!config || !config.pid) {
|
||||
if (!config?.pid) {
|
||||
// Try to delete the config anyway in case it exists without a PID
|
||||
deleteProxyConfig(id);
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -83,10 +88,16 @@ export async function stopProxyProcess(id: string): Promise<boolean> {
|
||||
// Check if the process is running
|
||||
if (isProcessRunning(config.pid)) {
|
||||
// Send SIGTERM to the process
|
||||
process.kill(config.pid);
|
||||
process.kill(config.pid, "SIGTERM");
|
||||
|
||||
// Wait a bit to ensure the process has terminated
|
||||
await new Promise((resolve) => setTimeout(resolve, 300));
|
||||
await new Promise((resolve) => setTimeout(resolve, 500));
|
||||
|
||||
// If still running, send SIGKILL
|
||||
if (isProcessRunning(config.pid)) {
|
||||
process.kill(config.pid, "SIGKILL");
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
}
|
||||
}
|
||||
|
||||
// Delete the configuration
|
||||
@@ -95,6 +106,8 @@ export async function stopProxyProcess(id: string): Promise<boolean> {
|
||||
return true;
|
||||
} catch (error) {
|
||||
console.error(`Error stopping proxy ${id}:`, error);
|
||||
// Delete the configuration even if stopping failed
|
||||
deleteProxyConfig(id);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -106,7 +119,6 @@ export async function stopProxyProcess(id: string): Promise<boolean> {
|
||||
export async function stopAllProxyProcesses(): Promise<void> {
|
||||
const configs = listProxyConfigs();
|
||||
|
||||
for (const config of configs) {
|
||||
await stopProxyProcess(config.id);
|
||||
}
|
||||
const stopPromises = configs.map((config) => stopProxyProcess(config.id));
|
||||
await Promise.all(stopPromises);
|
||||
}
|
||||
|
||||
@@ -1,21 +1,18 @@
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import os from "node:os";
|
||||
import tmp from "tmp";
|
||||
|
||||
// Define the proxy configuration type
|
||||
export interface ProxyConfig {
|
||||
id: string;
|
||||
upstreamUrl: string;
|
||||
upstreamUrl: string; // Can be "DIRECT" for direct proxy
|
||||
localPort?: number;
|
||||
ignoreProxyCertificate?: boolean;
|
||||
localUrl?: string;
|
||||
pid?: number;
|
||||
}
|
||||
|
||||
// Path to store proxy configurations
|
||||
const STORAGE_DIR = path.join(os.tmpdir(), "donutbrowser", "proxies");
|
||||
const STORAGE_DIR = path.join(tmp.tmpdir, "donutbrowser", "proxies");
|
||||
|
||||
// Ensure storage directory exists
|
||||
if (!fs.existsSync(STORAGE_DIR)) {
|
||||
fs.mkdirSync(STORAGE_DIR, { recursive: true });
|
||||
}
|
||||
@@ -88,7 +85,7 @@ export function listProxyConfigs(): ProxyConfig[] {
|
||||
try {
|
||||
const content = fs.readFileSync(
|
||||
path.join(STORAGE_DIR, file),
|
||||
"utf-8"
|
||||
"utf-8",
|
||||
);
|
||||
return JSON.parse(content) as ProxyConfig;
|
||||
} catch (error) {
|
||||
@@ -111,14 +108,18 @@ export function listProxyConfigs(): ProxyConfig[] {
|
||||
export function updateProxyConfig(config: ProxyConfig): boolean {
|
||||
const filePath = path.join(STORAGE_DIR, `${config.id}.json`);
|
||||
|
||||
if (!fs.existsSync(filePath)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
fs.readFileSync(filePath, "utf-8");
|
||||
fs.writeFileSync(filePath, JSON.stringify(config, null, 2));
|
||||
return true;
|
||||
} catch (error) {
|
||||
if ((error as NodeJS.ErrnoException).code === "ENOENT") {
|
||||
console.error(
|
||||
`Config ${config.id} was deleted while the app was running`,
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
console.error(`Error updating proxy config ${config.id}:`, error);
|
||||
return false;
|
||||
}
|
||||
@@ -135,7 +136,7 @@ export function isProcessRunning(pid: number): boolean {
|
||||
// but checks if it exists
|
||||
process.kill(pid, 0);
|
||||
return true;
|
||||
} catch (error) {
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
+38
-19
@@ -1,5 +1,5 @@
|
||||
import { Server } from "proxy-chain";
|
||||
import { getProxyConfig } from "./proxy-storage";
|
||||
import { getProxyConfig, updateProxyConfig } from "./proxy-storage";
|
||||
|
||||
/**
|
||||
* Run a proxy server as a worker process
|
||||
@@ -8,44 +8,63 @@ import { getProxyConfig } from "./proxy-storage";
|
||||
export async function runProxyWorker(id: string): Promise<void> {
|
||||
// Get the proxy configuration
|
||||
const config = getProxyConfig(id);
|
||||
|
||||
|
||||
if (!config) {
|
||||
console.error(`Proxy configuration ${id} not found`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
|
||||
// Create a new proxy server
|
||||
const server = new Server({
|
||||
port: config.localPort,
|
||||
host: "localhost",
|
||||
host: "127.0.0.1",
|
||||
prepareRequestFunction: () => {
|
||||
// If upstreamUrl is "DIRECT", don't use upstream proxy
|
||||
if (config.upstreamUrl === "DIRECT") {
|
||||
return {};
|
||||
}
|
||||
return {
|
||||
upstreamProxyUrl: config.upstreamUrl,
|
||||
ignoreUpstreamProxyCertificate: config.ignoreProxyCertificate || false,
|
||||
ignoreUpstreamProxyCertificate: config.ignoreProxyCertificate ?? false,
|
||||
};
|
||||
},
|
||||
});
|
||||
|
||||
// Handle process termination
|
||||
process.on("SIGTERM", async () => {
|
||||
console.log(`Proxy worker ${id} received SIGTERM, shutting down...`);
|
||||
await server.close(true);
|
||||
|
||||
// Handle process termination gracefully
|
||||
const gracefulShutdown = async () => {
|
||||
try {
|
||||
await server.close(true);
|
||||
} catch {}
|
||||
process.exit(0);
|
||||
};
|
||||
|
||||
process.on("SIGTERM", () => void gracefulShutdown());
|
||||
process.on("SIGINT", () => void gracefulShutdown());
|
||||
|
||||
// Handle uncaught exceptions
|
||||
process.on("uncaughtException", () => {
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
process.on("SIGINT", async () => {
|
||||
console.log(`Proxy worker ${id} received SIGINT, shutting down...`);
|
||||
await server.close(true);
|
||||
process.exit(0);
|
||||
|
||||
process.on("unhandledRejection", () => {
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
|
||||
// Start the server
|
||||
try {
|
||||
await server.listen();
|
||||
console.log(`Proxy worker ${id} started on port ${server.port}`);
|
||||
console.log(`Forwarding to upstream proxy: ${config.upstreamUrl}`);
|
||||
|
||||
// Update the config with the actual port (in case it was auto-assigned)
|
||||
config.localPort = server.port;
|
||||
config.localUrl = `http://127.0.0.1:${server.port}`;
|
||||
updateProxyConfig(config);
|
||||
|
||||
// Keep the process alive
|
||||
setInterval(() => {
|
||||
// Do nothing, just keep the process alive
|
||||
}, 60000);
|
||||
} catch (error) {
|
||||
console.error(`Failed to start proxy worker ${id}:`, error);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,73 +0,0 @@
|
||||
import {
|
||||
startProxyProcess,
|
||||
stopProxyProcess,
|
||||
stopAllProxyProcesses
|
||||
} from "./proxy-runner";
|
||||
import { listProxyConfigs } from "./proxy-storage";
|
||||
|
||||
// Type definitions
|
||||
interface ProxyOptions {
|
||||
port?: number;
|
||||
ignoreProxyCertificate?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Start a local proxy server that forwards to an upstream proxy
|
||||
* @param upstreamProxyUrl The upstream proxy URL (protocol://[username:password@]host:port)
|
||||
* @param options Optional configuration
|
||||
* @returns Promise resolving to the local proxy URL
|
||||
*/
|
||||
export async function startProxy(
|
||||
upstreamProxyUrl: string,
|
||||
options: ProxyOptions = {}
|
||||
): Promise<string> {
|
||||
const config = await startProxyProcess(upstreamProxyUrl, {
|
||||
port: options.port,
|
||||
ignoreProxyCertificate: options.ignoreProxyCertificate,
|
||||
});
|
||||
|
||||
return config.localUrl || `http://localhost:${config.localPort}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop a specific proxy by its upstream URL
|
||||
* @param upstreamProxyUrl The upstream proxy URL to stop
|
||||
* @returns Promise resolving to true if proxy was found and stopped, false otherwise
|
||||
*/
|
||||
export async function stopProxy(upstreamProxyUrl: string): Promise<boolean> {
|
||||
// Find all proxies with this upstream URL
|
||||
const configs = listProxyConfigs().filter(
|
||||
config => config.upstreamUrl === upstreamProxyUrl
|
||||
);
|
||||
|
||||
if (configs.length === 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Stop all matching proxies
|
||||
let success = true;
|
||||
for (const config of configs) {
|
||||
const stopped = await stopProxyProcess(config.id);
|
||||
if (!stopped) {
|
||||
success = false;
|
||||
}
|
||||
}
|
||||
|
||||
return success;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a list of all active proxy upstream URLs
|
||||
* @returns Array of upstream proxy URLs
|
||||
*/
|
||||
export function getActiveProxies(): string[] {
|
||||
return listProxyConfigs().map(config => config.upstreamUrl);
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop all active proxies
|
||||
* @returns Promise that resolves when all proxies are stopped
|
||||
*/
|
||||
export async function stopAllProxies(): Promise<void> {
|
||||
await stopAllProxyProcesses();
|
||||
}
|
||||
@@ -0,0 +1,119 @@
|
||||
import type { LaunchOptions } from "playwright-core";
|
||||
|
||||
const OS_MAP: { [key: string]: "mac" | "win" | "lin" } = {
|
||||
darwin: "mac",
|
||||
linux: "lin",
|
||||
win32: "win",
|
||||
};
|
||||
|
||||
const OS_NAME: "mac" | "win" | "lin" = OS_MAP[process.platform];
|
||||
|
||||
export function getEnvVars(configMap: Record<string, string>) {
|
||||
const envVars: {
|
||||
[key: string]: string | undefined;
|
||||
} = {};
|
||||
let updatedConfigData: Uint8Array;
|
||||
|
||||
try {
|
||||
// Ensure we're working with a fresh copy of the config
|
||||
const configCopy = JSON.parse(JSON.stringify(configMap));
|
||||
updatedConfigData = new TextEncoder().encode(JSON.stringify(configCopy));
|
||||
} catch (e) {
|
||||
console.error(`Error updating config: ${e}`);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const chunkSize = OS_NAME === "win" ? 2047 : 32767;
|
||||
const configStr = new TextDecoder().decode(updatedConfigData);
|
||||
|
||||
for (let i = 0; i < configStr.length; i += chunkSize) {
|
||||
const chunk = configStr.slice(i, i + chunkSize);
|
||||
const envName = `CAMOU_CONFIG_${Math.floor(i / chunkSize) + 1}`;
|
||||
try {
|
||||
envVars[envName] = chunk;
|
||||
} catch (e) {
|
||||
console.error(`Error setting ${envName}: ${e}`);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
return envVars;
|
||||
}
|
||||
|
||||
export function parseProxyString(proxyString: LaunchOptions["proxy"] | string) {
|
||||
if (typeof proxyString === "object") {
|
||||
return proxyString;
|
||||
}
|
||||
|
||||
if (!proxyString || typeof proxyString !== "string") {
|
||||
throw new Error("Invalid proxy string provided");
|
||||
}
|
||||
|
||||
// Remove any leading/trailing whitespace
|
||||
const trimmed = proxyString.trim();
|
||||
|
||||
// Handle different proxy string formats:
|
||||
// 1. http://username:password@host:port
|
||||
// 2. host:port
|
||||
// 3. protocol://host:port
|
||||
// 4. username:password@host:port
|
||||
|
||||
let server = "";
|
||||
let username: string | undefined;
|
||||
let password: string | undefined;
|
||||
|
||||
try {
|
||||
// Try parsing as URL first (handles protocol://username:password@host:port)
|
||||
if (trimmed.includes("://")) {
|
||||
const url = new URL(trimmed);
|
||||
server = `${url.hostname}:${url.port}`;
|
||||
|
||||
if (url.username) {
|
||||
username = decodeURIComponent(url.username);
|
||||
}
|
||||
if (url.password) {
|
||||
password = decodeURIComponent(url.password);
|
||||
}
|
||||
} else {
|
||||
// Handle formats without protocol
|
||||
let workingString = trimmed;
|
||||
|
||||
// Check for username:password@ prefix
|
||||
const authMatch = workingString.match(/^([^:@]+):([^@]+)@(.+)$/);
|
||||
if (authMatch) {
|
||||
username = authMatch[1];
|
||||
password = authMatch[2];
|
||||
workingString = authMatch[3];
|
||||
}
|
||||
|
||||
// The remaining part should be host:port
|
||||
server = workingString;
|
||||
}
|
||||
|
||||
// Validate that we have a server
|
||||
if (!server) {
|
||||
throw new Error("Could not extract server information");
|
||||
}
|
||||
|
||||
// Basic validation for host:port format
|
||||
if (!server.includes(":") || server.split(":").length !== 2) {
|
||||
throw new Error("Server must be in host:port format");
|
||||
}
|
||||
|
||||
const result: LaunchOptions["proxy"] = { server };
|
||||
|
||||
if (username !== undefined) {
|
||||
result.username = username;
|
||||
}
|
||||
|
||||
if (password !== undefined) {
|
||||
result.password = password;
|
||||
}
|
||||
|
||||
return result;
|
||||
} catch (error) {
|
||||
throw new Error(
|
||||
`Failed to parse proxy string: ${error instanceof Error ? error.message : "Unknown error"}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
+34
-36
@@ -2,7 +2,7 @@
|
||||
"name": "donutbrowser",
|
||||
"private": true,
|
||||
"license": "AGPL-3.0",
|
||||
"version": "0.3.2",
|
||||
"version": "0.9.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "next dev --turbopack",
|
||||
@@ -11,15 +11,16 @@
|
||||
"test": "pnpm test:rust",
|
||||
"test:rust": "cd src-tauri && cargo test",
|
||||
"lint": "pnpm lint:js && pnpm lint:rust",
|
||||
"lint:js": "biome check src/ && tsc --noEmit && next lint",
|
||||
"lint:js": "biome check src/ && tsc --noEmit",
|
||||
"lint:rust": "cd src-tauri && cargo clippy --all-targets --all-features -- -D warnings -D clippy::all && cargo fmt --all",
|
||||
"tauri": "tauri",
|
||||
"shadcn:add": "pnpm dlx shadcn@latest add",
|
||||
"prepare": "husky && husky install",
|
||||
"format:rust": "cd src-tauri && cargo clippy --fix --allow-dirty --all-targets --all-features -- -D warnings -D clippy::all && cargo fmt --all",
|
||||
"format:js": "biome check src/ --fix",
|
||||
"format:js": "biome check src/ --write --unsafe",
|
||||
"format": "pnpm format:js && pnpm format:rust",
|
||||
"cargo": "cd src-tauri && cargo",
|
||||
"unused-exports:js": "ts-unused-exports tsconfig.json",
|
||||
"check-unused-commands": "cd src-tauri && cargo run --bin check_unused_commands"
|
||||
},
|
||||
"dependencies": {
|
||||
@@ -29,54 +30,51 @@
|
||||
"@radix-ui/react-label": "^2.1.7",
|
||||
"@radix-ui/react-popover": "^1.1.14",
|
||||
"@radix-ui/react-progress": "^1.1.7",
|
||||
"@radix-ui/react-radio-group": "^1.3.7",
|
||||
"@radix-ui/react-scroll-area": "^1.2.9",
|
||||
"@radix-ui/react-select": "^2.2.5",
|
||||
"@radix-ui/react-slot": "^1.2.3",
|
||||
"@radix-ui/react-tabs": "^1.1.12",
|
||||
"@radix-ui/react-tooltip": "^1.2.7",
|
||||
"@tanstack/react-table": "^8.21.3",
|
||||
"@tauri-apps/api": "^2.5.0",
|
||||
"@tauri-apps/plugin-dialog": "^2.2.2",
|
||||
"@tauri-apps/plugin-fs": "~2.3.0",
|
||||
"@tauri-apps/plugin-opener": "^2.2.7",
|
||||
"@tauri-apps/api": "^2.7.0",
|
||||
"@tauri-apps/plugin-deep-link": "^2.4.1",
|
||||
"@tauri-apps/plugin-dialog": "^2.3.2",
|
||||
"@tauri-apps/plugin-fs": "~2.4.1",
|
||||
"@tauri-apps/plugin-opener": "^2.4.0",
|
||||
"ahooks": "^3.9.0",
|
||||
"class-variance-authority": "^0.7.1",
|
||||
"clsx": "^2.1.1",
|
||||
"cmdk": "^1.1.1",
|
||||
"next": "^15.3.3",
|
||||
"motion": "^12.23.12",
|
||||
"next": "^15.4.6",
|
||||
"next-themes": "^0.4.6",
|
||||
"react": "^19.1.0",
|
||||
"react-dom": "^19.1.0",
|
||||
"react": "^19.1.1",
|
||||
"react-dom": "^19.1.1",
|
||||
"react-icons": "^5.5.0",
|
||||
"sonner": "^2.0.5",
|
||||
"tailwind-merge": "^3.3.0"
|
||||
"sonner": "^2.0.7",
|
||||
"tailwind-merge": "^3.3.1",
|
||||
"tauri-plugin-macos-permissions-api": "^2.3.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "1.9.4",
|
||||
"@eslint/eslintrc": "^3.3.1",
|
||||
"@eslint/js": "^9.28.0",
|
||||
"@next/eslint-plugin-next": "^15.3.3",
|
||||
"@tailwindcss/postcss": "^4.1.8",
|
||||
"@tauri-apps/cli": "^2.5.0",
|
||||
"@types/node": "^22.15.30",
|
||||
"@types/react": "^19.1.6",
|
||||
"@types/react-dom": "^19.1.6",
|
||||
"@typescript-eslint/eslint-plugin": "^8.33.1",
|
||||
"@typescript-eslint/parser": "^8.33.1",
|
||||
"@vitejs/plugin-react": "^4.5.1",
|
||||
"eslint": "^9.28.0",
|
||||
"eslint-config-next": "^15.3.3",
|
||||
"eslint-plugin-react-hooks": "^5.2.0",
|
||||
"@biomejs/biome": "2.1.4",
|
||||
"@tailwindcss/postcss": "^4.1.11",
|
||||
"@tauri-apps/cli": "^2.7.1",
|
||||
"@types/node": "^24.2.1",
|
||||
"@types/react": "^19.1.9",
|
||||
"@types/react-dom": "^19.1.7",
|
||||
"@vitejs/plugin-react": "^5.0.0",
|
||||
"husky": "^9.1.7",
|
||||
"lint-staged": "^16.1.0",
|
||||
"tailwindcss": "^4.1.8",
|
||||
"tw-animate-css": "^1.3.4",
|
||||
"typescript": "~5.8.3",
|
||||
"typescript-eslint": "^8.33.1"
|
||||
"lint-staged": "^16.1.5",
|
||||
"tailwindcss": "^4.1.11",
|
||||
"ts-unused-exports": "^11.0.1",
|
||||
"tw-animate-css": "^1.3.6",
|
||||
"typescript": "~5.9.2"
|
||||
},
|
||||
"packageManager": "pnpm@10.11.1",
|
||||
"packageManager": "pnpm@10.13.1",
|
||||
"lint-staged": {
|
||||
"src/**/*.{js,jsx,ts,tsx,json,css,md}": [
|
||||
"biome check --fix",
|
||||
"eslint --cache --fix"
|
||||
"**/*.{js,jsx,ts,tsx,json,css}": [
|
||||
"biome check --fix"
|
||||
],
|
||||
"src-tauri/**/*.rs": [
|
||||
"cd src-tauri && cargo fmt --all",
|
||||
|
||||
Generated
+2287
-3707
File diff suppressed because it is too large
Load Diff
+4
-4
@@ -1,9 +1,9 @@
|
||||
packages:
|
||||
- "nodecar"
|
||||
|
||||
- nodecar
|
||||
onlyBuiltDependencies:
|
||||
- "@biomejs/biome"
|
||||
- "@tailwindcss/oxide"
|
||||
- '@biomejs/biome'
|
||||
- '@tailwindcss/oxide'
|
||||
- esbuild
|
||||
- sharp
|
||||
- sqlite3
|
||||
- unrs-resolver
|
||||
|
||||
Generated
+1160
-588
File diff suppressed because it is too large
Load Diff
+46
-9
@@ -1,7 +1,7 @@
|
||||
[package]
|
||||
name = "donutbrowser"
|
||||
version = "0.3.2"
|
||||
description = "Simple Yet Powerful Browser Orchestrator"
|
||||
version = "0.9.0"
|
||||
description = "Simple Yet Powerful Anti-Detect Browser"
|
||||
authors = ["zhom@github"]
|
||||
edition = "2021"
|
||||
default-run = "donutbrowser"
|
||||
@@ -27,30 +27,67 @@ tauri-plugin-fs = "2"
|
||||
tauri-plugin-shell = "2"
|
||||
tauri-plugin-deep-link = "2"
|
||||
tauri-plugin-dialog = "2"
|
||||
tauri-plugin-macos-permissions = "2"
|
||||
directories = "6"
|
||||
reqwest = { version = "0.12", features = ["json", "stream"] }
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
sysinfo = "0.35"
|
||||
tokio = { version = "1", features = ["full", "sync"] }
|
||||
sysinfo = "0.36"
|
||||
lazy_static = "1.4"
|
||||
base64 = "0.22"
|
||||
zip = "4"
|
||||
async-trait = "0.1"
|
||||
futures-util = "0.3"
|
||||
zip = "4"
|
||||
tar = "0"
|
||||
bzip2 = "0"
|
||||
flate2 = "1"
|
||||
lzma-rs = "0"
|
||||
msi-extract = "0"
|
||||
|
||||
uuid = { version = "1.0", features = ["v4", "serde"] }
|
||||
url = "2.5"
|
||||
chrono = { version = "0.4", features = ["serde"] }
|
||||
|
||||
[target."cfg(any(target_os = \"macos\", windows, target_os = \"linux\"))".dependencies]
|
||||
tauri-plugin-single-instance = { version = "2", features = ["deep-link"] }
|
||||
|
||||
[target.'cfg(target_os = "macos")'.dependencies]
|
||||
core-foundation="0.10"
|
||||
core-foundation = "0.10"
|
||||
objc2 = "0.6.1"
|
||||
objc2-app-kit = { version = "0.3.1", features = ["NSWindow"] }
|
||||
|
||||
[target.'cfg(target_os = "windows")'.dependencies]
|
||||
winreg = "0.55"
|
||||
windows = { version = "0.61", features = [
|
||||
"Win32_Foundation",
|
||||
"Win32_System_ProcessStatus",
|
||||
"Win32_System_Threading",
|
||||
"Win32_System_Diagnostics_Debug",
|
||||
"Win32_System_SystemInformation",
|
||||
"Win32_Security",
|
||||
"Win32_Storage_FileSystem",
|
||||
"Win32_System_Registry",
|
||||
"Win32_UI_Shell",
|
||||
] }
|
||||
|
||||
[dev-dependencies]
|
||||
tempfile = "3.13.0"
|
||||
tokio-test = "0.4.4"
|
||||
wiremock = "0.6"
|
||||
hyper = { version = "1.0", features = ["full"] }
|
||||
hyper-util = { version = "0.1", features = ["full"] }
|
||||
http-body-util = "0.1"
|
||||
tower = "0.5"
|
||||
tower-http = { version = "0.6", features = ["fs", "trace"] }
|
||||
futures-util = "0.3"
|
||||
|
||||
# Integration test configuration
|
||||
[[test]]
|
||||
name = "nodecar_integration"
|
||||
path = "tests/nodecar_integration.rs"
|
||||
|
||||
[features]
|
||||
# by default Tauri runs in production mode
|
||||
# when `tauri dev` runs it is executed with `cargo run --no-default-features` if `devPath` points to the filesystem
|
||||
default = [ "custom-protocol" ]
|
||||
default = ["custom-protocol"]
|
||||
# this feature is used used for production builds where `devPath` points to the filesystem
|
||||
# DO NOT remove this
|
||||
custom-protocol = [ "tauri/custom-protocol" ]
|
||||
custom-protocol = ["tauri/custom-protocol"]
|
||||
|
||||
+29
-19
@@ -2,47 +2,57 @@
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>NSCameraUsageDescription</key>
|
||||
<string>Donut Browser needs camera access to enable camera functionality in web browsers. Each website will still ask for your permission individually.</string>
|
||||
<key>NSMicrophoneUsageDescription</key>
|
||||
<string>Donut Browser needs microphone access to enable microphone functionality in web browsers. Each website will still ask for your permission individually.</string>
|
||||
<key>NSLocalNetworkUsageDescription</key>
|
||||
<string>Donut Browser has proxy functionality that requires local network access. You can deny this functionality if you don't plan on setting proxies for browser profiles.</string>
|
||||
<key>CFBundleDisplayName</key>
|
||||
<string>Donut Browser</string>
|
||||
<key>CFBundleName</key>
|
||||
<string>Donut Browser</string>
|
||||
<key>CFBundleIdentifier</key>
|
||||
<string>com.donutbrowser</string>
|
||||
<key>CFBundleURLName</key>
|
||||
<string>com.donutbrowser</string>
|
||||
<key>CFBundleExecutable</key>
|
||||
<string>donutbrowser</string>
|
||||
<key>CFBundleVersion</key>
|
||||
<string>1</string>
|
||||
<key>CFBundleShortVersionString</key>
|
||||
<string>0.3.2</string>
|
||||
<key>CFBundlePackageType</key>
|
||||
<string>APPL</string>
|
||||
<key>CFBundleIconFile</key>
|
||||
<string>icon.icns</string>
|
||||
<key>CFBundleSignature</key>
|
||||
<string>????</string>
|
||||
<key>CFBundleIconFile</key>
|
||||
<string>icon.icns</string>
|
||||
<key>LSApplicationCategoryType</key>
|
||||
<string>public.app-category.productivity</string>
|
||||
<key>NSHumanReadableCopyright</key>
|
||||
<string>Copyright © 2025 Donut Browser</string>
|
||||
<key>CFBundleDocumentTypes</key>
|
||||
<array>
|
||||
<dict>
|
||||
<key>CFBundleTypeName</key>
|
||||
<string>HTML document</string>
|
||||
<key>CFBundleTypeRole</key>
|
||||
<string>Viewer</string>
|
||||
<key>LSHandlerRank</key>
|
||||
<string>Default</string>
|
||||
<key>LSItemContentTypes</key>
|
||||
<array>
|
||||
<string>public.html</string>
|
||||
<string>public.xhtml</string>
|
||||
</array>
|
||||
</dict>
|
||||
</array>
|
||||
<key>CFBundleURLTypes</key>
|
||||
<array>
|
||||
<dict>
|
||||
<key>CFBundleURLName</key>
|
||||
<string>Web Browser</string>
|
||||
<string>Web site URL</string>
|
||||
<key>CFBundleURLSchemes</key>
|
||||
<array>
|
||||
<string>http</string>
|
||||
<string>https</string>
|
||||
</array>
|
||||
<key>CFBundleURLIconFile</key>
|
||||
<string>icon.icns</string>
|
||||
<key>LSHandlerRank</key>
|
||||
<string>Owner</string>
|
||||
</dict>
|
||||
</array>
|
||||
<key>LSApplicationCategoryType</key>
|
||||
<string>public.app-category.productivity</string>
|
||||
<key>NSHumanReadableCopyright</key>
|
||||
<string>Copyright © 2025 Donut Browser</string>
|
||||
<key>LSMinimumSystemVersion</key>
|
||||
<string>10.13</string>
|
||||
</dict>
|
||||
</plist>
|
||||
@@ -1,14 +1,3 @@
|
||||
function FindProxyForURL(url, host) {
|
||||
const proxyString = "{{proxy_url}}";
|
||||
|
||||
// Split the proxy string to get the credentials part
|
||||
const parts = proxyString.split(" ")[1].split("@");
|
||||
if (parts.length > 1) {
|
||||
const credentials = parts[0];
|
||||
const encodedCredentials = encodeURIComponent(credentials);
|
||||
// Replace the original credentials with encoded ones
|
||||
return proxyString.replace(credentials, encodedCredentials);
|
||||
}
|
||||
|
||||
return proxyString;
|
||||
return "{{proxy_url}}";
|
||||
}
|
||||
|
||||
+1
-1
@@ -17,7 +17,7 @@ fn main() {
|
||||
let version = std::env::var("CARGO_PKG_VERSION").unwrap_or_else(|_| "0.1.0".to_string());
|
||||
println!("cargo:rustc-env=BUILD_VERSION=v{version}");
|
||||
} else if let Ok(commit_hash) = std::env::var("GITHUB_SHA") {
|
||||
// For nightly builds, use commit hash
|
||||
// For nightly builds, use timestamp format or fallback to commit hash
|
||||
let short_hash = &commit_hash[0..7.min(commit_hash.len())];
|
||||
println!("cargo:rustc-env=BUILD_VERSION=nightly-{short_hash}");
|
||||
} else {
|
||||
|
||||
@@ -19,7 +19,16 @@
|
||||
"shell:allow-spawn",
|
||||
"shell:allow-stdin-write",
|
||||
"deep-link:default",
|
||||
"deep-link:allow-register",
|
||||
"deep-link:allow-unregister",
|
||||
"deep-link:allow-is-registered",
|
||||
"deep-link:allow-get-current",
|
||||
"dialog:default",
|
||||
"dialog:allow-open"
|
||||
"dialog:allow-open",
|
||||
"macos-permissions:default",
|
||||
"macos-permissions:allow-request-microphone-permission",
|
||||
"macos-permissions:allow-request-camera-permission",
|
||||
"macos-permissions:allow-check-microphone-permission",
|
||||
"macos-permissions:allow-check-camera-permission"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -2,12 +2,12 @@
|
||||
Version=1.0
|
||||
Type=Application
|
||||
Name=Donut Browser
|
||||
Comment=Simple Yet Powerful Browser Orchestrator
|
||||
Comment=Simple Yet Powerful Anti-Detect Browser
|
||||
Exec=donutbrowser %u
|
||||
Icon=donutbrowser
|
||||
StartupNotify=true
|
||||
NoDisplay=false
|
||||
Categories=Network;WebBrowser;Productivity;
|
||||
Categories=Network;WebBrowser;
|
||||
MimeType=x-scheme-handler/http;x-scheme-handler/https;text/html;application/xhtml+xml;
|
||||
StartupWMClass=donutbrowser
|
||||
Keywords=browser;web;internet;productivity;
|
||||
@@ -12,5 +12,21 @@
|
||||
<true/>
|
||||
<key>com.apple.security.files.downloads.read-write</key>
|
||||
<true/>
|
||||
<key>com.apple.security.device.camera</key>
|
||||
<true/>
|
||||
<key>com.apple.security.device.audio-output</key>
|
||||
<true/>
|
||||
<key>com.apple.security.device.microphone</key>
|
||||
<true/>
|
||||
<key>com.apple.security.device.audio-input</key>
|
||||
<true/>
|
||||
<key>com.apple.security.cs.allow-jit</key>
|
||||
<true/>
|
||||
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
|
||||
<true/>
|
||||
<key>com.apple.security.cs.allow-dyld-environment-variables</key>
|
||||
<true/>
|
||||
<key>com.apple.security.cs.disable-library-validation</key>
|
||||
<true/>
|
||||
</dict>
|
||||
</plist>
|
||||
+503
-157
@@ -9,21 +9,21 @@ use std::time::{SystemTime, UNIX_EPOCH};
|
||||
use crate::browser::GithubRelease;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
struct VersionComponent {
|
||||
major: u32,
|
||||
minor: u32,
|
||||
patch: u32,
|
||||
pre_release: Option<PreRelease>,
|
||||
pub struct VersionComponent {
|
||||
pub major: u32,
|
||||
pub minor: u32,
|
||||
pub patch: u32,
|
||||
pub pre_release: Option<PreRelease>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
struct PreRelease {
|
||||
kind: PreReleaseKind,
|
||||
number: Option<u32>,
|
||||
pub struct PreRelease {
|
||||
pub kind: PreReleaseKind,
|
||||
pub number: Option<u32>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
|
||||
enum PreReleaseKind {
|
||||
pub enum PreReleaseKind {
|
||||
Alpha,
|
||||
Beta,
|
||||
RC,
|
||||
@@ -32,8 +32,14 @@ enum PreReleaseKind {
|
||||
}
|
||||
|
||||
impl VersionComponent {
|
||||
fn parse(version: &str) -> Self {
|
||||
pub fn parse(version: &str) -> Self {
|
||||
let version = version.trim();
|
||||
// Normalize common tag prefixes like 'v1.2.3' -> '1.2.3'
|
||||
let version = if version.starts_with('v') || version.starts_with('V') {
|
||||
&version[1..]
|
||||
} else {
|
||||
version
|
||||
};
|
||||
|
||||
// Handle special case for Zen Browser twilight releases
|
||||
if version.to_lowercase() == "twilight" {
|
||||
@@ -218,8 +224,11 @@ pub fn sort_versions(versions: &mut [String]) {
|
||||
// Helper function to sort GitHub releases
|
||||
pub fn sort_github_releases(releases: &mut [GithubRelease]) {
|
||||
releases.sort_by(|a, b| {
|
||||
let version_a = VersionComponent::parse(&a.tag_name);
|
||||
let version_b = VersionComponent::parse(&b.tag_name);
|
||||
// Normalize tags like "v1.81.9" -> "1.81.9" for correct ordering
|
||||
let tag_a = a.tag_name.trim_start_matches('v');
|
||||
let tag_b = b.tag_name.trim_start_matches('v');
|
||||
let version_a = VersionComponent::parse(tag_a);
|
||||
let version_b = VersionComponent::parse(tag_b);
|
||||
version_b.cmp(&version_a) // Descending order (newest first)
|
||||
});
|
||||
}
|
||||
@@ -242,12 +251,22 @@ pub fn is_browser_version_nightly(
|
||||
version.to_lowercase() == "twilight"
|
||||
}
|
||||
"brave" => {
|
||||
// For Brave Browser, only releases titled "Release" are stable, everything else is nightly
|
||||
// For Brave Browser, only releases whose name starts with "Release" (case-insensitive) are stable.
|
||||
if let Some(name) = release_name {
|
||||
!name.starts_with("Release")
|
||||
} else {
|
||||
true
|
||||
let normalized = name.trim_start().to_ascii_lowercase();
|
||||
return !normalized.starts_with("release");
|
||||
}
|
||||
|
||||
// Fallback: try cached GitHub releases
|
||||
if let Some(releases) = ApiClient::instance().get_cached_github_releases("brave") {
|
||||
if let Some(found) = releases.iter().find(|r| r.tag_name == version) {
|
||||
let normalized = found.name.trim_start().to_ascii_lowercase();
|
||||
return !normalized.starts_with("release");
|
||||
}
|
||||
}
|
||||
|
||||
// Last resort: when no name available, treat as nightly (non-Release)
|
||||
true
|
||||
}
|
||||
"firefox" | "firefox-developer" => {
|
||||
// For Firefox, use the category from the API response to determine stability
|
||||
@@ -259,6 +278,10 @@ pub fn is_browser_version_nightly(
|
||||
// Chromium builds are generally stable snapshots
|
||||
false
|
||||
}
|
||||
"camoufox" => {
|
||||
// For Camoufox, beta versions are actually the stable releases
|
||||
false
|
||||
}
|
||||
_ => {
|
||||
// Default fallback
|
||||
is_nightly_version(version)
|
||||
@@ -291,7 +314,7 @@ pub struct BrowserRelease {
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
struct CachedVersionData {
|
||||
versions: Vec<String>,
|
||||
releases: Vec<BrowserRelease>,
|
||||
timestamp: u64,
|
||||
}
|
||||
|
||||
@@ -311,7 +334,7 @@ pub struct ApiClient {
|
||||
}
|
||||
|
||||
impl ApiClient {
|
||||
pub fn new() -> Self {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
client: Client::new(),
|
||||
firefox_api_base: "https://product-details.mozilla.org/1.0".to_string(),
|
||||
@@ -323,6 +346,69 @@ impl ApiClient {
|
||||
}
|
||||
}
|
||||
|
||||
async fn fetch_github_releases_multiple_pages(
|
||||
&self,
|
||||
base_releases_url: &str,
|
||||
) -> Result<Vec<GithubRelease>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let mut all_releases: Vec<GithubRelease> = Vec::new();
|
||||
|
||||
// For now, only fetch 1 page
|
||||
for page in 1..=1 {
|
||||
let url = format!("{base_releases_url}?per_page=100&page={page}");
|
||||
let response = self
|
||||
.client
|
||||
.get(&url)
|
||||
.header(
|
||||
"User-Agent",
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36",
|
||||
)
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
// If the first page fails, propagate error; otherwise stop pagination
|
||||
if page == 1 {
|
||||
return Err(
|
||||
format!(
|
||||
"GitHub API returned status for page {}: {}",
|
||||
page,
|
||||
response.status()
|
||||
)
|
||||
.into(),
|
||||
);
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
let text = response.text().await?;
|
||||
let mut page_releases: Vec<GithubRelease> = serde_json::from_str(&text).map_err(|e| {
|
||||
eprintln!("Failed to parse GitHub API response (page {page}): {e}");
|
||||
eprintln!(
|
||||
"Response text (first 500 chars): {}",
|
||||
if text.len() > 500 {
|
||||
&text[..500]
|
||||
} else {
|
||||
&text
|
||||
}
|
||||
);
|
||||
format!("Failed to parse GitHub API response: {e}")
|
||||
})?;
|
||||
|
||||
if page_releases.is_empty() {
|
||||
break;
|
||||
}
|
||||
|
||||
all_releases.append(&mut page_releases);
|
||||
}
|
||||
|
||||
Ok(all_releases)
|
||||
}
|
||||
|
||||
pub fn instance() -> &'static ApiClient {
|
||||
&API_CLIENT
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn new_with_base_urls(
|
||||
firefox_api_base: String,
|
||||
@@ -366,7 +452,7 @@ impl ApiClient {
|
||||
current_time - timestamp < cache_duration
|
||||
}
|
||||
|
||||
pub fn load_cached_versions(&self, browser: &str) -> Option<Vec<String>> {
|
||||
pub fn load_cached_versions(&self, browser: &str) -> Option<Vec<BrowserRelease>> {
|
||||
let cache_dir = Self::get_cache_dir().ok()?;
|
||||
let cache_file = cache_dir.join(format!("{browser}_versions.json"));
|
||||
|
||||
@@ -375,11 +461,27 @@ impl ApiClient {
|
||||
}
|
||||
|
||||
let content = fs::read_to_string(&cache_file).ok()?;
|
||||
let cached_data: CachedVersionData = serde_json::from_str(&content).ok()?;
|
||||
if let Ok(cached) = serde_json::from_str::<CachedVersionData>(&content) {
|
||||
// Always return cached releases regardless of age - they're always valid
|
||||
println!("Using cached versions for {browser}");
|
||||
return Some(cached.releases);
|
||||
}
|
||||
|
||||
// Always return cached versions regardless of age - they're always valid
|
||||
println!("Using cached versions for {browser}");
|
||||
Some(cached_data.versions)
|
||||
// Backward compatibility: legacy caches stored just an array of version strings
|
||||
if let Ok(legacy_versions) = serde_json::from_str::<Vec<String>>(&content) {
|
||||
println!("Using legacy cached versions for {browser}; upgrading in-memory");
|
||||
let releases: Vec<BrowserRelease> = legacy_versions
|
||||
.into_iter()
|
||||
.map(|version| BrowserRelease {
|
||||
is_prerelease: is_browser_version_nightly(browser, &version, None),
|
||||
version,
|
||||
date: "".to_string(),
|
||||
})
|
||||
.collect();
|
||||
return Some(releases);
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub fn is_cache_expired(&self, browser: &str) -> bool {
|
||||
@@ -410,19 +512,19 @@ impl ApiClient {
|
||||
pub fn save_cached_versions(
|
||||
&self,
|
||||
browser: &str,
|
||||
versions: &[String],
|
||||
releases: &[BrowserRelease],
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let cache_dir = Self::get_cache_dir()?;
|
||||
let cache_file = cache_dir.join(format!("{browser}_versions.json"));
|
||||
|
||||
let cached_data = CachedVersionData {
|
||||
versions: versions.to_vec(),
|
||||
releases: releases.to_vec(),
|
||||
timestamp: Self::get_current_timestamp(),
|
||||
};
|
||||
|
||||
let content = serde_json::to_string_pretty(&cached_data)?;
|
||||
fs::write(&cache_file, content)?;
|
||||
println!("Cached {} versions for {}", versions.len(), browser);
|
||||
println!("Cached {} versions for {}", releases.len(), browser);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -442,6 +544,11 @@ impl ApiClient {
|
||||
Some(cached_data.releases)
|
||||
}
|
||||
|
||||
/// Public accessor for cached GitHub releases (used by other modules for classification)
|
||||
pub fn get_cached_github_releases(&self, browser: &str) -> Option<Vec<GithubRelease>> {
|
||||
self.load_cached_github_releases(browser)
|
||||
}
|
||||
|
||||
fn save_cached_github_releases(
|
||||
&self,
|
||||
browser: &str,
|
||||
@@ -467,19 +574,8 @@ impl ApiClient {
|
||||
) -> Result<Vec<BrowserRelease>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Check cache first (unless bypassing)
|
||||
if !no_caching {
|
||||
if let Some(cached_versions) = self.load_cached_versions("firefox") {
|
||||
return Ok(
|
||||
cached_versions
|
||||
.into_iter()
|
||||
.map(|version| {
|
||||
BrowserRelease {
|
||||
version: version.clone(),
|
||||
date: "".to_string(), // Cache doesn't store dates
|
||||
is_prerelease: is_browser_version_nightly("firefox", &version, None),
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
);
|
||||
if let Some(cached_releases) = self.load_cached_versions("firefox") {
|
||||
return Ok(cached_releases);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -525,12 +621,9 @@ impl ApiClient {
|
||||
version_b.cmp(&version_a)
|
||||
});
|
||||
|
||||
// Extract versions for caching
|
||||
let versions: Vec<String> = releases.iter().map(|r| r.version.clone()).collect();
|
||||
|
||||
// Cache the results (unless bypassing cache)
|
||||
if !no_caching {
|
||||
if let Err(e) = self.save_cached_versions("firefox", &versions) {
|
||||
if let Err(e) = self.save_cached_versions("firefox", &releases) {
|
||||
eprintln!("Failed to cache Firefox versions: {e}");
|
||||
}
|
||||
}
|
||||
@@ -544,19 +637,8 @@ impl ApiClient {
|
||||
) -> Result<Vec<BrowserRelease>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Check cache first (unless bypassing)
|
||||
if !no_caching {
|
||||
if let Some(cached_versions) = self.load_cached_versions("firefox-developer") {
|
||||
return Ok(
|
||||
cached_versions
|
||||
.into_iter()
|
||||
.map(|version| {
|
||||
BrowserRelease {
|
||||
version: version.clone(),
|
||||
date: "".to_string(), // Cache doesn't store dates
|
||||
is_prerelease: is_browser_version_nightly("firefox-developer", &version, None),
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
);
|
||||
if let Some(cached_releases) = self.load_cached_versions("firefox-developer") {
|
||||
return Ok(cached_releases);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -608,12 +690,9 @@ impl ApiClient {
|
||||
version_b.cmp(&version_a)
|
||||
});
|
||||
|
||||
// Extract versions for caching
|
||||
let versions: Vec<String> = releases.iter().map(|r| r.version.clone()).collect();
|
||||
|
||||
// Cache the results (unless bypassing cache)
|
||||
if !no_caching {
|
||||
if let Err(e) = self.save_cached_versions("firefox-developer", &versions) {
|
||||
if let Err(e) = self.save_cached_versions("firefox-developer", &releases) {
|
||||
eprintln!("Failed to cache Firefox Developer versions: {e}");
|
||||
}
|
||||
}
|
||||
@@ -632,19 +711,12 @@ impl ApiClient {
|
||||
}
|
||||
}
|
||||
|
||||
println!("Fetching Mullvad releases from GitHub API...");
|
||||
let url = format!(
|
||||
"{}/repos/mullvad/mullvad-browser/releases?per_page=100",
|
||||
println!("Fetching Mullvad releases from GitHub API");
|
||||
let base_url = format!(
|
||||
"{}/repos/mullvad/mullvad-browser/releases",
|
||||
self.github_api_base
|
||||
);
|
||||
let releases = self
|
||||
.client
|
||||
.get(url)
|
||||
.header("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36")
|
||||
.send()
|
||||
.await?
|
||||
.json::<Vec<GithubRelease>>()
|
||||
.await?;
|
||||
let releases = self.fetch_github_releases_multiple_pages(&base_url).await?;
|
||||
|
||||
let mut releases: Vec<GithubRelease> = releases
|
||||
.into_iter()
|
||||
@@ -678,19 +750,13 @@ impl ApiClient {
|
||||
}
|
||||
}
|
||||
|
||||
println!("Fetching Zen releases from GitHub API...");
|
||||
let url = format!(
|
||||
"{}/repos/zen-browser/desktop/releases?per_page=100",
|
||||
println!("Fetching Zen releases from GitHub API");
|
||||
let base_url = format!(
|
||||
"{}/repos/zen-browser/desktop/releases",
|
||||
self.github_api_base
|
||||
);
|
||||
let mut releases = self
|
||||
.client
|
||||
.get(url)
|
||||
.header("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36")
|
||||
.send()
|
||||
.await?
|
||||
.json::<Vec<GithubRelease>>()
|
||||
.await?;
|
||||
let mut releases: Vec<GithubRelease> =
|
||||
self.fetch_github_releases_multiple_pages(&base_url).await?;
|
||||
|
||||
// Check for twilight updates and mark alpha releases
|
||||
for release in &mut releases {
|
||||
@@ -735,31 +801,25 @@ impl ApiClient {
|
||||
}
|
||||
}
|
||||
|
||||
println!("Fetching Brave releases from GitHub API...");
|
||||
let url = format!(
|
||||
"{}/repos/brave/brave-browser/releases?per_page=100",
|
||||
println!("Fetching Brave releases from GitHub API");
|
||||
let base_url = format!(
|
||||
"{}/repos/brave/brave-browser/releases",
|
||||
self.github_api_base
|
||||
);
|
||||
let releases = self
|
||||
.client
|
||||
.get(url)
|
||||
.header("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36")
|
||||
.send()
|
||||
.await?
|
||||
.json::<Vec<GithubRelease>>()
|
||||
.await?;
|
||||
let releases: Vec<GithubRelease> = self.fetch_github_releases_multiple_pages(&base_url).await?;
|
||||
|
||||
// Get platform info to filter appropriate releases
|
||||
let (os, arch) = Self::get_platform_info();
|
||||
let (os, _) = Self::get_platform_info();
|
||||
|
||||
// Filter releases that have assets compatible with the current platform
|
||||
let mut filtered_releases: Vec<GithubRelease> = releases
|
||||
.into_iter()
|
||||
.filter_map(|mut release| {
|
||||
// Check if this release has compatible assets for the current platform
|
||||
let has_compatible_asset = Self::has_compatible_brave_asset(&release.assets, &os, &arch);
|
||||
let has_compatible_asset = Self::has_compatible_brave_asset(&release.assets, &os);
|
||||
|
||||
if has_compatible_asset {
|
||||
println!("release.name: {:?}", release.name);
|
||||
// Use the centralized nightly detection function
|
||||
release.is_nightly =
|
||||
is_browser_version_nightly("brave", &release.tag_name, Some(&release.name));
|
||||
@@ -773,22 +833,40 @@ impl ApiClient {
|
||||
// Sort releases using the new version sorting system
|
||||
sort_github_releases(&mut filtered_releases);
|
||||
|
||||
// Cache the results (unless bypassing cache)
|
||||
if !no_caching {
|
||||
if let Err(e) = self.save_cached_github_releases("brave", &filtered_releases) {
|
||||
eprintln!("Failed to cache Brave releases: {e}");
|
||||
}
|
||||
if let Err(e) = self.save_cached_github_releases("brave", &filtered_releases) {
|
||||
eprintln!("Failed to cache Brave releases: {e}");
|
||||
}
|
||||
|
||||
Ok(filtered_releases)
|
||||
}
|
||||
|
||||
/// Check if a Brave release has compatible assets for the given platform and architecture
|
||||
fn has_compatible_brave_asset(
|
||||
fn has_compatible_camoufox_asset(
|
||||
&self,
|
||||
assets: &[crate::browser::GithubAsset],
|
||||
os: &str,
|
||||
arch: &str,
|
||||
) -> bool {
|
||||
let (os_name, arch_name) = match (os, arch) {
|
||||
("windows", "x64") => ("win", "x86_64"),
|
||||
("windows", "arm64") => ("win", "arm64"),
|
||||
("linux", "x64") => ("lin", "x86_64"),
|
||||
("linux", "arm64") => ("lin", "arm64"),
|
||||
("macos", "x64") => ("mac", "x86_64"),
|
||||
("macos", "arm64") => ("mac", "arm64"),
|
||||
_ => return false,
|
||||
};
|
||||
|
||||
// Look for assets matching the pattern: camoufox-{version}-{release}-{os}.{arch}.zip
|
||||
assets.iter().any(|asset| {
|
||||
let name = asset.name.to_lowercase();
|
||||
name.starts_with("camoufox-")
|
||||
&& name.contains(&format!("-{os_name}.{arch_name}.zip"))
|
||||
&& name.ends_with(".zip")
|
||||
})
|
||||
}
|
||||
|
||||
fn has_compatible_brave_asset(assets: &[crate::browser::GithubAsset], os: &str) -> bool {
|
||||
match os {
|
||||
"windows" => {
|
||||
// For Windows, look for standalone setup EXE (not the auto-updater one)
|
||||
@@ -805,12 +883,9 @@ impl ApiClient {
|
||||
}) || assets.iter().any(|asset| asset.name.ends_with(".dmg"))
|
||||
}
|
||||
"linux" => {
|
||||
// For Linux, be strict about architecture matching - only allow assets that explicitly match the current architecture
|
||||
let arch_pattern = if arch == "arm64" { "arm64" } else { "amd64" };
|
||||
|
||||
if assets.iter().any(|asset| {
|
||||
let name = asset.name.to_lowercase();
|
||||
name.contains("linux") && name.contains(arch_pattern) && name.ends_with(".zip")
|
||||
name.contains("lin")
|
||||
}) {
|
||||
return true;
|
||||
}
|
||||
@@ -874,19 +949,8 @@ impl ApiClient {
|
||||
) -> Result<Vec<BrowserRelease>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Check cache first (unless bypassing)
|
||||
if !no_caching {
|
||||
if let Some(cached_versions) = self.load_cached_versions("chromium") {
|
||||
return Ok(
|
||||
cached_versions
|
||||
.into_iter()
|
||||
.map(|version| {
|
||||
BrowserRelease {
|
||||
version: version.clone(),
|
||||
date: "".to_string(), // Cache doesn't store dates
|
||||
is_prerelease: false, // Chromium versions are generally stable builds
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
);
|
||||
if let Some(cached_releases) = self.load_cached_versions("chromium") {
|
||||
return Ok(cached_releases);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -905,23 +969,112 @@ impl ApiClient {
|
||||
}
|
||||
}
|
||||
|
||||
// Convert to BrowserRelease objects
|
||||
let releases: Vec<BrowserRelease> = versions
|
||||
.into_iter()
|
||||
.map(|version| BrowserRelease {
|
||||
version: version.clone(),
|
||||
date: "".to_string(),
|
||||
is_prerelease: false,
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Cache the results (unless bypassing cache)
|
||||
if !no_caching {
|
||||
if let Err(e) = self.save_cached_versions("chromium", &versions) {
|
||||
if let Err(e) = self.save_cached_versions("chromium", &releases) {
|
||||
eprintln!("Failed to cache Chromium versions: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
Ok(
|
||||
versions
|
||||
.into_iter()
|
||||
.map(|version| BrowserRelease {
|
||||
version: version.clone(),
|
||||
date: "".to_string(),
|
||||
is_prerelease: false,
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
Ok(releases)
|
||||
}
|
||||
|
||||
pub async fn fetch_camoufox_releases_with_caching(
|
||||
&self,
|
||||
no_caching: bool,
|
||||
) -> Result<Vec<GithubRelease>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Check cache first (unless bypassing)
|
||||
if !no_caching {
|
||||
if let Some(cached_releases) = self.load_cached_github_releases("camoufox") {
|
||||
println!(
|
||||
"Using cached Camoufox releases, count: {}",
|
||||
cached_releases.len()
|
||||
);
|
||||
return Ok(cached_releases);
|
||||
}
|
||||
}
|
||||
|
||||
println!("Fetching Camoufox releases from GitHub API");
|
||||
let base_url = format!("{}/repos/daijro/camoufox/releases", self.github_api_base);
|
||||
let releases: Vec<GithubRelease> = self.fetch_github_releases_multiple_pages(&base_url).await?;
|
||||
|
||||
println!(
|
||||
"Fetched {} total Camoufox releases from GitHub",
|
||||
releases.len()
|
||||
);
|
||||
|
||||
// Get platform info to filter appropriate releases
|
||||
let (os, arch) = Self::get_platform_info();
|
||||
println!("Filtering for platform: {os}/{arch}");
|
||||
|
||||
// Filter releases that have assets compatible with the current platform
|
||||
let mut compatible_releases: Vec<GithubRelease> = releases
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.filter_map(|(i, release)| {
|
||||
let has_compatible = self.has_compatible_camoufox_asset(&release.assets, &os, &arch);
|
||||
if !has_compatible {
|
||||
println!(
|
||||
"Release {} ({}) has no compatible assets for {}/{}",
|
||||
i, release.tag_name, os, arch
|
||||
);
|
||||
println!(
|
||||
" Available assets: {:?}",
|
||||
release.assets.iter().map(|a| &a.name).collect::<Vec<_>>()
|
||||
);
|
||||
}
|
||||
if has_compatible {
|
||||
Some(release)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
println!(
|
||||
"After platform filtering: {} compatible releases",
|
||||
compatible_releases.len()
|
||||
);
|
||||
|
||||
// Sort by version (latest first) with debugging
|
||||
println!(
|
||||
"Before sorting: {:?}",
|
||||
compatible_releases
|
||||
.iter()
|
||||
.map(|r| &r.tag_name)
|
||||
.take(10)
|
||||
.collect::<Vec<_>>()
|
||||
);
|
||||
sort_github_releases(&mut compatible_releases);
|
||||
println!(
|
||||
"After sorting: {:?}",
|
||||
compatible_releases
|
||||
.iter()
|
||||
.map(|r| &r.tag_name)
|
||||
.take(10)
|
||||
.collect::<Vec<_>>()
|
||||
);
|
||||
|
||||
// Cache the results (unless bypassing cache)
|
||||
if !no_caching {
|
||||
if let Err(e) = self.save_cached_github_releases("camoufox", &compatible_releases) {
|
||||
eprintln!("Failed to cache Camoufox releases: {e}");
|
||||
} else {
|
||||
println!("Cached {} Camoufox releases", compatible_releases.len());
|
||||
}
|
||||
}
|
||||
|
||||
Ok(compatible_releases)
|
||||
}
|
||||
|
||||
pub async fn fetch_tor_releases_with_caching(
|
||||
@@ -930,19 +1083,8 @@ impl ApiClient {
|
||||
) -> Result<Vec<BrowserRelease>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Check cache first (unless bypassing)
|
||||
if !no_caching {
|
||||
if let Some(cached_versions) = self.load_cached_versions("tor-browser") {
|
||||
return Ok(
|
||||
cached_versions
|
||||
.into_iter()
|
||||
.map(|version| {
|
||||
BrowserRelease {
|
||||
version: version.clone(),
|
||||
date: "".to_string(), // Cache doesn't store dates
|
||||
is_prerelease: is_browser_version_nightly("tor-browser", &version, None),
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
);
|
||||
if let Some(cached_releases) = self.load_cached_versions("tor-browser") {
|
||||
return Ok(cached_releases);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -998,25 +1140,24 @@ impl ApiClient {
|
||||
tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
|
||||
}
|
||||
|
||||
// Convert to BrowserRelease objects
|
||||
let releases: Vec<BrowserRelease> = version_strings
|
||||
.into_iter()
|
||||
.map(|version| BrowserRelease {
|
||||
version: version.clone(),
|
||||
date: "".to_string(), // TOR archive doesn't provide structured dates
|
||||
is_prerelease: false, // Assume all archived versions are stable
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Cache the results (unless bypassing cache)
|
||||
if !no_caching {
|
||||
if let Err(e) = self.save_cached_versions("tor-browser", &version_strings) {
|
||||
if let Err(e) = self.save_cached_versions("tor-browser", &releases) {
|
||||
eprintln!("Failed to cache TOR versions: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
Ok(
|
||||
version_strings
|
||||
.into_iter()
|
||||
.map(|version| {
|
||||
BrowserRelease {
|
||||
version: version.clone(),
|
||||
date: "".to_string(), // TOR archive doesn't provide structured dates
|
||||
is_prerelease: false, // Assume all archived versions are stable
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
Ok(releases)
|
||||
}
|
||||
|
||||
async fn check_tor_version_has_macos(
|
||||
@@ -1113,6 +1254,11 @@ impl ApiClient {
|
||||
}
|
||||
}
|
||||
|
||||
// Global singleton instance
|
||||
lazy_static::lazy_static! {
|
||||
static ref API_CLIENT: ApiClient = ApiClient::new();
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
@@ -1443,14 +1589,25 @@ mod tests {
|
||||
let mock_response = r#"[
|
||||
{
|
||||
"tag_name": "v1.81.9",
|
||||
"name": "Brave Release 1.81.9",
|
||||
"name": "Release v1.81.9 (Chromium 137.0.7151.104)",
|
||||
"prerelease": false,
|
||||
"published_at": "2024-01-15T10:00:00Z",
|
||||
"draft": false,
|
||||
"assets": [
|
||||
{
|
||||
"name": "brave-v1.81.9-universal.dmg",
|
||||
"browser_download_url": "https://example.com/brave-1.81.9-universal.dmg",
|
||||
"size": 200000000
|
||||
},
|
||||
{
|
||||
"name": "brave-browser-1.81.9-linux-amd64.zip",
|
||||
"browser_download_url": "https://example.com/brave-1.81.9-linux-amd64.zip",
|
||||
"size": 180000000
|
||||
},
|
||||
{
|
||||
"name": "BraveBrowserStandaloneSetup.exe",
|
||||
"browser_download_url": "https://example.com/brave-1.81.9-setup.exe",
|
||||
"size": 150000000
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1476,7 +1633,7 @@ mod tests {
|
||||
let releases = result.unwrap();
|
||||
assert!(!releases.is_empty());
|
||||
assert_eq!(releases[0].tag_name, "v1.81.9");
|
||||
assert!(releases[0].is_nightly);
|
||||
assert!(!releases[0].is_nightly); // "Release v1.81.9 (Chromium 137.0.7151.104)" starts with "Release" so it should be stable
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
@@ -1726,4 +1883,193 @@ mod tests {
|
||||
let result = client.fetch_zen_releases_with_caching(true).await;
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_mullvad_pagination_two_pages() {
|
||||
let server = setup_mock_server().await;
|
||||
let client = create_test_client(&server);
|
||||
|
||||
// Page 1 response with Link: rel="next" header
|
||||
let mock_page1 = r#"[
|
||||
{
|
||||
"tag_name": "100.0",
|
||||
"name": "Mullvad Browser 100.0",
|
||||
"prerelease": false,
|
||||
"published_at": "2024-07-01T00:00:00Z",
|
||||
"assets": [
|
||||
{ "name": "mullvad-browser-macos-100.0.dmg", "browser_download_url": "https://example.com/100.0.dmg", "size": 1 }
|
||||
]
|
||||
}
|
||||
]"#;
|
||||
|
||||
// Page 2 response
|
||||
let mock_page2 = r#"[
|
||||
{
|
||||
"tag_name": "99.0",
|
||||
"name": "Mullvad Browser 99.0",
|
||||
"prerelease": false,
|
||||
"published_at": "2024-06-01T00:00:00Z",
|
||||
"assets": [
|
||||
{ "name": "mullvad-browser-macos-99.0.dmg", "browser_download_url": "https://example.com/99.0.dmg", "size": 1 }
|
||||
]
|
||||
}
|
||||
]"#;
|
||||
|
||||
// Mock page 1
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/repos/mullvad/mullvad-browser/releases"))
|
||||
.and(query_param("per_page", "100"))
|
||||
.and(query_param("page", "1"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(mock_page1)
|
||||
.insert_header("content-type", "application/json")
|
||||
.insert_header(
|
||||
"link",
|
||||
format!(
|
||||
"<{}?per_page=100&page=2>; rel=\"next\", <{}?per_page=100&page=2>; rel=\"last\"",
|
||||
server.uri().to_string() + "/repos/mullvad/mullvad-browser/releases",
|
||||
server.uri().to_string() + "/repos/mullvad/mullvad-browser/releases"
|
||||
),
|
||||
),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
// Mock page 2
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/repos/mullvad/mullvad-browser/releases"))
|
||||
.and(query_param("per_page", "100"))
|
||||
.and(query_param("page", "2"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(mock_page2)
|
||||
.insert_header("content-type", "application/json"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let result = client.fetch_mullvad_releases_with_caching(true).await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let releases = result.unwrap();
|
||||
// We currently only fetch 1 page intentionally; ensure we at least got page 1
|
||||
assert_eq!(
|
||||
releases.len(),
|
||||
1,
|
||||
"Should fetch only the first page of results"
|
||||
);
|
||||
assert_eq!(releases[0].tag_name, "100.0");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_camoufox_beta_version_parsing() {
|
||||
// Test specific Camoufox beta versions that are causing issues
|
||||
let v22 = VersionComponent::parse("135.0.5beta22");
|
||||
let v24 = VersionComponent::parse("135.0.5beta24");
|
||||
|
||||
println!("v22: {v22:?}");
|
||||
println!("v24: {v24:?}");
|
||||
|
||||
// v24 should be greater than v22
|
||||
assert!(
|
||||
v24 > v22,
|
||||
"135.0.5beta24 should be greater than 135.0.5beta22"
|
||||
);
|
||||
|
||||
// Test other beta version combinations
|
||||
let v1 = VersionComponent::parse("135.0.5beta1");
|
||||
let v2 = VersionComponent::parse("135.0.5beta2");
|
||||
assert!(v2 > v1, "135.0.5beta2 should be greater than 135.0.5beta1");
|
||||
|
||||
// Test sorting of multiple versions
|
||||
let mut versions = vec![
|
||||
"135.0.5beta22".to_string(),
|
||||
"135.0.5beta24".to_string(),
|
||||
"135.0.5beta23".to_string(),
|
||||
"135.0.5beta21".to_string(),
|
||||
];
|
||||
|
||||
sort_versions(&mut versions);
|
||||
|
||||
println!("Sorted versions: {versions:?}");
|
||||
|
||||
// Should be sorted from newest to oldest
|
||||
assert_eq!(versions[0], "135.0.5beta24");
|
||||
assert_eq!(versions[1], "135.0.5beta23");
|
||||
assert_eq!(versions[2], "135.0.5beta22");
|
||||
assert_eq!(versions[3], "135.0.5beta21");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_camoufox_user_reported_versions() {
|
||||
// Test the exact versions reported by the user: 135.0.1beta24 vs 135.0beta22
|
||||
let v22 = VersionComponent::parse("135.0beta22");
|
||||
let v24 = VersionComponent::parse("135.0.1beta24");
|
||||
|
||||
println!("User reported v22: {v22:?}");
|
||||
println!("User reported v24: {v24:?}");
|
||||
|
||||
// 135.0.1beta24 should be greater than 135.0beta22 (newer patch version)
|
||||
assert!(
|
||||
v24 > v22,
|
||||
"135.0.1beta24 should be greater than 135.0beta22, but got: v24={v24:?} vs v22={v22:?}"
|
||||
);
|
||||
|
||||
// Test sorting of the exact user-reported versions
|
||||
let mut versions = vec!["135.0beta22".to_string(), "135.0.1beta24".to_string()];
|
||||
|
||||
sort_versions(&mut versions);
|
||||
|
||||
println!("User reported sorted versions: {versions:?}");
|
||||
|
||||
// Should be sorted from newest to oldest
|
||||
assert_eq!(
|
||||
versions[0], "135.0.1beta24",
|
||||
"135.0.1beta24 should be first (newest)"
|
||||
);
|
||||
assert_eq!(
|
||||
versions[1], "135.0beta22",
|
||||
"135.0beta22 should be second (older)"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_camoufox_version_classification() {
|
||||
// Test that Camoufox beta versions are now correctly classified as stable (not nightly)
|
||||
assert!(
|
||||
!is_browser_version_nightly("camoufox", "135.0beta22", None),
|
||||
"135.0beta22 should be classified as stable for Camoufox"
|
||||
);
|
||||
assert!(
|
||||
!is_browser_version_nightly("camoufox", "135.0.1beta24", None),
|
||||
"135.0.1beta24 should be classified as stable for Camoufox"
|
||||
);
|
||||
|
||||
// Test with release names too - beta releases should be stable
|
||||
assert!(
|
||||
!is_browser_version_nightly("camoufox", "135.0beta22", Some("Release Beta 22")),
|
||||
"Release with 'Beta' in name should be classified as stable for Camoufox"
|
||||
);
|
||||
|
||||
// Test that stable versions are not classified as nightly
|
||||
assert!(
|
||||
!is_browser_version_nightly("camoufox", "135.0", None),
|
||||
"135.0 should be classified as stable"
|
||||
);
|
||||
assert!(
|
||||
!is_browser_version_nightly("camoufox", "135.0.1", None),
|
||||
"135.0.1 should be classified as stable"
|
||||
);
|
||||
|
||||
// Test alpha and RC versions are still considered nightly
|
||||
assert!(
|
||||
!is_browser_version_nightly("camoufox", "136.0alpha1", None),
|
||||
"136.0alpha1 should not be classified as nightly/prerelease"
|
||||
);
|
||||
assert!(
|
||||
!is_browser_version_nightly("camoufox", "136.0rc1", None),
|
||||
"136.0rc1 should not be classified as nightly/prerelease"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
+1416
-145
File diff suppressed because it is too large
Load Diff
+303
-200
@@ -1,10 +1,12 @@
|
||||
use crate::browser_runner::{BrowserProfile, BrowserRunner};
|
||||
use crate::browser_version_service::{BrowserVersionInfo, BrowserVersionService};
|
||||
use crate::api_client::is_browser_version_nightly;
|
||||
use crate::browser_version_manager::{BrowserVersionInfo, BrowserVersionManager};
|
||||
use crate::profile::BrowserProfile;
|
||||
use crate::settings_manager::SettingsManager;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
use tauri::Emitter;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct UpdateNotification {
|
||||
@@ -27,50 +29,52 @@ pub struct AutoUpdateState {
|
||||
}
|
||||
|
||||
pub struct AutoUpdater {
|
||||
version_service: BrowserVersionService,
|
||||
browser_runner: BrowserRunner,
|
||||
settings_manager: SettingsManager,
|
||||
version_service: &'static BrowserVersionManager,
|
||||
settings_manager: &'static SettingsManager,
|
||||
}
|
||||
|
||||
impl AutoUpdater {
|
||||
pub fn new() -> Self {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
version_service: BrowserVersionService::new(),
|
||||
browser_runner: BrowserRunner::new(),
|
||||
settings_manager: SettingsManager::new(),
|
||||
version_service: BrowserVersionManager::instance(),
|
||||
settings_manager: SettingsManager::instance(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn instance() -> &'static AutoUpdater {
|
||||
&AUTO_UPDATER
|
||||
}
|
||||
|
||||
/// Check for updates for all profiles
|
||||
pub async fn check_for_updates(
|
||||
&self,
|
||||
) -> Result<Vec<UpdateNotification>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Check if auto-updates are enabled
|
||||
let settings = self
|
||||
.settings_manager
|
||||
.load_settings()
|
||||
.map_err(|e| format!("Failed to load settings: {e}"))?;
|
||||
if !settings.auto_updates_enabled {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
|
||||
let profiles = self
|
||||
.browser_runner
|
||||
.list_profiles()
|
||||
.map_err(|e| format!("Failed to list profiles: {e}"))?;
|
||||
let mut notifications = Vec::new();
|
||||
let mut browser_versions: HashMap<String, Vec<BrowserVersionInfo>> = HashMap::new();
|
||||
|
||||
// Group profiles by browser type
|
||||
// Group profiles by browser
|
||||
let profile_manager = crate::profile::ProfileManager::instance();
|
||||
let profiles = profile_manager
|
||||
.list_profiles()
|
||||
.map_err(|e| format!("Failed to list profiles: {e}"))?;
|
||||
let mut browser_profiles: HashMap<String, Vec<BrowserProfile>> = HashMap::new();
|
||||
|
||||
for profile in profiles {
|
||||
// Only check supported browsers
|
||||
if !self
|
||||
.version_service
|
||||
.is_browser_supported(&profile.browser)
|
||||
.unwrap_or(false)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
browser_profiles
|
||||
.entry(profile.browser.clone())
|
||||
.or_default()
|
||||
.push(profile);
|
||||
}
|
||||
|
||||
// Check each browser type
|
||||
for (browser, profiles) in browser_profiles {
|
||||
// Get cached versions first, then try to fetch if needed
|
||||
let versions = if let Some(cached) = self
|
||||
@@ -97,7 +101,26 @@ impl AutoUpdater {
|
||||
// Check each profile for updates
|
||||
for profile in profiles {
|
||||
if let Some(update) = self.check_profile_update(&profile, &versions)? {
|
||||
notifications.push(update);
|
||||
// Apply chromium threshold logic
|
||||
if browser == "chromium" {
|
||||
// For chromium, only show notifications if there are 400+ new versions
|
||||
let current_version = &profile.version.parse::<u32>().unwrap();
|
||||
let new_version = &update.new_version.parse::<u32>().unwrap();
|
||||
|
||||
let result = new_version - current_version;
|
||||
println!(
|
||||
"Current version: {current_version}, New version: {new_version}, Result: {result}"
|
||||
);
|
||||
if result > 400 {
|
||||
notifications.push(update);
|
||||
} else {
|
||||
println!(
|
||||
"Skipping chromium update notification: only {result} new versions (need 400+)"
|
||||
);
|
||||
}
|
||||
} else {
|
||||
notifications.push(update);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -105,6 +128,93 @@ impl AutoUpdater {
|
||||
Ok(notifications)
|
||||
}
|
||||
|
||||
pub async fn check_for_updates_with_progress(&self, app_handle: &tauri::AppHandle) {
|
||||
println!("Starting auto-update check with progress...");
|
||||
|
||||
// Check for browser updates and trigger auto-downloads
|
||||
match self.check_for_updates().await {
|
||||
Ok(update_notifications) => {
|
||||
if !update_notifications.is_empty() {
|
||||
println!(
|
||||
"Found {} browser updates to auto-download",
|
||||
update_notifications.len()
|
||||
);
|
||||
|
||||
// Trigger automatic downloads for each update
|
||||
for notification in update_notifications {
|
||||
println!(
|
||||
"Auto-downloading {} version {}",
|
||||
notification.browser, notification.new_version
|
||||
);
|
||||
|
||||
// Clone app_handle for the async task
|
||||
let app_handle_clone = app_handle.clone();
|
||||
let browser = notification.browser.clone();
|
||||
let new_version = notification.new_version.clone();
|
||||
let notification_id = notification.id.clone();
|
||||
let affected_profiles = notification.affected_profiles.clone();
|
||||
|
||||
// Spawn async task to handle the download and auto-update
|
||||
tokio::spawn(async move {
|
||||
// First, check if browser already exists
|
||||
match crate::browser_runner::is_browser_downloaded(
|
||||
browser.clone(),
|
||||
new_version.clone(),
|
||||
) {
|
||||
true => {
|
||||
println!("Browser {browser} {new_version} already downloaded, proceeding to auto-update profiles");
|
||||
|
||||
// Browser already exists, go straight to profile update
|
||||
match crate::auto_updater::complete_browser_update_with_auto_update(
|
||||
browser.clone(),
|
||||
new_version.clone(),
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(updated_profiles) => {
|
||||
println!(
|
||||
"Auto-update completed for {} profiles: {:?}",
|
||||
updated_profiles.len(),
|
||||
updated_profiles
|
||||
);
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Failed to complete auto-update for {browser}: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
false => {
|
||||
println!("Downloading browser {browser} version {new_version}...");
|
||||
|
||||
// Emit the auto-update event to trigger frontend handling
|
||||
let auto_update_event = serde_json::json!({
|
||||
"browser": browser,
|
||||
"new_version": new_version,
|
||||
"notification_id": notification_id,
|
||||
"affected_profiles": affected_profiles
|
||||
});
|
||||
|
||||
if let Err(e) =
|
||||
app_handle_clone.emit("browser-auto-update-available", &auto_update_event)
|
||||
{
|
||||
eprintln!("Failed to emit auto-update event for {browser}: {e}");
|
||||
} else {
|
||||
println!("Emitted auto-update event for {browser}");
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
} else {
|
||||
println!("No browser updates needed");
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Failed to check for browser updates: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if a specific profile has an available update
|
||||
fn check_profile_update(
|
||||
&self,
|
||||
@@ -112,16 +222,15 @@ impl AutoUpdater {
|
||||
available_versions: &[BrowserVersionInfo],
|
||||
) -> Result<Option<UpdateNotification>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let current_version = &profile.version;
|
||||
let is_current_stable = !self.is_nightly_version(current_version);
|
||||
let is_current_nightly = is_browser_version_nightly(&profile.browser, current_version, None);
|
||||
|
||||
// Find the best available update
|
||||
let best_update = available_versions
|
||||
.iter()
|
||||
.filter(|v| {
|
||||
// Only consider versions newer than current
|
||||
self.is_version_newer(&v.version, current_version) &&
|
||||
// Respect version type preference
|
||||
is_current_stable != v.is_prerelease
|
||||
self.is_version_newer(&v.version, current_version)
|
||||
&& is_browser_version_nightly(&profile.browser, &v.version, None) == is_current_nightly
|
||||
})
|
||||
.max_by(|a, b| self.compare_versions(&a.version, &b.version));
|
||||
|
||||
@@ -181,51 +290,14 @@ impl AutoUpdater {
|
||||
result
|
||||
}
|
||||
|
||||
/// Mark download as auto-update
|
||||
pub fn mark_auto_update_download(
|
||||
&self,
|
||||
browser: &str,
|
||||
version: &str,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let mut state = self.load_auto_update_state()?;
|
||||
let download_key = format!("{browser}-{version}");
|
||||
state.auto_update_downloads.insert(download_key);
|
||||
self.save_auto_update_state(&state)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Remove auto-update download tracking
|
||||
pub fn remove_auto_update_download(
|
||||
&self,
|
||||
browser: &str,
|
||||
version: &str,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let mut state = self.load_auto_update_state()?;
|
||||
let download_key = format!("{browser}-{version}");
|
||||
state.auto_update_downloads.remove(&download_key);
|
||||
self.save_auto_update_state(&state)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Check if download is marked as auto-update
|
||||
pub fn is_auto_update_download(
|
||||
&self,
|
||||
browser: &str,
|
||||
version: &str,
|
||||
) -> Result<bool, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let state = self.load_auto_update_state()?;
|
||||
let download_key = format!("{browser}-{version}");
|
||||
Ok(state.auto_update_downloads.contains(&download_key))
|
||||
}
|
||||
|
||||
/// Automatically update all affected profile versions after browser download
|
||||
pub async fn auto_update_profile_versions(
|
||||
&self,
|
||||
browser: &str,
|
||||
new_version: &str,
|
||||
) -> Result<Vec<String>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let profiles = self
|
||||
.browser_runner
|
||||
let profile_manager = crate::profile::ProfileManager::instance();
|
||||
let profiles = profile_manager
|
||||
.list_profiles()
|
||||
.map_err(|e| format!("Failed to list profiles: {e}"))?;
|
||||
|
||||
@@ -242,10 +314,7 @@ impl AutoUpdater {
|
||||
// Check if this is an update (newer version)
|
||||
if self.is_version_newer(new_version, &profile.version) {
|
||||
// Update the profile version
|
||||
match self
|
||||
.browser_runner
|
||||
.update_profile_version(&profile.name, new_version)
|
||||
{
|
||||
match profile_manager.update_profile_version(&profile.name, new_version) {
|
||||
Ok(_) => {
|
||||
updated_profiles.push(profile.name);
|
||||
}
|
||||
@@ -278,16 +347,9 @@ impl AutoUpdater {
|
||||
state.auto_update_downloads.remove(&download_key);
|
||||
self.save_auto_update_state(&state)?;
|
||||
|
||||
// Check if auto-delete of unused binaries is enabled and perform cleanup
|
||||
let settings = self
|
||||
.settings_manager
|
||||
.load_settings()
|
||||
.map_err(|e| format!("Failed to load settings: {e}"))?;
|
||||
if settings.auto_delete_unused_binaries {
|
||||
// Perform cleanup in the background - don't fail the update if cleanup fails
|
||||
if let Err(e) = self.cleanup_unused_binaries_internal() {
|
||||
eprintln!("Warning: Failed to cleanup unused binaries after auto-update: {e}");
|
||||
}
|
||||
// Always perform cleanup after auto-update - don't fail the update if cleanup fails
|
||||
if let Err(e) = self.cleanup_unused_binaries_internal() {
|
||||
eprintln!("Warning: Failed to cleanup unused binaries after auto-update: {e}");
|
||||
}
|
||||
|
||||
Ok(updated_profiles)
|
||||
@@ -298,21 +360,23 @@ impl AutoUpdater {
|
||||
&self,
|
||||
) -> Result<Vec<String>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Load current profiles
|
||||
let profiles = self
|
||||
.browser_runner
|
||||
let profile_manager = crate::profile::ProfileManager::instance();
|
||||
let profiles = profile_manager
|
||||
.list_profiles()
|
||||
.map_err(|e| format!("Failed to load profiles: {e}"))?;
|
||||
|
||||
// Load registry
|
||||
let mut registry = crate::downloaded_browsers::DownloadedBrowsersRegistry::load()
|
||||
.map_err(|e| format!("Failed to load browser registry: {e}"))?;
|
||||
// Get registry instance
|
||||
let registry = crate::downloaded_browsers::DownloadedBrowsersRegistry::instance();
|
||||
|
||||
// Get active browser versions
|
||||
// Get active browser versions (all profiles)
|
||||
let active_versions = registry.get_active_browser_versions(&profiles);
|
||||
|
||||
// Cleanup unused binaries
|
||||
// Get running browser versions (only running profiles)
|
||||
let running_versions = registry.get_running_browser_versions(&profiles);
|
||||
|
||||
// Cleanup unused binaries (but keep running ones)
|
||||
let cleaned_up = registry
|
||||
.cleanup_unused_binaries(&active_versions)
|
||||
.cleanup_unused_binaries(&active_versions, &running_versions)
|
||||
.map_err(|e| format!("Failed to cleanup unused binaries: {e}"))?;
|
||||
|
||||
// Save updated registry
|
||||
@@ -343,31 +407,18 @@ impl AutoUpdater {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Helper methods
|
||||
|
||||
fn is_nightly_version(&self, version: &str) -> bool {
|
||||
// Use the centralized nightly detection function
|
||||
// Since we don't have browser context here, use the general fallback
|
||||
crate::api_client::is_nightly_version(version)
|
||||
}
|
||||
|
||||
fn is_version_newer(&self, version1: &str, version2: &str) -> bool {
|
||||
self.compare_versions(version1, version2) == std::cmp::Ordering::Greater
|
||||
// Use the proper VersionComponent comparison from api_client.rs
|
||||
let version_a = crate::api_client::VersionComponent::parse(version1);
|
||||
let version_b = crate::api_client::VersionComponent::parse(version2);
|
||||
version_a > version_b
|
||||
}
|
||||
|
||||
fn compare_versions(&self, version1: &str, version2: &str) -> std::cmp::Ordering {
|
||||
// Basic semantic version comparison
|
||||
let v1_parts = self.parse_version(version1);
|
||||
let v2_parts = self.parse_version(version2);
|
||||
|
||||
v1_parts.cmp(&v2_parts)
|
||||
}
|
||||
|
||||
fn parse_version(&self, version: &str) -> Vec<u32> {
|
||||
version
|
||||
.split(&['.', 'a', 'b', '-', '_'][..])
|
||||
.filter_map(|part| part.parse::<u32>().ok())
|
||||
.collect()
|
||||
// Use the proper VersionComponent comparison from api_client.rs
|
||||
let version_a = crate::api_client::VersionComponent::parse(version1);
|
||||
let version_b = crate::api_client::VersionComponent::parse(version2);
|
||||
version_a.cmp(&version_b)
|
||||
}
|
||||
|
||||
fn get_auto_update_state_file(&self) -> PathBuf {
|
||||
@@ -377,7 +428,7 @@ impl AutoUpdater {
|
||||
.join("auto_update_state.json")
|
||||
}
|
||||
|
||||
fn load_auto_update_state(
|
||||
pub fn load_auto_update_state(
|
||||
&self,
|
||||
) -> Result<AutoUpdateState, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let state_file = self.get_auto_update_state_file();
|
||||
@@ -391,7 +442,7 @@ impl AutoUpdater {
|
||||
Ok(state)
|
||||
}
|
||||
|
||||
fn save_auto_update_state(
|
||||
pub fn save_auto_update_state(
|
||||
&self,
|
||||
state: &AutoUpdateState,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
@@ -410,7 +461,7 @@ impl AutoUpdater {
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn check_for_browser_updates() -> Result<Vec<UpdateNotification>, String> {
|
||||
let updater = AutoUpdater::new();
|
||||
let updater = AutoUpdater::instance();
|
||||
let notifications = updater
|
||||
.check_for_updates()
|
||||
.await
|
||||
@@ -421,7 +472,7 @@ pub async fn check_for_browser_updates() -> Result<Vec<UpdateNotification>, Stri
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn is_browser_disabled_for_update(browser: String) -> Result<bool, String> {
|
||||
let updater = AutoUpdater::new();
|
||||
let updater = AutoUpdater::instance();
|
||||
updater
|
||||
.is_browser_disabled(&browser)
|
||||
.map_err(|e| format!("Failed to check browser status: {e}"))
|
||||
@@ -429,7 +480,7 @@ pub async fn is_browser_disabled_for_update(browser: String) -> Result<bool, Str
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn dismiss_update_notification(notification_id: String) -> Result<(), String> {
|
||||
let updater = AutoUpdater::new();
|
||||
let updater = AutoUpdater::instance();
|
||||
updater
|
||||
.dismiss_update_notification(¬ification_id)
|
||||
.map_err(|e| format!("Failed to dismiss notification: {e}"))
|
||||
@@ -440,7 +491,7 @@ pub async fn complete_browser_update_with_auto_update(
|
||||
browser: String,
|
||||
new_version: String,
|
||||
) -> Result<Vec<String>, String> {
|
||||
let updater = AutoUpdater::new();
|
||||
let updater = AutoUpdater::instance();
|
||||
updater
|
||||
.complete_browser_update_with_auto_update(&browser, &new_version)
|
||||
.await
|
||||
@@ -448,27 +499,9 @@ pub async fn complete_browser_update_with_auto_update(
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn mark_auto_update_download(browser: String, version: String) -> Result<(), String> {
|
||||
let updater = AutoUpdater::new();
|
||||
updater
|
||||
.mark_auto_update_download(&browser, &version)
|
||||
.map_err(|e| format!("Failed to mark auto-update download: {e}"))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn remove_auto_update_download(browser: String, version: String) -> Result<(), String> {
|
||||
let updater = AutoUpdater::new();
|
||||
updater
|
||||
.remove_auto_update_download(&browser, &version)
|
||||
.map_err(|e| format!("Failed to remove auto-update download: {e}"))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn is_auto_update_download(browser: String, version: String) -> Result<bool, String> {
|
||||
let updater = AutoUpdater::new();
|
||||
updater
|
||||
.is_auto_update_download(&browser, &version)
|
||||
.map_err(|e| format!("Failed to check auto-update download: {e}"))
|
||||
pub async fn check_for_updates_with_progress(app_handle: tauri::AppHandle) {
|
||||
let updater = AutoUpdater::instance();
|
||||
updater.check_for_updates_with_progress(&app_handle).await;
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -477,13 +510,16 @@ mod tests {
|
||||
|
||||
fn create_test_profile(name: &str, browser: &str, version: &str) -> BrowserProfile {
|
||||
BrowserProfile {
|
||||
id: uuid::Uuid::new_v4(),
|
||||
name: name.to_string(),
|
||||
browser: browser.to_string(),
|
||||
version: version.to_string(),
|
||||
profile_path: format!("/tmp/{name}"),
|
||||
process_id: None,
|
||||
proxy: None,
|
||||
proxy_id: None,
|
||||
last_launch: None,
|
||||
release_type: "stable".to_string(),
|
||||
camoufox_config: None,
|
||||
group_id: None,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -495,24 +531,9 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_nightly_version() {
|
||||
let updater = AutoUpdater::new();
|
||||
|
||||
assert!(updater.is_nightly_version("1.0.0-alpha"));
|
||||
assert!(updater.is_nightly_version("1.0.0-beta"));
|
||||
assert!(updater.is_nightly_version("1.0.0-rc"));
|
||||
assert!(updater.is_nightly_version("1.0.0a1"));
|
||||
assert!(updater.is_nightly_version("1.0.0b1"));
|
||||
assert!(updater.is_nightly_version("1.0.0-dev"));
|
||||
|
||||
assert!(!updater.is_nightly_version("1.0.0"));
|
||||
assert!(!updater.is_nightly_version("1.2.3"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_compare_versions() {
|
||||
let updater = AutoUpdater::new();
|
||||
let updater = AutoUpdater::instance();
|
||||
|
||||
assert_eq!(
|
||||
updater.compare_versions("1.0.0", "1.0.0"),
|
||||
@@ -538,7 +559,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_is_version_newer() {
|
||||
let updater = AutoUpdater::new();
|
||||
let updater = AutoUpdater::instance();
|
||||
|
||||
assert!(updater.is_version_newer("1.0.1", "1.0.0"));
|
||||
assert!(updater.is_version_newer("2.0.0", "1.9.9"));
|
||||
@@ -546,9 +567,71 @@ mod tests {
|
||||
assert!(!updater.is_version_newer("1.0.0", "1.0.0"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_camoufox_beta_version_comparison() {
|
||||
let updater = AutoUpdater::instance();
|
||||
|
||||
// Test the exact user-reported scenario: 135.0.1beta24 vs 135.0beta22
|
||||
assert!(
|
||||
updater.is_version_newer("135.0.1beta24", "135.0beta22"),
|
||||
"135.0.1beta24 should be newer than 135.0beta22"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
updater.compare_versions("135.0.1beta24", "135.0beta22"),
|
||||
std::cmp::Ordering::Greater,
|
||||
"135.0.1beta24 should compare as greater than 135.0beta22"
|
||||
);
|
||||
|
||||
// Test other camoufox beta version combinations
|
||||
assert!(
|
||||
updater.is_version_newer("135.0.5beta24", "135.0.5beta22"),
|
||||
"135.0.5beta24 should be newer than 135.0.5beta22"
|
||||
);
|
||||
|
||||
assert!(
|
||||
updater.is_version_newer("135.0.1beta1", "135.0beta1"),
|
||||
"135.0.1beta1 should be newer than 135.0beta1 due to patch version"
|
||||
);
|
||||
|
||||
// Test that older versions are not considered newer
|
||||
assert!(
|
||||
!updater.is_version_newer("135.0beta22", "135.0.1beta24"),
|
||||
"135.0beta22 should NOT be newer than 135.0.1beta24"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_beta_version_ordering_comprehensive() {
|
||||
let updater = AutoUpdater::instance();
|
||||
|
||||
// Test various beta version patterns that could appear in camoufox
|
||||
let test_cases = vec![
|
||||
("135.0.1beta24", "135.0beta22", true), // User reported case
|
||||
("135.0.5beta24", "135.0.5beta22", true), // Same patch, different beta
|
||||
("135.1beta1", "135.0beta99", true), // Higher minor beats beta number
|
||||
("136.0beta1", "135.9.9beta99", true), // Higher major beats everything
|
||||
("135.0.1beta1", "135.0beta1", true), // Patch version matters
|
||||
("135.0beta22", "135.0.1beta24", false), // Reverse of user case
|
||||
];
|
||||
|
||||
for (newer, older, should_be_newer) in test_cases {
|
||||
let result = updater.is_version_newer(newer, older);
|
||||
assert_eq!(
|
||||
result,
|
||||
should_be_newer,
|
||||
"Expected {} {} {} but got {}",
|
||||
newer,
|
||||
if should_be_newer { ">" } else { "<=" },
|
||||
older,
|
||||
if result { "true" } else { "false" }
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_check_profile_update_stable_to_stable() {
|
||||
let updater = AutoUpdater::new();
|
||||
let updater = AutoUpdater::instance();
|
||||
let profile = create_test_profile("test", "firefox", "1.0.0");
|
||||
let versions = vec![
|
||||
create_test_version_info("1.0.1", false), // stable, newer
|
||||
@@ -566,7 +649,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_check_profile_update_alpha_to_alpha() {
|
||||
let updater = AutoUpdater::new();
|
||||
let updater = AutoUpdater::instance();
|
||||
let profile = create_test_profile("test", "firefox", "1.0.0-alpha");
|
||||
let versions = vec![
|
||||
create_test_version_info("1.0.1", false), // stable, should be included
|
||||
@@ -585,7 +668,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_check_profile_update_no_update_available() {
|
||||
let updater = AutoUpdater::new();
|
||||
let updater = AutoUpdater::instance();
|
||||
let profile = create_test_profile("test", "firefox", "1.0.0");
|
||||
let versions = vec![
|
||||
create_test_version_info("0.9.0", false), // older
|
||||
@@ -598,7 +681,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_group_update_notifications() {
|
||||
let updater = AutoUpdater::new();
|
||||
let updater = AutoUpdater::instance();
|
||||
let notifications = vec![
|
||||
UpdateNotification {
|
||||
id: "firefox_1.0.0_to_1.1.0_profile1".to_string(),
|
||||
@@ -696,13 +779,15 @@ mod tests {
|
||||
let state_file = test_settings_manager
|
||||
.get_settings_dir()
|
||||
.join("auto_update_state.json");
|
||||
std::fs::create_dir_all(test_settings_manager.get_settings_dir()).unwrap();
|
||||
let json = serde_json::to_string_pretty(&state).unwrap();
|
||||
std::fs::write(&state_file, json).unwrap();
|
||||
std::fs::create_dir_all(test_settings_manager.get_settings_dir())
|
||||
.expect("Failed to create settings directory");
|
||||
let json = serde_json::to_string_pretty(&state).expect("Failed to serialize state");
|
||||
std::fs::write(&state_file, json).expect("Failed to write state file");
|
||||
|
||||
// Load state
|
||||
let content = std::fs::read_to_string(&state_file).unwrap();
|
||||
let loaded_state: AutoUpdateState = serde_json::from_str(&content).unwrap();
|
||||
let content = std::fs::read_to_string(&state_file).expect("Failed to read state file");
|
||||
let loaded_state: AutoUpdateState =
|
||||
serde_json::from_str(&content).expect("Failed to deserialize state");
|
||||
|
||||
assert_eq!(loaded_state.disabled_browsers.len(), 1);
|
||||
assert!(loaded_state.disabled_browsers.contains("firefox"));
|
||||
@@ -740,11 +825,15 @@ mod tests {
|
||||
let state_file = test_settings_manager
|
||||
.get_settings_dir()
|
||||
.join("auto_update_state.json");
|
||||
std::fs::create_dir_all(test_settings_manager.get_settings_dir()).unwrap();
|
||||
std::fs::create_dir_all(test_settings_manager.get_settings_dir())
|
||||
.expect("Failed to create settings directory");
|
||||
|
||||
// Initially not disabled (empty state file means default state)
|
||||
let state = AutoUpdateState::default();
|
||||
assert!(!state.disabled_browsers.contains("firefox"));
|
||||
assert!(
|
||||
!state.disabled_browsers.contains("firefox"),
|
||||
"Firefox should not be disabled initially"
|
||||
);
|
||||
|
||||
// Start update (should disable)
|
||||
let mut state = AutoUpdateState::default();
|
||||
@@ -752,27 +841,41 @@ mod tests {
|
||||
state
|
||||
.auto_update_downloads
|
||||
.insert("firefox-1.1.0".to_string());
|
||||
let json = serde_json::to_string_pretty(&state).unwrap();
|
||||
std::fs::write(&state_file, json).unwrap();
|
||||
let json = serde_json::to_string_pretty(&state).expect("Failed to serialize state");
|
||||
std::fs::write(&state_file, json).expect("Failed to write state file");
|
||||
|
||||
// Check that it's disabled
|
||||
let content = std::fs::read_to_string(&state_file).unwrap();
|
||||
let loaded_state: AutoUpdateState = serde_json::from_str(&content).unwrap();
|
||||
assert!(loaded_state.disabled_browsers.contains("firefox"));
|
||||
assert!(loaded_state.auto_update_downloads.contains("firefox-1.1.0"));
|
||||
let content = std::fs::read_to_string(&state_file).expect("Failed to read state file");
|
||||
let loaded_state: AutoUpdateState =
|
||||
serde_json::from_str(&content).expect("Failed to deserialize state");
|
||||
assert!(
|
||||
loaded_state.disabled_browsers.contains("firefox"),
|
||||
"Firefox should be disabled"
|
||||
);
|
||||
assert!(
|
||||
loaded_state.auto_update_downloads.contains("firefox-1.1.0"),
|
||||
"Firefox download should be tracked"
|
||||
);
|
||||
|
||||
// Complete update (should enable)
|
||||
let mut state = loaded_state;
|
||||
state.disabled_browsers.remove("firefox");
|
||||
state.auto_update_downloads.remove("firefox-1.1.0");
|
||||
let json = serde_json::to_string_pretty(&state).unwrap();
|
||||
std::fs::write(&state_file, json).unwrap();
|
||||
let json = serde_json::to_string_pretty(&state).expect("Failed to serialize final state");
|
||||
std::fs::write(&state_file, json).expect("Failed to write final state file");
|
||||
|
||||
// Check that it's enabled again
|
||||
let content = std::fs::read_to_string(&state_file).unwrap();
|
||||
let final_state: AutoUpdateState = serde_json::from_str(&content).unwrap();
|
||||
assert!(!final_state.disabled_browsers.contains("firefox"));
|
||||
assert!(!final_state.auto_update_downloads.contains("firefox-1.1.0"));
|
||||
let content = std::fs::read_to_string(&state_file).expect("Failed to read final state file");
|
||||
let final_state: AutoUpdateState =
|
||||
serde_json::from_str(&content).expect("Failed to deserialize final state");
|
||||
assert!(
|
||||
!final_state.disabled_browsers.contains("firefox"),
|
||||
"Firefox should be enabled again"
|
||||
);
|
||||
assert!(
|
||||
!final_state.auto_update_downloads.contains("firefox-1.1.0"),
|
||||
"Firefox download should not be tracked anymore"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -780,7 +883,7 @@ mod tests {
|
||||
use tempfile::TempDir;
|
||||
|
||||
// Create a temporary directory for testing
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp directory");
|
||||
|
||||
// Create a mock settings manager that uses the temp directory
|
||||
struct TestSettingsManager {
|
||||
@@ -814,31 +917,31 @@ mod tests {
|
||||
let state_file = test_settings_manager
|
||||
.get_settings_dir()
|
||||
.join("auto_update_state.json");
|
||||
std::fs::create_dir_all(test_settings_manager.get_settings_dir()).unwrap();
|
||||
let json = serde_json::to_string_pretty(&state).unwrap();
|
||||
std::fs::write(&state_file, json).unwrap();
|
||||
std::fs::create_dir_all(test_settings_manager.get_settings_dir())
|
||||
.expect("Failed to create settings directory");
|
||||
let json = serde_json::to_string_pretty(&state).expect("Failed to serialize initial state");
|
||||
std::fs::write(&state_file, json).expect("Failed to write initial state file");
|
||||
|
||||
// Dismiss notification (remove from pending updates)
|
||||
state
|
||||
.pending_updates
|
||||
.retain(|n| n.id != "test_notification");
|
||||
let json = serde_json::to_string_pretty(&state).unwrap();
|
||||
std::fs::write(&state_file, json).unwrap();
|
||||
let json = serde_json::to_string_pretty(&state).expect("Failed to serialize updated state");
|
||||
std::fs::write(&state_file, json).expect("Failed to write updated state file");
|
||||
|
||||
// Check that it's removed
|
||||
let content = std::fs::read_to_string(&state_file).unwrap();
|
||||
let loaded_state: AutoUpdateState = serde_json::from_str(&content).unwrap();
|
||||
assert_eq!(loaded_state.pending_updates.len(), 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_version() {
|
||||
let updater = AutoUpdater::new();
|
||||
|
||||
assert_eq!(updater.parse_version("1.2.3"), vec![1, 2, 3]);
|
||||
assert_eq!(updater.parse_version("1.2.3-alpha"), vec![1, 2, 3]);
|
||||
assert_eq!(updater.parse_version("1.2.3a1"), vec![1, 2, 3, 1]);
|
||||
assert_eq!(updater.parse_version("1.2.3b2"), vec![1, 2, 3, 2]);
|
||||
assert_eq!(updater.parse_version("10.0.0"), vec![10, 0, 0]);
|
||||
let content = std::fs::read_to_string(&state_file).expect("Failed to read updated state file");
|
||||
let loaded_state: AutoUpdateState =
|
||||
serde_json::from_str(&content).expect("Failed to deserialize updated state");
|
||||
assert_eq!(
|
||||
loaded_state.pending_updates.len(),
|
||||
0,
|
||||
"Pending updates should be empty after dismissal"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Global singleton instance
|
||||
lazy_static::lazy_static! {
|
||||
static ref AUTO_UPDATER: AutoUpdater = AutoUpdater::new();
|
||||
}
|
||||
|
||||
+396
-127
@@ -1,14 +1,13 @@
|
||||
use base64::{engine::general_purpose, Engine as _};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ProxySettings {
|
||||
pub enabled: bool,
|
||||
pub proxy_type: String, // "http", "https", "socks4", or "socks5"
|
||||
pub host: String,
|
||||
pub port: u16,
|
||||
pub username: Option<String>,
|
||||
pub password: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
||||
@@ -20,6 +19,7 @@ pub enum BrowserType {
|
||||
Brave,
|
||||
Zen,
|
||||
TorBrowser,
|
||||
Camoufox,
|
||||
}
|
||||
|
||||
impl BrowserType {
|
||||
@@ -32,6 +32,7 @@ impl BrowserType {
|
||||
BrowserType::Brave => "brave",
|
||||
BrowserType::Zen => "zen",
|
||||
BrowserType::TorBrowser => "tor-browser",
|
||||
BrowserType::Camoufox => "camoufox",
|
||||
}
|
||||
}
|
||||
|
||||
@@ -44,6 +45,7 @@ impl BrowserType {
|
||||
"brave" => Ok(BrowserType::Brave),
|
||||
"zen" => Ok(BrowserType::Zen),
|
||||
"tor-browser" => Ok(BrowserType::TorBrowser),
|
||||
"camoufox" => Ok(BrowserType::Camoufox),
|
||||
_ => Err(format!("Unknown browser type: {s}")),
|
||||
}
|
||||
}
|
||||
@@ -54,7 +56,7 @@ pub trait Browser: Send + Sync {
|
||||
fn create_launch_args(
|
||||
&self,
|
||||
profile_path: &str,
|
||||
_proxy_settings: Option<&ProxySettings>,
|
||||
proxy_settings: Option<&ProxySettings>,
|
||||
url: Option<String>,
|
||||
) -> Result<Vec<String>, Box<dyn std::error::Error>>;
|
||||
fn is_version_downloaded(&self, version: &str, binaries_dir: &Path) -> bool;
|
||||
@@ -90,6 +92,7 @@ mod macos {
|
||||
|| name.starts_with("mullvad")
|
||||
|| name.starts_with("zen")
|
||||
|| name.starts_with("tor")
|
||||
|| name.starts_with("camoufox")
|
||||
|| name.contains("Browser")
|
||||
})
|
||||
.map(|entry| entry.path())
|
||||
@@ -165,17 +168,25 @@ mod linux {
|
||||
install_dir: &Path,
|
||||
browser_type: &BrowserType,
|
||||
) -> Result<PathBuf, Box<dyn std::error::Error>> {
|
||||
// Expected structure: install_dir/<browser>/<binary>
|
||||
// Expected structure examples:
|
||||
// - Firefox/Firefox Developer on Linux often extract to: install_dir/firefox/firefox
|
||||
// - Some archives may extract directly under: install_dir/firefox or install_dir/firefox-bin
|
||||
// - For some flavors we may have: install_dir/<browser_type>/<binary>
|
||||
let browser_subdir = install_dir.join(browser_type.as_str());
|
||||
|
||||
// Try firefox first (preferred), then firefox-bin
|
||||
// Try common firefox executable locations (nested and flat)
|
||||
let possible_executables = match browser_type {
|
||||
BrowserType::Firefox | BrowserType::FirefoxDeveloper => {
|
||||
vec![
|
||||
browser_subdir.join("firefox"),
|
||||
browser_subdir.join("firefox-bin"),
|
||||
]
|
||||
}
|
||||
BrowserType::Firefox | BrowserType::FirefoxDeveloper => vec![
|
||||
// Nested "firefox/firefox" or "firefox/firefox-bin"
|
||||
install_dir.join("firefox").join("firefox"),
|
||||
install_dir.join("firefox").join("firefox-bin"),
|
||||
// Flat under version directory
|
||||
install_dir.join("firefox"),
|
||||
install_dir.join("firefox-bin"),
|
||||
// Under a subdirectory matching the browser type
|
||||
browser_subdir.join("firefox"),
|
||||
browser_subdir.join("firefox-bin"),
|
||||
],
|
||||
BrowserType::MullvadBrowser => {
|
||||
vec![
|
||||
browser_subdir.join("firefox"),
|
||||
@@ -188,9 +199,20 @@ mod linux {
|
||||
}
|
||||
BrowserType::TorBrowser => {
|
||||
vec![
|
||||
browser_subdir.join("firefox"),
|
||||
// Common Tor Browser launchers
|
||||
browser_subdir.join("tor-browser"),
|
||||
// Firefox-based binaries
|
||||
browser_subdir.join("firefox"),
|
||||
browser_subdir.join("firefox-bin"),
|
||||
// Sometimes packaged similarly to Firefox
|
||||
install_dir.join("firefox").join("firefox"),
|
||||
install_dir.join("firefox").join("firefox-bin"),
|
||||
]
|
||||
}
|
||||
BrowserType::Camoufox => {
|
||||
vec![
|
||||
install_dir.join("camoufox-bin"),
|
||||
install_dir.join("camoufox"),
|
||||
]
|
||||
}
|
||||
_ => vec![],
|
||||
@@ -204,9 +226,9 @@ mod linux {
|
||||
|
||||
Err(
|
||||
format!(
|
||||
"Firefox executable not found in {}/{}",
|
||||
"Executable not found for {} in {}",
|
||||
browser_type.as_str(),
|
||||
install_dir.display(),
|
||||
browser_type.as_str()
|
||||
)
|
||||
.into(),
|
||||
)
|
||||
@@ -216,17 +238,13 @@ mod linux {
|
||||
install_dir: &Path,
|
||||
browser_type: &BrowserType,
|
||||
) -> Result<PathBuf, Box<dyn std::error::Error>> {
|
||||
// Expected structure: install_dir/<browser>/<binary>
|
||||
let browser_subdir = install_dir.join(browser_type.as_str());
|
||||
|
||||
let possible_executables = match browser_type {
|
||||
BrowserType::Chromium => vec![
|
||||
browser_subdir.join("chromium"),
|
||||
browser_subdir.join("chrome"),
|
||||
],
|
||||
BrowserType::Chromium => vec![install_dir.join("chromium"), install_dir.join("chrome")],
|
||||
BrowserType::Brave => vec![
|
||||
browser_subdir.join("brave"),
|
||||
browser_subdir.join("brave-browser"),
|
||||
install_dir.join("brave"),
|
||||
install_dir.join("brave-browser"),
|
||||
install_dir.join("brave-browser-nightly"),
|
||||
install_dir.join("brave-browser-beta"),
|
||||
],
|
||||
_ => vec![],
|
||||
};
|
||||
@@ -248,18 +266,21 @@ mod linux {
|
||||
}
|
||||
|
||||
pub fn is_firefox_version_downloaded(install_dir: &Path, browser_type: &BrowserType) -> bool {
|
||||
// Expected structure: install_dir/<browser>/<binary>
|
||||
// Expected structure (most common):
|
||||
// install_dir/<browser>/<binary>
|
||||
// However, Firefox Developer tarballs often extract to a "firefox" subfolder
|
||||
// rather than "firefox-developer". Support both layouts.
|
||||
let browser_subdir = install_dir.join(browser_type.as_str());
|
||||
|
||||
if !browser_subdir.exists() || !browser_subdir.is_dir() {
|
||||
return false;
|
||||
}
|
||||
|
||||
let possible_executables = match browser_type {
|
||||
BrowserType::Firefox | BrowserType::FirefoxDeveloper => {
|
||||
vec![
|
||||
// Preferred: executable inside a subdirectory named after the browser type
|
||||
browser_subdir.join("firefox-bin"),
|
||||
browser_subdir.join("firefox"),
|
||||
// Fallback: executable inside a generic "firefox" subdirectory
|
||||
install_dir.join("firefox").join("firefox-bin"),
|
||||
install_dir.join("firefox").join("firefox"),
|
||||
]
|
||||
}
|
||||
BrowserType::MullvadBrowser => {
|
||||
@@ -279,6 +300,12 @@ mod linux {
|
||||
browser_subdir.join("firefox"),
|
||||
]
|
||||
}
|
||||
BrowserType::Camoufox => {
|
||||
vec![
|
||||
install_dir.join("camoufox-bin"),
|
||||
install_dir.join("camoufox"),
|
||||
]
|
||||
}
|
||||
_ => vec![],
|
||||
};
|
||||
|
||||
@@ -292,23 +319,13 @@ mod linux {
|
||||
}
|
||||
|
||||
pub fn is_chromium_version_downloaded(install_dir: &Path, browser_type: &BrowserType) -> bool {
|
||||
// Expected structure: install_dir/<browser>/<binary>
|
||||
let browser_subdir = install_dir.join(browser_type.as_str());
|
||||
|
||||
if !browser_subdir.exists() || !browser_subdir.is_dir() {
|
||||
return false;
|
||||
}
|
||||
|
||||
let possible_executables = match browser_type {
|
||||
BrowserType::Chromium => vec![
|
||||
browser_subdir.join("chromium"),
|
||||
browser_subdir.join("chrome"),
|
||||
],
|
||||
BrowserType::Chromium => vec![install_dir.join("chromium"), install_dir.join("chrome")],
|
||||
BrowserType::Brave => vec![
|
||||
browser_subdir.join("brave"),
|
||||
browser_subdir.join("brave-browser"),
|
||||
browser_subdir.join("brave-browser-nightly"),
|
||||
browser_subdir.join("brave-browser-beta"),
|
||||
install_dir.join("brave"),
|
||||
install_dir.join("brave-browser"),
|
||||
install_dir.join("brave-browser-nightly"),
|
||||
install_dir.join("brave-browser-beta"),
|
||||
],
|
||||
_ => vec![],
|
||||
};
|
||||
@@ -373,6 +390,7 @@ mod windows {
|
||||
|| name.starts_with("mullvad")
|
||||
|| name.starts_with("zen")
|
||||
|| name.starts_with("tor")
|
||||
|| name.starts_with("camoufox")
|
||||
|| name.contains("browser")
|
||||
{
|
||||
return Ok(path);
|
||||
@@ -451,6 +469,7 @@ mod windows {
|
||||
|| name.starts_with("mullvad")
|
||||
|| name.starts_with("zen")
|
||||
|| name.starts_with("tor")
|
||||
|| name.starts_with("camoufox")
|
||||
|| name.contains("browser")
|
||||
{
|
||||
return true;
|
||||
@@ -547,7 +566,10 @@ impl Browser for FirefoxBrowser {
|
||||
BrowserType::MullvadBrowser | BrowserType::TorBrowser => {
|
||||
args.push("-no-remote".to_string());
|
||||
}
|
||||
BrowserType::Firefox | BrowserType::FirefoxDeveloper | BrowserType::Zen => {
|
||||
BrowserType::Firefox
|
||||
| BrowserType::FirefoxDeveloper
|
||||
| BrowserType::Zen
|
||||
| BrowserType::Camoufox => {
|
||||
// Don't use -no-remote so we can communicate with existing instances
|
||||
}
|
||||
_ => {}
|
||||
@@ -645,20 +667,16 @@ impl Browser for ChromiumBrowser {
|
||||
"--disable-component-update".to_string(),
|
||||
"--disable-background-timer-throttling".to_string(),
|
||||
"--crash-server-url=".to_string(),
|
||||
"--disable-updater".to_string(),
|
||||
];
|
||||
|
||||
// Add proxy configuration if provided
|
||||
if let Some(proxy) = proxy_settings {
|
||||
if proxy.enabled {
|
||||
let pac_path = Path::new(profile_path).join("proxy.pac");
|
||||
if pac_path.exists() {
|
||||
let pac_content = fs::read(&pac_path)?;
|
||||
let pac_base64 = general_purpose::STANDARD.encode(&pac_content);
|
||||
args.push(format!(
|
||||
"--proxy-pac-url=data:application/x-javascript-config;base64,{pac_base64}"
|
||||
));
|
||||
}
|
||||
}
|
||||
// Apply proxy settings
|
||||
args.push(format!(
|
||||
"--proxy-server=http://{}:{}",
|
||||
proxy.host, proxy.port
|
||||
));
|
||||
}
|
||||
|
||||
if let Some(url) = url {
|
||||
@@ -712,18 +730,110 @@ impl Browser for ChromiumBrowser {
|
||||
}
|
||||
}
|
||||
|
||||
// Factory function to create browser instances
|
||||
pub fn create_browser(browser_type: BrowserType) -> Box<dyn Browser> {
|
||||
match browser_type {
|
||||
BrowserType::MullvadBrowser
|
||||
| BrowserType::Firefox
|
||||
| BrowserType::FirefoxDeveloper
|
||||
| BrowserType::Zen
|
||||
| BrowserType::TorBrowser => Box::new(FirefoxBrowser::new(browser_type)),
|
||||
BrowserType::Chromium | BrowserType::Brave => Box::new(ChromiumBrowser::new(browser_type)),
|
||||
pub struct CamoufoxBrowser;
|
||||
|
||||
impl CamoufoxBrowser {
|
||||
pub fn new() -> Self {
|
||||
Self
|
||||
}
|
||||
}
|
||||
|
||||
impl Browser for CamoufoxBrowser {
|
||||
fn get_executable_path(&self, install_dir: &Path) -> Result<PathBuf, Box<dyn std::error::Error>> {
|
||||
#[cfg(target_os = "macos")]
|
||||
return macos::get_firefox_executable_path(install_dir);
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
return linux::get_firefox_executable_path(install_dir, &BrowserType::Camoufox);
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
return windows::get_firefox_executable_path(install_dir);
|
||||
|
||||
#[cfg(not(any(target_os = "macos", target_os = "linux", target_os = "windows")))]
|
||||
Err("Unsupported platform".into())
|
||||
}
|
||||
|
||||
fn create_launch_args(
|
||||
&self,
|
||||
profile_path: &str,
|
||||
_proxy_settings: Option<&ProxySettings>,
|
||||
url: Option<String>,
|
||||
) -> Result<Vec<String>, Box<dyn std::error::Error>> {
|
||||
// For Camoufox, we handle launching through the camoufox launcher
|
||||
// This method won't be used directly, but we provide basic Firefox args as fallback
|
||||
let mut args = vec![
|
||||
"-profile".to_string(),
|
||||
profile_path.to_string(),
|
||||
"-no-remote".to_string(),
|
||||
];
|
||||
|
||||
if let Some(url) = url {
|
||||
args.push(url);
|
||||
}
|
||||
|
||||
Ok(args)
|
||||
}
|
||||
|
||||
fn is_version_downloaded(&self, version: &str, binaries_dir: &Path) -> bool {
|
||||
let install_dir = binaries_dir.join("camoufox").join(version);
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
return macos::is_firefox_version_downloaded(&install_dir);
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
return linux::is_firefox_version_downloaded(&install_dir, &BrowserType::Camoufox);
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
return windows::is_firefox_version_downloaded(&install_dir);
|
||||
|
||||
#[cfg(not(any(target_os = "macos", target_os = "linux", target_os = "windows")))]
|
||||
false
|
||||
}
|
||||
|
||||
fn prepare_executable(&self, executable_path: &Path) -> Result<(), Box<dyn std::error::Error>> {
|
||||
#[cfg(target_os = "macos")]
|
||||
return macos::prepare_executable(executable_path);
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
return linux::prepare_executable(executable_path);
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
return windows::prepare_executable(executable_path);
|
||||
|
||||
#[cfg(not(any(target_os = "macos", target_os = "linux", target_os = "windows")))]
|
||||
Err("Unsupported platform".into())
|
||||
}
|
||||
}
|
||||
|
||||
pub struct BrowserFactory;
|
||||
|
||||
impl BrowserFactory {
|
||||
fn new() -> Self {
|
||||
Self
|
||||
}
|
||||
|
||||
pub fn instance() -> &'static BrowserFactory {
|
||||
&BROWSER_FACTORY
|
||||
}
|
||||
|
||||
pub fn create_browser(&self, browser_type: BrowserType) -> Box<dyn Browser> {
|
||||
match browser_type {
|
||||
BrowserType::MullvadBrowser
|
||||
| BrowserType::Firefox
|
||||
| BrowserType::FirefoxDeveloper
|
||||
| BrowserType::Zen
|
||||
| BrowserType::TorBrowser => Box::new(FirefoxBrowser::new(browser_type)),
|
||||
BrowserType::Chromium | BrowserType::Brave => Box::new(ChromiumBrowser::new(browser_type)),
|
||||
BrowserType::Camoufox => Box::new(CamoufoxBrowser::new()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Factory function to create browser instances (kept for backward compatibility)
|
||||
pub fn create_browser(browser_type: BrowserType) -> Box<dyn Browser> {
|
||||
BrowserFactory::instance().create_browser(browser_type)
|
||||
}
|
||||
|
||||
// Add GithubRelease and GithubAsset structs to browser.rs if they don't already exist
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct GithubRelease {
|
||||
@@ -737,6 +847,24 @@ pub struct GithubRelease {
|
||||
pub is_nightly: bool,
|
||||
#[serde(default)]
|
||||
pub prerelease: bool,
|
||||
#[serde(default)]
|
||||
pub draft: bool,
|
||||
#[serde(default)]
|
||||
pub body: Option<String>,
|
||||
#[serde(default)]
|
||||
pub html_url: Option<String>,
|
||||
#[serde(default)]
|
||||
pub id: Option<u64>,
|
||||
#[serde(default)]
|
||||
pub node_id: Option<String>,
|
||||
#[serde(default)]
|
||||
pub target_commitish: Option<String>,
|
||||
#[serde(default)]
|
||||
pub created_at: Option<String>,
|
||||
#[serde(default)]
|
||||
pub tarball_url: Option<String>,
|
||||
#[serde(default)]
|
||||
pub zipball_url: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
@@ -745,6 +873,22 @@ pub struct GithubAsset {
|
||||
pub browser_download_url: String,
|
||||
#[serde(default)]
|
||||
pub size: u64,
|
||||
#[serde(default)]
|
||||
pub download_count: Option<u64>,
|
||||
#[serde(default)]
|
||||
pub id: Option<u64>,
|
||||
#[serde(default)]
|
||||
pub node_id: Option<String>,
|
||||
#[serde(default)]
|
||||
pub label: Option<String>,
|
||||
#[serde(default)]
|
||||
pub content_type: Option<String>,
|
||||
#[serde(default)]
|
||||
pub state: Option<String>,
|
||||
#[serde(default)]
|
||||
pub created_at: Option<String>,
|
||||
#[serde(default)]
|
||||
pub updated_at: Option<String>,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -763,35 +907,57 @@ mod tests {
|
||||
assert_eq!(BrowserType::Brave.as_str(), "brave");
|
||||
assert_eq!(BrowserType::Zen.as_str(), "zen");
|
||||
assert_eq!(BrowserType::TorBrowser.as_str(), "tor-browser");
|
||||
assert_eq!(BrowserType::Camoufox.as_str(), "camoufox");
|
||||
|
||||
// Test from_str
|
||||
// Test from_str - use expect with descriptive messages instead of unwrap
|
||||
assert_eq!(
|
||||
BrowserType::from_str("mullvad-browser").unwrap(),
|
||||
BrowserType::from_str("mullvad-browser").expect("mullvad-browser should be valid"),
|
||||
BrowserType::MullvadBrowser
|
||||
);
|
||||
assert_eq!(
|
||||
BrowserType::from_str("firefox").unwrap(),
|
||||
BrowserType::from_str("firefox").expect("firefox should be valid"),
|
||||
BrowserType::Firefox
|
||||
);
|
||||
assert_eq!(
|
||||
BrowserType::from_str("firefox-developer").unwrap(),
|
||||
BrowserType::from_str("firefox-developer").expect("firefox-developer should be valid"),
|
||||
BrowserType::FirefoxDeveloper
|
||||
);
|
||||
assert_eq!(
|
||||
BrowserType::from_str("chromium").unwrap(),
|
||||
BrowserType::from_str("chromium").expect("chromium should be valid"),
|
||||
BrowserType::Chromium
|
||||
);
|
||||
assert_eq!(BrowserType::from_str("brave").unwrap(), BrowserType::Brave);
|
||||
assert_eq!(BrowserType::from_str("zen").unwrap(), BrowserType::Zen);
|
||||
assert_eq!(
|
||||
BrowserType::from_str("tor-browser").unwrap(),
|
||||
BrowserType::from_str("brave").expect("brave should be valid"),
|
||||
BrowserType::Brave
|
||||
);
|
||||
assert_eq!(
|
||||
BrowserType::from_str("zen").expect("zen should be valid"),
|
||||
BrowserType::Zen
|
||||
);
|
||||
assert_eq!(
|
||||
BrowserType::from_str("tor-browser").expect("tor-browser should be valid"),
|
||||
BrowserType::TorBrowser
|
||||
);
|
||||
assert_eq!(
|
||||
BrowserType::from_str("camoufox").expect("camoufox should be valid"),
|
||||
BrowserType::Camoufox
|
||||
);
|
||||
|
||||
// Test invalid browser type
|
||||
assert!(BrowserType::from_str("invalid").is_err());
|
||||
assert!(BrowserType::from_str("").is_err());
|
||||
assert!(BrowserType::from_str("Firefox").is_err()); // Case sensitive
|
||||
// Test invalid browser type - these should properly fail
|
||||
let invalid_result = BrowserType::from_str("invalid");
|
||||
assert!(
|
||||
invalid_result.is_err(),
|
||||
"Invalid browser type should return error"
|
||||
);
|
||||
|
||||
let empty_result = BrowserType::from_str("");
|
||||
assert!(empty_result.is_err(), "Empty string should return error");
|
||||
|
||||
let case_sensitive_result = BrowserType::from_str("Firefox");
|
||||
assert!(
|
||||
case_sensitive_result.is_err(),
|
||||
"Case sensitive check should fail"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -800,9 +966,12 @@ mod tests {
|
||||
let browser = FirefoxBrowser::new(BrowserType::Firefox);
|
||||
let args = browser
|
||||
.create_launch_args("/path/to/profile", None, None)
|
||||
.unwrap();
|
||||
.expect("Failed to create launch args for Firefox");
|
||||
assert_eq!(args, vec!["-profile", "/path/to/profile"]);
|
||||
assert!(!args.contains(&"-no-remote".to_string()));
|
||||
assert!(
|
||||
!args.contains(&"-no-remote".to_string()),
|
||||
"Firefox should not use -no-remote"
|
||||
);
|
||||
|
||||
let args = browser
|
||||
.create_launch_args(
|
||||
@@ -810,7 +979,7 @@ mod tests {
|
||||
None,
|
||||
Some("https://example.com".to_string()),
|
||||
)
|
||||
.unwrap();
|
||||
.expect("Failed to create launch args for Firefox with URL");
|
||||
assert_eq!(
|
||||
args,
|
||||
vec!["-profile", "/path/to/profile", "https://example.com"]
|
||||
@@ -820,23 +989,26 @@ mod tests {
|
||||
let browser = FirefoxBrowser::new(BrowserType::MullvadBrowser);
|
||||
let args = browser
|
||||
.create_launch_args("/path/to/profile", None, None)
|
||||
.unwrap();
|
||||
.expect("Failed to create launch args for Mullvad Browser");
|
||||
assert_eq!(args, vec!["-profile", "/path/to/profile", "-no-remote"]);
|
||||
|
||||
// Test Tor Browser (should use -no-remote)
|
||||
let browser = FirefoxBrowser::new(BrowserType::TorBrowser);
|
||||
let args = browser
|
||||
.create_launch_args("/path/to/profile", None, None)
|
||||
.unwrap();
|
||||
.expect("Failed to create launch args for Tor Browser");
|
||||
assert_eq!(args, vec!["-profile", "/path/to/profile", "-no-remote"]);
|
||||
|
||||
// Test Zen Browser (should not use -no-remote)
|
||||
let browser = FirefoxBrowser::new(BrowserType::Zen);
|
||||
let args = browser
|
||||
.create_launch_args("/path/to/profile", None, None)
|
||||
.unwrap();
|
||||
.expect("Failed to create launch args for Zen Browser");
|
||||
assert_eq!(args, vec!["-profile", "/path/to/profile"]);
|
||||
assert!(!args.contains(&"-no-remote".to_string()));
|
||||
assert!(
|
||||
!args.contains(&"-no-remote".to_string()),
|
||||
"Zen Browser should not use -no-remote"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -844,15 +1016,27 @@ mod tests {
|
||||
let browser = ChromiumBrowser::new(BrowserType::Chromium);
|
||||
let args = browser
|
||||
.create_launch_args("/path/to/profile", None, None)
|
||||
.unwrap();
|
||||
.expect("Failed to create launch args for Chromium");
|
||||
|
||||
// Test that basic required arguments are present
|
||||
assert!(args.contains(&"--user-data-dir=/path/to/profile".to_string()));
|
||||
assert!(args.contains(&"--no-default-browser-check".to_string()));
|
||||
assert!(
|
||||
args.contains(&"--user-data-dir=/path/to/profile".to_string()),
|
||||
"Chromium args should contain user-data-dir"
|
||||
);
|
||||
assert!(
|
||||
args.contains(&"--no-default-browser-check".to_string()),
|
||||
"Chromium args should contain no-default-browser-check"
|
||||
);
|
||||
|
||||
// Test that automatic update disabling arguments are present
|
||||
assert!(args.contains(&"--disable-background-mode".to_string()));
|
||||
assert!(args.contains(&"--disable-component-update".to_string()));
|
||||
assert!(
|
||||
args.contains(&"--disable-background-mode".to_string()),
|
||||
"Chromium args should contain disable-background-mode"
|
||||
);
|
||||
assert!(
|
||||
args.contains(&"--disable-component-update".to_string()),
|
||||
"Chromium args should contain disable-component-update"
|
||||
);
|
||||
|
||||
let args_with_url = browser
|
||||
.create_launch_args(
|
||||
@@ -860,33 +1044,40 @@ mod tests {
|
||||
None,
|
||||
Some("https://example.com".to_string()),
|
||||
)
|
||||
.unwrap();
|
||||
assert!(args_with_url.contains(&"https://example.com".to_string()));
|
||||
.expect("Failed to create launch args for Chromium with URL");
|
||||
assert!(
|
||||
args_with_url.contains(&"https://example.com".to_string()),
|
||||
"Chromium args should contain the URL"
|
||||
);
|
||||
|
||||
// Verify URL is at the end
|
||||
assert_eq!(args_with_url.last().unwrap(), "https://example.com");
|
||||
assert_eq!(
|
||||
args_with_url.last().expect("Args should not be empty"),
|
||||
"https://example.com"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_proxy_settings_creation() {
|
||||
let proxy = ProxySettings {
|
||||
enabled: true,
|
||||
proxy_type: "http".to_string(),
|
||||
host: "127.0.0.1".to_string(),
|
||||
port: 8080,
|
||||
username: None,
|
||||
password: None,
|
||||
};
|
||||
|
||||
assert!(proxy.enabled);
|
||||
assert_eq!(proxy.proxy_type, "http");
|
||||
assert_eq!(proxy.host, "127.0.0.1");
|
||||
assert_eq!(proxy.port, 8080);
|
||||
|
||||
// Test different proxy types
|
||||
let socks_proxy = ProxySettings {
|
||||
enabled: true,
|
||||
proxy_type: "socks5".to_string(),
|
||||
host: "proxy.example.com".to_string(),
|
||||
port: 1080,
|
||||
username: None,
|
||||
password: None,
|
||||
};
|
||||
|
||||
assert_eq!(socks_proxy.proxy_type, "socks5");
|
||||
@@ -896,16 +1087,45 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_version_downloaded_check() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp directory");
|
||||
let binaries_dir = temp_dir.path();
|
||||
|
||||
// Create a mock Firefox browser installation with new path structure: binaries/<browser>/<version>/
|
||||
let browser_dir = binaries_dir.join("firefox").join("139.0");
|
||||
fs::create_dir_all(&browser_dir).unwrap();
|
||||
fs::create_dir_all(&browser_dir).expect("Failed to create browser directory");
|
||||
|
||||
// Create a mock .app directory
|
||||
let app_dir = browser_dir.join("Firefox.app");
|
||||
fs::create_dir_all(&app_dir).unwrap();
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
// Create a mock .app directory for macOS
|
||||
let app_dir = browser_dir.join("Firefox.app");
|
||||
fs::create_dir_all(&app_dir).expect("Failed to create Firefox.app directory");
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
// Create a mock firefox subdirectory and executable for Linux
|
||||
let firefox_subdir = browser_dir.join("firefox");
|
||||
fs::create_dir_all(&firefox_subdir).expect("Failed to create firefox subdirectory");
|
||||
let executable_path = firefox_subdir.join("firefox");
|
||||
fs::write(&executable_path, "mock executable").expect("Failed to write mock executable");
|
||||
|
||||
// Set executable permissions on Linux
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
let mut permissions = executable_path
|
||||
.metadata()
|
||||
.expect("Failed to get file metadata")
|
||||
.permissions();
|
||||
permissions.set_mode(0o755);
|
||||
fs::set_permissions(&executable_path, permissions)
|
||||
.expect("Failed to set executable permissions");
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
// Create a mock firefox.exe for Windows
|
||||
let executable_path = browser_dir.join("firefox.exe");
|
||||
fs::write(&executable_path, "mock executable").expect("Failed to write mock executable");
|
||||
}
|
||||
|
||||
let browser = FirefoxBrowser::new(BrowserType::Firefox);
|
||||
assert!(browser.is_version_downloaded("139.0", binaries_dir));
|
||||
@@ -913,36 +1133,76 @@ mod tests {
|
||||
|
||||
// Test with Chromium browser with new path structure
|
||||
let chromium_dir = binaries_dir.join("chromium").join("1465660");
|
||||
fs::create_dir_all(&chromium_dir).unwrap();
|
||||
let chromium_app_dir = chromium_dir.join("Chromium.app");
|
||||
fs::create_dir_all(chromium_app_dir.join("Contents").join("MacOS")).unwrap();
|
||||
fs::create_dir_all(&chromium_dir).expect("Failed to create chromium directory");
|
||||
|
||||
// Create a mock executable
|
||||
let executable_path = chromium_app_dir
|
||||
.join("Contents")
|
||||
.join("MacOS")
|
||||
.join("Chromium");
|
||||
fs::write(&executable_path, "mock executable").unwrap();
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
let chromium_app_dir = chromium_dir.join("Chromium.app");
|
||||
fs::create_dir_all(chromium_app_dir.join("Contents").join("MacOS"))
|
||||
.expect("Failed to create Chromium.app structure");
|
||||
|
||||
// Create a mock executable
|
||||
let executable_path = chromium_app_dir
|
||||
.join("Contents")
|
||||
.join("MacOS")
|
||||
.join("Chromium");
|
||||
fs::write(&executable_path, "mock executable")
|
||||
.expect("Failed to write mock Chromium executable");
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
// Create a mock chromium executable for Linux
|
||||
let executable_path = chromium_dir.join("chromium");
|
||||
fs::write(&executable_path, "mock executable")
|
||||
.expect("Failed to write mock chromium executable");
|
||||
|
||||
// Set executable permissions on Linux
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
let mut permissions = executable_path
|
||||
.metadata()
|
||||
.expect("Failed to get chromium metadata")
|
||||
.permissions();
|
||||
permissions.set_mode(0o755);
|
||||
fs::set_permissions(&executable_path, permissions)
|
||||
.expect("Failed to set chromium permissions");
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
// Create a mock chromium.exe for Windows
|
||||
let executable_path = chromium_dir.join("chromium.exe");
|
||||
fs::write(&executable_path, "mock executable").expect("Failed to write mock chromium.exe");
|
||||
}
|
||||
|
||||
let chromium_browser = ChromiumBrowser::new(BrowserType::Chromium);
|
||||
assert!(chromium_browser.is_version_downloaded("1465660", binaries_dir));
|
||||
assert!(!chromium_browser.is_version_downloaded("1465661", binaries_dir));
|
||||
assert!(
|
||||
chromium_browser.is_version_downloaded("1465660", binaries_dir),
|
||||
"Chromium version should be detected as downloaded"
|
||||
);
|
||||
assert!(
|
||||
!chromium_browser.is_version_downloaded("1465661", binaries_dir),
|
||||
"Non-existent Chromium version should not be detected as downloaded"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_version_downloaded_no_app_directory() {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp directory");
|
||||
let binaries_dir = temp_dir.path();
|
||||
|
||||
// Create browser directory but no .app directory with new path structure
|
||||
// Create browser directory but no proper executable structure
|
||||
let browser_dir = binaries_dir.join("firefox").join("139.0");
|
||||
fs::create_dir_all(&browser_dir).unwrap();
|
||||
fs::create_dir_all(&browser_dir).expect("Failed to create browser directory");
|
||||
|
||||
// Create some other files but no .app
|
||||
fs::write(browser_dir.join("readme.txt"), "Some content").unwrap();
|
||||
// Create some other files but no proper executable structure
|
||||
fs::write(browser_dir.join("readme.txt"), "Some content").expect("Failed to write readme file");
|
||||
|
||||
let browser = FirefoxBrowser::new(BrowserType::Firefox);
|
||||
assert!(!browser.is_version_downloaded("139.0", binaries_dir));
|
||||
assert!(
|
||||
!browser.is_version_downloaded("139.0", binaries_dir),
|
||||
"Firefox version should not be detected without proper executable structure"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -959,23 +1219,32 @@ mod tests {
|
||||
#[test]
|
||||
fn test_proxy_settings_serialization() {
|
||||
let proxy = ProxySettings {
|
||||
enabled: true,
|
||||
proxy_type: "http".to_string(),
|
||||
host: "127.0.0.1".to_string(),
|
||||
port: 8080,
|
||||
username: None,
|
||||
password: None,
|
||||
};
|
||||
|
||||
// Test that it can be serialized (implements Serialize)
|
||||
let json = serde_json::to_string(&proxy).unwrap();
|
||||
assert!(json.contains("127.0.0.1"));
|
||||
assert!(json.contains("8080"));
|
||||
assert!(json.contains("http"));
|
||||
let json = serde_json::to_string(&proxy).expect("Failed to serialize proxy settings");
|
||||
assert!(json.contains("127.0.0.1"), "JSON should contain host IP");
|
||||
assert!(json.contains("8080"), "JSON should contain port number");
|
||||
assert!(json.contains("http"), "JSON should contain proxy type");
|
||||
|
||||
// Test that it can be deserialized (implements Deserialize)
|
||||
let deserialized: ProxySettings = serde_json::from_str(&json).unwrap();
|
||||
assert_eq!(deserialized.enabled, proxy.enabled);
|
||||
assert_eq!(deserialized.proxy_type, proxy.proxy_type);
|
||||
assert_eq!(deserialized.host, proxy.host);
|
||||
assert_eq!(deserialized.port, proxy.port);
|
||||
let deserialized: ProxySettings =
|
||||
serde_json::from_str(&json).expect("Failed to deserialize proxy settings");
|
||||
assert_eq!(
|
||||
deserialized.proxy_type, proxy.proxy_type,
|
||||
"Proxy type should match"
|
||||
);
|
||||
assert_eq!(deserialized.host, proxy.host, "Host should match");
|
||||
assert_eq!(deserialized.port, proxy.port, "Port should match");
|
||||
}
|
||||
}
|
||||
|
||||
// Global singleton instance
|
||||
lazy_static::lazy_static! {
|
||||
static ref BROWSER_FACTORY: BrowserFactory = BrowserFactory::new();
|
||||
}
|
||||
|
||||
+1185
-2012
File diff suppressed because it is too large
Load Diff
+400
-654
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,488 @@
|
||||
use crate::profile::BrowserProfile;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
|
||||
use tauri::AppHandle;
|
||||
use tauri_plugin_shell::ShellExt;
|
||||
use tokio::sync::Mutex as AsyncMutex;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct CamoufoxConfig {
|
||||
pub proxy: Option<String>,
|
||||
pub screen_max_width: Option<u32>,
|
||||
pub screen_max_height: Option<u32>,
|
||||
pub screen_min_width: Option<u32>,
|
||||
pub screen_min_height: Option<u32>,
|
||||
pub geoip: Option<serde_json::Value>, // Can be String or bool
|
||||
pub block_images: Option<bool>,
|
||||
pub block_webrtc: Option<bool>,
|
||||
pub block_webgl: Option<bool>,
|
||||
pub executable_path: Option<String>,
|
||||
pub fingerprint: Option<String>, // JSON string of the complete fingerprint config
|
||||
}
|
||||
|
||||
impl Default for CamoufoxConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
proxy: None,
|
||||
screen_max_width: None,
|
||||
screen_max_height: None,
|
||||
screen_min_width: None,
|
||||
screen_min_height: None,
|
||||
geoip: Some(serde_json::Value::Bool(true)),
|
||||
block_images: None,
|
||||
block_webrtc: None,
|
||||
block_webgl: None,
|
||||
executable_path: None,
|
||||
fingerprint: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
#[allow(non_snake_case)]
|
||||
pub struct CamoufoxLaunchResult {
|
||||
pub id: String,
|
||||
#[serde(alias = "process_id")]
|
||||
pub processId: Option<u32>,
|
||||
#[serde(alias = "profile_path")]
|
||||
pub profilePath: Option<String>,
|
||||
pub url: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct CamoufoxInstance {
|
||||
#[allow(dead_code)]
|
||||
id: String,
|
||||
process_id: Option<u32>,
|
||||
profile_path: Option<String>,
|
||||
url: Option<String>,
|
||||
}
|
||||
|
||||
struct CamoufoxNodecarLauncherInner {
|
||||
instances: HashMap<String, CamoufoxInstance>,
|
||||
}
|
||||
|
||||
pub struct CamoufoxNodecarLauncher {
|
||||
inner: Arc<AsyncMutex<CamoufoxNodecarLauncherInner>>,
|
||||
}
|
||||
|
||||
impl CamoufoxNodecarLauncher {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
inner: Arc::new(AsyncMutex::new(CamoufoxNodecarLauncherInner {
|
||||
instances: HashMap::new(),
|
||||
})),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn instance() -> &'static CamoufoxNodecarLauncher {
|
||||
&CAMOUFOX_NODECAR_LAUNCHER
|
||||
}
|
||||
|
||||
/// Generate Camoufox fingerprint configuration during profile creation
|
||||
pub async fn generate_fingerprint_config(
|
||||
&self,
|
||||
app_handle: &AppHandle,
|
||||
profile: &crate::profile::BrowserProfile,
|
||||
config: &CamoufoxConfig,
|
||||
) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let mut config_args = vec!["camoufox".to_string(), "generate-config".to_string()];
|
||||
|
||||
// Always ensure executable_path is set to the user's binary location
|
||||
let executable_path = if let Some(path) = &config.executable_path {
|
||||
path.clone()
|
||||
} else {
|
||||
// Use the browser runner helper with the real profile
|
||||
let browser_runner = crate::browser_runner::BrowserRunner::instance();
|
||||
browser_runner
|
||||
.get_browser_executable_path(profile)
|
||||
.map_err(|e| format!("Failed to get Camoufox executable path: {e}"))?
|
||||
.to_string_lossy()
|
||||
.to_string()
|
||||
};
|
||||
config_args.extend(["--executable-path".to_string(), executable_path]);
|
||||
|
||||
// Pass existing fingerprint if provided (for advanced form partial fingerprints)
|
||||
if let Some(fingerprint) = &config.fingerprint {
|
||||
config_args.extend(["--fingerprint".to_string(), fingerprint.clone()]);
|
||||
}
|
||||
|
||||
if let Some(serde_json::Value::Bool(true)) = &config.geoip {
|
||||
config_args.push("--geoip".to_string());
|
||||
}
|
||||
|
||||
// Add proxy if provided (can be passed directly during fingerprint generation)
|
||||
if let Some(proxy) = &config.proxy {
|
||||
config_args.extend(["--proxy".to_string(), proxy.clone()]);
|
||||
}
|
||||
|
||||
// Add screen dimensions if provided
|
||||
if let Some(max_width) = config.screen_max_width {
|
||||
config_args.extend(["--max-width".to_string(), max_width.to_string()]);
|
||||
}
|
||||
|
||||
if let Some(max_height) = config.screen_max_height {
|
||||
config_args.extend(["--max-height".to_string(), max_height.to_string()]);
|
||||
}
|
||||
|
||||
if let Some(min_width) = config.screen_min_width {
|
||||
config_args.extend(["--min-width".to_string(), min_width.to_string()]);
|
||||
}
|
||||
|
||||
if let Some(min_height) = config.screen_min_height {
|
||||
config_args.extend(["--min-height".to_string(), min_height.to_string()]);
|
||||
}
|
||||
|
||||
// Add block_* options
|
||||
if let Some(block_images) = config.block_images {
|
||||
if block_images {
|
||||
config_args.push("--block-images".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(block_webrtc) = config.block_webrtc {
|
||||
if block_webrtc {
|
||||
config_args.push("--block-webrtc".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(block_webgl) = config.block_webgl {
|
||||
if block_webgl {
|
||||
config_args.push("--block-webgl".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
// Execute config generation command
|
||||
let mut config_sidecar = self.get_nodecar_sidecar(app_handle)?;
|
||||
for arg in &config_args {
|
||||
config_sidecar = config_sidecar.arg(arg);
|
||||
}
|
||||
|
||||
let config_output = config_sidecar.output().await?;
|
||||
if !config_output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&config_output.stderr);
|
||||
return Err(format!("Failed to generate camoufox fingerprint config: {stderr}").into());
|
||||
}
|
||||
|
||||
Ok(String::from_utf8_lossy(&config_output.stdout).to_string())
|
||||
}
|
||||
|
||||
/// Get the nodecar sidecar command
|
||||
fn get_nodecar_sidecar(
|
||||
&self,
|
||||
app_handle: &AppHandle,
|
||||
) -> Result<tauri_plugin_shell::process::Command, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let shell = app_handle.shell();
|
||||
let sidecar_command = shell
|
||||
.sidecar("nodecar")
|
||||
.map_err(|e| format!("Failed to create nodecar sidecar: {e}"))?;
|
||||
Ok(sidecar_command)
|
||||
}
|
||||
|
||||
/// Launch Camoufox browser using nodecar sidecar
|
||||
pub async fn launch_camoufox(
|
||||
&self,
|
||||
app_handle: &AppHandle,
|
||||
profile: &crate::profile::BrowserProfile,
|
||||
profile_path: &str,
|
||||
config: &CamoufoxConfig,
|
||||
url: Option<&str>,
|
||||
) -> Result<CamoufoxLaunchResult, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let custom_config = if let Some(existing_fingerprint) = &config.fingerprint {
|
||||
println!("Using existing fingerprint from profile metadata");
|
||||
existing_fingerprint.clone()
|
||||
} else {
|
||||
return Err("No fingerprint provided".into());
|
||||
};
|
||||
|
||||
// Always ensure executable_path is set to the user's binary location
|
||||
let executable_path = if let Some(path) = &config.executable_path {
|
||||
path.clone()
|
||||
} else {
|
||||
// Use the browser runner helper with the real profile
|
||||
let browser_runner = crate::browser_runner::BrowserRunner::instance();
|
||||
browser_runner
|
||||
.get_browser_executable_path(profile)
|
||||
.map_err(|e| format!("Failed to get Camoufox executable path: {e}"))?
|
||||
.to_string_lossy()
|
||||
.to_string()
|
||||
};
|
||||
|
||||
// Build nodecar command arguments
|
||||
let mut args = vec!["camoufox".to_string(), "start".to_string()];
|
||||
|
||||
// Add profile path - ensure it's an absolute path
|
||||
let absolute_profile_path = std::path::Path::new(profile_path)
|
||||
.canonicalize()
|
||||
.unwrap_or_else(|_| std::path::Path::new(profile_path).to_path_buf())
|
||||
.to_string_lossy()
|
||||
.to_string();
|
||||
args.extend(["--profile-path".to_string(), absolute_profile_path]);
|
||||
|
||||
// Add URL if provided
|
||||
if let Some(url) = url {
|
||||
args.extend(["--url".to_string(), url.to_string()]);
|
||||
}
|
||||
|
||||
// Always add the executable path
|
||||
args.extend(["--executable-path".to_string(), executable_path]);
|
||||
|
||||
// Always add the generated custom config
|
||||
args.extend(["--custom-config".to_string(), custom_config]);
|
||||
|
||||
// Add proxy if provided
|
||||
if let Some(proxy) = &config.proxy {
|
||||
args.extend(["--proxy".to_string(), proxy.clone()]);
|
||||
}
|
||||
|
||||
// Add headless flag for tests
|
||||
if std::env::var("CAMOUFOX_HEADLESS").is_ok() {
|
||||
args.push("--headless".to_string());
|
||||
}
|
||||
|
||||
// Get the nodecar sidecar command
|
||||
let mut sidecar_command = self.get_nodecar_sidecar(app_handle)?;
|
||||
|
||||
// Add all arguments to the sidecar command
|
||||
for arg in &args {
|
||||
sidecar_command = sidecar_command.arg(arg);
|
||||
}
|
||||
|
||||
// Execute nodecar sidecar command
|
||||
println!("Executing nodecar command with args: {args:?}");
|
||||
let output = sidecar_command.output().await?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
println!("nodecar camoufox failed - stdout: {stdout}, stderr: {stderr}");
|
||||
return Err(format!("nodecar camoufox failed: {stderr}").into());
|
||||
}
|
||||
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
println!("nodecar camoufox output: {stdout}");
|
||||
|
||||
// Parse the JSON output
|
||||
let launch_result: CamoufoxLaunchResult = serde_json::from_str(&stdout)
|
||||
.map_err(|e| format!("Failed to parse nodecar output as JSON: {e}\nOutput was: {stdout}"))?;
|
||||
|
||||
// Store the instance
|
||||
let instance = CamoufoxInstance {
|
||||
id: launch_result.id.clone(),
|
||||
process_id: launch_result.processId,
|
||||
profile_path: launch_result.profilePath.clone(),
|
||||
url: launch_result.url.clone(),
|
||||
};
|
||||
|
||||
{
|
||||
let mut inner = self.inner.lock().await;
|
||||
inner.instances.insert(launch_result.id.clone(), instance);
|
||||
}
|
||||
|
||||
Ok(launch_result)
|
||||
}
|
||||
|
||||
/// Stop a Camoufox process by ID
|
||||
pub async fn stop_camoufox(
|
||||
&self,
|
||||
app_handle: &AppHandle,
|
||||
id: &str,
|
||||
) -> Result<bool, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Get the nodecar sidecar command
|
||||
let sidecar_command = self
|
||||
.get_nodecar_sidecar(app_handle)?
|
||||
.arg("camoufox")
|
||||
.arg("stop")
|
||||
.arg("--id")
|
||||
.arg(id);
|
||||
|
||||
// Execute nodecar stop command
|
||||
let output = sidecar_command.output().await?;
|
||||
|
||||
if !output.status.success() {
|
||||
let _stderr = String::from_utf8_lossy(&output.stderr);
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
let result: serde_json::Value = serde_json::from_str(&stdout)
|
||||
.map_err(|e| format!("Failed to parse nodecar stop output: {e}"))?;
|
||||
|
||||
let success = result
|
||||
.get("success")
|
||||
.and_then(|v| v.as_bool())
|
||||
.unwrap_or(false);
|
||||
|
||||
if success {
|
||||
// Remove from our tracking
|
||||
let mut inner = self.inner.lock().await;
|
||||
inner.instances.remove(id);
|
||||
}
|
||||
|
||||
Ok(success)
|
||||
}
|
||||
|
||||
/// Find Camoufox server by profile path (for integration with browser_runner)
|
||||
pub async fn find_camoufox_by_profile(
|
||||
&self,
|
||||
profile_path: &str,
|
||||
) -> Result<Option<CamoufoxLaunchResult>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// First clean up any dead instances
|
||||
self.cleanup_dead_instances().await?;
|
||||
|
||||
let inner = self.inner.lock().await;
|
||||
|
||||
// Convert paths to canonical form for comparison
|
||||
let target_path = std::path::Path::new(profile_path)
|
||||
.canonicalize()
|
||||
.unwrap_or_else(|_| std::path::Path::new(profile_path).to_path_buf());
|
||||
|
||||
for (id, instance) in inner.instances.iter() {
|
||||
if let Some(instance_profile_path) = &instance.profile_path {
|
||||
let instance_path = std::path::Path::new(instance_profile_path)
|
||||
.canonicalize()
|
||||
.unwrap_or_else(|_| std::path::Path::new(instance_profile_path).to_path_buf());
|
||||
|
||||
if instance_path == target_path {
|
||||
// Verify the server is actually running by checking the process
|
||||
if let Some(process_id) = instance.process_id {
|
||||
if self.is_server_running(process_id).await {
|
||||
// Found running Camoufox instance
|
||||
return Ok(Some(CamoufoxLaunchResult {
|
||||
id: id.clone(),
|
||||
processId: instance.process_id,
|
||||
profilePath: instance.profile_path.clone(),
|
||||
url: instance.url.clone(),
|
||||
}));
|
||||
} else {
|
||||
// Camoufox instance found but process is not running
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
/// Check if servers are still alive and clean up dead instances
|
||||
pub async fn cleanup_dead_instances(
|
||||
&self,
|
||||
) -> Result<Vec<String>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let mut dead_instances = Vec::new();
|
||||
let mut instances_to_remove = Vec::new();
|
||||
|
||||
{
|
||||
let inner = self.inner.lock().await;
|
||||
|
||||
for (id, instance) in inner.instances.iter() {
|
||||
if let Some(process_id) = instance.process_id {
|
||||
// Check if the process is still alive
|
||||
if !self.is_server_running(process_id).await {
|
||||
// Process is dead
|
||||
// Camoufox instance is no longer running
|
||||
dead_instances.push(id.clone());
|
||||
instances_to_remove.push(id.clone());
|
||||
}
|
||||
} else {
|
||||
// No process_id means it's likely a dead instance
|
||||
// Camoufox instance has no PID, marking as dead
|
||||
dead_instances.push(id.clone());
|
||||
instances_to_remove.push(id.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Remove dead instances
|
||||
if !instances_to_remove.is_empty() {
|
||||
let mut inner = self.inner.lock().await;
|
||||
for id in &instances_to_remove {
|
||||
inner.instances.remove(id);
|
||||
// Removed dead Camoufox instance
|
||||
}
|
||||
}
|
||||
|
||||
Ok(dead_instances)
|
||||
}
|
||||
|
||||
/// Check if a Camoufox server is running with the given process ID
|
||||
async fn is_server_running(&self, process_id: u32) -> bool {
|
||||
// Check if the process is still running
|
||||
use sysinfo::{Pid, System};
|
||||
|
||||
let system = System::new_all();
|
||||
if let Some(process) = system.process(Pid::from(process_id as usize)) {
|
||||
// Check if this is actually a Camoufox process by looking at the command line
|
||||
let cmd = process.cmd();
|
||||
let is_camoufox = cmd.iter().any(|arg| {
|
||||
let arg_str = arg.to_str().unwrap_or("");
|
||||
arg_str.contains("camoufox-worker") || arg_str.contains("camoufox")
|
||||
});
|
||||
|
||||
if is_camoufox {
|
||||
// Found running Camoufox process
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
impl CamoufoxNodecarLauncher {
|
||||
pub async fn launch_camoufox_profile_nodecar(
|
||||
&self,
|
||||
app_handle: AppHandle,
|
||||
profile: BrowserProfile,
|
||||
config: CamoufoxConfig,
|
||||
url: Option<String>,
|
||||
) -> Result<CamoufoxLaunchResult, String> {
|
||||
// Get profile path
|
||||
let browser_runner = crate::browser_runner::BrowserRunner::instance();
|
||||
let profiles_dir = browser_runner.get_profiles_dir();
|
||||
let profile_path = profile.get_profile_data_path(&profiles_dir);
|
||||
let profile_path_str = profile_path.to_string_lossy();
|
||||
|
||||
// Check if there's already a running instance for this profile
|
||||
if let Ok(Some(existing)) = self.find_camoufox_by_profile(&profile_path_str).await {
|
||||
// If there's an existing instance, stop it first to avoid conflicts
|
||||
let _ = self.stop_camoufox(&app_handle, &existing.id).await;
|
||||
}
|
||||
|
||||
// Clean up any dead instances before launching
|
||||
let _ = self.cleanup_dead_instances().await;
|
||||
|
||||
self
|
||||
.launch_camoufox(
|
||||
&app_handle,
|
||||
&profile,
|
||||
&profile_path_str,
|
||||
&config,
|
||||
url.as_deref(),
|
||||
)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to launch Camoufox via nodecar: {e}"))
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_default_config() {
|
||||
let default_config = CamoufoxConfig::default();
|
||||
|
||||
// Verify defaults
|
||||
assert_eq!(default_config.geoip, Some(serde_json::Value::Bool(true)));
|
||||
assert_eq!(default_config.proxy, None);
|
||||
assert_eq!(default_config.fingerprint, None);
|
||||
}
|
||||
}
|
||||
|
||||
// Global singleton instance
|
||||
lazy_static::lazy_static! {
|
||||
static ref CAMOUFOX_NODECAR_LAUNCHER: CamoufoxNodecarLauncher = CamoufoxNodecarLauncher::new();
|
||||
}
|
||||
+356
-143
@@ -1,5 +1,77 @@
|
||||
use tauri::command;
|
||||
|
||||
pub struct DefaultBrowser;
|
||||
|
||||
impl DefaultBrowser {
|
||||
fn new() -> Self {
|
||||
Self
|
||||
}
|
||||
|
||||
pub fn instance() -> &'static DefaultBrowser {
|
||||
&DEFAULT_BROWSER
|
||||
}
|
||||
|
||||
pub async fn is_default_browser(&self) -> Result<bool, String> {
|
||||
#[cfg(target_os = "macos")]
|
||||
return macos::is_default_browser();
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
return windows::is_default_browser();
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
return linux::is_default_browser();
|
||||
|
||||
#[cfg(not(any(target_os = "macos", target_os = "windows", target_os = "linux")))]
|
||||
Err("Unsupported platform".to_string())
|
||||
}
|
||||
|
||||
pub async fn set_as_default_browser(&self) -> Result<(), String> {
|
||||
#[cfg(target_os = "macos")]
|
||||
return macos::set_as_default_browser();
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
return windows::set_as_default_browser();
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
return linux::set_as_default_browser();
|
||||
|
||||
#[cfg(not(any(target_os = "macos", target_os = "windows", target_os = "linux")))]
|
||||
Err("Unsupported platform".to_string())
|
||||
}
|
||||
|
||||
pub async fn open_url_with_profile(
|
||||
&self,
|
||||
app_handle: tauri::AppHandle,
|
||||
profile_name: String,
|
||||
url: String,
|
||||
) -> Result<(), String> {
|
||||
let runner = crate::browser_runner::BrowserRunner::instance();
|
||||
|
||||
// Get the profile by name
|
||||
let profiles = runner
|
||||
.list_profiles()
|
||||
.map_err(|e| format!("Failed to list profiles: {e}"))?;
|
||||
let profile = profiles
|
||||
.into_iter()
|
||||
.find(|p| p.name == profile_name)
|
||||
.ok_or_else(|| format!("Profile '{profile_name}' not found"))?;
|
||||
|
||||
println!("Opening URL '{url}' with profile '{profile_name}'");
|
||||
|
||||
// Use launch_or_open_url which handles both launching new instances and opening in existing ones
|
||||
runner
|
||||
.launch_or_open_url(app_handle, &profile, Some(url.clone()), None)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
println!("Failed to open URL with profile '{profile_name}': {e}");
|
||||
format!("Failed to open URL with profile: {e}")
|
||||
})?;
|
||||
|
||||
println!("Successfully opened URL '{url}' with profile '{profile_name}'");
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
mod macos {
|
||||
use core_foundation::base::OSStatus;
|
||||
@@ -65,13 +137,282 @@ mod macos {
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
mod windows {
|
||||
use std::path::Path;
|
||||
use winreg::enums::*;
|
||||
use winreg::RegKey;
|
||||
|
||||
const APP_NAME: &str = "DonutBrowser";
|
||||
const PROG_ID: &str = "DonutBrowser.HTML";
|
||||
|
||||
pub fn is_default_browser() -> Result<bool, String> {
|
||||
// Windows implementation would go here
|
||||
Err("Windows support not implemented yet".to_string())
|
||||
let schemes = ["http", "https"];
|
||||
|
||||
for scheme in schemes {
|
||||
// Check if our browser is set as the default handler for this scheme
|
||||
if !is_default_for_scheme(scheme)? {
|
||||
return Ok(false);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
pub fn set_as_default_browser() -> Result<(), String> {
|
||||
Err("Windows support not implemented yet".to_string())
|
||||
// Get the current executable path
|
||||
let exe_path = std::env::current_exe()
|
||||
.map_err(|e| format!("Failed to get current executable path: {}", e))?;
|
||||
|
||||
let exe_path_str = exe_path
|
||||
.to_str()
|
||||
.ok_or("Failed to convert executable path to string")?;
|
||||
|
||||
// Verify the executable exists
|
||||
if !Path::new(exe_path_str).exists() {
|
||||
return Err(format!("Executable not found at: {}", exe_path_str));
|
||||
}
|
||||
|
||||
// Register the application
|
||||
register_application(exe_path_str)?;
|
||||
|
||||
// Set as default for HTTP and HTTPS
|
||||
set_default_for_scheme("http")?;
|
||||
set_default_for_scheme("https")?;
|
||||
|
||||
// Register file associations for HTML files
|
||||
register_html_file_association(exe_path_str)?;
|
||||
|
||||
// Notify the system of changes
|
||||
notify_system_of_changes();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn is_default_for_scheme(scheme: &str) -> Result<bool, String> {
|
||||
let hkcu = RegKey::predef(HKEY_CURRENT_USER);
|
||||
|
||||
// Check Software\Microsoft\Windows\Shell\Associations\UrlAssociations\{scheme}\UserChoice
|
||||
let path = format!(
|
||||
"Software\\Microsoft\\Windows\\Shell\\Associations\\UrlAssociations\\{}\\UserChoice",
|
||||
scheme
|
||||
);
|
||||
|
||||
match hkcu.open_subkey(&path) {
|
||||
Ok(key) => match key.get_value::<String, _>("ProgId") {
|
||||
Ok(prog_id) => Ok(prog_id == PROG_ID),
|
||||
Err(_) => Ok(false),
|
||||
},
|
||||
Err(_) => Ok(false),
|
||||
}
|
||||
}
|
||||
|
||||
fn register_application(exe_path: &str) -> Result<(), String> {
|
||||
let hkcu = RegKey::predef(HKEY_CURRENT_USER);
|
||||
|
||||
// Register in Software\RegisteredApplications
|
||||
let (registered_apps, _) = hkcu
|
||||
.create_subkey("Software\\RegisteredApplications")
|
||||
.map_err(|e| format!("Failed to create RegisteredApplications key: {}", e))?;
|
||||
|
||||
registered_apps
|
||||
.set_value(APP_NAME, &format!("Software\\{}", APP_NAME))
|
||||
.map_err(|e| format!("Failed to set registered application: {}", e))?;
|
||||
|
||||
// Create application key
|
||||
let (app_key, _) = hkcu
|
||||
.create_subkey(&format!("Software\\{}", APP_NAME))
|
||||
.map_err(|e| format!("Failed to create application key: {}", e))?;
|
||||
|
||||
// Set application properties
|
||||
app_key
|
||||
.set_value("ApplicationName", &APP_NAME)
|
||||
.map_err(|e| format!("Failed to set ApplicationName: {}", e))?;
|
||||
|
||||
app_key
|
||||
.set_value(
|
||||
"ApplicationDescription",
|
||||
&"Donut Browser - Simple Yet Powerful Anti-Detect Browser",
|
||||
)
|
||||
.map_err(|e| format!("Failed to set ApplicationDescription: {}", e))?;
|
||||
|
||||
app_key
|
||||
.set_value("ApplicationIcon", &format!("{},0", exe_path))
|
||||
.map_err(|e| format!("Failed to set ApplicationIcon: {}", e))?;
|
||||
|
||||
// Create Capabilities key
|
||||
let (capabilities, _) = app_key
|
||||
.create_subkey("Capabilities")
|
||||
.map_err(|e| format!("Failed to create Capabilities key: {}", e))?;
|
||||
|
||||
capabilities
|
||||
.set_value(
|
||||
"ApplicationDescription",
|
||||
&"Donut Browser - Simple Yet Powerful Anti-Detect Browser",
|
||||
)
|
||||
.map_err(|e| format!("Failed to set Capabilities description: {}", e))?;
|
||||
|
||||
// Set URL associations
|
||||
let (url_assoc, _) = capabilities
|
||||
.create_subkey("URLAssociations")
|
||||
.map_err(|e| format!("Failed to create URLAssociations key: {}", e))?;
|
||||
|
||||
url_assoc
|
||||
.set_value("http", &PROG_ID)
|
||||
.map_err(|e| format!("Failed to set http association: {}", e))?;
|
||||
|
||||
url_assoc
|
||||
.set_value("https", &PROG_ID)
|
||||
.map_err(|e| format!("Failed to set https association: {}", e))?;
|
||||
|
||||
// Set file associations
|
||||
let (file_assoc, _) = capabilities
|
||||
.create_subkey("FileAssociations")
|
||||
.map_err(|e| format!("Failed to create FileAssociations key: {}", e))?;
|
||||
|
||||
file_assoc
|
||||
.set_value(".html", &PROG_ID)
|
||||
.map_err(|e| format!("Failed to set .html association: {}", e))?;
|
||||
|
||||
file_assoc
|
||||
.set_value(".htm", &PROG_ID)
|
||||
.map_err(|e| format!("Failed to set .htm association: {}", e))?;
|
||||
|
||||
// Register the ProgID
|
||||
register_prog_id(exe_path)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn register_prog_id(exe_path: &str) -> Result<(), String> {
|
||||
let hkcu = RegKey::predef(HKEY_CURRENT_USER);
|
||||
|
||||
// Create ProgID key
|
||||
let (prog_id_key, _) = hkcu
|
||||
.create_subkey(&format!("Software\\Classes\\{}", PROG_ID))
|
||||
.map_err(|e| format!("Failed to create ProgID key: {}", e))?;
|
||||
|
||||
prog_id_key
|
||||
.set_value("", &"Donut Browser Document")
|
||||
.map_err(|e| format!("Failed to set ProgID default value: {}", e))?;
|
||||
|
||||
prog_id_key
|
||||
.set_value("FriendlyTypeName", &"Donut Browser Document")
|
||||
.map_err(|e| format!("Failed to set FriendlyTypeName: {}", e))?;
|
||||
|
||||
// Create DefaultIcon key
|
||||
let (icon_key, _) = prog_id_key
|
||||
.create_subkey("DefaultIcon")
|
||||
.map_err(|e| format!("Failed to create DefaultIcon key: {}", e))?;
|
||||
|
||||
icon_key
|
||||
.set_value("", &format!("{},0", exe_path))
|
||||
.map_err(|e| format!("Failed to set default icon: {}", e))?;
|
||||
|
||||
// Create shell\open\command key
|
||||
let (command_key, _) = prog_id_key
|
||||
.create_subkey("shell\\open\\command")
|
||||
.map_err(|e| format!("Failed to create command key: {}", e))?;
|
||||
|
||||
command_key
|
||||
.set_value("", &format!("\"{}\" \"%1\"", exe_path))
|
||||
.map_err(|e| format!("Failed to set command: {}", e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn set_default_for_scheme(scheme: &str) -> Result<(), String> {
|
||||
let hkcu = RegKey::predef(HKEY_CURRENT_USER);
|
||||
|
||||
// Set in Software\Microsoft\Windows\CurrentVersion\Explorer\FileExts\.html\UserChoice
|
||||
// Note: On Windows 10+, this might require elevated permissions or user interaction
|
||||
// through the Settings app due to security restrictions
|
||||
|
||||
// Try to set the association in the user's choice
|
||||
let user_choice_path = format!(
|
||||
"Software\\Microsoft\\Windows\\Shell\\Associations\\UrlAssociations\\{}\\UserChoice",
|
||||
scheme
|
||||
);
|
||||
|
||||
// Note: Setting UserChoice directly may not work on Windows 10+ due to hash verification
|
||||
// The user may need to manually set the default browser through Windows Settings
|
||||
match hkcu.create_subkey(&user_choice_path) {
|
||||
Ok((user_choice, _)) => {
|
||||
// Attempt to set the ProgId
|
||||
if let Err(_) = user_choice.set_value("ProgId", &PROG_ID) {
|
||||
// If we can't set UserChoice, that's expected on newer Windows versions
|
||||
// The registration is still valuable for the "Open with" menu
|
||||
}
|
||||
}
|
||||
Err(_) => {
|
||||
// Expected on newer Windows versions - user must set manually
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn register_html_file_association(_exe_path: &str) -> Result<(), String> {
|
||||
let hkcu = RegKey::predef(HKEY_CURRENT_USER);
|
||||
|
||||
// Register .html and .htm file associations
|
||||
for ext in &[".html", ".htm"] {
|
||||
let ext_path = format!("Software\\Classes\\{}", ext);
|
||||
|
||||
match hkcu.create_subkey(&ext_path) {
|
||||
Ok((ext_key, _)) => {
|
||||
// Set the default value to our ProgID
|
||||
let _ = ext_key.set_value("", &PROG_ID);
|
||||
}
|
||||
Err(_) => {
|
||||
// Continue if we can't set the file association
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn notify_system_of_changes() {
|
||||
// Use Windows API to notify the system of association changes
|
||||
// This helps refresh the system's understanding of the changes
|
||||
unsafe {
|
||||
use std::ffi::c_void;
|
||||
|
||||
// Declare the Windows API functions
|
||||
type UINT = u32;
|
||||
type DWORD = u32;
|
||||
type LPARAM = isize;
|
||||
type WPARAM = usize;
|
||||
|
||||
const HWND_BROADCAST: *mut c_void = 0xffff as *mut c_void;
|
||||
const WM_SETTINGCHANGE: UINT = 0x001A;
|
||||
const SMTO_ABORTIFHUNG: UINT = 0x0002;
|
||||
|
||||
// Link to user32.dll functions
|
||||
extern "system" {
|
||||
fn SendMessageTimeoutA(
|
||||
hWnd: *mut c_void,
|
||||
Msg: UINT,
|
||||
wParam: WPARAM,
|
||||
lParam: LPARAM,
|
||||
fuFlags: UINT,
|
||||
uTimeout: UINT,
|
||||
lpdwResult: *mut DWORD,
|
||||
) -> isize;
|
||||
}
|
||||
|
||||
let mut result: DWORD = 0;
|
||||
|
||||
// Notify about file associations change
|
||||
SendMessageTimeoutA(
|
||||
HWND_BROADCAST,
|
||||
WM_SETTINGCHANGE,
|
||||
0,
|
||||
"Software\\Classes\0".as_ptr() as LPARAM,
|
||||
SMTO_ABORTIFHUNG,
|
||||
1000,
|
||||
&mut result,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -213,34 +554,21 @@ mod linux {
|
||||
}
|
||||
}
|
||||
|
||||
// Global singleton instance
|
||||
lazy_static::lazy_static! {
|
||||
static ref DEFAULT_BROWSER: DefaultBrowser = DefaultBrowser::new();
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn is_default_browser() -> Result<bool, String> {
|
||||
#[cfg(target_os = "macos")]
|
||||
return macos::is_default_browser();
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
return windows::is_default_browser();
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
return linux::is_default_browser();
|
||||
|
||||
#[cfg(not(any(target_os = "macos", target_os = "windows", target_os = "linux")))]
|
||||
Err("Unsupported platform".to_string())
|
||||
let default_browser = DefaultBrowser::instance();
|
||||
default_browser.is_default_browser().await
|
||||
}
|
||||
|
||||
#[command]
|
||||
pub async fn set_as_default_browser() -> Result<(), String> {
|
||||
#[cfg(target_os = "macos")]
|
||||
return macos::set_as_default_browser();
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
return windows::set_as_default_browser();
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
return linux::set_as_default_browser();
|
||||
|
||||
#[cfg(not(any(target_os = "macos", target_os = "windows", target_os = "linux")))]
|
||||
Err("Unsupported platform".to_string())
|
||||
let default_browser = DefaultBrowser::instance();
|
||||
default_browser.set_as_default_browser().await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
@@ -249,123 +577,8 @@ pub async fn open_url_with_profile(
|
||||
profile_name: String,
|
||||
url: String,
|
||||
) -> Result<(), String> {
|
||||
use crate::browser_runner::BrowserRunner;
|
||||
|
||||
let runner = BrowserRunner::new();
|
||||
|
||||
// Get the profile by name
|
||||
let profiles = runner
|
||||
.list_profiles()
|
||||
.map_err(|e| format!("Failed to list profiles: {e}"))?;
|
||||
let profile = profiles
|
||||
.into_iter()
|
||||
.find(|p| p.name == profile_name)
|
||||
.ok_or_else(|| format!("Profile '{profile_name}' not found"))?;
|
||||
|
||||
println!("Opening URL '{url}' with profile '{profile_name}'");
|
||||
|
||||
// Use launch_or_open_url which handles both launching new instances and opening in existing ones
|
||||
runner
|
||||
.launch_or_open_url(app_handle, &profile, Some(url.clone()))
|
||||
let default_browser = DefaultBrowser::instance();
|
||||
default_browser
|
||||
.open_url_with_profile(app_handle, profile_name, url)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
println!("Failed to open URL with profile '{profile_name}': {e}");
|
||||
format!("Failed to open URL with profile: {e}")
|
||||
})?;
|
||||
|
||||
println!("Successfully opened URL '{url}' with profile '{profile_name}'");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn smart_open_url(
|
||||
app_handle: tauri::AppHandle,
|
||||
url: String,
|
||||
_is_startup: Option<bool>,
|
||||
) -> Result<String, String> {
|
||||
use crate::browser_runner::BrowserRunner;
|
||||
|
||||
let runner = BrowserRunner::new();
|
||||
|
||||
// Get all profiles
|
||||
let profiles = runner
|
||||
.list_profiles()
|
||||
.map_err(|e| format!("Failed to list profiles: {e}"))?;
|
||||
|
||||
if profiles.is_empty() {
|
||||
return Err("no_profiles".to_string());
|
||||
}
|
||||
|
||||
println!(
|
||||
"URL opening - Total profiles: {}, checking for running profiles",
|
||||
profiles.len()
|
||||
);
|
||||
|
||||
// Check for running profiles and find the first one that can handle URLs
|
||||
for profile in &profiles {
|
||||
// Check if this profile is running
|
||||
let is_running = runner
|
||||
.check_browser_status(app_handle.clone(), profile)
|
||||
.await
|
||||
.unwrap_or(false);
|
||||
|
||||
if is_running {
|
||||
println!(
|
||||
"Found running profile '{}', attempting to open URL",
|
||||
profile.name
|
||||
);
|
||||
|
||||
// For TOR browser: Check if any other TOR browser is running
|
||||
if profile.browser == "tor-browser" {
|
||||
let mut other_tor_running = false;
|
||||
for p in &profiles {
|
||||
if p.browser == "tor-browser"
|
||||
&& p.name != profile.name
|
||||
&& runner
|
||||
.check_browser_status(app_handle.clone(), p)
|
||||
.await
|
||||
.unwrap_or(false)
|
||||
{
|
||||
other_tor_running = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if other_tor_running {
|
||||
continue; // Skip this one, can't have multiple TOR instances
|
||||
}
|
||||
}
|
||||
|
||||
// For Mullvad browser: skip if running (can't open URLs in running Mullvad)
|
||||
if profile.browser == "mullvad-browser" {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Try to open the URL with this running profile
|
||||
match runner
|
||||
.launch_or_open_url(app_handle.clone(), profile, Some(url.clone()))
|
||||
.await
|
||||
{
|
||||
Ok(_) => {
|
||||
println!(
|
||||
"Successfully opened URL '{}' with running profile '{}'",
|
||||
url, profile.name
|
||||
);
|
||||
return Ok(format!("opened_with_profile:{}", profile.name));
|
||||
}
|
||||
Err(e) => {
|
||||
println!(
|
||||
"Failed to open URL with running profile '{}': {}",
|
||||
profile.name, e
|
||||
);
|
||||
// Continue to try other profiles or show selector
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
println!("No suitable running profiles found, showing profile selector");
|
||||
|
||||
// No suitable running profile found, show the profile selector
|
||||
Err("show_selector".to_string())
|
||||
}
|
||||
|
||||
+198
-390
@@ -1,13 +1,12 @@
|
||||
use reqwest::Client;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fs::File;
|
||||
use std::io;
|
||||
use std::path::{Path, PathBuf};
|
||||
use tauri::Emitter;
|
||||
|
||||
use crate::api_client::ApiClient;
|
||||
use crate::browser::BrowserType;
|
||||
use crate::browser_version_service::DownloadInfo;
|
||||
use crate::browser_version_manager::DownloadInfo;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct DownloadProgress {
|
||||
@@ -23,22 +22,26 @@ pub struct DownloadProgress {
|
||||
|
||||
pub struct Downloader {
|
||||
client: Client,
|
||||
api_client: ApiClient,
|
||||
api_client: &'static ApiClient,
|
||||
}
|
||||
|
||||
impl Downloader {
|
||||
pub fn new() -> Self {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
client: Client::new(),
|
||||
api_client: ApiClient::new(),
|
||||
api_client: ApiClient::instance(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn instance() -> &'static Downloader {
|
||||
&DOWNLOADER
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn new_with_api_client(api_client: ApiClient) -> Self {
|
||||
pub fn new_with_api_client(_api_client: ApiClient) -> Self {
|
||||
Self {
|
||||
client: Client::new(),
|
||||
api_client,
|
||||
api_client: ApiClient::instance(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -79,15 +82,29 @@ impl Downloader {
|
||||
}
|
||||
BrowserType::Zen => {
|
||||
// For Zen, verify the asset exists and handle different naming patterns
|
||||
let releases = self
|
||||
.api_client
|
||||
.fetch_zen_releases_with_caching(true)
|
||||
.await?;
|
||||
let releases = match self.api_client.fetch_zen_releases_with_caching(true).await {
|
||||
Ok(releases) => releases,
|
||||
Err(e) => {
|
||||
eprintln!("Failed to fetch Zen releases: {e}");
|
||||
return Err(format!("Failed to fetch Zen releases from GitHub API: {e}. This might be due to GitHub API rate limiting or network issues. Please try again later.").into());
|
||||
}
|
||||
};
|
||||
|
||||
let release = releases
|
||||
.iter()
|
||||
.find(|r| r.tag_name == version)
|
||||
.ok_or(format!("Zen version {version} not found"))?;
|
||||
.ok_or_else(|| {
|
||||
format!(
|
||||
"Zen version {} not found. Available versions: {}",
|
||||
version,
|
||||
releases
|
||||
.iter()
|
||||
.take(5)
|
||||
.map(|r| r.tag_name.as_str())
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ")
|
||||
)
|
||||
})?;
|
||||
|
||||
// Get platform and architecture info
|
||||
let (os, arch) = Self::get_platform_info();
|
||||
@@ -95,9 +112,17 @@ impl Downloader {
|
||||
// Find the appropriate asset
|
||||
let asset_url = self
|
||||
.find_zen_asset(&release.assets, &os, &arch)
|
||||
.ok_or(format!(
|
||||
"No compatible asset found for Zen version {version} on {os}/{arch}"
|
||||
))?;
|
||||
.ok_or_else(|| {
|
||||
let available_assets: Vec<&str> =
|
||||
release.assets.iter().map(|a| a.name.as_str()).collect();
|
||||
format!(
|
||||
"No compatible asset found for Zen version {} on {}/{}. Available assets: {}",
|
||||
version,
|
||||
os,
|
||||
arch,
|
||||
available_assets.join(", ")
|
||||
)
|
||||
})?;
|
||||
|
||||
Ok(asset_url)
|
||||
}
|
||||
@@ -125,6 +150,30 @@ impl Downloader {
|
||||
|
||||
Ok(asset_url)
|
||||
}
|
||||
BrowserType::Camoufox => {
|
||||
// For Camoufox, verify the asset exists and find the correct download URL
|
||||
let releases = self
|
||||
.api_client
|
||||
.fetch_camoufox_releases_with_caching(true)
|
||||
.await?;
|
||||
|
||||
let release = releases
|
||||
.iter()
|
||||
.find(|r| r.tag_name == version)
|
||||
.ok_or(format!("Camoufox version {version} not found"))?;
|
||||
|
||||
// Get platform and architecture info
|
||||
let (os, arch) = Self::get_platform_info();
|
||||
|
||||
// Find the appropriate asset
|
||||
let asset_url = self
|
||||
.find_camoufox_asset(&release.assets, &os, &arch)
|
||||
.ok_or(format!(
|
||||
"No compatible asset found for Camoufox version {version} on {os}/{arch}"
|
||||
))?;
|
||||
|
||||
Ok(asset_url)
|
||||
}
|
||||
_ => {
|
||||
// For other browsers, use the provided URL
|
||||
Ok(download_info.url.clone())
|
||||
@@ -299,6 +348,35 @@ impl Downloader {
|
||||
asset.map(|a| a.browser_download_url.clone())
|
||||
}
|
||||
|
||||
/// Find the appropriate Camoufox asset for the current platform and architecture
|
||||
fn find_camoufox_asset(
|
||||
&self,
|
||||
assets: &[crate::browser::GithubAsset],
|
||||
os: &str,
|
||||
arch: &str,
|
||||
) -> Option<String> {
|
||||
// Camoufox asset naming pattern: camoufox-{version}-{release}-{os}.{arch}.zip
|
||||
let (os_name, arch_name) = match (os, arch) {
|
||||
("windows", "x64") => ("win", "x86_64"),
|
||||
("windows", "arm64") => ("win", "arm64"),
|
||||
("linux", "x64") => ("lin", "x86_64"),
|
||||
("linux", "arm64") => ("lin", "arm64"),
|
||||
("macos", "x64") => ("mac", "x86_64"),
|
||||
("macos", "arm64") => ("mac", "arm64"),
|
||||
_ => return None,
|
||||
};
|
||||
|
||||
// Look for assets matching the pattern
|
||||
let asset = assets.iter().find(|asset| {
|
||||
let name = asset.name.to_lowercase();
|
||||
name.starts_with("camoufox-")
|
||||
&& name.contains(&format!("-{os_name}.{arch_name}.zip"))
|
||||
&& name.ends_with(".zip")
|
||||
});
|
||||
|
||||
asset.map(|a| a.browser_download_url.clone())
|
||||
}
|
||||
|
||||
pub async fn download_browser<R: tauri::Runtime>(
|
||||
&self,
|
||||
app_handle: &tauri::AppHandle<R>,
|
||||
@@ -318,43 +396,104 @@ impl Downloader {
|
||||
let is_twilight =
|
||||
browser_type == BrowserType::Zen && version.to_lowercase().contains("twilight");
|
||||
|
||||
// Emit initial progress
|
||||
// Determine if we have a partial file to resume
|
||||
let mut existing_size: u64 = 0;
|
||||
if let Ok(meta) = std::fs::metadata(&file_path) {
|
||||
existing_size = meta.len();
|
||||
}
|
||||
|
||||
// Build request, add Range only if we have bytes
|
||||
let mut request = self
|
||||
.client
|
||||
.get(&download_url)
|
||||
.header(
|
||||
"User-Agent",
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36",
|
||||
);
|
||||
|
||||
if existing_size > 0 {
|
||||
request = request.header("Range", format!("bytes={existing_size}-"));
|
||||
}
|
||||
|
||||
// Start download (or resume)
|
||||
let response = request.send().await?;
|
||||
|
||||
// Check if the response is successful
|
||||
if !(response.status().is_success() || response.status().as_u16() == 206) {
|
||||
return Err(format!("Download failed with status: {}", response.status()).into());
|
||||
}
|
||||
|
||||
// Determine total size
|
||||
let mut total_size = response.content_length();
|
||||
|
||||
// If resuming (206) and Content-Range is present, parse total
|
||||
if response.status().as_u16() == 206 {
|
||||
if let Some(content_range) = response.headers().get(reqwest::header::CONTENT_RANGE) {
|
||||
if let Ok(cr) = content_range.to_str() {
|
||||
// Format: bytes start-end/total
|
||||
if let Some((_, total_str)) = cr.split('/').collect::<Vec<_>>().split_first() {
|
||||
if let Some(total_str) = total_str.first() {
|
||||
if let Ok(total) = total_str.parse::<u64>() {
|
||||
total_size = Some(total);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if let Some(len) = response.headers().get(reqwest::header::CONTENT_LENGTH) {
|
||||
// Fallback: total = existing + incoming length
|
||||
if let Ok(len_str) = len.to_str() {
|
||||
if let Ok(incoming) = len_str.parse::<u64>() {
|
||||
total_size = Some(existing_size + incoming);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if existing_size > 0 && response.status().is_success() {
|
||||
// Server ignored range or we asked from 0; if 200 and existing file has content, start fresh
|
||||
// Truncate existing file so we don't append duplicate bytes
|
||||
let _ = std::fs::remove_file(&file_path);
|
||||
existing_size = 0;
|
||||
}
|
||||
|
||||
let mut downloaded = existing_size;
|
||||
let start_time = std::time::Instant::now();
|
||||
let mut last_update = start_time;
|
||||
|
||||
// Emit initial progress AFTER we've established total size and resume state
|
||||
let initial_percentage = if let Some(total) = total_size {
|
||||
if total > 0 {
|
||||
(existing_size as f64 / total as f64) * 100.0
|
||||
} else {
|
||||
0.0
|
||||
}
|
||||
} else {
|
||||
0.0
|
||||
};
|
||||
|
||||
let initial_stage = if is_twilight {
|
||||
"downloading (twilight rolling release)".to_string()
|
||||
} else {
|
||||
"downloading".to_string()
|
||||
};
|
||||
|
||||
let progress = DownloadProgress {
|
||||
browser: browser_type.as_str().to_string(),
|
||||
version: version.to_string(),
|
||||
downloaded_bytes: 0,
|
||||
total_bytes: None,
|
||||
percentage: 0.0,
|
||||
downloaded_bytes: existing_size,
|
||||
total_bytes: total_size,
|
||||
percentage: initial_percentage,
|
||||
speed_bytes_per_sec: 0.0,
|
||||
eta_seconds: None,
|
||||
stage: if is_twilight {
|
||||
"downloading (twilight rolling release)".to_string()
|
||||
} else {
|
||||
"downloading".to_string()
|
||||
},
|
||||
stage: initial_stage,
|
||||
};
|
||||
|
||||
let _ = app_handle.emit("download-progress", &progress);
|
||||
|
||||
// Start download
|
||||
let response = self
|
||||
.client
|
||||
.get(&download_url)
|
||||
.header("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36")
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
// Check if the response is successful
|
||||
if !response.status().is_success() {
|
||||
return Err(format!("Download failed with status: {}", response.status()).into());
|
||||
}
|
||||
|
||||
let total_size = response.content_length();
|
||||
let mut downloaded = 0u64;
|
||||
let start_time = std::time::Instant::now();
|
||||
let mut last_update = start_time;
|
||||
|
||||
let mut file = File::create(&file_path)?;
|
||||
// Open file in append mode (resuming) or create new
|
||||
use std::fs::OpenOptions;
|
||||
let mut file = OpenOptions::new()
|
||||
.create(true)
|
||||
.append(true)
|
||||
.open(&file_path)?;
|
||||
let mut stream = response.bytes_stream();
|
||||
|
||||
use futures_util::StreamExt;
|
||||
@@ -367,13 +506,19 @@ impl Downloader {
|
||||
// Update progress every 100ms to avoid too many events
|
||||
if now.duration_since(last_update).as_millis() >= 100 {
|
||||
let elapsed = start_time.elapsed().as_secs_f64();
|
||||
// Compute speed based only on bytes downloaded in this session to avoid inflated values when resuming
|
||||
let downloaded_since_start = downloaded.saturating_sub(existing_size);
|
||||
let speed = if elapsed > 0.0 {
|
||||
downloaded as f64 / elapsed
|
||||
downloaded_since_start as f64 / elapsed
|
||||
} else {
|
||||
0.0
|
||||
};
|
||||
let percentage = if let Some(total) = total_size {
|
||||
(downloaded as f64 / total as f64) * 100.0
|
||||
if total > 0 {
|
||||
(downloaded as f64 / total as f64) * 100.0
|
||||
} else {
|
||||
0.0
|
||||
}
|
||||
} else {
|
||||
0.0
|
||||
};
|
||||
@@ -414,10 +559,10 @@ mod tests {
|
||||
use super::*;
|
||||
use crate::api_client::ApiClient;
|
||||
use crate::browser::BrowserType;
|
||||
use crate::browser_version_service::DownloadInfo;
|
||||
use crate::browser_version_manager::DownloadInfo;
|
||||
|
||||
use tempfile::TempDir;
|
||||
use wiremock::matchers::{method, path, query_param};
|
||||
use wiremock::matchers::{method, path};
|
||||
use wiremock::{Mock, MockServer, ResponseTemplate};
|
||||
|
||||
async fn setup_mock_server() -> MockServer {
|
||||
@@ -435,153 +580,10 @@ mod tests {
|
||||
)
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_resolve_brave_download_url() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let mock_response = r#"[
|
||||
{
|
||||
"tag_name": "v1.81.9",
|
||||
"name": "Brave Release 1.81.9",
|
||||
"prerelease": false,
|
||||
"published_at": "2024-01-15T10:00:00Z",
|
||||
"assets": [
|
||||
{
|
||||
"name": "brave-v1.81.9-universal.dmg",
|
||||
"browser_download_url": "https://example.com/brave-1.81.9-universal.dmg",
|
||||
"size": 200000000
|
||||
}
|
||||
]
|
||||
}
|
||||
]"#;
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/repos/brave/brave-browser/releases"))
|
||||
.and(query_param("per_page", "100"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(mock_response)
|
||||
.insert_header("content-type", "application/json"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "placeholder".to_string(),
|
||||
filename: "brave-test.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::Brave, "v1.81.9", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let url = result.unwrap();
|
||||
assert_eq!(url, "https://example.com/brave-1.81.9-universal.dmg");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_resolve_zen_download_url() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let mock_response = r#"[
|
||||
{
|
||||
"tag_name": "1.11b",
|
||||
"name": "Zen Browser 1.11b",
|
||||
"prerelease": false,
|
||||
"published_at": "2024-01-15T10:00:00Z",
|
||||
"assets": [
|
||||
{
|
||||
"name": "zen.macos-universal.dmg",
|
||||
"browser_download_url": "https://example.com/zen-1.11b-universal.dmg",
|
||||
"size": 120000000
|
||||
}
|
||||
]
|
||||
}
|
||||
]"#;
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/repos/zen-browser/desktop/releases"))
|
||||
.and(query_param("per_page", "100"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(mock_response)
|
||||
.insert_header("content-type", "application/json"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "placeholder".to_string(),
|
||||
filename: "zen-test.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::Zen, "1.11b", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let url = result.unwrap();
|
||||
assert_eq!(url, "https://example.com/zen-1.11b-universal.dmg");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_resolve_mullvad_download_url() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let mock_response = r#"[
|
||||
{
|
||||
"tag_name": "14.5a6",
|
||||
"name": "Mullvad Browser 14.5a6",
|
||||
"prerelease": true,
|
||||
"published_at": "2024-01-15T10:00:00Z",
|
||||
"assets": [
|
||||
{
|
||||
"name": "mullvad-browser-macos-14.5a6.dmg",
|
||||
"browser_download_url": "https://example.com/mullvad-14.5a6.dmg",
|
||||
"size": 100000000
|
||||
}
|
||||
]
|
||||
}
|
||||
]"#;
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/repos/mullvad/mullvad-browser/releases"))
|
||||
.and(query_param("per_page", "100"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(mock_response)
|
||||
.insert_header("content-type", "application/json"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "placeholder".to_string(),
|
||||
filename: "mullvad-test.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::MullvadBrowser, "14.5a6", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let url = result.unwrap();
|
||||
assert_eq!(url, "https://example.com/mullvad-14.5a6.dmg");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_resolve_firefox_download_url() {
|
||||
let server = setup_mock_server().await;
|
||||
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
@@ -642,106 +644,6 @@ mod tests {
|
||||
assert_eq!(url, download_info.url);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_resolve_brave_version_not_found() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let mock_response = r#"[
|
||||
{
|
||||
"tag_name": "v1.81.8",
|
||||
"name": "Brave Release 1.81.8",
|
||||
"prerelease": false,
|
||||
"published_at": "2024-01-15T10:00:00Z",
|
||||
"assets": [
|
||||
{
|
||||
"name": "brave-v1.81.8-universal.dmg",
|
||||
"browser_download_url": "https://example.com/brave-1.81.8-universal.dmg",
|
||||
"size": 200000000
|
||||
}
|
||||
]
|
||||
}
|
||||
]"#;
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/repos/brave/brave-browser/releases"))
|
||||
.and(query_param("per_page", "100"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(mock_response)
|
||||
.insert_header("content-type", "application/json"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "placeholder".to_string(),
|
||||
filename: "brave-test.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::Brave, "v1.81.9", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_err());
|
||||
assert!(result
|
||||
.unwrap_err()
|
||||
.to_string()
|
||||
.contains("Brave version v1.81.9 not found"));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_resolve_zen_asset_not_found() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let mock_response = r#"[
|
||||
{
|
||||
"tag_name": "1.11b",
|
||||
"name": "Zen Browser 1.11b",
|
||||
"prerelease": false,
|
||||
"published_at": "2024-01-15T10:00:00Z",
|
||||
"assets": [
|
||||
{
|
||||
"name": "zen.linux-universal.tar.bz2",
|
||||
"browser_download_url": "https://example.com/zen-1.11b-linux.tar.bz2",
|
||||
"size": 150000000
|
||||
}
|
||||
]
|
||||
}
|
||||
]"#;
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/repos/zen-browser/desktop/releases"))
|
||||
.and(query_param("per_page", "100"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(mock_response)
|
||||
.insert_header("content-type", "application/json"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "placeholder".to_string(),
|
||||
filename: "zen-test.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::Zen, "1.11b", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_err());
|
||||
assert!(result
|
||||
.unwrap_err()
|
||||
.to_string()
|
||||
.contains("No compatible asset found"));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_download_browser_with_progress() {
|
||||
let server = setup_mock_server().await;
|
||||
@@ -834,105 +736,6 @@ mod tests {
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_resolve_mullvad_asset_not_found() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let mock_response = r#"[
|
||||
{
|
||||
"tag_name": "14.5a6",
|
||||
"name": "Mullvad Browser 14.5a6",
|
||||
"prerelease": true,
|
||||
"published_at": "2024-01-15T10:00:00Z",
|
||||
"assets": [
|
||||
{
|
||||
"name": "mullvad-browser-linux-14.5a6.tar.xz",
|
||||
"browser_download_url": "https://example.com/mullvad-14.5a6.tar.xz",
|
||||
"size": 80000000
|
||||
}
|
||||
]
|
||||
}
|
||||
]"#;
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/repos/mullvad/mullvad-browser/releases"))
|
||||
.and(query_param("per_page", "100"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(mock_response)
|
||||
.insert_header("content-type", "application/json"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "placeholder".to_string(),
|
||||
filename: "mullvad-test.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::MullvadBrowser, "14.5a6", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_err());
|
||||
assert!(result
|
||||
.unwrap_err()
|
||||
.to_string()
|
||||
.contains("No compatible asset found"));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_brave_version_with_v_prefix() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let mock_response = r#"[
|
||||
{
|
||||
"tag_name": "v1.81.9",
|
||||
"name": "Brave Release 1.81.9",
|
||||
"prerelease": false,
|
||||
"published_at": "2024-01-15T10:00:00Z",
|
||||
"assets": [
|
||||
{
|
||||
"name": "brave-v1.81.9-universal.dmg",
|
||||
"browser_download_url": "https://example.com/brave-1.81.9-universal.dmg",
|
||||
"size": 200000000
|
||||
}
|
||||
]
|
||||
}
|
||||
]"#;
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/repos/brave/brave-browser/releases"))
|
||||
.and(query_param("per_page", "100"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(mock_response)
|
||||
.insert_header("content-type", "application/json"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "placeholder".to_string(),
|
||||
filename: "brave-test.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
// Test with version without v prefix
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::Brave, "1.81.9", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let url = result.unwrap();
|
||||
assert_eq!(url, "https://example.com/brave-1.81.9-universal.dmg");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_download_browser_chunked_response() {
|
||||
let server = setup_mock_server().await;
|
||||
@@ -983,3 +786,8 @@ mod tests {
|
||||
assert_eq!(downloaded_content.len(), test_content.len());
|
||||
}
|
||||
}
|
||||
|
||||
// Global singleton instance
|
||||
lazy_static::lazy_static! {
|
||||
static ref DOWNLOADER: Downloader = Downloader::new();
|
||||
}
|
||||
|
||||
@@ -3,43 +3,51 @@ use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Mutex;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct DownloadedBrowserInfo {
|
||||
pub browser: String,
|
||||
pub version: String,
|
||||
pub download_date: u64,
|
||||
pub file_path: PathBuf,
|
||||
pub verified: bool,
|
||||
pub actual_version: Option<String>, // For browsers like Chromium where we track the actual version
|
||||
pub file_size: Option<u64>, // For tracking file size changes (useful for rolling releases)
|
||||
#[serde(default)] // Add default value (false) for backwards compatibility
|
||||
pub is_rolling_release: bool, // True for Zen's twilight releases and other rolling releases
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Default)]
|
||||
pub struct DownloadedBrowsersRegistry {
|
||||
struct RegistryData {
|
||||
pub browsers: HashMap<String, HashMap<String, DownloadedBrowserInfo>>, // browser -> version -> info
|
||||
}
|
||||
|
||||
pub struct DownloadedBrowsersRegistry {
|
||||
data: Mutex<RegistryData>,
|
||||
}
|
||||
|
||||
impl DownloadedBrowsersRegistry {
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
data: Mutex::new(RegistryData::default()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn load() -> Result<Self, Box<dyn std::error::Error>> {
|
||||
pub fn instance() -> &'static DownloadedBrowsersRegistry {
|
||||
&DOWNLOADED_BROWSERS_REGISTRY
|
||||
}
|
||||
|
||||
pub fn load(&self) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let registry_path = Self::get_registry_path()?;
|
||||
|
||||
if !registry_path.exists() {
|
||||
return Ok(Self::new());
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let content = fs::read_to_string(®istry_path)?;
|
||||
let registry: DownloadedBrowsersRegistry = serde_json::from_str(&content)?;
|
||||
Ok(registry)
|
||||
let registry_data: RegistryData = serde_json::from_str(&content)?;
|
||||
|
||||
let mut data = self.data.lock().unwrap();
|
||||
*data = registry_data;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn save(&self) -> Result<(), Box<dyn std::error::Error>> {
|
||||
pub fn save(&self) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let registry_path = Self::get_registry_path()?;
|
||||
|
||||
// Ensure parent directory exists
|
||||
@@ -47,12 +55,13 @@ impl DownloadedBrowsersRegistry {
|
||||
fs::create_dir_all(parent)?;
|
||||
}
|
||||
|
||||
let content = serde_json::to_string_pretty(self)?;
|
||||
let data = self.data.lock().unwrap();
|
||||
let content = serde_json::to_string_pretty(&*data)?;
|
||||
fs::write(®istry_path, content)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_registry_path() -> Result<PathBuf, Box<dyn std::error::Error>> {
|
||||
fn get_registry_path() -> Result<PathBuf, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let base_dirs = BaseDirs::new().ok_or("Failed to get base directories")?;
|
||||
let mut path = base_dirs.data_local_dir().to_path_buf();
|
||||
path.push(if cfg!(debug_assertions) {
|
||||
@@ -65,88 +74,66 @@ impl DownloadedBrowsersRegistry {
|
||||
Ok(path)
|
||||
}
|
||||
|
||||
pub fn add_browser(&mut self, info: DownloadedBrowserInfo) {
|
||||
self
|
||||
pub fn add_browser(&self, info: DownloadedBrowserInfo) {
|
||||
let mut data = self.data.lock().unwrap();
|
||||
data
|
||||
.browsers
|
||||
.entry(info.browser.clone())
|
||||
.or_default()
|
||||
.insert(info.version.clone(), info);
|
||||
}
|
||||
|
||||
pub fn remove_browser(&mut self, browser: &str, version: &str) -> Option<DownloadedBrowserInfo> {
|
||||
self.browsers.get_mut(browser)?.remove(version)
|
||||
pub fn remove_browser(&self, browser: &str, version: &str) -> Option<DownloadedBrowserInfo> {
|
||||
let mut data = self.data.lock().unwrap();
|
||||
data.browsers.get_mut(browser)?.remove(version)
|
||||
}
|
||||
|
||||
pub fn is_browser_downloaded(&self, browser: &str, version: &str) -> bool {
|
||||
self
|
||||
let data = self.data.lock().unwrap();
|
||||
data
|
||||
.browsers
|
||||
.get(browser)
|
||||
.and_then(|versions| versions.get(version))
|
||||
.map(|info| info.verified)
|
||||
.unwrap_or(false)
|
||||
.is_some()
|
||||
}
|
||||
|
||||
pub fn get_downloaded_versions(&self, browser: &str) -> Vec<String> {
|
||||
self
|
||||
let data = self.data.lock().unwrap();
|
||||
data
|
||||
.browsers
|
||||
.get(browser)
|
||||
.map(|versions| {
|
||||
versions
|
||||
.iter()
|
||||
.filter(|(_, info)| info.verified)
|
||||
.map(|(version, _)| version.clone())
|
||||
.collect()
|
||||
})
|
||||
.map(|versions| versions.keys().cloned().collect())
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
pub fn mark_download_started(&mut self, browser: &str, version: &str, file_path: PathBuf) {
|
||||
let is_rolling = Self::is_rolling_release(browser, version);
|
||||
pub fn mark_download_started(&self, browser: &str, version: &str, file_path: PathBuf) {
|
||||
let info = DownloadedBrowserInfo {
|
||||
browser: browser.to_string(),
|
||||
version: version.to_string(),
|
||||
download_date: std::time::SystemTime::now()
|
||||
.duration_since(std::time::UNIX_EPOCH)
|
||||
.unwrap_or_default()
|
||||
.as_secs(),
|
||||
file_path,
|
||||
verified: false,
|
||||
actual_version: None,
|
||||
file_size: None,
|
||||
is_rolling_release: is_rolling,
|
||||
};
|
||||
self.add_browser(info);
|
||||
}
|
||||
|
||||
pub fn mark_download_completed_with_actual_version(
|
||||
&mut self,
|
||||
browser: &str,
|
||||
version: &str,
|
||||
actual_version: Option<String>,
|
||||
) -> Result<(), String> {
|
||||
if let Some(info) = self
|
||||
pub fn mark_download_completed(&self, browser: &str, version: &str) -> Result<(), String> {
|
||||
let data = self.data.lock().unwrap();
|
||||
if data
|
||||
.browsers
|
||||
.get_mut(browser)
|
||||
.and_then(|versions| versions.get_mut(version))
|
||||
.get(browser)
|
||||
.and_then(|versions| versions.get(version))
|
||||
.is_some()
|
||||
{
|
||||
info.verified = true;
|
||||
info.actual_version = actual_version;
|
||||
Ok(())
|
||||
} else {
|
||||
Err(format!("Browser {browser}:{version} not found in registry"))
|
||||
}
|
||||
}
|
||||
|
||||
fn is_rolling_release(browser: &str, version: &str) -> bool {
|
||||
// Check if this is a rolling release like twilight
|
||||
browser == "zen" && version.to_lowercase() == "twilight"
|
||||
}
|
||||
|
||||
pub fn cleanup_failed_download(
|
||||
&mut self,
|
||||
&self,
|
||||
browser: &str,
|
||||
version: &str,
|
||||
) -> Result<(), Box<dyn std::error::Error>> {
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
if let Some(info) = self.remove_browser(browser, version) {
|
||||
// Clean up any files that might have been left behind
|
||||
if info.file_path.exists() {
|
||||
@@ -178,19 +165,39 @@ impl DownloadedBrowsersRegistry {
|
||||
|
||||
/// Find and remove unused browser binaries that are not referenced by any active profiles
|
||||
pub fn cleanup_unused_binaries(
|
||||
&mut self,
|
||||
&self,
|
||||
active_profiles: &[(String, String)], // (browser, version) pairs
|
||||
) -> Result<Vec<String>, Box<dyn std::error::Error>> {
|
||||
running_profiles: &[(String, String)], // (browser, version) pairs for running profiles
|
||||
) -> Result<Vec<String>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let active_set: std::collections::HashSet<(String, String)> =
|
||||
active_profiles.iter().cloned().collect();
|
||||
let running_set: std::collections::HashSet<(String, String)> =
|
||||
running_profiles.iter().cloned().collect();
|
||||
let mut cleaned_up = Vec::new();
|
||||
|
||||
// Collect all downloaded browsers that are not in active profiles
|
||||
let mut to_remove = Vec::new();
|
||||
for (browser, versions) in &self.browsers {
|
||||
for (version, info) in versions {
|
||||
if info.verified && !active_set.contains(&(browser.clone(), version.clone())) {
|
||||
to_remove.push((browser.clone(), version.clone()));
|
||||
{
|
||||
let data = self.data.lock().unwrap();
|
||||
for (browser, versions) in &data.browsers {
|
||||
for version in versions.keys() {
|
||||
let browser_version = (browser.clone(), version.clone());
|
||||
|
||||
// Don't remove if it's used by any active profile
|
||||
if active_set.contains(&browser_version) {
|
||||
println!("Keeping: {browser} {version} (in use by profile)");
|
||||
continue;
|
||||
}
|
||||
|
||||
// Don't remove if it's currently running (even if not in active profiles)
|
||||
if running_set.contains(&browser_version) {
|
||||
println!("Keeping: {browser} {version} (currently running)");
|
||||
continue;
|
||||
}
|
||||
|
||||
// Mark for removal
|
||||
to_remove.push(browser_version);
|
||||
println!("Marking for removal: {browser} {version} (not used by any profile)");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -201,22 +208,224 @@ impl DownloadedBrowsersRegistry {
|
||||
eprintln!("Failed to cleanup unused binary {browser}:{version}: {e}");
|
||||
} else {
|
||||
cleaned_up.push(format!("{browser} {version}"));
|
||||
println!("Successfully removed unused binary: {browser} {version}");
|
||||
}
|
||||
}
|
||||
|
||||
if cleaned_up.is_empty() {
|
||||
println!("No unused binaries found to clean up");
|
||||
} else {
|
||||
println!("Cleaned up {} unused binaries", cleaned_up.len());
|
||||
}
|
||||
|
||||
Ok(cleaned_up)
|
||||
}
|
||||
|
||||
/// Get all browsers and versions referenced by active profiles
|
||||
pub fn get_active_browser_versions(
|
||||
&self,
|
||||
profiles: &[crate::browser_runner::BrowserProfile],
|
||||
profiles: &[crate::profile::BrowserProfile],
|
||||
) -> Vec<(String, String)> {
|
||||
profiles
|
||||
.iter()
|
||||
.map(|profile| (profile.browser.clone(), profile.version.clone()))
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Verify that all registered browsers actually exist on disk and clean up stale entries
|
||||
pub fn verify_and_cleanup_stale_entries(
|
||||
&self,
|
||||
browser_runner: &crate::browser_runner::BrowserRunner,
|
||||
) -> Result<Vec<String>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
use crate::browser::{create_browser, BrowserType};
|
||||
let mut cleaned_up = Vec::new();
|
||||
let binaries_dir = browser_runner.get_binaries_dir();
|
||||
|
||||
let browsers_to_check: Vec<(String, String)> = {
|
||||
let data = self.data.lock().unwrap();
|
||||
data
|
||||
.browsers
|
||||
.iter()
|
||||
.flat_map(|(browser, versions)| {
|
||||
versions
|
||||
.keys()
|
||||
.map(|version| (browser.clone(), version.clone()))
|
||||
})
|
||||
.collect()
|
||||
};
|
||||
|
||||
for (browser_str, version) in browsers_to_check {
|
||||
if let Ok(browser_type) = BrowserType::from_str(&browser_str) {
|
||||
let browser = create_browser(browser_type);
|
||||
if !browser.is_version_downloaded(&version, &binaries_dir) {
|
||||
// Files don't exist, remove from registry
|
||||
if let Some(_removed) = self.remove_browser(&browser_str, &version) {
|
||||
cleaned_up.push(format!("{browser_str} {version}"));
|
||||
println!("Removed stale registry entry for {browser_str} {version}");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !cleaned_up.is_empty() {
|
||||
self.save()?;
|
||||
}
|
||||
|
||||
Ok(cleaned_up)
|
||||
}
|
||||
|
||||
/// Get all browsers and versions that are currently running
|
||||
pub fn get_running_browser_versions(
|
||||
&self,
|
||||
profiles: &[crate::profile::BrowserProfile],
|
||||
) -> Vec<(String, String)> {
|
||||
profiles
|
||||
.iter()
|
||||
.filter(|profile| profile.process_id.is_some())
|
||||
.map(|profile| (profile.browser.clone(), profile.version.clone()))
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Scan the binaries directory and sync with registry
|
||||
/// This ensures the registry reflects what's actually on disk
|
||||
pub fn sync_with_binaries_directory(
|
||||
&self,
|
||||
binaries_dir: &std::path::Path,
|
||||
) -> Result<Vec<String>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let mut changes = Vec::new();
|
||||
|
||||
if !binaries_dir.exists() {
|
||||
return Ok(changes);
|
||||
}
|
||||
|
||||
// Scan for actual browser directories
|
||||
for browser_entry in fs::read_dir(binaries_dir)? {
|
||||
let browser_entry = browser_entry?;
|
||||
let browser_path = browser_entry.path();
|
||||
|
||||
if !browser_path.is_dir() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let browser_name = browser_path
|
||||
.file_name()
|
||||
.and_then(|n| n.to_str())
|
||||
.unwrap_or("");
|
||||
|
||||
if browser_name.is_empty() || browser_name.starts_with('.') {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Scan for version directories within this browser
|
||||
for version_entry in fs::read_dir(&browser_path)? {
|
||||
let version_entry = version_entry?;
|
||||
let version_path = version_entry.path();
|
||||
|
||||
if !version_path.is_dir() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let version_name = version_path
|
||||
.file_name()
|
||||
.and_then(|n| n.to_str())
|
||||
.unwrap_or("");
|
||||
|
||||
if version_name.is_empty() || version_name.starts_with('.') {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if this browser/version is already in registry
|
||||
if !self.is_browser_downloaded(browser_name, version_name) {
|
||||
// Add to registry
|
||||
let info = DownloadedBrowserInfo {
|
||||
browser: browser_name.to_string(),
|
||||
version: version_name.to_string(),
|
||||
file_path: version_path.clone(),
|
||||
};
|
||||
self.add_browser(info);
|
||||
changes.push(format!("Added {browser_name} {version_name} to registry"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !changes.is_empty() {
|
||||
self.save()?;
|
||||
}
|
||||
|
||||
Ok(changes)
|
||||
}
|
||||
|
||||
/// Comprehensive cleanup that removes unused binaries and syncs registry
|
||||
pub fn comprehensive_cleanup(
|
||||
&self,
|
||||
binaries_dir: &std::path::Path,
|
||||
active_profiles: &[(String, String)],
|
||||
running_profiles: &[(String, String)],
|
||||
) -> Result<Vec<String>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let mut cleanup_results = Vec::new();
|
||||
|
||||
// First, sync registry with actual binaries on disk
|
||||
let sync_results = self.sync_with_binaries_directory(binaries_dir)?;
|
||||
cleanup_results.extend(sync_results);
|
||||
|
||||
// Then perform the regular cleanup
|
||||
let regular_cleanup = self.cleanup_unused_binaries(active_profiles, running_profiles)?;
|
||||
cleanup_results.extend(regular_cleanup);
|
||||
|
||||
// Finally, verify and cleanup stale entries
|
||||
let stale_cleanup = self.verify_and_cleanup_stale_entries_simple(binaries_dir)?;
|
||||
cleanup_results.extend(stale_cleanup);
|
||||
|
||||
if !cleanup_results.is_empty() {
|
||||
self.save()?;
|
||||
}
|
||||
|
||||
Ok(cleanup_results)
|
||||
}
|
||||
|
||||
/// Simplified version of verify_and_cleanup_stale_entries that doesn't need BrowserRunner
|
||||
pub fn verify_and_cleanup_stale_entries_simple(
|
||||
&self,
|
||||
binaries_dir: &std::path::Path,
|
||||
) -> Result<Vec<String>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let mut cleaned_up = Vec::new();
|
||||
let mut browsers_to_remove = Vec::new();
|
||||
|
||||
{
|
||||
let data = self.data.lock().unwrap();
|
||||
for (browser_str, versions) in &data.browsers {
|
||||
for version in versions.keys() {
|
||||
// Check if the browser directory actually exists
|
||||
let browser_dir = binaries_dir.join(browser_str).join(version);
|
||||
if !browser_dir.exists() {
|
||||
browsers_to_remove.push((browser_str.clone(), version.clone()));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Remove stale entries
|
||||
for (browser_str, version) in browsers_to_remove {
|
||||
if let Some(_removed) = self.remove_browser(&browser_str, &version) {
|
||||
cleaned_up.push(format!(
|
||||
"Removed stale registry entry for {browser_str} {version}"
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(cleaned_up)
|
||||
}
|
||||
}
|
||||
|
||||
// Global singleton instance
|
||||
lazy_static::lazy_static! {
|
||||
static ref DOWNLOADED_BROWSERS_REGISTRY: DownloadedBrowsersRegistry = {
|
||||
let registry = DownloadedBrowsersRegistry::new();
|
||||
if let Err(e) = registry.load() {
|
||||
eprintln!("Warning: Failed to load downloaded browsers registry: {e}");
|
||||
}
|
||||
registry
|
||||
};
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -226,21 +435,17 @@ mod tests {
|
||||
#[test]
|
||||
fn test_registry_creation() {
|
||||
let registry = DownloadedBrowsersRegistry::new();
|
||||
assert!(registry.browsers.is_empty());
|
||||
let data = registry.data.lock().unwrap();
|
||||
assert!(data.browsers.is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_add_and_get_browser() {
|
||||
let mut registry = DownloadedBrowsersRegistry::new();
|
||||
let registry = DownloadedBrowsersRegistry::new();
|
||||
let info = DownloadedBrowserInfo {
|
||||
browser: "firefox".to_string(),
|
||||
version: "139.0".to_string(),
|
||||
download_date: 1234567890,
|
||||
file_path: PathBuf::from("/test/path"),
|
||||
verified: true,
|
||||
actual_version: None,
|
||||
file_size: None,
|
||||
is_rolling_release: false,
|
||||
};
|
||||
|
||||
registry.add_browser(info.clone());
|
||||
@@ -252,39 +457,24 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_get_downloaded_versions() {
|
||||
let mut registry = DownloadedBrowsersRegistry::new();
|
||||
let registry = DownloadedBrowsersRegistry::new();
|
||||
|
||||
let info1 = DownloadedBrowserInfo {
|
||||
browser: "firefox".to_string(),
|
||||
version: "139.0".to_string(),
|
||||
download_date: 1234567890,
|
||||
file_path: PathBuf::from("/test/path1"),
|
||||
verified: true,
|
||||
actual_version: None,
|
||||
file_size: None,
|
||||
is_rolling_release: false,
|
||||
};
|
||||
|
||||
let info2 = DownloadedBrowserInfo {
|
||||
browser: "firefox".to_string(),
|
||||
version: "140.0".to_string(),
|
||||
download_date: 1234567891,
|
||||
file_path: PathBuf::from("/test/path2"),
|
||||
verified: false, // Not verified, should not be included
|
||||
actual_version: None,
|
||||
file_size: None,
|
||||
is_rolling_release: false,
|
||||
};
|
||||
|
||||
let info3 = DownloadedBrowserInfo {
|
||||
browser: "firefox".to_string(),
|
||||
version: "141.0".to_string(),
|
||||
download_date: 1234567892,
|
||||
file_path: PathBuf::from("/test/path3"),
|
||||
verified: true,
|
||||
actual_version: None,
|
||||
file_size: None,
|
||||
is_rolling_release: false,
|
||||
};
|
||||
|
||||
registry.add_browser(info1);
|
||||
@@ -292,63 +482,74 @@ mod tests {
|
||||
registry.add_browser(info3);
|
||||
|
||||
let versions = registry.get_downloaded_versions("firefox");
|
||||
assert_eq!(versions.len(), 2);
|
||||
assert_eq!(versions.len(), 3);
|
||||
assert!(versions.contains(&"139.0".to_string()));
|
||||
assert!(versions.contains(&"140.0".to_string()));
|
||||
assert!(versions.contains(&"141.0".to_string()));
|
||||
assert!(!versions.contains(&"140.0".to_string()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_mark_download_lifecycle() {
|
||||
let mut registry = DownloadedBrowsersRegistry::new();
|
||||
let registry = DownloadedBrowsersRegistry::new();
|
||||
|
||||
// Mark download started
|
||||
registry.mark_download_started("firefox", "139.0", PathBuf::from("/test/path"));
|
||||
|
||||
// Should not be considered downloaded yet
|
||||
assert!(!registry.is_browser_downloaded("firefox", "139.0"));
|
||||
// Should be considered downloaded immediately
|
||||
assert!(
|
||||
registry.is_browser_downloaded("firefox", "139.0"),
|
||||
"Browser should be considered downloaded after marking as started"
|
||||
);
|
||||
|
||||
// Mark as completed
|
||||
registry
|
||||
.mark_download_completed_with_actual_version("firefox", "139.0", Some("139.0".to_string()))
|
||||
.unwrap();
|
||||
.mark_download_completed("firefox", "139.0")
|
||||
.expect("Failed to mark download as completed");
|
||||
|
||||
// Now should be considered downloaded
|
||||
assert!(registry.is_browser_downloaded("firefox", "139.0"));
|
||||
// Should still be considered downloaded
|
||||
assert!(
|
||||
registry.is_browser_downloaded("firefox", "139.0"),
|
||||
"Browser should still be considered downloaded after completion"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_remove_browser() {
|
||||
let mut registry = DownloadedBrowsersRegistry::new();
|
||||
let registry = DownloadedBrowsersRegistry::new();
|
||||
let info = DownloadedBrowserInfo {
|
||||
browser: "firefox".to_string(),
|
||||
version: "139.0".to_string(),
|
||||
download_date: 1234567890,
|
||||
file_path: PathBuf::from("/test/path"),
|
||||
verified: true,
|
||||
actual_version: None,
|
||||
file_size: None,
|
||||
is_rolling_release: false,
|
||||
};
|
||||
|
||||
registry.add_browser(info);
|
||||
assert!(registry.is_browser_downloaded("firefox", "139.0"));
|
||||
assert!(
|
||||
registry.is_browser_downloaded("firefox", "139.0"),
|
||||
"Browser should be downloaded after adding"
|
||||
);
|
||||
|
||||
let removed = registry.remove_browser("firefox", "139.0");
|
||||
assert!(removed.is_some());
|
||||
assert!(!registry.is_browser_downloaded("firefox", "139.0"));
|
||||
assert!(
|
||||
removed.is_some(),
|
||||
"Remove operation should return the removed browser info"
|
||||
);
|
||||
assert!(
|
||||
!registry.is_browser_downloaded("firefox", "139.0"),
|
||||
"Browser should not be downloaded after removal"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_twilight_rolling_release() {
|
||||
let mut registry = DownloadedBrowsersRegistry::new();
|
||||
fn test_twilight_download() {
|
||||
let registry = DownloadedBrowsersRegistry::new();
|
||||
|
||||
// Mark twilight download started
|
||||
registry.mark_download_started("zen", "twilight", PathBuf::from("/test/zen-twilight"));
|
||||
|
||||
// Check that it's marked as rolling release
|
||||
let zen_versions = ®istry.browsers["zen"];
|
||||
let twilight_info = &zen_versions["twilight"];
|
||||
assert!(twilight_info.is_rolling_release);
|
||||
// Check that it's registered
|
||||
assert!(
|
||||
registry.is_browser_downloaded("zen", "twilight"),
|
||||
"Zen twilight version should be registered as downloaded"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
+904
-611
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,360 @@
|
||||
use crate::browser::GithubRelease;
|
||||
use directories::BaseDirs;
|
||||
use reqwest::Client;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::PathBuf;
|
||||
use tauri::Emitter;
|
||||
use tokio::fs;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
|
||||
const MMDB_REPO: &str = "P3TERX/GeoLite.mmdb";
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct GeoIPDownloadProgress {
|
||||
pub stage: String, // "downloading", "extracting", "completed"
|
||||
pub percentage: f64,
|
||||
pub message: String,
|
||||
// Extra fields to mirror browser download progress payload
|
||||
pub downloaded_bytes: Option<u64>,
|
||||
pub total_bytes: Option<u64>,
|
||||
pub speed_bytes_per_sec: Option<f64>,
|
||||
pub eta_seconds: Option<f64>,
|
||||
}
|
||||
|
||||
pub struct GeoIPDownloader {
|
||||
client: Client,
|
||||
}
|
||||
|
||||
impl GeoIPDownloader {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
client: Client::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn instance() -> &'static GeoIPDownloader {
|
||||
&GEOIP_DOWNLOADER
|
||||
}
|
||||
|
||||
/// Create a new downloader with custom client (for testing)
|
||||
#[cfg(test)]
|
||||
pub fn new_with_client(client: Client) -> Self {
|
||||
Self { client }
|
||||
}
|
||||
|
||||
fn get_cache_dir() -> Result<PathBuf, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let base_dirs = BaseDirs::new().ok_or("Failed to determine base directories")?;
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
let cache_dir = base_dirs
|
||||
.data_local_dir()
|
||||
.join("camoufox")
|
||||
.join("camoufox")
|
||||
.join("Cache");
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
let cache_dir = base_dirs.cache_dir().join("camoufox");
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
let cache_dir = base_dirs.cache_dir().join("camoufox");
|
||||
|
||||
#[cfg(not(any(target_os = "windows", target_os = "macos", target_os = "linux")))]
|
||||
let cache_dir = base_dirs.cache_dir().join("camoufox");
|
||||
|
||||
Ok(cache_dir)
|
||||
}
|
||||
|
||||
fn get_mmdb_file_path() -> Result<PathBuf, Box<dyn std::error::Error + Send + Sync>> {
|
||||
Ok(Self::get_cache_dir()?.join("GeoLite2-City.mmdb"))
|
||||
}
|
||||
|
||||
pub fn is_geoip_database_available() -> bool {
|
||||
if let Ok(mmdb_path) = Self::get_mmdb_file_path() {
|
||||
mmdb_path.exists()
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
fn find_city_mmdb_asset(&self, release: &GithubRelease) -> Option<String> {
|
||||
for asset in &release.assets {
|
||||
if asset.name.ends_with("-City.mmdb") {
|
||||
return Some(asset.browser_download_url.clone());
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
pub async fn download_geoip_database(
|
||||
&self,
|
||||
app_handle: &tauri::AppHandle,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Emit initial progress
|
||||
let _ = app_handle.emit(
|
||||
"geoip-download-progress",
|
||||
GeoIPDownloadProgress {
|
||||
stage: "downloading".to_string(),
|
||||
percentage: 0.0,
|
||||
message: "Starting GeoIP database download".to_string(),
|
||||
downloaded_bytes: Some(0),
|
||||
total_bytes: None,
|
||||
speed_bytes_per_sec: Some(0.0),
|
||||
eta_seconds: None,
|
||||
},
|
||||
);
|
||||
|
||||
// Fetch latest release from GitHub
|
||||
let releases = self.fetch_geoip_releases().await?;
|
||||
let latest_release = releases.first().ok_or("No GeoIP database releases found")?;
|
||||
|
||||
let download_url = self
|
||||
.find_city_mmdb_asset(latest_release)
|
||||
.ok_or("No compatible GeoIP database asset found")?;
|
||||
|
||||
// Create cache directory
|
||||
let cache_dir = Self::get_cache_dir()?;
|
||||
fs::create_dir_all(&cache_dir).await?;
|
||||
|
||||
let mmdb_path = Self::get_mmdb_file_path()?;
|
||||
|
||||
// Download the file
|
||||
let response = self.client.get(&download_url).send().await?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
return Err(
|
||||
format!(
|
||||
"Failed to download GeoIP database: HTTP {}",
|
||||
response.status()
|
||||
)
|
||||
.into(),
|
||||
);
|
||||
}
|
||||
|
||||
let total_size = response.content_length().unwrap_or(0);
|
||||
let mut downloaded: u64 = 0;
|
||||
let mut file = fs::File::create(&mmdb_path).await?;
|
||||
let mut stream = response.bytes_stream();
|
||||
|
||||
use futures_util::StreamExt;
|
||||
use std::time::Instant;
|
||||
let start_time = Instant::now();
|
||||
let mut last_update = Instant::now();
|
||||
while let Some(chunk) = stream.next().await {
|
||||
let chunk = chunk?;
|
||||
downloaded += chunk.len() as u64;
|
||||
file.write_all(&chunk).await?;
|
||||
|
||||
let now = Instant::now();
|
||||
if now.duration_since(last_update).as_millis() >= 100 {
|
||||
let elapsed = start_time.elapsed().as_secs_f64();
|
||||
let speed = if elapsed > 0.0 {
|
||||
downloaded as f64 / elapsed
|
||||
} else {
|
||||
0.0
|
||||
};
|
||||
let percentage = if total_size > 0 {
|
||||
(downloaded as f64 / total_size as f64) * 100.0
|
||||
} else {
|
||||
0.0
|
||||
};
|
||||
let eta = if speed > 0.0 && total_size > 0 {
|
||||
Some((total_size.saturating_sub(downloaded)) as f64 / speed)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let _ = app_handle.emit(
|
||||
"geoip-download-progress",
|
||||
GeoIPDownloadProgress {
|
||||
stage: "downloading".to_string(),
|
||||
percentage,
|
||||
message: format!("Downloaded {downloaded} / {total_size} bytes"),
|
||||
downloaded_bytes: Some(downloaded),
|
||||
total_bytes: Some(total_size),
|
||||
speed_bytes_per_sec: Some(speed),
|
||||
eta_seconds: eta,
|
||||
},
|
||||
);
|
||||
last_update = now;
|
||||
}
|
||||
}
|
||||
|
||||
file.flush().await?;
|
||||
|
||||
// Emit completion
|
||||
let _ = app_handle.emit(
|
||||
"geoip-download-progress",
|
||||
GeoIPDownloadProgress {
|
||||
stage: "completed".to_string(),
|
||||
percentage: 100.0,
|
||||
message: "GeoIP database download completed".to_string(),
|
||||
downloaded_bytes: Some(downloaded),
|
||||
total_bytes: Some(total_size),
|
||||
speed_bytes_per_sec: Some(0.0),
|
||||
eta_seconds: Some(0.0),
|
||||
},
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn fetch_geoip_releases(
|
||||
&self,
|
||||
) -> Result<Vec<GithubRelease>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let url = format!("https://api.github.com/repos/{MMDB_REPO}/releases");
|
||||
let response = self
|
||||
.client
|
||||
.get(&url)
|
||||
.header("User-Agent", "Mozilla/5.0 (compatible; donutbrowser)")
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
return Err(format!("Failed to fetch releases: HTTP {}", response.status()).into());
|
||||
}
|
||||
|
||||
let releases: Vec<GithubRelease> = response.json().await?;
|
||||
Ok(releases)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::browser::GithubRelease;
|
||||
use wiremock::matchers::{method, path};
|
||||
use wiremock::{Mock, MockServer, ResponseTemplate};
|
||||
|
||||
fn create_mock_release() -> GithubRelease {
|
||||
GithubRelease {
|
||||
tag_name: "v1.0.0".to_string(),
|
||||
name: "Test Release".to_string(),
|
||||
body: Some("Test release body".to_string()),
|
||||
published_at: "2023-01-01T00:00:00Z".to_string(),
|
||||
created_at: Some("2023-01-01T00:00:00Z".to_string()),
|
||||
html_url: Some("https://example.com/release".to_string()),
|
||||
tarball_url: Some("https://example.com/tarball".to_string()),
|
||||
zipball_url: Some("https://example.com/zipball".to_string()),
|
||||
draft: false,
|
||||
prerelease: false,
|
||||
is_nightly: false,
|
||||
id: Some(1),
|
||||
node_id: Some("test_node_id".to_string()),
|
||||
target_commitish: None,
|
||||
assets: vec![crate::browser::GithubAsset {
|
||||
id: Some(1),
|
||||
node_id: Some("test_asset_node_id".to_string()),
|
||||
name: "GeoLite2-City.mmdb".to_string(),
|
||||
label: None,
|
||||
content_type: Some("application/octet-stream".to_string()),
|
||||
state: Some("uploaded".to_string()),
|
||||
size: 1024,
|
||||
download_count: Some(0),
|
||||
created_at: Some("2023-01-01T00:00:00Z".to_string()),
|
||||
updated_at: Some("2023-01-01T00:00:00Z".to_string()),
|
||||
browser_download_url: "https://example.com/GeoLite2-City.mmdb".to_string(),
|
||||
}],
|
||||
}
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_fetch_geoip_releases_success() {
|
||||
let mock_server = MockServer::start().await;
|
||||
let releases = vec![create_mock_release()];
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path(format!("/repos/{MMDB_REPO}/releases")))
|
||||
.respond_with(ResponseTemplate::new(200).set_body_json(&releases))
|
||||
.mount(&mock_server)
|
||||
.await;
|
||||
|
||||
let client = Client::builder()
|
||||
.build()
|
||||
.expect("Failed to create HTTP client");
|
||||
|
||||
let downloader = GeoIPDownloader::new_with_client(client);
|
||||
|
||||
// Override the URL for testing
|
||||
let url = format!("{}/repos/{}/releases", mock_server.uri(), MMDB_REPO);
|
||||
let response = downloader
|
||||
.client
|
||||
.get(&url)
|
||||
.header("User-Agent", "Mozilla/5.0 (compatible; donutbrowser)")
|
||||
.send()
|
||||
.await
|
||||
.expect("Request should succeed");
|
||||
|
||||
assert!(response.status().is_success());
|
||||
|
||||
let fetched_releases: Vec<GithubRelease> = response.json().await.expect("Should parse JSON");
|
||||
assert_eq!(fetched_releases.len(), 1);
|
||||
assert_eq!(fetched_releases[0].tag_name, "v1.0.0");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_find_city_mmdb_asset() {
|
||||
let downloader = GeoIPDownloader::new();
|
||||
let release = create_mock_release();
|
||||
|
||||
let asset_url = downloader.find_city_mmdb_asset(&release);
|
||||
assert!(asset_url.is_some());
|
||||
assert_eq!(asset_url.unwrap(), "https://example.com/GeoLite2-City.mmdb");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_find_city_mmdb_asset_not_found() {
|
||||
let downloader = GeoIPDownloader::new();
|
||||
let mut release = create_mock_release();
|
||||
release.assets[0].name = "wrong-file.txt".to_string();
|
||||
|
||||
let asset_url = downloader.find_city_mmdb_asset(&release);
|
||||
assert!(asset_url.is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_cache_dir() {
|
||||
let cache_dir = GeoIPDownloader::get_cache_dir();
|
||||
assert!(cache_dir.is_ok());
|
||||
|
||||
let path = cache_dir.unwrap();
|
||||
assert!(path.to_string_lossy().contains("camoufox"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_mmdb_file_path() {
|
||||
let mmdb_path = GeoIPDownloader::get_mmdb_file_path();
|
||||
assert!(mmdb_path.is_ok());
|
||||
|
||||
let path = mmdb_path.unwrap();
|
||||
assert!(path.to_string_lossy().ends_with("GeoLite2-City.mmdb"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_geoip_database_available() {
|
||||
// Test that the function works correctly regardless of file system state
|
||||
let is_available = GeoIPDownloader::is_geoip_database_available();
|
||||
|
||||
// The function should return a boolean value (either true or false)
|
||||
// The function should return a boolean value - we just verify it doesn't panic
|
||||
// and returns the expected result based on file existence
|
||||
|
||||
// Verify the function logic by checking if the path resolution works
|
||||
let mmdb_path_result = GeoIPDownloader::get_mmdb_file_path();
|
||||
assert!(
|
||||
mmdb_path_result.is_ok(),
|
||||
"Should be able to get MMDB file path"
|
||||
);
|
||||
|
||||
let mmdb_path = mmdb_path_result.unwrap();
|
||||
let expected_available = mmdb_path.exists();
|
||||
assert_eq!(
|
||||
is_available, expected_available,
|
||||
"Function result should match actual file existence"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Global singleton instance
|
||||
lazy_static::lazy_static! {
|
||||
static ref GEOIP_DOWNLOADER: GeoIPDownloader = GeoIPDownloader::new();
|
||||
}
|
||||
@@ -0,0 +1,507 @@
|
||||
use directories::BaseDirs;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Mutex;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ProfileGroup {
|
||||
pub id: String,
|
||||
pub name: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct GroupWithCount {
|
||||
pub id: String,
|
||||
pub name: String,
|
||||
pub count: usize,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
struct GroupsData {
|
||||
groups: Vec<ProfileGroup>,
|
||||
}
|
||||
|
||||
pub struct GroupManager {
|
||||
base_dirs: BaseDirs,
|
||||
}
|
||||
|
||||
impl GroupManager {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
base_dirs: BaseDirs::new().expect("Failed to get base directories"),
|
||||
}
|
||||
}
|
||||
|
||||
fn get_groups_file_path(&self) -> PathBuf {
|
||||
let mut path = self.base_dirs.data_local_dir().to_path_buf();
|
||||
path.push(if cfg!(debug_assertions) {
|
||||
"DonutBrowserDev"
|
||||
} else {
|
||||
"DonutBrowser"
|
||||
});
|
||||
path.push("data");
|
||||
path.push("groups.json");
|
||||
path
|
||||
}
|
||||
|
||||
fn load_groups_data(&self) -> Result<GroupsData, Box<dyn std::error::Error>> {
|
||||
let groups_file = self.get_groups_file_path();
|
||||
|
||||
if !groups_file.exists() {
|
||||
return Ok(GroupsData { groups: Vec::new() });
|
||||
}
|
||||
|
||||
let content = fs::read_to_string(groups_file)?;
|
||||
let groups_data: GroupsData = serde_json::from_str(&content)?;
|
||||
Ok(groups_data)
|
||||
}
|
||||
|
||||
fn save_groups_data(&self, groups_data: &GroupsData) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let groups_file = self.get_groups_file_path();
|
||||
|
||||
// Ensure the parent directory exists
|
||||
if let Some(parent) = groups_file.parent() {
|
||||
fs::create_dir_all(parent)?;
|
||||
}
|
||||
|
||||
let json = serde_json::to_string_pretty(groups_data)?;
|
||||
fs::write(groups_file, json)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn get_all_groups(&self) -> Result<Vec<ProfileGroup>, Box<dyn std::error::Error>> {
|
||||
let groups_data = self.load_groups_data()?;
|
||||
Ok(groups_data.groups)
|
||||
}
|
||||
|
||||
pub fn create_group(&self, name: String) -> Result<ProfileGroup, Box<dyn std::error::Error>> {
|
||||
let mut groups_data = self.load_groups_data()?;
|
||||
|
||||
// Check if group with this name already exists
|
||||
if groups_data.groups.iter().any(|g| g.name == name) {
|
||||
return Err(format!("Group with name '{name}' already exists").into());
|
||||
}
|
||||
|
||||
let group = ProfileGroup {
|
||||
id: uuid::Uuid::new_v4().to_string(),
|
||||
name,
|
||||
};
|
||||
|
||||
groups_data.groups.push(group.clone());
|
||||
self.save_groups_data(&groups_data)?;
|
||||
|
||||
Ok(group)
|
||||
}
|
||||
|
||||
pub fn update_group(
|
||||
&self,
|
||||
id: String,
|
||||
name: String,
|
||||
) -> Result<ProfileGroup, Box<dyn std::error::Error>> {
|
||||
let mut groups_data = self.load_groups_data()?;
|
||||
|
||||
// Check if another group with this name already exists
|
||||
if groups_data
|
||||
.groups
|
||||
.iter()
|
||||
.any(|g| g.name == name && g.id != id)
|
||||
{
|
||||
return Err(format!("Group with name '{name}' already exists").into());
|
||||
}
|
||||
|
||||
let group = groups_data
|
||||
.groups
|
||||
.iter_mut()
|
||||
.find(|g| g.id == id)
|
||||
.ok_or_else(|| format!("Group with id '{id}' not found"))?;
|
||||
|
||||
group.name = name;
|
||||
let updated_group = group.clone();
|
||||
|
||||
self.save_groups_data(&groups_data)?;
|
||||
Ok(updated_group)
|
||||
}
|
||||
|
||||
pub fn delete_group(&self, id: String) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let mut groups_data = self.load_groups_data()?;
|
||||
|
||||
let initial_len = groups_data.groups.len();
|
||||
groups_data.groups.retain(|g| g.id != id);
|
||||
|
||||
if groups_data.groups.len() == initial_len {
|
||||
return Err(format!("Group with id '{id}' not found").into());
|
||||
}
|
||||
|
||||
self.save_groups_data(&groups_data)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn get_groups_with_profile_counts(
|
||||
&self,
|
||||
profiles: &[crate::profile::BrowserProfile],
|
||||
) -> Result<Vec<GroupWithCount>, Box<dyn std::error::Error>> {
|
||||
let groups = self.get_all_groups()?;
|
||||
let mut group_counts = HashMap::new();
|
||||
|
||||
// Count profiles in each group
|
||||
for profile in profiles {
|
||||
if let Some(group_id) = &profile.group_id {
|
||||
*group_counts.entry(group_id.clone()).or_insert(0) += 1;
|
||||
}
|
||||
}
|
||||
|
||||
// Create result with counts
|
||||
let mut result = Vec::new();
|
||||
for group in groups {
|
||||
let count = group_counts.get(&group.id).copied().unwrap_or(0);
|
||||
if count > 0 {
|
||||
result.push(GroupWithCount {
|
||||
id: group.id,
|
||||
name: group.name,
|
||||
count,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Add default group count (profiles without group_id)
|
||||
let default_count = profiles.iter().filter(|p| p.group_id.is_none()).count();
|
||||
if default_count > 0 {
|
||||
let default_group = GroupWithCount {
|
||||
id: "default".to_string(),
|
||||
name: "Default".to_string(),
|
||||
count: default_count,
|
||||
};
|
||||
result.insert(0, default_group);
|
||||
}
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
}
|
||||
|
||||
// Global instance
|
||||
lazy_static::lazy_static! {
|
||||
pub static ref GROUP_MANAGER: Mutex<GroupManager> = Mutex::new(GroupManager::new());
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::env;
|
||||
use tempfile::TempDir;
|
||||
|
||||
fn create_test_group_manager() -> (GroupManager, TempDir) {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp directory");
|
||||
|
||||
// Set up a temporary home directory for testing
|
||||
env::set_var("HOME", temp_dir.path());
|
||||
|
||||
let manager = GroupManager::new();
|
||||
(manager, temp_dir)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_group_manager_creation() {
|
||||
let (_manager, _temp_dir) = create_test_group_manager();
|
||||
// Test passes if no panic occurs
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_create_and_get_groups() {
|
||||
let (manager, _temp_dir) = create_test_group_manager();
|
||||
|
||||
// Initially should have no groups
|
||||
let groups = manager
|
||||
.get_all_groups()
|
||||
.expect("Should be able to get groups");
|
||||
assert!(groups.is_empty(), "Should start with no groups");
|
||||
|
||||
// Create a group
|
||||
let group_name = "Test Group".to_string();
|
||||
let created_group = manager
|
||||
.create_group(group_name.clone())
|
||||
.expect("Should create group successfully");
|
||||
|
||||
assert_eq!(
|
||||
created_group.name, group_name,
|
||||
"Created group should have correct name"
|
||||
);
|
||||
assert!(
|
||||
!created_group.id.is_empty(),
|
||||
"Created group should have an ID"
|
||||
);
|
||||
|
||||
// Verify group was saved
|
||||
let groups = manager
|
||||
.get_all_groups()
|
||||
.expect("Should be able to get groups");
|
||||
assert_eq!(groups.len(), 1, "Should have one group");
|
||||
assert_eq!(
|
||||
groups[0].name, group_name,
|
||||
"Retrieved group should have correct name"
|
||||
);
|
||||
assert_eq!(
|
||||
groups[0].id, created_group.id,
|
||||
"Retrieved group should have correct ID"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_create_duplicate_group_fails() {
|
||||
let (manager, _temp_dir) = create_test_group_manager();
|
||||
|
||||
let group_name = "Duplicate Group".to_string();
|
||||
|
||||
// Create first group
|
||||
let _first_group = manager
|
||||
.create_group(group_name.clone())
|
||||
.expect("Should create first group");
|
||||
|
||||
// Try to create duplicate group
|
||||
let result = manager.create_group(group_name.clone());
|
||||
assert!(result.is_err(), "Should fail to create duplicate group");
|
||||
|
||||
let error_msg = result.unwrap_err().to_string();
|
||||
assert!(
|
||||
error_msg.contains("already exists"),
|
||||
"Error should mention group already exists"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_update_group() {
|
||||
let (manager, _temp_dir) = create_test_group_manager();
|
||||
|
||||
// Create a group
|
||||
let original_name = "Original Name".to_string();
|
||||
let created_group = manager
|
||||
.create_group(original_name)
|
||||
.expect("Should create group");
|
||||
|
||||
// Update the group
|
||||
let new_name = "Updated Name".to_string();
|
||||
let updated_group = manager
|
||||
.update_group(created_group.id.clone(), new_name.clone())
|
||||
.expect("Should update group successfully");
|
||||
|
||||
assert_eq!(
|
||||
updated_group.name, new_name,
|
||||
"Updated group should have new name"
|
||||
);
|
||||
assert_eq!(
|
||||
updated_group.id, created_group.id,
|
||||
"Updated group should keep same ID"
|
||||
);
|
||||
|
||||
// Verify update was persisted
|
||||
let groups = manager.get_all_groups().expect("Should get groups");
|
||||
assert_eq!(groups.len(), 1, "Should still have one group");
|
||||
assert_eq!(
|
||||
groups[0].name, new_name,
|
||||
"Persisted group should have updated name"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_update_nonexistent_group_fails() {
|
||||
let (manager, _temp_dir) = create_test_group_manager();
|
||||
|
||||
let result = manager.update_group("nonexistent-id".to_string(), "New Name".to_string());
|
||||
assert!(result.is_err(), "Should fail to update nonexistent group");
|
||||
|
||||
let error_msg = result.unwrap_err().to_string();
|
||||
assert!(
|
||||
error_msg.contains("not found"),
|
||||
"Error should mention group not found"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_delete_group() {
|
||||
let (manager, _temp_dir) = create_test_group_manager();
|
||||
|
||||
// Create a group
|
||||
let group_name = "To Delete".to_string();
|
||||
let created_group = manager
|
||||
.create_group(group_name)
|
||||
.expect("Should create group");
|
||||
|
||||
// Verify group exists
|
||||
let groups = manager.get_all_groups().expect("Should get groups");
|
||||
assert_eq!(groups.len(), 1, "Should have one group");
|
||||
|
||||
// Delete the group
|
||||
manager
|
||||
.delete_group(created_group.id)
|
||||
.expect("Should delete group successfully");
|
||||
|
||||
// Verify group was deleted
|
||||
let groups = manager.get_all_groups().expect("Should get groups");
|
||||
assert!(groups.is_empty(), "Should have no groups after deletion");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_delete_nonexistent_group_fails() {
|
||||
let (manager, _temp_dir) = create_test_group_manager();
|
||||
|
||||
let result = manager.delete_group("nonexistent-id".to_string());
|
||||
assert!(result.is_err(), "Should fail to delete nonexistent group");
|
||||
|
||||
let error_msg = result.unwrap_err().to_string();
|
||||
assert!(
|
||||
error_msg.contains("not found"),
|
||||
"Error should mention group not found"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_groups_with_profile_counts() {
|
||||
let (manager, _temp_dir) = create_test_group_manager();
|
||||
|
||||
// Create test groups
|
||||
let group1 = manager
|
||||
.create_group("Group 1".to_string())
|
||||
.expect("Should create group 1");
|
||||
let _group2 = manager
|
||||
.create_group("Group 2".to_string())
|
||||
.expect("Should create group 2");
|
||||
|
||||
// Create mock profiles
|
||||
let profiles = vec![
|
||||
crate::profile::BrowserProfile {
|
||||
id: uuid::Uuid::new_v4(),
|
||||
name: "Profile 1".to_string(),
|
||||
browser: "firefox".to_string(),
|
||||
version: "1.0".to_string(),
|
||||
proxy_id: None,
|
||||
process_id: None,
|
||||
last_launch: None,
|
||||
release_type: "stable".to_string(),
|
||||
camoufox_config: None,
|
||||
group_id: Some(group1.id.clone()),
|
||||
},
|
||||
crate::profile::BrowserProfile {
|
||||
id: uuid::Uuid::new_v4(),
|
||||
name: "Profile 2".to_string(),
|
||||
browser: "firefox".to_string(),
|
||||
version: "1.0".to_string(),
|
||||
proxy_id: None,
|
||||
process_id: None,
|
||||
last_launch: None,
|
||||
release_type: "stable".to_string(),
|
||||
camoufox_config: None,
|
||||
group_id: Some(group1.id.clone()),
|
||||
},
|
||||
crate::profile::BrowserProfile {
|
||||
id: uuid::Uuid::new_v4(),
|
||||
name: "Profile 3".to_string(),
|
||||
browser: "firefox".to_string(),
|
||||
version: "1.0".to_string(),
|
||||
proxy_id: None,
|
||||
process_id: None,
|
||||
last_launch: None,
|
||||
release_type: "stable".to_string(),
|
||||
camoufox_config: None,
|
||||
group_id: None, // Default group
|
||||
},
|
||||
];
|
||||
|
||||
let groups_with_counts = manager
|
||||
.get_groups_with_profile_counts(&profiles)
|
||||
.expect("Should get groups with counts");
|
||||
|
||||
// Should have default group + group1 (_group2 has no profiles so shouldn't appear)
|
||||
assert_eq!(
|
||||
groups_with_counts.len(),
|
||||
2,
|
||||
"Should have 2 groups with profiles"
|
||||
);
|
||||
|
||||
// Check default group
|
||||
let default_group = groups_with_counts
|
||||
.iter()
|
||||
.find(|g| g.id == "default")
|
||||
.expect("Should have default group");
|
||||
assert_eq!(
|
||||
default_group.count, 1,
|
||||
"Default group should have 1 profile"
|
||||
);
|
||||
|
||||
// Check group1
|
||||
let group1_with_count = groups_with_counts
|
||||
.iter()
|
||||
.find(|g| g.id == group1.id)
|
||||
.expect("Should have group1");
|
||||
assert_eq!(group1_with_count.count, 2, "Group1 should have 2 profiles");
|
||||
}
|
||||
}
|
||||
|
||||
// Helper function to get groups with counts
|
||||
pub fn get_groups_with_counts(profiles: &[crate::profile::BrowserProfile]) -> Vec<GroupWithCount> {
|
||||
let group_manager = GROUP_MANAGER.lock().unwrap();
|
||||
group_manager
|
||||
.get_groups_with_profile_counts(profiles)
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
// Tauri commands
|
||||
#[tauri::command]
|
||||
pub async fn get_profile_groups() -> Result<Vec<ProfileGroup>, String> {
|
||||
let group_manager = GROUP_MANAGER.lock().unwrap();
|
||||
group_manager
|
||||
.get_all_groups()
|
||||
.map_err(|e| format!("Failed to get profile groups: {e}"))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_groups_with_profile_counts() -> Result<Vec<GroupWithCount>, String> {
|
||||
let profile_manager = crate::profile::ProfileManager::instance();
|
||||
let profiles = profile_manager
|
||||
.list_profiles()
|
||||
.map_err(|e| format!("Failed to list profiles: {e}"))?;
|
||||
Ok(get_groups_with_counts(&profiles))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn create_profile_group(name: String) -> Result<ProfileGroup, String> {
|
||||
let group_manager = GROUP_MANAGER.lock().unwrap();
|
||||
group_manager
|
||||
.create_group(name)
|
||||
.map_err(|e| format!("Failed to create group: {e}"))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn update_profile_group(group_id: String, name: String) -> Result<ProfileGroup, String> {
|
||||
let group_manager = GROUP_MANAGER.lock().unwrap();
|
||||
group_manager
|
||||
.update_group(group_id, name)
|
||||
.map_err(|e| format!("Failed to update group: {e}"))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn delete_profile_group(group_id: String) -> Result<(), String> {
|
||||
let group_manager = GROUP_MANAGER.lock().unwrap();
|
||||
group_manager
|
||||
.delete_group(group_id)
|
||||
.map_err(|e| format!("Failed to delete group: {e}"))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn assign_profiles_to_group(
|
||||
profile_names: Vec<String>,
|
||||
group_id: Option<String>,
|
||||
) -> Result<(), String> {
|
||||
let profile_manager = crate::profile::ProfileManager::instance();
|
||||
profile_manager
|
||||
.assign_profiles_to_group(profile_names, group_id)
|
||||
.map_err(|e| format!("Failed to assign profiles to group: {e}"))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn delete_selected_profiles(profile_names: Vec<String>) -> Result<(), String> {
|
||||
let profile_manager = crate::profile::ProfileManager::instance();
|
||||
profile_manager
|
||||
.delete_multiple_profiles(profile_names)
|
||||
.map_err(|e| format!("Failed to delete profiles: {e}"))
|
||||
}
|
||||
+305
-70
@@ -1,4 +1,5 @@
|
||||
// Learn more about Tauri commands at https://tauri.app/develop/calling-rust/
|
||||
use std::env;
|
||||
use std::sync::Mutex;
|
||||
use tauri::{Emitter, Manager, Runtime, WebviewUrl, WebviewWindow, WebviewWindowBuilder};
|
||||
use tauri_plugin_deep_link::DeepLinkExt;
|
||||
@@ -11,26 +12,32 @@ mod app_auto_updater;
|
||||
mod auto_updater;
|
||||
mod browser;
|
||||
mod browser_runner;
|
||||
mod browser_version_service;
|
||||
mod browser_version_manager;
|
||||
mod camoufox;
|
||||
mod default_browser;
|
||||
mod download;
|
||||
mod downloaded_browsers;
|
||||
mod extraction;
|
||||
mod geoip_downloader;
|
||||
mod group_manager;
|
||||
mod platform_browser;
|
||||
mod profile;
|
||||
mod profile_importer;
|
||||
mod proxy_manager;
|
||||
mod settings_manager;
|
||||
mod theme_detector;
|
||||
// mod theme_detector; // removed: theme detection handled in webview via CSS prefers-color-scheme
|
||||
mod version_updater;
|
||||
|
||||
extern crate lazy_static;
|
||||
|
||||
use browser_runner::{
|
||||
check_browser_exists, check_browser_status, create_browser_profile_new, delete_profile,
|
||||
download_browser, fetch_browser_versions_cached_first, fetch_browser_versions_with_count,
|
||||
check_browser_exists, check_browser_status, check_missing_binaries, check_missing_geoip_database,
|
||||
create_browser_profile_new, delete_profile, download_browser, ensure_all_binaries_exist,
|
||||
fetch_browser_versions_cached_first, fetch_browser_versions_with_count,
|
||||
fetch_browser_versions_with_count_cached_first, get_downloaded_browser_versions,
|
||||
get_supported_browsers, is_browser_supported_on_platform, kill_browser_profile,
|
||||
launch_browser_profile, list_browser_profiles, rename_profile, update_profile_proxy,
|
||||
update_profile_version,
|
||||
launch_browser_profile, list_browser_profiles, rename_profile, update_camoufox_config,
|
||||
update_profile_proxy,
|
||||
};
|
||||
|
||||
use settings_manager::{
|
||||
@@ -38,9 +45,7 @@ use settings_manager::{
|
||||
save_app_settings, save_table_sorting_settings, should_show_settings_on_startup,
|
||||
};
|
||||
|
||||
use default_browser::{
|
||||
is_default_browser, open_url_with_profile, set_as_default_browser, smart_open_url,
|
||||
};
|
||||
use default_browser::{is_default_browser, open_url_with_profile, set_as_default_browser};
|
||||
|
||||
use version_updater::{
|
||||
get_version_update_status, get_version_updater, trigger_manual_version_update,
|
||||
@@ -48,8 +53,7 @@ use version_updater::{
|
||||
|
||||
use auto_updater::{
|
||||
check_for_browser_updates, complete_browser_update_with_auto_update, dismiss_update_notification,
|
||||
is_auto_update_download, is_browser_disabled_for_update, mark_auto_update_download,
|
||||
remove_auto_update_download,
|
||||
is_browser_disabled_for_update,
|
||||
};
|
||||
|
||||
use app_auto_updater::{
|
||||
@@ -58,7 +62,16 @@ use app_auto_updater::{
|
||||
|
||||
use profile_importer::{detect_existing_profiles, import_browser_profile};
|
||||
|
||||
use theme_detector::get_system_theme;
|
||||
// use theme_detector::get_system_theme;
|
||||
|
||||
use group_manager::{
|
||||
assign_profiles_to_group, create_profile_group, delete_profile_group, delete_selected_profiles,
|
||||
get_groups_with_profile_counts, get_profile_groups, update_profile_group,
|
||||
};
|
||||
|
||||
use geoip_downloader::GeoIPDownloader;
|
||||
|
||||
use browser_version_manager::get_browser_release_types;
|
||||
|
||||
// Trait to extend WebviewWindow with transparent titlebar functionality
|
||||
pub trait WindowExt {
|
||||
@@ -111,20 +124,16 @@ async fn handle_url_open(app: tauri::AppHandle, url: String) -> Result<(), Strin
|
||||
|
||||
// Check if the main window exists and is ready
|
||||
if let Some(window) = app.get_webview_window("main") {
|
||||
if window.is_visible().unwrap_or(false) {
|
||||
// Window is visible, emit event directly
|
||||
println!("Main window is visible, emitting show-profile-selector event");
|
||||
app
|
||||
.emit("show-profile-selector", url.clone())
|
||||
.map_err(|e| format!("Failed to emit URL open event: {e}"))?;
|
||||
let _ = window.show();
|
||||
let _ = window.set_focus();
|
||||
} else {
|
||||
// Window not visible yet - add to pending URLs
|
||||
println!("Main window not visible, adding URL to pending list");
|
||||
let mut pending = PENDING_URLS.lock().unwrap();
|
||||
pending.push(url);
|
||||
}
|
||||
println!("Main window exists");
|
||||
|
||||
// Try to show and focus the window first
|
||||
let _ = window.show();
|
||||
let _ = window.set_focus();
|
||||
let _ = window.unminimize();
|
||||
|
||||
app
|
||||
.emit("show-profile-selector", url.clone())
|
||||
.map_err(|e| format!("Failed to emit URL open event: {e}"))?;
|
||||
} else {
|
||||
// Window doesn't exist yet - add to pending URLs
|
||||
println!("Main window doesn't exist, adding URL to pending list");
|
||||
@@ -136,42 +145,73 @@ async fn handle_url_open(app: tauri::AppHandle, url: String) -> Result<(), Strin
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
async fn check_and_handle_startup_url(app_handle: tauri::AppHandle) -> Result<bool, String> {
|
||||
let pending_urls = {
|
||||
let mut pending = PENDING_URLS.lock().unwrap();
|
||||
let urls = pending.clone();
|
||||
pending.clear(); // Clear after getting them
|
||||
urls
|
||||
};
|
||||
async fn create_stored_proxy(
|
||||
name: String,
|
||||
proxy_settings: crate::browser::ProxySettings,
|
||||
) -> Result<crate::proxy_manager::StoredProxy, String> {
|
||||
crate::proxy_manager::PROXY_MANAGER
|
||||
.create_stored_proxy(name, proxy_settings)
|
||||
.map_err(|e| format!("Failed to create stored proxy: {e}"))
|
||||
}
|
||||
|
||||
if !pending_urls.is_empty() {
|
||||
println!(
|
||||
"Handling {} pending URLs from frontend request",
|
||||
pending_urls.len()
|
||||
);
|
||||
#[tauri::command]
|
||||
async fn get_stored_proxies() -> Result<Vec<crate::proxy_manager::StoredProxy>, String> {
|
||||
Ok(crate::proxy_manager::PROXY_MANAGER.get_stored_proxies())
|
||||
}
|
||||
|
||||
for url in pending_urls {
|
||||
println!("Emitting show-profile-selector event for URL: {url}");
|
||||
if let Err(e) = app_handle.emit("show-profile-selector", url.clone()) {
|
||||
eprintln!("Failed to emit URL event: {e}");
|
||||
return Err(format!("Failed to emit URL event: {e}"));
|
||||
}
|
||||
}
|
||||
#[tauri::command]
|
||||
async fn update_stored_proxy(
|
||||
proxy_id: String,
|
||||
name: Option<String>,
|
||||
proxy_settings: Option<crate::browser::ProxySettings>,
|
||||
) -> Result<crate::proxy_manager::StoredProxy, String> {
|
||||
crate::proxy_manager::PROXY_MANAGER
|
||||
.update_stored_proxy(&proxy_id, name, proxy_settings)
|
||||
.map_err(|e| format!("Failed to update stored proxy: {e}"))
|
||||
}
|
||||
|
||||
return Ok(true);
|
||||
}
|
||||
#[tauri::command]
|
||||
async fn delete_stored_proxy(proxy_id: String) -> Result<(), String> {
|
||||
crate::proxy_manager::PROXY_MANAGER
|
||||
.delete_stored_proxy(&proxy_id)
|
||||
.map_err(|e| format!("Failed to delete stored proxy: {e}"))
|
||||
}
|
||||
|
||||
Ok(false)
|
||||
#[tauri::command]
|
||||
async fn is_geoip_database_available() -> Result<bool, String> {
|
||||
Ok(GeoIPDownloader::is_geoip_database_available())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
async fn download_geoip_database(app_handle: tauri::AppHandle) -> Result<(), String> {
|
||||
let downloader = GeoIPDownloader::instance();
|
||||
downloader
|
||||
.download_geoip_database(&app_handle)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to download GeoIP database: {e}"))
|
||||
}
|
||||
|
||||
#[cfg_attr(mobile, tauri::mobile_entry_point)]
|
||||
pub fn run() {
|
||||
let args: Vec<String> = env::args().collect();
|
||||
let startup_url = args.iter().find(|arg| arg.starts_with("http")).cloned();
|
||||
|
||||
if let Some(url) = startup_url.clone() {
|
||||
println!("Found startup URL in command line: {url}");
|
||||
let mut pending = PENDING_URLS.lock().unwrap();
|
||||
pending.push(url.clone());
|
||||
}
|
||||
|
||||
tauri::Builder::default()
|
||||
.plugin(tauri_plugin_single_instance::init(|_, args, _cwd| {
|
||||
println!("Single instance triggered with args: {args:?}");
|
||||
}))
|
||||
.plugin(tauri_plugin_deep_link::init())
|
||||
.plugin(tauri_plugin_fs::init())
|
||||
.plugin(tauri_plugin_opener::init())
|
||||
.plugin(tauri_plugin_shell::init())
|
||||
.plugin(tauri_plugin_deep_link::init())
|
||||
.plugin(tauri_plugin_dialog::init())
|
||||
.plugin(tauri_plugin_macos_permissions::init())
|
||||
.setup(|app| {
|
||||
// Create the main window programmatically
|
||||
#[allow(unused_variables)]
|
||||
@@ -179,7 +219,10 @@ pub fn run() {
|
||||
.title("Donut Browser")
|
||||
.inner_size(900.0, 600.0)
|
||||
.resizable(false)
|
||||
.fullscreen(false);
|
||||
.fullscreen(false)
|
||||
.center()
|
||||
.focused(true)
|
||||
.visible(true);
|
||||
|
||||
#[allow(unused_variables)]
|
||||
let window = win_builder.build().unwrap();
|
||||
@@ -198,16 +241,27 @@ pub fn run() {
|
||||
#[cfg(any(windows, target_os = "linux"))]
|
||||
{
|
||||
// For Windows and Linux, register all deep links at runtime for development
|
||||
app.deep_link().register_all()?;
|
||||
if let Err(e) = app.deep_link().register_all() {
|
||||
eprintln!("Failed to register deep links: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
// On macOS, try to register deep links for development builds
|
||||
if let Err(e) = app.deep_link().register_all() {
|
||||
eprintln!(
|
||||
"Note: Deep link registration failed on macOS (this is normal for production): {e}"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Handle deep links - this works for both scenarios:
|
||||
// 1. App is running and URL is opened
|
||||
// 2. App is not running and URL causes app to launch
|
||||
app.deep_link().on_open_url({
|
||||
let handle = handle.clone();
|
||||
move |event| {
|
||||
let urls = event.urls();
|
||||
println!("Deep link event received with {} URLs", urls.len());
|
||||
|
||||
for url in urls {
|
||||
let url_string = url.to_string();
|
||||
println!("Deep link received: {url_string}");
|
||||
@@ -225,27 +279,89 @@ pub fn run() {
|
||||
}
|
||||
});
|
||||
|
||||
if let Some(startup_url) = startup_url {
|
||||
let handle_clone = handle.clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
println!("Processing startup URL from command line: {startup_url}");
|
||||
if let Err(e) = handle_url_open(handle_clone, startup_url.clone()).await {
|
||||
eprintln!("Failed to handle startup URL: {e}");
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Initialize and start background version updater
|
||||
let app_handle = app.handle().clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
let version_updater = get_version_updater();
|
||||
let mut updater_guard = version_updater.lock().await;
|
||||
|
||||
// Set the app handle
|
||||
updater_guard.set_app_handle(app_handle).await;
|
||||
{
|
||||
let mut updater_guard = version_updater.lock().await;
|
||||
updater_guard.set_app_handle(app_handle);
|
||||
}
|
||||
|
||||
// Start the background updates
|
||||
updater_guard.start_background_updates().await;
|
||||
// Run startup check without holding the lock
|
||||
{
|
||||
let updater_guard = version_updater.lock().await;
|
||||
if let Err(e) = updater_guard.start_background_updates().await {
|
||||
eprintln!("Failed to start background updates: {e}");
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Start the background update task separately
|
||||
tauri::async_runtime::spawn(async move {
|
||||
version_updater::VersionUpdater::run_background_task().await;
|
||||
});
|
||||
|
||||
let app_handle_auto_updater = app.handle().clone();
|
||||
|
||||
// Start the auto-update check task separately
|
||||
tauri::async_runtime::spawn(async move {
|
||||
auto_updater::check_for_updates_with_progress(app_handle_auto_updater).await;
|
||||
});
|
||||
|
||||
// Handle any pending URLs that were received before the window was ready
|
||||
let handle_pending = handle.clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
// Wait a bit for the window to be fully ready
|
||||
tokio::time::sleep(tokio::time::Duration::from_millis(1000)).await;
|
||||
|
||||
let pending_urls = {
|
||||
let mut pending = PENDING_URLS.lock().unwrap();
|
||||
let urls = pending.clone();
|
||||
pending.clear();
|
||||
urls
|
||||
};
|
||||
|
||||
for url in pending_urls {
|
||||
println!("Processing pending URL: {url}");
|
||||
if let Err(e) = handle_url_open(handle_pending.clone(), url).await {
|
||||
eprintln!("Failed to handle pending URL: {e}");
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Start periodic cleanup task for unused binaries
|
||||
tauri::async_runtime::spawn(async move {
|
||||
let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(43200)); // Every 12 hours
|
||||
|
||||
loop {
|
||||
interval.tick().await;
|
||||
|
||||
let browser_runner = crate::browser_runner::BrowserRunner::instance();
|
||||
if let Err(e) = browser_runner.cleanup_unused_binaries_internal() {
|
||||
eprintln!("Periodic cleanup failed: {e}");
|
||||
} else {
|
||||
println!("Periodic cleanup completed successfully");
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Check for app updates at startup
|
||||
let app_handle_update = app.handle().clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
// Add a small delay to ensure the app is fully loaded
|
||||
tokio::time::sleep(tokio::time::Duration::from_secs(3)).await;
|
||||
|
||||
println!("Starting app update check at startup...");
|
||||
let updater = app_auto_updater::AppAutoUpdater::new();
|
||||
let updater = app_auto_updater::AppAutoUpdater::instance();
|
||||
match updater.check_for_updates().await {
|
||||
Ok(Some(update_info)) => {
|
||||
println!(
|
||||
@@ -268,6 +384,113 @@ pub fn run() {
|
||||
}
|
||||
});
|
||||
|
||||
// Start Camoufox cleanup task
|
||||
let _app_handle_cleanup = app.handle().clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
let launcher = crate::camoufox::CamoufoxNodecarLauncher::instance();
|
||||
let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(5));
|
||||
|
||||
loop {
|
||||
interval.tick().await;
|
||||
|
||||
match launcher.cleanup_dead_instances().await {
|
||||
Ok(_dead_instances) => {
|
||||
// Cleanup completed silently
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Error during Camoufox cleanup: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Check and download GeoIP database at startup if needed
|
||||
let app_handle_geoip = app.handle().clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
// Wait a bit for the app to fully initialize
|
||||
tokio::time::sleep(tokio::time::Duration::from_secs(2)).await;
|
||||
|
||||
let browser_runner = crate::browser_runner::BrowserRunner::instance();
|
||||
match browser_runner.check_missing_geoip_database() {
|
||||
Ok(true) => {
|
||||
println!("GeoIP database is missing for Camoufox profiles, downloading at startup...");
|
||||
let geoip_downloader = GeoIPDownloader::instance();
|
||||
if let Err(e) = geoip_downloader
|
||||
.download_geoip_database(&app_handle_geoip)
|
||||
.await
|
||||
{
|
||||
eprintln!("Failed to download GeoIP database at startup: {e}");
|
||||
} else {
|
||||
println!("GeoIP database downloaded successfully at startup");
|
||||
}
|
||||
}
|
||||
Ok(false) => {
|
||||
// No Camoufox profiles or GeoIP database already available
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Failed to check GeoIP database status at startup: {e}");
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Start proxy cleanup task for dead browser processes
|
||||
let app_handle_proxy_cleanup = app.handle().clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(30));
|
||||
|
||||
loop {
|
||||
interval.tick().await;
|
||||
|
||||
match crate::proxy_manager::PROXY_MANAGER
|
||||
.cleanup_dead_proxies(app_handle_proxy_cleanup.clone())
|
||||
.await
|
||||
{
|
||||
Ok(dead_pids) => {
|
||||
if !dead_pids.is_empty() {
|
||||
println!(
|
||||
"Cleaned up proxies for {} dead browser processes",
|
||||
dead_pids.len()
|
||||
);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Error during proxy cleanup: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Warm up nodecar binary in the background
|
||||
tauri::async_runtime::spawn(async move {
|
||||
println!("Starting nodecar warm-up...");
|
||||
let start_time = std::time::Instant::now();
|
||||
|
||||
// Send a ping request to nodecar to trigger unpacking/warm-up
|
||||
match tokio::process::Command::new("nodecar").output().await {
|
||||
Ok(output) => {
|
||||
let duration = start_time.elapsed();
|
||||
if output.status.success() {
|
||||
println!(
|
||||
"Nodecar warm-up completed successfully in {:.2}s",
|
||||
duration.as_secs_f64()
|
||||
);
|
||||
} else {
|
||||
println!(
|
||||
"Nodecar warm-up completed with non-zero exit code in {:.2}s",
|
||||
duration.as_secs_f64()
|
||||
);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
let duration = start_time.elapsed();
|
||||
println!(
|
||||
"Nodecar warm-up failed after {:.2}s: {e}",
|
||||
duration.as_secs_f64()
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
Ok(())
|
||||
})
|
||||
.invoke_handler(tauri::generate_handler![
|
||||
@@ -283,8 +506,8 @@ pub fn run() {
|
||||
fetch_browser_versions_cached_first,
|
||||
fetch_browser_versions_with_count_cached_first,
|
||||
get_downloaded_browser_versions,
|
||||
get_browser_release_types,
|
||||
update_profile_proxy,
|
||||
update_profile_version,
|
||||
check_browser_status,
|
||||
kill_browser_profile,
|
||||
rename_profile,
|
||||
@@ -297,23 +520,35 @@ pub fn run() {
|
||||
is_default_browser,
|
||||
open_url_with_profile,
|
||||
set_as_default_browser,
|
||||
smart_open_url,
|
||||
check_and_handle_startup_url,
|
||||
trigger_manual_version_update,
|
||||
get_version_update_status,
|
||||
check_for_browser_updates,
|
||||
is_browser_disabled_for_update,
|
||||
dismiss_update_notification,
|
||||
complete_browser_update_with_auto_update,
|
||||
mark_auto_update_download,
|
||||
remove_auto_update_download,
|
||||
is_auto_update_download,
|
||||
check_for_app_updates,
|
||||
check_for_app_updates_manual,
|
||||
download_and_install_app_update,
|
||||
get_system_theme,
|
||||
// get_system_theme, // removed
|
||||
detect_existing_profiles,
|
||||
import_browser_profile,
|
||||
check_missing_binaries,
|
||||
check_missing_geoip_database,
|
||||
ensure_all_binaries_exist,
|
||||
create_stored_proxy,
|
||||
get_stored_proxies,
|
||||
update_stored_proxy,
|
||||
delete_stored_proxy,
|
||||
update_camoufox_config,
|
||||
get_profile_groups,
|
||||
get_groups_with_profile_counts,
|
||||
create_profile_group,
|
||||
update_profile_group,
|
||||
delete_profile_group,
|
||||
assign_profiles_to_group,
|
||||
delete_selected_profiles,
|
||||
is_geoip_database_available,
|
||||
download_geoip_database,
|
||||
])
|
||||
.run(tauri::generate_context!())
|
||||
.expect("error while running tauri application");
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,5 @@
|
||||
pub mod manager;
|
||||
pub mod types;
|
||||
|
||||
pub use manager::ProfileManager;
|
||||
pub use types::BrowserProfile;
|
||||
@@ -0,0 +1,34 @@
|
||||
use crate::camoufox::CamoufoxConfig;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct BrowserProfile {
|
||||
pub id: uuid::Uuid,
|
||||
pub name: String,
|
||||
pub browser: String,
|
||||
pub version: String,
|
||||
#[serde(default)]
|
||||
pub proxy_id: Option<String>, // Reference to stored proxy
|
||||
#[serde(default)]
|
||||
pub process_id: Option<u32>,
|
||||
#[serde(default)]
|
||||
pub last_launch: Option<u64>,
|
||||
#[serde(default = "default_release_type")]
|
||||
pub release_type: String, // "stable" or "nightly"
|
||||
#[serde(default)]
|
||||
pub camoufox_config: Option<CamoufoxConfig>, // Camoufox configuration
|
||||
#[serde(default)]
|
||||
pub group_id: Option<String>, // Reference to profile group
|
||||
}
|
||||
|
||||
pub fn default_release_type() -> String {
|
||||
"stable".to_string()
|
||||
}
|
||||
|
||||
impl BrowserProfile {
|
||||
/// Get the path to the profile data directory (profiles/{uuid}/profile)
|
||||
pub fn get_profile_data_path(&self, profiles_dir: &Path) -> PathBuf {
|
||||
profiles_dir.join(self.id.to_string()).join("profile")
|
||||
}
|
||||
}
|
||||
@@ -17,17 +17,19 @@ pub struct DetectedProfile {
|
||||
|
||||
pub struct ProfileImporter {
|
||||
base_dirs: BaseDirs,
|
||||
browser_runner: BrowserRunner,
|
||||
}
|
||||
|
||||
impl ProfileImporter {
|
||||
pub fn new() -> Self {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
base_dirs: BaseDirs::new().expect("Failed to get base directories"),
|
||||
browser_runner: BrowserRunner::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn instance() -> &'static ProfileImporter {
|
||||
&PROFILE_IMPORTER
|
||||
}
|
||||
|
||||
/// Detect existing browser profiles on the system
|
||||
pub fn detect_existing_profiles(
|
||||
&self,
|
||||
@@ -49,12 +51,12 @@ impl ProfileImporter {
|
||||
// Detect Chromium profiles
|
||||
detected_profiles.extend(self.detect_chromium_profiles()?);
|
||||
|
||||
// Detect Mullvad Browser profiles
|
||||
detected_profiles.extend(self.detect_mullvad_browser_profiles()?);
|
||||
|
||||
// Detect Zen Browser profiles
|
||||
detected_profiles.extend(self.detect_zen_browser_profiles()?);
|
||||
|
||||
// NOTE: Mullvad and Tor Browser profile imports are no longer supported.
|
||||
// We intentionally do not detect these profiles to avoid offering them in the UI.
|
||||
|
||||
// Remove duplicates based on path
|
||||
let mut seen_paths = HashSet::new();
|
||||
let unique_profiles: Vec<DetectedProfile> = detected_profiles
|
||||
@@ -80,9 +82,16 @@ impl ProfileImporter {
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
if let Some(app_data) = self.base_dirs.data_dir() {
|
||||
let firefox_dir = app_data.join("Mozilla/Firefox/Profiles");
|
||||
profiles.extend(self.scan_firefox_profiles_dir(&firefox_dir, "firefox")?);
|
||||
// Primary location in AppData\Roaming
|
||||
let app_data = self.base_dirs.data_dir();
|
||||
let firefox_dir = app_data.join("Mozilla/Firefox/Profiles");
|
||||
profiles.extend(self.scan_firefox_profiles_dir(&firefox_dir, "firefox")?);
|
||||
|
||||
// Also check AppData\Local for portable installations
|
||||
let local_app_data = self.base_dirs.data_local_dir();
|
||||
let firefox_local_dir = local_app_data.join("Mozilla/Firefox/Profiles");
|
||||
if firefox_local_dir.exists() {
|
||||
profiles.extend(self.scan_firefox_profiles_dir(&firefox_local_dir, "firefox")?);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -117,12 +126,11 @@ impl ProfileImporter {
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
if let Some(app_data) = self.base_dirs.data_dir() {
|
||||
// Firefox Developer Edition on Windows typically uses separate directories
|
||||
let firefox_dev_dir = app_data.join("Mozilla/Firefox Developer Edition/Profiles");
|
||||
if firefox_dev_dir.exists() {
|
||||
profiles.extend(self.scan_firefox_profiles_dir(&firefox_dev_dir, "firefox-developer")?);
|
||||
}
|
||||
let app_data = self.base_dirs.data_dir();
|
||||
// Firefox Developer Edition on Windows typically uses separate directories
|
||||
let firefox_dev_dir = app_data.join("Mozilla/Firefox Developer Edition/Profiles");
|
||||
if firefox_dev_dir.exists() {
|
||||
profiles.extend(self.scan_firefox_profiles_dir(&firefox_dev_dir, "firefox-developer")?);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -156,10 +164,9 @@ impl ProfileImporter {
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
if let Some(local_app_data) = self.base_dirs.data_local_dir() {
|
||||
let chrome_dir = local_app_data.join("Google/Chrome/User Data");
|
||||
profiles.extend(self.scan_chrome_profiles_dir(&chrome_dir, "chromium")?);
|
||||
}
|
||||
let local_app_data = self.base_dirs.data_local_dir();
|
||||
let chrome_dir = local_app_data.join("Google/Chrome/User Data");
|
||||
profiles.extend(self.scan_chrome_profiles_dir(&chrome_dir, "chromium")?);
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
@@ -186,10 +193,9 @@ impl ProfileImporter {
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
if let Some(local_app_data) = self.base_dirs.data_local_dir() {
|
||||
let chromium_dir = local_app_data.join("Chromium/User Data");
|
||||
profiles.extend(self.scan_chrome_profiles_dir(&chromium_dir, "chromium")?);
|
||||
}
|
||||
let local_app_data = self.base_dirs.data_local_dir();
|
||||
let chromium_dir = local_app_data.join("Chromium/User Data");
|
||||
profiles.extend(self.scan_chrome_profiles_dir(&chromium_dir, "chromium")?);
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
@@ -216,10 +222,9 @@ impl ProfileImporter {
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
if let Some(local_app_data) = self.base_dirs.data_local_dir() {
|
||||
let brave_dir = local_app_data.join("BraveSoftware/Brave-Browser/User Data");
|
||||
profiles.extend(self.scan_chrome_profiles_dir(&brave_dir, "brave")?);
|
||||
}
|
||||
let local_app_data = self.base_dirs.data_local_dir();
|
||||
let brave_dir = local_app_data.join("BraveSoftware/Brave-Browser/User Data");
|
||||
profiles.extend(self.scan_chrome_profiles_dir(&brave_dir, "brave")?);
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
@@ -234,38 +239,6 @@ impl ProfileImporter {
|
||||
Ok(profiles)
|
||||
}
|
||||
|
||||
/// Detect Mullvad Browser profiles
|
||||
fn detect_mullvad_browser_profiles(
|
||||
&self,
|
||||
) -> Result<Vec<DetectedProfile>, Box<dyn std::error::Error>> {
|
||||
let mut profiles = Vec::new();
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
let mullvad_dir = self
|
||||
.base_dirs
|
||||
.home_dir()
|
||||
.join("Library/Application Support/MullvadBrowser/Profiles");
|
||||
profiles.extend(self.scan_firefox_profiles_dir(&mullvad_dir, "mullvad-browser")?);
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
if let Some(app_data) = self.base_dirs.data_dir() {
|
||||
let mullvad_dir = app_data.join("MullvadBrowser/Profiles");
|
||||
profiles.extend(self.scan_firefox_profiles_dir(&mullvad_dir, "mullvad-browser")?);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
let mullvad_dir = self.base_dirs.home_dir().join(".mullvad-browser");
|
||||
profiles.extend(self.scan_firefox_profiles_dir(&mullvad_dir, "mullvad-browser")?);
|
||||
}
|
||||
|
||||
Ok(profiles)
|
||||
}
|
||||
|
||||
/// Detect Zen Browser profiles
|
||||
fn detect_zen_browser_profiles(
|
||||
&self,
|
||||
@@ -283,10 +256,9 @@ impl ProfileImporter {
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
if let Some(app_data) = self.base_dirs.data_dir() {
|
||||
let zen_dir = app_data.join("Zen/Profiles");
|
||||
profiles.extend(self.scan_firefox_profiles_dir(&zen_dir, "zen")?);
|
||||
}
|
||||
let app_data = self.base_dirs.data_dir();
|
||||
let zen_dir = app_data.join("Zen/Profiles");
|
||||
profiles.extend(self.scan_firefox_profiles_dir(&zen_dir, "zen")?);
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
@@ -532,6 +504,11 @@ impl ProfileImporter {
|
||||
browser_type: &str,
|
||||
new_profile_name: &str,
|
||||
) -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Disable imports for Mullvad and Tor browsers
|
||||
if browser_type == "mullvad-browser" || browser_type == "tor-browser" {
|
||||
return Err("Importing Mullvad Browser or Tor Browser profiles is not supported".into());
|
||||
}
|
||||
|
||||
// Validate that source path exists
|
||||
let source_path = Path::new(source_path);
|
||||
if !source_path.exists() {
|
||||
@@ -543,7 +520,7 @@ impl ProfileImporter {
|
||||
.map_err(|_| format!("Invalid browser type: {browser_type}"))?;
|
||||
|
||||
// Check if a profile with this name already exists
|
||||
let existing_profiles = self.browser_runner.list_profiles()?;
|
||||
let existing_profiles = BrowserRunner::instance().list_profiles()?;
|
||||
if existing_profiles
|
||||
.iter()
|
||||
.any(|p| p.name.to_lowercase() == new_profile_name.to_lowercase())
|
||||
@@ -551,32 +528,37 @@ impl ProfileImporter {
|
||||
return Err(format!("Profile with name '{new_profile_name}' already exists").into());
|
||||
}
|
||||
|
||||
// Create the new profile directory
|
||||
let snake_case_name = new_profile_name.to_lowercase().replace(' ', "_");
|
||||
let profiles_dir = self.browser_runner.get_profiles_dir();
|
||||
let new_profile_path = profiles_dir.join(&snake_case_name);
|
||||
// Generate UUID for new profile and create the directory structure
|
||||
let profile_id = uuid::Uuid::new_v4();
|
||||
let profiles_dir = BrowserRunner::instance().get_profiles_dir();
|
||||
let new_profile_uuid_dir = profiles_dir.join(profile_id.to_string());
|
||||
let new_profile_data_dir = new_profile_uuid_dir.join("profile");
|
||||
|
||||
create_dir_all(&new_profile_path)?;
|
||||
create_dir_all(&new_profile_uuid_dir)?;
|
||||
create_dir_all(&new_profile_data_dir)?;
|
||||
|
||||
// Copy all files from source to destination
|
||||
Self::copy_directory_recursive(source_path, &new_profile_path)?;
|
||||
// Copy all files from source to destination profile subdirectory
|
||||
Self::copy_directory_recursive(source_path, &new_profile_data_dir)?;
|
||||
|
||||
// Create the profile metadata without overwriting the imported data
|
||||
// We need to find a suitable version for this browser type
|
||||
let available_versions = self.get_default_version_for_browser(browser_type)?;
|
||||
|
||||
let profile = crate::browser_runner::BrowserProfile {
|
||||
let profile = crate::profile::BrowserProfile {
|
||||
id: profile_id,
|
||||
name: new_profile_name.to_string(),
|
||||
browser: browser_type.to_string(),
|
||||
version: available_versions,
|
||||
profile_path: new_profile_path.to_string_lossy().to_string(),
|
||||
proxy: None,
|
||||
proxy_id: None,
|
||||
process_id: None,
|
||||
last_launch: None,
|
||||
release_type: "stable".to_string(),
|
||||
camoufox_config: None,
|
||||
group_id: None,
|
||||
};
|
||||
|
||||
// Save the profile metadata
|
||||
self.browser_runner.save_profile(&profile)?;
|
||||
BrowserRunner::instance().save_profile(&profile)?;
|
||||
|
||||
println!(
|
||||
"Successfully imported profile '{}' from '{}'",
|
||||
@@ -592,26 +574,20 @@ impl ProfileImporter {
|
||||
&self,
|
||||
browser_type: &str,
|
||||
) -> Result<String, Box<dyn std::error::Error>> {
|
||||
// Try to get a downloaded version first, fallback to a reasonable default
|
||||
let registry =
|
||||
crate::downloaded_browsers::DownloadedBrowsersRegistry::load().unwrap_or_default();
|
||||
// Check if any version of the browser is downloaded
|
||||
let registry = crate::downloaded_browsers::DownloadedBrowsersRegistry::instance();
|
||||
let downloaded_versions = registry.get_downloaded_versions(browser_type);
|
||||
|
||||
if let Some(version) = downloaded_versions.first() {
|
||||
return Ok(version.clone());
|
||||
}
|
||||
|
||||
// If no downloaded versions, return a sensible default
|
||||
match browser_type {
|
||||
"firefox" => Ok("latest".to_string()),
|
||||
"firefox-developer" => Ok("latest".to_string()),
|
||||
"chromium" => Ok("latest".to_string()),
|
||||
"brave" => Ok("latest".to_string()),
|
||||
"zen" => Ok("latest".to_string()),
|
||||
"mullvad-browser" => Ok("13.5.16".to_string()), // Mullvad Browser common version
|
||||
"tor-browser" => Ok("latest".to_string()),
|
||||
_ => Ok("latest".to_string()),
|
||||
}
|
||||
// If no downloaded versions found, return an error
|
||||
Err(format!(
|
||||
"No downloaded versions found for browser '{}'. Please download a version of {} first before importing profiles.",
|
||||
browser_type,
|
||||
self.get_browser_display_name(browser_type)
|
||||
).into())
|
||||
}
|
||||
|
||||
/// Recursively copy directory contents
|
||||
@@ -642,7 +618,7 @@ impl ProfileImporter {
|
||||
// Tauri commands
|
||||
#[tauri::command]
|
||||
pub async fn detect_existing_profiles() -> Result<Vec<DetectedProfile>, String> {
|
||||
let importer = ProfileImporter::new();
|
||||
let importer = ProfileImporter::instance();
|
||||
importer
|
||||
.detect_existing_profiles()
|
||||
.map_err(|e| format!("Failed to detect existing profiles: {e}"))
|
||||
@@ -654,8 +630,216 @@ pub async fn import_browser_profile(
|
||||
browser_type: String,
|
||||
new_profile_name: String,
|
||||
) -> Result<(), String> {
|
||||
let importer = ProfileImporter::new();
|
||||
let importer = ProfileImporter::instance();
|
||||
importer
|
||||
.import_profile(&source_path, &browser_type, &new_profile_name)
|
||||
.map_err(|e| format!("Failed to import profile: {e}"))
|
||||
}
|
||||
|
||||
// Global singleton instance
|
||||
lazy_static::lazy_static! {
|
||||
static ref PROFILE_IMPORTER: ProfileImporter = ProfileImporter::new();
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::env;
|
||||
use tempfile::TempDir;
|
||||
|
||||
fn create_test_profile_importer() -> (ProfileImporter, TempDir) {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp directory");
|
||||
|
||||
// Set up a temporary home directory for testing
|
||||
env::set_var("HOME", temp_dir.path());
|
||||
|
||||
let importer = ProfileImporter::new();
|
||||
(importer, temp_dir)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_profile_importer_creation() {
|
||||
let (_importer, _temp_dir) = create_test_profile_importer();
|
||||
// Test passes if no panic occurs
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_browser_display_name() {
|
||||
let (importer, _temp_dir) = create_test_profile_importer();
|
||||
|
||||
assert_eq!(importer.get_browser_display_name("firefox"), "Firefox");
|
||||
assert_eq!(
|
||||
importer.get_browser_display_name("firefox-developer"),
|
||||
"Firefox Developer"
|
||||
);
|
||||
assert_eq!(
|
||||
importer.get_browser_display_name("chromium"),
|
||||
"Chrome/Chromium"
|
||||
);
|
||||
assert_eq!(importer.get_browser_display_name("brave"), "Brave");
|
||||
assert_eq!(
|
||||
importer.get_browser_display_name("mullvad-browser"),
|
||||
"Mullvad Browser"
|
||||
);
|
||||
assert_eq!(importer.get_browser_display_name("zen"), "Zen Browser");
|
||||
assert_eq!(
|
||||
importer.get_browser_display_name("tor-browser"),
|
||||
"Tor Browser"
|
||||
);
|
||||
assert_eq!(
|
||||
importer.get_browser_display_name("unknown"),
|
||||
"Unknown Browser"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_detect_existing_profiles_no_panic() {
|
||||
let (importer, _temp_dir) = create_test_profile_importer();
|
||||
|
||||
// This should not panic even if no browser profiles exist
|
||||
let result = importer.detect_existing_profiles();
|
||||
assert!(result.is_ok(), "detect_existing_profiles should not fail");
|
||||
|
||||
let _profiles = result.unwrap();
|
||||
// We can't assert specific profiles since they depend on the system
|
||||
// but we can verify the result is a valid Vec
|
||||
// We can't assert specific profiles since they depend on the system
|
||||
// but we can verify the result is a valid Vec (length check is always true for Vec, but shows intent)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_scan_firefox_profiles_dir_nonexistent() {
|
||||
let (importer, temp_dir) = create_test_profile_importer();
|
||||
|
||||
let nonexistent_dir = temp_dir.path().join("nonexistent");
|
||||
let result = importer.scan_firefox_profiles_dir(&nonexistent_dir, "firefox");
|
||||
|
||||
assert!(
|
||||
result.is_ok(),
|
||||
"Should handle nonexistent directory gracefully"
|
||||
);
|
||||
let profiles = result.unwrap();
|
||||
assert!(
|
||||
profiles.is_empty(),
|
||||
"Should return empty vector for nonexistent directory"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_scan_chrome_profiles_dir_nonexistent() {
|
||||
let (importer, temp_dir) = create_test_profile_importer();
|
||||
|
||||
let nonexistent_dir = temp_dir.path().join("nonexistent");
|
||||
let result = importer.scan_chrome_profiles_dir(&nonexistent_dir, "chromium");
|
||||
|
||||
assert!(
|
||||
result.is_ok(),
|
||||
"Should handle nonexistent directory gracefully"
|
||||
);
|
||||
let profiles = result.unwrap();
|
||||
assert!(
|
||||
profiles.is_empty(),
|
||||
"Should return empty vector for nonexistent directory"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_firefox_profiles_ini_empty() {
|
||||
let (importer, _temp_dir) = create_test_profile_importer();
|
||||
|
||||
let empty_content = "";
|
||||
let profiles_dir = Path::new("/tmp");
|
||||
let result = importer.parse_firefox_profiles_ini(empty_content, profiles_dir, "firefox");
|
||||
|
||||
assert!(result.is_ok(), "Should handle empty profiles.ini");
|
||||
let profiles = result.unwrap();
|
||||
assert!(
|
||||
profiles.is_empty(),
|
||||
"Should return empty vector for empty content"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_firefox_profiles_ini_valid() {
|
||||
let (importer, temp_dir) = create_test_profile_importer();
|
||||
|
||||
// Create a mock profile directory
|
||||
let profiles_dir = temp_dir.path().join("profiles");
|
||||
let profile_dir = profiles_dir.join("test.profile");
|
||||
fs::create_dir_all(&profile_dir).expect("Should create profile directory");
|
||||
|
||||
// Create a prefs.js file to make it look like a valid profile
|
||||
let prefs_file = profile_dir.join("prefs.js");
|
||||
fs::write(&prefs_file, "// Firefox preferences").expect("Should create prefs.js");
|
||||
|
||||
let profiles_ini_content = r#"
|
||||
[Profile0]
|
||||
Name=Test Profile
|
||||
IsRelative=1
|
||||
Path=test.profile
|
||||
"#;
|
||||
|
||||
let result =
|
||||
importer.parse_firefox_profiles_ini(profiles_ini_content, &profiles_dir, "firefox");
|
||||
|
||||
assert!(result.is_ok(), "Should parse valid profiles.ini");
|
||||
let profiles = result.unwrap();
|
||||
assert_eq!(profiles.len(), 1, "Should find one profile");
|
||||
assert_eq!(profiles[0].name, "Firefox - Test Profile");
|
||||
assert_eq!(profiles[0].browser, "firefox");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_copy_directory_recursive() {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp directory");
|
||||
|
||||
// Create source directory structure
|
||||
let source_dir = temp_dir.path().join("source");
|
||||
let source_subdir = source_dir.join("subdir");
|
||||
fs::create_dir_all(&source_subdir).expect("Should create source directories");
|
||||
|
||||
// Create some test files
|
||||
let source_file1 = source_dir.join("file1.txt");
|
||||
let source_file2 = source_subdir.join("file2.txt");
|
||||
fs::write(&source_file1, "content1").expect("Should create file1");
|
||||
fs::write(&source_file2, "content2").expect("Should create file2");
|
||||
|
||||
// Create destination directory
|
||||
let dest_dir = temp_dir.path().join("dest");
|
||||
|
||||
// Copy recursively
|
||||
let result = ProfileImporter::copy_directory_recursive(&source_dir, &dest_dir);
|
||||
assert!(result.is_ok(), "Should copy directory successfully");
|
||||
|
||||
// Verify files were copied
|
||||
let dest_file1 = dest_dir.join("file1.txt");
|
||||
let dest_file2 = dest_dir.join("subdir").join("file2.txt");
|
||||
|
||||
assert!(dest_file1.exists(), "file1.txt should be copied");
|
||||
assert!(dest_file2.exists(), "file2.txt should be copied");
|
||||
|
||||
let content1 = fs::read_to_string(&dest_file1).expect("Should read file1");
|
||||
let content2 = fs::read_to_string(&dest_file2).expect("Should read file2");
|
||||
|
||||
assert_eq!(content1, "content1", "file1 content should match");
|
||||
assert_eq!(content2, "content2", "file2 content should match");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_default_version_for_browser_no_versions() {
|
||||
let (importer, _temp_dir) = create_test_profile_importer();
|
||||
|
||||
// This should fail since no versions are downloaded in test environment
|
||||
let result = importer.get_default_version_for_browser("firefox");
|
||||
assert!(
|
||||
result.is_err(),
|
||||
"Should fail when no versions are available"
|
||||
);
|
||||
|
||||
let error_msg = result.unwrap_err().to_string();
|
||||
assert!(
|
||||
error_msg.contains("No downloaded versions found"),
|
||||
"Error should mention no versions found"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
+808
-39
@@ -1,6 +1,9 @@
|
||||
use directories::BaseDirs;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Mutex;
|
||||
use tauri_plugin_shell::ShellExt;
|
||||
|
||||
@@ -11,30 +14,246 @@ use crate::browser::ProxySettings;
|
||||
pub struct ProxyInfo {
|
||||
pub id: String,
|
||||
pub local_url: String,
|
||||
pub upstream_url: String,
|
||||
pub upstream_host: String,
|
||||
pub upstream_port: u16,
|
||||
pub upstream_type: String,
|
||||
pub local_port: u16,
|
||||
}
|
||||
|
||||
// Global proxy manager to track active proxies
|
||||
// Stored proxy configuration with name and ID for reuse
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct StoredProxy {
|
||||
pub id: String,
|
||||
pub name: String,
|
||||
pub proxy_settings: ProxySettings,
|
||||
}
|
||||
|
||||
impl StoredProxy {
|
||||
pub fn new(name: String, proxy_settings: ProxySettings) -> Self {
|
||||
Self {
|
||||
id: uuid::Uuid::new_v4().to_string(),
|
||||
name,
|
||||
proxy_settings,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn update_settings(&mut self, proxy_settings: ProxySettings) {
|
||||
self.proxy_settings = proxy_settings;
|
||||
}
|
||||
|
||||
pub fn update_name(&mut self, name: String) {
|
||||
self.name = name;
|
||||
}
|
||||
}
|
||||
|
||||
// Global proxy manager to track active proxies and stored proxy configurations
|
||||
pub struct ProxyManager {
|
||||
active_proxies: Mutex<HashMap<u32, ProxyInfo>>, // Maps browser process ID to proxy info
|
||||
// Store proxy info by profile name for persistence across browser restarts
|
||||
profile_proxies: Mutex<HashMap<String, (String, u16)>>, // Maps profile name to (upstream_url, port)
|
||||
profile_proxies: Mutex<HashMap<String, ProxySettings>>, // Maps profile name to proxy settings
|
||||
stored_proxies: Mutex<HashMap<String, StoredProxy>>, // Maps proxy ID to stored proxy
|
||||
base_dirs: BaseDirs,
|
||||
}
|
||||
|
||||
impl ProxyManager {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
let base_dirs = BaseDirs::new().expect("Failed to get base directories");
|
||||
let manager = Self {
|
||||
active_proxies: Mutex::new(HashMap::new()),
|
||||
profile_proxies: Mutex::new(HashMap::new()),
|
||||
stored_proxies: Mutex::new(HashMap::new()),
|
||||
base_dirs,
|
||||
};
|
||||
|
||||
// Load stored proxies on initialization
|
||||
if let Err(e) = manager.load_stored_proxies() {
|
||||
eprintln!("Warning: Failed to load stored proxies: {e}");
|
||||
}
|
||||
|
||||
manager
|
||||
}
|
||||
|
||||
// Start a proxy for a given upstream URL and associate it with a browser process ID
|
||||
// Get the path to the proxies directory
|
||||
fn get_proxies_dir(&self) -> PathBuf {
|
||||
let mut path = self.base_dirs.data_local_dir().to_path_buf();
|
||||
path.push(if cfg!(debug_assertions) {
|
||||
"DonutBrowserDev"
|
||||
} else {
|
||||
"DonutBrowser"
|
||||
});
|
||||
path.push("proxies");
|
||||
path
|
||||
}
|
||||
|
||||
// Get the path to a specific proxy file
|
||||
fn get_proxy_file_path(&self, proxy_id: &str) -> PathBuf {
|
||||
self.get_proxies_dir().join(format!("{proxy_id}.json"))
|
||||
}
|
||||
|
||||
// Load stored proxies from disk
|
||||
fn load_stored_proxies(&self) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let proxies_dir = self.get_proxies_dir();
|
||||
|
||||
if !proxies_dir.exists() {
|
||||
return Ok(()); // No proxies directory yet
|
||||
}
|
||||
|
||||
let mut stored_proxies = self.stored_proxies.lock().unwrap();
|
||||
|
||||
// Read all JSON files from the proxies directory
|
||||
for entry in fs::read_dir(&proxies_dir)? {
|
||||
let entry = entry?;
|
||||
let path = entry.path();
|
||||
|
||||
if path.extension().is_some_and(|ext| ext == "json") {
|
||||
let content = fs::read_to_string(&path)?;
|
||||
let proxy: StoredProxy = serde_json::from_str(&content)?;
|
||||
stored_proxies.insert(proxy.id.clone(), proxy);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Save a single proxy to disk
|
||||
fn save_proxy(&self, proxy: &StoredProxy) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let proxies_dir = self.get_proxies_dir();
|
||||
|
||||
// Ensure directory exists
|
||||
fs::create_dir_all(&proxies_dir)?;
|
||||
|
||||
let proxy_file = self.get_proxy_file_path(&proxy.id);
|
||||
let content = serde_json::to_string_pretty(proxy)?;
|
||||
fs::write(&proxy_file, content)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Delete a proxy file from disk
|
||||
fn delete_proxy_file(&self, proxy_id: &str) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let proxy_file = self.get_proxy_file_path(proxy_id);
|
||||
if proxy_file.exists() {
|
||||
fs::remove_file(proxy_file)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Create a new stored proxy
|
||||
pub fn create_stored_proxy(
|
||||
&self,
|
||||
name: String,
|
||||
proxy_settings: ProxySettings,
|
||||
) -> Result<StoredProxy, String> {
|
||||
// Check if name already exists
|
||||
{
|
||||
let stored_proxies = self.stored_proxies.lock().unwrap();
|
||||
if stored_proxies.values().any(|p| p.name == name) {
|
||||
return Err(format!("Proxy with name '{name}' already exists"));
|
||||
}
|
||||
}
|
||||
|
||||
let stored_proxy = StoredProxy::new(name, proxy_settings);
|
||||
|
||||
{
|
||||
let mut stored_proxies = self.stored_proxies.lock().unwrap();
|
||||
stored_proxies.insert(stored_proxy.id.clone(), stored_proxy.clone());
|
||||
}
|
||||
|
||||
if let Err(e) = self.save_proxy(&stored_proxy) {
|
||||
eprintln!("Warning: Failed to save proxy: {e}");
|
||||
}
|
||||
|
||||
Ok(stored_proxy)
|
||||
}
|
||||
|
||||
// Get all stored proxies
|
||||
pub fn get_stored_proxies(&self) -> Vec<StoredProxy> {
|
||||
let stored_proxies = self.stored_proxies.lock().unwrap();
|
||||
stored_proxies.values().cloned().collect()
|
||||
}
|
||||
|
||||
// Get a stored proxy by ID
|
||||
|
||||
// Update a stored proxy
|
||||
pub fn update_stored_proxy(
|
||||
&self,
|
||||
proxy_id: &str,
|
||||
name: Option<String>,
|
||||
proxy_settings: Option<ProxySettings>,
|
||||
) -> Result<StoredProxy, String> {
|
||||
// First, check for conflicts without holding a mutable reference
|
||||
{
|
||||
let stored_proxies = self.stored_proxies.lock().unwrap();
|
||||
|
||||
// Check if proxy exists
|
||||
if !stored_proxies.contains_key(proxy_id) {
|
||||
return Err(format!("Proxy with ID '{proxy_id}' not found"));
|
||||
}
|
||||
|
||||
// Check if new name conflicts with existing proxies
|
||||
if let Some(ref new_name) = name {
|
||||
if stored_proxies
|
||||
.values()
|
||||
.any(|p| p.id != proxy_id && p.name == *new_name)
|
||||
{
|
||||
return Err(format!("Proxy with name '{new_name}' already exists"));
|
||||
}
|
||||
}
|
||||
} // Release the lock here
|
||||
|
||||
// Now get mutable access for updates
|
||||
let updated_proxy = {
|
||||
let mut stored_proxies = self.stored_proxies.lock().unwrap();
|
||||
let stored_proxy = stored_proxies.get_mut(proxy_id).unwrap(); // Safe because we checked above
|
||||
|
||||
if let Some(new_name) = name {
|
||||
stored_proxy.update_name(new_name);
|
||||
}
|
||||
|
||||
if let Some(new_settings) = proxy_settings {
|
||||
stored_proxy.update_settings(new_settings);
|
||||
}
|
||||
|
||||
stored_proxy.clone()
|
||||
};
|
||||
|
||||
if let Err(e) = self.save_proxy(&updated_proxy) {
|
||||
eprintln!("Warning: Failed to save proxy: {e}");
|
||||
}
|
||||
|
||||
Ok(updated_proxy)
|
||||
}
|
||||
|
||||
// Delete a stored proxy
|
||||
pub fn delete_stored_proxy(&self, proxy_id: &str) -> Result<(), String> {
|
||||
{
|
||||
let mut stored_proxies = self.stored_proxies.lock().unwrap();
|
||||
if stored_proxies.remove(proxy_id).is_none() {
|
||||
return Err(format!("Proxy with ID '{proxy_id}' not found"));
|
||||
}
|
||||
}
|
||||
|
||||
if let Err(e) = self.delete_proxy_file(proxy_id) {
|
||||
eprintln!("Warning: Failed to delete proxy file: {e}");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Get proxy settings for a stored proxy ID
|
||||
pub fn get_proxy_settings_by_id(&self, proxy_id: &str) -> Option<ProxySettings> {
|
||||
let stored_proxies = self.stored_proxies.lock().unwrap();
|
||||
stored_proxies
|
||||
.get(proxy_id)
|
||||
.map(|p| p.proxy_settings.clone())
|
||||
}
|
||||
|
||||
// Start a proxy for given proxy settings and associate it with a browser process ID
|
||||
// If proxy_settings is None, starts a direct proxy for traffic monitoring
|
||||
pub async fn start_proxy(
|
||||
&self,
|
||||
app_handle: tauri::AppHandle,
|
||||
upstream_url: &str,
|
||||
proxy_settings: Option<&ProxySettings>,
|
||||
browser_pid: u32,
|
||||
profile_name: Option<&str>,
|
||||
) -> Result<ProxySettings, String> {
|
||||
@@ -43,10 +262,11 @@ impl ProxyManager {
|
||||
let proxies = self.active_proxies.lock().unwrap();
|
||||
if let Some(proxy) = proxies.get(&browser_pid) {
|
||||
return Ok(ProxySettings {
|
||||
enabled: true,
|
||||
proxy_type: "http".to_string(),
|
||||
host: "localhost".to_string(),
|
||||
host: "127.0.0.1".to_string(), // Use 127.0.0.1 instead of localhost for better compatibility
|
||||
port: proxy.local_port,
|
||||
username: None,
|
||||
password: None,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -54,31 +274,71 @@ impl ProxyManager {
|
||||
// Check if we have a preferred port for this profile
|
||||
let preferred_port = if let Some(name) = profile_name {
|
||||
let profile_proxies = self.profile_proxies.lock().unwrap();
|
||||
profile_proxies.get(name).map(|(_, port)| *port)
|
||||
profile_proxies.get(name).and_then(|_settings| {
|
||||
// Find existing proxy with same settings to reuse port
|
||||
let active_proxies = self.active_proxies.lock().unwrap();
|
||||
active_proxies
|
||||
.values()
|
||||
.find(|p| {
|
||||
if let Some(proxy_settings) = proxy_settings {
|
||||
p.upstream_host == proxy_settings.host
|
||||
&& p.upstream_port == proxy_settings.port
|
||||
&& p.upstream_type == proxy_settings.proxy_type
|
||||
} else {
|
||||
p.upstream_type == "DIRECT"
|
||||
}
|
||||
})
|
||||
.map(|p| p.local_port)
|
||||
})
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// Start a new proxy using the nodecar binary
|
||||
// Start a new proxy using the nodecar binary with the correct CLI interface
|
||||
let mut nodecar = app_handle
|
||||
.shell()
|
||||
.sidecar("nodecar")
|
||||
.unwrap()
|
||||
.map_err(|e| format!("Failed to create sidecar: {e}"))?
|
||||
.arg("proxy")
|
||||
.arg("start")
|
||||
.arg("-u")
|
||||
.arg(upstream_url);
|
||||
.arg("start");
|
||||
|
||||
// Add upstream proxy settings if provided, otherwise create direct proxy
|
||||
if let Some(proxy_settings) = proxy_settings {
|
||||
nodecar = nodecar
|
||||
.arg("--host")
|
||||
.arg(&proxy_settings.host)
|
||||
.arg("--proxy-port")
|
||||
.arg(proxy_settings.port.to_string())
|
||||
.arg("--type")
|
||||
.arg(&proxy_settings.proxy_type);
|
||||
|
||||
// Add credentials if provided
|
||||
if let Some(username) = &proxy_settings.username {
|
||||
nodecar = nodecar.arg("--username").arg(username);
|
||||
}
|
||||
if let Some(password) = &proxy_settings.password {
|
||||
nodecar = nodecar.arg("--password").arg(password);
|
||||
}
|
||||
}
|
||||
|
||||
// If we have a preferred port, use it
|
||||
if let Some(port) = preferred_port {
|
||||
nodecar = nodecar.arg("-p").arg(port.to_string());
|
||||
nodecar = nodecar.arg("--port").arg(port.to_string());
|
||||
}
|
||||
|
||||
let output = nodecar.output().await.unwrap();
|
||||
// Execute the command and wait for it to complete
|
||||
// The nodecar binary should start the worker and then exit
|
||||
let output = nodecar
|
||||
.output()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to execute nodecar: {e}"))?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
return Err(format!("Proxy start failed: {stderr}"));
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
return Err(format!(
|
||||
"Proxy start failed - stdout: {stdout}, stderr: {stderr}"
|
||||
));
|
||||
}
|
||||
|
||||
let json_string =
|
||||
@@ -95,15 +355,17 @@ impl ProxyManager {
|
||||
.as_str()
|
||||
.ok_or("Missing local URL")?
|
||||
.to_string();
|
||||
let upstream_url_str = json["upstreamUrl"]
|
||||
.as_str()
|
||||
.ok_or("Missing upstream URL")?
|
||||
.to_string();
|
||||
|
||||
let proxy_info = ProxyInfo {
|
||||
id: id.to_string(),
|
||||
local_url,
|
||||
upstream_url: upstream_url_str.clone(),
|
||||
upstream_host: proxy_settings
|
||||
.map(|p| p.host.clone())
|
||||
.unwrap_or_else(|| "DIRECT".to_string()),
|
||||
upstream_port: proxy_settings.map(|p| p.port).unwrap_or(0),
|
||||
upstream_type: proxy_settings
|
||||
.map(|p| p.proxy_type.clone())
|
||||
.unwrap_or_else(|| "DIRECT".to_string()),
|
||||
local_port,
|
||||
};
|
||||
|
||||
@@ -115,16 +377,19 @@ impl ProxyManager {
|
||||
|
||||
// Store the profile proxy info for persistence
|
||||
if let Some(name) = profile_name {
|
||||
let mut profile_proxies = self.profile_proxies.lock().unwrap();
|
||||
profile_proxies.insert(name.to_string(), (upstream_url_str, local_port));
|
||||
if let Some(proxy_settings) = proxy_settings {
|
||||
let mut profile_proxies = self.profile_proxies.lock().unwrap();
|
||||
profile_proxies.insert(name.to_string(), proxy_settings.clone());
|
||||
}
|
||||
}
|
||||
|
||||
// Return proxy settings for the browser
|
||||
Ok(ProxySettings {
|
||||
enabled: true,
|
||||
proxy_type: "http".to_string(),
|
||||
host: "localhost".to_string(),
|
||||
host: "127.0.0.1".to_string(), // Use 127.0.0.1 instead of localhost for better compatibility
|
||||
port: proxy_info.local_port,
|
||||
username: None,
|
||||
password: None,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -163,21 +428,44 @@ impl ProxyManager {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Get proxy settings for a browser process ID
|
||||
pub fn get_proxy_settings(&self, browser_pid: u32) -> Option<ProxySettings> {
|
||||
let proxies = self.active_proxies.lock().unwrap();
|
||||
proxies.get(&browser_pid).map(|proxy| ProxySettings {
|
||||
enabled: true,
|
||||
proxy_type: "http".to_string(),
|
||||
host: "localhost".to_string(),
|
||||
port: proxy.local_port,
|
||||
})
|
||||
// Update the PID mapping for an existing proxy
|
||||
pub fn update_proxy_pid(&self, old_pid: u32, new_pid: u32) -> Result<(), String> {
|
||||
let mut proxies = self.active_proxies.lock().unwrap();
|
||||
if let Some(proxy_info) = proxies.remove(&old_pid) {
|
||||
proxies.insert(new_pid, proxy_info);
|
||||
Ok(())
|
||||
} else {
|
||||
Err(format!("No proxy found for PID {old_pid}"))
|
||||
}
|
||||
}
|
||||
|
||||
// Get stored proxy info for a profile
|
||||
pub fn get_profile_proxy_info(&self, profile_name: &str) -> Option<(String, u16)> {
|
||||
let profile_proxies = self.profile_proxies.lock().unwrap();
|
||||
profile_proxies.get(profile_name).cloned()
|
||||
// Check if a process is still running
|
||||
fn is_process_running(&self, pid: u32) -> bool {
|
||||
use sysinfo::{Pid, System};
|
||||
let system = System::new_all();
|
||||
system.process(Pid::from(pid as usize)).is_some()
|
||||
}
|
||||
|
||||
// Clean up proxies for dead browser processes
|
||||
pub async fn cleanup_dead_proxies(
|
||||
&self,
|
||||
app_handle: tauri::AppHandle,
|
||||
) -> Result<Vec<u32>, String> {
|
||||
let dead_pids = {
|
||||
let proxies = self.active_proxies.lock().unwrap();
|
||||
proxies
|
||||
.keys()
|
||||
.filter(|&&pid| pid != 0 && !self.is_process_running(pid)) // Skip temporary PID 0
|
||||
.copied()
|
||||
.collect::<Vec<u32>>()
|
||||
};
|
||||
|
||||
for dead_pid in &dead_pids {
|
||||
println!("Cleaning up proxy for dead browser process PID: {dead_pid}");
|
||||
let _ = self.stop_proxy(app_handle.clone(), *dead_pid).await;
|
||||
}
|
||||
|
||||
Ok(dead_pids)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -185,3 +473,484 @@ impl ProxyManager {
|
||||
lazy_static::lazy_static! {
|
||||
pub static ref PROXY_MANAGER: ProxyManager = ProxyManager::new();
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::env;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
use std::time::Duration;
|
||||
use tokio::time::sleep;
|
||||
|
||||
// Mock HTTP server for testing
|
||||
|
||||
use http_body_util::Full;
|
||||
use hyper::body::Bytes;
|
||||
use hyper::server::conn::http1;
|
||||
use hyper::service::service_fn;
|
||||
use hyper::Response;
|
||||
use hyper_util::rt::TokioIo;
|
||||
use tokio::net::TcpListener;
|
||||
|
||||
// Helper function to build nodecar binary for testing
|
||||
async fn ensure_nodecar_binary() -> Result<PathBuf, Box<dyn std::error::Error>> {
|
||||
let cargo_manifest_dir = env::var("CARGO_MANIFEST_DIR")?;
|
||||
let project_root = PathBuf::from(cargo_manifest_dir)
|
||||
.parent()
|
||||
.unwrap()
|
||||
.to_path_buf();
|
||||
let nodecar_dir = project_root.join("nodecar");
|
||||
let nodecar_binary = nodecar_dir.join("nodecar-bin");
|
||||
|
||||
// Check if binary already exists
|
||||
if nodecar_binary.exists() {
|
||||
return Ok(nodecar_binary);
|
||||
}
|
||||
|
||||
// Build the nodecar binary
|
||||
println!("Building nodecar binary for tests...");
|
||||
|
||||
// Install dependencies
|
||||
let install_status = Command::new("pnpm")
|
||||
.args(["install", "--frozen-lockfile"])
|
||||
.current_dir(&nodecar_dir)
|
||||
.status()?;
|
||||
|
||||
if !install_status.success() {
|
||||
return Err("Failed to install nodecar dependencies".into());
|
||||
}
|
||||
|
||||
// Determine the target architecture
|
||||
let target = if cfg!(target_arch = "aarch64") && cfg!(target_os = "macos") {
|
||||
"build:mac-aarch64"
|
||||
} else if cfg!(target_arch = "x86_64") && cfg!(target_os = "macos") {
|
||||
"build:mac-x86_64"
|
||||
} else if cfg!(target_arch = "x86_64") && cfg!(target_os = "linux") {
|
||||
"build:linux-x64"
|
||||
} else if cfg!(target_arch = "aarch64") && cfg!(target_os = "linux") {
|
||||
"build:linux-arm64"
|
||||
} else if cfg!(target_arch = "x86_64") && cfg!(target_os = "windows") {
|
||||
"build:win-x64"
|
||||
} else if cfg!(target_arch = "aarch64") && cfg!(target_os = "windows") {
|
||||
"build:win-arm64"
|
||||
} else {
|
||||
return Err("Unsupported target architecture for nodecar build".into());
|
||||
};
|
||||
|
||||
// Build the binary
|
||||
let build_status = Command::new("pnpm")
|
||||
.args(["run", target])
|
||||
.current_dir(&nodecar_dir)
|
||||
.status()?;
|
||||
|
||||
if !build_status.success() {
|
||||
return Err("Failed to build nodecar binary".into());
|
||||
}
|
||||
|
||||
if !nodecar_binary.exists() {
|
||||
return Err("Nodecar binary was not created successfully".into());
|
||||
}
|
||||
|
||||
Ok(nodecar_binary)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_proxy_settings_validation() {
|
||||
// Test valid proxy settings
|
||||
let valid_settings = ProxySettings {
|
||||
proxy_type: "http".to_string(),
|
||||
host: "127.0.0.1".to_string(),
|
||||
port: 8080,
|
||||
username: Some("user".to_string()),
|
||||
password: Some("pass".to_string()),
|
||||
};
|
||||
|
||||
assert!(
|
||||
!valid_settings.host.is_empty(),
|
||||
"Valid settings should have non-empty host"
|
||||
);
|
||||
assert!(
|
||||
valid_settings.port > 0,
|
||||
"Valid settings should have positive port"
|
||||
);
|
||||
assert_eq!(valid_settings.proxy_type, "http", "Proxy type should match");
|
||||
assert!(
|
||||
valid_settings.username.is_some(),
|
||||
"Username should be present"
|
||||
);
|
||||
assert!(
|
||||
valid_settings.password.is_some(),
|
||||
"Password should be present"
|
||||
);
|
||||
|
||||
// Test proxy settings with empty values
|
||||
let empty_settings = ProxySettings {
|
||||
proxy_type: "http".to_string(),
|
||||
host: "".to_string(),
|
||||
port: 0,
|
||||
username: None,
|
||||
password: None,
|
||||
};
|
||||
|
||||
assert!(
|
||||
empty_settings.host.is_empty(),
|
||||
"Empty settings should have empty host"
|
||||
);
|
||||
assert_eq!(
|
||||
empty_settings.port, 0,
|
||||
"Empty settings should have zero port"
|
||||
);
|
||||
assert!(empty_settings.username.is_none(), "Username should be None");
|
||||
assert!(empty_settings.password.is_none(), "Password should be None");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_proxy_manager_concurrent_access() {
|
||||
use std::sync::Arc;
|
||||
|
||||
let proxy_manager = Arc::new(ProxyManager::new());
|
||||
let mut handles = vec![];
|
||||
|
||||
// Spawn multiple tasks that access the proxy manager concurrently
|
||||
for i in 0..10 {
|
||||
let pm = proxy_manager.clone();
|
||||
let handle = tokio::spawn(async move {
|
||||
let browser_pid = (1000 + i) as u32;
|
||||
let proxy_info = ProxyInfo {
|
||||
id: format!("proxy_{i}"),
|
||||
local_url: format!("http://127.0.0.1:{}", 8000 + i),
|
||||
upstream_host: "127.0.0.1".to_string(),
|
||||
upstream_port: 3128,
|
||||
upstream_type: "http".to_string(),
|
||||
local_port: (8000 + i) as u16,
|
||||
};
|
||||
|
||||
// Add proxy
|
||||
{
|
||||
let mut active_proxies = pm.active_proxies.lock().unwrap();
|
||||
active_proxies.insert(browser_pid, proxy_info);
|
||||
}
|
||||
|
||||
browser_pid
|
||||
});
|
||||
handles.push(handle);
|
||||
}
|
||||
|
||||
// Wait for all tasks to complete
|
||||
let results: Vec<u32> = futures_util::future::join_all(handles)
|
||||
.await
|
||||
.into_iter()
|
||||
.map(|r| r.unwrap())
|
||||
.collect();
|
||||
|
||||
// Verify all browser PIDs were processed
|
||||
assert_eq!(results.len(), 10);
|
||||
for (i, &browser_pid) in results.iter().enumerate() {
|
||||
assert_eq!(browser_pid, (1000 + i) as u32);
|
||||
}
|
||||
}
|
||||
|
||||
// Integration test that actually builds and uses nodecar binary
|
||||
#[tokio::test]
|
||||
async fn test_proxy_integration_with_real_nodecar() -> Result<(), Box<dyn std::error::Error>> {
|
||||
// This test requires nodecar to be built and available
|
||||
let nodecar_path = ensure_nodecar_binary().await?;
|
||||
|
||||
// Start a mock upstream HTTP server
|
||||
let upstream_listener = TcpListener::bind("127.0.0.1:0").await?;
|
||||
let upstream_addr = upstream_listener.local_addr()?;
|
||||
|
||||
// Spawn upstream server
|
||||
let server_handle = tokio::spawn(async move {
|
||||
while let Ok((stream, _)) = upstream_listener.accept().await {
|
||||
let io = TokioIo::new(stream);
|
||||
tokio::task::spawn(async move {
|
||||
let _ = http1::Builder::new()
|
||||
.serve_connection(
|
||||
io,
|
||||
service_fn(|_req| async {
|
||||
Ok::<_, hyper::Error>(Response::new(Full::new(Bytes::from("Upstream OK"))))
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// Wait for server to start
|
||||
sleep(Duration::from_millis(100)).await;
|
||||
|
||||
// Test nodecar proxy start command directly (using the binary itself, not node)
|
||||
let mut cmd = Command::new(&nodecar_path);
|
||||
cmd
|
||||
.arg("proxy")
|
||||
.arg("start")
|
||||
.arg("--host")
|
||||
.arg(upstream_addr.ip().to_string())
|
||||
.arg("--proxy-port")
|
||||
.arg(upstream_addr.port().to_string())
|
||||
.arg("--type")
|
||||
.arg("http");
|
||||
|
||||
// Set a timeout for the command
|
||||
let output = tokio::time::timeout(Duration::from_secs(60), async { cmd.output() }).await??;
|
||||
|
||||
if output.status.success() {
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
let config: serde_json::Value = serde_json::from_str(&stdout)?;
|
||||
|
||||
// Verify proxy configuration
|
||||
assert!(config["id"].is_string());
|
||||
assert!(config["localPort"].is_number());
|
||||
assert!(config["localUrl"].is_string());
|
||||
|
||||
let proxy_id = config["id"].as_str().unwrap();
|
||||
let local_port = config["localPort"].as_u64().unwrap();
|
||||
|
||||
// Wait for proxy worker to start
|
||||
println!("Waiting for proxy worker to start...");
|
||||
tokio::time::sleep(Duration::from_secs(1)).await;
|
||||
|
||||
// Test that the local port is listening
|
||||
let mut port_test = Command::new("nc");
|
||||
port_test
|
||||
.arg("-z")
|
||||
.arg("127.0.0.1")
|
||||
.arg(local_port.to_string());
|
||||
|
||||
let port_output = port_test.output()?;
|
||||
if port_output.status.success() {
|
||||
println!("Proxy is listening on port {local_port}");
|
||||
} else {
|
||||
println!("Warning: Proxy port {local_port} is not listening");
|
||||
}
|
||||
|
||||
// Test stopping the proxy
|
||||
let mut stop_cmd = Command::new(&nodecar_path);
|
||||
stop_cmd.arg("proxy").arg("stop").arg("--id").arg(proxy_id);
|
||||
|
||||
let stop_output =
|
||||
tokio::time::timeout(Duration::from_secs(60), async { stop_cmd.output() }).await??;
|
||||
|
||||
assert!(stop_output.status.success());
|
||||
|
||||
println!("Integration test passed: nodecar proxy start/stop works correctly");
|
||||
} else {
|
||||
let stderr = String::from_utf8(output.stderr)?;
|
||||
eprintln!("Nodecar failed: {stderr}");
|
||||
return Err(format!("Nodecar command failed: {stderr}").into());
|
||||
}
|
||||
|
||||
// Clean up server
|
||||
server_handle.abort();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Test that validates the command line arguments are constructed correctly
|
||||
#[test]
|
||||
fn test_proxy_command_construction() {
|
||||
let proxy_settings = ProxySettings {
|
||||
proxy_type: "http".to_string(),
|
||||
host: "proxy.example.com".to_string(),
|
||||
port: 8080,
|
||||
username: Some("user".to_string()),
|
||||
password: Some("pass".to_string()),
|
||||
};
|
||||
|
||||
// Test command arguments match expected format
|
||||
let expected_args = [
|
||||
"proxy",
|
||||
"start",
|
||||
"--host",
|
||||
"proxy.example.com",
|
||||
"--proxy-port",
|
||||
"8080",
|
||||
"--type",
|
||||
"http",
|
||||
"--username",
|
||||
"user",
|
||||
"--password",
|
||||
"pass",
|
||||
];
|
||||
|
||||
// This test verifies the argument structure without actually running the command
|
||||
assert_eq!(
|
||||
proxy_settings.host, "proxy.example.com",
|
||||
"Host should match expected value"
|
||||
);
|
||||
assert_eq!(
|
||||
proxy_settings.port, 8080,
|
||||
"Port should match expected value"
|
||||
);
|
||||
assert_eq!(
|
||||
proxy_settings.proxy_type, "http",
|
||||
"Proxy type should match expected value"
|
||||
);
|
||||
assert_eq!(
|
||||
proxy_settings.username.as_ref().unwrap(),
|
||||
"user",
|
||||
"Username should match expected value"
|
||||
);
|
||||
assert_eq!(
|
||||
proxy_settings.password.as_ref().unwrap(),
|
||||
"pass",
|
||||
"Password should match expected value"
|
||||
);
|
||||
|
||||
// Verify expected args structure
|
||||
assert_eq!(expected_args[0], "proxy", "First arg should be 'proxy'");
|
||||
assert_eq!(expected_args[1], "start", "Second arg should be 'start'");
|
||||
assert_eq!(expected_args[2], "--host", "Third arg should be '--host'");
|
||||
assert_eq!(
|
||||
expected_args[3], "proxy.example.com",
|
||||
"Fourth arg should be host value"
|
||||
);
|
||||
}
|
||||
|
||||
// Test the CLI detachment specifically - ensure the CLI exits properly
|
||||
#[tokio::test]
|
||||
async fn test_cli_exits_after_proxy_start() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let nodecar_path = ensure_nodecar_binary().await?;
|
||||
|
||||
// Test that the CLI exits quickly with a mock upstream
|
||||
let mut cmd = Command::new(&nodecar_path);
|
||||
cmd
|
||||
.arg("proxy")
|
||||
.arg("start")
|
||||
.arg("--host")
|
||||
.arg("httpbin.org")
|
||||
.arg("--proxy-port")
|
||||
.arg("80")
|
||||
.arg("--type")
|
||||
.arg("http");
|
||||
|
||||
let start_time = std::time::Instant::now();
|
||||
let output = tokio::time::timeout(Duration::from_secs(3), async { cmd.output() }).await;
|
||||
|
||||
match output {
|
||||
Ok(Ok(cmd_output)) => {
|
||||
let execution_time = start_time.elapsed();
|
||||
|
||||
if cmd_output.status.success() {
|
||||
let stdout = String::from_utf8(cmd_output.stdout)?;
|
||||
let config: serde_json::Value = serde_json::from_str(&stdout)?;
|
||||
|
||||
// Clean up - try to stop the proxy
|
||||
if let Some(proxy_id) = config["id"].as_str() {
|
||||
let mut stop_cmd = Command::new(&nodecar_path);
|
||||
stop_cmd.arg("proxy").arg("stop").arg("--id").arg(proxy_id);
|
||||
let _ = stop_cmd.output();
|
||||
}
|
||||
}
|
||||
|
||||
println!("CLI detachment test passed - CLI exited in {execution_time:?}");
|
||||
}
|
||||
Ok(Err(e)) => {
|
||||
return Err(format!("Command execution failed: {e}").into());
|
||||
}
|
||||
Err(_) => {
|
||||
return Err("CLI command timed out - this indicates improper detachment".into());
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Test that validates proper CLI detachment behavior
|
||||
#[tokio::test]
|
||||
async fn test_cli_detachment_behavior() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let nodecar_path = ensure_nodecar_binary().await?;
|
||||
|
||||
// Test that the CLI command exits quickly even with a real upstream
|
||||
let mut cmd = Command::new(&nodecar_path);
|
||||
cmd
|
||||
.arg("proxy")
|
||||
.arg("start")
|
||||
.arg("--host")
|
||||
.arg("httpbin.org") // Use a known good endpoint
|
||||
.arg("--proxy-port")
|
||||
.arg("80")
|
||||
.arg("--type")
|
||||
.arg("http");
|
||||
|
||||
let output = tokio::time::timeout(Duration::from_secs(60), async { cmd.output() }).await??;
|
||||
|
||||
if output.status.success() {
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
let config: serde_json::Value = serde_json::from_str(&stdout)?;
|
||||
let proxy_id = config["id"].as_str().unwrap();
|
||||
|
||||
// Clean up
|
||||
let mut stop_cmd = Command::new(&nodecar_path);
|
||||
stop_cmd.arg("proxy").arg("stop").arg("--id").arg(proxy_id);
|
||||
let _ = stop_cmd.output();
|
||||
|
||||
println!("CLI detachment test passed");
|
||||
} else {
|
||||
// Even if the upstream fails, the CLI should still exit quickly
|
||||
println!("CLI command failed but exited quickly as expected");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Test that validates URL encoding for special characters in credentials
|
||||
#[tokio::test]
|
||||
async fn test_proxy_credentials_encoding() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let nodecar_path = ensure_nodecar_binary().await?;
|
||||
|
||||
// Test with credentials that include special characters
|
||||
let mut cmd = Command::new(&nodecar_path);
|
||||
cmd
|
||||
.arg("proxy")
|
||||
.arg("start")
|
||||
.arg("--host")
|
||||
.arg("test.example.com")
|
||||
.arg("--proxy-port")
|
||||
.arg("8080")
|
||||
.arg("--type")
|
||||
.arg("http")
|
||||
.arg("--username")
|
||||
.arg("user@domain.com") // Contains @ symbol
|
||||
.arg("--password")
|
||||
.arg("pass word!"); // Contains space and special character
|
||||
|
||||
let output = tokio::time::timeout(Duration::from_secs(60), async { cmd.output() }).await??;
|
||||
|
||||
if output.status.success() {
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
let config: serde_json::Value = serde_json::from_str(&stdout)?;
|
||||
|
||||
let upstream_url = config["upstreamUrl"].as_str().unwrap();
|
||||
|
||||
println!("Generated upstream URL: {upstream_url}");
|
||||
|
||||
// Verify that special characters are properly encoded
|
||||
assert!(upstream_url.contains("user%40domain.com"));
|
||||
// The password may be encoded as "pass%20word!" or "pass%20word%21" depending on implementation
|
||||
assert!(upstream_url.contains("pass%20word"));
|
||||
|
||||
println!("URL encoding test passed - special characters handled correctly");
|
||||
|
||||
// Clean up
|
||||
let proxy_id = config["id"].as_str().unwrap();
|
||||
let mut stop_cmd = Command::new(&nodecar_path);
|
||||
stop_cmd.arg("proxy").arg("stop").arg("--id").arg(proxy_id);
|
||||
let _ = stop_cmd.output();
|
||||
} else {
|
||||
// This test might fail if the upstream doesn't exist, but we mainly care about URL construction
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
let stderr = String::from_utf8(output.stderr)?;
|
||||
println!("Command failed (expected for non-existent upstream):");
|
||||
println!("Stdout: {stdout}");
|
||||
println!("Stderr: {stderr}");
|
||||
|
||||
// The important thing is that the command completed quickly
|
||||
println!("URL encoding test completed - credentials should be properly encoded");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,7 +4,7 @@ use std::fs::{self, create_dir_all};
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::api_client::ApiClient;
|
||||
use crate::browser_version_service::BrowserVersionService;
|
||||
use crate::version_updater;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct TableSortingSettings {
|
||||
@@ -25,40 +25,19 @@ impl Default for TableSortingSettings {
|
||||
pub struct AppSettings {
|
||||
#[serde(default)]
|
||||
pub set_as_default_browser: bool,
|
||||
#[serde(default = "default_show_settings_on_startup")]
|
||||
pub show_settings_on_startup: bool,
|
||||
#[serde(default = "default_theme")]
|
||||
pub theme: String, // "light", "dark", or "system"
|
||||
#[serde(default = "default_auto_updates_enabled")]
|
||||
pub auto_updates_enabled: bool,
|
||||
#[serde(default = "default_auto_delete_unused_binaries")]
|
||||
pub auto_delete_unused_binaries: bool,
|
||||
}
|
||||
|
||||
fn default_show_settings_on_startup() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn default_theme() -> String {
|
||||
"system".to_string()
|
||||
}
|
||||
|
||||
fn default_auto_updates_enabled() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn default_auto_delete_unused_binaries() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
impl Default for AppSettings {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
set_as_default_browser: false,
|
||||
show_settings_on_startup: default_show_settings_on_startup(),
|
||||
theme: default_theme(),
|
||||
auto_updates_enabled: default_auto_updates_enabled(),
|
||||
auto_delete_unused_binaries: default_auto_delete_unused_binaries(),
|
||||
theme: "system".to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -68,12 +47,16 @@ pub struct SettingsManager {
|
||||
}
|
||||
|
||||
impl SettingsManager {
|
||||
pub fn new() -> Self {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
base_dirs: BaseDirs::new().expect("Failed to get base directories"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn instance() -> &'static SettingsManager {
|
||||
&SETTINGS_MANAGER
|
||||
}
|
||||
|
||||
pub fn get_settings_dir(&self) -> PathBuf {
|
||||
let mut path = self.base_dirs.data_local_dir().to_path_buf();
|
||||
path.push(if cfg!(debug_assertions) {
|
||||
@@ -165,19 +148,14 @@ impl SettingsManager {
|
||||
}
|
||||
|
||||
pub fn should_show_settings_on_startup(&self) -> Result<bool, Box<dyn std::error::Error>> {
|
||||
let settings = self.load_settings()?;
|
||||
|
||||
// Show prompt if:
|
||||
// 1. User wants to see the prompt
|
||||
// 2. Donut Browser is not set as default
|
||||
// 3. User hasn't explicitly disabled the default browser setting
|
||||
Ok(settings.show_settings_on_startup && !settings.set_as_default_browser)
|
||||
// Always return false - we don't show settings on startup anymore
|
||||
Ok(false)
|
||||
}
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_app_settings() -> Result<AppSettings, String> {
|
||||
let manager = SettingsManager::new();
|
||||
let manager = SettingsManager::instance();
|
||||
manager
|
||||
.load_settings()
|
||||
.map_err(|e| format!("Failed to load settings: {e}"))
|
||||
@@ -185,7 +163,7 @@ pub async fn get_app_settings() -> Result<AppSettings, String> {
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn save_app_settings(settings: AppSettings) -> Result<(), String> {
|
||||
let manager = SettingsManager::new();
|
||||
let manager = SettingsManager::instance();
|
||||
manager
|
||||
.save_settings(&settings)
|
||||
.map_err(|e| format!("Failed to save settings: {e}"))
|
||||
@@ -193,7 +171,7 @@ pub async fn save_app_settings(settings: AppSettings) -> Result<(), String> {
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn should_show_settings_on_startup() -> Result<bool, String> {
|
||||
let manager = SettingsManager::new();
|
||||
let manager = SettingsManager::instance();
|
||||
manager
|
||||
.should_show_settings_on_startup()
|
||||
.map_err(|e| format!("Failed to check prompt setting: {e}"))
|
||||
@@ -201,7 +179,7 @@ pub async fn should_show_settings_on_startup() -> Result<bool, String> {
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_table_sorting_settings() -> Result<TableSortingSettings, String> {
|
||||
let manager = SettingsManager::new();
|
||||
let manager = SettingsManager::instance();
|
||||
manager
|
||||
.load_table_sorting()
|
||||
.map_err(|e| format!("Failed to load table sorting settings: {e}"))
|
||||
@@ -209,38 +187,271 @@ pub async fn get_table_sorting_settings() -> Result<TableSortingSettings, String
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn save_table_sorting_settings(sorting: TableSortingSettings) -> Result<(), String> {
|
||||
let manager = SettingsManager::new();
|
||||
let manager = SettingsManager::instance();
|
||||
manager
|
||||
.save_table_sorting(&sorting)
|
||||
.map_err(|e| format!("Failed to save table sorting settings: {e}"))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn clear_all_version_cache_and_refetch() -> Result<(), String> {
|
||||
let api_client = ApiClient::new();
|
||||
pub async fn clear_all_version_cache_and_refetch(
|
||||
app_handle: tauri::AppHandle,
|
||||
) -> Result<(), String> {
|
||||
let api_client = ApiClient::instance();
|
||||
|
||||
// Clear all cache first
|
||||
api_client
|
||||
.clear_all_cache()
|
||||
.map_err(|e| format!("Failed to clear version cache: {e}"))?;
|
||||
|
||||
// Trigger auto-fetch for all supported browsers
|
||||
let service = BrowserVersionService::new();
|
||||
let supported_browsers = service.get_supported_browsers();
|
||||
// Disable all browsers during the update process
|
||||
let auto_updater = crate::auto_updater::AutoUpdater::instance();
|
||||
let supported_browsers =
|
||||
crate::browser_version_manager::BrowserVersionManager::instance().get_supported_browsers();
|
||||
|
||||
for browser in supported_browsers {
|
||||
// Start background fetch for each browser (don't wait for completion)
|
||||
let service_clone = BrowserVersionService::new();
|
||||
let browser_clone = browser.clone();
|
||||
tokio::spawn(async move {
|
||||
if let Err(e) = service_clone
|
||||
.fetch_browser_versions_detailed(&browser_clone, false)
|
||||
.await
|
||||
{
|
||||
eprintln!("Background version fetch failed for {browser_clone}: {e}");
|
||||
}
|
||||
});
|
||||
// Load current state and disable all browsers
|
||||
let mut state = auto_updater
|
||||
.load_auto_update_state()
|
||||
.map_err(|e| format!("Failed to load auto update state: {e}"))?;
|
||||
for browser in &supported_browsers {
|
||||
state.disabled_browsers.insert(browser.clone());
|
||||
}
|
||||
auto_updater
|
||||
.save_auto_update_state(&state)
|
||||
.map_err(|e| format!("Failed to save auto update state: {e}"))?;
|
||||
|
||||
let updater = version_updater::get_version_updater();
|
||||
let updater_guard = updater.lock().await;
|
||||
|
||||
let result = updater_guard
|
||||
.trigger_manual_update(&app_handle)
|
||||
.await
|
||||
.map_err(|e| format!("Failed to trigger version update: {e}"));
|
||||
|
||||
// Re-enable all browsers after the update completes (regardless of success/failure)
|
||||
let mut final_state = auto_updater.load_auto_update_state().unwrap_or_default();
|
||||
for browser in &supported_browsers {
|
||||
final_state.disabled_browsers.remove(browser);
|
||||
}
|
||||
if let Err(e) = auto_updater.save_auto_update_state(&final_state) {
|
||||
eprintln!("Warning: Failed to re-enable browsers after cache clear: {e}");
|
||||
}
|
||||
|
||||
result?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Global singleton instance
|
||||
lazy_static::lazy_static! {
|
||||
static ref SETTINGS_MANAGER: SettingsManager = SettingsManager::new();
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::env;
|
||||
use tempfile::TempDir;
|
||||
|
||||
fn create_test_settings_manager() -> (SettingsManager, TempDir) {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp directory");
|
||||
|
||||
// Set up a temporary home directory for testing
|
||||
env::set_var("HOME", temp_dir.path());
|
||||
|
||||
let manager = SettingsManager::new();
|
||||
(manager, temp_dir)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_settings_manager_creation() {
|
||||
let (_manager, _temp_dir) = create_test_settings_manager();
|
||||
// Test passes if no panic occurs
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_default_app_settings() {
|
||||
let default_settings = AppSettings::default();
|
||||
|
||||
assert!(
|
||||
!default_settings.set_as_default_browser,
|
||||
"Default should not set as default browser"
|
||||
);
|
||||
assert_eq!(
|
||||
default_settings.theme, "system",
|
||||
"Default theme should be system"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_default_table_sorting_settings() {
|
||||
let default_sorting = TableSortingSettings::default();
|
||||
|
||||
assert_eq!(
|
||||
default_sorting.column, "name",
|
||||
"Default sort column should be name"
|
||||
);
|
||||
assert_eq!(
|
||||
default_sorting.direction, "asc",
|
||||
"Default sort direction should be asc"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_load_settings_nonexistent_file() {
|
||||
let (manager, _temp_dir) = create_test_settings_manager();
|
||||
|
||||
let result = manager.load_settings();
|
||||
assert!(
|
||||
result.is_ok(),
|
||||
"Should handle nonexistent settings file gracefully"
|
||||
);
|
||||
|
||||
let settings = result.unwrap();
|
||||
assert!(
|
||||
!settings.set_as_default_browser,
|
||||
"Should return default settings"
|
||||
);
|
||||
assert_eq!(settings.theme, "system", "Should return default theme");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_save_and_load_settings() {
|
||||
let (manager, _temp_dir) = create_test_settings_manager();
|
||||
|
||||
let test_settings = AppSettings {
|
||||
set_as_default_browser: true,
|
||||
theme: "dark".to_string(),
|
||||
};
|
||||
|
||||
// Save settings
|
||||
let save_result = manager.save_settings(&test_settings);
|
||||
assert!(save_result.is_ok(), "Should save settings successfully");
|
||||
|
||||
// Load settings back
|
||||
let load_result = manager.load_settings();
|
||||
assert!(load_result.is_ok(), "Should load settings successfully");
|
||||
|
||||
let loaded_settings = load_result.unwrap();
|
||||
assert!(
|
||||
loaded_settings.set_as_default_browser,
|
||||
"Loaded settings should match saved"
|
||||
);
|
||||
assert_eq!(
|
||||
loaded_settings.theme, "dark",
|
||||
"Loaded theme should match saved"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_load_table_sorting_nonexistent_file() {
|
||||
let (manager, _temp_dir) = create_test_settings_manager();
|
||||
|
||||
let result = manager.load_table_sorting();
|
||||
assert!(
|
||||
result.is_ok(),
|
||||
"Should handle nonexistent sorting file gracefully"
|
||||
);
|
||||
|
||||
let sorting = result.unwrap();
|
||||
assert_eq!(sorting.column, "name", "Should return default sorting");
|
||||
assert_eq!(sorting.direction, "asc", "Should return default direction");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_save_and_load_table_sorting() {
|
||||
let (manager, _temp_dir) = create_test_settings_manager();
|
||||
|
||||
let test_sorting = TableSortingSettings {
|
||||
column: "browser".to_string(),
|
||||
direction: "desc".to_string(),
|
||||
};
|
||||
|
||||
// Save sorting
|
||||
let save_result = manager.save_table_sorting(&test_sorting);
|
||||
assert!(save_result.is_ok(), "Should save sorting successfully");
|
||||
|
||||
// Load sorting back
|
||||
let load_result = manager.load_table_sorting();
|
||||
assert!(load_result.is_ok(), "Should load sorting successfully");
|
||||
|
||||
let loaded_sorting = load_result.unwrap();
|
||||
assert_eq!(
|
||||
loaded_sorting.column, "browser",
|
||||
"Loaded column should match saved"
|
||||
);
|
||||
assert_eq!(
|
||||
loaded_sorting.direction, "desc",
|
||||
"Loaded direction should match saved"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_should_show_settings_on_startup() {
|
||||
let (manager, _temp_dir) = create_test_settings_manager();
|
||||
|
||||
let result = manager.should_show_settings_on_startup();
|
||||
assert!(result.is_ok(), "Should not fail");
|
||||
|
||||
let should_show = result.unwrap();
|
||||
assert!(
|
||||
!should_show,
|
||||
"Should always return false as per implementation"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_load_corrupted_settings_file() {
|
||||
let (manager, _temp_dir) = create_test_settings_manager();
|
||||
|
||||
// Create settings directory
|
||||
let settings_dir = manager.get_settings_dir();
|
||||
fs::create_dir_all(&settings_dir).expect("Should create settings directory");
|
||||
|
||||
// Write corrupted JSON
|
||||
let settings_file = manager.get_settings_file();
|
||||
fs::write(&settings_file, "{ invalid json }").expect("Should write corrupted file");
|
||||
|
||||
// Should handle corrupted file gracefully
|
||||
let result = manager.load_settings();
|
||||
assert!(
|
||||
result.is_ok(),
|
||||
"Should handle corrupted settings file gracefully"
|
||||
);
|
||||
|
||||
let settings = result.unwrap();
|
||||
assert!(
|
||||
!settings.set_as_default_browser,
|
||||
"Should return default settings for corrupted file"
|
||||
);
|
||||
assert_eq!(
|
||||
settings.theme, "system",
|
||||
"Should return default theme for corrupted file"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_settings_file_paths() {
|
||||
let (manager, _temp_dir) = create_test_settings_manager();
|
||||
|
||||
let settings_dir = manager.get_settings_dir();
|
||||
let settings_file = manager.get_settings_file();
|
||||
let sorting_file = manager.get_table_sorting_file();
|
||||
|
||||
assert!(
|
||||
settings_dir.to_string_lossy().contains("settings"),
|
||||
"Settings dir should contain 'settings'"
|
||||
);
|
||||
assert!(
|
||||
settings_file
|
||||
.to_string_lossy()
|
||||
.ends_with("app_settings.json"),
|
||||
"Settings file should end with app_settings.json"
|
||||
);
|
||||
assert!(
|
||||
sorting_file
|
||||
.to_string_lossy()
|
||||
.ends_with("table_sorting.json"),
|
||||
"Sorting file should end with table_sorting.json"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,539 +0,0 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::process::Command;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct SystemTheme {
|
||||
pub theme: String, // "light", "dark", or "unknown"
|
||||
}
|
||||
|
||||
pub struct ThemeDetector;
|
||||
|
||||
impl ThemeDetector {
|
||||
pub fn new() -> Self {
|
||||
Self
|
||||
}
|
||||
|
||||
/// Detect the system theme preference
|
||||
pub fn detect_system_theme(&self) -> SystemTheme {
|
||||
#[cfg(target_os = "linux")]
|
||||
return linux::detect_system_theme();
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
return macos::detect_system_theme();
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
return windows::detect_system_theme();
|
||||
|
||||
#[cfg(not(any(target_os = "linux", target_os = "macos", target_os = "windows")))]
|
||||
return SystemTheme {
|
||||
theme: "unknown".to_string(),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
mod linux {
|
||||
use super::*;
|
||||
|
||||
pub fn detect_system_theme() -> SystemTheme {
|
||||
// Try multiple methods in order of preference
|
||||
|
||||
// 1. Try GNOME/GTK settings via gsettings
|
||||
if let Ok(theme) = detect_gnome_theme() {
|
||||
return SystemTheme { theme };
|
||||
}
|
||||
|
||||
// 2. Try KDE Plasma settings via kreadconfig5/kreadconfig6
|
||||
if let Ok(theme) = detect_kde_theme() {
|
||||
return SystemTheme { theme };
|
||||
}
|
||||
|
||||
// 3. Try XFCE settings via xfconf-query
|
||||
if let Ok(theme) = detect_xfce_theme() {
|
||||
return SystemTheme { theme };
|
||||
}
|
||||
|
||||
// 4. Try looking at current GTK theme name
|
||||
if let Ok(theme) = detect_gtk_theme() {
|
||||
return SystemTheme { theme };
|
||||
}
|
||||
|
||||
// 5. Try dconf directly (fallback for GNOME-based systems)
|
||||
if let Ok(theme) = detect_dconf_theme() {
|
||||
return SystemTheme { theme };
|
||||
}
|
||||
|
||||
// 6. Try environment variables
|
||||
if let Ok(theme) = detect_env_theme() {
|
||||
return SystemTheme { theme };
|
||||
}
|
||||
|
||||
// 7. Try freedesktop portal
|
||||
if let Ok(theme) = detect_portal_theme() {
|
||||
return SystemTheme { theme };
|
||||
}
|
||||
|
||||
// 8. Try looking at system color scheme files
|
||||
if let Ok(theme) = detect_system_files_theme() {
|
||||
return SystemTheme { theme };
|
||||
}
|
||||
|
||||
// Fallback to unknown
|
||||
SystemTheme {
|
||||
theme: "unknown".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
fn detect_gnome_theme() -> Result<String, Box<dyn std::error::Error>> {
|
||||
// Check if gsettings is available
|
||||
if !is_command_available("gsettings") {
|
||||
return Err("gsettings not available".into());
|
||||
}
|
||||
|
||||
// Try GNOME color scheme first (modern way)
|
||||
if let Ok(output) = Command::new("gsettings")
|
||||
.args(["get", "org.gnome.desktop.interface", "color-scheme"])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let scheme = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||||
match scheme.as_str() {
|
||||
"'prefer-dark'" => return Ok("dark".to_string()),
|
||||
"'prefer-light'" => return Ok("light".to_string()),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback to GTK theme name detection
|
||||
if let Ok(output) = Command::new("gsettings")
|
||||
.args(["get", "org.gnome.desktop.interface", "gtk-theme"])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let theme_name = String::from_utf8_lossy(&output.stdout)
|
||||
.trim()
|
||||
.trim_matches('\'')
|
||||
.to_lowercase();
|
||||
|
||||
if theme_name.contains("dark") || theme_name.contains("night") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme_name.contains("light") || theme_name.contains("adwaita") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err("Could not detect GNOME theme".into())
|
||||
}
|
||||
|
||||
fn detect_kde_theme() -> Result<String, Box<dyn std::error::Error>> {
|
||||
// Try KDE Plasma 6 first
|
||||
if is_command_available("kreadconfig6") {
|
||||
if let Ok(output) = Command::new("kreadconfig6")
|
||||
.args([
|
||||
"--file",
|
||||
"kdeglobals",
|
||||
"--group",
|
||||
"KDE",
|
||||
"--key",
|
||||
"LookAndFeelPackage",
|
||||
])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let theme = String::from_utf8_lossy(&output.stdout)
|
||||
.trim()
|
||||
.to_lowercase();
|
||||
if theme.contains("dark") || theme.contains("breezedark") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme.contains("light") || theme.contains("breeze") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Try color scheme as well
|
||||
if let Ok(output) = Command::new("kreadconfig6")
|
||||
.args([
|
||||
"--file",
|
||||
"kdeglobals",
|
||||
"--group",
|
||||
"General",
|
||||
"--key",
|
||||
"ColorScheme",
|
||||
])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let scheme = String::from_utf8_lossy(&output.stdout)
|
||||
.trim()
|
||||
.to_lowercase();
|
||||
if scheme.contains("dark") || scheme.contains("breezedark") {
|
||||
return Ok("dark".to_string());
|
||||
} else if scheme.contains("light") || scheme.contains("breeze") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Try KDE Plasma 5 as fallback
|
||||
if is_command_available("kreadconfig5") {
|
||||
if let Ok(output) = Command::new("kreadconfig5")
|
||||
.args([
|
||||
"--file",
|
||||
"kdeglobals",
|
||||
"--group",
|
||||
"KDE",
|
||||
"--key",
|
||||
"LookAndFeelPackage",
|
||||
])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let theme = String::from_utf8_lossy(&output.stdout)
|
||||
.trim()
|
||||
.to_lowercase();
|
||||
if theme.contains("dark") || theme.contains("breezedark") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme.contains("light") || theme.contains("breeze") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err("Could not detect KDE theme".into())
|
||||
}
|
||||
|
||||
fn detect_xfce_theme() -> Result<String, Box<dyn std::error::Error>> {
|
||||
if !is_command_available("xfconf-query") {
|
||||
return Err("xfconf-query not available".into());
|
||||
}
|
||||
|
||||
// Check XFCE theme
|
||||
if let Ok(output) = Command::new("xfconf-query")
|
||||
.args(["-c", "xsettings", "-p", "/Net/ThemeName"])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let theme = String::from_utf8_lossy(&output.stdout)
|
||||
.trim()
|
||||
.to_lowercase();
|
||||
if theme.contains("dark") || theme.contains("night") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme.contains("light") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check XFCE window manager theme as backup
|
||||
if let Ok(output) = Command::new("xfconf-query")
|
||||
.args(["-c", "xfwm4", "-p", "/general/theme"])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let theme = String::from_utf8_lossy(&output.stdout)
|
||||
.trim()
|
||||
.to_lowercase();
|
||||
if theme.contains("dark") || theme.contains("night") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme.contains("light") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err("Could not detect XFCE theme".into())
|
||||
}
|
||||
|
||||
fn detect_gtk_theme() -> Result<String, Box<dyn std::error::Error>> {
|
||||
// Try to read GTK3 settings file
|
||||
if let Ok(home) = std::env::var("HOME") {
|
||||
let gtk3_settings = std::path::Path::new(&home).join(".config/gtk-3.0/settings.ini");
|
||||
if gtk3_settings.exists() {
|
||||
if let Ok(content) = std::fs::read_to_string(gtk3_settings) {
|
||||
for line in content.lines() {
|
||||
if line.starts_with("gtk-theme-name=") {
|
||||
let theme_name = line.split('=').nth(1).unwrap_or("").trim().to_lowercase();
|
||||
if theme_name.contains("dark") || theme_name.contains("night") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme_name.contains("light") || theme_name.contains("adwaita") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Try GTK4 settings
|
||||
let gtk4_settings = std::path::Path::new(&home).join(".config/gtk-4.0/settings.ini");
|
||||
if gtk4_settings.exists() {
|
||||
if let Ok(content) = std::fs::read_to_string(gtk4_settings) {
|
||||
for line in content.lines() {
|
||||
if line.starts_with("gtk-theme-name=") {
|
||||
let theme_name = line.split('=').nth(1).unwrap_or("").trim().to_lowercase();
|
||||
if theme_name.contains("dark") || theme_name.contains("night") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme_name.contains("light") || theme_name.contains("adwaita") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err("Could not detect GTK theme".into())
|
||||
}
|
||||
|
||||
fn detect_dconf_theme() -> Result<String, Box<dyn std::error::Error>> {
|
||||
if !is_command_available("dconf") {
|
||||
return Err("dconf not available".into());
|
||||
}
|
||||
|
||||
// Try reading color scheme directly from dconf
|
||||
if let Ok(output) = Command::new("dconf")
|
||||
.args(["read", "/org/gnome/desktop/interface/color-scheme"])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let scheme = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||||
match scheme.as_str() {
|
||||
"'prefer-dark'" => return Ok("dark".to_string()),
|
||||
"'prefer-light'" => return Ok("light".to_string()),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Try reading GTK theme from dconf
|
||||
if let Ok(output) = Command::new("dconf")
|
||||
.args(["read", "/org/gnome/desktop/interface/gtk-theme"])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let theme_name = String::from_utf8_lossy(&output.stdout)
|
||||
.trim()
|
||||
.trim_matches('\'')
|
||||
.to_lowercase();
|
||||
|
||||
if theme_name.contains("dark") || theme_name.contains("night") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme_name.contains("light") || theme_name.contains("adwaita") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err("Could not detect dconf theme".into())
|
||||
}
|
||||
|
||||
fn detect_env_theme() -> Result<String, Box<dyn std::error::Error>> {
|
||||
// Check common environment variables
|
||||
if let Ok(theme) = std::env::var("GTK_THEME") {
|
||||
let theme_lower = theme.to_lowercase();
|
||||
if theme_lower.contains("dark") || theme_lower.contains("night") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme_lower.contains("light") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
if let Ok(theme) = std::env::var("QT_STYLE_OVERRIDE") {
|
||||
let theme_lower = theme.to_lowercase();
|
||||
if theme_lower.contains("dark") || theme_lower.contains("night") {
|
||||
return Ok("dark".to_string());
|
||||
} else if theme_lower.contains("light") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
Err("Could not detect theme from environment".into())
|
||||
}
|
||||
|
||||
fn detect_portal_theme() -> Result<String, Box<dyn std::error::Error>> {
|
||||
if !is_command_available("busctl") {
|
||||
return Err("busctl not available".into());
|
||||
}
|
||||
|
||||
// Try to query the color scheme via org.freedesktop.portal.Settings
|
||||
if let Ok(output) = Command::new("busctl")
|
||||
.args([
|
||||
"--user",
|
||||
"call",
|
||||
"org.freedesktop.portal.Desktop",
|
||||
"/org/freedesktop/portal/desktop",
|
||||
"org.freedesktop.portal.Settings",
|
||||
"Read",
|
||||
"ss",
|
||||
"org.freedesktop.appearance",
|
||||
"color-scheme",
|
||||
])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let response = String::from_utf8_lossy(&output.stdout);
|
||||
// Parse DBus response - look for preference values
|
||||
if response.contains(" 1 ") {
|
||||
return Ok("dark".to_string());
|
||||
} else if response.contains(" 2 ") {
|
||||
return Ok("light".to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err("Could not detect portal theme".into())
|
||||
}
|
||||
|
||||
fn detect_system_files_theme() -> Result<String, Box<dyn std::error::Error>> {
|
||||
// Check if we're in a dark terminal (heuristic)
|
||||
if let Ok(term) = std::env::var("TERM") {
|
||||
let term_lower = term.to_lowercase();
|
||||
if term_lower.contains("dark") || term_lower.contains("night") {
|
||||
return Ok("dark".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
// Check if we can determine from desktop session
|
||||
if let Ok(desktop) = std::env::var("XDG_CURRENT_DESKTOP") {
|
||||
let desktop_lower = desktop.to_lowercase();
|
||||
// Some desktops default to dark
|
||||
if desktop_lower.contains("i3") || desktop_lower.contains("sway") {
|
||||
// Window managers often use dark themes by default
|
||||
return Ok("dark".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
Err("Could not detect theme from system files".into())
|
||||
}
|
||||
|
||||
fn is_command_available(command: &str) -> bool {
|
||||
Command::new("which")
|
||||
.arg(command)
|
||||
.output()
|
||||
.map(|output| output.status.success())
|
||||
.unwrap_or(false)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
mod macos {
|
||||
use super::*;
|
||||
|
||||
pub fn detect_system_theme() -> SystemTheme {
|
||||
// macOS theme detection using osascript
|
||||
if let Ok(output) = Command::new("osascript")
|
||||
.args([
|
||||
"-e",
|
||||
"tell application \"System Events\" to tell appearance preferences to get dark mode",
|
||||
])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let result = String::from_utf8_lossy(&output.stdout).to_string();
|
||||
let result = result.trim();
|
||||
match result {
|
||||
"true" => {
|
||||
return SystemTheme {
|
||||
theme: "dark".to_string(),
|
||||
}
|
||||
}
|
||||
"false" => {
|
||||
return SystemTheme {
|
||||
theme: "light".to_string(),
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback method using defaults
|
||||
if let Ok(output) = Command::new("defaults")
|
||||
.args(["read", "-g", "AppleInterfaceStyle"])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let style = String::from_utf8_lossy(&output.stdout).to_string();
|
||||
let style = style.trim();
|
||||
if style.to_lowercase() == "dark" {
|
||||
return SystemTheme {
|
||||
theme: "dark".to_string(),
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Default to light if we can't determine
|
||||
SystemTheme {
|
||||
theme: "light".to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
mod windows {
|
||||
use super::*;
|
||||
|
||||
pub fn detect_system_theme() -> SystemTheme {
|
||||
// Windows theme detection via registry
|
||||
// This is a simplified implementation - you might want to use winreg crate for better registry access
|
||||
if let Ok(output) = Command::new("reg")
|
||||
.args([
|
||||
"query",
|
||||
"HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\Themes\\Personalize",
|
||||
"/v",
|
||||
"AppsUseLightTheme",
|
||||
])
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let result = String::from_utf8_lossy(&output.stdout);
|
||||
if result.contains("0x0") {
|
||||
return SystemTheme {
|
||||
theme: "dark".to_string(),
|
||||
};
|
||||
} else if result.contains("0x1") {
|
||||
return SystemTheme {
|
||||
theme: "light".to_string(),
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Default to light if we can't determine
|
||||
SystemTheme {
|
||||
theme: "light".to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Command to expose this functionality to the frontend
|
||||
#[tauri::command]
|
||||
pub fn get_system_theme() -> SystemTheme {
|
||||
let detector = ThemeDetector::new();
|
||||
detector.detect_system_theme()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_theme_detector_creation() {
|
||||
let detector = ThemeDetector::new();
|
||||
let theme = detector.detect_system_theme();
|
||||
|
||||
// Should return a valid theme string
|
||||
assert!(matches!(theme.theme.as_str(), "light" | "dark" | "unknown"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_system_theme_command() {
|
||||
let theme = get_system_theme();
|
||||
assert!(matches!(theme.theme.as_str(), "light" | "dark" | "unknown"));
|
||||
}
|
||||
}
|
||||
+236
-122
@@ -1,4 +1,3 @@
|
||||
use crate::browser_version_service::BrowserVersionService;
|
||||
use directories::BaseDirs;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fs;
|
||||
@@ -8,7 +7,10 @@ use std::sync::OnceLock;
|
||||
use std::time::{Duration, SystemTime, UNIX_EPOCH};
|
||||
use tauri::Emitter;
|
||||
use tokio::sync::Mutex;
|
||||
use tokio::time::{interval, Interval};
|
||||
use tokio::time::interval;
|
||||
|
||||
use crate::auto_updater::AutoUpdater;
|
||||
use crate::browser_version_manager::BrowserVersionManager;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct VersionUpdateProgress {
|
||||
@@ -39,29 +41,30 @@ impl Default for BackgroundUpdateState {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
last_update_time: 0,
|
||||
update_interval_hours: 3,
|
||||
update_interval_hours: 12,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct VersionUpdater {
|
||||
version_service: BrowserVersionService,
|
||||
app_handle: Arc<Mutex<Option<tauri::AppHandle>>>,
|
||||
update_interval: Interval,
|
||||
version_service: &'static BrowserVersionManager,
|
||||
auto_updater: &'static AutoUpdater,
|
||||
app_handle: Option<tauri::AppHandle>,
|
||||
}
|
||||
|
||||
impl VersionUpdater {
|
||||
pub fn new() -> Self {
|
||||
let mut update_interval = interval(Duration::from_secs(5 * 60)); // Check every 5 minutes
|
||||
update_interval.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip);
|
||||
|
||||
Self {
|
||||
version_service: BrowserVersionService::new(),
|
||||
app_handle: Arc::new(Mutex::new(None)),
|
||||
update_interval,
|
||||
version_service: BrowserVersionManager::instance(),
|
||||
auto_updater: AutoUpdater::instance(),
|
||||
app_handle: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_app_handle(&mut self, app_handle: tauri::AppHandle) {
|
||||
self.app_handle = Some(app_handle);
|
||||
}
|
||||
|
||||
fn get_cache_dir() -> Result<PathBuf, Box<dyn std::error::Error>> {
|
||||
let base_dirs = BaseDirs::new().ok_or("Failed to get base directories")?;
|
||||
let app_name = if cfg!(debug_assertions) {
|
||||
@@ -143,11 +146,6 @@ impl VersionUpdater {
|
||||
should_update
|
||||
}
|
||||
|
||||
pub async fn set_app_handle(&self, app_handle: tauri::AppHandle) {
|
||||
let mut handle = self.app_handle.lock().await;
|
||||
*handle = Some(app_handle);
|
||||
}
|
||||
|
||||
pub async fn check_and_run_startup_update(
|
||||
&self,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
@@ -157,15 +155,10 @@ impl VersionUpdater {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let app_handle = {
|
||||
let handle_guard = self.app_handle.lock().await;
|
||||
handle_guard.clone()
|
||||
};
|
||||
|
||||
if let Some(handle) = app_handle {
|
||||
if let Some(ref app_handle) = self.app_handle {
|
||||
println!("Running startup version update...");
|
||||
|
||||
match self.update_all_browser_versions(&handle).await {
|
||||
match self.update_all_browser_versions(app_handle).await {
|
||||
Ok(_) => {
|
||||
// Update the persistent state after successful update
|
||||
let state = BackgroundUpdateState {
|
||||
@@ -191,7 +184,9 @@ impl VersionUpdater {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn start_background_updates(&mut self) {
|
||||
pub async fn start_background_updates(
|
||||
&self,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
println!(
|
||||
"Starting background version update service (checking every 5 minutes for 3-hour intervals)"
|
||||
);
|
||||
@@ -201,41 +196,54 @@ impl VersionUpdater {
|
||||
eprintln!("Startup version update failed: {e}");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn run_background_task() {
|
||||
let mut update_interval = interval(Duration::from_secs(5 * 60)); // Check every 5 minutes
|
||||
update_interval.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip);
|
||||
|
||||
loop {
|
||||
self.update_interval.tick().await;
|
||||
update_interval.tick().await;
|
||||
|
||||
// Check if we should run an update based on persistent state
|
||||
if !Self::should_run_background_update() {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if we have an app handle
|
||||
let app_handle = {
|
||||
let handle_guard = self.app_handle.lock().await;
|
||||
handle_guard.clone()
|
||||
};
|
||||
println!("Starting background version update...");
|
||||
|
||||
if let Some(handle) = app_handle {
|
||||
println!("Starting background version update...");
|
||||
// Get the updater instance for this update cycle
|
||||
let updater = get_version_updater();
|
||||
let result = {
|
||||
let updater_guard = updater.lock().await;
|
||||
if let Some(ref app_handle) = updater_guard.app_handle {
|
||||
updater_guard.update_all_browser_versions(app_handle).await
|
||||
} else {
|
||||
Err("App handle not available for background update".into())
|
||||
}
|
||||
}; // Release the lock here
|
||||
|
||||
match self.update_all_browser_versions(&handle).await {
|
||||
Ok(_) => {
|
||||
// Update the persistent state after successful update
|
||||
let state = BackgroundUpdateState {
|
||||
last_update_time: Self::get_current_timestamp(),
|
||||
update_interval_hours: 3,
|
||||
};
|
||||
match result {
|
||||
Ok(_) => {
|
||||
// Update the persistent state after successful update
|
||||
let state = BackgroundUpdateState {
|
||||
last_update_time: Self::get_current_timestamp(),
|
||||
update_interval_hours: 3,
|
||||
};
|
||||
|
||||
if let Err(e) = Self::save_background_update_state(&state) {
|
||||
eprintln!("Failed to save background update state: {e}");
|
||||
} else {
|
||||
println!("Background version update completed successfully");
|
||||
}
|
||||
if let Err(e) = Self::save_background_update_state(&state) {
|
||||
eprintln!("Failed to save background update state: {e}");
|
||||
} else {
|
||||
println!("Background version update completed successfully");
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Background version update failed: {e}");
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Background version update failed: {e}");
|
||||
|
||||
// Emit error event
|
||||
// Try to emit error event if we have an app handle
|
||||
let updater_guard = updater.lock().await;
|
||||
if let Some(ref app_handle) = updater_guard.app_handle {
|
||||
let progress = VersionUpdateProgress {
|
||||
current_browser: "".to_string(),
|
||||
total_browsers: 0,
|
||||
@@ -244,11 +252,9 @@ impl VersionUpdater {
|
||||
browser_new_versions: 0,
|
||||
status: "error".to_string(),
|
||||
};
|
||||
let _ = handle.emit("version-update-progress", &progress);
|
||||
let _ = app_handle.emit("version-update-progress", &progress);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
println!("App handle not available, skipping background update");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -257,107 +263,108 @@ impl VersionUpdater {
|
||||
&self,
|
||||
app_handle: &tauri::AppHandle,
|
||||
) -> Result<Vec<BackgroundUpdateResult>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
println!("Starting background version update for all browsers");
|
||||
|
||||
let browsers = [
|
||||
"firefox",
|
||||
"firefox-developer",
|
||||
"mullvad-browser",
|
||||
"zen",
|
||||
"brave",
|
||||
"chromium",
|
||||
"tor-browser",
|
||||
];
|
||||
|
||||
let total_browsers = browsers.len();
|
||||
let supported_browsers = self.version_service.get_supported_browsers();
|
||||
let total_browsers = supported_browsers.len();
|
||||
let mut results = Vec::new();
|
||||
let mut total_new_versions = 0;
|
||||
|
||||
// Emit start event
|
||||
let progress = VersionUpdateProgress {
|
||||
current_browser: "".to_string(),
|
||||
// Emit initial progress
|
||||
let initial_progress = VersionUpdateProgress {
|
||||
current_browser: String::new(),
|
||||
total_browsers,
|
||||
completed_browsers: 0,
|
||||
new_versions_found: 0,
|
||||
browser_new_versions: 0,
|
||||
status: "updating".to_string(),
|
||||
};
|
||||
let _ = app_handle.emit("version-update-progress", &progress);
|
||||
|
||||
for (index, browser) in browsers.iter().enumerate() {
|
||||
// Check if individual browser cache is expired before updating
|
||||
if !self.version_service.should_update_cache(browser) {
|
||||
println!("Skipping {browser} - cache is still fresh");
|
||||
if let Err(e) = app_handle.emit("version-update-progress", &initial_progress) {
|
||||
eprintln!("Failed to emit initial progress: {e}");
|
||||
}
|
||||
|
||||
let browser_result = BackgroundUpdateResult {
|
||||
browser: browser.to_string(),
|
||||
new_versions_count: 0,
|
||||
total_versions_count: 0,
|
||||
updated_successfully: true,
|
||||
error: None,
|
||||
};
|
||||
results.push(browser_result);
|
||||
continue;
|
||||
}
|
||||
for (index, browser) in supported_browsers.iter().enumerate() {
|
||||
println!("Updating browser versions for: {browser}");
|
||||
|
||||
println!("Updating versions for browser: {browser}");
|
||||
|
||||
// Emit progress for current browser
|
||||
// Emit progress update for current browser
|
||||
let progress = VersionUpdateProgress {
|
||||
current_browser: browser.to_string(),
|
||||
current_browser: browser.clone(),
|
||||
total_browsers,
|
||||
completed_browsers: index,
|
||||
new_versions_found: total_new_versions,
|
||||
browser_new_versions: 0,
|
||||
status: "updating".to_string(),
|
||||
};
|
||||
let _ = app_handle.emit("version-update-progress", &progress);
|
||||
|
||||
let result = self.update_browser_versions(browser).await;
|
||||
if let Err(e) = app_handle.emit("version-update-progress", &progress) {
|
||||
eprintln!("Failed to emit progress for {browser}: {e}");
|
||||
}
|
||||
|
||||
match result {
|
||||
Ok(new_count) => {
|
||||
total_new_versions += new_count;
|
||||
let browser_result = BackgroundUpdateResult {
|
||||
browser: browser.to_string(),
|
||||
new_versions_count: new_count,
|
||||
total_versions_count: 0, // We'll update this if needed
|
||||
match self.update_browser_versions(browser).await {
|
||||
Ok(new_versions_count) => {
|
||||
results.push(BackgroundUpdateResult {
|
||||
browser: browser.clone(),
|
||||
new_versions_count,
|
||||
total_versions_count: 0, // We don't track total for background updates
|
||||
updated_successfully: true,
|
||||
error: None,
|
||||
};
|
||||
results.push(browser_result);
|
||||
});
|
||||
|
||||
println!("Found {new_count} new versions for {browser}");
|
||||
total_new_versions += new_versions_count;
|
||||
|
||||
// Emit progress update with new versions found
|
||||
let progress = VersionUpdateProgress {
|
||||
current_browser: browser.clone(),
|
||||
total_browsers,
|
||||
completed_browsers: index,
|
||||
new_versions_found: total_new_versions,
|
||||
browser_new_versions: new_versions_count,
|
||||
status: "updating".to_string(),
|
||||
};
|
||||
|
||||
if let Err(e) = app_handle.emit("version-update-progress", &progress) {
|
||||
eprintln!("Failed to emit progress with versions for {browser}: {e}");
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Failed to update versions for {browser}: {e}");
|
||||
let browser_result = BackgroundUpdateResult {
|
||||
browser: browser.to_string(),
|
||||
results.push(BackgroundUpdateResult {
|
||||
browser: browser.clone(),
|
||||
new_versions_count: 0,
|
||||
total_versions_count: 0,
|
||||
updated_successfully: false,
|
||||
error: Some(e.to_string()),
|
||||
};
|
||||
results.push(browser_result);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Small delay between browsers to avoid overwhelming APIs
|
||||
tokio::time::sleep(Duration::from_millis(500)).await;
|
||||
}
|
||||
|
||||
// Emit completion event
|
||||
let progress = VersionUpdateProgress {
|
||||
current_browser: "".to_string(),
|
||||
// Emit completion
|
||||
let final_progress = VersionUpdateProgress {
|
||||
current_browser: String::new(),
|
||||
total_browsers,
|
||||
completed_browsers: total_browsers,
|
||||
new_versions_found: total_new_versions,
|
||||
browser_new_versions: 0,
|
||||
status: "completed".to_string(),
|
||||
};
|
||||
let _ = app_handle.emit("version-update-progress", &progress);
|
||||
|
||||
println!("Background version update completed. Found {total_new_versions} new versions total");
|
||||
if let Err(e) = app_handle.emit("version-update-progress", &final_progress) {
|
||||
eprintln!("Failed to emit completion progress: {e}");
|
||||
}
|
||||
|
||||
// After all version updates are complete, trigger auto-update check
|
||||
if total_new_versions > 0 {
|
||||
println!(
|
||||
"Found {total_new_versions} new versions across all browsers. Checking for auto-updates..."
|
||||
);
|
||||
|
||||
// Trigger auto-update check which will automatically download browsers
|
||||
self
|
||||
.auto_updater
|
||||
.check_for_updates_with_progress(app_handle)
|
||||
.await;
|
||||
} else {
|
||||
println!("No new versions found, skipping auto-update check");
|
||||
}
|
||||
|
||||
Ok(results)
|
||||
}
|
||||
@@ -512,31 +519,138 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_should_run_background_update_logic() {
|
||||
// Note: This test uses the shared state file, so results may vary
|
||||
// depending on previous test runs. This is expected behavior.
|
||||
// Create isolated test states to avoid interference
|
||||
let current_time = VersionUpdater::get_current_timestamp();
|
||||
|
||||
// Test with recent update (should not update)
|
||||
let recent_state = BackgroundUpdateState {
|
||||
last_update_time: VersionUpdater::get_current_timestamp() - 60, // 1 minute ago
|
||||
last_update_time: current_time - 60, // 1 minute ago
|
||||
update_interval_hours: 3,
|
||||
};
|
||||
VersionUpdater::save_background_update_state(&recent_state).unwrap();
|
||||
assert!(!VersionUpdater::should_run_background_update());
|
||||
|
||||
// Save and test recent state
|
||||
let save_result = VersionUpdater::save_background_update_state(&recent_state);
|
||||
assert!(save_result.is_ok(), "Should save recent state successfully");
|
||||
|
||||
let should_update_recent = VersionUpdater::should_run_background_update();
|
||||
assert!(
|
||||
!should_update_recent,
|
||||
"Should not update when last update was recent"
|
||||
);
|
||||
|
||||
// Test with old update (should update)
|
||||
let old_state = BackgroundUpdateState {
|
||||
last_update_time: VersionUpdater::get_current_timestamp() - (4 * 60 * 60), // 4 hours ago
|
||||
last_update_time: current_time - (4 * 60 * 60), // 4 hours ago
|
||||
update_interval_hours: 3,
|
||||
};
|
||||
VersionUpdater::save_background_update_state(&old_state).unwrap();
|
||||
assert!(VersionUpdater::should_run_background_update());
|
||||
|
||||
// Save and test old state
|
||||
let save_result = VersionUpdater::save_background_update_state(&old_state);
|
||||
assert!(save_result.is_ok(), "Should save old state successfully");
|
||||
|
||||
let should_update_old = VersionUpdater::should_run_background_update();
|
||||
assert!(should_update_old, "Should update when last update was old");
|
||||
|
||||
// Test with never updated (should update)
|
||||
let never_updated_state = BackgroundUpdateState {
|
||||
last_update_time: 0,
|
||||
update_interval_hours: 3,
|
||||
};
|
||||
|
||||
let save_result = VersionUpdater::save_background_update_state(&never_updated_state);
|
||||
assert!(
|
||||
save_result.is_ok(),
|
||||
"Should save never updated state successfully"
|
||||
);
|
||||
|
||||
let should_update_never = VersionUpdater::should_run_background_update();
|
||||
assert!(
|
||||
should_update_never,
|
||||
"Should update when never updated before"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_cache_dir_creation() {
|
||||
// This should not panic and should create the directory if it doesn't exist
|
||||
let cache_dir = VersionUpdater::get_cache_dir().unwrap();
|
||||
assert!(cache_dir.exists());
|
||||
assert!(cache_dir.is_dir());
|
||||
let cache_dir_result = VersionUpdater::get_cache_dir();
|
||||
assert!(
|
||||
cache_dir_result.is_ok(),
|
||||
"Should successfully get cache directory"
|
||||
);
|
||||
|
||||
let cache_dir = cache_dir_result.unwrap();
|
||||
assert!(
|
||||
cache_dir.exists(),
|
||||
"Cache directory should exist after creation"
|
||||
);
|
||||
assert!(cache_dir.is_dir(), "Cache directory should be a directory");
|
||||
|
||||
// Verify the path contains expected components
|
||||
let path_str = cache_dir.to_string_lossy();
|
||||
assert!(
|
||||
path_str.contains("version_cache"),
|
||||
"Path should contain version_cache"
|
||||
);
|
||||
|
||||
// Test that calling it again returns the same directory
|
||||
let cache_dir2 = VersionUpdater::get_cache_dir().unwrap();
|
||||
assert_eq!(
|
||||
cache_dir, cache_dir2,
|
||||
"Multiple calls should return same directory"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_version_updater_creation() {
|
||||
let updater = VersionUpdater::new();
|
||||
|
||||
// Should have valid references to services
|
||||
assert!(
|
||||
!std::ptr::eq(updater.version_service as *const _, std::ptr::null()),
|
||||
"Version service should not be null"
|
||||
);
|
||||
assert!(
|
||||
!std::ptr::eq(updater.auto_updater as *const _, std::ptr::null()),
|
||||
"Auto updater should not be null"
|
||||
);
|
||||
assert!(
|
||||
updater.app_handle.is_none(),
|
||||
"App handle should initially be None"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_current_timestamp() {
|
||||
let timestamp1 = VersionUpdater::get_current_timestamp();
|
||||
|
||||
// Should be a reasonable timestamp (after year 2020)
|
||||
assert!(
|
||||
timestamp1 > 1577836800,
|
||||
"Timestamp should be after 2020-01-01"
|
||||
); // 2020-01-01 00:00:00 UTC
|
||||
|
||||
// Should be before year 2100
|
||||
assert!(
|
||||
timestamp1 < 4102444800,
|
||||
"Timestamp should be before 2100-01-01"
|
||||
); // 2100-01-01 00:00:00 UTC
|
||||
|
||||
// Wait a tiny bit and check it increases
|
||||
std::thread::sleep(std::time::Duration::from_millis(1));
|
||||
let timestamp2 = VersionUpdater::get_current_timestamp();
|
||||
assert!(timestamp2 >= timestamp1, "Timestamp should not decrease");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_version_updater_singleton() {
|
||||
let updater1 = get_version_updater();
|
||||
let updater2 = get_version_updater();
|
||||
|
||||
// Should return the same Arc instance
|
||||
assert!(
|
||||
Arc::ptr_eq(&updater1, &updater2),
|
||||
"Should return same singleton instance"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"$schema": "https://schema.tauri.app/config/2",
|
||||
"productName": "Donut Browser",
|
||||
"version": "0.3.2",
|
||||
"version": "0.9.0",
|
||||
"identifier": "com.donutbrowser",
|
||||
"build": {
|
||||
"beforeDevCommand": "pnpm dev",
|
||||
@@ -61,8 +61,9 @@
|
||||
},
|
||||
"plugins": {
|
||||
"deep-link": {
|
||||
"schemes": ["http", "https"],
|
||||
"domains": []
|
||||
"desktop": {
|
||||
"schemes": ["http", "https"]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1 @@
|
||||
Hello, World!
|
||||
Binary file not shown.
@@ -0,0 +1,133 @@
|
||||
use std::env;
|
||||
use std::path::PathBuf;
|
||||
use std::process::Command;
|
||||
use std::time::Duration;
|
||||
|
||||
/// Utility functions for integration tests
|
||||
pub struct TestUtils;
|
||||
|
||||
impl TestUtils {
|
||||
/// Build the nodecar binary if it doesn't exist
|
||||
pub async fn ensure_nodecar_binary() -> Result<PathBuf, Box<dyn std::error::Error + Send + Sync>>
|
||||
{
|
||||
let cargo_manifest_dir = env::var("CARGO_MANIFEST_DIR")?;
|
||||
let project_root = PathBuf::from(cargo_manifest_dir)
|
||||
.parent()
|
||||
.unwrap()
|
||||
.to_path_buf();
|
||||
let nodecar_dir = project_root.join("nodecar");
|
||||
let nodecar_binary = nodecar_dir.join("nodecar-bin");
|
||||
|
||||
// Check if binary already exists
|
||||
if nodecar_binary.exists() {
|
||||
return Ok(nodecar_binary);
|
||||
}
|
||||
|
||||
println!("Building nodecar binary for integration tests...");
|
||||
|
||||
// Install dependencies
|
||||
let install_status = Command::new("pnpm")
|
||||
.args(["install", "--frozen-lockfile"])
|
||||
.current_dir(&nodecar_dir)
|
||||
.status()?;
|
||||
|
||||
if !install_status.success() {
|
||||
return Err("Failed to install nodecar dependencies".into());
|
||||
}
|
||||
|
||||
// Build the binary
|
||||
let build_status = Command::new("pnpm")
|
||||
.args(["run", "build"])
|
||||
.current_dir(&nodecar_dir)
|
||||
.status()?;
|
||||
|
||||
if !build_status.success() {
|
||||
return Err("Failed to build nodecar binary".into());
|
||||
}
|
||||
|
||||
if !nodecar_binary.exists() {
|
||||
return Err("Nodecar binary was not created successfully".into());
|
||||
}
|
||||
|
||||
Ok(nodecar_binary)
|
||||
}
|
||||
|
||||
/// Execute a nodecar command with timeout
|
||||
pub async fn execute_nodecar_command(
|
||||
binary_path: &PathBuf,
|
||||
args: &[&str],
|
||||
) -> Result<std::process::Output, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let mut cmd = Command::new(binary_path);
|
||||
cmd.args(args);
|
||||
|
||||
let output = tokio::process::Command::from(cmd).output().await?;
|
||||
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
/// Check if a port is available
|
||||
pub async fn is_port_available(port: u16) -> bool {
|
||||
tokio::net::TcpListener::bind(format!("127.0.0.1:{port}"))
|
||||
.await
|
||||
.is_ok()
|
||||
}
|
||||
|
||||
/// Wait for a port to become available or occupied
|
||||
pub async fn wait_for_port_state(port: u16, should_be_occupied: bool, timeout_secs: u64) -> bool {
|
||||
let start = std::time::Instant::now();
|
||||
|
||||
while start.elapsed().as_secs() < timeout_secs {
|
||||
let is_available = Self::is_port_available(port).await;
|
||||
|
||||
if should_be_occupied && !is_available {
|
||||
return true; // Port is occupied as expected
|
||||
} else if !should_be_occupied && is_available {
|
||||
return true; // Port is available as expected
|
||||
}
|
||||
|
||||
tokio::time::sleep(Duration::from_millis(100)).await;
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
/// Create a temporary directory for test files
|
||||
pub fn create_temp_dir() -> Result<tempfile::TempDir, Box<dyn std::error::Error + Send + Sync>> {
|
||||
Ok(tempfile::tempdir()?)
|
||||
}
|
||||
|
||||
/// Clean up specific nodecar processes by IDs (for targeted test cleanup)
|
||||
pub async fn cleanup_specific_processes(
|
||||
nodecar_path: &PathBuf,
|
||||
proxy_ids: &[String],
|
||||
camoufox_ids: &[String],
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
println!("Cleaning up specific test processes...");
|
||||
|
||||
// Stop specific proxies
|
||||
for proxy_id in proxy_ids {
|
||||
let stop_args = ["proxy", "stop", "--id", proxy_id];
|
||||
if let Ok(output) = Self::execute_nodecar_command(nodecar_path, &stop_args).await {
|
||||
if output.status.success() {
|
||||
println!("Stopped test proxy: {proxy_id}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Stop specific camoufox instances
|
||||
for camoufox_id in camoufox_ids {
|
||||
let stop_args = ["camoufox", "stop", "--id", camoufox_id];
|
||||
if let Ok(output) = Self::execute_nodecar_command(nodecar_path, &stop_args).await {
|
||||
if output.status.success() {
|
||||
println!("Stopped test camoufox instance: {camoufox_id}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Give processes time to clean up
|
||||
tokio::time::sleep(Duration::from_millis(500)).await;
|
||||
|
||||
println!("Test process cleanup completed");
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,999 @@
|
||||
mod common;
|
||||
use common::TestUtils;
|
||||
use serde_json::Value;
|
||||
|
||||
/// Setup function to ensure clean state before tests
|
||||
async fn setup_test() -> Result<std::path::PathBuf, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = TestUtils::ensure_nodecar_binary().await?;
|
||||
|
||||
// Only clean up test-specific processes, not all processes
|
||||
// This prevents interfering with actual app usage during testing
|
||||
println!("Setting up test environment...");
|
||||
|
||||
Ok(nodecar_path)
|
||||
}
|
||||
|
||||
/// Helper to track and cleanup specific test resources
|
||||
struct TestResourceTracker {
|
||||
proxy_ids: Vec<String>,
|
||||
camoufox_ids: Vec<String>,
|
||||
nodecar_path: std::path::PathBuf,
|
||||
}
|
||||
|
||||
impl TestResourceTracker {
|
||||
fn new(nodecar_path: std::path::PathBuf) -> Self {
|
||||
Self {
|
||||
proxy_ids: Vec::new(),
|
||||
camoufox_ids: Vec::new(),
|
||||
nodecar_path,
|
||||
}
|
||||
}
|
||||
|
||||
fn track_proxy(&mut self, proxy_id: String) {
|
||||
self.proxy_ids.push(proxy_id);
|
||||
}
|
||||
|
||||
fn track_camoufox(&mut self, camoufox_id: String) {
|
||||
self.camoufox_ids.push(camoufox_id);
|
||||
}
|
||||
|
||||
async fn cleanup_all(&self) {
|
||||
// Use targeted cleanup to only stop test-specific processes
|
||||
let _ = TestUtils::cleanup_specific_processes(
|
||||
&self.nodecar_path,
|
||||
&self.proxy_ids,
|
||||
&self.camoufox_ids,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for TestResourceTracker {
|
||||
fn drop(&mut self) {
|
||||
// Ensure cleanup happens even if test panics
|
||||
let proxy_ids = self.proxy_ids.clone();
|
||||
let camoufox_ids = self.camoufox_ids.clone();
|
||||
let nodecar_path = self.nodecar_path.clone();
|
||||
|
||||
tokio::spawn(async move {
|
||||
let _ = TestUtils::cleanup_specific_processes(&nodecar_path, &proxy_ids, &camoufox_ids).await;
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/// Integration tests for nodecar proxy functionality
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_proxy_lifecycle() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = setup_test().await?;
|
||||
let mut tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
// Test proxy start with a known working upstream
|
||||
let args = [
|
||||
"proxy",
|
||||
"start",
|
||||
"--host",
|
||||
"httpbin.org",
|
||||
"--proxy-port",
|
||||
"80",
|
||||
"--type",
|
||||
"http",
|
||||
];
|
||||
|
||||
println!("Starting proxy with nodecar...");
|
||||
let output = TestUtils::execute_nodecar_command(&nodecar_path, &args).await?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
tracker.cleanup_all().await;
|
||||
return Err(format!("Proxy start failed - stdout: {stdout}, stderr: {stderr}").into());
|
||||
}
|
||||
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
let config: Value = serde_json::from_str(&stdout)?;
|
||||
|
||||
// Verify proxy configuration structure
|
||||
assert!(config["id"].is_string(), "Proxy ID should be a string");
|
||||
assert!(
|
||||
config["localPort"].is_number(),
|
||||
"Local port should be a number"
|
||||
);
|
||||
assert!(
|
||||
config["localUrl"].is_string(),
|
||||
"Local URL should be a string"
|
||||
);
|
||||
|
||||
let proxy_id = config["id"].as_str().unwrap().to_string();
|
||||
let local_port = config["localPort"].as_u64().unwrap() as u16;
|
||||
tracker.track_proxy(proxy_id.clone());
|
||||
|
||||
println!("Proxy started with ID: {proxy_id} on port: {local_port}");
|
||||
|
||||
// Wait for the proxy to start listening
|
||||
let is_listening = TestUtils::wait_for_port_state(local_port, true, 10).await;
|
||||
assert!(
|
||||
is_listening,
|
||||
"Proxy should be listening on the assigned port"
|
||||
);
|
||||
|
||||
// Test stopping the proxy
|
||||
let stop_args = ["proxy", "stop", "--id", &proxy_id];
|
||||
let stop_output = TestUtils::execute_nodecar_command(&nodecar_path, &stop_args).await?;
|
||||
|
||||
assert!(stop_output.status.success(), "Proxy stop should succeed");
|
||||
|
||||
let port_available = TestUtils::wait_for_port_state(local_port, false, 5).await;
|
||||
assert!(
|
||||
port_available,
|
||||
"Port should be available after stopping proxy"
|
||||
);
|
||||
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test proxy with authentication
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_proxy_with_auth() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = setup_test().await?;
|
||||
let mut tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
let args = [
|
||||
"proxy",
|
||||
"start",
|
||||
"--host",
|
||||
"httpbin.org",
|
||||
"--proxy-port",
|
||||
"80",
|
||||
"--type",
|
||||
"http",
|
||||
"--username",
|
||||
"testuser",
|
||||
"--password",
|
||||
"testpass",
|
||||
];
|
||||
|
||||
let output = TestUtils::execute_nodecar_command(&nodecar_path, &args).await?;
|
||||
|
||||
if output.status.success() {
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
let config: Value = serde_json::from_str(&stdout)?;
|
||||
|
||||
let proxy_id = config["id"].as_str().unwrap().to_string();
|
||||
tracker.track_proxy(proxy_id.clone());
|
||||
|
||||
// Verify upstream URL contains encoded credentials
|
||||
if let Some(upstream_url) = config["upstreamUrl"].as_str() {
|
||||
assert!(
|
||||
upstream_url.contains("testuser"),
|
||||
"Upstream URL should contain username"
|
||||
);
|
||||
// Password might be encoded, so we check for the presence of auth info
|
||||
assert!(
|
||||
upstream_url.contains("@"),
|
||||
"Upstream URL should contain auth separator"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test proxy list functionality
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_proxy_list() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = setup_test().await?;
|
||||
let mut tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
// Start a proxy first
|
||||
let start_args = [
|
||||
"proxy",
|
||||
"start",
|
||||
"--host",
|
||||
"httpbin.org",
|
||||
"--proxy-port",
|
||||
"80",
|
||||
"--type",
|
||||
"http",
|
||||
];
|
||||
|
||||
let start_output = TestUtils::execute_nodecar_command(&nodecar_path, &start_args).await?;
|
||||
|
||||
if start_output.status.success() {
|
||||
let stdout = String::from_utf8(start_output.stdout)?;
|
||||
let config: Value = serde_json::from_str(&stdout)?;
|
||||
let proxy_id = config["id"].as_str().unwrap().to_string();
|
||||
tracker.track_proxy(proxy_id.clone());
|
||||
|
||||
// Test list command
|
||||
let list_args = ["proxy", "list"];
|
||||
let list_output = TestUtils::execute_nodecar_command(&nodecar_path, &list_args).await?;
|
||||
|
||||
assert!(list_output.status.success(), "Proxy list should succeed");
|
||||
|
||||
let list_stdout = String::from_utf8(list_output.stdout)?;
|
||||
let proxy_list: Value = serde_json::from_str(&list_stdout)?;
|
||||
|
||||
assert!(proxy_list.is_array(), "Proxy list should be an array");
|
||||
|
||||
let proxies = proxy_list.as_array().unwrap();
|
||||
assert!(
|
||||
!proxies.is_empty(),
|
||||
"Should have at least one proxy in the list"
|
||||
);
|
||||
|
||||
// Find our proxy in the list
|
||||
let found_proxy = proxies.iter().find(|p| p["id"].as_str() == Some(&proxy_id));
|
||||
assert!(found_proxy.is_some(), "Started proxy should be in the list");
|
||||
}
|
||||
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test Camoufox functionality
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_camoufox_lifecycle() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = setup_test().await?;
|
||||
let mut tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
let temp_dir = TestUtils::create_temp_dir()?;
|
||||
let profile_path = temp_dir.path().join("test_profile");
|
||||
|
||||
let args = [
|
||||
"camoufox",
|
||||
"start",
|
||||
"--profile-path",
|
||||
profile_path.to_str().unwrap(),
|
||||
"--headless",
|
||||
];
|
||||
|
||||
println!("Starting Camoufox with nodecar...");
|
||||
let output = TestUtils::execute_nodecar_command(&nodecar_path, &args).await?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
|
||||
// If Camoufox is not installed or times out, skip the test
|
||||
if stderr.contains("not installed")
|
||||
|| stderr.contains("not found")
|
||||
|| stderr.contains("timeout")
|
||||
|| stdout.contains("timeout")
|
||||
{
|
||||
println!("Skipping Camoufox test - Camoufox not available or timed out");
|
||||
tracker.cleanup_all().await;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
tracker.cleanup_all().await;
|
||||
return Err(format!("Camoufox start failed - stdout: {stdout}, stderr: {stderr}").into());
|
||||
}
|
||||
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
let config: Value = serde_json::from_str(&stdout)?;
|
||||
|
||||
// Verify Camoufox configuration structure
|
||||
assert!(config["id"].is_string(), "Camoufox ID should be a string");
|
||||
|
||||
let camoufox_id = config["id"].as_str().unwrap().to_string();
|
||||
tracker.track_camoufox(camoufox_id.clone());
|
||||
println!("Camoufox started with ID: {camoufox_id}");
|
||||
|
||||
// Test stopping Camoufox
|
||||
let stop_args = ["camoufox", "stop", "--id", &camoufox_id];
|
||||
let stop_output = TestUtils::execute_nodecar_command(&nodecar_path, &stop_args).await?;
|
||||
|
||||
assert!(stop_output.status.success(), "Camoufox stop should succeed");
|
||||
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test Camoufox with URL opening
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_camoufox_with_url() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = setup_test().await?;
|
||||
let mut tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
let temp_dir = TestUtils::create_temp_dir()?;
|
||||
let profile_path = temp_dir.path().join("test_profile_url");
|
||||
|
||||
let args = [
|
||||
"camoufox",
|
||||
"start",
|
||||
"--profile-path",
|
||||
profile_path.to_str().unwrap(),
|
||||
"--url",
|
||||
"https://httpbin.org/get",
|
||||
"--headless",
|
||||
];
|
||||
|
||||
let output = TestUtils::execute_nodecar_command(&nodecar_path, &args).await?;
|
||||
|
||||
if output.status.success() {
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
let config: Value = serde_json::from_str(&stdout)?;
|
||||
|
||||
let camoufox_id = config["id"].as_str().unwrap().to_string();
|
||||
tracker.track_camoufox(camoufox_id.clone());
|
||||
|
||||
// Verify URL is set
|
||||
if let Some(url) = config["url"].as_str() {
|
||||
assert_eq!(
|
||||
url, "https://httpbin.org/get",
|
||||
"URL should match what was provided"
|
||||
);
|
||||
}
|
||||
|
||||
// Test stopping Camoufox explicitly
|
||||
let stop_args = ["camoufox", "stop", "--id", &camoufox_id];
|
||||
let stop_output = TestUtils::execute_nodecar_command(&nodecar_path, &stop_args).await?;
|
||||
assert!(stop_output.status.success(), "Camoufox stop should succeed");
|
||||
} else {
|
||||
println!("Skipping Camoufox URL test - likely not installed");
|
||||
tracker.cleanup_all().await;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test Camoufox list functionality
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_camoufox_list() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = setup_test().await?;
|
||||
let tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
// Test list command (should work even without Camoufox installed)
|
||||
let list_args = ["camoufox", "list"];
|
||||
let list_output = TestUtils::execute_nodecar_command(&nodecar_path, &list_args).await?;
|
||||
|
||||
assert!(list_output.status.success(), "Camoufox list should succeed");
|
||||
|
||||
let list_stdout = String::from_utf8(list_output.stdout)?;
|
||||
let camoufox_list: Value = serde_json::from_str(&list_stdout)?;
|
||||
|
||||
assert!(camoufox_list.is_array(), "Camoufox list should be an array");
|
||||
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test Camoufox process tracking and management
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_camoufox_process_tracking(
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = setup_test().await?;
|
||||
let mut tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
let temp_dir = TestUtils::create_temp_dir()?;
|
||||
let profile_path = temp_dir.path().join("test_profile_tracking");
|
||||
|
||||
// Start multiple Camoufox instances
|
||||
let mut instance_ids: Vec<String> = Vec::new();
|
||||
|
||||
for i in 0..2 {
|
||||
let instance_profile_path = format!("{}_instance_{}", profile_path.to_str().unwrap(), i);
|
||||
let args = [
|
||||
"camoufox",
|
||||
"start",
|
||||
"--profile-path",
|
||||
&instance_profile_path,
|
||||
"--headless",
|
||||
];
|
||||
|
||||
println!("Starting Camoufox instance {i}...");
|
||||
let output = TestUtils::execute_nodecar_command(&nodecar_path, &args).await?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
|
||||
// If Camoufox is not installed, skip the test
|
||||
if stderr.contains("not installed") || stderr.contains("not found") {
|
||||
println!("Skipping Camoufox process tracking test - Camoufox not installed");
|
||||
tracker.cleanup_all().await;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
tracker.cleanup_all().await;
|
||||
return Err(
|
||||
format!("Camoufox instance {i} start failed - stdout: {stdout}, stderr: {stderr}").into(),
|
||||
);
|
||||
}
|
||||
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
let config: Value = serde_json::from_str(&stdout)?;
|
||||
|
||||
let camoufox_id = config["id"].as_str().unwrap().to_string();
|
||||
instance_ids.push(camoufox_id.clone());
|
||||
tracker.track_camoufox(camoufox_id.clone());
|
||||
println!("Camoufox instance {i} started with ID: {camoufox_id}");
|
||||
}
|
||||
|
||||
// Verify all instances are tracked
|
||||
let list_args = ["camoufox", "list"];
|
||||
let list_output = TestUtils::execute_nodecar_command(&nodecar_path, &list_args).await?;
|
||||
|
||||
assert!(list_output.status.success(), "Camoufox list should succeed");
|
||||
|
||||
let list_stdout = String::from_utf8(list_output.stdout)?;
|
||||
println!("Camoufox list output: {list_stdout}");
|
||||
let instances: Value = serde_json::from_str(&list_stdout)?;
|
||||
|
||||
let instances_array = instances.as_array().unwrap();
|
||||
println!("Found {} instances in list", instances_array.len());
|
||||
|
||||
// Verify our instances are in the list
|
||||
for instance_id in &instance_ids {
|
||||
let instance_found = instances_array
|
||||
.iter()
|
||||
.any(|i| i["id"].as_str() == Some(instance_id));
|
||||
if !instance_found {
|
||||
println!("Instance {instance_id} not found in list. Available instances:");
|
||||
for instance in instances_array {
|
||||
if let Some(id) = instance["id"].as_str() {
|
||||
println!(" - {id}");
|
||||
}
|
||||
}
|
||||
}
|
||||
assert!(
|
||||
instance_found,
|
||||
"Camoufox instance {instance_id} should be found in list"
|
||||
);
|
||||
}
|
||||
|
||||
// Stop all instances individually
|
||||
for instance_id in &instance_ids {
|
||||
println!("Stopping Camoufox instance: {instance_id}");
|
||||
let stop_args = ["camoufox", "stop", "--id", instance_id];
|
||||
let stop_output = TestUtils::execute_nodecar_command(&nodecar_path, &stop_args).await?;
|
||||
|
||||
if stop_output.status.success() {
|
||||
let stop_stdout = String::from_utf8(stop_output.stdout)?;
|
||||
if let Ok(stop_result) = serde_json::from_str::<Value>(&stop_stdout) {
|
||||
let success = stop_result["success"].as_bool().unwrap_or(false);
|
||||
if !success {
|
||||
println!("Warning: Stop command returned success=false for instance {instance_id}");
|
||||
}
|
||||
} else {
|
||||
println!("Warning: Could not parse stop result for instance {instance_id}");
|
||||
}
|
||||
} else {
|
||||
println!("Warning: Stop command failed for instance {instance_id}");
|
||||
}
|
||||
}
|
||||
|
||||
// Verify all instances are removed
|
||||
let list_output_after = TestUtils::execute_nodecar_command(&nodecar_path, &list_args).await?;
|
||||
|
||||
let instances_after: Value = serde_json::from_str(&String::from_utf8(list_output_after.stdout)?)?;
|
||||
let instances_after_array = instances_after.as_array().unwrap();
|
||||
|
||||
for instance_id in &instance_ids {
|
||||
let instance_still_exists = instances_after_array
|
||||
.iter()
|
||||
.any(|i| i["id"].as_str() == Some(instance_id));
|
||||
assert!(
|
||||
!instance_still_exists,
|
||||
"Stopped Camoufox instance {instance_id} should not be found in list"
|
||||
);
|
||||
}
|
||||
|
||||
println!("Camoufox process tracking test completed successfully");
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test Camoufox with various configuration options
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_camoufox_configuration_options(
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = setup_test().await?;
|
||||
let mut tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
let temp_dir = TestUtils::create_temp_dir()?;
|
||||
let profile_path = temp_dir.path().join("test_profile_config");
|
||||
|
||||
let args = [
|
||||
"camoufox",
|
||||
"start",
|
||||
"--profile-path",
|
||||
profile_path.to_str().unwrap(),
|
||||
"--block-images",
|
||||
"--max-width",
|
||||
"1920",
|
||||
"--max-height",
|
||||
"1080",
|
||||
"--headless",
|
||||
];
|
||||
|
||||
println!("Starting Camoufox with configuration options...");
|
||||
let output = TestUtils::execute_nodecar_command(&nodecar_path, &args).await?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
|
||||
// If Camoufox is not installed, skip the test
|
||||
if stderr.contains("not installed") || stderr.contains("not found") {
|
||||
println!("Skipping Camoufox configuration test - Camoufox not installed");
|
||||
tracker.cleanup_all().await;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
tracker.cleanup_all().await;
|
||||
return Err(
|
||||
format!("Camoufox with config start failed - stdout: {stdout}, stderr: {stderr}").into(),
|
||||
);
|
||||
}
|
||||
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
let config: Value = serde_json::from_str(&stdout)?;
|
||||
|
||||
let camoufox_id = config["id"].as_str().unwrap().to_string();
|
||||
tracker.track_camoufox(camoufox_id.clone());
|
||||
println!("Camoufox with configuration started with ID: {camoufox_id}");
|
||||
|
||||
// Verify configuration was applied by checking the profile path
|
||||
if let Some(returned_profile_path) = config["profilePath"].as_str() {
|
||||
assert!(
|
||||
returned_profile_path.contains("test_profile_config"),
|
||||
"Profile path should match what was provided"
|
||||
);
|
||||
}
|
||||
|
||||
// Test stopping Camoufox explicitly
|
||||
let stop_args = ["camoufox", "stop", "--id", &camoufox_id];
|
||||
let stop_output = TestUtils::execute_nodecar_command(&nodecar_path, &stop_args).await?;
|
||||
|
||||
assert!(stop_output.status.success(), "Camoufox stop should succeed");
|
||||
|
||||
println!("Camoufox configuration test completed successfully");
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test Camoufox generate-config command with basic options
|
||||
#[ignore = "CI is rate limited for camoufox download"]
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_camoufox_generate_config_basic(
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = setup_test().await?;
|
||||
let tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
let args = [
|
||||
"camoufox",
|
||||
"generate-config",
|
||||
"--max-width",
|
||||
"1920",
|
||||
"--max-height",
|
||||
"1080",
|
||||
"--block-images",
|
||||
];
|
||||
|
||||
println!("Testing Camoufox config generation with basic options...");
|
||||
let output = TestUtils::execute_nodecar_command(&nodecar_path, &args).await?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
|
||||
tracker.cleanup_all().await;
|
||||
return Err(
|
||||
format!("Camoufox generate-config failed - stdout: {stdout}, stderr: {stderr}").into(),
|
||||
);
|
||||
}
|
||||
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
println!("Generated config output: {stdout}");
|
||||
|
||||
// Parse the generated config as JSON
|
||||
let config: Value = serde_json::from_str(&stdout)?;
|
||||
|
||||
// Verify the config contains expected properties
|
||||
assert!(
|
||||
config.is_object(),
|
||||
"Generated config should be a JSON object"
|
||||
);
|
||||
|
||||
// Check for some expected fingerprint properties
|
||||
assert!(
|
||||
config.get("screen.width").is_some(),
|
||||
"Config should contain screen.width"
|
||||
);
|
||||
assert!(
|
||||
config.get("screen.height").is_some(),
|
||||
"Config should contain screen.height"
|
||||
);
|
||||
assert!(
|
||||
config.get("navigator.userAgent").is_some(),
|
||||
"Config should contain navigator.userAgent"
|
||||
);
|
||||
|
||||
println!("Camoufox generate-config basic test completed successfully");
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test Camoufox generate-config command with custom fingerprint
|
||||
#[ignore = "CI is rate limited for camoufox download"]
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_camoufox_generate_config_custom_fingerprint(
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = setup_test().await?;
|
||||
let tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
// Create a custom fingerprint JSON
|
||||
let custom_fingerprint = r#"{
|
||||
"screen.width": 1440,
|
||||
"screen.height": 900,
|
||||
"navigator.userAgent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:135.0) Gecko/20100101 Firefox/140.0",
|
||||
"navigator.platform": "TestPlatform",
|
||||
"timezone": "America/New_York",
|
||||
"locale:language": "en",
|
||||
"locale:region": "US"
|
||||
}"#;
|
||||
|
||||
let args = [
|
||||
"camoufox",
|
||||
"generate-config",
|
||||
"--fingerprint",
|
||||
custom_fingerprint,
|
||||
"--block-webrtc",
|
||||
];
|
||||
|
||||
println!("Testing Camoufox config generation with custom fingerprint...");
|
||||
let output = TestUtils::execute_nodecar_command(&nodecar_path, &args).await?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
|
||||
tracker.cleanup_all().await;
|
||||
return Err(
|
||||
format!("Camoufox generate-config with custom fingerprint failed - stdout: {stdout}, stderr: {stderr}").into(),
|
||||
);
|
||||
}
|
||||
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
|
||||
// Parse the generated config as JSON
|
||||
let config: Value = serde_json::from_str(&stdout)?;
|
||||
|
||||
// Verify the config contains expected properties
|
||||
assert!(
|
||||
config.is_object(),
|
||||
"Generated config should be a JSON object"
|
||||
);
|
||||
|
||||
// Check that our custom values are preserved
|
||||
assert_eq!(
|
||||
config.get("screen.width").and_then(|v| v.as_u64()),
|
||||
Some(1440),
|
||||
"Custom screen width should be preserved"
|
||||
);
|
||||
assert_eq!(
|
||||
config.get("screen.height").and_then(|v| v.as_u64()),
|
||||
Some(900),
|
||||
"Custom screen height should be preserved"
|
||||
);
|
||||
assert_eq!(
|
||||
config.get("navigator.userAgent").and_then(|v| v.as_str()),
|
||||
Some("Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:135.0) Gecko/20100101 Firefox/140.0"),
|
||||
"Custom user agent should be preserved"
|
||||
);
|
||||
assert_eq!(
|
||||
config.get("timezone").and_then(|v| v.as_str()),
|
||||
Some("America/New_York"),
|
||||
"Custom timezone should be preserved"
|
||||
);
|
||||
|
||||
println!("Camoufox generate-config custom fingerprint test completed successfully");
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test nodecar command validation
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_command_validation() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = setup_test().await?;
|
||||
let tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
// Test invalid command
|
||||
let invalid_args = ["invalid", "command"];
|
||||
let output = TestUtils::execute_nodecar_command(&nodecar_path, &invalid_args).await?;
|
||||
|
||||
assert!(!output.status.success(), "Invalid command should fail");
|
||||
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test concurrent proxy operations
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_concurrent_proxies() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = setup_test().await?;
|
||||
let mut tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
// Start multiple proxies concurrently
|
||||
let mut handles = vec![];
|
||||
|
||||
for i in 0..3 {
|
||||
let nodecar_path_clone = nodecar_path.clone();
|
||||
let handle = tokio::spawn(async move {
|
||||
let args = [
|
||||
"proxy",
|
||||
"start",
|
||||
"--host",
|
||||
"httpbin.org",
|
||||
"--proxy-port",
|
||||
"80",
|
||||
"--type",
|
||||
"http",
|
||||
];
|
||||
|
||||
TestUtils::execute_nodecar_command(&nodecar_path_clone, &args).await
|
||||
});
|
||||
handles.push((i, handle));
|
||||
}
|
||||
|
||||
// Wait for all proxies to start
|
||||
for (i, handle) in handles {
|
||||
match handle.await.map_err(|e| format!("Join error: {e}"))? {
|
||||
Ok(output) if output.status.success() => {
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
let config: Value = serde_json::from_str(&stdout)?;
|
||||
let proxy_id = config["id"].as_str().unwrap().to_string();
|
||||
tracker.track_proxy(proxy_id.clone());
|
||||
println!("Proxy {i} started successfully");
|
||||
}
|
||||
Ok(output) => {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
println!("Proxy {i} failed to start: {stderr}");
|
||||
}
|
||||
Err(e) => {
|
||||
println!("Proxy {i} error: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test proxy with different upstream types
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_proxy_types() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = setup_test().await?;
|
||||
let mut tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
let test_cases = vec![
|
||||
("http", "httpbin.org", "80"),
|
||||
("https", "httpbin.org", "443"),
|
||||
];
|
||||
|
||||
for (proxy_type, host, port) in test_cases {
|
||||
println!("Testing {proxy_type} proxy to {host}:{port}");
|
||||
|
||||
let args = [
|
||||
"proxy",
|
||||
"start",
|
||||
"--host",
|
||||
host,
|
||||
"--proxy-port",
|
||||
port,
|
||||
"--type",
|
||||
proxy_type,
|
||||
];
|
||||
|
||||
let output = TestUtils::execute_nodecar_command(&nodecar_path, &args).await?;
|
||||
|
||||
if output.status.success() {
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
let config: Value = serde_json::from_str(&stdout)?;
|
||||
let proxy_id = config["id"].as_str().unwrap().to_string();
|
||||
tracker.track_proxy(proxy_id.clone());
|
||||
|
||||
println!("{proxy_type} proxy test passed");
|
||||
} else {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
println!("{proxy_type} proxy test failed: {stderr}");
|
||||
}
|
||||
}
|
||||
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test direct proxy (no upstream) functionality
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_direct_proxy() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let nodecar_path = setup_test().await?;
|
||||
let mut tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
// Test starting a direct proxy (no upstream)
|
||||
let args = ["proxy", "start"];
|
||||
|
||||
println!("Starting direct proxy with nodecar...");
|
||||
let output = TestUtils::execute_nodecar_command(&nodecar_path, &args).await?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
tracker.cleanup_all().await;
|
||||
return Err(format!("Direct proxy start failed - stdout: {stdout}, stderr: {stderr}").into());
|
||||
}
|
||||
|
||||
let stdout = String::from_utf8(output.stdout)?;
|
||||
let config: Value = serde_json::from_str(&stdout)?;
|
||||
|
||||
// Verify proxy configuration structure
|
||||
assert!(config["id"].is_string(), "Proxy ID should be a string");
|
||||
assert!(
|
||||
config["localPort"].is_number(),
|
||||
"Local port should be a number"
|
||||
);
|
||||
assert!(
|
||||
config["localUrl"].is_string(),
|
||||
"Local URL should be a string"
|
||||
);
|
||||
assert_eq!(
|
||||
config["upstreamUrl"].as_str().unwrap(),
|
||||
"DIRECT",
|
||||
"Upstream URL should be DIRECT"
|
||||
);
|
||||
|
||||
let proxy_id = config["id"].as_str().unwrap().to_string();
|
||||
let local_port = config["localPort"].as_u64().unwrap() as u16;
|
||||
tracker.track_proxy(proxy_id.clone());
|
||||
|
||||
println!("Direct proxy started with ID: {proxy_id} on port: {local_port}");
|
||||
|
||||
// Wait for the proxy to start listening
|
||||
let is_listening = TestUtils::wait_for_port_state(local_port, true, 10).await;
|
||||
assert!(
|
||||
is_listening,
|
||||
"Direct proxy should be listening on the assigned port"
|
||||
);
|
||||
|
||||
// Test stopping the proxy
|
||||
let stop_args = ["proxy", "stop", "--id", &proxy_id];
|
||||
let stop_output = TestUtils::execute_nodecar_command(&nodecar_path, &stop_args).await?;
|
||||
|
||||
assert!(
|
||||
stop_output.status.success(),
|
||||
"Direct proxy stop should succeed"
|
||||
);
|
||||
|
||||
let port_available = TestUtils::wait_for_port_state(local_port, false, 5).await;
|
||||
assert!(
|
||||
port_available,
|
||||
"Port should be available after stopping direct proxy"
|
||||
);
|
||||
|
||||
println!("Direct proxy test completed successfully");
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Test SOCKS5 proxy chaining - create two proxies where the second uses the first as upstream
|
||||
#[tokio::test]
|
||||
async fn test_nodecar_socks5_proxy_chaining() -> Result<(), Box<dyn std::error::Error + Send + Sync>>
|
||||
{
|
||||
let nodecar_path = setup_test().await?;
|
||||
let mut tracker = TestResourceTracker::new(nodecar_path.clone());
|
||||
|
||||
// Step 1: Start a SOCKS5 proxy with a known working upstream (httpbin.org)
|
||||
let socks5_args = [
|
||||
"proxy",
|
||||
"start",
|
||||
"--host",
|
||||
"httpbin.org",
|
||||
"--proxy-port",
|
||||
"80",
|
||||
"--type",
|
||||
"http", // Use HTTP upstream for the first proxy
|
||||
];
|
||||
|
||||
println!("Starting first proxy with HTTP upstream...");
|
||||
let socks5_output = TestUtils::execute_nodecar_command(&nodecar_path, &socks5_args).await?;
|
||||
|
||||
if !socks5_output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&socks5_output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&socks5_output.stdout);
|
||||
tracker.cleanup_all().await;
|
||||
return Err(format!("First proxy start failed - stdout: {stdout}, stderr: {stderr}").into());
|
||||
}
|
||||
|
||||
let socks5_stdout = String::from_utf8(socks5_output.stdout)?;
|
||||
let socks5_config: Value = serde_json::from_str(&socks5_stdout)?;
|
||||
|
||||
let socks5_proxy_id = socks5_config["id"].as_str().unwrap().to_string();
|
||||
let socks5_local_port = socks5_config["localPort"].as_u64().unwrap() as u16;
|
||||
tracker.track_proxy(socks5_proxy_id.clone());
|
||||
|
||||
println!("First proxy started with ID: {socks5_proxy_id} on port: {socks5_local_port}");
|
||||
|
||||
// Step 2: Start a second proxy that uses the first proxy as upstream
|
||||
let http_proxy_args = [
|
||||
"proxy",
|
||||
"start",
|
||||
"--upstream",
|
||||
&format!("http://127.0.0.1:{socks5_local_port}"),
|
||||
];
|
||||
|
||||
println!("Starting second proxy with first proxy as upstream...");
|
||||
let http_output = TestUtils::execute_nodecar_command(&nodecar_path, &http_proxy_args).await?;
|
||||
|
||||
if !http_output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&http_output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&http_output.stdout);
|
||||
tracker.cleanup_all().await;
|
||||
return Err(
|
||||
format!("Second proxy with chained upstream failed - stdout: {stdout}, stderr: {stderr}")
|
||||
.into(),
|
||||
);
|
||||
}
|
||||
|
||||
let http_stdout = String::from_utf8(http_output.stdout)?;
|
||||
let http_config: Value = serde_json::from_str(&http_stdout)?;
|
||||
|
||||
let http_proxy_id = http_config["id"].as_str().unwrap().to_string();
|
||||
let http_local_port = http_config["localPort"].as_u64().unwrap() as u16;
|
||||
tracker.track_proxy(http_proxy_id.clone());
|
||||
|
||||
println!(
|
||||
"Second proxy started with ID: {http_proxy_id} on port: {http_local_port} (chained through first proxy)"
|
||||
);
|
||||
|
||||
// Verify both proxies are listening by waiting for them to be occupied
|
||||
let socks5_listening = TestUtils::wait_for_port_state(socks5_local_port, true, 5).await;
|
||||
let http_listening = TestUtils::wait_for_port_state(http_local_port, true, 5).await;
|
||||
|
||||
assert!(
|
||||
socks5_listening,
|
||||
"First proxy should be listening on port {socks5_local_port}"
|
||||
);
|
||||
assert!(
|
||||
http_listening,
|
||||
"Second proxy should be listening on port {http_local_port}"
|
||||
);
|
||||
|
||||
// Clean up both proxies
|
||||
let stop_http_args = ["proxy", "stop", "--id", &http_proxy_id];
|
||||
let stop_socks5_args = ["proxy", "stop", "--id", &socks5_proxy_id];
|
||||
|
||||
let http_stop_result = TestUtils::execute_nodecar_command(&nodecar_path, &stop_http_args).await;
|
||||
let socks5_stop_result =
|
||||
TestUtils::execute_nodecar_command(&nodecar_path, &stop_socks5_args).await;
|
||||
|
||||
// Verify cleanup
|
||||
assert!(
|
||||
http_stop_result.is_ok() && http_stop_result.unwrap().status.success(),
|
||||
"Second proxy stop should succeed"
|
||||
);
|
||||
assert!(
|
||||
socks5_stop_result.is_ok() && socks5_stop_result.unwrap().status.success(),
|
||||
"First proxy stop should succeed"
|
||||
);
|
||||
|
||||
let http_port_available = TestUtils::wait_for_port_state(http_local_port, false, 5).await;
|
||||
let socks5_port_available = TestUtils::wait_for_port_state(socks5_local_port, false, 5).await;
|
||||
|
||||
assert!(
|
||||
http_port_available,
|
||||
"Second proxy port should be available after stopping"
|
||||
);
|
||||
assert!(
|
||||
socks5_port_available,
|
||||
"First proxy port should be available after stopping"
|
||||
);
|
||||
|
||||
println!("Proxy chaining test completed successfully");
|
||||
tracker.cleanup_all().await;
|
||||
Ok(())
|
||||
}
|
||||
+1
-1
@@ -24,7 +24,7 @@ export default function RootLayout({
|
||||
return (
|
||||
<html lang="en" suppressHydrationWarning>
|
||||
<body
|
||||
className={`${geistSans.variable} ${geistMono.variable} antialiased`}
|
||||
className={`${geistSans.variable} ${geistMono.variable} antialiased overflow-hidden`}
|
||||
>
|
||||
<CustomThemeProvider>
|
||||
<WindowDragArea />
|
||||
|
||||
+578
-210
@@ -1,34 +1,29 @@
|
||||
"use client";
|
||||
|
||||
import { ChangeVersionDialog } from "@/components/change-version-dialog";
|
||||
import { CreateProfileDialog } from "@/components/create-profile-dialog";
|
||||
import { ImportProfileDialog } from "@/components/import-profile-dialog";
|
||||
import { ProfilesDataTable } from "@/components/profile-data-table";
|
||||
import { ProfileSelectorDialog } from "@/components/profile-selector-dialog";
|
||||
import { ProxySettingsDialog } from "@/components/proxy-settings-dialog";
|
||||
import { SettingsDialog } from "@/components/settings-dialog";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
|
||||
import {
|
||||
DropdownMenu,
|
||||
DropdownMenuContent,
|
||||
DropdownMenuItem,
|
||||
DropdownMenuTrigger,
|
||||
} from "@/components/ui/dropdown-menu";
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipContent,
|
||||
TooltipTrigger,
|
||||
} from "@/components/ui/tooltip";
|
||||
import { useAppUpdateNotifications } from "@/hooks/use-app-update-notifications";
|
||||
import { useUpdateNotifications } from "@/hooks/use-update-notifications";
|
||||
import { showErrorToast } from "@/lib/toast-utils";
|
||||
import type { BrowserProfile, ProxySettings } from "@/types";
|
||||
import { invoke } from "@tauri-apps/api/core";
|
||||
import { listen } from "@tauri-apps/api/event";
|
||||
import { getCurrent } from "@tauri-apps/plugin-deep-link";
|
||||
import { useCallback, useEffect, useRef, useState } from "react";
|
||||
import { FaDownload } from "react-icons/fa";
|
||||
import { GoGear, GoKebabHorizontal, GoPlus } from "react-icons/go";
|
||||
import { CamoufoxConfigDialog } from "@/components/camoufox-config-dialog";
|
||||
import { CreateProfileDialog } from "@/components/create-profile-dialog";
|
||||
import { DeleteConfirmationDialog } from "@/components/delete-confirmation-dialog";
|
||||
import { GroupAssignmentDialog } from "@/components/group-assignment-dialog";
|
||||
import { GroupBadges } from "@/components/group-badges";
|
||||
import { GroupManagementDialog } from "@/components/group-management-dialog";
|
||||
import HomeHeader from "@/components/home-header";
|
||||
import { ImportProfileDialog } from "@/components/import-profile-dialog";
|
||||
import { PermissionDialog } from "@/components/permission-dialog";
|
||||
import { ProfilesDataTable } from "@/components/profile-data-table";
|
||||
import { ProfileSelectorDialog } from "@/components/profile-selector-dialog";
|
||||
import { ProxyManagementDialog } from "@/components/proxy-management-dialog";
|
||||
import { ProxySettingsDialog } from "@/components/proxy-settings-dialog";
|
||||
import { SettingsDialog } from "@/components/settings-dialog";
|
||||
import { useAppUpdateNotifications } from "@/hooks/use-app-update-notifications";
|
||||
import type { PermissionType } from "@/hooks/use-permissions";
|
||||
import { usePermissions } from "@/hooks/use-permissions";
|
||||
import { useUpdateNotifications } from "@/hooks/use-update-notifications";
|
||||
import { showErrorToast, showToast } from "@/lib/toast-utils";
|
||||
import type { BrowserProfile, CamoufoxConfig, GroupWithCount } from "@/types";
|
||||
|
||||
type BrowserTypeString =
|
||||
| "mullvad-browser"
|
||||
@@ -37,7 +32,8 @@ type BrowserTypeString =
|
||||
| "chromium"
|
||||
| "brave"
|
||||
| "zen"
|
||||
| "tor-browser";
|
||||
| "tor-browser"
|
||||
| "camoufox";
|
||||
|
||||
interface PendingUrl {
|
||||
id: string;
|
||||
@@ -49,15 +45,117 @@ export default function Home() {
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [proxyDialogOpen, setProxyDialogOpen] = useState(false);
|
||||
const [createProfileDialogOpen, setCreateProfileDialogOpen] = useState(false);
|
||||
const [changeVersionDialogOpen, setChangeVersionDialogOpen] = useState(false);
|
||||
const [settingsDialogOpen, setSettingsDialogOpen] = useState(false);
|
||||
const [importProfileDialogOpen, setImportProfileDialogOpen] = useState(false);
|
||||
const [proxyManagementDialogOpen, setProxyManagementDialogOpen] =
|
||||
useState(false);
|
||||
const [camoufoxConfigDialogOpen, setCamoufoxConfigDialogOpen] =
|
||||
useState(false);
|
||||
const [groupManagementDialogOpen, setGroupManagementDialogOpen] =
|
||||
useState(false);
|
||||
const [groupAssignmentDialogOpen, setGroupAssignmentDialogOpen] =
|
||||
useState(false);
|
||||
const [selectedGroupId, setSelectedGroupId] = useState<string>("default");
|
||||
const [selectedProfilesForGroup, setSelectedProfilesForGroup] = useState<
|
||||
string[]
|
||||
>([]);
|
||||
const [selectedProfiles, setSelectedProfiles] = useState<string[]>([]);
|
||||
const [pendingUrls, setPendingUrls] = useState<PendingUrl[]>([]);
|
||||
const [currentProfileForProxy, setCurrentProfileForProxy] =
|
||||
useState<BrowserProfile | null>(null);
|
||||
const [currentProfileForVersionChange, setCurrentProfileForVersionChange] =
|
||||
const [currentProfileForCamoufoxConfig, setCurrentProfileForCamoufoxConfig] =
|
||||
useState<BrowserProfile | null>(null);
|
||||
const [hasCheckedStartupPrompt, setHasCheckedStartupPrompt] = useState(false);
|
||||
const [permissionDialogOpen, setPermissionDialogOpen] = useState(false);
|
||||
const [groups, setGroups] = useState<GroupWithCount[]>([]);
|
||||
const [areGroupsLoading, setGroupsLoading] = useState(true);
|
||||
const [currentPermissionType, setCurrentPermissionType] =
|
||||
useState<PermissionType>("microphone");
|
||||
const [showBulkDeleteConfirmation, setShowBulkDeleteConfirmation] =
|
||||
useState(false);
|
||||
const [isBulkDeleting, setIsBulkDeleting] = useState(false);
|
||||
const { isMicrophoneAccessGranted, isCameraAccessGranted, isInitialized } =
|
||||
usePermissions();
|
||||
|
||||
const handleSelectGroup = useCallback((groupId: string) => {
|
||||
setSelectedGroupId(groupId);
|
||||
setSelectedProfiles([]);
|
||||
}, []);
|
||||
|
||||
// Check for missing binaries and offer to download them
|
||||
const checkMissingBinaries = useCallback(async () => {
|
||||
try {
|
||||
const missingBinaries = await invoke<[string, string, string][]>(
|
||||
"check_missing_binaries",
|
||||
);
|
||||
|
||||
// Also check for missing GeoIP database
|
||||
const missingGeoIP = await invoke<boolean>(
|
||||
"check_missing_geoip_database",
|
||||
);
|
||||
|
||||
if (missingBinaries.length > 0 || missingGeoIP) {
|
||||
if (missingBinaries.length > 0) {
|
||||
console.log("Found missing binaries:", missingBinaries);
|
||||
}
|
||||
if (missingGeoIP) {
|
||||
console.log("Found missing GeoIP database for Camoufox");
|
||||
}
|
||||
|
||||
// Group missing binaries by browser type to avoid concurrent downloads
|
||||
const browserMap = new Map<string, string[]>();
|
||||
for (const [profileName, browser, version] of missingBinaries) {
|
||||
if (!browserMap.has(browser)) {
|
||||
browserMap.set(browser, []);
|
||||
}
|
||||
const versions = browserMap.get(browser);
|
||||
if (versions) {
|
||||
versions.push(`${version} (for ${profileName})`);
|
||||
}
|
||||
}
|
||||
|
||||
// Show a toast notification about missing binaries and auto-download them
|
||||
let missingList = Array.from(browserMap.entries())
|
||||
.map(([browser, versions]) => `${browser}: ${versions.join(", ")}`)
|
||||
.join(", ");
|
||||
|
||||
if (missingGeoIP) {
|
||||
if (missingList) {
|
||||
missingList += ", GeoIP database for Camoufox";
|
||||
} else {
|
||||
missingList = "GeoIP database for Camoufox";
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`Downloading missing components: ${missingList}`);
|
||||
|
||||
try {
|
||||
// Download missing binaries and GeoIP database sequentially to prevent conflicts
|
||||
const downloaded = await invoke<string[]>(
|
||||
"ensure_all_binaries_exist",
|
||||
);
|
||||
if (downloaded.length > 0) {
|
||||
console.log(
|
||||
"Successfully downloaded missing components:",
|
||||
downloaded,
|
||||
);
|
||||
}
|
||||
} catch (downloadError) {
|
||||
console.error(
|
||||
"Failed to download missing components:",
|
||||
downloadError,
|
||||
);
|
||||
setError(
|
||||
`Failed to download missing components: ${JSON.stringify(
|
||||
downloadError,
|
||||
)}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
console.error("Failed to check missing components:", err);
|
||||
}
|
||||
}, []);
|
||||
|
||||
// Simple profiles loader without updates check (for use as callback)
|
||||
const loadProfiles = useCallback(async () => {
|
||||
@@ -66,13 +164,48 @@ export default function Home() {
|
||||
"list_browser_profiles",
|
||||
);
|
||||
setProfiles(profileList);
|
||||
|
||||
// Check for missing binaries after loading profiles
|
||||
await checkMissingBinaries();
|
||||
} catch (err: unknown) {
|
||||
console.error("Failed to load profiles:", err);
|
||||
setError(`Failed to load profiles: ${JSON.stringify(err)}`);
|
||||
}
|
||||
}, []);
|
||||
}, [checkMissingBinaries]);
|
||||
|
||||
// Auto-update functionality - pass loadProfiles to refresh profiles after updates
|
||||
const [processingUrls, setProcessingUrls] = useState<Set<string>>(new Set());
|
||||
|
||||
const handleUrlOpen = useCallback(
|
||||
async (url: string) => {
|
||||
// Prevent duplicate processing of the same URL
|
||||
if (processingUrls.has(url)) {
|
||||
console.log("URL already being processed:", url);
|
||||
return;
|
||||
}
|
||||
|
||||
setProcessingUrls((prev) => new Set(prev).add(url));
|
||||
|
||||
try {
|
||||
console.log("URL received for opening:", url);
|
||||
|
||||
// Always show profile selector for manual selection - never auto-open
|
||||
// Replace any existing pending URL with the new one
|
||||
setPendingUrls([{ id: Date.now().toString(), url }]);
|
||||
} finally {
|
||||
// Remove URL from processing set after a short delay to prevent rapid duplicates
|
||||
setTimeout(() => {
|
||||
setProcessingUrls((prev) => {
|
||||
const next = new Set(prev);
|
||||
next.delete(url);
|
||||
return next;
|
||||
});
|
||||
}, 1000);
|
||||
}
|
||||
},
|
||||
[processingUrls],
|
||||
);
|
||||
|
||||
// Auto-update functionality - use the existing hook for compatibility
|
||||
const updateNotifications = useUpdateNotifications(loadProfiles);
|
||||
const { checkForUpdates, isUpdating } = updateNotifications;
|
||||
|
||||
@@ -86,40 +219,34 @@ export default function Home() {
|
||||
|
||||
// Check for updates after loading profiles
|
||||
await checkForUpdates();
|
||||
await checkMissingBinaries();
|
||||
} catch (err: unknown) {
|
||||
console.error("Failed to load profiles:", err);
|
||||
setError(`Failed to load profiles: ${JSON.stringify(err)}`);
|
||||
}
|
||||
}, [checkForUpdates]);
|
||||
}, [checkForUpdates, checkMissingBinaries]);
|
||||
|
||||
useAppUpdateNotifications();
|
||||
|
||||
useEffect(() => {
|
||||
void loadProfilesWithUpdateCheck();
|
||||
// Check for startup URLs but only process them once
|
||||
const [hasCheckedStartupUrl, setHasCheckedStartupUrl] = useState(false);
|
||||
const checkCurrentUrl = useCallback(async () => {
|
||||
if (hasCheckedStartupUrl) return;
|
||||
|
||||
// Check for startup default browser prompt
|
||||
void checkStartupPrompt();
|
||||
try {
|
||||
const currentUrl = await getCurrent();
|
||||
if (currentUrl && currentUrl.length > 0) {
|
||||
console.log("Startup URL detected:", currentUrl[0]);
|
||||
void handleUrlOpen(currentUrl[0]);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Failed to check current URL:", error);
|
||||
} finally {
|
||||
setHasCheckedStartupUrl(true);
|
||||
}
|
||||
}, [handleUrlOpen, hasCheckedStartupUrl]);
|
||||
|
||||
// Listen for URL open events
|
||||
void listenForUrlEvents();
|
||||
|
||||
// Check for startup URLs (when app was launched as default browser)
|
||||
void checkStartupUrls();
|
||||
|
||||
// Set up periodic update checks (every 30 minutes)
|
||||
const updateInterval = setInterval(
|
||||
() => {
|
||||
void checkForUpdates();
|
||||
},
|
||||
30 * 60 * 1000,
|
||||
);
|
||||
|
||||
return () => {
|
||||
clearInterval(updateInterval);
|
||||
};
|
||||
}, [loadProfilesWithUpdateCheck, checkForUpdates]);
|
||||
|
||||
const checkStartupPrompt = async () => {
|
||||
const checkStartupPrompt = useCallback(async () => {
|
||||
// Only check once during app startup to prevent reopening after dismissing notifications
|
||||
if (hasCheckedStartupPrompt) return;
|
||||
|
||||
@@ -130,27 +257,50 @@ export default function Home() {
|
||||
if (shouldShow) {
|
||||
setSettingsDialogOpen(true);
|
||||
}
|
||||
setHasCheckedStartupPrompt(true);
|
||||
} catch (error) {
|
||||
console.error("Failed to check startup prompt:", error);
|
||||
} finally {
|
||||
setHasCheckedStartupPrompt(true);
|
||||
}
|
||||
};
|
||||
}, [hasCheckedStartupPrompt]);
|
||||
|
||||
const checkStartupUrls = async () => {
|
||||
const checkAllPermissions = useCallback(async () => {
|
||||
try {
|
||||
const hasStartupUrl = await invoke<boolean>(
|
||||
"check_and_handle_startup_url",
|
||||
);
|
||||
if (hasStartupUrl) {
|
||||
console.log("Handled startup URL successfully");
|
||||
// Wait for permissions to be initialized before checking
|
||||
if (!isInitialized) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Check if any permissions are not granted - prioritize missing permissions
|
||||
if (!isMicrophoneAccessGranted) {
|
||||
setCurrentPermissionType("microphone");
|
||||
setPermissionDialogOpen(true);
|
||||
} else if (!isCameraAccessGranted) {
|
||||
setCurrentPermissionType("camera");
|
||||
setPermissionDialogOpen(true);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Failed to check startup URLs:", error);
|
||||
console.error("Failed to check permissions:", error);
|
||||
}
|
||||
};
|
||||
}, [isMicrophoneAccessGranted, isCameraAccessGranted, isInitialized]);
|
||||
|
||||
const listenForUrlEvents = async () => {
|
||||
const checkNextPermission = useCallback(() => {
|
||||
try {
|
||||
if (!isMicrophoneAccessGranted) {
|
||||
setCurrentPermissionType("microphone");
|
||||
setPermissionDialogOpen(true);
|
||||
} else if (!isCameraAccessGranted) {
|
||||
setCurrentPermissionType("camera");
|
||||
setPermissionDialogOpen(true);
|
||||
} else {
|
||||
setPermissionDialogOpen(false);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Failed to check next permission:", error);
|
||||
}
|
||||
}, [isMicrophoneAccessGranted, isCameraAccessGranted]);
|
||||
|
||||
const listenForUrlEvents = useCallback(async () => {
|
||||
try {
|
||||
// Listen for URL open events from the deep link handler (when app is already running)
|
||||
await listen<string>("url-open-request", (event) => {
|
||||
@@ -161,7 +311,7 @@ export default function Home() {
|
||||
// Listen for show profile selector events
|
||||
await listen<string>("show-profile-selector", (event) => {
|
||||
console.log("Received show profile selector request:", event.payload);
|
||||
setPendingUrls([{ id: Date.now().toString(), url: event.payload }]);
|
||||
void handleUrlOpen(event.payload);
|
||||
});
|
||||
|
||||
// Listen for show create profile dialog events
|
||||
@@ -175,43 +325,61 @@ export default function Home() {
|
||||
);
|
||||
setCreateProfileDialogOpen(true);
|
||||
});
|
||||
|
||||
// Listen for custom logo click events
|
||||
const handleLogoUrlEvent = (event: CustomEvent) => {
|
||||
console.log("Received logo URL event:", event.detail);
|
||||
void handleUrlOpen(event.detail);
|
||||
};
|
||||
|
||||
window.addEventListener(
|
||||
"url-open-request",
|
||||
handleLogoUrlEvent as EventListener,
|
||||
);
|
||||
|
||||
// Return cleanup function
|
||||
return () => {
|
||||
window.removeEventListener(
|
||||
"url-open-request",
|
||||
handleLogoUrlEvent as EventListener,
|
||||
);
|
||||
};
|
||||
} catch (error) {
|
||||
console.error("Failed to setup URL listener:", error);
|
||||
}
|
||||
};
|
||||
|
||||
const handleUrlOpen = async (url: string) => {
|
||||
try {
|
||||
// Use smart profile selection
|
||||
const result = await invoke<string>("smart_open_url", {
|
||||
url,
|
||||
});
|
||||
console.log("Smart URL opening succeeded:", result);
|
||||
// URL was handled successfully, no need to show selector
|
||||
} catch (error: unknown) {
|
||||
console.log(
|
||||
"Smart URL opening failed or requires profile selection:",
|
||||
error,
|
||||
);
|
||||
|
||||
// Show profile selector for manual selection
|
||||
// Replace any existing pending URL with the new one
|
||||
setPendingUrls([{ id: Date.now().toString(), url }]);
|
||||
}
|
||||
};
|
||||
}, [handleUrlOpen]);
|
||||
|
||||
const openProxyDialog = useCallback((profile: BrowserProfile | null) => {
|
||||
setCurrentProfileForProxy(profile);
|
||||
setProxyDialogOpen(true);
|
||||
}, []);
|
||||
|
||||
const openChangeVersionDialog = useCallback((profile: BrowserProfile) => {
|
||||
setCurrentProfileForVersionChange(profile);
|
||||
setChangeVersionDialogOpen(true);
|
||||
const handleConfigureCamoufox = useCallback((profile: BrowserProfile) => {
|
||||
setCurrentProfileForCamoufoxConfig(profile);
|
||||
setCamoufoxConfigDialogOpen(true);
|
||||
}, []);
|
||||
|
||||
const handleSaveCamoufoxConfig = useCallback(
|
||||
async (profile: BrowserProfile, config: CamoufoxConfig) => {
|
||||
setError(null);
|
||||
try {
|
||||
await invoke("update_camoufox_config", {
|
||||
profileName: profile.name,
|
||||
config,
|
||||
});
|
||||
await loadProfiles();
|
||||
setCamoufoxConfigDialogOpen(false);
|
||||
} catch (err: unknown) {
|
||||
console.error("Failed to update camoufox config:", err);
|
||||
setError(`Failed to update camoufox config: ${JSON.stringify(err)}`);
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
[loadProfiles],
|
||||
);
|
||||
|
||||
const handleSaveProxy = useCallback(
|
||||
async (proxySettings: ProxySettings) => {
|
||||
async (proxyId: string | null) => {
|
||||
setProxyDialogOpen(false);
|
||||
setError(null);
|
||||
|
||||
@@ -219,10 +387,11 @@ export default function Home() {
|
||||
if (currentProfileForProxy) {
|
||||
await invoke("update_profile_proxy", {
|
||||
profileName: currentProfileForProxy.name,
|
||||
proxy: proxySettings,
|
||||
proxyId: proxyId,
|
||||
});
|
||||
}
|
||||
await loadProfiles();
|
||||
// Trigger proxy data reload in the table
|
||||
} catch (err: unknown) {
|
||||
console.error("Failed to update proxy settings:", err);
|
||||
setError(`Failed to update proxy settings: ${JSON.stringify(err)}`);
|
||||
@@ -231,34 +400,52 @@ export default function Home() {
|
||||
[currentProfileForProxy, loadProfiles],
|
||||
);
|
||||
|
||||
const loadGroups = useCallback(async () => {
|
||||
setGroupsLoading(true);
|
||||
try {
|
||||
const groupsWithCounts = await invoke<GroupWithCount[]>(
|
||||
"get_groups_with_profile_counts",
|
||||
);
|
||||
setGroups(groupsWithCounts);
|
||||
} catch (err) {
|
||||
console.error("Failed to load groups with counts:", err);
|
||||
setGroups([]);
|
||||
} finally {
|
||||
setGroupsLoading(false);
|
||||
}
|
||||
}, []);
|
||||
|
||||
const handleCreateProfile = useCallback(
|
||||
async (profileData: {
|
||||
name: string;
|
||||
browserStr: BrowserTypeString;
|
||||
version: string;
|
||||
proxy?: ProxySettings;
|
||||
releaseType: string;
|
||||
proxyId?: string;
|
||||
camoufoxConfig?: CamoufoxConfig;
|
||||
groupId?: string;
|
||||
}) => {
|
||||
setError(null);
|
||||
|
||||
try {
|
||||
const profile = await invoke<BrowserProfile>(
|
||||
const _profile = await invoke<BrowserProfile>(
|
||||
"create_browser_profile_new",
|
||||
{
|
||||
name: profileData.name,
|
||||
browserStr: profileData.browserStr,
|
||||
version: profileData.version,
|
||||
releaseType: profileData.releaseType,
|
||||
proxyId: profileData.proxyId,
|
||||
camoufoxConfig: profileData.camoufoxConfig,
|
||||
groupId:
|
||||
profileData.groupId ||
|
||||
(selectedGroupId !== "default" ? selectedGroupId : undefined),
|
||||
},
|
||||
);
|
||||
|
||||
// Update proxy if provided
|
||||
if (profileData.proxy) {
|
||||
await invoke("update_profile_proxy", {
|
||||
profileName: profile.name,
|
||||
proxy: profileData.proxy,
|
||||
});
|
||||
}
|
||||
|
||||
await loadProfiles();
|
||||
await loadGroups();
|
||||
// Trigger proxy data reload in the table
|
||||
} catch (error) {
|
||||
setError(
|
||||
`Failed to create profile: ${
|
||||
@@ -268,7 +455,7 @@ export default function Home() {
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
[loadProfiles],
|
||||
[loadProfiles, loadGroups, selectedGroupId],
|
||||
);
|
||||
|
||||
const [runningProfiles, setRunningProfiles] = useState<Set<string>>(
|
||||
@@ -286,6 +473,9 @@ export default function Home() {
|
||||
const currentRunning = runningProfilesRef.current.has(profile.name);
|
||||
|
||||
if (isRunning !== currentRunning) {
|
||||
console.log(
|
||||
`Profile ${profile.name} (${profile.browser}) status changed: ${currentRunning} -> ${isRunning}`,
|
||||
);
|
||||
setRunningProfiles((prev) => {
|
||||
const next = new Set(prev);
|
||||
if (isRunning) {
|
||||
@@ -340,43 +530,43 @@ export default function Home() {
|
||||
[loadProfiles, checkBrowserStatus, isUpdating],
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
if (profiles.length === 0) return;
|
||||
|
||||
const interval = setInterval(() => {
|
||||
for (const profile of profiles) {
|
||||
void checkBrowserStatus(profile);
|
||||
}
|
||||
}, 500);
|
||||
|
||||
return () => {
|
||||
clearInterval(interval);
|
||||
};
|
||||
}, [profiles, checkBrowserStatus]);
|
||||
|
||||
useEffect(() => {
|
||||
runningProfilesRef.current = runningProfiles;
|
||||
}, [runningProfiles]);
|
||||
|
||||
useEffect(() => {
|
||||
if (error) {
|
||||
showErrorToast(error);
|
||||
setError(null);
|
||||
}
|
||||
}, [error]);
|
||||
|
||||
const handleDeleteProfile = useCallback(
|
||||
async (profile: BrowserProfile) => {
|
||||
setError(null);
|
||||
console.log("Attempting to delete profile:", profile.name);
|
||||
|
||||
try {
|
||||
// First check if the browser is running for this profile
|
||||
const isRunning = await invoke<boolean>("check_browser_status", {
|
||||
profile,
|
||||
});
|
||||
|
||||
if (isRunning) {
|
||||
setError(
|
||||
"Cannot delete profile while browser is running. Please stop the browser first.",
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
// Attempt to delete the profile
|
||||
await invoke("delete_profile", { profileName: profile.name });
|
||||
console.log("Profile deletion command completed successfully");
|
||||
|
||||
// Give a small delay to ensure file system operations complete
|
||||
await new Promise((resolve) => setTimeout(resolve, 500));
|
||||
|
||||
// Reload profiles and groups to ensure UI is updated
|
||||
await loadProfiles();
|
||||
await loadGroups();
|
||||
|
||||
console.log("Profile deleted and profiles reloaded successfully");
|
||||
} catch (err: unknown) {
|
||||
console.error("Failed to delete profile:", err);
|
||||
setError(`Failed to delete profile: ${JSON.stringify(err)}`);
|
||||
const errorMessage = err instanceof Error ? err.message : String(err);
|
||||
setError(`Failed to delete profile: ${errorMessage}`);
|
||||
}
|
||||
},
|
||||
[loadProfiles],
|
||||
[loadProfiles, loadGroups],
|
||||
);
|
||||
|
||||
const handleRenameProfile = useCallback(
|
||||
@@ -408,74 +598,206 @@ export default function Home() {
|
||||
[loadProfiles],
|
||||
);
|
||||
|
||||
const handleDeleteSelectedProfiles = useCallback(
|
||||
async (profileNames: string[]) => {
|
||||
setError(null);
|
||||
try {
|
||||
await invoke("delete_selected_profiles", { profileNames });
|
||||
await loadProfiles();
|
||||
await loadGroups();
|
||||
} catch (err: unknown) {
|
||||
console.error("Failed to delete selected profiles:", err);
|
||||
setError(`Failed to delete selected profiles: ${JSON.stringify(err)}`);
|
||||
}
|
||||
},
|
||||
[loadProfiles, loadGroups],
|
||||
);
|
||||
|
||||
const handleAssignProfilesToGroup = useCallback((profileNames: string[]) => {
|
||||
setSelectedProfilesForGroup(profileNames);
|
||||
setGroupAssignmentDialogOpen(true);
|
||||
}, []);
|
||||
|
||||
const handleBulkDelete = useCallback(() => {
|
||||
if (selectedProfiles.length === 0) return;
|
||||
setShowBulkDeleteConfirmation(true);
|
||||
}, [selectedProfiles]);
|
||||
|
||||
const confirmBulkDelete = useCallback(async () => {
|
||||
if (selectedProfiles.length === 0) return;
|
||||
|
||||
setIsBulkDeleting(true);
|
||||
try {
|
||||
await invoke("delete_selected_profiles", {
|
||||
profileNames: selectedProfiles,
|
||||
});
|
||||
await loadProfiles();
|
||||
await loadGroups();
|
||||
setSelectedProfiles([]);
|
||||
setShowBulkDeleteConfirmation(false);
|
||||
} catch (error) {
|
||||
console.error("Failed to delete selected profiles:", error);
|
||||
setError(`Failed to delete selected profiles: ${JSON.stringify(error)}`);
|
||||
} finally {
|
||||
setIsBulkDeleting(false);
|
||||
}
|
||||
}, [selectedProfiles, loadProfiles, loadGroups]);
|
||||
|
||||
const handleBulkGroupAssignment = useCallback(() => {
|
||||
if (selectedProfiles.length === 0) return;
|
||||
handleAssignProfilesToGroup(selectedProfiles);
|
||||
setSelectedProfiles([]);
|
||||
}, [selectedProfiles, handleAssignProfilesToGroup]);
|
||||
|
||||
const handleGroupAssignmentComplete = useCallback(async () => {
|
||||
await loadProfiles();
|
||||
await loadGroups();
|
||||
setGroupAssignmentDialogOpen(false);
|
||||
setSelectedProfilesForGroup([]);
|
||||
}, [loadProfiles, loadGroups]);
|
||||
|
||||
const handleGroupManagementComplete = useCallback(async () => {
|
||||
await loadGroups();
|
||||
}, [loadGroups]);
|
||||
|
||||
useEffect(() => {
|
||||
void loadProfilesWithUpdateCheck();
|
||||
void loadGroups();
|
||||
|
||||
// Check for startup default browser prompt
|
||||
void checkStartupPrompt();
|
||||
|
||||
// Listen for URL open events and get cleanup function
|
||||
const setupListeners = async () => {
|
||||
const cleanup = await listenForUrlEvents();
|
||||
return cleanup;
|
||||
};
|
||||
|
||||
let cleanup: (() => void) | undefined;
|
||||
setupListeners().then((cleanupFn) => {
|
||||
cleanup = cleanupFn;
|
||||
});
|
||||
|
||||
// Check for startup URLs (when app was launched as default browser)
|
||||
void checkCurrentUrl();
|
||||
|
||||
// Set up periodic update checks (every 30 minutes)
|
||||
const updateInterval = setInterval(
|
||||
() => {
|
||||
void checkForUpdates();
|
||||
},
|
||||
30 * 60 * 1000,
|
||||
);
|
||||
|
||||
return () => {
|
||||
clearInterval(updateInterval);
|
||||
if (cleanup) {
|
||||
cleanup();
|
||||
}
|
||||
};
|
||||
}, [
|
||||
loadProfilesWithUpdateCheck,
|
||||
checkForUpdates,
|
||||
checkStartupPrompt,
|
||||
listenForUrlEvents,
|
||||
checkCurrentUrl,
|
||||
loadGroups,
|
||||
]);
|
||||
|
||||
// Show deprecation warning for unsupported profiles
|
||||
useEffect(() => {
|
||||
if (profiles.length === 0) return;
|
||||
|
||||
const hasDeprecated = profiles.some(
|
||||
(p) =>
|
||||
["tor-browser", "mullvad-browser"].includes(p.browser) ||
|
||||
(p.release_type === "nightly" && p.browser !== "firefox-developer"),
|
||||
);
|
||||
|
||||
if (hasDeprecated) {
|
||||
// Use a stable id to avoid duplicate toasts on re-renders
|
||||
showToast({
|
||||
id: "deprecated-profiles-warning",
|
||||
type: "error",
|
||||
title: "Some profiles will be deprecated soon",
|
||||
description:
|
||||
"Tor, Mullvad Browser and nightly profiles (except Firefox Developers Edition) will be removed in upcoming versions. Please check GitHub for migration instructions which will be available soon.",
|
||||
duration: 15000,
|
||||
});
|
||||
}
|
||||
}, [profiles]);
|
||||
|
||||
useEffect(() => {
|
||||
if (profiles.length === 0) return;
|
||||
|
||||
const interval = setInterval(() => {
|
||||
for (const profile of profiles) {
|
||||
void checkBrowserStatus(profile);
|
||||
}
|
||||
}, 500);
|
||||
|
||||
return () => {
|
||||
clearInterval(interval);
|
||||
};
|
||||
}, [profiles, checkBrowserStatus]);
|
||||
|
||||
useEffect(() => {
|
||||
runningProfilesRef.current = runningProfiles;
|
||||
}, [runningProfiles]);
|
||||
|
||||
useEffect(() => {
|
||||
if (error) {
|
||||
showErrorToast(error);
|
||||
setError(null);
|
||||
}
|
||||
}, [error]);
|
||||
|
||||
// Check permissions when they are initialized
|
||||
useEffect(() => {
|
||||
if (isInitialized) {
|
||||
void checkAllPermissions();
|
||||
}
|
||||
}, [isInitialized, checkAllPermissions]);
|
||||
|
||||
return (
|
||||
<div className="grid grid-rows-[20px_1fr_20px] items-center justify-items-center min-h-screen p-8 gap-8 sm:p-12 font-[family-name:var(--font-geist-sans)] bg-white dark:bg-black">
|
||||
<main className="flex flex-col row-start-2 gap-8 items-center w-full max-w-3xl">
|
||||
<Card className="w-full">
|
||||
<CardHeader>
|
||||
<div className="flex justify-between items-center">
|
||||
<CardTitle>Profiles</CardTitle>
|
||||
<div className="flex gap-2 items-center">
|
||||
<DropdownMenu>
|
||||
<DropdownMenuTrigger asChild>
|
||||
<Button
|
||||
size="sm"
|
||||
variant="outline"
|
||||
className="flex gap-2 items-center"
|
||||
>
|
||||
<GoKebabHorizontal className="w-4 h-4" />
|
||||
</Button>
|
||||
</DropdownMenuTrigger>
|
||||
<DropdownMenuContent align="end">
|
||||
<DropdownMenuItem
|
||||
onClick={() => {
|
||||
setSettingsDialogOpen(true);
|
||||
}}
|
||||
>
|
||||
<GoGear className="mr-2 w-4 h-4" />
|
||||
Settings
|
||||
</DropdownMenuItem>
|
||||
<DropdownMenuItem
|
||||
onClick={() => {
|
||||
setImportProfileDialogOpen(true);
|
||||
}}
|
||||
>
|
||||
<FaDownload className="mr-2 w-4 h-4" />
|
||||
Import Profile
|
||||
</DropdownMenuItem>
|
||||
</DropdownMenuContent>
|
||||
</DropdownMenu>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<Button
|
||||
size="sm"
|
||||
onClick={() => {
|
||||
setCreateProfileDialogOpen(true);
|
||||
}}
|
||||
className="flex gap-2 items-center"
|
||||
>
|
||||
<GoPlus className="w-4 h-4" />
|
||||
</Button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>Create a new profile</TooltipContent>
|
||||
</Tooltip>
|
||||
</div>
|
||||
</div>
|
||||
</CardHeader>
|
||||
<CardContent className="space-y-6">
|
||||
<ProfilesDataTable
|
||||
data={profiles}
|
||||
onLaunchProfile={launchProfile}
|
||||
onKillProfile={handleKillProfile}
|
||||
onProxySettings={openProxyDialog}
|
||||
onDeleteProfile={handleDeleteProfile}
|
||||
onRenameProfile={handleRenameProfile}
|
||||
onChangeVersion={openChangeVersionDialog}
|
||||
runningProfiles={runningProfiles}
|
||||
isUpdating={isUpdating}
|
||||
/>
|
||||
</CardContent>
|
||||
</Card>
|
||||
<div className="grid grid-rows-[20px_1fr_20px] items-center justify-items-center min-h-screen gap-8 font-[family-name:var(--font-geist-sans)] bg-white dark:bg-black">
|
||||
<main className="flex flex-col row-start-2 gap-6 items-center w-full max-w-3xl">
|
||||
<div className="w-full">
|
||||
<HomeHeader
|
||||
selectedProfiles={selectedProfiles}
|
||||
onBulkDelete={handleBulkDelete}
|
||||
onBulkGroupAssignment={handleBulkGroupAssignment}
|
||||
onCreateProfileDialogOpen={setCreateProfileDialogOpen}
|
||||
onGroupManagementDialogOpen={setGroupManagementDialogOpen}
|
||||
onImportProfileDialogOpen={setImportProfileDialogOpen}
|
||||
onProxyManagementDialogOpen={setProxyManagementDialogOpen}
|
||||
onSettingsDialogOpen={setSettingsDialogOpen}
|
||||
/>
|
||||
</div>
|
||||
<div className="space-y-4 w-full">
|
||||
<GroupBadges
|
||||
selectedGroupId={selectedGroupId}
|
||||
onGroupSelect={handleSelectGroup}
|
||||
groups={groups}
|
||||
isLoading={areGroupsLoading}
|
||||
/>
|
||||
<ProfilesDataTable
|
||||
data={profiles}
|
||||
onLaunchProfile={launchProfile}
|
||||
onKillProfile={handleKillProfile}
|
||||
onProxySettings={openProxyDialog}
|
||||
onDeleteProfile={handleDeleteProfile}
|
||||
onRenameProfile={handleRenameProfile}
|
||||
onConfigureCamoufox={handleConfigureCamoufox}
|
||||
runningProfiles={runningProfiles}
|
||||
isUpdating={isUpdating}
|
||||
onDeleteSelectedProfiles={handleDeleteSelectedProfiles}
|
||||
onAssignProfilesToGroup={handleAssignProfilesToGroup}
|
||||
selectedGroupId={selectedGroupId}
|
||||
selectedProfiles={selectedProfiles}
|
||||
onSelectedProfilesChange={setSelectedProfiles}
|
||||
/>
|
||||
</div>
|
||||
</main>
|
||||
|
||||
<ProxySettingsDialog
|
||||
@@ -483,8 +805,8 @@ export default function Home() {
|
||||
onClose={() => {
|
||||
setProxyDialogOpen(false);
|
||||
}}
|
||||
onSave={(proxy: ProxySettings) => void handleSaveProxy(proxy)}
|
||||
initialSettings={currentProfileForProxy?.proxy}
|
||||
onSave={handleSaveProxy}
|
||||
initialProxyId={currentProfileForProxy?.proxy_id}
|
||||
browserType={currentProfileForProxy?.browser}
|
||||
/>
|
||||
|
||||
@@ -494,6 +816,7 @@ export default function Home() {
|
||||
setCreateProfileDialogOpen(false);
|
||||
}}
|
||||
onCreateProfile={handleCreateProfile}
|
||||
selectedGroupId={selectedGroupId}
|
||||
/>
|
||||
|
||||
<SettingsDialog
|
||||
@@ -503,15 +826,6 @@ export default function Home() {
|
||||
}}
|
||||
/>
|
||||
|
||||
<ChangeVersionDialog
|
||||
isOpen={changeVersionDialogOpen}
|
||||
onClose={() => {
|
||||
setChangeVersionDialogOpen(false);
|
||||
}}
|
||||
profile={currentProfileForVersionChange}
|
||||
onVersionChanged={() => void loadProfiles()}
|
||||
/>
|
||||
|
||||
<ImportProfileDialog
|
||||
isOpen={importProfileDialogOpen}
|
||||
onClose={() => {
|
||||
@@ -520,6 +834,13 @@ export default function Home() {
|
||||
onImportComplete={() => void loadProfiles()}
|
||||
/>
|
||||
|
||||
<ProxyManagementDialog
|
||||
isOpen={proxyManagementDialogOpen}
|
||||
onClose={() => {
|
||||
setProxyManagementDialogOpen(false);
|
||||
}}
|
||||
/>
|
||||
|
||||
{pendingUrls.map((pendingUrl) => (
|
||||
<ProfileSelectorDialog
|
||||
key={pendingUrl.id}
|
||||
@@ -530,9 +851,56 @@ export default function Home() {
|
||||
);
|
||||
}}
|
||||
url={pendingUrl.url}
|
||||
isUpdating={isUpdating}
|
||||
runningProfiles={runningProfiles}
|
||||
/>
|
||||
))}
|
||||
|
||||
<PermissionDialog
|
||||
isOpen={permissionDialogOpen}
|
||||
onClose={() => {
|
||||
setPermissionDialogOpen(false);
|
||||
}}
|
||||
permissionType={currentPermissionType}
|
||||
onPermissionGranted={checkNextPermission}
|
||||
/>
|
||||
|
||||
<CamoufoxConfigDialog
|
||||
isOpen={camoufoxConfigDialogOpen}
|
||||
onClose={() => {
|
||||
setCamoufoxConfigDialogOpen(false);
|
||||
}}
|
||||
profile={currentProfileForCamoufoxConfig}
|
||||
onSave={handleSaveCamoufoxConfig}
|
||||
/>
|
||||
|
||||
<GroupManagementDialog
|
||||
isOpen={groupManagementDialogOpen}
|
||||
onClose={() => {
|
||||
setGroupManagementDialogOpen(false);
|
||||
}}
|
||||
onGroupManagementComplete={handleGroupManagementComplete}
|
||||
/>
|
||||
|
||||
<GroupAssignmentDialog
|
||||
isOpen={groupAssignmentDialogOpen}
|
||||
onClose={() => {
|
||||
setGroupAssignmentDialogOpen(false);
|
||||
}}
|
||||
selectedProfiles={selectedProfilesForGroup}
|
||||
onAssignmentComplete={handleGroupAssignmentComplete}
|
||||
/>
|
||||
|
||||
<DeleteConfirmationDialog
|
||||
isOpen={showBulkDeleteConfirmation}
|
||||
onClose={() => setShowBulkDeleteConfirmation(false)}
|
||||
onConfirm={confirmBulkDelete}
|
||||
title="Delete Selected Profiles"
|
||||
description={`This action cannot be undone. This will permanently delete ${selectedProfiles.length} profile${selectedProfiles.length !== 1 ? "s" : ""} and all associated data.`}
|
||||
confirmButtonText={`Delete ${selectedProfiles.length} Profile${selectedProfiles.length !== 1 ? "s" : ""}`}
|
||||
isLoading={isBulkDeleting}
|
||||
profileNames={selectedProfiles}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,26 +1,58 @@
|
||||
"use client";
|
||||
|
||||
import { FaDownload, FaTimes } from "react-icons/fa";
|
||||
import { LuCheckCheck, LuCog, LuRefreshCw } from "react-icons/lu";
|
||||
import { Badge } from "@/components/ui/badge";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import React from "react";
|
||||
import { FaDownload, FaTimes } from "react-icons/fa";
|
||||
import { LuRefreshCw } from "react-icons/lu";
|
||||
|
||||
interface AppUpdateInfo {
|
||||
current_version: string;
|
||||
new_version: string;
|
||||
release_notes: string;
|
||||
download_url: string;
|
||||
is_nightly: boolean;
|
||||
published_at: string;
|
||||
}
|
||||
import type { AppUpdateInfo, AppUpdateProgress } from "@/types";
|
||||
import { RippleButton } from "./ui/ripple";
|
||||
|
||||
interface AppUpdateToastProps {
|
||||
updateInfo: AppUpdateInfo;
|
||||
onUpdate: (updateInfo: AppUpdateInfo) => Promise<void>;
|
||||
onDismiss: () => void;
|
||||
isUpdating?: boolean;
|
||||
updateProgress?: string;
|
||||
updateProgress?: AppUpdateProgress | null;
|
||||
}
|
||||
|
||||
function getStageIcon(stage?: string, isUpdating?: boolean) {
|
||||
if (!isUpdating) {
|
||||
return <FaDownload className="flex-shrink-0 w-5 h-5 text-blue-500" />;
|
||||
}
|
||||
|
||||
switch (stage) {
|
||||
case "downloading":
|
||||
return <FaDownload className="flex-shrink-0 w-5 h-5 text-blue-500" />;
|
||||
case "extracting":
|
||||
return (
|
||||
<LuRefreshCw className="flex-shrink-0 w-5 h-5 text-blue-500 animate-spin" />
|
||||
);
|
||||
case "installing":
|
||||
return (
|
||||
<LuCog className="flex-shrink-0 w-5 h-5 text-blue-500 animate-spin" />
|
||||
);
|
||||
case "completed":
|
||||
return <LuCheckCheck className="flex-shrink-0 w-5 h-5 text-green-500" />;
|
||||
default:
|
||||
return (
|
||||
<LuRefreshCw className="flex-shrink-0 w-5 h-5 text-blue-500 animate-spin" />
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
function getStageDisplayName(stage?: string) {
|
||||
switch (stage) {
|
||||
case "downloading":
|
||||
return "Downloading";
|
||||
case "extracting":
|
||||
return "Extracting";
|
||||
case "installing":
|
||||
return "Installing";
|
||||
case "completed":
|
||||
return "Completed";
|
||||
default:
|
||||
return "Updating";
|
||||
}
|
||||
}
|
||||
|
||||
export function AppUpdateToast({
|
||||
@@ -34,22 +66,32 @@ export function AppUpdateToast({
|
||||
await onUpdate(updateInfo);
|
||||
};
|
||||
|
||||
const showDownloadProgress =
|
||||
isUpdating &&
|
||||
updateProgress?.stage === "downloading" &&
|
||||
updateProgress.percentage !== undefined;
|
||||
|
||||
const showOtherStageProgress =
|
||||
isUpdating &&
|
||||
updateProgress &&
|
||||
(updateProgress.stage === "extracting" ||
|
||||
updateProgress.stage === "installing" ||
|
||||
updateProgress.stage === "completed");
|
||||
|
||||
return (
|
||||
<div className="flex items-start w-full bg-white dark:bg-gray-800 border border-gray-200 dark:border-gray-700 rounded-lg p-4 shadow-lg max-w-md">
|
||||
<div className="flex items-start p-4 w-full max-w-md bg-card rounded-lg border border-border shadow-lg text-card-foreground">
|
||||
<div className="mr-3 mt-0.5">
|
||||
{isUpdating ? (
|
||||
<LuRefreshCw className="h-5 w-5 text-blue-500 animate-spin flex-shrink-0" />
|
||||
) : (
|
||||
<FaDownload className="h-5 w-5 text-blue-500 flex-shrink-0" />
|
||||
)}
|
||||
{getStageIcon(updateProgress?.stage, isUpdating)}
|
||||
</div>
|
||||
|
||||
<div className="flex-1 min-w-0">
|
||||
<div className="flex items-start justify-between gap-2">
|
||||
<div className="flex gap-2 justify-between items-start">
|
||||
<div className="flex flex-col gap-1">
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="font-semibold text-foreground text-sm">
|
||||
Donut Browser Update Available
|
||||
<div className="flex gap-2 items-center">
|
||||
<span className="text-sm font-semibold text-foreground">
|
||||
{isUpdating
|
||||
? `${getStageDisplayName(updateProgress?.stage)} Donut Browser Update`
|
||||
: "Donut Browser Update Available"}
|
||||
</span>
|
||||
<Badge
|
||||
variant={updateInfo.is_nightly ? "secondary" : "default"}
|
||||
@@ -59,8 +101,14 @@ export function AppUpdateToast({
|
||||
</Badge>
|
||||
</div>
|
||||
<div className="text-xs text-muted-foreground">
|
||||
Update from {updateInfo.current_version} to{" "}
|
||||
<span className="font-medium">{updateInfo.new_version}</span>
|
||||
{isUpdating ? (
|
||||
updateProgress?.message || "Updating..."
|
||||
) : (
|
||||
<>
|
||||
Update from {updateInfo.current_version} to{" "}
|
||||
<span className="font-medium">{updateInfo.new_version}</span>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -69,52 +117,66 @@ export function AppUpdateToast({
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
onClick={onDismiss}
|
||||
className="h-6 w-6 p-0 shrink-0"
|
||||
className="p-0 w-6 h-6 shrink-0"
|
||||
>
|
||||
<FaTimes className="h-3 w-3" />
|
||||
<FaTimes className="w-3 h-3" />
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{isUpdating && updateProgress && (
|
||||
<div className="mt-2">
|
||||
<p className="text-xs text-muted-foreground">{updateProgress}</p>
|
||||
{/* Download progress */}
|
||||
{showDownloadProgress && updateProgress && (
|
||||
<div className="mt-2 space-y-1">
|
||||
<div className="flex justify-between items-center">
|
||||
<p className="flex-1 min-w-0 text-xs text-muted-foreground">
|
||||
{updateProgress.percentage?.toFixed(1)}%
|
||||
{updateProgress.speed && ` • ${updateProgress.speed} MB/s`}
|
||||
{updateProgress.eta && ` • ${updateProgress.eta} remaining`}
|
||||
</p>
|
||||
</div>
|
||||
<div className="w-full bg-muted rounded-full h-1.5">
|
||||
<div
|
||||
className="bg-primary h-1.5 rounded-full transition-all duration-300"
|
||||
style={{ width: `${updateProgress.percentage}%` }}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Other stage progress (with visual indicators) */}
|
||||
{showOtherStageProgress && (
|
||||
<div className="mt-2 space-y-1">
|
||||
{/* Progress indicator for non-downloading stages */}
|
||||
<div className="w-full bg-muted rounded-full h-1.5">
|
||||
<div
|
||||
className={`h-1.5 rounded-full transition-all duration-500 ${
|
||||
updateProgress.stage === "completed"
|
||||
? "bg-green-500 w-full"
|
||||
: "bg-primary w-full animate-pulse"
|
||||
}`}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{!isUpdating && (
|
||||
<div className="flex items-center gap-2 mt-3">
|
||||
<Button
|
||||
<div className="flex gap-2 items-center mt-3">
|
||||
<RippleButton
|
||||
onClick={() => void handleUpdateClick()}
|
||||
size="sm"
|
||||
className="flex items-center gap-2 text-xs"
|
||||
className="flex gap-2 items-center text-xs"
|
||||
>
|
||||
<FaDownload className="h-3 w-3" />
|
||||
<FaDownload className="w-3 h-3" />
|
||||
Update Now
|
||||
</Button>
|
||||
<Button
|
||||
</RippleButton>
|
||||
<RippleButton
|
||||
variant="outline"
|
||||
onClick={onDismiss}
|
||||
size="sm"
|
||||
className="text-xs"
|
||||
>
|
||||
Later
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{updateInfo.release_notes && !isUpdating && (
|
||||
<div className="mt-2">
|
||||
<details className="text-xs">
|
||||
<summary className="cursor-pointer text-muted-foreground hover:text-foreground">
|
||||
Release Notes
|
||||
</summary>
|
||||
<div className="mt-1 text-muted-foreground whitespace-pre-wrap max-h-32 overflow-y-auto">
|
||||
{updateInfo.release_notes.length > 200
|
||||
? `${updateInfo.release_notes.substring(0, 200)}...`
|
||||
: updateInfo.release_notes}
|
||||
</div>
|
||||
</details>
|
||||
</RippleButton>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@@ -0,0 +1,135 @@
|
||||
"use client";
|
||||
|
||||
import { useEffect, useState } from "react";
|
||||
import { SharedCamoufoxConfigForm } from "@/components/shared-camoufox-config-form";
|
||||
import {
|
||||
Dialog,
|
||||
DialogContent,
|
||||
DialogFooter,
|
||||
DialogHeader,
|
||||
DialogTitle,
|
||||
} from "@/components/ui/dialog";
|
||||
import { ScrollArea } from "@/components/ui/scroll-area";
|
||||
import type { BrowserProfile, CamoufoxConfig } from "@/types";
|
||||
import { LoadingButton } from "./loading-button";
|
||||
import { RippleButton } from "./ui/ripple";
|
||||
|
||||
interface CamoufoxConfigDialogProps {
|
||||
isOpen: boolean;
|
||||
onClose: () => void;
|
||||
profile: BrowserProfile | null;
|
||||
onSave: (profile: BrowserProfile, config: CamoufoxConfig) => Promise<void>;
|
||||
}
|
||||
|
||||
export function CamoufoxConfigDialog({
|
||||
isOpen,
|
||||
onClose,
|
||||
profile,
|
||||
onSave,
|
||||
}: CamoufoxConfigDialogProps) {
|
||||
const [config, setConfig] = useState<CamoufoxConfig>({
|
||||
geoip: true,
|
||||
});
|
||||
const [isSaving, setIsSaving] = useState(false);
|
||||
|
||||
// Initialize config when profile changes
|
||||
useEffect(() => {
|
||||
if (profile && profile.browser === "camoufox") {
|
||||
setConfig(
|
||||
profile.camoufox_config || {
|
||||
geoip: true,
|
||||
},
|
||||
);
|
||||
}
|
||||
}, [profile]);
|
||||
|
||||
const updateConfig = (key: keyof CamoufoxConfig, value: unknown) => {
|
||||
setConfig((prev) => ({ ...prev, [key]: value }));
|
||||
};
|
||||
|
||||
const handleSave = async () => {
|
||||
if (!profile) return;
|
||||
|
||||
// Validate fingerprint JSON if it exists
|
||||
if (config.fingerprint) {
|
||||
try {
|
||||
JSON.parse(config.fingerprint);
|
||||
} catch (_error) {
|
||||
const { toast } = await import("sonner");
|
||||
toast.error("Invalid fingerprint configuration", {
|
||||
description:
|
||||
"The fingerprint configuration contains invalid JSON. Please check your advanced settings.",
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
setIsSaving(true);
|
||||
try {
|
||||
await onSave(profile, config);
|
||||
onClose();
|
||||
} catch (error) {
|
||||
console.error("Failed to save camoufox config:", error);
|
||||
const { toast } = await import("sonner");
|
||||
toast.error("Failed to save configuration", {
|
||||
description:
|
||||
error instanceof Error ? error.message : "Unknown error occurred",
|
||||
});
|
||||
} finally {
|
||||
setIsSaving(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleClose = () => {
|
||||
// Reset config to original when closing without saving
|
||||
if (profile && profile.browser === "camoufox") {
|
||||
setConfig(
|
||||
profile.camoufox_config || {
|
||||
geoip: true,
|
||||
},
|
||||
);
|
||||
}
|
||||
onClose();
|
||||
};
|
||||
|
||||
if (!profile || profile.browser !== "camoufox") {
|
||||
return null;
|
||||
}
|
||||
|
||||
// No OS warning needed anymore since we removed OS selection
|
||||
|
||||
return (
|
||||
<Dialog open={isOpen} onOpenChange={handleClose}>
|
||||
<DialogContent className="max-w-3xl max-h-[90vh] flex flex-col">
|
||||
<DialogHeader className="flex-shrink-0">
|
||||
<DialogTitle>
|
||||
Configure Camoufox Settings - {profile.name}
|
||||
</DialogTitle>
|
||||
</DialogHeader>
|
||||
|
||||
<ScrollArea className="flex-1 h-[400px]">
|
||||
<div className="py-4">
|
||||
<SharedCamoufoxConfigForm
|
||||
config={config}
|
||||
onConfigChange={updateConfig}
|
||||
forceAdvanced={true}
|
||||
/>
|
||||
</div>
|
||||
</ScrollArea>
|
||||
|
||||
<DialogFooter className="flex-shrink-0 pt-4 border-t">
|
||||
<RippleButton variant="outline" onClick={handleClose}>
|
||||
Cancel
|
||||
</RippleButton>
|
||||
<LoadingButton
|
||||
isLoading={isSaving}
|
||||
onClick={handleSave}
|
||||
disabled={isSaving}
|
||||
>
|
||||
Save
|
||||
</LoadingButton>
|
||||
</DialogFooter>
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
);
|
||||
}
|
||||
@@ -1,196 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import { LoadingButton } from "@/components/loading-button";
|
||||
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Checkbox } from "@/components/ui/checkbox";
|
||||
import {
|
||||
Dialog,
|
||||
DialogContent,
|
||||
DialogFooter,
|
||||
DialogHeader,
|
||||
DialogTitle,
|
||||
} from "@/components/ui/dialog";
|
||||
import { Label } from "@/components/ui/label";
|
||||
import { VersionSelector } from "@/components/version-selector";
|
||||
import { useBrowserDownload } from "@/hooks/use-browser-download";
|
||||
import type { BrowserProfile } from "@/types";
|
||||
import { invoke } from "@tauri-apps/api/core";
|
||||
import { useEffect, useState } from "react";
|
||||
import { LuTriangleAlert } from "react-icons/lu";
|
||||
|
||||
interface ChangeVersionDialogProps {
|
||||
isOpen: boolean;
|
||||
onClose: () => void;
|
||||
profile: BrowserProfile | null;
|
||||
onVersionChanged: () => void;
|
||||
}
|
||||
|
||||
export function ChangeVersionDialog({
|
||||
isOpen,
|
||||
onClose,
|
||||
profile,
|
||||
onVersionChanged,
|
||||
}: ChangeVersionDialogProps) {
|
||||
const [selectedVersion, setSelectedVersion] = useState<string | null>(null);
|
||||
const [isUpdating, setIsUpdating] = useState(false);
|
||||
const [showDowngradeWarning, setShowDowngradeWarning] = useState(false);
|
||||
const [acknowledgeDowngrade, setAcknowledgeDowngrade] = useState(false);
|
||||
|
||||
const {
|
||||
availableVersions,
|
||||
downloadedVersions,
|
||||
isDownloading,
|
||||
loadVersions,
|
||||
loadDownloadedVersions,
|
||||
downloadBrowser,
|
||||
isVersionDownloaded,
|
||||
} = useBrowserDownload();
|
||||
|
||||
useEffect(() => {
|
||||
if (isOpen && profile) {
|
||||
setSelectedVersion(profile.version);
|
||||
setAcknowledgeDowngrade(false);
|
||||
void loadVersions(profile.browser);
|
||||
void loadDownloadedVersions(profile.browser);
|
||||
}
|
||||
}, [isOpen, profile, loadVersions, loadDownloadedVersions]);
|
||||
|
||||
useEffect(() => {
|
||||
if (profile && selectedVersion) {
|
||||
// Check if this is a downgrade
|
||||
const currentVersionIndex = availableVersions.findIndex(
|
||||
(v) => v.tag_name === profile.version,
|
||||
);
|
||||
const selectedVersionIndex = availableVersions.findIndex(
|
||||
(v) => v.tag_name === selectedVersion,
|
||||
);
|
||||
|
||||
// If selected version has a higher index, it's older (downgrade)
|
||||
const isDowngrade =
|
||||
currentVersionIndex !== -1 &&
|
||||
selectedVersionIndex !== -1 &&
|
||||
selectedVersionIndex > currentVersionIndex;
|
||||
setShowDowngradeWarning(isDowngrade);
|
||||
|
||||
if (!isDowngrade) {
|
||||
setAcknowledgeDowngrade(false);
|
||||
}
|
||||
}
|
||||
}, [selectedVersion, profile, availableVersions]);
|
||||
|
||||
const handleDownload = async () => {
|
||||
if (!profile || !selectedVersion) return;
|
||||
await downloadBrowser(profile.browser, selectedVersion);
|
||||
};
|
||||
|
||||
const handleVersionChange = async () => {
|
||||
if (!profile || !selectedVersion) return;
|
||||
|
||||
setIsUpdating(true);
|
||||
try {
|
||||
await invoke("update_profile_version", {
|
||||
profileName: profile.name,
|
||||
version: selectedVersion,
|
||||
});
|
||||
onVersionChanged();
|
||||
onClose();
|
||||
} catch (error) {
|
||||
console.error("Failed to update profile version:", error);
|
||||
} finally {
|
||||
setIsUpdating(false);
|
||||
}
|
||||
};
|
||||
|
||||
const canUpdate =
|
||||
profile &&
|
||||
selectedVersion &&
|
||||
selectedVersion !== profile.version &&
|
||||
selectedVersion &&
|
||||
isVersionDownloaded(selectedVersion) &&
|
||||
(!showDowngradeWarning || acknowledgeDowngrade);
|
||||
|
||||
if (!profile) return null;
|
||||
|
||||
return (
|
||||
<Dialog open={isOpen} onOpenChange={onClose}>
|
||||
<DialogContent className="max-w-md">
|
||||
<DialogHeader>
|
||||
<DialogTitle>Change Browser Version</DialogTitle>
|
||||
</DialogHeader>
|
||||
|
||||
<div className="grid gap-4 py-4">
|
||||
<div className="space-y-2">
|
||||
<Label className="text-sm font-medium">Profile:</Label>
|
||||
<div className="p-2 bg-muted rounded text-sm">{profile.name}</div>
|
||||
</div>
|
||||
|
||||
<div className="space-y-2">
|
||||
<Label className="text-sm font-medium">Current Version:</Label>
|
||||
<div className="p-2 bg-muted rounded text-sm">
|
||||
{profile.version}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Version Selection */}
|
||||
<div className="grid gap-2">
|
||||
<Label>New Version</Label>
|
||||
<VersionSelector
|
||||
selectedVersion={selectedVersion}
|
||||
onVersionSelect={setSelectedVersion}
|
||||
availableVersions={availableVersions}
|
||||
downloadedVersions={downloadedVersions}
|
||||
isDownloading={isDownloading}
|
||||
onDownload={() => {
|
||||
void handleDownload();
|
||||
}}
|
||||
placeholder="Select version..."
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Downgrade Warning */}
|
||||
{showDowngradeWarning && (
|
||||
<Alert className="border-orange-700">
|
||||
<LuTriangleAlert className="h-4 w-4 text-orange-700" />
|
||||
<AlertTitle className="text-orange-700">
|
||||
Downgrade Warning
|
||||
</AlertTitle>
|
||||
<AlertDescription className="text-orange-700">
|
||||
You are about to downgrade from version {profile.version} to{" "}
|
||||
{selectedVersion}. This may lead to compatibility issues, data
|
||||
loss, or unexpected behavior.
|
||||
<div className="flex items-center space-x-2 mt-3">
|
||||
<Checkbox
|
||||
id="acknowledge-downgrade"
|
||||
checked={acknowledgeDowngrade}
|
||||
onCheckedChange={(checked) => {
|
||||
setAcknowledgeDowngrade(checked as boolean);
|
||||
}}
|
||||
/>
|
||||
<Label htmlFor="acknowledge-downgrade" className="text-sm">
|
||||
I understand the risks and want to proceed
|
||||
</Label>
|
||||
</div>
|
||||
</AlertDescription>
|
||||
</Alert>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<DialogFooter>
|
||||
<Button variant="outline" onClick={onClose}>
|
||||
Cancel
|
||||
</Button>
|
||||
<LoadingButton
|
||||
isLoading={isUpdating}
|
||||
onClick={() => {
|
||||
void handleVersionChange();
|
||||
}}
|
||||
disabled={!canUpdate}
|
||||
>
|
||||
{isUpdating ? "Updating..." : "Update Version"}
|
||||
</LoadingButton>
|
||||
</DialogFooter>
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,119 @@
|
||||
"use client";
|
||||
|
||||
import { invoke } from "@tauri-apps/api/core";
|
||||
import { useCallback, useState } from "react";
|
||||
import { toast } from "sonner";
|
||||
import { LoadingButton } from "@/components/loading-button";
|
||||
import {
|
||||
Dialog,
|
||||
DialogContent,
|
||||
DialogDescription,
|
||||
DialogFooter,
|
||||
DialogHeader,
|
||||
DialogTitle,
|
||||
} from "@/components/ui/dialog";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import { Label } from "@/components/ui/label";
|
||||
import type { ProfileGroup } from "@/types";
|
||||
import { RippleButton } from "./ui/ripple";
|
||||
|
||||
interface CreateGroupDialogProps {
|
||||
isOpen: boolean;
|
||||
onClose: () => void;
|
||||
onGroupCreated: (group: ProfileGroup) => void;
|
||||
}
|
||||
|
||||
export function CreateGroupDialog({
|
||||
isOpen,
|
||||
onClose,
|
||||
onGroupCreated,
|
||||
}: CreateGroupDialogProps) {
|
||||
const [groupName, setGroupName] = useState("");
|
||||
const [isCreating, setIsCreating] = useState(false);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
const handleCreate = useCallback(async () => {
|
||||
if (!groupName.trim()) return;
|
||||
|
||||
setIsCreating(true);
|
||||
setError(null);
|
||||
try {
|
||||
const newGroup = await invoke<ProfileGroup>("create_profile_group", {
|
||||
name: groupName.trim(),
|
||||
});
|
||||
|
||||
toast.success("Group created successfully");
|
||||
onGroupCreated(newGroup);
|
||||
setGroupName("");
|
||||
onClose();
|
||||
} catch (err) {
|
||||
console.error("Failed to create group:", err);
|
||||
const errorMessage =
|
||||
err instanceof Error ? err.message : "Failed to create group";
|
||||
setError(errorMessage);
|
||||
toast.error(errorMessage);
|
||||
} finally {
|
||||
setIsCreating(false);
|
||||
}
|
||||
}, [groupName, onGroupCreated, onClose]);
|
||||
|
||||
const handleClose = useCallback(() => {
|
||||
setGroupName("");
|
||||
setError(null);
|
||||
onClose();
|
||||
}, [onClose]);
|
||||
|
||||
return (
|
||||
<Dialog open={isOpen} onOpenChange={handleClose}>
|
||||
<DialogContent className="max-w-md">
|
||||
<DialogHeader>
|
||||
<DialogTitle>Create New Group</DialogTitle>
|
||||
<DialogDescription>
|
||||
Create a new group to organize your browser profiles.
|
||||
</DialogDescription>
|
||||
</DialogHeader>
|
||||
|
||||
<div className="space-y-4">
|
||||
<div className="space-y-2">
|
||||
<Label htmlFor="group-name">Group Name</Label>
|
||||
<Input
|
||||
id="group-name"
|
||||
placeholder="Enter group name..."
|
||||
value={groupName}
|
||||
onChange={(e) => setGroupName(e.target.value)}
|
||||
onKeyDown={(e) => {
|
||||
if (e.key === "Enter" && groupName.trim()) {
|
||||
void handleCreate();
|
||||
}
|
||||
}}
|
||||
disabled={isCreating}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{error && (
|
||||
<div className="p-3 text-sm text-red-600 bg-red-50 rounded-md dark:bg-red-900/20 dark:text-red-400">
|
||||
{error}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<DialogFooter>
|
||||
<RippleButton
|
||||
variant="outline"
|
||||
onClick={handleClose}
|
||||
disabled={isCreating}
|
||||
>
|
||||
Cancel
|
||||
</RippleButton>
|
||||
<LoadingButton
|
||||
isLoading={isCreating}
|
||||
onClick={() => void handleCreate()}
|
||||
disabled={!groupName.trim()}
|
||||
>
|
||||
Create
|
||||
</LoadingButton>
|
||||
</DialogFooter>
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
);
|
||||
}
|
||||
@@ -1,8 +1,12 @@
|
||||
"use client";
|
||||
|
||||
import { invoke } from "@tauri-apps/api/core";
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
|
||||
import { GoPlus } from "react-icons/go";
|
||||
import { LoadingButton } from "@/components/loading-button";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Checkbox } from "@/components/ui/checkbox";
|
||||
import { ProxyFormDialog } from "@/components/proxy-form-dialog";
|
||||
import { SharedCamoufoxConfigForm } from "@/components/shared-camoufox-config-form";
|
||||
import { Combobox } from "@/components/ui/combobox";
|
||||
import {
|
||||
Dialog,
|
||||
DialogContent,
|
||||
@@ -12,6 +16,7 @@ import {
|
||||
} from "@/components/ui/dialog";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import { Label } from "@/components/ui/label";
|
||||
import { ScrollArea } from "@/components/ui/scroll-area";
|
||||
import {
|
||||
Select,
|
||||
SelectContent,
|
||||
@@ -19,19 +24,11 @@ import {
|
||||
SelectTrigger,
|
||||
SelectValue,
|
||||
} from "@/components/ui/select";
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipContent,
|
||||
TooltipTrigger,
|
||||
} from "@/components/ui/tooltip";
|
||||
import { VersionSelector } from "@/components/version-selector";
|
||||
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
|
||||
import { useBrowserDownload } from "@/hooks/use-browser-download";
|
||||
import { useBrowserSupport } from "@/hooks/use-browser-support";
|
||||
import { getBrowserDisplayName } from "@/lib/browser-utils";
|
||||
import type { BrowserProfile, ProxySettings } from "@/types";
|
||||
import { invoke } from "@tauri-apps/api/core";
|
||||
import { useEffect, useState } from "react";
|
||||
import { toast } from "sonner";
|
||||
import { getBrowserIcon } from "@/lib/browser-utils";
|
||||
import type { BrowserReleaseTypes, CamoufoxConfig, StoredProxy } from "@/types";
|
||||
import { RippleButton } from "./ui/ripple";
|
||||
|
||||
type BrowserTypeString =
|
||||
| "mullvad-browser"
|
||||
@@ -40,7 +37,8 @@ type BrowserTypeString =
|
||||
| "chromium"
|
||||
| "brave"
|
||||
| "zen"
|
||||
| "tor-browser";
|
||||
| "tor-browser"
|
||||
| "camoufox";
|
||||
|
||||
interface CreateProfileDialogProps {
|
||||
isOpen: boolean;
|
||||
@@ -49,168 +47,287 @@ interface CreateProfileDialogProps {
|
||||
name: string;
|
||||
browserStr: BrowserTypeString;
|
||||
version: string;
|
||||
proxy?: ProxySettings;
|
||||
releaseType: string;
|
||||
proxyId?: string;
|
||||
camoufoxConfig?: CamoufoxConfig;
|
||||
groupId?: string;
|
||||
}) => Promise<void>;
|
||||
selectedGroupId?: string;
|
||||
}
|
||||
|
||||
interface BrowserOption {
|
||||
value: BrowserTypeString;
|
||||
label: string;
|
||||
}
|
||||
|
||||
const browserOptions: BrowserOption[] = [
|
||||
{
|
||||
value: "firefox",
|
||||
label: "Firefox",
|
||||
},
|
||||
{
|
||||
value: "firefox-developer",
|
||||
label: "Firefox Developer Edition",
|
||||
},
|
||||
{
|
||||
value: "chromium",
|
||||
label: "Chromium",
|
||||
},
|
||||
{
|
||||
value: "brave",
|
||||
label: "Brave",
|
||||
},
|
||||
{
|
||||
value: "zen",
|
||||
label: "Zen Browser",
|
||||
},
|
||||
{
|
||||
value: "mullvad-browser",
|
||||
label: "Mullvad Browser",
|
||||
},
|
||||
{
|
||||
value: "tor-browser",
|
||||
label: "Tor Browser",
|
||||
},
|
||||
];
|
||||
|
||||
export function CreateProfileDialog({
|
||||
isOpen,
|
||||
onClose,
|
||||
onCreateProfile,
|
||||
selectedGroupId,
|
||||
}: CreateProfileDialogProps) {
|
||||
const [profileName, setProfileName] = useState("");
|
||||
const [activeTab, setActiveTab] = useState("anti-detect");
|
||||
|
||||
// Regular browser states
|
||||
const [selectedBrowser, setSelectedBrowser] =
|
||||
useState<BrowserTypeString | null>("mullvad-browser");
|
||||
const [selectedVersion, setSelectedVersion] = useState<string | null>(null);
|
||||
useState<BrowserTypeString | null>("camoufox");
|
||||
const [selectedProxyId, setSelectedProxyId] = useState<string>();
|
||||
|
||||
const handleTabChange = (value: string) => {
|
||||
if (value === "regular") {
|
||||
setSelectedBrowser(null);
|
||||
} else if (value === "anti-detect") {
|
||||
setSelectedBrowser("camoufox");
|
||||
}
|
||||
|
||||
setActiveTab(value);
|
||||
};
|
||||
|
||||
// Camoufox anti-detect states
|
||||
const [camoufoxConfig, setCamoufoxConfig] = useState<CamoufoxConfig>({
|
||||
geoip: true, // Default to automatic geoip
|
||||
});
|
||||
|
||||
const [supportedBrowsers, setSupportedBrowsers] = useState<string[]>([]);
|
||||
const [storedProxies, setStoredProxies] = useState<StoredProxy[]>([]);
|
||||
const [showProxyForm, setShowProxyForm] = useState(false);
|
||||
const [isCreating, setIsCreating] = useState(false);
|
||||
const [existingProfiles, setExistingProfiles] = useState<BrowserProfile[]>(
|
||||
[],
|
||||
);
|
||||
|
||||
// Proxy settings
|
||||
const [proxyEnabled, setProxyEnabled] = useState(false);
|
||||
const [proxyType, setProxyType] = useState("http");
|
||||
const [proxyHost, setProxyHost] = useState("");
|
||||
const [proxyPort, setProxyPort] = useState(8080);
|
||||
const [releaseTypes, setReleaseTypes] = useState<BrowserReleaseTypes>();
|
||||
const loadingBrowserRef = useRef<string | null>(null);
|
||||
|
||||
// Use the browser download hook
|
||||
const {
|
||||
availableVersions,
|
||||
downloadedVersions,
|
||||
isDownloading,
|
||||
loadVersions,
|
||||
loadDownloadedVersions,
|
||||
isBrowserDownloading,
|
||||
downloadBrowser,
|
||||
loadDownloadedVersions,
|
||||
isVersionDownloaded,
|
||||
} = useBrowserDownload();
|
||||
|
||||
const {
|
||||
supportedBrowsers,
|
||||
isLoading: isLoadingSupport,
|
||||
isBrowserSupported,
|
||||
} = useBrowserSupport();
|
||||
const loadSupportedBrowsers = useCallback(async () => {
|
||||
try {
|
||||
const browsers = await invoke<string[]>("get_supported_browsers");
|
||||
setSupportedBrowsers(browsers);
|
||||
} catch (error) {
|
||||
console.error("Failed to load supported browsers:", error);
|
||||
}
|
||||
}, []);
|
||||
|
||||
const loadStoredProxies = useCallback(async () => {
|
||||
try {
|
||||
const proxies = await invoke<StoredProxy[]>("get_stored_proxies");
|
||||
setStoredProxies(proxies);
|
||||
} catch (error) {
|
||||
console.error("Failed to load stored proxies:", error);
|
||||
}
|
||||
}, []);
|
||||
|
||||
const checkAndDownloadGeoIPDatabase = useCallback(async () => {
|
||||
try {
|
||||
const isAvailable = await invoke<boolean>("is_geoip_database_available");
|
||||
if (!isAvailable) {
|
||||
console.log("GeoIP database not available, downloading...");
|
||||
await invoke("download_geoip_database");
|
||||
console.log("GeoIP database downloaded successfully");
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Failed to check/download GeoIP database:", error);
|
||||
// Don't show error to user as this is not critical for profile creation
|
||||
}
|
||||
}, []);
|
||||
|
||||
const loadReleaseTypes = useCallback(
|
||||
async (browser: string) => {
|
||||
// Set loading state
|
||||
loadingBrowserRef.current = browser;
|
||||
|
||||
try {
|
||||
const rawReleaseTypes = await invoke<BrowserReleaseTypes>(
|
||||
"get_browser_release_types",
|
||||
{ browserStr: browser },
|
||||
);
|
||||
|
||||
// Only update state if this browser is still the one we're loading
|
||||
if (loadingBrowserRef.current === browser) {
|
||||
// Filter to enforce stable-only creation, except Firefox Developer (nightly-only)
|
||||
if (browser === "camoufox") {
|
||||
const filtered: BrowserReleaseTypes = {};
|
||||
if (rawReleaseTypes.stable)
|
||||
filtered.stable = rawReleaseTypes.stable;
|
||||
setReleaseTypes(filtered);
|
||||
} else if (browser === "firefox-developer") {
|
||||
const filtered: BrowserReleaseTypes = {};
|
||||
if (rawReleaseTypes.nightly)
|
||||
filtered.nightly = rawReleaseTypes.nightly;
|
||||
setReleaseTypes(filtered);
|
||||
} else {
|
||||
const filtered: BrowserReleaseTypes = {};
|
||||
if (rawReleaseTypes.stable)
|
||||
filtered.stable = rawReleaseTypes.stable;
|
||||
setReleaseTypes(filtered);
|
||||
}
|
||||
|
||||
// Load downloaded versions for this browser
|
||||
await loadDownloadedVersions(browser);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`Failed to load release types for ${browser}:`, error);
|
||||
} finally {
|
||||
// Clear loading state only if we're still loading this browser
|
||||
if (loadingBrowserRef.current === browser) {
|
||||
loadingBrowserRef.current = null;
|
||||
}
|
||||
}
|
||||
},
|
||||
[loadDownloadedVersions],
|
||||
);
|
||||
|
||||
// Load data when dialog opens
|
||||
useEffect(() => {
|
||||
if (isOpen) {
|
||||
void loadExistingProfiles();
|
||||
void loadSupportedBrowsers();
|
||||
void loadStoredProxies();
|
||||
// Load camoufox release types when dialog opens
|
||||
void loadReleaseTypes("camoufox");
|
||||
// Check and download GeoIP database if needed for Camoufox
|
||||
void checkAndDownloadGeoIPDatabase();
|
||||
}
|
||||
}, [isOpen]);
|
||||
}, [
|
||||
isOpen,
|
||||
loadSupportedBrowsers,
|
||||
loadStoredProxies,
|
||||
loadReleaseTypes,
|
||||
checkAndDownloadGeoIPDatabase,
|
||||
]);
|
||||
|
||||
// Load release types when browser selection changes
|
||||
useEffect(() => {
|
||||
if (supportedBrowsers.length > 0) {
|
||||
// Set default browser to first supported browser
|
||||
if (supportedBrowsers.includes("mullvad-browser")) {
|
||||
setSelectedBrowser("mullvad-browser");
|
||||
} else if (supportedBrowsers.length > 0) {
|
||||
setSelectedBrowser(supportedBrowsers[0] as BrowserTypeString);
|
||||
if (selectedBrowser) {
|
||||
// Cancel any previous loading
|
||||
loadingBrowserRef.current = null;
|
||||
// Clear previous release types immediately to prevent showing stale data
|
||||
setReleaseTypes({});
|
||||
void loadReleaseTypes(selectedBrowser);
|
||||
}
|
||||
}, [selectedBrowser, loadReleaseTypes]);
|
||||
|
||||
// Helper function to get the best available version respecting rules
|
||||
const getBestAvailableVersion = useCallback(
|
||||
(browserType?: string) => {
|
||||
if (!releaseTypes) return null;
|
||||
|
||||
// Firefox Developer Edition: nightly-only
|
||||
if (browserType === "firefox-developer" && releaseTypes.nightly) {
|
||||
return {
|
||||
version: releaseTypes.nightly,
|
||||
releaseType: "nightly" as const,
|
||||
};
|
||||
}
|
||||
}
|
||||
}, [supportedBrowsers]);
|
||||
|
||||
useEffect(() => {
|
||||
if (isOpen && selectedBrowser) {
|
||||
// Reset selected version when browser changes
|
||||
setSelectedVersion(null);
|
||||
void loadVersions(selectedBrowser);
|
||||
void loadDownloadedVersions(selectedBrowser);
|
||||
}
|
||||
}, [isOpen, selectedBrowser, loadVersions, loadDownloadedVersions]);
|
||||
|
||||
// Set default version when versions are loaded and no version is selected
|
||||
useEffect(() => {
|
||||
if (availableVersions.length > 0 && selectedBrowser) {
|
||||
// Always reset version when browser changes or versions are loaded
|
||||
// Find the latest stable version (not alpha/beta)
|
||||
const stableVersions = availableVersions.filter((v) => !v.is_nightly);
|
||||
|
||||
if (stableVersions.length > 0) {
|
||||
// Select the first stable version (they're already sorted newest first)
|
||||
setSelectedVersion(stableVersions[0].tag_name);
|
||||
} else if (availableVersions.length > 0) {
|
||||
// If no stable version found, select the first available version
|
||||
setSelectedVersion(availableVersions[0].tag_name);
|
||||
// All others: stable-only
|
||||
if (releaseTypes.stable) {
|
||||
return { version: releaseTypes.stable, releaseType: "stable" as const };
|
||||
}
|
||||
}
|
||||
}, [availableVersions, selectedBrowser]);
|
||||
return null;
|
||||
},
|
||||
[releaseTypes],
|
||||
);
|
||||
|
||||
const loadExistingProfiles = async () => {
|
||||
try {
|
||||
const profiles = await invoke<BrowserProfile[]>("list_browser_profiles");
|
||||
setExistingProfiles(profiles);
|
||||
} catch (error) {
|
||||
console.error("Failed to load existing profiles:", error);
|
||||
}
|
||||
};
|
||||
const handleDownload = async (browserStr: string) => {
|
||||
const bestVersion = getBestAvailableVersion(browserStr);
|
||||
|
||||
const handleDownload = async () => {
|
||||
if (!selectedBrowser || !selectedVersion) return;
|
||||
await downloadBrowser(selectedBrowser, selectedVersion);
|
||||
};
|
||||
|
||||
const validateProfileName = (name: string): string | null => {
|
||||
const trimmedName = name.trim();
|
||||
|
||||
if (!trimmedName) {
|
||||
return "Profile name cannot be empty";
|
||||
}
|
||||
|
||||
// Check for duplicate names (case insensitive)
|
||||
const isDuplicate = existingProfiles.some(
|
||||
(profile) => profile.name.toLowerCase() === trimmedName.toLowerCase(),
|
||||
);
|
||||
|
||||
if (isDuplicate) {
|
||||
return "A profile with this name already exists";
|
||||
}
|
||||
|
||||
return null;
|
||||
};
|
||||
|
||||
// Helper to determine if proxy should be disabled for the selected browser
|
||||
const isProxyDisabled = selectedBrowser === "tor-browser";
|
||||
|
||||
// Update proxy enabled state when browser changes to tor-browser
|
||||
useEffect(() => {
|
||||
if (selectedBrowser === "tor-browser" && proxyEnabled) {
|
||||
setProxyEnabled(false);
|
||||
}
|
||||
}, [selectedBrowser, proxyEnabled]);
|
||||
|
||||
const handleCreate = async () => {
|
||||
if (!profileName.trim() || !selectedBrowser || !selectedVersion) return;
|
||||
|
||||
// Validate profile name
|
||||
const nameError = validateProfileName(profileName);
|
||||
if (nameError) {
|
||||
toast.error(nameError);
|
||||
if (!bestVersion) {
|
||||
console.error("No version available for download");
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
await downloadBrowser(browserStr, bestVersion.version);
|
||||
} catch (error) {
|
||||
console.error("Failed to download browser:", error);
|
||||
}
|
||||
};
|
||||
|
||||
const handleCreate = async () => {
|
||||
if (!profileName.trim()) return;
|
||||
|
||||
setIsCreating(true);
|
||||
try {
|
||||
const proxy =
|
||||
proxyEnabled && !isProxyDisabled
|
||||
? {
|
||||
enabled: true,
|
||||
proxy_type: proxyType,
|
||||
host: proxyHost,
|
||||
port: proxyPort,
|
||||
}
|
||||
: undefined;
|
||||
if (activeTab === "regular") {
|
||||
if (!selectedBrowser) {
|
||||
console.error("Missing required browser selection");
|
||||
return;
|
||||
}
|
||||
|
||||
await onCreateProfile({
|
||||
name: profileName.trim(),
|
||||
browserStr: selectedBrowser,
|
||||
version: selectedVersion,
|
||||
proxy,
|
||||
});
|
||||
// Use the best available version (stable preferred, nightly as fallback)
|
||||
const bestVersion = getBestAvailableVersion(selectedBrowser);
|
||||
if (!bestVersion) {
|
||||
console.error("No version available");
|
||||
return;
|
||||
}
|
||||
|
||||
// Reset form
|
||||
setProfileName("");
|
||||
setSelectedVersion(null);
|
||||
setProxyEnabled(false);
|
||||
setProxyHost("");
|
||||
setProxyPort(8080);
|
||||
onClose();
|
||||
await onCreateProfile({
|
||||
name: profileName.trim(),
|
||||
browserStr: selectedBrowser,
|
||||
version: bestVersion.version,
|
||||
releaseType: bestVersion.releaseType,
|
||||
proxyId: selectedProxyId,
|
||||
groupId: selectedGroupId !== "default" ? selectedGroupId : undefined,
|
||||
});
|
||||
} else {
|
||||
// Anti-detect tab - always use Camoufox with best available version
|
||||
const bestCamoufoxVersion = getBestAvailableVersion("camoufox");
|
||||
if (!bestCamoufoxVersion) {
|
||||
console.error("No Camoufox version available");
|
||||
return;
|
||||
}
|
||||
|
||||
// The fingerprint will be generated at launch time by the Rust backend
|
||||
// We don't need to generate it here during profile creation
|
||||
const finalCamoufoxConfig = { ...camoufoxConfig };
|
||||
|
||||
await onCreateProfile({
|
||||
name: profileName.trim(),
|
||||
browserStr: "camoufox" as BrowserTypeString,
|
||||
version: bestCamoufoxVersion.version,
|
||||
releaseType: bestCamoufoxVersion.releaseType,
|
||||
proxyId: selectedProxyId,
|
||||
camoufoxConfig: finalCamoufoxConfig,
|
||||
groupId: selectedGroupId !== "default" ? selectedGroupId : undefined,
|
||||
});
|
||||
}
|
||||
|
||||
handleClose();
|
||||
} catch (error) {
|
||||
console.error("Failed to create profile:", error);
|
||||
} finally {
|
||||
@@ -218,220 +335,307 @@ export function CreateProfileDialog({
|
||||
}
|
||||
};
|
||||
|
||||
const nameError = profileName.trim()
|
||||
? validateProfileName(profileName)
|
||||
: null;
|
||||
const canCreate =
|
||||
profileName.trim() &&
|
||||
selectedBrowser &&
|
||||
selectedVersion &&
|
||||
isVersionDownloaded(selectedVersion) &&
|
||||
(!proxyEnabled || isProxyDisabled || (proxyHost && proxyPort)) &&
|
||||
!nameError;
|
||||
const handleClose = () => {
|
||||
// Cancel any ongoing loading
|
||||
loadingBrowserRef.current = null;
|
||||
|
||||
// Reset all states
|
||||
setProfileName("");
|
||||
setSelectedBrowser(null);
|
||||
setSelectedProxyId(undefined);
|
||||
setReleaseTypes({});
|
||||
setCamoufoxConfig({
|
||||
geoip: true, // Reset to automatic geoip
|
||||
});
|
||||
setActiveTab("anti-detect");
|
||||
onClose();
|
||||
};
|
||||
|
||||
const updateCamoufoxConfig = (key: keyof CamoufoxConfig, value: unknown) => {
|
||||
setCamoufoxConfig((prev) => ({ ...prev, [key]: value }));
|
||||
};
|
||||
|
||||
// Check if browser version is downloaded and available
|
||||
const isBrowserVersionAvailable = useCallback(
|
||||
(browserStr: string) => {
|
||||
const bestVersion = getBestAvailableVersion(browserStr);
|
||||
return bestVersion && isVersionDownloaded(bestVersion.version);
|
||||
},
|
||||
[isVersionDownloaded, getBestAvailableVersion],
|
||||
);
|
||||
|
||||
// Check if browser is currently downloading
|
||||
const isBrowserCurrentlyDownloading = useCallback(
|
||||
(browserStr: string) => {
|
||||
return isBrowserDownloading(browserStr);
|
||||
},
|
||||
[isBrowserDownloading],
|
||||
);
|
||||
|
||||
const isCreateDisabled = useMemo(() => {
|
||||
if (!profileName.trim()) return true;
|
||||
if (!selectedBrowser) return true;
|
||||
if (isBrowserCurrentlyDownloading(selectedBrowser)) return true;
|
||||
if (!isBrowserVersionAvailable(selectedBrowser)) return true;
|
||||
|
||||
return false;
|
||||
}, [
|
||||
profileName,
|
||||
selectedBrowser,
|
||||
isBrowserCurrentlyDownloading,
|
||||
isBrowserVersionAvailable,
|
||||
]);
|
||||
|
||||
useEffect(() => {
|
||||
console.log(
|
||||
selectedBrowser,
|
||||
selectedBrowser && isBrowserCurrentlyDownloading(selectedBrowser),
|
||||
selectedBrowser && isBrowserVersionAvailable(selectedBrowser),
|
||||
selectedBrowser && getBestAvailableVersion(selectedBrowser),
|
||||
);
|
||||
}, [
|
||||
selectedBrowser,
|
||||
isBrowserCurrentlyDownloading,
|
||||
isBrowserVersionAvailable,
|
||||
getBestAvailableVersion,
|
||||
]);
|
||||
|
||||
return (
|
||||
<Dialog open={isOpen} onOpenChange={onClose}>
|
||||
<DialogContent className="max-w-md">
|
||||
<DialogHeader>
|
||||
<Dialog open={isOpen} onOpenChange={handleClose}>
|
||||
<DialogContent className="w-full max-h-[90vh] flex flex-col">
|
||||
<DialogHeader className="flex-shrink-0">
|
||||
<DialogTitle>Create New Profile</DialogTitle>
|
||||
</DialogHeader>
|
||||
|
||||
<div className="grid gap-4 py-4">
|
||||
{/* Profile Name */}
|
||||
<div className="grid gap-2">
|
||||
<Label htmlFor="profile-name">Profile Name</Label>
|
||||
<Input
|
||||
id="profile-name"
|
||||
value={profileName}
|
||||
onChange={(e) => {
|
||||
setProfileName(e.target.value);
|
||||
}}
|
||||
placeholder="Enter profile name"
|
||||
className={nameError ? "border-red-500" : ""}
|
||||
/>
|
||||
{nameError && <p className="text-sm text-red-600">{nameError}</p>}
|
||||
</div>
|
||||
|
||||
{/* Browser Selection */}
|
||||
<div className="grid gap-2">
|
||||
<Label>Browser</Label>
|
||||
<Select
|
||||
value={selectedBrowser ?? undefined}
|
||||
onValueChange={(value) => {
|
||||
setSelectedBrowser(value as BrowserTypeString);
|
||||
}}
|
||||
disabled={isLoadingSupport}
|
||||
>
|
||||
<SelectTrigger>
|
||||
<SelectValue
|
||||
placeholder={
|
||||
isLoadingSupport ? "Loading browsers..." : "Select browser"
|
||||
}
|
||||
/>
|
||||
</SelectTrigger>
|
||||
<SelectContent>
|
||||
{(
|
||||
[
|
||||
"mullvad-browser",
|
||||
"firefox",
|
||||
"firefox-developer",
|
||||
"chromium",
|
||||
"brave",
|
||||
"zen",
|
||||
"tor-browser",
|
||||
] as BrowserTypeString[]
|
||||
).map((browser) => {
|
||||
const isSupported = isBrowserSupported(browser);
|
||||
const displayName = getBrowserDisplayName(browser);
|
||||
|
||||
if (!isSupported) {
|
||||
return (
|
||||
<Tooltip key={browser}>
|
||||
<TooltipTrigger asChild>
|
||||
<SelectItem
|
||||
value={browser}
|
||||
disabled={true}
|
||||
className="opacity-50"
|
||||
>
|
||||
{displayName} (Not supported)
|
||||
</SelectItem>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
<p>
|
||||
{displayName} is not supported on your current
|
||||
platform or architecture.
|
||||
</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<SelectItem key={browser} value={browser}>
|
||||
{displayName}
|
||||
</SelectItem>
|
||||
);
|
||||
})}
|
||||
</SelectContent>
|
||||
</Select>
|
||||
</div>
|
||||
|
||||
{/* Version Selection */}
|
||||
<div className="grid gap-2">
|
||||
<Label>Version</Label>
|
||||
<VersionSelector
|
||||
selectedVersion={selectedVersion}
|
||||
onVersionSelect={setSelectedVersion}
|
||||
availableVersions={availableVersions}
|
||||
downloadedVersions={downloadedVersions}
|
||||
isDownloading={isDownloading}
|
||||
onDownload={() => {
|
||||
void handleDownload();
|
||||
}}
|
||||
placeholder="Select version..."
|
||||
/>
|
||||
</div>
|
||||
|
||||
{/* Proxy Settings */}
|
||||
<div className="grid gap-4 pt-4 border-t">
|
||||
<div className="flex items-center space-x-2">
|
||||
{isProxyDisabled ? (
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<div className="flex items-center space-x-2 opacity-50">
|
||||
<Checkbox
|
||||
id="proxy-enabled"
|
||||
checked={false}
|
||||
disabled={true}
|
||||
/>
|
||||
<Label htmlFor="proxy-enabled" className="text-gray-500">
|
||||
Enable Proxy
|
||||
</Label>
|
||||
</div>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
<p>
|
||||
Tor Browser has its own built-in proxy system and
|
||||
doesn't support additional proxy configuration
|
||||
</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
) : (
|
||||
<>
|
||||
<Checkbox
|
||||
id="proxy-enabled"
|
||||
checked={proxyEnabled}
|
||||
onCheckedChange={(checked) => {
|
||||
setProxyEnabled(checked as boolean);
|
||||
}}
|
||||
/>
|
||||
<Label htmlFor="proxy-enabled">Enable Proxy</Label>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{proxyEnabled && !isProxyDisabled && (
|
||||
<>
|
||||
<div className="grid gap-2">
|
||||
<Label>Proxy Type</Label>
|
||||
<Select value={proxyType} onValueChange={setProxyType}>
|
||||
<SelectTrigger>
|
||||
<SelectValue />
|
||||
</SelectTrigger>
|
||||
<SelectContent>
|
||||
{["http", "https", "socks4", "socks5"].map((type) => (
|
||||
<SelectItem key={type} value={type}>
|
||||
{type.toUpperCase()}
|
||||
</SelectItem>
|
||||
))}
|
||||
</SelectContent>
|
||||
</Select>
|
||||
</div>
|
||||
|
||||
<div className="grid gap-2">
|
||||
<Label htmlFor="proxy-host">Host</Label>
|
||||
<Input
|
||||
id="proxy-host"
|
||||
value={proxyHost}
|
||||
onChange={(e) => {
|
||||
setProxyHost(e.target.value);
|
||||
}}
|
||||
placeholder="e.g. 127.0.0.1"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className="grid gap-2">
|
||||
<Label htmlFor="proxy-port">Port</Label>
|
||||
<Input
|
||||
id="proxy-port"
|
||||
type="number"
|
||||
value={proxyPort}
|
||||
onChange={(e) => {
|
||||
setProxyPort(Number.parseInt(e.target.value, 10) || 0);
|
||||
}}
|
||||
placeholder="e.g. 8080"
|
||||
min="1"
|
||||
max="65535"
|
||||
/>
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<DialogFooter>
|
||||
<Button variant="outline" onClick={onClose}>
|
||||
Cancel
|
||||
</Button>
|
||||
<LoadingButton
|
||||
isLoading={isCreating}
|
||||
onClick={() => void handleCreate()}
|
||||
disabled={!canCreate}
|
||||
<Tabs
|
||||
value={activeTab}
|
||||
onValueChange={handleTabChange}
|
||||
className="flex flex-col flex-1 w-full min-h-0"
|
||||
>
|
||||
<TabsList
|
||||
className="grid flex-shrink-0 grid-cols-2 w-full"
|
||||
defaultValue="anti-detect"
|
||||
>
|
||||
Create Profile
|
||||
</LoadingButton>
|
||||
</DialogFooter>
|
||||
<TabsTrigger value="anti-detect">Anti-Detect</TabsTrigger>
|
||||
<TabsTrigger value="regular">Regular</TabsTrigger>
|
||||
</TabsList>
|
||||
|
||||
<ScrollArea className="flex-1 h-[330px] overflow-y-hidden">
|
||||
<div className="flex flex-col justify-center items-center w-full">
|
||||
<div className="py-4 space-y-6 w-full max-w-md">
|
||||
{/* Profile Name - Common to both tabs */}
|
||||
<div className="space-y-2">
|
||||
<Label htmlFor="profile-name">Profile Name</Label>
|
||||
<Input
|
||||
id="profile-name"
|
||||
value={profileName}
|
||||
onChange={(e) => setProfileName(e.target.value)}
|
||||
placeholder="Enter profile name"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<TabsContent value="regular" className="mt-0 space-y-6">
|
||||
<div className="space-y-4">
|
||||
<div className="space-y-2">
|
||||
<Label>Browser</Label>
|
||||
<Combobox
|
||||
options={browserOptions
|
||||
.filter(
|
||||
(browser) =>
|
||||
supportedBrowsers.includes(browser.value) &&
|
||||
browser.value !== "mullvad-browser" &&
|
||||
browser.value !== "tor-browser",
|
||||
)
|
||||
.map((browser) => {
|
||||
const IconComponent = getBrowserIcon(browser.value);
|
||||
return {
|
||||
value: browser.value,
|
||||
label: browser.label,
|
||||
icon: IconComponent,
|
||||
};
|
||||
})}
|
||||
value={selectedBrowser || ""}
|
||||
onValueChange={(value) =>
|
||||
setSelectedBrowser(value as BrowserTypeString)
|
||||
}
|
||||
placeholder="Select a browser..."
|
||||
searchPlaceholder="Search browsers..."
|
||||
/>
|
||||
</div>
|
||||
|
||||
{selectedBrowser && (
|
||||
<div className="space-y-3">
|
||||
{!isBrowserCurrentlyDownloading(selectedBrowser) &&
|
||||
!isBrowserVersionAvailable(selectedBrowser) &&
|
||||
getBestAvailableVersion(selectedBrowser) && (
|
||||
<div className="flex gap-3 items-center">
|
||||
<p className="text-sm text-muted-foreground">
|
||||
{(() => {
|
||||
const bestVersion =
|
||||
getBestAvailableVersion(selectedBrowser);
|
||||
return `Latest version (${bestVersion?.version}) needs to be downloaded`;
|
||||
})()}
|
||||
</p>
|
||||
<LoadingButton
|
||||
onClick={() => handleDownload(selectedBrowser)}
|
||||
isLoading={isBrowserCurrentlyDownloading(
|
||||
selectedBrowser,
|
||||
)}
|
||||
className="ml-auto"
|
||||
size="sm"
|
||||
disabled={isBrowserCurrentlyDownloading(
|
||||
selectedBrowser,
|
||||
)}
|
||||
>
|
||||
Download
|
||||
</LoadingButton>
|
||||
</div>
|
||||
)}
|
||||
{!isBrowserCurrentlyDownloading(selectedBrowser) &&
|
||||
isBrowserVersionAvailable(selectedBrowser) && (
|
||||
<div className="text-sm text-muted-foreground">
|
||||
{(() => {
|
||||
const bestVersion =
|
||||
getBestAvailableVersion(selectedBrowser);
|
||||
return `✓ Latest version (${bestVersion?.version}) is available`;
|
||||
})()}
|
||||
</div>
|
||||
)}
|
||||
{isBrowserCurrentlyDownloading(selectedBrowser) && (
|
||||
<div className="text-sm text-muted-foreground">
|
||||
{(() => {
|
||||
const bestVersion =
|
||||
getBestAvailableVersion(selectedBrowser);
|
||||
return `Downloading version (${bestVersion?.version})...`;
|
||||
})()}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</TabsContent>
|
||||
|
||||
<TabsContent value="anti-detect" className="mt-0 space-y-6">
|
||||
<div className="space-y-6">
|
||||
{/* Camoufox Download Status */}
|
||||
{!isBrowserCurrentlyDownloading("camoufox") &&
|
||||
!isBrowserVersionAvailable("camoufox") &&
|
||||
getBestAvailableVersion("camoufox") && (
|
||||
<div className="flex gap-3 items-center p-3 rounded-md border">
|
||||
<p className="text-sm text-muted-foreground">
|
||||
{(() => {
|
||||
const bestVersion =
|
||||
getBestAvailableVersion("camoufox");
|
||||
return `Camoufox version (${bestVersion?.version}) needs to be downloaded`;
|
||||
})()}
|
||||
</p>
|
||||
<LoadingButton
|
||||
onClick={() => handleDownload("camoufox")}
|
||||
isLoading={isBrowserCurrentlyDownloading(
|
||||
"camoufox",
|
||||
)}
|
||||
size="sm"
|
||||
disabled={isBrowserCurrentlyDownloading("camoufox")}
|
||||
>
|
||||
{isBrowserCurrentlyDownloading("camoufox")
|
||||
? "Downloading..."
|
||||
: "Download"}
|
||||
</LoadingButton>
|
||||
</div>
|
||||
)}
|
||||
{!isBrowserCurrentlyDownloading("camoufox") &&
|
||||
isBrowserVersionAvailable("camoufox") && (
|
||||
<div className="p-3 text-sm rounded-md border text-muted-foreground">
|
||||
{(() => {
|
||||
const bestVersion =
|
||||
getBestAvailableVersion("camoufox");
|
||||
return `✓ Camoufox version (${bestVersion?.version}) is available`;
|
||||
})()}
|
||||
</div>
|
||||
)}
|
||||
{isBrowserCurrentlyDownloading("camoufox") && (
|
||||
<div className="p-3 text-sm rounded-md border text-muted-foreground">
|
||||
{(() => {
|
||||
const bestVersion =
|
||||
getBestAvailableVersion("camoufox");
|
||||
return `Downloading Camoufox version (${bestVersion?.version})...`;
|
||||
})()}
|
||||
</div>
|
||||
)}
|
||||
|
||||
<SharedCamoufoxConfigForm
|
||||
config={camoufoxConfig}
|
||||
onConfigChange={updateCamoufoxConfig}
|
||||
isCreating
|
||||
/>
|
||||
</div>
|
||||
</TabsContent>
|
||||
|
||||
{/* Proxy Selection - Common to both tabs - Always visible */}
|
||||
<div className="space-y-3">
|
||||
<div className="flex justify-between items-center">
|
||||
<Label>Proxy</Label>
|
||||
<RippleButton
|
||||
size="sm"
|
||||
variant="outline"
|
||||
onClick={() => setShowProxyForm(true)}
|
||||
className="px-2 h-7 text-xs"
|
||||
>
|
||||
<GoPlus className="mr-1 w-3 h-3" /> Add Proxy
|
||||
</RippleButton>
|
||||
</div>
|
||||
{storedProxies.length > 0 ? (
|
||||
<Select
|
||||
value={selectedProxyId || "none"}
|
||||
onValueChange={(value) =>
|
||||
setSelectedProxyId(value === "none" ? undefined : value)
|
||||
}
|
||||
>
|
||||
<SelectTrigger>
|
||||
<SelectValue placeholder="No proxy" />
|
||||
</SelectTrigger>
|
||||
<SelectContent>
|
||||
<SelectItem value="none">No proxy</SelectItem>
|
||||
{storedProxies.map((proxy) => (
|
||||
<SelectItem key={proxy.id} value={proxy.id}>
|
||||
{proxy.name}
|
||||
</SelectItem>
|
||||
))}
|
||||
</SelectContent>
|
||||
</Select>
|
||||
) : (
|
||||
<div className="flex gap-3 items-center p-3 text-sm rounded-md border text-muted-foreground">
|
||||
No proxies available. Add one to route this profile's
|
||||
traffic.
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</ScrollArea>
|
||||
|
||||
<DialogFooter className="flex-shrink-0 pt-4 border-t">
|
||||
<RippleButton variant="outline" onClick={handleClose}>
|
||||
Cancel
|
||||
</RippleButton>
|
||||
<LoadingButton
|
||||
onClick={handleCreate}
|
||||
isLoading={isCreating}
|
||||
disabled={isCreateDisabled}
|
||||
>
|
||||
Create
|
||||
</LoadingButton>
|
||||
</DialogFooter>
|
||||
</Tabs>
|
||||
</DialogContent>
|
||||
<ProxyFormDialog
|
||||
isOpen={showProxyForm}
|
||||
onClose={() => setShowProxyForm(false)}
|
||||
onSave={(proxy) => {
|
||||
setStoredProxies((prev) => [...prev, proxy]);
|
||||
setSelectedProxyId(proxy.id);
|
||||
}}
|
||||
/>
|
||||
</Dialog>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -10,6 +10,7 @@
|
||||
* - Progress bars for downloads/updates
|
||||
* - Success/error states
|
||||
* - Customizable icons and content
|
||||
* - Auto-update notifications
|
||||
*
|
||||
* Usage Examples:
|
||||
*
|
||||
@@ -23,6 +24,11 @@
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* Auto-update toast:
|
||||
* ```
|
||||
* showAutoUpdateToast("Firefox", "125.0.1");
|
||||
* ```
|
||||
*
|
||||
* Download progress toast:
|
||||
* ```
|
||||
* showToast({
|
||||
@@ -42,11 +48,11 @@
|
||||
* ```
|
||||
*/
|
||||
|
||||
import React from "react";
|
||||
import {
|
||||
LuCheckCheck,
|
||||
LuDownload,
|
||||
LuRefreshCw,
|
||||
LuRocket,
|
||||
LuTriangleAlert,
|
||||
} from "react-icons/lu";
|
||||
|
||||
@@ -90,6 +96,7 @@ interface VersionUpdateToastProps extends BaseToastProps {
|
||||
current: number;
|
||||
total: number;
|
||||
found: number;
|
||||
current_browser?: string;
|
||||
};
|
||||
}
|
||||
|
||||
@@ -126,6 +133,7 @@ function getToastIcon(type: ToastProps["type"], stage?: string) {
|
||||
);
|
||||
}
|
||||
return <LuDownload className="flex-shrink-0 w-4 h-4 text-blue-500" />;
|
||||
|
||||
case "version-update":
|
||||
return (
|
||||
<LuRefreshCw className="flex-shrink-0 w-4 h-4 text-blue-500 animate-spin" />
|
||||
@@ -138,6 +146,10 @@ function getToastIcon(type: ToastProps["type"], stage?: string) {
|
||||
return (
|
||||
<LuRefreshCw className="flex-shrink-0 w-4 h-4 text-purple-500 animate-spin" />
|
||||
);
|
||||
case "loading":
|
||||
return (
|
||||
<div className="flex-shrink-0 w-4 h-4 rounded-full border-2 border-blue-500 animate-spin border-t-transparent" />
|
||||
);
|
||||
default:
|
||||
return (
|
||||
<div className="flex-shrink-0 w-4 h-4 rounded-full border-2 border-blue-500 animate-spin border-t-transparent" />
|
||||
@@ -150,11 +162,33 @@ export function UnifiedToast(props: ToastProps) {
|
||||
const stage = "stage" in props ? props.stage : undefined;
|
||||
const progress = "progress" in props ? props.progress : undefined;
|
||||
|
||||
// Check if this is an auto-update toast
|
||||
const isAutoUpdate = title.includes("update started");
|
||||
|
||||
return (
|
||||
<div className="flex items-start p-3 w-full bg-white rounded-lg border border-gray-200 shadow-lg dark:bg-gray-800 dark:border-gray-700">
|
||||
<div className="mr-3 mt-0.5">{getToastIcon(type, stage)}</div>
|
||||
<div
|
||||
className={`flex items-start p-3 w-96 rounded-lg border shadow-lg ${
|
||||
isAutoUpdate
|
||||
? "bg-emerald-50 border-emerald-200 dark:bg-emerald-950 dark:border-emerald-800"
|
||||
: "bg-white border-gray-200 dark:bg-gray-800 dark:border-gray-700"
|
||||
}`}
|
||||
data-toast-type={isAutoUpdate ? "auto-update" : "default"}
|
||||
>
|
||||
<div className="mr-3 mt-0.5">
|
||||
{isAutoUpdate ? (
|
||||
<LuRocket className="flex-shrink-0 w-4 h-4 text-emerald-500" />
|
||||
) : (
|
||||
getToastIcon(type, stage)
|
||||
)}
|
||||
</div>
|
||||
<div className="flex-1 min-w-0">
|
||||
<p className="text-sm font-medium leading-tight text-gray-900 dark:text-white">
|
||||
<p
|
||||
className={`text-sm font-medium leading-tight ${
|
||||
isAutoUpdate
|
||||
? "text-emerald-900 dark:text-emerald-100"
|
||||
: "text-gray-900 dark:text-white"
|
||||
}`}
|
||||
>
|
||||
{title}
|
||||
</p>
|
||||
|
||||
@@ -181,26 +215,30 @@ export function UnifiedToast(props: ToastProps) {
|
||||
)}
|
||||
|
||||
{/* Version update progress */}
|
||||
{type === "version-update" && progress && "found" in progress && (
|
||||
<div className="mt-2 space-y-1">
|
||||
<p className="text-xs text-gray-600 dark:text-gray-300">
|
||||
{progress.found} new versions found so far
|
||||
</p>
|
||||
<div className="flex items-center space-x-2">
|
||||
<div className="flex-1 bg-gray-200 dark:bg-gray-700 rounded-full h-1.5 min-w-0">
|
||||
<div
|
||||
className="bg-blue-500 h-1.5 rounded-full transition-all duration-300"
|
||||
style={{
|
||||
width: `${(progress.current / progress.total) * 100}%`,
|
||||
}}
|
||||
/>
|
||||
{type === "version-update" &&
|
||||
progress &&
|
||||
"current_browser" in progress && (
|
||||
<div className="mt-2 space-y-1">
|
||||
<p className="text-xs text-gray-600 dark:text-gray-300">
|
||||
{progress.current_browser && (
|
||||
<>Looking for updates for {progress.current_browser}</>
|
||||
)}
|
||||
</p>
|
||||
<div className="flex items-center space-x-2">
|
||||
<div className="flex-1 bg-gray-200 dark:bg-gray-700 rounded-full h-1.5 min-w-0">
|
||||
<div
|
||||
className="bg-blue-500 h-1.5 rounded-full transition-all duration-300"
|
||||
style={{
|
||||
width: `${(progress.current / progress.total) * 100}%`,
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
<span className="w-8 text-xs text-right text-gray-500 whitespace-nowrap dark:text-gray-400 shrink-0">
|
||||
{progress.current}/{progress.total}
|
||||
</span>
|
||||
</div>
|
||||
<span className="w-8 text-xs text-right text-gray-500 whitespace-nowrap dark:text-gray-400 shrink-0">
|
||||
{progress.current}/{progress.total}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
)}
|
||||
|
||||
{/* Twilight update progress */}
|
||||
{type === "twilight-update" && (
|
||||
@@ -220,7 +258,13 @@ export function UnifiedToast(props: ToastProps) {
|
||||
|
||||
{/* Description */}
|
||||
{description && (
|
||||
<p className="mt-1 text-xs leading-tight text-gray-600 dark:text-gray-300">
|
||||
<p
|
||||
className={`mt-1 text-xs leading-tight ${
|
||||
isAutoUpdate
|
||||
? "text-emerald-700 dark:text-emerald-300"
|
||||
: "text-gray-600 dark:text-gray-300"
|
||||
}`}
|
||||
>
|
||||
{description}
|
||||
</p>
|
||||
)}
|
||||
|
||||
@@ -0,0 +1,81 @@
|
||||
"use client";
|
||||
|
||||
import {
|
||||
Dialog,
|
||||
DialogContent,
|
||||
DialogDescription,
|
||||
DialogFooter,
|
||||
DialogHeader,
|
||||
DialogTitle,
|
||||
} from "@/components/ui/dialog";
|
||||
import { LoadingButton } from "./loading-button";
|
||||
import { RippleButton } from "./ui/ripple";
|
||||
|
||||
interface DeleteConfirmationDialogProps {
|
||||
isOpen: boolean;
|
||||
onClose: () => void;
|
||||
onConfirm: () => void | Promise<void>;
|
||||
title: string;
|
||||
description: string;
|
||||
confirmButtonText?: string;
|
||||
isLoading?: boolean;
|
||||
profileNames?: string[];
|
||||
}
|
||||
|
||||
export function DeleteConfirmationDialog({
|
||||
isOpen,
|
||||
onClose,
|
||||
onConfirm,
|
||||
title,
|
||||
description,
|
||||
confirmButtonText = "Delete",
|
||||
isLoading = false,
|
||||
profileNames,
|
||||
}: DeleteConfirmationDialogProps) {
|
||||
const handleConfirm = async () => {
|
||||
await onConfirm();
|
||||
};
|
||||
|
||||
return (
|
||||
<Dialog open={isOpen} onOpenChange={onClose}>
|
||||
<DialogContent>
|
||||
<DialogHeader>
|
||||
<DialogTitle>{title}</DialogTitle>
|
||||
<DialogDescription>{description}</DialogDescription>
|
||||
{profileNames && profileNames.length > 0 && (
|
||||
<div className="mt-4">
|
||||
<p className="text-sm font-medium mb-2">
|
||||
Profiles to be deleted:
|
||||
</p>
|
||||
<div className="bg-muted rounded-md p-3 max-h-32 overflow-y-auto">
|
||||
<ul className="space-y-1">
|
||||
{profileNames.map((name) => (
|
||||
<li key={name} className="text-sm text-muted-foreground">
|
||||
• {name}
|
||||
</li>
|
||||
))}
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</DialogHeader>
|
||||
<DialogFooter>
|
||||
<RippleButton
|
||||
variant="outline"
|
||||
onClick={onClose}
|
||||
disabled={isLoading}
|
||||
>
|
||||
Cancel
|
||||
</RippleButton>
|
||||
<LoadingButton
|
||||
variant="destructive"
|
||||
onClick={() => void handleConfirm()}
|
||||
isLoading={isLoading}
|
||||
>
|
||||
{confirmButtonText}
|
||||
</LoadingButton>
|
||||
</DialogFooter>
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,213 @@
|
||||
"use client";

import { invoke } from "@tauri-apps/api/core";
import { useCallback, useEffect, useState } from "react";
import { toast } from "sonner";
import { LoadingButton } from "@/components/loading-button";
import {
  Dialog,
  DialogContent,
  DialogDescription,
  DialogFooter,
  DialogHeader,
  DialogTitle,
} from "@/components/ui/dialog";
import { Label } from "@/components/ui/label";
import { RadioGroup, RadioGroupItem } from "@/components/ui/radio-group";
import { ScrollArea } from "@/components/ui/scroll-area";
import type { BrowserProfile, ProfileGroup } from "@/types";
import { RippleButton } from "./ui/ripple";

interface DeleteGroupDialogProps {
  // Controls dialog visibility.
  isOpen: boolean;
  // Called when the dialog closes (Cancel or dismissal).
  onClose: () => void;
  // Group to delete; null when no group is selected.
  group: ProfileGroup | null;
  // Notifies the parent after a successful deletion so it can refresh.
  onGroupDeleted: () => void;
}

/**
 * Confirmation dialog for deleting a profile group.
 *
 * On open it loads the profiles assigned to the group and lets the user
 * choose whether those profiles are moved to the Default group (the
 * default choice) or deleted together with the group. The chosen action
 * is applied to the profiles first, then the group itself is deleted via
 * the Tauri backend.
 */
export function DeleteGroupDialog({
  isOpen,
  onClose,
  group,
  onGroupDeleted,
}: DeleteGroupDialogProps) {
  // Profiles currently assigned to `group`, fetched when the dialog opens.
  const [associatedProfiles, setAssociatedProfiles] = useState<
    BrowserProfile[]
  >([]);
  // What to do with the associated profiles: move them to Default or
  // delete them along with the group.
  const [deleteAction, setDeleteAction] = useState<"move" | "delete">("move");
  // True while the delete request chain is in flight.
  const [isDeleting, setIsDeleting] = useState(false);
  // True while the associated-profiles list is being fetched.
  const [isLoading, setIsLoading] = useState(false);
  // Last backend error message, shown inline (and also toasted).
  const [error, setError] = useState<string | null>(null);

  // Fetch all profiles from the backend and keep only those whose
  // group_id matches the group being deleted.
  const loadAssociatedProfiles = useCallback(async () => {
    if (!group) return;

    setIsLoading(true);
    setError(null);
    try {
      const allProfiles = await invoke<BrowserProfile[]>(
        "list_browser_profiles",
      );
      const groupProfiles = allProfiles.filter(
        (profile) => profile.group_id === group.id,
      );
      setAssociatedProfiles(groupProfiles);
    } catch (err) {
      console.error("Failed to load associated profiles:", err);
      setError(err instanceof Error ? err.message : "Failed to load profiles");
    } finally {
      setIsLoading(false);
    }
  }, [group]);

  // Refresh the profile list every time the dialog opens with a group.
  useEffect(() => {
    if (isOpen && group) {
      void loadAssociatedProfiles();
    }
  }, [isOpen, group, loadAssociatedProfiles]);

  // Apply the chosen action to the associated profiles, then delete the
  // group itself. Order matters: profiles are handled before the group is
  // removed so they are never left pointing at a deleted group.
  const handleDelete = useCallback(async () => {
    if (!group) return;

    setIsDeleting(true);
    setError(null);
    try {
      if (deleteAction === "delete" && associatedProfiles.length > 0) {
        // Delete all associated profiles first
        const profileNames = associatedProfiles.map((p) => p.name);
        await invoke("delete_selected_profiles", { profileNames });
      } else if (deleteAction === "move" && associatedProfiles.length > 0) {
        // Move profiles to default group (null group_id)
        const profileNames = associatedProfiles.map((p) => p.name);
        await invoke("assign_profiles_to_group", {
          profileNames,
          groupId: null,
        });
      }

      // Delete the group
      await invoke("delete_profile_group", { groupId: group.id });

      toast.success("Group deleted successfully");
      onGroupDeleted();
      onClose();
    } catch (err) {
      console.error("Failed to delete group:", err);
      const errorMessage =
        err instanceof Error ? err.message : "Failed to delete group";
      setError(errorMessage);
      toast.error(errorMessage);
    } finally {
      setIsDeleting(false);
    }
  }, [group, deleteAction, associatedProfiles, onGroupDeleted, onClose]);

  // Reset transient state before closing so the next open starts clean.
  const handleClose = useCallback(() => {
    setError(null);
    setDeleteAction("move");
    setAssociatedProfiles([]);
    onClose();
  }, [onClose]);

  return (
    <Dialog open={isOpen} onOpenChange={handleClose}>
      <DialogContent className="max-w-md">
        <DialogHeader>
          <DialogTitle>Delete Group</DialogTitle>
          <DialogDescription>
            This action cannot be undone. This will permanently delete the group
            "{group?.name}".
          </DialogDescription>
        </DialogHeader>

        <div className="space-y-4">
          {isLoading ? (
            <div className="text-sm text-muted-foreground">
              Loading associated profiles...
            </div>
          ) : (
            <>
              {associatedProfiles.length > 0 && (
                <div className="space-y-3">
                  <div className="space-y-2">
                    <Label>
                      Associated Profiles ({associatedProfiles.length})
                    </Label>
                    <ScrollArea className="h-32 w-full border rounded-md p-3">
                      <div className="space-y-1">
                        {associatedProfiles.map((profile) => (
                          <div key={profile.id} className="text-sm">
                            • {profile.name}
                          </div>
                        ))}
                      </div>
                    </ScrollArea>
                  </div>

                  <div className="space-y-3">
                    <Label>What should happen to these profiles?</Label>
                    <RadioGroup
                      value={deleteAction}
                      onValueChange={(value) =>
                        setDeleteAction(value as "move" | "delete")
                      }
                    >
                      <div className="flex items-center space-x-2">
                        <RadioGroupItem value="move" id="move" />
                        <Label htmlFor="move" className="text-sm">
                          Move profiles to Default group
                        </Label>
                      </div>
                      <div className="flex items-center space-x-2">
                        <RadioGroupItem value="delete" id="delete" />
                        <Label
                          htmlFor="delete"
                          className="text-sm text-red-600"
                        >
                          Delete profiles along with the group
                        </Label>
                      </div>
                    </RadioGroup>
                  </div>
                </div>
              )}

              {associatedProfiles.length === 0 && !isLoading && (
                <div className="text-sm text-muted-foreground">
                  This group has no associated profiles.
                </div>
              )}
            </>
          )}

          {error && (
            <div className="p-3 text-sm text-red-600 bg-red-50 rounded-md dark:bg-red-900/20 dark:text-red-400">
              {error}
            </div>
          )}
        </div>

        <DialogFooter>
          <RippleButton
            variant="outline"
            onClick={handleClose}
            disabled={isDeleting}
          >
            Cancel
          </RippleButton>
          <LoadingButton
            variant="destructive"
            isLoading={isDeleting}
            onClick={() => void handleDelete()}
            disabled={isLoading}
          >
            Delete Group
            {deleteAction === "delete" &&
              associatedProfiles.length > 0 &&
              " & Profiles"}
          </LoadingButton>
        </DialogFooter>
      </DialogContent>
    </Dialog>
  );
}
|
||||
@@ -0,0 +1,129 @@
|
||||
"use client";
|
||||
|
||||
import { invoke } from "@tauri-apps/api/core";
|
||||
import { useCallback, useEffect, useState } from "react";
|
||||
import { toast } from "sonner";
|
||||
import { LoadingButton } from "@/components/loading-button";
|
||||
import {
|
||||
Dialog,
|
||||
DialogContent,
|
||||
DialogDescription,
|
||||
DialogFooter,
|
||||
DialogHeader,
|
||||
DialogTitle,
|
||||
} from "@/components/ui/dialog";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import { Label } from "@/components/ui/label";
|
||||
import type { ProfileGroup } from "@/types";
|
||||
import { RippleButton } from "./ui/ripple";
|
||||
|
||||
interface EditGroupDialogProps {
|
||||
isOpen: boolean;
|
||||
onClose: () => void;
|
||||
group: ProfileGroup | null;
|
||||
onGroupUpdated: (group: ProfileGroup) => void;
|
||||
}
|
||||
|
||||
export function EditGroupDialog({
|
||||
isOpen,
|
||||
onClose,
|
||||
group,
|
||||
onGroupUpdated,
|
||||
}: EditGroupDialogProps) {
|
||||
const [groupName, setGroupName] = useState("");
|
||||
const [isUpdating, setIsUpdating] = useState(false);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
if (group) {
|
||||
setGroupName(group.name);
|
||||
} else {
|
||||
setGroupName("");
|
||||
}
|
||||
setError(null);
|
||||
}, [group]);
|
||||
|
||||
const handleUpdate = useCallback(async () => {
|
||||
if (!group || !groupName.trim()) return;
|
||||
|
||||
setIsUpdating(true);
|
||||
setError(null);
|
||||
try {
|
||||
const updatedGroup = await invoke<ProfileGroup>("update_profile_group", {
|
||||
groupId: group.id,
|
||||
name: groupName.trim(),
|
||||
});
|
||||
|
||||
toast.success("Group updated successfully");
|
||||
onGroupUpdated(updatedGroup);
|
||||
onClose();
|
||||
} catch (err) {
|
||||
console.error("Failed to update group:", err);
|
||||
const errorMessage =
|
||||
err instanceof Error ? err.message : "Failed to update group";
|
||||
setError(errorMessage);
|
||||
toast.error(errorMessage);
|
||||
} finally {
|
||||
setIsUpdating(false);
|
||||
}
|
||||
}, [group, groupName, onGroupUpdated, onClose]);
|
||||
|
||||
const handleClose = useCallback(() => {
|
||||
setError(null);
|
||||
onClose();
|
||||
}, [onClose]);
|
||||
|
||||
return (
|
||||
<Dialog open={isOpen} onOpenChange={handleClose}>
|
||||
<DialogContent className="max-w-md">
|
||||
<DialogHeader>
|
||||
<DialogTitle>Edit Group</DialogTitle>
|
||||
<DialogDescription>
|
||||
Update the name of the group "{group?.name}".
|
||||
</DialogDescription>
|
||||
</DialogHeader>
|
||||
|
||||
<div className="space-y-4">
|
||||
<div className="space-y-2">
|
||||
<Label htmlFor="group-name">Group Name</Label>
|
||||
<Input
|
||||
id="group-name"
|
||||
placeholder="Enter group name..."
|
||||
value={groupName}
|
||||
onChange={(e) => setGroupName(e.target.value)}
|
||||
onKeyDown={(e) => {
|
||||
if (e.key === "Enter" && groupName.trim()) {
|
||||
void handleUpdate();
|
||||
}
|
||||
}}
|
||||
disabled={isUpdating}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{error && (
|
||||
<div className="p-3 text-sm text-red-600 bg-red-50 rounded-md dark:bg-red-900/20 dark:text-red-400">
|
||||
{error}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<DialogFooter>
|
||||
<RippleButton
|
||||
variant="outline"
|
||||
onClick={handleClose}
|
||||
disabled={isUpdating}
|
||||
>
|
||||
Cancel
|
||||
</RippleButton>
|
||||
<LoadingButton
|
||||
isLoading={isUpdating}
|
||||
onClick={() => void handleUpdate()}
|
||||
disabled={!groupName.trim() || groupName === group?.name}
|
||||
>
|
||||
Update Group
|
||||
</LoadingButton>
|
||||
</DialogFooter>
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,204 @@
|
||||
"use client";

import { invoke } from "@tauri-apps/api/core";
import { useCallback, useEffect, useState } from "react";
import { GoPlus } from "react-icons/go";
import { toast } from "sonner";
import { CreateGroupDialog } from "@/components/create-group-dialog";
import { LoadingButton } from "@/components/loading-button";
import {
  Dialog,
  DialogContent,
  DialogDescription,
  DialogFooter,
  DialogHeader,
  DialogTitle,
} from "@/components/ui/dialog";
import { Label } from "@/components/ui/label";
import {
  Select,
  SelectContent,
  SelectItem,
  SelectTrigger,
  SelectValue,
} from "@/components/ui/select";
import type { ProfileGroup } from "@/types";
import { RippleButton } from "./ui/ripple";

interface GroupAssignmentDialogProps {
  // Controls dialog visibility.
  isOpen: boolean;
  // Called when the dialog closes (Cancel or dismissal).
  onClose: () => void;
  // Names of the profiles being (re)assigned.
  selectedProfiles: string[];
  // Notifies the parent after a successful assignment so it can refresh.
  onAssignmentComplete: () => void;
}

/**
 * Dialog for assigning a batch of profiles to a profile group.
 *
 * Loads the available groups on open, lets the user pick one (or
 * "Default", represented by a null group id) and submits the assignment
 * to the Tauri backend. A nested CreateGroupDialog allows creating a new
 * group inline; the new group is appended and auto-selected.
 */
export function GroupAssignmentDialog({
  isOpen,
  onClose,
  selectedProfiles,
  onAssignmentComplete,
}: GroupAssignmentDialogProps) {
  // Groups available for assignment, fetched when the dialog opens.
  const [groups, setGroups] = useState<ProfileGroup[]>([]);
  // Target group id; null means the Default (no group) option.
  const [selectedGroupId, setSelectedGroupId] = useState<string | null>(null);
  // True while the group list is being fetched.
  const [isLoading, setIsLoading] = useState(false);
  // True while the assignment request is in flight.
  const [isAssigning, setIsAssigning] = useState(false);
  // Last backend error message, shown inline (and also toasted).
  const [error, setError] = useState<string | null>(null);
  // Visibility of the nested create-group dialog.
  const [createDialogOpen, setCreateDialogOpen] = useState(false);

  // Fetch all profile groups from the backend.
  const loadGroups = useCallback(async () => {
    setIsLoading(true);
    setError(null);
    try {
      const groupList = await invoke<ProfileGroup[]>("get_profile_groups");
      setGroups(groupList);
    } catch (err) {
      console.error("Failed to load groups:", err);
      setError(err instanceof Error ? err.message : "Failed to load groups");
    } finally {
      setIsLoading(false);
    }
  }, []);

  // Submit the assignment; a null groupId assigns to Default.
  const handleAssign = useCallback(async () => {
    setIsAssigning(true);
    setError(null);
    try {
      await invoke("assign_profiles_to_group", {
        profileNames: selectedProfiles,
        groupId: selectedGroupId,
      });

      // Resolve a display name for the toast; "Unknown Group" only occurs
      // if the selected id is no longer in the loaded list.
      const groupName = selectedGroupId
        ? groups.find((g) => g.id === selectedGroupId)?.name || "Unknown Group"
        : "Default";

      toast.success(
        `Successfully assigned ${selectedProfiles.length} profile(s) to ${groupName}`,
      );
      onAssignmentComplete();
      onClose();
    } catch (err) {
      console.error("Failed to assign profiles to group:", err);
      const errorMessage =
        err instanceof Error
          ? err.message
          : "Failed to assign profiles to group";
      setError(errorMessage);
      toast.error(errorMessage);
    } finally {
      setIsAssigning(false);
    }
  }, [
    selectedProfiles,
    selectedGroupId,
    groups,
    onAssignmentComplete,
    onClose,
  ]);

  // On every open: refresh groups and reset selection/error state.
  useEffect(() => {
    if (isOpen) {
      void loadGroups();
      setSelectedGroupId(null);
      setError(null);
    }
  }, [isOpen, loadGroups]);

  return (
    <Dialog open={isOpen} onOpenChange={onClose}>
      <DialogContent className="max-w-md">
        <DialogHeader>
          <DialogTitle>Assign to Group</DialogTitle>
          <DialogDescription>
            Assign {selectedProfiles.length} selected profile(s) to a group.
          </DialogDescription>
        </DialogHeader>

        <div className="space-y-4">
          <div className="space-y-2">
            <Label>Selected Profiles:</Label>
            <div className="p-3 bg-muted rounded-md max-h-32 overflow-y-auto">
              <ul className="text-sm space-y-1">
                {selectedProfiles.map((profileName) => (
                  <li key={profileName} className="truncate">
                    • {profileName}
                  </li>
                ))}
              </ul>
            </div>
          </div>

          <div className="space-y-2">
            <div className="flex justify-between items-center">
              <Label htmlFor="group-select">Assign to Group:</Label>
              <RippleButton
                size="sm"
                variant="outline"
                className="h-7 px-2 text-xs"
                onClick={() => setCreateDialogOpen(true)}
              >
                <GoPlus className="mr-1 w-3 h-3" /> Create Group
              </RippleButton>
            </div>
            {isLoading ? (
              <div className="text-sm text-muted-foreground">
                Loading groups...
              </div>
            ) : (
              <Select
                value={selectedGroupId || "default"}
                onValueChange={(value) => {
                  // The Select cannot hold null, so "default" is mapped
                  // to a null group id.
                  setSelectedGroupId(value === "default" ? null : value);
                }}
              >
                <SelectTrigger>
                  <SelectValue placeholder="Select a group" />
                </SelectTrigger>
                <SelectContent>
                  <SelectItem value="default">Default (No Group)</SelectItem>
                  {groups.map((group) => (
                    <SelectItem key={group.id} value={group.id}>
                      {group.name}
                    </SelectItem>
                  ))}
                </SelectContent>
              </Select>
            )}
          </div>

          {error && (
            <div className="p-3 text-sm text-red-600 bg-red-50 rounded-md dark:bg-red-900/20 dark:text-red-400">
              {error}
            </div>
          )}
        </div>

        <DialogFooter>
          <RippleButton
            variant="outline"
            onClick={onClose}
            disabled={isAssigning}
          >
            Cancel
          </RippleButton>
          <LoadingButton
            isLoading={isAssigning}
            onClick={() => void handleAssign()}
            disabled={isLoading}
          >
            Assign
          </LoadingButton>
        </DialogFooter>
      </DialogContent>
      <CreateGroupDialog
        isOpen={createDialogOpen}
        onClose={() => setCreateDialogOpen(false)}
        onGroupCreated={(group) => {
          // Append the new group and auto-select it for assignment.
          setGroups((prev) => [...prev, group]);
          setSelectedGroupId(group.id);
          setCreateDialogOpen(false);
        }}
      />
    </Dialog>
  );
}
|
||||
@@ -0,0 +1,53 @@
|
||||
"use client";
|
||||
|
||||
import { Badge } from "@/components/ui/badge";
|
||||
import type { GroupWithCount } from "@/types";
|
||||
|
||||
interface GroupBadgesProps {
|
||||
selectedGroupId: string | null;
|
||||
onGroupSelect: (groupId: string) => void;
|
||||
refreshTrigger?: number;
|
||||
groups: GroupWithCount[];
|
||||
isLoading: boolean;
|
||||
}
|
||||
|
||||
export function GroupBadges({
|
||||
selectedGroupId,
|
||||
onGroupSelect,
|
||||
groups,
|
||||
isLoading,
|
||||
}: GroupBadgesProps) {
|
||||
if (isLoading && !groups.length) {
|
||||
return (
|
||||
<div className="flex flex-wrap gap-2 mb-4">
|
||||
<div className="flex items-center gap-2 px-4.5 py-1.5 text-xs">
|
||||
Loading groups...
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
if (groups.length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="flex flex-wrap gap-2 mb-4">
|
||||
{groups.map((group) => (
|
||||
<Badge
|
||||
key={group.id}
|
||||
variant={selectedGroupId === group.id ? "default" : "secondary"}
|
||||
className="flex gap-2 items-center px-3 py-1 transition-colors cursor-pointer dark:hover:bg-primary/60 hover:bg-primary/80"
|
||||
onClick={() => {
|
||||
onGroupSelect(selectedGroupId === group.id ? "default" : group.id);
|
||||
}}
|
||||
>
|
||||
<span>{group.name}</span>
|
||||
<span className="bg-background/20 text-xs px-1.5 py-0.5 rounded-sm">
|
||||
{group.count}
|
||||
</span>
|
||||
</Badge>
|
||||
))}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,219 @@
|
||||
"use client";
|
||||
|
||||
import { invoke } from "@tauri-apps/api/core";
|
||||
import { useCallback, useEffect, useState } from "react";
|
||||
import { GoPlus } from "react-icons/go";
|
||||
import { LuPencil, LuTrash2 } from "react-icons/lu";
|
||||
import { CreateGroupDialog } from "@/components/create-group-dialog";
|
||||
import { DeleteGroupDialog } from "@/components/delete-group-dialog";
|
||||
import { EditGroupDialog } from "@/components/edit-group-dialog";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import {
|
||||
Dialog,
|
||||
DialogContent,
|
||||
DialogDescription,
|
||||
DialogFooter,
|
||||
DialogHeader,
|
||||
DialogTitle,
|
||||
} from "@/components/ui/dialog";
|
||||
import { Label } from "@/components/ui/label";
|
||||
import {
|
||||
Table,
|
||||
TableBody,
|
||||
TableCell,
|
||||
TableHead,
|
||||
TableHeader,
|
||||
TableRow,
|
||||
} from "@/components/ui/table";
|
||||
import type { ProfileGroup } from "@/types";
|
||||
import { RippleButton } from "./ui/ripple";
|
||||
|
||||
interface GroupManagementDialogProps {
|
||||
isOpen: boolean;
|
||||
onClose: () => void;
|
||||
onGroupManagementComplete: () => void;
|
||||
}
|
||||
|
||||
export function GroupManagementDialog({
|
||||
isOpen,
|
||||
onClose,
|
||||
onGroupManagementComplete,
|
||||
}: GroupManagementDialogProps) {
|
||||
const [groups, setGroups] = useState<ProfileGroup[]>([]);
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
// Dialog states
|
||||
const [createDialogOpen, setCreateDialogOpen] = useState(false);
|
||||
const [editDialogOpen, setEditDialogOpen] = useState(false);
|
||||
const [deleteDialogOpen, setDeleteDialogOpen] = useState(false);
|
||||
const [selectedGroup, setSelectedGroup] = useState<ProfileGroup | null>(null);
|
||||
|
||||
const loadGroups = useCallback(async () => {
|
||||
setIsLoading(true);
|
||||
setError(null);
|
||||
try {
|
||||
const groupList = await invoke<ProfileGroup[]>("get_profile_groups");
|
||||
setGroups(groupList);
|
||||
} catch (err) {
|
||||
console.error("Failed to load groups:", err);
|
||||
setError(err instanceof Error ? err.message : "Failed to load groups");
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
}, []);
|
||||
|
||||
const handleGroupCreated = useCallback(
|
||||
(newGroup: ProfileGroup) => {
|
||||
setGroups((prev) => [...prev, newGroup]);
|
||||
onGroupManagementComplete();
|
||||
},
|
||||
[onGroupManagementComplete],
|
||||
);
|
||||
|
||||
const handleGroupUpdated = useCallback(
|
||||
(updatedGroup: ProfileGroup) => {
|
||||
setGroups((prev) =>
|
||||
prev.map((group) =>
|
||||
group.id === updatedGroup.id ? updatedGroup : group,
|
||||
),
|
||||
);
|
||||
onGroupManagementComplete();
|
||||
},
|
||||
[onGroupManagementComplete],
|
||||
);
|
||||
|
||||
const handleGroupDeleted = useCallback(() => {
|
||||
void loadGroups();
|
||||
onGroupManagementComplete();
|
||||
}, [loadGroups, onGroupManagementComplete]);
|
||||
|
||||
const handleEditGroup = useCallback((group: ProfileGroup) => {
|
||||
setSelectedGroup(group);
|
||||
setEditDialogOpen(true);
|
||||
}, []);
|
||||
|
||||
const handleDeleteGroup = useCallback((group: ProfileGroup) => {
|
||||
setSelectedGroup(group);
|
||||
setDeleteDialogOpen(true);
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
if (isOpen) {
|
||||
void loadGroups();
|
||||
}
|
||||
}, [isOpen, loadGroups]);
|
||||
|
||||
return (
|
||||
<>
|
||||
<Dialog open={isOpen} onOpenChange={onClose}>
|
||||
<DialogContent className="max-w-2xl">
|
||||
<DialogHeader>
|
||||
<DialogTitle>Manage Profile Groups</DialogTitle>
|
||||
<DialogDescription>
|
||||
Create, edit, and delete profile groups. Profiles without a group
|
||||
will appear in the "Default" group.
|
||||
</DialogDescription>
|
||||
</DialogHeader>
|
||||
|
||||
<div className="space-y-4">
|
||||
{/* Create new group button */}
|
||||
<div className="flex justify-between items-center">
|
||||
<Label>Groups</Label>
|
||||
<RippleButton
|
||||
size="sm"
|
||||
onClick={() => setCreateDialogOpen(true)}
|
||||
className="flex gap-2 items-center"
|
||||
>
|
||||
<GoPlus className="w-4 h-4" />
|
||||
Create
|
||||
</RippleButton>
|
||||
</div>
|
||||
|
||||
{error && (
|
||||
<div className="p-3 text-sm text-red-600 bg-red-50 rounded-md dark:bg-red-900/20 dark:text-red-400">
|
||||
{error}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Groups list */}
|
||||
{isLoading ? (
|
||||
<div className="text-sm text-muted-foreground">
|
||||
Loading groups...
|
||||
</div>
|
||||
) : groups.length === 0 ? (
|
||||
<div className="text-sm text-muted-foreground">
|
||||
No groups created yet. Create your first group using the button
|
||||
above.
|
||||
</div>
|
||||
) : (
|
||||
<div className="border rounded-md">
|
||||
<Table>
|
||||
<TableHeader>
|
||||
<TableRow>
|
||||
<TableHead>Name</TableHead>
|
||||
<TableHead className="w-24">Actions</TableHead>
|
||||
</TableRow>
|
||||
</TableHeader>
|
||||
<TableBody>
|
||||
{groups.map((group) => (
|
||||
<TableRow key={group.id}>
|
||||
<TableCell className="font-medium">
|
||||
{group.name}
|
||||
</TableCell>
|
||||
<TableCell>
|
||||
<div className="flex gap-1">
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
onClick={() => handleEditGroup(group)}
|
||||
>
|
||||
<LuPencil className="w-4 h-4" />
|
||||
</Button>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
onClick={() => handleDeleteGroup(group)}
|
||||
>
|
||||
<LuTrash2 className="w-4 h-4" />
|
||||
</Button>
|
||||
</div>
|
||||
</TableCell>
|
||||
</TableRow>
|
||||
))}
|
||||
</TableBody>
|
||||
</Table>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<DialogFooter>
|
||||
<RippleButton variant="outline" onClick={onClose}>
|
||||
Close
|
||||
</RippleButton>
|
||||
</DialogFooter>
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
|
||||
<CreateGroupDialog
|
||||
isOpen={createDialogOpen}
|
||||
onClose={() => setCreateDialogOpen(false)}
|
||||
onGroupCreated={handleGroupCreated}
|
||||
/>
|
||||
|
||||
<EditGroupDialog
|
||||
isOpen={editDialogOpen}
|
||||
onClose={() => setEditDialogOpen(false)}
|
||||
group={selectedGroup}
|
||||
onGroupUpdated={handleGroupUpdated}
|
||||
/>
|
||||
|
||||
<DeleteGroupDialog
|
||||
isOpen={deleteDialogOpen}
|
||||
onClose={() => setDeleteDialogOpen(false)}
|
||||
group={selectedGroup}
|
||||
onGroupDeleted={handleGroupDeleted}
|
||||
/>
|
||||
</>
|
||||
);
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user