mirror of
https://github.com/zhom/donutbrowser.git
synced 2026-05-05 01:55:12 +02:00
Compare commits
74 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| e3d487f846 | |||
| b4b7609534 | |||
| 8bf40fbc62 | |||
| 630cf74ab9 | |||
| b8d8039c80 | |||
| f1c4245c5a | |||
| 5cc816ecc5 | |||
| 7409cf7851 | |||
| d36d5430ca | |||
| 7518ee9e87 | |||
| ab8db06dfb | |||
| 0b43c6776b | |||
| 564c57fefc | |||
| d3cf91c5d3 | |||
| 729307be7b | |||
| c736eb9195 | |||
| 68d0741f38 | |||
| ae59ba802e | |||
| 73de070478 | |||
| 187d3414d8 | |||
| cc74589243 | |||
| 55974d17be | |||
| cbd0312618 | |||
| 41205ab31d | |||
| bfec778d19 | |||
| 0cb738c5ae | |||
| a82a73b3f4 | |||
| 49eca7271f | |||
| 487c72cbb7 | |||
| aec4a0c3af | |||
| c37675bce2 | |||
| ccdc411e7f | |||
| bec3fa142c | |||
| d725040b6e | |||
| 81c00538a9 | |||
| 1c5444928d | |||
| 85f8630389 | |||
| 57ead61139 | |||
| ef00c59063 | |||
| a61f42b645 | |||
| 3dd66069b5 | |||
| 14c7ded062 | |||
| d58b68fd50 | |||
| 3e69fea338 | |||
| fe2125beba | |||
| 23cfa84998 | |||
| 3e3ec29f58 | |||
| b1b91e94c0 | |||
| c624196dbb | |||
| b24568043c | |||
| d201cc90d1 | |||
| a118ccc349 | |||
| effe229067 | |||
| 98a8369f60 | |||
| f7ae299771 | |||
| c43f141907 | |||
| cd33accb1a | |||
| ca89b917f4 | |||
| 6ad183ab89 | |||
| c83950bee7 | |||
| 0047c80967 | |||
| 3d7bd2b14c | |||
| 8899e58987 | |||
| acf8651bd1 | |||
| ef534ee779 | |||
| 75bb10cf61 | |||
| 6f9e0de633 | |||
| 39c2a9f6f0 | |||
| 4b6f08fca3 | |||
| 24eff75d4e | |||
| 11869855e9 | |||
| 0d1f1f1497 | |||
| e8026d817f | |||
| d1ca4273de |
@@ -31,7 +31,7 @@ jobs:
|
||||
# build-mode: none
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 #v6.0.1
|
||||
|
||||
- name: Set up pnpm package manager
|
||||
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 #v4.2.0
|
||||
@@ -39,7 +39,7 @@ jobs:
|
||||
run_install: false
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 #v6.0.0
|
||||
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f #v6.1.0
|
||||
with:
|
||||
node-version-file: .node-version
|
||||
cache: "pnpm"
|
||||
@@ -57,7 +57,7 @@ jobs:
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libgtk-3-dev libayatana-appindicator3-dev librsvg2-dev pkg-config xdg-utils
|
||||
|
||||
- name: Rust cache
|
||||
uses: swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 #v2.8.1
|
||||
uses: swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 #v2.8.2
|
||||
with:
|
||||
workdir: ./src-tauri
|
||||
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
name: Contributors
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
@@ -19,7 +21,7 @@ jobs:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 #v6.0.1
|
||||
- name: Contribute List
|
||||
uses: akhilmhdh/contributors-readme-action@83ea0b4f1ac928fbfe88b9e8460a932a528eb79f #v2.3.11
|
||||
env:
|
||||
|
||||
@@ -13,7 +13,7 @@ jobs:
|
||||
security-scan:
|
||||
name: Security Vulnerability Scan
|
||||
if: ${{ github.actor == 'dependabot[bot]' }}
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@b77c075a1235514558f0eb88dbd31e22c45e0cd2" # v2.3.0
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@375a0e8ebdc98e99b02ac4338a724f5750f21213" # v2.3.1
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
|
||||
@@ -15,7 +15,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 #v6.0.1
|
||||
|
||||
- name: Get issue templates
|
||||
id: get-templates
|
||||
@@ -49,7 +49,7 @@ jobs:
|
||||
|
||||
- name: Validate issue with AI
|
||||
id: validate
|
||||
uses: actions/ai-inference@a1c11829223a786afe3b5663db904a3aa1eac3a2 # v2.0.1
|
||||
uses: actions/ai-inference@334892bb203895caaed82ec52d23c1ed9385151e # v2.0.4
|
||||
with:
|
||||
prompt-file: issue_analysis.txt
|
||||
system-prompt: |
|
||||
@@ -115,13 +115,14 @@ jobs:
|
||||
- name: Parse validation result and take action
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
RESPONSE_FILE: ${{ steps.validate.outputs.response-file }}
|
||||
RESPONSE: ${{ steps.validate.outputs.response }}
|
||||
run: |
|
||||
# Prefer reading from the response file to avoid output truncation
|
||||
RESPONSE_FILE='${{ steps.validate.outputs.response-file }}'
|
||||
if [ -n "$RESPONSE_FILE" ] && [ -f "$RESPONSE_FILE" ]; then
|
||||
RAW_OUTPUT=$(cat "$RESPONSE_FILE")
|
||||
else
|
||||
RAW_OUTPUT='${{ steps.validate.outputs.response }}'
|
||||
RAW_OUTPUT="$RESPONSE"
|
||||
fi
|
||||
|
||||
# Extract JSON if wrapped in markdown code fences; otherwise use raw
|
||||
|
||||
@@ -34,7 +34,7 @@ jobs:
|
||||
run: git config --global core.autocrlf false
|
||||
|
||||
- name: Checkout repository code
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 #v6.0.1
|
||||
|
||||
- name: Set up pnpm package manager
|
||||
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 #v4.2.0
|
||||
@@ -42,7 +42,7 @@ jobs:
|
||||
run_install: false
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 #v6.0.0
|
||||
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f #v6.1.0
|
||||
with:
|
||||
node-version-file: .node-version
|
||||
cache: "pnpm"
|
||||
|
||||
@@ -41,7 +41,7 @@ jobs:
|
||||
run: git config --global core.autocrlf false
|
||||
|
||||
- name: Checkout repository code
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 #v6.0.1
|
||||
|
||||
- name: Set up pnpm package manager
|
||||
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 #v4.2.0
|
||||
@@ -49,7 +49,7 @@ jobs:
|
||||
run_install: false
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 #v6.0.0
|
||||
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f #v6.1.0
|
||||
with:
|
||||
node-version-file: .node-version
|
||||
cache: "pnpm"
|
||||
|
||||
@@ -50,7 +50,7 @@ jobs:
|
||||
scan-scheduled:
|
||||
name: Scheduled Security Scan
|
||||
if: ${{ github.event_name == 'push' || github.event_name == 'schedule' }}
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@b77c075a1235514558f0eb88dbd31e22c45e0cd2" # v2.3.0
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@375a0e8ebdc98e99b02ac4338a724f5750f21213" # v2.3.1
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
@@ -63,7 +63,7 @@ jobs:
|
||||
scan-pr:
|
||||
name: PR Security Scan
|
||||
if: ${{ github.event_name == 'pull_request' || github.event_name == 'merge_group' }}
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@b77c075a1235514558f0eb88dbd31e22c45e0cd2" # v2.3.0
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@375a0e8ebdc98e99b02ac4338a724f5750f21213" # v2.3.1
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
|
||||
@@ -29,7 +29,7 @@ jobs:
|
||||
security-scan:
|
||||
name: Security Vulnerability Scan
|
||||
if: ${{ github.event_name == 'pull_request' || github.event_name == 'merge_group' }}
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@b77c075a1235514558f0eb88dbd31e22c45e0cd2" # v2.3.0
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@375a0e8ebdc98e99b02ac4338a724f5750f21213" # v2.3.1
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
name: Generate Release Notes
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
workflow_run:
|
||||
workflows: ["Release"]
|
||||
types:
|
||||
- completed
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
@@ -11,19 +13,40 @@ permissions:
|
||||
jobs:
|
||||
generate-release-notes:
|
||||
runs-on: ubuntu-latest
|
||||
if: startsWith(github.ref, 'refs/tags/v') && !github.event.release.prerelease
|
||||
if: github.event.workflow_run.conclusion == 'success' && startsWith(github.event.workflow_run.head_branch, 'v')
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 #v6.0.1
|
||||
with:
|
||||
fetch-depth: 0 # Fetch full history to compare with previous release
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Get release info
|
||||
id: get-release
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
TAG_NAME: ${{ github.event.workflow_run.head_branch }}
|
||||
run: |
|
||||
echo "tag-name=$TAG_NAME" >> $GITHUB_OUTPUT
|
||||
|
||||
# Get release info by tag
|
||||
RELEASE_INFO=$(gh api /repos/${{ github.repository }}/releases/tags/$TAG_NAME)
|
||||
RELEASE_ID=$(echo "$RELEASE_INFO" | jq -r '.id')
|
||||
IS_PRERELEASE=$(echo "$RELEASE_INFO" | jq -r '.prerelease')
|
||||
|
||||
echo "release-id=$RELEASE_ID" >> $GITHUB_OUTPUT
|
||||
echo "is-prerelease=$IS_PRERELEASE" >> $GITHUB_OUTPUT
|
||||
|
||||
if [ "$IS_PRERELEASE" = "true" ]; then
|
||||
echo "Skipping release notes generation for prerelease"
|
||||
fi
|
||||
|
||||
- name: Get previous release tag
|
||||
id: get-previous-tag
|
||||
if: steps.get-release.outputs.is-prerelease == 'false'
|
||||
env:
|
||||
CURRENT_TAG: ${{ steps.get-release.outputs.tag-name }}
|
||||
run: |
|
||||
# Get the previous release tag (excluding the current one)
|
||||
CURRENT_TAG="${{ github.ref_name }}"
|
||||
PREVIOUS_TAG=$(git tag --sort=-version:refname | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' | grep -v "$CURRENT_TAG" | head -n 1)
|
||||
|
||||
if [ -z "$PREVIOUS_TAG" ]; then
|
||||
@@ -38,16 +61,16 @@ jobs:
|
||||
|
||||
- name: Get commit messages between releases
|
||||
id: get-commits
|
||||
if: steps.get-release.outputs.is-prerelease == 'false'
|
||||
env:
|
||||
PREVIOUS_TAG: ${{ steps.get-previous-tag.outputs.previous-tag }}
|
||||
CURRENT_TAG: ${{ steps.get-previous-tag.outputs.current-tag }}
|
||||
run: |
|
||||
# Get commit messages between previous and current release
|
||||
PREVIOUS_TAG="${{ steps.get-previous-tag.outputs.previous-tag }}"
|
||||
CURRENT_TAG="${{ steps.get-previous-tag.outputs.current-tag }}"
|
||||
|
||||
# Get commit log with detailed format
|
||||
COMMIT_LOG=$(git log --pretty=format:"- %s (%h by %an)" $PREVIOUS_TAG..$CURRENT_TAG --no-merges)
|
||||
COMMIT_LOG=$(git log --pretty=format:"- %s (%h by %an)" "$PREVIOUS_TAG".."$CURRENT_TAG" --no-merges)
|
||||
|
||||
# Get changed files summary
|
||||
CHANGED_FILES=$(git diff --name-status $PREVIOUS_TAG..$CURRENT_TAG | head -20)
|
||||
CHANGED_FILES=$(git diff --name-status "$PREVIOUS_TAG".."$CURRENT_TAG" | head -20)
|
||||
|
||||
# Save to files for AI processing
|
||||
echo "$COMMIT_LOG" > commits.txt
|
||||
@@ -58,7 +81,8 @@ jobs:
|
||||
|
||||
- name: Generate release notes with AI
|
||||
id: generate-notes
|
||||
uses: actions/ai-inference@a1c11829223a786afe3b5663db904a3aa1eac3a2 # v2.0.1
|
||||
if: steps.get-release.outputs.is-prerelease == 'false'
|
||||
uses: actions/ai-inference@334892bb203895caaed82ec52d23c1ed9385151e # v2.0.4
|
||||
with:
|
||||
prompt-file: commits.txt
|
||||
system-prompt: |
|
||||
@@ -101,23 +125,27 @@ jobs:
|
||||
model: gpt-4o
|
||||
|
||||
- name: Update release with generated notes
|
||||
if: steps.get-release.outputs.is-prerelease == 'false'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
RESPONSE_FILE: ${{ steps.generate-notes.outputs.response-file }}
|
||||
RESPONSE_OUTPUT: ${{ steps.generate-notes.outputs.response }}
|
||||
RELEASE_ID: ${{ steps.get-release.outputs.release-id }}
|
||||
run: |
|
||||
# Prefer reading from the response file to avoid output truncation
|
||||
RESPONSE_FILE='${{ steps.generate-notes.outputs.response-file }}'
|
||||
if [ -n "$RESPONSE_FILE" ] && [ -f "$RESPONSE_FILE" ]; then
|
||||
RELEASE_NOTES=$(cat "$RESPONSE_FILE")
|
||||
else
|
||||
RELEASE_NOTES='${{ steps.generate-notes.outputs.response }}'
|
||||
RELEASE_NOTES="$RESPONSE_OUTPUT"
|
||||
fi
|
||||
|
||||
# Update the release with the generated notes
|
||||
gh api --method PATCH /repos/${{ github.repository }}/releases/${{ github.event.release.id }} \
|
||||
gh api --method PATCH /repos/${{ github.repository }}/releases/"$RELEASE_ID" \
|
||||
--field body="$RELEASE_NOTES"
|
||||
|
||||
echo "✅ Release notes updated successfully!"
|
||||
|
||||
- name: Cleanup
|
||||
if: always()
|
||||
run: |
|
||||
rm -f commits.txt changes.txt
|
||||
|
||||
@@ -13,7 +13,7 @@ env:
|
||||
jobs:
|
||||
security-scan:
|
||||
name: Security Vulnerability Scan
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@b77c075a1235514558f0eb88dbd31e22c45e0cd2" # v2.3.0
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@375a0e8ebdc98e99b02ac4338a724f5750f21213" # v2.3.1
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
@@ -105,7 +105,7 @@ jobs:
|
||||
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
|
||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 #v6.0.1
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 #v4.2.0
|
||||
@@ -113,7 +113,7 @@ jobs:
|
||||
run_install: false
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 #v6.0.0
|
||||
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f #v6.1.0
|
||||
with:
|
||||
node-version-file: .node-version
|
||||
cache: "pnpm"
|
||||
@@ -131,7 +131,7 @@ jobs:
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libgtk-3-dev libayatana-appindicator3-dev librsvg2-dev pkg-config xdg-utils
|
||||
|
||||
- name: Rust cache
|
||||
uses: swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 #v2.8.1
|
||||
uses: swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 #v2.8.2
|
||||
with:
|
||||
workdir: ./src-tauri
|
||||
|
||||
|
||||
@@ -12,7 +12,7 @@ env:
|
||||
jobs:
|
||||
security-scan:
|
||||
name: Security Vulnerability Scan
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@b77c075a1235514558f0eb88dbd31e22c45e0cd2" # v2.3.0
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@375a0e8ebdc98e99b02ac4338a724f5750f21213" # v2.3.1
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
@@ -104,7 +104,7 @@ jobs:
|
||||
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
|
||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 #v6.0.1
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 #v4.2.0
|
||||
@@ -112,7 +112,7 @@ jobs:
|
||||
run_install: false
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 #v6.0.0
|
||||
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f #v6.1.0
|
||||
with:
|
||||
node-version-file: .node-version
|
||||
cache: "pnpm"
|
||||
@@ -130,7 +130,7 @@ jobs:
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libgtk-3-dev libayatana-appindicator3-dev librsvg2-dev pkg-config xdg-utils
|
||||
|
||||
- name: Rust cache
|
||||
uses: swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 #v2.8.1
|
||||
uses: swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 #v2.8.2
|
||||
with:
|
||||
workdir: ./src-tauri
|
||||
|
||||
|
||||
@@ -21,6 +21,6 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout Actions Repository
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 #v6.0.1
|
||||
- name: Spell Check Repo
|
||||
uses: crate-ci/typos@626c4bedb751ce0b7f03262ca97ddda9a076ae1c #v1.39.2
|
||||
uses: crate-ci/typos@2d0ce569feab1f8752f1dde43cc2f2aa53236e06 #v1.40.0
|
||||
|
||||
@@ -12,7 +12,7 @@ jobs:
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
|
||||
- uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
stale-issue-message: "This issue has been inactive for 60 days. Please respond to keep it open."
|
||||
|
||||
Vendored
+9
-3
@@ -54,6 +54,7 @@
|
||||
"esac",
|
||||
"esbuild",
|
||||
"etree",
|
||||
"firstrun",
|
||||
"flate",
|
||||
"frontmost",
|
||||
"geoip",
|
||||
@@ -66,10 +67,12 @@
|
||||
"hkcu",
|
||||
"hooksconfig",
|
||||
"hookspath",
|
||||
"Hoverable",
|
||||
"icns",
|
||||
"idlelib",
|
||||
"idletime",
|
||||
"idna",
|
||||
"infobars",
|
||||
"Inno",
|
||||
"kdeglobals",
|
||||
"keras",
|
||||
@@ -77,6 +80,7 @@
|
||||
"killall",
|
||||
"Kolkata",
|
||||
"kreadconfig",
|
||||
"langpack",
|
||||
"launchservices",
|
||||
"letterboxing",
|
||||
"libatk",
|
||||
@@ -101,13 +105,12 @@
|
||||
"mstone",
|
||||
"msvc",
|
||||
"msys",
|
||||
"Mullvad",
|
||||
"mullvadbrowser",
|
||||
"mypy",
|
||||
"noarchive",
|
||||
"nobrowse",
|
||||
"noconfirm",
|
||||
"nodecar",
|
||||
"NODELAY",
|
||||
"nodemon",
|
||||
"norestart",
|
||||
"NSIS",
|
||||
@@ -139,6 +142,7 @@
|
||||
"pyoxidizer",
|
||||
"pytest",
|
||||
"pyyaml",
|
||||
"reportingpolicy",
|
||||
"reqwest",
|
||||
"ridedott",
|
||||
"rlib",
|
||||
@@ -149,6 +153,7 @@
|
||||
"screeninfo",
|
||||
"selectables",
|
||||
"serde",
|
||||
"sessionstore",
|
||||
"setpriority",
|
||||
"setsid",
|
||||
"SETTINGCHANGE",
|
||||
@@ -181,9 +186,9 @@
|
||||
"timedatectl",
|
||||
"titlebar",
|
||||
"tkinter",
|
||||
"Torbrowser",
|
||||
"tqdm",
|
||||
"trackingprotection",
|
||||
"trailhead",
|
||||
"turbopack",
|
||||
"turtledemo",
|
||||
"udeps",
|
||||
@@ -192,6 +197,7 @@
|
||||
"unrs",
|
||||
"urlencoding",
|
||||
"urllib",
|
||||
"utoipa",
|
||||
"venv",
|
||||
"vercel",
|
||||
"VERYSILENT",
|
||||
|
||||
@@ -12,6 +12,16 @@ Do keep in mind before you start working on an issue / posting a PR:
|
||||
- Confirm if other contributors are working on the same issue
|
||||
- Check if the feature aligns with our roadmap and project goals
|
||||
|
||||
## Contributor License Agreement
|
||||
|
||||
By contributing to Donut Browser, you agree that your contributions will be licensed under the same terms as the project. You must agree to our [Contributor License Agreement](CONTRIBUTOR_LICENSE_AGREEMENT.md) before your contributions can be accepted. This agreement ensures that:
|
||||
|
||||
- Your contributions can be used in the open source version of Donut Browser (licensed under AGPL-3.0)
|
||||
- Donut Browser can offer commercial licenses for the software, including your contributions
|
||||
- You retain all rights to use your contributions for any other purpose
|
||||
|
||||
When you submit your first pull request, you acknowledge that you agree to the terms of the Contributor License Agreement.
|
||||
|
||||
## Tips & Things to Consider
|
||||
|
||||
- PRs with tests are highly appreciated
|
||||
|
||||
@@ -0,0 +1,15 @@
|
||||
# Donut Browser Software Grant and Contributor License Agreement ("Agreement")
|
||||
|
||||
This agreement is based on the Apache Software Foundation Contributor License Agreement. (v r190612)
|
||||
|
||||
Thank you for your interest in the Donut Browser project ("Donut Browser" or "the Project"). In order to clarify the intellectual property license granted with Contributions from any person or entity, Donut Browser must have a Contributor License Agreement (CLA) on file that has been agreed to by each Contributor, indicating agreement to the license terms below. This license is for your protection as a Contributor as well as the protection of Donut Browser and its users; it does not change your rights to use your own Contributions for any other purpose. This Agreement allows an individual to contribute to Donut Browser on that individual's own behalf, or an entity (the "Corporation") to submit Contributions to Donut Browser, to authorize Contributions submitted by its designated employees to Donut Browser, and to grant copyright and patent licenses thereto.
|
||||
|
||||
You accept and agree to the following terms and conditions for Your present and future Contributions submitted to Donut Browser. Except for the license granted herein to Donut Browser and recipients of software distributed by Donut Browser, You reserve all right, title, and interest in and to Your Contributions.
|
||||
|
||||
1. Definitions. "You" (or "Your") shall mean the copyright owner or legal entity authorized by the copyright owner that is making this Agreement with Donut Browser. For legal entities, the entity making a Contribution and all other entities that control, are controlled by, or are under common control with that entity are considered to be a single Contributor. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "Contribution" shall mean any work, as well as any modifications or additions to an existing work, that is intentionally submitted by You to Donut Browser for inclusion in, or documentation of, any of the products owned or managed by Donut Browser (the "Work"). For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to Donut Browser or its representatives, including but not limited to communication on electronic mailing lists, source code control systems (such as GitHub), and issue tracking systems that are managed by, or on behalf of, Donut Browser for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by You as "Not a Contribution."
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of this Agreement, You hereby grant to Donut Browser and to recipients of software distributed by Donut Browser a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, sublicense, and distribute Your Contributions and such derivative works under any license terms, including but not limited to the GNU Affero General Public License version 3 (AGPL-3.0) and any commercial or proprietary license terms that Donut Browser may choose to offer. This grant includes the right for Donut Browser to offer the Work, including Your Contributions, under multiple licenses simultaneously (dual or multi-licensing), including both open source and commercial licenses.
|
||||
3. Grant of Patent License. Subject to the terms and conditions of this Agreement, You hereby grant to Donut Browser and to recipients of software distributed by Donut Browser a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by You that are necessarily infringed by Your Contribution(s) alone or by combination of Your Contribution(s) with the Work to which such Contribution(s) were submitted. If any entity institutes patent litigation against You or any other entity (including a cross-claim or counterclaim in a lawsuit) alleging that your Contribution, or the Work to which you have contributed, constitutes direct or contributory patent infringement, then any patent licenses granted to that entity under this Agreement for that Contribution or Work shall terminate as of the date such litigation is filed.
|
||||
4. You represent that You are legally entitled to grant the above license. If You are an individual, and if Your employer(s) has rights to intellectual property that you create that includes Your Contributions, you represent that You have received permission to make Contributions on behalf of that employer, or that Your employer has waived such rights for your Contributions to Donut Browser. If You are a Corporation, any individual who makes a contribution from an account associated with You will be considered authorized to Contribute on Your behalf.
|
||||
5. You represent that each of Your Contributions is Your original creation (see section 7 for submissions on behalf of others).
|
||||
6. You are not expected to provide support for Your Contributions, except to the extent You desire to provide support. You may provide support for free, for a fee, or not at all. Unless required by applicable law or agreed to in writing, You provide Your Contributions on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
7. Should You wish to submit work that is not Your original creation, You may submit it to Donut Browser separately from any Contribution, identifying the complete details of its source and of any license or other restriction (including, but not limited to, related patents, trademarks, and license agreements) of which you are personally aware, and conspicuously marking the work as "Submitted on behalf of a third-party: [named here]".
|
||||
@@ -29,6 +29,9 @@
|
||||
}
|
||||
},
|
||||
"css": {
|
||||
"parser": {
|
||||
"tailwindDirectives": true
|
||||
},
|
||||
"formatter": {
|
||||
"quoteStyle": "double"
|
||||
}
|
||||
|
||||
+4
-1
@@ -10,6 +10,7 @@
|
||||
"cssVariables": true,
|
||||
"prefix": ""
|
||||
},
|
||||
"iconLibrary": "react-icons",
|
||||
"aliases": {
|
||||
"components": "@/components",
|
||||
"utils": "@/lib/utils",
|
||||
@@ -17,5 +18,7 @@
|
||||
"lib": "@/lib",
|
||||
"hooks": "@/hooks"
|
||||
},
|
||||
"iconLibrary": "lucide"
|
||||
"registries": {
|
||||
"@animate-ui": "https://animate-ui.com/r/{name}.json"
|
||||
}
|
||||
}
|
||||
|
||||
Vendored
+1
-1
@@ -1,6 +1,6 @@
|
||||
/// <reference types="next" />
|
||||
/// <reference types="next/image-types/global" />
|
||||
/// <reference path="./dist/types/routes.d.ts" />
|
||||
import "./dist/dev/types/routes.d.ts";
|
||||
|
||||
// NOTE: This file should not be edited
|
||||
// see https://nextjs.org/docs/app/api-reference/config/typescript for more information.
|
||||
|
||||
@@ -21,15 +21,15 @@
|
||||
"author": "",
|
||||
"license": "AGPL-3.0",
|
||||
"dependencies": {
|
||||
"@types/node": "^24.10.0",
|
||||
"@types/node": "^25.0.3",
|
||||
"commander": "^14.0.2",
|
||||
"donutbrowser-camoufox-js": "^0.7.0",
|
||||
"dotenv": "^17.2.3",
|
||||
"fingerprint-generator": "^2.1.76",
|
||||
"fingerprint-generator": "^2.1.78",
|
||||
"get-port": "^7.1.0",
|
||||
"nodemon": "^3.1.11",
|
||||
"playwright-core": "^1.56.1",
|
||||
"proxy-chain": "^2.5.9",
|
||||
"playwright-core": "^1.57.0",
|
||||
"proxy-chain": "^2.7.0",
|
||||
"tmp": "^0.2.5",
|
||||
"ts-node": "^10.9.2",
|
||||
"typescript": "^5.9.3"
|
||||
|
||||
+18
-18
@@ -2,7 +2,7 @@
|
||||
"name": "donutbrowser",
|
||||
"private": true,
|
||||
"license": "AGPL-3.0",
|
||||
"version": "0.13.0",
|
||||
"version": "0.13.8",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "next dev --turbopack",
|
||||
@@ -41,7 +41,7 @@
|
||||
"@radix-ui/react-tabs": "^1.1.13",
|
||||
"@radix-ui/react-tooltip": "^1.2.8",
|
||||
"@tanstack/react-table": "^8.21.3",
|
||||
"@tauri-apps/api": "^2.9.0",
|
||||
"@tauri-apps/api": "^2.9.1",
|
||||
"@tauri-apps/plugin-deep-link": "^2.4.5",
|
||||
"@tauri-apps/plugin-dialog": "^2.4.2",
|
||||
"@tauri-apps/plugin-fs": "~2.4.4",
|
||||
@@ -51,33 +51,33 @@
|
||||
"class-variance-authority": "^0.7.1",
|
||||
"clsx": "^2.1.1",
|
||||
"cmdk": "^1.1.1",
|
||||
"color": "^5.0.2",
|
||||
"color": "^5.0.3",
|
||||
"flag-icons": "^7.5.0",
|
||||
"lucide-react": "^0.555.0",
|
||||
"motion": "^12.23.24",
|
||||
"next": "^15.5.6",
|
||||
"lucide-react": "^0.562.0",
|
||||
"motion": "^12.23.26",
|
||||
"next": "^16.1.0",
|
||||
"next-themes": "^0.4.6",
|
||||
"radix-ui": "^1.4.3",
|
||||
"react": "^19.2.0",
|
||||
"react-dom": "^19.2.0",
|
||||
"react": "^19.2.3",
|
||||
"react-dom": "^19.2.3",
|
||||
"react-icons": "^5.5.0",
|
||||
"recharts": "2.15.4",
|
||||
"recharts": "3.6.0",
|
||||
"sonner": "^2.0.7",
|
||||
"tailwind-merge": "^3.4.0",
|
||||
"tauri-plugin-macos-permissions-api": "^2.3.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "2.2.3",
|
||||
"@tailwindcss/postcss": "^4.1.17",
|
||||
"@tauri-apps/cli": "^2.9.4",
|
||||
"@biomejs/biome": "2.3.10",
|
||||
"@tailwindcss/postcss": "^4.1.18",
|
||||
"@tauri-apps/cli": "^2.9.6",
|
||||
"@types/color": "^4.2.0",
|
||||
"@types/node": "^24.10.0",
|
||||
"@types/react": "^19.2.3",
|
||||
"@types/react-dom": "^19.2.2",
|
||||
"@vitejs/plugin-react": "^5.1.0",
|
||||
"@types/node": "^25.0.3",
|
||||
"@types/react": "^19.2.7",
|
||||
"@types/react-dom": "^19.2.3",
|
||||
"@vitejs/plugin-react": "^5.1.2",
|
||||
"husky": "^9.1.7",
|
||||
"lint-staged": "^16.2.6",
|
||||
"tailwindcss": "^4.1.17",
|
||||
"lint-staged": "^16.2.7",
|
||||
"tailwindcss": "^4.1.18",
|
||||
"ts-unused-exports": "^11.0.1",
|
||||
"tw-animate-css": "^1.4.0",
|
||||
"typescript": "~5.9.3"
|
||||
|
||||
Generated
+1275
-1256
File diff suppressed because it is too large
Load Diff
Generated
+212
-218
File diff suppressed because it is too large
Load Diff
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "donutbrowser"
|
||||
version = "0.13.0"
|
||||
version = "0.13.8"
|
||||
description = "Simple Yet Powerful Anti-Detect Browser"
|
||||
authors = ["zhom@github"]
|
||||
edition = "2021"
|
||||
@@ -39,6 +39,7 @@ tauri-plugin-dialog = "2"
|
||||
tauri-plugin-macos-permissions = "2"
|
||||
tauri-plugin-log = "2"
|
||||
log = "0.4"
|
||||
env_logger = "0.11"
|
||||
|
||||
directories = "6"
|
||||
reqwest = { version = "0.12", features = ["json", "stream", "socks"] }
|
||||
@@ -49,14 +50,14 @@ base64 = "0.22"
|
||||
libc = "0.2"
|
||||
async-trait = "0.1"
|
||||
futures-util = "0.3"
|
||||
zip = "6"
|
||||
zip = "7"
|
||||
tar = "0"
|
||||
bzip2 = "0"
|
||||
flate2 = "1"
|
||||
lzma-rs = "0"
|
||||
msi-extract = "0"
|
||||
|
||||
uuid = { version = "1.18", features = ["v4", "serde"] }
|
||||
uuid = { version = "1.19", features = ["v4", "serde"] }
|
||||
url = "2.5"
|
||||
urlencoding = "2.1"
|
||||
chrono = { version = "0.4", features = ["serde"] }
|
||||
@@ -90,6 +91,7 @@ windows = { version = "0.62", features = [
|
||||
"Win32_System_Threading",
|
||||
"Win32_System_Diagnostics_Debug",
|
||||
"Win32_System_SystemInformation",
|
||||
"Win32_System_IO",
|
||||
"Win32_Security",
|
||||
"Win32_Storage_FileSystem",
|
||||
"Win32_System_Registry",
|
||||
|
||||
@@ -42,6 +42,10 @@ fn main() {
|
||||
println!("cargo:rerun-if-changed=src/proxy_runner.rs");
|
||||
println!("cargo:rerun-if-changed=src/proxy_storage.rs");
|
||||
|
||||
// Tell Cargo to rebuild when binaries directory contents change
|
||||
// This ensures tauri_build is re-run after sidecar binaries are copied
|
||||
println!("cargo:rerun-if-changed=binaries");
|
||||
|
||||
// Only run tauri_build if all external binaries exist
|
||||
// This allows building donut-proxy sidecar without the other binaries present
|
||||
if external_binaries_exist() {
|
||||
|
||||
@@ -3,9 +3,18 @@
|
||||
"identifier": "default",
|
||||
"description": "enables the default permissions",
|
||||
"windows": ["main"],
|
||||
"webviews": ["main"],
|
||||
"permissions": [
|
||||
"core:default",
|
||||
"core:event:default",
|
||||
"core:event:allow-listen",
|
||||
"core:event:allow-emit",
|
||||
"core:event:allow-emit-to",
|
||||
"core:event:allow-unlisten",
|
||||
"core:image:default",
|
||||
"core:menu:default",
|
||||
"core:path:default",
|
||||
"core:tray:default",
|
||||
"core:webview:default",
|
||||
"core:window:default",
|
||||
"core:window:allow-start-dragging",
|
||||
"core:window:allow-close",
|
||||
|
||||
+36
-404
@@ -292,7 +292,6 @@ pub fn is_browser_version_nightly(
|
||||
// This will be handled in the API parsing, so this fallback is for cached versions
|
||||
is_nightly_version(version)
|
||||
}
|
||||
"mullvad-browser" | "tor-browser" => is_nightly_version(version),
|
||||
"chromium" => {
|
||||
// Chromium builds are generally stable snapshots
|
||||
false
|
||||
@@ -349,7 +348,6 @@ pub struct ApiClient {
|
||||
firefox_dev_api_base: String,
|
||||
github_api_base: String,
|
||||
chromium_api_base: String,
|
||||
tor_archive_base: String,
|
||||
}
|
||||
|
||||
impl ApiClient {
|
||||
@@ -366,7 +364,6 @@ impl ApiClient {
|
||||
github_api_base: "https://api.github.com".to_string(),
|
||||
chromium_api_base: "https://commondatastorage.googleapis.com/chromium-browser-snapshots"
|
||||
.to_string(),
|
||||
tor_archive_base: "https://archive.torproject.org/tor-package-archive/torbrowser".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -439,7 +436,6 @@ impl ApiClient {
|
||||
firefox_dev_api_base: String,
|
||||
github_api_base: String,
|
||||
chromium_api_base: String,
|
||||
tor_archive_base: String,
|
||||
) -> Self {
|
||||
Self {
|
||||
client: Client::new(),
|
||||
@@ -447,7 +443,6 @@ impl ApiClient {
|
||||
firefox_dev_api_base,
|
||||
github_api_base,
|
||||
chromium_api_base,
|
||||
tor_archive_base,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -564,7 +559,6 @@ impl ApiClient {
|
||||
let cached_data: CachedGithubData = serde_json::from_str(&content).ok()?;
|
||||
|
||||
// Always use cached GitHub releases - cache never expires, only gets updated with new versions
|
||||
log::info!("Using cached GitHub releases for {browser}");
|
||||
Some(cached_data.releases)
|
||||
}
|
||||
|
||||
@@ -724,45 +718,6 @@ impl ApiClient {
|
||||
Ok(releases)
|
||||
}
|
||||
|
||||
pub async fn fetch_mullvad_releases_with_caching(
|
||||
&self,
|
||||
no_caching: bool,
|
||||
) -> Result<Vec<GithubRelease>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Check cache first (unless bypassing)
|
||||
if !no_caching {
|
||||
if let Some(cached_releases) = self.load_cached_github_releases("mullvad") {
|
||||
return Ok(cached_releases);
|
||||
}
|
||||
}
|
||||
|
||||
log::info!("Fetching Mullvad releases from GitHub API");
|
||||
let base_url = format!(
|
||||
"{}/repos/mullvad/mullvad-browser/releases",
|
||||
self.github_api_base
|
||||
);
|
||||
let releases = self.fetch_github_releases_multiple_pages(&base_url).await?;
|
||||
|
||||
let mut releases: Vec<GithubRelease> = releases
|
||||
.into_iter()
|
||||
.map(|mut release| {
|
||||
release.is_nightly = release.prerelease;
|
||||
release
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Sort releases using the new version sorting system
|
||||
sort_github_releases(&mut releases);
|
||||
|
||||
// Cache the results (unless bypassing cache)
|
||||
if !no_caching {
|
||||
if let Err(e) = self.save_cached_github_releases("mullvad", &releases) {
|
||||
log::error!("Failed to cache Mullvad releases: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
Ok(releases)
|
||||
}
|
||||
|
||||
pub async fn fetch_zen_releases_with_caching(
|
||||
&self,
|
||||
no_caching: bool,
|
||||
@@ -881,11 +836,11 @@ impl ApiClient {
|
||||
};
|
||||
|
||||
// Look for assets matching the pattern: camoufox-{version}-{release}-{os}.{arch}.zip
|
||||
// Use ends_with for precise matching to avoid false positives
|
||||
let pattern = format!(".{os_name}.{arch_name}.zip");
|
||||
assets.iter().any(|asset| {
|
||||
let name = asset.name.to_lowercase();
|
||||
name.starts_with("camoufox-")
|
||||
&& name.contains(&format!("-{os_name}.{arch_name}.zip"))
|
||||
&& name.ends_with(".zip")
|
||||
name.starts_with("camoufox-") && name.ends_with(&pattern)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -945,13 +900,20 @@ impl ApiClient {
|
||||
pub async fn fetch_chromium_latest_version(
|
||||
&self,
|
||||
) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Use architecture-aware URL for Chromium
|
||||
let arch = if cfg!(target_arch = "aarch64") {
|
||||
"Mac_Arm"
|
||||
} else {
|
||||
"Mac"
|
||||
// Use platform-aware URL for Chromium to match download URL generation
|
||||
let (os, arch) = Self::get_platform_info();
|
||||
let platform_str = match (&os[..], &arch[..]) {
|
||||
("windows", "x64") => "Win_x64",
|
||||
("windows", "arm64") => "Win_Arm64",
|
||||
("linux", "x64") => "Linux_x64",
|
||||
("linux", "arm64") => return Err("Chromium doesn't support ARM64 on Linux".into()),
|
||||
("macos", "x64") => "Mac",
|
||||
("macos", "arm64") => "Mac_Arm",
|
||||
_ => {
|
||||
return Err(format!("Unsupported platform/architecture for Chromium: {os}/{arch}").into())
|
||||
}
|
||||
};
|
||||
let url = format!("{}/{arch}/LAST_CHANGE", self.chromium_api_base);
|
||||
let url = format!("{}/{platform_str}/LAST_CHANGE", self.chromium_api_base);
|
||||
let version = self
|
||||
.client
|
||||
.get(&url)
|
||||
@@ -1103,107 +1065,6 @@ impl ApiClient {
|
||||
Ok(compatible_releases)
|
||||
}
|
||||
|
||||
pub async fn fetch_tor_releases_with_caching(
|
||||
&self,
|
||||
no_caching: bool,
|
||||
) -> Result<Vec<BrowserRelease>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Check cache first (unless bypassing)
|
||||
if !no_caching {
|
||||
if let Some(cached_releases) = self.load_cached_versions("tor-browser") {
|
||||
return Ok(cached_releases);
|
||||
}
|
||||
}
|
||||
|
||||
log::info!("Fetching TOR releases from archive...");
|
||||
let url = format!("{}/", self.tor_archive_base);
|
||||
let html = self
|
||||
.client
|
||||
.get(url)
|
||||
.header("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36")
|
||||
.send()
|
||||
.await?
|
||||
.text()
|
||||
.await?;
|
||||
|
||||
// Parse HTML to extract version directories
|
||||
let mut version_candidates = Vec::new();
|
||||
|
||||
// Look for directory links in the HTML
|
||||
for line in html.lines() {
|
||||
if line.contains("<a href=\"") && line.contains("/\">") {
|
||||
// Extract the directory name from the href attribute
|
||||
if let Some(start) = line.find("<a href=\"") {
|
||||
let start = start + 9; // Length of "<a href=\""
|
||||
if let Some(end) = line[start..].find("/\">") {
|
||||
let version = &line[start..start + end];
|
||||
|
||||
// Skip parent directory and non-version entries
|
||||
if version != ".."
|
||||
&& !version.is_empty()
|
||||
&& version.chars().next().unwrap_or('a').is_ascii_digit()
|
||||
{
|
||||
version_candidates.push(version.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Sort version candidates using the new version sorting system
|
||||
sort_versions(&mut version_candidates);
|
||||
|
||||
// Only check the first 10 versions to avoid being too slow
|
||||
let mut version_strings = Vec::new();
|
||||
for version in version_candidates.into_iter().take(10) {
|
||||
// Check if this version has a macOS DMG file
|
||||
if let Ok(has_macos) = self.check_tor_version_has_macos(&version).await {
|
||||
if has_macos {
|
||||
version_strings.push(version);
|
||||
}
|
||||
}
|
||||
|
||||
// Add a small delay to avoid overwhelming the server
|
||||
tokio::time::sleep(tokio::time::Duration::from_millis(100)).await;
|
||||
}
|
||||
|
||||
// Convert to BrowserRelease objects
|
||||
let releases: Vec<BrowserRelease> = version_strings
|
||||
.into_iter()
|
||||
.map(|version| BrowserRelease {
|
||||
version: version.clone(),
|
||||
date: "".to_string(), // TOR archive doesn't provide structured dates
|
||||
is_prerelease: false, // Assume all archived versions are stable
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Cache the results (unless bypassing cache)
|
||||
if !no_caching {
|
||||
if let Err(e) = self.save_cached_versions("tor-browser", &releases) {
|
||||
log::error!("Failed to cache TOR versions: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
Ok(releases)
|
||||
}
|
||||
|
||||
async fn check_tor_version_has_macos(
|
||||
&self,
|
||||
version: &str,
|
||||
) -> Result<bool, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let url = format!("{}/{version}/", self.tor_archive_base);
|
||||
let html = self
|
||||
.client
|
||||
.get(&url)
|
||||
.header("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36")
|
||||
.send()
|
||||
.await?
|
||||
.text()
|
||||
.await?;
|
||||
|
||||
// Check if there's a macOS DMG file in this version directory
|
||||
Ok(html.contains("tor-browser-macos-") && html.contains(".dmg"))
|
||||
}
|
||||
|
||||
/// Check if a Zen twilight release has been updated by comparing file size
|
||||
pub async fn check_twilight_update(
|
||||
&self,
|
||||
@@ -1303,7 +1164,6 @@ mod tests {
|
||||
base_url.clone(), // firefox_dev_api_base
|
||||
base_url.clone(), // github_api_base
|
||||
base_url.clone(), // chromium_api_base
|
||||
base_url.clone(), // tor_archive_base
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1527,47 +1387,6 @@ mod tests {
|
||||
assert_eq!(releases[0].version, "140.0b1");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_mullvad_api() {
|
||||
let server = setup_mock_server().await;
|
||||
let client = create_test_client(&server);
|
||||
|
||||
let mock_response = r#"[
|
||||
{
|
||||
"tag_name": "14.5a6",
|
||||
"name": "Mullvad Browser 14.5a6",
|
||||
"prerelease": true,
|
||||
"published_at": "2024-01-15T10:00:00Z",
|
||||
"assets": [
|
||||
{
|
||||
"name": "mullvad-browser-macos-14.5a6.dmg",
|
||||
"browser_download_url": "https://example.com/mullvad-14.5a6.dmg",
|
||||
"size": 100000000
|
||||
}
|
||||
]
|
||||
}
|
||||
]"#;
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/repos/mullvad/mullvad-browser/releases"))
|
||||
.and(query_param("per_page", "100"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(mock_response)
|
||||
.insert_header("content-type", "application/json"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let result = client.fetch_mullvad_releases_with_caching(true).await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let releases = result.unwrap();
|
||||
assert!(!releases.is_empty());
|
||||
assert_eq!(releases[0].tag_name, "14.5a6");
|
||||
assert!(releases[0].is_nightly);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_zen_api() {
|
||||
let server = setup_mock_server().await;
|
||||
@@ -1668,14 +1487,19 @@ mod tests {
|
||||
let server = setup_mock_server().await;
|
||||
let client = create_test_client(&server);
|
||||
|
||||
let arch = if cfg!(target_arch = "aarch64") {
|
||||
"Mac_Arm"
|
||||
} else {
|
||||
"Mac"
|
||||
let (os, arch) = ApiClient::get_platform_info();
|
||||
let platform_str = match (&os[..], &arch[..]) {
|
||||
("windows", "x64") => "Win_x64",
|
||||
("windows", "arm64") => "Win_Arm64",
|
||||
("linux", "x64") => "Linux_x64",
|
||||
("linux", "arm64") => return,
|
||||
("macos", "x64") => "Mac",
|
||||
("macos", "arm64") => "Mac_Arm",
|
||||
_ => return,
|
||||
};
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path(format!("/{arch}/LAST_CHANGE")))
|
||||
.and(path(format!("/{platform_str}/LAST_CHANGE")))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string("1465660")
|
||||
@@ -1696,14 +1520,19 @@ mod tests {
|
||||
let server = setup_mock_server().await;
|
||||
let client = create_test_client(&server);
|
||||
|
||||
let arch = if cfg!(target_arch = "aarch64") {
|
||||
"Mac_Arm"
|
||||
} else {
|
||||
"Mac"
|
||||
let (os, arch) = ApiClient::get_platform_info();
|
||||
let platform_str = match (&os[..], &arch[..]) {
|
||||
("windows", "x64") => "Win_x64",
|
||||
("windows", "arm64") => "Win_Arm64",
|
||||
("linux", "x64") => "Linux_x64",
|
||||
("linux", "arm64") => return,
|
||||
("macos", "x64") => "Mac",
|
||||
("macos", "arm64") => "Mac_Arm",
|
||||
_ => return,
|
||||
};
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path(format!("/{arch}/LAST_CHANGE")))
|
||||
.and(path(format!("/{platform_str}/LAST_CHANGE")))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string("1465660")
|
||||
@@ -1721,125 +1550,6 @@ mod tests {
|
||||
assert!(!releases[0].is_prerelease);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_tor_api() {
|
||||
let server = setup_mock_server().await;
|
||||
let client = create_test_client(&server);
|
||||
|
||||
let mock_html = r#"
|
||||
<html>
|
||||
<body>
|
||||
<a href="../">../</a>
|
||||
<a href="14.0.4/">14.0.4/</a>
|
||||
<a href="14.0.3/">14.0.3/</a>
|
||||
</body>
|
||||
</html>
|
||||
"#;
|
||||
|
||||
let version_html = r#"
|
||||
<html>
|
||||
<body>
|
||||
<a href="tor-browser-macos-14.0.4.dmg">tor-browser-macos-14.0.4.dmg</a>
|
||||
</body>
|
||||
</html>
|
||||
"#;
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(mock_html)
|
||||
.insert_header("content-type", "text/html"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/14.0.4/"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(version_html)
|
||||
.insert_header("content-type", "text/html"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/14.0.3/"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(version_html.replace("14.0.4", "14.0.3"))
|
||||
.insert_header("content-type", "text/html"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let result = client.fetch_tor_releases_with_caching(true).await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let releases = result.unwrap();
|
||||
assert!(!releases.is_empty());
|
||||
assert_eq!(releases[0].version, "14.0.4");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_tor_version_check() {
|
||||
let server = setup_mock_server().await;
|
||||
let client = create_test_client(&server);
|
||||
|
||||
let version_html = r#"
|
||||
<html>
|
||||
<body>
|
||||
<a href="tor-browser-macos-14.0.4.dmg">tor-browser-macos-14.0.4.dmg</a>
|
||||
</body>
|
||||
</html>
|
||||
"#;
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/14.0.4/"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(version_html)
|
||||
.insert_header("content-type", "text/html"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let result = client.check_tor_version_has_macos("14.0.4").await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
assert!(result.unwrap());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_tor_version_check_no_macos() {
|
||||
let server = setup_mock_server().await;
|
||||
let client = create_test_client(&server);
|
||||
|
||||
let version_html = r#"
|
||||
<html>
|
||||
<body>
|
||||
<a href="tor-browser-linux-14.0.4.tar.xz">tor-browser-linux-14.0.4.tar.xz</a>
|
||||
</body>
|
||||
</html>
|
||||
"#;
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/14.0.5/"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(version_html)
|
||||
.insert_header("content-type", "text/html"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let result = client.check_tor_version_has_macos("14.0.5").await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
assert!(!result.unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_nightly_version() {
|
||||
assert!(is_nightly_version("1.2.3a1"));
|
||||
@@ -1911,84 +1621,6 @@ mod tests {
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_mullvad_pagination_two_pages() {
|
||||
let server = setup_mock_server().await;
|
||||
let client = create_test_client(&server);
|
||||
|
||||
// Page 1 response with Link: rel="next" header
|
||||
let mock_page1 = r#"[
|
||||
{
|
||||
"tag_name": "100.0",
|
||||
"name": "Mullvad Browser 100.0",
|
||||
"prerelease": false,
|
||||
"published_at": "2024-07-01T00:00:00Z",
|
||||
"assets": [
|
||||
{ "name": "mullvad-browser-macos-100.0.dmg", "browser_download_url": "https://example.com/100.0.dmg", "size": 1 }
|
||||
]
|
||||
}
|
||||
]"#;
|
||||
|
||||
// Page 2 response
|
||||
let mock_page2 = r#"[
|
||||
{
|
||||
"tag_name": "99.0",
|
||||
"name": "Mullvad Browser 99.0",
|
||||
"prerelease": false,
|
||||
"published_at": "2024-06-01T00:00:00Z",
|
||||
"assets": [
|
||||
{ "name": "mullvad-browser-macos-99.0.dmg", "browser_download_url": "https://example.com/99.0.dmg", "size": 1 }
|
||||
]
|
||||
}
|
||||
]"#;
|
||||
|
||||
// Mock page 1
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/repos/mullvad/mullvad-browser/releases"))
|
||||
.and(query_param("per_page", "100"))
|
||||
.and(query_param("page", "1"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(mock_page1)
|
||||
.insert_header("content-type", "application/json")
|
||||
.insert_header(
|
||||
"link",
|
||||
format!(
|
||||
"<{}?per_page=100&page=2>; rel=\"next\", <{}?per_page=100&page=2>; rel=\"last\"",
|
||||
server.uri().to_string() + "/repos/mullvad/mullvad-browser/releases",
|
||||
server.uri().to_string() + "/repos/mullvad/mullvad-browser/releases"
|
||||
),
|
||||
),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
// Mock page 2
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/repos/mullvad/mullvad-browser/releases"))
|
||||
.and(query_param("per_page", "100"))
|
||||
.and(query_param("page", "2"))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string(mock_page2)
|
||||
.insert_header("content-type", "application/json"),
|
||||
)
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let result = client.fetch_mullvad_releases_with_caching(true).await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let releases = result.unwrap();
|
||||
// We currently only fetch 1 page intentionally; ensure we at least got page 1
|
||||
assert_eq!(
|
||||
releases.len(),
|
||||
1,
|
||||
"Should fetch only the first page of results"
|
||||
);
|
||||
assert_eq!(releases[0].tag_name, "100.0");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_camoufox_beta_version_parsing() {
|
||||
// Test specific Camoufox beta versions that are causing issues
|
||||
|
||||
@@ -784,6 +784,20 @@ impl AppAutoUpdater {
|
||||
) -> Result<PathBuf, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let file_path = dest_dir.join(filename);
|
||||
|
||||
// First, try to get the file size via HEAD request
|
||||
// This is more reliable than GET content-length for some CDN configurations
|
||||
// especially when dealing with redirects (like GitHub releases)
|
||||
let head_size = self
|
||||
.client
|
||||
.head(download_url)
|
||||
.header("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36")
|
||||
.send()
|
||||
.await
|
||||
.ok()
|
||||
.and_then(|r| r.content_length());
|
||||
|
||||
log::info!("HEAD request for download size: {:?} bytes", head_size);
|
||||
|
||||
let response = self
|
||||
.client
|
||||
.get(download_url)
|
||||
@@ -795,7 +809,9 @@ impl AppAutoUpdater {
|
||||
return Err(format!("Download failed with status: {}", response.status()).into());
|
||||
}
|
||||
|
||||
let total_size = response.content_length().unwrap_or(0);
|
||||
// Use HEAD size if available, otherwise fall back to GET content-length
|
||||
let total_size = head_size.or(response.content_length()).unwrap_or(0);
|
||||
log::info!("Final download size: {} bytes", total_size);
|
||||
let mut file = fs::File::create(&file_path)?;
|
||||
let mut stream = response.bytes_stream();
|
||||
let mut downloaded = 0u64;
|
||||
@@ -999,6 +1015,22 @@ impl AppAutoUpdater {
|
||||
// Clean up backup after successful installation
|
||||
let _ = fs::remove_dir_all(&backup_path);
|
||||
|
||||
// Clean up old "Donut Browser.app" if it exists (from before the project rename)
|
||||
if let Some(parent_dir) = current_app_path.parent() {
|
||||
let old_app_path = parent_dir.join("Donut Browser.app");
|
||||
if old_app_path.exists() && old_app_path != current_app_path {
|
||||
log::info!(
|
||||
"Removing old 'Donut Browser.app' from: {}",
|
||||
old_app_path.display()
|
||||
);
|
||||
if let Err(e) = fs::remove_dir_all(&old_app_path) {
|
||||
log::warn!("Warning: Failed to remove old 'Donut Browser.app': {e}");
|
||||
} else {
|
||||
log::info!("Successfully removed old 'Donut Browser.app'");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
|
||||
@@ -82,6 +82,12 @@ fn build_proxy_url(
|
||||
|
||||
#[tokio::main(flavor = "multi_thread")]
|
||||
async fn main() {
|
||||
// Initialize logger to write to stderr (which will be redirected to file)
|
||||
env_logger::Builder::from_default_env()
|
||||
.filter_level(log::LevelFilter::Debug)
|
||||
.format_timestamp_millis()
|
||||
.init();
|
||||
|
||||
// Set up panic handler to log panics before process exits
|
||||
std::panic::set_hook(Box::new(|panic_info| {
|
||||
log::error!("PANIC in proxy worker: {:?}", panic_info);
|
||||
|
||||
+118
-99
@@ -13,39 +13,33 @@ pub struct ProxySettings {
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
|
||||
pub enum BrowserType {
|
||||
MullvadBrowser,
|
||||
Chromium,
|
||||
Firefox,
|
||||
FirefoxDeveloper,
|
||||
Brave,
|
||||
Zen,
|
||||
TorBrowser,
|
||||
Camoufox,
|
||||
}
|
||||
|
||||
impl BrowserType {
|
||||
pub fn as_str(&self) -> &'static str {
|
||||
match self {
|
||||
BrowserType::MullvadBrowser => "mullvad-browser",
|
||||
BrowserType::Chromium => "chromium",
|
||||
BrowserType::Firefox => "firefox",
|
||||
BrowserType::FirefoxDeveloper => "firefox-developer",
|
||||
BrowserType::Brave => "brave",
|
||||
BrowserType::Zen => "zen",
|
||||
BrowserType::TorBrowser => "tor-browser",
|
||||
BrowserType::Camoufox => "camoufox",
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_str(s: &str) -> Result<Self, String> {
|
||||
match s {
|
||||
"mullvad-browser" => Ok(BrowserType::MullvadBrowser),
|
||||
"chromium" => Ok(BrowserType::Chromium),
|
||||
"firefox" => Ok(BrowserType::Firefox),
|
||||
"firefox-developer" => Ok(BrowserType::FirefoxDeveloper),
|
||||
"brave" => Ok(BrowserType::Brave),
|
||||
"zen" => Ok(BrowserType::Zen),
|
||||
"tor-browser" => Ok(BrowserType::TorBrowser),
|
||||
"camoufox" => Ok(BrowserType::Camoufox),
|
||||
_ => Err(format!("Unknown browser type: {s}")),
|
||||
}
|
||||
@@ -85,21 +79,120 @@ mod macos {
|
||||
executable_dir.push("Contents");
|
||||
executable_dir.push("MacOS");
|
||||
|
||||
// Find the first executable in the MacOS directory
|
||||
let executable_path = std::fs::read_dir(&executable_dir)?
|
||||
// Find executables matching the browser name pattern
|
||||
let candidates: Vec<_> = std::fs::read_dir(&executable_dir)?
|
||||
.filter_map(Result::ok)
|
||||
.find(|entry| {
|
||||
.filter(|entry| {
|
||||
let binding = entry.file_name();
|
||||
let name = binding.to_string_lossy();
|
||||
name.starts_with("firefox")
|
||||
|| name.starts_with("mullvad")
|
||||
|| name.starts_with("zen")
|
||||
|| name.starts_with("tor")
|
||||
|| name.starts_with("camoufox")
|
||||
|| name.contains("Browser")
|
||||
})
|
||||
.map(|entry| entry.path())
|
||||
.ok_or("No executable found in MacOS directory")?;
|
||||
.collect();
|
||||
|
||||
if candidates.is_empty() {
|
||||
return Err("No executable found in MacOS directory".into());
|
||||
}
|
||||
|
||||
// For Camoufox, validate architecture compatibility
|
||||
let executable_path = if candidates.iter().any(|p| {
|
||||
p.file_name()
|
||||
.and_then(|n| n.to_str())
|
||||
.map(|n| n.starts_with("camoufox"))
|
||||
.unwrap_or(false)
|
||||
}) {
|
||||
// Find the executable that matches the current architecture
|
||||
let current_arch = if cfg!(target_arch = "x86_64") {
|
||||
"x86_64"
|
||||
} else if cfg!(target_arch = "aarch64") {
|
||||
"arm64"
|
||||
} else {
|
||||
return Err("Unsupported architecture".into());
|
||||
};
|
||||
|
||||
// Try to find an executable that matches the current architecture
|
||||
// Use file command to check architecture
|
||||
let mut found_executable = None;
|
||||
let mut file_command_available = true;
|
||||
|
||||
for candidate in &candidates {
|
||||
match std::process::Command::new("file").arg(candidate).output() {
|
||||
Ok(output) => {
|
||||
if output.status.success() {
|
||||
if let Ok(output_str) = String::from_utf8(output.stdout) {
|
||||
let is_compatible = if current_arch == "x86_64" {
|
||||
output_str.contains("x86_64") || output_str.contains("i386")
|
||||
} else {
|
||||
output_str.contains("arm64") || output_str.contains("aarch64")
|
||||
};
|
||||
|
||||
if is_compatible {
|
||||
found_executable = Some(candidate.clone());
|
||||
log::info!(
|
||||
"Found compatible Camoufox executable for {}: {}",
|
||||
current_arch,
|
||||
candidate.display()
|
||||
);
|
||||
break;
|
||||
} else {
|
||||
log::warn!(
|
||||
"Skipping incompatible Camoufox executable: {} (architecture: {})",
|
||||
candidate.display(),
|
||||
output_str.trim()
|
||||
);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log::warn!(
|
||||
"Failed to check architecture for {}: file command returned non-zero exit code",
|
||||
candidate.display()
|
||||
);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
log::warn!(
|
||||
"Failed to check architecture for {} using file command: {}",
|
||||
candidate.display(),
|
||||
e
|
||||
);
|
||||
file_command_available = false;
|
||||
// Continue checking other candidates
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If no compatible executable found but we have candidates, use the first one
|
||||
// (fallback for cases where file command isn't available or failed)
|
||||
if found_executable.is_none() && !candidates.is_empty() {
|
||||
if !file_command_available {
|
||||
log::warn!(
|
||||
"file command not available, using first candidate: {}",
|
||||
candidates[0].display()
|
||||
);
|
||||
} else {
|
||||
log::warn!(
|
||||
"No compatible executable found for architecture {}, using first candidate: {}",
|
||||
current_arch,
|
||||
candidates[0].display()
|
||||
);
|
||||
}
|
||||
found_executable = Some(candidates[0].clone());
|
||||
}
|
||||
|
||||
found_executable.ok_or_else(|| {
|
||||
format!(
|
||||
"No compatible Camoufox executable found for architecture {}. Available executables: {:?}",
|
||||
current_arch,
|
||||
candidates
|
||||
)
|
||||
})?
|
||||
} else {
|
||||
// For other browsers, use the first matching executable
|
||||
candidates[0].clone()
|
||||
};
|
||||
|
||||
Ok(executable_path)
|
||||
}
|
||||
@@ -190,28 +283,9 @@ mod linux {
|
||||
browser_subdir.join("firefox"),
|
||||
browser_subdir.join("firefox-bin"),
|
||||
],
|
||||
BrowserType::MullvadBrowser => {
|
||||
vec![
|
||||
browser_subdir.join("firefox"),
|
||||
browser_subdir.join("mullvad-browser"),
|
||||
browser_subdir.join("firefox-bin"),
|
||||
]
|
||||
}
|
||||
BrowserType::Zen => {
|
||||
vec![browser_subdir.join("zen"), browser_subdir.join("zen-bin")]
|
||||
}
|
||||
BrowserType::TorBrowser => {
|
||||
vec![
|
||||
// Common Tor Browser launchers
|
||||
browser_subdir.join("tor-browser"),
|
||||
// Firefox-based binaries
|
||||
browser_subdir.join("firefox"),
|
||||
browser_subdir.join("firefox-bin"),
|
||||
// Sometimes packaged similarly to Firefox
|
||||
install_dir.join("firefox").join("firefox"),
|
||||
install_dir.join("firefox").join("firefox-bin"),
|
||||
]
|
||||
}
|
||||
BrowserType::Camoufox => {
|
||||
vec![
|
||||
install_dir.join("camoufox-bin"),
|
||||
@@ -303,23 +377,9 @@ mod linux {
|
||||
install_dir.join("firefox").join("firefox"),
|
||||
]
|
||||
}
|
||||
BrowserType::MullvadBrowser => {
|
||||
vec![
|
||||
browser_subdir.join("mullvad-browser"),
|
||||
browser_subdir.join("firefox-bin"),
|
||||
browser_subdir.join("firefox"),
|
||||
]
|
||||
}
|
||||
BrowserType::Zen => {
|
||||
vec![browser_subdir.join("zen"), browser_subdir.join("zen-bin")]
|
||||
}
|
||||
BrowserType::TorBrowser => {
|
||||
vec![
|
||||
browser_subdir.join("tor-browser"),
|
||||
browser_subdir.join("firefox-bin"),
|
||||
browser_subdir.join("firefox"),
|
||||
]
|
||||
}
|
||||
BrowserType::Camoufox => {
|
||||
vec![
|
||||
install_dir.join("camoufox-bin"),
|
||||
@@ -424,9 +484,7 @@ mod windows {
|
||||
if path.extension().is_some_and(|ext| ext == "exe") {
|
||||
let name = path.file_stem().unwrap_or_default().to_string_lossy();
|
||||
if name.starts_with("firefox")
|
||||
|| name.starts_with("mullvad")
|
||||
|| name.starts_with("zen")
|
||||
|| name.starts_with("tor")
|
||||
|| name.starts_with("camoufox")
|
||||
|| name.contains("browser")
|
||||
{
|
||||
@@ -510,9 +568,7 @@ mod windows {
|
||||
if path.extension().is_some_and(|ext| ext == "exe") {
|
||||
let name = path.file_stem().unwrap_or_default().to_string_lossy();
|
||||
if name.starts_with("firefox")
|
||||
|| name.starts_with("mullvad")
|
||||
|| name.starts_with("zen")
|
||||
|| name.starts_with("tor")
|
||||
|| name.starts_with("camoufox")
|
||||
|| name.contains("browser")
|
||||
{
|
||||
@@ -624,22 +680,9 @@ impl Browser for FirefoxBrowser {
|
||||
args.push("--headless".to_string());
|
||||
}
|
||||
|
||||
// Use -no-remote for browsers that require it for security (Mullvad, Tor) or when remote debugging
|
||||
match self.browser_type {
|
||||
BrowserType::MullvadBrowser | BrowserType::TorBrowser => {
|
||||
args.push("-no-remote".to_string());
|
||||
}
|
||||
BrowserType::Firefox
|
||||
| BrowserType::FirefoxDeveloper
|
||||
| BrowserType::Zen
|
||||
| BrowserType::Camoufox => {
|
||||
// Use -no-remote when remote debugging to avoid conflicts
|
||||
if remote_debugging_port.is_some() {
|
||||
args.push("-no-remote".to_string());
|
||||
}
|
||||
// Don't use -no-remote for normal launches so we can communicate with existing instances
|
||||
}
|
||||
_ => {}
|
||||
// Use -no-remote when remote debugging to avoid conflicts with existing instances
|
||||
if remote_debugging_port.is_some() {
|
||||
args.push("-no-remote".to_string());
|
||||
}
|
||||
|
||||
// Firefox-based browsers use profile directory and user.js for proxy configuration
|
||||
@@ -737,6 +780,12 @@ impl Browser for ChromiumBrowser {
|
||||
"--disable-background-timer-throttling".to_string(),
|
||||
"--crash-server-url=".to_string(),
|
||||
"--disable-updater".to_string(),
|
||||
// Disable quit confirmation and session restore prompts
|
||||
"--disable-session-crashed-bubble".to_string(),
|
||||
"--hide-crash-restore-bubble".to_string(),
|
||||
"--disable-infobars".to_string(),
|
||||
// Disable QUIC/HTTP3 to ensure traffic goes through HTTP proxy
|
||||
"--disable-quic".to_string(),
|
||||
];
|
||||
|
||||
// Add remote debugging if requested
|
||||
@@ -910,11 +959,9 @@ impl BrowserFactory {
|
||||
|
||||
pub fn create_browser(&self, browser_type: BrowserType) -> Box<dyn Browser> {
|
||||
match browser_type {
|
||||
BrowserType::MullvadBrowser
|
||||
| BrowserType::Firefox
|
||||
| BrowserType::FirefoxDeveloper
|
||||
| BrowserType::Zen
|
||||
| BrowserType::TorBrowser => Box::new(FirefoxBrowser::new(browser_type)),
|
||||
BrowserType::Firefox | BrowserType::FirefoxDeveloper | BrowserType::Zen => {
|
||||
Box::new(FirefoxBrowser::new(browser_type))
|
||||
}
|
||||
BrowserType::Chromium | BrowserType::Brave => Box::new(ChromiumBrowser::new(browser_type)),
|
||||
BrowserType::Camoufox => Box::new(CamoufoxBrowser::new()),
|
||||
}
|
||||
@@ -992,20 +1039,14 @@ mod tests {
|
||||
#[test]
|
||||
fn test_browser_type_conversions() {
|
||||
// Test as_str
|
||||
assert_eq!(BrowserType::MullvadBrowser.as_str(), "mullvad-browser");
|
||||
assert_eq!(BrowserType::Firefox.as_str(), "firefox");
|
||||
assert_eq!(BrowserType::FirefoxDeveloper.as_str(), "firefox-developer");
|
||||
assert_eq!(BrowserType::Chromium.as_str(), "chromium");
|
||||
assert_eq!(BrowserType::Brave.as_str(), "brave");
|
||||
assert_eq!(BrowserType::Zen.as_str(), "zen");
|
||||
assert_eq!(BrowserType::TorBrowser.as_str(), "tor-browser");
|
||||
assert_eq!(BrowserType::Camoufox.as_str(), "camoufox");
|
||||
|
||||
// Test from_str - use expect with descriptive messages instead of unwrap
|
||||
assert_eq!(
|
||||
BrowserType::from_str("mullvad-browser").expect("mullvad-browser should be valid"),
|
||||
BrowserType::MullvadBrowser
|
||||
);
|
||||
assert_eq!(
|
||||
BrowserType::from_str("firefox").expect("firefox should be valid"),
|
||||
BrowserType::Firefox
|
||||
@@ -1026,10 +1067,6 @@ mod tests {
|
||||
BrowserType::from_str("zen").expect("zen should be valid"),
|
||||
BrowserType::Zen
|
||||
);
|
||||
assert_eq!(
|
||||
BrowserType::from_str("tor-browser").expect("tor-browser should be valid"),
|
||||
BrowserType::TorBrowser
|
||||
);
|
||||
assert_eq!(
|
||||
BrowserType::from_str("camoufox").expect("camoufox should be valid"),
|
||||
BrowserType::Camoufox
|
||||
@@ -1096,30 +1133,12 @@ mod tests {
|
||||
"Firefox should include debugging port"
|
||||
);
|
||||
|
||||
// Test Mullvad Browser (should always use -no-remote)
|
||||
let browser = FirefoxBrowser::new(BrowserType::MullvadBrowser);
|
||||
let args = browser
|
||||
.create_launch_args("/path/to/profile", None, None, None, false)
|
||||
.expect("Failed to create launch args for Mullvad Browser");
|
||||
assert_eq!(args, vec!["-profile", "/path/to/profile", "-no-remote"]);
|
||||
|
||||
// Test Tor Browser (should always use -no-remote)
|
||||
let browser = FirefoxBrowser::new(BrowserType::TorBrowser);
|
||||
let args = browser
|
||||
.create_launch_args("/path/to/profile", None, None, None, false)
|
||||
.expect("Failed to create launch args for Tor Browser");
|
||||
assert_eq!(args, vec!["-profile", "/path/to/profile", "-no-remote"]);
|
||||
|
||||
// Test Zen Browser (should not use -no-remote for normal launch)
|
||||
// Test Zen Browser (no special flags without remote debugging)
|
||||
let browser = FirefoxBrowser::new(BrowserType::Zen);
|
||||
let args = browser
|
||||
.create_launch_args("/path/to/profile", None, None, None, false)
|
||||
.expect("Failed to create launch args for Zen Browser");
|
||||
assert_eq!(args, vec!["-profile", "/path/to/profile"]);
|
||||
assert!(
|
||||
!args.contains(&"-no-remote".to_string()),
|
||||
"Zen Browser should not use -no-remote for normal launch"
|
||||
);
|
||||
|
||||
// Test headless mode
|
||||
let args = browser
|
||||
|
||||
+325
-251
@@ -33,29 +33,6 @@ impl BrowserRunner {
|
||||
&BROWSER_RUNNER
|
||||
}
|
||||
|
||||
// Helper function to check if a process matches TOR/Mullvad browser
|
||||
fn is_tor_or_mullvad_browser(
|
||||
&self,
|
||||
exe_name: &str,
|
||||
cmd: &[std::ffi::OsString],
|
||||
browser_type: &str,
|
||||
) -> bool {
|
||||
#[cfg(target_os = "macos")]
|
||||
return platform_browser::macos::is_tor_or_mullvad_browser(exe_name, cmd, browser_type);
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
return platform_browser::windows::is_tor_or_mullvad_browser(exe_name, cmd, browser_type);
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
return platform_browser::linux::is_tor_or_mullvad_browser(exe_name, cmd, browser_type);
|
||||
|
||||
#[cfg(not(any(target_os = "macos", target_os = "windows", target_os = "linux")))]
|
||||
{
|
||||
let _ = (exe_name, cmd, browser_type);
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_binaries_dir(&self) -> PathBuf {
|
||||
let mut path = self.base_dirs.data_local_dir().to_path_buf();
|
||||
path.push(if cfg!(debug_assertions) {
|
||||
@@ -395,148 +372,117 @@ impl BrowserRunner {
|
||||
profile.id
|
||||
);
|
||||
|
||||
// For TOR and Mullvad browsers, we need to find the actual browser process
|
||||
// because they use launcher scripts that spawn the real browser process
|
||||
let mut actual_pid = launcher_pid;
|
||||
|
||||
if matches!(
|
||||
browser_type,
|
||||
BrowserType::TorBrowser | BrowserType::MullvadBrowser
|
||||
) {
|
||||
// Wait a moment for the actual browser process to start
|
||||
tokio::time::sleep(tokio::time::Duration::from_millis(3000)).await;
|
||||
|
||||
// Find the actual browser process
|
||||
let system = System::new_all();
|
||||
for (pid, process) in system.processes() {
|
||||
let process_name = process.name().to_str().unwrap_or("");
|
||||
let process_cmd = process.cmd();
|
||||
let pid_u32 = pid.as_u32();
|
||||
|
||||
// Skip if this is the launcher process itself
|
||||
if pid_u32 == launcher_pid {
|
||||
continue;
|
||||
}
|
||||
|
||||
if self.is_tor_or_mullvad_browser(process_name, process_cmd, &profile.browser) {
|
||||
log::info!(
|
||||
"Found actual {} browser process: PID {} ({})",
|
||||
profile.browser,
|
||||
pid_u32,
|
||||
process_name
|
||||
);
|
||||
actual_pid = pid_u32;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// On macOS, when launching via `open -a`, the child PID is the `open` helper.
|
||||
// Resolve and store the actual browser PID for all browser types.
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
// Give the browser a moment to start
|
||||
tokio::time::sleep(tokio::time::Duration::from_millis(1500)).await;
|
||||
let actual_pid = {
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
// Give the browser a moment to start
|
||||
tokio::time::sleep(tokio::time::Duration::from_millis(1500)).await;
|
||||
|
||||
let system = System::new_all();
|
||||
let profiles_dir = self.profile_manager.get_profiles_dir();
|
||||
let profile_data_path = profile.get_profile_data_path(&profiles_dir);
|
||||
let profile_data_path_str = profile_data_path.to_string_lossy();
|
||||
let system = System::new_all();
|
||||
let profiles_dir = self.profile_manager.get_profiles_dir();
|
||||
let profile_data_path = profile.get_profile_data_path(&profiles_dir);
|
||||
let profile_data_path_str = profile_data_path.to_string_lossy();
|
||||
|
||||
for (pid, process) in system.processes() {
|
||||
let cmd = process.cmd();
|
||||
if cmd.is_empty() {
|
||||
continue;
|
||||
}
|
||||
let mut resolved_pid = launcher_pid;
|
||||
|
||||
// Determine if this process matches the intended browser type
|
||||
let exe_name_lower = process.name().to_string_lossy().to_lowercase();
|
||||
let is_correct_browser = match profile.browser.as_str() {
|
||||
"firefox" => {
|
||||
exe_name_lower.contains("firefox")
|
||||
&& !exe_name_lower.contains("developer")
|
||||
&& !exe_name_lower.contains("tor")
|
||||
&& !exe_name_lower.contains("mullvad")
|
||||
&& !exe_name_lower.contains("camoufox")
|
||||
for (pid, process) in system.processes() {
|
||||
let cmd = process.cmd();
|
||||
if cmd.is_empty() {
|
||||
continue;
|
||||
}
|
||||
"firefox-developer" => {
|
||||
// More flexible detection for Firefox Developer Edition
|
||||
(exe_name_lower.contains("firefox") && exe_name_lower.contains("developer"))
|
||||
|| (exe_name_lower.contains("firefox")
|
||||
&& cmd.iter().any(|arg| {
|
||||
let arg_str = arg.to_str().unwrap_or("");
|
||||
arg_str.contains("Developer")
|
||||
|| arg_str.contains("developer")
|
||||
|| arg_str.contains("FirefoxDeveloperEdition")
|
||||
|| arg_str.contains("firefox-developer")
|
||||
}))
|
||||
|| exe_name_lower == "firefox" // Firefox Developer might just show as "firefox"
|
||||
}
|
||||
"mullvad-browser" => {
|
||||
self.is_tor_or_mullvad_browser(&exe_name_lower, cmd, "mullvad-browser")
|
||||
}
|
||||
"tor-browser" => self.is_tor_or_mullvad_browser(&exe_name_lower, cmd, "tor-browser"),
|
||||
"zen" => exe_name_lower.contains("zen"),
|
||||
"chromium" => exe_name_lower.contains("chromium") || exe_name_lower.contains("chrome"),
|
||||
"brave" => exe_name_lower.contains("brave") || exe_name_lower.contains("Brave"),
|
||||
_ => false,
|
||||
};
|
||||
|
||||
if !is_correct_browser {
|
||||
continue;
|
||||
}
|
||||
// Determine if this process matches the intended browser type
|
||||
let exe_name_lower = process.name().to_string_lossy().to_lowercase();
|
||||
let is_correct_browser = match profile.browser.as_str() {
|
||||
"firefox" => {
|
||||
exe_name_lower.contains("firefox")
|
||||
&& !exe_name_lower.contains("developer")
|
||||
&& !exe_name_lower.contains("camoufox")
|
||||
}
|
||||
"firefox-developer" => {
|
||||
// More flexible detection for Firefox Developer Edition
|
||||
(exe_name_lower.contains("firefox") && exe_name_lower.contains("developer"))
|
||||
|| (exe_name_lower.contains("firefox")
|
||||
&& cmd.iter().any(|arg| {
|
||||
let arg_str = arg.to_str().unwrap_or("");
|
||||
arg_str.contains("Developer")
|
||||
|| arg_str.contains("developer")
|
||||
|| arg_str.contains("FirefoxDeveloperEdition")
|
||||
|| arg_str.contains("firefox-developer")
|
||||
}))
|
||||
|| exe_name_lower == "firefox" // Firefox Developer might just show as "firefox"
|
||||
}
|
||||
"zen" => exe_name_lower.contains("zen"),
|
||||
"chromium" => exe_name_lower.contains("chromium") || exe_name_lower.contains("chrome"),
|
||||
"brave" => exe_name_lower.contains("brave") || exe_name_lower.contains("Brave"),
|
||||
_ => false,
|
||||
};
|
||||
|
||||
// Check for profile path match
|
||||
let profile_path_match = if matches!(
|
||||
profile.browser.as_str(),
|
||||
"firefox" | "firefox-developer" | "tor-browser" | "mullvad-browser" | "zen"
|
||||
) {
|
||||
// Firefox-based browsers: look for -profile argument followed by path
|
||||
let mut found_profile_arg = false;
|
||||
for (i, arg) in cmd.iter().enumerate() {
|
||||
if let Some(arg_str) = arg.to_str() {
|
||||
if arg_str == "-profile" && i + 1 < cmd.len() {
|
||||
if let Some(next_arg) = cmd.get(i + 1).and_then(|a| a.to_str()) {
|
||||
if next_arg == profile_data_path_str {
|
||||
found_profile_arg = true;
|
||||
break;
|
||||
if !is_correct_browser {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check for profile path match
|
||||
let profile_path_match = if matches!(
|
||||
profile.browser.as_str(),
|
||||
"firefox" | "firefox-developer" | "zen"
|
||||
) {
|
||||
// Firefox-based browsers: look for -profile argument followed by path
|
||||
let mut found_profile_arg = false;
|
||||
for (i, arg) in cmd.iter().enumerate() {
|
||||
if let Some(arg_str) = arg.to_str() {
|
||||
if arg_str == "-profile" && i + 1 < cmd.len() {
|
||||
if let Some(next_arg) = cmd.get(i + 1).and_then(|a| a.to_str()) {
|
||||
if next_arg == profile_data_path_str {
|
||||
found_profile_arg = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// Also check for combined -profile=path format
|
||||
if arg_str == format!("-profile={profile_data_path_str}") {
|
||||
found_profile_arg = true;
|
||||
break;
|
||||
}
|
||||
// Check if the argument is the profile path directly
|
||||
if arg_str == profile_data_path_str {
|
||||
found_profile_arg = true;
|
||||
break;
|
||||
// Also check for combined -profile=path format
|
||||
if arg_str == format!("-profile={profile_data_path_str}") {
|
||||
found_profile_arg = true;
|
||||
break;
|
||||
}
|
||||
// Check if the argument is the profile path directly
|
||||
if arg_str == profile_data_path_str {
|
||||
found_profile_arg = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
found_profile_arg
|
||||
} else {
|
||||
// Chromium-based browsers: look for --user-data-dir argument
|
||||
cmd.iter().any(|s| {
|
||||
if let Some(arg) = s.to_str() {
|
||||
arg == format!("--user-data-dir={profile_data_path_str}")
|
||||
|| arg == profile_data_path_str
|
||||
} else {
|
||||
false
|
||||
}
|
||||
})
|
||||
};
|
||||
found_profile_arg
|
||||
} else {
|
||||
// Chromium-based browsers: look for --user-data-dir argument
|
||||
cmd.iter().any(|s| {
|
||||
if let Some(arg) = s.to_str() {
|
||||
arg == format!("--user-data-dir={profile_data_path_str}")
|
||||
|| arg == profile_data_path_str
|
||||
} else {
|
||||
false
|
||||
}
|
||||
})
|
||||
};
|
||||
|
||||
if profile_path_match {
|
||||
let pid_u32 = pid.as_u32();
|
||||
if pid_u32 != launcher_pid {
|
||||
actual_pid = pid_u32;
|
||||
break;
|
||||
if profile_path_match {
|
||||
let pid_u32 = pid.as_u32();
|
||||
if pid_u32 != launcher_pid {
|
||||
resolved_pid = pid_u32;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
resolved_pid
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
{
|
||||
launcher_pid
|
||||
}
|
||||
};
|
||||
|
||||
// Update profile with process info and save
|
||||
let mut updated_profile = profile.clone();
|
||||
@@ -552,11 +498,7 @@ impl BrowserRunner {
|
||||
if profile.proxy_id.is_some()
|
||||
&& matches!(
|
||||
browser_type,
|
||||
BrowserType::Firefox
|
||||
| BrowserType::FirefoxDeveloper
|
||||
| BrowserType::Zen
|
||||
| BrowserType::TorBrowser
|
||||
| BrowserType::MullvadBrowser
|
||||
BrowserType::Firefox | BrowserType::FirefoxDeveloper | BrowserType::Zen
|
||||
)
|
||||
{
|
||||
// Proxy settings for Firefox-based browsers are applied via user.js file
|
||||
@@ -714,48 +656,9 @@ impl BrowserRunner {
|
||||
#[cfg(not(any(target_os = "macos", target_os = "windows", target_os = "linux")))]
|
||||
return Err("Unsupported platform".into());
|
||||
}
|
||||
BrowserType::MullvadBrowser | BrowserType::TorBrowser => {
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
let profiles_dir = self.profile_manager.get_profiles_dir();
|
||||
return platform_browser::macos::open_url_in_existing_browser_tor_mullvad(
|
||||
&updated_profile,
|
||||
url,
|
||||
browser_type,
|
||||
&browser_dir,
|
||||
&profiles_dir,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
let profiles_dir = self.profile_manager.get_profiles_dir();
|
||||
return platform_browser::windows::open_url_in_existing_browser_tor_mullvad(
|
||||
&updated_profile,
|
||||
url,
|
||||
browser_type,
|
||||
&browser_dir,
|
||||
&profiles_dir,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
let profiles_dir = self.profile_manager.get_profiles_dir();
|
||||
return platform_browser::linux::open_url_in_existing_browser_tor_mullvad(
|
||||
&updated_profile,
|
||||
url,
|
||||
browser_type,
|
||||
&browser_dir,
|
||||
&profiles_dir,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
|
||||
#[cfg(not(any(target_os = "macos", target_os = "windows", target_os = "linux")))]
|
||||
return Err("Unsupported platform".into());
|
||||
BrowserType::Camoufox => {
|
||||
// Camoufox uses nodecar for launching, URL opening is handled differently
|
||||
Err("URL opening in existing Camoufox instance is not supported".into())
|
||||
}
|
||||
BrowserType::Chromium | BrowserType::Brave => {
|
||||
#[cfg(target_os = "macos")]
|
||||
@@ -800,10 +703,6 @@ impl BrowserRunner {
|
||||
#[cfg(not(any(target_os = "macos", target_os = "windows", target_os = "linux")))]
|
||||
return Err("Unsupported platform".into());
|
||||
}
|
||||
BrowserType::Camoufox => {
|
||||
// This should never be reached due to the early return above, but handle it just in case
|
||||
Err("Camoufox URL opening should be handled in the early return above".into())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -848,7 +747,7 @@ impl BrowserRunner {
|
||||
// For Firefox-based browsers, apply PAC/user.js to point to the local proxy
|
||||
if matches!(
|
||||
profile.browser.as_str(),
|
||||
"firefox" | "firefox-developer" | "zen" | "tor-browser" | "mullvad-browser"
|
||||
"firefox" | "firefox-developer" | "zen"
|
||||
) {
|
||||
let profiles_dir = self.profile_manager.get_profiles_dir();
|
||||
let profile_path = profiles_dir.join(profile.id.to_string()).join("profile");
|
||||
@@ -949,19 +848,6 @@ impl BrowserRunner {
|
||||
if let Some(url_ref) = url.as_ref() {
|
||||
log::info!("Opening URL in existing browser: {url_ref}");
|
||||
|
||||
// For TOR/Mullvad browsers, add extra verification
|
||||
if matches!(
|
||||
final_profile.browser.as_str(),
|
||||
"tor-browser" | "mullvad-browser"
|
||||
) {
|
||||
log::info!("TOR/Mullvad browser detected - ensuring we have correct PID");
|
||||
if final_profile.process_id.is_none() {
|
||||
log::info!(
|
||||
"ERROR: No PID found for running TOR/Mullvad browser - this should not happen"
|
||||
);
|
||||
return Err("No PID found for running browser".into());
|
||||
}
|
||||
}
|
||||
match self
|
||||
.open_url_in_existing_browser(
|
||||
app_handle.clone(),
|
||||
@@ -978,18 +864,22 @@ impl BrowserRunner {
|
||||
Err(e) => {
|
||||
log::info!("Failed to open URL in existing browser: {e}");
|
||||
|
||||
// For Mullvad and Tor browsers, don't fall back to new instance since they use -no-remote
|
||||
// and can't have multiple instances with the same profile
|
||||
match final_profile.browser.as_str() {
|
||||
"mullvad-browser" | "tor-browser" => {
|
||||
Err(format!("Failed to open URL in existing {} browser. Cannot launch new instance due to profile conflict: {}", final_profile.browser, e).into())
|
||||
}
|
||||
_ => {
|
||||
log::info!("Falling back to new instance for browser: {}", final_profile.browser);
|
||||
// Fallback to launching a new instance for other browsers
|
||||
self.launch_browser_internal(app_handle.clone(), &final_profile, url, internal_proxy_settings, None, false).await
|
||||
}
|
||||
}
|
||||
// Fall back to launching a new instance
|
||||
log::info!(
|
||||
"Falling back to new instance for browser: {}",
|
||||
final_profile.browser
|
||||
);
|
||||
// Fallback to launching a new instance for other browsers
|
||||
self
|
||||
.launch_browser_internal(
|
||||
app_handle.clone(),
|
||||
&final_profile,
|
||||
url,
|
||||
internal_proxy_settings,
|
||||
None,
|
||||
false,
|
||||
)
|
||||
.await
|
||||
}
|
||||
}
|
||||
} else {
|
||||
@@ -1095,9 +985,9 @@ impl BrowserRunner {
|
||||
.await
|
||||
{
|
||||
Ok(stopped) => {
|
||||
if stopped {
|
||||
// Verify the process actually died by checking after a short delay
|
||||
if let Some(pid) = camoufox_process.processId {
|
||||
if let Some(pid) = camoufox_process.processId {
|
||||
if stopped {
|
||||
// Verify the process actually died by checking after a short delay
|
||||
use tokio::time::{sleep, Duration};
|
||||
sleep(Duration::from_millis(500)).await;
|
||||
|
||||
@@ -1129,7 +1019,20 @@ impl BrowserRunner {
|
||||
{
|
||||
log::error!("Failed to force kill Camoufox process {}: {}", pid, e);
|
||||
} else {
|
||||
process_actually_stopped = true;
|
||||
// Verify the process is actually dead after force kill
|
||||
use tokio::time::{sleep, Duration};
|
||||
sleep(Duration::from_millis(500)).await;
|
||||
use sysinfo::{Pid, System};
|
||||
let system = System::new_all();
|
||||
process_actually_stopped =
|
||||
system.process(Pid::from(pid as usize)).is_none();
|
||||
if process_actually_stopped {
|
||||
log::info!(
|
||||
"Successfully force killed Camoufox process {} (PID: {:?})",
|
||||
camoufox_process.id,
|
||||
pid
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
#[cfg(target_os = "linux")]
|
||||
@@ -1139,7 +1042,20 @@ impl BrowserRunner {
|
||||
{
|
||||
log::error!("Failed to force kill Camoufox process {}: {}", pid, e);
|
||||
} else {
|
||||
process_actually_stopped = true;
|
||||
// Verify the process is actually dead after force kill
|
||||
use tokio::time::{sleep, Duration};
|
||||
sleep(Duration::from_millis(500)).await;
|
||||
use sysinfo::{Pid, System};
|
||||
let system = System::new_all();
|
||||
process_actually_stopped =
|
||||
system.process(Pid::from(pid as usize)).is_none();
|
||||
if process_actually_stopped {
|
||||
log::info!(
|
||||
"Successfully force killed Camoufox process {} (PID: {:?})",
|
||||
camoufox_process.id,
|
||||
pid
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
#[cfg(target_os = "windows")]
|
||||
@@ -1150,19 +1066,109 @@ impl BrowserRunner {
|
||||
{
|
||||
log::error!("Failed to force kill Camoufox process {}: {}", pid, e);
|
||||
} else {
|
||||
process_actually_stopped = true;
|
||||
// Verify the process is actually dead after force kill
|
||||
use tokio::time::{sleep, Duration};
|
||||
sleep(Duration::from_millis(500)).await;
|
||||
use sysinfo::{Pid, System};
|
||||
let system = System::new_all();
|
||||
process_actually_stopped =
|
||||
system.process(Pid::from(pid as usize)).is_none();
|
||||
if process_actually_stopped {
|
||||
log::info!(
|
||||
"Successfully force killed Camoufox process {} (PID: {:?})",
|
||||
camoufox_process.id,
|
||||
pid
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
process_actually_stopped = true; // No PID to verify, assume stopped
|
||||
// stop_camoufox returned false, try to force kill the process
|
||||
log::warn!(
|
||||
"Camoufox stop command returned false for process {} (PID: {:?}) - attempting force kill",
|
||||
camoufox_process.id,
|
||||
pid
|
||||
);
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
use crate::platform_browser;
|
||||
if let Err(e) = platform_browser::macos::kill_browser_process_impl(
|
||||
pid,
|
||||
Some(&profile_path_str),
|
||||
)
|
||||
.await
|
||||
{
|
||||
log::error!("Failed to force kill Camoufox process {}: {}", pid, e);
|
||||
} else {
|
||||
// Verify the process is actually dead after force kill
|
||||
use tokio::time::{sleep, Duration};
|
||||
sleep(Duration::from_millis(500)).await;
|
||||
use sysinfo::{Pid, System};
|
||||
let system = System::new_all();
|
||||
process_actually_stopped = system.process(Pid::from(pid as usize)).is_none();
|
||||
if process_actually_stopped {
|
||||
log::info!(
|
||||
"Successfully force killed Camoufox process {} (PID: {:?})",
|
||||
camoufox_process.id,
|
||||
pid
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
use crate::platform_browser;
|
||||
if let Err(e) = platform_browser::linux::kill_browser_process_impl(pid).await {
|
||||
log::error!("Failed to force kill Camoufox process {}: {}", pid, e);
|
||||
} else {
|
||||
// Verify the process is actually dead after force kill
|
||||
use tokio::time::{sleep, Duration};
|
||||
sleep(Duration::from_millis(500)).await;
|
||||
use sysinfo::{Pid, System};
|
||||
let system = System::new_all();
|
||||
process_actually_stopped = system.process(Pid::from(pid as usize)).is_none();
|
||||
if process_actually_stopped {
|
||||
log::info!(
|
||||
"Successfully force killed Camoufox process {} (PID: {:?})",
|
||||
camoufox_process.id,
|
||||
pid
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
use crate::platform_browser;
|
||||
if let Err(e) = platform_browser::windows::kill_browser_process_impl(pid).await
|
||||
{
|
||||
log::error!("Failed to force kill Camoufox process {}: {}", pid, e);
|
||||
} else {
|
||||
// Verify the process is actually dead after force kill
|
||||
use tokio::time::{sleep, Duration};
|
||||
sleep(Duration::from_millis(500)).await;
|
||||
use sysinfo::{Pid, System};
|
||||
let system = System::new_all();
|
||||
process_actually_stopped = system.process(Pid::from(pid as usize)).is_none();
|
||||
if process_actually_stopped {
|
||||
log::info!(
|
||||
"Successfully force killed Camoufox process {} (PID: {:?})",
|
||||
camoufox_process.id,
|
||||
pid
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log::warn!(
|
||||
"Failed to stop Camoufox process: {} (PID: {:?})",
|
||||
camoufox_process.id,
|
||||
camoufox_process.processId
|
||||
);
|
||||
// No PID available, assume stopped if stop_camoufox returned true
|
||||
process_actually_stopped = stopped;
|
||||
if !stopped {
|
||||
log::warn!(
|
||||
"Failed to stop Camoufox process {} but no PID available for force kill",
|
||||
camoufox_process.id
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
@@ -1171,6 +1177,71 @@ impl BrowserRunner {
|
||||
camoufox_process.id,
|
||||
e
|
||||
);
|
||||
// Try to force kill if we have a PID
|
||||
if let Some(pid) = camoufox_process.processId {
|
||||
log::info!(
|
||||
"Attempting force kill after stop_camoufox error for PID: {}",
|
||||
pid
|
||||
);
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
use crate::platform_browser;
|
||||
if let Err(kill_err) =
|
||||
platform_browser::macos::kill_browser_process_impl(pid, Some(&profile_path_str))
|
||||
.await
|
||||
{
|
||||
log::error!(
|
||||
"Failed to force kill Camoufox process {}: {}",
|
||||
pid,
|
||||
kill_err
|
||||
);
|
||||
} else {
|
||||
use tokio::time::{sleep, Duration};
|
||||
sleep(Duration::from_millis(500)).await;
|
||||
use sysinfo::{Pid, System};
|
||||
let system = System::new_all();
|
||||
process_actually_stopped = system.process(Pid::from(pid as usize)).is_none();
|
||||
}
|
||||
}
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
use crate::platform_browser;
|
||||
if let Err(kill_err) =
|
||||
platform_browser::linux::kill_browser_process_impl(pid).await
|
||||
{
|
||||
log::error!(
|
||||
"Failed to force kill Camoufox process {}: {}",
|
||||
pid,
|
||||
kill_err
|
||||
);
|
||||
} else {
|
||||
use tokio::time::{sleep, Duration};
|
||||
sleep(Duration::from_millis(500)).await;
|
||||
use sysinfo::{Pid, System};
|
||||
let system = System::new_all();
|
||||
process_actually_stopped = system.process(Pid::from(pid as usize)).is_none();
|
||||
}
|
||||
}
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
use crate::platform_browser;
|
||||
if let Err(kill_err) =
|
||||
platform_browser::windows::kill_browser_process_impl(pid).await
|
||||
{
|
||||
log::error!(
|
||||
"Failed to force kill Camoufox process {}: {}",
|
||||
pid,
|
||||
kill_err
|
||||
);
|
||||
} else {
|
||||
use tokio::time::{sleep, Duration};
|
||||
sleep(Duration::from_millis(500)).await;
|
||||
use sysinfo::{Pid, System};
|
||||
let system = System::new_all();
|
||||
process_actually_stopped = system.process(Pid::from(pid as usize)).is_none();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1191,9 +1262,20 @@ impl BrowserRunner {
|
||||
}
|
||||
}
|
||||
|
||||
// Log warning if process wasn't confirmed stopped, but continue with cleanup
|
||||
// If process wasn't confirmed stopped, return an error
|
||||
if !process_actually_stopped {
|
||||
log::warn!("Camoufox process may still be running, but proceeding with cleanup");
|
||||
log::error!(
|
||||
"Failed to stop Camoufox process for profile: {} (ID: {}) - process may still be running",
|
||||
profile.name,
|
||||
profile.id
|
||||
);
|
||||
return Err(
|
||||
format!(
|
||||
"Failed to stop Camoufox process for profile {} - process may still be running",
|
||||
profile.name
|
||||
)
|
||||
.into(),
|
||||
);
|
||||
}
|
||||
|
||||
// Clear the process ID from the profile
|
||||
@@ -1316,8 +1398,6 @@ impl BrowserRunner {
|
||||
"firefox" => {
|
||||
exe_name.contains("firefox")
|
||||
&& !exe_name.contains("developer")
|
||||
&& !exe_name.contains("tor")
|
||||
&& !exe_name.contains("mullvad")
|
||||
&& !exe_name.contains("camoufox")
|
||||
}
|
||||
"firefox-developer" => {
|
||||
@@ -1333,8 +1413,6 @@ impl BrowserRunner {
|
||||
}))
|
||||
|| exe_name == "firefox" // Firefox Developer might just show as "firefox"
|
||||
}
|
||||
"mullvad-browser" => self.is_tor_or_mullvad_browser(&exe_name, cmd, "mullvad-browser"),
|
||||
"tor-browser" => self.is_tor_or_mullvad_browser(&exe_name, cmd, "tor-browser"),
|
||||
"zen" => exe_name.contains("zen"),
|
||||
"chromium" => exe_name.contains("chromium") || exe_name.contains("chrome"),
|
||||
"brave" => exe_name.contains("brave") || exe_name.contains("Brave"),
|
||||
@@ -1349,7 +1427,7 @@ impl BrowserRunner {
|
||||
|
||||
let profile_path_match = if matches!(
|
||||
profile.browser.as_str(),
|
||||
"firefox" | "firefox-developer" | "tor-browser" | "mullvad-browser" | "zen"
|
||||
"firefox" | "firefox-developer" | "zen"
|
||||
) {
|
||||
// Firefox-based browsers: look for -profile argument followed by path
|
||||
let mut found_profile_arg = false;
|
||||
@@ -1598,8 +1676,6 @@ impl BrowserRunner {
|
||||
"firefox" => {
|
||||
exe_name.contains("firefox")
|
||||
&& !exe_name.contains("developer")
|
||||
&& !exe_name.contains("tor")
|
||||
&& !exe_name.contains("mullvad")
|
||||
&& !exe_name.contains("camoufox")
|
||||
}
|
||||
"firefox-developer" => {
|
||||
@@ -1615,8 +1691,6 @@ impl BrowserRunner {
|
||||
}))
|
||||
|| exe_name == "firefox" // Firefox Developer might just show as "firefox"
|
||||
}
|
||||
"mullvad-browser" => self.is_tor_or_mullvad_browser(&exe_name, cmd, "mullvad-browser"),
|
||||
"tor-browser" => self.is_tor_or_mullvad_browser(&exe_name, cmd, "tor-browser"),
|
||||
"zen" => exe_name.contains("zen"),
|
||||
"chromium" => exe_name.contains("chromium") || exe_name.contains("chrome"),
|
||||
"brave" => exe_name.contains("brave") || exe_name.contains("Brave"),
|
||||
@@ -1630,7 +1704,7 @@ impl BrowserRunner {
|
||||
// Check for profile path match with improved logic
|
||||
let profile_path_match = if matches!(
|
||||
profile.browser.as_str(),
|
||||
"firefox" | "firefox-developer" | "tor-browser" | "mullvad-browser" | "zen"
|
||||
"firefox" | "firefox-developer" | "zen"
|
||||
) {
|
||||
// Firefox-based browsers: look for -profile argument followed by path
|
||||
let mut found_profile_arg = false;
|
||||
@@ -1792,7 +1866,7 @@ pub async fn launch_browser_profile(
|
||||
// For Firefox-based browsers, always apply PAC/user.js to point to the local proxy
|
||||
if matches!(
|
||||
profile_for_launch.browser.as_str(),
|
||||
"firefox" | "firefox-developer" | "zen" | "tor-browser" | "mullvad-browser"
|
||||
"firefox" | "firefox-developer" | "zen"
|
||||
) {
|
||||
let profiles_dir = browser_runner.profile_manager.get_profiles_dir();
|
||||
let profile_path = profiles_dir
|
||||
|
||||
@@ -54,14 +54,6 @@ impl BrowserVersionManager {
|
||||
|
||||
match browser {
|
||||
"firefox" | "firefox-developer" => Ok(true),
|
||||
"mullvad-browser" => {
|
||||
// Mullvad doesn't support ARM64 on Windows and Linux
|
||||
if arch == "arm64" && (os == "windows" || os == "linux") {
|
||||
Ok(false)
|
||||
} else {
|
||||
Ok(true)
|
||||
}
|
||||
}
|
||||
"zen" => {
|
||||
// Zen supports all platforms and architectures
|
||||
Ok(true)
|
||||
@@ -78,14 +70,6 @@ impl BrowserVersionManager {
|
||||
Ok(true)
|
||||
}
|
||||
}
|
||||
"tor-browser" => {
|
||||
// TOR Browser doesn't support ARM64 on Windows and Linux
|
||||
if arch == "arm64" && (os == "windows" || os == "linux") {
|
||||
Ok(false)
|
||||
} else {
|
||||
Ok(true)
|
||||
}
|
||||
}
|
||||
"camoufox" => {
|
||||
// Camoufox supports all platforms and architectures according to the JS code
|
||||
Ok(true)
|
||||
@@ -99,11 +83,9 @@ impl BrowserVersionManager {
|
||||
let all_browsers = vec![
|
||||
"firefox",
|
||||
"firefox-developer",
|
||||
"mullvad-browser",
|
||||
"zen",
|
||||
"brave",
|
||||
"chromium",
|
||||
"tor-browser",
|
||||
"camoufox",
|
||||
];
|
||||
|
||||
@@ -238,11 +220,9 @@ impl BrowserVersionManager {
|
||||
let fresh_versions = match browser {
|
||||
"firefox" => self.fetch_firefox_versions(true).await?, // Always fetch fresh for merging
|
||||
"firefox-developer" => self.fetch_firefox_developer_versions(true).await?,
|
||||
"mullvad-browser" => self.fetch_mullvad_versions(true).await?,
|
||||
"zen" => self.fetch_zen_versions(true).await?,
|
||||
"brave" => self.fetch_brave_versions(true).await?,
|
||||
"chromium" => self.fetch_chromium_versions(true).await?,
|
||||
"tor-browser" => self.fetch_tor_versions(true).await?,
|
||||
"camoufox" => self.fetch_camoufox_versions(true).await?,
|
||||
_ => return Err(format!("Unsupported browser: {browser}").into()),
|
||||
};
|
||||
@@ -356,27 +336,6 @@ impl BrowserVersionManager {
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
"mullvad-browser" => {
|
||||
let releases = self.fetch_mullvad_releases_detailed(true).await?;
|
||||
merged_versions
|
||||
.into_iter()
|
||||
.map(|version| {
|
||||
if let Some(release) = releases.iter().find(|r| r.tag_name == version) {
|
||||
BrowserVersionInfo {
|
||||
version: release.tag_name.clone(),
|
||||
is_prerelease: release.is_nightly,
|
||||
date: release.published_at.clone(),
|
||||
}
|
||||
} else {
|
||||
BrowserVersionInfo {
|
||||
version: version.clone(),
|
||||
is_prerelease: false, // Mullvad usually stable releases
|
||||
date: "".to_string(),
|
||||
}
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
"zen" => {
|
||||
let releases = self.fetch_zen_releases_detailed(true).await?;
|
||||
merged_versions
|
||||
@@ -444,31 +403,6 @@ impl BrowserVersionManager {
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
"tor-browser" => {
|
||||
let releases = self.fetch_tor_releases_detailed(true).await?;
|
||||
merged_versions
|
||||
.into_iter()
|
||||
.map(|version| {
|
||||
if let Some(release) = releases.iter().find(|r| r.version == version) {
|
||||
BrowserVersionInfo {
|
||||
version: release.version.clone(),
|
||||
is_prerelease: crate::api_client::is_browser_version_nightly(
|
||||
"tor-browser",
|
||||
&release.version,
|
||||
None,
|
||||
),
|
||||
date: release.date.clone(),
|
||||
}
|
||||
} else {
|
||||
BrowserVersionInfo {
|
||||
version: version.clone(),
|
||||
is_prerelease: false, // TOR Browser usually stable releases
|
||||
date: "".to_string(),
|
||||
}
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
"camoufox" => {
|
||||
let releases = self.fetch_camoufox_releases_detailed(true).await?;
|
||||
merged_versions
|
||||
@@ -602,50 +536,6 @@ impl BrowserVersionManager {
|
||||
is_archive,
|
||||
})
|
||||
}
|
||||
"mullvad-browser" => {
|
||||
// Mullvad Browser doesn't support ARM64 on Windows and Linux
|
||||
if arch == "arm64" && (os == "windows" || os == "linux") {
|
||||
return Err(format!("Mullvad Browser doesn't support ARM64 on {os}").into());
|
||||
}
|
||||
|
||||
let (platform_str, filename, is_archive) = match os.as_str() {
|
||||
"windows" => {
|
||||
if arch == "arm64" {
|
||||
return Err("Mullvad Browser doesn't support ARM64 on Windows".into());
|
||||
}
|
||||
(
|
||||
"windows-x86_64",
|
||||
format!("mullvad-browser-windows-x86_64-{version}.exe"),
|
||||
false,
|
||||
)
|
||||
}
|
||||
"linux" => {
|
||||
if arch == "arm64" {
|
||||
return Err("Mullvad Browser doesn't support ARM64 on Linux".into());
|
||||
}
|
||||
(
|
||||
"x86_64",
|
||||
format!("mullvad-browser-x86_64-{version}.tar.xz"),
|
||||
true,
|
||||
)
|
||||
}
|
||||
"macos" => (
|
||||
"macos",
|
||||
format!("mullvad-browser-macos-{version}.dmg"),
|
||||
true,
|
||||
),
|
||||
_ => return Err(format!("Unsupported platform for Mullvad Browser: {os}").into()),
|
||||
};
|
||||
|
||||
Ok(DownloadInfo {
|
||||
url: format!(
|
||||
"https://github.com/mullvad/mullvad-browser/releases/download/{version}/mullvad-browser-{platform_str}-{version}{}",
|
||||
if os == "windows" { ".exe" } else if os == "linux" { ".tar.xz" } else { ".dmg" }
|
||||
),
|
||||
filename,
|
||||
is_archive,
|
||||
})
|
||||
}
|
||||
"zen" => {
|
||||
let (asset_name, filename, is_archive) = match (&os[..], &arch[..]) {
|
||||
("windows", "x64") => ("zen.installer.exe", format!("zen-{version}.exe"), false),
|
||||
@@ -731,46 +621,6 @@ impl BrowserVersionManager {
|
||||
is_archive: true,
|
||||
})
|
||||
}
|
||||
"tor-browser" => {
|
||||
// TOR Browser doesn't support ARM64 on Windows and Linux
|
||||
if arch == "arm64" && (os == "windows" || os == "linux") {
|
||||
return Err(format!("TOR Browser doesn't support ARM64 on {os}").into());
|
||||
}
|
||||
|
||||
let (platform_str, filename, is_archive) = match os.as_str() {
|
||||
"windows" => {
|
||||
if arch == "arm64" {
|
||||
return Err("TOR Browser doesn't support ARM64 on Windows".into());
|
||||
}
|
||||
(
|
||||
"windows-x86_64-portable",
|
||||
format!("tor-browser-windows-x86_64-portable-{version}.exe"),
|
||||
false,
|
||||
)
|
||||
}
|
||||
"linux" => {
|
||||
if arch == "arm64" {
|
||||
return Err("TOR Browser doesn't support ARM64 on Linux".into());
|
||||
}
|
||||
(
|
||||
"linux-x86_64",
|
||||
format!("tor-browser-linux-x86_64-{version}.tar.xz"),
|
||||
true,
|
||||
)
|
||||
}
|
||||
"macos" => ("macos", format!("tor-browser-macos-{version}.dmg"), true),
|
||||
_ => return Err(format!("Unsupported platform for TOR Browser: {os}").into()),
|
||||
};
|
||||
|
||||
Ok(DownloadInfo {
|
||||
url: format!(
|
||||
"https://archive.torproject.org/tor-package-archive/torbrowser/{version}/tor-browser-{platform_str}-{version}{}",
|
||||
if os == "windows" { ".exe" } else if os == "linux" { ".tar.xz" } else { ".dmg" }
|
||||
),
|
||||
filename,
|
||||
is_archive,
|
||||
})
|
||||
}
|
||||
"camoufox" => {
|
||||
// Camoufox downloads from GitHub releases with pattern: camoufox-{version}-{release}-{os}.{arch}.zip
|
||||
let (os_name, arch_name) = match (&os[..], &arch[..]) {
|
||||
@@ -864,24 +714,6 @@ impl BrowserVersionManager {
|
||||
.await
|
||||
}
|
||||
|
||||
async fn fetch_mullvad_versions(
|
||||
&self,
|
||||
no_caching: bool,
|
||||
) -> Result<Vec<String>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let releases = self.fetch_mullvad_releases_detailed(no_caching).await?;
|
||||
Ok(releases.into_iter().map(|r| r.tag_name).collect())
|
||||
}
|
||||
|
||||
async fn fetch_mullvad_releases_detailed(
|
||||
&self,
|
||||
no_caching: bool,
|
||||
) -> Result<Vec<GithubRelease>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
self
|
||||
.api_client
|
||||
.fetch_mullvad_releases_with_caching(no_caching)
|
||||
.await
|
||||
}
|
||||
|
||||
async fn fetch_zen_versions(
|
||||
&self,
|
||||
no_caching: bool,
|
||||
@@ -971,24 +803,6 @@ impl BrowserVersionManager {
|
||||
.await
|
||||
}
|
||||
|
||||
async fn fetch_tor_versions(
|
||||
&self,
|
||||
no_caching: bool,
|
||||
) -> Result<Vec<String>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let releases = self.fetch_tor_releases_detailed(no_caching).await?;
|
||||
Ok(releases.into_iter().map(|r| r.version).collect())
|
||||
}
|
||||
|
||||
async fn fetch_tor_releases_detailed(
|
||||
&self,
|
||||
no_caching: bool,
|
||||
) -> Result<Vec<BrowserRelease>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
self
|
||||
.api_client
|
||||
.fetch_tor_releases_with_caching(no_caching)
|
||||
.await
|
||||
}
|
||||
|
||||
async fn fetch_camoufox_versions(
|
||||
&self,
|
||||
no_caching: bool,
|
||||
@@ -1036,7 +850,6 @@ mod tests {
|
||||
base_url.clone(), // firefox_dev_api_base
|
||||
base_url.clone(), // github_api_base
|
||||
base_url.clone(), // chromium_api_base
|
||||
base_url.clone(), // tor_archive_base
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1130,40 +943,6 @@ mod tests {
|
||||
.url
|
||||
.contains("/pub/devedition/releases/139.0b1/"));
|
||||
|
||||
// Test Mullvad Browser
|
||||
let mullvad_info = service
|
||||
.get_download_info("mullvad-browser", "14.5a6")
|
||||
.unwrap();
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
assert_eq!(mullvad_info.filename, "mullvad-browser-macos-14.5a6.dmg");
|
||||
assert!(mullvad_info.url.contains("mullvad-browser-macos-14.5a6"));
|
||||
assert!(mullvad_info.is_archive);
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
assert_eq!(
|
||||
mullvad_info.filename,
|
||||
"mullvad-browser-x86_64-14.5a6.tar.xz"
|
||||
);
|
||||
assert!(mullvad_info.url.contains("mullvad-browser-x86_64-14.5a6"));
|
||||
assert!(mullvad_info.is_archive);
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
assert_eq!(
|
||||
mullvad_info.filename,
|
||||
"mullvad-browser-windows-x86_64-14.5a6.exe"
|
||||
);
|
||||
assert!(mullvad_info
|
||||
.url
|
||||
.contains("mullvad-browser-windows-x86_64-14.5a6"));
|
||||
assert!(!mullvad_info.is_archive);
|
||||
}
|
||||
|
||||
// Test Zen Browser
|
||||
let zen_info = service.get_download_info("zen", "1.11b").unwrap();
|
||||
|
||||
@@ -1188,35 +967,6 @@ mod tests {
|
||||
assert!(!zen_info.is_archive);
|
||||
}
|
||||
|
||||
// Test Tor Browser
|
||||
let tor_info = service.get_download_info("tor-browser", "14.0.4").unwrap();
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
assert_eq!(tor_info.filename, "tor-browser-macos-14.0.4.dmg");
|
||||
assert!(tor_info.url.contains("tor-browser-macos-14.0.4"));
|
||||
assert!(tor_info.is_archive);
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
assert_eq!(tor_info.filename, "tor-browser-linux-x86_64-14.0.4.tar.xz");
|
||||
assert!(tor_info.url.contains("tor-browser-linux-x86_64-14.0.4"));
|
||||
assert!(tor_info.is_archive);
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
assert_eq!(
|
||||
tor_info.filename,
|
||||
"tor-browser-windows-x86_64-portable-14.0.4.exe"
|
||||
);
|
||||
assert!(tor_info
|
||||
.url
|
||||
.contains("tor-browser-windows-x86_64-portable-14.0.4"));
|
||||
assert!(!tor_info.is_archive);
|
||||
}
|
||||
|
||||
// Test Chromium
|
||||
let chromium_info = service.get_download_info("chromium", "1465660").unwrap();
|
||||
|
||||
|
||||
+21
-91
@@ -145,30 +145,6 @@ impl Downloader {
|
||||
|
||||
Ok(asset_url)
|
||||
}
|
||||
BrowserType::MullvadBrowser => {
|
||||
// For Mullvad, verify the asset exists
|
||||
let releases = self
|
||||
.api_client
|
||||
.fetch_mullvad_releases_with_caching(true)
|
||||
.await?;
|
||||
|
||||
let release = releases
|
||||
.iter()
|
||||
.find(|r| r.tag_name == version)
|
||||
.ok_or(format!("Mullvad version {version} not found"))?;
|
||||
|
||||
// Get platform and architecture info
|
||||
let (os, arch) = Self::get_platform_info();
|
||||
|
||||
// Find the appropriate asset
|
||||
let asset_url = self
|
||||
.find_mullvad_asset(&release.assets, &os, &arch)
|
||||
.ok_or(format!(
|
||||
"No compatible asset found for Mullvad version {version} on {os}/{arch}"
|
||||
))?;
|
||||
|
||||
Ok(asset_url)
|
||||
}
|
||||
BrowserType::Camoufox => {
|
||||
// For Camoufox, verify the asset exists and find the correct download URL
|
||||
let releases = self
|
||||
@@ -327,46 +303,6 @@ impl Downloader {
|
||||
asset.map(|a| a.browser_download_url.clone())
|
||||
}
|
||||
|
||||
/// Find the appropriate Mullvad asset for the current platform and architecture
|
||||
fn find_mullvad_asset(
|
||||
&self,
|
||||
assets: &[crate::browser::GithubAsset],
|
||||
os: &str,
|
||||
arch: &str,
|
||||
) -> Option<String> {
|
||||
// Mullvad asset naming patterns:
|
||||
// Windows: mullvad-browser-windows-x86_64-VERSION.exe
|
||||
// macOS: mullvad-browser-macos-VERSION.dmg
|
||||
// Linux: mullvad-browser-x86_64-VERSION.tar.xz
|
||||
|
||||
let asset = match (os, arch) {
|
||||
("windows", "x64") => assets.iter().find(|asset| {
|
||||
asset.name.contains("windows")
|
||||
&& asset.name.contains("x86_64")
|
||||
&& asset.name.ends_with(".exe")
|
||||
}),
|
||||
("windows", "arm64") => {
|
||||
// Mullvad doesn't support ARM64 on Windows
|
||||
None
|
||||
}
|
||||
("macos", _) => assets
|
||||
.iter()
|
||||
.find(|asset| asset.name.contains("macos") && asset.name.ends_with(".dmg")),
|
||||
("linux", "x64") => assets.iter().find(|asset| {
|
||||
asset.name.contains("x86_64")
|
||||
&& asset.name.ends_with(".tar.xz")
|
||||
&& !asset.name.contains("windows")
|
||||
}),
|
||||
("linux", "arm64") => {
|
||||
// Mullvad doesn't support ARM64 on Linux
|
||||
None
|
||||
}
|
||||
_ => None,
|
||||
};
|
||||
|
||||
asset.map(|a| a.browser_download_url.clone())
|
||||
}
|
||||
|
||||
/// Find the appropriate Camoufox asset for the current platform and architecture
|
||||
fn find_camoufox_asset(
|
||||
&self,
|
||||
@@ -385,15 +321,31 @@ impl Downloader {
|
||||
_ => return None,
|
||||
};
|
||||
|
||||
// Look for assets matching the pattern
|
||||
// Use ends_with for precise matching to avoid false positives
|
||||
let pattern = format!(".{os_name}.{arch_name}.zip");
|
||||
let asset = assets.iter().find(|asset| {
|
||||
let name = asset.name.to_lowercase();
|
||||
name.starts_with("camoufox-")
|
||||
&& name.contains(&format!("-{os_name}.{arch_name}.zip"))
|
||||
&& name.ends_with(".zip")
|
||||
name.starts_with("camoufox-") && name.ends_with(&pattern)
|
||||
});
|
||||
|
||||
asset.map(|a| a.browser_download_url.clone())
|
||||
if let Some(asset) = asset {
|
||||
log::info!(
|
||||
"Selected Camoufox asset for {}/{}: {}",
|
||||
os,
|
||||
arch,
|
||||
asset.name
|
||||
);
|
||||
Some(asset.browser_download_url.clone())
|
||||
} else {
|
||||
log::warn!(
|
||||
"No matching Camoufox asset found for {}/{} with pattern '{}'. Available assets: {:?}",
|
||||
os,
|
||||
arch,
|
||||
pattern,
|
||||
assets.iter().map(|a| &a.name).collect::<Vec<_>>()
|
||||
);
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn download_browser<R: tauri::Runtime>(
|
||||
@@ -937,7 +889,6 @@ mod tests {
|
||||
base_url.clone(), // firefox_dev_api_base
|
||||
base_url.clone(), // github_api_base
|
||||
base_url.clone(), // chromium_api_base
|
||||
base_url.clone(), // tor_archive_base
|
||||
)
|
||||
}
|
||||
|
||||
@@ -984,27 +935,6 @@ mod tests {
|
||||
assert_eq!(url, download_info.url);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_resolve_tor_download_url() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "https://archive.torproject.org/tor-package-archive/torbrowser/14.0.4/tor-browser-macos-14.0.4.dmg".to_string(),
|
||||
filename: "tor-test.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::TorBrowser, "14.0.4", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let url = result.unwrap();
|
||||
assert_eq!(url, download_info.url);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_download_browser_with_progress() {
|
||||
let server = setup_mock_server().await;
|
||||
|
||||
@@ -868,9 +868,6 @@ impl Extractor {
|
||||
"chromium.exe",
|
||||
"zen.exe",
|
||||
"brave.exe",
|
||||
"tor-browser.exe",
|
||||
"tor.exe",
|
||||
"mullvad-browser.exe",
|
||||
];
|
||||
|
||||
// First try priority executable names
|
||||
@@ -937,8 +934,6 @@ impl Extractor {
|
||||
|| file_name.contains("chromium")
|
||||
|| file_name.contains("zen")
|
||||
|| file_name.contains("brave")
|
||||
|| file_name.contains("tor")
|
||||
|| file_name.contains("mullvad")
|
||||
|| file_name.contains("browser")
|
||||
{
|
||||
return Ok(path);
|
||||
@@ -1012,15 +1007,6 @@ impl Extractor {
|
||||
"brave-browser-beta",
|
||||
"brave-browser-dev",
|
||||
"brave-bin",
|
||||
// Tor Browser variants
|
||||
"tor-browser",
|
||||
"torbrowser-launcher",
|
||||
"tor-browser_en-US",
|
||||
"start-tor-browser",
|
||||
"Browser/start-tor-browser",
|
||||
// Mullvad Browser
|
||||
"mullvad-browser",
|
||||
"mullvad-browser-bin",
|
||||
// Camoufox variants
|
||||
"camoufox",
|
||||
"camoufox-bin",
|
||||
@@ -1049,19 +1035,14 @@ impl Extractor {
|
||||
"chromium",
|
||||
"brave",
|
||||
"zen",
|
||||
"tor-browser",
|
||||
"mullvad-browser",
|
||||
"camoufox",
|
||||
".",
|
||||
"./",
|
||||
"firefox",
|
||||
"mullvad-browser",
|
||||
"tor-browser_en-US",
|
||||
"Browser",
|
||||
"browser",
|
||||
"opt/google/chrome",
|
||||
"opt/brave.com/brave",
|
||||
"opt/mullvad-browser",
|
||||
"opt/camoufox",
|
||||
"usr/lib/firefox",
|
||||
"usr/lib/chromium",
|
||||
@@ -1159,8 +1140,7 @@ impl Extractor {
|
||||
|| name_lower.contains("chrome")
|
||||
|| name_lower.contains("brave")
|
||||
|| name_lower.contains("zen")
|
||||
|| name_lower.contains("tor")
|
||||
|| name_lower.contains("mullvad")
|
||||
|| name_lower.contains("camoufox")
|
||||
|| name_lower.ends_with(".appimage")
|
||||
|| !name_lower.contains('.')
|
||||
{
|
||||
@@ -1215,8 +1195,6 @@ impl Extractor {
|
||||
|| name_lower.contains("chrome")
|
||||
|| name_lower.contains("brave")
|
||||
|| name_lower.contains("zen")
|
||||
|| name_lower.contains("tor")
|
||||
|| name_lower.contains("mullvad")
|
||||
|| name_lower.contains("camoufox")
|
||||
|| file_name.ends_with(".AppImage")
|
||||
{
|
||||
|
||||
@@ -249,12 +249,8 @@ async fn is_geoip_database_available() -> Result<bool, String> {
|
||||
|
||||
#[tauri::command]
|
||||
async fn get_all_traffic_snapshots() -> Result<Vec<crate::traffic_stats::TrafficSnapshot>, String> {
|
||||
Ok(
|
||||
crate::traffic_stats::list_traffic_stats()
|
||||
.into_iter()
|
||||
.map(|s| s.to_snapshot())
|
||||
.collect(),
|
||||
)
|
||||
// Use real-time snapshots that merge in-memory data with disk data
|
||||
Ok(crate::traffic_stats::get_all_traffic_snapshots_realtime())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
@@ -594,7 +590,8 @@ pub fn run() {
|
||||
// Periodically broadcast browser running status to the frontend
|
||||
let app_handle_status = app.handle().clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
let mut interval = tokio::time::interval(tokio::time::Duration::from_millis(500));
|
||||
let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(5));
|
||||
interval.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip);
|
||||
let mut last_running_states: std::collections::HashMap<String, bool> =
|
||||
std::collections::HashMap::new();
|
||||
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
use crate::browser::{create_browser, BrowserType};
|
||||
use crate::profile::BrowserProfile;
|
||||
use std::ffi::OsString;
|
||||
use std::path::Path;
|
||||
use std::process::Command;
|
||||
|
||||
@@ -10,40 +9,6 @@ pub mod macos {
|
||||
use super::*;
|
||||
use sysinfo::{Pid, System};
|
||||
|
||||
pub fn is_tor_or_mullvad_browser(exe_name: &str, cmd: &[OsString], browser_type: &str) -> bool {
|
||||
match browser_type {
|
||||
"mullvad-browser" => {
|
||||
let has_mullvad_in_exe = exe_name.contains("mullvad");
|
||||
let has_firefox_exe = exe_name == "firefox" || exe_name.contains("firefox-bin");
|
||||
let has_mullvad_in_cmd = cmd.iter().any(|arg| {
|
||||
let arg_str = arg.to_str().unwrap_or("");
|
||||
arg_str.contains("Mullvad Browser.app")
|
||||
|| arg_str.contains("mullvad")
|
||||
|| arg_str.contains("Mullvad")
|
||||
|| arg_str.contains("/Applications/Mullvad Browser.app/")
|
||||
|| arg_str.contains("MullvadBrowser")
|
||||
});
|
||||
|
||||
has_mullvad_in_exe || (has_firefox_exe && has_mullvad_in_cmd)
|
||||
}
|
||||
"tor-browser" => {
|
||||
let has_tor_in_exe = exe_name.contains("tor");
|
||||
let has_firefox_exe = exe_name == "firefox" || exe_name.contains("firefox-bin");
|
||||
let has_tor_in_cmd = cmd.iter().any(|arg| {
|
||||
let arg_str = arg.to_str().unwrap_or("");
|
||||
arg_str.contains("Tor Browser.app")
|
||||
|| arg_str.contains("tor-browser")
|
||||
|| arg_str.contains("TorBrowser")
|
||||
|| arg_str.contains("/Applications/Tor Browser.app/")
|
||||
|| arg_str.contains("TorBrowser-Data")
|
||||
});
|
||||
|
||||
has_tor_in_exe || (has_firefox_exe && has_tor_in_cmd)
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn launch_browser_process(
|
||||
executable_path: &std::path::Path,
|
||||
args: &[String],
|
||||
@@ -375,122 +340,6 @@ end try
|
||||
descendants
|
||||
}
|
||||
|
||||
pub async fn open_url_in_existing_browser_tor_mullvad(
|
||||
profile: &BrowserProfile,
|
||||
url: &str,
|
||||
browser_type: BrowserType,
|
||||
browser_dir: &Path,
|
||||
_profiles_dir: &Path,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let pid = profile.process_id.unwrap();
|
||||
|
||||
log::info!("Opening URL in TOR/Mullvad browser using file-based approach (PID: {pid})");
|
||||
|
||||
// Method 1: Try using a temporary HTML file approach
|
||||
log::info!("Attempting file-based URL opening for TOR/Mullvad browser");
|
||||
|
||||
let temp_dir = std::env::temp_dir();
|
||||
let temp_file_name = format!("donut_browser_url_{}.html", std::process::id());
|
||||
let temp_file_path = temp_dir.join(&temp_file_name);
|
||||
|
||||
let html_content = format!(
|
||||
r#"<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="refresh" content="0; url={url}">
|
||||
<title>Redirecting...</title>
|
||||
<script>
|
||||
window.location.href = "{url}";
|
||||
</script>
|
||||
</head>
|
||||
<body>
|
||||
<p>Redirecting to <a href="{url}">{url}</a>...</p>
|
||||
</body>
|
||||
</html>"#
|
||||
);
|
||||
|
||||
match std::fs::write(&temp_file_path, html_content) {
|
||||
Ok(()) => {
|
||||
log::info!("Created temporary HTML file: {temp_file_path:?}");
|
||||
|
||||
let browser = create_browser(browser_type.clone());
|
||||
if let Ok(executable_path) = browser.get_executable_path(browser_dir) {
|
||||
let open_result = Command::new("open")
|
||||
.args([
|
||||
"-a",
|
||||
executable_path.to_str().unwrap(),
|
||||
temp_file_path.to_str().unwrap(),
|
||||
])
|
||||
.output();
|
||||
|
||||
// Clean up the temporary file after a short delay
|
||||
let temp_file_path_clone = temp_file_path.clone();
|
||||
tokio::spawn(async move {
|
||||
tokio::time::sleep(tokio::time::Duration::from_secs(5)).await;
|
||||
let _ = std::fs::remove_file(temp_file_path_clone);
|
||||
});
|
||||
|
||||
match open_result {
|
||||
Ok(output) if output.status.success() => {
|
||||
log::info!("Successfully opened URL using file-based approach");
|
||||
return Ok(());
|
||||
}
|
||||
Ok(output) => {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
log::info!("File-based approach failed: {stderr}");
|
||||
}
|
||||
Err(e) => {
|
||||
log::info!("File-based approach error: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let _ = std::fs::remove_file(&temp_file_path);
|
||||
}
|
||||
Err(e) => {
|
||||
log::info!("Failed to create temporary HTML file: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
// Method 2: Try using the 'open' command directly with the URL
|
||||
log::info!("Attempting direct URL opening with 'open' command");
|
||||
|
||||
let browser = create_browser(browser_type.clone());
|
||||
if let Ok(executable_path) = browser.get_executable_path(browser_dir) {
|
||||
let direct_open_result = Command::new("open")
|
||||
.args(["-a", executable_path.to_str().unwrap(), url])
|
||||
.output();
|
||||
|
||||
match direct_open_result {
|
||||
Ok(output) if output.status.success() => {
|
||||
log::info!("Successfully opened URL using direct 'open' command");
|
||||
return Ok(());
|
||||
}
|
||||
Ok(output) => {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
log::info!("Direct 'open' command failed: {stderr}");
|
||||
}
|
||||
Err(e) => {
|
||||
log::info!("Direct 'open' command error: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If all methods fail, return a helpful error message
|
||||
Err(
|
||||
format!(
|
||||
"Failed to open URL in existing TOR/Mullvad browser (PID: {pid}). All methods failed:\n\
|
||||
1. File-based approach failed\n\
|
||||
2. Direct 'open' command failed\n\
|
||||
\n\
|
||||
This may be due to browser security restrictions or the browser process may have changed.\n\
|
||||
Try closing and reopening the browser, or manually paste the URL: {url}"
|
||||
)
|
||||
.into(),
|
||||
)
|
||||
}
|
||||
|
||||
pub async fn open_url_in_existing_browser_chromium(
|
||||
profile: &BrowserProfile,
|
||||
url: &str,
|
||||
@@ -622,42 +471,6 @@ end try
|
||||
pub mod windows {
|
||||
use super::*;
|
||||
|
||||
pub fn is_tor_or_mullvad_browser(exe_name: &str, cmd: &[OsString], browser_type: &str) -> bool {
|
||||
let exe_lower = exe_name.to_lowercase();
|
||||
|
||||
// Check for Firefox-based browsers first by executable name
|
||||
let is_firefox_family = exe_lower.contains("firefox") || exe_lower.contains(".exe");
|
||||
|
||||
if !is_firefox_family {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check command arguments for profile paths and browser-specific indicators
|
||||
let cmd_line = cmd
|
||||
.iter()
|
||||
.map(|s| s.to_string_lossy().to_lowercase())
|
||||
.collect::<Vec<_>>()
|
||||
.join(" ");
|
||||
|
||||
match browser_type {
|
||||
"tor-browser" => {
|
||||
// Check for TOR browser specific paths and arguments
|
||||
cmd_line.contains("tor")
|
||||
|| cmd_line.contains("browser\\torbrowser")
|
||||
|| cmd_line.contains("tor-browser")
|
||||
|| cmd_line.contains("profile") && (cmd_line.contains("tor") || cmd_line.contains("tbb"))
|
||||
}
|
||||
"mullvad-browser" => {
|
||||
// Check for Mullvad browser specific paths and arguments
|
||||
cmd_line.contains("mullvad")
|
||||
|| cmd_line.contains("browser\\mullvadbrowser")
|
||||
|| cmd_line.contains("mullvad-browser")
|
||||
|| cmd_line.contains("profile") && cmd_line.contains("mullvad")
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn launch_browser_process(
|
||||
executable_path: &std::path::Path,
|
||||
args: &[String],
|
||||
@@ -782,48 +595,6 @@ pub mod windows {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn open_url_in_existing_browser_tor_mullvad(
|
||||
profile: &BrowserProfile,
|
||||
url: &str,
|
||||
browser_type: BrowserType,
|
||||
browser_dir: &Path,
|
||||
profiles_dir: &Path,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
// On Windows, TOR and Mullvad browsers can sometimes accept URLs via command line
|
||||
// even with -no-remote, by launching a new instance that hands off to existing one
|
||||
let browser = create_browser(browser_type.clone());
|
||||
let executable_path = browser
|
||||
.get_executable_path(browser_dir)
|
||||
.map_err(|e| format!("Failed to get executable path: {}", e))?;
|
||||
|
||||
let mut cmd = Command::new(&executable_path);
|
||||
let profile_data_path = profile.get_profile_data_path(profiles_dir);
|
||||
cmd.args(["-profile", &profile_data_path.to_string_lossy(), url]);
|
||||
|
||||
// Set working directory
|
||||
if let Some(parent_dir) = browser_dir
|
||||
.parent()
|
||||
.or_else(|| browser_dir.ancestors().nth(1))
|
||||
{
|
||||
cmd.current_dir(parent_dir);
|
||||
}
|
||||
|
||||
let output = cmd.output()?;
|
||||
|
||||
if !output.status.success() {
|
||||
return Err(
|
||||
format!(
|
||||
"Failed to open URL in existing {}: {}. Note: TOR and Mullvad browsers may require manual URL opening for security reasons.",
|
||||
browser_type.as_str(),
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
)
|
||||
.into(),
|
||||
);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn open_url_in_existing_browser_chromium(
|
||||
profile: &BrowserProfile,
|
||||
url: &str,
|
||||
@@ -909,15 +680,6 @@ pub mod windows {
|
||||
pub mod linux {
|
||||
use super::*;
|
||||
|
||||
pub fn is_tor_or_mullvad_browser(
|
||||
_exe_name: &str,
|
||||
_cmd: &[OsString],
|
||||
_browser_type: &str,
|
||||
) -> bool {
|
||||
// Linux implementation would go here
|
||||
false
|
||||
}
|
||||
|
||||
pub async fn launch_browser_process(
|
||||
executable_path: &std::path::Path,
|
||||
args: &[String],
|
||||
@@ -1074,16 +836,6 @@ pub mod linux {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn open_url_in_existing_browser_tor_mullvad(
|
||||
_profile: &BrowserProfile,
|
||||
_url: &str,
|
||||
_browser_type: BrowserType,
|
||||
_browser_dir: &Path,
|
||||
_profiles_dir: &Path,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
Err("Opening URLs in existing Firefox-based browsers is not supported on Linux when using -no-remote".into())
|
||||
}
|
||||
|
||||
pub async fn open_url_in_existing_browser_chromium(
|
||||
profile: &BrowserProfile,
|
||||
url: &str,
|
||||
|
||||
+124
-173
@@ -750,7 +750,7 @@ impl ProfileManager {
|
||||
|
||||
// For non-camoufox browsers, use the existing PID-based logic
|
||||
let inner_profile = profile.clone();
|
||||
let system = System::new_all();
|
||||
let mut system = System::new();
|
||||
let mut is_running = false;
|
||||
let mut found_pid: Option<u32> = None;
|
||||
|
||||
@@ -765,10 +765,8 @@ impl ProfileManager {
|
||||
let profile_path_match = cmd.iter().any(|s| {
|
||||
let arg = s.to_str().unwrap_or("");
|
||||
// For Firefox-based browsers, check for exact profile path match
|
||||
if profile.browser == "tor-browser"
|
||||
|| profile.browser == "firefox"
|
||||
if profile.browser == "firefox"
|
||||
|| profile.browser == "firefox-developer"
|
||||
|| profile.browser == "mullvad-browser"
|
||||
|| profile.browser == "zen"
|
||||
{
|
||||
arg == profile_data_path_str
|
||||
@@ -794,6 +792,8 @@ impl ProfileManager {
|
||||
|
||||
// If we didn't find the browser with the stored PID, search all processes
|
||||
if !is_running {
|
||||
// Refresh all processes only when we need to search (expensive but necessary)
|
||||
system.refresh_all();
|
||||
for (pid, process) in system.processes() {
|
||||
let cmd = process.cmd();
|
||||
if cmd.len() >= 2 {
|
||||
@@ -803,13 +803,9 @@ impl ProfileManager {
|
||||
"firefox" => {
|
||||
exe_name.contains("firefox")
|
||||
&& !exe_name.contains("developer")
|
||||
&& !exe_name.contains("tor")
|
||||
&& !exe_name.contains("mullvad")
|
||||
&& !exe_name.contains("camoufox")
|
||||
}
|
||||
"firefox-developer" => exe_name.contains("firefox") && exe_name.contains("developer"),
|
||||
"mullvad-browser" => self.is_tor_or_mullvad_browser(&exe_name, cmd, "mullvad-browser"),
|
||||
"tor-browser" => self.is_tor_or_mullvad_browser(&exe_name, cmd, "tor-browser"),
|
||||
"zen" => exe_name.contains("zen"),
|
||||
"chromium" => exe_name.contains("chromium"),
|
||||
"brave" => exe_name.contains("brave"),
|
||||
@@ -832,10 +828,8 @@ impl ProfileManager {
|
||||
// Camoufox uses user_data_dir like Chromium browsers
|
||||
arg.contains(&format!("--user-data-dir={profile_data_path_str}"))
|
||||
|| arg == profile_data_path_str
|
||||
} else if profile.browser == "tor-browser"
|
||||
|| profile.browser == "firefox"
|
||||
} else if profile.browser == "firefox"
|
||||
|| profile.browser == "firefox-developer"
|
||||
|| profile.browser == "mullvad-browser"
|
||||
|| profile.browser == "zen"
|
||||
{
|
||||
arg == profile_data_path_str
|
||||
@@ -882,7 +876,6 @@ impl ProfileManager {
|
||||
None => inner_profile.clone(),
|
||||
};
|
||||
|
||||
let previous_pid = latest_profile.process_id;
|
||||
let mut merged = latest_profile.clone();
|
||||
|
||||
if let Some(pid) = found_pid {
|
||||
@@ -898,13 +891,6 @@ impl ProfileManager {
|
||||
if let Err(e) = self.save_profile(&merged) {
|
||||
log::warn!("Warning: Failed to clear profile PID: {e}");
|
||||
}
|
||||
|
||||
// Stop any associated proxy immediately when the browser stops
|
||||
if let Some(old_pid) = previous_pid {
|
||||
let _ = crate::proxy_manager::PROXY_MANAGER
|
||||
.stop_proxy(app_handle.clone(), old_pid)
|
||||
.await;
|
||||
}
|
||||
}
|
||||
|
||||
// Emit profile update event to frontend
|
||||
@@ -982,18 +968,12 @@ impl ProfileManager {
|
||||
None => profile.clone(),
|
||||
};
|
||||
|
||||
if let Some(old_pid) = latest.process_id {
|
||||
if latest.process_id.is_some() {
|
||||
latest.process_id = None;
|
||||
if let Err(e) = self.save_profile(&latest) {
|
||||
log::warn!("Warning: Failed to clear Camoufox profile process info: {e}");
|
||||
}
|
||||
|
||||
// Stop any proxy tied to this old PID immediately
|
||||
let _ = crate::proxy_manager::PROXY_MANAGER
|
||||
.stop_proxy(app_handle.clone(), old_pid)
|
||||
.await;
|
||||
|
||||
// Emit profile update event to frontend
|
||||
if let Err(e) = app_handle.emit("profile-updated", &latest) {
|
||||
log::warn!("Warning: Failed to emit profile update event: {e}");
|
||||
}
|
||||
@@ -1018,7 +998,7 @@ impl ProfileManager {
|
||||
None => profile.clone(),
|
||||
};
|
||||
|
||||
if let Some(old_pid) = latest.process_id {
|
||||
if latest.process_id.is_some() {
|
||||
latest.process_id = None;
|
||||
if let Err(e2) = self.save_profile(&latest) {
|
||||
log::warn!(
|
||||
@@ -1026,11 +1006,6 @@ impl ProfileManager {
|
||||
);
|
||||
}
|
||||
|
||||
// Best-effort stop of proxy tied to old PID
|
||||
let _ = crate::proxy_manager::PROXY_MANAGER
|
||||
.stop_proxy(app_handle.clone(), old_pid)
|
||||
.await;
|
||||
|
||||
// Emit profile update event to frontend
|
||||
if let Err(e3) = app_handle.emit("profile-updated", &latest) {
|
||||
log::warn!("Warning: Failed to emit profile update event: {e3}");
|
||||
@@ -1042,36 +1017,9 @@ impl ProfileManager {
|
||||
}
|
||||
}
|
||||
|
||||
// Helper function to check if a process matches TOR/Mullvad browser
|
||||
fn is_tor_or_mullvad_browser(
|
||||
&self,
|
||||
exe_name: &str,
|
||||
cmd: &[std::ffi::OsString],
|
||||
browser_type: &str,
|
||||
) -> bool {
|
||||
#[cfg(target_os = "macos")]
|
||||
return crate::platform_browser::macos::is_tor_or_mullvad_browser(exe_name, cmd, browser_type);
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
return crate::platform_browser::windows::is_tor_or_mullvad_browser(
|
||||
exe_name,
|
||||
cmd,
|
||||
browser_type,
|
||||
);
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
return crate::platform_browser::linux::is_tor_or_mullvad_browser(exe_name, cmd, browser_type);
|
||||
|
||||
#[cfg(not(any(target_os = "macos", target_os = "windows", target_os = "linux")))]
|
||||
{
|
||||
let _ = (exe_name, cmd, browser_type);
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
fn get_common_firefox_preferences(&self) -> Vec<String> {
|
||||
vec![
|
||||
// Disable default browser updates
|
||||
// Disable default browser check
|
||||
"user_pref(\"browser.shell.checkDefaultBrowser\", false);".to_string(),
|
||||
"user_pref(\"browser.shell.skipDefaultBrowserCheckOnFirstRun\", true);".to_string(),
|
||||
"user_pref(\"browser.preferences.moreFromMozilla\", false);".to_string(),
|
||||
@@ -1086,27 +1034,58 @@ impl ProfileManager {
|
||||
// Keep extension updates enabled
|
||||
"user_pref(\"extensions.update.enabled\", true);".to_string(),
|
||||
"user_pref(\"extensions.update.autoUpdateDefault\", true);".to_string(),
|
||||
// Completely disable browser update checking
|
||||
"user_pref(\"app.update.enabled\", false);".to_string(),
|
||||
"user_pref(\"app.update.staging.enabled\", false);".to_string(),
|
||||
"user_pref(\"app.update.timerFirstInterval\", -1);".to_string(),
|
||||
"user_pref(\"app.update.download.maxAttempts\", 0);".to_string(),
|
||||
"user_pref(\"app.update.elevate.maxAttempts\", 0);".to_string(),
|
||||
"user_pref(\"app.update.disabledForTesting\", true);".to_string(),
|
||||
"user_pref(\"app.update.auto\", false);".to_string(),
|
||||
"user_pref(\"app.update.mode\", 0);".to_string(),
|
||||
"user_pref(\"app.update.promptWaitTime\", -1);".to_string(),
|
||||
"user_pref(\"app.update.service.enabled\", false);".to_string(),
|
||||
"user_pref(\"app.update.staging.enabled\", false);".to_string(),
|
||||
"user_pref(\"app.update.silent\", true);".to_string(),
|
||||
"user_pref(\"app.update.disabledForTesting\", true);".to_string(),
|
||||
// Prevent update URL access entirely
|
||||
"user_pref(\"app.update.url\", \"\");".to_string(),
|
||||
"user_pref(\"app.update.url.manual\", \"\");".to_string(),
|
||||
"user_pref(\"app.update.url.details\", \"\");".to_string(),
|
||||
// Disable update timing/scheduling
|
||||
"user_pref(\"app.update.timerFirstInterval\", 999999999);".to_string(),
|
||||
"user_pref(\"app.update.interval\", 999999999);".to_string(),
|
||||
"user_pref(\"app.update.background.interval\", 999999999);".to_string(),
|
||||
"user_pref(\"app.update.idletime\", 999999999);".to_string(),
|
||||
"user_pref(\"app.update.promptWaitTime\", 999999999);".to_string(),
|
||||
// Disable update attempts
|
||||
"user_pref(\"app.update.download.maxAttempts\", 0);".to_string(),
|
||||
"user_pref(\"app.update.elevate.maxAttempts\", 0);".to_string(),
|
||||
"user_pref(\"app.update.checkInstallTime\", false);".to_string(),
|
||||
"user_pref(\"app.update.interval\", -1);".to_string(),
|
||||
"user_pref(\"app.update.background.interval\", -1);".to_string(),
|
||||
"user_pref(\"app.update.idletime\", -1);".to_string(),
|
||||
// Suppress additional update UI/prompts
|
||||
// Suppress update UI/prompts/notifications
|
||||
"user_pref(\"app.update.doorhanger\", false);".to_string(),
|
||||
"user_pref(\"app.update.badge\", false);".to_string(),
|
||||
"user_pref(\"app.update.notifyDuringDownload\", false);".to_string(),
|
||||
"user_pref(\"app.update.background.scheduling.enabled\", false);".to_string(),
|
||||
"user_pref(\"app.update.background.enabled\", false);".to_string(),
|
||||
// Disable BITS (Windows Background Intelligent Transfer Service) updates
|
||||
"user_pref(\"app.update.BITS.enabled\", false);".to_string(),
|
||||
// Disable language pack updates
|
||||
"user_pref(\"app.update.langpack.enabled\", false);".to_string(),
|
||||
// Suppress upgrade dialogs on startup
|
||||
"user_pref(\"browser.startup.upgradeDialog.enabled\", false);".to_string(),
|
||||
// Disable update ping telemetry
|
||||
"user_pref(\"toolkit.telemetry.updatePing.enabled\", false);".to_string(),
|
||||
// Zen browser specific - disable welcome screen and updates
|
||||
"user_pref(\"zen.welcome-screen.seen\", true);".to_string(),
|
||||
"user_pref(\"zen.updates.enabled\", false);".to_string(),
|
||||
"user_pref(\"zen.updates.check-for-updates\", false);".to_string(),
|
||||
// Additional first-run suppressions
|
||||
"user_pref(\"app.normandy.first_run\", false);".to_string(),
|
||||
"user_pref(\"trailhead.firstrun.didSeeAboutWelcome\", true);".to_string(),
|
||||
"user_pref(\"datareporting.policy.dataSubmissionPolicyBypassNotification\", true);"
|
||||
.to_string(),
|
||||
"user_pref(\"toolkit.telemetry.reportingpolicy.firstRun\", false);".to_string(),
|
||||
// Disable quit confirmation dialogs
|
||||
"user_pref(\"browser.warnOnQuit\", false);".to_string(),
|
||||
"user_pref(\"browser.showQuitWarning\", false);".to_string(),
|
||||
"user_pref(\"browser.tabs.warnOnClose\", false);".to_string(),
|
||||
"user_pref(\"browser.tabs.warnOnCloseOtherTabs\", false);".to_string(),
|
||||
"user_pref(\"browser.sessionstore.warnOnQuit\", false);".to_string(),
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1128,114 +1107,76 @@ impl ProfileManager {
|
||||
|
||||
let mut preferences = Vec::new();
|
||||
|
||||
// Get the UUID directory (parent of profile data directory)
|
||||
let uuid_dir = profile_data_path
|
||||
.parent()
|
||||
.ok_or("Invalid profile path - cannot find UUID directory")?;
|
||||
|
||||
// Add common Firefox preferences (like disabling default browser check)
|
||||
preferences.extend(self.get_common_firefox_preferences());
|
||||
|
||||
// Use embedded PAC template instead of reading from file
|
||||
const PAC_TEMPLATE: &str = r#"function FindProxyForURL(url, host) {
|
||||
return "{{proxy_url}}";
|
||||
}"#;
|
||||
// Determine which proxy settings to use
|
||||
let effective_proxy = internal_proxy.unwrap_or(proxy);
|
||||
let proxy_host = &effective_proxy.host;
|
||||
let proxy_port = effective_proxy.port;
|
||||
|
||||
// Format proxy URL based on type and whether we have an internal proxy
|
||||
let proxy_url = if let Some(internal) = internal_proxy {
|
||||
// Use internal proxy (local proxy) as the primary proxy
|
||||
// This is the local proxy that forwards to the upstream proxy
|
||||
log::info!(
|
||||
"Applying local proxy settings to Firefox profile: {}:{}",
|
||||
internal.host,
|
||||
internal.port
|
||||
);
|
||||
format!("HTTP {}:{}", internal.host, internal.port)
|
||||
} else {
|
||||
// Use user-configured proxy directly (upstream proxy)
|
||||
log::info!(
|
||||
"Applying upstream proxy settings to Firefox profile: {}:{} ({})",
|
||||
proxy.host,
|
||||
proxy.port,
|
||||
proxy.proxy_type
|
||||
);
|
||||
match proxy.proxy_type.as_str() {
|
||||
"http" => format!("HTTP {}:{}", proxy.host, proxy.port),
|
||||
"https" => format!("HTTPS {}:{}", proxy.host, proxy.port),
|
||||
"socks4" => format!("SOCKS4 {}:{}", proxy.host, proxy.port),
|
||||
"socks5" => format!("SOCKS5 {}:{}", proxy.host, proxy.port),
|
||||
_ => return Err(format!("Unsupported proxy type: {}", proxy.proxy_type).into()),
|
||||
}
|
||||
};
|
||||
// Check if this is a SOCKS proxy (only possible when using upstream directly)
|
||||
let is_socks =
|
||||
internal_proxy.is_none() && (proxy.proxy_type == "socks4" || proxy.proxy_type == "socks5");
|
||||
|
||||
// Replace placeholders in PAC file
|
||||
let pac_content = PAC_TEMPLATE
|
||||
.replace("{{proxy_url}}", &proxy_url)
|
||||
.replace("{{proxy_credentials}}", ""); // Credentials are now handled by the PAC file
|
||||
|
||||
// Save PAC file in UUID directory
|
||||
let pac_path = uuid_dir.join("proxy.pac");
|
||||
log::info!(
|
||||
"Creating PAC file at: {} with proxy: {}",
|
||||
pac_path.display(),
|
||||
proxy_url
|
||||
);
|
||||
fs::write(&pac_path, &pac_content)?;
|
||||
log::info!(
|
||||
"Created PAC file at: {} with content: {}",
|
||||
pac_path.display(),
|
||||
pac_content
|
||||
"Applying manual proxy settings to Firefox profile: {}:{} (is_internal: {}, is_socks: {})",
|
||||
proxy_host,
|
||||
proxy_port,
|
||||
internal_proxy.is_some(),
|
||||
is_socks
|
||||
);
|
||||
|
||||
// Configure Firefox to use the PAC file
|
||||
// Convert path to absolute and properly format for file:// URL
|
||||
let pac_path_absolute = pac_path.canonicalize().unwrap_or_else(|_| pac_path.clone());
|
||||
let pac_url = if cfg!(windows) {
|
||||
// Windows: file:///C:/path/to/file.pac
|
||||
format!(
|
||||
"file:///{}",
|
||||
pac_path_absolute.to_string_lossy().replace('\\', "/")
|
||||
)
|
||||
// Use MANUAL proxy configuration (type 1) instead of PAC file (type 2)
|
||||
// PAC files with file:// URLs are blocked by privacy-focused browsers like Zen
|
||||
// Manual proxy configuration works reliably across all Firefox variants
|
||||
preferences.push("user_pref(\"network.proxy.type\", 1);".to_string());
|
||||
|
||||
if is_socks {
|
||||
// SOCKS proxy configuration
|
||||
preferences.extend([
|
||||
format!("user_pref(\"network.proxy.socks\", \"{}\");", proxy_host),
|
||||
format!("user_pref(\"network.proxy.socks_port\", {});", proxy_port),
|
||||
format!(
|
||||
"user_pref(\"network.proxy.socks_version\", {});",
|
||||
if proxy.proxy_type == "socks5" { 5 } else { 4 }
|
||||
),
|
||||
"user_pref(\"network.proxy.http\", \"\");".to_string(),
|
||||
"user_pref(\"network.proxy.http_port\", 0);".to_string(),
|
||||
"user_pref(\"network.proxy.ssl\", \"\");".to_string(),
|
||||
"user_pref(\"network.proxy.ssl_port\", 0);".to_string(),
|
||||
]);
|
||||
} else {
|
||||
// Unix/macOS: file:///absolute/path/to/file.pac (three slashes for absolute path)
|
||||
format!("file://{}", pac_path_absolute.to_string_lossy())
|
||||
};
|
||||
|
||||
log::info!("PAC file path (absolute): {}", pac_path_absolute.display());
|
||||
log::info!("PAC file URL for Firefox: {}", pac_url);
|
||||
// HTTP/HTTPS proxy configuration (including our internal local proxy)
|
||||
preferences.extend([
|
||||
format!("user_pref(\"network.proxy.http\", \"{}\");", proxy_host),
|
||||
format!("user_pref(\"network.proxy.http_port\", {});", proxy_port),
|
||||
format!("user_pref(\"network.proxy.ssl\", \"{}\");", proxy_host),
|
||||
format!("user_pref(\"network.proxy.ssl_port\", {});", proxy_port),
|
||||
format!("user_pref(\"network.proxy.ftp\", \"{}\");", proxy_host),
|
||||
format!("user_pref(\"network.proxy.ftp_port\", {});", proxy_port),
|
||||
"user_pref(\"network.proxy.socks\", \"\");".to_string(),
|
||||
"user_pref(\"network.proxy.socks_port\", 0);".to_string(),
|
||||
]);
|
||||
}
|
||||
|
||||
// Common proxy settings - keep it simple like proxy-chain expected
|
||||
preferences.extend([
|
||||
"user_pref(\"network.proxy.type\", 2);".to_string(),
|
||||
format!(
|
||||
"user_pref(\"network.proxy.autoconfig_url\", \"{}\");",
|
||||
pac_url
|
||||
),
|
||||
"user_pref(\"network.proxy.failover_direct\", false);".to_string(),
|
||||
"user_pref(\"network.proxy.socks_remote_dns\", true);".to_string(),
|
||||
"user_pref(\"network.proxy.no_proxies_on\", \"\");".to_string(),
|
||||
"user_pref(\"signon.autologin.proxy\", true);".to_string(),
|
||||
"user_pref(\"network.proxy.share_proxy_settings\", false);".to_string(),
|
||||
"user_pref(\"network.automatic-ntlm-auth.allow-proxies\", false);".to_string(),
|
||||
"user_pref(\"network.auth-use-sspi\", false);".to_string(),
|
||||
"user_pref(\"network.proxy.autoconfig_url\", \"\");".to_string(),
|
||||
// Disable QUIC/HTTP3 - it bypasses HTTP proxy
|
||||
"user_pref(\"network.http.http3.enable\", false);".to_string(),
|
||||
"user_pref(\"network.http.http3.enabled\", false);".to_string(),
|
||||
]);
|
||||
|
||||
// Write settings to user.js file
|
||||
let user_js_content = preferences.join("\n");
|
||||
fs::write(user_js_path, &user_js_content)?;
|
||||
log::info!("Updated user.js with proxy settings. PAC URL: {}", pac_url);
|
||||
if let Some(internal) = internal_proxy {
|
||||
log::info!(
|
||||
"Firefox will use LOCAL proxy: {}:{} (which forwards to upstream)",
|
||||
internal.host,
|
||||
internal.port
|
||||
);
|
||||
} else {
|
||||
log::info!(
|
||||
"Firefox will use UPSTREAM proxy directly: {}:{}",
|
||||
proxy.host,
|
||||
proxy.port
|
||||
);
|
||||
}
|
||||
log::info!(
|
||||
"Updated user.js with manual proxy settings: {}:{}",
|
||||
proxy_host,
|
||||
proxy_port
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -1283,7 +1224,9 @@ mod tests {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
|
||||
// Mock the base directories by setting environment variables
|
||||
std::env::set_var("HOME", temp_dir.path());
|
||||
unsafe {
|
||||
std::env::set_var("HOME", temp_dir.path());
|
||||
}
|
||||
|
||||
let profile_manager = ProfileManager::instance();
|
||||
(profile_manager, temp_dir)
|
||||
@@ -1397,20 +1340,28 @@ mod tests {
|
||||
assert!(user_js_path.exists(), "user.js should be created");
|
||||
|
||||
let content = fs::read_to_string(&user_js_path).expect("Should read user.js");
|
||||
|
||||
// Check for manual proxy configuration (type 1) instead of PAC (type 2)
|
||||
// Manual proxy is used because PAC file:// URLs are blocked by privacy browsers like Zen
|
||||
assert!(
|
||||
content.contains("network.proxy.type"),
|
||||
"Should contain proxy type setting"
|
||||
content.contains("network.proxy.type\", 1"),
|
||||
"Should set proxy type to 1 (manual)"
|
||||
);
|
||||
assert!(content.contains("2"), "Should set proxy type to 2 (PAC)");
|
||||
|
||||
// Check that PAC file was created
|
||||
let pac_path = uuid_dir.join("proxy.pac");
|
||||
assert!(pac_path.exists(), "proxy.pac should be created");
|
||||
|
||||
let pac_content = fs::read_to_string(&pac_path).expect("Should read proxy.pac");
|
||||
assert!(
|
||||
pac_content.contains("FindProxyForURL"),
|
||||
"PAC file should contain FindProxyForURL function"
|
||||
content.contains("network.proxy.http\", \"proxy.example.com\""),
|
||||
"Should set HTTP proxy host"
|
||||
);
|
||||
assert!(
|
||||
content.contains("network.proxy.http_port\", 8080"),
|
||||
"Should set HTTP proxy port"
|
||||
);
|
||||
assert!(
|
||||
content.contains("network.proxy.ssl\", \"proxy.example.com\""),
|
||||
"Should set SSL proxy host"
|
||||
);
|
||||
assert!(
|
||||
content.contains("network.proxy.ssl_port\", 8080"),
|
||||
"Should set SSL proxy port"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -59,9 +59,6 @@ impl ProfileImporter {
|
||||
// Detect Zen Browser profiles
|
||||
detected_profiles.extend(self.detect_zen_browser_profiles()?);
|
||||
|
||||
// NOTE: Mullvad and Tor Browser profile imports are no longer supported.
|
||||
// We intentionally do not detect these profiles to avoid offering them in the UI.
|
||||
|
||||
// Remove duplicates based on path
|
||||
let mut seen_paths = HashSet::new();
|
||||
let unique_profiles: Vec<DetectedProfile> = detected_profiles
|
||||
@@ -495,9 +492,7 @@ impl ProfileImporter {
|
||||
"firefox-developer" => "Firefox Developer",
|
||||
"chromium" => "Chrome/Chromium",
|
||||
"brave" => "Brave",
|
||||
"mullvad-browser" => "Mullvad Browser",
|
||||
"zen" => "Zen Browser",
|
||||
"tor-browser" => "Tor Browser",
|
||||
_ => "Unknown Browser",
|
||||
}
|
||||
}
|
||||
@@ -509,11 +504,6 @@ impl ProfileImporter {
|
||||
browser_type: &str,
|
||||
new_profile_name: &str,
|
||||
) -> Result<(), Box<dyn std::error::Error>> {
|
||||
// Disable imports for Mullvad and Tor browsers
|
||||
if browser_type == "mullvad-browser" || browser_type == "tor-browser" {
|
||||
return Err("Importing Mullvad Browser or Tor Browser profiles is not supported".into());
|
||||
}
|
||||
|
||||
// Validate that source path exists
|
||||
let source_path = Path::new(source_path);
|
||||
if !source_path.exists() {
|
||||
@@ -685,15 +675,7 @@ mod tests {
|
||||
"Chrome/Chromium"
|
||||
);
|
||||
assert_eq!(importer.get_browser_display_name("brave"), "Brave");
|
||||
assert_eq!(
|
||||
importer.get_browser_display_name("mullvad-browser"),
|
||||
"Mullvad Browser"
|
||||
);
|
||||
assert_eq!(importer.get_browser_display_name("zen"), "Zen Browser");
|
||||
assert_eq!(
|
||||
importer.get_browser_display_name("tor-browser"),
|
||||
"Tor Browser"
|
||||
);
|
||||
assert_eq!(
|
||||
importer.get_browser_display_name("unknown"),
|
||||
"Unknown Browser"
|
||||
|
||||
+119
-61
@@ -491,7 +491,6 @@ impl ProxyManager {
|
||||
"https://ipinfo.io/ip",
|
||||
"https://icanhazip.com",
|
||||
"https://ifconfig.co/ip",
|
||||
"https://ipecho.net/plain",
|
||||
];
|
||||
|
||||
// Create HTTP client with proxy
|
||||
@@ -596,11 +595,6 @@ impl ProxyManager {
|
||||
browser_pid: u32,
|
||||
profile_id: Option<&str>,
|
||||
) -> Result<ProxySettings, String> {
|
||||
// First, proactively cleanup any dead proxies so we don't accidentally reuse stale ones
|
||||
let _ = self.cleanup_dead_proxies(app_handle.clone()).await;
|
||||
|
||||
// If we have a previous proxy tied to this profile, and the upstream settings are changing,
|
||||
// stop it before starting a new one so the change takes effect immediately.
|
||||
if let Some(name) = profile_id {
|
||||
// Check if we have an active proxy recorded for this profile
|
||||
let maybe_existing_id = {
|
||||
@@ -626,30 +620,29 @@ impl ProxyManager {
|
||||
&& existing.upstream_host == desired_host
|
||||
&& existing.upstream_port == desired_port;
|
||||
|
||||
if !is_same_upstream {
|
||||
// Stop the previous proxy tied to this profile (best effort)
|
||||
// We don't know the original PID mapping that created it; iterate to find its key
|
||||
let pid_to_stop = {
|
||||
let proxies = self.active_proxies.lock().unwrap();
|
||||
proxies.iter().find_map(|(pid, info)| {
|
||||
if info.id == existing_id {
|
||||
Some(*pid)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
};
|
||||
if let Some(pid) = pid_to_stop {
|
||||
let _ = self.stop_proxy(app_handle.clone(), pid).await;
|
||||
if is_same_upstream {
|
||||
// Settings match - can reuse existing proxy
|
||||
// Just update the PID mapping if needed
|
||||
let proxies = self.active_proxies.lock().unwrap();
|
||||
if proxies.contains_key(&browser_pid) {
|
||||
// Already mapped, reuse it
|
||||
return Ok(ProxySettings {
|
||||
proxy_type: "http".to_string(),
|
||||
host: "127.0.0.1".to_string(),
|
||||
port: existing.local_port,
|
||||
username: None,
|
||||
password: None,
|
||||
});
|
||||
}
|
||||
// Need to add this PID to the mapping - we'll do that after starting
|
||||
}
|
||||
// Settings differ - we'll create a new proxy, but don't stop the old one
|
||||
// It will be cleaned up by periodic cleanup if it becomes dead
|
||||
}
|
||||
}
|
||||
}
|
||||
// Check if we already have a proxy for this browser PID. If it exists but the upstream
|
||||
// settings don't match the newly requested ones, stop it and create a new proxy so that
|
||||
// changes take effect immediately.
|
||||
let mut needs_restart = false;
|
||||
// Check if we already have a proxy for this browser PID
|
||||
// If settings match, reuse it; otherwise create a new one (don't stop the old one)
|
||||
{
|
||||
let proxies = self.active_proxies.lock().unwrap();
|
||||
if let Some(existing) = proxies.get(&browser_pid) {
|
||||
@@ -664,7 +657,7 @@ impl ProxyManager {
|
||||
&& existing.upstream_port == desired_port;
|
||||
|
||||
if is_same_upstream {
|
||||
// Check if profile_id matches - if not, we need to restart to update tracking
|
||||
// Check if profile_id matches
|
||||
let profile_id_matches = match (profile_id, &existing.profile_id) {
|
||||
(Some(ref new_id), Some(ref old_id)) => new_id == old_id,
|
||||
(None, None) => true,
|
||||
@@ -672,7 +665,7 @@ impl ProxyManager {
|
||||
};
|
||||
|
||||
if profile_id_matches {
|
||||
// Reuse existing local proxy (profile_id matches)
|
||||
// Reuse existing local proxy (settings and profile_id match)
|
||||
return Ok(ProxySettings {
|
||||
proxy_type: "http".to_string(),
|
||||
host: "127.0.0.1".to_string(),
|
||||
@@ -680,28 +673,15 @@ impl ProxyManager {
|
||||
username: None,
|
||||
password: None,
|
||||
});
|
||||
} else {
|
||||
// Profile ID changed - need to restart proxy to update tracking
|
||||
log::info!(
|
||||
"Profile ID changed for proxy {}: {:?} -> {:?}, restarting proxy",
|
||||
existing.id,
|
||||
existing.profile_id,
|
||||
profile_id
|
||||
);
|
||||
needs_restart = true;
|
||||
}
|
||||
} else {
|
||||
// Upstream changed; we must restart the local proxy so that traffic is routed correctly
|
||||
needs_restart = true;
|
||||
// Profile ID changed - we'll create a new proxy but don't stop the old one
|
||||
// It will be cleaned up by periodic cleanup if it becomes dead
|
||||
}
|
||||
// Upstream changed - we'll create a new proxy but don't stop the old one
|
||||
// It will be cleaned up by periodic cleanup if it becomes dead
|
||||
}
|
||||
}
|
||||
|
||||
if needs_restart {
|
||||
// Best-effort stop of the old proxy for this PID before starting a new one
|
||||
let _ = self.stop_proxy(app_handle.clone(), browser_pid).await;
|
||||
}
|
||||
|
||||
// Start a new proxy using the donut-proxy binary with the correct CLI interface
|
||||
let mut proxy_cmd = app_handle
|
||||
.shell()
|
||||
@@ -956,30 +936,108 @@ impl ProxyManager {
|
||||
}
|
||||
}
|
||||
|
||||
// Check if a process is still running
|
||||
fn is_process_running(&self, pid: u32) -> bool {
|
||||
use sysinfo::{Pid, System};
|
||||
let system = System::new_all();
|
||||
system.process(Pid::from(pid as usize)).is_some()
|
||||
}
|
||||
|
||||
// Clean up proxies for dead browser processes
|
||||
// Only clean up orphaned config files where the proxy process itself is dead
|
||||
pub async fn cleanup_dead_proxies(
|
||||
&self,
|
||||
app_handle: tauri::AppHandle,
|
||||
) -> Result<Vec<u32>, String> {
|
||||
let dead_pids = {
|
||||
let proxies = self.active_proxies.lock().unwrap();
|
||||
proxies
|
||||
.keys()
|
||||
.filter(|&&pid| pid != 0 && !self.is_process_running(pid)) // Skip temporary PID 0
|
||||
.copied()
|
||||
.collect::<Vec<u32>>()
|
||||
// Don't stop proxies for dead browser processes - let them run indefinitely
|
||||
// The proxy processes are idle and don't consume CPU when not in use
|
||||
// Only clean up config files where the proxy process itself is dead (see below)
|
||||
let dead_pids: Vec<u32> = Vec::new();
|
||||
|
||||
// Clean up orphaned proxy configs (only where proxy process is definitely dead)
|
||||
// IMPORTANT: Only clean up configs where the proxy process itself is dead
|
||||
// If the proxy process is running (even if idle), leave it alone
|
||||
// The user doesn't care if proxy processes run indefinitely as long as they're not consuming CPU
|
||||
let orphaned_configs = {
|
||||
use crate::proxy_storage::{is_process_running, list_proxy_configs};
|
||||
use std::time::{SystemTime, UNIX_EPOCH};
|
||||
|
||||
let all_configs = list_proxy_configs();
|
||||
let tracked_proxy_ids: std::collections::HashSet<String> = {
|
||||
let proxies = self.active_proxies.lock().unwrap();
|
||||
proxies.values().map(|p| p.id.clone()).collect()
|
||||
};
|
||||
|
||||
// Get current time for grace period check
|
||||
let now = SystemTime::now()
|
||||
.duration_since(UNIX_EPOCH)
|
||||
.unwrap()
|
||||
.as_secs();
|
||||
|
||||
all_configs
|
||||
.into_iter()
|
||||
.filter(|config| {
|
||||
// If proxy is tracked in active_proxies, it's definitely not orphaned
|
||||
if tracked_proxy_ids.contains(&config.id) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Extract creation time from proxy ID (format: proxy_{timestamp}_{random})
|
||||
// This gives us a grace period for newly created proxies
|
||||
let proxy_age = config
|
||||
.id
|
||||
.strip_prefix("proxy_")
|
||||
.and_then(|s| s.split('_').next())
|
||||
.and_then(|s| s.parse::<u64>().ok())
|
||||
.map(|created_at| now.saturating_sub(created_at))
|
||||
.unwrap_or(0);
|
||||
|
||||
// Grace period: don't clean up proxies created in the last 120 seconds
|
||||
// This prevents race conditions during startup (increased from 60 to 120 for safety)
|
||||
if proxy_age < 120 {
|
||||
log::debug!(
|
||||
"Skipping cleanup of proxy {} - too new (age: {}s)",
|
||||
config.id,
|
||||
proxy_age
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
// ONLY clean up if we can verify the proxy process is dead
|
||||
// If proxy process is running, leave it alone (even if idle)
|
||||
if let Some(proxy_pid) = config.pid {
|
||||
// Check if proxy process is actually dead
|
||||
if !is_process_running(proxy_pid) {
|
||||
// Proxy process is dead, clean up the config file
|
||||
log::info!(
|
||||
"Proxy {} process (PID {}) is dead, will clean up config",
|
||||
config.id,
|
||||
proxy_pid
|
||||
);
|
||||
return true;
|
||||
}
|
||||
// Proxy process is running - leave it alone
|
||||
log::debug!(
|
||||
"Skipping cleanup of proxy {} - process (PID {}) is still running",
|
||||
config.id,
|
||||
proxy_pid
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
// No PID in config - can't verify if process is dead
|
||||
// Be conservative: don't clean up (might be starting up or PID not set yet)
|
||||
log::debug!(
|
||||
"Skipping cleanup of proxy {} - no PID in config (might be starting up)",
|
||||
config.id
|
||||
);
|
||||
false
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
};
|
||||
|
||||
for dead_pid in &dead_pids {
|
||||
log::info!("Cleaning up proxy for dead browser process PID: {dead_pid}");
|
||||
let _ = self.stop_proxy(app_handle.clone(), *dead_pid).await;
|
||||
// Clean up orphaned config files (proxy process is dead)
|
||||
for config in orphaned_configs {
|
||||
log::info!(
|
||||
"Cleaning up orphaned proxy config: {} (proxy process is dead)",
|
||||
config.id
|
||||
);
|
||||
// Just delete the config file - the process is already dead
|
||||
use crate::proxy_storage::delete_proxy_config;
|
||||
delete_proxy_config(&config.id);
|
||||
}
|
||||
|
||||
// Emit event for reactive UI updates
|
||||
|
||||
@@ -59,18 +59,12 @@ pub async fn start_proxy_process_with_profile(
|
||||
cmd.stdin(Stdio::null());
|
||||
cmd.stdout(Stdio::null());
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
let log_path = std::path::PathBuf::from("/tmp").join(format!("donut-proxy-{}.log", id));
|
||||
if let Ok(file) = std::fs::File::create(&log_path) {
|
||||
log::error!("Proxy worker stderr will be logged to: {:?}", log_path);
|
||||
cmd.stderr(Stdio::from(file));
|
||||
} else {
|
||||
cmd.stderr(Stdio::null());
|
||||
}
|
||||
}
|
||||
#[cfg(not(debug_assertions))]
|
||||
{
|
||||
// Always log to file for diagnostics (both debug and release builds)
|
||||
let log_path = std::path::PathBuf::from("/tmp").join(format!("donut-proxy-{}.log", id));
|
||||
if let Ok(file) = std::fs::File::create(&log_path) {
|
||||
log::info!("Proxy worker stderr will be logged to: {:?}", log_path);
|
||||
cmd.stderr(Stdio::from(file));
|
||||
} else {
|
||||
cmd.stderr(Stdio::null());
|
||||
}
|
||||
|
||||
|
||||
+572
-100
@@ -359,14 +359,10 @@ async fn connect_via_socks(
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_http(
|
||||
async fn handle_http_via_socks4(
|
||||
req: Request<hyper::body::Incoming>,
|
||||
upstream_url: Option<String>,
|
||||
upstream_url: &str,
|
||||
) -> Result<Response<Full<Bytes>>, Infallible> {
|
||||
// Use reqwest for all HTTP requests as it handles proxies better
|
||||
// This is faster and more reliable than trying to use hyper-proxy with version conflicts
|
||||
use reqwest::Client;
|
||||
|
||||
// Extract domain for traffic tracking
|
||||
let domain = req
|
||||
.uri()
|
||||
@@ -374,6 +370,340 @@ async fn handle_http(
|
||||
.map(|h| h.to_string())
|
||||
.unwrap_or_else(|| "unknown".to_string());
|
||||
|
||||
// Parse upstream SOCKS4 proxy URL
|
||||
let upstream = match Url::parse(upstream_url) {
|
||||
Ok(url) => url,
|
||||
Err(e) => {
|
||||
log::error!("Failed to parse SOCKS4 proxy URL: {}", e);
|
||||
let mut response = Response::new(Full::new(Bytes::from("Invalid proxy URL")));
|
||||
*response.status_mut() = StatusCode::BAD_GATEWAY;
|
||||
return Ok(response);
|
||||
}
|
||||
};
|
||||
|
||||
let socks_host = upstream.host_str().unwrap_or("127.0.0.1");
|
||||
let socks_port = upstream.port().unwrap_or(1080);
|
||||
let socks_addr = format!("{}:{}", socks_host, socks_port);
|
||||
|
||||
// Parse target from request URI
|
||||
let target_uri = req.uri();
|
||||
let target_host = target_uri.host().unwrap_or("localhost");
|
||||
let target_port = target_uri.port_u16().unwrap_or(80);
|
||||
|
||||
// Connect to SOCKS4 proxy
|
||||
let mut socks_stream = match TcpStream::connect(&socks_addr).await {
|
||||
Ok(stream) => stream,
|
||||
Err(e) => {
|
||||
log::error!("Failed to connect to SOCKS4 proxy {}: {}", socks_addr, e);
|
||||
let mut response = Response::new(Full::new(Bytes::from(format!(
|
||||
"Failed to connect to SOCKS4 proxy: {}",
|
||||
e
|
||||
))));
|
||||
*response.status_mut() = StatusCode::BAD_GATEWAY;
|
||||
return Ok(response);
|
||||
}
|
||||
};
|
||||
|
||||
// Resolve target host to IP (SOCKS4 requires IP addresses)
|
||||
let target_ip = match tokio::net::lookup_host((target_host, target_port)).await {
|
||||
Ok(mut addrs) => {
|
||||
if let Some(addr) = addrs.next() {
|
||||
match addr.ip() {
|
||||
std::net::IpAddr::V4(ipv4) => ipv4.octets(),
|
||||
std::net::IpAddr::V6(_) => {
|
||||
log::error!("SOCKS4 does not support IPv6");
|
||||
let mut response = Response::new(Full::new(Bytes::from(
|
||||
"SOCKS4 does not support IPv6 addresses",
|
||||
)));
|
||||
*response.status_mut() = StatusCode::BAD_GATEWAY;
|
||||
return Ok(response);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log::error!("Failed to resolve target host: {}", target_host);
|
||||
let mut response = Response::new(Full::new(Bytes::from(format!(
|
||||
"Failed to resolve target host: {}",
|
||||
target_host
|
||||
))));
|
||||
*response.status_mut() = StatusCode::BAD_GATEWAY;
|
||||
return Ok(response);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Failed to resolve target host {}: {}", target_host, e);
|
||||
let mut response = Response::new(Full::new(Bytes::from(format!(
|
||||
"Failed to resolve target host: {}",
|
||||
e
|
||||
))));
|
||||
*response.status_mut() = StatusCode::BAD_GATEWAY;
|
||||
return Ok(response);
|
||||
}
|
||||
};
|
||||
|
||||
// Build SOCKS4 CONNECT request
|
||||
let mut socks_request = vec![0x04, 0x01]; // SOCKS4, CONNECT
|
||||
socks_request.extend_from_slice(&target_port.to_be_bytes());
|
||||
socks_request.extend_from_slice(&target_ip);
|
||||
socks_request.push(0); // NULL terminator for userid
|
||||
|
||||
// Send SOCKS4 CONNECT request
|
||||
if let Err(e) = socks_stream.write_all(&socks_request).await {
|
||||
log::error!("Failed to send SOCKS4 CONNECT request: {}", e);
|
||||
let mut response = Response::new(Full::new(Bytes::from(format!(
|
||||
"Failed to send SOCKS4 request: {}",
|
||||
e
|
||||
))));
|
||||
*response.status_mut() = StatusCode::BAD_GATEWAY;
|
||||
return Ok(response);
|
||||
}
|
||||
|
||||
// Read SOCKS4 response
|
||||
let mut socks_response = [0u8; 8];
|
||||
if let Err(e) = socks_stream.read_exact(&mut socks_response).await {
|
||||
log::error!("Failed to read SOCKS4 response: {}", e);
|
||||
let mut response = Response::new(Full::new(Bytes::from(format!(
|
||||
"Failed to read SOCKS4 response: {}",
|
||||
e
|
||||
))));
|
||||
*response.status_mut() = StatusCode::BAD_GATEWAY;
|
||||
return Ok(response);
|
||||
}
|
||||
|
||||
// Check SOCKS4 response (second byte should be 0x5A for success)
|
||||
if socks_response[1] != 0x5A {
|
||||
log::error!(
|
||||
"SOCKS4 connection failed, response code: {}",
|
||||
socks_response[1]
|
||||
);
|
||||
let mut response = Response::new(Full::new(Bytes::from("SOCKS4 connection failed")));
|
||||
*response.status_mut() = StatusCode::BAD_GATEWAY;
|
||||
return Ok(response);
|
||||
}
|
||||
|
||||
// Now send the HTTP request through the SOCKS4 connection
|
||||
// Build HTTP request line
|
||||
let method = req.method().as_str();
|
||||
let path = target_uri
|
||||
.path_and_query()
|
||||
.map(|pq| pq.as_str())
|
||||
.unwrap_or("/");
|
||||
let http_version = if req.version() == hyper::Version::HTTP_11 {
|
||||
"HTTP/1.1"
|
||||
} else {
|
||||
"HTTP/1.0"
|
||||
};
|
||||
|
||||
let mut http_request = format!("{} {} {}\r\n", method, path, http_version);
|
||||
|
||||
// Add Host header if not present
|
||||
let mut has_host = false;
|
||||
for (name, value) in req.headers().iter() {
|
||||
if name.as_str().eq_ignore_ascii_case("host") {
|
||||
has_host = true;
|
||||
}
|
||||
// Skip proxy-specific headers
|
||||
if name.as_str().eq_ignore_ascii_case("proxy-authorization")
|
||||
|| name.as_str().eq_ignore_ascii_case("proxy-connection")
|
||||
|| name.as_str().eq_ignore_ascii_case("proxy-authenticate")
|
||||
{
|
||||
continue;
|
||||
}
|
||||
// Skip Content-Length and Transfer-Encoding - we'll add our own Content-Length
|
||||
// based on the collected body size. Having both violates HTTP/1.1 (RFC 7230).
|
||||
if name.as_str().eq_ignore_ascii_case("content-length")
|
||||
|| name.as_str().eq_ignore_ascii_case("transfer-encoding")
|
||||
{
|
||||
continue;
|
||||
}
|
||||
if let Ok(val) = value.to_str() {
|
||||
http_request.push_str(&format!("{}: {}\r\n", name.as_str(), val));
|
||||
}
|
||||
}
|
||||
|
||||
if !has_host {
|
||||
http_request.push_str(&format!("Host: {}:{}\r\n", target_host, target_port));
|
||||
}
|
||||
|
||||
// Get body
|
||||
let body_bytes = match req.collect().await {
|
||||
Ok(collected) => collected.to_bytes(),
|
||||
Err(_) => Bytes::new(),
|
||||
};
|
||||
|
||||
// Add Content-Length if there's a body
|
||||
if !body_bytes.is_empty() {
|
||||
http_request.push_str(&format!("Content-Length: {}\r\n", body_bytes.len()));
|
||||
}
|
||||
|
||||
http_request.push_str("\r\n");
|
||||
|
||||
// Send HTTP request
|
||||
if let Err(e) = socks_stream.write_all(http_request.as_bytes()).await {
|
||||
log::error!("Failed to send HTTP request through SOCKS4: {}", e);
|
||||
let mut response = Response::new(Full::new(Bytes::from(format!(
|
||||
"Failed to send HTTP request: {}",
|
||||
e
|
||||
))));
|
||||
*response.status_mut() = StatusCode::BAD_GATEWAY;
|
||||
return Ok(response);
|
||||
}
|
||||
|
||||
// Send body if present
|
||||
if !body_bytes.is_empty() {
|
||||
if let Err(e) = socks_stream.write_all(&body_bytes).await {
|
||||
log::error!("Failed to send HTTP body through SOCKS4: {}", e);
|
||||
let mut response = Response::new(Full::new(Bytes::from(format!(
|
||||
"Failed to send HTTP body: {}",
|
||||
e
|
||||
))));
|
||||
*response.status_mut() = StatusCode::BAD_GATEWAY;
|
||||
return Ok(response);
|
||||
}
|
||||
}
|
||||
|
||||
// Read HTTP response
|
||||
let mut response_buffer = Vec::with_capacity(8192);
|
||||
let mut temp_buf = [0u8; 4096];
|
||||
let mut content_length: Option<usize> = None;
|
||||
let mut is_chunked = false;
|
||||
|
||||
// Read until we have complete headers
|
||||
loop {
|
||||
match socks_stream.read(&mut temp_buf).await {
|
||||
Ok(0) => break, // Connection closed
|
||||
Ok(n) => {
|
||||
response_buffer.extend_from_slice(&temp_buf[..n]);
|
||||
// Check for end of headers (\r\n\r\n)
|
||||
if let Some(pos) = response_buffer.windows(4).position(|w| w == b"\r\n\r\n") {
|
||||
// Parse headers
|
||||
let headers_str = String::from_utf8_lossy(&response_buffer[..pos + 4]);
|
||||
for line in headers_str.lines() {
|
||||
let line_lower = line.to_lowercase();
|
||||
if line_lower.starts_with("content-length:") {
|
||||
if let Some(len_str) = line.split(':').nth(1) {
|
||||
if let Ok(len) = len_str.trim().parse::<usize>() {
|
||||
content_length = Some(len);
|
||||
}
|
||||
}
|
||||
} else if line_lower.starts_with("transfer-encoding:") && line_lower.contains("chunked")
|
||||
{
|
||||
is_chunked = true;
|
||||
}
|
||||
}
|
||||
// Read body if Content-Length is specified and we don't have it all
|
||||
if let Some(cl) = content_length {
|
||||
let body_start = pos + 4;
|
||||
let body_received = response_buffer.len() - body_start;
|
||||
if body_received < cl {
|
||||
// Read remaining body (but don't use read_exact as connection might close)
|
||||
let remaining = cl - body_received;
|
||||
let mut read_so_far = 0;
|
||||
while read_so_far < remaining {
|
||||
match socks_stream.read(&mut temp_buf).await {
|
||||
Ok(0) => break, // Connection closed
|
||||
Ok(m) => {
|
||||
let to_read = (remaining - read_so_far).min(m);
|
||||
response_buffer.extend_from_slice(&temp_buf[..to_read]);
|
||||
read_so_far += to_read;
|
||||
if to_read < m {
|
||||
// More data than needed, might be next response - stop here
|
||||
break;
|
||||
}
|
||||
}
|
||||
Err(_) => break,
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if !is_chunked {
|
||||
// No Content-Length and not chunked - read until connection closes
|
||||
// But limit to reasonable size to avoid memory issues
|
||||
let max_body_size = 10 * 1024 * 1024; // 10MB max
|
||||
while response_buffer.len() < max_body_size {
|
||||
match socks_stream.read(&mut temp_buf).await {
|
||||
Ok(0) => break, // Connection closed
|
||||
Ok(n) => {
|
||||
response_buffer.extend_from_slice(&temp_buf[..n]);
|
||||
}
|
||||
Err(_) => break,
|
||||
}
|
||||
}
|
||||
}
|
||||
// Note: Chunked encoding is complex to parse manually, so we'll read what we can
|
||||
// For full chunked support, we'd need a proper HTTP parser
|
||||
break;
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Error reading HTTP response from SOCKS4: {}", e);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Parse HTTP response
|
||||
let response_str = String::from_utf8_lossy(&response_buffer);
|
||||
let mut lines = response_str.lines();
|
||||
let status_line = lines.next().unwrap_or("HTTP/1.1 500 Internal Server Error");
|
||||
let status_parts: Vec<&str> = status_line.split_whitespace().collect();
|
||||
let status_code = status_parts
|
||||
.get(1)
|
||||
.and_then(|s| s.parse::<u16>().ok())
|
||||
.unwrap_or(500);
|
||||
|
||||
// Find header/body boundary
|
||||
let header_end = response_buffer
|
||||
.windows(4)
|
||||
.position(|w| w == b"\r\n\r\n")
|
||||
.map(|p| p + 4)
|
||||
.unwrap_or(response_buffer.len());
|
||||
|
||||
let body = response_buffer[header_end..].to_vec();
|
||||
|
||||
// Record request in traffic tracker
|
||||
let response_size = body.len() as u64;
|
||||
if let Some(tracker) = get_traffic_tracker() {
|
||||
tracker.record_request(&domain, body_bytes.len() as u64, response_size);
|
||||
}
|
||||
|
||||
let mut hyper_response = Response::new(Full::new(Bytes::from(body)));
|
||||
*hyper_response.status_mut() = StatusCode::from_u16(status_code).unwrap();
|
||||
|
||||
Ok(hyper_response)
|
||||
}
|
||||
|
||||
async fn handle_http(
|
||||
req: Request<hyper::body::Incoming>,
|
||||
upstream_url: Option<String>,
|
||||
) -> Result<Response<Full<Bytes>>, Infallible> {
|
||||
// Extract domain for traffic tracking
|
||||
let domain = req
|
||||
.uri()
|
||||
.host()
|
||||
.map(|h| h.to_string())
|
||||
.unwrap_or_else(|| "unknown".to_string());
|
||||
|
||||
log::error!(
|
||||
"DEBUG: Handling HTTP request: {} {} (host: {:?})",
|
||||
req.method(),
|
||||
req.uri(),
|
||||
req.uri().host()
|
||||
);
|
||||
|
||||
// Check if we need to handle SOCKS4 manually (reqwest doesn't support it)
|
||||
if let Some(ref upstream) = upstream_url {
|
||||
if upstream != "DIRECT" {
|
||||
if let Ok(url) = Url::parse(upstream) {
|
||||
if url.scheme() == "socks4" {
|
||||
// Handle SOCKS4 manually for HTTP requests
|
||||
return handle_http_via_socks4(req, upstream).await;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Use reqwest for HTTP/HTTPS/SOCKS5 proxies
|
||||
use reqwest::Client;
|
||||
|
||||
let client_builder = Client::builder();
|
||||
let client = if let Some(ref upstream) = upstream_url {
|
||||
if upstream == "DIRECT" {
|
||||
@@ -490,6 +820,7 @@ fn build_reqwest_client_with_proxy(
|
||||
let proxy = match scheme {
|
||||
"http" | "https" => {
|
||||
// For HTTP/HTTPS proxies, reqwest handles them directly
|
||||
// Note: HTTPS proxy URLs still use HTTP CONNECT method, reqwest handles TLS automatically
|
||||
Proxy::http(upstream_url)?
|
||||
}
|
||||
"socks5" => {
|
||||
@@ -497,8 +828,9 @@ fn build_reqwest_client_with_proxy(
|
||||
Proxy::all(upstream_url)?
|
||||
}
|
||||
"socks4" => {
|
||||
// SOCKS4 is not directly supported by reqwest, would need custom handling
|
||||
return Err("SOCKS4 not supported for HTTP requests via reqwest".into());
|
||||
// SOCKS4 is handled manually in handle_http_via_socks4
|
||||
// This should not be reached, but return error as fallback
|
||||
return Err("SOCKS4 should be handled manually".into());
|
||||
}
|
||||
_ => {
|
||||
return Err(format!("Unsupported proxy scheme: {}", scheme).into());
|
||||
@@ -592,14 +924,80 @@ pub async fn run_proxy_server(config: ProxyConfig) -> Result<(), Box<dyn std::er
|
||||
);
|
||||
log::error!("Proxy server entering accept loop - process should stay alive");
|
||||
|
||||
// Start a background task to write lightweight session snapshots for real-time updates
|
||||
// These are much smaller than full stats and can be written frequently (~100 bytes every 2 seconds)
|
||||
if let Some(tracker) = get_traffic_tracker() {
|
||||
let tracker_clone = tracker.clone();
|
||||
tokio::spawn(async move {
|
||||
let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(2));
|
||||
interval.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip);
|
||||
|
||||
loop {
|
||||
interval.tick().await;
|
||||
// Write lightweight session snapshot (only current counters, ~100 bytes)
|
||||
if let Err(e) = tracker_clone.write_session_snapshot() {
|
||||
log::debug!("Failed to write session snapshot: {}", e);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Start a background task to periodically flush traffic stats to disk
|
||||
// Use adaptive flush frequency: every 5 seconds when active, every 30 seconds when idle
|
||||
tokio::spawn(async move {
|
||||
let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(1));
|
||||
let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(5));
|
||||
interval.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip);
|
||||
let mut last_activity_time = std::time::Instant::now();
|
||||
let mut last_flush_time = std::time::Instant::now();
|
||||
let mut current_interval_secs = 5u64;
|
||||
|
||||
loop {
|
||||
interval.tick().await;
|
||||
if let Some(tracker) = get_traffic_tracker() {
|
||||
if let Err(e) = tracker.flush_to_disk() {
|
||||
log::error!("Failed to flush traffic stats: {}", e);
|
||||
let (sent, recv, requests) = tracker.get_snapshot();
|
||||
let current_bytes = sent + recv;
|
||||
let time_since_activity = last_activity_time.elapsed();
|
||||
let time_since_flush = last_flush_time.elapsed();
|
||||
let has_traffic = current_bytes > 0 || requests > 0;
|
||||
|
||||
// Determine flush frequency based on activity
|
||||
// When active: flush every 5 seconds
|
||||
// When idle: flush every 30 seconds
|
||||
let desired_interval_secs =
|
||||
if has_traffic || time_since_activity < std::time::Duration::from_secs(30) {
|
||||
5u64
|
||||
} else {
|
||||
30u64
|
||||
};
|
||||
|
||||
// Update interval if needed
|
||||
if desired_interval_secs != current_interval_secs {
|
||||
current_interval_secs = desired_interval_secs;
|
||||
interval = tokio::time::interval(tokio::time::Duration::from_secs(desired_interval_secs));
|
||||
}
|
||||
|
||||
// Only flush if enough time has passed since last flush
|
||||
let flush_interval = std::time::Duration::from_secs(desired_interval_secs);
|
||||
let should_flush = time_since_flush >= flush_interval;
|
||||
|
||||
if should_flush {
|
||||
match tracker.flush_to_disk() {
|
||||
Ok(Some((sent, recv))) => {
|
||||
// Successful flush with data
|
||||
last_flush_time = std::time::Instant::now();
|
||||
if sent > 0 || recv > 0 {
|
||||
last_activity_time = std::time::Instant::now();
|
||||
}
|
||||
}
|
||||
Ok(None) => {
|
||||
// No data to flush - this is normal
|
||||
last_flush_time = std::time::Instant::now();
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Failed to flush traffic stats: {}", e);
|
||||
// Don't update flush time on error - retry sooner
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -609,40 +1007,106 @@ pub async fn run_proxy_server(config: ProxyConfig) -> Result<(), Box<dyn std::er
|
||||
// This ensures the process doesn't exit even if there are no active connections
|
||||
loop {
|
||||
match listener.accept().await {
|
||||
Ok((mut stream, _)) => {
|
||||
Ok((mut stream, peer_addr)) => {
|
||||
// Enable TCP_NODELAY to ensure small packets are sent immediately
|
||||
// This is critical for CONNECT responses to be sent before tunneling begins
|
||||
let _ = stream.set_nodelay(true);
|
||||
log::error!("DEBUG: Accepted connection from {:?}", peer_addr);
|
||||
|
||||
let upstream = upstream_url.clone();
|
||||
|
||||
tokio::task::spawn(async move {
|
||||
// Read first bytes to detect CONNECT requests
|
||||
// CONNECT requests need special handling for tunneling
|
||||
let mut peek_buffer = [0u8; 8];
|
||||
// Use a larger buffer to ensure we can detect CONNECT even with partial reads
|
||||
let mut peek_buffer = [0u8; 16];
|
||||
match stream.read(&mut peek_buffer).await {
|
||||
Ok(n) if n >= 7 => {
|
||||
let request_start = String::from_utf8_lossy(&peek_buffer[..n.min(7)]);
|
||||
if request_start.starts_with("CONNECT") {
|
||||
Ok(0) => {
|
||||
log::error!("DEBUG: Connection closed immediately (0 bytes read)");
|
||||
}
|
||||
Ok(n) => {
|
||||
// Check if this looks like a CONNECT request
|
||||
// Be more lenient - check if the first bytes match "CONNECT" (case-insensitive)
|
||||
let request_start_upper =
|
||||
String::from_utf8_lossy(&peek_buffer[..n.min(7)]).to_uppercase();
|
||||
let is_connect = request_start_upper.starts_with("CONNECT");
|
||||
|
||||
log::error!(
|
||||
"DEBUG: Read {} bytes, starts with: {:?}, is_connect: {}",
|
||||
n,
|
||||
String::from_utf8_lossy(&peek_buffer[..n.min(20)]),
|
||||
is_connect
|
||||
);
|
||||
|
||||
if is_connect {
|
||||
// Handle CONNECT request manually for tunneling
|
||||
let mut full_request = Vec::with_capacity(4096);
|
||||
full_request.extend_from_slice(&peek_buffer[..n]);
|
||||
|
||||
// Read the rest of the CONNECT request
|
||||
// Read the rest of the CONNECT request until we have the full headers
|
||||
// CONNECT requests end with \r\n\r\n (or \n\n)
|
||||
let mut remaining = [0u8; 4096];
|
||||
let mut total_read = n;
|
||||
let max_reads = 100; // Prevent infinite loop
|
||||
let mut reads = 0;
|
||||
|
||||
loop {
|
||||
if reads >= max_reads {
|
||||
log::error!("DEBUG: Max reads reached, breaking");
|
||||
break;
|
||||
}
|
||||
|
||||
match stream.read(&mut remaining).await {
|
||||
Ok(0) => break,
|
||||
Ok(m) => {
|
||||
full_request.extend_from_slice(&remaining[..m]);
|
||||
Ok(0) => {
|
||||
// Connection closed, but we might have a complete request
|
||||
if full_request.ends_with(b"\r\n\r\n") || full_request.ends_with(b"\n\n") {
|
||||
break;
|
||||
}
|
||||
// If we have some data, try to process it anyway
|
||||
if total_read > 0 {
|
||||
break;
|
||||
}
|
||||
return; // No data at all
|
||||
}
|
||||
Ok(m) => {
|
||||
reads += 1;
|
||||
total_read += m;
|
||||
full_request.extend_from_slice(&remaining[..m]);
|
||||
|
||||
// Check if we have complete headers
|
||||
if full_request.ends_with(b"\r\n\r\n") || full_request.ends_with(b"\n\n") {
|
||||
break;
|
||||
}
|
||||
|
||||
// Also check if we have enough to parse (at least "CONNECT host:port HTTP/1.x")
|
||||
if total_read >= 20 {
|
||||
// Check if we have a newline that might indicate end of request line
|
||||
if let Some(pos) = full_request.iter().position(|&b| b == b'\n') {
|
||||
if pos < full_request.len() - 1 {
|
||||
// We have at least the request line, check if we have headers
|
||||
let request_str = String::from_utf8_lossy(&full_request);
|
||||
if request_str.contains("\r\n\r\n") || request_str.contains("\n\n") {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("DEBUG: Error reading CONNECT request: {:?}", e);
|
||||
// If we have some data, try to process it
|
||||
if total_read > 0 {
|
||||
break;
|
||||
}
|
||||
return;
|
||||
}
|
||||
Err(_) => break,
|
||||
}
|
||||
}
|
||||
|
||||
// Handle CONNECT manually
|
||||
log::error!(
|
||||
"DEBUG: Handling CONNECT manually for: {}",
|
||||
String::from_utf8_lossy(&full_request[..full_request.len().min(100)])
|
||||
String::from_utf8_lossy(&full_request[..full_request.len().min(200)])
|
||||
);
|
||||
if let Err(e) = handle_connect_from_buffer(stream, full_request, upstream).await {
|
||||
log::error!("Error handling CONNECT request: {:?}", e);
|
||||
@@ -651,7 +1115,14 @@ pub async fn run_proxy_server(config: ProxyConfig) -> Result<(), Box<dyn std::er
|
||||
}
|
||||
return;
|
||||
}
|
||||
// Not CONNECT - reconstruct stream with consumed bytes prepended
|
||||
|
||||
// Not CONNECT (or partial read) - reconstruct stream with consumed bytes prepended
|
||||
// This is critical: we MUST prepend any bytes we consumed, even if < 7 bytes
|
||||
log::error!(
|
||||
"DEBUG: Non-CONNECT request, first {} bytes: {:?}",
|
||||
n,
|
||||
String::from_utf8_lossy(&peek_buffer[..n.min(50)])
|
||||
);
|
||||
let prepended_bytes = peek_buffer[..n].to_vec();
|
||||
let prepended_reader = PrependReader {
|
||||
prepended: prepended_bytes,
|
||||
@@ -664,17 +1135,10 @@ pub async fn run_proxy_server(config: ProxyConfig) -> Result<(), Box<dyn std::er
|
||||
if let Err(err) = http1::Builder::new().serve_connection(io, service).await {
|
||||
log::error!("Error serving connection: {:?}", err);
|
||||
}
|
||||
return;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
// For non-CONNECT requests, use hyper's HTTP handling
|
||||
let io = TokioIo::new(stream);
|
||||
let service = service_fn(move |req| handle_request(req, upstream.clone()));
|
||||
|
||||
if let Err(err) = http1::Builder::new().serve_connection(io, service).await {
|
||||
log::error!("Error serving connection: {:?}", err);
|
||||
Err(e) => {
|
||||
log::error!("Error reading from connection: {:?}", e);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -729,84 +1193,92 @@ async fn handle_connect_from_buffer(
|
||||
}
|
||||
|
||||
// Connect to target (directly or via upstream proxy)
|
||||
let target_stream = if upstream_url.is_none()
|
||||
|| upstream_url
|
||||
.as_ref()
|
||||
.map(|s| s == "DIRECT")
|
||||
.unwrap_or(false)
|
||||
{
|
||||
// Direct connection
|
||||
TcpStream::connect((target_host, target_port)).await?
|
||||
} else {
|
||||
// Connect via upstream proxy
|
||||
let upstream = Url::parse(upstream_url.as_ref().unwrap())?;
|
||||
let scheme = upstream.scheme();
|
||||
let target_stream = match upstream_url.as_ref() {
|
||||
None => {
|
||||
// Direct connection
|
||||
TcpStream::connect((target_host, target_port)).await?
|
||||
}
|
||||
Some(url) if url == "DIRECT" => {
|
||||
// Direct connection
|
||||
TcpStream::connect((target_host, target_port)).await?
|
||||
}
|
||||
Some(upstream_url_str) => {
|
||||
// Connect via upstream proxy
|
||||
let upstream = Url::parse(upstream_url_str)?;
|
||||
let scheme = upstream.scheme();
|
||||
|
||||
match scheme {
|
||||
"http" | "https" => {
|
||||
// Connect via HTTP proxy CONNECT
|
||||
let proxy_host = upstream.host_str().unwrap_or("127.0.0.1");
|
||||
let proxy_port = upstream.port().unwrap_or(8080);
|
||||
let mut proxy_stream = TcpStream::connect((proxy_host, proxy_port)).await?;
|
||||
match scheme {
|
||||
"http" | "https" => {
|
||||
// Connect via HTTP/HTTPS proxy CONNECT
|
||||
// Note: HTTPS proxy URLs still use HTTP CONNECT method (CONNECT is always HTTP-based)
|
||||
// For HTTPS proxies, reqwest handles TLS automatically in handle_http
|
||||
// For manual CONNECT here, we use plain TCP - HTTPS proxy CONNECT typically works over plain TCP
|
||||
let proxy_host = upstream.host_str().unwrap_or("127.0.0.1");
|
||||
let proxy_port = upstream.port().unwrap_or(8080);
|
||||
let mut proxy_stream = TcpStream::connect((proxy_host, proxy_port)).await?;
|
||||
|
||||
// Add authentication if provided
|
||||
let mut connect_req = format!(
|
||||
"CONNECT {}:{} HTTP/1.1\r\nHost: {}:{}\r\n",
|
||||
target_host, target_port, target_host, target_port
|
||||
);
|
||||
// Add authentication if provided
|
||||
let mut connect_req = format!(
|
||||
"CONNECT {}:{} HTTP/1.1\r\nHost: {}:{}\r\n",
|
||||
target_host, target_port, target_host, target_port
|
||||
);
|
||||
|
||||
if !upstream.username().is_empty() {
|
||||
use base64::{engine::general_purpose, Engine as _};
|
||||
let username = upstream.username();
|
||||
let password = upstream.password().unwrap_or("");
|
||||
let auth = general_purpose::STANDARD.encode(format!("{}:{}", username, password));
|
||||
connect_req.push_str(&format!("Proxy-Authorization: Basic {}\r\n", auth));
|
||||
}
|
||||
|
||||
connect_req.push_str("\r\n");
|
||||
|
||||
// Send CONNECT request to upstream proxy
|
||||
proxy_stream.write_all(connect_req.as_bytes()).await?;
|
||||
|
||||
// Read response
|
||||
let mut buffer = [0u8; 4096];
|
||||
let n = proxy_stream.read(&mut buffer).await?;
|
||||
let response = String::from_utf8_lossy(&buffer[..n]);
|
||||
|
||||
if !response.starts_with("HTTP/1.1 200") && !response.starts_with("HTTP/1.0 200") {
|
||||
return Err(format!("Upstream proxy CONNECT failed: {}", response).into());
|
||||
}
|
||||
|
||||
proxy_stream
|
||||
}
|
||||
"socks4" | "socks5" => {
|
||||
// Connect via SOCKS proxy
|
||||
let socks_host = upstream.host_str().unwrap_or("127.0.0.1");
|
||||
let socks_port = upstream.port().unwrap_or(1080);
|
||||
let socks_addr = format!("{}:{}", socks_host, socks_port);
|
||||
|
||||
if !upstream.username().is_empty() {
|
||||
use base64::{engine::general_purpose, Engine as _};
|
||||
let username = upstream.username();
|
||||
let password = upstream.password().unwrap_or("");
|
||||
let auth = general_purpose::STANDARD.encode(format!("{}:{}", username, password));
|
||||
connect_req.push_str(&format!("Proxy-Authorization: Basic {}\r\n", auth));
|
||||
|
||||
connect_via_socks(
|
||||
&socks_addr,
|
||||
target_host,
|
||||
target_port,
|
||||
scheme == "socks5",
|
||||
if !username.is_empty() {
|
||||
Some((username, password))
|
||||
} else {
|
||||
None
|
||||
},
|
||||
)
|
||||
.await?
|
||||
}
|
||||
|
||||
connect_req.push_str("\r\n");
|
||||
|
||||
// Send CONNECT request to upstream proxy
|
||||
proxy_stream.write_all(connect_req.as_bytes()).await?;
|
||||
|
||||
// Read response
|
||||
let mut buffer = [0u8; 4096];
|
||||
let n = proxy_stream.read(&mut buffer).await?;
|
||||
let response = String::from_utf8_lossy(&buffer[..n]);
|
||||
|
||||
if !response.starts_with("HTTP/1.1 200") && !response.starts_with("HTTP/1.0 200") {
|
||||
return Err(format!("Upstream proxy CONNECT failed: {}", response).into());
|
||||
_ => {
|
||||
return Err(format!("Unsupported upstream proxy scheme: {}", scheme).into());
|
||||
}
|
||||
|
||||
proxy_stream
|
||||
}
|
||||
"socks4" | "socks5" => {
|
||||
// Connect via SOCKS proxy
|
||||
let socks_host = upstream.host_str().unwrap_or("127.0.0.1");
|
||||
let socks_port = upstream.port().unwrap_or(1080);
|
||||
let socks_addr = format!("{}:{}", socks_host, socks_port);
|
||||
|
||||
let username = upstream.username();
|
||||
let password = upstream.password().unwrap_or("");
|
||||
|
||||
connect_via_socks(
|
||||
&socks_addr,
|
||||
target_host,
|
||||
target_port,
|
||||
scheme == "socks5",
|
||||
if !username.is_empty() {
|
||||
Some((username, password))
|
||||
} else {
|
||||
None
|
||||
},
|
||||
)
|
||||
.await?
|
||||
}
|
||||
_ => {
|
||||
return Err(format!("Unsupported upstream proxy scheme: {}", scheme).into());
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Enable TCP_NODELAY on target stream for immediate data transfer
|
||||
let _ = target_stream.set_nodelay(true);
|
||||
|
||||
// Send 200 Connection Established response to client
|
||||
// CRITICAL: Must flush after writing to ensure response is sent before tunneling
|
||||
client_stream
|
||||
|
||||
@@ -133,6 +133,6 @@ pub fn generate_proxy_id() -> String {
|
||||
|
||||
pub fn is_process_running(pid: u32) -> bool {
|
||||
use sysinfo::{Pid, System};
|
||||
let system = System::new_all();
|
||||
let system = System::new();
|
||||
system.process(Pid::from(pid as usize)).is_some()
|
||||
}
|
||||
|
||||
+533
-17
@@ -17,6 +17,19 @@ pub struct BandwidthDataPoint {
|
||||
pub bytes_received: u64,
|
||||
}
|
||||
|
||||
/// Individual domain access data point for time-series tracking
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct DomainAccessPoint {
|
||||
/// Unix timestamp in seconds
|
||||
pub timestamp: u64,
|
||||
/// Domain name
|
||||
pub domain: String,
|
||||
/// Bytes sent in this request
|
||||
pub bytes_sent: u64,
|
||||
/// Bytes received in this request
|
||||
pub bytes_received: u64,
|
||||
}
|
||||
|
||||
/// Domain access information
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct DomainAccess {
|
||||
@@ -69,6 +82,9 @@ pub struct TrafficStats {
|
||||
pub session_start: u64,
|
||||
/// Last update timestamp
|
||||
pub last_update: u64,
|
||||
/// Timestamp of the last flush to disk (used to avoid double-counting session snapshots)
|
||||
#[serde(default)]
|
||||
pub last_flush_timestamp: u64,
|
||||
/// Total bytes sent across all time
|
||||
pub total_bytes_sent: u64,
|
||||
/// Total bytes received across all time
|
||||
@@ -78,9 +94,12 @@ pub struct TrafficStats {
|
||||
/// Bandwidth data points (time-series, 1 point per second, stored indefinitely)
|
||||
#[serde(default)]
|
||||
pub bandwidth_history: Vec<BandwidthDataPoint>,
|
||||
/// Domain access statistics
|
||||
/// Domain access statistics (aggregated all-time)
|
||||
#[serde(default)]
|
||||
pub domains: HashMap<String, DomainAccess>,
|
||||
/// Domain access history (time-series for filtering by period)
|
||||
#[serde(default)]
|
||||
pub domain_access_history: Vec<DomainAccessPoint>,
|
||||
/// Unique IPs accessed
|
||||
#[serde(default)]
|
||||
pub unique_ips: Vec<String>,
|
||||
@@ -94,11 +113,13 @@ impl TrafficStats {
|
||||
profile_id,
|
||||
session_start: now,
|
||||
last_update: now,
|
||||
last_flush_timestamp: 0,
|
||||
total_bytes_sent: 0,
|
||||
total_bytes_received: 0,
|
||||
total_requests: 0,
|
||||
bandwidth_history: Vec::new(),
|
||||
domains: HashMap::new(),
|
||||
domain_access_history: Vec::new(),
|
||||
unique_ips: Vec::new(),
|
||||
}
|
||||
}
|
||||
@@ -158,11 +179,43 @@ impl TrafficStats {
|
||||
});
|
||||
}
|
||||
|
||||
/// Prune old data to prevent unbounded growth
|
||||
/// Keeps only the last 7 days of bandwidth history and domain access history
|
||||
pub fn prune_old_data(&mut self) {
|
||||
const RETENTION_SECONDS: u64 = 7 * 24 * 60 * 60; // 7 days
|
||||
let now = current_timestamp();
|
||||
let cutoff = now.saturating_sub(RETENTION_SECONDS);
|
||||
|
||||
// Prune bandwidth history
|
||||
self.bandwidth_history.retain(|dp| dp.timestamp >= cutoff);
|
||||
|
||||
// Prune domain access history
|
||||
self
|
||||
.domain_access_history
|
||||
.retain(|dp| dp.timestamp >= cutoff);
|
||||
|
||||
// Remove domains that haven't been accessed recently and have no recent history
|
||||
let recent_domains: std::collections::HashSet<String> = self
|
||||
.domain_access_history
|
||||
.iter()
|
||||
.filter(|dp| dp.timestamp >= cutoff)
|
||||
.map(|dp| dp.domain.clone())
|
||||
.collect();
|
||||
|
||||
// Keep domains that were accessed recently OR have high total traffic
|
||||
self.domains.retain(|domain, access| {
|
||||
recent_domains.contains(domain)
|
||||
|| access.last_access >= cutoff
|
||||
|| (access.bytes_sent + access.bytes_received) > 1_000_000 // Keep domains with >1MB traffic
|
||||
});
|
||||
}
|
||||
|
||||
/// Record a request to a domain
|
||||
pub fn record_request(&mut self, domain: &str, bytes_sent: u64, bytes_received: u64) {
|
||||
let now = current_timestamp();
|
||||
self.total_requests += 1;
|
||||
|
||||
// Update aggregated domain stats
|
||||
let entry = self
|
||||
.domains
|
||||
.entry(domain.to_string())
|
||||
@@ -179,6 +232,14 @@ impl TrafficStats {
|
||||
entry.bytes_sent += bytes_sent;
|
||||
entry.bytes_received += bytes_received;
|
||||
entry.last_access = now;
|
||||
|
||||
// Add to domain access history for time-period filtering
|
||||
self.domain_access_history.push(DomainAccessPoint {
|
||||
timestamp: now,
|
||||
domain: domain.to_string(),
|
||||
bytes_sent,
|
||||
bytes_received,
|
||||
});
|
||||
}
|
||||
|
||||
/// Record an IP address access
|
||||
@@ -209,6 +270,63 @@ fn current_timestamp() -> u64 {
|
||||
.as_secs()
|
||||
}
|
||||
|
||||
/// File lock guard for preventing concurrent writes
|
||||
struct FileLockGuard {
|
||||
_file: std::fs::File,
|
||||
}
|
||||
|
||||
/// Acquire a file lock for exclusive access
|
||||
/// On Unix, uses flock; on Windows, uses file handles
|
||||
fn acquire_file_lock(lock_path: &PathBuf) -> Result<FileLockGuard, Box<dyn std::error::Error>> {
|
||||
use std::fs::OpenOptions;
|
||||
|
||||
let file = OpenOptions::new()
|
||||
.create(true)
|
||||
.write(true)
|
||||
.truncate(false)
|
||||
.open(lock_path)?;
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
use std::os::unix::io::AsRawFd;
|
||||
let fd = file.as_raw_fd();
|
||||
unsafe {
|
||||
if libc::flock(fd, libc::LOCK_EX | libc::LOCK_NB) != 0 {
|
||||
return Err("Failed to acquire file lock".into());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
{
|
||||
use std::os::windows::io::AsRawHandle;
|
||||
use windows::Win32::Foundation::HANDLE;
|
||||
use windows::Win32::Storage::FileSystem::LockFileEx;
|
||||
use windows::Win32::Storage::FileSystem::LOCKFILE_EXCLUSIVE_LOCK;
|
||||
use windows::Win32::Storage::FileSystem::LOCKFILE_FAIL_IMMEDIATELY;
|
||||
use windows::Win32::System::IO::OVERLAPPED;
|
||||
|
||||
let handle = HANDLE(file.as_raw_handle() as *mut core::ffi::c_void);
|
||||
unsafe {
|
||||
let mut overlapped: OVERLAPPED = std::mem::zeroed();
|
||||
if LockFileEx(
|
||||
handle,
|
||||
LOCKFILE_EXCLUSIVE_LOCK | LOCKFILE_FAIL_IMMEDIATELY,
|
||||
Some(0),
|
||||
u32::MAX,
|
||||
u32::MAX,
|
||||
&mut overlapped,
|
||||
)
|
||||
.is_err()
|
||||
{
|
||||
return Err("Failed to acquire file lock".into());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(FileLockGuard { _file: file })
|
||||
}
|
||||
|
||||
/// Get the traffic stats storage directory
|
||||
pub fn get_traffic_stats_dir() -> PathBuf {
|
||||
let base_dirs = BaseDirs::new().expect("Failed to get base directories");
|
||||
@@ -362,6 +480,14 @@ fn merge_traffic_stats(dest: &mut TrafficStats, src: &TrafficStats) {
|
||||
entry.last_access = entry.last_access.max(access.last_access);
|
||||
}
|
||||
|
||||
// Merge domain access history
|
||||
let mut combined_domain_history: Vec<DomainAccessPoint> = dest.domain_access_history.clone();
|
||||
for point in &src.domain_access_history {
|
||||
combined_domain_history.push(point.clone());
|
||||
}
|
||||
combined_domain_history.sort_by_key(|p| p.timestamp);
|
||||
dest.domain_access_history = combined_domain_history;
|
||||
|
||||
// Merge unique IPs
|
||||
for ip in &src.unique_ips {
|
||||
if !dest.unique_ips.contains(ip) {
|
||||
@@ -398,6 +524,17 @@ pub fn clear_all_traffic_stats() -> Result<(), Box<dyn std::error::Error>> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Lightweight session snapshot for real-time updates (written frequently, separate from full stats)
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
struct SessionSnapshot {
|
||||
proxy_id: String,
|
||||
profile_id: Option<String>,
|
||||
timestamp: u64,
|
||||
bytes_sent: u64,
|
||||
bytes_received: u64,
|
||||
requests: u64,
|
||||
}
|
||||
|
||||
/// Live bandwidth tracker for real-time stats collection in the proxy
|
||||
/// This is designed to be used from within the proxy server
|
||||
pub struct LiveTrafficTracker {
|
||||
@@ -410,6 +547,7 @@ pub struct LiveTrafficTracker {
|
||||
ips: RwLock<Vec<String>>,
|
||||
#[allow(dead_code)]
|
||||
session_start: u64,
|
||||
last_session_write: std::sync::atomic::AtomicU64,
|
||||
}
|
||||
|
||||
impl LiveTrafficTracker {
|
||||
@@ -423,9 +561,46 @@ impl LiveTrafficTracker {
|
||||
domain_stats: RwLock::new(HashMap::new()),
|
||||
ips: RwLock::new(Vec::new()),
|
||||
session_start: current_timestamp(),
|
||||
last_session_write: std::sync::atomic::AtomicU64::new(0),
|
||||
}
|
||||
}
|
||||
|
||||
/// Write a lightweight session snapshot for real-time updates
|
||||
/// This is much smaller than full stats and can be written frequently
|
||||
pub fn write_session_snapshot(&self) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let now = current_timestamp();
|
||||
let last_write = self.last_session_write.load(Ordering::Relaxed);
|
||||
|
||||
// Only write if at least 1 second has passed (avoid excessive writes)
|
||||
if now.saturating_sub(last_write) < 1 {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let snapshot = SessionSnapshot {
|
||||
proxy_id: self.proxy_id.clone(),
|
||||
profile_id: self.profile_id.clone(),
|
||||
timestamp: now,
|
||||
bytes_sent: self.bytes_sent.load(Ordering::Relaxed),
|
||||
bytes_received: self.bytes_received.load(Ordering::Relaxed),
|
||||
requests: self.requests.load(Ordering::Relaxed),
|
||||
};
|
||||
|
||||
let storage_key = self
|
||||
.profile_id
|
||||
.clone()
|
||||
.unwrap_or_else(|| self.proxy_id.clone());
|
||||
let session_file = get_traffic_stats_dir().join(format!("{}.session.json", storage_key));
|
||||
|
||||
// Write atomically using a temp file
|
||||
let temp_file = session_file.with_extension("tmp");
|
||||
let content = serde_json::to_string(&snapshot)?;
|
||||
fs::write(&temp_file, content)?;
|
||||
fs::rename(&temp_file, &session_file)?;
|
||||
|
||||
self.last_session_write.store(now, Ordering::Relaxed);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn add_bytes_sent(&self, bytes: u64) {
|
||||
self.bytes_sent.fetch_add(bytes, Ordering::Relaxed);
|
||||
}
|
||||
@@ -475,10 +650,120 @@ impl LiveTrafficTracker {
|
||||
)
|
||||
}
|
||||
|
||||
/// Create a real-time snapshot that merges in-memory data with disk-stored data
|
||||
/// This provides near real-time updates without waiting for disk flush
|
||||
pub fn to_realtime_snapshot(&self) -> TrafficSnapshot {
|
||||
let now = current_timestamp();
|
||||
let cutoff = now.saturating_sub(60); // Last 60 seconds for mini chart
|
||||
|
||||
// Get in-memory counters (not yet flushed to disk)
|
||||
let in_memory_sent = self.bytes_sent.load(Ordering::Relaxed);
|
||||
let in_memory_recv = self.bytes_received.load(Ordering::Relaxed);
|
||||
let in_memory_requests = self.requests.load(Ordering::Relaxed);
|
||||
|
||||
// Load disk-stored stats
|
||||
let storage_key = self
|
||||
.profile_id
|
||||
.clone()
|
||||
.unwrap_or_else(|| self.proxy_id.clone());
|
||||
let disk_stats = load_traffic_stats(&storage_key);
|
||||
|
||||
if let Some(stats) = disk_stats {
|
||||
// Merge in-memory data with disk data
|
||||
let total_sent = stats.total_bytes_sent + in_memory_sent;
|
||||
let total_recv = stats.total_bytes_received + in_memory_recv;
|
||||
let total_requests = stats.total_requests + in_memory_requests;
|
||||
|
||||
// Get current bandwidth from in-memory counters (most recent)
|
||||
// For the chart, we'll use disk data + current in-memory data point
|
||||
let mut recent_bandwidth = stats
|
||||
.bandwidth_history
|
||||
.iter()
|
||||
.filter(|dp| dp.timestamp >= cutoff)
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
// Add current second's data if we have in-memory traffic
|
||||
if in_memory_sent > 0 || in_memory_recv > 0 {
|
||||
// Check if we already have a data point for this second
|
||||
if let Some(last) = recent_bandwidth.last_mut() {
|
||||
if last.timestamp == now {
|
||||
last.bytes_sent += in_memory_sent;
|
||||
last.bytes_received += in_memory_recv;
|
||||
} else {
|
||||
recent_bandwidth.push(BandwidthDataPoint {
|
||||
timestamp: now,
|
||||
bytes_sent: in_memory_sent,
|
||||
bytes_received: in_memory_recv,
|
||||
});
|
||||
}
|
||||
} else {
|
||||
recent_bandwidth.push(BandwidthDataPoint {
|
||||
timestamp: now,
|
||||
bytes_sent: in_memory_sent,
|
||||
bytes_received: in_memory_recv,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
TrafficSnapshot {
|
||||
profile_id: self.profile_id.clone(),
|
||||
session_start: stats.session_start,
|
||||
last_update: now,
|
||||
total_bytes_sent: total_sent,
|
||||
total_bytes_received: total_recv,
|
||||
total_requests,
|
||||
current_bytes_sent: in_memory_sent,
|
||||
current_bytes_received: in_memory_recv,
|
||||
recent_bandwidth,
|
||||
}
|
||||
} else {
|
||||
// No disk data yet, use only in-memory data
|
||||
let recent_bandwidth = if in_memory_sent > 0 || in_memory_recv > 0 {
|
||||
vec![BandwidthDataPoint {
|
||||
timestamp: now,
|
||||
bytes_sent: in_memory_sent,
|
||||
bytes_received: in_memory_recv,
|
||||
}]
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
TrafficSnapshot {
|
||||
profile_id: self.profile_id.clone(),
|
||||
session_start: self.session_start,
|
||||
last_update: now,
|
||||
total_bytes_sent: in_memory_sent,
|
||||
total_bytes_received: in_memory_recv,
|
||||
total_requests: in_memory_requests,
|
||||
current_bytes_sent: in_memory_sent,
|
||||
current_bytes_received: in_memory_recv,
|
||||
recent_bandwidth,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Flush current stats to disk and return the delta
|
||||
pub fn flush_to_disk(&self) -> Result<(u64, u64), Box<dyn std::error::Error>> {
|
||||
let bytes_sent = self.bytes_sent.swap(0, Ordering::Relaxed);
|
||||
let bytes_received = self.bytes_received.swap(0, Ordering::Relaxed);
|
||||
/// Returns None if there's no new data to flush
|
||||
pub fn flush_to_disk(&self) -> Result<Option<(u64, u64)>, Box<dyn std::error::Error>> {
|
||||
let bytes_sent = self.bytes_sent.load(Ordering::Relaxed);
|
||||
let bytes_received = self.bytes_received.load(Ordering::Relaxed);
|
||||
|
||||
// Check if there's any new data to flush
|
||||
let has_domain_updates = {
|
||||
let domain_map = self.domain_stats.read().ok();
|
||||
domain_map.is_some_and(|dm| !dm.is_empty())
|
||||
};
|
||||
|
||||
let has_ip_updates = {
|
||||
let ips = self.ips.read().ok();
|
||||
ips.is_some_and(|i| !i.is_empty())
|
||||
};
|
||||
|
||||
// Only flush if there's meaningful new data (bytes or domain/IP updates)
|
||||
if bytes_sent == 0 && bytes_received == 0 && !has_domain_updates && !has_ip_updates {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
// Use profile_id as storage key if available, otherwise fall back to proxy_id
|
||||
let storage_key = self
|
||||
@@ -486,6 +771,19 @@ impl LiveTrafficTracker {
|
||||
.clone()
|
||||
.unwrap_or_else(|| self.proxy_id.clone());
|
||||
|
||||
// Use file locking to prevent concurrent writes from multiple proxy processes
|
||||
let lock_path = get_traffic_stats_dir().join(format!("{}.lock", storage_key));
|
||||
let _lock = match acquire_file_lock(&lock_path) {
|
||||
Ok(lock) => lock,
|
||||
Err(e) => {
|
||||
// If lock acquisition fails, reset counters to prevent indefinite accumulation
|
||||
// The data will be lost, but this prevents memory growth
|
||||
let _ = self.bytes_sent.swap(0, Ordering::Relaxed);
|
||||
let _ = self.bytes_received.swap(0, Ordering::Relaxed);
|
||||
return Err(e);
|
||||
}
|
||||
};
|
||||
|
||||
// Load or create stats using the storage key
|
||||
let mut stats = load_traffic_stats(&storage_key)
|
||||
.unwrap_or_else(|| TrafficStats::new(self.proxy_id.clone(), self.profile_id.clone()));
|
||||
@@ -498,8 +796,25 @@ impl LiveTrafficTracker {
|
||||
// Update the proxy_id to current session (for debugging/tracking)
|
||||
stats.proxy_id = self.proxy_id.clone();
|
||||
|
||||
// Prune old data before adding new data to keep file size manageable
|
||||
stats.prune_old_data();
|
||||
|
||||
// Update flush timestamp BEFORE reading/resetting counters
|
||||
// This prevents double-counting session snapshots written after this timestamp
|
||||
// If we set it after reading counters, a session snapshot written just before
|
||||
// the flush completes could have a timestamp newer than last_flush_timestamp,
|
||||
// causing its data to be added even though it was already included in the flush
|
||||
let now = current_timestamp();
|
||||
stats.last_flush_timestamp = now;
|
||||
stats.last_update = now;
|
||||
|
||||
// Reset counters after reading (lock is held, so flush will proceed)
|
||||
let sent = self.bytes_sent.swap(0, Ordering::Relaxed);
|
||||
let received = self.bytes_received.swap(0, Ordering::Relaxed);
|
||||
let _requests = self.requests.swap(0, Ordering::Relaxed);
|
||||
|
||||
// Update bandwidth history
|
||||
stats.record_bandwidth(bytes_sent, bytes_received);
|
||||
stats.record_bandwidth(sent, received);
|
||||
|
||||
// Update domain stats
|
||||
if let Ok(mut domain_map) = self.domain_stats.write() {
|
||||
@@ -510,17 +825,17 @@ impl LiveTrafficTracker {
|
||||
}
|
||||
}
|
||||
|
||||
// Update IPs
|
||||
if let Ok(ips) = self.ips.read() {
|
||||
for ip in ips.iter() {
|
||||
stats.record_ip(ip);
|
||||
// Update IPs and clear them after flushing (like domain_stats)
|
||||
if let Ok(mut ips) = self.ips.write() {
|
||||
for ip in ips.drain(..) {
|
||||
stats.record_ip(&ip);
|
||||
}
|
||||
}
|
||||
|
||||
// Save to disk
|
||||
// Save to disk (lock is still held)
|
||||
save_traffic_stats(&stats)?;
|
||||
|
||||
Ok((bytes_sent, bytes_received))
|
||||
Ok(Some((sent, received)))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -557,7 +872,9 @@ pub struct FilteredTrafficStats {
|
||||
/// Period stats: bytes sent/received within the requested period
|
||||
pub period_bytes_sent: u64,
|
||||
pub period_bytes_received: u64,
|
||||
/// Domain access statistics (always full, as it's already aggregated)
|
||||
/// Period requests within the requested period
|
||||
pub period_requests: u64,
|
||||
/// Domain access statistics filtered to requested time period
|
||||
pub domains: HashMap<String, DomainAccess>,
|
||||
/// Unique IPs accessed
|
||||
pub unique_ips: Vec<String>,
|
||||
@@ -565,11 +882,36 @@ pub struct FilteredTrafficStats {
|
||||
|
||||
/// Get traffic stats for a profile, filtered to a specific time period
|
||||
/// seconds: number of seconds to include (0 = all time)
|
||||
/// Merges in-memory data with disk data for real-time updates
|
||||
pub fn get_traffic_stats_for_period(
|
||||
profile_id: &str,
|
||||
seconds: u64,
|
||||
) -> Option<FilteredTrafficStats> {
|
||||
let stats = load_traffic_stats(profile_id)?;
|
||||
// Get in-memory data if available
|
||||
let in_memory_sent = get_traffic_tracker()
|
||||
.and_then(|t| {
|
||||
if t.profile_id.as_deref() == Some(profile_id) {
|
||||
Some(t.bytes_sent.load(Ordering::Relaxed))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.unwrap_or(0);
|
||||
let in_memory_recv = get_traffic_tracker()
|
||||
.and_then(|t| {
|
||||
if t.profile_id.as_deref() == Some(profile_id) {
|
||||
Some(t.bytes_received.load(Ordering::Relaxed))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.unwrap_or(0);
|
||||
|
||||
let mut stats = load_traffic_stats(profile_id)?;
|
||||
|
||||
// Merge in-memory counters with disk data for real-time totals
|
||||
stats.total_bytes_sent += in_memory_sent;
|
||||
stats.total_bytes_received += in_memory_recv;
|
||||
|
||||
let now = current_timestamp();
|
||||
let cutoff = if seconds == 0 {
|
||||
@@ -579,38 +921,212 @@ pub fn get_traffic_stats_for_period(
|
||||
};
|
||||
|
||||
// Filter bandwidth history to requested period
|
||||
let filtered_history: Vec<BandwidthDataPoint> = stats
|
||||
let mut filtered_history: Vec<BandwidthDataPoint> = stats
|
||||
.bandwidth_history
|
||||
.iter()
|
||||
.filter(|dp| dp.timestamp >= cutoff)
|
||||
.cloned()
|
||||
.collect();
|
||||
|
||||
// Calculate period totals
|
||||
// Add current in-memory data point for real-time display
|
||||
if (seconds == 0 || now.saturating_sub(seconds) <= now)
|
||||
&& (in_memory_sent > 0 || in_memory_recv > 0)
|
||||
{
|
||||
// Check if we already have a data point for this second
|
||||
if let Some(last) = filtered_history.last_mut() {
|
||||
if last.timestamp == now {
|
||||
last.bytes_sent += in_memory_sent;
|
||||
last.bytes_received += in_memory_recv;
|
||||
} else {
|
||||
filtered_history.push(BandwidthDataPoint {
|
||||
timestamp: now,
|
||||
bytes_sent: in_memory_sent,
|
||||
bytes_received: in_memory_recv,
|
||||
});
|
||||
}
|
||||
} else {
|
||||
filtered_history.push(BandwidthDataPoint {
|
||||
timestamp: now,
|
||||
bytes_sent: in_memory_sent,
|
||||
bytes_received: in_memory_recv,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate period totals for bandwidth (includes in-memory data)
|
||||
let period_bytes_sent: u64 = filtered_history.iter().map(|dp| dp.bytes_sent).sum();
|
||||
let period_bytes_received: u64 = filtered_history.iter().map(|dp| dp.bytes_received).sum();
|
||||
|
||||
// Filter and aggregate domain stats for the period
|
||||
let mut filtered_domains: HashMap<String, DomainAccess> = HashMap::new();
|
||||
let mut period_requests: u64 = 0;
|
||||
|
||||
for access in stats
|
||||
.domain_access_history
|
||||
.iter()
|
||||
.filter(|a| a.timestamp >= cutoff)
|
||||
{
|
||||
period_requests += 1;
|
||||
let entry = filtered_domains
|
||||
.entry(access.domain.clone())
|
||||
.or_insert(DomainAccess {
|
||||
domain: access.domain.clone(),
|
||||
request_count: 0,
|
||||
bytes_sent: 0,
|
||||
bytes_received: 0,
|
||||
first_access: access.timestamp,
|
||||
last_access: access.timestamp,
|
||||
});
|
||||
|
||||
entry.request_count += 1;
|
||||
entry.bytes_sent += access.bytes_sent;
|
||||
entry.bytes_received += access.bytes_received;
|
||||
entry.first_access = entry.first_access.min(access.timestamp);
|
||||
entry.last_access = entry.last_access.max(access.timestamp);
|
||||
}
|
||||
|
||||
// If no domain_access_history exists (old data), fall back to all-time domains
|
||||
let domains = if stats.domain_access_history.is_empty() {
|
||||
stats.domains
|
||||
} else {
|
||||
filtered_domains
|
||||
};
|
||||
|
||||
Some(FilteredTrafficStats {
|
||||
profile_id: stats.profile_id,
|
||||
session_start: stats.session_start,
|
||||
last_update: stats.last_update,
|
||||
last_update: now, // Use current time for real-time updates
|
||||
total_bytes_sent: stats.total_bytes_sent,
|
||||
total_bytes_received: stats.total_bytes_received,
|
||||
total_requests: stats.total_requests,
|
||||
bandwidth_history: filtered_history,
|
||||
period_bytes_sent,
|
||||
period_bytes_received,
|
||||
domains: stats.domains,
|
||||
period_requests,
|
||||
domains,
|
||||
unique_ips: stats.unique_ips,
|
||||
})
|
||||
}
|
||||
|
||||
/// Get lightweight traffic snapshot for a profile (for mini charts, only recent 60 seconds)
|
||||
/// Merges in-memory data with disk data for real-time updates
|
||||
pub fn get_traffic_snapshot_for_profile(profile_id: &str) -> Option<TrafficSnapshot> {
|
||||
// First try to get real-time data from active tracker
|
||||
if let Some(tracker) = get_traffic_tracker() {
|
||||
let tracker_profile_id = tracker.profile_id.as_deref();
|
||||
if tracker_profile_id == Some(profile_id) {
|
||||
return Some(tracker.to_realtime_snapshot());
|
||||
}
|
||||
}
|
||||
|
||||
// Fall back to disk data
|
||||
let stats = load_traffic_stats(profile_id)?;
|
||||
Some(stats.to_snapshot())
|
||||
}
|
||||
|
||||
/// Load session snapshot from disk (written by proxy worker processes)
|
||||
fn load_session_snapshot(profile_id: &str) -> Option<SessionSnapshot> {
|
||||
let session_file = get_traffic_stats_dir().join(format!("{}.session.json", profile_id));
|
||||
if !session_file.exists() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let content = fs::read_to_string(&session_file).ok()?;
|
||||
serde_json::from_str::<SessionSnapshot>(&content).ok()
|
||||
}
|
||||
|
||||
/// Get all traffic snapshots with real-time data merged
|
||||
/// This provides near real-time updates by merging session snapshots with disk data
|
||||
pub fn get_all_traffic_snapshots_realtime() -> Vec<TrafficSnapshot> {
|
||||
use std::collections::HashMap;
|
||||
|
||||
// Start with disk-stored stats
|
||||
let mut snapshots: HashMap<String, TrafficSnapshot> = list_traffic_stats()
|
||||
.into_iter()
|
||||
.map(|s| {
|
||||
let key = s.profile_id.clone().unwrap_or_else(|| s.proxy_id.clone());
|
||||
(key, s.to_snapshot())
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Try to merge in real-time data from active tracker (if in same process)
|
||||
if let Some(tracker) = get_traffic_tracker() {
|
||||
let key = tracker
|
||||
.profile_id
|
||||
.clone()
|
||||
.unwrap_or_else(|| tracker.proxy_id.clone());
|
||||
let realtime_snapshot = tracker.to_realtime_snapshot();
|
||||
snapshots.insert(key, realtime_snapshot);
|
||||
}
|
||||
|
||||
// Also merge session snapshots from proxy worker processes
|
||||
let storage_dir = get_traffic_stats_dir();
|
||||
if let Ok(entries) = fs::read_dir(&storage_dir) {
|
||||
for entry in entries.flatten() {
|
||||
let path = entry.path();
|
||||
if let Some(file_name) = path.file_name().and_then(|n| n.to_str()) {
|
||||
if file_name.ends_with(".session.json") {
|
||||
if let Some(profile_id) = file_name.strip_suffix(".session.json") {
|
||||
if let Some(session) = load_session_snapshot(profile_id) {
|
||||
// Merge session data with disk snapshot
|
||||
if let Some(snapshot) = snapshots.get_mut(profile_id) {
|
||||
// Only merge session data if it's newer than the last flush
|
||||
// Session snapshots written before the last flush contain bytes already
|
||||
// included in disk totals, so merging them would cause double-counting
|
||||
let disk_stats = load_traffic_stats(profile_id);
|
||||
let last_flush = disk_stats
|
||||
.as_ref()
|
||||
.map(|s| s.last_flush_timestamp)
|
||||
.unwrap_or(0);
|
||||
|
||||
if session.timestamp > last_flush {
|
||||
// Session data contains in-memory counters not yet flushed to disk
|
||||
// Disk snapshot contains cumulative totals already flushed
|
||||
// We need to ADD them, not take the max, to get the true total
|
||||
snapshot.total_bytes_sent =
|
||||
snapshot.total_bytes_sent.saturating_add(session.bytes_sent);
|
||||
snapshot.total_bytes_received = snapshot
|
||||
.total_bytes_received
|
||||
.saturating_add(session.bytes_received);
|
||||
snapshot.total_requests =
|
||||
snapshot.total_requests.saturating_add(session.requests);
|
||||
snapshot.current_bytes_sent = session.bytes_sent;
|
||||
snapshot.current_bytes_received = session.bytes_received;
|
||||
snapshot.last_update = session.timestamp;
|
||||
} else {
|
||||
// Session snapshot is stale (written before last flush)
|
||||
// Use current values from disk snapshot, but update timestamp if session is newer
|
||||
if session.timestamp > snapshot.last_update {
|
||||
snapshot.last_update = session.timestamp;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Create new snapshot from session data
|
||||
snapshots.insert(
|
||||
profile_id.to_string(),
|
||||
TrafficSnapshot {
|
||||
profile_id: session.profile_id,
|
||||
session_start: current_timestamp().saturating_sub(60),
|
||||
last_update: session.timestamp,
|
||||
total_bytes_sent: session.bytes_sent,
|
||||
total_bytes_received: session.bytes_received,
|
||||
total_requests: session.requests,
|
||||
current_bytes_sent: session.bytes_sent,
|
||||
current_bytes_received: session.bytes_received,
|
||||
recent_bandwidth: vec![],
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
snapshots.into_values().collect()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
@@ -32,7 +32,7 @@ pub struct BackgroundUpdateResult {
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
struct BackgroundUpdateState {
|
||||
pub(crate) struct BackgroundUpdateState {
|
||||
last_update_time: u64,
|
||||
update_interval_hours: u64,
|
||||
}
|
||||
@@ -78,12 +78,12 @@ impl VersionUpdater {
|
||||
Ok(cache_dir)
|
||||
}
|
||||
|
||||
fn get_background_update_state_file() -> Result<PathBuf, Box<dyn std::error::Error>> {
|
||||
pub(crate) fn get_background_update_state_file() -> Result<PathBuf, Box<dyn std::error::Error>> {
|
||||
let cache_dir = Self::get_cache_dir()?;
|
||||
Ok(cache_dir.join("background_update_state.json"))
|
||||
}
|
||||
|
||||
fn load_background_update_state() -> BackgroundUpdateState {
|
||||
pub(crate) fn load_background_update_state() -> BackgroundUpdateState {
|
||||
let state_file = match Self::get_background_update_state_file() {
|
||||
Ok(file) => file,
|
||||
Err(_) => return BackgroundUpdateState::default(),
|
||||
@@ -101,7 +101,7 @@ impl VersionUpdater {
|
||||
serde_json::from_str(&content).unwrap_or_default()
|
||||
}
|
||||
|
||||
fn save_background_update_state(
|
||||
pub(crate) fn save_background_update_state(
|
||||
state: &BackgroundUpdateState,
|
||||
) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let state_file = Self::get_background_update_state_file()?;
|
||||
@@ -516,50 +516,31 @@ pub async fn clear_all_version_cache_and_refetch(
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use serial_test::serial;
|
||||
use std::env;
|
||||
use tempfile::TempDir;
|
||||
|
||||
// Helper function to create a unique test state file
|
||||
fn get_test_state_file(test_name: &str) -> PathBuf {
|
||||
let cache_dir = VersionUpdater::get_cache_dir().unwrap();
|
||||
cache_dir.join(format!("test_{test_name}_state.json"))
|
||||
fn setup_test_env() -> TempDir {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp directory");
|
||||
env::set_var("HOME", temp_dir.path());
|
||||
temp_dir
|
||||
}
|
||||
|
||||
fn save_test_state(
|
||||
test_name: &str,
|
||||
state: &BackgroundUpdateState,
|
||||
) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let state_file = get_test_state_file(test_name);
|
||||
let content = serde_json::to_string_pretty(state)?;
|
||||
fs::write(&state_file, content)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn load_test_state(test_name: &str) -> BackgroundUpdateState {
|
||||
let state_file = get_test_state_file(test_name);
|
||||
|
||||
if !state_file.exists() {
|
||||
return BackgroundUpdateState::default();
|
||||
}
|
||||
|
||||
let content = match fs::read_to_string(&state_file) {
|
||||
Ok(content) => content,
|
||||
Err(_) => return BackgroundUpdateState::default(),
|
||||
};
|
||||
|
||||
match serde_json::from_str(&content) {
|
||||
Ok(state) => state,
|
||||
Err(e) => {
|
||||
eprintln!("Failed to parse test state file {:?}: {}", state_file, e);
|
||||
BackgroundUpdateState::default()
|
||||
}
|
||||
fn cleanup_state_file() {
|
||||
if let Ok(state_file) = VersionUpdater::get_background_update_state_file() {
|
||||
let _ = fs::remove_file(&state_file);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[serial]
|
||||
fn test_background_update_state_persistence() {
|
||||
let test_name = "persistence";
|
||||
let _temp_dir = setup_test_env();
|
||||
|
||||
// Clean up any existing test file first
|
||||
let _ = fs::remove_file(get_test_state_file(test_name));
|
||||
// Clean up any existing state file first
|
||||
if let Ok(state_file) = VersionUpdater::get_background_update_state_file() {
|
||||
let _ = fs::remove_file(&state_file);
|
||||
}
|
||||
|
||||
// Create a test state
|
||||
let test_state = BackgroundUpdateState {
|
||||
@@ -568,33 +549,55 @@ mod tests {
|
||||
};
|
||||
|
||||
// Save the state
|
||||
save_test_state(test_name, &test_state).unwrap();
|
||||
let save_result = VersionUpdater::save_background_update_state(&test_state);
|
||||
assert!(save_result.is_ok(), "Should save state successfully");
|
||||
|
||||
// Verify file was created
|
||||
let state_file = get_test_state_file(test_name);
|
||||
let state_file = VersionUpdater::get_background_update_state_file().unwrap();
|
||||
assert!(state_file.exists(), "State file should exist after saving");
|
||||
|
||||
// Load the state back
|
||||
let loaded_state = load_test_state(test_name);
|
||||
// Read the file directly to verify contents
|
||||
let file_content = fs::read_to_string(&state_file).expect("Should read state file");
|
||||
let file_state: BackgroundUpdateState =
|
||||
serde_json::from_str(&file_content).expect("Should parse state file");
|
||||
|
||||
// Verify the file contents match what we saved
|
||||
assert_eq!(
|
||||
file_state.last_update_time, test_state.last_update_time,
|
||||
"File last_update_time should match. Expected: {}, Got: {}",
|
||||
test_state.last_update_time, file_state.last_update_time
|
||||
);
|
||||
assert_eq!(
|
||||
file_state.update_interval_hours, test_state.update_interval_hours,
|
||||
"File update_interval_hours should match"
|
||||
);
|
||||
|
||||
// Load the state back using the method
|
||||
let loaded_state = VersionUpdater::load_background_update_state();
|
||||
|
||||
// Verify the values match
|
||||
assert_eq!(
|
||||
loaded_state.last_update_time, test_state.last_update_time,
|
||||
"last_update_time should match. Expected: {}, Got: {}",
|
||||
"Loaded last_update_time should match. Expected: {}, Got: {}",
|
||||
test_state.last_update_time, loaded_state.last_update_time
|
||||
);
|
||||
assert_eq!(
|
||||
loaded_state.update_interval_hours, test_state.update_interval_hours,
|
||||
"update_interval_hours should match"
|
||||
"Loaded update_interval_hours should match"
|
||||
);
|
||||
|
||||
// Clean up
|
||||
let _ = fs::remove_file(get_test_state_file(test_name));
|
||||
cleanup_state_file();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[serial]
|
||||
fn test_should_run_background_update_logic() {
|
||||
// Create isolated test states to avoid interference
|
||||
let _temp_dir = setup_test_env();
|
||||
|
||||
// Clean up any existing state file first
|
||||
cleanup_state_file();
|
||||
|
||||
let current_time = VersionUpdater::get_current_timestamp();
|
||||
|
||||
// Test with recent update (should not update)
|
||||
@@ -643,6 +646,9 @@ mod tests {
|
||||
should_update_never,
|
||||
"Should update when never updated before"
|
||||
);
|
||||
|
||||
// Clean up
|
||||
cleanup_state_file();
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"$schema": "https://schema.tauri.app/config/2",
|
||||
"productName": "Donut",
|
||||
"version": "0.13.0",
|
||||
"version": "0.13.8",
|
||||
"identifier": "com.donutbrowser",
|
||||
"build": {
|
||||
"beforeDevCommand": "pnpm copy-proxy-binary && pnpm dev",
|
||||
|
||||
+2
-6
@@ -30,13 +30,11 @@ import { showErrorToast, showToast } from "@/lib/toast-utils";
|
||||
import type { BrowserProfile, CamoufoxConfig } from "@/types";
|
||||
|
||||
type BrowserTypeString =
|
||||
| "mullvad-browser"
|
||||
| "firefox"
|
||||
| "firefox-developer"
|
||||
| "chromium"
|
||||
| "brave"
|
||||
| "zen"
|
||||
| "tor-browser"
|
||||
| "camoufox";
|
||||
|
||||
interface PendingUrl {
|
||||
@@ -648,9 +646,7 @@ export default function Home() {
|
||||
if (profiles.length === 0) return;
|
||||
|
||||
const deprecatedProfiles = profiles.filter(
|
||||
(p) =>
|
||||
["tor-browser", "mullvad-browser"].includes(p.browser) ||
|
||||
(p.release_type === "nightly" && p.browser !== "firefox-developer"),
|
||||
(p) => p.release_type === "nightly" && p.browser !== "firefox-developer",
|
||||
);
|
||||
|
||||
if (deprecatedProfiles.length > 0) {
|
||||
@@ -661,7 +657,7 @@ export default function Home() {
|
||||
id: "deprecated-profiles-warning",
|
||||
type: "error",
|
||||
title: "Some profiles will be deprecated soon",
|
||||
description: `The following profiles will be deprecated soon: ${deprecatedNames}. Tor Browser, Mullvad Browser, and nightly profiles (except Firefox Developers Edition) will be removed in upcoming versions. Please check GitHub for migration instructions.`,
|
||||
description: `The following profiles will be deprecated soon: ${deprecatedNames}. Nightly profiles (except Firefox Developers Edition) will be removed in upcoming versions. Please check GitHub for migration instructions.`,
|
||||
duration: 15000,
|
||||
action: {
|
||||
label: "Learn more",
|
||||
|
||||
@@ -69,11 +69,11 @@ export function BandwidthMiniChart({
|
||||
type="button"
|
||||
onClick={onClick}
|
||||
className={cn(
|
||||
"relative flex items-center gap-1.5 px-2 rounded cursor-pointer hover:bg-accent/50 transition-colors min-w-[130px] border-none bg-transparent",
|
||||
"relative flex items-center gap-1.5 px-2 rounded cursor-pointer hover:bg-accent/50 transition-colors min-w-[120px] border-none bg-transparent",
|
||||
className,
|
||||
)}
|
||||
>
|
||||
<div className="flex-1 h-3">
|
||||
<div className="flex-1 h-3 pointer-events-none">
|
||||
<ResponsiveContainer width="100%" height="100%">
|
||||
<AreaChart
|
||||
data={chartData}
|
||||
@@ -106,6 +106,8 @@ export function BandwidthMiniChart({
|
||||
strokeWidth={1}
|
||||
fill="url(#bandwidthGradient)"
|
||||
isAnimationActive={false}
|
||||
dot={false}
|
||||
activeDot={false}
|
||||
/>
|
||||
</AreaChart>
|
||||
</ResponsiveContainer>
|
||||
|
||||
@@ -42,13 +42,11 @@ const getCurrentOS = (): CamoufoxOS => {
|
||||
import { RippleButton } from "./ui/ripple";
|
||||
|
||||
type BrowserTypeString =
|
||||
| "mullvad-browser"
|
||||
| "firefox"
|
||||
| "firefox-developer"
|
||||
| "chromium"
|
||||
| "brave"
|
||||
| "zen"
|
||||
| "tor-browser"
|
||||
| "camoufox";
|
||||
|
||||
interface CreateProfileDialogProps {
|
||||
@@ -92,14 +90,6 @@ const browserOptions: BrowserOption[] = [
|
||||
value: "zen",
|
||||
label: "Zen Browser",
|
||||
},
|
||||
{
|
||||
value: "mullvad-browser",
|
||||
label: "Mullvad Browser",
|
||||
},
|
||||
{
|
||||
value: "tor-browser",
|
||||
label: "Tor Browser",
|
||||
},
|
||||
];
|
||||
|
||||
export function CreateProfileDialog({
|
||||
@@ -429,12 +419,9 @@ export function CreateProfileDialog({
|
||||
isBrowserVersionAvailable,
|
||||
]);
|
||||
|
||||
// Filter supported browsers for regular browsers (excluding mullvad and tor)
|
||||
const regularBrowsers = browserOptions.filter(
|
||||
(browser) =>
|
||||
supportedBrowsers.includes(browser.value) &&
|
||||
browser.value !== "mullvad-browser" &&
|
||||
browser.value !== "tor-browser",
|
||||
// Filter supported browsers for regular browsers
|
||||
const regularBrowsers = browserOptions.filter((browser) =>
|
||||
supportedBrowsers.includes(browser.value),
|
||||
);
|
||||
|
||||
return (
|
||||
|
||||
@@ -62,10 +62,7 @@ export function ImportProfileDialog({
|
||||
const { supportedBrowsers, isLoading: isLoadingSupport } =
|
||||
useBrowserSupport();
|
||||
|
||||
// Exclude browsers that are no longer supported for import
|
||||
const importableBrowsers = supportedBrowsers.filter(
|
||||
(b) => b !== "mullvad-browser" && b !== "tor-browser",
|
||||
);
|
||||
const importableBrowsers = supportedBrowsers;
|
||||
|
||||
const loadDetectedProfiles = useCallback(async () => {
|
||||
setIsLoading(true);
|
||||
|
||||
@@ -287,7 +287,6 @@ const MultipleSelector = React.forwardRef<
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [debouncedSearchTerm, groupBy, open, triggerSearchOnFocus, onSearch]);
|
||||
|
||||
// biome-ignore lint/correctness/noNestedComponentDefinitions: public code, TODO: fix
|
||||
const CreatableItem = () => {
|
||||
if (!creatable) return undefined;
|
||||
if (
|
||||
|
||||
@@ -331,7 +331,9 @@ const TagsCell = React.memo<{
|
||||
ref={containerRef as unknown as React.RefObject<HTMLButtonElement>}
|
||||
className={cn(
|
||||
"flex overflow-hidden gap-1 items-center px-2 py-1 h-6 w-full bg-transparent rounded border-none cursor-pointer",
|
||||
isDisabled ? "opacity-60" : "cursor-pointer hover:bg-accent/50",
|
||||
isDisabled
|
||||
? "opacity-60 cursor-not-allowed"
|
||||
: "cursor-pointer hover:bg-accent/50",
|
||||
)}
|
||||
onClick={() => {
|
||||
if (!isDisabled) setOpenTagsEditorFor(profile.id);
|
||||
@@ -354,7 +356,7 @@ const TagsCell = React.memo<{
|
||||
);
|
||||
|
||||
return (
|
||||
<div className="w-48 h-6 cursor-pointer">
|
||||
<div className="w-40 h-6 cursor-pointer">
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>{ButtonContent}</TooltipTrigger>
|
||||
{hiddenCount > 0 && (
|
||||
@@ -380,13 +382,13 @@ const TagsCell = React.memo<{
|
||||
return (
|
||||
<div
|
||||
className={cn(
|
||||
"w-48 h-6 relative",
|
||||
"w-40 h-6 relative",
|
||||
isDisabled && "opacity-60 pointer-events-none",
|
||||
)}
|
||||
>
|
||||
<div
|
||||
ref={editorRef}
|
||||
className="absolute top-0 left-0 z-50 w-48 min-h-6 bg-popover rounded-md shadow-md"
|
||||
className="absolute top-0 left-0 z-50 w-40 min-h-6 bg-popover rounded-md shadow-md"
|
||||
>
|
||||
<MultipleSelector
|
||||
value={valueOptions}
|
||||
@@ -866,8 +868,12 @@ export function ProfilesDataTable({
|
||||
);
|
||||
|
||||
// Fetch traffic snapshots for running profiles (lightweight, real-time data)
|
||||
// Using runningProfiles.size as dependency to avoid Set reference comparison issues
|
||||
const runningCount = runningProfiles.size;
|
||||
// Convert Set to sorted array to avoid Set reference comparison issues in dependencies
|
||||
const runningProfileIds = React.useMemo(
|
||||
() => Array.from(runningProfiles).sort(),
|
||||
[runningProfiles],
|
||||
);
|
||||
const runningCount = runningProfileIds.length;
|
||||
React.useEffect(() => {
|
||||
if (!browserState.isClient) return;
|
||||
|
||||
@@ -884,9 +890,12 @@ export function ProfilesDataTable({
|
||||
const newSnapshots: Record<string, TrafficSnapshot> = {};
|
||||
for (const snapshot of allSnapshots) {
|
||||
if (snapshot.profile_id) {
|
||||
const existing = newSnapshots[snapshot.profile_id];
|
||||
if (!existing || snapshot.last_update > existing.last_update) {
|
||||
newSnapshots[snapshot.profile_id] = snapshot;
|
||||
// Only keep snapshots for profiles that are currently running
|
||||
if (runningProfileIds.includes(snapshot.profile_id)) {
|
||||
const existing = newSnapshots[snapshot.profile_id];
|
||||
if (!existing || snapshot.last_update > existing.last_update) {
|
||||
newSnapshots[snapshot.profile_id] = snapshot;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -899,7 +908,27 @@ export function ProfilesDataTable({
|
||||
void fetchTrafficSnapshots();
|
||||
const interval = setInterval(fetchTrafficSnapshots, 1000);
|
||||
return () => clearInterval(interval);
|
||||
}, [browserState.isClient, runningCount]);
|
||||
}, [browserState.isClient, runningCount, runningProfileIds]);
|
||||
|
||||
// Clean up snapshots for profiles that are no longer running
|
||||
React.useEffect(() => {
|
||||
if (!browserState.isClient) return;
|
||||
|
||||
setTrafficSnapshots((prev) => {
|
||||
const cleaned: Record<string, TrafficSnapshot> = {};
|
||||
for (const [profileId, snapshot] of Object.entries(prev)) {
|
||||
// Only keep snapshots for profiles that are currently running
|
||||
if (runningProfileIds.includes(profileId)) {
|
||||
cleaned[profileId] = snapshot;
|
||||
}
|
||||
}
|
||||
// Only update if something was removed
|
||||
if (Object.keys(cleaned).length !== Object.keys(prev).length) {
|
||||
return cleaned;
|
||||
}
|
||||
return prev;
|
||||
});
|
||||
}, [browserState.isClient, runningProfileIds]);
|
||||
|
||||
// Clear launching/stopping spinners when backend reports running status changes
|
||||
React.useEffect(() => {
|
||||
@@ -1451,8 +1480,9 @@ export function ProfilesDataTable({
|
||||
size="sm"
|
||||
disabled={!canLaunch || isLaunching || isStopping}
|
||||
className={cn(
|
||||
"cursor-pointer min-w-[70px] h-7",
|
||||
!canLaunch && "opacity-50",
|
||||
"min-w-[70px] h-7",
|
||||
!canLaunch && "opacity-50 cursor-not-allowed",
|
||||
canLaunch && "cursor-pointer",
|
||||
)}
|
||||
onClick={() =>
|
||||
isRunning
|
||||
@@ -1677,25 +1707,19 @@ export function ProfilesDataTable({
|
||||
? (meta.storedProxies.find((p) => p.id === effectiveProxyId) ??
|
||||
null)
|
||||
: null;
|
||||
const displayName =
|
||||
profile.browser === "tor-browser"
|
||||
? "Not supported"
|
||||
: effectiveProxy
|
||||
? effectiveProxy.name
|
||||
: "Not Selected";
|
||||
const displayName = effectiveProxy
|
||||
? effectiveProxy.name
|
||||
: "Not Selected";
|
||||
const profileHasProxy = Boolean(effectiveProxy);
|
||||
const tooltipText =
|
||||
profile.browser === "tor-browser"
|
||||
? "Proxies are not supported for TOR browser"
|
||||
: profileHasProxy && effectiveProxy
|
||||
? effectiveProxy.name
|
||||
: null;
|
||||
profileHasProxy && effectiveProxy ? effectiveProxy.name : null;
|
||||
const isSelectorOpen = meta.openProxySelectorFor === profile.id;
|
||||
|
||||
// When profile is running, show bandwidth chart instead of proxy selector
|
||||
if (isRunning && meta.trafficSnapshots) {
|
||||
// Find the traffic snapshot for this profile by matching profile_id
|
||||
const snapshot = meta.trafficSnapshots[profile.id];
|
||||
// Only use recent_bandwidth (last 60 seconds) - minimal data needed for mini chart
|
||||
// Create a new array reference to ensure React detects changes
|
||||
const bandwidthData = snapshot?.recent_bandwidth
|
||||
? [...snapshot.recent_bandwidth]
|
||||
@@ -1714,23 +1738,6 @@ export function ProfilesDataTable({
|
||||
);
|
||||
}
|
||||
|
||||
if (profile.browser === "tor-browser") {
|
||||
return (
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<span className="flex gap-2 items-center">
|
||||
<span className="text-sm text-muted-foreground">
|
||||
Not supported
|
||||
</span>
|
||||
</span>
|
||||
</TooltipTrigger>
|
||||
{(tooltipText || displayName.length > 10) && (
|
||||
<TooltipContent>{tooltipText || displayName}</TooltipContent>
|
||||
)}
|
||||
</Tooltip>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="flex gap-2 items-center">
|
||||
<Popover
|
||||
|
||||
@@ -142,11 +142,7 @@ export function ProfileSelectorDialog({
|
||||
const runningAvailableProfile = profiles.find((profile) => {
|
||||
const isRunning = runningProfiles.has(profile.id);
|
||||
// Simple check without browserState dependency
|
||||
return (
|
||||
isRunning &&
|
||||
profile.browser !== "tor-browser" &&
|
||||
profile.browser !== "mullvad-browser"
|
||||
);
|
||||
return isRunning;
|
||||
});
|
||||
|
||||
if (runningAvailableProfile) {
|
||||
|
||||
@@ -49,10 +49,9 @@ export function ProxyAssignmentDialog({
|
||||
setIsAssigning(true);
|
||||
setError(null);
|
||||
try {
|
||||
// Filter out TOR browser profiles as they don't support proxies
|
||||
const validProfiles = selectedProfiles.filter((profileId) => {
|
||||
const profile = profiles.find((p) => p.id === profileId);
|
||||
return profile && profile.browser !== "tor-browser";
|
||||
return profile;
|
||||
});
|
||||
|
||||
if (validProfiles.length === 0) {
|
||||
@@ -119,15 +118,9 @@ export function ProxyAssignmentDialog({
|
||||
(p: BrowserProfile) => p.id === profileId,
|
||||
);
|
||||
const displayName = profile ? profile.name : profileId;
|
||||
const isTorBrowser = profile?.browser === "tor-browser";
|
||||
return (
|
||||
<li key={profileId} className="truncate">
|
||||
• {displayName}
|
||||
{isTorBrowser && (
|
||||
<span className="ml-2 text-xs text-muted-foreground">
|
||||
(TOR - no proxy support)
|
||||
</span>
|
||||
)}
|
||||
</li>
|
||||
);
|
||||
})}
|
||||
|
||||
@@ -2,7 +2,6 @@
|
||||
|
||||
import { invoke } from "@tauri-apps/api/core";
|
||||
import * as React from "react";
|
||||
import type { TooltipProps } from "recharts";
|
||||
import {
|
||||
Area,
|
||||
AreaChart,
|
||||
@@ -12,10 +11,7 @@ import {
|
||||
XAxis,
|
||||
YAxis,
|
||||
} from "recharts";
|
||||
import type {
|
||||
NameType,
|
||||
ValueType,
|
||||
} from "recharts/types/component/DefaultTooltipContent";
|
||||
import type { TooltipContentProps } from "recharts/types/component/Tooltip";
|
||||
import {
|
||||
Dialog,
|
||||
DialogContent,
|
||||
@@ -30,6 +26,11 @@ import {
|
||||
SelectTrigger,
|
||||
SelectValue,
|
||||
} from "@/components/ui/select";
|
||||
import {
|
||||
TooltipContent,
|
||||
TooltipTrigger,
|
||||
Tooltip as UITooltip,
|
||||
} from "@/components/ui/tooltip";
|
||||
import type { FilteredTrafficStats } from "@/types";
|
||||
|
||||
type TimePeriod =
|
||||
@@ -94,6 +95,53 @@ function getSecondsForPeriod(period: TimePeriod): number {
|
||||
}
|
||||
}
|
||||
|
||||
const TruncatedDomain = React.memo<{ domain: string }>(({ domain }) => {
|
||||
const ref = React.useRef<HTMLSpanElement>(null);
|
||||
const [isTruncated, setIsTruncated] = React.useState(false);
|
||||
|
||||
const checkTruncation = React.useCallback(() => {
|
||||
if (ref.current) {
|
||||
setIsTruncated(ref.current.scrollWidth > ref.current.clientWidth);
|
||||
}
|
||||
}, []);
|
||||
|
||||
React.useLayoutEffect(() => {
|
||||
checkTruncation();
|
||||
});
|
||||
|
||||
React.useEffect(() => {
|
||||
const resizeObserver = new ResizeObserver(checkTruncation);
|
||||
if (ref.current) {
|
||||
resizeObserver.observe(ref.current);
|
||||
}
|
||||
|
||||
return () => {
|
||||
resizeObserver.disconnect();
|
||||
};
|
||||
}, [checkTruncation]);
|
||||
|
||||
const content = (
|
||||
<span ref={ref} className="truncate max-w-[200px] block">
|
||||
{domain}
|
||||
</span>
|
||||
);
|
||||
|
||||
if (!isTruncated) {
|
||||
return content;
|
||||
}
|
||||
|
||||
return (
|
||||
<UITooltip>
|
||||
<TooltipTrigger asChild>{content}</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
<p>{domain}</p>
|
||||
</TooltipContent>
|
||||
</UITooltip>
|
||||
);
|
||||
});
|
||||
|
||||
TruncatedDomain.displayName = "TruncatedDomain";
|
||||
|
||||
export function TrafficDetailsDialog({
|
||||
isOpen,
|
||||
onClose,
|
||||
@@ -123,7 +171,11 @@ export function TrafficDetailsDialog({
|
||||
void fetchStats();
|
||||
const interval = setInterval(fetchStats, 2000);
|
||||
|
||||
return () => clearInterval(interval);
|
||||
return () => {
|
||||
clearInterval(interval);
|
||||
// Clear stats from memory when dialog closes to free up memory
|
||||
setStats(null);
|
||||
};
|
||||
}, [isOpen, profileId, timePeriod]);
|
||||
|
||||
// Transform data for chart (already filtered by backend)
|
||||
@@ -140,7 +192,7 @@ export function TrafficDetailsDialog({
|
||||
|
||||
// Tooltip render function
|
||||
const renderTooltip = React.useCallback(
|
||||
(props: TooltipProps<ValueType, NameType>) => {
|
||||
(props: TooltipContentProps<number, string>) => {
|
||||
const { active, payload, label } = props;
|
||||
if (!active || !payload?.length) return null;
|
||||
|
||||
@@ -356,9 +408,11 @@ export function TrafficDetailsDialog({
|
||||
</p>
|
||||
</div>
|
||||
<div className="bg-muted/50 rounded-lg p-3">
|
||||
<p className="text-xs text-muted-foreground">Total Requests</p>
|
||||
<p className="text-xs text-muted-foreground">
|
||||
Requests ({timePeriod === "all" ? "total" : timePeriod})
|
||||
</p>
|
||||
<p className="text-lg font-semibold">
|
||||
{(stats?.total_requests || 0).toLocaleString()}
|
||||
{(stats?.period_requests || 0).toLocaleString()}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
@@ -366,14 +420,14 @@ export function TrafficDetailsDialog({
|
||||
{/* Total Stats (smaller, under period stats) */}
|
||||
<div className="flex items-center gap-6 text-sm text-muted-foreground border-t pt-4">
|
||||
<div>
|
||||
<span className="font-medium">Total:</span>{" "}
|
||||
<span className="font-medium">All-time traffic:</span>{" "}
|
||||
{formatBytes(
|
||||
(stats?.total_bytes_sent || 0) +
|
||||
(stats?.total_bytes_received || 0),
|
||||
)}
|
||||
</div>
|
||||
<div>
|
||||
<span className="font-medium">Requests:</span>{" "}
|
||||
<span className="font-medium">All-time requests:</span>{" "}
|
||||
{stats?.total_requests?.toLocaleString() || 0}
|
||||
</div>
|
||||
</div>
|
||||
@@ -389,7 +443,8 @@ export function TrafficDetailsDialog({
|
||||
{topDomainsByTraffic.length > 0 && (
|
||||
<div>
|
||||
<h3 className="text-sm font-medium mb-2">
|
||||
Top Domains by Traffic
|
||||
Top Domains by Traffic (
|
||||
{timePeriod === "all" ? "all time" : timePeriod})
|
||||
</h3>
|
||||
<div className="border rounded-md">
|
||||
<div className="grid grid-cols-[1fr_80px_80px_80px] gap-2 px-3 py-2 text-xs font-medium text-muted-foreground border-b bg-muted/30">
|
||||
@@ -408,9 +463,7 @@ export function TrafficDetailsDialog({
|
||||
<span className="text-xs text-muted-foreground w-4 shrink-0">
|
||||
{index + 1}
|
||||
</span>
|
||||
<span className="truncate" title={domain.domain}>
|
||||
{domain.domain}
|
||||
</span>
|
||||
<TruncatedDomain domain={domain.domain} />
|
||||
</div>
|
||||
<span className="text-right text-muted-foreground">
|
||||
{domain.request_count.toLocaleString()}
|
||||
@@ -432,7 +485,8 @@ export function TrafficDetailsDialog({
|
||||
{topDomainsByRequests.length > 0 && (
|
||||
<div>
|
||||
<h3 className="text-sm font-medium mb-2">
|
||||
Top Domains by Requests
|
||||
Top Domains by Requests (
|
||||
{timePeriod === "all" ? "all time" : timePeriod})
|
||||
</h3>
|
||||
<div className="border rounded-md">
|
||||
<div className="grid grid-cols-[1fr_80px_100px] gap-2 px-3 py-2 text-xs font-medium text-muted-foreground border-b bg-muted/30">
|
||||
@@ -450,9 +504,7 @@ export function TrafficDetailsDialog({
|
||||
<span className="text-xs text-muted-foreground w-4 shrink-0">
|
||||
{index + 1}
|
||||
</span>
|
||||
<span className="truncate" title={domain.domain}>
|
||||
{domain.domain}
|
||||
</span>
|
||||
<TruncatedDomain domain={domain.domain} />
|
||||
</div>
|
||||
<span className="text-right text-muted-foreground">
|
||||
{domain.request_count.toLocaleString()}
|
||||
|
||||
@@ -0,0 +1,55 @@
|
||||
"use client";
|
||||
|
||||
import {
|
||||
type HTMLMotionProps,
|
||||
type LegacyAnimationControls,
|
||||
motion,
|
||||
type TargetAndTransition,
|
||||
type Transition,
|
||||
} from "motion/react";
|
||||
import type * as React from "react";
|
||||
|
||||
import { useAutoHeight } from "@/hooks/use-auto-height";
|
||||
import { Slot, type WithAsChild } from "@/lib/slot";
|
||||
|
||||
type AutoHeightProps = WithAsChild<
|
||||
{
|
||||
children: React.ReactNode;
|
||||
deps?: React.DependencyList;
|
||||
animate?: TargetAndTransition | LegacyAnimationControls;
|
||||
transition?: Transition;
|
||||
} & Omit<HTMLMotionProps<"div">, "animate">
|
||||
>;
|
||||
|
||||
function AutoHeight({
|
||||
children,
|
||||
deps = [],
|
||||
transition = {
|
||||
type: "spring",
|
||||
stiffness: 300,
|
||||
damping: 30,
|
||||
bounce: 0,
|
||||
restDelta: 0.01,
|
||||
},
|
||||
style,
|
||||
animate,
|
||||
asChild = false,
|
||||
...props
|
||||
}: AutoHeightProps) {
|
||||
const { ref, height } = useAutoHeight<HTMLDivElement>(deps);
|
||||
|
||||
const Comp = asChild ? Slot : motion.div;
|
||||
|
||||
return (
|
||||
<Comp
|
||||
style={{ overflow: "hidden", ...style }}
|
||||
animate={{ height, ...animate }}
|
||||
transition={transition}
|
||||
{...props}
|
||||
>
|
||||
<div ref={ref}>{children}</div>
|
||||
</Comp>
|
||||
);
|
||||
}
|
||||
|
||||
export { AutoHeight, type AutoHeightProps };
|
||||
@@ -2,6 +2,12 @@
|
||||
|
||||
import * as React from "react";
|
||||
import * as RechartsPrimitive from "recharts";
|
||||
import type {
|
||||
Props as DefaultLegendContentProps,
|
||||
LegendPayload,
|
||||
} from "recharts/types/component/DefaultLegendContent";
|
||||
import type { Payload } from "recharts/types/component/DefaultTooltipContent";
|
||||
import type { TooltipContentProps } from "recharts/types/component/Tooltip";
|
||||
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
@@ -105,13 +111,15 @@ const ChartTooltip = RechartsPrimitive.Tooltip;
|
||||
|
||||
const ChartTooltipContent = React.forwardRef<
|
||||
HTMLDivElement,
|
||||
React.ComponentProps<typeof RechartsPrimitive.Tooltip> &
|
||||
TooltipContentProps<number, string> &
|
||||
React.ComponentProps<"div"> & {
|
||||
hideLabel?: boolean;
|
||||
hideIndicator?: boolean;
|
||||
indicator?: "line" | "dot" | "dashed";
|
||||
nameKey?: string;
|
||||
labelKey?: string;
|
||||
labelClassName?: string;
|
||||
color?: string;
|
||||
}
|
||||
>(
|
||||
(
|
||||
@@ -187,15 +195,15 @@ const ChartTooltipContent = React.forwardRef<
|
||||
{!nestLabel ? tooltipLabel : null}
|
||||
<div className="grid gap-1.5">
|
||||
{payload
|
||||
.filter((item) => item.type !== "none")
|
||||
.map((item, index) => {
|
||||
.filter((item: Payload<number, string>) => item.type !== "none")
|
||||
.map((item: Payload<number, string>, index: number) => {
|
||||
const key = `${nameKey || item.name || item.dataKey || "value"}`;
|
||||
const itemConfig = getPayloadConfigFromPayload(config, item, key);
|
||||
const indicatorColor = color || item.payload.fill || item.color;
|
||||
const indicatorColor = color || item.payload?.fill || item.color;
|
||||
|
||||
return (
|
||||
<div
|
||||
key={item.dataKey}
|
||||
key={String(item.dataKey ?? index)}
|
||||
className={cn(
|
||||
"flex w-full flex-wrap items-stretch gap-2 [&>svg]:h-2.5 [&>svg]:w-2.5 [&>svg]:text-muted-foreground",
|
||||
indicator === "dot" && "items-center",
|
||||
@@ -264,7 +272,7 @@ const ChartLegend = RechartsPrimitive.Legend;
|
||||
const ChartLegendContent = React.forwardRef<
|
||||
HTMLDivElement,
|
||||
React.ComponentProps<"div"> &
|
||||
Pick<RechartsPrimitive.LegendProps, "payload" | "verticalAlign"> & {
|
||||
Pick<DefaultLegendContentProps, "payload" | "verticalAlign"> & {
|
||||
hideIcon?: boolean;
|
||||
nameKey?: string;
|
||||
}
|
||||
@@ -289,8 +297,8 @@ const ChartLegendContent = React.forwardRef<
|
||||
)}
|
||||
>
|
||||
{payload
|
||||
.filter((item) => item.type !== "none")
|
||||
.map((item) => {
|
||||
.filter((item: LegendPayload) => item.type !== "none")
|
||||
.map((item: LegendPayload) => {
|
||||
const key = `${nameKey || item.dataKey || "value"}`;
|
||||
const itemConfig = getPayloadConfigFromPayload(config, item, key);
|
||||
|
||||
|
||||
+138
-39
@@ -1,86 +1,185 @@
|
||||
"use client";
|
||||
|
||||
import * as DialogPrimitive from "@radix-ui/react-dialog";
|
||||
import { AnimatePresence, type HTMLMotionProps, motion } from "motion/react";
|
||||
import { Dialog as DialogPrimitive } from "radix-ui";
|
||||
import type * as React from "react";
|
||||
import { RxCross2 } from "react-icons/rx";
|
||||
|
||||
import { useControlledState } from "@/hooks/use-controlled-state";
|
||||
import { getStrictContext } from "@/lib/get-strict-context";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { WindowDragArea } from "../window-drag-area";
|
||||
|
||||
function Dialog({
|
||||
...props
|
||||
}: React.ComponentProps<typeof DialogPrimitive.Root>) {
|
||||
return <DialogPrimitive.Root data-slot="dialog" {...props} />;
|
||||
type DialogContextType = {
|
||||
isOpen: boolean;
|
||||
setIsOpen: DialogProps["onOpenChange"];
|
||||
};
|
||||
|
||||
const [DialogProvider, useDialog] =
|
||||
getStrictContext<DialogContextType>("DialogContext");
|
||||
|
||||
type DialogProps = React.ComponentProps<typeof DialogPrimitive.Root>;
|
||||
|
||||
function Dialog(props: DialogProps) {
|
||||
const [isOpen, setIsOpen] = useControlledState({
|
||||
value: props?.open,
|
||||
defaultValue: props?.defaultOpen,
|
||||
onChange: props?.onOpenChange,
|
||||
});
|
||||
|
||||
return (
|
||||
<DialogProvider value={{ isOpen, setIsOpen }}>
|
||||
<DialogPrimitive.Root
|
||||
data-slot="dialog"
|
||||
{...props}
|
||||
onOpenChange={setIsOpen}
|
||||
/>
|
||||
</DialogProvider>
|
||||
);
|
||||
}
|
||||
|
||||
function DialogTrigger({
|
||||
...props
|
||||
}: React.ComponentProps<typeof DialogPrimitive.Trigger>) {
|
||||
type DialogTriggerProps = React.ComponentProps<typeof DialogPrimitive.Trigger>;
|
||||
|
||||
function DialogTrigger(props: DialogTriggerProps) {
|
||||
return <DialogPrimitive.Trigger data-slot="dialog-trigger" {...props} />;
|
||||
}
|
||||
|
||||
function DialogPortal({
|
||||
...props
|
||||
}: React.ComponentProps<typeof DialogPrimitive.Portal>) {
|
||||
return <DialogPrimitive.Portal data-slot="dialog-portal" {...props} />;
|
||||
type DialogPortalProps = Omit<
|
||||
React.ComponentProps<typeof DialogPrimitive.Portal>,
|
||||
"forceMount"
|
||||
>;
|
||||
|
||||
function DialogPortal(props: DialogPortalProps) {
|
||||
const { isOpen } = useDialog();
|
||||
|
||||
return (
|
||||
<AnimatePresence>
|
||||
{isOpen && (
|
||||
<DialogPrimitive.Portal
|
||||
data-slot="dialog-portal"
|
||||
forceMount
|
||||
{...props}
|
||||
/>
|
||||
)}
|
||||
</AnimatePresence>
|
||||
);
|
||||
}
|
||||
|
||||
function DialogClose({
|
||||
...props
|
||||
}: React.ComponentProps<typeof DialogPrimitive.Close>) {
|
||||
return <DialogPrimitive.Close data-slot="dialog-close" {...props} />;
|
||||
}
|
||||
type DialogOverlayProps = Omit<
|
||||
React.ComponentProps<typeof DialogPrimitive.Overlay>,
|
||||
"forceMount" | "asChild"
|
||||
> &
|
||||
HTMLMotionProps<"div">;
|
||||
|
||||
function DialogOverlay({
|
||||
className,
|
||||
transition = { duration: 0.2, ease: "easeInOut" },
|
||||
...props
|
||||
}: React.ComponentProps<typeof DialogPrimitive.Overlay>) {
|
||||
}: DialogOverlayProps) {
|
||||
return (
|
||||
<DialogPrimitive.Overlay
|
||||
data-slot="dialog-overlay"
|
||||
className={cn(
|
||||
"data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 fixed inset-0 z-[9999] bg-background/50",
|
||||
className,
|
||||
)}
|
||||
{...props}
|
||||
>
|
||||
<WindowDragArea />
|
||||
<DialogPrimitive.Overlay data-slot="dialog-overlay" asChild forceMount>
|
||||
<motion.div
|
||||
key="dialog-overlay"
|
||||
initial={{ opacity: 0, filter: "blur(4px)" }}
|
||||
animate={{ opacity: 1, filter: "blur(0px)" }}
|
||||
exit={{ opacity: 0, filter: "blur(4px)" }}
|
||||
transition={transition}
|
||||
className={cn("fixed inset-0 z-9999 bg-background/50", className)}
|
||||
{...props}
|
||||
>
|
||||
<WindowDragArea />
|
||||
</motion.div>
|
||||
</DialogPrimitive.Overlay>
|
||||
);
|
||||
}
|
||||
|
||||
type DialogFlipDirection = "top" | "bottom" | "left" | "right";
|
||||
|
||||
type DialogContentProps = Omit<
|
||||
React.ComponentProps<typeof DialogPrimitive.Content>,
|
||||
"forceMount" | "asChild"
|
||||
> &
|
||||
HTMLMotionProps<"div"> & {
|
||||
from?: DialogFlipDirection;
|
||||
};
|
||||
|
||||
function DialogContent({
|
||||
className,
|
||||
children,
|
||||
from = "top",
|
||||
onOpenAutoFocus,
|
||||
onCloseAutoFocus,
|
||||
onEscapeKeyDown,
|
||||
onPointerDownOutside,
|
||||
onInteractOutside,
|
||||
transition = { type: "spring", stiffness: 150, damping: 25 },
|
||||
...props
|
||||
}: React.ComponentProps<typeof DialogPrimitive.Content>) {
|
||||
}: DialogContentProps) {
|
||||
const initialRotation =
|
||||
from === "bottom" || from === "left" ? "20deg" : "-20deg";
|
||||
const isVertical = from === "top" || from === "bottom";
|
||||
const rotateAxis = isVertical ? "rotateX" : "rotateY";
|
||||
|
||||
return (
|
||||
<DialogPortal data-slot="dialog-portal">
|
||||
<DialogOverlay />
|
||||
<DialogPrimitive.Content
|
||||
data-slot="dialog-content"
|
||||
className={cn(
|
||||
"bg-background data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 fixed top-[50%] left-[50%] z-[10000] grid w-full max-w-[calc(100%-2rem)] translate-x-[-50%] translate-y-[-50%] gap-4 rounded-lg border p-6 shadow-lg duration-200 sm:max-w-lg",
|
||||
className,
|
||||
)}
|
||||
asChild
|
||||
forceMount
|
||||
onOpenAutoFocus={onOpenAutoFocus}
|
||||
onCloseAutoFocus={onCloseAutoFocus}
|
||||
onEscapeKeyDown={onEscapeKeyDown}
|
||||
onPointerDownOutside={onPointerDownOutside}
|
||||
onInteractOutside={(event) => {
|
||||
const target = event.target as HTMLElement | null;
|
||||
if (target?.closest('[data-window-drag-area="true"]')) {
|
||||
event.preventDefault();
|
||||
}
|
||||
onInteractOutside?.(event);
|
||||
}}
|
||||
{...props}
|
||||
>
|
||||
{children}
|
||||
<DialogPrimitive.Close className="cursor-pointer ring-offset-background focus:ring-ring data-[state=open]:bg-accent data-[state=open]:text-muted-foreground absolute top-4 right-4 rounded-xs opacity-70 transition-opacity hover:opacity-100 focus:ring-2 focus:ring-offset-2 focus:outline-hidden disabled:pointer-events-none [&_svg]:pointer-events-none [&_svg]:shrink-0 [&_svg:not([class*='size-'])]:size-4">
|
||||
<RxCross2 />
|
||||
<span className="sr-only">Close</span>
|
||||
</DialogPrimitive.Close>
|
||||
<motion.div
|
||||
key="dialog-content"
|
||||
data-slot="dialog-content"
|
||||
initial={{
|
||||
opacity: 0,
|
||||
filter: "blur(4px)",
|
||||
transform: `perspective(500px) ${rotateAxis}(${initialRotation}) scale(0.8)`,
|
||||
}}
|
||||
animate={{
|
||||
opacity: 1,
|
||||
filter: "blur(0px)",
|
||||
transform: `perspective(500px) ${rotateAxis}(0deg) scale(1)`,
|
||||
}}
|
||||
exit={{
|
||||
opacity: 0,
|
||||
filter: "blur(4px)",
|
||||
transform: `perspective(500px) ${rotateAxis}(${initialRotation}) scale(0.8)`,
|
||||
}}
|
||||
transition={transition}
|
||||
className={cn(
|
||||
"bg-background fixed top-[50%] left-[50%] z-10000 grid w-full max-w-[calc(100%-2rem)] translate-x-[-50%] translate-y-[-50%] gap-4 rounded-lg border p-6 shadow-lg sm:max-w-lg",
|
||||
className,
|
||||
)}
|
||||
{...props}
|
||||
>
|
||||
{children}
|
||||
<DialogPrimitive.Close className="cursor-pointer ring-offset-background focus:ring-ring data-[state=open]:bg-accent data-[state=open]:text-muted-foreground absolute top-4 right-4 rounded-xs opacity-70 transition-opacity hover:opacity-100 focus:ring-2 focus:ring-offset-2 focus:outline-hidden disabled:pointer-events-none [&_svg]:pointer-events-none [&_svg]:shrink-0 [&_svg:not([class*='size-'])]:size-4">
|
||||
<RxCross2 />
|
||||
<span className="sr-only">Close</span>
|
||||
</DialogPrimitive.Close>
|
||||
</motion.div>
|
||||
</DialogPrimitive.Content>
|
||||
</DialogPortal>
|
||||
);
|
||||
}
|
||||
|
||||
type DialogCloseProps = React.ComponentProps<typeof DialogPrimitive.Close>;
|
||||
|
||||
function DialogClose(props: DialogCloseProps) {
|
||||
return <DialogPrimitive.Close data-slot="dialog-close" {...props} />;
|
||||
}
|
||||
|
||||
function DialogHeader({ className, ...props }: React.ComponentProps<"div">) {
|
||||
return (
|
||||
<div
|
||||
|
||||
@@ -0,0 +1,640 @@
|
||||
"use client";
|
||||
|
||||
import { AnimatePresence, motion, type Transition } from "motion/react";
|
||||
import * as React from "react";
|
||||
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
type HighlightMode = "children" | "parent";
|
||||
|
||||
type Bounds = {
|
||||
top: number;
|
||||
left: number;
|
||||
width: number;
|
||||
height: number;
|
||||
};
|
||||
|
||||
const DEFAULT_BOUNDS_OFFSET: Bounds = {
|
||||
top: 0,
|
||||
left: 0,
|
||||
width: 0,
|
||||
height: 0,
|
||||
};
|
||||
|
||||
type HighlightContextType<T extends string> = {
|
||||
as?: keyof HTMLElementTagNameMap;
|
||||
mode: HighlightMode;
|
||||
activeValue: T | null;
|
||||
setActiveValue: (value: T | null) => void;
|
||||
setBounds: (bounds: DOMRect) => void;
|
||||
clearBounds: () => void;
|
||||
id: string;
|
||||
hover: boolean;
|
||||
click: boolean;
|
||||
className?: string;
|
||||
style?: React.CSSProperties;
|
||||
activeClassName?: string;
|
||||
setActiveClassName: (className: string) => void;
|
||||
transition?: Transition;
|
||||
disabled?: boolean;
|
||||
enabled?: boolean;
|
||||
exitDelay?: number;
|
||||
forceUpdateBounds?: boolean;
|
||||
};
|
||||
|
||||
const HighlightContext = React.createContext<
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
HighlightContextType<any> | undefined
|
||||
>(undefined);
|
||||
|
||||
function useHighlight<T extends string>(): HighlightContextType<T> {
|
||||
const context = React.useContext(HighlightContext);
|
||||
if (!context) {
|
||||
throw new Error("useHighlight must be used within a HighlightProvider");
|
||||
}
|
||||
return context as unknown as HighlightContextType<T>;
|
||||
}
|
||||
|
||||
type BaseHighlightProps<T extends React.ElementType = "div"> = {
|
||||
as?: T;
|
||||
ref?: React.Ref<HTMLDivElement>;
|
||||
mode?: HighlightMode;
|
||||
value?: string | null;
|
||||
defaultValue?: string | null;
|
||||
onValueChange?: (value: string | null) => void;
|
||||
className?: string;
|
||||
style?: React.CSSProperties;
|
||||
transition?: Transition;
|
||||
hover?: boolean;
|
||||
click?: boolean;
|
||||
disabled?: boolean;
|
||||
enabled?: boolean;
|
||||
exitDelay?: number;
|
||||
};
|
||||
|
||||
type ParentModeHighlightProps = {
|
||||
boundsOffset?: Partial<Bounds>;
|
||||
containerClassName?: string;
|
||||
forceUpdateBounds?: boolean;
|
||||
};
|
||||
|
||||
type ControlledParentModeHighlightProps<T extends React.ElementType = "div"> =
|
||||
BaseHighlightProps<T> &
|
||||
ParentModeHighlightProps & {
|
||||
mode: "parent";
|
||||
controlledItems: true;
|
||||
children: React.ReactNode;
|
||||
};
|
||||
|
||||
type ControlledChildrenModeHighlightProps<T extends React.ElementType = "div"> =
|
||||
BaseHighlightProps<T> & {
|
||||
mode?: "children" | undefined;
|
||||
controlledItems: true;
|
||||
children: React.ReactNode;
|
||||
};
|
||||
|
||||
type UncontrolledParentModeHighlightProps<T extends React.ElementType = "div"> =
|
||||
BaseHighlightProps<T> &
|
||||
ParentModeHighlightProps & {
|
||||
mode: "parent";
|
||||
controlledItems?: false;
|
||||
itemsClassName?: string;
|
||||
children: React.ReactElement | React.ReactElement[];
|
||||
};
|
||||
|
||||
type UncontrolledChildrenModeHighlightProps<
|
||||
T extends React.ElementType = "div",
|
||||
> = BaseHighlightProps<T> & {
|
||||
mode?: "children";
|
||||
controlledItems?: false;
|
||||
itemsClassName?: string;
|
||||
children: React.ReactElement | React.ReactElement[];
|
||||
};
|
||||
|
||||
type HighlightProps<T extends React.ElementType = "div"> =
|
||||
| ControlledParentModeHighlightProps<T>
|
||||
| ControlledChildrenModeHighlightProps<T>
|
||||
| UncontrolledParentModeHighlightProps<T>
|
||||
| UncontrolledChildrenModeHighlightProps<T>;
|
||||
|
||||
/**
 * Root of the motion-highlight system. Owns the active item value
 * (controlled via `value`, uncontrolled via `defaultValue`) and publishes it,
 * together with bounds-measurement callbacks, through HighlightContext.
 *
 * Modes:
 * - "children" (default): each HighlightItem renders its own background.
 * - "parent": this component renders a single absolutely-positioned
 *   rectangle and animates it to the bounds reported by the active item.
 */
function Highlight<T extends React.ElementType = "div">({
  ref,
  ...props
}: HighlightProps<T>) {
  const {
    as: Component = "div",
    children,
    value,
    defaultValue,
    onValueChange,
    className,
    style,
    transition = { type: "spring", stiffness: 350, damping: 35 },
    hover = false,
    click = true,
    enabled = true,
    controlledItems,
    disabled = false,
    exitDelay = 200,
    mode = "children",
  } = props;

  const localRef = React.useRef<HTMLDivElement>(null);
  // Expose the underlying container element to parent refs.
  React.useImperativeHandle(ref, () => localRef.current as HTMLDivElement);

  // boundsOffset only exists on the parent-mode prop variants; default each
  // side to 0 so the coordinate math below never sees undefined.
  const propsBoundsOffset = (props as ParentModeHighlightProps)?.boundsOffset;
  const boundsOffset = propsBoundsOffset ?? DEFAULT_BOUNDS_OFFSET;
  const boundsOffsetTop = boundsOffset.top ?? 0;
  const boundsOffsetLeft = boundsOffset.left ?? 0;
  const boundsOffsetWidth = boundsOffset.width ?? 0;
  const boundsOffsetHeight = boundsOffset.height ?? 0;

  // Mirror the offsets into a ref so the measuring callback always reads the
  // latest values without needing to be re-created.
  const boundsOffsetRef = React.useRef({
    top: boundsOffsetTop,
    left: boundsOffsetLeft,
    width: boundsOffsetWidth,
    height: boundsOffsetHeight,
  });

  React.useEffect(() => {
    boundsOffsetRef.current = {
      top: boundsOffsetTop,
      left: boundsOffsetLeft,
      width: boundsOffsetWidth,
      height: boundsOffsetHeight,
    };
  }, [
    boundsOffsetTop,
    boundsOffsetLeft,
    boundsOffsetWidth,
    boundsOffsetHeight,
  ]);

  const [activeValue, setActiveValue] = React.useState<string | null>(
    value ?? defaultValue ?? null,
  );
  // Container-relative highlight rectangle; only used in "parent" mode.
  const [boundsState, setBoundsState] = React.useState<Bounds | null>(null);
  // Extra class name contributed by the currently active item.
  const [activeClassNameState, setActiveClassNameState] =
    React.useState<string>("");

  // Setter that fires onValueChange only when the value actually changes.
  const safeSetActiveValue = (id: string | null) => {
    setActiveValue((prev) => {
      if (prev !== id) {
        onValueChange?.(id);
        return id;
      }
      return prev;
    });
  };

  const safeSetBoundsRef = React.useRef<
    ((bounds: DOMRect) => void) | undefined
  >(undefined);

  // Deliberately no dependency array: the callback is re-assigned every
  // render so its closure always sees current state ("latest ref" pattern).
  React.useEffect(() => {
    safeSetBoundsRef.current = (bounds: DOMRect) => {
      if (!localRef.current) return;

      // Translate the item's viewport rect into container-relative
      // coordinates, then apply the user-supplied offsets.
      const containerRect = localRef.current.getBoundingClientRect();
      const offset = boundsOffsetRef.current;
      const newBounds: Bounds = {
        top: bounds.top - containerRect.top + offset.top,
        left: bounds.left - containerRect.left + offset.left,
        width: bounds.width + offset.width,
        height: bounds.height + offset.height,
      };

      // Bail out when nothing moved, to avoid re-render loops.
      setBoundsState((prev) => {
        if (
          prev &&
          prev.top === newBounds.top &&
          prev.left === newBounds.left &&
          prev.width === newBounds.width &&
          prev.height === newBounds.height
        ) {
          return prev;
        }
        return newBounds;
      });
    };
  });

  const safeSetBounds = (bounds: DOMRect) => {
    safeSetBoundsRef.current?.(bounds);
  };

  const clearBounds = React.useCallback(() => {
    setBoundsState((prev) => (prev === null ? prev : null));
  }, []);

  // Keep internal state in sync when used as a controlled component.
  React.useEffect(() => {
    if (value !== undefined) setActiveValue(value);
    else if (defaultValue !== undefined) setActiveValue(defaultValue);
  }, [value, defaultValue]);

  const id = React.useId();

  // In parent mode, re-measure the active item while the container scrolls so
  // the highlight rectangle stays glued to it.
  React.useEffect(() => {
    if (mode !== "parent") return;
    const container = localRef.current;
    if (!container) return;

    const onScroll = () => {
      if (!activeValue) return;
      const activeEl = container.querySelector<HTMLElement>(
        `[data-value="${activeValue}"][data-highlight="true"]`,
      );
      if (activeEl)
        safeSetBoundsRef.current?.(activeEl.getBoundingClientRect());
    };

    container.addEventListener("scroll", onScroll, { passive: true });
    return () => container.removeEventListener("scroll", onScroll);
  }, [mode, activeValue]);

  // In parent mode, wrap the items in a positioned container that also hosts
  // the single animated rectangle; in children mode, pass through unchanged.
  const render = (children: React.ReactNode) => {
    if (mode === "parent") {
      return (
        <Component
          ref={localRef}
          data-slot="motion-highlight-container"
          style={{ position: "relative", zIndex: 1 }}
          className={(props as ParentModeHighlightProps)?.containerClassName}
        >
          <AnimatePresence initial={false} mode="wait">
            {boundsState && (
              <motion.div
                data-slot="motion-highlight"
                animate={{
                  top: boundsState.top,
                  left: boundsState.left,
                  width: boundsState.width,
                  height: boundsState.height,
                  opacity: 1,
                }}
                initial={{
                  top: boundsState.top,
                  left: boundsState.left,
                  width: boundsState.width,
                  height: boundsState.height,
                  opacity: 0,
                }}
                exit={{
                  opacity: 0,
                  transition: {
                    ...transition,
                    // exitDelay is milliseconds; motion expects seconds.
                    delay: (transition?.delay ?? 0) + (exitDelay ?? 0) / 1000,
                  },
                }}
                transition={transition}
                style={{ position: "absolute", zIndex: 0, ...style }}
                className={cn(className, activeClassNameState)}
              />
            )}
          </AnimatePresence>
          {children}
        </Component>
      );
    }

    return children;
  };

  return (
    <HighlightContext.Provider
      value={{
        mode,
        activeValue,
        setActiveValue: safeSetActiveValue,
        id,
        hover,
        click,
        className,
        style,
        transition,
        disabled,
        enabled,
        exitDelay,
        setBounds: safeSetBounds,
        clearBounds,
        activeClassName: activeClassNameState,
        setActiveClassName: setActiveClassNameState,
        forceUpdateBounds: (props as ParentModeHighlightProps)
          ?.forceUpdateBounds,
      }}
    >
      {enabled
        ? controlledItems
          ? render(children)
          : render(
              React.Children.map(children, (child, index) => (
                <HighlightItem key={index} className={props?.itemsClassName}>
                  {child}
                </HighlightItem>
              )),
            )
        : children}
    </HighlightContext.Provider>
  );
}
|
||||
|
||||
function getNonOverridingDataAttributes(
|
||||
element: React.ReactElement,
|
||||
dataAttributes: Record<string, unknown>,
|
||||
): Record<string, unknown> {
|
||||
return Object.keys(dataAttributes).reduce<Record<string, unknown>>(
|
||||
(acc, key) => {
|
||||
if ((element.props as Record<string, unknown>)[key] === undefined) {
|
||||
acc[key] = dataAttributes[key];
|
||||
}
|
||||
return acc;
|
||||
},
|
||||
{},
|
||||
);
|
||||
}
|
||||
|
||||
// Props a wrapped child may already define. Checked when cloning so that
// attributes injected by HighlightItem never override the child's own.
type ExtendedChildProps = React.ComponentProps<"div"> & {
  id?: string;
  ref?: React.Ref<HTMLElement>;
  "data-active"?: string;
  "data-value"?: string;
  "data-disabled"?: boolean;
  "data-highlight"?: boolean;
  "data-slot"?: string;
};

// Props for a single highlightable item.
type HighlightItemProps<T extends React.ElementType = "div"> =
  React.ComponentProps<T> & {
    // Element type for the item wrapper (defaults to "div").
    as?: T;
    children: React.ReactElement;
    // Explicit identity; falls back to `value`, then the child's
    // data-value/id, then a generated id.
    id?: string;
    value?: string;
    className?: string;
    style?: React.CSSProperties;
    // Per-item override of the context-level transition.
    transition?: Transition;
    // Extra class applied to the highlight while this item is active.
    activeClassName?: string;
    disabled?: boolean;
    // Exit-animation delay in milliseconds.
    exitDelay?: number;
    // Merge props into the child instead of rendering a wrapper element.
    asChild?: boolean;
    // Keep re-measuring bounds every animation frame while active.
    forceUpdateBounds?: boolean;
  };
|
||||
|
||||
function HighlightItem<T extends React.ElementType>({
|
||||
ref,
|
||||
as,
|
||||
children,
|
||||
id,
|
||||
value,
|
||||
className,
|
||||
style,
|
||||
transition,
|
||||
disabled = false,
|
||||
activeClassName,
|
||||
exitDelay,
|
||||
asChild = false,
|
||||
forceUpdateBounds,
|
||||
...props
|
||||
}: HighlightItemProps<T>) {
|
||||
const itemId = React.useId();
|
||||
const {
|
||||
activeValue,
|
||||
setActiveValue,
|
||||
mode,
|
||||
setBounds,
|
||||
clearBounds,
|
||||
hover,
|
||||
click,
|
||||
enabled,
|
||||
className: contextClassName,
|
||||
style: contextStyle,
|
||||
transition: contextTransition,
|
||||
id: contextId,
|
||||
disabled: contextDisabled,
|
||||
exitDelay: contextExitDelay,
|
||||
forceUpdateBounds: contextForceUpdateBounds,
|
||||
setActiveClassName,
|
||||
} = useHighlight();
|
||||
|
||||
const Component = as ?? "div";
|
||||
const element = children as React.ReactElement<ExtendedChildProps>;
|
||||
const childValue =
|
||||
id ?? value ?? element.props?.["data-value"] ?? element.props?.id ?? itemId;
|
||||
const isActive = activeValue === childValue;
|
||||
const isDisabled = disabled === undefined ? contextDisabled : disabled;
|
||||
const itemTransition = transition ?? contextTransition;
|
||||
|
||||
const localRef = React.useRef<HTMLDivElement>(null);
|
||||
React.useImperativeHandle(ref, () => localRef.current as HTMLDivElement);
|
||||
|
||||
const refCallback = React.useCallback((node: HTMLElement | null) => {
|
||||
localRef.current = node as HTMLDivElement;
|
||||
}, []);
|
||||
|
||||
React.useEffect(() => {
|
||||
if (mode !== "parent") return;
|
||||
let rafId: number;
|
||||
let previousBounds: Bounds | null = null;
|
||||
const shouldUpdateBounds =
|
||||
forceUpdateBounds === true ||
|
||||
(contextForceUpdateBounds && forceUpdateBounds !== false);
|
||||
|
||||
const updateBounds = () => {
|
||||
if (!localRef.current) return;
|
||||
|
||||
const bounds = localRef.current.getBoundingClientRect();
|
||||
|
||||
if (shouldUpdateBounds) {
|
||||
if (
|
||||
previousBounds &&
|
||||
previousBounds.top === bounds.top &&
|
||||
previousBounds.left === bounds.left &&
|
||||
previousBounds.width === bounds.width &&
|
||||
previousBounds.height === bounds.height
|
||||
) {
|
||||
rafId = requestAnimationFrame(updateBounds);
|
||||
return;
|
||||
}
|
||||
previousBounds = bounds;
|
||||
rafId = requestAnimationFrame(updateBounds);
|
||||
}
|
||||
|
||||
setBounds(bounds);
|
||||
};
|
||||
|
||||
if (isActive) {
|
||||
updateBounds();
|
||||
setActiveClassName(activeClassName ?? "");
|
||||
} else if (!activeValue) clearBounds();
|
||||
|
||||
if (shouldUpdateBounds) return () => cancelAnimationFrame(rafId);
|
||||
}, [
|
||||
mode,
|
||||
isActive,
|
||||
activeValue,
|
||||
setBounds,
|
||||
clearBounds,
|
||||
activeClassName,
|
||||
setActiveClassName,
|
||||
forceUpdateBounds,
|
||||
contextForceUpdateBounds,
|
||||
]);
|
||||
|
||||
if (!React.isValidElement(children)) return children;
|
||||
|
||||
const dataAttributes = {
|
||||
"data-active": isActive ? "true" : "false",
|
||||
"aria-selected": isActive,
|
||||
"data-disabled": isDisabled,
|
||||
"data-value": childValue,
|
||||
"data-highlight": true,
|
||||
};
|
||||
|
||||
const commonHandlers = hover
|
||||
? {
|
||||
onMouseEnter: (e: React.MouseEvent<HTMLDivElement>) => {
|
||||
setActiveValue(childValue);
|
||||
element.props.onMouseEnter?.(e);
|
||||
},
|
||||
onMouseLeave: (e: React.MouseEvent<HTMLDivElement>) => {
|
||||
setActiveValue(null);
|
||||
element.props.onMouseLeave?.(e);
|
||||
},
|
||||
}
|
||||
: click
|
||||
? {
|
||||
onClick: (e: React.MouseEvent<HTMLDivElement>) => {
|
||||
setActiveValue(childValue);
|
||||
element.props.onClick?.(e);
|
||||
},
|
||||
}
|
||||
: {};
|
||||
|
||||
if (asChild) {
|
||||
if (mode === "children") {
|
||||
return React.cloneElement(
|
||||
element,
|
||||
{
|
||||
key: childValue,
|
||||
ref: refCallback,
|
||||
className: cn("relative", element.props.className),
|
||||
...getNonOverridingDataAttributes(element, {
|
||||
...dataAttributes,
|
||||
"data-slot": "motion-highlight-item-container",
|
||||
}),
|
||||
...commonHandlers,
|
||||
...props,
|
||||
},
|
||||
<>
|
||||
<AnimatePresence initial={false} mode="wait">
|
||||
{isActive && !isDisabled && (
|
||||
<motion.div
|
||||
layoutId={`transition-background-${contextId}`}
|
||||
data-slot="motion-highlight"
|
||||
style={{
|
||||
position: "absolute",
|
||||
zIndex: 0,
|
||||
...contextStyle,
|
||||
...style,
|
||||
}}
|
||||
className={cn(contextClassName, activeClassName)}
|
||||
transition={itemTransition}
|
||||
initial={{ opacity: 0 }}
|
||||
animate={{ opacity: 1 }}
|
||||
exit={{
|
||||
opacity: 0,
|
||||
transition: {
|
||||
...itemTransition,
|
||||
delay:
|
||||
(itemTransition?.delay ?? 0) +
|
||||
(exitDelay ?? contextExitDelay ?? 0) / 1000,
|
||||
},
|
||||
}}
|
||||
{...dataAttributes}
|
||||
/>
|
||||
)}
|
||||
</AnimatePresence>
|
||||
|
||||
<Component
|
||||
data-slot="motion-highlight-item"
|
||||
style={{ position: "relative", zIndex: 1 }}
|
||||
className={className}
|
||||
{...dataAttributes}
|
||||
>
|
||||
{children}
|
||||
</Component>
|
||||
</>,
|
||||
);
|
||||
}
|
||||
|
||||
return React.cloneElement(element, {
|
||||
ref: refCallback,
|
||||
...getNonOverridingDataAttributes(element, {
|
||||
...dataAttributes,
|
||||
"data-slot": "motion-highlight-item",
|
||||
}),
|
||||
...commonHandlers,
|
||||
});
|
||||
}
|
||||
|
||||
return enabled ? (
|
||||
<Component
|
||||
key={childValue}
|
||||
ref={localRef}
|
||||
data-slot="motion-highlight-item-container"
|
||||
className={cn(mode === "children" && "relative", className)}
|
||||
{...dataAttributes}
|
||||
{...props}
|
||||
{...commonHandlers}
|
||||
>
|
||||
{mode === "children" && (
|
||||
<AnimatePresence initial={false} mode="wait">
|
||||
{isActive && !isDisabled && (
|
||||
<motion.div
|
||||
layoutId={`transition-background-${contextId}`}
|
||||
data-slot="motion-highlight"
|
||||
style={{
|
||||
position: "absolute",
|
||||
zIndex: 0,
|
||||
...contextStyle,
|
||||
...style,
|
||||
}}
|
||||
className={cn(contextClassName, activeClassName)}
|
||||
transition={itemTransition}
|
||||
initial={{ opacity: 0 }}
|
||||
animate={{ opacity: 1 }}
|
||||
exit={{
|
||||
opacity: 0,
|
||||
transition: {
|
||||
...itemTransition,
|
||||
delay:
|
||||
(itemTransition?.delay ?? 0) +
|
||||
(exitDelay ?? contextExitDelay ?? 0) / 1000,
|
||||
},
|
||||
}}
|
||||
{...dataAttributes}
|
||||
/>
|
||||
)}
|
||||
</AnimatePresence>
|
||||
)}
|
||||
|
||||
{React.cloneElement(element, {
|
||||
style: { position: "relative", zIndex: 1 },
|
||||
className: element.props.className,
|
||||
...getNonOverridingDataAttributes(element, {
|
||||
...dataAttributes,
|
||||
"data-slot": "motion-highlight-item",
|
||||
}),
|
||||
})}
|
||||
</Component>
|
||||
) : (
|
||||
children
|
||||
);
|
||||
}
|
||||
|
||||
export {
|
||||
Highlight,
|
||||
HighlightItem,
|
||||
useHighlight,
|
||||
type HighlightProps,
|
||||
type HighlightItemProps,
|
||||
};
|
||||
+179
-18
@@ -1,18 +1,82 @@
|
||||
"use client";
|
||||
|
||||
import * as TabsPrimitive from "@radix-ui/react-tabs";
|
||||
import {
|
||||
AnimatePresence,
|
||||
type HTMLMotionProps,
|
||||
motion,
|
||||
type Transition,
|
||||
} from "motion/react";
|
||||
import * as React from "react";
|
||||
|
||||
import { AutoHeight } from "@/components/ui/auto-height";
|
||||
import {
|
||||
Highlight,
|
||||
HighlightItem,
|
||||
type HighlightItemProps,
|
||||
type HighlightProps,
|
||||
} from "@/components/ui/highlight";
|
||||
import { useControlledState } from "@/hooks/use-controlled-state";
|
||||
import { getStrictContext } from "@/lib/get-strict-context";
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
const Tabs = TabsPrimitive.Root;
|
||||
// Shared tab state: the currently selected value plus its setter, mirrored
// from the Radix root so sibling components (highlight, contents) can read it.
type TabsContextType = {
  value: string | undefined;
  setValue: TabsProps["onValueChange"];
};

const [TabsProvider, useTabs] =
  getStrictContext<TabsContextType>("TabsContext");
|
||||
|
||||
type TabsProps = React.ComponentProps<typeof TabsPrimitive.Root>;

/**
 * Radix Tabs root wrapped so the current value is mirrored into TabsContext,
 * letting TabsHighlight/TabsContents react to tab changes.
 */
function Tabs(props: TabsProps) {
  // Supports both controlled (props.value) and uncontrolled (defaultValue)
  // usage; the caller's onValueChange still fires through useControlledState.
  const [value, setValue] = useControlledState({
    value: props.value,
    defaultValue: props.defaultValue,
    onChange: props.onValueChange,
  });

  // onValueChange is placed after the spread so our setter cannot be
  // overridden by the forwarded props.
  return (
    <TabsProvider value={{ value, setValue }}>
      <TabsPrimitive.Root
        data-slot="tabs"
        {...props}
        onValueChange={setValue}
      />
    </TabsProvider>
  );
}
|
||||
|
||||
// `controlledItems` and `value` are fixed internally, so callers cannot set them.
type TabsHighlightProps = Omit<HighlightProps, "controlledItems" | "value">;

/**
 * Animated active-tab indicator. Bridges the tab value from TabsContext into
 * the generic Highlight component; items are controlled (rendered by the
 * triggers) and click tracking is disabled because Radix owns selection.
 */
function TabsHighlight({
  transition = { type: "spring", stiffness: 200, damping: 25 },
  ...props
}: TabsHighlightProps) {
  const { value } = useTabs();

  return (
    <Highlight
      data-slot="tabs-highlight"
      controlledItems
      value={value}
      transition={transition}
      click={false}
      {...props}
    />
  );
}
|
||||
|
||||
type TabsListProps = React.ComponentProps<typeof TabsPrimitive.List>;
|
||||
|
||||
const TabsList = React.forwardRef<
|
||||
React.ElementRef<typeof TabsPrimitive.List>,
|
||||
React.ComponentPropsWithoutRef<typeof TabsPrimitive.List>
|
||||
TabsListProps
|
||||
>(({ className, ...props }, ref) => (
|
||||
<TabsPrimitive.List
|
||||
ref={ref}
|
||||
data-slot="tabs-list"
|
||||
className={cn(
|
||||
"inline-flex h-10 items-center justify-center rounded-md bg-muted p-1 text-muted-foreground",
|
||||
className,
|
||||
@@ -22,12 +86,23 @@ const TabsList = React.forwardRef<
|
||||
));
|
||||
TabsList.displayName = TabsPrimitive.List.displayName;
|
||||
|
||||
// HighlightItem specialized for tabs: `value` is required so the indicator
// can be matched to its TabsTrigger.
type TabsHighlightItemProps = HighlightItemProps & {
  value: string;
};

function TabsHighlightItem(props: TabsHighlightItemProps) {
  return <HighlightItem data-slot="tabs-highlight-item" {...props} />;
}
|
||||
|
||||
type TabsTriggerProps = React.ComponentProps<typeof TabsPrimitive.Trigger>;
|
||||
|
||||
const TabsTrigger = React.forwardRef<
|
||||
React.ElementRef<typeof TabsPrimitive.Trigger>,
|
||||
React.ComponentPropsWithoutRef<typeof TabsPrimitive.Trigger>
|
||||
TabsTriggerProps
|
||||
>(({ className, ...props }, ref) => (
|
||||
<TabsPrimitive.Trigger
|
||||
ref={ref}
|
||||
data-slot="tabs-trigger"
|
||||
className={cn(
|
||||
"cursor-pointer inline-flex items-center justify-center whitespace-nowrap rounded-sm px-3 py-1.5 text-sm font-medium ring-offset-background transition-all focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50 data-[state=active]:bg-background data-[state=active]:text-foreground data-[state=active]:shadow-sm",
|
||||
className,
|
||||
@@ -37,19 +112,105 @@ const TabsTrigger = React.forwardRef<
|
||||
));
|
||||
TabsTrigger.displayName = TabsPrimitive.Trigger.displayName;
|
||||
|
||||
const TabsContent = React.forwardRef<
|
||||
React.ElementRef<typeof TabsPrimitive.Content>,
|
||||
React.ComponentPropsWithoutRef<typeof TabsPrimitive.Content>
|
||||
>(({ className, ...props }, ref) => (
|
||||
<TabsPrimitive.Content
|
||||
ref={ref}
|
||||
className={cn(
|
||||
"mt-2 ring-offset-background focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2",
|
||||
className,
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
));
|
||||
TabsContent.displayName = TabsPrimitive.Content.displayName;
|
||||
type TabsContentProps = React.ComponentProps<typeof TabsPrimitive.Content> &
|
||||
HTMLMotionProps<"div">;
|
||||
|
||||
export { Tabs, TabsList, TabsTrigger, TabsContent };
|
||||
/**
 * Tab panel that cross-fades (opacity + blur) when the active tab changes.
 * Rendered through Radix's Content via asChild so accessibility attributes
 * land on the animated div.
 */
function TabsContent({
  value,
  forceMount,
  transition = { duration: 0.5, ease: "easeInOut" },
  className,
  ...props
}: TabsContentProps) {
  // NOTE(review): the motion.div carries no key, so AnimatePresence's exit
  // phase presumably relies on Radix unmounting inactive content — confirm.
  return (
    <AnimatePresence mode="wait">
      <TabsPrimitive.Content asChild forceMount={forceMount} value={value}>
        <motion.div
          data-slot="tabs-content"
          layout
          layoutDependency={value}
          initial={{ opacity: 0, filter: "blur(4px)" }}
          animate={{ opacity: 1, filter: "blur(0px)" }}
          exit={{ opacity: 0, filter: "blur(4px)" }}
          transition={transition}
          className={cn(
            "mt-2 ring-offset-background focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2",
            className,
          )}
          {...props}
        />
      </TabsPrimitive.Content>
    </AnimatePresence>
  );
}
|
||||
|
||||
// Height animation driven by the AutoHeight wrapper (measure + tween).
type TabsContentsAutoProps = React.ComponentProps<typeof AutoHeight> & {
  mode?: "auto-height";
  children: React.ReactNode;
  transition?: Transition;
};

// Height animation driven by motion's layout engine instead of measuring.
type TabsContentsLayoutProps = Omit<HTMLMotionProps<"div">, "transition"> & {
  mode: "layout";
  children: React.ReactNode;
  transition?: Transition;
};

// Discriminated on `mode`; a missing mode means "auto-height".
type TabsContentsProps = TabsContentsAutoProps | TabsContentsLayoutProps;

// Spring used by both modes unless the caller supplies a transition.
const defaultTransition: Transition = {
  type: "spring",
  stiffness: 200,
  damping: 30,
};
|
||||
|
||||
function isAutoMode(props: TabsContentsProps): props is TabsContentsAutoProps {
|
||||
return !("mode" in props) || props.mode === "auto-height";
|
||||
}
|
||||
|
||||
/**
 * Wrapper around the tab panels that animates the container's height when
 * the active tab (and therefore the visible panel's size) changes.
 *
 * "auto-height" (default) measures the content and tweens height explicitly;
 * "layout" delegates sizing to motion's layout animation.
 */
function TabsContents(props: TabsContentsProps) {
  const { value } = useTabs();

  if (isAutoMode(props)) {
    const { transition = defaultTransition, ...autoProps } = props;

    // The tab value is the dependency that triggers a re-measure.
    return (
      <AutoHeight
        data-slot="tabs-contents"
        deps={[value]}
        transition={transition}
        {...autoProps}
      />
    );
  }

  const { transition = defaultTransition, style, ...layoutProps } = props;

  // overflow:hidden so content does not spill while the size animates.
  return (
    <motion.div
      data-slot="tabs-contents"
      layout="size"
      layoutDependency={value}
      style={{ overflow: "hidden", ...style }}
      transition={{ layout: transition }}
      {...layoutProps}
    />
  );
}
|
||||
|
||||
export {
|
||||
Tabs,
|
||||
TabsHighlight,
|
||||
TabsHighlightItem,
|
||||
TabsList,
|
||||
TabsTrigger,
|
||||
TabsContent,
|
||||
TabsContents,
|
||||
type TabsProps,
|
||||
type TabsHighlightProps,
|
||||
type TabsHighlightItemProps,
|
||||
type TabsListProps,
|
||||
type TabsTriggerProps,
|
||||
type TabsContentProps,
|
||||
type TabsContentsProps,
|
||||
};
|
||||
|
||||
@@ -0,0 +1,101 @@
|
||||
"use client";
|
||||
|
||||
import * as React from "react";
|
||||
|
||||
type AutoHeightOptions = {
|
||||
includeParentBox?: boolean;
|
||||
includeSelfBox?: boolean;
|
||||
};
|
||||
|
||||
export function useAutoHeight<T extends HTMLElement = HTMLDivElement>(
|
||||
deps: React.DependencyList = [],
|
||||
options: AutoHeightOptions = {
|
||||
includeParentBox: true,
|
||||
includeSelfBox: false,
|
||||
},
|
||||
) {
|
||||
const ref = React.useRef<T | null>(null);
|
||||
const roRef = React.useRef<ResizeObserver | null>(null);
|
||||
const [height, setHeight] = React.useState(0);
|
||||
|
||||
const measure = React.useCallback(() => {
|
||||
const el = ref.current;
|
||||
if (!el) return 0;
|
||||
|
||||
const base = el.getBoundingClientRect().height || 0;
|
||||
|
||||
let extra = 0;
|
||||
|
||||
if (options.includeParentBox && el.parentElement) {
|
||||
const cs = getComputedStyle(el.parentElement);
|
||||
const paddingY =
|
||||
(parseFloat(cs.paddingTop || "0") || 0) +
|
||||
(parseFloat(cs.paddingBottom || "0") || 0);
|
||||
const borderY =
|
||||
(parseFloat(cs.borderTopWidth || "0") || 0) +
|
||||
(parseFloat(cs.borderBottomWidth || "0") || 0);
|
||||
const isBorderBox = cs.boxSizing === "border-box";
|
||||
if (isBorderBox) {
|
||||
extra += paddingY + borderY;
|
||||
}
|
||||
}
|
||||
|
||||
if (options.includeSelfBox) {
|
||||
const cs = getComputedStyle(el);
|
||||
const paddingY =
|
||||
(parseFloat(cs.paddingTop || "0") || 0) +
|
||||
(parseFloat(cs.paddingBottom || "0") || 0);
|
||||
const borderY =
|
||||
(parseFloat(cs.borderTopWidth || "0") || 0) +
|
||||
(parseFloat(cs.borderBottomWidth || "0") || 0);
|
||||
const isBorderBox = cs.boxSizing === "border-box";
|
||||
if (isBorderBox) {
|
||||
extra += paddingY + borderY;
|
||||
}
|
||||
}
|
||||
|
||||
const dpr =
|
||||
typeof window !== "undefined" ? window.devicePixelRatio || 1 : 1;
|
||||
const total = Math.ceil((base + extra) * dpr) / dpr;
|
||||
|
||||
return total;
|
||||
}, [options.includeParentBox, options.includeSelfBox]);
|
||||
|
||||
React.useLayoutEffect(() => {
|
||||
const el = ref.current;
|
||||
if (!el) return;
|
||||
|
||||
setHeight(measure());
|
||||
|
||||
if (roRef.current) {
|
||||
roRef.current.disconnect();
|
||||
roRef.current = null;
|
||||
}
|
||||
|
||||
const ro = new ResizeObserver(() => {
|
||||
const next = measure();
|
||||
requestAnimationFrame(() => setHeight(next));
|
||||
});
|
||||
|
||||
ro.observe(el);
|
||||
if (options.includeParentBox && el.parentElement) {
|
||||
ro.observe(el.parentElement);
|
||||
}
|
||||
|
||||
roRef.current = ro;
|
||||
|
||||
return () => {
|
||||
ro.disconnect();
|
||||
roRef.current = null;
|
||||
};
|
||||
}, [...deps, measure, options.includeParentBox]);
|
||||
|
||||
React.useLayoutEffect(() => {
|
||||
if (height === 0) {
|
||||
const next = measure();
|
||||
if (next !== 0) setHeight(next);
|
||||
}
|
||||
}, [height, measure]);
|
||||
|
||||
return { ref, height } as const;
|
||||
}
|
||||
@@ -3,7 +3,7 @@ import { getBrowserDisplayName } from "@/lib/browser-utils";
|
||||
import type { BrowserProfile } from "@/types";
|
||||
|
||||
/**
|
||||
* Hook for managing browser state and enforcing single-instance rules for Tor and Mullvad browsers
|
||||
* Hook for managing browser state
|
||||
*/
|
||||
export function useBrowserState(
|
||||
profiles: BrowserProfile[],
|
||||
@@ -22,8 +22,8 @@ export function useBrowserState(
|
||||
* Check if a browser type allows only one instance to run at a time
|
||||
*/
|
||||
const isSingleInstanceBrowser = useCallback(
|
||||
(browserType: string): boolean => {
|
||||
return browserType === "tor-browser" || browserType === "mullvad-browser";
|
||||
(_browserType: string): boolean => {
|
||||
return false; // No browsers currently require single instance
|
||||
},
|
||||
[],
|
||||
);
|
||||
@@ -102,7 +102,7 @@ export function useBrowserState(
|
||||
return false;
|
||||
}
|
||||
|
||||
// For single-instance browsers (Tor and Mullvad)
|
||||
// For single-instance browsers
|
||||
if (isSingleInstanceBrowser(profile.browser)) {
|
||||
const runningInstancesOfType = profiles.filter(
|
||||
(p) => p.browser === profile.browser && runningProfiles.has(p.id),
|
||||
@@ -195,9 +195,7 @@ export function useBrowserState(
|
||||
isSingleInstanceBrowser(profile.browser) &&
|
||||
!canLaunchProfile(profile)
|
||||
) {
|
||||
const browserDisplayName =
|
||||
profile.browser === "tor-browser" ? "TOR" : "Mullvad";
|
||||
return `Only one ${browserDisplayName} browser instance can run at a time. Stop the running ${browserDisplayName} browser first.`;
|
||||
return `Only one instance of this browser can run at a time. Stop the running browser first.`;
|
||||
}
|
||||
|
||||
return "";
|
||||
@@ -242,8 +240,6 @@ export function useBrowserState(
|
||||
}
|
||||
|
||||
if (isSingleInstanceBrowser(profile.browser)) {
|
||||
const browserDisplayName =
|
||||
profile.browser === "tor-browser" ? "TOR" : "Mullvad";
|
||||
const runningInstancesOfType = profiles.filter(
|
||||
(p) => p.browser === profile.browser && runningProfiles.has(p.id),
|
||||
);
|
||||
@@ -252,7 +248,7 @@ export function useBrowserState(
|
||||
const runningProfileNames = runningInstancesOfType
|
||||
.map((p) => p.name)
|
||||
.join(", ");
|
||||
return `${browserDisplayName} browser is already running (${runningProfileNames}). Only one instance can run at a time.`;
|
||||
return `${getBrowserDisplayName(profile.browser)} browser is already running (${runningProfileNames}). Only one instance can run at a time.`;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -0,0 +1,33 @@
|
||||
import * as React from "react";
|
||||
|
||||
interface CommonControlledStateProps<T> {
  // Controlled value; when defined it always wins over internal state.
  value?: T;
  // Initial value for uncontrolled usage.
  defaultValue?: T;
}

/**
 * Controlled/uncontrolled state helper: tracks an internal copy of the value,
 * mirrors `value` into it while controlled, and notifies `onChange` on every
 * set. Returns a `[state, setState]` tuple like useState.
 */
// eslint-disable-next-line @typescript-eslint/no-explicit-any
export function useControlledState<T, Rest extends any[] = []>(
  props: CommonControlledStateProps<T> & {
    onChange?: (value: T, ...args: Rest) => void;
  },
): readonly [T, (next: T, ...args: Rest) => void] {
  const { value, defaultValue, onChange } = props;

  // NOTE(review): with neither value nor defaultValue given, the state starts
  // as undefined despite the T typing — the `as T` cast hides this.
  const [state, setInternalState] = React.useState<T>(
    value !== undefined ? value : (defaultValue as T),
  );

  // Keep the internal copy in sync while controlled.
  React.useEffect(() => {
    if (value !== undefined) setInternalState(value);
  }, [value]);

  // Setter that updates local state first, then notifies the owner.
  const setState = React.useCallback(
    (next: T, ...args: Rest) => {
      setInternalState(next);
      onChange?.(next, ...args);
    },
    [onChange],
  );

  return [state, setState] as const;
}
|
||||
@@ -4,7 +4,7 @@
|
||||
*/
|
||||
|
||||
import { FaChrome, FaFirefox, FaShieldAlt } from "react-icons/fa";
|
||||
import { SiBrave, SiMullvad, SiTorbrowser } from "react-icons/si";
|
||||
import { SiBrave } from "react-icons/si";
|
||||
import { ZenBrowser } from "@/components/icons/zen-browser";
|
||||
|
||||
/**
|
||||
@@ -14,11 +14,9 @@ export function getBrowserDisplayName(browserType: string): string {
|
||||
const browserNames: Record<string, string> = {
|
||||
firefox: "Firefox",
|
||||
"firefox-developer": "Firefox Developer Edition",
|
||||
"mullvad-browser": "Mullvad Browser",
|
||||
zen: "Zen Browser",
|
||||
brave: "Brave",
|
||||
chromium: "Chromium",
|
||||
"tor-browser": "Tor Browser",
|
||||
camoufox: "Anti-Detect",
|
||||
};
|
||||
|
||||
@@ -30,8 +28,6 @@ export function getBrowserDisplayName(browserType: string): string {
|
||||
*/
|
||||
export function getBrowserIcon(browserType: string) {
|
||||
switch (browserType) {
|
||||
case "mullvad-browser":
|
||||
return SiMullvad;
|
||||
case "chromium":
|
||||
return FaChrome;
|
||||
case "brave":
|
||||
@@ -41,8 +37,6 @@ export function getBrowserIcon(browserType: string) {
|
||||
return FaFirefox;
|
||||
case "zen":
|
||||
return ZenBrowser;
|
||||
case "tor-browser":
|
||||
return SiTorbrowser;
|
||||
case "camoufox":
|
||||
return FaShieldAlt;
|
||||
default:
|
||||
|
||||
@@ -0,0 +1,36 @@
|
||||
import * as React from "react";
|
||||
|
||||
/**
 * Creates a context whose consumer hook throws when used outside its
 * Provider, removing the need for per-consumer undefined checks.
 *
 * @param name - Optional provider name used in the thrown error message.
 * @returns A readonly [Provider, useContext] pair.
 */
function getStrictContext<T>(
  name?: string,
): readonly [
  ({
    value,
    children,
  }: {
    value: T;
    children?: React.ReactNode;
  }) => React.JSX.Element,
  () => T,
] {
  // undefined doubles as the "no provider above" sentinel, so providers
  // must always pass an explicit value.
  const Context = React.createContext<T | undefined>(undefined);

  const Provider = ({
    value,
    children,
  }: {
    value: T;
    children?: React.ReactNode;
  }) => <Context.Provider value={value}>{children}</Context.Provider>;

  const useSafeContext = () => {
    const ctx = React.useContext(Context);
    if (ctx === undefined) {
      throw new Error(`useContext must be used within ${name ?? "a Provider"}`);
    }
    return ctx;
  };

  return [Provider, useSafeContext] as const;
}
|
||||
|
||||
export { getStrictContext };
|
||||
@@ -0,0 +1,98 @@
|
||||
"use client";
|
||||
|
||||
import { type HTMLMotionProps, isMotionComponent, motion } from "motion/react";
|
||||
import * as React from "react";
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
// Loose prop bag used when merging child and slot props.
type AnyProps = Record<string, unknown>;

// Motion props for a DOM element, with `ref` re-typed to the concrete
// element type instead of motion's own ref type.
type DOMMotionProps<T extends HTMLElement = HTMLElement> = Omit<
  HTMLMotionProps<keyof HTMLElementTagNameMap>,
  "ref"
> & { ref?: React.Ref<T> };

// Radix-style `asChild` polymorphism: with asChild, exactly one element
// child is required; without it, the base type's children apply.
type WithAsChild<Base extends object> =
  | (Base & { asChild: true; children: React.ReactElement })
  | (Base & { asChild?: false | undefined });

// Props accepted by Slot itself: a single optional element child plus any
// motion props to merge into it.
type SlotProps<T extends HTMLElement = HTMLElement> = {
  children?: React.ReactElement;
} & DOMMotionProps<T>;
|
||||
|
||||
function mergeRefs<T>(
|
||||
...refs: (React.Ref<T> | undefined)[]
|
||||
): React.RefCallback<T> {
|
||||
return (node) => {
|
||||
refs.forEach((ref) => {
|
||||
if (!ref) return;
|
||||
if (typeof ref === "function") {
|
||||
ref(node);
|
||||
} else {
|
||||
(ref as React.RefObject<T | null>).current = node;
|
||||
}
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
function mergeProps<T extends HTMLElement>(
|
||||
childProps: AnyProps,
|
||||
slotProps: DOMMotionProps<T>,
|
||||
): AnyProps {
|
||||
const merged: AnyProps = { ...childProps, ...slotProps };
|
||||
|
||||
if (childProps.className || slotProps.className) {
|
||||
merged.className = cn(
|
||||
childProps.className as string,
|
||||
slotProps.className as string,
|
||||
);
|
||||
}
|
||||
|
||||
if (childProps.style || slotProps.style) {
|
||||
merged.style = {
|
||||
...(childProps.style as React.CSSProperties),
|
||||
...(slotProps.style as React.CSSProperties),
|
||||
};
|
||||
}
|
||||
|
||||
return merged;
|
||||
}
|
||||
|
||||
/**
 * Slot renders its single element child as a motion component, merging the
 * props and ref given to the Slot onto that child.
 *
 * If the child is already a motion component (per `isMotionComponent`) its
 * type is rendered directly; otherwise the child's type is wrapped with
 * `motion.create`. With no valid element child, nothing is rendered.
 */
function Slot<T extends HTMLElement = HTMLElement>({
  children,
  ref,
  ...props
}: SlotProps<T>) {
  // Hook order is fixed: both memos run on every render, even when the
  // child is invalid, so the invalid case is handled inside each memo.
  const childIsMotion = React.useMemo(() => {
    if (!React.isValidElement(children)) return false;
    const childType = children.type;
    return (
      typeof childType === "object" &&
      childType !== null &&
      isMotionComponent(childType)
    );
  }, [children]);

  // Resolve the element type to render; motion.div is a throwaway value for
  // the invalid-child case (we bail out below before using it).
  const Component = React.useMemo(() => {
    if (!React.isValidElement(children)) return motion.div;
    return childIsMotion
      ? (children.type as React.ElementType)
      : motion.create(children.type as React.ElementType);
  }, [childIsMotion, children]);

  if (!React.isValidElement(children)) return null;

  // Peel the child's own ref off so it can be merged with the slot's ref.
  const { ref: childRef, ...childProps } = children.props as AnyProps;

  return (
    <Component
      {...mergeProps(childProps, props)}
      ref={mergeRefs(childRef as React.Ref<T>, ref)}
    />
  );
}

export {
  Slot,
  type SlotProps,
  type WithAsChild,
  type DOMMotionProps,
  type AnyProps,
};
|
||||
@@ -320,6 +320,7 @@ export interface FilteredTrafficStats {
|
||||
bandwidth_history: BandwidthDataPoint[];
|
||||
period_bytes_sent: number;
|
||||
period_bytes_received: number;
|
||||
period_requests: number;
|
||||
domains: Record<string, DomainAccess>;
|
||||
unique_ips: string[];
|
||||
}
|
||||
|
||||
+4
-2
@@ -13,7 +13,7 @@
|
||||
"moduleResolution": "bundler",
|
||||
"resolveJsonModule": true,
|
||||
"isolatedModules": true,
|
||||
"jsx": "preserve",
|
||||
"jsx": "react-jsx",
|
||||
"incremental": true,
|
||||
"plugins": [
|
||||
{
|
||||
@@ -29,7 +29,9 @@
|
||||
"**/*.tsx",
|
||||
".next/types/**/*.ts",
|
||||
"next-env.d.ts",
|
||||
"dist/types/**/*.ts"
|
||||
"dist/types/**/*.ts",
|
||||
".next/dev/types/**/*.ts",
|
||||
"dist/dev/types/**/*.ts"
|
||||
],
|
||||
"exclude": ["node_modules", "nodecar", "src-tauri/target"]
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user