mirror of
https://github.com/zhom/donutbrowser.git
synced 2026-05-04 09:35:11 +02:00
Compare commits
49 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| e3d487f846 | |||
| b4b7609534 | |||
| 8bf40fbc62 | |||
| 630cf74ab9 | |||
| b8d8039c80 | |||
| f1c4245c5a | |||
| 5cc816ecc5 | |||
| 7409cf7851 | |||
| d36d5430ca | |||
| 7518ee9e87 | |||
| ab8db06dfb | |||
| 0b43c6776b | |||
| 564c57fefc | |||
| d3cf91c5d3 | |||
| 729307be7b | |||
| c736eb9195 | |||
| 68d0741f38 | |||
| ae59ba802e | |||
| 73de070478 | |||
| 187d3414d8 | |||
| cc74589243 | |||
| 55974d17be | |||
| cbd0312618 | |||
| 41205ab31d | |||
| bfec778d19 | |||
| 0cb738c5ae | |||
| a82a73b3f4 | |||
| 49eca7271f | |||
| 487c72cbb7 | |||
| aec4a0c3af | |||
| c37675bce2 | |||
| ccdc411e7f | |||
| bec3fa142c | |||
| d725040b6e | |||
| 81c00538a9 | |||
| 1c5444928d | |||
| 85f8630389 | |||
| 57ead61139 | |||
| ef00c59063 | |||
| a61f42b645 | |||
| 3dd66069b5 | |||
| 14c7ded062 | |||
| d58b68fd50 | |||
| 3e69fea338 | |||
| fe2125beba | |||
| 23cfa84998 | |||
| 3e3ec29f58 | |||
| b1b91e94c0 | |||
| c624196dbb |
@@ -31,7 +31,7 @@ jobs:
|
||||
# build-mode: none
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 #v6.0.1
|
||||
|
||||
- name: Set up pnpm package manager
|
||||
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 #v4.2.0
|
||||
@@ -39,7 +39,7 @@ jobs:
|
||||
run_install: false
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 #v6.0.0
|
||||
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f #v6.1.0
|
||||
with:
|
||||
node-version-file: .node-version
|
||||
cache: "pnpm"
|
||||
@@ -57,7 +57,7 @@ jobs:
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libgtk-3-dev libayatana-appindicator3-dev librsvg2-dev pkg-config xdg-utils
|
||||
|
||||
- name: Rust cache
|
||||
uses: swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 #v2.8.1
|
||||
uses: swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 #v2.8.2
|
||||
with:
|
||||
workdir: ./src-tauri
|
||||
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
name: Contributors
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
@@ -19,7 +21,7 @@ jobs:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 #v6.0.1
|
||||
- name: Contribute List
|
||||
uses: akhilmhdh/contributors-readme-action@83ea0b4f1ac928fbfe88b9e8460a932a528eb79f #v2.3.11
|
||||
env:
|
||||
|
||||
@@ -13,7 +13,7 @@ jobs:
|
||||
security-scan:
|
||||
name: Security Vulnerability Scan
|
||||
if: ${{ github.actor == 'dependabot[bot]' }}
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@b77c075a1235514558f0eb88dbd31e22c45e0cd2" # v2.3.0
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@375a0e8ebdc98e99b02ac4338a724f5750f21213" # v2.3.1
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
|
||||
@@ -15,7 +15,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 #v6.0.1
|
||||
|
||||
- name: Get issue templates
|
||||
id: get-templates
|
||||
@@ -49,7 +49,7 @@ jobs:
|
||||
|
||||
- name: Validate issue with AI
|
||||
id: validate
|
||||
uses: actions/ai-inference@a1c11829223a786afe3b5663db904a3aa1eac3a2 # v2.0.1
|
||||
uses: actions/ai-inference@334892bb203895caaed82ec52d23c1ed9385151e # v2.0.4
|
||||
with:
|
||||
prompt-file: issue_analysis.txt
|
||||
system-prompt: |
|
||||
@@ -115,13 +115,14 @@ jobs:
|
||||
- name: Parse validation result and take action
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
RESPONSE_FILE: ${{ steps.validate.outputs.response-file }}
|
||||
RESPONSE: ${{ steps.validate.outputs.response }}
|
||||
run: |
|
||||
# Prefer reading from the response file to avoid output truncation
|
||||
RESPONSE_FILE='${{ steps.validate.outputs.response-file }}'
|
||||
if [ -n "$RESPONSE_FILE" ] && [ -f "$RESPONSE_FILE" ]; then
|
||||
RAW_OUTPUT=$(cat "$RESPONSE_FILE")
|
||||
else
|
||||
RAW_OUTPUT='${{ steps.validate.outputs.response }}'
|
||||
RAW_OUTPUT="$RESPONSE"
|
||||
fi
|
||||
|
||||
# Extract JSON if wrapped in markdown code fences; otherwise use raw
|
||||
|
||||
@@ -34,7 +34,7 @@ jobs:
|
||||
run: git config --global core.autocrlf false
|
||||
|
||||
- name: Checkout repository code
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 #v6.0.1
|
||||
|
||||
- name: Set up pnpm package manager
|
||||
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 #v4.2.0
|
||||
@@ -42,7 +42,7 @@ jobs:
|
||||
run_install: false
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 #v6.0.0
|
||||
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f #v6.1.0
|
||||
with:
|
||||
node-version-file: .node-version
|
||||
cache: "pnpm"
|
||||
|
||||
@@ -41,7 +41,7 @@ jobs:
|
||||
run: git config --global core.autocrlf false
|
||||
|
||||
- name: Checkout repository code
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 #v6.0.1
|
||||
|
||||
- name: Set up pnpm package manager
|
||||
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 #v4.2.0
|
||||
@@ -49,7 +49,7 @@ jobs:
|
||||
run_install: false
|
||||
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 #v6.0.0
|
||||
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f #v6.1.0
|
||||
with:
|
||||
node-version-file: .node-version
|
||||
cache: "pnpm"
|
||||
|
||||
@@ -50,7 +50,7 @@ jobs:
|
||||
scan-scheduled:
|
||||
name: Scheduled Security Scan
|
||||
if: ${{ github.event_name == 'push' || github.event_name == 'schedule' }}
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@b77c075a1235514558f0eb88dbd31e22c45e0cd2" # v2.3.0
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@375a0e8ebdc98e99b02ac4338a724f5750f21213" # v2.3.1
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
@@ -63,7 +63,7 @@ jobs:
|
||||
scan-pr:
|
||||
name: PR Security Scan
|
||||
if: ${{ github.event_name == 'pull_request' || github.event_name == 'merge_group' }}
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@b77c075a1235514558f0eb88dbd31e22c45e0cd2" # v2.3.0
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@375a0e8ebdc98e99b02ac4338a724f5750f21213" # v2.3.1
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
|
||||
@@ -29,7 +29,7 @@ jobs:
|
||||
security-scan:
|
||||
name: Security Vulnerability Scan
|
||||
if: ${{ github.event_name == 'pull_request' || github.event_name == 'merge_group' }}
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@b77c075a1235514558f0eb88dbd31e22c45e0cd2" # v2.3.0
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@375a0e8ebdc98e99b02ac4338a724f5750f21213" # v2.3.1
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
|
||||
@@ -1,8 +1,10 @@
|
||||
name: Generate Release Notes
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
workflow_run:
|
||||
workflows: ["Release"]
|
||||
types:
|
||||
- completed
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
@@ -11,19 +13,40 @@ permissions:
|
||||
jobs:
|
||||
generate-release-notes:
|
||||
runs-on: ubuntu-latest
|
||||
if: startsWith(github.event.release.tag_name, 'v') && !github.event.release.prerelease
|
||||
if: github.event.workflow_run.conclusion == 'success' && startsWith(github.event.workflow_run.head_branch, 'v')
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 #v6.0.1
|
||||
with:
|
||||
fetch-depth: 0 # Fetch full history to compare with previous release
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Get release info
|
||||
id: get-release
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
TAG_NAME: ${{ github.event.workflow_run.head_branch }}
|
||||
run: |
|
||||
echo "tag-name=$TAG_NAME" >> $GITHUB_OUTPUT
|
||||
|
||||
# Get release info by tag
|
||||
RELEASE_INFO=$(gh api /repos/${{ github.repository }}/releases/tags/$TAG_NAME)
|
||||
RELEASE_ID=$(echo "$RELEASE_INFO" | jq -r '.id')
|
||||
IS_PRERELEASE=$(echo "$RELEASE_INFO" | jq -r '.prerelease')
|
||||
|
||||
echo "release-id=$RELEASE_ID" >> $GITHUB_OUTPUT
|
||||
echo "is-prerelease=$IS_PRERELEASE" >> $GITHUB_OUTPUT
|
||||
|
||||
if [ "$IS_PRERELEASE" = "true" ]; then
|
||||
echo "Skipping release notes generation for prerelease"
|
||||
fi
|
||||
|
||||
- name: Get previous release tag
|
||||
id: get-previous-tag
|
||||
if: steps.get-release.outputs.is-prerelease == 'false'
|
||||
env:
|
||||
CURRENT_TAG: ${{ steps.get-release.outputs.tag-name }}
|
||||
run: |
|
||||
# Get the previous release tag (excluding the current one)
|
||||
CURRENT_TAG="${{ github.event.release.tag_name }}"
|
||||
PREVIOUS_TAG=$(git tag --sort=-version:refname | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' | grep -v "$CURRENT_TAG" | head -n 1)
|
||||
|
||||
if [ -z "$PREVIOUS_TAG" ]; then
|
||||
@@ -38,16 +61,16 @@ jobs:
|
||||
|
||||
- name: Get commit messages between releases
|
||||
id: get-commits
|
||||
if: steps.get-release.outputs.is-prerelease == 'false'
|
||||
env:
|
||||
PREVIOUS_TAG: ${{ steps.get-previous-tag.outputs.previous-tag }}
|
||||
CURRENT_TAG: ${{ steps.get-previous-tag.outputs.current-tag }}
|
||||
run: |
|
||||
# Get commit messages between previous and current release
|
||||
PREVIOUS_TAG="${{ steps.get-previous-tag.outputs.previous-tag }}"
|
||||
CURRENT_TAG="${{ steps.get-previous-tag.outputs.current-tag }}"
|
||||
|
||||
# Get commit log with detailed format
|
||||
COMMIT_LOG=$(git log --pretty=format:"- %s (%h by %an)" $PREVIOUS_TAG..$CURRENT_TAG --no-merges)
|
||||
COMMIT_LOG=$(git log --pretty=format:"- %s (%h by %an)" "$PREVIOUS_TAG".."$CURRENT_TAG" --no-merges)
|
||||
|
||||
# Get changed files summary
|
||||
CHANGED_FILES=$(git diff --name-status $PREVIOUS_TAG..$CURRENT_TAG | head -20)
|
||||
CHANGED_FILES=$(git diff --name-status "$PREVIOUS_TAG".."$CURRENT_TAG" | head -20)
|
||||
|
||||
# Save to files for AI processing
|
||||
echo "$COMMIT_LOG" > commits.txt
|
||||
@@ -58,7 +81,8 @@ jobs:
|
||||
|
||||
- name: Generate release notes with AI
|
||||
id: generate-notes
|
||||
uses: actions/ai-inference@a1c11829223a786afe3b5663db904a3aa1eac3a2 # v2.0.1
|
||||
if: steps.get-release.outputs.is-prerelease == 'false'
|
||||
uses: actions/ai-inference@334892bb203895caaed82ec52d23c1ed9385151e # v2.0.4
|
||||
with:
|
||||
prompt-file: commits.txt
|
||||
system-prompt: |
|
||||
@@ -101,23 +125,27 @@ jobs:
|
||||
model: gpt-4o
|
||||
|
||||
- name: Update release with generated notes
|
||||
if: steps.get-release.outputs.is-prerelease == 'false'
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
RESPONSE_FILE: ${{ steps.generate-notes.outputs.response-file }}
|
||||
RESPONSE_OUTPUT: ${{ steps.generate-notes.outputs.response }}
|
||||
RELEASE_ID: ${{ steps.get-release.outputs.release-id }}
|
||||
run: |
|
||||
# Prefer reading from the response file to avoid output truncation
|
||||
RESPONSE_FILE='${{ steps.generate-notes.outputs.response-file }}'
|
||||
if [ -n "$RESPONSE_FILE" ] && [ -f "$RESPONSE_FILE" ]; then
|
||||
RELEASE_NOTES=$(cat "$RESPONSE_FILE")
|
||||
else
|
||||
RELEASE_NOTES='${{ steps.generate-notes.outputs.response }}'
|
||||
RELEASE_NOTES="$RESPONSE_OUTPUT"
|
||||
fi
|
||||
|
||||
# Update the release with the generated notes
|
||||
gh api --method PATCH /repos/${{ github.repository }}/releases/${{ github.event.release.id }} \
|
||||
gh api --method PATCH /repos/${{ github.repository }}/releases/"$RELEASE_ID" \
|
||||
--field body="$RELEASE_NOTES"
|
||||
|
||||
echo "✅ Release notes updated successfully!"
|
||||
|
||||
- name: Cleanup
|
||||
if: always()
|
||||
run: |
|
||||
rm -f commits.txt changes.txt
|
||||
|
||||
@@ -13,7 +13,7 @@ env:
|
||||
jobs:
|
||||
security-scan:
|
||||
name: Security Vulnerability Scan
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@b77c075a1235514558f0eb88dbd31e22c45e0cd2" # v2.3.0
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@375a0e8ebdc98e99b02ac4338a724f5750f21213" # v2.3.1
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
@@ -105,7 +105,7 @@ jobs:
|
||||
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
|
||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 #v6.0.1
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 #v4.2.0
|
||||
@@ -113,7 +113,7 @@ jobs:
|
||||
run_install: false
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 #v6.0.0
|
||||
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f #v6.1.0
|
||||
with:
|
||||
node-version-file: .node-version
|
||||
cache: "pnpm"
|
||||
@@ -131,7 +131,7 @@ jobs:
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libgtk-3-dev libayatana-appindicator3-dev librsvg2-dev pkg-config xdg-utils
|
||||
|
||||
- name: Rust cache
|
||||
uses: swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 #v2.8.1
|
||||
uses: swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 #v2.8.2
|
||||
with:
|
||||
workdir: ./src-tauri
|
||||
|
||||
|
||||
@@ -12,7 +12,7 @@ env:
|
||||
jobs:
|
||||
security-scan:
|
||||
name: Security Vulnerability Scan
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@b77c075a1235514558f0eb88dbd31e22c45e0cd2" # v2.3.0
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@375a0e8ebdc98e99b02ac4338a724f5750f21213" # v2.3.1
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
@@ -104,7 +104,7 @@ jobs:
|
||||
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
- uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
|
||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 #v6.0.1
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 #v4.2.0
|
||||
@@ -112,7 +112,7 @@ jobs:
|
||||
run_install: false
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@2028fbc5c25fe9cf00d9f06a71cc4710d4507903 #v6.0.0
|
||||
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f #v6.1.0
|
||||
with:
|
||||
node-version-file: .node-version
|
||||
cache: "pnpm"
|
||||
@@ -130,7 +130,7 @@ jobs:
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libgtk-3-dev libayatana-appindicator3-dev librsvg2-dev pkg-config xdg-utils
|
||||
|
||||
- name: Rust cache
|
||||
uses: swatinem/rust-cache@f13886b937689c021905a6b90929199931d60db1 #v2.8.1
|
||||
uses: swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 #v2.8.2
|
||||
with:
|
||||
workdir: ./src-tauri
|
||||
|
||||
|
||||
@@ -21,6 +21,6 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout Actions Repository
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 #v6.0.0
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 #v6.0.1
|
||||
- name: Spell Check Repo
|
||||
uses: crate-ci/typos@626c4bedb751ce0b7f03262ca97ddda9a076ae1c #v1.39.2
|
||||
uses: crate-ci/typos@2d0ce569feab1f8752f1dde43cc2f2aa53236e06 #v1.40.0
|
||||
|
||||
@@ -12,7 +12,7 @@ jobs:
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
|
||||
- uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
stale-issue-message: "This issue has been inactive for 60 days. Please respond to keep it open."
|
||||
|
||||
@@ -12,6 +12,16 @@ Do keep in mind before you start working on an issue / posting a PR:
|
||||
- Confirm if other contributors are working on the same issue
|
||||
- Check if the feature aligns with our roadmap and project goals
|
||||
|
||||
## Contributor License Agreement
|
||||
|
||||
By contributing to Donut Browser, you agree that your contributions will be licensed under the same terms as the project. You must agree to our [Contributor License Agreement](CONTRIBUTOR_LICENSE_AGREEMENT.md) before your contributions can be accepted. This agreement ensures that:
|
||||
|
||||
- Your contributions can be used in the open source version of Donut Browser (licensed under AGPL-3.0)
|
||||
- Donut Browser can offer commercial licenses for the software, including your contributions
|
||||
- You retain all rights to use your contributions for any other purpose
|
||||
|
||||
When you submit your first pull request, you acknowledge that you agree to the terms of the Contributor License Agreement.
|
||||
|
||||
## Tips & Things to Consider
|
||||
|
||||
- PRs with tests are highly appreciated
|
||||
|
||||
@@ -0,0 +1,15 @@
|
||||
# Donut Browser Software Grant and Contributor License Agreement ("Agreement")
|
||||
|
||||
This agreement is based on the Apache Software Foundation Contributor License Agreement. (v r190612)
|
||||
|
||||
Thank you for your interest in the Donut Browser project ("Donut Browser" or "the Project"). In order to clarify the intellectual property license granted with Contributions from any person or entity, Donut Browser must have a Contributor License Agreement (CLA) on file that has been agreed to by each Contributor, indicating agreement to the license terms below. This license is for your protection as a Contributor as well as the protection of Donut Browser and its users; it does not change your rights to use your own Contributions for any other purpose. This Agreement allows an individual to contribute to Donut Browser on that individual's own behalf, or an entity (the "Corporation") to submit Contributions to Donut Browser, to authorize Contributions submitted by its designated employees to Donut Browser, and to grant copyright and patent licenses thereto.
|
||||
|
||||
You accept and agree to the following terms and conditions for Your present and future Contributions submitted to Donut Browser. Except for the license granted herein to Donut Browser and recipients of software distributed by Donut Browser, You reserve all right, title, and interest in and to Your Contributions.
|
||||
|
||||
1. Definitions. "You" (or "Your") shall mean the copyright owner or legal entity authorized by the copyright owner that is making this Agreement with Donut Browser. For legal entities, the entity making a Contribution and all other entities that control, are controlled by, or are under common control with that entity are considered to be a single Contributor. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "Contribution" shall mean any work, as well as any modifications or additions to an existing work, that is intentionally submitted by You to Donut Browser for inclusion in, or documentation of, any of the products owned or managed by Donut Browser (the "Work"). For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to Donut Browser or its representatives, including but not limited to communication on electronic mailing lists, source code control systems (such as GitHub), and issue tracking systems that are managed by, or on behalf of, Donut Browser for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by You as "Not a Contribution."
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of this Agreement, You hereby grant to Donut Browser and to recipients of software distributed by Donut Browser a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, sublicense, and distribute Your Contributions and such derivative works under any license terms, including but not limited to the GNU Affero General Public License version 3 (AGPL-3.0) and any commercial or proprietary license terms that Donut Browser may choose to offer. This grant includes the right for Donut Browser to offer the Work, including Your Contributions, under multiple licenses simultaneously (dual or multi-licensing), including both open source and commercial licenses.
|
||||
3. Grant of Patent License. Subject to the terms and conditions of this Agreement, You hereby grant to Donut Browser and to recipients of software distributed by Donut Browser a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by You that are necessarily infringed by Your Contribution(s) alone or by combination of Your Contribution(s) with the Work to which such Contribution(s) were submitted. If any entity institutes patent litigation against You or any other entity (including a cross-claim or counterclaim in a lawsuit) alleging that your Contribution, or the Work to which you have contributed, constitutes direct or contributory patent infringement, then any patent licenses granted to that entity under this Agreement for that Contribution or Work shall terminate as of the date such litigation is filed.
|
||||
4. You represent that You are legally entitled to grant the above license. If You are an individual, and if Your employer(s) has rights to intellectual property that you create that includes Your Contributions, you represent that You have received permission to make Contributions on behalf of that employer, or that Your employer has waived such rights for your Contributions to Donut Browser. If You are a Corporation, any individual who makes a contribution from an account associated with You will be considered authorized to Contribute on Your behalf.
|
||||
5. You represent that each of Your Contributions is Your original creation (see section 7 for submissions on behalf of others).
|
||||
6. You are not expected to provide support for Your Contributions, except to the extent You desire to provide support. You may provide support for free, for a fee, or not at all. Unless required by applicable law or agreed to in writing, You provide Your Contributions on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
7. Should You wish to submit work that is not Your original creation, You may submit it to Donut Browser separately from any Contribution, identifying the complete details of its source and of any license or other restriction (including, but not limited to, related patents, trademarks, and license agreements) of which you are personally aware, and conspicuously marking the work as "Submitted on behalf of a third-party: [named here]".
|
||||
+4
-1
@@ -10,6 +10,7 @@
|
||||
"cssVariables": true,
|
||||
"prefix": ""
|
||||
},
|
||||
"iconLibrary": "react-icons",
|
||||
"aliases": {
|
||||
"components": "@/components",
|
||||
"utils": "@/lib/utils",
|
||||
@@ -17,5 +18,7 @@
|
||||
"lib": "@/lib",
|
||||
"hooks": "@/hooks"
|
||||
},
|
||||
"iconLibrary": "lucide"
|
||||
"registries": {
|
||||
"@animate-ui": "https://animate-ui.com/r/{name}.json"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -21,15 +21,15 @@
|
||||
"author": "",
|
||||
"license": "AGPL-3.0",
|
||||
"dependencies": {
|
||||
"@types/node": "^24.10.1",
|
||||
"@types/node": "^25.0.3",
|
||||
"commander": "^14.0.2",
|
||||
"donutbrowser-camoufox-js": "^0.7.0",
|
||||
"dotenv": "^17.2.3",
|
||||
"fingerprint-generator": "^2.1.77",
|
||||
"fingerprint-generator": "^2.1.78",
|
||||
"get-port": "^7.1.0",
|
||||
"nodemon": "^3.1.11",
|
||||
"playwright-core": "^1.57.0",
|
||||
"proxy-chain": "^2.6.0",
|
||||
"proxy-chain": "^2.7.0",
|
||||
"tmp": "^0.2.5",
|
||||
"ts-node": "^10.9.2",
|
||||
"typescript": "^5.9.3"
|
||||
|
||||
+13
-13
@@ -2,7 +2,7 @@
|
||||
"name": "donutbrowser",
|
||||
"private": true,
|
||||
"license": "AGPL-3.0",
|
||||
"version": "0.13.3",
|
||||
"version": "0.13.8",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "next dev --turbopack",
|
||||
@@ -53,31 +53,31 @@
|
||||
"cmdk": "^1.1.1",
|
||||
"color": "^5.0.3",
|
||||
"flag-icons": "^7.5.0",
|
||||
"lucide-react": "^0.555.0",
|
||||
"motion": "^12.23.24",
|
||||
"next": "^16.0.6",
|
||||
"lucide-react": "^0.562.0",
|
||||
"motion": "^12.23.26",
|
||||
"next": "^16.1.0",
|
||||
"next-themes": "^0.4.6",
|
||||
"radix-ui": "^1.4.3",
|
||||
"react": "^19.2.0",
|
||||
"react-dom": "^19.2.0",
|
||||
"react": "^19.2.3",
|
||||
"react-dom": "^19.2.3",
|
||||
"react-icons": "^5.5.0",
|
||||
"recharts": "3.5.1",
|
||||
"recharts": "3.6.0",
|
||||
"sonner": "^2.0.7",
|
||||
"tailwind-merge": "^3.4.0",
|
||||
"tauri-plugin-macos-permissions-api": "^2.3.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "2.3.8",
|
||||
"@tailwindcss/postcss": "^4.1.17",
|
||||
"@tauri-apps/cli": "^2.9.5",
|
||||
"@biomejs/biome": "2.3.10",
|
||||
"@tailwindcss/postcss": "^4.1.18",
|
||||
"@tauri-apps/cli": "^2.9.6",
|
||||
"@types/color": "^4.2.0",
|
||||
"@types/node": "^24.10.1",
|
||||
"@types/node": "^25.0.3",
|
||||
"@types/react": "^19.2.7",
|
||||
"@types/react-dom": "^19.2.3",
|
||||
"@vitejs/plugin-react": "^5.1.1",
|
||||
"@vitejs/plugin-react": "^5.1.2",
|
||||
"husky": "^9.1.7",
|
||||
"lint-staged": "^16.2.7",
|
||||
"tailwindcss": "^4.1.17",
|
||||
"tailwindcss": "^4.1.18",
|
||||
"ts-unused-exports": "^11.0.1",
|
||||
"tw-animate-css": "^1.4.0",
|
||||
"typescript": "~5.9.3"
|
||||
|
||||
Generated
+881
-880
File diff suppressed because it is too large
Load Diff
Generated
+161
-220
@@ -460,9 +460,9 @@ checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
|
||||
|
||||
[[package]]
|
||||
name = "base64ct"
|
||||
version = "1.8.0"
|
||||
version = "1.8.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "55248b47b0caf0546f7988906588779981c43bb1bc9d0c44087278f80cdb44ba"
|
||||
checksum = "0e050f626429857a27ddccb31e0aca21356bfa709c04041aefddac081a8f068a"
|
||||
|
||||
[[package]]
|
||||
name = "bitflags"
|
||||
@@ -509,22 +509,13 @@ dependencies = [
|
||||
"generic-array",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "block2"
|
||||
version = "0.5.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2c132eebf10f5cad5289222520a4a058514204aed6d791f1cf4fe8088b82d15f"
|
||||
dependencies = [
|
||||
"objc2 0.5.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "block2"
|
||||
version = "0.6.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cdeb9d870516001442e364c5220d3574d2da8dc765554b4a617230d33fa58ef5"
|
||||
dependencies = [
|
||||
"objc2 0.6.3",
|
||||
"objc2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -586,9 +577,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "bumpalo"
|
||||
version = "3.19.0"
|
||||
version = "3.19.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43"
|
||||
checksum = "5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510"
|
||||
|
||||
[[package]]
|
||||
name = "byte-unit"
|
||||
@@ -693,9 +684,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "camino"
|
||||
version = "1.2.1"
|
||||
version = "1.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "276a59bf2b2c967788139340c9f0c5b12d7fd6630315c15c217e559de85d2609"
|
||||
checksum = "e629a66d692cb9ff1a1c664e41771b3dcaf961985a9774c0eb0bd1b51cf60a48"
|
||||
dependencies = [
|
||||
"serde_core",
|
||||
]
|
||||
@@ -735,9 +726,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "cc"
|
||||
version = "1.2.48"
|
||||
version = "1.2.50"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c481bdbf0ed3b892f6f806287d72acd515b352a4ec27a208489b8c1bc839633a"
|
||||
checksum = "9f50d563227a1c37cc0a263f64eca3334388c01c5e4c4861a9def205c614383c"
|
||||
dependencies = [
|
||||
"find-msvc-tools",
|
||||
"jobserver",
|
||||
@@ -987,9 +978,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "crc"
|
||||
version = "3.4.0"
|
||||
version = "3.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5eb8a2a1cd12ab0d987a5d5e825195d372001a4094a0376319d5a0ad71c1ba0d"
|
||||
checksum = "9710d3b3739c2e349eb44fe848ad0b7c8cb1e42bd87ee49371df2f7acaf3e675"
|
||||
dependencies = [
|
||||
"crc-catalog",
|
||||
]
|
||||
@@ -1234,9 +1225,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "89a09f22a6c6069a18470eb92d2298acf25463f14256d24778e1230d789a2aec"
|
||||
dependencies = [
|
||||
"bitflags 2.10.0",
|
||||
"block2 0.6.2",
|
||||
"block2",
|
||||
"libc",
|
||||
"objc2 0.6.3",
|
||||
"objc2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1261,9 +1252,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "dlopen2"
|
||||
version = "0.8.0"
|
||||
version = "0.8.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b54f373ccf864bf587a89e880fb7610f8d73f3045f13580948ccbcaff26febff"
|
||||
checksum = "5e2c5bd4158e66d1e215c49b837e11d62f3267b30c92f1d171c4d3105e3dc4d4"
|
||||
dependencies = [
|
||||
"dlopen2_derive",
|
||||
"libc",
|
||||
@@ -1273,9 +1264,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "dlopen2_derive"
|
||||
version = "0.4.1"
|
||||
version = "0.4.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "788160fb30de9cdd857af31c6a2675904b16ece8fc2737b2c7127ba368c9d0f4"
|
||||
checksum = "0fbbb781877580993a8707ec48672673ec7b81eeba04cfd2310bd28c08e47c8f"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -1293,7 +1284,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "donutbrowser"
|
||||
version = "0.13.3"
|
||||
version = "0.13.8"
|
||||
dependencies = [
|
||||
"aes-gcm",
|
||||
"argon2",
|
||||
@@ -1317,7 +1308,7 @@ dependencies = [
|
||||
"log",
|
||||
"lzma-rs",
|
||||
"msi-extract",
|
||||
"objc2 0.6.3",
|
||||
"objc2",
|
||||
"objc2-app-kit",
|
||||
"rand 0.9.2",
|
||||
"reqwest",
|
||||
@@ -1574,13 +1565,13 @@ checksum = "3a3076410a55c90011c298b04d0cfa770b00fa04e1e3c97d3f6c9de105a03844"
|
||||
|
||||
[[package]]
|
||||
name = "flate2"
|
||||
version = "1.1.5"
|
||||
version = "1.1.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bfe33edd8e85a12a67454e37f8c75e730830d83e313556ab9ebf9ee7fbeb3bfb"
|
||||
checksum = "a2152dbcb980c05735e2a651d96011320a949eb31a0c8b38b72645ce97dec676"
|
||||
dependencies = [
|
||||
"crc32fast",
|
||||
"libz-rs-sys",
|
||||
"miniz_oxide",
|
||||
"zlib-rs",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2261,9 +2252,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "hyper-util"
|
||||
version = "0.1.18"
|
||||
version = "0.1.19"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "52e9a2a24dc5c6821e71a7030e1e14b7b632acac55c40e9d2e082c621261bb56"
|
||||
checksum = "727805d60e7938b76b826a6ef209eb70eaa1812794f9424d4a4e2d740662df5f"
|
||||
dependencies = [
|
||||
"base64 0.22.1",
|
||||
"bytes",
|
||||
@@ -2280,6 +2271,7 @@ dependencies = [
|
||||
"socket2",
|
||||
"system-configuration",
|
||||
"tokio",
|
||||
"tower-layer",
|
||||
"tower-service",
|
||||
"tracing",
|
||||
"windows-registry",
|
||||
@@ -2367,9 +2359,9 @@ checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a"
|
||||
|
||||
[[package]]
|
||||
name = "icu_properties"
|
||||
version = "2.1.1"
|
||||
version = "2.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e93fcd3157766c0c8da2f8cff6ce651a31f0810eaa1c51ec363ef790bbb5fb99"
|
||||
checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec"
|
||||
dependencies = [
|
||||
"icu_collections",
|
||||
"icu_locale_core",
|
||||
@@ -2381,9 +2373,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "icu_properties_data"
|
||||
version = "2.1.1"
|
||||
version = "2.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "02845b3647bb045f1100ecd6480ff52f34c35f82d9880e029d329c21d1054899"
|
||||
checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af"
|
||||
|
||||
[[package]]
|
||||
name = "icu_provider"
|
||||
@@ -2687,9 +2679,9 @@ checksum = "2c4a545a15244c7d945065b5d392b2d2d7f21526fba56ce51467b06ed445e8f7"
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.177"
|
||||
version = "0.2.178"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2874a2af47a2325c2001a6e6fad9b16a53b802102b528163885171cf92b15976"
|
||||
checksum = "37c93d8daa9d8a012fd8ab92f088405fb202ea0b6ab73ee2482ae66af4f42091"
|
||||
|
||||
[[package]]
|
||||
name = "libloading"
|
||||
@@ -2713,22 +2705,13 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "libredox"
|
||||
version = "0.1.10"
|
||||
version = "0.1.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "416f7e718bdb06000964960ffa43b4335ad4012ae8b99060261aa4a8088d5ccb"
|
||||
checksum = "df15f6eac291ed1cf25865b1ee60399f57e7c227e7f51bdbd4c5270396a9ed50"
|
||||
dependencies = [
|
||||
"bitflags 2.10.0",
|
||||
"libc",
|
||||
"redox_syscall",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "libz-rs-sys"
|
||||
version = "0.5.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "840db8cf39d9ec4dd794376f38acc40d0fc65eec2a8f484f7fd375b84602becd"
|
||||
dependencies = [
|
||||
"zlib-rs",
|
||||
"redox_syscall 0.6.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2754,9 +2737,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "log"
|
||||
version = "0.4.28"
|
||||
version = "0.4.29"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432"
|
||||
checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897"
|
||||
dependencies = [
|
||||
"value-bag",
|
||||
]
|
||||
@@ -2773,9 +2756,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "lzma-rust2"
|
||||
version = "0.13.0"
|
||||
version = "0.15.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c60a23ffb90d527e23192f1246b14746e2f7f071cb84476dd879071696c18a4a"
|
||||
checksum = "48172246aa7c3ea28e423295dd1ca2589a24617cc4e588bb8cfe177cb2c54d95"
|
||||
dependencies = [
|
||||
"crc",
|
||||
"sha2",
|
||||
@@ -2883,9 +2866,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "mio"
|
||||
version = "1.1.0"
|
||||
version = "1.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "69d83b0086dc8ecf3ce9ae2874b2d1290252e2a30720bea58a5c6639b0092873"
|
||||
checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"wasi 0.11.1+wasi-snapshot-preview1",
|
||||
@@ -2926,10 +2909,10 @@ dependencies = [
|
||||
"dpi",
|
||||
"gtk",
|
||||
"keyboard-types",
|
||||
"objc2 0.6.3",
|
||||
"objc2",
|
||||
"objc2-app-kit",
|
||||
"objc2-core-foundation",
|
||||
"objc2-foundation 0.3.2",
|
||||
"objc2-foundation",
|
||||
"once_cell",
|
||||
"png",
|
||||
"serde",
|
||||
@@ -3011,9 +2994,9 @@ checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb"
|
||||
|
||||
[[package]]
|
||||
name = "ntapi"
|
||||
version = "0.4.1"
|
||||
version = "0.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e8a3895c6391c39d7fe7ebc444a87eb2991b2a0bc718fdabd071eec617fc68e4"
|
||||
checksum = "c70f219e21142367c70c0b30c6a9e3a14d55b4d12a204d897fbec83a0363f081"
|
||||
dependencies = [
|
||||
"winapi",
|
||||
]
|
||||
@@ -3074,22 +3057,6 @@ dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "objc-sys"
|
||||
version = "0.3.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cdb91bdd390c7ce1a8607f35f3ca7151b65afc0ff5ff3b34fa350f7d7c7e4310"
|
||||
|
||||
[[package]]
|
||||
name = "objc2"
|
||||
version = "0.5.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "46a785d4eeff09c14c487497c162e92766fbb3e4059a71840cecc03d9a50b804"
|
||||
dependencies = [
|
||||
"objc-sys",
|
||||
"objc2-encode",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "objc2"
|
||||
version = "0.6.3"
|
||||
@@ -3107,9 +3074,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d49e936b501e5c5bf01fda3a9452ff86dc3ea98ad5f283e1455153142d97518c"
|
||||
dependencies = [
|
||||
"bitflags 2.10.0",
|
||||
"block2 0.6.2",
|
||||
"block2",
|
||||
"libc",
|
||||
"objc2 0.6.3",
|
||||
"objc2",
|
||||
"objc2-cloud-kit",
|
||||
"objc2-core-data",
|
||||
"objc2-core-foundation",
|
||||
@@ -3117,8 +3084,8 @@ dependencies = [
|
||||
"objc2-core-image",
|
||||
"objc2-core-text",
|
||||
"objc2-core-video",
|
||||
"objc2-foundation 0.3.2",
|
||||
"objc2-quartz-core 0.3.2",
|
||||
"objc2-foundation",
|
||||
"objc2-quartz-core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3128,8 +3095,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "73ad74d880bb43877038da939b7427bba67e9dd42004a18b809ba7d87cee241c"
|
||||
dependencies = [
|
||||
"bitflags 2.10.0",
|
||||
"objc2 0.6.3",
|
||||
"objc2-foundation 0.3.2",
|
||||
"objc2",
|
||||
"objc2-foundation",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3139,8 +3106,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0b402a653efbb5e82ce4df10683b6b28027616a2715e90009947d50b8dd298fa"
|
||||
dependencies = [
|
||||
"bitflags 2.10.0",
|
||||
"objc2 0.6.3",
|
||||
"objc2-foundation 0.3.2",
|
||||
"objc2",
|
||||
"objc2-foundation",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3151,7 +3118,7 @@ checksum = "2a180dd8642fa45cdb7dd721cd4c11b1cadd4929ce112ebd8b9f5803cc79d536"
|
||||
dependencies = [
|
||||
"bitflags 2.10.0",
|
||||
"dispatch2",
|
||||
"objc2 0.6.3",
|
||||
"objc2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3162,7 +3129,7 @@ checksum = "e022c9d066895efa1345f8e33e584b9f958da2fd4cd116792e15e07e4720a807"
|
||||
dependencies = [
|
||||
"bitflags 2.10.0",
|
||||
"dispatch2",
|
||||
"objc2 0.6.3",
|
||||
"objc2",
|
||||
"objc2-core-foundation",
|
||||
"objc2-io-surface",
|
||||
]
|
||||
@@ -3173,8 +3140,8 @@ version = "0.3.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e5d563b38d2b97209f8e861173de434bd0214cf020e3423a52624cd1d989f006"
|
||||
dependencies = [
|
||||
"objc2 0.6.3",
|
||||
"objc2-foundation 0.3.2",
|
||||
"objc2",
|
||||
"objc2-foundation",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3184,7 +3151,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0cde0dfb48d25d2b4862161a4d5fcc0e3c24367869ad306b0c9ec0073bfed92d"
|
||||
dependencies = [
|
||||
"bitflags 2.10.0",
|
||||
"objc2 0.6.3",
|
||||
"objc2",
|
||||
"objc2-core-foundation",
|
||||
"objc2-core-graphics",
|
||||
]
|
||||
@@ -3196,7 +3163,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d425caf1df73233f29fd8a5c3e5edbc30d2d4307870f802d18f00d83dc5141a6"
|
||||
dependencies = [
|
||||
"bitflags 2.10.0",
|
||||
"objc2 0.6.3",
|
||||
"objc2",
|
||||
"objc2-core-foundation",
|
||||
"objc2-core-graphics",
|
||||
"objc2-io-surface",
|
||||
@@ -3217,18 +3184,6 @@ dependencies = [
|
||||
"cc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "objc2-foundation"
|
||||
version = "0.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0ee638a5da3799329310ad4cfa62fbf045d5f56e3ef5ba4149e7452dcf89d5a8"
|
||||
dependencies = [
|
||||
"bitflags 2.10.0",
|
||||
"block2 0.5.1",
|
||||
"libc",
|
||||
"objc2 0.5.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "objc2-foundation"
|
||||
version = "0.3.2"
|
||||
@@ -3236,9 +3191,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e3e0adef53c21f888deb4fa59fc59f7eb17404926ee8a6f59f5df0fd7f9f3272"
|
||||
dependencies = [
|
||||
"bitflags 2.10.0",
|
||||
"block2 0.6.2",
|
||||
"block2",
|
||||
"libc",
|
||||
"objc2 0.6.3",
|
||||
"objc2",
|
||||
"objc2-core-foundation",
|
||||
]
|
||||
|
||||
@@ -3259,7 +3214,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "180788110936d59bab6bd83b6060ffdfffb3b922ba1396b312ae795e1de9d81d"
|
||||
dependencies = [
|
||||
"bitflags 2.10.0",
|
||||
"objc2 0.6.3",
|
||||
"objc2",
|
||||
"objc2-core-foundation",
|
||||
]
|
||||
|
||||
@@ -3269,35 +3224,10 @@ version = "0.3.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2a1e6550c4caed348956ce3370c9ffeca70bb1dbed4fa96112e7c6170e074586"
|
||||
dependencies = [
|
||||
"objc2 0.6.3",
|
||||
"objc2",
|
||||
"objc2-core-foundation",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "objc2-metal"
|
||||
version = "0.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dd0cba1276f6023976a406a14ffa85e1fdd19df6b0f737b063b95f6c8c7aadd6"
|
||||
dependencies = [
|
||||
"bitflags 2.10.0",
|
||||
"block2 0.5.1",
|
||||
"objc2 0.5.2",
|
||||
"objc2-foundation 0.2.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "objc2-quartz-core"
|
||||
version = "0.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e42bee7bff906b14b167da2bac5efe6b6a07e6f7c0a21a7308d40c960242dc7a"
|
||||
dependencies = [
|
||||
"bitflags 2.10.0",
|
||||
"block2 0.5.1",
|
||||
"objc2 0.5.2",
|
||||
"objc2-foundation 0.2.2",
|
||||
"objc2-metal",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "objc2-quartz-core"
|
||||
version = "0.3.2"
|
||||
@@ -3305,8 +3235,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "96c1358452b371bf9f104e21ec536d37a650eb10f7ee379fff67d2e08d537f1f"
|
||||
dependencies = [
|
||||
"bitflags 2.10.0",
|
||||
"objc2 0.6.3",
|
||||
"objc2-foundation 0.3.2",
|
||||
"objc2",
|
||||
"objc2-core-foundation",
|
||||
"objc2-foundation",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3316,7 +3247,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "709fe137109bd1e8b5a99390f77a7d8b2961dafc1a1c5db8f2e60329ad6d895a"
|
||||
dependencies = [
|
||||
"bitflags 2.10.0",
|
||||
"objc2 0.6.3",
|
||||
"objc2",
|
||||
"objc2-core-foundation",
|
||||
]
|
||||
|
||||
@@ -3327,9 +3258,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d87d638e33c06f577498cbcc50491496a3ed4246998a7fbba7ccb98b1e7eab22"
|
||||
dependencies = [
|
||||
"bitflags 2.10.0",
|
||||
"objc2 0.6.3",
|
||||
"objc2",
|
||||
"objc2-core-foundation",
|
||||
"objc2-foundation 0.3.2",
|
||||
"objc2-foundation",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3339,11 +3270,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b2e5aaab980c433cf470df9d7af96a7b46a9d892d521a2cbbb2f8a4c16751e7f"
|
||||
dependencies = [
|
||||
"bitflags 2.10.0",
|
||||
"block2 0.6.2",
|
||||
"objc2 0.6.3",
|
||||
"block2",
|
||||
"objc2",
|
||||
"objc2-app-kit",
|
||||
"objc2-core-foundation",
|
||||
"objc2-foundation 0.3.2",
|
||||
"objc2-foundation",
|
||||
"objc2-javascript-core",
|
||||
"objc2-security",
|
||||
]
|
||||
@@ -3507,7 +3438,7 @@ checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"redox_syscall",
|
||||
"redox_syscall 0.5.18",
|
||||
"smallvec",
|
||||
"windows-link 0.2.1",
|
||||
]
|
||||
@@ -3768,9 +3699,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "portable-atomic"
|
||||
version = "1.11.1"
|
||||
version = "1.12.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483"
|
||||
checksum = "f59e70c4aef1e55797c2e8fd94a4f2a973fc972cfde0e0b05f683667b0cd39dd"
|
||||
|
||||
[[package]]
|
||||
name = "portable-atomic-util"
|
||||
@@ -4069,6 +4000,15 @@ dependencies = [
|
||||
"bitflags 2.10.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redox_syscall"
|
||||
version = "0.6.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ec96166dafa0886eb81fe1c0a388bece180fbef2135f97c1e2cf8302e74b43b5"
|
||||
dependencies = [
|
||||
"bitflags 2.10.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redox_users"
|
||||
version = "0.5.2"
|
||||
@@ -4140,9 +4080,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "reqwest"
|
||||
version = "0.12.24"
|
||||
version = "0.12.26"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f"
|
||||
checksum = "3b4c14b2d9afca6a60277086b0cc6a6ae0b568f6f7916c943a8cdc79f8be240f"
|
||||
dependencies = [
|
||||
"base64 0.22.1",
|
||||
"bytes",
|
||||
@@ -4188,17 +4128,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ef2bee61e6cffa4635c72d7d81a84294e28f0930db0ddcb0f66d10244674ebed"
|
||||
dependencies = [
|
||||
"ashpd",
|
||||
"block2 0.6.2",
|
||||
"block2",
|
||||
"dispatch2",
|
||||
"glib-sys",
|
||||
"gobject-sys",
|
||||
"gtk-sys",
|
||||
"js-sys",
|
||||
"log",
|
||||
"objc2 0.6.3",
|
||||
"objc2",
|
||||
"objc2-app-kit",
|
||||
"objc2-core-foundation",
|
||||
"objc2-foundation 0.3.2",
|
||||
"objc2-foundation",
|
||||
"raw-window-handle",
|
||||
"wasm-bindgen",
|
||||
"wasm-bindgen-futures",
|
||||
@@ -4312,9 +4252,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "rustls-pki-types"
|
||||
version = "1.13.1"
|
||||
version = "1.13.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "708c0f9d5f54ba0272468c1d306a52c495b31fa155e91bc25371e6df7996908c"
|
||||
checksum = "21e6f2ab2928ca4291b86736a8bd920a277a399bba1589409d72154ff87c1282"
|
||||
dependencies = [
|
||||
"zeroize",
|
||||
]
|
||||
@@ -4772,9 +4712,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "simd-adler32"
|
||||
version = "0.3.7"
|
||||
version = "0.3.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe"
|
||||
checksum = "e320a6c5ad31d271ad523dcf3ad13e2767ad8b1cb8f047f75a8aeaf8da139da2"
|
||||
|
||||
[[package]]
|
||||
name = "simdutf8"
|
||||
@@ -4818,24 +4758,24 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "softbuffer"
|
||||
version = "0.4.6"
|
||||
version = "0.4.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "18051cdd562e792cad055119e0cdb2cfc137e44e3987532e0f9659a77931bb08"
|
||||
checksum = "aac18da81ebbf05109ab275b157c22a653bb3c12cf884450179942f81bcbf6c3"
|
||||
dependencies = [
|
||||
"bytemuck",
|
||||
"cfg_aliases",
|
||||
"core-graphics",
|
||||
"foreign-types 0.5.0",
|
||||
"js-sys",
|
||||
"log",
|
||||
"objc2 0.5.2",
|
||||
"objc2-foundation 0.2.2",
|
||||
"objc2-quartz-core 0.2.2",
|
||||
"ndk",
|
||||
"objc2",
|
||||
"objc2-core-foundation",
|
||||
"objc2-core-graphics",
|
||||
"objc2-foundation",
|
||||
"objc2-quartz-core",
|
||||
"raw-window-handle",
|
||||
"redox_syscall",
|
||||
"redox_syscall 0.5.18",
|
||||
"tracing",
|
||||
"wasm-bindgen",
|
||||
"web-sys",
|
||||
"windows-sys 0.59.0",
|
||||
"windows-sys 0.61.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -5021,7 +4961,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f3a753bdc39c07b192151523a3f77cd0394aa75413802c883a0f6f6a0e5ee2e7"
|
||||
dependencies = [
|
||||
"bitflags 2.10.0",
|
||||
"block2 0.6.2",
|
||||
"block2",
|
||||
"core-foundation 0.10.1",
|
||||
"core-graphics",
|
||||
"crossbeam-channel",
|
||||
@@ -5038,9 +4978,9 @@ dependencies = [
|
||||
"ndk",
|
||||
"ndk-context",
|
||||
"ndk-sys",
|
||||
"objc2 0.6.3",
|
||||
"objc2",
|
||||
"objc2-app-kit",
|
||||
"objc2-foundation 0.3.2",
|
||||
"objc2-foundation",
|
||||
"once_cell",
|
||||
"parking_lot",
|
||||
"raw-window-handle",
|
||||
@@ -5090,9 +5030,9 @@ checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1"
|
||||
|
||||
[[package]]
|
||||
name = "tauri"
|
||||
version = "2.9.3"
|
||||
version = "2.9.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9e492485dd390b35f7497401f67694f46161a2a00ffd800938d5dd3c898fb9d8"
|
||||
checksum = "8a3868da5508446a7cd08956d523ac3edf0a8bc20bf7e4038f9a95c2800d2033"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"bytes",
|
||||
@@ -5110,9 +5050,9 @@ dependencies = [
|
||||
"log",
|
||||
"mime",
|
||||
"muda",
|
||||
"objc2 0.6.3",
|
||||
"objc2",
|
||||
"objc2-app-kit",
|
||||
"objc2-foundation 0.3.2",
|
||||
"objc2-foundation",
|
||||
"objc2-ui-kit",
|
||||
"objc2-web-kit",
|
||||
"percent-encoding",
|
||||
@@ -5141,9 +5081,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tauri-build"
|
||||
version = "2.5.2"
|
||||
version = "2.5.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "87d6f8cafe6a75514ce5333f115b7b1866e8e68d9672bf4ca89fc0f35697ea9d"
|
||||
checksum = "17fcb8819fd16463512a12f531d44826ce566f486d7ccd211c9c8cebdaec4e08"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"cargo_toml",
|
||||
@@ -5163,9 +5103,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tauri-codegen"
|
||||
version = "2.5.1"
|
||||
version = "2.5.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b7ef707148f0755110ca54377560ab891d722de4d53297595380a748026f139f"
|
||||
checksum = "9fa9844cefcf99554a16e0a278156ae73b0d8680bbc0e2ad1e4287aadd8489cf"
|
||||
dependencies = [
|
||||
"base64 0.22.1",
|
||||
"brotli",
|
||||
@@ -5190,9 +5130,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tauri-macros"
|
||||
version = "2.5.1"
|
||||
version = "2.5.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "71664fd715ee6e382c05345ad258d6d1d50f90cf1b58c0aa726638b33c2a075d"
|
||||
checksum = "3764a12f886d8245e66b7ee9b43ccc47883399be2019a61d80cf0f4117446fde"
|
||||
dependencies = [
|
||||
"heck 0.5.0",
|
||||
"proc-macro2",
|
||||
@@ -5204,9 +5144,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tauri-plugin"
|
||||
version = "2.5.1"
|
||||
version = "2.5.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "076c78a474a7247c90cad0b6e87e593c4c620ed4efdb79cbe0214f0021f6c39d"
|
||||
checksum = "0e1d0a4860b7ff570c891e1d2a586bf1ede205ff858fbc305e0b5ae5d14c1377"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"glob",
|
||||
@@ -5290,8 +5230,8 @@ dependencies = [
|
||||
"byte-unit",
|
||||
"fern",
|
||||
"log",
|
||||
"objc2 0.6.3",
|
||||
"objc2-foundation 0.3.2",
|
||||
"objc2",
|
||||
"objc2-foundation",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_repr",
|
||||
@@ -5309,8 +5249,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5607e0707d37d7b20e287cf0ce396d1efebe7b833b8e9cbd2ea4257091d9c604"
|
||||
dependencies = [
|
||||
"macos-accessibility-client",
|
||||
"objc2 0.6.3",
|
||||
"objc2-foundation 0.3.2",
|
||||
"objc2",
|
||||
"objc2-foundation",
|
||||
"serde",
|
||||
"tauri",
|
||||
"tauri-plugin",
|
||||
@@ -5326,7 +5266,7 @@ dependencies = [
|
||||
"dunce",
|
||||
"glob",
|
||||
"objc2-app-kit",
|
||||
"objc2-foundation 0.3.2",
|
||||
"objc2-foundation",
|
||||
"open",
|
||||
"schemars 0.8.22",
|
||||
"serde",
|
||||
@@ -5378,16 +5318,16 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tauri-runtime"
|
||||
version = "2.9.1"
|
||||
version = "2.9.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9368f09358496f2229313fccb37682ad116b7f46fa76981efe116994a0628926"
|
||||
checksum = "87f766fe9f3d1efc4b59b17e7a891ad5ed195fa8d23582abb02e6c9a01137892"
|
||||
dependencies = [
|
||||
"cookie",
|
||||
"dpi",
|
||||
"gtk",
|
||||
"http",
|
||||
"jni",
|
||||
"objc2 0.6.3",
|
||||
"objc2",
|
||||
"objc2-ui-kit",
|
||||
"objc2-web-kit",
|
||||
"raw-window-handle",
|
||||
@@ -5403,17 +5343,17 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tauri-runtime-wry"
|
||||
version = "2.9.1"
|
||||
version = "2.9.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "929f5df216f5c02a9e894554401bcdab6eec3e39ec6a4a7731c7067fc8688a93"
|
||||
checksum = "187a3f26f681bdf028f796ccf57cf478c1ee422c50128e5a0a6ebeb3f5910065"
|
||||
dependencies = [
|
||||
"gtk",
|
||||
"http",
|
||||
"jni",
|
||||
"log",
|
||||
"objc2 0.6.3",
|
||||
"objc2",
|
||||
"objc2-app-kit",
|
||||
"objc2-foundation 0.3.2",
|
||||
"objc2-foundation",
|
||||
"once_cell",
|
||||
"percent-encoding",
|
||||
"raw-window-handle",
|
||||
@@ -5430,9 +5370,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tauri-utils"
|
||||
version = "2.8.0"
|
||||
version = "2.8.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f6b8bbe426abdbf52d050e52ed693130dbd68375b9ad82a3fb17efb4c8d85673"
|
||||
checksum = "76a423c51176eb3616ee9b516a9fa67fed5f0e78baaba680e44eb5dd2cc37490"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"brotli",
|
||||
@@ -5753,18 +5693,18 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "toml_parser"
|
||||
version = "1.0.4"
|
||||
version = "1.0.6+spec-1.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c0cbe268d35bdb4bb5a56a2de88d0ad0eb70af5384a99d648cd4b3d04039800e"
|
||||
checksum = "a3198b4b0a8e11f09dd03e133c0280504d0801269e9afa46362ffde1cbeebf44"
|
||||
dependencies = [
|
||||
"winnow 0.7.13",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "toml_writer"
|
||||
version = "1.0.4"
|
||||
version = "1.0.6+spec-1.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "df8b2b54733674ad286d16267dcfc7a71ed5c776e4ac7aa3c3e2561f7c637bf2"
|
||||
checksum = "ab16f14aed21ee8bfd8ec22513f7287cd4a91aa92e44edfe2c17ddd004e92607"
|
||||
|
||||
[[package]]
|
||||
name = "tower"
|
||||
@@ -5784,9 +5724,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tower-http"
|
||||
version = "0.6.7"
|
||||
version = "0.6.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9cf146f99d442e8e68e585f5d798ccd3cad9a7835b917e09728880a862706456"
|
||||
checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8"
|
||||
dependencies = [
|
||||
"bitflags 2.10.0",
|
||||
"bytes",
|
||||
@@ -5824,9 +5764,9 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3"
|
||||
|
||||
[[package]]
|
||||
name = "tracing"
|
||||
version = "0.1.43"
|
||||
version = "0.1.44"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2d15d90a0b5c19378952d479dc858407149d7bb45a14de0142f6c534b16fc647"
|
||||
checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100"
|
||||
dependencies = [
|
||||
"log",
|
||||
"pin-project-lite",
|
||||
@@ -5847,9 +5787,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "tracing-core"
|
||||
version = "0.1.35"
|
||||
version = "0.1.36"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7a04e24fab5c89c6a36eb8558c9656f30d81de51dfa4d3b45f26b21d61fa0a6c"
|
||||
checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a"
|
||||
dependencies = [
|
||||
"once_cell",
|
||||
]
|
||||
@@ -5864,11 +5804,11 @@ dependencies = [
|
||||
"dirs",
|
||||
"libappindicator",
|
||||
"muda",
|
||||
"objc2 0.6.3",
|
||||
"objc2",
|
||||
"objc2-app-kit",
|
||||
"objc2-core-foundation",
|
||||
"objc2-core-graphics",
|
||||
"objc2-foundation 0.3.2",
|
||||
"objc2-foundation",
|
||||
"once_cell",
|
||||
"png",
|
||||
"serde",
|
||||
@@ -6073,13 +6013,13 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "uuid"
|
||||
version = "1.18.1"
|
||||
version = "1.19.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2f87b8aa10b915a06587d0dec516c282ff295b475d94abf425d62b57710070a2"
|
||||
checksum = "e2e054861b4bd027cd373e18e8d8d8e6548085000e41290d95ce0c373a654b4a"
|
||||
dependencies = [
|
||||
"getrandom 0.3.4",
|
||||
"js-sys",
|
||||
"serde",
|
||||
"serde_core",
|
||||
"wasm-bindgen",
|
||||
]
|
||||
|
||||
@@ -6425,10 +6365,10 @@ version = "0.6.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d9bec5a31f3f9362f2258fd0e9c9dd61a9ca432e7306cc78c444258f0dce9a9c"
|
||||
dependencies = [
|
||||
"objc2 0.6.3",
|
||||
"objc2",
|
||||
"objc2-app-kit",
|
||||
"objc2-core-foundation",
|
||||
"objc2-foundation 0.3.2",
|
||||
"objc2-foundation",
|
||||
"raw-window-handle",
|
||||
"windows-sys 0.59.0",
|
||||
"windows-version",
|
||||
@@ -6954,7 +6894,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "728b7d4c8ec8d81cab295e0b5b8a4c263c0d41a785fb8f8c4df284e5411140a2"
|
||||
dependencies = [
|
||||
"base64 0.22.1",
|
||||
"block2 0.6.2",
|
||||
"block2",
|
||||
"cookie",
|
||||
"crossbeam-channel",
|
||||
"dirs",
|
||||
@@ -6969,10 +6909,10 @@ dependencies = [
|
||||
"kuchikiki",
|
||||
"libc",
|
||||
"ndk",
|
||||
"objc2 0.6.3",
|
||||
"objc2",
|
||||
"objc2-app-kit",
|
||||
"objc2-core-foundation",
|
||||
"objc2-foundation 0.3.2",
|
||||
"objc2-foundation",
|
||||
"objc2-ui-kit",
|
||||
"objc2-web-kit",
|
||||
"once_cell",
|
||||
@@ -7119,18 +7059,18 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "zerocopy"
|
||||
version = "0.8.30"
|
||||
version = "0.8.31"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4ea879c944afe8a2b25fef16bb4ba234f47c694565e97383b36f3a878219065c"
|
||||
checksum = "fd74ec98b9250adb3ca554bdde269adf631549f51d8a8f8f0a10b50f1cb298c3"
|
||||
dependencies = [
|
||||
"zerocopy-derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zerocopy-derive"
|
||||
version = "0.8.30"
|
||||
version = "0.8.31"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cf955aa904d6040f70dc8e9384444cb1030aed272ba3cb09bbc4ab9e7c1f34f5"
|
||||
checksum = "d8a8d209fdf45cf5138cbb5a506f6b52522a25afccc534d1475dad8e31105c6a"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -7213,9 +7153,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "zip"
|
||||
version = "6.0.0"
|
||||
version = "7.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "eb2a05c7c36fde6c09b08576c9f7fb4cda705990f73b58fe011abf7dfb24168b"
|
||||
checksum = "bdd8a47718a4ee5fe78e07667cd36f3de80e7c2bfe727c7074245ffc7303c037"
|
||||
dependencies = [
|
||||
"aes",
|
||||
"arbitrary",
|
||||
@@ -7224,6 +7164,7 @@ dependencies = [
|
||||
"crc32fast",
|
||||
"deflate64",
|
||||
"flate2",
|
||||
"generic-array",
|
||||
"getrandom 0.3.4",
|
||||
"hmac",
|
||||
"indexmap 2.12.0",
|
||||
@@ -7240,9 +7181,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "zlib-rs"
|
||||
version = "0.5.2"
|
||||
version = "0.5.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2f06ae92f42f5e5c42443fd094f245eb656abf56dd7cce9b8b263236565e00f2"
|
||||
checksum = "51f936044d677be1a1168fae1d03b583a285a5dd9d8cbf7b24c23aa1fc775235"
|
||||
|
||||
[[package]]
|
||||
name = "zopfli"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "donutbrowser"
|
||||
version = "0.13.3"
|
||||
version = "0.13.8"
|
||||
description = "Simple Yet Powerful Anti-Detect Browser"
|
||||
authors = ["zhom@github"]
|
||||
edition = "2021"
|
||||
@@ -50,14 +50,14 @@ base64 = "0.22"
|
||||
libc = "0.2"
|
||||
async-trait = "0.1"
|
||||
futures-util = "0.3"
|
||||
zip = "6"
|
||||
zip = "7"
|
||||
tar = "0"
|
||||
bzip2 = "0"
|
||||
flate2 = "1"
|
||||
lzma-rs = "0"
|
||||
msi-extract = "0"
|
||||
|
||||
uuid = { version = "1.18", features = ["v4", "serde"] }
|
||||
uuid = { version = "1.19", features = ["v4", "serde"] }
|
||||
url = "2.5"
|
||||
urlencoding = "2.1"
|
||||
chrono = { version = "0.4", features = ["serde"] }
|
||||
@@ -91,6 +91,7 @@ windows = { version = "0.62", features = [
|
||||
"Win32_System_Threading",
|
||||
"Win32_System_Diagnostics_Debug",
|
||||
"Win32_System_SystemInformation",
|
||||
"Win32_System_IO",
|
||||
"Win32_Security",
|
||||
"Win32_Storage_FileSystem",
|
||||
"Win32_System_Registry",
|
||||
|
||||
@@ -42,6 +42,10 @@ fn main() {
|
||||
println!("cargo:rerun-if-changed=src/proxy_runner.rs");
|
||||
println!("cargo:rerun-if-changed=src/proxy_storage.rs");
|
||||
|
||||
// Tell Cargo to rebuild when binaries directory contents change
|
||||
// This ensures tauri_build is re-run after sidecar binaries are copied
|
||||
println!("cargo:rerun-if-changed=binaries");
|
||||
|
||||
// Only run tauri_build if all external binaries exist
|
||||
// This allows building donut-proxy sidecar without the other binaries present
|
||||
if external_binaries_exist() {
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
"identifier": "default",
|
||||
"description": "enables the default permissions",
|
||||
"windows": ["main"],
|
||||
"webviews": ["main"],
|
||||
"permissions": [
|
||||
"core:default",
|
||||
"core:event:allow-listen",
|
||||
|
||||
+36
-19
@@ -836,11 +836,11 @@ impl ApiClient {
|
||||
};
|
||||
|
||||
// Look for assets matching the pattern: camoufox-{version}-{release}-{os}.{arch}.zip
|
||||
// Use ends_with for precise matching to avoid false positives
|
||||
let pattern = format!(".{os_name}.{arch_name}.zip");
|
||||
assets.iter().any(|asset| {
|
||||
let name = asset.name.to_lowercase();
|
||||
name.starts_with("camoufox-")
|
||||
&& name.contains(&format!("-{os_name}.{arch_name}.zip"))
|
||||
&& name.ends_with(".zip")
|
||||
name.starts_with("camoufox-") && name.ends_with(&pattern)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -900,13 +900,20 @@ impl ApiClient {
|
||||
pub async fn fetch_chromium_latest_version(
|
||||
&self,
|
||||
) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Use architecture-aware URL for Chromium
|
||||
let arch = if cfg!(target_arch = "aarch64") {
|
||||
"Mac_Arm"
|
||||
} else {
|
||||
"Mac"
|
||||
// Use platform-aware URL for Chromium to match download URL generation
|
||||
let (os, arch) = Self::get_platform_info();
|
||||
let platform_str = match (&os[..], &arch[..]) {
|
||||
("windows", "x64") => "Win_x64",
|
||||
("windows", "arm64") => "Win_Arm64",
|
||||
("linux", "x64") => "Linux_x64",
|
||||
("linux", "arm64") => return Err("Chromium doesn't support ARM64 on Linux".into()),
|
||||
("macos", "x64") => "Mac",
|
||||
("macos", "arm64") => "Mac_Arm",
|
||||
_ => {
|
||||
return Err(format!("Unsupported platform/architecture for Chromium: {os}/{arch}").into())
|
||||
}
|
||||
};
|
||||
let url = format!("{}/{arch}/LAST_CHANGE", self.chromium_api_base);
|
||||
let url = format!("{}/{platform_str}/LAST_CHANGE", self.chromium_api_base);
|
||||
let version = self
|
||||
.client
|
||||
.get(&url)
|
||||
@@ -1480,14 +1487,19 @@ mod tests {
|
||||
let server = setup_mock_server().await;
|
||||
let client = create_test_client(&server);
|
||||
|
||||
let arch = if cfg!(target_arch = "aarch64") {
|
||||
"Mac_Arm"
|
||||
} else {
|
||||
"Mac"
|
||||
let (os, arch) = ApiClient::get_platform_info();
|
||||
let platform_str = match (&os[..], &arch[..]) {
|
||||
("windows", "x64") => "Win_x64",
|
||||
("windows", "arm64") => "Win_Arm64",
|
||||
("linux", "x64") => "Linux_x64",
|
||||
("linux", "arm64") => return,
|
||||
("macos", "x64") => "Mac",
|
||||
("macos", "arm64") => "Mac_Arm",
|
||||
_ => return,
|
||||
};
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path(format!("/{arch}/LAST_CHANGE")))
|
||||
.and(path(format!("/{platform_str}/LAST_CHANGE")))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string("1465660")
|
||||
@@ -1508,14 +1520,19 @@ mod tests {
|
||||
let server = setup_mock_server().await;
|
||||
let client = create_test_client(&server);
|
||||
|
||||
let arch = if cfg!(target_arch = "aarch64") {
|
||||
"Mac_Arm"
|
||||
} else {
|
||||
"Mac"
|
||||
let (os, arch) = ApiClient::get_platform_info();
|
||||
let platform_str = match (&os[..], &arch[..]) {
|
||||
("windows", "x64") => "Win_x64",
|
||||
("windows", "arm64") => "Win_Arm64",
|
||||
("linux", "x64") => "Linux_x64",
|
||||
("linux", "arm64") => return,
|
||||
("macos", "x64") => "Mac",
|
||||
("macos", "arm64") => "Mac_Arm",
|
||||
_ => return,
|
||||
};
|
||||
|
||||
Mock::given(method("GET"))
|
||||
.and(path(format!("/{arch}/LAST_CHANGE")))
|
||||
.and(path(format!("/{platform_str}/LAST_CHANGE")))
|
||||
.respond_with(
|
||||
ResponseTemplate::new(200)
|
||||
.set_body_string("1465660")
|
||||
|
||||
@@ -784,6 +784,20 @@ impl AppAutoUpdater {
|
||||
) -> Result<PathBuf, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let file_path = dest_dir.join(filename);
|
||||
|
||||
// First, try to get the file size via HEAD request
|
||||
// This is more reliable than GET content-length for some CDN configurations
|
||||
// especially when dealing with redirects (like GitHub releases)
|
||||
let head_size = self
|
||||
.client
|
||||
.head(download_url)
|
||||
.header("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36")
|
||||
.send()
|
||||
.await
|
||||
.ok()
|
||||
.and_then(|r| r.content_length());
|
||||
|
||||
log::info!("HEAD request for download size: {:?} bytes", head_size);
|
||||
|
||||
let response = self
|
||||
.client
|
||||
.get(download_url)
|
||||
@@ -795,7 +809,9 @@ impl AppAutoUpdater {
|
||||
return Err(format!("Download failed with status: {}", response.status()).into());
|
||||
}
|
||||
|
||||
let total_size = response.content_length().unwrap_or(0);
|
||||
// Use HEAD size if available, otherwise fall back to GET content-length
|
||||
let total_size = head_size.or(response.content_length()).unwrap_or(0);
|
||||
log::info!("Final download size: {} bytes", total_size);
|
||||
let mut file = fs::File::create(&file_path)?;
|
||||
let mut stream = response.bytes_stream();
|
||||
let mut downloaded = 0u64;
|
||||
|
||||
+105
-4
@@ -79,10 +79,10 @@ mod macos {
|
||||
executable_dir.push("Contents");
|
||||
executable_dir.push("MacOS");
|
||||
|
||||
// Find the first executable in the MacOS directory
|
||||
let executable_path = std::fs::read_dir(&executable_dir)?
|
||||
// Find executables matching the browser name pattern
|
||||
let candidates: Vec<_> = std::fs::read_dir(&executable_dir)?
|
||||
.filter_map(Result::ok)
|
||||
.find(|entry| {
|
||||
.filter(|entry| {
|
||||
let binding = entry.file_name();
|
||||
let name = binding.to_string_lossy();
|
||||
name.starts_with("firefox")
|
||||
@@ -91,7 +91,108 @@ mod macos {
|
||||
|| name.contains("Browser")
|
||||
})
|
||||
.map(|entry| entry.path())
|
||||
.ok_or("No executable found in MacOS directory")?;
|
||||
.collect();
|
||||
|
||||
if candidates.is_empty() {
|
||||
return Err("No executable found in MacOS directory".into());
|
||||
}
|
||||
|
||||
// For Camoufox, validate architecture compatibility
|
||||
let executable_path = if candidates.iter().any(|p| {
|
||||
p.file_name()
|
||||
.and_then(|n| n.to_str())
|
||||
.map(|n| n.starts_with("camoufox"))
|
||||
.unwrap_or(false)
|
||||
}) {
|
||||
// Find the executable that matches the current architecture
|
||||
let current_arch = if cfg!(target_arch = "x86_64") {
|
||||
"x86_64"
|
||||
} else if cfg!(target_arch = "aarch64") {
|
||||
"arm64"
|
||||
} else {
|
||||
return Err("Unsupported architecture".into());
|
||||
};
|
||||
|
||||
// Try to find an executable that matches the current architecture
|
||||
// Use file command to check architecture
|
||||
let mut found_executable = None;
|
||||
let mut file_command_available = true;
|
||||
|
||||
for candidate in &candidates {
|
||||
match std::process::Command::new("file").arg(candidate).output() {
|
||||
Ok(output) => {
|
||||
if output.status.success() {
|
||||
if let Ok(output_str) = String::from_utf8(output.stdout) {
|
||||
let is_compatible = if current_arch == "x86_64" {
|
||||
output_str.contains("x86_64") || output_str.contains("i386")
|
||||
} else {
|
||||
output_str.contains("arm64") || output_str.contains("aarch64")
|
||||
};
|
||||
|
||||
if is_compatible {
|
||||
found_executable = Some(candidate.clone());
|
||||
log::info!(
|
||||
"Found compatible Camoufox executable for {}: {}",
|
||||
current_arch,
|
||||
candidate.display()
|
||||
);
|
||||
break;
|
||||
} else {
|
||||
log::warn!(
|
||||
"Skipping incompatible Camoufox executable: {} (architecture: {})",
|
||||
candidate.display(),
|
||||
output_str.trim()
|
||||
);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log::warn!(
|
||||
"Failed to check architecture for {}: file command returned non-zero exit code",
|
||||
candidate.display()
|
||||
);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
log::warn!(
|
||||
"Failed to check architecture for {} using file command: {}",
|
||||
candidate.display(),
|
||||
e
|
||||
);
|
||||
file_command_available = false;
|
||||
// Continue checking other candidates
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If no compatible executable found but we have candidates, use the first one
|
||||
// (fallback for cases where file command isn't available or failed)
|
||||
if found_executable.is_none() && !candidates.is_empty() {
|
||||
if !file_command_available {
|
||||
log::warn!(
|
||||
"file command not available, using first candidate: {}",
|
||||
candidates[0].display()
|
||||
);
|
||||
} else {
|
||||
log::warn!(
|
||||
"No compatible executable found for architecture {}, using first candidate: {}",
|
||||
current_arch,
|
||||
candidates[0].display()
|
||||
);
|
||||
}
|
||||
found_executable = Some(candidates[0].clone());
|
||||
}
|
||||
|
||||
found_executable.ok_or_else(|| {
|
||||
format!(
|
||||
"No compatible Camoufox executable found for architecture {}. Available executables: {:?}",
|
||||
current_arch,
|
||||
candidates
|
||||
)
|
||||
})?
|
||||
} else {
|
||||
// For other browsers, use the first matching executable
|
||||
candidates[0].clone()
|
||||
};
|
||||
|
||||
Ok(executable_path)
|
||||
}
|
||||
|
||||
+206
-14
@@ -985,9 +985,9 @@ impl BrowserRunner {
|
||||
.await
|
||||
{
|
||||
Ok(stopped) => {
|
||||
if stopped {
|
||||
// Verify the process actually died by checking after a short delay
|
||||
if let Some(pid) = camoufox_process.processId {
|
||||
if let Some(pid) = camoufox_process.processId {
|
||||
if stopped {
|
||||
// Verify the process actually died by checking after a short delay
|
||||
use tokio::time::{sleep, Duration};
|
||||
sleep(Duration::from_millis(500)).await;
|
||||
|
||||
@@ -1019,7 +1019,20 @@ impl BrowserRunner {
|
||||
{
|
||||
log::error!("Failed to force kill Camoufox process {}: {}", pid, e);
|
||||
} else {
|
||||
process_actually_stopped = true;
|
||||
// Verify the process is actually dead after force kill
|
||||
use tokio::time::{sleep, Duration};
|
||||
sleep(Duration::from_millis(500)).await;
|
||||
use sysinfo::{Pid, System};
|
||||
let system = System::new_all();
|
||||
process_actually_stopped =
|
||||
system.process(Pid::from(pid as usize)).is_none();
|
||||
if process_actually_stopped {
|
||||
log::info!(
|
||||
"Successfully force killed Camoufox process {} (PID: {:?})",
|
||||
camoufox_process.id,
|
||||
pid
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
#[cfg(target_os = "linux")]
|
||||
@@ -1029,7 +1042,20 @@ impl BrowserRunner {
|
||||
{
|
||||
log::error!("Failed to force kill Camoufox process {}: {}", pid, e);
|
||||
} else {
|
||||
process_actually_stopped = true;
|
||||
// Verify the process is actually dead after force kill
|
||||
use tokio::time::{sleep, Duration};
|
||||
sleep(Duration::from_millis(500)).await;
|
||||
use sysinfo::{Pid, System};
|
||||
let system = System::new_all();
|
||||
process_actually_stopped =
|
||||
system.process(Pid::from(pid as usize)).is_none();
|
||||
if process_actually_stopped {
|
||||
log::info!(
|
||||
"Successfully force killed Camoufox process {} (PID: {:?})",
|
||||
camoufox_process.id,
|
||||
pid
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
#[cfg(target_os = "windows")]
|
||||
@@ -1040,19 +1066,109 @@ impl BrowserRunner {
|
||||
{
|
||||
log::error!("Failed to force kill Camoufox process {}: {}", pid, e);
|
||||
} else {
|
||||
process_actually_stopped = true;
|
||||
// Verify the process is actually dead after force kill
|
||||
use tokio::time::{sleep, Duration};
|
||||
sleep(Duration::from_millis(500)).await;
|
||||
use sysinfo::{Pid, System};
|
||||
let system = System::new_all();
|
||||
process_actually_stopped =
|
||||
system.process(Pid::from(pid as usize)).is_none();
|
||||
if process_actually_stopped {
|
||||
log::info!(
|
||||
"Successfully force killed Camoufox process {} (PID: {:?})",
|
||||
camoufox_process.id,
|
||||
pid
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
process_actually_stopped = true; // No PID to verify, assume stopped
|
||||
// stop_camoufox returned false, try to force kill the process
|
||||
log::warn!(
|
||||
"Camoufox stop command returned false for process {} (PID: {:?}) - attempting force kill",
|
||||
camoufox_process.id,
|
||||
pid
|
||||
);
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
use crate::platform_browser;
|
||||
if let Err(e) = platform_browser::macos::kill_browser_process_impl(
|
||||
pid,
|
||||
Some(&profile_path_str),
|
||||
)
|
||||
.await
|
||||
{
|
||||
log::error!("Failed to force kill Camoufox process {}: {}", pid, e);
|
||||
} else {
|
||||
// Verify the process is actually dead after force kill
|
||||
use tokio::time::{sleep, Duration};
|
||||
sleep(Duration::from_millis(500)).await;
|
||||
use sysinfo::{Pid, System};
|
||||
let system = System::new_all();
|
||||
process_actually_stopped = system.process(Pid::from(pid as usize)).is_none();
|
||||
if process_actually_stopped {
|
||||
log::info!(
|
||||
"Successfully force killed Camoufox process {} (PID: {:?})",
|
||||
camoufox_process.id,
|
||||
pid
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
use crate::platform_browser;
|
||||
if let Err(e) = platform_browser::linux::kill_browser_process_impl(pid).await {
|
||||
log::error!("Failed to force kill Camoufox process {}: {}", pid, e);
|
||||
} else {
|
||||
// Verify the process is actually dead after force kill
|
||||
use tokio::time::{sleep, Duration};
|
||||
sleep(Duration::from_millis(500)).await;
|
||||
use sysinfo::{Pid, System};
|
||||
let system = System::new_all();
|
||||
process_actually_stopped = system.process(Pid::from(pid as usize)).is_none();
|
||||
if process_actually_stopped {
|
||||
log::info!(
|
||||
"Successfully force killed Camoufox process {} (PID: {:?})",
|
||||
camoufox_process.id,
|
||||
pid
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
use crate::platform_browser;
|
||||
if let Err(e) = platform_browser::windows::kill_browser_process_impl(pid).await
|
||||
{
|
||||
log::error!("Failed to force kill Camoufox process {}: {}", pid, e);
|
||||
} else {
|
||||
// Verify the process is actually dead after force kill
|
||||
use tokio::time::{sleep, Duration};
|
||||
sleep(Duration::from_millis(500)).await;
|
||||
use sysinfo::{Pid, System};
|
||||
let system = System::new_all();
|
||||
process_actually_stopped = system.process(Pid::from(pid as usize)).is_none();
|
||||
if process_actually_stopped {
|
||||
log::info!(
|
||||
"Successfully force killed Camoufox process {} (PID: {:?})",
|
||||
camoufox_process.id,
|
||||
pid
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log::warn!(
|
||||
"Failed to stop Camoufox process: {} (PID: {:?})",
|
||||
camoufox_process.id,
|
||||
camoufox_process.processId
|
||||
);
|
||||
// No PID available, assume stopped if stop_camoufox returned true
|
||||
process_actually_stopped = stopped;
|
||||
if !stopped {
|
||||
log::warn!(
|
||||
"Failed to stop Camoufox process {} but no PID available for force kill",
|
||||
camoufox_process.id
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
@@ -1061,6 +1177,71 @@ impl BrowserRunner {
|
||||
camoufox_process.id,
|
||||
e
|
||||
);
|
||||
// Try to force kill if we have a PID
|
||||
if let Some(pid) = camoufox_process.processId {
|
||||
log::info!(
|
||||
"Attempting force kill after stop_camoufox error for PID: {}",
|
||||
pid
|
||||
);
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
use crate::platform_browser;
|
||||
if let Err(kill_err) =
|
||||
platform_browser::macos::kill_browser_process_impl(pid, Some(&profile_path_str))
|
||||
.await
|
||||
{
|
||||
log::error!(
|
||||
"Failed to force kill Camoufox process {}: {}",
|
||||
pid,
|
||||
kill_err
|
||||
);
|
||||
} else {
|
||||
use tokio::time::{sleep, Duration};
|
||||
sleep(Duration::from_millis(500)).await;
|
||||
use sysinfo::{Pid, System};
|
||||
let system = System::new_all();
|
||||
process_actually_stopped = system.process(Pid::from(pid as usize)).is_none();
|
||||
}
|
||||
}
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
use crate::platform_browser;
|
||||
if let Err(kill_err) =
|
||||
platform_browser::linux::kill_browser_process_impl(pid).await
|
||||
{
|
||||
log::error!(
|
||||
"Failed to force kill Camoufox process {}: {}",
|
||||
pid,
|
||||
kill_err
|
||||
);
|
||||
} else {
|
||||
use tokio::time::{sleep, Duration};
|
||||
sleep(Duration::from_millis(500)).await;
|
||||
use sysinfo::{Pid, System};
|
||||
let system = System::new_all();
|
||||
process_actually_stopped = system.process(Pid::from(pid as usize)).is_none();
|
||||
}
|
||||
}
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
use crate::platform_browser;
|
||||
if let Err(kill_err) =
|
||||
platform_browser::windows::kill_browser_process_impl(pid).await
|
||||
{
|
||||
log::error!(
|
||||
"Failed to force kill Camoufox process {}: {}",
|
||||
pid,
|
||||
kill_err
|
||||
);
|
||||
} else {
|
||||
use tokio::time::{sleep, Duration};
|
||||
sleep(Duration::from_millis(500)).await;
|
||||
use sysinfo::{Pid, System};
|
||||
let system = System::new_all();
|
||||
process_actually_stopped = system.process(Pid::from(pid as usize)).is_none();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1081,9 +1262,20 @@ impl BrowserRunner {
|
||||
}
|
||||
}
|
||||
|
||||
// Log warning if process wasn't confirmed stopped, but continue with cleanup
|
||||
// If process wasn't confirmed stopped, return an error
|
||||
if !process_actually_stopped {
|
||||
log::warn!("Camoufox process may still be running, but proceeding with cleanup");
|
||||
log::error!(
|
||||
"Failed to stop Camoufox process for profile: {} (ID: {}) - process may still be running",
|
||||
profile.name,
|
||||
profile.id
|
||||
);
|
||||
return Err(
|
||||
format!(
|
||||
"Failed to stop Camoufox process for profile {} - process may still be running",
|
||||
profile.name
|
||||
)
|
||||
.into(),
|
||||
);
|
||||
}
|
||||
|
||||
// Clear the process ID from the profile
|
||||
|
||||
@@ -321,15 +321,31 @@ impl Downloader {
|
||||
_ => return None,
|
||||
};
|
||||
|
||||
// Look for assets matching the pattern
|
||||
// Use ends_with for precise matching to avoid false positives
|
||||
let pattern = format!(".{os_name}.{arch_name}.zip");
|
||||
let asset = assets.iter().find(|asset| {
|
||||
let name = asset.name.to_lowercase();
|
||||
name.starts_with("camoufox-")
|
||||
&& name.contains(&format!("-{os_name}.{arch_name}.zip"))
|
||||
&& name.ends_with(".zip")
|
||||
name.starts_with("camoufox-") && name.ends_with(&pattern)
|
||||
});
|
||||
|
||||
asset.map(|a| a.browser_download_url.clone())
|
||||
if let Some(asset) = asset {
|
||||
log::info!(
|
||||
"Selected Camoufox asset for {}/{}: {}",
|
||||
os,
|
||||
arch,
|
||||
asset.name
|
||||
);
|
||||
Some(asset.browser_download_url.clone())
|
||||
} else {
|
||||
log::warn!(
|
||||
"No matching Camoufox asset found for {}/{} with pattern '{}'. Available assets: {:?}",
|
||||
os,
|
||||
arch,
|
||||
pattern,
|
||||
assets.iter().map(|a| &a.name).collect::<Vec<_>>()
|
||||
);
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn download_browser<R: tauri::Runtime>(
|
||||
|
||||
@@ -249,12 +249,8 @@ async fn is_geoip_database_available() -> Result<bool, String> {
|
||||
|
||||
#[tauri::command]
|
||||
async fn get_all_traffic_snapshots() -> Result<Vec<crate::traffic_stats::TrafficSnapshot>, String> {
|
||||
Ok(
|
||||
crate::traffic_stats::list_traffic_stats()
|
||||
.into_iter()
|
||||
.map(|s| s.to_snapshot())
|
||||
.collect(),
|
||||
)
|
||||
// Use real-time snapshots that merge in-memory data with disk data
|
||||
Ok(crate::traffic_stats::get_all_traffic_snapshots_realtime())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
@@ -594,7 +590,8 @@ pub fn run() {
|
||||
// Periodically broadcast browser running status to the frontend
|
||||
let app_handle_status = app.handle().clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
let mut interval = tokio::time::interval(tokio::time::Duration::from_millis(500));
|
||||
let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(5));
|
||||
interval.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip);
|
||||
let mut last_running_states: std::collections::HashMap<String, bool> =
|
||||
std::collections::HashMap::new();
|
||||
|
||||
|
||||
@@ -750,7 +750,7 @@ impl ProfileManager {
|
||||
|
||||
// For non-camoufox browsers, use the existing PID-based logic
|
||||
let inner_profile = profile.clone();
|
||||
let system = System::new_all();
|
||||
let mut system = System::new();
|
||||
let mut is_running = false;
|
||||
let mut found_pid: Option<u32> = None;
|
||||
|
||||
@@ -792,6 +792,8 @@ impl ProfileManager {
|
||||
|
||||
// If we didn't find the browser with the stored PID, search all processes
|
||||
if !is_running {
|
||||
// Refresh all processes only when we need to search (expensive but necessary)
|
||||
system.refresh_all();
|
||||
for (pid, process) in system.processes() {
|
||||
let cmd = process.cmd();
|
||||
if cmd.len() >= 2 {
|
||||
@@ -874,7 +876,6 @@ impl ProfileManager {
|
||||
None => inner_profile.clone(),
|
||||
};
|
||||
|
||||
let previous_pid = latest_profile.process_id;
|
||||
let mut merged = latest_profile.clone();
|
||||
|
||||
if let Some(pid) = found_pid {
|
||||
@@ -890,13 +891,6 @@ impl ProfileManager {
|
||||
if let Err(e) = self.save_profile(&merged) {
|
||||
log::warn!("Warning: Failed to clear profile PID: {e}");
|
||||
}
|
||||
|
||||
// Stop any associated proxy immediately when the browser stops
|
||||
if let Some(old_pid) = previous_pid {
|
||||
let _ = crate::proxy_manager::PROXY_MANAGER
|
||||
.stop_proxy(app_handle.clone(), old_pid)
|
||||
.await;
|
||||
}
|
||||
}
|
||||
|
||||
// Emit profile update event to frontend
|
||||
@@ -974,18 +968,12 @@ impl ProfileManager {
|
||||
None => profile.clone(),
|
||||
};
|
||||
|
||||
if let Some(old_pid) = latest.process_id {
|
||||
if latest.process_id.is_some() {
|
||||
latest.process_id = None;
|
||||
if let Err(e) = self.save_profile(&latest) {
|
||||
log::warn!("Warning: Failed to clear Camoufox profile process info: {e}");
|
||||
}
|
||||
|
||||
// Stop any proxy tied to this old PID immediately
|
||||
let _ = crate::proxy_manager::PROXY_MANAGER
|
||||
.stop_proxy(app_handle.clone(), old_pid)
|
||||
.await;
|
||||
|
||||
// Emit profile update event to frontend
|
||||
if let Err(e) = app_handle.emit("profile-updated", &latest) {
|
||||
log::warn!("Warning: Failed to emit profile update event: {e}");
|
||||
}
|
||||
@@ -1010,7 +998,7 @@ impl ProfileManager {
|
||||
None => profile.clone(),
|
||||
};
|
||||
|
||||
if let Some(old_pid) = latest.process_id {
|
||||
if latest.process_id.is_some() {
|
||||
latest.process_id = None;
|
||||
if let Err(e2) = self.save_profile(&latest) {
|
||||
log::warn!(
|
||||
@@ -1018,11 +1006,6 @@ impl ProfileManager {
|
||||
);
|
||||
}
|
||||
|
||||
// Best-effort stop of proxy tied to old PID
|
||||
let _ = crate::proxy_manager::PROXY_MANAGER
|
||||
.stop_proxy(app_handle.clone(), old_pid)
|
||||
.await;
|
||||
|
||||
// Emit profile update event to frontend
|
||||
if let Err(e3) = app_handle.emit("profile-updated", &latest) {
|
||||
log::warn!("Warning: Failed to emit profile update event: {e3}");
|
||||
@@ -1241,7 +1224,9 @@ mod tests {
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
|
||||
// Mock the base directories by setting environment variables
|
||||
std::env::set_var("HOME", temp_dir.path());
|
||||
unsafe {
|
||||
std::env::set_var("HOME", temp_dir.path());
|
||||
}
|
||||
|
||||
let profile_manager = ProfileManager::instance();
|
||||
(profile_manager, temp_dir)
|
||||
|
||||
+119
-60
@@ -595,11 +595,6 @@ impl ProxyManager {
|
||||
browser_pid: u32,
|
||||
profile_id: Option<&str>,
|
||||
) -> Result<ProxySettings, String> {
|
||||
// First, proactively cleanup any dead proxies so we don't accidentally reuse stale ones
|
||||
let _ = self.cleanup_dead_proxies(app_handle.clone()).await;
|
||||
|
||||
// If we have a previous proxy tied to this profile, and the upstream settings are changing,
|
||||
// stop it before starting a new one so the change takes effect immediately.
|
||||
if let Some(name) = profile_id {
|
||||
// Check if we have an active proxy recorded for this profile
|
||||
let maybe_existing_id = {
|
||||
@@ -625,30 +620,29 @@ impl ProxyManager {
|
||||
&& existing.upstream_host == desired_host
|
||||
&& existing.upstream_port == desired_port;
|
||||
|
||||
if !is_same_upstream {
|
||||
// Stop the previous proxy tied to this profile (best effort)
|
||||
// We don't know the original PID mapping that created it; iterate to find its key
|
||||
let pid_to_stop = {
|
||||
let proxies = self.active_proxies.lock().unwrap();
|
||||
proxies.iter().find_map(|(pid, info)| {
|
||||
if info.id == existing_id {
|
||||
Some(*pid)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
};
|
||||
if let Some(pid) = pid_to_stop {
|
||||
let _ = self.stop_proxy(app_handle.clone(), pid).await;
|
||||
if is_same_upstream {
|
||||
// Settings match - can reuse existing proxy
|
||||
// Just update the PID mapping if needed
|
||||
let proxies = self.active_proxies.lock().unwrap();
|
||||
if proxies.contains_key(&browser_pid) {
|
||||
// Already mapped, reuse it
|
||||
return Ok(ProxySettings {
|
||||
proxy_type: "http".to_string(),
|
||||
host: "127.0.0.1".to_string(),
|
||||
port: existing.local_port,
|
||||
username: None,
|
||||
password: None,
|
||||
});
|
||||
}
|
||||
// Need to add this PID to the mapping - we'll do that after starting
|
||||
}
|
||||
// Settings differ - we'll create a new proxy, but don't stop the old one
|
||||
// It will be cleaned up by periodic cleanup if it becomes dead
|
||||
}
|
||||
}
|
||||
}
|
||||
// Check if we already have a proxy for this browser PID. If it exists but the upstream
|
||||
// settings don't match the newly requested ones, stop it and create a new proxy so that
|
||||
// changes take effect immediately.
|
||||
let mut needs_restart = false;
|
||||
// Check if we already have a proxy for this browser PID
|
||||
// If settings match, reuse it; otherwise create a new one (don't stop the old one)
|
||||
{
|
||||
let proxies = self.active_proxies.lock().unwrap();
|
||||
if let Some(existing) = proxies.get(&browser_pid) {
|
||||
@@ -663,7 +657,7 @@ impl ProxyManager {
|
||||
&& existing.upstream_port == desired_port;
|
||||
|
||||
if is_same_upstream {
|
||||
// Check if profile_id matches - if not, we need to restart to update tracking
|
||||
// Check if profile_id matches
|
||||
let profile_id_matches = match (profile_id, &existing.profile_id) {
|
||||
(Some(ref new_id), Some(ref old_id)) => new_id == old_id,
|
||||
(None, None) => true,
|
||||
@@ -671,7 +665,7 @@ impl ProxyManager {
|
||||
};
|
||||
|
||||
if profile_id_matches {
|
||||
// Reuse existing local proxy (profile_id matches)
|
||||
// Reuse existing local proxy (settings and profile_id match)
|
||||
return Ok(ProxySettings {
|
||||
proxy_type: "http".to_string(),
|
||||
host: "127.0.0.1".to_string(),
|
||||
@@ -679,28 +673,15 @@ impl ProxyManager {
|
||||
username: None,
|
||||
password: None,
|
||||
});
|
||||
} else {
|
||||
// Profile ID changed - need to restart proxy to update tracking
|
||||
log::info!(
|
||||
"Profile ID changed for proxy {}: {:?} -> {:?}, restarting proxy",
|
||||
existing.id,
|
||||
existing.profile_id,
|
||||
profile_id
|
||||
);
|
||||
needs_restart = true;
|
||||
}
|
||||
} else {
|
||||
// Upstream changed; we must restart the local proxy so that traffic is routed correctly
|
||||
needs_restart = true;
|
||||
// Profile ID changed - we'll create a new proxy but don't stop the old one
|
||||
// It will be cleaned up by periodic cleanup if it becomes dead
|
||||
}
|
||||
// Upstream changed - we'll create a new proxy but don't stop the old one
|
||||
// It will be cleaned up by periodic cleanup if it becomes dead
|
||||
}
|
||||
}
|
||||
|
||||
if needs_restart {
|
||||
// Best-effort stop of the old proxy for this PID before starting a new one
|
||||
let _ = self.stop_proxy(app_handle.clone(), browser_pid).await;
|
||||
}
|
||||
|
||||
// Start a new proxy using the donut-proxy binary with the correct CLI interface
|
||||
let mut proxy_cmd = app_handle
|
||||
.shell()
|
||||
@@ -955,30 +936,108 @@ impl ProxyManager {
|
||||
}
|
||||
}
|
||||
|
||||
// Check if a process is still running
|
||||
fn is_process_running(&self, pid: u32) -> bool {
|
||||
use sysinfo::{Pid, System};
|
||||
let system = System::new_all();
|
||||
system.process(Pid::from(pid as usize)).is_some()
|
||||
}
|
||||
|
||||
// Clean up proxies for dead browser processes
|
||||
// Only clean up orphaned config files where the proxy process itself is dead
|
||||
pub async fn cleanup_dead_proxies(
|
||||
&self,
|
||||
app_handle: tauri::AppHandle,
|
||||
) -> Result<Vec<u32>, String> {
|
||||
let dead_pids = {
|
||||
let proxies = self.active_proxies.lock().unwrap();
|
||||
proxies
|
||||
.keys()
|
||||
.filter(|&&pid| pid != 0 && !self.is_process_running(pid)) // Skip temporary PID 0
|
||||
.copied()
|
||||
.collect::<Vec<u32>>()
|
||||
// Don't stop proxies for dead browser processes - let them run indefinitely
|
||||
// The proxy processes are idle and don't consume CPU when not in use
|
||||
// Only clean up config files where the proxy process itself is dead (see below)
|
||||
let dead_pids: Vec<u32> = Vec::new();
|
||||
|
||||
// Clean up orphaned proxy configs (only where proxy process is definitely dead)
|
||||
// IMPORTANT: Only clean up configs where the proxy process itself is dead
|
||||
// If the proxy process is running (even if idle), leave it alone
|
||||
// The user doesn't care if proxy processes run indefinitely as long as they're not consuming CPU
|
||||
let orphaned_configs = {
|
||||
use crate::proxy_storage::{is_process_running, list_proxy_configs};
|
||||
use std::time::{SystemTime, UNIX_EPOCH};
|
||||
|
||||
let all_configs = list_proxy_configs();
|
||||
let tracked_proxy_ids: std::collections::HashSet<String> = {
|
||||
let proxies = self.active_proxies.lock().unwrap();
|
||||
proxies.values().map(|p| p.id.clone()).collect()
|
||||
};
|
||||
|
||||
// Get current time for grace period check
|
||||
let now = SystemTime::now()
|
||||
.duration_since(UNIX_EPOCH)
|
||||
.unwrap()
|
||||
.as_secs();
|
||||
|
||||
all_configs
|
||||
.into_iter()
|
||||
.filter(|config| {
|
||||
// If proxy is tracked in active_proxies, it's definitely not orphaned
|
||||
if tracked_proxy_ids.contains(&config.id) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Extract creation time from proxy ID (format: proxy_{timestamp}_{random})
|
||||
// This gives us a grace period for newly created proxies
|
||||
let proxy_age = config
|
||||
.id
|
||||
.strip_prefix("proxy_")
|
||||
.and_then(|s| s.split('_').next())
|
||||
.and_then(|s| s.parse::<u64>().ok())
|
||||
.map(|created_at| now.saturating_sub(created_at))
|
||||
.unwrap_or(0);
|
||||
|
||||
// Grace period: don't clean up proxies created in the last 120 seconds
|
||||
// This prevents race conditions during startup (increased from 60 to 120 for safety)
|
||||
if proxy_age < 120 {
|
||||
log::debug!(
|
||||
"Skipping cleanup of proxy {} - too new (age: {}s)",
|
||||
config.id,
|
||||
proxy_age
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
// ONLY clean up if we can verify the proxy process is dead
|
||||
// If proxy process is running, leave it alone (even if idle)
|
||||
if let Some(proxy_pid) = config.pid {
|
||||
// Check if proxy process is actually dead
|
||||
if !is_process_running(proxy_pid) {
|
||||
// Proxy process is dead, clean up the config file
|
||||
log::info!(
|
||||
"Proxy {} process (PID {}) is dead, will clean up config",
|
||||
config.id,
|
||||
proxy_pid
|
||||
);
|
||||
return true;
|
||||
}
|
||||
// Proxy process is running - leave it alone
|
||||
log::debug!(
|
||||
"Skipping cleanup of proxy {} - process (PID {}) is still running",
|
||||
config.id,
|
||||
proxy_pid
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
// No PID in config - can't verify if process is dead
|
||||
// Be conservative: don't clean up (might be starting up or PID not set yet)
|
||||
log::debug!(
|
||||
"Skipping cleanup of proxy {} - no PID in config (might be starting up)",
|
||||
config.id
|
||||
);
|
||||
false
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
};
|
||||
|
||||
for dead_pid in &dead_pids {
|
||||
log::info!("Cleaning up proxy for dead browser process PID: {dead_pid}");
|
||||
let _ = self.stop_proxy(app_handle.clone(), *dead_pid).await;
|
||||
// Clean up orphaned config files (proxy process is dead)
|
||||
for config in orphaned_configs {
|
||||
log::info!(
|
||||
"Cleaning up orphaned proxy config: {} (proxy process is dead)",
|
||||
config.id
|
||||
);
|
||||
// Just delete the config file - the process is already dead
|
||||
use crate::proxy_storage::delete_proxy_config;
|
||||
delete_proxy_config(&config.id);
|
||||
}
|
||||
|
||||
// Emit event for reactive UI updates
|
||||
|
||||
+545
-92
@@ -359,13 +359,328 @@ async fn connect_via_socks(
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_http_via_socks4(
|
||||
req: Request<hyper::body::Incoming>,
|
||||
upstream_url: &str,
|
||||
) -> Result<Response<Full<Bytes>>, Infallible> {
|
||||
// Extract domain for traffic tracking
|
||||
let domain = req
|
||||
.uri()
|
||||
.host()
|
||||
.map(|h| h.to_string())
|
||||
.unwrap_or_else(|| "unknown".to_string());
|
||||
|
||||
// Parse upstream SOCKS4 proxy URL
|
||||
let upstream = match Url::parse(upstream_url) {
|
||||
Ok(url) => url,
|
||||
Err(e) => {
|
||||
log::error!("Failed to parse SOCKS4 proxy URL: {}", e);
|
||||
let mut response = Response::new(Full::new(Bytes::from("Invalid proxy URL")));
|
||||
*response.status_mut() = StatusCode::BAD_GATEWAY;
|
||||
return Ok(response);
|
||||
}
|
||||
};
|
||||
|
||||
let socks_host = upstream.host_str().unwrap_or("127.0.0.1");
|
||||
let socks_port = upstream.port().unwrap_or(1080);
|
||||
let socks_addr = format!("{}:{}", socks_host, socks_port);
|
||||
|
||||
// Parse target from request URI
|
||||
let target_uri = req.uri();
|
||||
let target_host = target_uri.host().unwrap_or("localhost");
|
||||
let target_port = target_uri.port_u16().unwrap_or(80);
|
||||
|
||||
// Connect to SOCKS4 proxy
|
||||
let mut socks_stream = match TcpStream::connect(&socks_addr).await {
|
||||
Ok(stream) => stream,
|
||||
Err(e) => {
|
||||
log::error!("Failed to connect to SOCKS4 proxy {}: {}", socks_addr, e);
|
||||
let mut response = Response::new(Full::new(Bytes::from(format!(
|
||||
"Failed to connect to SOCKS4 proxy: {}",
|
||||
e
|
||||
))));
|
||||
*response.status_mut() = StatusCode::BAD_GATEWAY;
|
||||
return Ok(response);
|
||||
}
|
||||
};
|
||||
|
||||
// Resolve target host to IP (SOCKS4 requires IP addresses)
|
||||
let target_ip = match tokio::net::lookup_host((target_host, target_port)).await {
|
||||
Ok(mut addrs) => {
|
||||
if let Some(addr) = addrs.next() {
|
||||
match addr.ip() {
|
||||
std::net::IpAddr::V4(ipv4) => ipv4.octets(),
|
||||
std::net::IpAddr::V6(_) => {
|
||||
log::error!("SOCKS4 does not support IPv6");
|
||||
let mut response = Response::new(Full::new(Bytes::from(
|
||||
"SOCKS4 does not support IPv6 addresses",
|
||||
)));
|
||||
*response.status_mut() = StatusCode::BAD_GATEWAY;
|
||||
return Ok(response);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log::error!("Failed to resolve target host: {}", target_host);
|
||||
let mut response = Response::new(Full::new(Bytes::from(format!(
|
||||
"Failed to resolve target host: {}",
|
||||
target_host
|
||||
))));
|
||||
*response.status_mut() = StatusCode::BAD_GATEWAY;
|
||||
return Ok(response);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Failed to resolve target host {}: {}", target_host, e);
|
||||
let mut response = Response::new(Full::new(Bytes::from(format!(
|
||||
"Failed to resolve target host: {}",
|
||||
e
|
||||
))));
|
||||
*response.status_mut() = StatusCode::BAD_GATEWAY;
|
||||
return Ok(response);
|
||||
}
|
||||
};
|
||||
|
||||
// Build SOCKS4 CONNECT request
|
||||
let mut socks_request = vec![0x04, 0x01]; // SOCKS4, CONNECT
|
||||
socks_request.extend_from_slice(&target_port.to_be_bytes());
|
||||
socks_request.extend_from_slice(&target_ip);
|
||||
socks_request.push(0); // NULL terminator for userid
|
||||
|
||||
// Send SOCKS4 CONNECT request
|
||||
if let Err(e) = socks_stream.write_all(&socks_request).await {
|
||||
log::error!("Failed to send SOCKS4 CONNECT request: {}", e);
|
||||
let mut response = Response::new(Full::new(Bytes::from(format!(
|
||||
"Failed to send SOCKS4 request: {}",
|
||||
e
|
||||
))));
|
||||
*response.status_mut() = StatusCode::BAD_GATEWAY;
|
||||
return Ok(response);
|
||||
}
|
||||
|
||||
// Read SOCKS4 response
|
||||
let mut socks_response = [0u8; 8];
|
||||
if let Err(e) = socks_stream.read_exact(&mut socks_response).await {
|
||||
log::error!("Failed to read SOCKS4 response: {}", e);
|
||||
let mut response = Response::new(Full::new(Bytes::from(format!(
|
||||
"Failed to read SOCKS4 response: {}",
|
||||
e
|
||||
))));
|
||||
*response.status_mut() = StatusCode::BAD_GATEWAY;
|
||||
return Ok(response);
|
||||
}
|
||||
|
||||
// Check SOCKS4 response (second byte should be 0x5A for success)
|
||||
if socks_response[1] != 0x5A {
|
||||
log::error!(
|
||||
"SOCKS4 connection failed, response code: {}",
|
||||
socks_response[1]
|
||||
);
|
||||
let mut response = Response::new(Full::new(Bytes::from("SOCKS4 connection failed")));
|
||||
*response.status_mut() = StatusCode::BAD_GATEWAY;
|
||||
return Ok(response);
|
||||
}
|
||||
|
||||
// Now send the HTTP request through the SOCKS4 connection
|
||||
// Build HTTP request line
|
||||
let method = req.method().as_str();
|
||||
let path = target_uri
|
||||
.path_and_query()
|
||||
.map(|pq| pq.as_str())
|
||||
.unwrap_or("/");
|
||||
let http_version = if req.version() == hyper::Version::HTTP_11 {
|
||||
"HTTP/1.1"
|
||||
} else {
|
||||
"HTTP/1.0"
|
||||
};
|
||||
|
||||
let mut http_request = format!("{} {} {}\r\n", method, path, http_version);
|
||||
|
||||
// Add Host header if not present
|
||||
let mut has_host = false;
|
||||
for (name, value) in req.headers().iter() {
|
||||
if name.as_str().eq_ignore_ascii_case("host") {
|
||||
has_host = true;
|
||||
}
|
||||
// Skip proxy-specific headers
|
||||
if name.as_str().eq_ignore_ascii_case("proxy-authorization")
|
||||
|| name.as_str().eq_ignore_ascii_case("proxy-connection")
|
||||
|| name.as_str().eq_ignore_ascii_case("proxy-authenticate")
|
||||
{
|
||||
continue;
|
||||
}
|
||||
// Skip Content-Length and Transfer-Encoding - we'll add our own Content-Length
|
||||
// based on the collected body size. Having both violates HTTP/1.1 (RFC 7230).
|
||||
if name.as_str().eq_ignore_ascii_case("content-length")
|
||||
|| name.as_str().eq_ignore_ascii_case("transfer-encoding")
|
||||
{
|
||||
continue;
|
||||
}
|
||||
if let Ok(val) = value.to_str() {
|
||||
http_request.push_str(&format!("{}: {}\r\n", name.as_str(), val));
|
||||
}
|
||||
}
|
||||
|
||||
if !has_host {
|
||||
http_request.push_str(&format!("Host: {}:{}\r\n", target_host, target_port));
|
||||
}
|
||||
|
||||
// Get body
|
||||
let body_bytes = match req.collect().await {
|
||||
Ok(collected) => collected.to_bytes(),
|
||||
Err(_) => Bytes::new(),
|
||||
};
|
||||
|
||||
// Add Content-Length if there's a body
|
||||
if !body_bytes.is_empty() {
|
||||
http_request.push_str(&format!("Content-Length: {}\r\n", body_bytes.len()));
|
||||
}
|
||||
|
||||
http_request.push_str("\r\n");
|
||||
|
||||
// Send HTTP request
|
||||
if let Err(e) = socks_stream.write_all(http_request.as_bytes()).await {
|
||||
log::error!("Failed to send HTTP request through SOCKS4: {}", e);
|
||||
let mut response = Response::new(Full::new(Bytes::from(format!(
|
||||
"Failed to send HTTP request: {}",
|
||||
e
|
||||
))));
|
||||
*response.status_mut() = StatusCode::BAD_GATEWAY;
|
||||
return Ok(response);
|
||||
}
|
||||
|
||||
// Send body if present
|
||||
if !body_bytes.is_empty() {
|
||||
if let Err(e) = socks_stream.write_all(&body_bytes).await {
|
||||
log::error!("Failed to send HTTP body through SOCKS4: {}", e);
|
||||
let mut response = Response::new(Full::new(Bytes::from(format!(
|
||||
"Failed to send HTTP body: {}",
|
||||
e
|
||||
))));
|
||||
*response.status_mut() = StatusCode::BAD_GATEWAY;
|
||||
return Ok(response);
|
||||
}
|
||||
}
|
||||
|
||||
// Read HTTP response
|
||||
let mut response_buffer = Vec::with_capacity(8192);
|
||||
let mut temp_buf = [0u8; 4096];
|
||||
let mut content_length: Option<usize> = None;
|
||||
let mut is_chunked = false;
|
||||
|
||||
// Read until we have complete headers
|
||||
loop {
|
||||
match socks_stream.read(&mut temp_buf).await {
|
||||
Ok(0) => break, // Connection closed
|
||||
Ok(n) => {
|
||||
response_buffer.extend_from_slice(&temp_buf[..n]);
|
||||
// Check for end of headers (\r\n\r\n)
|
||||
if let Some(pos) = response_buffer.windows(4).position(|w| w == b"\r\n\r\n") {
|
||||
// Parse headers
|
||||
let headers_str = String::from_utf8_lossy(&response_buffer[..pos + 4]);
|
||||
for line in headers_str.lines() {
|
||||
let line_lower = line.to_lowercase();
|
||||
if line_lower.starts_with("content-length:") {
|
||||
if let Some(len_str) = line.split(':').nth(1) {
|
||||
if let Ok(len) = len_str.trim().parse::<usize>() {
|
||||
content_length = Some(len);
|
||||
}
|
||||
}
|
||||
} else if line_lower.starts_with("transfer-encoding:") && line_lower.contains("chunked")
|
||||
{
|
||||
is_chunked = true;
|
||||
}
|
||||
}
|
||||
// Read body if Content-Length is specified and we don't have it all
|
||||
if let Some(cl) = content_length {
|
||||
let body_start = pos + 4;
|
||||
let body_received = response_buffer.len() - body_start;
|
||||
if body_received < cl {
|
||||
// Read remaining body (but don't use read_exact as connection might close)
|
||||
let remaining = cl - body_received;
|
||||
let mut read_so_far = 0;
|
||||
while read_so_far < remaining {
|
||||
match socks_stream.read(&mut temp_buf).await {
|
||||
Ok(0) => break, // Connection closed
|
||||
Ok(m) => {
|
||||
let to_read = (remaining - read_so_far).min(m);
|
||||
response_buffer.extend_from_slice(&temp_buf[..to_read]);
|
||||
read_so_far += to_read;
|
||||
if to_read < m {
|
||||
// More data than needed, might be next response - stop here
|
||||
break;
|
||||
}
|
||||
}
|
||||
Err(_) => break,
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if !is_chunked {
|
||||
// No Content-Length and not chunked - read until connection closes
|
||||
// But limit to reasonable size to avoid memory issues
|
||||
let max_body_size = 10 * 1024 * 1024; // 10MB max
|
||||
while response_buffer.len() < max_body_size {
|
||||
match socks_stream.read(&mut temp_buf).await {
|
||||
Ok(0) => break, // Connection closed
|
||||
Ok(n) => {
|
||||
response_buffer.extend_from_slice(&temp_buf[..n]);
|
||||
}
|
||||
Err(_) => break,
|
||||
}
|
||||
}
|
||||
}
|
||||
// Note: Chunked encoding is complex to parse manually, so we'll read what we can
|
||||
// For full chunked support, we'd need a proper HTTP parser
|
||||
break;
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Error reading HTTP response from SOCKS4: {}", e);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Parse HTTP response
|
||||
let response_str = String::from_utf8_lossy(&response_buffer);
|
||||
let mut lines = response_str.lines();
|
||||
let status_line = lines.next().unwrap_or("HTTP/1.1 500 Internal Server Error");
|
||||
let status_parts: Vec<&str> = status_line.split_whitespace().collect();
|
||||
let status_code = status_parts
|
||||
.get(1)
|
||||
.and_then(|s| s.parse::<u16>().ok())
|
||||
.unwrap_or(500);
|
||||
|
||||
// Find header/body boundary
|
||||
let header_end = response_buffer
|
||||
.windows(4)
|
||||
.position(|w| w == b"\r\n\r\n")
|
||||
.map(|p| p + 4)
|
||||
.unwrap_or(response_buffer.len());
|
||||
|
||||
let body = response_buffer[header_end..].to_vec();
|
||||
|
||||
// Record request in traffic tracker
|
||||
let response_size = body.len() as u64;
|
||||
if let Some(tracker) = get_traffic_tracker() {
|
||||
tracker.record_request(&domain, body_bytes.len() as u64, response_size);
|
||||
}
|
||||
|
||||
let mut hyper_response = Response::new(Full::new(Bytes::from(body)));
|
||||
*hyper_response.status_mut() = StatusCode::from_u16(status_code).unwrap();
|
||||
|
||||
Ok(hyper_response)
|
||||
}
|
||||
|
||||
async fn handle_http(
|
||||
req: Request<hyper::body::Incoming>,
|
||||
upstream_url: Option<String>,
|
||||
) -> Result<Response<Full<Bytes>>, Infallible> {
|
||||
// Use reqwest for all HTTP requests as it handles proxies better
|
||||
// This is faster and more reliable than trying to use hyper-proxy with version conflicts
|
||||
use reqwest::Client;
|
||||
// Extract domain for traffic tracking
|
||||
let domain = req
|
||||
.uri()
|
||||
.host()
|
||||
.map(|h| h.to_string())
|
||||
.unwrap_or_else(|| "unknown".to_string());
|
||||
|
||||
log::error!(
|
||||
"DEBUG: Handling HTTP request: {} {} (host: {:?})",
|
||||
@@ -374,12 +689,20 @@ async fn handle_http(
|
||||
req.uri().host()
|
||||
);
|
||||
|
||||
// Extract domain for traffic tracking
|
||||
let domain = req
|
||||
.uri()
|
||||
.host()
|
||||
.map(|h| h.to_string())
|
||||
.unwrap_or_else(|| "unknown".to_string());
|
||||
// Check if we need to handle SOCKS4 manually (reqwest doesn't support it)
|
||||
if let Some(ref upstream) = upstream_url {
|
||||
if upstream != "DIRECT" {
|
||||
if let Ok(url) = Url::parse(upstream) {
|
||||
if url.scheme() == "socks4" {
|
||||
// Handle SOCKS4 manually for HTTP requests
|
||||
return handle_http_via_socks4(req, upstream).await;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Use reqwest for HTTP/HTTPS/SOCKS5 proxies
|
||||
use reqwest::Client;
|
||||
|
||||
let client_builder = Client::builder();
|
||||
let client = if let Some(ref upstream) = upstream_url {
|
||||
@@ -497,6 +820,7 @@ fn build_reqwest_client_with_proxy(
|
||||
let proxy = match scheme {
|
||||
"http" | "https" => {
|
||||
// For HTTP/HTTPS proxies, reqwest handles them directly
|
||||
// Note: HTTPS proxy URLs still use HTTP CONNECT method, reqwest handles TLS automatically
|
||||
Proxy::http(upstream_url)?
|
||||
}
|
||||
"socks5" => {
|
||||
@@ -504,8 +828,9 @@ fn build_reqwest_client_with_proxy(
|
||||
Proxy::all(upstream_url)?
|
||||
}
|
||||
"socks4" => {
|
||||
// SOCKS4 is not directly supported by reqwest, would need custom handling
|
||||
return Err("SOCKS4 not supported for HTTP requests via reqwest".into());
|
||||
// SOCKS4 is handled manually in handle_http_via_socks4
|
||||
// This should not be reached, but return error as fallback
|
||||
return Err("SOCKS4 should be handled manually".into());
|
||||
}
|
||||
_ => {
|
||||
return Err(format!("Unsupported proxy scheme: {}", scheme).into());
|
||||
@@ -599,14 +924,80 @@ pub async fn run_proxy_server(config: ProxyConfig) -> Result<(), Box<dyn std::er
|
||||
);
|
||||
log::error!("Proxy server entering accept loop - process should stay alive");
|
||||
|
||||
// Start a background task to write lightweight session snapshots for real-time updates
|
||||
// These are much smaller than full stats and can be written frequently (~100 bytes every 2 seconds)
|
||||
if let Some(tracker) = get_traffic_tracker() {
|
||||
let tracker_clone = tracker.clone();
|
||||
tokio::spawn(async move {
|
||||
let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(2));
|
||||
interval.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip);
|
||||
|
||||
loop {
|
||||
interval.tick().await;
|
||||
// Write lightweight session snapshot (only current counters, ~100 bytes)
|
||||
if let Err(e) = tracker_clone.write_session_snapshot() {
|
||||
log::debug!("Failed to write session snapshot: {}", e);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Start a background task to periodically flush traffic stats to disk
|
||||
// Use adaptive flush frequency: every 5 seconds when active, every 30 seconds when idle
|
||||
tokio::spawn(async move {
|
||||
let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(1));
|
||||
let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(5));
|
||||
interval.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip);
|
||||
let mut last_activity_time = std::time::Instant::now();
|
||||
let mut last_flush_time = std::time::Instant::now();
|
||||
let mut current_interval_secs = 5u64;
|
||||
|
||||
loop {
|
||||
interval.tick().await;
|
||||
if let Some(tracker) = get_traffic_tracker() {
|
||||
if let Err(e) = tracker.flush_to_disk() {
|
||||
log::error!("Failed to flush traffic stats: {}", e);
|
||||
let (sent, recv, requests) = tracker.get_snapshot();
|
||||
let current_bytes = sent + recv;
|
||||
let time_since_activity = last_activity_time.elapsed();
|
||||
let time_since_flush = last_flush_time.elapsed();
|
||||
let has_traffic = current_bytes > 0 || requests > 0;
|
||||
|
||||
// Determine flush frequency based on activity
|
||||
// When active: flush every 5 seconds
|
||||
// When idle: flush every 30 seconds
|
||||
let desired_interval_secs =
|
||||
if has_traffic || time_since_activity < std::time::Duration::from_secs(30) {
|
||||
5u64
|
||||
} else {
|
||||
30u64
|
||||
};
|
||||
|
||||
// Update interval if needed
|
||||
if desired_interval_secs != current_interval_secs {
|
||||
current_interval_secs = desired_interval_secs;
|
||||
interval = tokio::time::interval(tokio::time::Duration::from_secs(desired_interval_secs));
|
||||
}
|
||||
|
||||
// Only flush if enough time has passed since last flush
|
||||
let flush_interval = std::time::Duration::from_secs(desired_interval_secs);
|
||||
let should_flush = time_since_flush >= flush_interval;
|
||||
|
||||
if should_flush {
|
||||
match tracker.flush_to_disk() {
|
||||
Ok(Some((sent, recv))) => {
|
||||
// Successful flush with data
|
||||
last_flush_time = std::time::Instant::now();
|
||||
if sent > 0 || recv > 0 {
|
||||
last_activity_time = std::time::Instant::now();
|
||||
}
|
||||
}
|
||||
Ok(None) => {
|
||||
// No data to flush - this is normal
|
||||
last_flush_time = std::time::Instant::now();
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Failed to flush traffic stats: {}", e);
|
||||
// Don't update flush time on error - retry sooner
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -627,38 +1018,95 @@ pub async fn run_proxy_server(config: ProxyConfig) -> Result<(), Box<dyn std::er
|
||||
tokio::task::spawn(async move {
|
||||
// Read first bytes to detect CONNECT requests
|
||||
// CONNECT requests need special handling for tunneling
|
||||
let mut peek_buffer = [0u8; 8];
|
||||
// Use a larger buffer to ensure we can detect CONNECT even with partial reads
|
||||
let mut peek_buffer = [0u8; 16];
|
||||
match stream.read(&mut peek_buffer).await {
|
||||
Ok(0) => {
|
||||
log::error!("DEBUG: Connection closed immediately (0 bytes read)");
|
||||
}
|
||||
Ok(n) => {
|
||||
let request_start = String::from_utf8_lossy(&peek_buffer[..n.min(7)]);
|
||||
log::error!("DEBUG: Read {} bytes, starts with: {:?}", n, request_start);
|
||||
if n >= 7 && request_start.starts_with("CONNECT") {
|
||||
// Check if this looks like a CONNECT request
|
||||
// Be more lenient - check if the first bytes match "CONNECT" (case-insensitive)
|
||||
let request_start_upper =
|
||||
String::from_utf8_lossy(&peek_buffer[..n.min(7)]).to_uppercase();
|
||||
let is_connect = request_start_upper.starts_with("CONNECT");
|
||||
|
||||
log::error!(
|
||||
"DEBUG: Read {} bytes, starts with: {:?}, is_connect: {}",
|
||||
n,
|
||||
String::from_utf8_lossy(&peek_buffer[..n.min(20)]),
|
||||
is_connect
|
||||
);
|
||||
|
||||
if is_connect {
|
||||
// Handle CONNECT request manually for tunneling
|
||||
let mut full_request = Vec::with_capacity(4096);
|
||||
full_request.extend_from_slice(&peek_buffer[..n]);
|
||||
|
||||
// Read the rest of the CONNECT request
|
||||
// Read the rest of the CONNECT request until we have the full headers
|
||||
// CONNECT requests end with \r\n\r\n (or \n\n)
|
||||
let mut remaining = [0u8; 4096];
|
||||
let mut total_read = n;
|
||||
let max_reads = 100; // Prevent infinite loop
|
||||
let mut reads = 0;
|
||||
|
||||
loop {
|
||||
if reads >= max_reads {
|
||||
log::error!("DEBUG: Max reads reached, breaking");
|
||||
break;
|
||||
}
|
||||
|
||||
match stream.read(&mut remaining).await {
|
||||
Ok(0) => break,
|
||||
Ok(m) => {
|
||||
full_request.extend_from_slice(&remaining[..m]);
|
||||
Ok(0) => {
|
||||
// Connection closed, but we might have a complete request
|
||||
if full_request.ends_with(b"\r\n\r\n") || full_request.ends_with(b"\n\n") {
|
||||
break;
|
||||
}
|
||||
// If we have some data, try to process it anyway
|
||||
if total_read > 0 {
|
||||
break;
|
||||
}
|
||||
return; // No data at all
|
||||
}
|
||||
Ok(m) => {
|
||||
reads += 1;
|
||||
total_read += m;
|
||||
full_request.extend_from_slice(&remaining[..m]);
|
||||
|
||||
// Check if we have complete headers
|
||||
if full_request.ends_with(b"\r\n\r\n") || full_request.ends_with(b"\n\n") {
|
||||
break;
|
||||
}
|
||||
|
||||
// Also check if we have enough to parse (at least "CONNECT host:port HTTP/1.x")
|
||||
if total_read >= 20 {
|
||||
// Check if we have a newline that might indicate end of request line
|
||||
if let Some(pos) = full_request.iter().position(|&b| b == b'\n') {
|
||||
if pos < full_request.len() - 1 {
|
||||
// We have at least the request line, check if we have headers
|
||||
let request_str = String::from_utf8_lossy(&full_request);
|
||||
if request_str.contains("\r\n\r\n") || request_str.contains("\n\n") {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("DEBUG: Error reading CONNECT request: {:?}", e);
|
||||
// If we have some data, try to process it
|
||||
if total_read > 0 {
|
||||
break;
|
||||
}
|
||||
return;
|
||||
}
|
||||
Err(_) => break,
|
||||
}
|
||||
}
|
||||
|
||||
// Handle CONNECT manually
|
||||
log::error!(
|
||||
"DEBUG: Handling CONNECT manually for: {}",
|
||||
String::from_utf8_lossy(&full_request[..full_request.len().min(100)])
|
||||
String::from_utf8_lossy(&full_request[..full_request.len().min(200)])
|
||||
);
|
||||
if let Err(e) = handle_connect_from_buffer(stream, full_request, upstream).await {
|
||||
log::error!("Error handling CONNECT request: {:?}", e);
|
||||
@@ -673,7 +1121,7 @@ pub async fn run_proxy_server(config: ProxyConfig) -> Result<(), Box<dyn std::er
|
||||
log::error!(
|
||||
"DEBUG: Non-CONNECT request, first {} bytes: {:?}",
|
||||
n,
|
||||
String::from_utf8_lossy(&peek_buffer[..n])
|
||||
String::from_utf8_lossy(&peek_buffer[..n.min(50)])
|
||||
);
|
||||
let prepended_bytes = peek_buffer[..n].to_vec();
|
||||
let prepended_reader = PrependReader {
|
||||
@@ -745,80 +1193,85 @@ async fn handle_connect_from_buffer(
|
||||
}
|
||||
|
||||
// Connect to target (directly or via upstream proxy)
|
||||
let target_stream = if upstream_url.is_none()
|
||||
|| upstream_url
|
||||
.as_ref()
|
||||
.map(|s| s == "DIRECT")
|
||||
.unwrap_or(false)
|
||||
{
|
||||
// Direct connection
|
||||
TcpStream::connect((target_host, target_port)).await?
|
||||
} else {
|
||||
// Connect via upstream proxy
|
||||
let upstream = Url::parse(upstream_url.as_ref().unwrap())?;
|
||||
let scheme = upstream.scheme();
|
||||
let target_stream = match upstream_url.as_ref() {
|
||||
None => {
|
||||
// Direct connection
|
||||
TcpStream::connect((target_host, target_port)).await?
|
||||
}
|
||||
Some(url) if url == "DIRECT" => {
|
||||
// Direct connection
|
||||
TcpStream::connect((target_host, target_port)).await?
|
||||
}
|
||||
Some(upstream_url_str) => {
|
||||
// Connect via upstream proxy
|
||||
let upstream = Url::parse(upstream_url_str)?;
|
||||
let scheme = upstream.scheme();
|
||||
|
||||
match scheme {
|
||||
"http" | "https" => {
|
||||
// Connect via HTTP proxy CONNECT
|
||||
let proxy_host = upstream.host_str().unwrap_or("127.0.0.1");
|
||||
let proxy_port = upstream.port().unwrap_or(8080);
|
||||
let mut proxy_stream = TcpStream::connect((proxy_host, proxy_port)).await?;
|
||||
match scheme {
|
||||
"http" | "https" => {
|
||||
// Connect via HTTP/HTTPS proxy CONNECT
|
||||
// Note: HTTPS proxy URLs still use HTTP CONNECT method (CONNECT is always HTTP-based)
|
||||
// For HTTPS proxies, reqwest handles TLS automatically in handle_http
|
||||
// For manual CONNECT here, we use plain TCP - HTTPS proxy CONNECT typically works over plain TCP
|
||||
let proxy_host = upstream.host_str().unwrap_or("127.0.0.1");
|
||||
let proxy_port = upstream.port().unwrap_or(8080);
|
||||
let mut proxy_stream = TcpStream::connect((proxy_host, proxy_port)).await?;
|
||||
|
||||
// Add authentication if provided
|
||||
let mut connect_req = format!(
|
||||
"CONNECT {}:{} HTTP/1.1\r\nHost: {}:{}\r\n",
|
||||
target_host, target_port, target_host, target_port
|
||||
);
|
||||
// Add authentication if provided
|
||||
let mut connect_req = format!(
|
||||
"CONNECT {}:{} HTTP/1.1\r\nHost: {}:{}\r\n",
|
||||
target_host, target_port, target_host, target_port
|
||||
);
|
||||
|
||||
if !upstream.username().is_empty() {
|
||||
use base64::{engine::general_purpose, Engine as _};
|
||||
let username = upstream.username();
|
||||
let password = upstream.password().unwrap_or("");
|
||||
let auth = general_purpose::STANDARD.encode(format!("{}:{}", username, password));
|
||||
connect_req.push_str(&format!("Proxy-Authorization: Basic {}\r\n", auth));
|
||||
}
|
||||
|
||||
connect_req.push_str("\r\n");
|
||||
|
||||
// Send CONNECT request to upstream proxy
|
||||
proxy_stream.write_all(connect_req.as_bytes()).await?;
|
||||
|
||||
// Read response
|
||||
let mut buffer = [0u8; 4096];
|
||||
let n = proxy_stream.read(&mut buffer).await?;
|
||||
let response = String::from_utf8_lossy(&buffer[..n]);
|
||||
|
||||
if !response.starts_with("HTTP/1.1 200") && !response.starts_with("HTTP/1.0 200") {
|
||||
return Err(format!("Upstream proxy CONNECT failed: {}", response).into());
|
||||
}
|
||||
|
||||
proxy_stream
|
||||
}
|
||||
"socks4" | "socks5" => {
|
||||
// Connect via SOCKS proxy
|
||||
let socks_host = upstream.host_str().unwrap_or("127.0.0.1");
|
||||
let socks_port = upstream.port().unwrap_or(1080);
|
||||
let socks_addr = format!("{}:{}", socks_host, socks_port);
|
||||
|
||||
if !upstream.username().is_empty() {
|
||||
use base64::{engine::general_purpose, Engine as _};
|
||||
let username = upstream.username();
|
||||
let password = upstream.password().unwrap_or("");
|
||||
let auth = general_purpose::STANDARD.encode(format!("{}:{}", username, password));
|
||||
connect_req.push_str(&format!("Proxy-Authorization: Basic {}\r\n", auth));
|
||||
|
||||
connect_via_socks(
|
||||
&socks_addr,
|
||||
target_host,
|
||||
target_port,
|
||||
scheme == "socks5",
|
||||
if !username.is_empty() {
|
||||
Some((username, password))
|
||||
} else {
|
||||
None
|
||||
},
|
||||
)
|
||||
.await?
|
||||
}
|
||||
|
||||
connect_req.push_str("\r\n");
|
||||
|
||||
// Send CONNECT request to upstream proxy
|
||||
proxy_stream.write_all(connect_req.as_bytes()).await?;
|
||||
|
||||
// Read response
|
||||
let mut buffer = [0u8; 4096];
|
||||
let n = proxy_stream.read(&mut buffer).await?;
|
||||
let response = String::from_utf8_lossy(&buffer[..n]);
|
||||
|
||||
if !response.starts_with("HTTP/1.1 200") && !response.starts_with("HTTP/1.0 200") {
|
||||
return Err(format!("Upstream proxy CONNECT failed: {}", response).into());
|
||||
_ => {
|
||||
return Err(format!("Unsupported upstream proxy scheme: {}", scheme).into());
|
||||
}
|
||||
|
||||
proxy_stream
|
||||
}
|
||||
"socks4" | "socks5" => {
|
||||
// Connect via SOCKS proxy
|
||||
let socks_host = upstream.host_str().unwrap_or("127.0.0.1");
|
||||
let socks_port = upstream.port().unwrap_or(1080);
|
||||
let socks_addr = format!("{}:{}", socks_host, socks_port);
|
||||
|
||||
let username = upstream.username();
|
||||
let password = upstream.password().unwrap_or("");
|
||||
|
||||
connect_via_socks(
|
||||
&socks_addr,
|
||||
target_host,
|
||||
target_port,
|
||||
scheme == "socks5",
|
||||
if !username.is_empty() {
|
||||
Some((username, password))
|
||||
} else {
|
||||
None
|
||||
},
|
||||
)
|
||||
.await?
|
||||
}
|
||||
_ => {
|
||||
return Err(format!("Unsupported upstream proxy scheme: {}", scheme).into());
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
@@ -133,6 +133,6 @@ pub fn generate_proxy_id() -> String {
|
||||
|
||||
pub fn is_process_running(pid: u32) -> bool {
|
||||
use sysinfo::{Pid, System};
|
||||
let system = System::new_all();
|
||||
let system = System::new();
|
||||
system.process(Pid::from(pid as usize)).is_some()
|
||||
}
|
||||
|
||||
+458
-14
@@ -82,6 +82,9 @@ pub struct TrafficStats {
|
||||
pub session_start: u64,
|
||||
/// Last update timestamp
|
||||
pub last_update: u64,
|
||||
/// Timestamp of the last flush to disk (used to avoid double-counting session snapshots)
|
||||
#[serde(default)]
|
||||
pub last_flush_timestamp: u64,
|
||||
/// Total bytes sent across all time
|
||||
pub total_bytes_sent: u64,
|
||||
/// Total bytes received across all time
|
||||
@@ -110,6 +113,7 @@ impl TrafficStats {
|
||||
profile_id,
|
||||
session_start: now,
|
||||
last_update: now,
|
||||
last_flush_timestamp: 0,
|
||||
total_bytes_sent: 0,
|
||||
total_bytes_received: 0,
|
||||
total_requests: 0,
|
||||
@@ -175,6 +179,37 @@ impl TrafficStats {
|
||||
});
|
||||
}
|
||||
|
||||
/// Prune old data to prevent unbounded growth
|
||||
/// Keeps only the last 7 days of bandwidth history and domain access history
|
||||
pub fn prune_old_data(&mut self) {
|
||||
const RETENTION_SECONDS: u64 = 7 * 24 * 60 * 60; // 7 days
|
||||
let now = current_timestamp();
|
||||
let cutoff = now.saturating_sub(RETENTION_SECONDS);
|
||||
|
||||
// Prune bandwidth history
|
||||
self.bandwidth_history.retain(|dp| dp.timestamp >= cutoff);
|
||||
|
||||
// Prune domain access history
|
||||
self
|
||||
.domain_access_history
|
||||
.retain(|dp| dp.timestamp >= cutoff);
|
||||
|
||||
// Remove domains that haven't been accessed recently and have no recent history
|
||||
let recent_domains: std::collections::HashSet<String> = self
|
||||
.domain_access_history
|
||||
.iter()
|
||||
.filter(|dp| dp.timestamp >= cutoff)
|
||||
.map(|dp| dp.domain.clone())
|
||||
.collect();
|
||||
|
||||
// Keep domains that were accessed recently OR have high total traffic
|
||||
self.domains.retain(|domain, access| {
|
||||
recent_domains.contains(domain)
|
||||
|| access.last_access >= cutoff
|
||||
|| (access.bytes_sent + access.bytes_received) > 1_000_000 // Keep domains with >1MB traffic
|
||||
});
|
||||
}
|
||||
|
||||
/// Record a request to a domain
|
||||
pub fn record_request(&mut self, domain: &str, bytes_sent: u64, bytes_received: u64) {
|
||||
let now = current_timestamp();
|
||||
@@ -235,6 +270,63 @@ fn current_timestamp() -> u64 {
|
||||
.as_secs()
|
||||
}
|
||||
|
||||
/// File lock guard for preventing concurrent writes
|
||||
struct FileLockGuard {
|
||||
_file: std::fs::File,
|
||||
}
|
||||
|
||||
/// Acquire a file lock for exclusive access
|
||||
/// On Unix, uses flock; on Windows, uses file handles
|
||||
fn acquire_file_lock(lock_path: &PathBuf) -> Result<FileLockGuard, Box<dyn std::error::Error>> {
|
||||
use std::fs::OpenOptions;
|
||||
|
||||
let file = OpenOptions::new()
|
||||
.create(true)
|
||||
.write(true)
|
||||
.truncate(false)
|
||||
.open(lock_path)?;
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
use std::os::unix::io::AsRawFd;
|
||||
let fd = file.as_raw_fd();
|
||||
unsafe {
|
||||
if libc::flock(fd, libc::LOCK_EX | libc::LOCK_NB) != 0 {
|
||||
return Err("Failed to acquire file lock".into());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
{
|
||||
use std::os::windows::io::AsRawHandle;
|
||||
use windows::Win32::Foundation::HANDLE;
|
||||
use windows::Win32::Storage::FileSystem::LockFileEx;
|
||||
use windows::Win32::Storage::FileSystem::LOCKFILE_EXCLUSIVE_LOCK;
|
||||
use windows::Win32::Storage::FileSystem::LOCKFILE_FAIL_IMMEDIATELY;
|
||||
use windows::Win32::System::IO::OVERLAPPED;
|
||||
|
||||
let handle = HANDLE(file.as_raw_handle() as *mut core::ffi::c_void);
|
||||
unsafe {
|
||||
let mut overlapped: OVERLAPPED = std::mem::zeroed();
|
||||
if LockFileEx(
|
||||
handle,
|
||||
LOCKFILE_EXCLUSIVE_LOCK | LOCKFILE_FAIL_IMMEDIATELY,
|
||||
Some(0),
|
||||
u32::MAX,
|
||||
u32::MAX,
|
||||
&mut overlapped,
|
||||
)
|
||||
.is_err()
|
||||
{
|
||||
return Err("Failed to acquire file lock".into());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(FileLockGuard { _file: file })
|
||||
}
|
||||
|
||||
/// Get the traffic stats storage directory
|
||||
pub fn get_traffic_stats_dir() -> PathBuf {
|
||||
let base_dirs = BaseDirs::new().expect("Failed to get base directories");
|
||||
@@ -432,6 +524,17 @@ pub fn clear_all_traffic_stats() -> Result<(), Box<dyn std::error::Error>> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Lightweight session snapshot for real-time updates (written frequently, separate from full stats)
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
struct SessionSnapshot {
|
||||
proxy_id: String,
|
||||
profile_id: Option<String>,
|
||||
timestamp: u64,
|
||||
bytes_sent: u64,
|
||||
bytes_received: u64,
|
||||
requests: u64,
|
||||
}
|
||||
|
||||
/// Live bandwidth tracker for real-time stats collection in the proxy
|
||||
/// This is designed to be used from within the proxy server
|
||||
pub struct LiveTrafficTracker {
|
||||
@@ -444,6 +547,7 @@ pub struct LiveTrafficTracker {
|
||||
ips: RwLock<Vec<String>>,
|
||||
#[allow(dead_code)]
|
||||
session_start: u64,
|
||||
last_session_write: std::sync::atomic::AtomicU64,
|
||||
}
|
||||
|
||||
impl LiveTrafficTracker {
|
||||
@@ -457,9 +561,46 @@ impl LiveTrafficTracker {
|
||||
domain_stats: RwLock::new(HashMap::new()),
|
||||
ips: RwLock::new(Vec::new()),
|
||||
session_start: current_timestamp(),
|
||||
last_session_write: std::sync::atomic::AtomicU64::new(0),
|
||||
}
|
||||
}
|
||||
|
||||
/// Write a lightweight session snapshot for real-time updates
|
||||
/// This is much smaller than full stats and can be written frequently
|
||||
pub fn write_session_snapshot(&self) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let now = current_timestamp();
|
||||
let last_write = self.last_session_write.load(Ordering::Relaxed);
|
||||
|
||||
// Only write if at least 1 second has passed (avoid excessive writes)
|
||||
if now.saturating_sub(last_write) < 1 {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let snapshot = SessionSnapshot {
|
||||
proxy_id: self.proxy_id.clone(),
|
||||
profile_id: self.profile_id.clone(),
|
||||
timestamp: now,
|
||||
bytes_sent: self.bytes_sent.load(Ordering::Relaxed),
|
||||
bytes_received: self.bytes_received.load(Ordering::Relaxed),
|
||||
requests: self.requests.load(Ordering::Relaxed),
|
||||
};
|
||||
|
||||
let storage_key = self
|
||||
.profile_id
|
||||
.clone()
|
||||
.unwrap_or_else(|| self.proxy_id.clone());
|
||||
let session_file = get_traffic_stats_dir().join(format!("{}.session.json", storage_key));
|
||||
|
||||
// Write atomically using a temp file
|
||||
let temp_file = session_file.with_extension("tmp");
|
||||
let content = serde_json::to_string(&snapshot)?;
|
||||
fs::write(&temp_file, content)?;
|
||||
fs::rename(&temp_file, &session_file)?;
|
||||
|
||||
self.last_session_write.store(now, Ordering::Relaxed);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn add_bytes_sent(&self, bytes: u64) {
|
||||
self.bytes_sent.fetch_add(bytes, Ordering::Relaxed);
|
||||
}
|
||||
@@ -509,10 +650,120 @@ impl LiveTrafficTracker {
|
||||
)
|
||||
}
|
||||
|
||||
/// Create a real-time snapshot that merges in-memory data with disk-stored data
|
||||
/// This provides near real-time updates without waiting for disk flush
|
||||
pub fn to_realtime_snapshot(&self) -> TrafficSnapshot {
|
||||
let now = current_timestamp();
|
||||
let cutoff = now.saturating_sub(60); // Last 60 seconds for mini chart
|
||||
|
||||
// Get in-memory counters (not yet flushed to disk)
|
||||
let in_memory_sent = self.bytes_sent.load(Ordering::Relaxed);
|
||||
let in_memory_recv = self.bytes_received.load(Ordering::Relaxed);
|
||||
let in_memory_requests = self.requests.load(Ordering::Relaxed);
|
||||
|
||||
// Load disk-stored stats
|
||||
let storage_key = self
|
||||
.profile_id
|
||||
.clone()
|
||||
.unwrap_or_else(|| self.proxy_id.clone());
|
||||
let disk_stats = load_traffic_stats(&storage_key);
|
||||
|
||||
if let Some(stats) = disk_stats {
|
||||
// Merge in-memory data with disk data
|
||||
let total_sent = stats.total_bytes_sent + in_memory_sent;
|
||||
let total_recv = stats.total_bytes_received + in_memory_recv;
|
||||
let total_requests = stats.total_requests + in_memory_requests;
|
||||
|
||||
// Get current bandwidth from in-memory counters (most recent)
|
||||
// For the chart, we'll use disk data + current in-memory data point
|
||||
let mut recent_bandwidth = stats
|
||||
.bandwidth_history
|
||||
.iter()
|
||||
.filter(|dp| dp.timestamp >= cutoff)
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
// Add current second's data if we have in-memory traffic
|
||||
if in_memory_sent > 0 || in_memory_recv > 0 {
|
||||
// Check if we already have a data point for this second
|
||||
if let Some(last) = recent_bandwidth.last_mut() {
|
||||
if last.timestamp == now {
|
||||
last.bytes_sent += in_memory_sent;
|
||||
last.bytes_received += in_memory_recv;
|
||||
} else {
|
||||
recent_bandwidth.push(BandwidthDataPoint {
|
||||
timestamp: now,
|
||||
bytes_sent: in_memory_sent,
|
||||
bytes_received: in_memory_recv,
|
||||
});
|
||||
}
|
||||
} else {
|
||||
recent_bandwidth.push(BandwidthDataPoint {
|
||||
timestamp: now,
|
||||
bytes_sent: in_memory_sent,
|
||||
bytes_received: in_memory_recv,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
TrafficSnapshot {
|
||||
profile_id: self.profile_id.clone(),
|
||||
session_start: stats.session_start,
|
||||
last_update: now,
|
||||
total_bytes_sent: total_sent,
|
||||
total_bytes_received: total_recv,
|
||||
total_requests,
|
||||
current_bytes_sent: in_memory_sent,
|
||||
current_bytes_received: in_memory_recv,
|
||||
recent_bandwidth,
|
||||
}
|
||||
} else {
|
||||
// No disk data yet, use only in-memory data
|
||||
let recent_bandwidth = if in_memory_sent > 0 || in_memory_recv > 0 {
|
||||
vec![BandwidthDataPoint {
|
||||
timestamp: now,
|
||||
bytes_sent: in_memory_sent,
|
||||
bytes_received: in_memory_recv,
|
||||
}]
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
TrafficSnapshot {
|
||||
profile_id: self.profile_id.clone(),
|
||||
session_start: self.session_start,
|
||||
last_update: now,
|
||||
total_bytes_sent: in_memory_sent,
|
||||
total_bytes_received: in_memory_recv,
|
||||
total_requests: in_memory_requests,
|
||||
current_bytes_sent: in_memory_sent,
|
||||
current_bytes_received: in_memory_recv,
|
||||
recent_bandwidth,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Flush current stats to disk and return the delta
|
||||
pub fn flush_to_disk(&self) -> Result<(u64, u64), Box<dyn std::error::Error>> {
|
||||
let bytes_sent = self.bytes_sent.swap(0, Ordering::Relaxed);
|
||||
let bytes_received = self.bytes_received.swap(0, Ordering::Relaxed);
|
||||
/// Returns None if there's no new data to flush
|
||||
pub fn flush_to_disk(&self) -> Result<Option<(u64, u64)>, Box<dyn std::error::Error>> {
|
||||
let bytes_sent = self.bytes_sent.load(Ordering::Relaxed);
|
||||
let bytes_received = self.bytes_received.load(Ordering::Relaxed);
|
||||
|
||||
// Check if there's any new data to flush
|
||||
let has_domain_updates = {
|
||||
let domain_map = self.domain_stats.read().ok();
|
||||
domain_map.is_some_and(|dm| !dm.is_empty())
|
||||
};
|
||||
|
||||
let has_ip_updates = {
|
||||
let ips = self.ips.read().ok();
|
||||
ips.is_some_and(|i| !i.is_empty())
|
||||
};
|
||||
|
||||
// Only flush if there's meaningful new data (bytes or domain/IP updates)
|
||||
if bytes_sent == 0 && bytes_received == 0 && !has_domain_updates && !has_ip_updates {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
// Use profile_id as storage key if available, otherwise fall back to proxy_id
|
||||
let storage_key = self
|
||||
@@ -520,6 +771,19 @@ impl LiveTrafficTracker {
|
||||
.clone()
|
||||
.unwrap_or_else(|| self.proxy_id.clone());
|
||||
|
||||
// Use file locking to prevent concurrent writes from multiple proxy processes
|
||||
let lock_path = get_traffic_stats_dir().join(format!("{}.lock", storage_key));
|
||||
let _lock = match acquire_file_lock(&lock_path) {
|
||||
Ok(lock) => lock,
|
||||
Err(e) => {
|
||||
// If lock acquisition fails, reset counters to prevent indefinite accumulation
|
||||
// The data will be lost, but this prevents memory growth
|
||||
let _ = self.bytes_sent.swap(0, Ordering::Relaxed);
|
||||
let _ = self.bytes_received.swap(0, Ordering::Relaxed);
|
||||
return Err(e);
|
||||
}
|
||||
};
|
||||
|
||||
// Load or create stats using the storage key
|
||||
let mut stats = load_traffic_stats(&storage_key)
|
||||
.unwrap_or_else(|| TrafficStats::new(self.proxy_id.clone(), self.profile_id.clone()));
|
||||
@@ -532,8 +796,25 @@ impl LiveTrafficTracker {
|
||||
// Update the proxy_id to current session (for debugging/tracking)
|
||||
stats.proxy_id = self.proxy_id.clone();
|
||||
|
||||
// Prune old data before adding new data to keep file size manageable
|
||||
stats.prune_old_data();
|
||||
|
||||
// Update flush timestamp BEFORE reading/resetting counters
|
||||
// This prevents double-counting session snapshots written after this timestamp
|
||||
// If we set it after reading counters, a session snapshot written just before
|
||||
// the flush completes could have a timestamp newer than last_flush_timestamp,
|
||||
// causing its data to be added even though it was already included in the flush
|
||||
let now = current_timestamp();
|
||||
stats.last_flush_timestamp = now;
|
||||
stats.last_update = now;
|
||||
|
||||
// Reset counters after reading (lock is held, so flush will proceed)
|
||||
let sent = self.bytes_sent.swap(0, Ordering::Relaxed);
|
||||
let received = self.bytes_received.swap(0, Ordering::Relaxed);
|
||||
let _requests = self.requests.swap(0, Ordering::Relaxed);
|
||||
|
||||
// Update bandwidth history
|
||||
stats.record_bandwidth(bytes_sent, bytes_received);
|
||||
stats.record_bandwidth(sent, received);
|
||||
|
||||
// Update domain stats
|
||||
if let Ok(mut domain_map) = self.domain_stats.write() {
|
||||
@@ -544,17 +825,17 @@ impl LiveTrafficTracker {
|
||||
}
|
||||
}
|
||||
|
||||
// Update IPs
|
||||
if let Ok(ips) = self.ips.read() {
|
||||
for ip in ips.iter() {
|
||||
stats.record_ip(ip);
|
||||
// Update IPs and clear them after flushing (like domain_stats)
|
||||
if let Ok(mut ips) = self.ips.write() {
|
||||
for ip in ips.drain(..) {
|
||||
stats.record_ip(&ip);
|
||||
}
|
||||
}
|
||||
|
||||
// Save to disk
|
||||
// Save to disk (lock is still held)
|
||||
save_traffic_stats(&stats)?;
|
||||
|
||||
Ok((bytes_sent, bytes_received))
|
||||
Ok(Some((sent, received)))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -601,11 +882,36 @@ pub struct FilteredTrafficStats {
|
||||
|
||||
/// Get traffic stats for a profile, filtered to a specific time period
|
||||
/// seconds: number of seconds to include (0 = all time)
|
||||
/// Merges in-memory data with disk data for real-time updates
|
||||
pub fn get_traffic_stats_for_period(
|
||||
profile_id: &str,
|
||||
seconds: u64,
|
||||
) -> Option<FilteredTrafficStats> {
|
||||
let stats = load_traffic_stats(profile_id)?;
|
||||
// Get in-memory data if available
|
||||
let in_memory_sent = get_traffic_tracker()
|
||||
.and_then(|t| {
|
||||
if t.profile_id.as_deref() == Some(profile_id) {
|
||||
Some(t.bytes_sent.load(Ordering::Relaxed))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.unwrap_or(0);
|
||||
let in_memory_recv = get_traffic_tracker()
|
||||
.and_then(|t| {
|
||||
if t.profile_id.as_deref() == Some(profile_id) {
|
||||
Some(t.bytes_received.load(Ordering::Relaxed))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.unwrap_or(0);
|
||||
|
||||
let mut stats = load_traffic_stats(profile_id)?;
|
||||
|
||||
// Merge in-memory counters with disk data for real-time totals
|
||||
stats.total_bytes_sent += in_memory_sent;
|
||||
stats.total_bytes_received += in_memory_recv;
|
||||
|
||||
let now = current_timestamp();
|
||||
let cutoff = if seconds == 0 {
|
||||
@@ -615,14 +921,39 @@ pub fn get_traffic_stats_for_period(
|
||||
};
|
||||
|
||||
// Filter bandwidth history to requested period
|
||||
let filtered_history: Vec<BandwidthDataPoint> = stats
|
||||
let mut filtered_history: Vec<BandwidthDataPoint> = stats
|
||||
.bandwidth_history
|
||||
.iter()
|
||||
.filter(|dp| dp.timestamp >= cutoff)
|
||||
.cloned()
|
||||
.collect();
|
||||
|
||||
// Calculate period totals for bandwidth
|
||||
// Add current in-memory data point for real-time display
|
||||
if (seconds == 0 || now.saturating_sub(seconds) <= now)
|
||||
&& (in_memory_sent > 0 || in_memory_recv > 0)
|
||||
{
|
||||
// Check if we already have a data point for this second
|
||||
if let Some(last) = filtered_history.last_mut() {
|
||||
if last.timestamp == now {
|
||||
last.bytes_sent += in_memory_sent;
|
||||
last.bytes_received += in_memory_recv;
|
||||
} else {
|
||||
filtered_history.push(BandwidthDataPoint {
|
||||
timestamp: now,
|
||||
bytes_sent: in_memory_sent,
|
||||
bytes_received: in_memory_recv,
|
||||
});
|
||||
}
|
||||
} else {
|
||||
filtered_history.push(BandwidthDataPoint {
|
||||
timestamp: now,
|
||||
bytes_sent: in_memory_sent,
|
||||
bytes_received: in_memory_recv,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate period totals for bandwidth (includes in-memory data)
|
||||
let period_bytes_sent: u64 = filtered_history.iter().map(|dp| dp.bytes_sent).sum();
|
||||
let period_bytes_received: u64 = filtered_history.iter().map(|dp| dp.bytes_received).sum();
|
||||
|
||||
@@ -664,7 +995,7 @@ pub fn get_traffic_stats_for_period(
|
||||
Some(FilteredTrafficStats {
|
||||
profile_id: stats.profile_id,
|
||||
session_start: stats.session_start,
|
||||
last_update: stats.last_update,
|
||||
last_update: now, // Use current time for real-time updates
|
||||
total_bytes_sent: stats.total_bytes_sent,
|
||||
total_bytes_received: stats.total_bytes_received,
|
||||
total_requests: stats.total_requests,
|
||||
@@ -678,11 +1009,124 @@ pub fn get_traffic_stats_for_period(
|
||||
}
|
||||
|
||||
/// Get lightweight traffic snapshot for a profile (for mini charts, only recent 60 seconds)
|
||||
/// Merges in-memory data with disk data for real-time updates
|
||||
pub fn get_traffic_snapshot_for_profile(profile_id: &str) -> Option<TrafficSnapshot> {
|
||||
// First try to get real-time data from active tracker
|
||||
if let Some(tracker) = get_traffic_tracker() {
|
||||
let tracker_profile_id = tracker.profile_id.as_deref();
|
||||
if tracker_profile_id == Some(profile_id) {
|
||||
return Some(tracker.to_realtime_snapshot());
|
||||
}
|
||||
}
|
||||
|
||||
// Fall back to disk data
|
||||
let stats = load_traffic_stats(profile_id)?;
|
||||
Some(stats.to_snapshot())
|
||||
}
|
||||
|
||||
/// Load session snapshot from disk (written by proxy worker processes)
|
||||
fn load_session_snapshot(profile_id: &str) -> Option<SessionSnapshot> {
|
||||
let session_file = get_traffic_stats_dir().join(format!("{}.session.json", profile_id));
|
||||
if !session_file.exists() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let content = fs::read_to_string(&session_file).ok()?;
|
||||
serde_json::from_str::<SessionSnapshot>(&content).ok()
|
||||
}
|
||||
|
||||
/// Get all traffic snapshots with real-time data merged
|
||||
/// This provides near real-time updates by merging session snapshots with disk data
|
||||
pub fn get_all_traffic_snapshots_realtime() -> Vec<TrafficSnapshot> {
|
||||
use std::collections::HashMap;
|
||||
|
||||
// Start with disk-stored stats
|
||||
let mut snapshots: HashMap<String, TrafficSnapshot> = list_traffic_stats()
|
||||
.into_iter()
|
||||
.map(|s| {
|
||||
let key = s.profile_id.clone().unwrap_or_else(|| s.proxy_id.clone());
|
||||
(key, s.to_snapshot())
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Try to merge in real-time data from active tracker (if in same process)
|
||||
if let Some(tracker) = get_traffic_tracker() {
|
||||
let key = tracker
|
||||
.profile_id
|
||||
.clone()
|
||||
.unwrap_or_else(|| tracker.proxy_id.clone());
|
||||
let realtime_snapshot = tracker.to_realtime_snapshot();
|
||||
snapshots.insert(key, realtime_snapshot);
|
||||
}
|
||||
|
||||
// Also merge session snapshots from proxy worker processes
|
||||
let storage_dir = get_traffic_stats_dir();
|
||||
if let Ok(entries) = fs::read_dir(&storage_dir) {
|
||||
for entry in entries.flatten() {
|
||||
let path = entry.path();
|
||||
if let Some(file_name) = path.file_name().and_then(|n| n.to_str()) {
|
||||
if file_name.ends_with(".session.json") {
|
||||
if let Some(profile_id) = file_name.strip_suffix(".session.json") {
|
||||
if let Some(session) = load_session_snapshot(profile_id) {
|
||||
// Merge session data with disk snapshot
|
||||
if let Some(snapshot) = snapshots.get_mut(profile_id) {
|
||||
// Only merge session data if it's newer than the last flush
|
||||
// Session snapshots written before the last flush contain bytes already
|
||||
// included in disk totals, so merging them would cause double-counting
|
||||
let disk_stats = load_traffic_stats(profile_id);
|
||||
let last_flush = disk_stats
|
||||
.as_ref()
|
||||
.map(|s| s.last_flush_timestamp)
|
||||
.unwrap_or(0);
|
||||
|
||||
if session.timestamp > last_flush {
|
||||
// Session data contains in-memory counters not yet flushed to disk
|
||||
// Disk snapshot contains cumulative totals already flushed
|
||||
// We need to ADD them, not take the max, to get the true total
|
||||
snapshot.total_bytes_sent =
|
||||
snapshot.total_bytes_sent.saturating_add(session.bytes_sent);
|
||||
snapshot.total_bytes_received = snapshot
|
||||
.total_bytes_received
|
||||
.saturating_add(session.bytes_received);
|
||||
snapshot.total_requests =
|
||||
snapshot.total_requests.saturating_add(session.requests);
|
||||
snapshot.current_bytes_sent = session.bytes_sent;
|
||||
snapshot.current_bytes_received = session.bytes_received;
|
||||
snapshot.last_update = session.timestamp;
|
||||
} else {
|
||||
// Session snapshot is stale (written before last flush)
|
||||
// Use current values from disk snapshot, but update timestamp if session is newer
|
||||
if session.timestamp > snapshot.last_update {
|
||||
snapshot.last_update = session.timestamp;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Create new snapshot from session data
|
||||
snapshots.insert(
|
||||
profile_id.to_string(),
|
||||
TrafficSnapshot {
|
||||
profile_id: session.profile_id,
|
||||
session_start: current_timestamp().saturating_sub(60),
|
||||
last_update: session.timestamp,
|
||||
total_bytes_sent: session.bytes_sent,
|
||||
total_bytes_received: session.bytes_received,
|
||||
total_requests: session.requests,
|
||||
current_bytes_sent: session.bytes_sent,
|
||||
current_bytes_received: session.bytes_received,
|
||||
recent_bandwidth: vec![],
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
snapshots.into_values().collect()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
@@ -32,7 +32,7 @@ pub struct BackgroundUpdateResult {
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
struct BackgroundUpdateState {
|
||||
pub(crate) struct BackgroundUpdateState {
|
||||
last_update_time: u64,
|
||||
update_interval_hours: u64,
|
||||
}
|
||||
@@ -78,12 +78,12 @@ impl VersionUpdater {
|
||||
Ok(cache_dir)
|
||||
}
|
||||
|
||||
fn get_background_update_state_file() -> Result<PathBuf, Box<dyn std::error::Error>> {
|
||||
pub(crate) fn get_background_update_state_file() -> Result<PathBuf, Box<dyn std::error::Error>> {
|
||||
let cache_dir = Self::get_cache_dir()?;
|
||||
Ok(cache_dir.join("background_update_state.json"))
|
||||
}
|
||||
|
||||
fn load_background_update_state() -> BackgroundUpdateState {
|
||||
pub(crate) fn load_background_update_state() -> BackgroundUpdateState {
|
||||
let state_file = match Self::get_background_update_state_file() {
|
||||
Ok(file) => file,
|
||||
Err(_) => return BackgroundUpdateState::default(),
|
||||
@@ -101,7 +101,7 @@ impl VersionUpdater {
|
||||
serde_json::from_str(&content).unwrap_or_default()
|
||||
}
|
||||
|
||||
fn save_background_update_state(
|
||||
pub(crate) fn save_background_update_state(
|
||||
state: &BackgroundUpdateState,
|
||||
) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let state_file = Self::get_background_update_state_file()?;
|
||||
@@ -516,50 +516,31 @@ pub async fn clear_all_version_cache_and_refetch(
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use serial_test::serial;
|
||||
use std::env;
|
||||
use tempfile::TempDir;
|
||||
|
||||
// Helper function to create a unique test state file
|
||||
fn get_test_state_file(test_name: &str) -> PathBuf {
|
||||
let cache_dir = VersionUpdater::get_cache_dir().unwrap();
|
||||
cache_dir.join(format!("test_{test_name}_state.json"))
|
||||
fn setup_test_env() -> TempDir {
|
||||
let temp_dir = TempDir::new().expect("Failed to create temp directory");
|
||||
env::set_var("HOME", temp_dir.path());
|
||||
temp_dir
|
||||
}
|
||||
|
||||
fn save_test_state(
|
||||
test_name: &str,
|
||||
state: &BackgroundUpdateState,
|
||||
) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let state_file = get_test_state_file(test_name);
|
||||
let content = serde_json::to_string_pretty(state)?;
|
||||
fs::write(&state_file, content)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn load_test_state(test_name: &str) -> BackgroundUpdateState {
|
||||
let state_file = get_test_state_file(test_name);
|
||||
|
||||
if !state_file.exists() {
|
||||
return BackgroundUpdateState::default();
|
||||
}
|
||||
|
||||
let content = match fs::read_to_string(&state_file) {
|
||||
Ok(content) => content,
|
||||
Err(_) => return BackgroundUpdateState::default(),
|
||||
};
|
||||
|
||||
match serde_json::from_str(&content) {
|
||||
Ok(state) => state,
|
||||
Err(e) => {
|
||||
eprintln!("Failed to parse test state file {:?}: {}", state_file, e);
|
||||
BackgroundUpdateState::default()
|
||||
}
|
||||
fn cleanup_state_file() {
|
||||
if let Ok(state_file) = VersionUpdater::get_background_update_state_file() {
|
||||
let _ = fs::remove_file(&state_file);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[serial]
|
||||
fn test_background_update_state_persistence() {
|
||||
let test_name = "persistence";
|
||||
let _temp_dir = setup_test_env();
|
||||
|
||||
// Clean up any existing test file first
|
||||
let _ = fs::remove_file(get_test_state_file(test_name));
|
||||
// Clean up any existing state file first
|
||||
if let Ok(state_file) = VersionUpdater::get_background_update_state_file() {
|
||||
let _ = fs::remove_file(&state_file);
|
||||
}
|
||||
|
||||
// Create a test state
|
||||
let test_state = BackgroundUpdateState {
|
||||
@@ -568,33 +549,55 @@ mod tests {
|
||||
};
|
||||
|
||||
// Save the state
|
||||
save_test_state(test_name, &test_state).unwrap();
|
||||
let save_result = VersionUpdater::save_background_update_state(&test_state);
|
||||
assert!(save_result.is_ok(), "Should save state successfully");
|
||||
|
||||
// Verify file was created
|
||||
let state_file = get_test_state_file(test_name);
|
||||
let state_file = VersionUpdater::get_background_update_state_file().unwrap();
|
||||
assert!(state_file.exists(), "State file should exist after saving");
|
||||
|
||||
// Load the state back
|
||||
let loaded_state = load_test_state(test_name);
|
||||
// Read the file directly to verify contents
|
||||
let file_content = fs::read_to_string(&state_file).expect("Should read state file");
|
||||
let file_state: BackgroundUpdateState =
|
||||
serde_json::from_str(&file_content).expect("Should parse state file");
|
||||
|
||||
// Verify the file contents match what we saved
|
||||
assert_eq!(
|
||||
file_state.last_update_time, test_state.last_update_time,
|
||||
"File last_update_time should match. Expected: {}, Got: {}",
|
||||
test_state.last_update_time, file_state.last_update_time
|
||||
);
|
||||
assert_eq!(
|
||||
file_state.update_interval_hours, test_state.update_interval_hours,
|
||||
"File update_interval_hours should match"
|
||||
);
|
||||
|
||||
// Load the state back using the method
|
||||
let loaded_state = VersionUpdater::load_background_update_state();
|
||||
|
||||
// Verify the values match
|
||||
assert_eq!(
|
||||
loaded_state.last_update_time, test_state.last_update_time,
|
||||
"last_update_time should match. Expected: {}, Got: {}",
|
||||
"Loaded last_update_time should match. Expected: {}, Got: {}",
|
||||
test_state.last_update_time, loaded_state.last_update_time
|
||||
);
|
||||
assert_eq!(
|
||||
loaded_state.update_interval_hours, test_state.update_interval_hours,
|
||||
"update_interval_hours should match"
|
||||
"Loaded update_interval_hours should match"
|
||||
);
|
||||
|
||||
// Clean up
|
||||
let _ = fs::remove_file(get_test_state_file(test_name));
|
||||
cleanup_state_file();
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[serial]
|
||||
fn test_should_run_background_update_logic() {
|
||||
// Create isolated test states to avoid interference
|
||||
let _temp_dir = setup_test_env();
|
||||
|
||||
// Clean up any existing state file first
|
||||
cleanup_state_file();
|
||||
|
||||
let current_time = VersionUpdater::get_current_timestamp();
|
||||
|
||||
// Test with recent update (should not update)
|
||||
@@ -643,6 +646,9 @@ mod tests {
|
||||
should_update_never,
|
||||
"Should update when never updated before"
|
||||
);
|
||||
|
||||
// Clean up
|
||||
cleanup_state_file();
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"$schema": "https://schema.tauri.app/config/2",
|
||||
"productName": "Donut",
|
||||
"version": "0.13.3",
|
||||
"version": "0.13.8",
|
||||
"identifier": "com.donutbrowser",
|
||||
"build": {
|
||||
"beforeDevCommand": "pnpm copy-proxy-binary && pnpm dev",
|
||||
|
||||
@@ -868,8 +868,12 @@ export function ProfilesDataTable({
|
||||
);
|
||||
|
||||
// Fetch traffic snapshots for running profiles (lightweight, real-time data)
|
||||
// Using runningProfiles.size as dependency to avoid Set reference comparison issues
|
||||
const runningCount = runningProfiles.size;
|
||||
// Convert Set to sorted array to avoid Set reference comparison issues in dependencies
|
||||
const runningProfileIds = React.useMemo(
|
||||
() => Array.from(runningProfiles).sort(),
|
||||
[runningProfiles],
|
||||
);
|
||||
const runningCount = runningProfileIds.length;
|
||||
React.useEffect(() => {
|
||||
if (!browserState.isClient) return;
|
||||
|
||||
@@ -886,9 +890,12 @@ export function ProfilesDataTable({
|
||||
const newSnapshots: Record<string, TrafficSnapshot> = {};
|
||||
for (const snapshot of allSnapshots) {
|
||||
if (snapshot.profile_id) {
|
||||
const existing = newSnapshots[snapshot.profile_id];
|
||||
if (!existing || snapshot.last_update > existing.last_update) {
|
||||
newSnapshots[snapshot.profile_id] = snapshot;
|
||||
// Only keep snapshots for profiles that are currently running
|
||||
if (runningProfileIds.includes(snapshot.profile_id)) {
|
||||
const existing = newSnapshots[snapshot.profile_id];
|
||||
if (!existing || snapshot.last_update > existing.last_update) {
|
||||
newSnapshots[snapshot.profile_id] = snapshot;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -901,7 +908,27 @@ export function ProfilesDataTable({
|
||||
void fetchTrafficSnapshots();
|
||||
const interval = setInterval(fetchTrafficSnapshots, 1000);
|
||||
return () => clearInterval(interval);
|
||||
}, [browserState.isClient, runningCount]);
|
||||
}, [browserState.isClient, runningCount, runningProfileIds]);
|
||||
|
||||
// Clean up snapshots for profiles that are no longer running
|
||||
React.useEffect(() => {
|
||||
if (!browserState.isClient) return;
|
||||
|
||||
setTrafficSnapshots((prev) => {
|
||||
const cleaned: Record<string, TrafficSnapshot> = {};
|
||||
for (const [profileId, snapshot] of Object.entries(prev)) {
|
||||
// Only keep snapshots for profiles that are currently running
|
||||
if (runningProfileIds.includes(profileId)) {
|
||||
cleaned[profileId] = snapshot;
|
||||
}
|
||||
}
|
||||
// Only update if something was removed
|
||||
if (Object.keys(cleaned).length !== Object.keys(prev).length) {
|
||||
return cleaned;
|
||||
}
|
||||
return prev;
|
||||
});
|
||||
}, [browserState.isClient, runningProfileIds]);
|
||||
|
||||
// Clear launching/stopping spinners when backend reports running status changes
|
||||
React.useEffect(() => {
|
||||
@@ -1692,6 +1719,7 @@ export function ProfilesDataTable({
|
||||
if (isRunning && meta.trafficSnapshots) {
|
||||
// Find the traffic snapshot for this profile by matching profile_id
|
||||
const snapshot = meta.trafficSnapshots[profile.id];
|
||||
// Only use recent_bandwidth (last 60 seconds) - minimal data needed for mini chart
|
||||
// Create a new array reference to ensure React detects changes
|
||||
const bandwidthData = snapshot?.recent_bandwidth
|
||||
? [...snapshot.recent_bandwidth]
|
||||
|
||||
@@ -171,7 +171,11 @@ export function TrafficDetailsDialog({
|
||||
void fetchStats();
|
||||
const interval = setInterval(fetchStats, 2000);
|
||||
|
||||
return () => clearInterval(interval);
|
||||
return () => {
|
||||
clearInterval(interval);
|
||||
// Clear stats from memory when dialog closes to free up memory
|
||||
setStats(null);
|
||||
};
|
||||
}, [isOpen, profileId, timePeriod]);
|
||||
|
||||
// Transform data for chart (already filtered by backend)
|
||||
|
||||
@@ -0,0 +1,55 @@
|
||||
"use client";
|
||||
|
||||
import {
|
||||
type HTMLMotionProps,
|
||||
type LegacyAnimationControls,
|
||||
motion,
|
||||
type TargetAndTransition,
|
||||
type Transition,
|
||||
} from "motion/react";
|
||||
import type * as React from "react";
|
||||
|
||||
import { useAutoHeight } from "@/hooks/use-auto-height";
|
||||
import { Slot, type WithAsChild } from "@/lib/slot";
|
||||
|
||||
type AutoHeightProps = WithAsChild<
|
||||
{
|
||||
children: React.ReactNode;
|
||||
deps?: React.DependencyList;
|
||||
animate?: TargetAndTransition | LegacyAnimationControls;
|
||||
transition?: Transition;
|
||||
} & Omit<HTMLMotionProps<"div">, "animate">
|
||||
>;
|
||||
|
||||
function AutoHeight({
|
||||
children,
|
||||
deps = [],
|
||||
transition = {
|
||||
type: "spring",
|
||||
stiffness: 300,
|
||||
damping: 30,
|
||||
bounce: 0,
|
||||
restDelta: 0.01,
|
||||
},
|
||||
style,
|
||||
animate,
|
||||
asChild = false,
|
||||
...props
|
||||
}: AutoHeightProps) {
|
||||
const { ref, height } = useAutoHeight<HTMLDivElement>(deps);
|
||||
|
||||
const Comp = asChild ? Slot : motion.div;
|
||||
|
||||
return (
|
||||
<Comp
|
||||
style={{ overflow: "hidden", ...style }}
|
||||
animate={{ height, ...animate }}
|
||||
transition={transition}
|
||||
{...props}
|
||||
>
|
||||
<div ref={ref}>{children}</div>
|
||||
</Comp>
|
||||
);
|
||||
}
|
||||
|
||||
export { AutoHeight, type AutoHeightProps };
|
||||
+138
-39
@@ -1,86 +1,185 @@
|
||||
"use client";
|
||||
|
||||
import * as DialogPrimitive from "@radix-ui/react-dialog";
|
||||
import { AnimatePresence, type HTMLMotionProps, motion } from "motion/react";
|
||||
import { Dialog as DialogPrimitive } from "radix-ui";
|
||||
import type * as React from "react";
|
||||
import { RxCross2 } from "react-icons/rx";
|
||||
|
||||
import { useControlledState } from "@/hooks/use-controlled-state";
|
||||
import { getStrictContext } from "@/lib/get-strict-context";
|
||||
import { cn } from "@/lib/utils";
|
||||
import { WindowDragArea } from "../window-drag-area";
|
||||
|
||||
function Dialog({
|
||||
...props
|
||||
}: React.ComponentProps<typeof DialogPrimitive.Root>) {
|
||||
return <DialogPrimitive.Root data-slot="dialog" {...props} />;
|
||||
type DialogContextType = {
|
||||
isOpen: boolean;
|
||||
setIsOpen: DialogProps["onOpenChange"];
|
||||
};
|
||||
|
||||
const [DialogProvider, useDialog] =
|
||||
getStrictContext<DialogContextType>("DialogContext");
|
||||
|
||||
type DialogProps = React.ComponentProps<typeof DialogPrimitive.Root>;
|
||||
|
||||
function Dialog(props: DialogProps) {
|
||||
const [isOpen, setIsOpen] = useControlledState({
|
||||
value: props?.open,
|
||||
defaultValue: props?.defaultOpen,
|
||||
onChange: props?.onOpenChange,
|
||||
});
|
||||
|
||||
return (
|
||||
<DialogProvider value={{ isOpen, setIsOpen }}>
|
||||
<DialogPrimitive.Root
|
||||
data-slot="dialog"
|
||||
{...props}
|
||||
onOpenChange={setIsOpen}
|
||||
/>
|
||||
</DialogProvider>
|
||||
);
|
||||
}
|
||||
|
||||
function DialogTrigger({
|
||||
...props
|
||||
}: React.ComponentProps<typeof DialogPrimitive.Trigger>) {
|
||||
type DialogTriggerProps = React.ComponentProps<typeof DialogPrimitive.Trigger>;
|
||||
|
||||
function DialogTrigger(props: DialogTriggerProps) {
|
||||
return <DialogPrimitive.Trigger data-slot="dialog-trigger" {...props} />;
|
||||
}
|
||||
|
||||
function DialogPortal({
|
||||
...props
|
||||
}: React.ComponentProps<typeof DialogPrimitive.Portal>) {
|
||||
return <DialogPrimitive.Portal data-slot="dialog-portal" {...props} />;
|
||||
type DialogPortalProps = Omit<
|
||||
React.ComponentProps<typeof DialogPrimitive.Portal>,
|
||||
"forceMount"
|
||||
>;
|
||||
|
||||
function DialogPortal(props: DialogPortalProps) {
|
||||
const { isOpen } = useDialog();
|
||||
|
||||
return (
|
||||
<AnimatePresence>
|
||||
{isOpen && (
|
||||
<DialogPrimitive.Portal
|
||||
data-slot="dialog-portal"
|
||||
forceMount
|
||||
{...props}
|
||||
/>
|
||||
)}
|
||||
</AnimatePresence>
|
||||
);
|
||||
}
|
||||
|
||||
function DialogClose({
|
||||
...props
|
||||
}: React.ComponentProps<typeof DialogPrimitive.Close>) {
|
||||
return <DialogPrimitive.Close data-slot="dialog-close" {...props} />;
|
||||
}
|
||||
type DialogOverlayProps = Omit<
|
||||
React.ComponentProps<typeof DialogPrimitive.Overlay>,
|
||||
"forceMount" | "asChild"
|
||||
> &
|
||||
HTMLMotionProps<"div">;
|
||||
|
||||
function DialogOverlay({
|
||||
className,
|
||||
transition = { duration: 0.2, ease: "easeInOut" },
|
||||
...props
|
||||
}: React.ComponentProps<typeof DialogPrimitive.Overlay>) {
|
||||
}: DialogOverlayProps) {
|
||||
return (
|
||||
<DialogPrimitive.Overlay
|
||||
data-slot="dialog-overlay"
|
||||
className={cn(
|
||||
"data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 fixed inset-0 z-[9999] bg-background/50",
|
||||
className,
|
||||
)}
|
||||
{...props}
|
||||
>
|
||||
<WindowDragArea />
|
||||
<DialogPrimitive.Overlay data-slot="dialog-overlay" asChild forceMount>
|
||||
<motion.div
|
||||
key="dialog-overlay"
|
||||
initial={{ opacity: 0, filter: "blur(4px)" }}
|
||||
animate={{ opacity: 1, filter: "blur(0px)" }}
|
||||
exit={{ opacity: 0, filter: "blur(4px)" }}
|
||||
transition={transition}
|
||||
className={cn("fixed inset-0 z-9999 bg-background/50", className)}
|
||||
{...props}
|
||||
>
|
||||
<WindowDragArea />
|
||||
</motion.div>
|
||||
</DialogPrimitive.Overlay>
|
||||
);
|
||||
}
|
||||
|
||||
type DialogFlipDirection = "top" | "bottom" | "left" | "right";
|
||||
|
||||
type DialogContentProps = Omit<
|
||||
React.ComponentProps<typeof DialogPrimitive.Content>,
|
||||
"forceMount" | "asChild"
|
||||
> &
|
||||
HTMLMotionProps<"div"> & {
|
||||
from?: DialogFlipDirection;
|
||||
};
|
||||
|
||||
function DialogContent({
|
||||
className,
|
||||
children,
|
||||
from = "top",
|
||||
onOpenAutoFocus,
|
||||
onCloseAutoFocus,
|
||||
onEscapeKeyDown,
|
||||
onPointerDownOutside,
|
||||
onInteractOutside,
|
||||
transition = { type: "spring", stiffness: 150, damping: 25 },
|
||||
...props
|
||||
}: React.ComponentProps<typeof DialogPrimitive.Content>) {
|
||||
}: DialogContentProps) {
|
||||
const initialRotation =
|
||||
from === "bottom" || from === "left" ? "20deg" : "-20deg";
|
||||
const isVertical = from === "top" || from === "bottom";
|
||||
const rotateAxis = isVertical ? "rotateX" : "rotateY";
|
||||
|
||||
return (
|
||||
<DialogPortal data-slot="dialog-portal">
|
||||
<DialogOverlay />
|
||||
<DialogPrimitive.Content
|
||||
data-slot="dialog-content"
|
||||
className={cn(
|
||||
"bg-background data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 fixed top-[50%] left-[50%] z-[10000] grid w-full max-w-[calc(100%-2rem)] translate-x-[-50%] translate-y-[-50%] gap-4 rounded-lg border p-6 shadow-lg duration-200 sm:max-w-lg",
|
||||
className,
|
||||
)}
|
||||
asChild
|
||||
forceMount
|
||||
onOpenAutoFocus={onOpenAutoFocus}
|
||||
onCloseAutoFocus={onCloseAutoFocus}
|
||||
onEscapeKeyDown={onEscapeKeyDown}
|
||||
onPointerDownOutside={onPointerDownOutside}
|
||||
onInteractOutside={(event) => {
|
||||
const target = event.target as HTMLElement | null;
|
||||
if (target?.closest('[data-window-drag-area="true"]')) {
|
||||
event.preventDefault();
|
||||
}
|
||||
onInteractOutside?.(event);
|
||||
}}
|
||||
{...props}
|
||||
>
|
||||
{children}
|
||||
<DialogPrimitive.Close className="cursor-pointer ring-offset-background focus:ring-ring data-[state=open]:bg-accent data-[state=open]:text-muted-foreground absolute top-4 right-4 rounded-xs opacity-70 transition-opacity hover:opacity-100 focus:ring-2 focus:ring-offset-2 focus:outline-hidden disabled:pointer-events-none [&_svg]:pointer-events-none [&_svg]:shrink-0 [&_svg:not([class*='size-'])]:size-4">
|
||||
<RxCross2 />
|
||||
<span className="sr-only">Close</span>
|
||||
</DialogPrimitive.Close>
|
||||
<motion.div
|
||||
key="dialog-content"
|
||||
data-slot="dialog-content"
|
||||
initial={{
|
||||
opacity: 0,
|
||||
filter: "blur(4px)",
|
||||
transform: `perspective(500px) ${rotateAxis}(${initialRotation}) scale(0.8)`,
|
||||
}}
|
||||
animate={{
|
||||
opacity: 1,
|
||||
filter: "blur(0px)",
|
||||
transform: `perspective(500px) ${rotateAxis}(0deg) scale(1)`,
|
||||
}}
|
||||
exit={{
|
||||
opacity: 0,
|
||||
filter: "blur(4px)",
|
||||
transform: `perspective(500px) ${rotateAxis}(${initialRotation}) scale(0.8)`,
|
||||
}}
|
||||
transition={transition}
|
||||
className={cn(
|
||||
"bg-background fixed top-[50%] left-[50%] z-10000 grid w-full max-w-[calc(100%-2rem)] translate-x-[-50%] translate-y-[-50%] gap-4 rounded-lg border p-6 shadow-lg sm:max-w-lg",
|
||||
className,
|
||||
)}
|
||||
{...props}
|
||||
>
|
||||
{children}
|
||||
<DialogPrimitive.Close className="cursor-pointer ring-offset-background focus:ring-ring data-[state=open]:bg-accent data-[state=open]:text-muted-foreground absolute top-4 right-4 rounded-xs opacity-70 transition-opacity hover:opacity-100 focus:ring-2 focus:ring-offset-2 focus:outline-hidden disabled:pointer-events-none [&_svg]:pointer-events-none [&_svg]:shrink-0 [&_svg:not([class*='size-'])]:size-4">
|
||||
<RxCross2 />
|
||||
<span className="sr-only">Close</span>
|
||||
</DialogPrimitive.Close>
|
||||
</motion.div>
|
||||
</DialogPrimitive.Content>
|
||||
</DialogPortal>
|
||||
);
|
||||
}
|
||||
|
||||
type DialogCloseProps = React.ComponentProps<typeof DialogPrimitive.Close>;
|
||||
|
||||
function DialogClose(props: DialogCloseProps) {
|
||||
return <DialogPrimitive.Close data-slot="dialog-close" {...props} />;
|
||||
}
|
||||
|
||||
function DialogHeader({ className, ...props }: React.ComponentProps<"div">) {
|
||||
return (
|
||||
<div
|
||||
|
||||
@@ -0,0 +1,640 @@
|
||||
"use client";
|
||||
|
||||
import { AnimatePresence, motion, type Transition } from "motion/react";
|
||||
import * as React from "react";
|
||||
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
type HighlightMode = "children" | "parent";
|
||||
|
||||
type Bounds = {
|
||||
top: number;
|
||||
left: number;
|
||||
width: number;
|
||||
height: number;
|
||||
};
|
||||
|
||||
const DEFAULT_BOUNDS_OFFSET: Bounds = {
|
||||
top: 0,
|
||||
left: 0,
|
||||
width: 0,
|
||||
height: 0,
|
||||
};
|
||||
|
||||
type HighlightContextType<T extends string> = {
|
||||
as?: keyof HTMLElementTagNameMap;
|
||||
mode: HighlightMode;
|
||||
activeValue: T | null;
|
||||
setActiveValue: (value: T | null) => void;
|
||||
setBounds: (bounds: DOMRect) => void;
|
||||
clearBounds: () => void;
|
||||
id: string;
|
||||
hover: boolean;
|
||||
click: boolean;
|
||||
className?: string;
|
||||
style?: React.CSSProperties;
|
||||
activeClassName?: string;
|
||||
setActiveClassName: (className: string) => void;
|
||||
transition?: Transition;
|
||||
disabled?: boolean;
|
||||
enabled?: boolean;
|
||||
exitDelay?: number;
|
||||
forceUpdateBounds?: boolean;
|
||||
};
|
||||
|
||||
const HighlightContext = React.createContext<
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
HighlightContextType<any> | undefined
|
||||
>(undefined);
|
||||
|
||||
function useHighlight<T extends string>(): HighlightContextType<T> {
|
||||
const context = React.useContext(HighlightContext);
|
||||
if (!context) {
|
||||
throw new Error("useHighlight must be used within a HighlightProvider");
|
||||
}
|
||||
return context as unknown as HighlightContextType<T>;
|
||||
}
|
||||
|
||||
type BaseHighlightProps<T extends React.ElementType = "div"> = {
|
||||
as?: T;
|
||||
ref?: React.Ref<HTMLDivElement>;
|
||||
mode?: HighlightMode;
|
||||
value?: string | null;
|
||||
defaultValue?: string | null;
|
||||
onValueChange?: (value: string | null) => void;
|
||||
className?: string;
|
||||
style?: React.CSSProperties;
|
||||
transition?: Transition;
|
||||
hover?: boolean;
|
||||
click?: boolean;
|
||||
disabled?: boolean;
|
||||
enabled?: boolean;
|
||||
exitDelay?: number;
|
||||
};
|
||||
|
||||
type ParentModeHighlightProps = {
|
||||
boundsOffset?: Partial<Bounds>;
|
||||
containerClassName?: string;
|
||||
forceUpdateBounds?: boolean;
|
||||
};
|
||||
|
||||
type ControlledParentModeHighlightProps<T extends React.ElementType = "div"> =
|
||||
BaseHighlightProps<T> &
|
||||
ParentModeHighlightProps & {
|
||||
mode: "parent";
|
||||
controlledItems: true;
|
||||
children: React.ReactNode;
|
||||
};
|
||||
|
||||
type ControlledChildrenModeHighlightProps<T extends React.ElementType = "div"> =
|
||||
BaseHighlightProps<T> & {
|
||||
mode?: "children" | undefined;
|
||||
controlledItems: true;
|
||||
children: React.ReactNode;
|
||||
};
|
||||
|
||||
type UncontrolledParentModeHighlightProps<T extends React.ElementType = "div"> =
|
||||
BaseHighlightProps<T> &
|
||||
ParentModeHighlightProps & {
|
||||
mode: "parent";
|
||||
controlledItems?: false;
|
||||
itemsClassName?: string;
|
||||
children: React.ReactElement | React.ReactElement[];
|
||||
};
|
||||
|
||||
type UncontrolledChildrenModeHighlightProps<
|
||||
T extends React.ElementType = "div",
|
||||
> = BaseHighlightProps<T> & {
|
||||
mode?: "children";
|
||||
controlledItems?: false;
|
||||
itemsClassName?: string;
|
||||
children: React.ReactElement | React.ReactElement[];
|
||||
};
|
||||
|
||||
type HighlightProps<T extends React.ElementType = "div"> =
|
||||
| ControlledParentModeHighlightProps<T>
|
||||
| ControlledChildrenModeHighlightProps<T>
|
||||
| UncontrolledParentModeHighlightProps<T>
|
||||
| UncontrolledChildrenModeHighlightProps<T>;
|
||||
|
||||
function Highlight<T extends React.ElementType = "div">({
|
||||
ref,
|
||||
...props
|
||||
}: HighlightProps<T>) {
|
||||
const {
|
||||
as: Component = "div",
|
||||
children,
|
||||
value,
|
||||
defaultValue,
|
||||
onValueChange,
|
||||
className,
|
||||
style,
|
||||
transition = { type: "spring", stiffness: 350, damping: 35 },
|
||||
hover = false,
|
||||
click = true,
|
||||
enabled = true,
|
||||
controlledItems,
|
||||
disabled = false,
|
||||
exitDelay = 200,
|
||||
mode = "children",
|
||||
} = props;
|
||||
|
||||
const localRef = React.useRef<HTMLDivElement>(null);
|
||||
React.useImperativeHandle(ref, () => localRef.current as HTMLDivElement);
|
||||
|
||||
const propsBoundsOffset = (props as ParentModeHighlightProps)?.boundsOffset;
|
||||
const boundsOffset = propsBoundsOffset ?? DEFAULT_BOUNDS_OFFSET;
|
||||
const boundsOffsetTop = boundsOffset.top ?? 0;
|
||||
const boundsOffsetLeft = boundsOffset.left ?? 0;
|
||||
const boundsOffsetWidth = boundsOffset.width ?? 0;
|
||||
const boundsOffsetHeight = boundsOffset.height ?? 0;
|
||||
|
||||
const boundsOffsetRef = React.useRef({
|
||||
top: boundsOffsetTop,
|
||||
left: boundsOffsetLeft,
|
||||
width: boundsOffsetWidth,
|
||||
height: boundsOffsetHeight,
|
||||
});
|
||||
|
||||
React.useEffect(() => {
|
||||
boundsOffsetRef.current = {
|
||||
top: boundsOffsetTop,
|
||||
left: boundsOffsetLeft,
|
||||
width: boundsOffsetWidth,
|
||||
height: boundsOffsetHeight,
|
||||
};
|
||||
}, [
|
||||
boundsOffsetTop,
|
||||
boundsOffsetLeft,
|
||||
boundsOffsetWidth,
|
||||
boundsOffsetHeight,
|
||||
]);
|
||||
|
||||
const [activeValue, setActiveValue] = React.useState<string | null>(
|
||||
value ?? defaultValue ?? null,
|
||||
);
|
||||
const [boundsState, setBoundsState] = React.useState<Bounds | null>(null);
|
||||
const [activeClassNameState, setActiveClassNameState] =
|
||||
React.useState<string>("");
|
||||
|
||||
const safeSetActiveValue = (id: string | null) => {
|
||||
setActiveValue((prev) => {
|
||||
if (prev !== id) {
|
||||
onValueChange?.(id);
|
||||
return id;
|
||||
}
|
||||
return prev;
|
||||
});
|
||||
};
|
||||
|
||||
const safeSetBoundsRef = React.useRef<
|
||||
((bounds: DOMRect) => void) | undefined
|
||||
>(undefined);
|
||||
|
||||
React.useEffect(() => {
|
||||
safeSetBoundsRef.current = (bounds: DOMRect) => {
|
||||
if (!localRef.current) return;
|
||||
|
||||
const containerRect = localRef.current.getBoundingClientRect();
|
||||
const offset = boundsOffsetRef.current;
|
||||
const newBounds: Bounds = {
|
||||
top: bounds.top - containerRect.top + offset.top,
|
||||
left: bounds.left - containerRect.left + offset.left,
|
||||
width: bounds.width + offset.width,
|
||||
height: bounds.height + offset.height,
|
||||
};
|
||||
|
||||
setBoundsState((prev) => {
|
||||
if (
|
||||
prev &&
|
||||
prev.top === newBounds.top &&
|
||||
prev.left === newBounds.left &&
|
||||
prev.width === newBounds.width &&
|
||||
prev.height === newBounds.height
|
||||
) {
|
||||
return prev;
|
||||
}
|
||||
return newBounds;
|
||||
});
|
||||
};
|
||||
});
|
||||
|
||||
const safeSetBounds = (bounds: DOMRect) => {
|
||||
safeSetBoundsRef.current?.(bounds);
|
||||
};
|
||||
|
||||
const clearBounds = React.useCallback(() => {
|
||||
setBoundsState((prev) => (prev === null ? prev : null));
|
||||
}, []);
|
||||
|
||||
React.useEffect(() => {
|
||||
if (value !== undefined) setActiveValue(value);
|
||||
else if (defaultValue !== undefined) setActiveValue(defaultValue);
|
||||
}, [value, defaultValue]);
|
||||
|
||||
const id = React.useId();
|
||||
|
||||
React.useEffect(() => {
|
||||
if (mode !== "parent") return;
|
||||
const container = localRef.current;
|
||||
if (!container) return;
|
||||
|
||||
const onScroll = () => {
|
||||
if (!activeValue) return;
|
||||
const activeEl = container.querySelector<HTMLElement>(
|
||||
`[data-value="${activeValue}"][data-highlight="true"]`,
|
||||
);
|
||||
if (activeEl)
|
||||
safeSetBoundsRef.current?.(activeEl.getBoundingClientRect());
|
||||
};
|
||||
|
||||
container.addEventListener("scroll", onScroll, { passive: true });
|
||||
return () => container.removeEventListener("scroll", onScroll);
|
||||
}, [mode, activeValue]);
|
||||
|
||||
const render = (children: React.ReactNode) => {
|
||||
if (mode === "parent") {
|
||||
return (
|
||||
<Component
|
||||
ref={localRef}
|
||||
data-slot="motion-highlight-container"
|
||||
style={{ position: "relative", zIndex: 1 }}
|
||||
className={(props as ParentModeHighlightProps)?.containerClassName}
|
||||
>
|
||||
<AnimatePresence initial={false} mode="wait">
|
||||
{boundsState && (
|
||||
<motion.div
|
||||
data-slot="motion-highlight"
|
||||
animate={{
|
||||
top: boundsState.top,
|
||||
left: boundsState.left,
|
||||
width: boundsState.width,
|
||||
height: boundsState.height,
|
||||
opacity: 1,
|
||||
}}
|
||||
initial={{
|
||||
top: boundsState.top,
|
||||
left: boundsState.left,
|
||||
width: boundsState.width,
|
||||
height: boundsState.height,
|
||||
opacity: 0,
|
||||
}}
|
||||
exit={{
|
||||
opacity: 0,
|
||||
transition: {
|
||||
...transition,
|
||||
delay: (transition?.delay ?? 0) + (exitDelay ?? 0) / 1000,
|
||||
},
|
||||
}}
|
||||
transition={transition}
|
||||
style={{ position: "absolute", zIndex: 0, ...style }}
|
||||
className={cn(className, activeClassNameState)}
|
||||
/>
|
||||
)}
|
||||
</AnimatePresence>
|
||||
{children}
|
||||
</Component>
|
||||
);
|
||||
}
|
||||
|
||||
return children;
|
||||
};
|
||||
|
||||
return (
|
||||
<HighlightContext.Provider
|
||||
value={{
|
||||
mode,
|
||||
activeValue,
|
||||
setActiveValue: safeSetActiveValue,
|
||||
id,
|
||||
hover,
|
||||
click,
|
||||
className,
|
||||
style,
|
||||
transition,
|
||||
disabled,
|
||||
enabled,
|
||||
exitDelay,
|
||||
setBounds: safeSetBounds,
|
||||
clearBounds,
|
||||
activeClassName: activeClassNameState,
|
||||
setActiveClassName: setActiveClassNameState,
|
||||
forceUpdateBounds: (props as ParentModeHighlightProps)
|
||||
?.forceUpdateBounds,
|
||||
}}
|
||||
>
|
||||
{enabled
|
||||
? controlledItems
|
||||
? render(children)
|
||||
: render(
|
||||
React.Children.map(children, (child, index) => (
|
||||
<HighlightItem key={index} className={props?.itemsClassName}>
|
||||
{child}
|
||||
</HighlightItem>
|
||||
)),
|
||||
)
|
||||
: children}
|
||||
</HighlightContext.Provider>
|
||||
);
|
||||
}
|
||||
|
||||
function getNonOverridingDataAttributes(
|
||||
element: React.ReactElement,
|
||||
dataAttributes: Record<string, unknown>,
|
||||
): Record<string, unknown> {
|
||||
return Object.keys(dataAttributes).reduce<Record<string, unknown>>(
|
||||
(acc, key) => {
|
||||
if ((element.props as Record<string, unknown>)[key] === undefined) {
|
||||
acc[key] = dataAttributes[key];
|
||||
}
|
||||
return acc;
|
||||
},
|
||||
{},
|
||||
);
|
||||
}
|
||||
|
||||
type ExtendedChildProps = React.ComponentProps<"div"> & {
|
||||
id?: string;
|
||||
ref?: React.Ref<HTMLElement>;
|
||||
"data-active"?: string;
|
||||
"data-value"?: string;
|
||||
"data-disabled"?: boolean;
|
||||
"data-highlight"?: boolean;
|
||||
"data-slot"?: string;
|
||||
};
|
||||
|
||||
type HighlightItemProps<T extends React.ElementType = "div"> =
|
||||
React.ComponentProps<T> & {
|
||||
as?: T;
|
||||
children: React.ReactElement;
|
||||
id?: string;
|
||||
value?: string;
|
||||
className?: string;
|
||||
style?: React.CSSProperties;
|
||||
transition?: Transition;
|
||||
activeClassName?: string;
|
||||
disabled?: boolean;
|
||||
exitDelay?: number;
|
||||
asChild?: boolean;
|
||||
forceUpdateBounds?: boolean;
|
||||
};
|
||||
|
||||
function HighlightItem<T extends React.ElementType>({
|
||||
ref,
|
||||
as,
|
||||
children,
|
||||
id,
|
||||
value,
|
||||
className,
|
||||
style,
|
||||
transition,
|
||||
disabled = false,
|
||||
activeClassName,
|
||||
exitDelay,
|
||||
asChild = false,
|
||||
forceUpdateBounds,
|
||||
...props
|
||||
}: HighlightItemProps<T>) {
|
||||
const itemId = React.useId();
|
||||
const {
|
||||
activeValue,
|
||||
setActiveValue,
|
||||
mode,
|
||||
setBounds,
|
||||
clearBounds,
|
||||
hover,
|
||||
click,
|
||||
enabled,
|
||||
className: contextClassName,
|
||||
style: contextStyle,
|
||||
transition: contextTransition,
|
||||
id: contextId,
|
||||
disabled: contextDisabled,
|
||||
exitDelay: contextExitDelay,
|
||||
forceUpdateBounds: contextForceUpdateBounds,
|
||||
setActiveClassName,
|
||||
} = useHighlight();
|
||||
|
||||
const Component = as ?? "div";
|
||||
const element = children as React.ReactElement<ExtendedChildProps>;
|
||||
const childValue =
|
||||
id ?? value ?? element.props?.["data-value"] ?? element.props?.id ?? itemId;
|
||||
const isActive = activeValue === childValue;
|
||||
const isDisabled = disabled === undefined ? contextDisabled : disabled;
|
||||
const itemTransition = transition ?? contextTransition;
|
||||
|
||||
const localRef = React.useRef<HTMLDivElement>(null);
|
||||
React.useImperativeHandle(ref, () => localRef.current as HTMLDivElement);
|
||||
|
||||
const refCallback = React.useCallback((node: HTMLElement | null) => {
|
||||
localRef.current = node as HTMLDivElement;
|
||||
}, []);
|
||||
|
||||
React.useEffect(() => {
|
||||
if (mode !== "parent") return;
|
||||
let rafId: number;
|
||||
let previousBounds: Bounds | null = null;
|
||||
const shouldUpdateBounds =
|
||||
forceUpdateBounds === true ||
|
||||
(contextForceUpdateBounds && forceUpdateBounds !== false);
|
||||
|
||||
const updateBounds = () => {
|
||||
if (!localRef.current) return;
|
||||
|
||||
const bounds = localRef.current.getBoundingClientRect();
|
||||
|
||||
if (shouldUpdateBounds) {
|
||||
if (
|
||||
previousBounds &&
|
||||
previousBounds.top === bounds.top &&
|
||||
previousBounds.left === bounds.left &&
|
||||
previousBounds.width === bounds.width &&
|
||||
previousBounds.height === bounds.height
|
||||
) {
|
||||
rafId = requestAnimationFrame(updateBounds);
|
||||
return;
|
||||
}
|
||||
previousBounds = bounds;
|
||||
rafId = requestAnimationFrame(updateBounds);
|
||||
}
|
||||
|
||||
setBounds(bounds);
|
||||
};
|
||||
|
||||
if (isActive) {
|
||||
updateBounds();
|
||||
setActiveClassName(activeClassName ?? "");
|
||||
} else if (!activeValue) clearBounds();
|
||||
|
||||
if (shouldUpdateBounds) return () => cancelAnimationFrame(rafId);
|
||||
}, [
|
||||
mode,
|
||||
isActive,
|
||||
activeValue,
|
||||
setBounds,
|
||||
clearBounds,
|
||||
activeClassName,
|
||||
setActiveClassName,
|
||||
forceUpdateBounds,
|
||||
contextForceUpdateBounds,
|
||||
]);
|
||||
|
||||
if (!React.isValidElement(children)) return children;
|
||||
|
||||
const dataAttributes = {
|
||||
"data-active": isActive ? "true" : "false",
|
||||
"aria-selected": isActive,
|
||||
"data-disabled": isDisabled,
|
||||
"data-value": childValue,
|
||||
"data-highlight": true,
|
||||
};
|
||||
|
||||
const commonHandlers = hover
|
||||
? {
|
||||
onMouseEnter: (e: React.MouseEvent<HTMLDivElement>) => {
|
||||
setActiveValue(childValue);
|
||||
element.props.onMouseEnter?.(e);
|
||||
},
|
||||
onMouseLeave: (e: React.MouseEvent<HTMLDivElement>) => {
|
||||
setActiveValue(null);
|
||||
element.props.onMouseLeave?.(e);
|
||||
},
|
||||
}
|
||||
: click
|
||||
? {
|
||||
onClick: (e: React.MouseEvent<HTMLDivElement>) => {
|
||||
setActiveValue(childValue);
|
||||
element.props.onClick?.(e);
|
||||
},
|
||||
}
|
||||
: {};
|
||||
|
||||
if (asChild) {
|
||||
if (mode === "children") {
|
||||
return React.cloneElement(
|
||||
element,
|
||||
{
|
||||
key: childValue,
|
||||
ref: refCallback,
|
||||
className: cn("relative", element.props.className),
|
||||
...getNonOverridingDataAttributes(element, {
|
||||
...dataAttributes,
|
||||
"data-slot": "motion-highlight-item-container",
|
||||
}),
|
||||
...commonHandlers,
|
||||
...props,
|
||||
},
|
||||
<>
|
||||
<AnimatePresence initial={false} mode="wait">
|
||||
{isActive && !isDisabled && (
|
||||
<motion.div
|
||||
layoutId={`transition-background-${contextId}`}
|
||||
data-slot="motion-highlight"
|
||||
style={{
|
||||
position: "absolute",
|
||||
zIndex: 0,
|
||||
...contextStyle,
|
||||
...style,
|
||||
}}
|
||||
className={cn(contextClassName, activeClassName)}
|
||||
transition={itemTransition}
|
||||
initial={{ opacity: 0 }}
|
||||
animate={{ opacity: 1 }}
|
||||
exit={{
|
||||
opacity: 0,
|
||||
transition: {
|
||||
...itemTransition,
|
||||
delay:
|
||||
(itemTransition?.delay ?? 0) +
|
||||
(exitDelay ?? contextExitDelay ?? 0) / 1000,
|
||||
},
|
||||
}}
|
||||
{...dataAttributes}
|
||||
/>
|
||||
)}
|
||||
</AnimatePresence>
|
||||
|
||||
<Component
|
||||
data-slot="motion-highlight-item"
|
||||
style={{ position: "relative", zIndex: 1 }}
|
||||
className={className}
|
||||
{...dataAttributes}
|
||||
>
|
||||
{children}
|
||||
</Component>
|
||||
</>,
|
||||
);
|
||||
}
|
||||
|
||||
return React.cloneElement(element, {
|
||||
ref: refCallback,
|
||||
...getNonOverridingDataAttributes(element, {
|
||||
...dataAttributes,
|
||||
"data-slot": "motion-highlight-item",
|
||||
}),
|
||||
...commonHandlers,
|
||||
});
|
||||
}
|
||||
|
||||
return enabled ? (
|
||||
<Component
|
||||
key={childValue}
|
||||
ref={localRef}
|
||||
data-slot="motion-highlight-item-container"
|
||||
className={cn(mode === "children" && "relative", className)}
|
||||
{...dataAttributes}
|
||||
{...props}
|
||||
{...commonHandlers}
|
||||
>
|
||||
{mode === "children" && (
|
||||
<AnimatePresence initial={false} mode="wait">
|
||||
{isActive && !isDisabled && (
|
||||
<motion.div
|
||||
layoutId={`transition-background-${contextId}`}
|
||||
data-slot="motion-highlight"
|
||||
style={{
|
||||
position: "absolute",
|
||||
zIndex: 0,
|
||||
...contextStyle,
|
||||
...style,
|
||||
}}
|
||||
className={cn(contextClassName, activeClassName)}
|
||||
transition={itemTransition}
|
||||
initial={{ opacity: 0 }}
|
||||
animate={{ opacity: 1 }}
|
||||
exit={{
|
||||
opacity: 0,
|
||||
transition: {
|
||||
...itemTransition,
|
||||
delay:
|
||||
(itemTransition?.delay ?? 0) +
|
||||
(exitDelay ?? contextExitDelay ?? 0) / 1000,
|
||||
},
|
||||
}}
|
||||
{...dataAttributes}
|
||||
/>
|
||||
)}
|
||||
</AnimatePresence>
|
||||
)}
|
||||
|
||||
{React.cloneElement(element, {
|
||||
style: { position: "relative", zIndex: 1 },
|
||||
className: element.props.className,
|
||||
...getNonOverridingDataAttributes(element, {
|
||||
...dataAttributes,
|
||||
"data-slot": "motion-highlight-item",
|
||||
}),
|
||||
})}
|
||||
</Component>
|
||||
) : (
|
||||
children
|
||||
);
|
||||
}
|
||||
|
||||
export {
|
||||
Highlight,
|
||||
HighlightItem,
|
||||
useHighlight,
|
||||
type HighlightProps,
|
||||
type HighlightItemProps,
|
||||
};
|
||||
+179
-18
@@ -1,18 +1,82 @@
|
||||
"use client";
|
||||
|
||||
import * as TabsPrimitive from "@radix-ui/react-tabs";
|
||||
import {
|
||||
AnimatePresence,
|
||||
type HTMLMotionProps,
|
||||
motion,
|
||||
type Transition,
|
||||
} from "motion/react";
|
||||
import * as React from "react";
|
||||
|
||||
import { AutoHeight } from "@/components/ui/auto-height";
|
||||
import {
|
||||
Highlight,
|
||||
HighlightItem,
|
||||
type HighlightItemProps,
|
||||
type HighlightProps,
|
||||
} from "@/components/ui/highlight";
|
||||
import { useControlledState } from "@/hooks/use-controlled-state";
|
||||
import { getStrictContext } from "@/lib/get-strict-context";
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
const Tabs = TabsPrimitive.Root;
|
||||
type TabsContextType = {
|
||||
value: string | undefined;
|
||||
setValue: TabsProps["onValueChange"];
|
||||
};
|
||||
|
||||
const [TabsProvider, useTabs] =
|
||||
getStrictContext<TabsContextType>("TabsContext");
|
||||
|
||||
type TabsProps = React.ComponentProps<typeof TabsPrimitive.Root>;
|
||||
|
||||
function Tabs(props: TabsProps) {
|
||||
const [value, setValue] = useControlledState({
|
||||
value: props.value,
|
||||
defaultValue: props.defaultValue,
|
||||
onChange: props.onValueChange,
|
||||
});
|
||||
|
||||
return (
|
||||
<TabsProvider value={{ value, setValue }}>
|
||||
<TabsPrimitive.Root
|
||||
data-slot="tabs"
|
||||
{...props}
|
||||
onValueChange={setValue}
|
||||
/>
|
||||
</TabsProvider>
|
||||
);
|
||||
}
|
||||
|
||||
type TabsHighlightProps = Omit<HighlightProps, "controlledItems" | "value">;
|
||||
|
||||
function TabsHighlight({
|
||||
transition = { type: "spring", stiffness: 200, damping: 25 },
|
||||
...props
|
||||
}: TabsHighlightProps) {
|
||||
const { value } = useTabs();
|
||||
|
||||
return (
|
||||
<Highlight
|
||||
data-slot="tabs-highlight"
|
||||
controlledItems
|
||||
value={value}
|
||||
transition={transition}
|
||||
click={false}
|
||||
{...props}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
type TabsListProps = React.ComponentProps<typeof TabsPrimitive.List>;
|
||||
|
||||
const TabsList = React.forwardRef<
|
||||
React.ElementRef<typeof TabsPrimitive.List>,
|
||||
React.ComponentPropsWithoutRef<typeof TabsPrimitive.List>
|
||||
TabsListProps
|
||||
>(({ className, ...props }, ref) => (
|
||||
<TabsPrimitive.List
|
||||
ref={ref}
|
||||
data-slot="tabs-list"
|
||||
className={cn(
|
||||
"inline-flex h-10 items-center justify-center rounded-md bg-muted p-1 text-muted-foreground",
|
||||
className,
|
||||
@@ -22,12 +86,23 @@ const TabsList = React.forwardRef<
|
||||
));
|
||||
TabsList.displayName = TabsPrimitive.List.displayName;
|
||||
|
||||
type TabsHighlightItemProps = HighlightItemProps & {
|
||||
value: string;
|
||||
};
|
||||
|
||||
function TabsHighlightItem(props: TabsHighlightItemProps) {
|
||||
return <HighlightItem data-slot="tabs-highlight-item" {...props} />;
|
||||
}
|
||||
|
||||
type TabsTriggerProps = React.ComponentProps<typeof TabsPrimitive.Trigger>;
|
||||
|
||||
const TabsTrigger = React.forwardRef<
|
||||
React.ElementRef<typeof TabsPrimitive.Trigger>,
|
||||
React.ComponentPropsWithoutRef<typeof TabsPrimitive.Trigger>
|
||||
TabsTriggerProps
|
||||
>(({ className, ...props }, ref) => (
|
||||
<TabsPrimitive.Trigger
|
||||
ref={ref}
|
||||
data-slot="tabs-trigger"
|
||||
className={cn(
|
||||
"cursor-pointer inline-flex items-center justify-center whitespace-nowrap rounded-sm px-3 py-1.5 text-sm font-medium ring-offset-background transition-all focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50 data-[state=active]:bg-background data-[state=active]:text-foreground data-[state=active]:shadow-sm",
|
||||
className,
|
||||
@@ -37,19 +112,105 @@ const TabsTrigger = React.forwardRef<
|
||||
));
|
||||
TabsTrigger.displayName = TabsPrimitive.Trigger.displayName;
|
||||
|
||||
const TabsContent = React.forwardRef<
|
||||
React.ElementRef<typeof TabsPrimitive.Content>,
|
||||
React.ComponentPropsWithoutRef<typeof TabsPrimitive.Content>
|
||||
>(({ className, ...props }, ref) => (
|
||||
<TabsPrimitive.Content
|
||||
ref={ref}
|
||||
className={cn(
|
||||
"mt-2 ring-offset-background focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2",
|
||||
className,
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
));
|
||||
TabsContent.displayName = TabsPrimitive.Content.displayName;
|
||||
type TabsContentProps = React.ComponentProps<typeof TabsPrimitive.Content> &
|
||||
HTMLMotionProps<"div">;
|
||||
|
||||
export { Tabs, TabsList, TabsTrigger, TabsContent };
|
||||
function TabsContent({
|
||||
value,
|
||||
forceMount,
|
||||
transition = { duration: 0.5, ease: "easeInOut" },
|
||||
className,
|
||||
...props
|
||||
}: TabsContentProps) {
|
||||
return (
|
||||
<AnimatePresence mode="wait">
|
||||
<TabsPrimitive.Content asChild forceMount={forceMount} value={value}>
|
||||
<motion.div
|
||||
data-slot="tabs-content"
|
||||
layout
|
||||
layoutDependency={value}
|
||||
initial={{ opacity: 0, filter: "blur(4px)" }}
|
||||
animate={{ opacity: 1, filter: "blur(0px)" }}
|
||||
exit={{ opacity: 0, filter: "blur(4px)" }}
|
||||
transition={transition}
|
||||
className={cn(
|
||||
"mt-2 ring-offset-background focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2",
|
||||
className,
|
||||
)}
|
||||
{...props}
|
||||
/>
|
||||
</TabsPrimitive.Content>
|
||||
</AnimatePresence>
|
||||
);
|
||||
}
|
||||
|
||||
type TabsContentsAutoProps = React.ComponentProps<typeof AutoHeight> & {
|
||||
mode?: "auto-height";
|
||||
children: React.ReactNode;
|
||||
transition?: Transition;
|
||||
};
|
||||
|
||||
type TabsContentsLayoutProps = Omit<HTMLMotionProps<"div">, "transition"> & {
|
||||
mode: "layout";
|
||||
children: React.ReactNode;
|
||||
transition?: Transition;
|
||||
};
|
||||
|
||||
type TabsContentsProps = TabsContentsAutoProps | TabsContentsLayoutProps;
|
||||
|
||||
const defaultTransition: Transition = {
|
||||
type: "spring",
|
||||
stiffness: 200,
|
||||
damping: 30,
|
||||
};
|
||||
|
||||
function isAutoMode(props: TabsContentsProps): props is TabsContentsAutoProps {
|
||||
return !("mode" in props) || props.mode === "auto-height";
|
||||
}
|
||||
|
||||
function TabsContents(props: TabsContentsProps) {
|
||||
const { value } = useTabs();
|
||||
|
||||
if (isAutoMode(props)) {
|
||||
const { transition = defaultTransition, ...autoProps } = props;
|
||||
|
||||
return (
|
||||
<AutoHeight
|
||||
data-slot="tabs-contents"
|
||||
deps={[value]}
|
||||
transition={transition}
|
||||
{...autoProps}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
const { transition = defaultTransition, style, ...layoutProps } = props;
|
||||
|
||||
return (
|
||||
<motion.div
|
||||
data-slot="tabs-contents"
|
||||
layout="size"
|
||||
layoutDependency={value}
|
||||
style={{ overflow: "hidden", ...style }}
|
||||
transition={{ layout: transition }}
|
||||
{...layoutProps}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
export {
|
||||
Tabs,
|
||||
TabsHighlight,
|
||||
TabsHighlightItem,
|
||||
TabsList,
|
||||
TabsTrigger,
|
||||
TabsContent,
|
||||
TabsContents,
|
||||
type TabsProps,
|
||||
type TabsHighlightProps,
|
||||
type TabsHighlightItemProps,
|
||||
type TabsListProps,
|
||||
type TabsTriggerProps,
|
||||
type TabsContentProps,
|
||||
type TabsContentsProps,
|
||||
};
|
||||
|
||||
@@ -0,0 +1,101 @@
|
||||
"use client";
|
||||
|
||||
import * as React from "react";
|
||||
|
||||
// Tuning flags for useAutoHeight's measurement.
type AutoHeightOptions = {
  // Add the parent element's vertical padding/border to the measured height
  // (applied only when the parent uses border-box sizing).
  includeParentBox?: boolean;
  // Add the measured element's own vertical padding/border (border-box only).
  includeSelfBox?: boolean;
};
|
||||
|
||||
export function useAutoHeight<T extends HTMLElement = HTMLDivElement>(
|
||||
deps: React.DependencyList = [],
|
||||
options: AutoHeightOptions = {
|
||||
includeParentBox: true,
|
||||
includeSelfBox: false,
|
||||
},
|
||||
) {
|
||||
const ref = React.useRef<T | null>(null);
|
||||
const roRef = React.useRef<ResizeObserver | null>(null);
|
||||
const [height, setHeight] = React.useState(0);
|
||||
|
||||
const measure = React.useCallback(() => {
|
||||
const el = ref.current;
|
||||
if (!el) return 0;
|
||||
|
||||
const base = el.getBoundingClientRect().height || 0;
|
||||
|
||||
let extra = 0;
|
||||
|
||||
if (options.includeParentBox && el.parentElement) {
|
||||
const cs = getComputedStyle(el.parentElement);
|
||||
const paddingY =
|
||||
(parseFloat(cs.paddingTop || "0") || 0) +
|
||||
(parseFloat(cs.paddingBottom || "0") || 0);
|
||||
const borderY =
|
||||
(parseFloat(cs.borderTopWidth || "0") || 0) +
|
||||
(parseFloat(cs.borderBottomWidth || "0") || 0);
|
||||
const isBorderBox = cs.boxSizing === "border-box";
|
||||
if (isBorderBox) {
|
||||
extra += paddingY + borderY;
|
||||
}
|
||||
}
|
||||
|
||||
if (options.includeSelfBox) {
|
||||
const cs = getComputedStyle(el);
|
||||
const paddingY =
|
||||
(parseFloat(cs.paddingTop || "0") || 0) +
|
||||
(parseFloat(cs.paddingBottom || "0") || 0);
|
||||
const borderY =
|
||||
(parseFloat(cs.borderTopWidth || "0") || 0) +
|
||||
(parseFloat(cs.borderBottomWidth || "0") || 0);
|
||||
const isBorderBox = cs.boxSizing === "border-box";
|
||||
if (isBorderBox) {
|
||||
extra += paddingY + borderY;
|
||||
}
|
||||
}
|
||||
|
||||
const dpr =
|
||||
typeof window !== "undefined" ? window.devicePixelRatio || 1 : 1;
|
||||
const total = Math.ceil((base + extra) * dpr) / dpr;
|
||||
|
||||
return total;
|
||||
}, [options.includeParentBox, options.includeSelfBox]);
|
||||
|
||||
React.useLayoutEffect(() => {
|
||||
const el = ref.current;
|
||||
if (!el) return;
|
||||
|
||||
setHeight(measure());
|
||||
|
||||
if (roRef.current) {
|
||||
roRef.current.disconnect();
|
||||
roRef.current = null;
|
||||
}
|
||||
|
||||
const ro = new ResizeObserver(() => {
|
||||
const next = measure();
|
||||
requestAnimationFrame(() => setHeight(next));
|
||||
});
|
||||
|
||||
ro.observe(el);
|
||||
if (options.includeParentBox && el.parentElement) {
|
||||
ro.observe(el.parentElement);
|
||||
}
|
||||
|
||||
roRef.current = ro;
|
||||
|
||||
return () => {
|
||||
ro.disconnect();
|
||||
roRef.current = null;
|
||||
};
|
||||
}, [...deps, measure, options.includeParentBox]);
|
||||
|
||||
React.useLayoutEffect(() => {
|
||||
if (height === 0) {
|
||||
const next = measure();
|
||||
if (next !== 0) setHeight(next);
|
||||
}
|
||||
}, [height, measure]);
|
||||
|
||||
return { ref, height } as const;
|
||||
}
|
||||
@@ -0,0 +1,33 @@
|
||||
import * as React from "react";
|
||||
|
||||
// Shared controlled/uncontrolled value props: pass `value` for controlled
// usage, or `defaultValue` to seed uncontrolled state.
interface CommonControlledStateProps<T> {
  value?: T;
  defaultValue?: T;
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
export function useControlledState<T, Rest extends any[] = []>(
|
||||
props: CommonControlledStateProps<T> & {
|
||||
onChange?: (value: T, ...args: Rest) => void;
|
||||
},
|
||||
): readonly [T, (next: T, ...args: Rest) => void] {
|
||||
const { value, defaultValue, onChange } = props;
|
||||
|
||||
const [state, setInternalState] = React.useState<T>(
|
||||
value !== undefined ? value : (defaultValue as T),
|
||||
);
|
||||
|
||||
React.useEffect(() => {
|
||||
if (value !== undefined) setInternalState(value);
|
||||
}, [value]);
|
||||
|
||||
const setState = React.useCallback(
|
||||
(next: T, ...args: Rest) => {
|
||||
setInternalState(next);
|
||||
onChange?.(next, ...args);
|
||||
},
|
||||
[onChange],
|
||||
);
|
||||
|
||||
return [state, setState] as const;
|
||||
}
|
||||
@@ -0,0 +1,36 @@
|
||||
import * as React from "react";
|
||||
|
||||
function getStrictContext<T>(
|
||||
name?: string,
|
||||
): readonly [
|
||||
({
|
||||
value,
|
||||
children,
|
||||
}: {
|
||||
value: T;
|
||||
children?: React.ReactNode;
|
||||
}) => React.JSX.Element,
|
||||
() => T,
|
||||
] {
|
||||
const Context = React.createContext<T | undefined>(undefined);
|
||||
|
||||
const Provider = ({
|
||||
value,
|
||||
children,
|
||||
}: {
|
||||
value: T;
|
||||
children?: React.ReactNode;
|
||||
}) => <Context.Provider value={value}>{children}</Context.Provider>;
|
||||
|
||||
const useSafeContext = () => {
|
||||
const ctx = React.useContext(Context);
|
||||
if (ctx === undefined) {
|
||||
throw new Error(`useContext must be used within ${name ?? "a Provider"}`);
|
||||
}
|
||||
return ctx;
|
||||
};
|
||||
|
||||
return [Provider, useSafeContext] as const;
|
||||
}
|
||||
|
||||
export { getStrictContext };
|
||||
@@ -0,0 +1,98 @@
|
||||
"use client";
|
||||
|
||||
import { type HTMLMotionProps, isMotionComponent, motion } from "motion/react";
|
||||
import * as React from "react";
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
// Loose prop bag used when merging an unknown child's props with slot props.
type AnyProps = Record<string, unknown>;

// Motion props for a DOM element, with `ref` re-typed to the concrete
// element type T.
type DOMMotionProps<T extends HTMLElement = HTMLElement> = Omit<
  HTMLMotionProps<keyof HTMLElementTagNameMap>,
  "ref"
> & { ref?: React.Ref<T> };

// Adds `asChild` support to a prop type: with `asChild: true` the caller
// must pass exactly one React element child to render into.
type WithAsChild<Base extends object> =
  | (Base & { asChild: true; children: React.ReactElement })
  | (Base & { asChild?: false | undefined });

// Props accepted by Slot: motion props plus the single element to render.
type SlotProps<T extends HTMLElement = HTMLElement> = {
  children?: React.ReactElement;
} & DOMMotionProps<T>;
|
||||
|
||||
function mergeRefs<T>(
|
||||
...refs: (React.Ref<T> | undefined)[]
|
||||
): React.RefCallback<T> {
|
||||
return (node) => {
|
||||
refs.forEach((ref) => {
|
||||
if (!ref) return;
|
||||
if (typeof ref === "function") {
|
||||
ref(node);
|
||||
} else {
|
||||
(ref as React.RefObject<T | null>).current = node;
|
||||
}
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
function mergeProps<T extends HTMLElement>(
|
||||
childProps: AnyProps,
|
||||
slotProps: DOMMotionProps<T>,
|
||||
): AnyProps {
|
||||
const merged: AnyProps = { ...childProps, ...slotProps };
|
||||
|
||||
if (childProps.className || slotProps.className) {
|
||||
merged.className = cn(
|
||||
childProps.className as string,
|
||||
slotProps.className as string,
|
||||
);
|
||||
}
|
||||
|
||||
if (childProps.style || slotProps.style) {
|
||||
merged.style = {
|
||||
...(childProps.style as React.CSSProperties),
|
||||
...(slotProps.style as React.CSSProperties),
|
||||
};
|
||||
}
|
||||
|
||||
return merged;
|
||||
}
|
||||
|
||||
// Renders its single element child as the root, upgrading it to a motion
// component when needed and merging the slot's props and ref onto it.
// Returns null when `children` is not a valid element.
function Slot<T extends HTMLElement = HTMLElement>({
  children,
  ref,
  ...props
}: SlotProps<T>) {
  // True when the child's type is already a motion component (checked via
  // isMotionComponent); such children must not be wrapped again.
  const isAlreadyMotion = React.useMemo(() => {
    if (!React.isValidElement(children)) return false;
    return (
      typeof children.type === "object" &&
      children.type !== null &&
      isMotionComponent(children.type)
    );
  }, [children]);

  // Component actually rendered: the child's own type when it is already a
  // motion component, otherwise motion.create() wraps it. Memoized per child
  // identity so the wrapped component is not recreated every render.
  const Base = React.useMemo(() => {
    if (!React.isValidElement(children)) return motion.div;
    return isAlreadyMotion
      ? (children.type as React.ElementType)
      : motion.create(children.type as React.ElementType);
  }, [isAlreadyMotion, children]);

  // Note: both useMemo hooks above intentionally run before this early
  // return so the hook order stays stable across renders.
  if (!React.isValidElement(children)) return null;

  // Separate the child's own ref so it can be merged with the slot's ref.
  const { ref: childRef, ...childProps } = children.props as AnyProps;

  const mergedProps = mergeProps(childProps, props);

  return (
    <Base {...mergedProps} ref={mergeRefs(childRef as React.Ref<T>, ref)} />
  );
}
|
||||
|
||||
export {
|
||||
Slot,
|
||||
type SlotProps,
|
||||
type WithAsChild,
|
||||
type DOMMotionProps,
|
||||
type AnyProps,
|
||||
};
|
||||
Reference in New Issue
Block a user