Compare commits
256 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 45e57662de | |||
| 7931a241e7 | |||
| 224c35388f | |||
| 2bf45357ab | |||
| dd0ccda5fd | |||
| c422217b0f | |||
| 55b0016d31 | |||
| fede1d93a8 | |||
| 17ee38d316 | |||
| 826cb187c7 | |||
| 0deea7eb0c | |||
| 3f1f11001e | |||
| a0205aafa9 | |||
| 7d03968123 | |||
| 05791ace1f | |||
| 80757829c2 | |||
| 90ef4f3069 | |||
| 378430d7c0 | |||
| fc860ccc35 | |||
| 806aee3e0e | |||
| c6568a126d | |||
| 168eac0065 | |||
| 9c33d4f7b1 | |||
| 30f8e3eab2 | |||
| 02e1f158bd | |||
| 27d108a852 | |||
| f4301213f6 | |||
| d53c939e40 | |||
| ff1d63ce41 | |||
| 214e558a4c | |||
| 48883ddd03 | |||
| ac5d975e5b | |||
| 088f36e38f | |||
| e06d2b0aca | |||
| 547fb0bed6 | |||
| c8c2419ff1 | |||
| 35723de96a | |||
| cb8093fbde | |||
| 749b439d6d | |||
| e49b0b30a1 | |||
| e388e2e85a | |||
| decfdfcfc7 | |||
| c516999f7a | |||
| 1099459dbb | |||
| a3514df0d4 | |||
| 0102cb6c06 | |||
| 612c6610ce | |||
| ba750a3401 | |||
| d0e3e15fd3 | |||
| 248927ae6f | |||
| 6d71dbc62c | |||
| 3f0029c778 | |||
| fff1fe7087 | |||
| 1c971c664f | |||
| 0788797e3f | |||
| 8c338515b7 | |||
| a8c179fca7 | |||
| d0f436ce2d | |||
| 4019701186 | |||
| 53f85abe24 | |||
| 2aafb4c7a4 | |||
| 00d5c655dc | |||
| b12a704d9f | |||
| 0e134fd145 | |||
| adcdc91de2 | |||
| 880014d4c4 | |||
| 71f367f0ae | |||
| daa001cdf2 | |||
| 17056360ab | |||
| 80d5b77a80 | |||
| 701605fa73 | |||
| 19cb24f67f | |||
| c3fec3d095 | |||
| bb8b6ea0b7 | |||
| a6dfc5664b | |||
| 001a292185 | |||
| c7d7ff19a7 | |||
| aec05fb725 | |||
| c420318be0 | |||
| 52c9147092 | |||
| c8a28dde5b | |||
| 915ed06032 | |||
| 9bd5b9f6db | |||
| 2adbf900ae | |||
| 95b17e368d | |||
| 71563c1cdc | |||
| e160f5b2cc | |||
| ad18966294 | |||
| 9a6b500a4f | |||
| e9c4e32df2 | |||
| 21bc1de298 | |||
| 495a91a364 | |||
| 7b1e966b73 | |||
| c33d165c6b | |||
| c0807164cb | |||
| 06fcd0cfd8 | |||
| befccef2c3 | |||
| 946bd1b81b | |||
| cae758f0ab | |||
| aa2e9e2528 | |||
| 084e63eb1e | |||
| c2d59e7faf | |||
| e8b800e83b | |||
| b00b773c07 | |||
| c782ef1961 | |||
| 888631bc48 | |||
| cd5fd2c970 | |||
| f63650fa5d | |||
| 7092f2155b | |||
| 861d301451 | |||
| e1a4d8f389 | |||
| 65d417d17c | |||
| 0fa3922202 | |||
| f46f7e8961 | |||
| 378ece5ea5 | |||
| 6c76dc1a34 | |||
| e45f4a792f | |||
| 0860a3b6e0 | |||
| 0222c7e904 | |||
| 786acc4356 | |||
| a813358c49 | |||
| a3fd056d6e | |||
| 806e2497c0 | |||
| c742964d86 | |||
| 57e17b46e9 | |||
| 116a54942d | |||
| 8936816613 | |||
| db05ffdef6 | |||
| 96614a3f33 | |||
| 222a8b89f5 | |||
| 69e68a7331 | |||
| 5e6faf4e2c | |||
| cf1e49c761 | |||
| d05ab23404 | |||
| 8511535d69 | |||
| 29dd5abb34 | |||
| b2d1456aa9 | |||
| e3fc715cfa | |||
| 2cf9013d28 | |||
| 76dd0d84e8 | |||
| ccecd2a1e3 | |||
| 238f7648cf | |||
| c4aee3a00b | |||
| 140e611085 | |||
| b4488ee3ec | |||
| c4bfd4e253 | |||
| 0b3dac5da8 | |||
| db4c1fce6c | |||
| d2d459feeb | |||
| 7648785e39 | |||
| 081a1922df | |||
| 55b8b61f42 | |||
| 5bea6a32e0 | |||
| e72874142b | |||
| 6b5b177482 | |||
| cdaacc5b27 | |||
| f5e068346c | |||
| 07ac2b7ff8 | |||
| ee7160bb9e | |||
| d0ea3f8903 | |||
| 942d193206 | |||
| 90563ea6f5 | |||
| 6a88887a6c | |||
| 0553f76f71 | |||
| 95e5dbb84a | |||
| e9b5442340 | |||
| 756bd69a84 | |||
| 21a6185344 | |||
| b3d279046b | |||
| f4eecf24cc | |||
| cf79f2b172 | |||
| 3669d63ddf | |||
| 478553a4a8 | |||
| 3d1471d41d | |||
| 12bc4ed08f | |||
| 48ba93cf9a | |||
| 43ee6856f9 | |||
| 56034a99d6 | |||
| a8be96d28e | |||
| 0a826ff03c | |||
| 250e206eef | |||
| dd6834a4af | |||
| 266ecda1c7 | |||
| 0d793e4cd8 | |||
| 23d25928fc | |||
| 3cb68c53ad | |||
| acd572ed23 | |||
| 9822ad4e3f | |||
| 01d600f97e | |||
| e1461693da | |||
| 576119e5a3 | |||
| 1ff17e6833 | |||
| 2ffa37371d | |||
| 6fa0f1348a | |||
| e298496fb7 | |||
| f6041192e9 | |||
| 4ef50672b4 | |||
| 3140ad99ae | |||
| 97b1225d40 | |||
| 8a96d18e46 | |||
| a723c8b30b | |||
| 4a56575dbd | |||
| 3331699540 | |||
| 1f28983a4e | |||
| 362f3e423b | |||
| 704bcb2b28 | |||
| 08559eef13 | |||
| 0ff0570321 | |||
| 7eb56a2296 | |||
| e2fa6f2c5f | |||
| 8b83ece7be | |||
| 4fed80cf3c | |||
| c1fb1e3c4b | |||
| 7e367325be | |||
| e6cb4e6082 | |||
| 21d80fde56 | |||
| 3732d3a6e1 | |||
| 2e193987df | |||
| ddc2657165 | |||
| 98798b83df | |||
| ed82f74932 | |||
| cc5379f957 | |||
| 8b9ad44ebc | |||
| 206be3ff12 | |||
| 1afc2ca5ff | |||
| c61b3d3188 | |||
| 97da1ca288 | |||
| 6484656de0 | |||
| 961e3f2185 | |||
| f515a4f327 | |||
| 4ba2c5ec24 | |||
| f378f0fbde | |||
| c816fee184 | |||
| 4872dcc8ad | |||
| 8bc1ea500b | |||
| 7ed19f3a8f | |||
| e5663515a7 | |||
| 0f579cb97d | |||
| de896f895c | |||
| 3d57a622b1 | |||
| 5dfe7cb216 | |||
| dea0181009 | |||
| 4983f622d0 | |||
| 6654ab9fdc | |||
| d490ad3612 | |||
| e31de5ac99 | |||
| 7cd3e922f5 | |||
| 547bd89de9 | |||
| edabfd0831 | |||
| 127912c68c | |||
| af2aa36ac6 | |||
| d52493b7e4 | |||
| dfc94c10ff | |||
| a008e11504 | |||
| 6f28ed3a47 | |||
| c30a44a13d |
@@ -197,6 +197,7 @@ These are frequently overlooked issues that make UI look unprofessional:
|
||||
Before delivering UI code, verify these items:
|
||||
|
||||
### Visual Quality
|
||||
|
||||
- [ ] No emojis used as icons (use SVG instead)
|
||||
- [ ] All icons from consistent icon set (Heroicons/Lucide)
|
||||
- [ ] Brand logos are correct (verified from Simple Icons)
|
||||
@@ -204,24 +205,28 @@ Before delivering UI code, verify these items:
|
||||
- [ ] Use theme colors directly (bg-primary) not var() wrapper
|
||||
|
||||
### Interaction
|
||||
|
||||
- [ ] All clickable elements have `cursor-pointer`
|
||||
- [ ] Hover states provide clear visual feedback
|
||||
- [ ] Transitions are smooth (150-300ms)
|
||||
- [ ] Focus states visible for keyboard navigation
|
||||
|
||||
### Light/Dark Mode
|
||||
|
||||
- [ ] Light mode text has sufficient contrast (4.5:1 minimum)
|
||||
- [ ] Glass/transparent elements visible in light mode
|
||||
- [ ] Borders visible in both modes
|
||||
- [ ] Test both modes before delivery
|
||||
|
||||
### Layout
|
||||
|
||||
- [ ] Floating elements have proper spacing from edges
|
||||
- [ ] No content hidden behind fixed navbars
|
||||
- [ ] Responsive at 320px, 768px, 1024px, 1440px
|
||||
- [ ] No horizontal scroll on mobile
|
||||
|
||||
### Accessibility
|
||||
|
||||
- [ ] All images have alt text
|
||||
- [ ] Form inputs have labels
|
||||
- [ ] Color is not the only indicator
|
||||
|
||||
@@ -1,42 +0,0 @@
|
||||
---
|
||||
name: "Bug report"
|
||||
about: Report a bug
|
||||
---
|
||||
|
||||
<!--
|
||||
Hi there! To expedite issue processing please search open and closed issues before submitting a new one. Existing issues often contain information about workarounds, resolution, or progress updates.
|
||||
-->
|
||||
|
||||
# Bug Report
|
||||
|
||||
## Description
|
||||
|
||||
<!-- A clear and concise description of the problem. -->
|
||||
|
||||
## Is this a regression?
|
||||
|
||||
<!-- Did this behavior use to work in the previous version? -->
|
||||
|
||||
## Minimal Reproduction
|
||||
|
||||
<!-- Clear steps to re-produce the issue. -->
|
||||
|
||||
1.
|
||||
2.
|
||||
3.
|
||||
|
||||
## Your Environment
|
||||
|
||||
<!-- Please provide as much information as you feel comfortable to help us understand the issue better -->
|
||||
|
||||
## Exception or Error or Screenshot
|
||||
|
||||
<!-- Please provide any error messages, stack traces, or screenshots that might help -->
|
||||
|
||||
<pre><code>
|
||||
<!-- Paste error logs here -->
|
||||
</code></pre>
|
||||
|
||||
## Additional Context
|
||||
|
||||
<!-- Add any other context about the problem here. -->
|
||||
@@ -1,34 +0,0 @@
|
||||
---
|
||||
name: "Feature request"
|
||||
about: Suggest a feature
|
||||
---
|
||||
|
||||
# Feature Request
|
||||
|
||||
## Description
|
||||
|
||||
<!-- A clear and concise description of the problem or missing capability. -->
|
||||
|
||||
## Describe the solution you'd like
|
||||
|
||||
<!-- If you have a solution in mind, please describe it. -->
|
||||
|
||||
## Describe alternatives you've considered
|
||||
|
||||
<!-- Have you considered any alternative solutions or workarounds? -->
|
||||
|
||||
## Use Case
|
||||
|
||||
<!-- Describe the specific use case and how this feature would benefit users. -->
|
||||
|
||||
## Priority
|
||||
|
||||
<!-- How important is this feature to you? -->
|
||||
|
||||
- [ ] Low - Nice to have
|
||||
- [ ] Medium - Would improve my workflow
|
||||
- [ ] High - Critical for my use case
|
||||
|
||||
## Additional Context
|
||||
|
||||
<!-- Add any other context, mockups, or examples about the feature request here. -->
|
||||
@@ -0,0 +1,63 @@
|
||||
name: Bug Report
|
||||
description: Something isn't working
|
||||
labels: ["bug"]
|
||||
body:
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
label: What happened?
|
||||
placeholder: Describe the bug. What did you expect vs what actually happened?
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: steps
|
||||
attributes:
|
||||
label: Steps to reproduce
|
||||
placeholder: |
|
||||
1. Go to ...
|
||||
2. Click on ...
|
||||
3. See error
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: dropdown
|
||||
id: os
|
||||
attributes:
|
||||
label: Operating System
|
||||
options:
|
||||
- macOS (Apple Silicon)
|
||||
- macOS (Intel)
|
||||
- Windows
|
||||
- Linux
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: input
|
||||
id: version
|
||||
attributes:
|
||||
label: Donut Browser version
|
||||
placeholder: e.g. 0.17.6 or nightly-2026-03-21
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: dropdown
|
||||
id: browser
|
||||
attributes:
|
||||
label: Which browser is affected?
|
||||
options:
|
||||
- Wayfern
|
||||
- Camoufox
|
||||
- Both
|
||||
- Not browser-specific
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
label: Error logs or screenshots
|
||||
description: Run from terminal to get logs. Paste errors, screenshots, or screen recordings.
|
||||
placeholder: Paste logs here or drag screenshots
|
||||
validations:
|
||||
required: false
|
||||
@@ -0,0 +1,5 @@
|
||||
blank_issues_enabled: false
|
||||
contact_links:
|
||||
- name: Questions & Discussion
|
||||
url: https://github.com/zhom/donutbrowser/discussions
|
||||
about: Ask questions or discuss ideas here instead of opening an issue.
|
||||
@@ -0,0 +1,30 @@
|
||||
name: Feature Request
|
||||
description: Suggest a new feature
|
||||
labels: ["enhancement"]
|
||||
body:
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
label: What do you want?
|
||||
placeholder: Describe the feature and why you need it.
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: textarea
|
||||
id: use-case
|
||||
attributes:
|
||||
label: Use case
|
||||
placeholder: How would you use this feature? What problem does it solve?
|
||||
validations:
|
||||
required: true
|
||||
|
||||
- type: dropdown
|
||||
id: priority
|
||||
attributes:
|
||||
label: How important is this to you?
|
||||
options:
|
||||
- Nice to have
|
||||
- Would improve my workflow
|
||||
- Critical for my use case
|
||||
validations:
|
||||
required: true
|
||||
@@ -1,54 +1,20 @@
|
||||
# ✨ Pull Request
|
||||
## Which issue does this PR fix?
|
||||
|
||||
## 📓 Referenced Issue
|
||||
<!-- Link the issue. #123 -->
|
||||
|
||||
<!-- Please link the related issue. Use # before the issue number and use the verbs 'fixes', 'resolves' to auto-link it, for eg, Fixes: #<issue-number> -->
|
||||
## How to test
|
||||
|
||||
## ℹ️ About the PR
|
||||
<!-- Steps for the reviewer to verify your changes work -->
|
||||
|
||||
<!-- Please provide a description of your solution if it is not clear in the related issue or if the PR has a breaking change. If there is an interesting topic to discuss or you have questions or there is an issue with Tauri, Rust, or another library that you have used. -->
|
||||
## Checklist
|
||||
|
||||
## 🔄 Type of Change
|
||||
- [ ] Read [CONTRIBUTING.md](https://github.com/zhom/donutbrowser/blob/main/CONTRIBUTING.md)
|
||||
- [ ] Ran `pnpm format && pnpm lint && pnpm test` locally and it passes
|
||||
- [ ] I tested the changes myself by running the app locally
|
||||
- [ ] Updated translations in all locale files (if UI text changed)
|
||||
|
||||
<!-- Mark the relevant option with an "x". -->
|
||||
## AI usage
|
||||
|
||||
- [ ] 🐛 Bug fix (non-breaking change which fixes an issue)
|
||||
- [ ] ✨ New feature (non-breaking change which adds functionality)
|
||||
- [ ] 💥 Breaking change (fix or feature that would cause existing functionality to not work as expected)
|
||||
- [ ] 📚 Documentation update
|
||||
- [ ] 🧹 Code cleanup/refactoring
|
||||
- [ ] ⚡ Performance improvement
|
||||
- [ ] I used AI to help write this PR
|
||||
|
||||
## 🖼️ Testing Scenarios / Screenshots
|
||||
|
||||
<!-- Please include screenshots or gif to showcase the final output. Also, try to explain the testing you did to validate your change. -->
|
||||
|
||||
## ✅ Checklist
|
||||
|
||||
<!-- Mark completed items with an "x". -->
|
||||
|
||||
- [ ] My code follows the style guidelines of this project
|
||||
- [ ] I have performed a self-review of my own code
|
||||
- [ ] I have commented my code, particularly in hard-to-understand areas
|
||||
- [ ] I have made corresponding changes to the documentation
|
||||
- [ ] My changes generate no new warnings
|
||||
- [ ] I have added tests that prove my fix is effective or that my feature works
|
||||
- [ ] New and existing unit tests pass locally with my changes
|
||||
- [ ] Any dependent changes have been merged and published
|
||||
|
||||
## 🧪 How Has This Been Tested?
|
||||
|
||||
<!-- Please describe the tests that you ran to verify your changes. -->
|
||||
|
||||
## 📱 Platform Testing
|
||||
|
||||
<!-- Which platforms have you tested on? -->
|
||||
|
||||
- [ ] macOS (Intel)
|
||||
- [ ] macOS (Apple Silicon)
|
||||
- [ ] Windows (if applicable)
|
||||
- [ ] Linux (if applicable)
|
||||
|
||||
## 📋 Additional Notes
|
||||
|
||||
<!-- Any additional information that reviewers should know about this PR. -->
|
||||
<!-- If you checked the box above, briefly explain how AI was used (e.g. "generated the test", "wrote the initial implementation", "full PR"). -->
|
||||
|
||||
@@ -0,0 +1,33 @@
|
||||
messages:
|
||||
- role: system
|
||||
content: |-
|
||||
You are an expert technical writer tasked with generating comprehensive release notes for Donut Browser, a powerful anti-detect browser desktop app built with Tauri + Next.js that helps users manage multiple browser profiles with proxy support.
|
||||
|
||||
Guidelines:
|
||||
- Use clear, user-friendly language
|
||||
- Group related commits logically
|
||||
- Omit minor commits like formatting, typos unless significant
|
||||
- Focus on user-facing changes
|
||||
- Use emojis sparingly and consistently
|
||||
- Keep descriptions concise but informative
|
||||
- If commits are unclear, infer the purpose from the context
|
||||
- Only include sections that have relevant changes
|
||||
- role: user
|
||||
content: |-
|
||||
Generate release notes for version {{version}} based on these commits:
|
||||
|
||||
{{commits}}
|
||||
|
||||
Use this format:
|
||||
|
||||
## What's New in {{version}}
|
||||
|
||||
[Brief 1-2 sentence overview]
|
||||
|
||||
### New Features
|
||||
### Bug Fixes
|
||||
### Improvements
|
||||
### Documentation
|
||||
### Dependencies
|
||||
### Developer Experience
|
||||
model: openai/gpt-4.1
|
||||
@@ -27,12 +27,14 @@ jobs:
|
||||
build-mode: none
|
||||
- language: javascript-typescript
|
||||
build-mode: none
|
||||
- language: rust
|
||||
build-mode: none
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 #v6.0.1
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd #v6.0.2
|
||||
|
||||
- name: Set up pnpm package manager
|
||||
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 #v4.2.0
|
||||
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 #v4.4.0
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
|
||||
@@ -14,6 +14,7 @@ permissions:
|
||||
|
||||
jobs:
|
||||
contrib-readme-job:
|
||||
if: github.repository == 'zhom/donutbrowser'
|
||||
runs-on: ubuntu-latest
|
||||
name: Automatically update the contributors list in the README
|
||||
permissions:
|
||||
@@ -21,7 +22,7 @@ jobs:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 #v6.0.1
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd #v6.0.2
|
||||
- name: Contribute List
|
||||
uses: akhilmhdh/contributors-readme-action@83ea0b4f1ac928fbfe88b9e8460a932a528eb79f #v2.3.11
|
||||
env:
|
||||
|
||||
@@ -12,8 +12,8 @@ permissions:
|
||||
jobs:
|
||||
security-scan:
|
||||
name: Security Vulnerability Scan
|
||||
if: ${{ github.actor == 'dependabot[bot]' }}
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@c5996e0193a3df57d695c1b8a1dec2a4c62e8730" # v2.3.3
|
||||
if: github.repository == 'zhom/donutbrowser' && github.actor == 'dependabot[bot]'
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@c51854704019a247608d928f370c98740469d4b5" # v2.3.5
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
@@ -28,7 +28,7 @@ jobs:
|
||||
|
||||
lint-js:
|
||||
name: Lint JavaScript/TypeScript
|
||||
if: ${{ github.actor == 'dependabot[bot]' }}
|
||||
if: github.repository == 'zhom/donutbrowser' && github.actor == 'dependabot[bot]'
|
||||
uses: ./.github/workflows/lint-js.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
@@ -36,7 +36,7 @@ jobs:
|
||||
|
||||
lint-rust:
|
||||
name: Lint Rust
|
||||
if: ${{ github.actor == 'dependabot[bot]' }}
|
||||
if: github.repository == 'zhom/donutbrowser' && github.actor == 'dependabot[bot]'
|
||||
uses: ./.github/workflows/lint-rs.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
@@ -44,7 +44,7 @@ jobs:
|
||||
|
||||
codeql:
|
||||
name: CodeQL
|
||||
if: ${{ github.actor == 'dependabot[bot]' }}
|
||||
if: github.repository == 'zhom/donutbrowser' && github.actor == 'dependabot[bot]'
|
||||
uses: ./.github/workflows/codeql.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
@@ -55,7 +55,7 @@ jobs:
|
||||
|
||||
spellcheck:
|
||||
name: Spell Check
|
||||
if: ${{ github.actor == 'dependabot[bot]' }}
|
||||
if: github.repository == 'zhom/donutbrowser' && github.actor == 'dependabot[bot]'
|
||||
uses: ./.github/workflows/spellcheck.yml
|
||||
secrets: inherit
|
||||
permissions:
|
||||
@@ -63,13 +63,13 @@ jobs:
|
||||
|
||||
dependabot-automerge:
|
||||
name: Dependabot Automerge
|
||||
if: ${{ github.actor == 'dependabot[bot]' }}
|
||||
if: github.repository == 'zhom/donutbrowser' && github.actor == 'dependabot[bot]'
|
||||
needs: [security-scan, lint-js, lint-rust, codeql, spellcheck]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Dependabot metadata
|
||||
id: metadata
|
||||
uses: dependabot/fetch-metadata@21025c705c08248db411dc16f3619e6b5f9ea21a #v2.5.0
|
||||
uses: dependabot/fetch-metadata@ffa630c65fa7e0ecfa0625b5ceda64399aea1b36 #v3.0.0
|
||||
with:
|
||||
github-token: "${{ secrets.GITHUB_TOKEN }}"
|
||||
- name: Enable auto-merge for minor and patch updates
|
||||
|
||||
@@ -0,0 +1,73 @@
|
||||
name: Build and Push donut-sync Docker Image
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
paths:
|
||||
- "donut-sync/**"
|
||||
workflow_call:
|
||||
inputs:
|
||||
tag:
|
||||
description: "Docker tag (e.g., v1.0.0)"
|
||||
required: true
|
||||
type: string
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
tag:
|
||||
description: "Docker tag (e.g., v1.0.0, latest)"
|
||||
required: true
|
||||
default: "latest"
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
env:
|
||||
REGISTRY: docker.io
|
||||
IMAGE_NAME: donutbrowser/donut-sync
|
||||
|
||||
jobs:
|
||||
build-and-push:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd #v6.0.2
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd #v4.0.0
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 #v4.1.0
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Determine tags
|
||||
id: tags
|
||||
run: |
|
||||
TAGS=""
|
||||
INPUT_TAG="${{ inputs.tag }}"
|
||||
|
||||
if [ -n "$INPUT_TAG" ]; then
|
||||
# Called from release workflow or manual dispatch
|
||||
TAGS="${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${INPUT_TAG}"
|
||||
TAGS="${TAGS},${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest"
|
||||
elif [ "${{ github.event_name }}" = "push" ]; then
|
||||
# Push to main (nightly): tag with nightly and commit SHA
|
||||
SHORT_SHA=$(echo "${{ github.sha }}" | cut -c1-7)
|
||||
TAGS="${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:nightly"
|
||||
TAGS="${TAGS},${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:nightly-${SHORT_SHA}"
|
||||
fi
|
||||
|
||||
echo "tags=${TAGS}" >> "$GITHUB_OUTPUT"
|
||||
echo "Tags: ${TAGS}"
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@d08e5c354a6adb9ed34480a06d141179aa583294 #v7.0.0
|
||||
with:
|
||||
context: .
|
||||
file: ./donut-sync/Dockerfile
|
||||
push: true
|
||||
tags: ${{ steps.tags.outputs.tags }}
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
platforms: linux/amd64,linux/arm64
|
||||
@@ -0,0 +1,49 @@
|
||||
name: Flake Test
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- "flake.nix"
|
||||
- "flake.lock"
|
||||
- ".github/workflows/flake-test.yml"
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- "flake.nix"
|
||||
- "flake.lock"
|
||||
- ".github/workflows/flake-test.yml"
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
flake:
|
||||
name: validate-flake
|
||||
runs-on: ubuntu-22.04
|
||||
timeout-minutes: 90
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd #v6.0.2
|
||||
|
||||
- name: Install Nix
|
||||
uses: cachix/install-nix-action@a6f7623b2e2401f485f1eead77ced45bd99b09b0 #v31
|
||||
with:
|
||||
extra_nix_config: |
|
||||
experimental-features = nix-command flakes
|
||||
|
||||
- name: Evaluate flake outputs
|
||||
run: nix flake show --all-systems
|
||||
|
||||
- name: Check setup app is exposed
|
||||
run: nix eval .#apps.x86_64-linux.setup.program --raw
|
||||
|
||||
- name: Run flake setup app
|
||||
env:
|
||||
CI: "true"
|
||||
run: nix run .#setup
|
||||
|
||||
- name: Run flake info app
|
||||
run: nix run .#info
|
||||
@@ -3,7 +3,7 @@ name: Issue & PR Automation
|
||||
on:
|
||||
issues:
|
||||
types: [opened]
|
||||
pull_request:
|
||||
pull_request_target:
|
||||
types: [opened]
|
||||
issue_comment:
|
||||
types: [created]
|
||||
@@ -14,93 +14,15 @@ permissions:
|
||||
contents: read
|
||||
issues: write
|
||||
pull-requests: write
|
||||
models: read
|
||||
id-token: write
|
||||
|
||||
jobs:
|
||||
validate-issue:
|
||||
if: github.event_name == 'issues'
|
||||
analyze-issue:
|
||||
if: github.repository == 'zhom/donutbrowser' && github.event_name == 'issues'
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 #v6.0.1
|
||||
|
||||
- name: Get issue templates
|
||||
id: get-templates
|
||||
run: |
|
||||
if [ -f ".github/ISSUE_TEMPLATE/01-bug-report.md" ]; then
|
||||
echo "bug-template-exists=true" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
if [ -f ".github/ISSUE_TEMPLATE/02-feature-request.md" ]; then
|
||||
echo "feature-template-exists=true" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Create issue analysis prompt
|
||||
id: create-prompt
|
||||
env:
|
||||
ISSUE_TITLE: ${{ github.event.issue.title }}
|
||||
ISSUE_BODY: ${{ github.event.issue.body }}
|
||||
ISSUE_LABELS: ${{ join(github.event.issue.labels.*.name, ', ') }}
|
||||
run: |
|
||||
cat > issue_analysis.txt << EOF
|
||||
## Issue Content to Analyze:
|
||||
|
||||
**Title:** $ISSUE_TITLE
|
||||
|
||||
**Body:**
|
||||
$ISSUE_BODY
|
||||
|
||||
**Labels:** $ISSUE_LABELS
|
||||
EOF
|
||||
|
||||
- name: Validate issue with AI
|
||||
id: validate
|
||||
uses: actions/ai-inference@a380166897b5408b8fb7dddd148142794cb5624a # v2.0.6
|
||||
with:
|
||||
prompt-file: issue_analysis.txt
|
||||
system-prompt: |
|
||||
You are an issue validation assistant for Donut Browser, an anti-detect browser.
|
||||
|
||||
Analyze the provided issue content and determine if it contains sufficient information based on these requirements:
|
||||
|
||||
**For Bug Reports, the issue should include:**
|
||||
1. Clear description of the problem
|
||||
2. Steps to reproduce the issue (numbered list preferred)
|
||||
3. Expected vs actual behavior
|
||||
4. Environment information (OS, browser version, etc.)
|
||||
5. Error messages, stack traces, or screenshots if applicable
|
||||
|
||||
**For Feature Requests, the issue should include:**
|
||||
1. Clear description of the requested feature
|
||||
2. Use case or problem it solves
|
||||
3. Proposed solution or how it should work
|
||||
4. Priority level or importance
|
||||
|
||||
**General Requirements for all issues:**
|
||||
1. Descriptive title
|
||||
2. Sufficient detail to understand and act upon
|
||||
3. Professional tone and clear communication
|
||||
|
||||
Respond ONLY with valid JSON (no markdown fences). Keep responses concise.
|
||||
|
||||
JSON structure:
|
||||
{
|
||||
"is_valid": true|false,
|
||||
"issue_type": "bug_report"|"feature_request"|"other",
|
||||
"missing_info": ["item1", "item2"],
|
||||
"suggestions": ["suggestion1", "suggestion2"],
|
||||
"overall_assessment": "One sentence assessment"
|
||||
}
|
||||
|
||||
IMPORTANT CONSTRAINTS:
|
||||
- Maximum 3 items in missing_info array
|
||||
- Maximum 3 items in suggestions array
|
||||
- Each array item must be under 80 characters
|
||||
- overall_assessment must be under 100 characters
|
||||
- Output ONLY the JSON object, nothing else
|
||||
model: gpt-5-mini
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd #v6.0.2
|
||||
|
||||
- name: Check if first-time contributor
|
||||
id: check-first-time
|
||||
@@ -108,9 +30,9 @@ jobs:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
ISSUE_AUTHOR: ${{ github.event.issue.user.login }}
|
||||
run: |
|
||||
ISSUE_COUNT=$(gh api "/repos/${{ github.repository }}/issues" \
|
||||
--jq "map(select(.user.login == \"$ISSUE_AUTHOR\" and .number != ${{ github.event.issue.number }})) | length" \
|
||||
--paginate || echo "0")
|
||||
ISSUE_COUNT=$(gh api "/repos/${{ github.repository }}/issues?state=all&creator=$ISSUE_AUTHOR&per_page=100" \
|
||||
--jq "[.[] | select(.number != ${{ github.event.issue.number }}) ] | length" \
|
||||
|| echo "0")
|
||||
|
||||
if [ "$ISSUE_COUNT" = "0" ]; then
|
||||
echo "is_first_time=true" >> $GITHUB_OUTPUT
|
||||
@@ -118,108 +40,148 @@ jobs:
|
||||
echo "is_first_time=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Parse validation result and take action
|
||||
- name: Build repo context and find related files
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
RESPONSE_FILE: ${{ steps.validate.outputs.response-file }}
|
||||
ISSUE_TITLE: ${{ github.event.issue.title }}
|
||||
ISSUE_BODY: ${{ github.event.issue.body }}
|
||||
run: |
|
||||
if [ -n "$RESPONSE_FILE" ] && [ -f "$RESPONSE_FILE" ]; then
|
||||
RAW_OUTPUT=$(cat "$RESPONSE_FILE")
|
||||
else
|
||||
echo "::error::Response file not found: $RESPONSE_FILE"
|
||||
# Read project guidelines (contains repo structure)
|
||||
cp CLAUDE.md /tmp/repo-context.txt
|
||||
|
||||
printf '%s' "$ISSUE_TITLE" > /tmp/issue-title.txt
|
||||
printf '%s' "${ISSUE_BODY:-}" > /tmp/issue-body.txt
|
||||
|
||||
# List all source files for the AI to pick from
|
||||
find . -type f \( -name "*.rs" -o -name "*.ts" -o -name "*.tsx" \) \
|
||||
! -path "*/node_modules/*" ! -path "*/target/*" ! -path "*/.next/*" ! -path "*/dist/*" \
|
||||
! -path "*/.git/*" ! -path "*/gen/*" ! -path "*/data/*" \
|
||||
| sed 's|^\./||' | sort > /tmp/all-source-files.txt
|
||||
|
||||
- name: Select relevant files with AI
|
||||
env:
|
||||
OPENROUTER_API_KEY: ${{ secrets.OPENROUTER_API_KEY }}
|
||||
run: |
|
||||
PAYLOAD=$(jq -n \
|
||||
--rawfile title /tmp/issue-title.txt \
|
||||
--rawfile body /tmp/issue-body.txt \
|
||||
--rawfile files /tmp/all-source-files.txt \
|
||||
'{
|
||||
model: "anthropic/claude-opus-4.6",
|
||||
messages: [
|
||||
{
|
||||
role: "system",
|
||||
content: "You are a file selector for Donut Browser (Tauri + Next.js + Rust anti-detect browser). Given an issue and a list of source files, output ONLY the 10 most likely relevant file paths, one per line. No explanations, no numbering, just paths."
|
||||
},
|
||||
{
|
||||
role: "user",
|
||||
content: ("Issue: " + $title + "\n\n" + $body + "\n\nFiles:\n" + $files)
|
||||
}
|
||||
]
|
||||
}')
|
||||
|
||||
RESPONSE=$(curl -fsSL https://openrouter.ai/api/v1/chat/completions \
|
||||
-H "Authorization: Bearer $OPENROUTER_API_KEY" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d "$PAYLOAD")
|
||||
|
||||
jq -r '.choices[0].message.content // empty' <<< "$RESPONSE" > /tmp/selected-files.txt
|
||||
|
||||
# Read the selected files in full (skip binary files)
|
||||
echo "" > /tmp/file-contents.txt
|
||||
while IFS= read -r filepath; do
|
||||
filepath=$(echo "$filepath" | xargs)
|
||||
[ -z "$filepath" ] && continue
|
||||
if [ -f "$filepath" ] && file --mime "$filepath" | grep -q "text/"; then
|
||||
echo "=== $filepath ===" >> /tmp/file-contents.txt
|
||||
cat "$filepath" >> /tmp/file-contents.txt
|
||||
echo "" >> /tmp/file-contents.txt
|
||||
fi
|
||||
done < /tmp/selected-files.txt
|
||||
|
||||
# Cap total context at 100KB
|
||||
head -c 100000 /tmp/file-contents.txt > /tmp/file-context.txt
|
||||
|
||||
- name: Analyze issue with AI
|
||||
env:
|
||||
OPENROUTER_API_KEY: ${{ secrets.OPENROUTER_API_KEY }}
|
||||
ISSUE_TITLE: ${{ github.event.issue.title }}
|
||||
ISSUE_BODY: ${{ github.event.issue.body }}
|
||||
ISSUE_AUTHOR: ${{ github.event.issue.user.login }}
|
||||
IS_FIRST_TIME: ${{ steps.check-first-time.outputs.is_first_time }}
|
||||
run: |
|
||||
GREETING=""
|
||||
if [ "$IS_FIRST_TIME" = "true" ]; then
|
||||
GREETING='This is a first-time contributor. Start your comment with: "Thanks for opening your first issue!"'
|
||||
fi
|
||||
|
||||
printf '%s' "$ISSUE_TITLE" > /tmp/issue-title.txt
|
||||
printf '%s' "${ISSUE_BODY:-}" > /tmp/issue-body.txt
|
||||
printf '%s' "$ISSUE_AUTHOR" > /tmp/issue-author.txt
|
||||
printf '%s' "$GREETING" > /tmp/greeting.txt
|
||||
|
||||
PAYLOAD=$(jq -n \
|
||||
--rawfile title /tmp/issue-title.txt \
|
||||
--rawfile body /tmp/issue-body.txt \
|
||||
--rawfile author /tmp/issue-author.txt \
|
||||
--rawfile greeting /tmp/greeting.txt \
|
||||
--rawfile repo_context /tmp/repo-context.txt \
|
||||
--rawfile context /tmp/file-context.txt \
|
||||
'{
|
||||
model: "anthropic/claude-opus-4.6",
|
||||
messages: [
|
||||
{
|
||||
role: "system",
|
||||
content: ("You are a triage bot for Donut Browser, an open-source anti-detect browser (Tauri desktop app: Rust backend + Next.js frontend).\n\nProject guidelines and structure:\n" + $repo_context + "\n\nYou have access to relevant source files for context.\n\nAnalyze the issue and produce a single comment. Your job is to collect missing information needed to diagnose the issue, NOT to guess the cause.\n\nFormat:\n\n1. One sentence acknowledging the issue.\n2. **Missing information** - Ask specific questions about what is missing from the report. Focus on reproducing the issue. Do NOT speculate about root causes or mention internal code/files — you will almost certainly be wrong without logs. Instead, ask for:\n - Exact steps to reproduce (if not provided)\n - Expected vs actual behavior (if unclear)\n - Error messages or screenshots (if not provided)\n - OS and app version (if not provided)\n - For bug reports: if logs are needed, tell the user EXACTLY how to get them:\n - macOS app logs: `~/Library/Logs/Donut Browser/`\n - Linux app logs: `~/.local/share/DonutBrowser/logs/`\n - Windows app logs: `%APPDATA%\\DonutBrowser\\logs\\`\n - Sync server logs: `docker logs <container>` or check the server console\n - Provide a ready-to-run shell command when possible.\n - For self-hosted sync issues: check if the user is using the latest Docker image (`docker pull donutbrowser/donut-sync:latest`).\n - Only ask for information that is actually missing. If the issue is already detailed, just acknowledge it.\n3. Suggest a label: `Label: bug` or `Label: enhancement` on its own line.\n\nRules:\n- Do NOT include a \"Possible cause\" section. Do not speculate about what code might be causing the issue.\n- Be brief and focused on collecting actionable information from the reporter.\n- If the issue already has everything needed (steps to reproduce, logs, version, OS), just acknowledge it.\n- Never exceed 15 lines.")
|
||||
},
|
||||
{
|
||||
role: "user",
|
||||
content: (
|
||||
(if ($greeting | length) > 0 then $greeting + "\n\n" else "" end) +
|
||||
"Analyze this issue:\n\nTitle: " + $title +
|
||||
"\nAuthor: " + $author +
|
||||
"\n\nBody:\n" + $body +
|
||||
"\n\nRelevant source files:\n" + $context
|
||||
)
|
||||
}
|
||||
]
|
||||
}')
|
||||
|
||||
RESPONSE=$(curl -fsSL https://openrouter.ai/api/v1/chat/completions \
|
||||
-H "Authorization: Bearer $OPENROUTER_API_KEY" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d "$PAYLOAD")
|
||||
|
||||
jq -r '.choices[0].message.content // empty' <<< "$RESPONSE" > /tmp/ai-comment.txt
|
||||
|
||||
if [ ! -s /tmp/ai-comment.txt ]; then
|
||||
echo "::error::AI response was empty"
|
||||
echo "Raw response:"
|
||||
echo "$RESPONSE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
JSON_RESULT=$(printf "%s" "$RAW_OUTPUT" | sed -n '/```json/,/```/p' | sed '1d;$d')
|
||||
if [ -z "$JSON_RESULT" ]; then
|
||||
JSON_RESULT="$RAW_OUTPUT"
|
||||
fi
|
||||
|
||||
if ! echo "$JSON_RESULT" | jq empty 2>/dev/null; then
|
||||
echo "::warning::Invalid JSON in AI response, using fallback"
|
||||
JSON_RESULT='{"is_valid":true,"issue_type":"other","missing_info":[],"suggestions":[],"overall_assessment":"Unable to validate automatically"}'
|
||||
fi
|
||||
|
||||
IS_VALID=$(echo "$JSON_RESULT" | jq -r '.is_valid // false')
|
||||
ISSUE_TYPE=$(echo "$JSON_RESULT" | jq -r '.issue_type // "other"')
|
||||
MISSING_INFO=$(echo "$JSON_RESULT" | jq -r '.missing_info[]? // empty' | sed 's/^/- /')
|
||||
SUGGESTIONS=$(echo "$JSON_RESULT" | jq -r '.suggestions[]? // empty' | sed 's/^/- /')
|
||||
ASSESSMENT=$(echo "$JSON_RESULT" | jq -r '.overall_assessment // "No assessment provided"')
|
||||
|
||||
IS_FIRST_TIME="${{ steps.check-first-time.outputs.is_first_time }}"
|
||||
GREETING_SECTION=""
|
||||
if [ "$IS_FIRST_TIME" = "true" ]; then
|
||||
GREETING_SECTION="## 👋 Welcome!\n\nThank you for your first issue ❤️ If this is a feature request, please make sure it is clear what you want, why you want it, and how important it is to you. If you posted a bug report, please make sure it includes as much detail as possible.\n\n---\n\n"
|
||||
fi
|
||||
|
||||
if [ "$IS_VALID" = "false" ]; then
|
||||
{
|
||||
printf "%b" "$GREETING_SECTION"
|
||||
printf "## 🤖 Issue Validation\n\n"
|
||||
printf "Thank you for submitting this issue! However, it appears that some required information might be missing to help us better understand and address your concern.\n\n"
|
||||
printf "**Issue Type Detected:** \`%s\`\n\n" "$ISSUE_TYPE"
|
||||
printf "**Assessment:** %s\n\n" "$ASSESSMENT"
|
||||
printf "### 📋 Missing Information:\n%s\n\n" "$MISSING_INFO"
|
||||
printf "### 💡 Suggestions for Improvement:\n%s\n\n" "$SUGGESTIONS"
|
||||
printf "### 📝 How to Provide Additional Information:\n\n"
|
||||
printf "Please edit your original issue description to include the missing information. Here are our issue templates for reference:\n\n"
|
||||
printf -- "- **Bug Report Template:** [View Template](.github/ISSUE_TEMPLATE/01-bug-report.md)\n"
|
||||
printf -- "- **Feature Request Template:** [View Template](.github/ISSUE_TEMPLATE/02-feature-request.md)\n\n"
|
||||
printf "### 🔧 Quick Tips:\n"
|
||||
printf -- "- For **bug reports**: Include step-by-step reproduction instructions, your environment details, and any error messages\n"
|
||||
printf -- "- For **feature requests**: Describe the use case, expected behavior, and why this feature would be valuable\n"
|
||||
printf -- "- Add **screenshots** or **logs** when applicable\n\n"
|
||||
printf "Once you have updated the issue with the missing information, feel free to remove this comment or reply to let us know you have made the updates.\n\n"
|
||||
printf -- "---\n*This validation was performed automatically to ensure we have all the information needed to help you effectively.*\n"
|
||||
} > comment.md
|
||||
|
||||
gh issue comment ${{ github.event.issue.number }} --body-file comment.md
|
||||
gh issue edit ${{ github.event.issue.number }} --add-label "needs-info"
|
||||
else
|
||||
SUGGESTIONS_SECTION=""
|
||||
if [ -n "$SUGGESTIONS" ]; then
|
||||
SUGGESTIONS_SECTION=$(printf "### 💡 Suggestions:\n%s\n\n" "$SUGGESTIONS")
|
||||
fi
|
||||
|
||||
{
|
||||
printf "%b" "$GREETING_SECTION"
|
||||
printf "## 🤖 Issue Validation\n\n"
|
||||
printf "**Issue Type Detected:** \`%s\`\n\n" "$ISSUE_TYPE"
|
||||
printf "**Assessment:** %s\n\n" "$ASSESSMENT"
|
||||
printf "%b" "$SUGGESTIONS_SECTION"
|
||||
printf -- "---\n*This validation was performed automatically to help triage issues.*\n"
|
||||
} > comment.md
|
||||
|
||||
gh issue comment ${{ github.event.issue.number }} --body-file comment.md
|
||||
|
||||
case "$ISSUE_TYPE" in
|
||||
"bug_report")
|
||||
gh issue edit ${{ github.event.issue.number }} --add-label "bug"
|
||||
;;
|
||||
"feature_request")
|
||||
gh issue edit ${{ github.event.issue.number }} --add-label "enhancement"
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
|
||||
- name: Run opencode analysis
|
||||
uses: anomalyco/opencode/github@d1482e148399bfaf808674549199f5f4aa69a22d #v1.2.4
|
||||
- name: Post comment and label
|
||||
env:
|
||||
ZHIPU_API_KEY: ${{ secrets.ZHIPU_API_KEY }}
|
||||
with:
|
||||
model: zai-coding-plan/glm-4.7
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
ISSUE_NUMBER: ${{ github.event.issue.number }}
|
||||
run: |
|
||||
LABEL=$(grep -oP '^Label:\s*\K.*' /tmp/ai-comment.txt | tail -1 | tr '[:upper:]' '[:lower:]' | xargs)
|
||||
sed -i '/^Label:/d' /tmp/ai-comment.txt
|
||||
|
||||
- name: Cleanup
|
||||
run: rm -f issue_analysis.txt comment.md
|
||||
gh issue comment "$ISSUE_NUMBER" --repo "$GITHUB_REPOSITORY" --body-file /tmp/ai-comment.txt
|
||||
|
||||
handle-pr:
|
||||
if: github.event_name == 'pull_request' && github.actor != 'dependabot[bot]'
|
||||
if [ "$LABEL" = "bug" ]; then
|
||||
gh issue edit "$ISSUE_NUMBER" --repo "$GITHUB_REPOSITORY" --add-label "bug" 2>/dev/null || true
|
||||
elif [ "$LABEL" = "enhancement" ]; then
|
||||
gh issue edit "$ISSUE_NUMBER" --repo "$GITHUB_REPOSITORY" --add-label "enhancement" 2>/dev/null || true
|
||||
fi
|
||||
|
||||
analyze-pr:
|
||||
if: github.repository == 'zhom/donutbrowser' && github.event_name == 'pull_request_target' && github.actor != 'dependabot[bot]'
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 #v6.0.1
|
||||
with:
|
||||
fetch-depth: 0
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd #v6.0.2
|
||||
|
||||
- name: Check if first-time contributor
|
||||
id: check-first-time
|
||||
@@ -237,144 +199,123 @@ jobs:
|
||||
echo "is_first_time=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
- name: Get PR diff
|
||||
id: get-diff
|
||||
- name: Gather PR context
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
PR_NUMBER: ${{ github.event.pull_request.number }}
|
||||
run: |
|
||||
gh pr diff ${{ github.event.pull_request.number }} > pr_diff.txt
|
||||
head -c 10000 pr_diff.txt > pr_diff_truncated.txt
|
||||
# Get changed files list
|
||||
gh api "/repos/$GITHUB_REPOSITORY/pulls/$PR_NUMBER/files" \
|
||||
--jq '.[] | "- \(.filename) (\(.status)) +\(.additions)/-\(.deletions)"' \
|
||||
> /tmp/pr-files.txt
|
||||
|
||||
- name: Create PR analysis prompt
|
||||
env:
|
||||
PR_TITLE: ${{ github.event.pull_request.title }}
|
||||
PR_BODY: ${{ github.event.pull_request.body }}
|
||||
run: |
|
||||
{
|
||||
printf "## Pull Request to Review:\n\n"
|
||||
printf "**Title:** %s\n\n" "$PR_TITLE"
|
||||
printf "**Description:**\n%s\n\n" "$PR_BODY"
|
||||
printf "**Diff:**\n"
|
||||
cat pr_diff_truncated.txt
|
||||
} > pr_analysis.txt
|
||||
# Get the actual diff
|
||||
gh api "/repos/$GITHUB_REPOSITORY/pulls/$PR_NUMBER" \
|
||||
--header "Accept: application/vnd.github.diff" \
|
||||
> /tmp/pr-diff-full.txt 2>/dev/null || true
|
||||
head -c 20000 /tmp/pr-diff-full.txt > /tmp/pr-diff.txt
|
||||
|
||||
# Get CONTRIBUTING.md and README.md for context
|
||||
cat CONTRIBUTING.md > /tmp/contributing.txt 2>/dev/null || echo "Not found" > /tmp/contributing.txt
|
||||
head -50 README.md > /tmp/readme.txt 2>/dev/null || echo "Not found" > /tmp/readme.txt
|
||||
|
||||
# Read project guidelines (contains repo structure)
|
||||
cp CLAUDE.md /tmp/repo-context.txt
|
||||
|
||||
# Read full contents of all changed files (skip binary)
|
||||
echo "" > /tmp/related-file-contents.txt
|
||||
gh api "/repos/$GITHUB_REPOSITORY/pulls/$PR_NUMBER/files" --jq '.[].filename' | while IFS= read -r filepath; do
|
||||
if [ -f "$filepath" ] && file --mime "$filepath" | grep -q "text/"; then
|
||||
echo "=== $filepath (full file) ===" >> /tmp/related-file-contents.txt
|
||||
cat "$filepath" >> /tmp/related-file-contents.txt
|
||||
echo "" >> /tmp/related-file-contents.txt
|
||||
fi
|
||||
done
|
||||
head -c 100000 /tmp/related-file-contents.txt > /tmp/pr-file-context.txt
|
||||
|
||||
- name: Analyze PR with AI
|
||||
id: analyze
|
||||
uses: actions/ai-inference@a380166897b5408b8fb7dddd148142794cb5624a # v2.0.6
|
||||
with:
|
||||
prompt-file: pr_analysis.txt
|
||||
system-prompt: |
|
||||
You are a code review assistant for Donut Browser, an open-source anti-detect browser built with Tauri, Next.js, and Rust.
|
||||
|
||||
Review the provided pull request and provide constructive feedback. Focus on:
|
||||
1. Code quality and best practices
|
||||
2. Potential bugs or issues
|
||||
3. Security concerns (especially important for an anti-detect browser)
|
||||
4. Performance implications
|
||||
5. Consistency with the project's patterns
|
||||
|
||||
Respond ONLY with valid JSON (no markdown fences).
|
||||
|
||||
JSON structure:
|
||||
{
|
||||
"summary": "Brief 1-2 sentence summary of what this PR does",
|
||||
"quality_score": "good"|"needs_work"|"critical_issues",
|
||||
"feedback": ["feedback point 1", "feedback point 2"],
|
||||
"suggestions": ["suggestion 1", "suggestion 2"],
|
||||
"security_notes": ["security note if any"] or []
|
||||
}
|
||||
|
||||
IMPORTANT CONSTRAINTS:
|
||||
- Maximum 4 items in feedback array
|
||||
- Maximum 3 items in suggestions array
|
||||
- Maximum 2 items in security_notes array
|
||||
- Each array item must be under 150 characters
|
||||
- summary must be under 200 characters
|
||||
- Be constructive and helpful, not harsh
|
||||
- Output ONLY the JSON object, nothing else
|
||||
model: gpt-5-mini
|
||||
|
||||
- name: Post PR feedback comment
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
RESPONSE_FILE: ${{ steps.analyze.outputs.response-file }}
|
||||
OPENROUTER_API_KEY: ${{ secrets.OPENROUTER_API_KEY }}
|
||||
PR_NUMBER: ${{ github.event.pull_request.number }}
|
||||
PR_TITLE: ${{ github.event.pull_request.title }}
|
||||
PR_BODY: ${{ github.event.pull_request.body }}
|
||||
PR_AUTHOR: ${{ github.event.pull_request.user.login }}
|
||||
PR_BASE: ${{ github.event.pull_request.base.ref }}
|
||||
PR_HEAD: ${{ github.event.pull_request.head.ref }}
|
||||
IS_FIRST_TIME: ${{ steps.check-first-time.outputs.is_first_time }}
|
||||
run: |
|
||||
if [ -n "$RESPONSE_FILE" ] && [ -f "$RESPONSE_FILE" ]; then
|
||||
RAW_OUTPUT=$(cat "$RESPONSE_FILE")
|
||||
else
|
||||
echo "::error::Response file not found"
|
||||
GREETING=""
|
||||
if [ "$IS_FIRST_TIME" = "true" ]; then
|
||||
GREETING='This is a first-time contributor. Start your comment with: "Thanks for your first PR!"'
|
||||
fi
|
||||
|
||||
printf '%s' "$PR_TITLE" > /tmp/pr-title.txt
|
||||
printf '%s' "${PR_BODY:-}" > /tmp/pr-body.txt
|
||||
printf '%s' "$PR_AUTHOR" > /tmp/pr-author.txt
|
||||
printf '%s' "$PR_BASE" > /tmp/pr-base.txt
|
||||
printf '%s' "$PR_HEAD" > /tmp/pr-head.txt
|
||||
printf '%s' "$GREETING" > /tmp/greeting.txt
|
||||
|
||||
PAYLOAD=$(jq -n \
|
||||
--rawfile title /tmp/pr-title.txt \
|
||||
--rawfile body /tmp/pr-body.txt \
|
||||
--rawfile author /tmp/pr-author.txt \
|
||||
--rawfile base /tmp/pr-base.txt \
|
||||
--rawfile head /tmp/pr-head.txt \
|
||||
--rawfile files /tmp/pr-files.txt \
|
||||
--rawfile diff /tmp/pr-diff.txt \
|
||||
--rawfile greeting /tmp/greeting.txt \
|
||||
--rawfile repo_context /tmp/repo-context.txt \
|
||||
--rawfile contributing /tmp/contributing.txt \
|
||||
--rawfile file_context /tmp/pr-file-context.txt \
|
||||
'{
|
||||
model: "anthropic/claude-opus-4.6",
|
||||
messages: [
|
||||
{
|
||||
role: "system",
|
||||
content: ("You are a code review bot for Donut Browser, an open-source anti-detect browser (Tauri desktop app: Rust backend + Next.js frontend).\n\nProject guidelines and structure:\n" + $repo_context + "\n\nContributing guidelines:\n" + $contributing + "\n\nYou have access to the full changed files and the diff. Use them to give a substantive review.\n\nReview this PR and produce a single comment. Format:\n\n1. One sentence summarizing what this PR does and whether the approach is sound.\n2. **Code review** - Specific observations about the actual code changes. Mention file names and what you see in the diff. Look for:\n - Bugs or logic errors in the changed code\n - Security issues (SQL injection, path traversal, XSS, command injection)\n - Missing error handling or edge cases\n - Breaking changes to existing APIs or behavior\n - If UI text was added/changed, check if all 7 translation files (en, es, fr, ja, pt, ru, zh) in src/i18n/locales/ were updated\n - If Tauri commands were added/removed, the unused-commands test in lib.rs needs updating\n3. **Suggestions** - Concrete improvements if any. Skip if the PR looks good.\n\nRules:\n- Be substantive. Review the actual diff, not just the description.\n- Do NOT nitpick formatting or style — the project has automated linting (biome + clippy + rustfmt).\n- Do NOT just summarize the PR description back to the user — they wrote it, they know what it says.\n- If the PR is good, say so briefly.\n- Never exceed 20 lines.")
|
||||
},
|
||||
{
|
||||
role: "user",
|
||||
content: (
|
||||
(if ($greeting | length) > 0 then $greeting + "\n\n" else "" end) +
|
||||
"Review this PR:\n\nTitle: " + $title +
|
||||
"\nAuthor: " + $author +
|
||||
"\nBase: " + $base + " <- Head: " + $head +
|
||||
"\n\nDescription:\n" + $body +
|
||||
"\n\nChanged files:\n" + $files +
|
||||
"\n\nDiff:\n" + $diff +
|
||||
"\n\nFull file contents:\n" + $file_context
|
||||
)
|
||||
}
|
||||
]
|
||||
}')
|
||||
|
||||
RESPONSE=$(curl -fsSL https://openrouter.ai/api/v1/chat/completions \
|
||||
-H "Authorization: Bearer $OPENROUTER_API_KEY" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d "$PAYLOAD")
|
||||
|
||||
jq -r '.choices[0].message.content // empty' <<< "$RESPONSE" > /tmp/ai-comment.txt
|
||||
|
||||
if [ ! -s /tmp/ai-comment.txt ]; then
|
||||
echo "::error::AI response was empty"
|
||||
echo "Raw response:"
|
||||
echo "$RESPONSE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
JSON_RESULT=$(printf "%s" "$RAW_OUTPUT" | sed -n '/```json/,/```/p' | sed '1d;$d')
|
||||
if [ -z "$JSON_RESULT" ]; then
|
||||
JSON_RESULT="$RAW_OUTPUT"
|
||||
fi
|
||||
|
||||
if ! echo "$JSON_RESULT" | jq empty 2>/dev/null; then
|
||||
echo "::warning::Invalid JSON in AI response, using fallback"
|
||||
JSON_RESULT='{"summary":"Unable to analyze automatically","quality_score":"good","feedback":[],"suggestions":[],"security_notes":[]}'
|
||||
fi
|
||||
|
||||
SUMMARY=$(echo "$JSON_RESULT" | jq -r '.summary // "No summary"')
|
||||
QUALITY=$(echo "$JSON_RESULT" | jq -r '.quality_score // "good"')
|
||||
FEEDBACK=$(echo "$JSON_RESULT" | jq -r '.feedback[]? // empty' | sed 's/^/- /')
|
||||
SUGGESTIONS=$(echo "$JSON_RESULT" | jq -r '.suggestions[]? // empty' | sed 's/^/- /')
|
||||
SECURITY=$(echo "$JSON_RESULT" | jq -r '.security_notes[]? // empty' | sed 's/^/- ⚠️ /')
|
||||
|
||||
IS_FIRST_TIME="${{ steps.check-first-time.outputs.is_first_time }}"
|
||||
|
||||
{
|
||||
if [ "$IS_FIRST_TIME" = "true" ]; then
|
||||
printf "## 👋 Welcome!\n\n"
|
||||
printf "Thank you for your first contribution ❤️ A human will review your PR shortly. Make sure that the pipelines are green, so that the PR is considered ready for review and could be merged.\n\n"
|
||||
printf -- "---\n\n"
|
||||
fi
|
||||
|
||||
printf "## 🤖 PR Review\n\n"
|
||||
printf "**Summary:** %s\n\n" "$SUMMARY"
|
||||
|
||||
case "$QUALITY" in
|
||||
"good")
|
||||
printf "**Status:** ✅ Looking good!\n\n"
|
||||
;;
|
||||
"needs_work")
|
||||
printf "**Status:** 🔧 Some improvements suggested\n\n"
|
||||
;;
|
||||
"critical_issues")
|
||||
printf "**Status:** ⚠️ Please address the issues below\n\n"
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ -n "$FEEDBACK" ]; then
|
||||
printf "### 📝 Feedback:\n%s\n\n" "$FEEDBACK"
|
||||
fi
|
||||
|
||||
if [ -n "$SUGGESTIONS" ]; then
|
||||
printf "### 💡 Suggestions:\n%s\n\n" "$SUGGESTIONS"
|
||||
fi
|
||||
|
||||
if [ -n "$SECURITY" ]; then
|
||||
printf "### 🔒 Security Notes:\n%s\n\n" "$SECURITY"
|
||||
fi
|
||||
|
||||
printf -- "---\n*This review was performed automatically. A human maintainer will also review your changes.*\n"
|
||||
} > comment.md
|
||||
|
||||
gh pr comment ${{ github.event.pull_request.number }} --body-file comment.md
|
||||
|
||||
- name: Run opencode analysis
|
||||
uses: anomalyco/opencode/github@d1482e148399bfaf808674549199f5f4aa69a22d #v1.2.4
|
||||
- name: Post comment
|
||||
env:
|
||||
ZHIPU_API_KEY: ${{ secrets.ZHIPU_API_KEY }}
|
||||
with:
|
||||
model: zai-coding-plan/glm-4.7
|
||||
|
||||
- name: Cleanup
|
||||
run: rm -f pr_diff.txt pr_diff_truncated.txt pr_analysis.txt comment.md
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
PR_NUMBER: ${{ github.event.pull_request.number }}
|
||||
run: |
|
||||
gh pr comment "$PR_NUMBER" --repo "$GITHUB_REPOSITORY" --body-file /tmp/ai-comment.txt
|
||||
|
||||
opencode-command:
|
||||
if: |
|
||||
github.repository == 'zhom/donutbrowser' &&
|
||||
(github.event_name == 'issue_comment' || github.event_name == 'pull_request_review_comment') &&
|
||||
(contains(github.event.comment.body, ' /oc') ||
|
||||
startsWith(github.event.comment.body, '/oc') ||
|
||||
@@ -383,11 +324,12 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 #v6.0.1
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd #v6.0.2
|
||||
|
||||
- name: Run opencode
|
||||
uses: anomalyco/opencode/github@d1482e148399bfaf808674549199f5f4aa69a22d #v1.2.4
|
||||
uses: anomalyco/opencode/github@6314f09c14fdd6a3ab8bedc4f7b7182647551d12 #v1.3.13
|
||||
env:
|
||||
ZHIPU_API_KEY: ${{ secrets.ZHIPU_API_KEY }}
|
||||
TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
model: zai-coding-plan/glm-4.7
|
||||
|
||||
@@ -34,10 +34,10 @@ jobs:
|
||||
run: git config --global core.autocrlf false
|
||||
|
||||
- name: Checkout repository code
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 #v6.0.1
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd #v6.0.2
|
||||
|
||||
- name: Set up pnpm package manager
|
||||
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 #v4.2.0
|
||||
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 #v4.4.0
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
|
||||
@@ -29,6 +29,7 @@ permissions:
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [macos-latest, ubuntu-22.04, windows-latest]
|
||||
|
||||
@@ -40,10 +41,10 @@ jobs:
|
||||
run: git config --global core.autocrlf false
|
||||
|
||||
- name: Checkout repository code
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 #v6.0.1
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd #v6.0.2
|
||||
|
||||
- name: Set up pnpm package manager
|
||||
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 #v4.2.0
|
||||
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 #v4.4.0
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
@@ -66,7 +67,7 @@ jobs:
|
||||
if: matrix.os == 'ubuntu-22.04'
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt install libwebkit2gtk-4.1-dev build-essential curl wget file libxdo-dev libssl-dev libayatana-appindicator3-dev librsvg2-dev
|
||||
sudo apt install libwebkit2gtk-4.1-dev build-essential curl wget file libxdo-dev libssl-dev libayatana-appindicator3-dev librsvg2-dev openvpn
|
||||
|
||||
- name: Install frontend dependencies
|
||||
run: pnpm install --frozen-lockfile
|
||||
@@ -112,12 +113,8 @@ jobs:
|
||||
run: cargo clippy --all-targets --all-features -- -D warnings -D clippy::all
|
||||
working-directory: src-tauri
|
||||
|
||||
- name: Run Rust unit tests
|
||||
run: cargo test --lib && cargo test --test donut_proxy_integration && cargo test --test vpn_integration
|
||||
working-directory: src-tauri
|
||||
|
||||
- name: Run Rust sync e2e tests
|
||||
run: node scripts/sync-test-harness.mjs
|
||||
- name: Run test suite
|
||||
run: pnpm test
|
||||
|
||||
- name: Run cargo audit security check
|
||||
run: cargo audit
|
||||
|
||||
@@ -46,7 +46,7 @@ jobs:
|
||||
scan-scheduled:
|
||||
name: Scheduled Security Scan
|
||||
if: ${{ github.event_name == 'push' || github.event_name == 'schedule' }}
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@c5996e0193a3df57d695c1b8a1dec2a4c62e8730" # v2.3.3
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@c51854704019a247608d928f370c98740469d4b5" # v2.3.5
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
@@ -58,7 +58,7 @@ jobs:
|
||||
scan-pr:
|
||||
name: PR Security Scan
|
||||
if: ${{ github.event_name == 'pull_request' || github.event_name == 'merge_group' }}
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@c5996e0193a3df57d695c1b8a1dec2a4c62e8730" # v2.3.3
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@c51854704019a247608d928f370c98740469d4b5" # v2.3.5
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
|
||||
@@ -29,7 +29,7 @@ jobs:
|
||||
security-scan:
|
||||
name: Security Vulnerability Scan
|
||||
if: ${{ github.event_name == 'pull_request' || github.event_name == 'merge_group' }}
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@c5996e0193a3df57d695c1b8a1dec2a4c62e8730" # v2.3.3
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@c51854704019a247608d928f370c98740469d4b5" # v2.3.5
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
|
||||
@@ -0,0 +1,211 @@
|
||||
name: Publish Linux Repos
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
tag:
|
||||
description: "Release tag (e.g. v0.18.1). Leave empty for latest."
|
||||
required: false
|
||||
type: string
|
||||
workflow_run:
|
||||
workflows: ["Release"]
|
||||
types:
|
||||
- completed
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
publish-repos:
|
||||
if: >
|
||||
github.repository == 'zhom/donutbrowser' &&
|
||||
(github.event_name == 'workflow_dispatch' ||
|
||||
github.event.workflow_run.conclusion == 'success')
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Determine release tag
|
||||
id: tag
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
INPUT_TAG: ${{ inputs.tag }}
|
||||
run: |
|
||||
if [[ -n "${INPUT_TAG:-}" ]]; then
|
||||
echo "tag=${INPUT_TAG}" >> "$GITHUB_OUTPUT"
|
||||
elif [[ "${{ github.event_name }}" == "workflow_run" ]]; then
|
||||
# The Release workflow is triggered by a tag push (v*),
|
||||
# so head_branch is the tag name
|
||||
echo "tag=${{ github.event.workflow_run.head_branch }}" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
TAG=$(gh release view --repo "${{ github.repository }}" --json tagName -q .tagName)
|
||||
echo "tag=${TAG}" >> "$GITHUB_OUTPUT"
|
||||
fi
|
||||
|
||||
- name: Configure aws-cli for R2
|
||||
# aws-cli v2.23+ sends integrity checksums by default; Cloudflare R2
|
||||
# rejects those headers with `Unauthorized` on ListObjectsV2.
|
||||
# Also normalise the endpoint URL (must start with https://).
|
||||
# Both values propagate to later steps via $GITHUB_ENV.
|
||||
env:
|
||||
RAW_ENDPOINT: ${{ secrets.R2_ENDPOINT_URL }}
|
||||
run: |
|
||||
endpoint="$RAW_ENDPOINT"
|
||||
if [[ "$endpoint" != https://* && "$endpoint" != http://* ]]; then
|
||||
endpoint="https://$endpoint"
|
||||
fi
|
||||
echo "R2_ENDPOINT=$endpoint" >> "$GITHUB_ENV"
|
||||
echo "AWS_REQUEST_CHECKSUM_CALCULATION=WHEN_REQUIRED" >> "$GITHUB_ENV"
|
||||
echo "AWS_RESPONSE_CHECKSUM_VALIDATION=WHEN_REQUIRED" >> "$GITHUB_ENV"
|
||||
|
||||
- name: Install tools
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y dpkg-dev createrepo-c
|
||||
|
||||
- name: Download packages from GitHub release
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
TAG: ${{ steps.tag.outputs.tag }}
|
||||
run: |
|
||||
mkdir -p /tmp/packages
|
||||
gh release download "$TAG" \
|
||||
--repo "${{ github.repository }}" \
|
||||
--pattern "*.deb" \
|
||||
--dir /tmp/packages
|
||||
gh release download "$TAG" \
|
||||
--repo "${{ github.repository }}" \
|
||||
--pattern "*.rpm" \
|
||||
--dir /tmp/packages
|
||||
echo "Downloaded packages:"
|
||||
ls -lh /tmp/packages/
|
||||
|
||||
- name: Build DEB repository
|
||||
env:
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
|
||||
AWS_DEFAULT_REGION: auto
|
||||
R2_BUCKET: ${{ secrets.R2_BUCKET_NAME }}
|
||||
run: |
|
||||
DEB_DIR="/tmp/repo/deb"
|
||||
mkdir -p "$DEB_DIR/pool/main"
|
||||
mkdir -p "$DEB_DIR/dists/stable/main/binary-amd64"
|
||||
mkdir -p "$DEB_DIR/dists/stable/main/binary-arm64"
|
||||
|
||||
# Sync existing pool from R2 (incremental)
|
||||
aws s3 sync "s3://${R2_BUCKET}/deb/pool" "$DEB_DIR/pool" \
|
||||
--endpoint-url "$R2_ENDPOINT" 2>/dev/null || true
|
||||
|
||||
# Copy new .deb files into pool
|
||||
cp /tmp/packages/*.deb "$DEB_DIR/pool/main/" 2>/dev/null || true
|
||||
|
||||
# Generate Packages and Packages.gz for each arch
|
||||
for arch in amd64 arm64; do
|
||||
BINARY_DIR="$DEB_DIR/dists/stable/main/binary-${arch}"
|
||||
(cd "$DEB_DIR" && dpkg-scanpackages --arch "$arch" pool/main) \
|
||||
> "$BINARY_DIR/Packages"
|
||||
gzip -9c "$BINARY_DIR/Packages" > "$BINARY_DIR/Packages.gz"
|
||||
echo " $arch: $(grep -c '^Package:' "$BINARY_DIR/Packages" 2>/dev/null || echo 0) package(s)"
|
||||
done
|
||||
|
||||
# Generate Release file
|
||||
{
|
||||
echo "Origin: Donut Browser"
|
||||
echo "Label: Donut Browser"
|
||||
echo "Suite: stable"
|
||||
echo "Codename: stable"
|
||||
echo "Architectures: amd64 arm64"
|
||||
echo "Components: main"
|
||||
echo "Date: $(date -u '+%a, %d %b %Y %H:%M:%S UTC')"
|
||||
echo "MD5Sum:"
|
||||
for arch in amd64 arm64; do
|
||||
for file in "main/binary-${arch}/Packages" "main/binary-${arch}/Packages.gz"; do
|
||||
filepath="$DEB_DIR/dists/stable/$file"
|
||||
if [[ -f "$filepath" ]]; then
|
||||
size=$(wc -c < "$filepath")
|
||||
md5=$(md5sum "$filepath" | awk '{print $1}')
|
||||
printf " %s %8d %s\n" "$md5" "$size" "$file"
|
||||
fi
|
||||
done
|
||||
done
|
||||
echo "SHA256:"
|
||||
for arch in amd64 arm64; do
|
||||
for file in "main/binary-${arch}/Packages" "main/binary-${arch}/Packages.gz"; do
|
||||
filepath="$DEB_DIR/dists/stable/$file"
|
||||
if [[ -f "$filepath" ]]; then
|
||||
size=$(wc -c < "$filepath")
|
||||
sha256=$(sha256sum "$filepath" | awk '{print $1}')
|
||||
printf " %s %8d %s\n" "$sha256" "$size" "$file"
|
||||
fi
|
||||
done
|
||||
done
|
||||
} > "$DEB_DIR/dists/stable/Release"
|
||||
|
||||
echo "DEB Release file created."
|
||||
|
||||
- name: Build RPM repository
|
||||
env:
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
|
||||
AWS_DEFAULT_REGION: auto
|
||||
R2_BUCKET: ${{ secrets.R2_BUCKET_NAME }}
|
||||
run: |
|
||||
RPM_DIR="/tmp/repo/rpm"
|
||||
mkdir -p "$RPM_DIR/x86_64"
|
||||
mkdir -p "$RPM_DIR/aarch64"
|
||||
|
||||
# Sync existing RPMs from R2 (incremental)
|
||||
aws s3 sync "s3://${R2_BUCKET}/rpm/x86_64" "$RPM_DIR/x86_64" \
|
||||
--endpoint-url "$R2_ENDPOINT" --exclude "repodata/*" 2>/dev/null || true
|
||||
aws s3 sync "s3://${R2_BUCKET}/rpm/aarch64" "$RPM_DIR/aarch64" \
|
||||
--endpoint-url "$R2_ENDPOINT" --exclude "repodata/*" 2>/dev/null || true
|
||||
|
||||
# Copy new .rpm files into arch directories
|
||||
for rpm in /tmp/packages/*.rpm; do
|
||||
[[ -f "$rpm" ]] || continue
|
||||
filename=$(basename "$rpm")
|
||||
if [[ "$filename" == *x86_64* ]]; then
|
||||
cp "$rpm" "$RPM_DIR/x86_64/"
|
||||
elif [[ "$filename" == *aarch64* ]]; then
|
||||
cp "$rpm" "$RPM_DIR/aarch64/"
|
||||
fi
|
||||
done
|
||||
|
||||
# Generate repodata
|
||||
createrepo_c --update "$RPM_DIR"
|
||||
echo "RPM repodata created."
|
||||
|
||||
- name: Upload to R2
|
||||
env:
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
|
||||
AWS_DEFAULT_REGION: auto
|
||||
R2_BUCKET: ${{ secrets.R2_BUCKET_NAME }}
|
||||
run: |
|
||||
echo "Uploading DEB repository..."
|
||||
aws s3 sync /tmp/repo/deb/dists "s3://${R2_BUCKET}/deb/dists" \
|
||||
--endpoint-url "$R2_ENDPOINT" --delete
|
||||
aws s3 sync /tmp/repo/deb/pool "s3://${R2_BUCKET}/deb/pool" \
|
||||
--endpoint-url "$R2_ENDPOINT"
|
||||
|
||||
echo "Uploading RPM repository..."
|
||||
aws s3 sync /tmp/repo/rpm "s3://${R2_BUCKET}/rpm" \
|
||||
--endpoint-url "$R2_ENDPOINT"
|
||||
|
||||
- name: Verify upload
|
||||
env:
|
||||
AWS_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
|
||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
|
||||
AWS_DEFAULT_REGION: auto
|
||||
R2_BUCKET: ${{ secrets.R2_BUCKET_NAME }}
|
||||
TAG: ${{ steps.tag.outputs.tag }}
|
||||
run: |
|
||||
echo "Published repos for $TAG"
|
||||
echo ""
|
||||
echo "DEB dists/stable/:"
|
||||
aws s3 ls "s3://${R2_BUCKET}/deb/dists/stable/" \
|
||||
--endpoint-url "$R2_ENDPOINT" 2>/dev/null || echo " (empty)"
|
||||
echo "DEB pool/main/:"
|
||||
aws s3 ls "s3://${R2_BUCKET}/deb/pool/main/" \
|
||||
--endpoint-url "$R2_ENDPOINT" 2>/dev/null || echo " (empty)"
|
||||
echo "RPM repodata/:"
|
||||
aws s3 ls "s3://${R2_BUCKET}/rpm/repodata/" \
|
||||
--endpoint-url "$R2_ENDPOINT" 2>/dev/null || echo " (empty)"
|
||||
@@ -13,11 +13,11 @@ permissions:
|
||||
jobs:
|
||||
generate-release-notes:
|
||||
runs-on: ubuntu-latest
|
||||
if: github.event.workflow_run.conclusion == 'success' && startsWith(github.event.workflow_run.head_branch, 'v')
|
||||
if: github.repository == 'zhom/donutbrowser' && github.event.workflow_run.conclusion == 'success' && startsWith(github.event.workflow_run.head_branch, 'v')
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 #v6.0.1
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd #v6.0.2
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
@@ -82,47 +82,14 @@ jobs:
|
||||
- name: Generate release notes with AI
|
||||
id: generate-notes
|
||||
if: steps.get-release.outputs.is-prerelease == 'false'
|
||||
uses: actions/ai-inference@a380166897b5408b8fb7dddd148142794cb5624a # v2.0.6
|
||||
uses: actions/ai-inference@e09e65981758de8b2fdab13c2bfb7c7d5493b0b6 # v2.0.7
|
||||
with:
|
||||
prompt-file: commits.txt
|
||||
system-prompt: |
|
||||
You are an expert technical writer tasked with generating comprehensive release notes for Donut Browser, a powerful anti-detect browser.
|
||||
|
||||
Analyze the provided commit messages and generate well-structured release notes following this format:
|
||||
|
||||
## What's New in ${{ steps.get-previous-tag.outputs.current-tag }}
|
||||
|
||||
[Brief 1-2 sentence overview of the release]
|
||||
|
||||
### ✨ New Features
|
||||
[List new features with brief descriptions]
|
||||
|
||||
### 🐛 Bug Fixes
|
||||
[List bug fixes]
|
||||
|
||||
### 🔧 Improvements
|
||||
[List improvements and enhancements]
|
||||
|
||||
### 📚 Documentation
|
||||
[List documentation updates if any]
|
||||
|
||||
### 🔄 Dependencies
|
||||
[List dependency updates if any]
|
||||
|
||||
### 🛠️ Developer Experience
|
||||
[List development-related changes if any]
|
||||
|
||||
Guidelines:
|
||||
- Use clear, user-friendly language
|
||||
- Group related commits logically
|
||||
- Omit minor commits like formatting, typos unless significant
|
||||
- Focus on user-facing changes
|
||||
- Use emojis sparingly and consistently
|
||||
- Keep descriptions concise but informative
|
||||
- If commits are unclear, infer the purpose from the context
|
||||
|
||||
The application is a desktop app built with Tauri + Next.js that helps users manage multiple browser profiles with proxy support.
|
||||
model: gpt-5-mini
|
||||
prompt-file: .github/prompts/release-notes.prompt.yml
|
||||
input: |
|
||||
version: ${{ steps.get-previous-tag.outputs.current-tag }}
|
||||
file_input: |
|
||||
commits: ./commits.txt
|
||||
max-tokens: 4096
|
||||
|
||||
- name: Update release with generated notes
|
||||
if: steps.get-release.outputs.is-prerelease == 'false'
|
||||
|
||||
@@ -18,8 +18,9 @@ env:
|
||||
|
||||
jobs:
|
||||
security-scan:
|
||||
if: github.repository == 'zhom/donutbrowser'
|
||||
name: Security Vulnerability Scan
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@c5996e0193a3df57d695c1b8a1dec2a4c62e8730" # v2.3.3
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@c51854704019a247608d928f370c98740469d4b5" # v2.3.5
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
@@ -33,6 +34,7 @@ jobs:
|
||||
actions: read
|
||||
|
||||
lint-js:
|
||||
if: github.repository == 'zhom/donutbrowser'
|
||||
name: Lint JavaScript/TypeScript
|
||||
uses: ./.github/workflows/lint-js.yml
|
||||
secrets: inherit
|
||||
@@ -40,6 +42,7 @@ jobs:
|
||||
contents: read
|
||||
|
||||
lint-rust:
|
||||
if: github.repository == 'zhom/donutbrowser'
|
||||
name: Lint Rust
|
||||
uses: ./.github/workflows/lint-rs.yml
|
||||
secrets: inherit
|
||||
@@ -47,6 +50,7 @@ jobs:
|
||||
contents: read
|
||||
|
||||
codeql:
|
||||
if: github.repository == 'zhom/donutbrowser'
|
||||
name: CodeQL
|
||||
uses: ./.github/workflows/codeql.yml
|
||||
secrets: inherit
|
||||
@@ -57,6 +61,7 @@ jobs:
|
||||
actions: read
|
||||
|
||||
spellcheck:
|
||||
if: github.repository == 'zhom/donutbrowser'
|
||||
name: Spell Check
|
||||
uses: ./.github/workflows/spellcheck.yml
|
||||
secrets: inherit
|
||||
@@ -64,6 +69,7 @@ jobs:
|
||||
contents: read
|
||||
|
||||
release:
|
||||
if: github.repository == 'zhom/donutbrowser'
|
||||
needs: [security-scan, lint-js, lint-rust, codeql, spellcheck]
|
||||
permissions:
|
||||
contents: write
|
||||
@@ -99,10 +105,10 @@ jobs:
|
||||
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 #v6.0.1
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd #v6.0.2
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 #v4.2.0
|
||||
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 #v4.4.0
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
@@ -125,7 +131,7 @@ jobs:
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libgtk-3-dev libayatana-appindicator3-dev librsvg2-dev libxdo-dev pkg-config xdg-utils
|
||||
|
||||
- name: Rust cache
|
||||
uses: swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 #v2.8.2
|
||||
uses: swatinem/rust-cache@c19371144df3bb44fab255c43d04cbc2ab54d1c4 #v2.9.1
|
||||
with:
|
||||
workdir: ./src-tauri
|
||||
|
||||
@@ -133,6 +139,10 @@ jobs:
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Build frontend
|
||||
# NEXT_PUBLIC_* vars are inlined at build time and must be forwarded
|
||||
# from secrets explicitly — they are NOT inherited from the job env.
|
||||
env:
|
||||
NEXT_PUBLIC_TURNSTILE: ${{ secrets.NEXT_PUBLIC_TURNSTILE }}
|
||||
run: pnpm exec next build
|
||||
|
||||
- name: Verify frontend dist exists
|
||||
@@ -202,7 +212,7 @@ jobs:
|
||||
rm -f $CERT_PATH $KEY_PATH $PEM_PATH $P12_PATH
|
||||
|
||||
- name: Build Tauri app
|
||||
uses: tauri-apps/tauri-action@73fb865345c54760d875b94642314f8c0c894afa #v0.6.1
|
||||
uses: tauri-apps/tauri-action@84b9d35b5fc46c1e45415bdb6144030364f7ebc5 #v0.6.2
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GITHUB_REF_NAME: ${{ github.ref_name }}
|
||||
@@ -210,6 +220,12 @@ jobs:
|
||||
APPLE_ID: ${{ secrets.APPLE_ID }}
|
||||
APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
|
||||
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
|
||||
# tauri-action invokes `pnpm tauri build`, which runs
|
||||
# `beforeBuildCommand` from tauri.conf.json. That rebuilds the
|
||||
# frontend in its own subprocess, so the env var MUST be forwarded
|
||||
# here or the inner `next build` inlines an empty string and
|
||||
# overwrites the dist the explicit "Build frontend" step produced.
|
||||
NEXT_PUBLIC_TURNSTILE: ${{ secrets.NEXT_PUBLIC_TURNSTILE }}
|
||||
with:
|
||||
projectPath: ./src-tauri
|
||||
tagName: ${{ github.ref_name }}
|
||||
@@ -219,14 +235,389 @@ jobs:
|
||||
prerelease: false
|
||||
args: ${{ matrix.args }}
|
||||
|
||||
- name: Create portable Windows ZIP
|
||||
if: matrix.platform == 'windows-latest'
|
||||
shell: bash
|
||||
env:
|
||||
TAG: ${{ github.ref_name }}
|
||||
run: |
|
||||
VERSION="${TAG#v}"
|
||||
PORTABLE_DIR="Donut-Portable"
|
||||
mkdir -p "$PORTABLE_DIR"
|
||||
|
||||
# Copy main executable
|
||||
cp "src-tauri/target/${{ matrix.target }}/release/donutbrowser.exe" "$PORTABLE_DIR/Donut.exe"
|
||||
|
||||
# Copy sidecar binaries
|
||||
cp "src-tauri/target/${{ matrix.target }}/release/donut-proxy.exe" "$PORTABLE_DIR/"
|
||||
cp "src-tauri/target/${{ matrix.target }}/release/donut-daemon.exe" "$PORTABLE_DIR/"
|
||||
|
||||
# Copy WebView2Loader if present
|
||||
if [ -f "src-tauri/target/${{ matrix.target }}/release/WebView2Loader.dll" ]; then
|
||||
cp "src-tauri/target/${{ matrix.target }}/release/WebView2Loader.dll" "$PORTABLE_DIR/"
|
||||
fi
|
||||
|
||||
# Create .portable marker
|
||||
touch "$PORTABLE_DIR/.portable"
|
||||
|
||||
# Create ZIP
|
||||
7z a "Donut_${VERSION}_x64-portable.zip" "$PORTABLE_DIR"
|
||||
|
||||
- name: Upload portable ZIP to release
|
||||
if: matrix.platform == 'windows-latest'
|
||||
shell: bash
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
TAG: ${{ github.ref_name }}
|
||||
run: |
|
||||
VERSION="${TAG#v}"
|
||||
gh release upload "$TAG" "Donut_${VERSION}_x64-portable.zip" --clobber
|
||||
|
||||
- name: Clean up Apple certificate
|
||||
if: matrix.platform == 'macos-latest' && always()
|
||||
run: |
|
||||
security delete-keychain $RUNNER_TEMP/app-signing.keychain-db || true
|
||||
rm -f $RUNNER_TEMP/build_certificate.p12 || true
|
||||
|
||||
# - name: Commit CHANGELOG.md
|
||||
# uses: stefanzweifel/git-auto-commit-action@778341af668090896ca464160c2def5d1d1a3eb0 #v6.0.1
|
||||
# with:
|
||||
# branch: main
|
||||
# commit_message: "docs: update CHANGELOG.md for ${{ github.ref_name }} [skip ci]"
|
||||
changelog:
|
||||
if: github.repository == 'zhom/donutbrowser'
|
||||
needs: [release]
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd #v6.0.2
|
||||
with:
|
||||
ref: main
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Generate changelog
|
||||
env:
|
||||
TAG: ${{ github.ref_name }}
|
||||
run: |
|
||||
PREV_TAG=$(git tag --sort=-version:refname \
|
||||
| grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' \
|
||||
| grep -v "^${TAG}$" \
|
||||
| head -n 1)
|
||||
|
||||
if [ -z "$PREV_TAG" ]; then
|
||||
PREV_TAG=$(git rev-list --max-parents=0 HEAD)
|
||||
fi
|
||||
|
||||
echo "Generating changelog: ${PREV_TAG}..${TAG}"
|
||||
|
||||
features=""
|
||||
fixes=""
|
||||
refactors=""
|
||||
perf=""
|
||||
docs=""
|
||||
maintenance=""
|
||||
other=""
|
||||
|
||||
strip_prefix() { echo "$1" | sed -E 's/^[a-z]+(\([^)]*\))?: //'; }
|
||||
|
||||
while IFS= read -r msg; do
|
||||
[ -z "$msg" ] && continue
|
||||
case "$msg" in
|
||||
feat\(*\):*|feat:*)
|
||||
features="${features}- $(strip_prefix "$msg")"$'\n' ;;
|
||||
fix\(*\):*|fix:*)
|
||||
fixes="${fixes}- $(strip_prefix "$msg")"$'\n' ;;
|
||||
refactor\(*\):*|refactor:*)
|
||||
refactors="${refactors}- $(strip_prefix "$msg")"$'\n' ;;
|
||||
perf\(*\):*|perf:*)
|
||||
perf="${perf}- $(strip_prefix "$msg")"$'\n' ;;
|
||||
docs\(*\):*|docs:*)
|
||||
docs="${docs}- $(strip_prefix "$msg")"$'\n' ;;
|
||||
build*|ci*|chore*|test*)
|
||||
maintenance="${maintenance}- ${msg}"$'\n' ;;
|
||||
*)
|
||||
other="${other}- ${msg}"$'\n' ;;
|
||||
esac
|
||||
done < <(git log --pretty=format:"%s" "${PREV_TAG}..${TAG}" --no-merges)
|
||||
|
||||
{
|
||||
echo "## ${TAG} ($(date -u +%Y-%m-%d))"
|
||||
echo ""
|
||||
[ -n "$features" ] && printf "### Features\n\n%s\n" "$features"
|
||||
[ -n "$fixes" ] && printf "### Bug Fixes\n\n%s\n" "$fixes"
|
||||
[ -n "$refactors" ] && printf "### Refactoring\n\n%s\n" "$refactors"
|
||||
[ -n "$perf" ] && printf "### Performance\n\n%s\n" "$perf"
|
||||
[ -n "$docs" ] && printf "### Documentation\n\n%s\n" "$docs"
|
||||
[ -n "$maintenance" ] && printf "### Maintenance\n\n%s\n" "$maintenance"
|
||||
[ -n "$other" ] && printf "### Other\n\n%s\n" "$other"
|
||||
} > /tmp/release-changelog.md
|
||||
|
||||
echo "Generated changelog:"
|
||||
cat /tmp/release-changelog.md
|
||||
|
||||
- name: Update CHANGELOG.md
|
||||
run: |
|
||||
if [ -f CHANGELOG.md ]; then
|
||||
# Insert new entry after the "# Changelog" header (first 2 lines)
|
||||
{
|
||||
head -n 2 CHANGELOG.md
|
||||
echo ""
|
||||
cat /tmp/release-changelog.md
|
||||
tail -n +3 CHANGELOG.md
|
||||
} > CHANGELOG.tmp
|
||||
mv CHANGELOG.tmp CHANGELOG.md
|
||||
else
|
||||
{
|
||||
echo "# Changelog"
|
||||
echo ""
|
||||
cat /tmp/release-changelog.md
|
||||
} > CHANGELOG.md
|
||||
fi
|
||||
|
||||
- name: Update README download links
|
||||
env:
|
||||
TAG: ${{ github.ref_name }}
|
||||
run: |
|
||||
VERSION="${TAG#v}"
|
||||
BASE="https://github.com/zhom/donutbrowser/releases/download/${TAG}"
|
||||
|
||||
# Generate the new install section between markers
|
||||
cat > /tmp/install-links.md << LINKS
|
||||
### macOS
|
||||
|
||||
| | Apple Silicon | Intel |
|
||||
|---|---|---|
|
||||
| **DMG** | [Download](${BASE}/Donut_${VERSION}_aarch64.dmg) | [Download](${BASE}/Donut_${VERSION}_x64.dmg) |
|
||||
|
||||
Or install via Homebrew:
|
||||
|
||||
\`\`\`bash
|
||||
brew install --cask donut
|
||||
\`\`\`
|
||||
|
||||
### Windows
|
||||
|
||||
[Download Windows Installer (x64)](${BASE}/Donut_${VERSION}_x64-setup.exe) · [Portable (x64)](${BASE}/Donut_${VERSION}_x64-portable.zip)
|
||||
|
||||
### Linux
|
||||
|
||||
| Format | x86_64 | ARM64 |
|
||||
|---|---|---|
|
||||
| **deb** | [Download](${BASE}/Donut_${VERSION}_amd64.deb) | [Download](${BASE}/Donut_${VERSION}_arm64.deb) |
|
||||
| **rpm** | [Download](${BASE}/Donut-${VERSION}-1.x86_64.rpm) | [Download](${BASE}/Donut-${VERSION}-1.aarch64.rpm) |
|
||||
| **AppImage** | [Download](${BASE}/Donut_${VERSION}_amd64.AppImage) | [Download](${BASE}/Donut_${VERSION}_aarch64.AppImage) |
|
||||
LINKS
|
||||
|
||||
# Strip leading whitespace from heredoc
|
||||
sed -i 's/^ //' /tmp/install-links.md
|
||||
|
||||
# Replace content between markers in README
|
||||
sed -i '/<!-- install-links-start -->/,/<!-- install-links-end -->/{
|
||||
/<!-- install-links-start -->/{
|
||||
p
|
||||
r /tmp/install-links.md
|
||||
}
|
||||
/<!-- install-links-end -->/!d
|
||||
}' README.md
|
||||
|
||||
- name: Create release docs PR
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
TAG: ${{ github.ref_name }}
|
||||
run: |
|
||||
VERSION="${TAG#v}"
|
||||
BRANCH="docs/release-${VERSION}"
|
||||
git config user.name "github-actions[bot]"
|
||||
git config user.email "github-actions[bot]@users.noreply.github.com"
|
||||
git checkout -b "$BRANCH"
|
||||
git add CHANGELOG.md README.md
|
||||
if git diff --cached --quiet; then
|
||||
echo "No changes to commit"
|
||||
else
|
||||
git commit -m "docs: update CHANGELOG.md and README.md for ${TAG} [skip ci]"
|
||||
git push origin "$BRANCH"
|
||||
gh pr create \
|
||||
--title "docs: release notes for ${TAG}" \
|
||||
--body "Automated update of CHANGELOG.md and README.md download links for ${TAG}." \
|
||||
--base main \
|
||||
--head "$BRANCH"
|
||||
gh pr merge "$BRANCH" --squash --admin
|
||||
fi
|
||||
|
||||
- name: Update release notes
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
TAG: ${{ github.ref_name }}
|
||||
run: |
|
||||
gh release edit "$TAG" --notes-file /tmp/release-changelog.md
|
||||
|
||||
notify-discord:
|
||||
if: github.repository == 'zhom/donutbrowser'
|
||||
needs: [release, changelog]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd #v6.0.2
|
||||
with:
|
||||
ref: main
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Generate changelog summary
|
||||
env:
|
||||
TAG: ${{ github.ref_name }}
|
||||
run: |
|
||||
PREV_TAG=$(git tag --sort=-version:refname \
|
||||
| grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' \
|
||||
| grep -v "^${TAG}$" \
|
||||
| head -n 1)
|
||||
if [ -z "$PREV_TAG" ]; then
|
||||
PREV_TAG=$(git rev-list --max-parents=0 HEAD)
|
||||
fi
|
||||
|
||||
strip_prefix() { echo "$1" | sed -E 's/^[a-z]+(\([^)]*\))?: //'; }
|
||||
|
||||
CHANGES=""
|
||||
while IFS= read -r msg; do
|
||||
[ -z "$msg" ] && continue
|
||||
case "$msg" in
|
||||
feat\(*\):*|feat:*) CHANGES="${CHANGES}• $(strip_prefix "$msg")\n" ;;
|
||||
fix\(*\):*|fix:*) CHANGES="${CHANGES}• $(strip_prefix "$msg")\n" ;;
|
||||
refactor\(*\):*|refactor:*) CHANGES="${CHANGES}• $(strip_prefix "$msg")\n" ;;
|
||||
perf\(*\):*|perf:*) CHANGES="${CHANGES}• $(strip_prefix "$msg")\n" ;;
|
||||
esac
|
||||
done < <(git log --pretty=format:"%s" "${PREV_TAG}..${TAG}" --no-merges)
|
||||
|
||||
# Truncate to fit Discord embed (max 4096 chars)
|
||||
if [ ${#CHANGES} -gt 3900 ]; then
|
||||
CHANGES="${CHANGES:0:3900}\n..."
|
||||
fi
|
||||
|
||||
if [ -z "$CHANGES" ]; then
|
||||
CHANGES="See the full changelog on GitHub."
|
||||
fi
|
||||
|
||||
printf '%s' "$CHANGES" > /tmp/discord-changes.txt
|
||||
|
||||
- name: Send Discord notification
|
||||
env:
|
||||
DISCORD_WEBHOOK_URL: ${{ secrets.DISCORD_STABLE_WEBHOOK_URL }}
|
||||
TAG: ${{ github.ref_name }}
|
||||
run: |
|
||||
VERSION="${TAG}"
|
||||
RELEASE_URL="https://github.com/${GITHUB_REPOSITORY}/releases/tag/${VERSION}"
|
||||
CHANGES=$(cat /tmp/discord-changes.txt)
|
||||
|
||||
# Build JSON with jq to handle escaping
|
||||
PAYLOAD=$(jq -n \
|
||||
--arg title "Donut Browser ${VERSION} Released" \
|
||||
--arg url "$RELEASE_URL" \
|
||||
--arg changes "$CHANGES" \
|
||||
--arg dl_mac_arm "https://github.com/'"${GITHUB_REPOSITORY}"'/releases/download/'"${VERSION}"'/Donut_'"${VERSION#v}"'_aarch64.dmg" \
|
||||
--arg dl_mac_intel "https://github.com/'"${GITHUB_REPOSITORY}"'/releases/download/'"${VERSION}"'/Donut_'"${VERSION#v}"'_x64.dmg" \
|
||||
--arg dl_win "https://github.com/'"${GITHUB_REPOSITORY}"'/releases/download/'"${VERSION}"'/Donut_'"${VERSION#v}"'_x64-setup.exe" \
|
||||
--arg dl_linux "https://github.com/'"${GITHUB_REPOSITORY}"'/releases/download/'"${VERSION}"'/Donut_'"${VERSION#v}"'_amd64.AppImage" \
|
||||
'{
|
||||
embeds: [{
|
||||
title: $title,
|
||||
url: $url,
|
||||
description: $changes,
|
||||
color: 5814783,
|
||||
fields: [
|
||||
{ name: "Download", value: ("[macOS (Apple Silicon)](" + $dl_mac_arm + ") · [macOS (Intel)](" + $dl_mac_intel + ")\n[Windows x64](" + $dl_win + ") · [Linux x64](" + $dl_linux + ")"), inline: false }
|
||||
],
|
||||
footer: { text: "donutbrowser.com" }
|
||||
}]
|
||||
}')
|
||||
|
||||
curl -fsSL -H "Content-Type: application/json" -d "$PAYLOAD" "$DISCORD_WEBHOOK_URL"
|
||||
|
||||
deploy-website:
|
||||
if: github.repository == 'zhom/donutbrowser'
|
||||
needs: [release]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Trigger Cloudflare Pages deployment
|
||||
run: curl -fsSL -X POST "${{ secrets.CLOUDFLARE_WEB_DEPLOYMENT_HOOK }}"
|
||||
|
||||
docker:
|
||||
if: github.repository == 'zhom/donutbrowser'
|
||||
needs: [release]
|
||||
uses: ./.github/workflows/docker-sync.yml
|
||||
with:
|
||||
tag: ${{ github.ref_name }}
|
||||
secrets: inherit
|
||||
|
||||
update-flake:
|
||||
if: github.repository == 'zhom/donutbrowser'
|
||||
needs: [release]
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd #v6.0.2
|
||||
with:
|
||||
ref: main
|
||||
|
||||
- name: Compute AppImage hashes
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
TAG: ${{ github.ref_name }}
|
||||
run: |
|
||||
VERSION="${TAG#v}"
|
||||
echo "VERSION=${VERSION}" >> "$GITHUB_ENV"
|
||||
|
||||
AMD64_URL="https://github.com/zhom/donutbrowser/releases/download/${TAG}/Donut_${VERSION}_amd64.AppImage"
|
||||
AARCH64_URL="https://github.com/zhom/donutbrowser/releases/download/${TAG}/Donut_${VERSION}_aarch64.AppImage"
|
||||
|
||||
echo "Downloading x86_64 AppImage..."
|
||||
curl -fsSL -o /tmp/amd64.AppImage "$AMD64_URL" || { echo "x86_64 AppImage not found"; exit 1; }
|
||||
|
||||
echo "Downloading aarch64 AppImage..."
|
||||
curl -fsSL -o /tmp/aarch64.AppImage "$AARCH64_URL" || { echo "aarch64 AppImage not found"; exit 1; }
|
||||
|
||||
# Compute SRI hashes (sha256-<base64>)
|
||||
AMD64_HASH="sha256-$(sha256sum /tmp/amd64.AppImage | awk '{print $1}' | xxd -r -p | base64 | tr -d '\n')"
|
||||
AARCH64_HASH="sha256-$(sha256sum /tmp/aarch64.AppImage | awk '{print $1}' | xxd -r -p | base64 | tr -d '\n')"
|
||||
|
||||
echo "AMD64_HASH=${AMD64_HASH}" >> "$GITHUB_ENV"
|
||||
echo "AARCH64_HASH=${AARCH64_HASH}" >> "$GITHUB_ENV"
|
||||
echo "AMD64_URL=${AMD64_URL}" >> "$GITHUB_ENV"
|
||||
echo "AARCH64_URL=${AARCH64_URL}" >> "$GITHUB_ENV"
|
||||
|
||||
echo "x86_64 hash: ${AMD64_HASH}"
|
||||
echo "aarch64 hash: ${AARCH64_HASH}"
|
||||
|
||||
- name: Update flake.nix
|
||||
run: |
|
||||
# Update releaseVersion
|
||||
sed -i "s/releaseVersion = \"[^\"]*\"/releaseVersion = \"${VERSION}\"/" flake.nix
|
||||
|
||||
# Update x86_64 URL and hash
|
||||
sed -i "s|url = \"https://github.com/zhom/donutbrowser/releases/download/v[^\"]*_amd64.AppImage\"|url = \"${AMD64_URL}\"|" flake.nix
|
||||
sed -i "/amd64.AppImage/{ n; s|hash = \"[^\"]*\"|hash = \"${AMD64_HASH}\"|; }" flake.nix
|
||||
|
||||
# Update aarch64 URL and hash
|
||||
sed -i "s|url = \"https://github.com/zhom/donutbrowser/releases/download/v[^\"]*_aarch64.AppImage\"|url = \"${AARCH64_URL}\"|" flake.nix
|
||||
sed -i "/aarch64.AppImage/{ n; s|hash = \"[^\"]*\"|hash = \"${AARCH64_HASH}\"|; }" flake.nix
|
||||
|
||||
echo "Updated flake.nix:"
|
||||
grep -n "releaseVersion\|AppImage\|hash = " flake.nix
|
||||
|
||||
- name: Create pull request
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
BRANCH="chore/update-flake-${VERSION}"
|
||||
git config user.name "github-actions[bot]"
|
||||
git config user.email "github-actions[bot]@users.noreply.github.com"
|
||||
git checkout -b "$BRANCH"
|
||||
git add flake.nix
|
||||
if git diff --cached --quiet; then
|
||||
echo "No flake changes needed"
|
||||
exit 0
|
||||
fi
|
||||
git commit -m "chore: update flake.nix for v${VERSION} [skip ci]"
|
||||
git push origin "$BRANCH"
|
||||
gh pr create \
|
||||
--title "chore: update flake.nix for v${VERSION}" \
|
||||
--body "Automated update of flake.nix with new AppImage hashes for v${VERSION}." \
|
||||
--base main \
|
||||
--head "$BRANCH"
|
||||
gh pr merge "$BRANCH" --squash --admin
|
||||
|
||||
@@ -17,8 +17,9 @@ env:
|
||||
|
||||
jobs:
|
||||
security-scan:
|
||||
if: github.repository == 'zhom/donutbrowser'
|
||||
name: Security Vulnerability Scan
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@c5996e0193a3df57d695c1b8a1dec2a4c62e8730" # v2.3.3
|
||||
uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable.yml@c51854704019a247608d928f370c98740469d4b5" # v2.3.5
|
||||
with:
|
||||
scan-args: |-
|
||||
-r
|
||||
@@ -32,6 +33,7 @@ jobs:
|
||||
actions: read
|
||||
|
||||
lint-js:
|
||||
if: github.repository == 'zhom/donutbrowser'
|
||||
name: Lint JavaScript/TypeScript
|
||||
uses: ./.github/workflows/lint-js.yml
|
||||
secrets: inherit
|
||||
@@ -39,6 +41,7 @@ jobs:
|
||||
contents: read
|
||||
|
||||
lint-rust:
|
||||
if: github.repository == 'zhom/donutbrowser'
|
||||
name: Lint Rust
|
||||
uses: ./.github/workflows/lint-rs.yml
|
||||
secrets: inherit
|
||||
@@ -46,6 +49,7 @@ jobs:
|
||||
contents: read
|
||||
|
||||
codeql:
|
||||
if: github.repository == 'zhom/donutbrowser'
|
||||
name: CodeQL
|
||||
uses: ./.github/workflows/codeql.yml
|
||||
secrets: inherit
|
||||
@@ -56,6 +60,7 @@ jobs:
|
||||
actions: read
|
||||
|
||||
spellcheck:
|
||||
if: github.repository == 'zhom/donutbrowser'
|
||||
name: Spell Check
|
||||
uses: ./.github/workflows/spellcheck.yml
|
||||
secrets: inherit
|
||||
@@ -63,6 +68,7 @@ jobs:
|
||||
contents: read
|
||||
|
||||
rolling-release:
|
||||
if: github.repository == 'zhom/donutbrowser'
|
||||
needs: [security-scan, lint-js, lint-rust, codeql, spellcheck]
|
||||
permissions:
|
||||
contents: write
|
||||
@@ -98,10 +104,10 @@ jobs:
|
||||
|
||||
runs-on: ${{ matrix.platform }}
|
||||
steps:
|
||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 #v6.0.1
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd #v6.0.2
|
||||
|
||||
- name: Setup pnpm
|
||||
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 #v4.2.0
|
||||
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 #v4.4.0
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
@@ -124,7 +130,7 @@ jobs:
|
||||
sudo apt-get install -y libwebkit2gtk-4.1-dev libgtk-3-dev libayatana-appindicator3-dev librsvg2-dev libxdo-dev pkg-config xdg-utils
|
||||
|
||||
- name: Rust cache
|
||||
uses: swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 #v2.8.2
|
||||
uses: swatinem/rust-cache@c19371144df3bb44fab255c43d04cbc2ab54d1c4 #v2.9.1
|
||||
with:
|
||||
workdir: ./src-tauri
|
||||
|
||||
@@ -132,6 +138,10 @@ jobs:
|
||||
run: pnpm install --frozen-lockfile
|
||||
|
||||
- name: Build frontend
|
||||
# NEXT_PUBLIC_* vars are inlined at build time and must be forwarded
|
||||
# from secrets explicitly — they are NOT inherited from the job env.
|
||||
env:
|
||||
NEXT_PUBLIC_TURNSTILE: ${{ secrets.NEXT_PUBLIC_TURNSTILE }}
|
||||
run: pnpm exec next build
|
||||
|
||||
- name: Verify frontend dist exists
|
||||
@@ -210,7 +220,7 @@ jobs:
|
||||
echo "Generated timestamp: ${TIMESTAMP}-${COMMIT_HASH}"
|
||||
|
||||
- name: Build Tauri app
|
||||
uses: tauri-apps/tauri-action@73fb865345c54760d875b94642314f8c0c894afa #v0.6.1
|
||||
uses: tauri-apps/tauri-action@84b9d35b5fc46c1e45415bdb6144030364f7ebc5 #v0.6.2
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
BUILD_TAG: "nightly-${{ steps.timestamp.outputs.timestamp }}"
|
||||
@@ -220,6 +230,9 @@ jobs:
|
||||
APPLE_ID: ${{ secrets.APPLE_ID }}
|
||||
APPLE_PASSWORD: ${{ secrets.APPLE_PASSWORD }}
|
||||
APPLE_TEAM_ID: ${{ secrets.APPLE_TEAM_ID }}
|
||||
# tauri-action's inner `pnpm tauri build` re-runs beforeBuildCommand
|
||||
# which rebuilds dist/ in a subprocess. The env var must be here too.
|
||||
NEXT_PUBLIC_TURNSTILE: ${{ secrets.NEXT_PUBLIC_TURNSTILE }}
|
||||
with:
|
||||
projectPath: ./src-tauri
|
||||
tagName: "nightly-${{ steps.timestamp.outputs.timestamp }}"
|
||||
@@ -229,8 +242,181 @@ jobs:
|
||||
prerelease: true
|
||||
args: ${{ matrix.args }}
|
||||
|
||||
- name: Create portable Windows ZIP
|
||||
if: matrix.platform == 'windows-latest'
|
||||
shell: bash
|
||||
run: |
|
||||
PORTABLE_DIR="Donut-Portable"
|
||||
mkdir -p "$PORTABLE_DIR"
|
||||
|
||||
cp "src-tauri/target/${{ matrix.target }}/release/donutbrowser.exe" "$PORTABLE_DIR/Donut.exe"
|
||||
cp "src-tauri/target/${{ matrix.target }}/release/donut-proxy.exe" "$PORTABLE_DIR/"
|
||||
cp "src-tauri/target/${{ matrix.target }}/release/donut-daemon.exe" "$PORTABLE_DIR/"
|
||||
|
||||
if [ -f "src-tauri/target/${{ matrix.target }}/release/WebView2Loader.dll" ]; then
|
||||
cp "src-tauri/target/${{ matrix.target }}/release/WebView2Loader.dll" "$PORTABLE_DIR/"
|
||||
fi
|
||||
|
||||
touch "$PORTABLE_DIR/.portable"
|
||||
|
||||
7z a "Donut_x64-portable.zip" "$PORTABLE_DIR"
|
||||
|
||||
- name: Upload portable ZIP to release
|
||||
if: matrix.platform == 'windows-latest'
|
||||
shell: bash
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
NIGHTLY_TAG: "nightly-${{ steps.timestamp.outputs.timestamp }}"
|
||||
run: |
|
||||
gh release upload "$NIGHTLY_TAG" "Donut_x64-portable.zip" --clobber
|
||||
|
||||
- name: Clean up Apple certificate
|
||||
if: matrix.platform == 'macos-latest' && always()
|
||||
run: |
|
||||
security delete-keychain $RUNNER_TEMP/app-signing.keychain-db || true
|
||||
rm -f $RUNNER_TEMP/build_certificate.p12 || true
|
||||
|
||||
update-nightly-release:
|
||||
if: github.repository == 'zhom/donutbrowser'
|
||||
needs: [rolling-release]
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
steps:
|
||||
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd #v6.0.2
|
||||
|
||||
- name: Generate nightly tag
|
||||
id: tag
|
||||
run: |
|
||||
TIMESTAMP=$(date -u +"%Y-%m-%d")
|
||||
COMMIT_HASH=$(echo "${GITHUB_SHA}" | cut -c1-7)
|
||||
echo "nightly_tag=nightly-${TIMESTAMP}-${COMMIT_HASH}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Generate nightly changelog
|
||||
id: nightly-changelog
|
||||
run: |
|
||||
LAST_STABLE=$(git tag --sort=-version:refname \
|
||||
| grep -E "^v[0-9]+\.[0-9]+\.[0-9]+\$" \
|
||||
| head -n 1)
|
||||
|
||||
if [ -z "$LAST_STABLE" ]; then
|
||||
LAST_STABLE=$(git rev-list --max-parents=0 HEAD)
|
||||
fi
|
||||
|
||||
COMMIT_SHORT=$(echo "${GITHUB_SHA}" | cut -c1-7)
|
||||
{
|
||||
echo "**Nightly build from main branch**"
|
||||
echo ""
|
||||
echo "Commit: ${GITHUB_SHA}"
|
||||
echo "Changes since ${LAST_STABLE}:"
|
||||
echo ""
|
||||
} > /tmp/nightly-notes.md
|
||||
|
||||
strip_prefix() { echo "$1" | sed -E 's/^[a-z]+(\([^)]*\))?: //'; }
|
||||
|
||||
features=""
|
||||
fixes=""
|
||||
refactors=""
|
||||
other=""
|
||||
|
||||
while IFS= read -r msg; do
|
||||
[ -z "$msg" ] && continue
|
||||
case "$msg" in
|
||||
feat\(*\):*|feat:*)
|
||||
features="${features}- $(strip_prefix "$msg")"$'\n' ;;
|
||||
fix\(*\):*|fix:*)
|
||||
fixes="${fixes}- $(strip_prefix "$msg")"$'\n' ;;
|
||||
refactor\(*\):*|refactor:*)
|
||||
refactors="${refactors}- $(strip_prefix "$msg")"$'\n' ;;
|
||||
build*|ci*|chore*|test*|docs*|perf*)
|
||||
;; # skip maintenance commits from nightly notes
|
||||
*)
|
||||
other="${other}- ${msg}"$'\n' ;;
|
||||
esac
|
||||
done < <(git log --pretty=format:"%s" "${LAST_STABLE}..HEAD" --no-merges)
|
||||
|
||||
{
|
||||
[ -n "$features" ] && printf "### Features\n\n%s\n" "$features"
|
||||
[ -n "$fixes" ] && printf "### Bug Fixes\n\n%s\n" "$fixes"
|
||||
[ -n "$refactors" ] && printf "### Refactoring\n\n%s\n" "$refactors"
|
||||
[ -n "$other" ] && printf "### Other\n\n%s\n" "$other"
|
||||
true
|
||||
} >> /tmp/nightly-notes.md
|
||||
|
||||
- name: Update rolling nightly release
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
NIGHTLY_TAG="${{ steps.tag.outputs.nightly_tag }}"
|
||||
ASSETS_DIR="/tmp/nightly-assets"
|
||||
|
||||
# Download all assets from the per-commit nightly release
|
||||
mkdir -p "$ASSETS_DIR"
|
||||
gh release download "$NIGHTLY_TAG" --dir "$ASSETS_DIR" --clobber
|
||||
|
||||
# Rename versioned filenames to stable nightly names
|
||||
cd "$ASSETS_DIR"
|
||||
for f in Donut_*_aarch64.dmg; do [ -f "$f" ] && mv "$f" Donut_nightly_aarch64.dmg; done
|
||||
for f in Donut_*_x64.dmg; do [ -f "$f" ] && mv "$f" Donut_nightly_x64.dmg; done
|
||||
for f in Donut_*_x64-setup.exe; do [ -f "$f" ] && mv "$f" Donut_nightly_x64-setup.exe; done
|
||||
for f in Donut_*_aarch64.AppImage; do [ -f "$f" ] && mv "$f" Donut_nightly_aarch64.AppImage; done
|
||||
for f in Donut_*_amd64.AppImage; do [ -f "$f" ] && mv "$f" Donut_nightly_amd64.AppImage; done
|
||||
for f in Donut_*_amd64.deb; do [ -f "$f" ] && mv "$f" Donut_nightly_amd64.deb; done
|
||||
for f in Donut_*_arm64.deb; do [ -f "$f" ] && mv "$f" Donut_nightly_arm64.deb; done
|
||||
for f in Donut-*.x86_64.rpm; do [ -f "$f" ] && mv "$f" Donut_nightly_x86_64.rpm; done
|
||||
for f in Donut-*.aarch64.rpm; do [ -f "$f" ] && mv "$f" Donut_nightly_aarch64.rpm; done
|
||||
cd "$GITHUB_WORKSPACE"
|
||||
|
||||
# Delete existing rolling nightly release and tag
|
||||
gh release delete nightly --yes 2>/dev/null || true
|
||||
git push --delete origin nightly 2>/dev/null || true
|
||||
|
||||
# Create new rolling nightly release with all assets
|
||||
gh release create nightly \
|
||||
"$ASSETS_DIR"/Donut_nightly_* \
|
||||
"$ASSETS_DIR"/Donut_aarch64.app.tar.gz \
|
||||
"$ASSETS_DIR"/Donut_x64.app.tar.gz \
|
||||
--title "Donut Browser Nightly" \
|
||||
--notes-file /tmp/nightly-notes.md \
|
||||
--prerelease
|
||||
|
||||
deploy-website:
|
||||
if: github.repository == 'zhom/donutbrowser'
|
||||
needs: [update-nightly-release]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Trigger Cloudflare Pages deployment
|
||||
run: curl -fsSL -X POST "${{ secrets.CLOUDFLARE_WEB_DEPLOYMENT_HOOK }}"
|
||||
|
||||
notify-discord:
|
||||
if: github.repository == 'zhom/donutbrowser'
|
||||
needs: [update-nightly-release]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Send Discord notification
|
||||
env:
|
||||
DISCORD_WEBHOOK_URL: ${{ secrets.DISCORD_NIGHTLY_WEBHOOK_URL }}
|
||||
run: |
|
||||
COMMIT_SHORT=$(echo "${GITHUB_SHA}" | cut -c1-7)
|
||||
RELEASE_URL="https://github.com/${GITHUB_REPOSITORY}/releases/tag/nightly"
|
||||
COMMIT_URL="https://github.com/${GITHUB_REPOSITORY}/commit/${GITHUB_SHA}"
|
||||
|
||||
PAYLOAD=$(jq -n \
|
||||
--arg title "Donut Browser Nightly (${COMMIT_SHORT})" \
|
||||
--arg url "$RELEASE_URL" \
|
||||
--arg commit_url "$COMMIT_URL" \
|
||||
--arg commit_short "$COMMIT_SHORT" \
|
||||
'{
|
||||
embeds: [{
|
||||
title: $title,
|
||||
url: $url,
|
||||
color: 16752128,
|
||||
fields: [
|
||||
{ name: "Commit", value: ("[" + $commit_short + "](" + $commit_url + ")"), inline: true },
|
||||
{ name: "Download", value: ("[Nightly Release](" + $url + ")"), inline: true }
|
||||
],
|
||||
footer: { text: "donutbrowser.com" }
|
||||
}]
|
||||
}')
|
||||
|
||||
curl -fsSL -H "Content-Type: application/json" -d "$PAYLOAD" "$DISCORD_WEBHOOK_URL"
|
||||
|
||||
@@ -21,6 +21,6 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout Actions Repository
|
||||
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 #v6.0.1
|
||||
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd #v6.0.2
|
||||
- name: Spell Check Repo
|
||||
uses: crate-ci/typos@78bc6fb2c0d734235d57a2d6b9de923cc325ebdd #v1.43.4
|
||||
uses: crate-ci/typos@02ea592e44b3a53c302f697cddca7641cd051c3d #v1.45.0
|
||||
|
||||
@@ -6,16 +6,19 @@ on:
|
||||
|
||||
jobs:
|
||||
stale:
|
||||
if: github.repository == 'zhom/donutbrowser'
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
issues: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
|
||||
- uses: actions/stale@b5d41d4e1d5dceea10e7104786b73624c18a190f # v10.2.0
|
||||
with:
|
||||
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||
stale-issue-message: "This issue has been inactive for 60 days. Please respond to keep it open."
|
||||
stale-pr-message: "This pull request has been inactive for 60 days. Please respond to keep it open."
|
||||
stale-issue-message: "This issue has been inactive for 30 days. Please respond to keep it open."
|
||||
stale-pr-message: "This pull request has been inactive for 30 days. Please respond to keep it open."
|
||||
stale-issue-label: "stale"
|
||||
stale-pr-label: "stale"
|
||||
days-before-stale: 30
|
||||
days-before-close: 7
|
||||
|
||||
@@ -24,6 +24,7 @@ jobs:
|
||||
rust-sync-e2e:
|
||||
name: Rust Sync E2E Tests
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [macos-latest, ubuntu-22.04]
|
||||
|
||||
@@ -31,10 +32,10 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@v6.0.2
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 #v4.2.0
|
||||
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 #v4.4.0
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
@@ -50,7 +51,7 @@ jobs:
|
||||
toolchain: stable
|
||||
|
||||
- name: Cache Rust dependencies
|
||||
uses: swatinem/rust-cache@779680da715d629ac1d338a641029a2f4372abb5 #v2.8.2
|
||||
uses: swatinem/rust-cache@c19371144df3bb44fab255c43d04cbc2ab54d1c4 #v2.9.1
|
||||
with:
|
||||
workspaces: "src-tauri"
|
||||
|
||||
@@ -72,7 +73,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
uses: actions/checkout@v6.0.2
|
||||
|
||||
- name: Start MinIO
|
||||
run: |
|
||||
@@ -84,7 +85,7 @@ jobs:
|
||||
|
||||
# Wait for MinIO to be ready
|
||||
for i in {1..30}; do
|
||||
if curl -sf http://localhost:8987/minio/health/live; then
|
||||
if curl -sf http://127.0.0.1:8987/minio/health/live; then
|
||||
echo "MinIO is ready"
|
||||
break
|
||||
fi
|
||||
@@ -93,7 +94,7 @@ jobs:
|
||||
done
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 #v4.2.0
|
||||
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 #v4.4.0
|
||||
with:
|
||||
run_install: false
|
||||
|
||||
@@ -110,7 +111,7 @@ jobs:
|
||||
working-directory: donut-sync
|
||||
env:
|
||||
SYNC_TOKEN: test-sync-token
|
||||
S3_ENDPOINT: http://localhost:8987
|
||||
S3_ENDPOINT: http://127.0.0.1:8987
|
||||
S3_ACCESS_KEY_ID: minioadmin
|
||||
S3_SECRET_ACCESS_KEY: minioadmin
|
||||
S3_BUCKET: donut-sync-test
|
||||
|
||||
@@ -55,4 +55,7 @@ nodecar/nodecar-bin
|
||||
.cache/
|
||||
|
||||
# env
|
||||
.env
|
||||
.env
|
||||
|
||||
# next
|
||||
next-env.d.ts
|
||||
@@ -0,0 +1,10 @@
|
||||
# Prevent pushing the 'nightly' tag — it is managed by CI
|
||||
if git rev-parse nightly >/dev/null 2>&1; then
|
||||
LOCAL_NIGHTLY=$(git rev-parse nightly)
|
||||
REMOTE_NIGHTLY=$(git ls-remote --tags origin refs/tags/nightly 2>/dev/null | awk '{print $1}')
|
||||
if [ -n "$REMOTE_NIGHTLY" ] && [ "$LOCAL_NIGHTLY" != "$REMOTE_NIGHTLY" ]; then
|
||||
echo "⚠ Skipping push of 'nightly' tag (managed by CI)"
|
||||
# Delete the local nightly tag so --tags won't try to push it
|
||||
git tag -d nightly >/dev/null 2>&1 || true
|
||||
fi
|
||||
fi
|
||||
@@ -10,11 +10,15 @@
|
||||
"appindicator",
|
||||
"applescript",
|
||||
"asyncio",
|
||||
"autocheckpoint",
|
||||
"autoconfig",
|
||||
"autologin",
|
||||
"bintools",
|
||||
"biomejs",
|
||||
"boringtun",
|
||||
"breezedark",
|
||||
"browserforge",
|
||||
"Buildx",
|
||||
"busctl",
|
||||
"CAMOU",
|
||||
"camoufox",
|
||||
@@ -31,7 +35,9 @@
|
||||
"cmdk",
|
||||
"codegen",
|
||||
"codesign",
|
||||
"codesigning",
|
||||
"commitish",
|
||||
"coreutils",
|
||||
"Crashpad",
|
||||
"CTYPE",
|
||||
"daijro",
|
||||
@@ -39,45 +45,68 @@
|
||||
"datareporting",
|
||||
"datas",
|
||||
"DBAPI",
|
||||
"dbus",
|
||||
"dconf",
|
||||
"debuginfo",
|
||||
"desynced",
|
||||
"devedition",
|
||||
"direnv",
|
||||
"diskutil",
|
||||
"distro",
|
||||
"dists",
|
||||
"DMABUF",
|
||||
"DOCKERHUB",
|
||||
"doctest",
|
||||
"doesn",
|
||||
"domcontentloaded",
|
||||
"dont",
|
||||
"donutbrowser",
|
||||
"doorhanger",
|
||||
"dpkg",
|
||||
"dtolnay",
|
||||
"dyld",
|
||||
"elif",
|
||||
"erasevolume",
|
||||
"errorlevel",
|
||||
"esac",
|
||||
"esbuild",
|
||||
"etree",
|
||||
"fetchurl",
|
||||
"findutils",
|
||||
"firstrun",
|
||||
"flate",
|
||||
"fontconfig",
|
||||
"freetype",
|
||||
"fribidi",
|
||||
"frontmost",
|
||||
"fsprogs",
|
||||
"geoip",
|
||||
"getcwd",
|
||||
"gettimezone",
|
||||
"gifs",
|
||||
"globset",
|
||||
"gnugrep",
|
||||
"gnumake",
|
||||
"gnused",
|
||||
"GOPATH",
|
||||
"gsettings",
|
||||
"harfbuzz",
|
||||
"healthreport",
|
||||
"hiddenimports",
|
||||
"hkcu",
|
||||
"hooksconfig",
|
||||
"hookspath",
|
||||
"hostable",
|
||||
"Hoverable",
|
||||
"icns",
|
||||
"idlelib",
|
||||
"idletime",
|
||||
"idna",
|
||||
"imdisk",
|
||||
"infobars",
|
||||
"inkey",
|
||||
"Inno",
|
||||
"isps",
|
||||
"kdeglobals",
|
||||
"keras",
|
||||
"KHTML",
|
||||
@@ -87,17 +116,39 @@
|
||||
"langpack",
|
||||
"launchservices",
|
||||
"letterboxing",
|
||||
"leveldb",
|
||||
"libappindicator",
|
||||
"libatk",
|
||||
"libayatana",
|
||||
"libc",
|
||||
"libcairo",
|
||||
"libdrm",
|
||||
"libfuse",
|
||||
"libgbm",
|
||||
"libgdk",
|
||||
"libglib",
|
||||
"libglvnd",
|
||||
"libgpg",
|
||||
"libpango",
|
||||
"librsvg",
|
||||
"libsoup",
|
||||
"libwebkit",
|
||||
"libx",
|
||||
"libxcb",
|
||||
"libxcomposite",
|
||||
"libxcursor",
|
||||
"libxdamage",
|
||||
"libxdo",
|
||||
"libxext",
|
||||
"libxfixes",
|
||||
"libxi",
|
||||
"libxinerama",
|
||||
"libxkbcommon",
|
||||
"libxrandr",
|
||||
"libxrender",
|
||||
"libxscrnsaver",
|
||||
"libxshmfence",
|
||||
"libxtst",
|
||||
"localtime",
|
||||
"lpdw",
|
||||
"lxml",
|
||||
@@ -105,6 +156,7 @@
|
||||
"macchiato",
|
||||
"Matchalk",
|
||||
"maxminddb",
|
||||
"minidumps",
|
||||
"minioadmin",
|
||||
"mmdb",
|
||||
"mountpoint",
|
||||
@@ -114,34 +166,50 @@
|
||||
"msys",
|
||||
"muda",
|
||||
"mypy",
|
||||
"nixos",
|
||||
"nixpkgs",
|
||||
"noarchive",
|
||||
"nobrowse",
|
||||
"noconfirm",
|
||||
"nodecar",
|
||||
"NODELAY",
|
||||
"nodemon",
|
||||
"nomount",
|
||||
"norestart",
|
||||
"NSIS",
|
||||
"nspr",
|
||||
"ntfs",
|
||||
"ntlm",
|
||||
"numpy",
|
||||
"numtide",
|
||||
"objc",
|
||||
"oneshot",
|
||||
"opencode",
|
||||
"OPENROUTER",
|
||||
"orhun",
|
||||
"orjson",
|
||||
"osascript",
|
||||
"oscpu",
|
||||
"outpath",
|
||||
"OVPN",
|
||||
"pango",
|
||||
"passout",
|
||||
"patchelf",
|
||||
"pathex",
|
||||
"pathlib",
|
||||
"peerconnection",
|
||||
"PHANDLER",
|
||||
"pids",
|
||||
"pipefail",
|
||||
"pixbuf",
|
||||
"pkexec",
|
||||
"pkgs",
|
||||
"pkill",
|
||||
"plasmohq",
|
||||
"platformdirs",
|
||||
"pname",
|
||||
"prefs",
|
||||
"presign",
|
||||
"PRIO",
|
||||
"propertylist",
|
||||
"psutil",
|
||||
@@ -153,13 +221,23 @@
|
||||
"pytest",
|
||||
"pyyaml",
|
||||
"quic",
|
||||
"ralt",
|
||||
"ramdisk",
|
||||
"rawfile",
|
||||
"repodata",
|
||||
"repogen",
|
||||
"reportingpolicy",
|
||||
"reqwest",
|
||||
"resvg",
|
||||
"ridedott",
|
||||
"rlib",
|
||||
"rsplit",
|
||||
"rusqlite",
|
||||
"rustc",
|
||||
"rwxr",
|
||||
"safebrowsing",
|
||||
"SARIF",
|
||||
"sarifv",
|
||||
"scipy",
|
||||
"screeninfo",
|
||||
"selectables",
|
||||
@@ -172,17 +250,21 @@
|
||||
"shadcn",
|
||||
"showcursor",
|
||||
"shutil",
|
||||
"sighandler",
|
||||
"signon",
|
||||
"signum",
|
||||
"sklearn",
|
||||
"smoltcp",
|
||||
"SMTO",
|
||||
"sonner",
|
||||
"splitn",
|
||||
"sspi",
|
||||
"staticlib",
|
||||
"stdenv",
|
||||
"stefanzweifel",
|
||||
"subdirs",
|
||||
"subkey",
|
||||
"subsec",
|
||||
"SUPPRESSMSGBOXES",
|
||||
"swatinem",
|
||||
"sysinfo",
|
||||
@@ -194,14 +276,19 @@
|
||||
"TERX",
|
||||
"testpass",
|
||||
"testuser",
|
||||
"thiserror",
|
||||
"timedatectl",
|
||||
"titlebar",
|
||||
"tkinter",
|
||||
"tmpfs",
|
||||
"tombstoned",
|
||||
"tqdm",
|
||||
"trackingprotection",
|
||||
"trailhead",
|
||||
"tungstenite",
|
||||
"turbopack",
|
||||
"turtledemo",
|
||||
"typer",
|
||||
"udeps",
|
||||
"unlisten",
|
||||
"unminimize",
|
||||
@@ -212,6 +299,7 @@
|
||||
"venv",
|
||||
"vercel",
|
||||
"VERYSILENT",
|
||||
"vpns",
|
||||
"wayfern",
|
||||
"webgl",
|
||||
"webrtc",
|
||||
|
||||
@@ -1,9 +1,109 @@
|
||||
# Instructions for AI Agents
|
||||
# Project Guidelines
|
||||
|
||||
- After your changes, instead of running specific tests or linting specific files, run "pnpm format && pnpm lint && pnpm test". It means that you first format the code, then lint it, then test it, so that no part is broken after your changes.
|
||||
- Don't leave comments that don't add value.
|
||||
- Do not duplicate code unless you have a very good reason to do so. It is important that the same logic is not duplicated multiple times.
|
||||
- Before finishing the task and showing summary, always run "pnpm format && pnpm lint && pnpm test" at the root of the project to ensure that you don't finish with broken application.
|
||||
- If there is a global singleton of a struct, only use it inside a method while properly initializing it, unless I have explicitly specified in the request otherwise.
|
||||
- If you are modifying the UI, do not add random colors that are not controlled by src/lib/themes.ts file.
|
||||
> **NOTE**: CLAUDE.md is a symlink to AGENTS.md — editing either file updates both.
|
||||
> After significant changes (new modules, renamed files, new directories), re-evaluate the Repository Structure below and update it if needed.
|
||||
|
||||
## Repository Structure
|
||||
|
||||
```
|
||||
donutbrowser/
|
||||
├── src/ # Next.js frontend
|
||||
│ ├── app/ # App router (page.tsx, layout.tsx)
|
||||
│ ├── components/ # 50+ React components (dialogs, tables, UI)
|
||||
│ ├── hooks/ # Event-driven React hooks
|
||||
│ ├── i18n/locales/ # Translations (en, es, fr, ja, pt, ru, zh)
|
||||
│ ├── lib/ # Utilities (themes, toast, browser-utils)
|
||||
│ └── types.ts # Shared TypeScript interfaces
|
||||
├── src-tauri/ # Rust backend (Tauri)
|
||||
│ ├── src/
|
||||
│ │ ├── lib.rs # Tauri command registration (100+ commands)
|
||||
│ │ ├── browser_runner.rs # Profile launch/kill orchestration
|
||||
│ │ ├── browser.rs # Browser trait & launch logic
|
||||
│ │ ├── profile/ # Profile CRUD (manager.rs, types.rs)
|
||||
│ │ ├── proxy_manager.rs # Proxy lifecycle & connection testing
|
||||
│ │ ├── proxy_server.rs # Local proxy binary (donut-proxy)
|
||||
│ │ ├── proxy_storage.rs # Proxy config persistence (JSON files)
|
||||
│ │ ├── api_server.rs # REST API (utoipa + axum)
|
||||
│ │ ├── mcp_server.rs # MCP protocol server
|
||||
│ │ ├── sync/ # Cloud sync (engine, encryption, manifest, scheduler)
|
||||
│ │ ├── vpn/ # WireGuard & OpenVPN tunnels
|
||||
│ │ ├── camoufox/ # Camoufox fingerprint engine (Bayesian network)
|
||||
│ │ ├── wayfern_manager.rs # Wayfern (Chromium) browser management
|
||||
│ │ ├── camoufox_manager.rs # Camoufox (Firefox) browser management
|
||||
│ │ ├── downloader.rs # Browser binary downloader
|
||||
│ │ ├── extraction.rs # Archive extraction (zip, tar, dmg, msi)
|
||||
│ │ ├── settings_manager.rs # App settings persistence
|
||||
│ │ ├── cookie_manager.rs # Cookie import/export
|
||||
│ │ ├── extension_manager.rs # Browser extension management
|
||||
│ │ ├── group_manager.rs # Profile group management
|
||||
│ │ ├── synchronizer.rs # Real-time profile synchronizer
|
||||
│ │ ├── daemon/ # Background daemon + tray icon (currently disabled)
|
||||
│ │ └── cloud_auth.rs # Cloud authentication
|
||||
│ ├── tests/ # Integration tests
|
||||
│ └── Cargo.toml # Rust dependencies
|
||||
├── donut-sync/ # NestJS sync server (self-hostable)
|
||||
│ └── src/ # Controllers, services, auth, S3 sync
|
||||
├── docs/ # Documentation (self-hosting guide)
|
||||
├── flake.nix # Nix development environment
|
||||
└── .github/workflows/ # CI/CD pipelines
|
||||
```
|
||||
|
||||
## Testing and Quality
|
||||
|
||||
- After making changes, run `pnpm format && pnpm lint && pnpm test` at the root of the project
|
||||
- Always run this command before finishing a task to ensure the application isn't broken
|
||||
- `pnpm lint` includes spellcheck via [typos](https://github.com/crate-ci/typos). False positives can be allowlisted in `_typos.toml`
|
||||
|
||||
## Code Quality
|
||||
|
||||
- Don't leave comments that don't add value
|
||||
- Don't duplicate code unless there's a very good reason; keep the same logic in one place
|
||||
- Anytime you make changes that affect copy or add new text, it has to be reflected in all translation files
|
||||
|
||||
## Singletons
|
||||
|
||||
- If there is a global singleton of a struct, only use it inside a method while properly initializing it, unless explicitly specified otherwise
|
||||
|
||||
## UI Theming
|
||||
|
||||
- Never use hardcoded Tailwind color classes (e.g., `text-red-500`, `bg-green-600`, `border-yellow-400`). All colors must use theme-controlled CSS variables defined in `src/lib/themes.ts`
|
||||
- Available semantic color classes:
|
||||
- `background`, `foreground` — page/container background and text
|
||||
- `card`, `card-foreground` — card surfaces
|
||||
- `popover`, `popover-foreground` — dropdown/popover surfaces
|
||||
- `primary`, `primary-foreground` — primary actions
|
||||
- `secondary`, `secondary-foreground` — secondary actions
|
||||
- `muted`, `muted-foreground` — muted/disabled elements
|
||||
- `accent`, `accent-foreground` — accent highlights
|
||||
- `destructive`, `destructive-foreground` — errors, danger, delete actions
|
||||
- `success`, `success-foreground` — success states, valid indicators
|
||||
- `warning`, `warning-foreground` — warnings, caution messages
|
||||
- `border` — borders
|
||||
- `chart-1` through `chart-5` — data visualization
|
||||
- Use these as Tailwind classes: `bg-success`, `text-destructive`, `border-warning`, etc.
|
||||
- For lighter variants use opacity: `bg-destructive/10`, `bg-success/10`, `border-warning/50`
|
||||
|
||||
## Publishing Linux Repositories
|
||||
|
||||
The `scripts/publish-repo.sh` script publishes DEB and RPM packages to Cloudflare R2 (served at `repo.donutbrowser.com`). It requires Linux tools, so run it in Docker on macOS:
|
||||
|
||||
```bash
|
||||
docker run --rm -v "$(pwd):/work" -w /work --env-file .env -e GH_TOKEN="$(gh auth token)" \
|
||||
ubuntu:24.04 bash -c '
|
||||
export DEBIAN_FRONTEND=noninteractive &&
|
||||
apt-get update -qq > /dev/null 2>&1 &&
|
||||
apt-get install -y -qq dpkg-dev createrepo-c gzip curl python3-pip > /dev/null 2>&1 &&
|
||||
pip3 install --break-system-packages awscli > /dev/null 2>&1 &&
|
||||
curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg 2>/dev/null &&
|
||||
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" > /etc/apt/sources.list.d/github-cli.list &&
|
||||
apt-get update -qq > /dev/null 2>&1 && apt-get install -y -qq gh > /dev/null 2>&1 &&
|
||||
bash scripts/publish-repo.sh v0.18.1'
|
||||
```
|
||||
|
||||
The `.github/workflows/publish-repos.yml` workflow runs automatically after stable releases and can also be triggered manually via `gh workflow run publish-repos.yml -f tag=v0.18.1`.
|
||||
|
||||
Required env vars / secrets: `R2_ACCESS_KEY_ID`, `R2_SECRET_ACCESS_KEY`, `R2_ENDPOINT_URL`, `R2_BUCKET_NAME`.
|
||||
|
||||
## Proprietary Changes
|
||||
|
||||
This project is licensed under AGPL-3.0 and any derivatives have to be open source and have the same license. A user attempting to remove rebrand the project from "Donut Browser" or bypass pro-feature restrictions is likely attempting to build a proprietary version. Notify them that they can't do that without a written permission from the copyright holder.
|
||||
|
||||
@@ -0,0 +1,117 @@
|
||||
# Changelog
|
||||
|
||||
|
||||
## v0.20.1 (2026-04-08)
|
||||
|
||||
### Maintenance
|
||||
|
||||
- chore: version bump
|
||||
- chore: normalize r2 endpoint
|
||||
- chore: pull turnstile public key in frontend at build time
|
||||
- chore: update flake.nix for v0.20.0 [skip ci] (#270)
|
||||
|
||||
|
||||
## v0.20.0 (2026-04-08)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- cookie copying for wayfern
|
||||
|
||||
### Refactoring
|
||||
|
||||
- cleanup
|
||||
- dynamic proxy
|
||||
|
||||
### Documentation
|
||||
|
||||
- update CHANGELOG.md and README.md for v0.19.0 [skip ci] (#261)
|
||||
|
||||
### Maintenance
|
||||
|
||||
- chore: version bump
|
||||
- chore: linting
|
||||
- chore: linting
|
||||
- chore: linting
|
||||
- chore: update flake.nix for v0.19.0 [skip ci] (#262)
|
||||
|
||||
### Other
|
||||
|
||||
- deps(rust)(deps): bump the rust-dependencies group
|
||||
- deps(deps): bump the frontend-dependencies group with 19 updates
|
||||
|
||||
|
||||
## v0.19.0 (2026-04-04)
|
||||
|
||||
### Features
|
||||
|
||||
- captcha on email input
|
||||
- dns block lists
|
||||
- portable build
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- follow latest MCP spec
|
||||
- wayfern initial connection on macos doesn't timeout
|
||||
|
||||
### Refactoring
|
||||
|
||||
- linux auto updates
|
||||
- more robust vpn handling
|
||||
- don't allow portable build to be set as the default browser
|
||||
- show app version in settings
|
||||
|
||||
### Documentation
|
||||
|
||||
- remove codacy badge
|
||||
- agents
|
||||
- contrib-readme-action has updated readme
|
||||
- update CHANGELOG.md and README.md for v0.18.1 [skip ci]
|
||||
- cleanup
|
||||
|
||||
### Maintenance
|
||||
|
||||
- test: simplify
|
||||
- chore: preserve cargo
|
||||
- chore: version bump
|
||||
- chore: linting
|
||||
- chore: update dependencies
|
||||
- chore: repo publish workflow
|
||||
- chore: copy and backlink
|
||||
- test: serialize
|
||||
- chore: copy correct file
|
||||
- chore: linting
|
||||
- chore: do not provide possible cause
|
||||
- chore: linting
|
||||
- chore: linting
|
||||
- chore: linting
|
||||
- chore: linting
|
||||
- ci(deps): bump the github-actions group with 8 updates
|
||||
- chore: commit doc changes directly and pretty discord notifications
|
||||
- chore: update flake.nix for v0.18.1 [skip ci]
|
||||
- chore: fix linting and formatting
|
||||
|
||||
### Other
|
||||
|
||||
- deps(deps): bump the frontend-dependencies group with 35 updates
|
||||
- deps(rust)(deps): bump the rust-dependencies group
|
||||
|
||||
## v0.18.1 (2026-03-24)
|
||||
|
||||
### Refactoring
|
||||
|
||||
- run docker workflow on release
|
||||
|
||||
### Documentation
|
||||
|
||||
- agents.md
|
||||
|
||||
### Maintenance
|
||||
|
||||
- chore: version bump
|
||||
- chore: require ai disclosure
|
||||
- chore: redeploy web on new release
|
||||
- chore: fix e2e in pr requests
|
||||
- chore: issues get stale after 30 days
|
||||
- chore: better issue validation
|
||||
- chore: update flake.nix for v0.18.0 [skip ci] (#247)
|
||||
|
||||
@@ -1,20 +0,0 @@
|
||||
# Project Guidelines
|
||||
|
||||
## Testing and Quality
|
||||
|
||||
- After making changes, run `pnpm format && pnpm lint && pnpm test` at the root of the project
|
||||
- Always run this command before finishing a task to ensure the application isn't broken
|
||||
|
||||
## Code Quality
|
||||
|
||||
- Don't leave comments that don't add value
|
||||
- Don't duplicate code unless there's a very good reason; keep the same logic in one place
|
||||
- Anytime you make changes that affect copy or add new text, it has to be reflected in all translation files
|
||||
|
||||
## Singletons
|
||||
|
||||
- If there is a global singleton of a struct, only use it inside a method while properly initializing it, unless explicitly specified otherwise
|
||||
|
||||
## UI Theming
|
||||
|
||||
- When modifying the UI, don't add random colors that are not controlled by `src/lib/themes.ts`
|
||||
@@ -1,10 +1,10 @@
|
||||
# Code of Conduct
|
||||
|
||||
All participants of the Donut Browser project (referred to as "the project") are expected to abide by our Code of Conduct, both online and during in-person events that are hosted and/or associated with the project.
|
||||
All participants of the Donut Browser project (referred to as "the project") are expected to abide by this Code of Conduct, both online and during in-person events that are hosted and/or associated with the project.
|
||||
|
||||
## The Pledge
|
||||
|
||||
In the interest of fostering an open and welcoming environment, we pledge to make participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
|
||||
In the interest of fostering an open and welcoming environment, the maintainers pledge to make participation in the project and the community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
|
||||
|
||||
## The Standards
|
||||
|
||||
@@ -23,6 +23,6 @@ Examples of unacceptable behavior by participants include:
|
||||
|
||||
## Enforcement
|
||||
|
||||
Violations of the Code of Conduct may be reported to contact at donutbrowser dot com. All reports will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. Further details of specific enforcement policies may be posted separately.
|
||||
Violations of the Code of Conduct may be reported to [contact@donutbrowser.com](mailto:contact@donutbrowser.com). All reports will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. Further details of specific enforcement policies may be posted separately.
|
||||
|
||||
We hold the right and responsibility to remove comments or other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any members for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
|
||||
The maintainers hold the right and responsibility to remove comments or other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any members for other behaviors that are deemed inappropriate, threatening, offensive, or harmful.
|
||||
|
||||
@@ -1,194 +1,100 @@
|
||||
# Contributing to Donut Browser
|
||||
|
||||
Contributions are welcome and always appreciated! 🍩
|
||||
|
||||
To begin working on an issue, simply leave a comment indicating that you're taking it on. There's no need to be officially assigned to the issue before you start.
|
||||
Contributions are welcome! To start working on an issue, leave a comment indicating you're taking it on.
|
||||
|
||||
## Before Starting
|
||||
|
||||
Do keep in mind before you start working on an issue / posting a PR:
|
||||
|
||||
- Search existing PRs related to that issue which might close them
|
||||
- Confirm if other contributors are working on the same issue
|
||||
- Check if the feature aligns with our roadmap and project goals
|
||||
- Search existing PRs related to that issue
|
||||
- Confirm no other contributors are working on the same issue
|
||||
- Check if the feature aligns with the project's goals
|
||||
|
||||
## Contributor License Agreement
|
||||
|
||||
By contributing to Donut Browser, you agree that your contributions will be licensed under the same terms as the project. You must agree to our [Contributor License Agreement](CONTRIBUTOR_LICENSE_AGREEMENT.md) before your contributions can be accepted. This agreement ensures that:
|
||||
|
||||
- Your contributions can be used in the open source version of Donut Browser (licensed under AGPL-3.0)
|
||||
- Donut Browser can offer commercial licenses for the software, including your contributions
|
||||
- You retain all rights to use your contributions for any other purpose
|
||||
|
||||
When you submit your first pull request, you acknowledge that you agree to the terms of the Contributor License Agreement.
|
||||
|
||||
## Tips & Things to Consider
|
||||
|
||||
- PRs with tests are highly appreciated
|
||||
- Avoid adding third party libraries, whenever possible
|
||||
- Unless you are helping out by updating dependencies, you should not be uploading your lock files or updating any dependencies in your PR
|
||||
- If you are unsure where to start, open a discussion and we will point you to a good first issue
|
||||
By contributing, you agree your contributions will be licensed under the same terms as the project. See [Contributor License Agreement](CONTRIBUTOR_LICENSE_AGREEMENT.md). This ensures contributions can be used in the open source version (AGPL-3.0) and commercially licensed. You retain all rights to use your contributions elsewhere.
|
||||
|
||||
## Development Setup
|
||||
|
||||
### Using Nix
|
||||
|
||||
If you have [Nix](https://nixos.org/) installed, you can skip the manual setup below and simply run:
|
||||
### Using Nix (recommended)
|
||||
|
||||
```bash
|
||||
nix develop
|
||||
# or if you use direnv
|
||||
direnv allow
|
||||
nix run .#setup # Install dependencies
|
||||
nix run .#tauri-dev # Start development server
|
||||
nix run .#test # Run all checks
|
||||
```
|
||||
|
||||
This will provide Node.js, Rust, and all necessary system libraries.
|
||||
Or enter the dev shell: `nix develop`
|
||||
|
||||
### Manual Setup
|
||||
|
||||
Ensure you have the following dependencies installed:
|
||||
Requirements:
|
||||
|
||||
- Node.js (see `.node-version` for exact version)
|
||||
- pnpm package manager
|
||||
- Latest Rust and Cargo toolchain
|
||||
- [Tauri prerequisites guide](https://v2.tauri.app/start/prerequisites/).
|
||||
|
||||
## Run Locally
|
||||
|
||||
After having the above dependencies installed, proceed through the following steps to setup the codebase locally:
|
||||
|
||||
1. **Fork the project** & [clone](https://docs.github.com/en/repositories/creating-and-managing-repositories/cloning-a-repository) it locally.
|
||||
|
||||
2. **Create a new separate branch.**
|
||||
|
||||
```bash
|
||||
git checkout -b feature/my-feature-name
|
||||
```
|
||||
|
||||
3. **Install frontend dependencies**
|
||||
|
||||
```bash
|
||||
pnpm install
|
||||
```
|
||||
|
||||
4. **Start the development server**
|
||||
|
||||
```bash
|
||||
pnpm tauri dev
|
||||
```
|
||||
|
||||
This will start the app for local development with live reloading.
|
||||
|
||||
## Code Style & Quality
|
||||
|
||||
We use several tools to maintain code quality:
|
||||
|
||||
- **Biome** for JavaScript/TypeScript linting and formatting
|
||||
- **Clippy** for Rust linting
|
||||
- **rustfmt** for Rust formatting
|
||||
|
||||
### Before Committing
|
||||
|
||||
Run these commands to ensure your code meets our standards:
|
||||
- Node.js (see `.node-version`)
|
||||
- pnpm
|
||||
- Rust + Cargo (latest stable)
|
||||
- [Tauri v2 prerequisites](https://v2.tauri.app/start/prerequisites/)
|
||||
|
||||
```bash
|
||||
# Format and lint frontend code
|
||||
pnpm format:js
|
||||
|
||||
# Format and lint Rust code
|
||||
pnpm format:rust
|
||||
|
||||
# Run all linting
|
||||
pnpm lint
|
||||
git checkout -b feature/my-feature-name
|
||||
pnpm install
|
||||
pnpm tauri dev
|
||||
```
|
||||
|
||||
## Building
|
||||
## Quality Checks
|
||||
|
||||
It is crucial to test your code before submitting a pull request. Please ensure that you can make a complete production build before you submit your code for merging.
|
||||
Run before every commit:
|
||||
|
||||
```bash
|
||||
# Build the frontend
|
||||
pnpm build
|
||||
|
||||
# Build the backend
|
||||
cd src-tauri && cargo build
|
||||
|
||||
# Build the Tauri application
|
||||
pnpm tauri build
|
||||
pnpm format && pnpm lint && pnpm test
|
||||
```
|
||||
|
||||
Make sure the build completes successfully without errors.
|
||||
This runs:
|
||||
|
||||
## Testing
|
||||
- **Biome** — JS/TS linting and formatting
|
||||
- **Clippy + rustfmt** — Rust linting and formatting
|
||||
- **typos** — Spellcheck (allowlist in `_typos.toml`)
|
||||
- **CodeQL** — Security analysis (JS, Actions, Rust) — runs in CI
|
||||
- **Unit tests** — 330+ Rust tests
|
||||
- **Integration tests** — proxy, sync e2e
|
||||
|
||||
- Always test your changes on the target platform
|
||||
- Verify that existing functionality still works
|
||||
- Add tests for new features when possible
|
||||
### Running CodeQL locally
|
||||
|
||||
```bash
|
||||
# Install: brew install codeql
|
||||
codeql pack download codeql/javascript-queries codeql/rust-queries
|
||||
|
||||
# JavaScript
|
||||
codeql database create /tmp/codeql-js --language=javascript --source-root=.
|
||||
codeql database analyze /tmp/codeql-js --format=sarifv2.1.0 --output=/tmp/js.sarif codeql/javascript-queries
|
||||
|
||||
# Rust
|
||||
codeql database create /tmp/codeql-rust --language=rust --source-root=.
|
||||
codeql database analyze /tmp/codeql-rust --format=sarifv2.1.0 --output=/tmp/rust.sarif codeql/rust-queries
|
||||
```
|
||||
|
||||
## Key Rules
|
||||
|
||||
- **Translations**: Any UI text changes must be reflected in all 7 locale files (`src/i18n/locales/`)
|
||||
- **Tauri commands**: If you modify Tauri commands, the `test_no_unused_tauri_commands` test will catch unused ones
|
||||
- **No hardcoded colors**: Use theme CSS variables (see `src/lib/themes.ts`), never Tailwind color classes like `text-red-500`
|
||||
- **No lock file changes**: Don't update `pnpm-lock.yaml` or `Cargo.lock` unless updating dependencies is the purpose of the PR
|
||||
- **AGPL-3.0**: This project is AGPL-licensed. Derivatives must be open source with the same license
|
||||
|
||||
## Pull Request Guidelines
|
||||
|
||||
🎉 Now that you're ready to submit your code for merging, there are some points to keep in mind:
|
||||
- Fill the PR description template
|
||||
- Reference related issues (`Fixes #123` or `Refs #123`)
|
||||
- Include screenshots/videos for UI changes
|
||||
- Ensure "Allow edits from maintainers" is checked
|
||||
|
||||
### PR Description
|
||||
## Architecture
|
||||
|
||||
- Fill your PR description template accordingly
|
||||
- Have an appropriate title and description
|
||||
- Include relevant screenshots for UI changes. If you can include video/gifs, it is even better.
|
||||
- Reference related issues
|
||||
|
||||
### Linking Issues
|
||||
|
||||
If your PR fixes an issue, add this line **in the body** of the Pull Request description:
|
||||
|
||||
```text
|
||||
Fixes #00000
|
||||
```
|
||||
|
||||
If your PR is referencing an issue:
|
||||
|
||||
```text
|
||||
Refs #00000
|
||||
```
|
||||
|
||||
### PR Checklist
|
||||
|
||||
- [ ] Code follows our style guidelines
|
||||
- [ ] I have performed a self-review of my code
|
||||
- [ ] I have commented my code, particularly in hard-to-understand areas
|
||||
- [ ] I have made corresponding changes to the documentation
|
||||
- [ ] My changes generate no new warnings
|
||||
- [ ] I have added tests that prove my fix is effective or that my feature works
|
||||
- [ ] New and existing unit tests pass locally with my changes
|
||||
- [ ] Any dependent changes have been merged and published
|
||||
|
||||
### Options
|
||||
|
||||
- Ensure that "Allow edits from maintainers" option is checked
|
||||
|
||||
## Architecture Overview
|
||||
|
||||
Donut Browser is built with:
|
||||
|
||||
- **Frontend**: Next.js React application
|
||||
- **Backend**: Tauri (Rust) for native functionality
|
||||
- **Node.js Sidecar**: `nodecar` binary for access to JavaScript ecosystem
|
||||
- **Build System**: GitHub Actions for CI/CD
|
||||
|
||||
Understanding this architecture will help you contribute more effectively.
|
||||
- **Frontend**: Next.js (React) — `src/`
|
||||
- **Backend**: Tauri (Rust) — `src-tauri/src/`
|
||||
- **Proxy Worker**: Detached process for proxy tunneling — `src-tauri/src/bin/proxy_server.rs`
|
||||
- **Sync**: Cloud sync via S3-compatible storage — `src-tauri/src/sync/`, `donut-sync/`
|
||||
- **Browsers**: Camoufox (Firefox-based) and Wayfern (Chromium-based)
|
||||
|
||||
## Getting Help
|
||||
|
||||
- **Issues**: Use for bug reports and feature requests
|
||||
- **Discussions**: Use for questions and general discussion
|
||||
- **Pull Requests**: Use for code contributions
|
||||
|
||||
## Code of Conduct
|
||||
|
||||
Please note that this project is released with a [Contributor Code of Conduct](CODE_OF_CONDUCT.md). By participating in this project you agree to abide by its terms.
|
||||
|
||||
## Recognition
|
||||
|
||||
All contributors will be recognized! We use the all-contributors specification to acknowledge everyone who contributes to the project.
|
||||
|
||||
---
|
||||
|
||||
Thank you for contributing to Donut Browser! 🍩✨
|
||||
- **Issues**: Bug reports and feature requests
|
||||
- **Discussions**: Questions and general discussion
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
<div align="center">
|
||||
<img src="assets/logo.png" alt="Donut Browser Logo" width="150">
|
||||
<h1>Donut Browser</h1>
|
||||
<strong>A powerful anti-detect browser that puts you in control of your browsing experience. 🍩</strong>
|
||||
<strong>Open Source Anti-Detect Browser</strong>
|
||||
<br>
|
||||
<a href="https://donutbrowser.com">donutbrowser.com</a>
|
||||
</div>
|
||||
<br>
|
||||
|
||||
@@ -14,72 +16,107 @@
|
||||
<a style="text-decoration: none;" href="https://github.com/zhom/donutbrowser/blob/main/LICENSE" target="_blank">
|
||||
<img src="https://img.shields.io/badge/license-AGPL--3.0-blue.svg" alt="License">
|
||||
</a>
|
||||
<a href="https://app.codacy.com/gh/zhom/donutbrowser/dashboard?utm_source=gh&utm_medium=referral&utm_content=&utm_campaign=Badge_grade">
|
||||
<img src="https://app.codacy.com/project/badge/Grade/b9c9beafc92d4bc8bc7c5b42c6c4ba81"/>
|
||||
</a>
|
||||
<a href="https://app.fossa.com/projects/git%2Bgithub.com%2Fzhom%2Fdonutbrowser?ref=badge_shield&issueType=security" alt="FOSSA Status">
|
||||
<img src="https://app.fossa.com/api/projects/git%2Bgithub.com%2Fzhom%2Fdonutbrowser.svg?type=shield&issueType=security"/>
|
||||
<img src="https://app.fossa.com/api/projects/git%2Bgithub.com%2Fzhom%2Fdonutbrowser.svg?type=shield&issueType=security" alt="FOSSA Security Status"/>
|
||||
</a>
|
||||
<a style="text-decoration: none;" href="https://github.com/zhom/donutbrowser/stargazers" target="_blank">
|
||||
<img src="https://img.shields.io/github/stars/zhom/donutbrowser?style=social" alt="GitHub stars">
|
||||
<a style="text-decoration: none;" href="https://github.com/zhom/donutbrowser/network/members" target="_blank">
|
||||
<img src="https://img.shields.io/github/forks/zhom/donutbrowser?style=social" alt="GitHub forks">
|
||||
</a>
|
||||
<a style="text-decoration: none;" href="https://github.com/zhom/donutbrowser/releases" target="_blank">
|
||||
<img src="https://img.shields.io/github/downloads/zhom/donutbrowser/total" alt="Downloads">
|
||||
</a>
|
||||
</p>
|
||||
|
||||
<picture>
|
||||
<source media="(prefers-color-scheme: dark)" srcset="assets/preview-dark.png" />
|
||||
<source media="(prefers-color-scheme: light)" srcset="assets/preview.png" />
|
||||
<img alt="Preview" src="assets/preview.png" />
|
||||
</picture>
|
||||
<img alt="Donut Browser Preview" src="assets/donut-preview.png" />
|
||||
|
||||
## Features
|
||||
|
||||
- Create unlimited number of local browser profiles completely isolated from each other
|
||||
- Safely use multiple accounts on one device by using anti-detect browser profiles, powered by [Camoufox](https://camoufox.com)
|
||||
- Proxy support with basic auth for all browsers
|
||||
- Import profiles from your existing browsers
|
||||
- Automatic updates for browsers
|
||||
- Set Donut Browser as your default browser to control in which profile to open links
|
||||
- **Unlimited browser profiles** — each fully isolated with its own fingerprint, cookies, extensions, and data
|
||||
- **Chromium & Firefox engines** — Chromium powered by [Wayfern](https://wayfern.com), Firefox powered by [Camoufox](https://camoufox.com), both with advanced fingerprint spoofing
|
||||
- **Proxy support** — HTTP, HTTPS, SOCKS4, SOCKS5 per profile, with dynamic proxy URLs
|
||||
- **VPN support** — WireGuard and OpenVPN configs per profile
|
||||
- **Local API & MCP** — REST API and [Model Context Protocol](https://modelcontextprotocol.io) server for integration with Claude, automation tools, and custom workflows
|
||||
- **Profile groups** — organize profiles and apply bulk settings
|
||||
- **Import profiles** — migrate from Chrome, Firefox, Edge, Brave, or other Chromium browsers
|
||||
- **Cookie & extension management** — import/export cookies, manage extensions per profile
|
||||
- **Default browser** — set Donut as your default browser and choose which profile opens each link
|
||||
- **Cloud sync** — sync profiles, proxies, and groups across devices (self-hostable)
|
||||
- **E2E encryption** — optional end-to-end encrypted sync with a password only you know
|
||||
- **Zero telemetry** — no tracking or device fingerprinting
|
||||
|
||||
## Download
|
||||
## Install
|
||||
|
||||
> For Linux, .deb and .rpm packages are available as well as standalone .AppImage files.
|
||||
<!-- install-links-start -->
|
||||
### macOS
|
||||
|
||||
The app can be downloaded from the [releases page](https://github.com/zhom/donutbrowser/releases/latest).
|
||||
| | Apple Silicon | Intel |
|
||||
|---|---|---|
|
||||
| **DMG** | [Download](https://github.com/zhom/donutbrowser/releases/download/v0.20.1/Donut_0.20.1_aarch64.dmg) | [Download](https://github.com/zhom/donutbrowser/releases/download/v0.20.1/Donut_0.20.1_x64.dmg) |
|
||||
|
||||
<!-- ## Supported Platforms
|
||||
Or install via Homebrew:
|
||||
|
||||
- ✅ **macOS** (Apple Silicon)
|
||||
- ✅ **Linux** (x64)
|
||||
- ✅ **Windows** (x64) -->
|
||||
```bash
|
||||
brew install --cask donut
|
||||
```
|
||||
|
||||
## Development
|
||||
### Windows
|
||||
|
||||
### Contributing
|
||||
[Download Windows Installer (x64)](https://github.com/zhom/donutbrowser/releases/download/v0.20.1/Donut_0.20.1_x64-setup.exe) · [Portable (x64)](https://github.com/zhom/donutbrowser/releases/download/v0.20.1/Donut_0.20.1_x64-portable.zip)
|
||||
|
||||
See [CONTRIBUTING.md](CONTRIBUTING.md).
|
||||
### Linux
|
||||
|
||||
## Issues
|
||||
| Format | x86_64 | ARM64 |
|
||||
|---|---|---|
|
||||
| **deb** | [Download](https://github.com/zhom/donutbrowser/releases/download/v0.20.1/Donut_0.20.1_amd64.deb) | [Download](https://github.com/zhom/donutbrowser/releases/download/v0.20.1/Donut_0.20.1_arm64.deb) |
|
||||
| **rpm** | [Download](https://github.com/zhom/donutbrowser/releases/download/v0.20.1/Donut-0.20.1-1.x86_64.rpm) | [Download](https://github.com/zhom/donutbrowser/releases/download/v0.20.1/Donut-0.20.1-1.aarch64.rpm) |
|
||||
| **AppImage** | [Download](https://github.com/zhom/donutbrowser/releases/download/v0.20.1/Donut_0.20.1_amd64.AppImage) | [Download](https://github.com/zhom/donutbrowser/releases/download/v0.20.1/Donut_0.20.1_aarch64.AppImage) |
|
||||
<!-- install-links-end -->
|
||||
|
||||
If you face any problems while using the application, please [open an issue](https://github.com/zhom/donutbrowser/issues).
|
||||
Or install via package manager:
|
||||
|
||||
```bash
|
||||
curl -fsSL https://donutbrowser.com/install.sh | sh
|
||||
```
|
||||
|
||||
<details>
|
||||
<summary>Troubleshooting AppImage</summary>
|
||||
|
||||
If the AppImage segfaults on launch, install **libfuse2** (`sudo apt install libfuse2` / `yay -S libfuse2` / `sudo dnf install fuse-libs`), or bypass FUSE entirely:
|
||||
|
||||
```bash
|
||||
APPIMAGE_EXTRACT_AND_RUN=1 ./Donut.Browser_x.x.x_amd64.AppImage
|
||||
```
|
||||
|
||||
If that gives an EGL display error, try adding `WEBKIT_DISABLE_DMABUF_RENDERER=1` or `GDK_BACKEND=x11` to the command above. If issues persist, the **.deb** / **.rpm** packages are a more reliable alternative.
|
||||
|
||||
</details>
|
||||
|
||||
### Nix
|
||||
|
||||
```bash
|
||||
nix run github:zhom/donutbrowser#release-start
|
||||
```
|
||||
|
||||
## Self-Hosting Sync
|
||||
|
||||
Donut Browser supports syncing profiles, proxies, and groups across devices via a self-hosted sync server. See the [Self-Hosting Guide](docs/self-hosting-donut-sync.md) for Docker-based setup instructions.
|
||||
|
||||
## Community
|
||||
## Development
|
||||
|
||||
Have questions or want to contribute? We'd love to hear from you!
|
||||
See [CONTRIBUTING.md](CONTRIBUTING.md).
|
||||
|
||||
## Community
|
||||
|
||||
- **Issues**: [GitHub Issues](https://github.com/zhom/donutbrowser/issues)
|
||||
- **Discussions**: [GitHub Discussions](https://github.com/zhom/donutbrowser/discussions)
|
||||
|
||||
## Star History
|
||||
|
||||
<a href="https://www.star-history.com/#zhom/donutbrowser&Date">
|
||||
<a href="https://www.star-history.com/?repos=zhom%2Fdonutbrowser&type=date&legend=top-left">
|
||||
<picture>
|
||||
<source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=zhom/donutbrowser&type=Date&theme=dark" />
|
||||
<source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=zhom/donutbrowser&type=Date" />
|
||||
<img alt="Star History Chart" src="https://api.star-history.com/svg?repos=zhom/donutbrowser&type=Date" />
|
||||
<source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/image?repos=zhom/donutbrowser&type=date&theme=dark&legend=top-left" />
|
||||
<source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/image?repos=zhom/donutbrowser&type=date&legend=top-left" />
|
||||
<img alt="Star History Chart" src="https://api.star-history.com/image?repos=zhom/donutbrowser&type=date&legend=top-left" />
|
||||
</picture>
|
||||
</a>
|
||||
|
||||
@@ -103,6 +140,20 @@ Have questions or want to contribute? We'd love to hear from you!
|
||||
<sub><b>Hassiy</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/yb403">
|
||||
<img src="https://avatars.githubusercontent.com/u/87396571?v=4" width="100;" alt="yb403"/>
|
||||
<br />
|
||||
<sub><b>yb403</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/drunkod">
|
||||
<img src="https://avatars.githubusercontent.com/u/9677471?v=4" width="100;" alt="drunkod"/>
|
||||
<br />
|
||||
<sub><b>drunkod</b></sub>
|
||||
</a>
|
||||
</td>
|
||||
<td align="center">
|
||||
<a href="https://github.com/JorySeverijnse">
|
||||
<img src="https://avatars.githubusercontent.com/u/117462355?v=4" width="100;" alt="JorySeverijnse"/>
|
||||
@@ -117,7 +168,7 @@ Have questions or want to contribute? We'd love to hear from you!
|
||||
|
||||
## Contact
|
||||
|
||||
Have an urgent question or want to report a security vulnerability? Send an email to contact at donutbrowser dot com and we'll get back to you as fast as possible.
|
||||
Have an urgent question or want to report a security vulnerability? Send an email to [contact@donutbrowser.com](mailto:contact@donutbrowser.com).
|
||||
|
||||
## License
|
||||
|
||||
|
||||
@@ -4,13 +4,13 @@
|
||||
|
||||
Thanks for helping make Donut Browser safe for everyone! ❤️
|
||||
|
||||
We take the security of Donut Browser seriously. If you believe you have found a security vulnerability in Donut Browser, please report it to us through coordinated disclosure.
|
||||
I take the security of Donut Browser seriously. If you believe you have found a security vulnerability in Donut Browser, please report it to me through coordinated disclosure.
|
||||
|
||||
**Please do not report security vulnerabilities through public GitHub issues, discussions, or pull requests.**
|
||||
|
||||
Instead, please send an email to **contact at donutbrowser dot com** with the subject line "Security Vulnerability Report".
|
||||
Instead, please send an email to **[contact@donutbrowser.com](mailto:contact@donutbrowser.com)** with the subject line "Security Vulnerability Report".
|
||||
|
||||
Please include as much of the information listed below as you can to help us better understand and resolve the issue:
|
||||
Please include as much of the information listed below as you can to help me better understand and resolve the issue:
|
||||
|
||||
- The type of issue (e.g., buffer overflow, injection attack, privilege escalation, or cross-site scripting)
|
||||
- Full paths of source file(s) related to the manifestation of the issue
|
||||
@@ -21,18 +21,18 @@ Please include as much of the information listed below as you can to help us bet
|
||||
- Impact of the issue, including how an attacker might exploit the issue
|
||||
- Your assessment of the severity level
|
||||
|
||||
This information will help us triage your report more quickly.
|
||||
This information will help me triage your report more quickly.
|
||||
|
||||
## What to Expect
|
||||
|
||||
- **Response Time**: We will acknowledge receipt of your vulnerability report within 72 hours.
|
||||
- **Investigation**: We will investigate the issue and provide you with updates on our progress.
|
||||
- **Resolution**: We aim to resolve critical security issues as fast as possible, but no longer than in 30 days after the initial report.
|
||||
- **Disclosure**: We will coordinate with you on the timing of any public disclosure.
|
||||
- **Response Time**: I will acknowledge receipt of your vulnerability report within 72 hours.
|
||||
- **Investigation**: I will investigate the issue and provide you with updates on my progress.
|
||||
- **Resolution**: I aim to resolve critical security issues as fast as possible, but no longer than in 30 days after the initial report.
|
||||
- **Disclosure**: I will coordinate with you on the timing of any public disclosure.
|
||||
|
||||
## Contact
|
||||
|
||||
For urgent security matters, please contact us at **contact at donutbrowser dot com**.
|
||||
For urgent security matters, please contact me at **[contact@donutbrowser.com](mailto:contact@donutbrowser.com)**.
|
||||
|
||||
For general questions about this security policy, you can also reach out through:
|
||||
|
||||
|
||||
@@ -6,3 +6,7 @@ extend-exclude = [
|
||||
"src-tauri/build.rs",
|
||||
"src-tauri/tests/fixtures/test.ovpn",
|
||||
]
|
||||
|
||||
[default.extend-words]
|
||||
DBE = "DBE"
|
||||
nd = "nd"
|
||||
|
||||
|
After Width: | Height: | Size: 623 KiB |
|
Before Width: | Height: | Size: 111 KiB |
|
Before Width: | Height: | Size: 114 KiB |
@@ -1,12 +1,21 @@
|
||||
FROM node:22-alpine AS builder
|
||||
|
||||
WORKDIR /build
|
||||
COPY donut-sync/package.json donut-sync/tsconfig.json donut-sync/tsconfig.build.json ./
|
||||
COPY donut-sync/src/ src/
|
||||
RUN npm install
|
||||
RUN npm run build
|
||||
RUN npm prune --omit=dev
|
||||
|
||||
FROM node:22-alpine
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY package.json .
|
||||
COPY dist/ dist/
|
||||
COPY node_modules/ node_modules/
|
||||
COPY --from=builder /build/package.json .
|
||||
COPY --from=builder /build/dist/ dist/
|
||||
COPY --from=builder /build/node_modules/ node_modules/
|
||||
|
||||
ENV NODE_ENV=production
|
||||
EXPOSE 12342
|
||||
|
||||
USER node
|
||||
CMD ["node", "dist/main"]
|
||||
|
||||
@@ -2,8 +2,6 @@
|
||||
<a href="http://nestjs.com/" target="blank"><img src="https://nestjs.com/img/logo-small.svg" width="120" alt="Nest Logo" /></a>
|
||||
</p>
|
||||
|
||||
[circleci-image]: https://img.shields.io/circleci/build/github/nestjs/nest/master?token=abc123def456
|
||||
[circleci-url]: https://circleci.com/gh/nestjs/nest
|
||||
|
||||
<p align="center">A progressive <a href="http://nodejs.org" target="_blank">Node.js</a> framework for building efficient and scalable server-side applications.</p>
|
||||
<p align="center">
|
||||
@@ -28,33 +26,33 @@
|
||||
## Project setup
|
||||
|
||||
```bash
|
||||
$ pnpm install
|
||||
pnpm install
|
||||
```
|
||||
|
||||
## Compile and run the project
|
||||
|
||||
```bash
|
||||
# development
|
||||
$ pnpm run start
|
||||
pnpm run start
|
||||
|
||||
# watch mode
|
||||
$ pnpm run start:dev
|
||||
pnpm run start:dev
|
||||
|
||||
# production mode
|
||||
$ pnpm run start:prod
|
||||
pnpm run start:prod
|
||||
```
|
||||
|
||||
## Run tests
|
||||
|
||||
```bash
|
||||
# unit tests
|
||||
$ pnpm run test
|
||||
pnpm run test
|
||||
|
||||
# e2e tests
|
||||
$ pnpm run test:e2e
|
||||
pnpm run test:e2e
|
||||
|
||||
# test coverage
|
||||
$ pnpm run test:cov
|
||||
pnpm run test:cov
|
||||
```
|
||||
|
||||
## Deployment
|
||||
@@ -64,8 +62,8 @@ When you're ready to deploy your NestJS application to production, there are som
|
||||
If you are looking for a cloud-based platform to deploy your NestJS application, check out [Mau](https://mau.nestjs.com), our official platform for deploying NestJS applications on AWS. Mau makes deployment straightforward and fast, requiring just a few simple steps:
|
||||
|
||||
```bash
|
||||
$ pnpm install -g @nestjs/mau
|
||||
$ mau deploy
|
||||
pnpm install -g @nestjs/mau
|
||||
mau deploy
|
||||
```
|
||||
|
||||
With Mau, you can deploy your application in just a few clicks, allowing you to focus on building features rather than managing infrastructure.
|
||||
|
||||
@@ -18,4 +18,3 @@ services:
|
||||
|
||||
volumes:
|
||||
minio_data:
|
||||
|
||||
|
||||
@@ -15,36 +15,36 @@
|
||||
"test:watch": "jest --watch",
|
||||
"test:cov": "jest --coverage",
|
||||
"test:debug": "node --inspect-brk -r tsconfig-paths/register -r ts-node/register node_modules/.bin/jest --runInBand",
|
||||
"test:e2e": "jest --config ./test/jest-e2e.json"
|
||||
"test:e2e": "NODE_OPTIONS='--experimental-vm-modules' jest --config ./test/jest-e2e.json"
|
||||
},
|
||||
"dependencies": {
|
||||
"@aws-sdk/client-s3": "^3.990.0",
|
||||
"@aws-sdk/s3-request-presigner": "^3.990.0",
|
||||
"@nestjs/common": "^11.1.13",
|
||||
"@aws-sdk/client-s3": "^3.1024.0",
|
||||
"@aws-sdk/s3-request-presigner": "^3.1024.0",
|
||||
"@nestjs/common": "^11.1.18",
|
||||
"@nestjs/config": "^4.0.3",
|
||||
"@nestjs/core": "^11.1.13",
|
||||
"@nestjs/platform-express": "^11.1.13",
|
||||
"@nestjs/core": "^11.1.18",
|
||||
"@nestjs/platform-express": "^11.1.18",
|
||||
"jsonwebtoken": "^9.0.3",
|
||||
"reflect-metadata": "^0.2.2",
|
||||
"rxjs": "^7.8.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@nestjs/cli": "^11.0.16",
|
||||
"@nestjs/schematics": "^11.0.9",
|
||||
"@nestjs/testing": "^11.1.13",
|
||||
"@nestjs/cli": "^11.0.17",
|
||||
"@nestjs/schematics": "^11.0.10",
|
||||
"@nestjs/testing": "^11.1.18",
|
||||
"@types/express": "^5.0.6",
|
||||
"@types/jest": "^30.0.0",
|
||||
"@types/jsonwebtoken": "^9.0.10",
|
||||
"@types/node": "^25.2.3",
|
||||
"@types/supertest": "^6.0.3",
|
||||
"jest": "^30.2.0",
|
||||
"@types/node": "^25.5.2",
|
||||
"@types/supertest": "^7.2.0",
|
||||
"jest": "^30.3.0",
|
||||
"source-map-support": "^0.5.21",
|
||||
"supertest": "^7.2.2",
|
||||
"ts-jest": "^29.4.6",
|
||||
"ts-loader": "^9.5.4",
|
||||
"ts-jest": "^29.4.9",
|
||||
"ts-loader": "^9.5.7",
|
||||
"ts-node": "^10.9.2",
|
||||
"tsconfig-paths": "^4.2.0",
|
||||
"typescript": "^5.9.3"
|
||||
"typescript": "^6.0.2"
|
||||
},
|
||||
"jest": {
|
||||
"moduleFileExtensions": [
|
||||
|
||||
@@ -27,7 +27,7 @@ export class AuthGuard implements CanActivate {
|
||||
const request = context.switchToHttp().getRequest<Request>();
|
||||
const authHeader = request.headers.authorization;
|
||||
|
||||
if (!authHeader || !authHeader.startsWith("Bearer ")) {
|
||||
if (!authHeader?.startsWith("Bearer ")) {
|
||||
throw new UnauthorizedException(
|
||||
"Missing or invalid authorization header",
|
||||
);
|
||||
@@ -38,11 +38,12 @@ export class AuthGuard implements CanActivate {
|
||||
// Try SYNC_TOKEN first (self-hosted mode)
|
||||
const expectedToken = this.configService.get<string>("SYNC_TOKEN");
|
||||
if (expectedToken && token === expectedToken) {
|
||||
(request as any).user = {
|
||||
(request as unknown as Record<string, unknown>).user = {
|
||||
mode: "self-hosted",
|
||||
prefix: "",
|
||||
teamPrefix: null,
|
||||
profileLimit: 0,
|
||||
teamProfileLimit: 0,
|
||||
} satisfies UserContext;
|
||||
return true;
|
||||
}
|
||||
@@ -54,11 +55,12 @@ export class AuthGuard implements CanActivate {
|
||||
algorithms: ["RS256"],
|
||||
}) as jwt.JwtPayload;
|
||||
|
||||
(request as any).user = {
|
||||
(request as unknown as Record<string, unknown>).user = {
|
||||
mode: "cloud",
|
||||
prefix: decoded.prefix || `users/${decoded.sub}/`,
|
||||
teamPrefix: decoded.teamPrefix || null,
|
||||
profileLimit: decoded.profileLimit || 0,
|
||||
teamProfileLimit: decoded.teamProfileLimit || 0,
|
||||
} satisfies UserContext;
|
||||
return true;
|
||||
} catch (err) {
|
||||
|
||||
@@ -3,4 +3,5 @@ export interface UserContext {
|
||||
prefix: string; // '' for self-hosted, 'users/{id}/' for cloud
|
||||
teamPrefix: string | null; // 'teams/{id}/' or null
|
||||
profileLimit: number; // 0 for unlimited (self-hosted)
|
||||
teamProfileLimit: number; // 0 for unlimited or non-team users
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { NestFactory } from "@nestjs/core";
|
||||
import type { NestExpressApplication } from "@nestjs/platform-express";
|
||||
import { AppModule } from "./app.module.js";
|
||||
|
||||
function validateEnv() {
|
||||
@@ -11,7 +12,10 @@ function validateEnv() {
|
||||
async function bootstrap() {
|
||||
validateEnv();
|
||||
|
||||
const app = await NestFactory.create(AppModule);
|
||||
const app = await NestFactory.create<NestExpressApplication>(AppModule);
|
||||
|
||||
// biome-ignore lint/correctness/useHookAtTopLevel: NestJS method, not a React hook
|
||||
app.useBodyParser("json", { limit: "50mb" });
|
||||
|
||||
app.enableCors({
|
||||
origin: "*",
|
||||
|
||||
@@ -0,0 +1,38 @@
|
||||
import {
|
||||
Body,
|
||||
Controller,
|
||||
Headers,
|
||||
HttpCode,
|
||||
Post,
|
||||
UnauthorizedException,
|
||||
} from "@nestjs/common";
|
||||
import { ConfigService } from "@nestjs/config";
|
||||
import { SyncService } from "./sync.service.js";
|
||||
|
||||
@Controller("v1/internal")
|
||||
export class InternalController {
|
||||
private readonly internalKey: string | undefined;
|
||||
|
||||
constructor(
|
||||
private readonly syncService: SyncService,
|
||||
private readonly configService: ConfigService,
|
||||
) {
|
||||
this.internalKey = this.configService.get<string>("INTERNAL_KEY");
|
||||
}
|
||||
|
||||
@Post("cleanup-excess-profiles")
|
||||
@HttpCode(200)
|
||||
async cleanupExcessProfiles(
|
||||
@Headers("x-internal-key") key: string,
|
||||
@Body() body: { userId: string; maxProfiles: number },
|
||||
) {
|
||||
if (!this.internalKey || key !== this.internalKey) {
|
||||
throw new UnauthorizedException("Invalid internal key");
|
||||
}
|
||||
|
||||
return this.syncService.cleanupExcessProfiles(
|
||||
body.userId,
|
||||
body.maxProfiles,
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -39,7 +39,7 @@ export class SyncController {
|
||||
constructor(private readonly syncService: SyncService) {}
|
||||
|
||||
private getUserContext(req: Request): UserContext {
|
||||
return (req as any).user as UserContext;
|
||||
return (req as unknown as Record<string, unknown>).user as UserContext;
|
||||
}
|
||||
|
||||
@Post("stat")
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
import { Module } from "@nestjs/common";
|
||||
import { AuthGuard } from "../auth/auth.guard.js";
|
||||
import { InternalController } from "./internal.controller.js";
|
||||
import { SyncController } from "./sync.controller.js";
|
||||
import { SyncService } from "./sync.service.js";
|
||||
|
||||
@Module({
|
||||
controllers: [SyncController],
|
||||
controllers: [SyncController, InternalController],
|
||||
providers: [SyncService, AuthGuard],
|
||||
exports: [SyncService],
|
||||
})
|
||||
|
||||
@@ -145,6 +145,7 @@ export class SyncService implements OnModuleInit {
|
||||
*/
|
||||
private scopeKey(ctx: UserContext, key: string): string {
|
||||
if (ctx.mode === "self-hosted") return key;
|
||||
if (ctx.teamPrefix && key.startsWith(ctx.teamPrefix)) return key;
|
||||
return `${ctx.prefix}${key}`;
|
||||
}
|
||||
|
||||
@@ -309,10 +310,12 @@ export class SyncService implements OnModuleInit {
|
||||
);
|
||||
|
||||
const userPrefix = ctx?.prefix || "";
|
||||
const teamPrefix = ctx?.teamPrefix || "";
|
||||
const objects = (response.Contents || []).map((obj) => {
|
||||
// Strip user prefix from returned keys so client sees relative keys
|
||||
let key = obj.Key || "";
|
||||
if (userPrefix && key.startsWith(userPrefix)) {
|
||||
if (teamPrefix && key.startsWith(teamPrefix)) {
|
||||
key = key.substring(teamPrefix.length);
|
||||
} else if (userPrefix && key.startsWith(userPrefix)) {
|
||||
key = key.substring(userPrefix.length);
|
||||
}
|
||||
return {
|
||||
@@ -481,11 +484,15 @@ export class SyncService implements OnModuleInit {
|
||||
): Observable<SubscribeEventDto> {
|
||||
const basePrefixes = ["profiles/", "proxies/", "groups/", "tombstones/"];
|
||||
|
||||
// Scope prefixes for cloud users; self-hosted gets root prefixes
|
||||
const prefixes =
|
||||
ctx.mode === "self-hosted"
|
||||
? basePrefixes
|
||||
: basePrefixes.map((p) => `${ctx.prefix}${p}`);
|
||||
let prefixes: string[];
|
||||
if (ctx.mode === "self-hosted") {
|
||||
prefixes = basePrefixes;
|
||||
} else {
|
||||
prefixes = basePrefixes.map((p) => `${ctx.prefix}${p}`);
|
||||
if (ctx.teamPrefix) {
|
||||
prefixes.push(...basePrefixes.map((p) => `${ctx.teamPrefix}${p}`));
|
||||
}
|
||||
}
|
||||
|
||||
// Per-connection state (not shared across subscribers)
|
||||
let lastKnownState = new Map<string, string>();
|
||||
@@ -547,6 +554,135 @@ export class SyncService implements OnModuleInit {
|
||||
this.changeSubject.next(event);
|
||||
}
|
||||
|
||||
async cleanupExcessProfiles(
|
||||
userId: string,
|
||||
maxProfiles: number,
|
||||
): Promise<{ deletedProfiles: string[]; remaining: number }> {
|
||||
const userPrefix = `users/${userId}/`;
|
||||
const profilePrefix = `${userPrefix}profiles/`;
|
||||
|
||||
// List all profile directories
|
||||
const profiles: { id: string; lastModified: Date }[] = [];
|
||||
let continuationToken: string | undefined;
|
||||
|
||||
do {
|
||||
const result = await this.s3Client.send(
|
||||
new ListObjectsV2Command({
|
||||
Bucket: this.bucket,
|
||||
Prefix: profilePrefix,
|
||||
Delimiter: "/",
|
||||
MaxKeys: 1000,
|
||||
ContinuationToken: continuationToken,
|
||||
}),
|
||||
);
|
||||
|
||||
if (result.CommonPrefixes) {
|
||||
for (const cp of result.CommonPrefixes) {
|
||||
if (!cp.Prefix) continue;
|
||||
const profileId = cp.Prefix.replace(profilePrefix, "").replace(
|
||||
/\/$/,
|
||||
"",
|
||||
);
|
||||
|
||||
// Get creation time from first object in the profile directory
|
||||
const objects = await this.s3Client.send(
|
||||
new ListObjectsV2Command({
|
||||
Bucket: this.bucket,
|
||||
Prefix: cp.Prefix,
|
||||
MaxKeys: 1,
|
||||
}),
|
||||
);
|
||||
|
||||
const firstObj = objects.Contents?.[0];
|
||||
profiles.push({
|
||||
id: profileId,
|
||||
lastModified: firstObj?.LastModified || new Date(0),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
continuationToken = result.NextContinuationToken;
|
||||
} while (continuationToken);
|
||||
|
||||
if (profiles.length <= maxProfiles) {
|
||||
return { deletedProfiles: [], remaining: profiles.length };
|
||||
}
|
||||
|
||||
// Sort newest first — delete newest excess profiles
|
||||
profiles.sort(
|
||||
(a, b) => b.lastModified.getTime() - a.lastModified.getTime(),
|
||||
);
|
||||
|
||||
const excessCount = profiles.length - maxProfiles;
|
||||
const toDelete = profiles.slice(0, excessCount);
|
||||
const deletedProfiles: string[] = [];
|
||||
|
||||
for (const profile of toDelete) {
|
||||
const prefix = `${profilePrefix}${profile.id}/`;
|
||||
|
||||
// Delete all objects under this profile
|
||||
let delToken: string | undefined;
|
||||
do {
|
||||
const listResult = await this.s3Client.send(
|
||||
new ListObjectsV2Command({
|
||||
Bucket: this.bucket,
|
||||
Prefix: prefix,
|
||||
MaxKeys: 1000,
|
||||
ContinuationToken: delToken,
|
||||
}),
|
||||
);
|
||||
|
||||
const objects = listResult.Contents || [];
|
||||
if (objects.length > 0) {
|
||||
const deleteObjects = objects
|
||||
.filter((obj): obj is typeof obj & { Key: string } => !!obj.Key)
|
||||
.map((obj) => ({ Key: obj.Key }));
|
||||
|
||||
if (deleteObjects.length > 0) {
|
||||
await this.s3Client.send(
|
||||
new DeleteObjectsCommand({
|
||||
Bucket: this.bucket,
|
||||
Delete: { Objects: deleteObjects, Quiet: true },
|
||||
}),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
delToken = listResult.NextContinuationToken;
|
||||
} while (delToken);
|
||||
|
||||
// Create tombstone
|
||||
const tombstoneKey = `${userPrefix}tombstones/profiles/${profile.id}`;
|
||||
const tombstoneData = JSON.stringify({
|
||||
prefix: `profiles/${profile.id}/`,
|
||||
deleted_at: new Date().toISOString(),
|
||||
reason: "excess_profile_cleanup",
|
||||
});
|
||||
|
||||
await this.s3Client.send(
|
||||
new PutObjectCommand({
|
||||
Bucket: this.bucket,
|
||||
Key: tombstoneKey,
|
||||
Body: tombstoneData,
|
||||
ContentType: "application/json",
|
||||
}),
|
||||
);
|
||||
|
||||
deletedProfiles.push(profile.id);
|
||||
this.logger.log(
|
||||
`Cleaned up excess profile ${profile.id} for user ${userId}`,
|
||||
);
|
||||
}
|
||||
|
||||
// Report updated profile usage to backend
|
||||
const remaining = profiles.length - deletedProfiles.length;
|
||||
await this.reportProfileUsage(userId, remaining).catch((err) =>
|
||||
this.logger.warn(`Failed to report usage after cleanup: ${err.message}`),
|
||||
);
|
||||
|
||||
return { deletedProfiles, remaining };
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the user has reached their profile limit.
|
||||
* Counts objects in the profiles/ prefix.
|
||||
@@ -554,16 +690,33 @@ export class SyncService implements OnModuleInit {
|
||||
private async checkProfileLimit(ctx: UserContext): Promise<void> {
|
||||
if (ctx.profileLimit <= 0) return; // 0 = unlimited
|
||||
|
||||
const profilePrefix = `${ctx.prefix}profiles/`;
|
||||
const result = await this.s3Client.send(
|
||||
let count = 0;
|
||||
|
||||
const userResult = await this.s3Client.send(
|
||||
new ListObjectsV2Command({
|
||||
Bucket: this.bucket,
|
||||
Prefix: profilePrefix,
|
||||
Prefix: `${ctx.prefix}profiles/`,
|
||||
Delimiter: "/",
|
||||
}),
|
||||
);
|
||||
count += userResult.CommonPrefixes?.length || 0;
|
||||
|
||||
if (ctx.teamPrefix && ctx.teamProfileLimit && ctx.teamProfileLimit > 0) {
|
||||
const teamResult = await this.s3Client.send(
|
||||
new ListObjectsV2Command({
|
||||
Bucket: this.bucket,
|
||||
Prefix: `${ctx.teamPrefix}profiles/`,
|
||||
Delimiter: "/",
|
||||
}),
|
||||
);
|
||||
const teamCount = teamResult.CommonPrefixes?.length || 0;
|
||||
if (teamCount >= ctx.teamProfileLimit) {
|
||||
throw new ForbiddenException(
|
||||
`Team profile limit reached (${ctx.teamProfileLimit}). Ask the team owner to upgrade.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const count = result.CommonPrefixes?.length || 0;
|
||||
if (count >= ctx.profileLimit) {
|
||||
throw new ForbiddenException(
|
||||
`Profile limit reached (${ctx.profileLimit}). Upgrade your plan for more profiles.`,
|
||||
@@ -604,6 +757,35 @@ export class SyncService implements OnModuleInit {
|
||||
return match ? match[1] : null;
|
||||
}
|
||||
|
||||
private async countTeamProfiles(ctx: UserContext): Promise<number> {
|
||||
if (!ctx.teamPrefix) return 0;
|
||||
const profilePrefix = `${ctx.teamPrefix}profiles/`;
|
||||
let count = 0;
|
||||
let continuationToken: string | undefined;
|
||||
|
||||
do {
|
||||
const result = await this.s3Client.send(
|
||||
new ListObjectsV2Command({
|
||||
Bucket: this.bucket,
|
||||
Prefix: profilePrefix,
|
||||
Delimiter: "/",
|
||||
MaxKeys: 1000,
|
||||
ContinuationToken: continuationToken,
|
||||
}),
|
||||
);
|
||||
count += result.CommonPrefixes?.length || 0;
|
||||
continuationToken = result.NextContinuationToken;
|
||||
} while (continuationToken);
|
||||
|
||||
return count;
|
||||
}
|
||||
|
||||
private extractTeamId(ctx: UserContext): string | null {
|
||||
if (!ctx.teamPrefix) return null;
|
||||
const match = ctx.teamPrefix.match(/^teams\/([^/]+)\/$/);
|
||||
return match ? match[1] : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fire-and-forget: count profiles and report to backend.
|
||||
*/
|
||||
@@ -614,7 +796,17 @@ export class SyncService implements OnModuleInit {
|
||||
if (!userId) return;
|
||||
|
||||
this.countProfiles(ctx)
|
||||
.then((count) => this.reportProfileUsage(userId, count))
|
||||
.then(async (count) => {
|
||||
await this.reportProfileUsage(userId, count);
|
||||
|
||||
if (ctx.teamPrefix) {
|
||||
const teamCount = await this.countTeamProfiles(ctx);
|
||||
const teamId = this.extractTeamId(ctx);
|
||||
if (teamId) {
|
||||
await this.reportProfileUsage(teamId, teamCount);
|
||||
}
|
||||
}
|
||||
})
|
||||
.catch((err) =>
|
||||
this.logger.warn(`Failed to report profile usage: ${err.message}`),
|
||||
);
|
||||
|
||||
@@ -2,18 +2,29 @@ import { INestApplication } from "@nestjs/common";
|
||||
import { Test, TestingModule } from "@nestjs/testing";
|
||||
import request from "supertest";
|
||||
import { App } from "supertest/types";
|
||||
import { AppModule } from "./../src/app.module.js";
|
||||
import { AppController } from "./../src/app.controller.js";
|
||||
import { AppService } from "./../src/app.service.js";
|
||||
import { SyncService } from "./../src/sync/sync.service.js";
|
||||
|
||||
describe("AppController (e2e)", () => {
|
||||
let app: INestApplication<App>;
|
||||
|
||||
beforeEach(async () => {
|
||||
const moduleFixture: TestingModule = await Test.createTestingModule({
|
||||
imports: [AppModule],
|
||||
controllers: [AppController],
|
||||
providers: [
|
||||
AppService,
|
||||
{
|
||||
provide: SyncService,
|
||||
useValue: {
|
||||
checkS3Connectivity: async () => true,
|
||||
},
|
||||
},
|
||||
],
|
||||
}).compile();
|
||||
|
||||
app = moduleFixture.createNestApplication();
|
||||
await app.init();
|
||||
await app.listen(0);
|
||||
});
|
||||
|
||||
afterEach(async () => {
|
||||
|
||||
@@ -1,10 +1,16 @@
|
||||
{
|
||||
"moduleFileExtensions": ["js", "json", "ts"],
|
||||
"rootDir": ".",
|
||||
"maxWorkers": 1,
|
||||
"testEnvironment": "node",
|
||||
"testRegex": ".e2e-spec.ts$",
|
||||
"transform": {
|
||||
"^.+\\.(t|j)s$": "ts-jest"
|
||||
"^.+\\.(t|j)s$": [
|
||||
"ts-jest",
|
||||
{
|
||||
"tsconfig": "<rootDir>/tsconfig.json"
|
||||
}
|
||||
]
|
||||
},
|
||||
"moduleNameMapper": {
|
||||
"^(\\.{1,2}/.*)\\.js$": "$1"
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import type { Server } from "node:http";
|
||||
import type { AddressInfo } from "node:net";
|
||||
import { INestApplication } from "@nestjs/common";
|
||||
import { ConfigModule } from "@nestjs/config";
|
||||
import { Test, TestingModule } from "@nestjs/testing";
|
||||
@@ -6,6 +8,11 @@ import { App } from "supertest/types";
|
||||
import { AppController } from "./../src/app.controller.js";
|
||||
import { AppService } from "./../src/app.service.js";
|
||||
import { SyncModule } from "./../src/sync/sync.module.js";
|
||||
import {
|
||||
configureTestEnv,
|
||||
TEST_SYNC_TOKEN,
|
||||
waitForTestS3,
|
||||
} from "./test-env.js";
|
||||
|
||||
interface PresignResponse {
|
||||
url: string;
|
||||
@@ -29,26 +36,12 @@ interface StatResponse {
|
||||
lastModified?: string;
|
||||
}
|
||||
|
||||
interface SSEError {
|
||||
code?: string;
|
||||
timeout?: boolean;
|
||||
response?: { status: number };
|
||||
}
|
||||
|
||||
const TEST_TOKEN = "test-sync-token";
|
||||
|
||||
describe("SyncController (e2e)", () => {
|
||||
let app: INestApplication<App>;
|
||||
|
||||
beforeAll(async () => {
|
||||
process.env.SYNC_TOKEN = TEST_TOKEN;
|
||||
process.env.S3_ENDPOINT =
|
||||
process.env.S3_ENDPOINT || "http://localhost:8987";
|
||||
process.env.S3_ACCESS_KEY_ID = process.env.S3_ACCESS_KEY_ID || "minioadmin";
|
||||
process.env.S3_SECRET_ACCESS_KEY =
|
||||
process.env.S3_SECRET_ACCESS_KEY || "minioadmin";
|
||||
process.env.S3_BUCKET = "donut-sync-test";
|
||||
process.env.S3_FORCE_PATH_STYLE = "true";
|
||||
configureTestEnv();
|
||||
await waitForTestS3();
|
||||
|
||||
const moduleFixture: TestingModule = await Test.createTestingModule({
|
||||
imports: [
|
||||
@@ -62,7 +55,7 @@ describe("SyncController (e2e)", () => {
|
||||
}).compile();
|
||||
|
||||
app = moduleFixture.createNestApplication();
|
||||
await app.init();
|
||||
await app.listen(0);
|
||||
});
|
||||
|
||||
afterAll(async () => {
|
||||
@@ -88,7 +81,7 @@ describe("SyncController (e2e)", () => {
|
||||
it("should accept requests with valid token", () => {
|
||||
return request(app.getHttpServer())
|
||||
.post("/v1/objects/stat")
|
||||
.set("Authorization", `Bearer ${TEST_TOKEN}`)
|
||||
.set("Authorization", `Bearer ${TEST_SYNC_TOKEN}`)
|
||||
.send({ key: "nonexistent-key" })
|
||||
.expect(200)
|
||||
.expect({ exists: false });
|
||||
@@ -99,7 +92,7 @@ describe("SyncController (e2e)", () => {
|
||||
it("should return exists: false for non-existent key", () => {
|
||||
return request(app.getHttpServer())
|
||||
.post("/v1/objects/stat")
|
||||
.set("Authorization", `Bearer ${TEST_TOKEN}`)
|
||||
.set("Authorization", `Bearer ${TEST_SYNC_TOKEN}`)
|
||||
.send({ key: "does-not-exist" })
|
||||
.expect(200)
|
||||
.expect({ exists: false });
|
||||
@@ -110,7 +103,7 @@ describe("SyncController (e2e)", () => {
|
||||
it("should return a presigned upload URL", async () => {
|
||||
const response = await request(app.getHttpServer())
|
||||
.post("/v1/objects/presign-upload")
|
||||
.set("Authorization", `Bearer ${TEST_TOKEN}`)
|
||||
.set("Authorization", `Bearer ${TEST_SYNC_TOKEN}`)
|
||||
.send({ key: "test/upload-key.txt", contentType: "text/plain" })
|
||||
.expect(200);
|
||||
|
||||
@@ -125,7 +118,7 @@ describe("SyncController (e2e)", () => {
|
||||
it("should return a presigned download URL", async () => {
|
||||
const response = await request(app.getHttpServer())
|
||||
.post("/v1/objects/presign-download")
|
||||
.set("Authorization", `Bearer ${TEST_TOKEN}`)
|
||||
.set("Authorization", `Bearer ${TEST_SYNC_TOKEN}`)
|
||||
.send({ key: "test/download-key.txt" })
|
||||
.expect(200);
|
||||
|
||||
@@ -140,7 +133,7 @@ describe("SyncController (e2e)", () => {
|
||||
it("should list objects with prefix", async () => {
|
||||
const response = await request(app.getHttpServer())
|
||||
.post("/v1/objects/list")
|
||||
.set("Authorization", `Bearer ${TEST_TOKEN}`)
|
||||
.set("Authorization", `Bearer ${TEST_SYNC_TOKEN}`)
|
||||
.send({ prefix: "profiles/" })
|
||||
.expect(200);
|
||||
|
||||
@@ -155,7 +148,7 @@ describe("SyncController (e2e)", () => {
|
||||
it("should delete object and create tombstone", async () => {
|
||||
const response = await request(app.getHttpServer())
|
||||
.post("/v1/objects/delete")
|
||||
.set("Authorization", `Bearer ${TEST_TOKEN}`)
|
||||
.set("Authorization", `Bearer ${TEST_SYNC_TOKEN}`)
|
||||
.send({
|
||||
key: "test/to-delete.txt",
|
||||
tombstoneKey: "tombstones/test/to-delete.json",
|
||||
@@ -176,7 +169,7 @@ describe("SyncController (e2e)", () => {
|
||||
it("should complete full upload/download cycle with presigned URLs", async () => {
|
||||
const uploadResponse = await request(app.getHttpServer())
|
||||
.post("/v1/objects/presign-upload")
|
||||
.set("Authorization", `Bearer ${TEST_TOKEN}`)
|
||||
.set("Authorization", `Bearer ${TEST_SYNC_TOKEN}`)
|
||||
.send({ key: testKey, contentType: "text/plain" })
|
||||
.expect(200);
|
||||
|
||||
@@ -192,7 +185,7 @@ describe("SyncController (e2e)", () => {
|
||||
|
||||
const statResponse = await request(app.getHttpServer())
|
||||
.post("/v1/objects/stat")
|
||||
.set("Authorization", `Bearer ${TEST_TOKEN}`)
|
||||
.set("Authorization", `Bearer ${TEST_SYNC_TOKEN}`)
|
||||
.send({ key: testKey })
|
||||
.expect(200);
|
||||
|
||||
@@ -202,7 +195,7 @@ describe("SyncController (e2e)", () => {
|
||||
|
||||
const downloadResponse = await request(app.getHttpServer())
|
||||
.post("/v1/objects/presign-download")
|
||||
.set("Authorization", `Bearer ${TEST_TOKEN}`)
|
||||
.set("Authorization", `Bearer ${TEST_SYNC_TOKEN}`)
|
||||
.send({ key: testKey })
|
||||
.expect(200);
|
||||
|
||||
@@ -215,13 +208,13 @@ describe("SyncController (e2e)", () => {
|
||||
|
||||
await request(app.getHttpServer())
|
||||
.post("/v1/objects/delete")
|
||||
.set("Authorization", `Bearer ${TEST_TOKEN}`)
|
||||
.set("Authorization", `Bearer ${TEST_SYNC_TOKEN}`)
|
||||
.send({ key: testKey })
|
||||
.expect(200);
|
||||
|
||||
const finalStatResponse = await request(app.getHttpServer())
|
||||
.post("/v1/objects/stat")
|
||||
.set("Authorization", `Bearer ${TEST_TOKEN}`)
|
||||
.set("Authorization", `Bearer ${TEST_SYNC_TOKEN}`)
|
||||
.send({ key: testKey })
|
||||
.expect(200);
|
||||
|
||||
@@ -238,20 +231,28 @@ describe("SyncController (e2e)", () => {
|
||||
});
|
||||
|
||||
it("should return SSE stream with valid token", async () => {
|
||||
const response = await request(app.getHttpServer())
|
||||
.get("/v1/objects/subscribe")
|
||||
.set("Authorization", `Bearer ${TEST_TOKEN}`)
|
||||
.set("Accept", "text/event-stream")
|
||||
.buffer(true)
|
||||
.timeout(3000)
|
||||
.catch((err: SSEError) => {
|
||||
if (err.code === "ECONNABORTED" || err.timeout) {
|
||||
return err.response ?? { status: 200 };
|
||||
}
|
||||
throw err;
|
||||
});
|
||||
const address = (
|
||||
app.getHttpServer() as Server
|
||||
).address() as AddressInfo | null;
|
||||
if (!address || typeof address === "string") {
|
||||
throw new Error("Expected app to be listening on a TCP port");
|
||||
}
|
||||
|
||||
const response = await fetch(
|
||||
`http://127.0.0.1:${address.port}/v1/objects/subscribe`,
|
||||
{
|
||||
headers: {
|
||||
Accept: "text/event-stream",
|
||||
Authorization: `Bearer ${TEST_SYNC_TOKEN}`,
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.headers.get("content-type")).toContain(
|
||||
"text/event-stream",
|
||||
);
|
||||
await response.body?.cancel();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -0,0 +1,37 @@
|
||||
import { ListBucketsCommand, S3Client } from "@aws-sdk/client-s3";
|
||||
|
||||
export const TEST_SYNC_TOKEN = "test-sync-token";
|
||||
export const TEST_S3_ENDPOINT = "http://127.0.0.1:8987";
|
||||
|
||||
export function configureTestEnv() {
|
||||
process.env.SYNC_TOKEN ||= TEST_SYNC_TOKEN;
|
||||
process.env.S3_ENDPOINT ||= TEST_S3_ENDPOINT;
|
||||
process.env.S3_ACCESS_KEY_ID ||= "minioadmin";
|
||||
process.env.S3_SECRET_ACCESS_KEY ||= "minioadmin";
|
||||
process.env.S3_BUCKET ||= "donut-sync-test";
|
||||
process.env.S3_FORCE_PATH_STYLE ||= "true";
|
||||
}
|
||||
|
||||
export async function waitForTestS3(timeoutMs = 30_000) {
|
||||
const deadline = Date.now() + timeoutMs;
|
||||
const s3Client = new S3Client({
|
||||
endpoint: TEST_S3_ENDPOINT,
|
||||
region: "us-east-1",
|
||||
credentials: {
|
||||
accessKeyId: "minioadmin",
|
||||
secretAccessKey: "minioadmin",
|
||||
},
|
||||
forcePathStyle: true,
|
||||
});
|
||||
|
||||
while (Date.now() < deadline) {
|
||||
try {
|
||||
await s3Client.send(new ListBucketsCommand({}));
|
||||
return;
|
||||
} catch {}
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 500));
|
||||
}
|
||||
|
||||
throw new Error(`Timed out waiting for S3 at ${TEST_S3_ENDPOINT}`);
|
||||
}
|
||||
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"extends": "../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"rootDir": ".."
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,7 @@
|
||||
{
|
||||
"extends": "./tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"rootDir": "./src"
|
||||
},
|
||||
"exclude": ["node_modules", "test", "dist", "**/*spec.ts"]
|
||||
}
|
||||
|
||||
@@ -13,10 +13,11 @@
|
||||
"target": "ES2023",
|
||||
"sourceMap": true,
|
||||
"outDir": "./dist",
|
||||
"baseUrl": "./",
|
||||
"incremental": true,
|
||||
"skipLibCheck": true,
|
||||
"strictNullChecks": true,
|
||||
"strictPropertyInitialization": false,
|
||||
"types": ["jest", "node"],
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"noImplicitAny": false,
|
||||
"strictBindCallApply": false,
|
||||
|
||||
@@ -37,28 +37,7 @@
|
||||
"root": {
|
||||
"inputs": {
|
||||
"flake-utils": "flake-utils",
|
||||
"nixpkgs": "nixpkgs",
|
||||
"rust-overlay": "rust-overlay"
|
||||
}
|
||||
},
|
||||
"rust-overlay": {
|
||||
"inputs": {
|
||||
"nixpkgs": [
|
||||
"nixpkgs"
|
||||
]
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1767926800,
|
||||
"narHash": "sha256-x0n73J6ufD/EhDlVdcoAmF0OQHZ+b0a2cKDc8RZyt+o=",
|
||||
"owner": "oxalica",
|
||||
"repo": "rust-overlay",
|
||||
"rev": "499e9eed88ff9494b6604205b42847e847dfeb91",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "oxalica",
|
||||
"repo": "rust-overlay",
|
||||
"type": "github"
|
||||
"nixpkgs": "nixpkgs"
|
||||
}
|
||||
},
|
||||
"systems": {
|
||||
|
||||
@@ -1,66 +1,341 @@
|
||||
{
|
||||
description = "Donut Browser Development Environment";
|
||||
description = "Donut Browser development environment and quick-start commands";
|
||||
|
||||
inputs = {
|
||||
nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
|
||||
flake-utils.url = "github:numtide/flake-utils";
|
||||
rust-overlay = {
|
||||
url = "github:oxalica/rust-overlay";
|
||||
inputs.nixpkgs.follows = "nixpkgs";
|
||||
};
|
||||
};
|
||||
|
||||
outputs = { self, nixpkgs, flake-utils, rust-overlay, ... }:
|
||||
outputs = { self, nixpkgs, flake-utils, ... }:
|
||||
flake-utils.lib.eachDefaultSystem (system:
|
||||
let
|
||||
overlays = [ (import rust-overlay) ];
|
||||
pkgs = import nixpkgs {
|
||||
inherit system overlays;
|
||||
inherit system;
|
||||
config.allowUnfree = true;
|
||||
};
|
||||
lib = pkgs.lib;
|
||||
|
||||
# Rust toolchain
|
||||
rustToolchain = pkgs.rust-bin.stable.latest.default.override {
|
||||
extensions = [ "rust-src" "rust-analyzer" "clippy" "rustfmt" ];
|
||||
};
|
||||
nodejs =
|
||||
if pkgs ? nodejs_23 then
|
||||
pkgs.nodejs_23
|
||||
else
|
||||
pkgs.nodejs_22;
|
||||
|
||||
# System dependencies for Tauri on Linux
|
||||
libraries = with pkgs; [
|
||||
rustPackages = with pkgs; [
|
||||
cargo
|
||||
clippy
|
||||
rust-analyzer
|
||||
rustc
|
||||
rustfmt
|
||||
];
|
||||
|
||||
commonLibs = with pkgs; [
|
||||
webkitgtk_4_1
|
||||
libsoup_3
|
||||
glib
|
||||
gtk3
|
||||
cairo
|
||||
gdk-pixbuf
|
||||
glib
|
||||
pango
|
||||
atk
|
||||
at-spi2-atk
|
||||
at-spi2-core
|
||||
dbus
|
||||
librsvg
|
||||
libsoup_3
|
||||
nss
|
||||
nspr
|
||||
libdrm
|
||||
libgbm
|
||||
libxkbcommon
|
||||
libx11
|
||||
libxcomposite
|
||||
libxdamage
|
||||
libxext
|
||||
libxfixes
|
||||
libxrandr
|
||||
libxcb
|
||||
libxshmfence
|
||||
libxtst
|
||||
libxi
|
||||
xdotool
|
||||
libxrender
|
||||
libxinerama
|
||||
libxcursor
|
||||
libxscrnsaver
|
||||
fontconfig
|
||||
freetype
|
||||
fribidi
|
||||
harfbuzz
|
||||
expat
|
||||
libglvnd
|
||||
libgpg-error
|
||||
e2fsprogs
|
||||
gmp
|
||||
zlib
|
||||
stdenv.cc.cc.lib
|
||||
];
|
||||
|
||||
packages = with pkgs; [
|
||||
rustToolchain
|
||||
nodejs_22
|
||||
pnpm
|
||||
pkg-config
|
||||
cargo-tauri
|
||||
openssl
|
||||
# App specific tools
|
||||
biome
|
||||
] ++ libraries;
|
||||
runtimeLibPath = lib.makeLibraryPath commonLibs;
|
||||
nixLd = pkgs.stdenv.cc.bintools.dynamicLinker;
|
||||
pkgConfigLibs = [
|
||||
pkgs.at-spi2-atk
|
||||
pkgs.at-spi2-core
|
||||
pkgs.cairo
|
||||
pkgs.dbus
|
||||
pkgs.gdk-pixbuf
|
||||
pkgs.glib
|
||||
pkgs.gtk3
|
||||
pkgs.libsoup_3
|
||||
pkgs.libxkbcommon
|
||||
pkgs.openssl
|
||||
pkgs.pango
|
||||
pkgs.harfbuzz
|
||||
pkgs.webkitgtk_4_1
|
||||
];
|
||||
pkgConfigPath = lib.makeSearchPath "lib/pkgconfig" (
|
||||
pkgConfigLibs ++ map lib.getDev pkgConfigLibs
|
||||
);
|
||||
releaseVersion = "0.20.1";
|
||||
releaseAppImage =
|
||||
if system == "x86_64-linux" then
|
||||
pkgs.fetchurl {
|
||||
url = "https://github.com/zhom/donutbrowser/releases/download/v0.20.1/Donut_0.20.1_amd64.AppImage";
|
||||
hash = "sha256-3uHIXtSVoS/sLY9EduuvgWjefT5rb9G/4Vzb95B0CPU=";
|
||||
}
|
||||
else if system == "aarch64-linux" then
|
||||
pkgs.fetchurl {
|
||||
url = "https://github.com/zhom/donutbrowser/releases/download/v0.20.1/Donut_0.20.1_aarch64.AppImage";
|
||||
hash = "sha256-fIduBF/eEX7liYnjWxrc+zJ1zharcHnlBq2ydUZu1r4=";
|
||||
}
|
||||
else
|
||||
null;
|
||||
releaseUnpacked =
|
||||
if releaseAppImage != null then
|
||||
pkgs.stdenvNoCC.mkDerivation {
|
||||
pname = "donut-release-unpacked";
|
||||
version = releaseVersion;
|
||||
src = releaseAppImage;
|
||||
dontUnpack = true;
|
||||
nativeBuildInputs = [ pkgs.xz ];
|
||||
installPhase = ''
|
||||
runHook preInstall
|
||||
|
||||
cp "$src" ./donut.AppImage
|
||||
chmod +x ./donut.AppImage
|
||||
./donut.AppImage --appimage-extract >/dev/null
|
||||
|
||||
mkdir -p "$out"
|
||||
cp -a ./squashfs-root "$out/"
|
||||
|
||||
runHook postInstall
|
||||
'';
|
||||
}
|
||||
else
|
||||
null;
|
||||
releaseWrapped =
|
||||
if releaseAppImage != null then
|
||||
pkgs.appimageTools.wrapType2 {
|
||||
pname = "donut";
|
||||
version = releaseVersion;
|
||||
src = releaseAppImage;
|
||||
extraPkgs = _: commonLibs;
|
||||
extraInstallCommands = ''
|
||||
for bin in "$out"/bin/*; do
|
||||
if [ -f "$bin" ]; then
|
||||
mv "$bin" "$out/bin/donut-release"
|
||||
break
|
||||
fi
|
||||
done
|
||||
'';
|
||||
}
|
||||
else
|
||||
null;
|
||||
releaseLauncher =
|
||||
if releaseUnpacked != null then
|
||||
pkgs.writeShellApplication {
|
||||
name = "donut-release-start";
|
||||
runtimeInputs = with pkgs; [
|
||||
coreutils
|
||||
xdg-utils
|
||||
];
|
||||
text = ''
|
||||
set -euo pipefail
|
||||
|
||||
if [ -x "${releaseWrapped}/bin/donut-release" ]; then
|
||||
if "${releaseWrapped}/bin/donut-release" "$@"; then
|
||||
exit 0
|
||||
fi
|
||||
echo "Wrapped AppImage failed, retrying with direct AppRun..." >&2
|
||||
fi
|
||||
|
||||
export LD_LIBRARY_PATH="${releaseUnpacked}/squashfs-root/usr/lib:${releaseUnpacked}/squashfs-root/usr/lib64:${runtimeLibPath}:''${LD_LIBRARY_PATH:-}"
|
||||
export NIX_LD_LIBRARY_PATH="$LD_LIBRARY_PATH"
|
||||
export LIBRARY_PATH="$LD_LIBRARY_PATH"
|
||||
export XDG_DATA_DIRS="${releaseUnpacked}/squashfs-root/usr/share:''${XDG_DATA_DIRS:-}"
|
||||
exec "${releaseUnpacked}/squashfs-root/AppRun" "$@"
|
||||
'';
|
||||
}
|
||||
else
|
||||
pkgs.writeShellApplication {
|
||||
name = "donut-release-start";
|
||||
text = ''
|
||||
echo "Release launcher is supported only on Linux (x86_64/aarch64)."
|
||||
exit 1
|
||||
'';
|
||||
};
|
||||
|
||||
mkApp = name: text:
|
||||
let
|
||||
app = pkgs.writeShellApplication {
|
||||
inherit name;
|
||||
runtimeInputs = with pkgs; [
|
||||
bash
|
||||
coreutils
|
||||
findutils
|
||||
git
|
||||
gnugrep
|
||||
gnused
|
||||
curl
|
||||
gcc
|
||||
pkg-config
|
||||
openssl
|
||||
cargo
|
||||
clippy
|
||||
rustc
|
||||
rustfmt
|
||||
nodejs
|
||||
pnpm
|
||||
cargo-tauri
|
||||
];
|
||||
text = ''
|
||||
export NODE_ENV=development
|
||||
export NIX_LD="${nixLd}"
|
||||
export NIX_LD_LIBRARY_PATH="${runtimeLibPath}:''${NIX_LD_LIBRARY_PATH:-}"
|
||||
export LD_LIBRARY_PATH="${runtimeLibPath}:''${LD_LIBRARY_PATH:-}"
|
||||
export LIBRARY_PATH="${runtimeLibPath}:''${LIBRARY_PATH:-}"
|
||||
export PKG_CONFIG_PATH="${pkgConfigPath}:''${PKG_CONFIG_PATH:-}"
|
||||
export RUST_SRC_PATH="${pkgs.rustPlatform.rustLibSrc}"
|
||||
${text}
|
||||
'';
|
||||
};
|
||||
in
|
||||
{
|
||||
type = "app";
|
||||
program = "${app}/bin/${name}";
|
||||
};
|
||||
in
|
||||
{
|
||||
devShells.default = pkgs.mkShell {
|
||||
buildInputs = packages;
|
||||
packages = with pkgs; [
|
||||
nodejs
|
||||
pnpm
|
||||
cargo-tauri
|
||||
pkg-config
|
||||
openssl
|
||||
git
|
||||
bashInteractive
|
||||
gnumake
|
||||
clang
|
||||
llvmPackages.bintools
|
||||
python3
|
||||
curl
|
||||
wget
|
||||
unzip
|
||||
zip
|
||||
xz
|
||||
biome
|
||||
docker
|
||||
] ++ rustPackages ++ commonLibs;
|
||||
|
||||
shellHook = ''
|
||||
export LD_LIBRARY_PATH=${pkgs.lib.makeLibraryPath libraries}:$LD_LIBRARY_PATH
|
||||
export XDG_DATA_DIRS=${pkgs.gsettings-desktop-schemas}/share/gsettings-schemas/${pkgs.gsettings-desktop-schemas.name}:${pkgs.gtk3}/share/gsettings-schemas/${pkgs.gtk3.name}:$XDG_DATA_DIRS
|
||||
|
||||
echo "🍩 Donut Browser Dev Environment Loaded!"
|
||||
echo "Node: $(node --version)"
|
||||
echo "Rust: $(rustc --version)"
|
||||
echo "Tauri CLI: $(cargo-tauri --version)"
|
||||
export NODE_ENV=development
|
||||
export NIX_LD="${nixLd}"
|
||||
export NIX_LD_LIBRARY_PATH="${runtimeLibPath}:''${NIX_LD_LIBRARY_PATH:-}"
|
||||
export LD_LIBRARY_PATH="${runtimeLibPath}:''${LD_LIBRARY_PATH:-}"
|
||||
export LIBRARY_PATH="${runtimeLibPath}:''${LIBRARY_PATH:-}"
|
||||
export PKG_CONFIG_PATH="${pkgConfigPath}:''${PKG_CONFIG_PATH:-}"
|
||||
export RUST_SRC_PATH="${pkgs.rustPlatform.rustLibSrc}"
|
||||
export XDG_DATA_DIRS="${pkgs.gsettings-desktop-schemas}/share:${pkgs.gtk3}/share:''${XDG_DATA_DIRS:-}"
|
||||
|
||||
echo "Donut Browser dev shell ready."
|
||||
echo "Quick start:"
|
||||
echo " nix run .#setup"
|
||||
echo " nix run .#tauri-dev"
|
||||
echo " nix run .#full-dev"
|
||||
echo " nix run .#build"
|
||||
echo " nix run .#test"
|
||||
echo " nix run .#release-start"
|
||||
'';
|
||||
};
|
||||
}
|
||||
);
|
||||
|
||||
apps.info = mkApp "donut-info" ''
|
||||
set -euo pipefail
|
||||
echo "Node: $(node --version)"
|
||||
echo "pnpm: $(pnpm --version)"
|
||||
echo "Rust: $(rustc --version)"
|
||||
echo "Cargo: $(cargo --version)"
|
||||
echo "Tauri CLI: $(cargo-tauri --version)"
|
||||
'';
|
||||
|
||||
apps.deps = mkApp "donut-deps" ''
|
||||
set -euo pipefail
|
||||
pnpm install
|
||||
'';
|
||||
|
||||
apps.dev = mkApp "donut-dev" ''
|
||||
set -euo pipefail
|
||||
pnpm dev
|
||||
'';
|
||||
|
||||
apps."tauri-dev" = mkApp "donut-tauri-dev" ''
|
||||
set -euo pipefail
|
||||
pnpm tauri dev
|
||||
'';
|
||||
|
||||
apps."full-dev" = mkApp "donut-full-dev" ''
|
||||
set -euo pipefail
|
||||
chmod +x ./scripts/dev.sh
|
||||
./scripts/dev.sh
|
||||
'';
|
||||
|
||||
apps.build = mkApp "donut-build" ''
|
||||
set -euo pipefail
|
||||
pnpm build
|
||||
(cd src-tauri && cargo build)
|
||||
'';
|
||||
|
||||
apps.start = mkApp "donut-start" ''
|
||||
set -euo pipefail
|
||||
pnpm start
|
||||
'';
|
||||
|
||||
apps.test = mkApp "donut-test" ''
|
||||
set -euo pipefail
|
||||
pnpm format && pnpm lint && pnpm test
|
||||
'';
|
||||
|
||||
apps.setup = mkApp "donut-setup" ''
|
||||
set -euo pipefail
|
||||
|
||||
if [ ! -f "package.json" ]; then
|
||||
echo "package.json not found. Run this from the donutbrowser repo root."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
pnpm install
|
||||
pnpm copy-proxy-binary
|
||||
|
||||
echo "Setup complete."
|
||||
echo "Run the app with:"
|
||||
echo " nix run .#tauri-dev"
|
||||
echo "Or run full local stack (sync + minio + tauri):"
|
||||
echo " nix run .#full-dev"
|
||||
'';
|
||||
|
||||
apps."release-start" = {
|
||||
type = "app";
|
||||
program = "${releaseLauncher}/bin/donut-release-start";
|
||||
};
|
||||
|
||||
apps.default = self.apps.${system}.setup;
|
||||
});
|
||||
}
|
||||
|
||||
@@ -2,19 +2,21 @@
|
||||
"name": "donutbrowser",
|
||||
"private": true,
|
||||
"license": "AGPL-3.0",
|
||||
"version": "0.14.2",
|
||||
"version": "0.20.2",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "next dev --turbopack -p 12341",
|
||||
"build": "next build",
|
||||
"start": "next start",
|
||||
"test": "pnpm test:rust:unit && pnpm test:sync-e2e",
|
||||
"test": "pnpm test:rust:unit && pnpm test:openvpn-e2e && pnpm test:sync-e2e",
|
||||
"test:openvpn-e2e": "node scripts/openvpn-test-harness.mjs",
|
||||
"test:rust": "cd src-tauri && cargo test",
|
||||
"test:rust:unit": "cd src-tauri && cargo test --lib && cargo test --test donut_proxy_integration",
|
||||
"test:rust:unit": "cd src-tauri && cargo test --lib && cargo test --test donut_proxy_integration && cargo test --test vpn_integration",
|
||||
"test:sync-e2e": "node scripts/sync-test-harness.mjs",
|
||||
"lint": "pnpm lint:js && pnpm lint:rust",
|
||||
"lint": "pnpm lint:js && pnpm lint:rust && pnpm lint:spell",
|
||||
"lint:js": "biome check src/ && tsc --noEmit && cd donut-sync && biome check src/ && tsc --noEmit",
|
||||
"lint:rust": "cd src-tauri && cargo clippy --all-targets --all-features -- -D warnings -D clippy::all && cargo fmt --all",
|
||||
"lint:spell": "typos .",
|
||||
"tauri": "tauri",
|
||||
"shadcn:add": "pnpm dlx shadcn@latest add",
|
||||
"prepare": "husky && husky install",
|
||||
@@ -44,58 +46,67 @@
|
||||
"@radix-ui/react-tabs": "^1.1.13",
|
||||
"@radix-ui/react-tooltip": "^1.2.8",
|
||||
"@tanstack/react-table": "^8.21.3",
|
||||
"@tauri-apps/api": "~2.9.0",
|
||||
"@tauri-apps/api": "~2.10.1",
|
||||
"@tauri-apps/plugin-deep-link": "^2.4.7",
|
||||
"@tauri-apps/plugin-dialog": "^2.6.0",
|
||||
"@tauri-apps/plugin-fs": "~2.4.5",
|
||||
"@tauri-apps/plugin-log": "^2.8.0",
|
||||
"@tauri-apps/plugin-opener": "^2.5.3",
|
||||
"ahooks": "^3.9.6",
|
||||
"ahooks": "^3.9.7",
|
||||
"class-variance-authority": "^0.7.1",
|
||||
"clsx": "^2.1.1",
|
||||
"cmdk": "^1.1.1",
|
||||
"color": "^5.0.3",
|
||||
"flag-icons": "^7.5.0",
|
||||
"i18next": "^25.8.7",
|
||||
"lucide-react": "^0.564.0",
|
||||
"motion": "^12.34.0",
|
||||
"next": "^16.1.6",
|
||||
"i18next": "^26.0.3",
|
||||
"lucide-react": "^1.7.0",
|
||||
"motion": "^12.38.0",
|
||||
"next": "^16.2.2",
|
||||
"next-themes": "^0.4.6",
|
||||
"radix-ui": "^1.4.3",
|
||||
"react": "^19.2.4",
|
||||
"react-dom": "^19.2.4",
|
||||
"react-i18next": "^16.5.4",
|
||||
"react-icons": "^5.5.0",
|
||||
"recharts": "3.7.0",
|
||||
"react-i18next": "^17.0.2",
|
||||
"react-icons": "^5.6.0",
|
||||
"recharts": "3.8.1",
|
||||
"sonner": "^2.0.7",
|
||||
"tailwind-merge": "^3.4.0",
|
||||
"tailwind-merge": "^3.5.0",
|
||||
"tauri-plugin-macos-permissions-api": "^2.3.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@biomejs/biome": "2.3.15",
|
||||
"@tailwindcss/postcss": "^4.1.18",
|
||||
"@tauri-apps/cli": "~2.9.0",
|
||||
"@types/color": "^4.2.0",
|
||||
"@types/node": "^25.2.3",
|
||||
"@biomejs/biome": "2.4.10",
|
||||
"@tailwindcss/postcss": "^4.2.2",
|
||||
"@tauri-apps/cli": "~2.10.1",
|
||||
"@types/color": "^4.2.1",
|
||||
"@types/node": "^25.5.2",
|
||||
"@types/react": "^19.2.14",
|
||||
"@types/react-dom": "^19.2.3",
|
||||
"@vitejs/plugin-react": "^5.1.4",
|
||||
"@vitejs/plugin-react": "^6.0.1",
|
||||
"husky": "^9.1.7",
|
||||
"lint-staged": "^16.2.7",
|
||||
"tailwindcss": "^4.1.18",
|
||||
"lint-staged": "^16.4.0",
|
||||
"tailwindcss": "^4.2.2",
|
||||
"ts-unused-exports": "^11.0.1",
|
||||
"tw-animate-css": "^1.4.0",
|
||||
"typescript": "~5.9.3"
|
||||
"typescript": "~6.0.2"
|
||||
},
|
||||
"packageManager": "pnpm@10.29.3",
|
||||
"pnpm": {
|
||||
"overrides": {
|
||||
"picomatch@>=4.0.0 <4.0.4": ">=4.0.4",
|
||||
"path-to-regexp@>=8.0.0 <8.4.0": ">=8.4.0"
|
||||
}
|
||||
},
|
||||
"packageManager": "pnpm@10.30.1",
|
||||
"lint-staged": {
|
||||
"**/*.{js,jsx,ts,tsx,json,css}": [
|
||||
"biome check --fix"
|
||||
],
|
||||
"src-tauri/**/*.rs": [
|
||||
"cd src-tauri && cargo fmt --all",
|
||||
"cd src-tauri && cargo clippy --all-targets --all-features -- -D warnings -D clippy::all",
|
||||
"cd src-tauri && cargo test"
|
||||
"bash -c 'cd src-tauri && cargo fmt --all'",
|
||||
"bash -c 'cd src-tauri && cargo clippy --all-targets --all-features -- -D warnings -D clippy::all'",
|
||||
"bash -c 'cd src-tauri && cargo test --lib'"
|
||||
],
|
||||
"**/*.{rs,ts,tsx,js,jsx,md}": [
|
||||
"typos"
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -81,7 +81,7 @@ echo -e "${YELLOW}Waiting for MinIO to be healthy...${NC}"
|
||||
MAX_RETRIES=30
|
||||
RETRY_COUNT=0
|
||||
while [ $RETRY_COUNT -lt $MAX_RETRIES ]; do
|
||||
if curl -sf http://localhost:8987/minio/health/live > /dev/null 2>&1; then
|
||||
if curl -sf http://127.0.0.1:8987/minio/health/live > /dev/null 2>&1; then
|
||||
echo -e "${GREEN}MinIO is ready!${NC}"
|
||||
break
|
||||
fi
|
||||
|
||||
@@ -0,0 +1,161 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* OpenVPN E2E Test Harness
|
||||
*
|
||||
* This script:
|
||||
* 1. Skips unless explicitly enabled via DONUTBROWSER_RUN_OPENVPN_E2E=1
|
||||
* 2. Builds the Rust vpn_integration test binary without running it
|
||||
* 3. Runs the OpenVPN e2e test binary under sudo
|
||||
*
|
||||
* Usage: DONUTBROWSER_RUN_OPENVPN_E2E=1 node scripts/openvpn-test-harness.mjs
|
||||
*/
|
||||
|
||||
import { spawn } from "child_process";
|
||||
import path from "path";
|
||||
import { fileURLToPath } from "url";
|
||||
|
||||
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
||||
const ROOT_DIR = path.resolve(__dirname, "..");
|
||||
const SRC_TAURI_DIR = path.join(ROOT_DIR, "src-tauri");
|
||||
const TEST_NAME = "test_openvpn_traffic_flows_through_donut_proxy";
|
||||
|
||||
function log(message) {
|
||||
console.log(`[openvpn-harness] ${message}`);
|
||||
}
|
||||
|
||||
function error(message) {
|
||||
console.error(`[openvpn-harness] ERROR: ${message}`);
|
||||
}
|
||||
|
||||
function shouldRun() {
|
||||
if (process.env.DONUTBROWSER_RUN_OPENVPN_E2E !== "1") {
|
||||
log("Skipping OpenVPN e2e test because DONUTBROWSER_RUN_OPENVPN_E2E is not set");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (process.platform !== "linux") {
|
||||
log(`Skipping OpenVPN e2e test on unsupported platform: ${process.platform}`);
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
async function buildTestBinary() {
|
||||
log("Building OpenVPN e2e test binary...");
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
let executablePath = "";
|
||||
let stdoutBuffer = "";
|
||||
|
||||
const proc = spawn(
|
||||
"cargo",
|
||||
[
|
||||
"test",
|
||||
"--test",
|
||||
"vpn_integration",
|
||||
TEST_NAME,
|
||||
"--no-run",
|
||||
"--message-format=json",
|
||||
],
|
||||
{
|
||||
cwd: SRC_TAURI_DIR,
|
||||
env: process.env,
|
||||
stdio: ["ignore", "pipe", "pipe"],
|
||||
}
|
||||
);
|
||||
|
||||
const parseBuffer = (flush = false) => {
|
||||
const lines = stdoutBuffer.split("\n");
|
||||
const completeLines = flush ? lines : lines.slice(0, -1);
|
||||
stdoutBuffer = flush ? "" : lines.at(-1) ?? "";
|
||||
|
||||
for (const line of completeLines.filter(Boolean)) {
|
||||
try {
|
||||
const message = JSON.parse(line);
|
||||
if (message.reason === "compiler-artifact" && message.executable) {
|
||||
executablePath = message.executable;
|
||||
}
|
||||
} catch {
|
||||
// Ignore non-JSON lines.
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
proc.stdout.on("data", (data) => {
|
||||
stdoutBuffer += data.toString();
|
||||
parseBuffer();
|
||||
});
|
||||
|
||||
proc.stderr.on("data", (data) => {
|
||||
process.stderr.write(data);
|
||||
});
|
||||
|
||||
proc.on("error", (err) => {
|
||||
reject(err);
|
||||
});
|
||||
|
||||
proc.on("close", (code) => {
|
||||
parseBuffer(true);
|
||||
|
||||
if (code !== 0) {
|
||||
reject(new Error(`cargo test --no-run exited with code ${code}`));
|
||||
return;
|
||||
}
|
||||
|
||||
if (!executablePath) {
|
||||
reject(new Error("Could not determine the vpn_integration test binary path"));
|
||||
return;
|
||||
}
|
||||
|
||||
resolve(path.isAbsolute(executablePath) ? executablePath : path.resolve(SRC_TAURI_DIR, executablePath));
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
async function runOpenVpnE2e(executablePath) {
|
||||
log("Running OpenVPN e2e test under sudo...");
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const proc = spawn(
|
||||
"sudo",
|
||||
[
|
||||
"--preserve-env=CI,GITHUB_ACTIONS,VPN_TEST_OVPN_HOST,VPN_TEST_OVPN_PORT,DONUTBROWSER_RUN_OPENVPN_E2E",
|
||||
executablePath,
|
||||
TEST_NAME,
|
||||
"--exact",
|
||||
"--nocapture",
|
||||
],
|
||||
{
|
||||
cwd: SRC_TAURI_DIR,
|
||||
env: process.env,
|
||||
stdio: "inherit",
|
||||
}
|
||||
);
|
||||
|
||||
proc.on("error", (err) => {
|
||||
reject(err);
|
||||
});
|
||||
|
||||
proc.on("close", (code) => {
|
||||
resolve(code ?? 1);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
async function main() {
|
||||
if (!shouldRun()) {
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
try {
|
||||
const executablePath = await buildTestBinary();
|
||||
const exitCode = await runOpenVpnE2e(executablePath);
|
||||
process.exit(exitCode);
|
||||
} catch (err) {
|
||||
error(err instanceof Error ? err.message : String(err));
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
main();
|
||||
@@ -0,0 +1,240 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
REPO_ROOT="$(cd "$(dirname "$0")/.." && pwd)"
|
||||
WORK_DIR="$(mktemp -d)"
|
||||
trap 'rm -rf "$WORK_DIR"' EXIT
|
||||
|
||||
GITHUB_REPO="zhom/donutbrowser"
|
||||
|
||||
# Load .env if running locally
|
||||
if [[ -f "$REPO_ROOT/.env" ]]; then
|
||||
set -a
|
||||
# shellcheck disable=SC1091
|
||||
source "$REPO_ROOT/.env"
|
||||
set +a
|
||||
fi
|
||||
|
||||
# Validate required env vars
|
||||
for var in R2_ACCESS_KEY_ID R2_SECRET_ACCESS_KEY R2_ENDPOINT_URL R2_BUCKET_NAME; do
|
||||
if [[ -z "${!var:-}" ]]; then
|
||||
echo "Error: $var is not set. Configure it in .env or export it."
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
# Export for AWS CLI
|
||||
export AWS_ACCESS_KEY_ID="$R2_ACCESS_KEY_ID"
|
||||
export AWS_SECRET_ACCESS_KEY="$R2_SECRET_ACCESS_KEY"
|
||||
export AWS_DEFAULT_REGION="auto"
|
||||
# aws-cli v2.23+ sends integrity checksums by default; R2 rejects them
|
||||
# with `Unauthorized` on ListObjectsV2. Disable.
|
||||
export AWS_REQUEST_CHECKSUM_CALCULATION="WHEN_REQUIRED"
|
||||
export AWS_RESPONSE_CHECKSUM_VALIDATION="WHEN_REQUIRED"
|
||||
|
||||
# Ensure endpoint URL has https:// prefix
|
||||
R2_ENDPOINT="$R2_ENDPOINT_URL"
|
||||
if [[ "$R2_ENDPOINT" != https://* ]]; then
|
||||
R2_ENDPOINT="https://$R2_ENDPOINT"
|
||||
fi
|
||||
|
||||
# Determine version tag
|
||||
if [[ $# -ge 1 ]]; then
|
||||
TAG="$1"
|
||||
else
|
||||
echo "Fetching latest release tag..."
|
||||
TAG=$(gh release view --repo "$GITHUB_REPO" --json tagName -q .tagName)
|
||||
echo "Latest release: $TAG"
|
||||
fi
|
||||
|
||||
VERSION="${TAG#v}"
|
||||
echo "Publishing repositories for version $VERSION"
|
||||
|
||||
# Check required tools
|
||||
for cmd in aws gh dpkg-scanpackages gzip createrepo_c; do
|
||||
if ! command -v "$cmd" &>/dev/null; then
|
||||
echo "Error: $cmd is not installed."
|
||||
case "$cmd" in
|
||||
dpkg-scanpackages) echo " Install with: sudo apt-get install dpkg-dev" ;;
|
||||
createrepo_c) echo " Install with: sudo apt-get install createrepo-c" ;;
|
||||
aws) echo " Install with: pip install awscli" ;;
|
||||
gh) echo " Install with: https://cli.github.com/" ;;
|
||||
esac
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
PACKAGES_DIR="$WORK_DIR/packages"
|
||||
REPO_DIR="$WORK_DIR/repo"
|
||||
mkdir -p "$PACKAGES_DIR" "$REPO_DIR"
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Download .deb and .rpm from GitHub release
|
||||
# ---------------------------------------------------------------------------
|
||||
echo ""
|
||||
echo "==> Downloading packages from GitHub release $TAG..."
|
||||
gh release download "$TAG" \
|
||||
--repo "$GITHUB_REPO" \
|
||||
--pattern "*.deb" \
|
||||
--dir "$PACKAGES_DIR"
|
||||
gh release download "$TAG" \
|
||||
--repo "$GITHUB_REPO" \
|
||||
--pattern "*.rpm" \
|
||||
--dir "$PACKAGES_DIR"
|
||||
|
||||
echo "Downloaded:"
|
||||
ls -lh "$PACKAGES_DIR/"
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# DEB repository
|
||||
# ---------------------------------------------------------------------------
|
||||
echo ""
|
||||
echo "==> Building DEB repository..."
|
||||
|
||||
DEB_DIR="$REPO_DIR/deb"
|
||||
mkdir -p "$DEB_DIR/pool/main"
|
||||
mkdir -p "$DEB_DIR/dists/stable/main/binary-amd64"
|
||||
mkdir -p "$DEB_DIR/dists/stable/main/binary-arm64"
|
||||
|
||||
# Pull existing pool from R2 (incremental)
|
||||
echo " Syncing existing DEB pool from R2..."
|
||||
aws s3 sync "s3://${R2_BUCKET_NAME}/deb/pool" "$DEB_DIR/pool" \
|
||||
--endpoint-url "$R2_ENDPOINT" 2>/dev/null || true
|
||||
|
||||
# Copy new .deb files into pool
|
||||
for deb in "$PACKAGES_DIR"/*.deb; do
|
||||
[[ -f "$deb" ]] || continue
|
||||
cp "$deb" "$DEB_DIR/pool/main/"
|
||||
done
|
||||
|
||||
# Generate Packages and Packages.gz for each arch
|
||||
for arch in amd64 arm64; do
|
||||
echo " Generating Packages for $arch..."
|
||||
BINARY_DIR="$DEB_DIR/dists/stable/main/binary-${arch}"
|
||||
|
||||
# dpkg-scanpackages needs to run from the repo root
|
||||
# and needs paths relative to that root
|
||||
(cd "$DEB_DIR" && dpkg-scanpackages --arch "$arch" pool/main) \
|
||||
> "$BINARY_DIR/Packages"
|
||||
|
||||
gzip -9c "$BINARY_DIR/Packages" > "$BINARY_DIR/Packages.gz"
|
||||
|
||||
echo " $(grep -c '^Package:' "$BINARY_DIR/Packages" 2>/dev/null || echo 0) package(s)"
|
||||
done
|
||||
|
||||
# Generate Release file
|
||||
echo " Generating Release file..."
|
||||
{
|
||||
echo "Origin: Donut Browser"
|
||||
echo "Label: Donut Browser"
|
||||
echo "Suite: stable"
|
||||
echo "Codename: stable"
|
||||
echo "Architectures: amd64 arm64"
|
||||
echo "Components: main"
|
||||
echo "Date: $(date -u '+%a, %d %b %Y %H:%M:%S UTC')"
|
||||
echo "MD5Sum:"
|
||||
for arch in amd64 arm64; do
|
||||
for file in "main/binary-${arch}/Packages" "main/binary-${arch}/Packages.gz"; do
|
||||
filepath="$DEB_DIR/dists/stable/$file"
|
||||
if [[ -f "$filepath" ]]; then
|
||||
size=$(wc -c < "$filepath")
|
||||
md5=$(md5sum "$filepath" | awk '{print $1}')
|
||||
printf " %s %8d %s\n" "$md5" "$size" "$file"
|
||||
fi
|
||||
done
|
||||
done
|
||||
echo "SHA256:"
|
||||
for arch in amd64 arm64; do
|
||||
for file in "main/binary-${arch}/Packages" "main/binary-${arch}/Packages.gz"; do
|
||||
filepath="$DEB_DIR/dists/stable/$file"
|
||||
if [[ -f "$filepath" ]]; then
|
||||
size=$(wc -c < "$filepath")
|
||||
sha256=$(sha256sum "$filepath" | awk '{print $1}')
|
||||
printf " %s %8d %s\n" "$sha256" "$size" "$file"
|
||||
fi
|
||||
done
|
||||
done
|
||||
} > "$DEB_DIR/dists/stable/Release"
|
||||
|
||||
echo " DEB Release file created."
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# RPM repository
|
||||
# ---------------------------------------------------------------------------
|
||||
echo ""
|
||||
echo "==> Building RPM repository..."
|
||||
|
||||
RPM_DIR="$REPO_DIR/rpm"
|
||||
mkdir -p "$RPM_DIR/x86_64"
|
||||
mkdir -p "$RPM_DIR/aarch64"
|
||||
|
||||
# Pull existing RPMs from R2 (incremental)
|
||||
echo " Syncing existing RPM packages from R2..."
|
||||
aws s3 sync "s3://${R2_BUCKET_NAME}/rpm/x86_64" "$RPM_DIR/x86_64" \
|
||||
--endpoint-url "$R2_ENDPOINT" --exclude "repodata/*" 2>/dev/null || true
|
||||
aws s3 sync "s3://${R2_BUCKET_NAME}/rpm/aarch64" "$RPM_DIR/aarch64" \
|
||||
--endpoint-url "$R2_ENDPOINT" --exclude "repodata/*" 2>/dev/null || true
|
||||
|
||||
# Copy new .rpm files into arch directories
|
||||
for rpm in "$PACKAGES_DIR"/*.rpm; do
|
||||
[[ -f "$rpm" ]] || continue
|
||||
filename=$(basename "$rpm")
|
||||
if [[ "$filename" == *x86_64* ]]; then
|
||||
cp "$rpm" "$RPM_DIR/x86_64/"
|
||||
elif [[ "$filename" == *aarch64* ]]; then
|
||||
cp "$rpm" "$RPM_DIR/aarch64/"
|
||||
fi
|
||||
done
|
||||
|
||||
# Generate repodata using createrepo_c
|
||||
# We point createrepo_c at the top-level rpm dir so it indexes all subdirs
|
||||
echo " Generating RPM repodata..."
|
||||
createrepo_c --update "$RPM_DIR"
|
||||
|
||||
echo " RPM repodata created."
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Upload to R2
|
||||
# ---------------------------------------------------------------------------
|
||||
echo ""
|
||||
echo "==> Uploading DEB repository to R2..."
|
||||
aws s3 sync "$DEB_DIR/dists" "s3://${R2_BUCKET_NAME}/deb/dists" \
|
||||
--endpoint-url "$R2_ENDPOINT" --delete
|
||||
aws s3 sync "$DEB_DIR/pool" "s3://${R2_BUCKET_NAME}/deb/pool" \
|
||||
--endpoint-url "$R2_ENDPOINT"
|
||||
|
||||
echo "==> Uploading RPM repository to R2..."
|
||||
aws s3 sync "$RPM_DIR" "s3://${R2_BUCKET_NAME}/rpm" \
|
||||
--endpoint-url "$R2_ENDPOINT"
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Verify
|
||||
# ---------------------------------------------------------------------------
|
||||
echo ""
|
||||
echo "==> Verifying upload..."
|
||||
echo "DEB dists/stable/:"
|
||||
aws s3 ls "s3://${R2_BUCKET_NAME}/deb/dists/stable/" \
|
||||
--endpoint-url "$R2_ENDPOINT" 2>/dev/null || echo " (empty or not accessible)"
|
||||
echo "DEB pool/main/:"
|
||||
aws s3 ls "s3://${R2_BUCKET_NAME}/deb/pool/main/" \
|
||||
--endpoint-url "$R2_ENDPOINT" 2>/dev/null || echo " (empty or not accessible)"
|
||||
echo "RPM repodata/:"
|
||||
aws s3 ls "s3://${R2_BUCKET_NAME}/rpm/repodata/" \
|
||||
--endpoint-url "$R2_ENDPOINT" 2>/dev/null || echo " (empty or not accessible)"
|
||||
|
||||
echo ""
|
||||
echo "Done! Repository published for $TAG"
|
||||
echo ""
|
||||
echo "Users can add the DEB repo with:"
|
||||
echo " echo 'deb [trusted=yes] https://repo.donutbrowser.com/deb stable main' | sudo tee /etc/apt/sources.list.d/donutbrowser.list"
|
||||
echo " sudo apt update && sudo apt install donut"
|
||||
echo ""
|
||||
echo "Users can add the RPM repo with:"
|
||||
echo " sudo tee /etc/yum.repos.d/donutbrowser.repo << 'EOF'"
|
||||
echo " [donutbrowser]"
|
||||
echo " name=Donut Browser"
|
||||
echo " baseurl=https://repo.donutbrowser.com/rpm"
|
||||
echo " enabled=1"
|
||||
echo " gpgcheck=0"
|
||||
echo " EOF"
|
||||
echo " sudo dnf install Donut"
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "donutbrowser"
|
||||
version = "0.14.2"
|
||||
version = "0.20.2"
|
||||
description = "Simple Yet Powerful Anti-Detect Browser"
|
||||
authors = ["zhom@github"]
|
||||
edition = "2021"
|
||||
@@ -30,7 +30,7 @@ path = "src/bin/donut_daemon.rs"
|
||||
|
||||
[build-dependencies]
|
||||
tauri-build = { version = "2", features = [] }
|
||||
resvg = "0.46"
|
||||
resvg = "0.47"
|
||||
|
||||
[dependencies]
|
||||
serde_json = "1"
|
||||
@@ -40,6 +40,7 @@ tauri-plugin-opener = "2"
|
||||
tauri-plugin-fs = "2"
|
||||
tauri-plugin-shell = "2"
|
||||
tauri-plugin-deep-link = "2"
|
||||
tauri-plugin-single-instance = "2"
|
||||
tauri-plugin-dialog = "2"
|
||||
tauri-plugin-macos-permissions = "2"
|
||||
tauri-plugin-log = "2"
|
||||
@@ -56,14 +57,14 @@ base64 = "0.22"
|
||||
libc = "0.2"
|
||||
async-trait = "0.1"
|
||||
futures-util = "0.3"
|
||||
zip = { version = "7", default-features = false, features = ["deflate-flate2"] }
|
||||
zip = { version = "8", default-features = false, features = ["deflate-flate2"] }
|
||||
tar = "0"
|
||||
bzip2 = "0"
|
||||
flate2 = "1"
|
||||
lzma-rs = "0"
|
||||
msi-extract = "0"
|
||||
|
||||
uuid = { version = "1.20", features = ["v4", "serde"] }
|
||||
uuid = { version = "1.23", features = ["v4", "serde"] }
|
||||
url = "2.5"
|
||||
blake3 = "1"
|
||||
globset = "0.4"
|
||||
@@ -71,14 +72,20 @@ mime_guess = "2"
|
||||
once_cell = "1"
|
||||
urlencoding = "2.1"
|
||||
chrono = { version = "0.4", features = ["serde"] }
|
||||
chrono-tz = "0.10"
|
||||
axum = { version = "0.8.8", features = ["ws"] }
|
||||
tower = "0.5"
|
||||
tower-http = { version = "0.6", features = ["cors"] }
|
||||
rand = "0.9.2"
|
||||
rand = "0.10.0"
|
||||
utoipa = { version = "5", features = ["axum_extras", "chrono"] }
|
||||
utoipa-axum = "0.2"
|
||||
argon2 = "0.5"
|
||||
aes-gcm = "0.10"
|
||||
aes = "0.8"
|
||||
cbc = "0.1"
|
||||
pbkdf2 = "0.12"
|
||||
sha1 = "0.10"
|
||||
sha2 = "0.10"
|
||||
hyper = { version = "1.8", features = ["full"] }
|
||||
hyper-util = { version = "0.1", features = ["full"] }
|
||||
http-body-util = "0.1"
|
||||
@@ -89,8 +96,8 @@ async-socks5 = "0.6"
|
||||
playwright = { git = "https://github.com/sctg-development/playwright-rust", branch = "master" }
|
||||
|
||||
# Wayfern CDP integration
|
||||
tokio-tungstenite = { version = "0.28", features = ["native-tls"] }
|
||||
rusqlite = { version = "0.38", features = ["bundled"] }
|
||||
tokio-tungstenite = { version = "0.29", features = ["native-tls"] }
|
||||
rusqlite = { version = "0.39", features = ["bundled"] }
|
||||
serde_yaml = "0.9"
|
||||
thiserror = "2.0"
|
||||
regex-lite = "0.1"
|
||||
@@ -100,20 +107,17 @@ quick-xml = { version = "0.39", features = ["serialize"] }
|
||||
|
||||
# VPN support
|
||||
boringtun = "0.7"
|
||||
smoltcp = { version = "0.13", default-features = false, features = ["std", "medium-ip", "proto-ipv4", "proto-ipv6", "socket-tcp", "socket-udp"] }
|
||||
|
||||
# Daemon dependencies (tray icon)
|
||||
tray-icon = "0.21"
|
||||
tray-icon = "0.22"
|
||||
muda = "0.17"
|
||||
tao = "0.34"
|
||||
single-instance = "0.3"
|
||||
tao = "0.35"
|
||||
image = "0.25"
|
||||
dirs = "6"
|
||||
crossbeam-channel = "0.5"
|
||||
sys-locale = "0.3"
|
||||
|
||||
[target."cfg(any(target_os = \"macos\", windows, target_os = \"linux\"))".dependencies]
|
||||
tauri-plugin-single-instance = { version = "2", features = ["deep-link"] }
|
||||
|
||||
[target.'cfg(unix)'.dependencies]
|
||||
nix = { version = "0.31", features = ["signal", "process"] }
|
||||
|
||||
@@ -123,7 +127,7 @@ objc2 = "0.6.3"
|
||||
objc2-app-kit = { version = "0.3.2", features = ["NSWindow", "NSApplication", "NSRunningApplication"] }
|
||||
|
||||
[target.'cfg(target_os = "windows")'.dependencies]
|
||||
winreg = "0.55"
|
||||
winreg = "0.56"
|
||||
windows = { version = "0.62", features = [
|
||||
"Win32_Foundation",
|
||||
"Win32_System_ProcessStatus",
|
||||
|
||||
@@ -14,6 +14,8 @@
|
||||
<string>Donut</string>
|
||||
<key>CFBundleIdentifier</key>
|
||||
<string>com.donutbrowser</string>
|
||||
<key>CFBundlePackageType</key>
|
||||
<string>APPL</string>
|
||||
<key>CFBundleURLName</key>
|
||||
<string>com.donutbrowser</string>
|
||||
<key>CFBundleExecutable</key>
|
||||
|
||||
@@ -34,6 +34,8 @@
|
||||
"deep-link:allow-get-current",
|
||||
"dialog:default",
|
||||
"dialog:allow-open",
|
||||
"dialog:allow-save",
|
||||
"fs:allow-write-text-file",
|
||||
"macos-permissions:default",
|
||||
"macos-permissions:allow-request-microphone-permission",
|
||||
"macos-permissions:allow-request-camera-permission",
|
||||
|
||||
|
Before Width: | Height: | Size: 745 B After Width: | Height: | Size: 487 B |
|
Before Width: | Height: | Size: 1.7 KiB After Width: | Height: | Size: 1.2 KiB |
|
Before Width: | Height: | Size: 1.7 KiB After Width: | Height: | Size: 1.2 KiB |
@@ -1,4 +1,3 @@
|
||||
use directories::BaseDirs;
|
||||
use reqwest::Client;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
@@ -13,6 +12,7 @@ pub struct VersionComponent {
|
||||
pub major: u32,
|
||||
pub minor: u32,
|
||||
pub patch: u32,
|
||||
pub build: u32,
|
||||
pub pre_release: Option<PreRelease>,
|
||||
}
|
||||
|
||||
@@ -48,6 +48,7 @@ impl VersionComponent {
|
||||
major: 999, // High major version to indicate it's a rolling release
|
||||
minor: 0,
|
||||
patch: 0,
|
||||
build: 0,
|
||||
pre_release: Some(PreRelease {
|
||||
kind: PreReleaseKind::Alpha,
|
||||
number: Some(999), // High number to indicate it's a rolling release
|
||||
@@ -67,6 +68,7 @@ impl VersionComponent {
|
||||
let major = parts.first().copied().unwrap_or(0);
|
||||
let minor = parts.get(1).copied().unwrap_or(0);
|
||||
let patch = parts.get(2).copied().unwrap_or(0);
|
||||
let build = parts.get(3).copied().unwrap_or(0);
|
||||
|
||||
// Parse pre-release part
|
||||
let pre_release = pre_release_part
|
||||
@@ -77,6 +79,7 @@ impl VersionComponent {
|
||||
major,
|
||||
minor,
|
||||
patch,
|
||||
build,
|
||||
pre_release,
|
||||
}
|
||||
}
|
||||
@@ -174,7 +177,12 @@ impl Ord for VersionComponent {
|
||||
match (self_is_twilight, other_is_twilight) {
|
||||
(true, true) => {
|
||||
// Both are twilight, compare by base version
|
||||
return (self.major, self.minor, self.patch).cmp(&(other.major, other.minor, other.patch));
|
||||
return (self.major, self.minor, self.patch, self.build).cmp(&(
|
||||
other.major,
|
||||
other.minor,
|
||||
other.patch,
|
||||
other.build,
|
||||
));
|
||||
}
|
||||
(false, false) => {
|
||||
// Neither is twilight, continue with normal comparison
|
||||
@@ -182,8 +190,13 @@ impl Ord for VersionComponent {
|
||||
_ => unreachable!(), // Already handled above
|
||||
}
|
||||
|
||||
// Compare major.minor.patch first
|
||||
match (self.major, self.minor, self.patch).cmp(&(other.major, other.minor, other.patch)) {
|
||||
// Compare major.minor.patch.build first
|
||||
match (self.major, self.minor, self.patch, self.build).cmp(&(
|
||||
other.major,
|
||||
other.minor,
|
||||
other.patch,
|
||||
other.build,
|
||||
)) {
|
||||
Ordering::Equal => {
|
||||
// If numeric parts are equal, compare pre-release
|
||||
match (&self.pre_release, &other.pre_release) {
|
||||
@@ -464,13 +477,7 @@ impl ApiClient {
|
||||
}
|
||||
|
||||
fn get_cache_dir() -> Result<PathBuf, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let base_dirs = BaseDirs::new().ok_or("Failed to get base directories")?;
|
||||
let app_name = if cfg!(debug_assertions) {
|
||||
"DonutBrowserDev"
|
||||
} else {
|
||||
"DonutBrowser"
|
||||
};
|
||||
let cache_dir = base_dirs.cache_dir().join(app_name).join("version_cache");
|
||||
let cache_dir = crate::app_dirs::cache_dir().join("version_cache");
|
||||
fs::create_dir_all(&cache_dir)?;
|
||||
Ok(cache_dir)
|
||||
}
|
||||
@@ -1131,18 +1138,47 @@ impl ApiClient {
|
||||
log::info!("Fetching Wayfern version from https://donutbrowser.com/wayfern.json");
|
||||
let url = "https://donutbrowser.com/wayfern.json";
|
||||
|
||||
let response = self
|
||||
.client
|
||||
.get(url)
|
||||
.header("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36")
|
||||
.send()
|
||||
.await?;
|
||||
let mut last_err = None;
|
||||
let mut version_info: Option<WayfernVersionInfo> = None;
|
||||
|
||||
if !response.status().is_success() {
|
||||
return Err(format!("Failed to fetch Wayfern version: {}", response.status()).into());
|
||||
for attempt in 1..=3 {
|
||||
match self
|
||||
.client
|
||||
.get(url)
|
||||
.header("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36")
|
||||
.send()
|
||||
.await
|
||||
{
|
||||
Ok(response) => {
|
||||
if !response.status().is_success() {
|
||||
last_err = Some(format!("HTTP {}", response.status()));
|
||||
} else {
|
||||
match response.json::<WayfernVersionInfo>().await {
|
||||
Ok(info) => {
|
||||
version_info = Some(info);
|
||||
break;
|
||||
}
|
||||
Err(e) => last_err = Some(format!("Failed to parse response: {e}")),
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
log::warn!("Wayfern fetch attempt {attempt}/3 failed: {e}");
|
||||
last_err = Some(e.to_string());
|
||||
}
|
||||
}
|
||||
|
||||
if attempt < 3 {
|
||||
tokio::time::sleep(tokio::time::Duration::from_secs(2)).await;
|
||||
}
|
||||
}
|
||||
|
||||
let version_info: WayfernVersionInfo = response.json().await?;
|
||||
let version_info = version_info.ok_or_else(|| {
|
||||
format!(
|
||||
"Failed to fetch Wayfern version after 3 attempts: {}",
|
||||
last_err.unwrap_or_default()
|
||||
)
|
||||
})?;
|
||||
log::info!("Fetched Wayfern version: {}", version_info.version);
|
||||
|
||||
// Cache the results (unless bypassing cache)
|
||||
|
||||
@@ -31,6 +31,7 @@ pub struct ApiProfile {
|
||||
pub browser: String,
|
||||
pub version: String,
|
||||
pub proxy_id: Option<String>,
|
||||
pub launch_hook: Option<String>,
|
||||
pub process_id: Option<u32>,
|
||||
pub last_launch: Option<u64>,
|
||||
pub release_type: String,
|
||||
@@ -39,6 +40,7 @@ pub struct ApiProfile {
|
||||
pub group_id: Option<String>,
|
||||
pub tags: Vec<String>,
|
||||
pub is_running: bool,
|
||||
pub proxy_bypass_rules: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, ToSchema)]
|
||||
@@ -58,6 +60,7 @@ pub struct CreateProfileRequest {
|
||||
pub browser: String,
|
||||
pub version: String,
|
||||
pub proxy_id: Option<String>,
|
||||
pub launch_hook: Option<String>,
|
||||
pub release_type: Option<String>,
|
||||
#[schema(value_type = Object)]
|
||||
pub camoufox_config: Option<serde_json::Value>,
|
||||
@@ -73,11 +76,14 @@ pub struct UpdateProfileRequest {
|
||||
pub browser: Option<String>,
|
||||
pub version: Option<String>,
|
||||
pub proxy_id: Option<String>,
|
||||
pub launch_hook: Option<String>,
|
||||
pub release_type: Option<String>,
|
||||
#[schema(value_type = Object)]
|
||||
pub camoufox_config: Option<serde_json::Value>,
|
||||
pub group_id: Option<String>,
|
||||
pub tags: Option<Vec<String>>,
|
||||
pub extension_group_id: Option<String>,
|
||||
pub proxy_bypass_rules: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
@@ -295,30 +301,24 @@ impl ApiServer {
|
||||
|
||||
// Create router with OpenAPI documentation
|
||||
let (v1_routes, _) = OpenApiRouter::new()
|
||||
.routes(routes!(
|
||||
get_profiles,
|
||||
create_profile,
|
||||
get_profile,
|
||||
update_profile,
|
||||
delete_profile,
|
||||
run_profile,
|
||||
open_url_in_profile,
|
||||
kill_profile,
|
||||
get_groups,
|
||||
create_group,
|
||||
get_group,
|
||||
update_group,
|
||||
delete_group,
|
||||
get_tags,
|
||||
get_proxies,
|
||||
create_proxy,
|
||||
get_proxy,
|
||||
update_proxy,
|
||||
delete_proxy,
|
||||
download_browser_api,
|
||||
get_browser_versions,
|
||||
check_browser_downloaded,
|
||||
))
|
||||
.routes(routes!(get_profiles, create_profile))
|
||||
.routes(routes!(get_profile, update_profile, delete_profile))
|
||||
.routes(routes!(run_profile))
|
||||
.routes(routes!(open_url_in_profile))
|
||||
.routes(routes!(kill_profile))
|
||||
.routes(routes!(get_groups, create_group))
|
||||
.routes(routes!(get_group, update_group, delete_group))
|
||||
.routes(routes!(get_tags))
|
||||
.routes(routes!(get_proxies, create_proxy))
|
||||
.routes(routes!(get_proxy, update_proxy, delete_proxy))
|
||||
.routes(routes!(get_extensions))
|
||||
.routes(routes!(delete_extension_api))
|
||||
.routes(routes!(get_extension_groups))
|
||||
.routes(routes!(delete_extension_group_api))
|
||||
.routes(routes!(download_browser_api))
|
||||
.routes(routes!(get_browser_versions))
|
||||
.routes(routes!(check_browser_downloaded))
|
||||
.routes(routes!(get_wayfern_token, refresh_wayfern_token))
|
||||
.split_for_parts();
|
||||
|
||||
let api = ApiDoc::openapi();
|
||||
@@ -337,7 +337,7 @@ impl ApiServer {
|
||||
.with_state(ws_state);
|
||||
|
||||
let app = Router::new()
|
||||
.nest("/v1", v1_routes)
|
||||
.merge(v1_routes)
|
||||
.nest("/ws", ws_routes)
|
||||
.route("/openapi.json", get(move || async move { Json(api) }))
|
||||
.layer(CorsLayer::permissive())
|
||||
@@ -483,6 +483,7 @@ async fn get_profiles() -> Result<Json<ApiProfilesResponse>, StatusCode> {
|
||||
browser: profile.browser.clone(),
|
||||
version: profile.version.clone(),
|
||||
proxy_id: profile.proxy_id.clone(),
|
||||
launch_hook: profile.launch_hook.clone(),
|
||||
process_id: profile.process_id,
|
||||
last_launch: profile.last_launch,
|
||||
release_type: profile.release_type.clone(),
|
||||
@@ -493,6 +494,7 @@ async fn get_profiles() -> Result<Json<ApiProfilesResponse>, StatusCode> {
|
||||
group_id: profile.group_id.clone(),
|
||||
tags: profile.tags.clone(),
|
||||
is_running: profile.process_id.is_some(), // Simple check based on process_id
|
||||
proxy_bypass_rules: profile.proxy_bypass_rules.clone(),
|
||||
})
|
||||
.collect();
|
||||
|
||||
@@ -537,6 +539,7 @@ async fn get_profile(
|
||||
browser: profile.browser.clone(),
|
||||
version: profile.version.clone(),
|
||||
proxy_id: profile.proxy_id.clone(),
|
||||
launch_hook: profile.launch_hook.clone(),
|
||||
process_id: profile.process_id,
|
||||
last_launch: profile.last_launch,
|
||||
release_type: profile.release_type.clone(),
|
||||
@@ -547,6 +550,7 @@ async fn get_profile(
|
||||
group_id: profile.group_id.clone(),
|
||||
tags: profile.tags.clone(),
|
||||
is_running: profile.process_id.is_some(), // Simple check based on process_id
|
||||
proxy_bypass_rules: profile.proxy_bypass_rules.clone(),
|
||||
},
|
||||
}))
|
||||
} else {
|
||||
@@ -601,9 +605,13 @@ async fn create_profile(
|
||||
&request.version,
|
||||
request.release_type.as_deref().unwrap_or("stable"),
|
||||
request.proxy_id.clone(),
|
||||
None, // vpn_id
|
||||
camoufox_config,
|
||||
wayfern_config,
|
||||
request.group_id.clone(),
|
||||
false,
|
||||
None,
|
||||
request.launch_hook.clone(),
|
||||
)
|
||||
.await
|
||||
{
|
||||
@@ -633,6 +641,7 @@ async fn create_profile(
|
||||
browser: profile.browser,
|
||||
version: profile.version,
|
||||
proxy_id: profile.proxy_id,
|
||||
launch_hook: profile.launch_hook,
|
||||
process_id: profile.process_id,
|
||||
last_launch: profile.last_launch,
|
||||
release_type: profile.release_type,
|
||||
@@ -643,6 +652,7 @@ async fn create_profile(
|
||||
group_id: profile.group_id,
|
||||
tags: profile.tags,
|
||||
is_running: false,
|
||||
proxy_bypass_rules: profile.proxy_bypass_rules,
|
||||
},
|
||||
}))
|
||||
}
|
||||
@@ -705,6 +715,21 @@ async fn update_profile(
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(launch_hook) = request.launch_hook {
|
||||
let normalized = if launch_hook.trim().is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(launch_hook)
|
||||
};
|
||||
|
||||
if profile_manager
|
||||
.update_profile_launch_hook(&state.app_handle, &id, normalized)
|
||||
.is_err()
|
||||
{
|
||||
return Err(StatusCode::BAD_REQUEST);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(camoufox_config) = request.camoufox_config {
|
||||
let config: Result<CamoufoxConfig, _> = serde_json::from_value(camoufox_config);
|
||||
match config {
|
||||
@@ -746,6 +771,29 @@ async fn update_profile(
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(extension_group_id) = request.extension_group_id {
|
||||
let ext_group = if extension_group_id.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(extension_group_id)
|
||||
};
|
||||
if profile_manager
|
||||
.update_profile_extension_group(&id, ext_group)
|
||||
.is_err()
|
||||
{
|
||||
return Err(StatusCode::BAD_REQUEST);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(proxy_bypass_rules) = request.proxy_bypass_rules {
|
||||
if profile_manager
|
||||
.update_profile_proxy_bypass_rules(&state.app_handle, &id, proxy_bypass_rules)
|
||||
.is_err()
|
||||
{
|
||||
return Err(StatusCode::BAD_REQUEST);
|
||||
}
|
||||
}
|
||||
|
||||
// Return updated profile
|
||||
get_profile(Path(id), State(state)).await
|
||||
}
|
||||
@@ -1061,11 +1109,13 @@ async fn create_proxy(
|
||||
State(state): State<ApiServerState>,
|
||||
Json(request): Json<CreateProxyRequest>,
|
||||
) -> Result<Json<ApiProxyResponse>, StatusCode> {
|
||||
match PROXY_MANAGER.create_stored_proxy(
|
||||
let result = PROXY_MANAGER.create_stored_proxy(
|
||||
&state.app_handle,
|
||||
request.name.clone(),
|
||||
request.proxy_settings,
|
||||
) {
|
||||
);
|
||||
|
||||
match result {
|
||||
Ok(proxy) => Ok(Json(ApiProxyResponse {
|
||||
id: proxy.id,
|
||||
name: proxy.name,
|
||||
@@ -1099,28 +1149,16 @@ async fn update_proxy(
|
||||
State(state): State<ApiServerState>,
|
||||
Json(request): Json<UpdateProxyRequest>,
|
||||
) -> Result<Json<ApiProxyResponse>, StatusCode> {
|
||||
let proxies = PROXY_MANAGER.get_stored_proxies();
|
||||
if let Some(proxy) = proxies.into_iter().find(|p| p.id == id) {
|
||||
let new_name = request.name.unwrap_or(proxy.name.clone());
|
||||
let new_proxy_settings = request
|
||||
.proxy_settings
|
||||
.unwrap_or(proxy.proxy_settings.clone());
|
||||
let result =
|
||||
PROXY_MANAGER.update_stored_proxy(&state.app_handle, &id, request.name, request.proxy_settings);
|
||||
|
||||
match PROXY_MANAGER.update_stored_proxy(
|
||||
&state.app_handle,
|
||||
&id,
|
||||
Some(new_name.clone()),
|
||||
Some(new_proxy_settings.clone()),
|
||||
) {
|
||||
Ok(_) => Ok(Json(ApiProxyResponse {
|
||||
id,
|
||||
name: new_name,
|
||||
proxy_settings: new_proxy_settings,
|
||||
})),
|
||||
Err(_) => Err(StatusCode::BAD_REQUEST),
|
||||
}
|
||||
} else {
|
||||
Err(StatusCode::NOT_FOUND)
|
||||
match result {
|
||||
Ok(proxy) => Ok(Json(ApiProxyResponse {
|
||||
id: proxy.id,
|
||||
name: proxy.name,
|
||||
proxy_settings: proxy.proxy_settings,
|
||||
})),
|
||||
Err(_) => Err(StatusCode::NOT_FOUND),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1151,6 +1189,94 @@ async fn delete_proxy(
|
||||
}
|
||||
}
|
||||
|
||||
// Extension API endpoints
|
||||
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/v1/extensions",
|
||||
responses(
|
||||
(status = 200, description = "List of extensions"),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
),
|
||||
security(("bearer_auth" = [])),
|
||||
tag = "extensions"
|
||||
)]
|
||||
async fn get_extensions(
|
||||
State(_state): State<ApiServerState>,
|
||||
) -> Result<Json<Vec<crate::extension_manager::Extension>>, StatusCode> {
|
||||
let mgr = crate::extension_manager::EXTENSION_MANAGER.lock().unwrap();
|
||||
mgr
|
||||
.list_extensions()
|
||||
.map(Json)
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)
|
||||
}
|
||||
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/v1/extension-groups",
|
||||
responses(
|
||||
(status = 200, description = "List of extension groups"),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
),
|
||||
security(("bearer_auth" = [])),
|
||||
tag = "extensions"
|
||||
)]
|
||||
async fn get_extension_groups(
|
||||
State(_state): State<ApiServerState>,
|
||||
) -> Result<Json<Vec<crate::extension_manager::ExtensionGroup>>, StatusCode> {
|
||||
let mgr = crate::extension_manager::EXTENSION_MANAGER.lock().unwrap();
|
||||
mgr
|
||||
.list_groups()
|
||||
.map(Json)
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)
|
||||
}
|
||||
|
||||
#[utoipa::path(
|
||||
delete,
|
||||
path = "/v1/extensions/{id}",
|
||||
params(("id" = String, Path, description = "Extension ID")),
|
||||
responses(
|
||||
(status = 204, description = "Extension deleted"),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
(status = 404, description = "Extension not found"),
|
||||
),
|
||||
security(("bearer_auth" = [])),
|
||||
tag = "extensions"
|
||||
)]
|
||||
async fn delete_extension_api(
|
||||
Path(id): Path<String>,
|
||||
State(state): State<ApiServerState>,
|
||||
) -> Result<StatusCode, StatusCode> {
|
||||
let mgr = crate::extension_manager::EXTENSION_MANAGER.lock().unwrap();
|
||||
mgr
|
||||
.delete_extension(&state.app_handle, &id)
|
||||
.map(|_| StatusCode::NO_CONTENT)
|
||||
.map_err(|_| StatusCode::NOT_FOUND)
|
||||
}
|
||||
|
||||
#[utoipa::path(
|
||||
delete,
|
||||
path = "/v1/extension-groups/{id}",
|
||||
params(("id" = String, Path, description = "Extension Group ID")),
|
||||
responses(
|
||||
(status = 204, description = "Extension group deleted"),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
(status = 404, description = "Extension group not found"),
|
||||
),
|
||||
security(("bearer_auth" = [])),
|
||||
tag = "extensions"
|
||||
)]
|
||||
async fn delete_extension_group_api(
|
||||
Path(id): Path<String>,
|
||||
State(state): State<ApiServerState>,
|
||||
) -> Result<StatusCode, StatusCode> {
|
||||
let mgr = crate::extension_manager::EXTENSION_MANAGER.lock().unwrap();
|
||||
mgr
|
||||
.delete_group(&state.app_handle, &id)
|
||||
.map(|_| StatusCode::NO_CONTENT)
|
||||
.map_err(|_| StatusCode::NOT_FOUND)
|
||||
}
|
||||
|
||||
// API Handler - Run Profile with Remote Debugging
|
||||
#[utoipa::path(
|
||||
post,
|
||||
@@ -1161,6 +1287,7 @@ async fn delete_proxy(
|
||||
request_body = RunProfileRequest,
|
||||
responses(
|
||||
(status = 200, description = "Profile launched successfully", body = RunProfileResponse),
|
||||
(status = 400, description = "Cannot launch cross-OS profile"),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
(status = 404, description = "Profile not found"),
|
||||
(status = 500, description = "Internal server error")
|
||||
@@ -1175,6 +1302,13 @@ async fn run_profile(
|
||||
State(state): State<ApiServerState>,
|
||||
Json(request): Json<RunProfileRequest>,
|
||||
) -> Result<Json<RunProfileResponse>, StatusCode> {
|
||||
if !crate::cloud_auth::CLOUD_AUTH
|
||||
.has_active_paid_subscription()
|
||||
.await
|
||||
{
|
||||
return Err(StatusCode::PAYMENT_REQUIRED);
|
||||
}
|
||||
|
||||
let headless = request.headless.unwrap_or(false);
|
||||
let url = request.url;
|
||||
|
||||
@@ -1188,6 +1322,15 @@ async fn run_profile(
|
||||
.find(|p| p.id.to_string() == id)
|
||||
.ok_or(StatusCode::NOT_FOUND)?;
|
||||
|
||||
if profile.is_cross_os() {
|
||||
return Err(StatusCode::BAD_REQUEST);
|
||||
}
|
||||
|
||||
// Team lock check
|
||||
crate::team_lock::acquire_team_lock_if_needed(profile)
|
||||
.await
|
||||
.map_err(|_| StatusCode::CONFLICT)?;
|
||||
|
||||
// Generate a random port for remote debugging
|
||||
let remote_debugging_port = rand::random::<u16>().saturating_add(9000).max(9000);
|
||||
|
||||
@@ -1234,6 +1377,13 @@ async fn open_url_in_profile(
|
||||
State(state): State<ApiServerState>,
|
||||
Json(request): Json<OpenUrlRequest>,
|
||||
) -> Result<StatusCode, StatusCode> {
|
||||
if !crate::cloud_auth::CLOUD_AUTH
|
||||
.has_active_paid_subscription()
|
||||
.await
|
||||
{
|
||||
return Err(StatusCode::PAYMENT_REQUIRED);
|
||||
}
|
||||
|
||||
let browser_runner = crate::browser_runner::BrowserRunner::instance();
|
||||
|
||||
browser_runner
|
||||
@@ -1282,6 +1432,8 @@ async fn kill_profile(
|
||||
.await
|
||||
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
|
||||
|
||||
crate::team_lock::release_team_lock_if_needed(profile).await;
|
||||
|
||||
Ok(StatusCode::NO_CONTENT)
|
||||
}
|
||||
|
||||
@@ -1377,3 +1529,54 @@ async fn check_browser_downloaded(
|
||||
let is_downloaded = crate::downloaded_browsers_registry::is_browser_downloaded(browser, version);
|
||||
Ok(Json(is_downloaded))
|
||||
}
|
||||
|
||||
// API Handlers - Wayfern Token
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, ToSchema)]
|
||||
pub struct WayfernTokenResponse {
|
||||
pub token: Option<String>,
|
||||
}
|
||||
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/v1/wayfern-token",
|
||||
responses(
|
||||
(status = 200, description = "Current wayfern token", body = WayfernTokenResponse),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
),
|
||||
security(
|
||||
("bearer_auth" = [])
|
||||
),
|
||||
tag = "wayfern"
|
||||
)]
|
||||
async fn get_wayfern_token(
|
||||
State(_state): State<ApiServerState>,
|
||||
) -> Result<Json<WayfernTokenResponse>, StatusCode> {
|
||||
let token = crate::cloud_auth::CLOUD_AUTH.get_wayfern_token().await;
|
||||
Ok(Json(WayfernTokenResponse { token }))
|
||||
}
|
||||
|
||||
#[utoipa::path(
|
||||
post,
|
||||
path = "/v1/wayfern-token/refresh",
|
||||
responses(
|
||||
(status = 200, description = "Refreshed wayfern token", body = WayfernTokenResponse),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
(status = 500, description = "Failed to refresh token"),
|
||||
),
|
||||
security(
|
||||
("bearer_auth" = [])
|
||||
),
|
||||
tag = "wayfern"
|
||||
)]
|
||||
async fn refresh_wayfern_token(
|
||||
State(_state): State<ApiServerState>,
|
||||
) -> Result<Json<WayfernTokenResponse>, (StatusCode, String)> {
|
||||
crate::cloud_auth::CLOUD_AUTH
|
||||
.request_wayfern_token()
|
||||
.await
|
||||
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e))?;
|
||||
|
||||
let token = crate::cloud_auth::CLOUD_AUTH.get_wayfern_token().await;
|
||||
Ok(Json(WayfernTokenResponse { token }))
|
||||
}
|
||||
|
||||
@@ -109,6 +109,8 @@ pub struct AppUpdateInfo {
|
||||
pub published_at: String,
|
||||
pub manual_update_required: bool,
|
||||
pub release_page_url: Option<String>,
|
||||
/// True when a system package manager repo is configured (apt/dnf/zypper)
|
||||
pub repo_update: bool,
|
||||
}
|
||||
|
||||
pub struct AppAutoUpdater {
|
||||
@@ -212,11 +214,12 @@ impl AppAutoUpdater {
|
||||
// Find the appropriate asset for current platform
|
||||
let download_url = self.get_download_url_for_platform(&latest_release.assets);
|
||||
|
||||
// On Linux, we show the update notification even if auto-update is disabled
|
||||
// Users can manually download from the release page
|
||||
// On Linux, when a package repo is configured, notify users to update via
|
||||
// their package manager instead of auto-downloading from GitHub.
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
let manual_update_required = download_url.is_none();
|
||||
let repo_update = self.is_repo_configured();
|
||||
let manual_update_required = download_url.is_none() || repo_update;
|
||||
let update_info = AppUpdateInfo {
|
||||
current_version,
|
||||
new_version: latest_release.tag_name.clone(),
|
||||
@@ -226,13 +229,15 @@ impl AppAutoUpdater {
|
||||
published_at: latest_release.published_at.clone(),
|
||||
manual_update_required,
|
||||
release_page_url: Some(release_page_url),
|
||||
repo_update,
|
||||
};
|
||||
|
||||
log::info!(
|
||||
"Update info prepared: {} -> {} (manual_update_required: {})",
|
||||
"Update info prepared: {} -> {} (manual_update_required: {}, repo_update: {})",
|
||||
update_info.current_version,
|
||||
update_info.new_version,
|
||||
update_info.manual_update_required
|
||||
update_info.manual_update_required,
|
||||
update_info.repo_update
|
||||
);
|
||||
return Ok(Some(update_info));
|
||||
}
|
||||
@@ -249,6 +254,7 @@ impl AppAutoUpdater {
|
||||
published_at: latest_release.published_at.clone(),
|
||||
manual_update_required: false,
|
||||
release_page_url: Some(release_page_url),
|
||||
repo_update: false,
|
||||
};
|
||||
|
||||
log::info!(
|
||||
@@ -455,6 +461,30 @@ impl AppAutoUpdater {
|
||||
LinuxInstallationMethod::Unknown
|
||||
}
|
||||
|
||||
/// Check if the APT repository is configured
|
||||
#[cfg(target_os = "linux")]
|
||||
fn is_deb_repo_configured() -> bool {
|
||||
Path::new("/etc/apt/sources.list.d/donutbrowser.list").exists()
|
||||
}
|
||||
|
||||
/// Check if an RPM repository is configured (yum/dnf or zypper)
|
||||
#[cfg(target_os = "linux")]
|
||||
fn is_rpm_repo_configured() -> bool {
|
||||
Path::new("/etc/yum.repos.d/donutbrowser.repo").exists()
|
||||
|| Path::new("/etc/zypp/repos.d/donutbrowser.repo").exists()
|
||||
}
|
||||
|
||||
/// Check if a system package manager repo is configured for this installation.
|
||||
#[cfg(target_os = "linux")]
|
||||
fn is_repo_configured(&self) -> bool {
|
||||
let installation_method = self.detect_linux_installation_method();
|
||||
match installation_method {
|
||||
LinuxInstallationMethod::Deb => Self::is_deb_repo_configured(),
|
||||
LinuxInstallationMethod::Rpm => Self::is_rpm_repo_configured(),
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the appropriate download URL for the current platform
|
||||
fn get_download_url_for_platform(&self, assets: &[AppReleaseAsset]) -> Option<String> {
|
||||
let arch = if cfg!(target_arch = "aarch64") {
|
||||
@@ -506,7 +536,8 @@ impl AppAutoUpdater {
|
||||
&& (asset.name.contains(&format!("_{arch}.dmg"))
|
||||
|| asset.name.contains(&format!("-{arch}.dmg"))
|
||||
|| asset.name.contains(&format!("_{arch}_"))
|
||||
|| asset.name.contains(&format!("-{arch}-")))
|
||||
|| asset.name.contains(&format!("-{arch}-"))
|
||||
|| asset.name.contains(&format!("_{arch}-")))
|
||||
{
|
||||
log::info!("Found exact architecture match: {}", asset.name);
|
||||
return Some(asset.browser_download_url.clone());
|
||||
@@ -564,7 +595,8 @@ impl AppAutoUpdater {
|
||||
&& (asset.name.contains(&format!("_{arch}.{ext}"))
|
||||
|| asset.name.contains(&format!("-{arch}.{ext}"))
|
||||
|| asset.name.contains(&format!("_{arch}_"))
|
||||
|| asset.name.contains(&format!("-{arch}-")))
|
||||
|| asset.name.contains(&format!("-{arch}-"))
|
||||
|| asset.name.contains(&format!("_{arch}-")))
|
||||
{
|
||||
log::info!("Found Windows {ext} with exact arch match: {}", asset.name);
|
||||
return Some(asset.browser_download_url.clone());
|
||||
@@ -627,7 +659,8 @@ impl AppAutoUpdater {
|
||||
&& (asset.name.contains(&format!("_{arch}.{ext}"))
|
||||
|| asset.name.contains(&format!("-{arch}.{ext}"))
|
||||
|| asset.name.contains(&format!("_{arch}_"))
|
||||
|| asset.name.contains(&format!("-{arch}-")))
|
||||
|| asset.name.contains(&format!("-{arch}-"))
|
||||
|| asset.name.contains(&format!("_{arch}-")))
|
||||
{
|
||||
log::info!("Found Linux {ext} with exact arch match: {}", asset.name);
|
||||
return Some(asset.browser_download_url.clone());
|
||||
@@ -701,7 +734,8 @@ impl AppAutoUpdater {
|
||||
|
||||
let total_size = response.content_length().unwrap_or(0);
|
||||
log::info!("Silent download size: {} bytes", total_size);
|
||||
let mut file = fs::File::create(&file_path)?;
|
||||
let raw_file = fs::File::create(&file_path)?;
|
||||
let mut file = std::io::BufWriter::with_capacity(8 * 1024 * 1024, raw_file);
|
||||
let mut stream = response.bytes_stream();
|
||||
|
||||
use futures_util::StreamExt;
|
||||
@@ -709,6 +743,7 @@ impl AppAutoUpdater {
|
||||
let chunk = chunk?;
|
||||
file.write_all(&chunk)?;
|
||||
}
|
||||
std::io::Write::flush(&mut file)?;
|
||||
|
||||
log::info!("Silent download completed: {}", file_path.display());
|
||||
Ok(file_path)
|
||||
@@ -741,13 +776,36 @@ impl AppAutoUpdater {
|
||||
log::info!("Extracting update...");
|
||||
let extracted_app_path = self.extract_update(&download_path, &temp_dir).await?;
|
||||
|
||||
log::info!("Installing update (overwriting binary)...");
|
||||
self.install_update(&extracted_app_path).await?;
|
||||
// On Windows, MSI/EXE installers close the running app, so running them now
|
||||
// would kill the process before the "Update ready" toast can appear. Instead,
|
||||
// defer execution to restart_application() when the user clicks "Restart Now".
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
let ext = extracted_app_path
|
||||
.extension()
|
||||
.and_then(|e| e.to_str())
|
||||
.unwrap_or("")
|
||||
.to_lowercase();
|
||||
if ext == "msi" || ext == "exe" {
|
||||
log::info!("Deferring Windows installer execution until user-initiated restart");
|
||||
*PENDING_INSTALLER_PATH.lock().unwrap() = Some(extracted_app_path);
|
||||
} else {
|
||||
log::info!("Installing update (overwriting binary)...");
|
||||
self.install_update(&extracted_app_path).await?;
|
||||
log::info!("Cleaning up temporary files...");
|
||||
let _ = fs::remove_dir_all(&temp_dir);
|
||||
}
|
||||
}
|
||||
|
||||
log::info!("Cleaning up temporary files...");
|
||||
let _ = fs::remove_dir_all(&temp_dir);
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
{
|
||||
log::info!("Installing update (overwriting binary)...");
|
||||
self.install_update(&extracted_app_path).await?;
|
||||
log::info!("Cleaning up temporary files...");
|
||||
let _ = fs::remove_dir_all(&temp_dir);
|
||||
}
|
||||
|
||||
log::info!("Update installed successfully, emitting app-update-ready event");
|
||||
log::info!("Update ready, emitting app-update-ready event");
|
||||
|
||||
let _ = events::emit("app-update-ready", update_info.new_version.clone());
|
||||
|
||||
@@ -1418,14 +1476,63 @@ rm "{}"
|
||||
{
|
||||
let app_path = self.get_current_app_path()?;
|
||||
let current_pid = std::process::id();
|
||||
let pending = PENDING_INSTALLER_PATH.lock().unwrap().take();
|
||||
|
||||
// Create a temporary restart batch script
|
||||
let temp_dir = std::env::temp_dir();
|
||||
let script_path = temp_dir.join("donut_restart.bat");
|
||||
let update_temp_dir = temp_dir.join("donut_app_update");
|
||||
|
||||
// Create the restart script content
|
||||
let script_content = format!(
|
||||
r#"@echo off
|
||||
let script_content = if let Some(installer_path) = pending {
|
||||
let ext = installer_path
|
||||
.extension()
|
||||
.and_then(|e| e.to_str())
|
||||
.unwrap_or("")
|
||||
.to_lowercase();
|
||||
let install_cmd = match ext.as_str() {
|
||||
"msi" => format!(
|
||||
"msiexec /i \"{}\" /quiet /norestart REBOOT=ReallySuppress",
|
||||
installer_path.to_str().unwrap()
|
||||
),
|
||||
"exe" => format!("\"{}\" /S", installer_path.to_str().unwrap()),
|
||||
_ => String::new(),
|
||||
};
|
||||
|
||||
format!(
|
||||
r#"@echo off
|
||||
rem Wait for the current process to exit
|
||||
:wait_loop
|
||||
tasklist /fi "PID eq {pid}" >nul 2>&1
|
||||
if %errorlevel% equ 0 (
|
||||
timeout /t 1 /nobreak >nul
|
||||
goto wait_loop
|
||||
)
|
||||
|
||||
rem Wait a bit more to ensure clean exit
|
||||
timeout /t 2 /nobreak >nul
|
||||
|
||||
rem Run the installer
|
||||
{install_cmd}
|
||||
|
||||
rem Wait for installation to complete
|
||||
timeout /t 3 /nobreak >nul
|
||||
|
||||
rem Start the new application
|
||||
start "" "{app_path}"
|
||||
|
||||
rem Clean up installer temp files
|
||||
rmdir /s /q "{update_temp}"
|
||||
|
||||
rem Clean up this script
|
||||
del "%~f0"
|
||||
"#,
|
||||
pid = current_pid,
|
||||
install_cmd = install_cmd,
|
||||
app_path = app_path.to_str().unwrap(),
|
||||
update_temp = update_temp_dir.to_str().unwrap(),
|
||||
)
|
||||
} else {
|
||||
format!(
|
||||
r#"@echo off
|
||||
rem Wait for the current process to exit
|
||||
:wait_loop
|
||||
tasklist /fi "PID eq {}" >nul 2>&1
|
||||
@@ -1443,24 +1550,20 @@ start "" "{}"
|
||||
rem Clean up this script
|
||||
del "%~f0"
|
||||
"#,
|
||||
current_pid,
|
||||
app_path.to_str().unwrap()
|
||||
);
|
||||
current_pid,
|
||||
app_path.to_str().unwrap()
|
||||
)
|
||||
};
|
||||
|
||||
// Write the script to file
|
||||
fs::write(&script_path, script_content)?;
|
||||
|
||||
// Execute the restart script in the background
|
||||
let mut cmd = Command::new("cmd");
|
||||
cmd.args(["/C", script_path.to_str().unwrap()]);
|
||||
|
||||
// Start the process detached
|
||||
let _child = cmd.spawn()?;
|
||||
|
||||
// Give the script a moment to start
|
||||
tokio::time::sleep(tokio::time::Duration::from_millis(500)).await;
|
||||
|
||||
// Exit the current process
|
||||
std::process::exit(0);
|
||||
}
|
||||
|
||||
@@ -1531,6 +1634,20 @@ rm "{}"
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn check_for_app_updates() -> Result<Option<AppUpdateInfo>, String> {
|
||||
if crate::app_dirs::is_portable() {
|
||||
log::info!("App auto-updates disabled in portable mode");
|
||||
return Ok(None);
|
||||
}
|
||||
// The disable_auto_updates setting controls app self-updates only
|
||||
let disabled = crate::settings_manager::SettingsManager::instance()
|
||||
.load_settings()
|
||||
.map(|s| s.disable_auto_updates)
|
||||
.unwrap_or(false);
|
||||
if disabled {
|
||||
log::info!("App auto-updates disabled by user setting");
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let updater = AppAutoUpdater::instance();
|
||||
updater
|
||||
.check_for_updates()
|
||||
@@ -1698,15 +1815,10 @@ mod tests {
|
||||
browser_download_url: "https://example.com/x64.dmg".to_string(),
|
||||
size: 12345,
|
||||
},
|
||||
// Windows assets
|
||||
// Windows assets (NSIS naming: _ARCH-setup.exe)
|
||||
AppReleaseAsset {
|
||||
name: "Donut.Browser_0.1.0_x64.msi".to_string(),
|
||||
browser_download_url: "https://example.com/x64.msi".to_string(),
|
||||
size: 12345,
|
||||
},
|
||||
AppReleaseAsset {
|
||||
name: "Donut.Browser_0.1.0_x64.exe".to_string(),
|
||||
browser_download_url: "https://example.com/x64.exe".to_string(),
|
||||
name: "Donut_0.1.0_x64-setup.exe".to_string(),
|
||||
browser_download_url: "https://example.com/x64-setup.exe".to_string(),
|
||||
size: 12345,
|
||||
},
|
||||
// Linux assets
|
||||
@@ -1923,9 +2035,19 @@ mod tests {
|
||||
// If url is None, it means AppImage was detected and auto-updates are disabled
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[cfg(target_os = "linux")]
|
||||
fn test_repo_detection_returns_bool() {
|
||||
// These just verify the functions run without panicking.
|
||||
// Actual values depend on the host system configuration.
|
||||
let _deb = AppAutoUpdater::is_deb_repo_configured();
|
||||
let _rpm = AppAutoUpdater::is_rpm_repo_configured();
|
||||
}
|
||||
}
|
||||
|
||||
// Global singleton instance
|
||||
lazy_static::lazy_static! {
|
||||
static ref APP_AUTO_UPDATER: AppAutoUpdater = AppAutoUpdater::new();
|
||||
static ref PENDING_INSTALLER_PATH: std::sync::Mutex<Option<PathBuf>> = std::sync::Mutex::new(None);
|
||||
}
|
||||
|
||||
@@ -0,0 +1,231 @@
|
||||
use directories::BaseDirs;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::OnceLock;
|
||||
|
||||
static BASE_DIRS: OnceLock<BaseDirs> = OnceLock::new();
|
||||
static PORTABLE_DIR: OnceLock<Option<PathBuf>> = OnceLock::new();
|
||||
|
||||
fn base_dirs() -> &'static BaseDirs {
|
||||
BASE_DIRS.get_or_init(|| BaseDirs::new().expect("Failed to get base directories"))
|
||||
}
|
||||
|
||||
/// Returns the portable base directory if a `.portable` marker exists next to the executable.
|
||||
fn portable_dir() -> Option<&'static PathBuf> {
|
||||
PORTABLE_DIR
|
||||
.get_or_init(|| {
|
||||
std::env::current_exe()
|
||||
.ok()
|
||||
.and_then(|exe| exe.parent().map(|p| p.to_path_buf()))
|
||||
.filter(|dir| dir.join(".portable").exists())
|
||||
})
|
||||
.as_ref()
|
||||
}
|
||||
|
||||
/// Returns true if the app is running in portable mode.
|
||||
pub fn is_portable() -> bool {
|
||||
portable_dir().is_some()
|
||||
}
|
||||
|
||||
pub fn app_name() -> &'static str {
|
||||
if cfg!(debug_assertions) {
|
||||
"DonutBrowserDev"
|
||||
} else {
|
||||
"DonutBrowser"
|
||||
}
|
||||
}
|
||||
|
||||
pub fn data_dir() -> PathBuf {
|
||||
#[cfg(test)]
|
||||
{
|
||||
if let Some(dir) = TEST_DATA_DIR.with(|cell| cell.borrow().clone()) {
|
||||
return dir;
|
||||
}
|
||||
}
|
||||
|
||||
if let Ok(dir) = std::env::var("DONUTBROWSER_DATA_DIR") {
|
||||
return PathBuf::from(dir);
|
||||
}
|
||||
|
||||
if let Some(dir) = portable_dir() {
|
||||
return dir.join("data");
|
||||
}
|
||||
|
||||
base_dirs().data_local_dir().join(app_name())
|
||||
}
|
||||
|
||||
pub fn cache_dir() -> PathBuf {
|
||||
#[cfg(test)]
|
||||
{
|
||||
if let Some(dir) = TEST_CACHE_DIR.with(|cell| cell.borrow().clone()) {
|
||||
return dir;
|
||||
}
|
||||
}
|
||||
|
||||
if let Ok(dir) = std::env::var("DONUTBROWSER_CACHE_DIR") {
|
||||
return PathBuf::from(dir);
|
||||
}
|
||||
|
||||
if let Some(dir) = portable_dir() {
|
||||
return dir.join("cache");
|
||||
}
|
||||
|
||||
base_dirs().cache_dir().join(app_name())
|
||||
}
|
||||
|
||||
pub fn profiles_dir() -> PathBuf {
|
||||
data_dir().join("profiles")
|
||||
}
|
||||
|
||||
pub fn binaries_dir() -> PathBuf {
|
||||
data_dir().join("binaries")
|
||||
}
|
||||
|
||||
pub fn data_subdir() -> PathBuf {
|
||||
data_dir().join("data")
|
||||
}
|
||||
|
||||
pub fn settings_dir() -> PathBuf {
|
||||
data_dir().join("settings")
|
||||
}
|
||||
|
||||
pub fn proxies_dir() -> PathBuf {
|
||||
data_dir().join("proxies")
|
||||
}
|
||||
|
||||
pub fn proxy_workers_dir() -> PathBuf {
|
||||
cache_dir().join("proxy_workers")
|
||||
}
|
||||
|
||||
pub fn vpn_dir() -> PathBuf {
|
||||
data_dir().join("vpn")
|
||||
}
|
||||
|
||||
pub fn extensions_dir() -> PathBuf {
|
||||
data_dir().join("extensions")
|
||||
}
|
||||
|
||||
pub fn dns_blocklist_dir() -> PathBuf {
|
||||
cache_dir().join("dns_blocklists")
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
thread_local! {
|
||||
static TEST_DATA_DIR: std::cell::RefCell<Option<PathBuf>> = const { std::cell::RefCell::new(None) };
|
||||
static TEST_CACHE_DIR: std::cell::RefCell<Option<PathBuf>> = const { std::cell::RefCell::new(None) };
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub struct TestDirGuard {
|
||||
kind: TestDirKind,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
enum TestDirKind {
|
||||
Data,
|
||||
Cache,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
impl Drop for TestDirGuard {
|
||||
fn drop(&mut self) {
|
||||
match self.kind {
|
||||
TestDirKind::Data => TEST_DATA_DIR.with(|cell| *cell.borrow_mut() = None),
|
||||
TestDirKind::Cache => TEST_CACHE_DIR.with(|cell| *cell.borrow_mut() = None),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn set_test_data_dir(dir: PathBuf) -> TestDirGuard {
|
||||
TEST_DATA_DIR.with(|cell| *cell.borrow_mut() = Some(dir));
|
||||
TestDirGuard {
|
||||
kind: TestDirKind::Data,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn set_test_cache_dir(dir: PathBuf) -> TestDirGuard {
|
||||
TEST_CACHE_DIR.with(|cell| *cell.borrow_mut() = Some(dir));
|
||||
TestDirGuard {
|
||||
kind: TestDirKind::Cache,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_app_name() {
|
||||
let name = app_name();
|
||||
assert!(
|
||||
name == "DonutBrowser" || name == "DonutBrowserDev",
|
||||
"app_name should be DonutBrowser or DonutBrowserDev, got: {name}"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_data_dir_returns_path() {
|
||||
let dir = data_dir();
|
||||
assert!(
|
||||
dir.to_string_lossy().contains(app_name()),
|
||||
"data_dir should contain app_name"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_cache_dir_returns_path() {
|
||||
let dir = cache_dir();
|
||||
assert!(
|
||||
dir.to_string_lossy().contains(app_name()),
|
||||
"cache_dir should contain app_name"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_subdirectory_helpers() {
|
||||
assert!(profiles_dir().ends_with("profiles"));
|
||||
assert!(binaries_dir().ends_with("binaries"));
|
||||
assert!(data_subdir().ends_with("data"));
|
||||
assert!(settings_dir().ends_with("settings"));
|
||||
assert!(proxies_dir().ends_with("proxies"));
|
||||
assert!(proxy_workers_dir().ends_with("proxy_workers"));
|
||||
assert!(vpn_dir().ends_with("vpn"));
|
||||
assert!(extensions_dir().ends_with("extensions"));
|
||||
assert!(dns_blocklist_dir().ends_with("dns_blocklists"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_set_test_data_dir() {
|
||||
let tmp = PathBuf::from("/tmp/test-donut-data");
|
||||
let _guard = set_test_data_dir(tmp.clone());
|
||||
assert_eq!(data_dir(), tmp);
|
||||
assert_eq!(profiles_dir(), tmp.join("profiles"));
|
||||
assert_eq!(binaries_dir(), tmp.join("binaries"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_set_test_cache_dir() {
|
||||
let tmp = PathBuf::from("/tmp/test-donut-cache");
|
||||
let _guard = set_test_cache_dir(tmp.clone());
|
||||
assert_eq!(cache_dir(), tmp);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_guard_cleanup() {
|
||||
let original_data = data_dir();
|
||||
let original_cache = cache_dir();
|
||||
|
||||
{
|
||||
let _guard = set_test_data_dir(PathBuf::from("/tmp/test-cleanup-data"));
|
||||
assert_eq!(data_dir(), PathBuf::from("/tmp/test-cleanup-data"));
|
||||
}
|
||||
assert_eq!(data_dir(), original_data);
|
||||
|
||||
{
|
||||
let _guard = set_test_cache_dir(PathBuf::from("/tmp/test-cleanup-cache"));
|
||||
assert_eq!(cache_dir(), PathBuf::from("/tmp/test-cleanup-cache"));
|
||||
}
|
||||
assert_eq!(cache_dir(), original_cache);
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,4 @@
|
||||
use crate::browser_version_manager::{BrowserVersionInfo, BrowserVersionManager};
|
||||
use crate::events;
|
||||
use crate::profile::{BrowserProfile, ProfileManager};
|
||||
use crate::settings_manager::SettingsManager;
|
||||
use serde::{Deserialize, Serialize};
|
||||
@@ -61,6 +60,10 @@ impl AutoUpdater {
|
||||
let mut browser_profiles: HashMap<String, Vec<BrowserProfile>> = HashMap::new();
|
||||
|
||||
for profile in profiles {
|
||||
if profile.is_cross_os() {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Only check supported browsers
|
||||
if !self
|
||||
.browser_version_manager
|
||||
@@ -77,24 +80,25 @@ impl AutoUpdater {
|
||||
}
|
||||
|
||||
for (browser, profiles) in browser_profiles {
|
||||
// Get cached versions first, then try to fetch if needed
|
||||
let versions = if let Some(cached) = self
|
||||
// Always fetch fresh versions for update checks — stale cache would miss new releases
|
||||
let versions = match self
|
||||
.browser_version_manager
|
||||
.get_cached_browser_versions_detailed(&browser)
|
||||
.fetch_browser_versions_detailed(&browser, false)
|
||||
.await
|
||||
{
|
||||
cached
|
||||
} else if self.browser_version_manager.should_update_cache(&browser) {
|
||||
// Try to fetch fresh versions
|
||||
match self
|
||||
.browser_version_manager
|
||||
.fetch_browser_versions_detailed(&browser, false)
|
||||
.await
|
||||
{
|
||||
Ok(versions) => versions,
|
||||
Err(_) => continue, // Skip this browser if fetch fails
|
||||
Ok(versions) => versions,
|
||||
Err(e) => {
|
||||
log::warn!("Failed to fetch versions for {browser}: {e}, trying cache");
|
||||
// Fall back to cache if network fails
|
||||
if let Some(cached) = self
|
||||
.browser_version_manager
|
||||
.get_cached_browser_versions_detailed(&browser)
|
||||
{
|
||||
cached
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
continue; // No cached versions and cache doesn't need update
|
||||
};
|
||||
|
||||
browser_versions.insert(browser.clone(), versions.clone());
|
||||
@@ -102,26 +106,7 @@ impl AutoUpdater {
|
||||
// Check each profile for updates
|
||||
for profile in profiles {
|
||||
if let Some(update) = self.check_profile_update(&profile, &versions)? {
|
||||
// Apply chromium threshold logic
|
||||
if browser == "chromium" {
|
||||
// For chromium, only show notifications if there are 400+ new versions
|
||||
let current_version = &profile.version.parse::<u32>().unwrap();
|
||||
let new_version = &update.new_version.parse::<u32>().unwrap();
|
||||
|
||||
let result = new_version - current_version;
|
||||
log::info!(
|
||||
"Current version: {current_version}, New version: {new_version}, Result: {result}"
|
||||
);
|
||||
if result > 400 {
|
||||
notifications.push(update);
|
||||
} else {
|
||||
log::info!(
|
||||
"Skipping chromium update notification: only {result} new versions (need 400+)"
|
||||
);
|
||||
}
|
||||
} else {
|
||||
notifications.push(update);
|
||||
}
|
||||
notifications.push(update);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -132,78 +117,80 @@ impl AutoUpdater {
|
||||
pub async fn check_for_updates_with_progress(&self, app_handle: &tauri::AppHandle) {
|
||||
log::info!("Starting auto-update check with progress...");
|
||||
|
||||
// Browser auto-updates are always enabled — the disable_auto_updates setting
|
||||
// only controls app self-updates, not browser version updates.
|
||||
|
||||
// Check for browser updates and trigger auto-downloads
|
||||
match self.check_for_updates().await {
|
||||
Ok(update_notifications) => {
|
||||
if !update_notifications.is_empty() {
|
||||
log::info!(
|
||||
"Found {} browser updates to auto-download",
|
||||
update_notifications.len()
|
||||
);
|
||||
// Group by browser+version to avoid duplicate downloads
|
||||
let grouped = self.group_update_notifications(update_notifications);
|
||||
if !grouped.is_empty() {
|
||||
log::info!("Found {} browser updates", grouped.len());
|
||||
|
||||
// Trigger automatic downloads for each update
|
||||
for notification in update_notifications {
|
||||
for notification in grouped {
|
||||
log::info!(
|
||||
"Auto-downloading {} version {}",
|
||||
"Auto-updating {} to version {} ({} profiles)",
|
||||
notification.browser,
|
||||
notification.new_version
|
||||
notification.new_version,
|
||||
notification.affected_profiles.len()
|
||||
);
|
||||
|
||||
// Clone app_handle for the async task
|
||||
let browser = notification.browser.clone();
|
||||
let new_version = notification.new_version.clone();
|
||||
let notification_id = notification.id.clone();
|
||||
let affected_profiles = notification.affected_profiles.clone();
|
||||
let app_handle_clone = app_handle.clone();
|
||||
|
||||
// Spawn async task to handle the download and auto-update
|
||||
tokio::spawn(async move {
|
||||
// TODO: update the logic to use the downloaded browsers registry instance instead of the static method
|
||||
// First, check if browser already exists
|
||||
match crate::downloaded_browsers_registry::is_browser_downloaded(
|
||||
browser.clone(),
|
||||
new_version.clone(),
|
||||
) {
|
||||
true => {
|
||||
log::info!("Browser {browser} {new_version} already downloaded, proceeding to auto-update profiles");
|
||||
let registry =
|
||||
crate::downloaded_browsers_registry::DownloadedBrowsersRegistry::instance();
|
||||
|
||||
// Browser already exists, go straight to profile update
|
||||
match AutoUpdater::instance()
|
||||
.complete_browser_update_with_auto_update(
|
||||
&app_handle_clone,
|
||||
&browser.clone(),
|
||||
&new_version.clone(),
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(updated_profiles) => {
|
||||
// Skip if this browser-version pair is already being downloaded
|
||||
if crate::downloader::is_downloading(&browser, &new_version) {
|
||||
log::info!(
|
||||
"Browser {browser} {new_version} is already being downloaded, skipping duplicate"
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if registry.is_browser_downloaded(&browser, &new_version) {
|
||||
log::info!("Browser {browser} {new_version} already downloaded, proceeding to auto-update profiles");
|
||||
|
||||
// Browser already exists, go straight to profile update
|
||||
match AutoUpdater::instance()
|
||||
.auto_update_profile_versions(&app_handle_clone, &browser, &new_version)
|
||||
.await
|
||||
{
|
||||
Ok(updated_profiles) => {
|
||||
if !updated_profiles.is_empty() {
|
||||
log::info!(
|
||||
"Auto-update completed for {} profiles: {:?}",
|
||||
"Auto-updated {} profiles to {browser} {new_version}: {:?}",
|
||||
updated_profiles.len(),
|
||||
updated_profiles
|
||||
);
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Failed to complete auto-update for {browser}: {e}");
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Failed to auto-update profiles for {browser}: {e}");
|
||||
}
|
||||
}
|
||||
false => {
|
||||
log::info!("Downloading browser {browser} version {new_version}...");
|
||||
} else {
|
||||
log::info!("Downloading browser {browser} version {new_version}...");
|
||||
|
||||
// Emit the auto-update event to trigger frontend handling
|
||||
let auto_update_event = serde_json::json!({
|
||||
"browser": browser,
|
||||
"new_version": new_version,
|
||||
"notification_id": notification_id,
|
||||
"affected_profiles": affected_profiles
|
||||
});
|
||||
|
||||
if let Err(e) = events::emit("browser-auto-update-available", &auto_update_event)
|
||||
{
|
||||
log::error!("Failed to emit auto-update event for {browser}: {e}");
|
||||
} else {
|
||||
log::info!("Emitted auto-update event for {browser}");
|
||||
// Download directly from Rust — download_browser_full already
|
||||
// auto-updates non-running profiles after successful download.
|
||||
match crate::downloader::download_browser(
|
||||
app_handle_clone,
|
||||
browser.clone(),
|
||||
new_version.clone(),
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(actual_version) => {
|
||||
log::info!("Auto-download completed for {browser} {actual_version}");
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Failed to auto-download {browser} {new_version}: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -217,6 +204,24 @@ impl AutoUpdater {
|
||||
log::error!("Failed to check for browser updates: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
// Also update any profiles that can be bumped to an already-installed newer version.
|
||||
// This handles cases where a version was downloaded but profiles weren't updated
|
||||
// (e.g., they were running at the time, or the update was missed).
|
||||
match self.update_profiles_to_latest_installed(app_handle) {
|
||||
Ok(updated) => {
|
||||
if !updated.is_empty() {
|
||||
log::info!(
|
||||
"Updated {} profiles to latest installed versions: {:?}",
|
||||
updated.len(),
|
||||
updated
|
||||
);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Failed to update profiles to latest installed versions: {e}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if a specific profile has an available update
|
||||
@@ -313,9 +318,42 @@ impl AutoUpdater {
|
||||
// Find all profiles for this browser that should be updated
|
||||
for profile in profiles {
|
||||
if profile.browser == browser {
|
||||
if profile.is_cross_os() {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if profile is currently running
|
||||
if profile.process_id.is_some() {
|
||||
continue; // Skip running profiles
|
||||
// Store as pending update so it gets applied when browser closes
|
||||
log::info!(
|
||||
"Profile {} is running, storing pending update {} -> {}",
|
||||
profile.name,
|
||||
profile.version,
|
||||
new_version
|
||||
);
|
||||
let mut state = self.load_auto_update_state().unwrap_or_default();
|
||||
let notification = UpdateNotification {
|
||||
id: format!("{}_{}_to_{}", browser, profile.version, new_version),
|
||||
browser: browser.to_string(),
|
||||
current_version: profile.version.clone(),
|
||||
new_version: new_version.to_string(),
|
||||
affected_profiles: vec![profile.name.clone()],
|
||||
is_stable_update: true,
|
||||
timestamp: std::time::SystemTime::now()
|
||||
.duration_since(std::time::UNIX_EPOCH)
|
||||
.unwrap_or_default()
|
||||
.as_secs(),
|
||||
};
|
||||
// Add if not already pending
|
||||
if !state
|
||||
.pending_updates
|
||||
.iter()
|
||||
.any(|u| u.id == notification.id)
|
||||
{
|
||||
state.pending_updates.push(notification);
|
||||
let _ = self.save_auto_update_state(&state);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if this is an update (newer version)
|
||||
@@ -362,15 +400,6 @@ impl AutoUpdater {
|
||||
Ok(updated_profiles)
|
||||
}
|
||||
|
||||
/// Check if browser is disabled due to ongoing update
|
||||
pub fn is_browser_disabled(
|
||||
&self,
|
||||
browser: &str,
|
||||
) -> Result<bool, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let state = self.load_auto_update_state()?;
|
||||
Ok(state.disabled_browsers.contains(browser))
|
||||
}
|
||||
|
||||
/// Dismiss update notification
|
||||
pub fn dismiss_update_notification(
|
||||
&self,
|
||||
@@ -457,6 +486,148 @@ impl AutoUpdater {
|
||||
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
/// Get the latest installed version for a browser from the downloaded browsers registry
|
||||
pub fn get_latest_installed_version(&self, browser: &str) -> Option<String> {
|
||||
let registry = crate::downloaded_browsers_registry::DownloadedBrowsersRegistry::instance();
|
||||
let versions = registry.get_downloaded_versions(browser);
|
||||
versions
|
||||
.into_iter()
|
||||
.filter(|v| registry.is_browser_downloaded(browser, v))
|
||||
.max_by(|a, b| self.compare_versions(a, b))
|
||||
}
|
||||
|
||||
/// Update a single profile to the latest installed version for its browser.
|
||||
/// Used when a browser closes to ensure it's on the latest version.
|
||||
pub fn update_profile_to_latest_installed(
|
||||
&self,
|
||||
app_handle: &tauri::AppHandle,
|
||||
profile: &crate::profile::BrowserProfile,
|
||||
) -> Option<crate::profile::BrowserProfile> {
|
||||
let latest = self.get_latest_installed_version(&profile.browser)?;
|
||||
|
||||
if !self.is_version_newer(&latest, &profile.version) {
|
||||
return None;
|
||||
}
|
||||
|
||||
// Only update stable->stable and nightly->nightly
|
||||
let is_profile_nightly =
|
||||
crate::api_client::is_browser_version_nightly(&profile.browser, &profile.version, None);
|
||||
let is_latest_nightly =
|
||||
crate::api_client::is_browser_version_nightly(&profile.browser, &latest, None);
|
||||
if is_profile_nightly != is_latest_nightly {
|
||||
return None;
|
||||
}
|
||||
|
||||
match self
|
||||
.profile_manager
|
||||
.update_profile_version(app_handle, &profile.id.to_string(), &latest)
|
||||
{
|
||||
Ok(updated) => {
|
||||
log::info!(
|
||||
"Updated profile {} from {} {} to latest installed version {}",
|
||||
profile.name,
|
||||
profile.browser,
|
||||
profile.version,
|
||||
latest
|
||||
);
|
||||
Some(updated)
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!(
|
||||
"Failed to update profile {} to latest installed version: {e}",
|
||||
profile.name
|
||||
);
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Update all non-running profiles to the latest installed version for each browser.
|
||||
/// Handles the case where a newer version was downloaded but profiles weren't updated.
|
||||
pub fn update_profiles_to_latest_installed(
|
||||
&self,
|
||||
app_handle: &tauri::AppHandle,
|
||||
) -> Result<Vec<String>, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let registry = crate::downloaded_browsers_registry::DownloadedBrowsersRegistry::instance();
|
||||
let profiles = self
|
||||
.profile_manager
|
||||
.list_profiles()
|
||||
.map_err(|e| format!("Failed to list profiles: {e}"))?;
|
||||
|
||||
let mut all_updated = Vec::new();
|
||||
|
||||
// Group profiles by browser
|
||||
let mut browser_profiles: HashMap<String, Vec<BrowserProfile>> = HashMap::new();
|
||||
for profile in profiles {
|
||||
if profile.is_cross_os() {
|
||||
continue;
|
||||
}
|
||||
browser_profiles
|
||||
.entry(profile.browser.clone())
|
||||
.or_default()
|
||||
.push(profile);
|
||||
}
|
||||
|
||||
for (browser, profiles) in browser_profiles {
|
||||
let installed_versions = registry.get_downloaded_versions(&browser);
|
||||
if installed_versions.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Find the latest installed version that actually exists on disk
|
||||
let latest_installed = installed_versions
|
||||
.iter()
|
||||
.filter(|v| registry.is_browser_downloaded(&browser, v))
|
||||
.max_by(|a, b| self.compare_versions(a, b));
|
||||
|
||||
let latest_version = match latest_installed {
|
||||
Some(v) => v.clone(),
|
||||
None => continue,
|
||||
};
|
||||
|
||||
for profile in profiles {
|
||||
if profile.process_id.is_some() {
|
||||
continue;
|
||||
}
|
||||
|
||||
if !self.is_version_newer(&latest_version, &profile.version) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Only update stable->stable and nightly->nightly
|
||||
let is_profile_nightly =
|
||||
crate::api_client::is_browser_version_nightly(&browser, &profile.version, None);
|
||||
let is_latest_nightly =
|
||||
crate::api_client::is_browser_version_nightly(&browser, &latest_version, None);
|
||||
if is_profile_nightly != is_latest_nightly {
|
||||
continue;
|
||||
}
|
||||
|
||||
match self.profile_manager.update_profile_version(
|
||||
app_handle,
|
||||
&profile.id.to_string(),
|
||||
&latest_version,
|
||||
) {
|
||||
Ok(_) => {
|
||||
log::info!(
|
||||
"Updated profile {} from {} {} to latest installed version {}",
|
||||
profile.name,
|
||||
browser,
|
||||
profile.version,
|
||||
latest_version
|
||||
);
|
||||
all_updated.push(profile.name);
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Failed to update profile {}: {e}", profile.name);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(all_updated)
|
||||
}
|
||||
}
|
||||
|
||||
// Tauri commands
|
||||
@@ -511,6 +682,8 @@ mod tests {
|
||||
version: version.to_string(),
|
||||
process_id: None,
|
||||
proxy_id: None,
|
||||
vpn_id: None,
|
||||
launch_hook: None,
|
||||
last_launch: None,
|
||||
release_type: "stable".to_string(),
|
||||
camoufox_config: None,
|
||||
@@ -518,8 +691,16 @@ mod tests {
|
||||
group_id: None,
|
||||
tags: Vec::new(),
|
||||
note: None,
|
||||
sync_enabled: false,
|
||||
sync_mode: crate::profile::types::SyncMode::Disabled,
|
||||
encryption_salt: None,
|
||||
last_sync: None,
|
||||
host_os: None,
|
||||
ephemeral: false,
|
||||
extension_group_id: None,
|
||||
proxy_bypass_rules: Vec::new(),
|
||||
created_by_id: None,
|
||||
created_by_email: None,
|
||||
dns_blocklist: None,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -16,12 +16,32 @@ use serde::{Deserialize, Serialize};
|
||||
use tao::event::{Event, StartCause};
|
||||
use tao::event_loop::{ControlFlow, EventLoopBuilder};
|
||||
use tokio::runtime::Runtime;
|
||||
use tray_icon::{MouseButton, TrayIcon, TrayIconEvent};
|
||||
use tray_icon::TrayIcon;
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
use tray_icon::{MouseButton, TrayIconEvent};
|
||||
|
||||
use donutbrowser_lib::daemon::{autostart, services, tray};
|
||||
|
||||
static SHOULD_QUIT: AtomicBool = AtomicBool::new(false);
|
||||
|
||||
#[cfg(windows)]
|
||||
fn win_process_exists(pid: u32) -> bool {
|
||||
const PROCESS_QUERY_LIMITED_INFORMATION: u32 = 0x1000;
|
||||
|
||||
extern "system" {
|
||||
fn OpenProcess(dwDesiredAccess: u32, bInheritHandles: i32, dwProcessId: u32) -> *mut ();
|
||||
fn CloseHandle(hObject: *mut ()) -> i32;
|
||||
}
|
||||
|
||||
let handle = unsafe { OpenProcess(PROCESS_QUERY_LIMITED_INFORMATION, 0, pid) };
|
||||
if handle.is_null() {
|
||||
false
|
||||
} else {
|
||||
unsafe { CloseHandle(handle) };
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
enum ServiceStatus {
|
||||
Ready {
|
||||
api_port: Option<u16>,
|
||||
@@ -173,6 +193,41 @@ fn run_daemon() {
|
||||
// Store tray icon in Option - created after event loop starts
|
||||
let mut tray_icon: Option<TrayIcon> = None;
|
||||
|
||||
// Install signal handlers so SIGTERM/SIGINT trigger graceful shutdown
|
||||
#[cfg(unix)]
|
||||
unsafe {
|
||||
extern "C" fn signal_handler(_sig: libc::c_int) {
|
||||
SHOULD_QUIT.store(true, std::sync::atomic::Ordering::SeqCst);
|
||||
}
|
||||
libc::signal(
|
||||
libc::SIGTERM,
|
||||
signal_handler as *const () as libc::sighandler_t,
|
||||
);
|
||||
libc::signal(
|
||||
libc::SIGINT,
|
||||
signal_handler as *const () as libc::sighandler_t,
|
||||
);
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
{
|
||||
extern "system" {
|
||||
fn SetConsoleCtrlHandler(
|
||||
handler: Option<unsafe extern "system" fn(u32) -> i32>,
|
||||
add: i32,
|
||||
) -> i32;
|
||||
}
|
||||
|
||||
unsafe extern "system" fn ctrl_handler(_ctrl_type: u32) -> i32 {
|
||||
SHOULD_QUIT.store(true, std::sync::atomic::Ordering::SeqCst);
|
||||
1 // TRUE
|
||||
}
|
||||
|
||||
unsafe {
|
||||
SetConsoleCtrlHandler(Some(ctrl_handler), 1);
|
||||
}
|
||||
}
|
||||
|
||||
// Run the event loop
|
||||
event_loop.run(move |event, _, control_flow| {
|
||||
// Use WaitUntil to check for menu events periodically while staying low on CPU
|
||||
@@ -222,15 +277,15 @@ fn run_daemon() {
|
||||
|
||||
// Process menu events
|
||||
while let Ok(event) = menu_channel.try_recv() {
|
||||
if event.id == tray_menu.open_item.id() {
|
||||
tray::open_gui();
|
||||
} else if event.id == tray_menu.quit_item.id() {
|
||||
if event.id == tray_menu.quit_item.id() {
|
||||
log::info!("[daemon] Quit requested");
|
||||
SHOULD_QUIT.store(true, Ordering::SeqCst);
|
||||
}
|
||||
}
|
||||
|
||||
// Handle tray icon click (left-click opens the app)
|
||||
// On macOS, left-click already shows the menu, so don't also launch the GUI.
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
while let Ok(event) = TrayIconEvent::receiver().try_recv() {
|
||||
if let TrayIconEvent::Click {
|
||||
button: MouseButton::Left,
|
||||
@@ -243,12 +298,37 @@ fn run_daemon() {
|
||||
|
||||
// Use swap to only run cleanup once
|
||||
if SHOULD_QUIT.swap(false, Ordering::SeqCst) {
|
||||
// Cleanup
|
||||
// Remove tray icon from status bar immediately so the UI feels responsive
|
||||
tray_icon = None;
|
||||
|
||||
tray::quit_gui();
|
||||
|
||||
let mut state = read_state();
|
||||
state.daemon_pid = None;
|
||||
let _ = write_state(&state);
|
||||
log::info!("[daemon] Exiting");
|
||||
*control_flow = ControlFlow::Exit;
|
||||
|
||||
// Use process::exit for immediate termination instead of ControlFlow::Exit.
|
||||
// ControlFlow::Exit can delay because tao's macOS event loop defers exit,
|
||||
// and dropping the tokio runtime blocks until all spawned tasks finish.
|
||||
process::exit(0);
|
||||
}
|
||||
}
|
||||
Event::Reopen { .. } => {
|
||||
tray::open_gui();
|
||||
|
||||
// Re-hide daemon from Dock. macOS activates the daemon (making it
|
||||
// visible) when the user clicks the Dock icon, overriding the
|
||||
// Accessory policy set at init.
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
use objc2::MainThreadMarker;
|
||||
use objc2_app_kit::{NSApplication, NSApplicationActivationPolicy};
|
||||
|
||||
if let Some(mtm) = MainThreadMarker::new() {
|
||||
let app = NSApplication::sharedApplication(mtm);
|
||||
app.setActivationPolicy(NSApplicationActivationPolicy::Accessory);
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
@@ -266,6 +346,32 @@ fn stop_daemon() {
|
||||
let state = read_state();
|
||||
|
||||
if let Some(pid) = state.daemon_pid {
|
||||
// On Windows, taskkill /F kills instantly with no handler, so kill GUI first
|
||||
#[cfg(windows)]
|
||||
{
|
||||
use std::os::windows::process::CommandExt;
|
||||
use std::process::Command;
|
||||
const CREATE_NO_WINDOW: u32 = 0x08000000;
|
||||
|
||||
let state_path = get_state_path();
|
||||
if let Ok(content) = fs::read_to_string(&state_path) {
|
||||
if let Ok(val) = serde_json::from_str::<serde_json::Value>(&content) {
|
||||
if let Some(gui_pid) = val.get("gui_pid").and_then(|v| v.as_u64()) {
|
||||
let _ = Command::new("taskkill")
|
||||
.args(["/PID", &gui_pid.to_string(), "/F"])
|
||||
.creation_flags(CREATE_NO_WINDOW)
|
||||
.output();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let _ = Command::new("taskkill")
|
||||
.args(["/PID", &pid.to_string(), "/F"])
|
||||
.creation_flags(CREATE_NO_WINDOW)
|
||||
.output();
|
||||
eprintln!("Sent stop signal to daemon (PID {})", pid);
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
unsafe {
|
||||
@@ -273,15 +379,6 @@ fn stop_daemon() {
|
||||
}
|
||||
eprintln!("Sent stop signal to daemon (PID {})", pid);
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
{
|
||||
use std::process::Command;
|
||||
let _ = Command::new("taskkill")
|
||||
.args(["/PID", &pid.to_string(), "/F"])
|
||||
.output();
|
||||
eprintln!("Sent stop signal to daemon (PID {})", pid);
|
||||
}
|
||||
} else {
|
||||
eprintln!("Daemon is not running");
|
||||
}
|
||||
@@ -295,15 +392,7 @@ fn show_status() {
|
||||
let is_running = unsafe { libc::kill(pid as i32, 0) == 0 };
|
||||
|
||||
#[cfg(windows)]
|
||||
let is_running = {
|
||||
use std::process::Command;
|
||||
let output = Command::new("tasklist")
|
||||
.args(["/FI", &format!("PID eq {}", pid)])
|
||||
.output();
|
||||
output
|
||||
.map(|o| String::from_utf8_lossy(&o.stdout).contains(&pid.to_string()))
|
||||
.unwrap_or(false)
|
||||
};
|
||||
let is_running = win_process_exists(pid);
|
||||
|
||||
#[cfg(not(any(unix, windows)))]
|
||||
let is_running = false;
|
||||
@@ -357,10 +446,6 @@ fn main() {
|
||||
|
||||
match args[1].as_str() {
|
||||
"start" => {
|
||||
// "start" is now an alias for "run"
|
||||
// On macOS, the daemon should be started via launchctl (see daemon_spawn.rs)
|
||||
// This command is kept for backward compatibility
|
||||
eprintln!("Starting daemon...");
|
||||
run_daemon();
|
||||
}
|
||||
"stop" => {
|
||||
|
||||
@@ -147,6 +147,16 @@ async fn main() {
|
||||
Arg::new("profile-id")
|
||||
.long("profile-id")
|
||||
.help("ID of the profile this proxy is associated with"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("bypass-rules")
|
||||
.long("bypass-rules")
|
||||
.help("JSON array of bypass rules (hostnames, IPs, or regex patterns)"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("blocklist-file")
|
||||
.long("blocklist-file")
|
||||
.help("Path to DNS blocklist file (one domain per line)"),
|
||||
),
|
||||
)
|
||||
.subcommand(
|
||||
@@ -172,6 +182,33 @@ async fn main() {
|
||||
)
|
||||
.arg(Arg::new("action").required(true).help("Action (start)")),
|
||||
)
|
||||
.subcommand(
|
||||
Command::new("vpn-worker")
|
||||
.about("Run a VPN worker process (internal use)")
|
||||
.arg(
|
||||
Arg::new("id")
|
||||
.long("id")
|
||||
.required(true)
|
||||
.help("VPN worker configuration ID"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("port")
|
||||
.long("port")
|
||||
.value_parser(clap::value_parser!(u16))
|
||||
.required(true)
|
||||
.help("Local SOCKS5 port"),
|
||||
)
|
||||
.arg(Arg::new("action").required(true).help("Action (start)")),
|
||||
)
|
||||
.subcommand(
|
||||
Command::new("mcp-bridge")
|
||||
.about("Bridge stdio MCP to a local HTTP MCP server")
|
||||
.arg(
|
||||
Arg::new("url")
|
||||
.required(true)
|
||||
.help("HTTP MCP server URL (e.g. http://127.0.0.1:51080/mcp/TOKEN)"),
|
||||
),
|
||||
)
|
||||
.get_matches();
|
||||
|
||||
if let Some(proxy_matches) = matches.subcommand_matches("proxy") {
|
||||
@@ -199,8 +236,21 @@ async fn main() {
|
||||
|
||||
let port = start_matches.get_one::<u16>("port").copied();
|
||||
let profile_id = start_matches.get_one::<String>("profile-id").cloned();
|
||||
let bypass_rules: Vec<String> = start_matches
|
||||
.get_one::<String>("bypass-rules")
|
||||
.and_then(|s| serde_json::from_str(s).ok())
|
||||
.unwrap_or_default();
|
||||
let blocklist_file = start_matches.get_one::<String>("blocklist-file").cloned();
|
||||
|
||||
match start_proxy_process_with_profile(upstream_url, port, profile_id).await {
|
||||
match start_proxy_process_with_profile(
|
||||
upstream_url,
|
||||
port,
|
||||
profile_id,
|
||||
bypass_rules,
|
||||
blocklist_file,
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(config) => {
|
||||
// Output the configuration as JSON for the Rust side to parse
|
||||
// Use println! here because this needs to go to stdout for parsing
|
||||
@@ -333,6 +383,182 @@ async fn main() {
|
||||
log::error!("Invalid action for proxy-worker. Use 'start'");
|
||||
process::exit(1);
|
||||
}
|
||||
} else if let Some(vpn_matches) = matches.subcommand_matches("vpn-worker") {
|
||||
let id = vpn_matches.get_one::<String>("id").expect("id is required");
|
||||
let action = vpn_matches
|
||||
.get_one::<String>("action")
|
||||
.expect("action is required");
|
||||
let port = *vpn_matches
|
||||
.get_one::<u16>("port")
|
||||
.expect("port is required");
|
||||
|
||||
if action == "start" {
|
||||
set_high_priority();
|
||||
|
||||
log::info!("VPN worker starting, config id: {}", id);
|
||||
log::info!("Process PID: {}", std::process::id());
|
||||
|
||||
// Retry config loading to handle file system race condition
|
||||
let config = {
|
||||
let mut attempts = 0;
|
||||
loop {
|
||||
if let Some(config) = donutbrowser_lib::vpn_worker_storage::get_vpn_worker_config(id) {
|
||||
log::info!(
|
||||
"Found VPN worker config: id={}, vpn_type={}, vpn_id={}",
|
||||
config.id,
|
||||
config.vpn_type,
|
||||
config.vpn_id
|
||||
);
|
||||
break config;
|
||||
}
|
||||
attempts += 1;
|
||||
if attempts >= 10 {
|
||||
log::error!(
|
||||
"VPN worker configuration {} not found after {} attempts",
|
||||
id,
|
||||
attempts
|
||||
);
|
||||
process::exit(1);
|
||||
}
|
||||
log::info!(
|
||||
"VPN worker config {} not found yet, retrying ({}/10)...",
|
||||
id,
|
||||
attempts
|
||||
);
|
||||
std::thread::sleep(std::time::Duration::from_millis(50));
|
||||
}
|
||||
};
|
||||
|
||||
// Read the decrypted VPN config from the temp file
|
||||
let vpn_config_data = match std::fs::read_to_string(&config.config_file_path) {
|
||||
Ok(data) => data,
|
||||
Err(e) => {
|
||||
log::error!(
|
||||
"Failed to read VPN config file {}: {}",
|
||||
config.config_file_path,
|
||||
e
|
||||
);
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
match config.vpn_type.as_str() {
|
||||
"wireguard" => {
|
||||
let wg_config = match donutbrowser_lib::vpn::parse_wireguard_config(&vpn_config_data) {
|
||||
Ok(c) => c,
|
||||
Err(e) => {
|
||||
log::error!("Failed to parse WireGuard config: {}", e);
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
let server =
|
||||
donutbrowser_lib::vpn::socks5_server::WireGuardSocks5Server::new(wg_config, port);
|
||||
if let Err(e) = server.run(id.clone()).await {
|
||||
log::error!("VPN worker failed: {}", e);
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
"openvpn" => {
|
||||
let ovpn_config = match donutbrowser_lib::vpn::parse_openvpn_config(&vpn_config_data) {
|
||||
Ok(c) => c,
|
||||
Err(e) => {
|
||||
log::error!("Failed to parse OpenVPN config: {}", e);
|
||||
process::exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
let server =
|
||||
donutbrowser_lib::vpn::openvpn_socks5::OpenVpnSocks5Server::new(ovpn_config, port);
|
||||
if let Err(e) = server.run(id.clone()).await {
|
||||
log::error!("VPN worker failed: {}", e);
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
other => {
|
||||
log::error!("Unknown VPN type: {}", other);
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log::error!("Invalid action for vpn-worker. Use 'start'");
|
||||
process::exit(1);
|
||||
}
|
||||
} else if let Some(bridge_matches) = matches.subcommand_matches("mcp-bridge") {
|
||||
let url = bridge_matches
|
||||
.get_one::<String>("url")
|
||||
.expect("url is required")
|
||||
.clone();
|
||||
|
||||
// Suppress debug logging for bridge mode — stderr noise confuses MCP clients
|
||||
log::set_max_level(log::LevelFilter::Warn);
|
||||
|
||||
// stdio↔HTTP MCP bridge: translates stdio JSON-RPC to Streamable HTTP transport
|
||||
let client = reqwest::Client::new();
|
||||
let stdin = tokio::io::stdin();
|
||||
let reader = tokio::io::BufReader::new(stdin);
|
||||
let mut session_id: Option<String> = None;
|
||||
|
||||
use tokio::io::{AsyncBufReadExt, AsyncWriteExt};
|
||||
let mut lines = reader.lines();
|
||||
let mut stdout = tokio::io::stdout();
|
||||
|
||||
while let Ok(Some(line)) = lines.next_line().await {
|
||||
if line.trim().is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if this is a notification (no "id" field) to handle 202 responses
|
||||
let is_notification = serde_json::from_str::<serde_json::Value>(&line)
|
||||
.ok()
|
||||
.map(|v| v.get("id").is_none() || v["id"].is_null())
|
||||
.unwrap_or(false);
|
||||
|
||||
let mut req = client
|
||||
.post(&url)
|
||||
.header("Content-Type", "application/json")
|
||||
.header("Accept", "application/json");
|
||||
|
||||
if let Some(sid) = &session_id {
|
||||
req = req.header("mcp-session-id", sid);
|
||||
}
|
||||
|
||||
match req.body(line).send().await {
|
||||
Ok(resp) => {
|
||||
// Capture session ID from initialize response
|
||||
if let Some(sid) = resp.headers().get("mcp-session-id") {
|
||||
if let Ok(s) = sid.to_str() {
|
||||
session_id = Some(s.to_string());
|
||||
}
|
||||
}
|
||||
|
||||
// Notifications return 202 with no body — don't write anything
|
||||
if is_notification {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Ok(body) = resp.text().await {
|
||||
if !body.is_empty() {
|
||||
let _ = stdout.write_all(body.as_bytes()).await;
|
||||
let _ = stdout.write_all(b"\n").await;
|
||||
let _ = stdout.flush().await;
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
if !is_notification {
|
||||
let err = serde_json::json!({
|
||||
"jsonrpc": "2.0",
|
||||
"id": null,
|
||||
"error": {"code": -32000, "message": format!("HTTP error: {e}")},
|
||||
});
|
||||
let _ = stdout.write_all(err.to_string().as_bytes()).await;
|
||||
let _ = stdout.write_all(b"\n").await;
|
||||
let _ = stdout.flush().await;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
log::error!("No command specified");
|
||||
process::exit(1);
|
||||
|
||||
@@ -685,7 +685,7 @@ impl BrowserVersionManager {
|
||||
"macos-arm64" | "macos-x64" => (format!("wayfern-{version}-{platform_key}.dmg"), true),
|
||||
"linux-x64" | "linux-arm64" => (format!("wayfern-{version}-{platform_key}.tar.xz"), true),
|
||||
"windows-x64" | "windows-arm64" => {
|
||||
(format!("wayfern-{version}-{platform_key}.exe"), false)
|
||||
(format!("wayfern-{version}-{platform_key}.zip"), true)
|
||||
}
|
||||
_ => {
|
||||
return Err(
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
//!
|
||||
//! Converts fingerprints to Camoufox configuration format and builds launch options.
|
||||
|
||||
use rand::Rng;
|
||||
use rand::RngExt;
|
||||
use serde_yaml;
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
@@ -425,8 +425,28 @@ impl CamoufoxConfigBuilder {
|
||||
/// Build the complete Camoufox launch configuration with async geolocation support.
|
||||
/// This method should be used when geoip option is set to Auto.
|
||||
pub async fn build_async(self) -> Result<CamoufoxLaunchConfig, ConfigError> {
|
||||
// Get proxy URL for IP detection if set
|
||||
let proxy_url = self.proxy.as_ref().map(|p| p.server.clone());
|
||||
// Get full proxy URL (with credentials) for IP detection
|
||||
let proxy_url = self.proxy.as_ref().map(|p| {
|
||||
if let (Some(user), Some(pass)) = (&p.username, &p.password) {
|
||||
// Reconstruct URL with credentials: scheme://user:pass@host:port
|
||||
if let Ok(mut parsed) = url::Url::parse(&p.server) {
|
||||
let _ = parsed.set_username(user);
|
||||
let _ = parsed.set_password(Some(pass));
|
||||
parsed.to_string()
|
||||
} else {
|
||||
p.server.clone()
|
||||
}
|
||||
} else if let Some(user) = &p.username {
|
||||
if let Ok(mut parsed) = url::Url::parse(&p.server) {
|
||||
let _ = parsed.set_username(user);
|
||||
parsed.to_string()
|
||||
} else {
|
||||
p.server.clone()
|
||||
}
|
||||
} else {
|
||||
p.server.clone()
|
||||
}
|
||||
});
|
||||
let geoip_option = self.geoip.clone();
|
||||
let block_webrtc = self.block_webrtc;
|
||||
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
//!
|
||||
//! Implements weighted random sampling from conditional probability distributions.
|
||||
|
||||
use rand::Rng;
|
||||
use rand::RngExt;
|
||||
use serde::Deserialize;
|
||||
use std::collections::HashMap;
|
||||
|
||||
|
||||
@@ -9,7 +9,7 @@ use directories::BaseDirs;
|
||||
use maxminddb::{geoip2, Reader};
|
||||
use quick_xml::events::Event;
|
||||
use quick_xml::Reader as XmlReader;
|
||||
use rand::Rng;
|
||||
use rand::RngExt;
|
||||
use std::collections::HashMap;
|
||||
use std::net::IpAddr;
|
||||
use std::path::PathBuf;
|
||||
@@ -267,6 +267,7 @@ impl Default for LocaleSelector {
|
||||
}
|
||||
|
||||
/// Normalize a locale string to standard format.
|
||||
/// Handles formats like "en-US", "zh-Hant-US", "zh-Hans-CN".
|
||||
fn normalize_locale(locale: &str) -> Locale {
|
||||
let parts: Vec<&str> = locale.split('-').collect();
|
||||
|
||||
@@ -275,23 +276,33 @@ fn normalize_locale(locale: &str) -> Locale {
|
||||
.map(|s| s.to_lowercase())
|
||||
.unwrap_or_else(|| "en".to_string());
|
||||
|
||||
let region = parts.get(1).map(|s| s.to_uppercase());
|
||||
// A 4-letter part is a script subtag (e.g. "Hant", "Hans", "Cyrl").
|
||||
// A 2-letter or 3-digit part is a region subtag (e.g. "US", "CN").
|
||||
let mut explicit_script: Option<String> = None;
|
||||
let mut region: Option<String> = None;
|
||||
|
||||
// Determine script based on language if needed
|
||||
let script = match language.as_str() {
|
||||
"zh" => {
|
||||
// Chinese - Traditional for TW/HK, Simplified otherwise
|
||||
if region.as_deref() == Some("TW") || region.as_deref() == Some("HK") {
|
||||
Some("Hant".to_string())
|
||||
} else {
|
||||
Some("Hans".to_string())
|
||||
for part in parts.iter().skip(1) {
|
||||
if part.len() == 4 && part.chars().all(|c| c.is_ascii_alphabetic()) {
|
||||
explicit_script = Some(part[..1].to_uppercase() + &part[1..].to_lowercase());
|
||||
} else {
|
||||
region = Some(part.to_uppercase());
|
||||
}
|
||||
}
|
||||
|
||||
let script = if explicit_script.is_some() {
|
||||
explicit_script
|
||||
} else {
|
||||
match language.as_str() {
|
||||
"zh" => {
|
||||
if region.as_deref() == Some("TW") || region.as_deref() == Some("HK") {
|
||||
Some("Hant".to_string())
|
||||
} else {
|
||||
Some("Hans".to_string())
|
||||
}
|
||||
}
|
||||
"sr" => Some("Cyrl".to_string()),
|
||||
_ => None,
|
||||
}
|
||||
"sr" => {
|
||||
// Serbian - can be Cyrillic or Latin
|
||||
Some("Cyrl".to_string())
|
||||
}
|
||||
_ => None,
|
||||
};
|
||||
|
||||
Locale {
|
||||
@@ -442,5 +453,16 @@ mod tests {
|
||||
|
||||
let zh_cn = normalize_locale("zh-CN");
|
||||
assert_eq!(zh_cn.script, Some("Hans".to_string()));
|
||||
|
||||
// 3-part locale: language-script-region
|
||||
let zh_hant_us = normalize_locale("zh-Hant-US");
|
||||
assert_eq!(zh_hant_us.language, "zh");
|
||||
assert_eq!(zh_hant_us.region, Some("US".to_string()));
|
||||
assert_eq!(zh_hant_us.script, Some("Hant".to_string()));
|
||||
|
||||
let zh_hans_us = normalize_locale("zh-Hans-US");
|
||||
assert_eq!(zh_hans_us.language, "zh");
|
||||
assert_eq!(zh_hans_us.region, Some("US".to_string()));
|
||||
assert_eq!(zh_hans_us.script, Some("Hans".to_string()));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
//!
|
||||
//! Samples realistic WebGL configurations based on OS-specific probability distributions.
|
||||
|
||||
use rand::Rng;
|
||||
use rand::RngExt;
|
||||
use rusqlite::{Connection, Result as SqliteResult};
|
||||
use std::collections::HashMap;
|
||||
use std::io::Write;
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
use crate::browser_runner::BrowserRunner;
|
||||
use crate::camoufox::{CamoufoxConfigBuilder, GeoIPOption, ScreenConstraints};
|
||||
use crate::profile::BrowserProfile;
|
||||
use directories::BaseDirs;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
@@ -22,7 +21,6 @@ pub struct CamoufoxConfig {
|
||||
pub block_images: Option<bool>,
|
||||
pub block_webrtc: Option<bool>,
|
||||
pub block_webgl: Option<bool>,
|
||||
pub executable_path: Option<String>,
|
||||
pub fingerprint: Option<String>, // JSON string of the complete fingerprint config
|
||||
pub randomize_fingerprint_on_launch: Option<bool>, // Generate new fingerprint on every launch
|
||||
pub os: Option<String>, // Operating system for fingerprint generation: "windows", "macos", or "linux"
|
||||
@@ -40,7 +38,6 @@ impl Default for CamoufoxConfig {
|
||||
block_images: None,
|
||||
block_webrtc: None,
|
||||
block_webgl: None,
|
||||
executable_path: None,
|
||||
fingerprint: None,
|
||||
randomize_fingerprint_on_launch: None,
|
||||
os: None,
|
||||
@@ -57,6 +54,7 @@ pub struct CamoufoxLaunchResult {
|
||||
#[serde(alias = "profile_path")]
|
||||
pub profilePath: Option<String>,
|
||||
pub url: Option<String>,
|
||||
pub cdp_port: Option<u16>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
@@ -66,6 +64,7 @@ struct CamoufoxInstance {
|
||||
process_id: Option<u32>,
|
||||
profile_path: Option<String>,
|
||||
url: Option<String>,
|
||||
cdp_port: Option<u16>,
|
||||
}
|
||||
|
||||
struct CamoufoxManagerInner {
|
||||
@@ -74,7 +73,6 @@ struct CamoufoxManagerInner {
|
||||
|
||||
pub struct CamoufoxManager {
|
||||
inner: Arc<AsyncMutex<CamoufoxManagerInner>>,
|
||||
base_dirs: BaseDirs,
|
||||
}
|
||||
|
||||
impl CamoufoxManager {
|
||||
@@ -83,7 +81,6 @@ impl CamoufoxManager {
|
||||
inner: Arc::new(AsyncMutex::new(CamoufoxManagerInner {
|
||||
instances: HashMap::new(),
|
||||
})),
|
||||
base_dirs: BaseDirs::new().expect("Failed to get base directories"),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -91,15 +88,35 @@ impl CamoufoxManager {
|
||||
&CAMOUFOX_LAUNCHER
|
||||
}
|
||||
|
||||
async fn find_free_port() -> Result<u16, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let listener = tokio::net::TcpListener::bind("127.0.0.1:0").await?;
|
||||
let port = listener.local_addr()?.port();
|
||||
drop(listener);
|
||||
Ok(port)
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
pub async fn get_cdp_port(&self, profile_path: &str) -> Option<u16> {
|
||||
let inner = self.inner.lock().await;
|
||||
let target_path = std::path::Path::new(profile_path)
|
||||
.canonicalize()
|
||||
.unwrap_or_else(|_| std::path::Path::new(profile_path).to_path_buf());
|
||||
|
||||
for instance in inner.instances.values() {
|
||||
if let Some(path) = &instance.profile_path {
|
||||
let instance_path = std::path::Path::new(path)
|
||||
.canonicalize()
|
||||
.unwrap_or_else(|_| std::path::Path::new(path).to_path_buf());
|
||||
if instance_path == target_path {
|
||||
return instance.cdp_port;
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
pub fn get_profiles_dir(&self) -> PathBuf {
|
||||
let mut path = self.base_dirs.data_local_dir().to_path_buf();
|
||||
path.push(if cfg!(debug_assertions) {
|
||||
"DonutBrowserDev"
|
||||
} else {
|
||||
"DonutBrowser"
|
||||
});
|
||||
path.push("profiles");
|
||||
path
|
||||
crate::app_dirs::profiles_dir()
|
||||
}
|
||||
|
||||
/// Generate Camoufox fingerprint configuration during profile creation
|
||||
@@ -110,13 +127,9 @@ impl CamoufoxManager {
|
||||
config: &CamoufoxConfig,
|
||||
) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Get executable path
|
||||
let executable_path = if let Some(path) = &config.executable_path {
|
||||
PathBuf::from(path)
|
||||
} else {
|
||||
BrowserRunner::instance()
|
||||
.get_browser_executable_path(profile)
|
||||
.map_err(|e| format!("Failed to get Camoufox executable path: {e}"))?
|
||||
};
|
||||
let executable_path = BrowserRunner::instance()
|
||||
.get_browser_executable_path(profile)
|
||||
.map_err(|e| format!("Failed to get Camoufox executable path: {e}"))?;
|
||||
|
||||
// Build the config using CamoufoxConfigBuilder
|
||||
let mut builder = CamoufoxConfigBuilder::new()
|
||||
@@ -203,13 +216,9 @@ impl CamoufoxManager {
|
||||
};
|
||||
|
||||
// Get executable path
|
||||
let executable_path = if let Some(path) = &config.executable_path {
|
||||
PathBuf::from(path)
|
||||
} else {
|
||||
BrowserRunner::instance()
|
||||
.get_browser_executable_path(profile)
|
||||
.map_err(|e| format!("Failed to get Camoufox executable path: {e}"))?
|
||||
};
|
||||
let executable_path = BrowserRunner::instance()
|
||||
.get_browser_executable_path(profile)
|
||||
.map_err(|e| format!("Failed to get Camoufox executable path: {e}"))?;
|
||||
|
||||
// Parse the fingerprint config JSON
|
||||
let fingerprint_config: HashMap<String, serde_json::Value> =
|
||||
@@ -233,6 +242,9 @@ impl CamoufoxManager {
|
||||
.to_string(),
|
||||
];
|
||||
|
||||
let cdp_port = Self::find_free_port().await?;
|
||||
args.push(format!("--remote-debugging-port={cdp_port}"));
|
||||
|
||||
// Add URL if provided
|
||||
if let Some(url) = url {
|
||||
args.push("-new-tab".to_string());
|
||||
@@ -288,6 +300,7 @@ impl CamoufoxManager {
|
||||
process_id,
|
||||
profile_path: Some(profile_path.to_string()),
|
||||
url: url.map(String::from),
|
||||
cdp_port: Some(cdp_port),
|
||||
};
|
||||
|
||||
let launch_result = CamoufoxLaunchResult {
|
||||
@@ -295,6 +308,7 @@ impl CamoufoxManager {
|
||||
processId: process_id,
|
||||
profilePath: Some(profile_path.to_string()),
|
||||
url: url.map(String::from),
|
||||
cdp_port: Some(cdp_port),
|
||||
};
|
||||
|
||||
{
|
||||
@@ -360,8 +374,11 @@ impl CamoufoxManager {
|
||||
|
||||
#[cfg(windows)]
|
||||
{
|
||||
use std::os::windows::process::CommandExt;
|
||||
const CREATE_NO_WINDOW: u32 = 0x08000000;
|
||||
let result = std::process::Command::new("taskkill")
|
||||
.args(["/PID", &pid.to_string(), "/T"])
|
||||
.creation_flags(CREATE_NO_WINDOW)
|
||||
.status();
|
||||
|
||||
match result {
|
||||
@@ -409,6 +426,7 @@ impl CamoufoxManager {
|
||||
processId: instance.process_id,
|
||||
profilePath: instance.profile_path.clone(),
|
||||
url: instance.url.clone(),
|
||||
cdp_port: instance.cdp_port,
|
||||
}));
|
||||
}
|
||||
}
|
||||
@@ -419,7 +437,9 @@ impl CamoufoxManager {
|
||||
|
||||
// If not found in in-memory instances, scan system processes
|
||||
// This handles the case where the app was restarted but Camoufox is still running
|
||||
if let Some((pid, found_profile_path)) = self.find_camoufox_process_by_profile(&target_path) {
|
||||
if let Some((pid, found_profile_path, cdp_port)) =
|
||||
self.find_camoufox_process_by_profile(&target_path)
|
||||
{
|
||||
log::info!(
|
||||
"Found running Camoufox process (PID: {}) for profile path via system scan",
|
||||
pid
|
||||
@@ -435,6 +455,7 @@ impl CamoufoxManager {
|
||||
process_id: Some(pid),
|
||||
profile_path: Some(found_profile_path.clone()),
|
||||
url: None,
|
||||
cdp_port,
|
||||
},
|
||||
);
|
||||
|
||||
@@ -443,6 +464,7 @@ impl CamoufoxManager {
|
||||
processId: Some(pid),
|
||||
profilePath: Some(found_profile_path),
|
||||
url: None,
|
||||
cdp_port,
|
||||
}));
|
||||
}
|
||||
|
||||
@@ -453,7 +475,7 @@ impl CamoufoxManager {
|
||||
fn find_camoufox_process_by_profile(
|
||||
&self,
|
||||
target_path: &std::path::Path,
|
||||
) -> Option<(u32, String)> {
|
||||
) -> Option<(u32, String, Option<u16>)> {
|
||||
use sysinfo::{ProcessRefreshKind, RefreshKind, System};
|
||||
|
||||
let system = System::new_with_specifics(
|
||||
@@ -478,6 +500,10 @@ impl CamoufoxManager {
|
||||
continue;
|
||||
}
|
||||
|
||||
let mut matched = false;
|
||||
let mut found_profile_path = None;
|
||||
let mut cdp_port: Option<u16> = None;
|
||||
|
||||
// Check if the command line contains our profile path
|
||||
for (i, arg) in cmd.iter().enumerate() {
|
||||
if let Some(arg_str) = arg.to_str() {
|
||||
@@ -489,15 +515,27 @@ impl CamoufoxManager {
|
||||
.unwrap_or_else(|_| std::path::Path::new(next_arg).to_path_buf());
|
||||
|
||||
if cmd_path == target_path {
|
||||
return Some((pid.as_u32(), next_arg.to_string()));
|
||||
matched = true;
|
||||
found_profile_path = Some(next_arg.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Also check if the argument contains the profile path directly
|
||||
if arg_str.contains(&*target_path_str) {
|
||||
return Some((pid.as_u32(), target_path_str.to_string()));
|
||||
if !matched && arg_str.contains(&*target_path_str) {
|
||||
matched = true;
|
||||
found_profile_path = Some(target_path_str.to_string());
|
||||
}
|
||||
|
||||
if let Some(port_val) = arg_str.strip_prefix("--remote-debugging-port=") {
|
||||
cdp_port = port_val.parse().ok();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if matched {
|
||||
if let Some(profile_path) = found_profile_path {
|
||||
return Some((pid.as_u32(), profile_path, cdp_port));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -548,9 +586,11 @@ impl CamoufoxManager {
|
||||
/// Check if a Camoufox server is running with the given process ID
|
||||
async fn is_server_running(&self, process_id: u32) -> bool {
|
||||
// Check if the process is still running
|
||||
use sysinfo::{Pid, System};
|
||||
use sysinfo::{Pid, ProcessRefreshKind, RefreshKind, System};
|
||||
|
||||
let system = System::new_all();
|
||||
let system = System::new_with_specifics(
|
||||
RefreshKind::nothing().with_processes(ProcessRefreshKind::everything()),
|
||||
);
|
||||
if let Some(process) = system.process(Pid::from(process_id as usize)) {
|
||||
// Check if this is actually a Camoufox process by looking at the command line
|
||||
let cmd = process.cmd();
|
||||
@@ -576,10 +616,15 @@ impl CamoufoxManager {
|
||||
profile: BrowserProfile,
|
||||
config: CamoufoxConfig,
|
||||
url: Option<String>,
|
||||
override_profile_path: Option<std::path::PathBuf>,
|
||||
) -> Result<CamoufoxLaunchResult, String> {
|
||||
// Get profile path
|
||||
let profiles_dir = self.get_profiles_dir();
|
||||
let profile_path = profile.get_profile_data_path(&profiles_dir);
|
||||
let profile_path = if let Some(ref override_path) = override_profile_path {
|
||||
override_path.clone()
|
||||
} else {
|
||||
let profiles_dir = self.get_profiles_dir();
|
||||
profile.get_profile_data_path(&profiles_dir)
|
||||
};
|
||||
let profile_path_str = profile_path.to_string_lossy();
|
||||
|
||||
// Check if there's already a running instance for this profile
|
||||
@@ -591,6 +636,29 @@ impl CamoufoxManager {
|
||||
// Clean up any dead instances before launching
|
||||
let _ = self.cleanup_dead_instances().await;
|
||||
|
||||
// For ephemeral profiles, write Firefox prefs to minimize disk writes
|
||||
if override_profile_path.is_some() {
|
||||
let user_js_path = profile_path.join("user.js");
|
||||
let prefs = concat!(
|
||||
"user_pref(\"browser.cache.disk.enable\", false);\n",
|
||||
"user_pref(\"browser.cache.memory.enable\", true);\n",
|
||||
"user_pref(\"browser.sessionstore.resume_from_crash\", false);\n",
|
||||
"user_pref(\"browser.sessionstore.max_tabs_undo\", 0);\n",
|
||||
"user_pref(\"browser.sessionstore.max_windows_undo\", 0);\n",
|
||||
"user_pref(\"places.history.enabled\", false);\n",
|
||||
"user_pref(\"browser.formfill.enable\", false);\n",
|
||||
"user_pref(\"signon.rememberSignons\", false);\n",
|
||||
"user_pref(\"browser.bookmarks.max_backups\", 0);\n",
|
||||
"user_pref(\"browser.shell.checkDefaultBrowser\", false);\n",
|
||||
"user_pref(\"toolkit.crashreporter.enabled\", false);\n",
|
||||
"user_pref(\"browser.pagethumbnails.capturing_disabled\", true);\n",
|
||||
"user_pref(\"browser.download.manager.addToRecentDocs\", false);\n",
|
||||
);
|
||||
if let Err(e) = std::fs::write(&user_js_path, prefs) {
|
||||
log::warn!("Failed to write ephemeral user.js: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
self
|
||||
.launch_camoufox(
|
||||
&app_handle,
|
||||
|
||||
@@ -37,6 +37,14 @@ pub struct CloudUser {
|
||||
pub proxy_bandwidth_limit_mb: i64,
|
||||
#[serde(rename = "proxyBandwidthUsedMb")]
|
||||
pub proxy_bandwidth_used_mb: i64,
|
||||
#[serde(rename = "proxyBandwidthExtraMb", default)]
|
||||
pub proxy_bandwidth_extra_mb: i64,
|
||||
#[serde(rename = "teamId", default)]
|
||||
pub team_id: Option<String>,
|
||||
#[serde(rename = "teamName", default)]
|
||||
pub team_name: Option<String>,
|
||||
#[serde(rename = "teamRole", default)]
|
||||
pub team_role: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
@@ -73,6 +81,14 @@ struct SyncTokenResponse {
|
||||
sync_token: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct WayfernTokenResponse {
|
||||
token: String,
|
||||
#[serde(rename = "expiresIn")]
|
||||
#[allow(dead_code)]
|
||||
expires_in: u64,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct LocationItem {
|
||||
pub code: String,
|
||||
@@ -97,6 +113,7 @@ pub struct CloudAuthManager {
|
||||
client: Client,
|
||||
state: Mutex<Option<CloudAuthState>>,
|
||||
refresh_lock: tokio::sync::Mutex<()>,
|
||||
wayfern_token: Mutex<Option<String>>,
|
||||
}
|
||||
|
||||
lazy_static! {
|
||||
@@ -110,6 +127,7 @@ impl CloudAuthManager {
|
||||
client: Client::new(),
|
||||
state: Mutex::new(state),
|
||||
refresh_lock: tokio::sync::Mutex::new(()),
|
||||
wayfern_token: Mutex::new(None),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -247,7 +265,7 @@ impl CloudAuthManager {
|
||||
Self::encrypt_and_store(&path, b"DBCAT", token)
|
||||
}
|
||||
|
||||
fn load_access_token() -> Result<Option<String>, String> {
|
||||
pub(crate) fn load_access_token() -> Result<Option<String>, String> {
|
||||
let path = Self::get_settings_dir().join("cloud_access_token.dat");
|
||||
Self::decrypt_from_file(&path, b"DBCAT")
|
||||
}
|
||||
@@ -344,12 +362,12 @@ impl CloudAuthManager {
|
||||
|
||||
// --- API methods ---
|
||||
|
||||
pub async fn request_otp(&self, email: &str) -> Result<String, String> {
|
||||
pub async fn request_otp(&self, email: &str, captcha_token: &str) -> Result<String, String> {
|
||||
let url = format!("{CLOUD_API_URL}/api/auth/otp/request");
|
||||
let response = self
|
||||
.client
|
||||
.post(&url)
|
||||
.json(&serde_json::json!({ "email": email }))
|
||||
.json(&serde_json::json!({ "email": email, "captchaToken": captcha_token }))
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to request OTP: {e}"))?;
|
||||
@@ -570,6 +588,12 @@ impl CloudAuthManager {
|
||||
}
|
||||
|
||||
pub async fn logout(&self) -> Result<(), String> {
|
||||
// Clear wayfern token
|
||||
self.clear_wayfern_token().await;
|
||||
|
||||
// Disconnect profile lock manager
|
||||
crate::team_lock::PROFILE_LOCK.disconnect().await;
|
||||
|
||||
// Try to call the logout API (best-effort)
|
||||
if let Ok(Some(access_token)) = Self::load_access_token() {
|
||||
let refresh_token = Self::load_refresh_token().ok().flatten();
|
||||
@@ -602,11 +626,46 @@ impl CloudAuthManager {
|
||||
pub async fn has_active_paid_subscription(&self) -> bool {
|
||||
let state = self.state.lock().await;
|
||||
match &*state {
|
||||
Some(auth) => auth.user.plan != "free" && auth.user.subscription_status == "active",
|
||||
Some(auth) => {
|
||||
auth.user.plan != "free"
|
||||
&& (auth.user.subscription_status == "active"
|
||||
|| auth.user.plan_period.as_deref() == Some("lifetime"))
|
||||
}
|
||||
None => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Non-async version that uses try_lock, defaults to false if lock can't be acquired.
|
||||
pub fn has_active_paid_subscription_sync(&self) -> bool {
|
||||
match self.state.try_lock() {
|
||||
Ok(state) => match &*state {
|
||||
Some(auth) => {
|
||||
auth.user.plan != "free"
|
||||
&& (auth.user.subscription_status == "active"
|
||||
|| auth.user.plan_period.as_deref() == Some("lifetime"))
|
||||
}
|
||||
None => false,
|
||||
},
|
||||
Err(_) => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn is_fingerprint_os_allowed(&self, fingerprint_os: Option<&str>) -> bool {
|
||||
let host_os = crate::profile::types::get_host_os();
|
||||
match fingerprint_os {
|
||||
None => true,
|
||||
Some(os) if os == host_os => true,
|
||||
Some(_) => self.has_active_paid_subscription().await,
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn is_on_team_plan(&self) -> bool {
|
||||
if let Some(state) = self.get_user().await {
|
||||
return state.user.team_id.is_some();
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
pub async fn get_user(&self) -> Option<CloudAuthState> {
|
||||
let state = self.state.lock().await;
|
||||
state.clone()
|
||||
@@ -620,7 +679,7 @@ impl CloudAuthManager {
|
||||
|
||||
/// API call with 401 retry: if first attempt gets 401, refresh access token and retry once.
|
||||
/// Uses refresh_lock to prevent concurrent token rotations from racing.
|
||||
async fn api_call_with_retry<F, Fut, T>(&self, make_request: F) -> Result<T, String>
|
||||
pub async fn api_call_with_retry<F, Fut, T>(&self, make_request: F) -> Result<T, String>
|
||||
where
|
||||
F: Fn(String) -> Fut + Send,
|
||||
Fut: std::future::Future<Output = Result<T, String>> + Send,
|
||||
@@ -651,11 +710,12 @@ impl CloudAuthManager {
|
||||
|
||||
/// Fetch proxy configuration from the cloud backend
|
||||
async fn fetch_proxy_config(&self) -> Result<Option<CloudProxyConfigResponse>, String> {
|
||||
// Check cached user state for proxy bandwidth
|
||||
// Check cached user state for proxy bandwidth (subscription or extra)
|
||||
{
|
||||
let state = self.state.lock().await;
|
||||
match &*state {
|
||||
Some(auth) if auth.user.proxy_bandwidth_limit_mb > 0 => {}
|
||||
Some(auth)
|
||||
if auth.user.proxy_bandwidth_limit_mb > 0 || auth.user.proxy_bandwidth_extra_mb > 0 => {}
|
||||
_ => return Ok(None),
|
||||
}
|
||||
}
|
||||
@@ -794,13 +854,13 @@ impl CloudAuthManager {
|
||||
.await
|
||||
}
|
||||
|
||||
/// Fetch state list for a country from the cloud backend
|
||||
pub async fn fetch_states(&self, country: &str) -> Result<Vec<LocationItem>, String> {
|
||||
/// Fetch region list for a country from the cloud backend
|
||||
pub async fn fetch_regions(&self, country: &str) -> Result<Vec<LocationItem>, String> {
|
||||
let country = country.to_string();
|
||||
self
|
||||
.api_call_with_retry(move |access_token| {
|
||||
let url = format!(
|
||||
"{CLOUD_API_URL}/api/proxy/locations/states?country={}",
|
||||
"{CLOUD_API_URL}/api/proxy/locations/regions?country={}",
|
||||
country
|
||||
);
|
||||
let client = reqwest::Client::new();
|
||||
@@ -810,37 +870,40 @@ impl CloudAuthManager {
|
||||
.header("Authorization", format!("Bearer {access_token}"))
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to fetch states: {e}"))?;
|
||||
.map_err(|e| format!("Failed to fetch regions: {e}"))?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
let status = response.status();
|
||||
let body = response.text().await.unwrap_or_default();
|
||||
return Err(format!("States fetch failed ({status}): {body}"));
|
||||
return Err(format!("Regions fetch failed ({status}): {body}"));
|
||||
}
|
||||
|
||||
response
|
||||
.json::<Vec<LocationItem>>()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to parse states: {e}"))
|
||||
.map_err(|e| format!("Failed to parse regions: {e}"))
|
||||
}
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
/// Fetch city list for a country+state from the cloud backend
|
||||
/// Fetch city list for a country, optionally filtered by region
|
||||
pub async fn fetch_cities(
|
||||
&self,
|
||||
country: &str,
|
||||
state: &str,
|
||||
region: Option<&str>,
|
||||
) -> Result<Vec<LocationItem>, String> {
|
||||
let country = country.to_string();
|
||||
let state = state.to_string();
|
||||
let region = region.map(|s| s.to_string());
|
||||
self
|
||||
.api_call_with_retry(move |access_token| {
|
||||
let url = format!(
|
||||
"{CLOUD_API_URL}/api/proxy/locations/cities?country={}&state={}",
|
||||
country, state
|
||||
let mut url = format!(
|
||||
"{CLOUD_API_URL}/api/proxy/locations/cities?country={}",
|
||||
country
|
||||
);
|
||||
if let Some(ref r) = region {
|
||||
url.push_str(&format!("®ion={}", r));
|
||||
}
|
||||
let client = reqwest::Client::new();
|
||||
async move {
|
||||
let response = client
|
||||
@@ -865,8 +928,108 @@ impl CloudAuthManager {
|
||||
.await
|
||||
}
|
||||
|
||||
/// Fetch ISP list for a country, optionally filtered by region and city
|
||||
pub async fn fetch_isps(
|
||||
&self,
|
||||
country: &str,
|
||||
region: Option<&str>,
|
||||
city: Option<&str>,
|
||||
) -> Result<Vec<LocationItem>, String> {
|
||||
let country = country.to_string();
|
||||
let region = region.map(|s| s.to_string());
|
||||
let city = city.map(|s| s.to_string());
|
||||
self
|
||||
.api_call_with_retry(move |access_token| {
|
||||
let mut url = format!(
|
||||
"{CLOUD_API_URL}/api/proxy/locations/isps?country={}",
|
||||
country
|
||||
);
|
||||
if let Some(ref r) = region {
|
||||
url.push_str(&format!("®ion={}", r));
|
||||
}
|
||||
if let Some(ref c) = city {
|
||||
url.push_str(&format!("&city={}", c));
|
||||
}
|
||||
let client = reqwest::Client::new();
|
||||
async move {
|
||||
let response = client
|
||||
.get(&url)
|
||||
.header("Authorization", format!("Bearer {access_token}"))
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to fetch ISPs: {e}"))?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
let status = response.status();
|
||||
let body = response.text().await.unwrap_or_default();
|
||||
return Err(format!("ISPs fetch failed ({status}): {body}"));
|
||||
}
|
||||
|
||||
response
|
||||
.json::<Vec<LocationItem>>()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to parse ISPs: {e}"))
|
||||
}
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
/// Request a wayfern token from the cloud API. Only succeeds for paid users.
|
||||
pub async fn request_wayfern_token(&self) -> Result<(), String> {
|
||||
if !self.has_active_paid_subscription().await {
|
||||
self.clear_wayfern_token().await;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let token = self
|
||||
.api_call_with_retry(|access_token| {
|
||||
let url = format!("{CLOUD_API_URL}/api/auth/wayfern-start");
|
||||
let client = reqwest::Client::new();
|
||||
async move {
|
||||
let response = client
|
||||
.post(&url)
|
||||
.header("Authorization", format!("Bearer {access_token}"))
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to request wayfern token: {e}"))?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
let status = response.status();
|
||||
let body = response.text().await.unwrap_or_default();
|
||||
return Err(format!("Wayfern token request failed ({status}): {body}"));
|
||||
}
|
||||
|
||||
let result: WayfernTokenResponse = response
|
||||
.json()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to parse wayfern token response: {e}"))?;
|
||||
|
||||
Ok(result.token)
|
||||
}
|
||||
})
|
||||
.await?;
|
||||
|
||||
let mut wt = self.wayfern_token.lock().await;
|
||||
*wt = Some(token);
|
||||
log::info!("Wayfern token acquired");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get the current wayfern token, if any.
|
||||
pub async fn get_wayfern_token(&self) -> Option<String> {
|
||||
let wt = self.wayfern_token.lock().await;
|
||||
wt.clone()
|
||||
}
|
||||
|
||||
/// Clear the cached wayfern token.
|
||||
pub async fn clear_wayfern_token(&self) {
|
||||
let mut wt = self.wayfern_token.lock().await;
|
||||
*wt = None;
|
||||
}
|
||||
|
||||
/// Background loop that refreshes the sync token periodically
|
||||
pub async fn start_sync_token_refresh_loop(app_handle: tauri::AppHandle) {
|
||||
let mut wayfern_refresh_counter: u32 = 0;
|
||||
loop {
|
||||
tokio::time::sleep(std::time::Duration::from_secs(600)).await; // 10 minutes
|
||||
|
||||
@@ -874,6 +1037,8 @@ impl CloudAuthManager {
|
||||
continue;
|
||||
}
|
||||
|
||||
wayfern_refresh_counter += 1;
|
||||
|
||||
// Proactively refresh the access token if it's expired or expiring soon.
|
||||
// This runs first so subsequent API calls use a fresh token.
|
||||
if let Ok(Some(token)) = Self::load_access_token() {
|
||||
@@ -905,9 +1070,28 @@ impl CloudAuthManager {
|
||||
log::debug!("Failed to refresh cloud profile: {e}");
|
||||
}
|
||||
|
||||
// Reconnect profile lock manager if needed
|
||||
if let Some(auth_state) = CLOUD_AUTH.get_user().await {
|
||||
if auth_state.user.plan != "free" && !crate::team_lock::PROFILE_LOCK.is_connected().await {
|
||||
crate::team_lock::PROFILE_LOCK.connect().await;
|
||||
}
|
||||
}
|
||||
|
||||
// Sync cloud proxy credentials
|
||||
CLOUD_AUTH.sync_cloud_proxy().await;
|
||||
|
||||
// Refresh wayfern token every 10 hours (60 iterations of 10-minute loop)
|
||||
if wayfern_refresh_counter >= 60 {
|
||||
wayfern_refresh_counter = 0;
|
||||
if CLOUD_AUTH.has_active_paid_subscription().await {
|
||||
if let Err(e) = CLOUD_AUTH.request_wayfern_token().await {
|
||||
log::warn!("Failed to refresh wayfern token: {e}");
|
||||
}
|
||||
} else {
|
||||
CLOUD_AUTH.clear_wayfern_token().await;
|
||||
}
|
||||
}
|
||||
|
||||
let _ = &app_handle; // keep app_handle alive
|
||||
}
|
||||
}
|
||||
@@ -916,8 +1100,8 @@ impl CloudAuthManager {
|
||||
// --- Tauri commands ---
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn cloud_request_otp(email: String) -> Result<String, String> {
|
||||
CLOUD_AUTH.request_otp(&email).await
|
||||
pub async fn cloud_request_otp(email: String, captcha_token: String) -> Result<String, String> {
|
||||
CLOUD_AUTH.request_otp(&email, &captcha_token).await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
@@ -943,11 +1127,21 @@ pub async fn cloud_verify_otp(
|
||||
Ok(None) => log::warn!("Sync token not available despite active subscription"),
|
||||
Err(e) => log::error!("Failed to pre-fetch sync token after login: {e}"),
|
||||
}
|
||||
|
||||
// Request wayfern token for paid users
|
||||
if let Err(e) = CLOUD_AUTH.request_wayfern_token().await {
|
||||
log::warn!("Failed to request wayfern token after login: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
// Sync cloud proxy after login
|
||||
CLOUD_AUTH.sync_cloud_proxy().await;
|
||||
|
||||
// Connect profile lock manager for paid users
|
||||
if state.user.plan != "free" {
|
||||
crate::team_lock::PROFILE_LOCK.connect().await;
|
||||
}
|
||||
|
||||
let _ = crate::events::emit_empty("cloud-auth-changed");
|
||||
|
||||
let _ = &app_handle;
|
||||
@@ -977,6 +1171,9 @@ pub async fn cloud_logout(app_handle: tauri::AppHandle) -> Result<(), String> {
|
||||
}
|
||||
let _ = manager.remove_sync_token(&app_handle).await;
|
||||
|
||||
// Remove cloud-managed and cloud-derived proxies
|
||||
crate::proxy_manager::PROXY_MANAGER.remove_cloud_proxies();
|
||||
|
||||
let _ = crate::events::emit_empty("cloud-auth-changed");
|
||||
Ok(())
|
||||
}
|
||||
@@ -986,33 +1183,59 @@ pub async fn cloud_has_active_subscription() -> Result<bool, String> {
|
||||
Ok(CLOUD_AUTH.has_active_paid_subscription().await)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn cloud_get_wayfern_token() -> Result<Option<String>, String> {
|
||||
Ok(CLOUD_AUTH.get_wayfern_token().await)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn cloud_refresh_wayfern_token() -> Result<Option<String>, String> {
|
||||
CLOUD_AUTH.request_wayfern_token().await?;
|
||||
Ok(CLOUD_AUTH.get_wayfern_token().await)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn cloud_get_countries() -> Result<Vec<LocationItem>, String> {
|
||||
CLOUD_AUTH.fetch_countries().await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn cloud_get_states(country: String) -> Result<Vec<LocationItem>, String> {
|
||||
CLOUD_AUTH.fetch_states(&country).await
|
||||
pub async fn cloud_get_regions(country: String) -> Result<Vec<LocationItem>, String> {
|
||||
CLOUD_AUTH.fetch_regions(&country).await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn cloud_get_cities(country: String, state: String) -> Result<Vec<LocationItem>, String> {
|
||||
CLOUD_AUTH.fetch_cities(&country, &state).await
|
||||
pub async fn cloud_get_cities(
|
||||
country: String,
|
||||
region: Option<String>,
|
||||
) -> Result<Vec<LocationItem>, String> {
|
||||
CLOUD_AUTH.fetch_cities(&country, region.as_deref()).await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn cloud_get_isps(
|
||||
country: String,
|
||||
region: Option<String>,
|
||||
city: Option<String>,
|
||||
) -> Result<Vec<LocationItem>, String> {
|
||||
CLOUD_AUTH
|
||||
.fetch_isps(&country, region.as_deref(), city.as_deref())
|
||||
.await
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn create_cloud_location_proxy(
|
||||
name: String,
|
||||
country: String,
|
||||
state: Option<String>,
|
||||
region: Option<String>,
|
||||
city: Option<String>,
|
||||
isp: Option<String>,
|
||||
) -> Result<crate::proxy_manager::StoredProxy, String> {
|
||||
// If no cloud proxy exists yet, attempt to sync it first
|
||||
if !PROXY_MANAGER.has_cloud_proxy() {
|
||||
CLOUD_AUTH.sync_cloud_proxy().await;
|
||||
}
|
||||
PROXY_MANAGER.create_cloud_location_proxy(name, country, state, city)
|
||||
PROXY_MANAGER.create_cloud_location_proxy(name, country, region, city, isp)
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
@@ -1020,22 +1243,108 @@ pub struct CloudProxyUsage {
|
||||
pub used_mb: i64,
|
||||
pub limit_mb: i64,
|
||||
pub remaining_mb: i64,
|
||||
pub recurring_limit_mb: i64,
|
||||
pub extra_limit_mb: i64,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct ProxyUsageResponse {
|
||||
#[serde(rename = "usedMb")]
|
||||
used_mb: i64,
|
||||
#[serde(rename = "limitMb")]
|
||||
limit_mb: i64,
|
||||
#[serde(rename = "remainingMb")]
|
||||
remaining_mb: i64,
|
||||
#[serde(rename = "recurringLimitMb", default)]
|
||||
recurring_limit_mb: i64,
|
||||
#[serde(rename = "extraLimitMb", default)]
|
||||
extra_limit_mb: i64,
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn cloud_get_proxy_usage() -> Result<Option<CloudProxyUsage>, String> {
|
||||
let state = CLOUD_AUTH.state.lock().await;
|
||||
match &*state {
|
||||
Some(auth) if auth.user.proxy_bandwidth_limit_mb > 0 => {
|
||||
let used = auth.user.proxy_bandwidth_used_mb;
|
||||
let limit = auth.user.proxy_bandwidth_limit_mb;
|
||||
Ok(Some(CloudProxyUsage {
|
||||
used_mb: used,
|
||||
limit_mb: limit,
|
||||
remaining_mb: (limit - used).max(0),
|
||||
}))
|
||||
let (has_proxy, cached_recurring, cached_extra) = {
|
||||
let state = CLOUD_AUTH.state.lock().await;
|
||||
match &*state {
|
||||
Some(auth)
|
||||
if auth.user.proxy_bandwidth_limit_mb > 0 || auth.user.proxy_bandwidth_extra_mb > 0 =>
|
||||
{
|
||||
(
|
||||
true,
|
||||
auth.user.proxy_bandwidth_limit_mb,
|
||||
auth.user.proxy_bandwidth_extra_mb,
|
||||
)
|
||||
}
|
||||
_ => return Ok(None),
|
||||
}
|
||||
};
|
||||
|
||||
if !has_proxy {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
// Fetch live usage from the API
|
||||
match CLOUD_AUTH
|
||||
.api_call_with_retry(|access_token| {
|
||||
let url = format!("{CLOUD_API_URL}/api/proxy/usage");
|
||||
let client = reqwest::Client::new();
|
||||
async move {
|
||||
let response = client
|
||||
.get(&url)
|
||||
.header("Authorization", format!("Bearer {access_token}"))
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to fetch proxy usage: {e}"))?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
return Err(format!(
|
||||
"Proxy usage API returned status {}",
|
||||
response.status()
|
||||
));
|
||||
}
|
||||
|
||||
response
|
||||
.json::<ProxyUsageResponse>()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to parse proxy usage: {e}"))
|
||||
}
|
||||
})
|
||||
.await
|
||||
{
|
||||
Ok(usage) => Ok(Some(CloudProxyUsage {
|
||||
used_mb: usage.used_mb,
|
||||
limit_mb: usage.limit_mb,
|
||||
remaining_mb: usage.remaining_mb,
|
||||
recurring_limit_mb: if usage.recurring_limit_mb > 0 {
|
||||
usage.recurring_limit_mb
|
||||
} else {
|
||||
cached_recurring
|
||||
},
|
||||
extra_limit_mb: if usage.recurring_limit_mb > 0 {
|
||||
usage.extra_limit_mb
|
||||
} else {
|
||||
cached_extra
|
||||
},
|
||||
})),
|
||||
Err(e) => {
|
||||
log::warn!("Failed to fetch live proxy usage, falling back to cached: {e}");
|
||||
// Fallback to cached values
|
||||
let state = CLOUD_AUTH.state.lock().await;
|
||||
match &*state {
|
||||
Some(auth) => {
|
||||
let used = auth.user.proxy_bandwidth_used_mb;
|
||||
let total = cached_recurring + cached_extra;
|
||||
Ok(Some(CloudProxyUsage {
|
||||
used_mb: used,
|
||||
limit_mb: total,
|
||||
remaining_mb: (total - used).max(0),
|
||||
recurring_limit_mb: cached_recurring,
|
||||
extra_limit_mb: cached_extra,
|
||||
}))
|
||||
}
|
||||
_ => Ok(None),
|
||||
}
|
||||
}
|
||||
_ => Ok(None),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1071,9 +1380,15 @@ pub async fn restart_sync_service(app_handle: tauri::AppHandle) -> Result<(), St
|
||||
{
|
||||
log::warn!("Failed to check for missing profiles: {}", e);
|
||||
}
|
||||
if let Err(e) = engine
|
||||
.check_for_missing_synced_entities(&app_handle_sync)
|
||||
.await
|
||||
{
|
||||
log::warn!("Failed to check for missing entities: {}", e);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
log::debug!("Sync not configured, skipping missing profile check: {}", e);
|
||||
log::warn!("Sync not configured, skipping missing profile check: {}", e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -103,11 +103,6 @@ pub fn enable_autostart() -> io::Result<()> {
|
||||
<true/>
|
||||
<key>LimitLoadToSessionType</key>
|
||||
<string>Aqua</string>
|
||||
<key>KeepAlive</key>
|
||||
<dict>
|
||||
<key>SuccessfulExit</key>
|
||||
<false/>
|
||||
</dict>
|
||||
<key>ProcessType</key>
|
||||
<string>Interactive</string>
|
||||
<key>StandardOutPath</key>
|
||||
@@ -188,6 +183,26 @@ pub fn load_launch_agent() -> io::Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
pub fn start_launch_agent() -> io::Result<()> {
|
||||
use std::process::Command;
|
||||
|
||||
let output = Command::new("launchctl")
|
||||
.args(["start", "com.donutbrowser.daemon"])
|
||||
.output()?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
return Err(io::Error::other(format!(
|
||||
"launchctl start failed: {}",
|
||||
stderr
|
||||
)));
|
||||
}
|
||||
|
||||
log::info!("Started launch agent via launchctl");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
pub fn unload_launch_agent() -> io::Result<()> {
|
||||
use std::process::Command;
|
||||
@@ -229,16 +244,22 @@ pub fn enable_autostart() -> io::Result<()> {
|
||||
|
||||
let desktop_path = autostart_dir.join("donut-daemon.desktop");
|
||||
|
||||
let escaped_daemon_path = daemon_path
|
||||
.display()
|
||||
.to_string()
|
||||
.replace('\\', "\\\\")
|
||||
.replace('"', "\\\"")
|
||||
.replace('`', "\\`")
|
||||
.replace('$', "\\$");
|
||||
let desktop_content = format!(
|
||||
r#"[Desktop Entry]
|
||||
Type=Application
|
||||
Name=Donut Browser Daemon
|
||||
Exec={} start
|
||||
Exec="{escaped_daemon_path}" run
|
||||
Hidden=false
|
||||
NoDisplay=true
|
||||
X-GNOME-Autostart-enabled=true
|
||||
"#,
|
||||
daemon_path.display()
|
||||
);
|
||||
|
||||
fs::write(&desktop_path, desktop_content)?;
|
||||
@@ -281,7 +302,7 @@ pub fn enable_autostart() -> io::Result<()> {
|
||||
|
||||
key.set_value(
|
||||
"DonutBrowserDaemon",
|
||||
&format!("\"{}\" start", daemon_path.display()),
|
||||
&format!("\"{}\" run", daemon_path.display()),
|
||||
)?;
|
||||
|
||||
log::info!("Added registry autostart entry");
|
||||
@@ -319,6 +340,9 @@ pub fn is_autostart_enabled() -> bool {
|
||||
}
|
||||
|
||||
pub fn get_data_dir() -> Option<PathBuf> {
|
||||
if crate::app_dirs::is_portable() {
|
||||
return Some(crate::app_dirs::data_dir());
|
||||
}
|
||||
if let Some(proj_dirs) = ProjectDirs::from("com", "donutbrowser", "Donut Browser") {
|
||||
Some(proj_dirs.data_dir().to_path_buf())
|
||||
} else {
|
||||
|
||||
@@ -1,10 +1,7 @@
|
||||
use muda::{Menu, MenuItem, PredefinedMenuItem};
|
||||
use muda::{Menu, MenuItem};
|
||||
use std::process::Command;
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
use tray_icon::{Icon, TrayIcon, TrayIconBuilder};
|
||||
|
||||
static GUI_RUNNING: AtomicBool = AtomicBool::new(false);
|
||||
|
||||
pub fn load_icon() -> Icon {
|
||||
// On Windows, use the full-color icon so it renders well on dark taskbars.
|
||||
// On macOS/Linux, use the template icon (black with alpha) for system light/dark handling.
|
||||
@@ -25,7 +22,6 @@ pub fn load_icon() -> Icon {
|
||||
|
||||
pub struct TrayMenu {
|
||||
pub menu: Menu,
|
||||
pub open_item: MenuItem,
|
||||
pub quit_item: MenuItem,
|
||||
}
|
||||
|
||||
@@ -39,19 +35,11 @@ impl TrayMenu {
|
||||
pub fn new() -> Self {
|
||||
let menu = Menu::new();
|
||||
|
||||
let open_item = MenuItem::new("Open Donut Browser", true, None);
|
||||
let separator = PredefinedMenuItem::separator();
|
||||
let quit_item = MenuItem::new("Quit Donut Browser", true, None);
|
||||
|
||||
menu.append(&open_item).unwrap();
|
||||
menu.append(&separator).unwrap();
|
||||
menu.append(&quit_item).unwrap();
|
||||
|
||||
Self {
|
||||
menu,
|
||||
open_item,
|
||||
quit_item,
|
||||
}
|
||||
Self { menu, quit_item }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -68,25 +56,47 @@ pub fn create_tray_icon(icon: Icon, menu: &Menu) -> TrayIcon {
|
||||
builder.build().expect("Failed to create tray icon")
|
||||
}
|
||||
|
||||
pub fn open_gui() {
|
||||
if GUI_RUNNING.load(Ordering::SeqCst) {
|
||||
log::info!("GUI already running, activating...");
|
||||
activate_gui();
|
||||
return;
|
||||
/// Resolve the .app bundle path from the current daemon executable.
|
||||
/// In production the daemon is at `Donut.app/Contents/MacOS/donut-daemon`.
|
||||
#[cfg(target_os = "macos")]
|
||||
fn get_app_bundle_path() -> Option<std::path::PathBuf> {
|
||||
let exe = std::env::current_exe().ok()?;
|
||||
let macos_dir = exe.parent()?;
|
||||
let contents_dir = macos_dir.parent()?;
|
||||
let app_dir = contents_dir.parent()?;
|
||||
if app_dir.extension().and_then(|e| e.to_str()) == Some("app") {
|
||||
Some(app_dir.to_path_buf())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn open_gui() {
|
||||
log::info!("Opening GUI...");
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
let _ = Command::new("open").arg("-a").arg("Donut Browser").spawn();
|
||||
// Launch the GUI binary directly. The daemon lives inside the same .app
|
||||
// bundle, so `open` (even with `-n`) can re-activate the daemon instead
|
||||
// of launching the GUI. Directly running the binary avoids macOS's app
|
||||
// activation machinery. The single-instance Tauri plugin in the GUI
|
||||
// handles deduplication if a GUI instance is already running.
|
||||
if let Some(app_bundle) = get_app_bundle_path() {
|
||||
let gui_binary = app_bundle.join("Contents").join("MacOS").join("Donut");
|
||||
if gui_binary.exists() {
|
||||
let _ = Command::new(&gui_binary).spawn();
|
||||
} else {
|
||||
let _ = Command::new("open").args(["-n"]).arg(&app_bundle).spawn();
|
||||
}
|
||||
} else {
|
||||
let _ = Command::new("open").args(["-n", "-a", "Donut"]).spawn();
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
use std::path::PathBuf;
|
||||
|
||||
// In dev mode, find the main exe next to the daemon binary
|
||||
if let Ok(current_exe) = std::env::current_exe() {
|
||||
if let Some(exe_dir) = current_exe.parent() {
|
||||
let app_path = exe_dir.join("donutbrowser.exe");
|
||||
@@ -118,15 +128,77 @@ pub fn open_gui() {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn activate_gui() {
|
||||
#[cfg(target_os = "macos")]
|
||||
fn read_gui_pid() -> Option<u32> {
|
||||
let path = super::autostart::get_data_dir()?.join("daemon-state.json");
|
||||
let content = std::fs::read_to_string(path).ok()?;
|
||||
let val: serde_json::Value = serde_json::from_str(&content).ok()?;
|
||||
val.get("gui_pid")?.as_u64().map(|p| p as u32)
|
||||
}
|
||||
|
||||
fn kill_gui_by_pid() -> bool {
|
||||
let Some(pid) = read_gui_pid() else {
|
||||
return false;
|
||||
};
|
||||
|
||||
#[cfg(unix)]
|
||||
{
|
||||
let _ = Command::new("osascript")
|
||||
.args(["-e", "tell application \"Donut Browser\" to activate"])
|
||||
.spawn();
|
||||
let ret = unsafe { libc::kill(pid as i32, libc::SIGTERM) };
|
||||
ret == 0
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
{
|
||||
use std::os::windows::process::CommandExt;
|
||||
const CREATE_NO_WINDOW: u32 = 0x08000000;
|
||||
Command::new("taskkill")
|
||||
.args(["/PID", &pid.to_string(), "/F"])
|
||||
.creation_flags(CREATE_NO_WINDOW)
|
||||
.output()
|
||||
.map(|o| o.status.success())
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
#[cfg(not(any(unix, windows)))]
|
||||
{
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_gui_running(running: bool) {
|
||||
GUI_RUNNING.store(running, Ordering::SeqCst);
|
||||
pub fn quit_gui() {
|
||||
log::info!("[daemon] Quitting GUI...");
|
||||
|
||||
if kill_gui_by_pid() {
|
||||
log::info!("[daemon] GUI killed by PID");
|
||||
return;
|
||||
}
|
||||
|
||||
log::info!("[daemon] PID-based kill failed, falling back to name-based kill");
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
// Use spawn() instead of output() to avoid blocking the event loop.
|
||||
// AppleScript has a ~2 minute default timeout that would freeze the tray icon.
|
||||
let _ = Command::new("osascript")
|
||||
.args(["-e", "tell application \"Donut\" to quit"])
|
||||
.spawn();
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
use std::os::windows::process::CommandExt;
|
||||
const CREATE_NO_WINDOW: u32 = 0x08000000;
|
||||
let _ = Command::new("taskkill")
|
||||
.args(["/IM", "Donut.exe", "/F"])
|
||||
.creation_flags(CREATE_NO_WINDOW)
|
||||
.spawn();
|
||||
let _ = Command::new("taskkill")
|
||||
.args(["/IM", "donutbrowser.exe", "/F"])
|
||||
.creation_flags(CREATE_NO_WINDOW)
|
||||
.spawn();
|
||||
}
|
||||
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
let _ = Command::new("pkill").args(["-x", "donutbrowser"]).spawn();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
// Daemon Spawn - Start the daemon from the GUI
|
||||
// Currently disabled; will be re-enabled in the future
|
||||
|
||||
use serde::Deserialize;
|
||||
use std::fs;
|
||||
@@ -9,6 +10,26 @@ use std::time::Duration;
|
||||
|
||||
use crate::daemon::autostart;
|
||||
|
||||
/// Check if a process with the given PID exists using the Windows API.
|
||||
/// This avoids spawning tasklist.exe which causes a visible conhost window flash.
|
||||
#[cfg(windows)]
|
||||
fn win_process_exists(pid: u32) -> bool {
|
||||
const PROCESS_QUERY_LIMITED_INFORMATION: u32 = 0x1000;
|
||||
|
||||
extern "system" {
|
||||
fn OpenProcess(dwDesiredAccess: u32, bInheritHandles: i32, dwProcessId: u32) -> *mut ();
|
||||
fn CloseHandle(hObject: *mut ()) -> i32;
|
||||
}
|
||||
|
||||
let handle = unsafe { OpenProcess(PROCESS_QUERY_LIMITED_INFORMATION, 0, pid) };
|
||||
if handle.is_null() {
|
||||
false
|
||||
} else {
|
||||
unsafe { CloseHandle(handle) };
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, Default)]
|
||||
struct DaemonState {
|
||||
daemon_pid: Option<u32>,
|
||||
@@ -32,7 +53,7 @@ fn read_state() -> DaemonState {
|
||||
DaemonState::default()
|
||||
}
|
||||
|
||||
fn is_daemon_running() -> bool {
|
||||
pub fn is_daemon_running() -> bool {
|
||||
let state = read_state();
|
||||
|
||||
if let Some(pid) = state.daemon_pid {
|
||||
@@ -43,12 +64,7 @@ fn is_daemon_running() -> bool {
|
||||
|
||||
#[cfg(windows)]
|
||||
{
|
||||
let output = Command::new("tasklist")
|
||||
.args(["/FI", &format!("PID eq {}", pid)])
|
||||
.output();
|
||||
output
|
||||
.map(|o| String::from_utf8_lossy(&o.stdout).contains(&pid.to_string()))
|
||||
.unwrap_or(false)
|
||||
win_process_exists(pid)
|
||||
}
|
||||
|
||||
#[cfg(not(any(unix, windows)))]
|
||||
@@ -113,7 +129,13 @@ fn get_daemon_path() -> Option<PathBuf> {
|
||||
// Try to find it in PATH
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
if let Ok(output) = Command::new("where").arg("donut-daemon").output() {
|
||||
use std::os::windows::process::CommandExt;
|
||||
const CREATE_NO_WINDOW: u32 = 0x08000000;
|
||||
if let Ok(output) = Command::new("where")
|
||||
.arg("donut-daemon")
|
||||
.creation_flags(CREATE_NO_WINDOW)
|
||||
.output()
|
||||
{
|
||||
if output.status.success() {
|
||||
let path = String::from_utf8_lossy(&output.stdout);
|
||||
let path = path.lines().next()?.trim();
|
||||
@@ -243,6 +265,11 @@ fn spawn_daemon_macos() -> Result<(), String> {
|
||||
autostart::load_launch_agent().map_err(|e| format!("Failed to load LaunchAgent: {}", e))?;
|
||||
log::info!("launchctl load completed");
|
||||
|
||||
// Also explicitly start the agent in case it was already loaded but stopped
|
||||
if let Err(e) = autostart::start_launch_agent() {
|
||||
log::debug!("launchctl start note (non-fatal): {}", e);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -308,3 +335,26 @@ pub fn ensure_daemon_running() -> Result<(), String> {
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn register_gui_pid() {
|
||||
let path = get_state_path();
|
||||
let mut val: serde_json::Value = if path.exists() {
|
||||
fs::read_to_string(&path)
|
||||
.ok()
|
||||
.and_then(|c| serde_json::from_str(&c).ok())
|
||||
.unwrap_or_else(|| serde_json::json!({}))
|
||||
} else {
|
||||
serde_json::json!({})
|
||||
};
|
||||
|
||||
if let Some(obj) = val.as_object_mut() {
|
||||
obj.insert(
|
||||
"gui_pid".to_string(),
|
||||
serde_json::Value::Number(std::process::id().into()),
|
||||
);
|
||||
}
|
||||
|
||||
if let Ok(content) = serde_json::to_string_pretty(&val) {
|
||||
let _ = fs::write(&path, content);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -204,7 +204,7 @@ mod windows {
|
||||
.map_err(|e| format!("Failed to set ApplicationDescription: {}", e))?;
|
||||
|
||||
app_key
|
||||
.set_value("ApplicationIcon", &format!("{},0", exe_path))
|
||||
.set_value("ApplicationIcon", &format!("\"{}\",0", exe_path))
|
||||
.map_err(|e| format!("Failed to set ApplicationIcon: {}", e))?;
|
||||
|
||||
// Create Capabilities key
|
||||
@@ -273,7 +273,7 @@ mod windows {
|
||||
.map_err(|e| format!("Failed to create DefaultIcon key: {}", e))?;
|
||||
|
||||
icon_key
|
||||
.set_value("", &format!("{},0", exe_path))
|
||||
.set_value("", &format!("\"{}\",0", exe_path))
|
||||
.map_err(|e| format!("Failed to set default icon: {}", e))?;
|
||||
|
||||
// Create shell\open\command key
|
||||
|
||||
@@ -0,0 +1,343 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::PathBuf;
|
||||
use std::time::{Duration, SystemTime};
|
||||
|
||||
use crate::app_dirs;
|
||||
|
||||
const REFRESH_INTERVAL: Duration = Duration::from_secs(43200); // 12 hours
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Default)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum BlocklistLevel {
|
||||
#[default]
|
||||
None,
|
||||
Light,
|
||||
Normal,
|
||||
Pro,
|
||||
ProPlus,
|
||||
Ultimate,
|
||||
}
|
||||
|
||||
impl BlocklistLevel {
|
||||
pub fn parse_level(s: &str) -> Option<Self> {
|
||||
match s {
|
||||
"light" => Some(Self::Light),
|
||||
"normal" => Some(Self::Normal),
|
||||
"pro" => Some(Self::Pro),
|
||||
"pro_plus" => Some(Self::ProPlus),
|
||||
"ultimate" => Some(Self::Ultimate),
|
||||
"none" => Some(Self::None),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn as_str(&self) -> &'static str {
|
||||
match self {
|
||||
Self::None => "none",
|
||||
Self::Light => "light",
|
||||
Self::Normal => "normal",
|
||||
Self::Pro => "pro",
|
||||
Self::ProPlus => "pro_plus",
|
||||
Self::Ultimate => "ultimate",
|
||||
}
|
||||
}
|
||||
|
||||
pub fn display_name(&self) -> &'static str {
|
||||
match self {
|
||||
Self::None => "None",
|
||||
Self::Light => "Light",
|
||||
Self::Normal => "Normal",
|
||||
Self::Pro => "Pro",
|
||||
Self::ProPlus => "Pro++",
|
||||
Self::Ultimate => "Ultimate",
|
||||
}
|
||||
}
|
||||
|
||||
pub fn url(&self) -> Option<&'static str> {
|
||||
match self {
|
||||
Self::None => None,
|
||||
Self::Light => {
|
||||
Some("https://cdn.jsdelivr.net/gh/hagezi/dns-blocklists@latest/domains/light.txt")
|
||||
}
|
||||
Self::Normal => {
|
||||
Some("https://cdn.jsdelivr.net/gh/hagezi/dns-blocklists@latest/domains/multi.txt")
|
||||
}
|
||||
Self::Pro => Some("https://cdn.jsdelivr.net/gh/hagezi/dns-blocklists@latest/domains/pro.txt"),
|
||||
Self::ProPlus => {
|
||||
Some("https://cdn.jsdelivr.net/gh/hagezi/dns-blocklists@latest/domains/pro.plus.txt")
|
||||
}
|
||||
Self::Ultimate => {
|
||||
Some("https://cdn.jsdelivr.net/gh/hagezi/dns-blocklists@latest/domains/ultimate.txt")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn filename(&self) -> Option<&'static str> {
|
||||
match self {
|
||||
Self::None => None,
|
||||
Self::Light => Some("light.txt"),
|
||||
Self::Normal => Some("multi.txt"),
|
||||
Self::Pro => Some("pro.txt"),
|
||||
Self::ProPlus => Some("pro.plus.txt"),
|
||||
Self::Ultimate => Some("ultimate.txt"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn all_downloadable() -> &'static [BlocklistLevel] {
|
||||
&[
|
||||
Self::Light,
|
||||
Self::Normal,
|
||||
Self::Pro,
|
||||
Self::ProPlus,
|
||||
Self::Ultimate,
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct BlocklistCacheStatus {
|
||||
pub level: String,
|
||||
pub display_name: String,
|
||||
pub entry_count: usize,
|
||||
pub file_size_bytes: u64,
|
||||
pub last_updated: Option<u64>,
|
||||
pub is_fresh: bool,
|
||||
pub is_cached: bool,
|
||||
}
|
||||
|
||||
pub struct BlocklistManager;
|
||||
|
||||
lazy_static::lazy_static! {
|
||||
static ref HTTP_CLIENT: reqwest::Client = reqwest::Client::builder()
|
||||
.timeout(Duration::from_secs(60))
|
||||
.build()
|
||||
.expect("Failed to create HTTP client");
|
||||
}
|
||||
|
||||
impl BlocklistManager {
|
||||
pub fn instance() -> &'static BlocklistManager {
|
||||
&BLOCKLIST_MANAGER
|
||||
}
|
||||
|
||||
fn cache_dir() -> PathBuf {
|
||||
app_dirs::dns_blocklist_dir()
|
||||
}
|
||||
|
||||
pub fn cached_file_path(level: BlocklistLevel) -> Option<PathBuf> {
|
||||
level.filename().map(|f| Self::cache_dir().join(f))
|
||||
}
|
||||
|
||||
pub fn is_cache_fresh(level: BlocklistLevel) -> bool {
|
||||
let Some(path) = Self::cached_file_path(level) else {
|
||||
return false;
|
||||
};
|
||||
if !path.exists() {
|
||||
return false;
|
||||
}
|
||||
match std::fs::metadata(&path).and_then(|m| m.modified()) {
|
||||
Ok(modified) => SystemTime::now()
|
||||
.duration_since(modified)
|
||||
.map(|age| age < REFRESH_INTERVAL)
|
||||
.unwrap_or(false),
|
||||
Err(_) => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn fetch_blocklist(level: BlocklistLevel) -> Result<PathBuf, String> {
|
||||
let url = level
|
||||
.url()
|
||||
.ok_or_else(|| format!("No URL for level {:?}", level))?;
|
||||
let path =
|
||||
Self::cached_file_path(level).ok_or_else(|| format!("No filename for level {:?}", level))?;
|
||||
|
||||
let cache_dir = Self::cache_dir();
|
||||
std::fs::create_dir_all(&cache_dir).map_err(|e| format!("Failed to create cache dir: {e}"))?;
|
||||
|
||||
log::info!(
|
||||
"[dns-blocklist] Fetching {} from {}",
|
||||
level.display_name(),
|
||||
url
|
||||
);
|
||||
|
||||
let response = HTTP_CLIENT
|
||||
.get(url)
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to fetch blocklist: {e}"))?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
return Err(format!("HTTP {} when fetching {}", response.status(), url));
|
||||
}
|
||||
|
||||
let body = response
|
||||
.text()
|
||||
.await
|
||||
.map_err(|e| format!("Failed to read response body: {e}"))?;
|
||||
|
||||
// Write atomically: write to temp file, then rename
|
||||
let tmp_path = path.with_extension("tmp");
|
||||
std::fs::write(&tmp_path, &body).map_err(|e| format!("Failed to write blocklist: {e}"))?;
|
||||
std::fs::rename(&tmp_path, &path).map_err(|e| format!("Failed to rename blocklist: {e}"))?;
|
||||
|
||||
let entry_count = body
|
||||
.lines()
|
||||
.filter(|l| !l.starts_with('#') && !l.trim().is_empty())
|
||||
.count();
|
||||
log::info!(
|
||||
"[dns-blocklist] Cached {} ({} domains)",
|
||||
level.display_name(),
|
||||
entry_count
|
||||
);
|
||||
|
||||
Ok(path)
|
||||
}
|
||||
|
||||
pub async fn ensure_cached(level: BlocklistLevel) -> Result<PathBuf, String> {
|
||||
if let Some(path) = Self::cached_file_path(level) {
|
||||
if path.exists() {
|
||||
return Ok(path);
|
||||
}
|
||||
}
|
||||
Self::fetch_blocklist(level).await
|
||||
}
|
||||
|
||||
pub async fn refresh_all_stale(&self) {
|
||||
for &level in BlocklistLevel::all_downloadable() {
|
||||
if !Self::is_cache_fresh(level) {
|
||||
if let Err(e) = Self::fetch_blocklist(level).await {
|
||||
log::error!(
|
||||
"[dns-blocklist] Failed to refresh {}: {e}",
|
||||
level.display_name()
|
||||
);
|
||||
let _ = crate::events::emit(
|
||||
"dns-blocklist-refresh-failed",
|
||||
serde_json::json!({
|
||||
"level": level.as_str(),
|
||||
"error": e,
|
||||
}),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_blocklist_file_path(level: BlocklistLevel) -> Option<PathBuf> {
|
||||
Self::cached_file_path(level).filter(|p| p.exists())
|
||||
}
|
||||
|
||||
pub fn get_cache_status() -> Vec<BlocklistCacheStatus> {
|
||||
BlocklistLevel::all_downloadable()
|
||||
.iter()
|
||||
.map(|&level| {
|
||||
let path = Self::cached_file_path(level);
|
||||
let metadata = path.as_ref().and_then(|p| std::fs::metadata(p).ok());
|
||||
let is_cached = metadata.is_some();
|
||||
|
||||
let entry_count = if is_cached {
|
||||
path
|
||||
.as_ref()
|
||||
.and_then(|p| std::fs::read_to_string(p).ok())
|
||||
.map(|content| {
|
||||
content
|
||||
.lines()
|
||||
.filter(|l| !l.starts_with('#') && !l.trim().is_empty())
|
||||
.count()
|
||||
})
|
||||
.unwrap_or(0)
|
||||
} else {
|
||||
0
|
||||
};
|
||||
|
||||
let file_size_bytes = metadata.as_ref().map(|m| m.len()).unwrap_or(0);
|
||||
|
||||
let last_updated = metadata
|
||||
.as_ref()
|
||||
.and_then(|m| m.modified().ok())
|
||||
.and_then(|t| t.duration_since(SystemTime::UNIX_EPOCH).ok())
|
||||
.map(|d| d.as_secs());
|
||||
|
||||
BlocklistCacheStatus {
|
||||
level: level.as_str().to_string(),
|
||||
display_name: level.display_name().to_string(),
|
||||
entry_count,
|
||||
file_size_bytes,
|
||||
last_updated,
|
||||
is_fresh: Self::is_cache_fresh(level),
|
||||
is_cached,
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
lazy_static::lazy_static! {
|
||||
static ref BLOCKLIST_MANAGER: BlocklistManager = BlocklistManager;
|
||||
}
|
||||
|
||||
// Tauri commands
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_dns_blocklist_cache_status() -> Result<Vec<BlocklistCacheStatus>, String> {
|
||||
Ok(BlocklistManager::get_cache_status())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn refresh_dns_blocklists() -> Result<(), String> {
|
||||
for &level in BlocklistLevel::all_downloadable() {
|
||||
BlocklistManager::fetch_blocklist(level).await?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_level_roundtrip() {
|
||||
for &level in BlocklistLevel::all_downloadable() {
|
||||
let s = level.as_str();
|
||||
let parsed = BlocklistLevel::parse_level(s);
|
||||
assert_eq!(parsed, Some(level), "Roundtrip failed for {s}");
|
||||
}
|
||||
assert_eq!(
|
||||
BlocklistLevel::parse_level("none"),
|
||||
Some(BlocklistLevel::None)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_level_urls_all_present() {
|
||||
for &level in BlocklistLevel::all_downloadable() {
|
||||
assert!(
|
||||
level.url().is_some(),
|
||||
"{} should have a URL",
|
||||
level.as_str()
|
||||
);
|
||||
assert!(
|
||||
level.filename().is_some(),
|
||||
"{} should have a filename",
|
||||
level.as_str()
|
||||
);
|
||||
}
|
||||
assert!(BlocklistLevel::None.url().is_none());
|
||||
assert!(BlocklistLevel::None.filename().is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_cache_status_returns_all_levels() {
|
||||
let statuses = BlocklistManager::get_cache_status();
|
||||
assert_eq!(statuses.len(), 5);
|
||||
assert_eq!(statuses[0].level, "light");
|
||||
assert_eq!(statuses[1].level, "normal");
|
||||
assert_eq!(statuses[2].level, "pro");
|
||||
assert_eq!(statuses[3].level, "pro_plus");
|
||||
assert_eq!(statuses[4].level, "ultimate");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_cache_fresh_returns_false_when_missing() {
|
||||
assert!(!BlocklistManager::is_cache_fresh(BlocklistLevel::Light));
|
||||
assert!(!BlocklistManager::is_cache_fresh(BlocklistLevel::None));
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,3 @@
|
||||
use directories::BaseDirs;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::fs;
|
||||
@@ -71,16 +70,7 @@ impl DownloadedBrowsersRegistry {
|
||||
}
|
||||
|
||||
fn get_registry_path() -> Result<PathBuf, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let base_dirs = BaseDirs::new().ok_or("Failed to get base directories")?;
|
||||
let mut path = base_dirs.data_local_dir().to_path_buf();
|
||||
path.push(if cfg!(debug_assertions) {
|
||||
"DonutBrowserDev"
|
||||
} else {
|
||||
"DonutBrowser"
|
||||
});
|
||||
path.push("data");
|
||||
path.push("downloaded_browsers.json");
|
||||
Ok(path)
|
||||
Ok(crate::app_dirs::data_subdir().join("downloaded_browsers.json"))
|
||||
}
|
||||
|
||||
pub fn add_browser(&self, info: DownloadedBrowserInfo) {
|
||||
@@ -128,19 +118,7 @@ impl DownloadedBrowsersRegistry {
|
||||
};
|
||||
let browser_instance = create_browser(browser_type.clone());
|
||||
|
||||
// Get binaries directory
|
||||
let binaries_dir = if let Some(base_dirs) = directories::BaseDirs::new() {
|
||||
let mut path = base_dirs.data_local_dir().to_path_buf();
|
||||
path.push(if cfg!(debug_assertions) {
|
||||
"DonutBrowserDev"
|
||||
} else {
|
||||
"DonutBrowser"
|
||||
});
|
||||
path.push("binaries");
|
||||
path
|
||||
} else {
|
||||
return false;
|
||||
};
|
||||
let binaries_dir = crate::app_dirs::binaries_dir();
|
||||
|
||||
let files_exist = browser_instance.is_version_downloaded(version, &binaries_dir);
|
||||
|
||||
@@ -312,6 +290,30 @@ impl DownloadedBrowsersRegistry {
|
||||
}
|
||||
}
|
||||
|
||||
// Filter out versions that would leave a browser with zero versions in the registry
|
||||
{
|
||||
let data = self.data.lock().unwrap();
|
||||
let mut removal_counts: std::collections::HashMap<String, usize> =
|
||||
std::collections::HashMap::new();
|
||||
for (browser, _) in &to_remove {
|
||||
*removal_counts.entry(browser.clone()).or_insert(0) += 1;
|
||||
}
|
||||
to_remove.retain(|(browser, version)| {
|
||||
let total = data
|
||||
.browsers
|
||||
.get(browser.as_str())
|
||||
.map(|v| v.len())
|
||||
.unwrap_or(0);
|
||||
let removing = *removal_counts.get(browser.as_str()).unwrap_or(&0);
|
||||
if removing >= total {
|
||||
log::info!("Keeping last available version: {browser} {version}");
|
||||
*removal_counts.get_mut(browser.as_str()).unwrap() -= 1;
|
||||
return false;
|
||||
}
|
||||
true
|
||||
});
|
||||
}
|
||||
|
||||
// Remove unused binaries and their version folders
|
||||
for (browser, version) in to_remove {
|
||||
if let Err(e) = self.cleanup_failed_download(&browser, &version) {
|
||||
@@ -511,15 +513,12 @@ impl DownloadedBrowsersRegistry {
|
||||
browser: &str,
|
||||
version: &str,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Get binaries directory path
|
||||
let base_dirs = directories::BaseDirs::new().ok_or("Failed to get base directories")?;
|
||||
let mut binaries_dir = base_dirs.data_local_dir().to_path_buf();
|
||||
binaries_dir.push(if cfg!(debug_assertions) {
|
||||
"DonutBrowserDev"
|
||||
} else {
|
||||
"DonutBrowser"
|
||||
});
|
||||
binaries_dir.push("binaries");
|
||||
// Never remove a directory if a download is in progress for this browser/version
|
||||
if crate::downloader::is_downloading(browser, version) {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let binaries_dir = crate::app_dirs::binaries_dir();
|
||||
|
||||
let version_dir = binaries_dir.join(browser).join(version);
|
||||
|
||||
@@ -599,6 +598,12 @@ impl DownloadedBrowsersRegistry {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Skip if a download is in progress for this browser/version
|
||||
if crate::downloader::is_downloading(browser_name, version_name) {
|
||||
has_non_empty_versions = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if version directory is empty
|
||||
match fs::read_dir(&version_path) {
|
||||
Ok(mut entries) => {
|
||||
@@ -806,19 +811,7 @@ impl DownloadedBrowsersRegistry {
|
||||
|
||||
let browser = create_browser(browser_type.clone());
|
||||
|
||||
// Get binaries directory
|
||||
let binaries_dir = if let Some(base_dirs) = directories::BaseDirs::new() {
|
||||
let mut path = base_dirs.data_local_dir().to_path_buf();
|
||||
path.push(if cfg!(debug_assertions) {
|
||||
"DonutBrowserDev"
|
||||
} else {
|
||||
"DonutBrowser"
|
||||
});
|
||||
path.push("binaries");
|
||||
path
|
||||
} else {
|
||||
return Err("Failed to get base directories".into());
|
||||
};
|
||||
let binaries_dir = crate::app_dirs::binaries_dir();
|
||||
|
||||
log::info!(
|
||||
"binaries_dir: {binaries_dir:?} for profile: {}",
|
||||
@@ -1164,6 +1157,58 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_last_version_kept_during_cleanup() {
|
||||
let registry = DownloadedBrowsersRegistry::new();
|
||||
|
||||
// Add a single version for "firefox"
|
||||
registry.add_browser(DownloadedBrowserInfo {
|
||||
browser: "firefox".to_string(),
|
||||
version: "139.0".to_string(),
|
||||
file_path: PathBuf::from("/test/firefox/139.0"),
|
||||
});
|
||||
|
||||
// Add two versions for "chromium"
|
||||
registry.add_browser(DownloadedBrowserInfo {
|
||||
browser: "chromium".to_string(),
|
||||
version: "120.0".to_string(),
|
||||
file_path: PathBuf::from("/test/chromium/120.0"),
|
||||
});
|
||||
registry.add_browser(DownloadedBrowserInfo {
|
||||
browser: "chromium".to_string(),
|
||||
version: "121.0".to_string(),
|
||||
file_path: PathBuf::from("/test/chromium/121.0"),
|
||||
});
|
||||
|
||||
// No active or running profiles
|
||||
let result = registry
|
||||
.cleanup_unused_binaries_internal(&[], &[])
|
||||
.expect("cleanup should succeed");
|
||||
|
||||
// firefox 139.0 should be kept (last version), chromium should lose one but keep one
|
||||
// The exact one kept depends on iteration order, but at least one must remain
|
||||
assert!(
|
||||
!result.contains(&"firefox 139.0".to_string()),
|
||||
"Last version of firefox should not be cleaned up"
|
||||
);
|
||||
// At most one chromium version should have been cleaned up
|
||||
let chromium_cleaned: Vec<_> = result
|
||||
.iter()
|
||||
.filter(|r| r.starts_with("chromium"))
|
||||
.collect();
|
||||
assert!(
|
||||
chromium_cleaned.len() <= 1,
|
||||
"At most one chromium version should be cleaned up, got: {:?}",
|
||||
chromium_cleaned
|
||||
);
|
||||
|
||||
// Verify firefox is still registered
|
||||
assert!(
|
||||
registry.is_browser_registered("firefox", "139.0"),
|
||||
"Last firefox version should still be registered"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_browser_registered_vs_downloaded() {
|
||||
let registry = DownloadedBrowsersRegistry::new();
|
||||
@@ -1203,12 +1248,13 @@ pub async fn ensure_active_browsers_downloaded(
|
||||
// Check if any version is already downloaded
|
||||
let existing = registry.get_downloaded_versions(browser);
|
||||
if !existing.is_empty() {
|
||||
log::debug!(
|
||||
"Skipping {browser}: already have {} version(s) downloaded",
|
||||
log::info!(
|
||||
"ensure_active: Skipping {browser}: already have {} version(s) downloaded",
|
||||
existing.len()
|
||||
);
|
||||
continue;
|
||||
}
|
||||
log::info!("ensure_active: No {browser} versions found, will download");
|
||||
|
||||
// Get the latest release type for this browser
|
||||
let release_types = match version_manager.get_browser_release_types(browser).await {
|
||||
|
||||
@@ -42,7 +42,10 @@ pub struct Downloader {
|
||||
impl Downloader {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
client: Client::new(),
|
||||
client: Client::builder()
|
||||
.connect_timeout(std::time::Duration::from_secs(30))
|
||||
.build()
|
||||
.unwrap_or_else(|_| Client::new()),
|
||||
api_client: ApiClient::instance(),
|
||||
registry: crate::downloaded_browsers_registry::DownloadedBrowsersRegistry::instance(),
|
||||
version_service: crate::browser_version_manager::BrowserVersionManager::instance(),
|
||||
@@ -56,7 +59,7 @@ impl Downloader {
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn new_with_api_client(_api_client: ApiClient) -> Self {
|
||||
pub fn new_for_test() -> Self {
|
||||
Self {
|
||||
client: Client::new(),
|
||||
api_client: ApiClient::instance(),
|
||||
@@ -67,87 +70,53 @@ impl Downloader {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub async fn download_file(
|
||||
&self,
|
||||
download_url: &str,
|
||||
dest_path: &Path,
|
||||
filename: &str,
|
||||
) -> Result<PathBuf, Box<dyn std::error::Error + Send + Sync>> {
|
||||
let file_path = dest_path.join(filename);
|
||||
|
||||
let response = self
|
||||
.client
|
||||
.get(download_url)
|
||||
.header(
|
||||
"User-Agent",
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36",
|
||||
)
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if !response.status().is_success() {
|
||||
return Err(format!("Download failed with status: {}", response.status()).into());
|
||||
}
|
||||
|
||||
let mut file = std::fs::OpenOptions::new()
|
||||
.create(true)
|
||||
.truncate(true)
|
||||
.write(true)
|
||||
.open(&file_path)?;
|
||||
|
||||
let mut stream = response.bytes_stream();
|
||||
use futures_util::StreamExt;
|
||||
while let Some(chunk) = stream.next().await {
|
||||
let chunk = chunk?;
|
||||
io::copy(&mut chunk.as_ref(), &mut file)?;
|
||||
}
|
||||
|
||||
Ok(file_path)
|
||||
}
|
||||
|
||||
/// Resolve the actual download URL for browsers that need dynamic asset resolution
|
||||
pub async fn resolve_download_url(
|
||||
&self,
|
||||
browser_type: BrowserType,
|
||||
version: &str,
|
||||
download_info: &DownloadInfo,
|
||||
_download_info: &DownloadInfo,
|
||||
) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
|
||||
match browser_type {
|
||||
BrowserType::Brave => {
|
||||
// For Brave, we need to find the actual platform-specific asset
|
||||
let releases = self
|
||||
.api_client
|
||||
.fetch_brave_releases_with_caching(true)
|
||||
.await?;
|
||||
|
||||
// Find the release with the matching version
|
||||
let release = releases
|
||||
.iter()
|
||||
.find(|r| {
|
||||
r.tag_name == version || r.tag_name == format!("v{}", version.trim_start_matches('v'))
|
||||
})
|
||||
.ok_or(format!("Brave version {version} not found"))?;
|
||||
|
||||
// Get platform and architecture info
|
||||
let (os, arch) = Self::get_platform_info();
|
||||
|
||||
// Find the appropriate asset based on platform and architecture
|
||||
let asset_url = self
|
||||
.find_brave_asset(&release.assets, &os, &arch)
|
||||
.ok_or(format!(
|
||||
"No compatible asset found for Brave version {version} on {os}/{arch}"
|
||||
))?;
|
||||
|
||||
Ok(asset_url)
|
||||
}
|
||||
BrowserType::Zen => {
|
||||
// For Zen, verify the asset exists and handle different naming patterns
|
||||
let releases = match self.api_client.fetch_zen_releases_with_caching(true).await {
|
||||
Ok(releases) => releases,
|
||||
Err(e) => {
|
||||
log::error!("Failed to fetch Zen releases: {e}");
|
||||
return Err(format!("Failed to fetch Zen releases from GitHub API: {e}. This might be due to GitHub API rate limiting or network issues. Please try again later.").into());
|
||||
}
|
||||
};
|
||||
|
||||
let release = releases
|
||||
.iter()
|
||||
.find(|r| r.tag_name == version)
|
||||
.ok_or_else(|| {
|
||||
format!(
|
||||
"Zen version {} not found. Available versions: {}",
|
||||
version,
|
||||
releases
|
||||
.iter()
|
||||
.take(5)
|
||||
.map(|r| r.tag_name.as_str())
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ")
|
||||
)
|
||||
})?;
|
||||
|
||||
// Get platform and architecture info
|
||||
let (os, arch) = Self::get_platform_info();
|
||||
|
||||
// Find the appropriate asset
|
||||
let asset_url = self
|
||||
.find_zen_asset(&release.assets, &os, &arch)
|
||||
.ok_or_else(|| {
|
||||
let available_assets: Vec<&str> =
|
||||
release.assets.iter().map(|a| a.name.as_str()).collect();
|
||||
format!(
|
||||
"No compatible asset found for Zen version {} on {}/{}. Available assets: {}",
|
||||
version,
|
||||
os,
|
||||
arch,
|
||||
available_assets.join(", ")
|
||||
)
|
||||
})?;
|
||||
|
||||
Ok(asset_url)
|
||||
}
|
||||
BrowserType::Camoufox => {
|
||||
// For Camoufox, verify the asset exists and find the correct download URL
|
||||
let releases = self
|
||||
@@ -158,7 +127,11 @@ impl Downloader {
|
||||
let release = releases
|
||||
.iter()
|
||||
.find(|r| r.tag_name == version)
|
||||
.ok_or(format!("Camoufox version {version} not found"))?;
|
||||
.or_else(|| {
|
||||
log::info!("Camoufox: requested version {version} not found, using latest available");
|
||||
releases.first()
|
||||
})
|
||||
.ok_or("No Camoufox releases found".to_string())?;
|
||||
|
||||
// Get platform and architecture info
|
||||
let (os, arch) = Self::get_platform_info();
|
||||
@@ -179,14 +152,10 @@ impl Downloader {
|
||||
.fetch_wayfern_version_with_caching(true)
|
||||
.await?;
|
||||
|
||||
// Verify requested version matches available version
|
||||
if version_info.version != version {
|
||||
return Err(
|
||||
format!(
|
||||
"Wayfern version {version} not found. Available version: {}",
|
||||
version_info.version
|
||||
)
|
||||
.into(),
|
||||
log::info!(
|
||||
"Wayfern: requested version {version}, using available version {}",
|
||||
version_info.version
|
||||
);
|
||||
}
|
||||
|
||||
@@ -209,10 +178,6 @@ impl Downloader {
|
||||
|
||||
Ok(download_url)
|
||||
}
|
||||
_ => {
|
||||
// For other browsers, use the provided URL
|
||||
Ok(download_info.url.clone())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -239,110 +204,6 @@ impl Downloader {
|
||||
(os.to_string(), arch.to_string())
|
||||
}
|
||||
|
||||
/// Find the appropriate Brave asset for the current platform and architecture
|
||||
fn find_brave_asset(
|
||||
&self,
|
||||
assets: &[crate::browser::GithubAsset],
|
||||
os: &str,
|
||||
arch: &str,
|
||||
) -> Option<String> {
|
||||
// Brave asset naming patterns:
|
||||
// Windows: BraveBrowserStandaloneNightlySetup.exe, BraveBrowserStandaloneSilentNightlySetup.exe
|
||||
// macOS: Brave-Browser-Nightly-universal.dmg, Brave-Browser-Nightly-universal.pkg
|
||||
// Linux: brave-browser-1.79.119-linux-arm64.zip, brave-browser-1.79.119-linux-amd64.zip
|
||||
|
||||
let asset = match os {
|
||||
"windows" => {
|
||||
// For Windows, look for standalone setup EXE (not the auto-updater one)
|
||||
assets
|
||||
.iter()
|
||||
.find(|asset| {
|
||||
let name = asset.name.to_lowercase();
|
||||
name.contains("standalone") && name.ends_with(".exe") && !name.contains("silent")
|
||||
})
|
||||
.or_else(|| {
|
||||
// Fallback to any EXE if standalone not found
|
||||
assets.iter().find(|asset| asset.name.ends_with(".exe"))
|
||||
})
|
||||
}
|
||||
"macos" => {
|
||||
// For macOS, prefer universal DMG
|
||||
assets
|
||||
.iter()
|
||||
.find(|asset| {
|
||||
let name = asset.name.to_lowercase();
|
||||
name.contains("universal") && name.ends_with(".dmg")
|
||||
})
|
||||
.or_else(|| {
|
||||
// Fallback to any DMG
|
||||
assets.iter().find(|asset| asset.name.ends_with(".dmg"))
|
||||
})
|
||||
}
|
||||
"linux" => {
|
||||
// For Linux, be strict about architecture matching - same logic as has_compatible_brave_asset
|
||||
let arch_pattern = if arch == "arm64" { "arm64" } else { "amd64" };
|
||||
|
||||
assets.iter().find(|asset| {
|
||||
let name = asset.name.to_lowercase();
|
||||
name.contains("linux") && name.contains(arch_pattern) && name.ends_with(".zip")
|
||||
})
|
||||
}
|
||||
_ => None,
|
||||
};
|
||||
|
||||
asset.map(|a| a.browser_download_url.clone())
|
||||
}
|
||||
|
||||
/// Find the appropriate Zen asset for the current platform and architecture
|
||||
fn find_zen_asset(
|
||||
&self,
|
||||
assets: &[crate::browser::GithubAsset],
|
||||
os: &str,
|
||||
arch: &str,
|
||||
) -> Option<String> {
|
||||
// Zen asset naming patterns:
|
||||
// Windows: zen.installer.exe, zen.installer-arm64.exe
|
||||
// macOS: zen.macos-universal.dmg
|
||||
// Linux: zen.linux-x86_64.tar.xz, zen.linux-aarch64.tar.xz, zen-x86_64.AppImage, zen-aarch64.AppImage
|
||||
|
||||
let asset = match (os, arch) {
|
||||
("windows", "x64") => assets
|
||||
.iter()
|
||||
.find(|asset| asset.name == "zen.installer.exe"),
|
||||
("windows", "arm64") => assets
|
||||
.iter()
|
||||
.find(|asset| asset.name == "zen.installer-arm64.exe"),
|
||||
("macos", _) => assets
|
||||
.iter()
|
||||
.find(|asset| asset.name == "zen.macos-universal.dmg"),
|
||||
("linux", "x64") => {
|
||||
// Prefer tar.xz, fallback to AppImage
|
||||
assets
|
||||
.iter()
|
||||
.find(|asset| asset.name == "zen.linux-x86_64.tar.xz")
|
||||
.or_else(|| {
|
||||
assets
|
||||
.iter()
|
||||
.find(|asset| asset.name == "zen-x86_64.AppImage")
|
||||
})
|
||||
}
|
||||
("linux", "arm64") => {
|
||||
// Prefer tar.xz, fallback to AppImage
|
||||
assets
|
||||
.iter()
|
||||
.find(|asset| asset.name == "zen.linux-aarch64.tar.xz")
|
||||
.or_else(|| {
|
||||
assets
|
||||
.iter()
|
||||
.find(|asset| asset.name == "zen-aarch64.AppImage")
|
||||
})
|
||||
}
|
||||
_ => None,
|
||||
};
|
||||
|
||||
asset.map(|a| a.browser_download_url.clone())
|
||||
}
|
||||
|
||||
/// Find the appropriate Camoufox asset for the current platform and architecture
|
||||
fn find_camoufox_asset(
|
||||
&self,
|
||||
@@ -446,13 +307,15 @@ impl Downloader {
|
||||
let file_path = dest_path.join(&download_info.filename);
|
||||
|
||||
// Resolve the actual download URL
|
||||
log::info!(
|
||||
"Resolving download URL for {} {}",
|
||||
browser_type.as_str(),
|
||||
version
|
||||
);
|
||||
let download_url = self
|
||||
.resolve_download_url(browser_type.clone(), version, download_info)
|
||||
.await?;
|
||||
|
||||
// Check if this is a twilight release for special handling
|
||||
let is_twilight =
|
||||
browser_type == BrowserType::Zen && version.to_lowercase().contains("twilight");
|
||||
log::info!("Download URL resolved: {}", download_url);
|
||||
|
||||
// Determine if we have a partial file to resume
|
||||
let mut existing_size: u64 = 0;
|
||||
@@ -460,9 +323,10 @@ impl Downloader {
|
||||
existing_size = meta.len();
|
||||
}
|
||||
|
||||
// Build request, add Range only if we have bytes. If the server responds with 416 (Range Not
|
||||
// Satisfiable), delete the partial file and retry once without the Range header.
|
||||
let response = {
|
||||
// Build request with retry logic for transient network errors.
|
||||
let max_retries = 3u32;
|
||||
let mut response: Option<reqwest::Response> = None;
|
||||
for attempt in 0..=max_retries {
|
||||
let mut request = self
|
||||
.client
|
||||
.get(&download_url)
|
||||
@@ -475,27 +339,43 @@ impl Downloader {
|
||||
request = request.header("Range", format!("bytes={existing_size}-"));
|
||||
}
|
||||
|
||||
let first = request.send().await?;
|
||||
|
||||
if first.status().as_u16() == 416 && existing_size > 0 {
|
||||
// Partial file on disk is not acceptable to the server — remove it and retry from scratch
|
||||
let _ = std::fs::remove_file(&file_path);
|
||||
existing_size = 0;
|
||||
|
||||
let retry = self
|
||||
.client
|
||||
.get(&download_url)
|
||||
.header(
|
||||
"User-Agent",
|
||||
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36",
|
||||
)
|
||||
.send()
|
||||
.await?;
|
||||
retry
|
||||
} else {
|
||||
first
|
||||
log::info!("Sending download request (attempt {})...", attempt + 1);
|
||||
match request.send().await {
|
||||
Ok(resp) => {
|
||||
log::info!(
|
||||
"Download response received: status={}, content-length={:?}",
|
||||
resp.status(),
|
||||
resp.content_length()
|
||||
);
|
||||
if resp.status().as_u16() == 416 && existing_size > 0 {
|
||||
let _ = std::fs::remove_file(&file_path);
|
||||
existing_size = 0;
|
||||
log::warn!("Download returned 416, retrying without Range header");
|
||||
continue;
|
||||
}
|
||||
response = Some(resp);
|
||||
break;
|
||||
}
|
||||
Err(e) => {
|
||||
let is_retryable = e.is_connect() || e.is_timeout() || e.is_request();
|
||||
if is_retryable && attempt < max_retries {
|
||||
let delay = 2u64.pow(attempt);
|
||||
log::warn!(
|
||||
"Download attempt {} failed ({}), retrying in {}s...",
|
||||
attempt + 1,
|
||||
e,
|
||||
delay
|
||||
);
|
||||
tokio::time::sleep(std::time::Duration::from_secs(delay)).await;
|
||||
} else {
|
||||
return Err(format!("Download failed after {} attempts: {}", attempt + 1, e).into());
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
let response = response.ok_or_else(|| -> Box<dyn std::error::Error + Send + Sync> {
|
||||
"Download failed: no response received".into()
|
||||
})?;
|
||||
|
||||
// Check if the response is successful (200 OK or 206 Partial Content)
|
||||
if !(response.status().is_success() || response.status().as_u16() == 206) {
|
||||
@@ -533,6 +413,20 @@ impl Downloader {
|
||||
existing_size = 0;
|
||||
}
|
||||
|
||||
// If the existing file already matches the total size, skip the download
|
||||
if existing_size > 0 {
|
||||
if let Some(total) = total_size {
|
||||
if existing_size >= total {
|
||||
log::info!(
|
||||
"Archive {} already complete ({} bytes), skipping download",
|
||||
file_path.display(),
|
||||
existing_size
|
||||
);
|
||||
return Ok(file_path);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut downloaded = existing_size;
|
||||
let start_time = std::time::Instant::now();
|
||||
let mut last_update = start_time;
|
||||
@@ -548,11 +442,7 @@ impl Downloader {
|
||||
0.0
|
||||
};
|
||||
|
||||
let initial_stage = if is_twilight {
|
||||
"downloading (twilight rolling release)".to_string()
|
||||
} else {
|
||||
"downloading".to_string()
|
||||
};
|
||||
let initial_stage = "downloading".to_string();
|
||||
|
||||
let progress = DownloadProgress {
|
||||
browser: browser_type.as_str().to_string(),
|
||||
@@ -567,12 +457,16 @@ impl Downloader {
|
||||
|
||||
let _ = events::emit("download-progress", &progress);
|
||||
|
||||
// Open file in append mode (resuming) or create new
|
||||
// Open file in append mode (resuming) or create new.
|
||||
// Wrap in BufWriter with a large buffer to reduce the number of disk writes,
|
||||
// which dramatically improves download speed on Windows (NTFS + Defender overhead).
|
||||
use std::fs::OpenOptions;
|
||||
let mut file = OpenOptions::new()
|
||||
use std::io::Write;
|
||||
let raw_file = OpenOptions::new()
|
||||
.create(true)
|
||||
.append(true)
|
||||
.open(&file_path)?;
|
||||
let mut file = io::BufWriter::with_capacity(8 * 1024 * 1024, raw_file);
|
||||
let mut stream = response.bytes_stream();
|
||||
|
||||
use futures_util::StreamExt;
|
||||
@@ -585,7 +479,7 @@ impl Downloader {
|
||||
}
|
||||
}
|
||||
let chunk = chunk?;
|
||||
io::copy(&mut chunk.as_ref(), &mut file)?;
|
||||
file.write_all(&chunk)?;
|
||||
downloaded += chunk.len() as u64;
|
||||
|
||||
let now = std::time::Instant::now();
|
||||
@@ -614,11 +508,7 @@ impl Downloader {
|
||||
None
|
||||
};
|
||||
|
||||
let stage_description = if is_twilight {
|
||||
"downloading (twilight rolling release)".to_string()
|
||||
} else {
|
||||
"downloading".to_string()
|
||||
};
|
||||
let stage_description = "downloading".to_string();
|
||||
|
||||
let progress = DownloadProgress {
|
||||
browser: browser_type.as_str().to_string(),
|
||||
@@ -636,6 +526,9 @@ impl Downloader {
|
||||
}
|
||||
}
|
||||
|
||||
// Flush remaining buffered data to disk
|
||||
file.flush()?;
|
||||
|
||||
Ok(file_path)
|
||||
}
|
||||
|
||||
@@ -652,6 +545,41 @@ impl Downloader {
|
||||
return Err("Please accept Wayfern Terms and Conditions before downloading browsers".into());
|
||||
}
|
||||
|
||||
// For Wayfern/Camoufox, resolve the actual available version from the API
|
||||
let version = if browser_str == "wayfern" {
|
||||
match self
|
||||
.api_client
|
||||
.fetch_wayfern_version_with_caching(true)
|
||||
.await
|
||||
{
|
||||
Ok(info) if info.version != version => {
|
||||
log::info!(
|
||||
"Wayfern: requested {version}, using available {}",
|
||||
info.version
|
||||
);
|
||||
info.version
|
||||
}
|
||||
_ => version,
|
||||
}
|
||||
} else if browser_str == "camoufox" {
|
||||
match self
|
||||
.api_client
|
||||
.fetch_camoufox_releases_with_caching(true)
|
||||
.await
|
||||
{
|
||||
Ok(releases) if !releases.is_empty() && releases[0].tag_name != version => {
|
||||
log::info!(
|
||||
"Camoufox: requested {version}, using available {}",
|
||||
releases[0].tag_name
|
||||
);
|
||||
releases[0].tag_name.clone()
|
||||
}
|
||||
_ => version,
|
||||
}
|
||||
} else {
|
||||
version
|
||||
};
|
||||
|
||||
// Check if this browser-version pair is already being downloaded
|
||||
let download_key = format!("{browser_str}-{version}");
|
||||
let cancel_token = {
|
||||
@@ -674,21 +602,7 @@ impl Downloader {
|
||||
|
||||
// Use injected registry instance
|
||||
|
||||
// Get binaries directory - we need to get it from somewhere
|
||||
// This is a bit tricky since we don't have access to BrowserRunner's get_binaries_dir
|
||||
// We'll need to replicate this logic
|
||||
let binaries_dir = if let Some(base_dirs) = directories::BaseDirs::new() {
|
||||
let mut path = base_dirs.data_local_dir().to_path_buf();
|
||||
path.push(if cfg!(debug_assertions) {
|
||||
"DonutBrowserDev"
|
||||
} else {
|
||||
"DonutBrowser"
|
||||
});
|
||||
path.push("binaries");
|
||||
path
|
||||
} else {
|
||||
return Err("Failed to get base directories".into());
|
||||
};
|
||||
let binaries_dir = crate::app_dirs::binaries_dir();
|
||||
|
||||
// Check if registry thinks it's downloaded, but also verify files actually exist
|
||||
if self.registry.is_browser_downloaded(&browser_str, &version) {
|
||||
@@ -816,11 +730,16 @@ impl Downloader {
|
||||
// Do not remove the archive here. We keep it until verification succeeds.
|
||||
}
|
||||
Err(e) => {
|
||||
// Do not remove the archive or extracted files. Just drop the registry entry
|
||||
// so it won't be reported as downloaded.
|
||||
log::error!("Extraction failed for {browser_str} {version}: {e}");
|
||||
|
||||
// Delete the corrupt/invalid archive so a fresh download happens next time
|
||||
if download_path.exists() {
|
||||
log::info!("Deleting corrupt archive: {}", download_path.display());
|
||||
let _ = std::fs::remove_file(&download_path);
|
||||
}
|
||||
|
||||
let _ = self.registry.remove_browser(&browser_str, &version);
|
||||
let _ = self.registry.save();
|
||||
// Remove browser-version pair from downloading set on error
|
||||
{
|
||||
let mut downloading = DOWNLOADING_BROWSERS.lock().unwrap();
|
||||
downloading.remove(&download_key);
|
||||
@@ -829,6 +748,20 @@ impl Downloader {
|
||||
let mut tokens = DOWNLOAD_CANCELLATION_TOKENS.lock().unwrap();
|
||||
tokens.remove(&download_key);
|
||||
}
|
||||
|
||||
// Emit error stage so the UI shows a toast
|
||||
let progress = DownloadProgress {
|
||||
browser: browser_str.clone(),
|
||||
version: version.clone(),
|
||||
downloaded_bytes: 0,
|
||||
total_bytes: None,
|
||||
percentage: 0.0,
|
||||
speed_bytes_per_sec: 0.0,
|
||||
eta_seconds: None,
|
||||
stage: "error".to_string(),
|
||||
};
|
||||
let _ = events::emit("download-progress", &progress);
|
||||
|
||||
return Err(format!("Failed to extract browser: {e}").into());
|
||||
}
|
||||
}
|
||||
@@ -975,7 +908,6 @@ impl Downloader {
|
||||
.await
|
||||
{
|
||||
log::warn!("Failed to create version.json for Camoufox: {e}");
|
||||
// Don't fail the download if version.json creation fails
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1002,10 +934,51 @@ impl Downloader {
|
||||
tokens.remove(&download_key);
|
||||
}
|
||||
|
||||
// Auto-update non-running profiles to the latest installed version and cleanup unused binaries
|
||||
{
|
||||
let app_handle_for_update = app_handle.clone();
|
||||
tauri::async_runtime::spawn(async move {
|
||||
let auto_updater = crate::auto_updater::AutoUpdater::instance();
|
||||
match auto_updater.update_profiles_to_latest_installed(&app_handle_for_update) {
|
||||
Ok(updated) => {
|
||||
if !updated.is_empty() {
|
||||
log::info!(
|
||||
"Auto-updated {} profiles to latest installed versions: {:?}",
|
||||
updated.len(),
|
||||
updated
|
||||
);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Failed to auto-update profile versions: {e}");
|
||||
}
|
||||
}
|
||||
|
||||
let registry = crate::downloaded_browsers_registry::DownloadedBrowsersRegistry::instance();
|
||||
match registry.cleanup_unused_binaries() {
|
||||
Ok(cleaned) => {
|
||||
if !cleaned.is_empty() {
|
||||
log::info!("Cleaned up unused binaries after download: {:?}", cleaned);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Failed to cleanup unused binaries: {e}");
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
Ok(version)
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if a specific browser-version pair is currently being downloaded
|
||||
pub fn is_downloading(browser: &str, version: &str) -> bool {
|
||||
let download_key = format!("{browser}-{version}");
|
||||
let downloading = DOWNLOADING_BROWSERS.lock().unwrap();
|
||||
downloading.contains(&download_key)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn download_browser(
|
||||
app_handle: tauri::AppHandle,
|
||||
@@ -1040,85 +1013,21 @@ pub async fn cancel_download(browser_str: String, version: String) -> Result<(),
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::api_client::ApiClient;
|
||||
use crate::browser::BrowserType;
|
||||
use crate::browser_version_manager::DownloadInfo;
|
||||
|
||||
use tempfile::TempDir;
|
||||
use wiremock::matchers::{method, path};
|
||||
use wiremock::{Mock, MockServer, ResponseTemplate};
|
||||
|
||||
async fn setup_mock_server() -> MockServer {
|
||||
MockServer::start().await
|
||||
}
|
||||
|
||||
fn create_test_api_client(server: &MockServer) -> ApiClient {
|
||||
let base_url = server.uri();
|
||||
ApiClient::new_with_base_urls(
|
||||
base_url.clone(), // firefox_api_base
|
||||
base_url.clone(), // firefox_dev_api_base
|
||||
base_url.clone(), // github_api_base
|
||||
base_url.clone(), // chromium_api_base
|
||||
)
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_resolve_firefox_download_url() {
|
||||
let server = setup_mock_server().await;
|
||||
async fn test_download_file_with_progress() {
|
||||
let server = MockServer::start().await;
|
||||
let downloader = Downloader::new_for_test();
|
||||
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "https://download.mozilla.org/?product=firefox-139.0&os=osx&lang=en-US".to_string(),
|
||||
filename: "firefox-test.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::Firefox, "139.0", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let url = result.unwrap();
|
||||
assert_eq!(url, download_info.url);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_resolve_chromium_download_url() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: "https://commondatastorage.googleapis.com/chromium-browser-snapshots/Mac/1465660/chrome-mac.zip".to_string(),
|
||||
filename: "chromium-test.zip".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let result = downloader
|
||||
.resolve_download_url(BrowserType::Chromium, "1465660", &download_info)
|
||||
.await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let url = result.unwrap();
|
||||
assert_eq!(url, download_info.url);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_download_browser_with_progress() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
|
||||
// Create a temporary directory for the test
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let dest_path = temp_dir.path();
|
||||
|
||||
// Create test file content (simulating a small download)
|
||||
let test_content = b"This is a test file content for download simulation";
|
||||
|
||||
// Mock the download endpoint
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/test-download"))
|
||||
.respond_with(
|
||||
@@ -1130,85 +1039,51 @@ mod tests {
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: format!("{}/test-download", server.uri()),
|
||||
filename: "test-file.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
// Create a mock app handle for testing
|
||||
let app = tauri::test::mock_app();
|
||||
let app_handle = app.handle().clone();
|
||||
let download_url = format!("{}/test-download", server.uri());
|
||||
|
||||
let result = downloader
|
||||
.download_browser(
|
||||
&app_handle,
|
||||
BrowserType::Firefox,
|
||||
"139.0",
|
||||
&download_info,
|
||||
dest_path,
|
||||
None,
|
||||
)
|
||||
.download_file(&download_url, dest_path, "test-file.dmg")
|
||||
.await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
let downloaded_file = result.unwrap();
|
||||
assert!(downloaded_file.exists());
|
||||
|
||||
// Verify file content
|
||||
let downloaded_content = std::fs::read(&downloaded_file).unwrap();
|
||||
assert_eq!(downloaded_content, test_content);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_download_browser_network_error() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
async fn test_download_file_network_error() {
|
||||
let server = MockServer::start().await;
|
||||
let downloader = Downloader::new_for_test();
|
||||
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let dest_path = temp_dir.path();
|
||||
|
||||
// Mock a 404 response
|
||||
Mock::given(method("GET"))
|
||||
.and(path("/missing-file"))
|
||||
.respond_with(ResponseTemplate::new(404))
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: format!("{}/missing-file", server.uri()),
|
||||
filename: "missing-file.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let app = tauri::test::mock_app();
|
||||
let app_handle = app.handle().clone();
|
||||
let download_url = format!("{}/missing-file", server.uri());
|
||||
|
||||
let result = downloader
|
||||
.download_browser(
|
||||
&app_handle,
|
||||
BrowserType::Firefox,
|
||||
"139.0",
|
||||
&download_info,
|
||||
dest_path,
|
||||
None,
|
||||
)
|
||||
.download_file(&download_url, dest_path, "missing-file.dmg")
|
||||
.await;
|
||||
|
||||
assert!(result.is_err());
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_download_browser_chunked_response() {
|
||||
let server = setup_mock_server().await;
|
||||
let api_client = create_test_api_client(&server);
|
||||
let downloader = Downloader::new_with_api_client(api_client);
|
||||
async fn test_download_file_chunked_response() {
|
||||
let server = MockServer::start().await;
|
||||
let downloader = Downloader::new_for_test();
|
||||
|
||||
let temp_dir = TempDir::new().unwrap();
|
||||
let dest_path = temp_dir.path();
|
||||
|
||||
// Create larger test content to simulate chunked transfer
|
||||
let test_content = vec![42u8; 1024]; // 1KB of data
|
||||
|
||||
Mock::given(method("GET"))
|
||||
@@ -1222,24 +1097,10 @@ mod tests {
|
||||
.mount(&server)
|
||||
.await;
|
||||
|
||||
let download_info = DownloadInfo {
|
||||
url: format!("{}/chunked-download", server.uri()),
|
||||
filename: "chunked-file.dmg".to_string(),
|
||||
is_archive: true,
|
||||
};
|
||||
|
||||
let app = tauri::test::mock_app();
|
||||
let app_handle = app.handle().clone();
|
||||
let download_url = format!("{}/chunked-download", server.uri());
|
||||
|
||||
let result = downloader
|
||||
.download_browser(
|
||||
&app_handle,
|
||||
BrowserType::Chromium,
|
||||
"1465660",
|
||||
&download_info,
|
||||
dest_path,
|
||||
None,
|
||||
)
|
||||
.download_file(&download_url, dest_path, "chunked-file.dmg")
|
||||
.await;
|
||||
|
||||
assert!(result.is_ok());
|
||||
|
||||
@@ -0,0 +1,335 @@
|
||||
use std::collections::HashMap;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Mutex;
|
||||
|
||||
use crate::profile::BrowserProfile;
|
||||
|
||||
lazy_static::lazy_static! {
|
||||
static ref EPHEMERAL_DIRS: Mutex<HashMap<String, PathBuf>> = Mutex::new(HashMap::new());
|
||||
}
|
||||
|
||||
/// Get or create the RAM-backed base directory for ephemeral profiles.
|
||||
/// Linux: /dev/shm (always tmpfs). macOS: RAM disk via hdiutil. Windows: imdisk RAM disk.
|
||||
fn get_ephemeral_base_dir() -> Result<PathBuf, String> {
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
let base = PathBuf::from("/dev/shm/donut-ephemeral");
|
||||
std::fs::create_dir_all(&base)
|
||||
.map_err(|e| format!("Failed to create ephemeral base in /dev/shm: {e}"))?;
|
||||
Ok(base)
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "linux"))]
|
||||
{
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
if let Ok(mount) = get_or_create_macos_ramdisk() {
|
||||
return Ok(mount);
|
||||
}
|
||||
log::warn!("Failed to create macOS RAM disk, ephemeral profiles may use disk");
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
if let Ok(mount) = get_or_create_windows_ramdisk() {
|
||||
return Ok(mount);
|
||||
}
|
||||
log::warn!("Failed to create Windows RAM disk, ephemeral profiles may use disk");
|
||||
}
|
||||
|
||||
// Fallback
|
||||
let base = std::env::temp_dir().join("donut-ephemeral");
|
||||
std::fs::create_dir_all(&base)
|
||||
.map_err(|e| format!("Failed to create ephemeral base dir: {e}"))?;
|
||||
Ok(base)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
fn get_or_create_macos_ramdisk() -> Result<PathBuf, String> {
|
||||
let mount_point = PathBuf::from("/Volumes/DonutEphemeral");
|
||||
|
||||
// Reuse existing RAM disk from a previous session
|
||||
if mount_point.exists() && mount_point.is_dir() {
|
||||
return Ok(mount_point);
|
||||
}
|
||||
|
||||
// 256 MB in 512-byte sectors
|
||||
let sectors = 256 * 2048;
|
||||
let output = std::process::Command::new("hdiutil")
|
||||
.args(["attach", "-nomount", &format!("ram://{sectors}")])
|
||||
.output()
|
||||
.map_err(|e| format!("hdiutil attach failed: {e}"))?;
|
||||
|
||||
if !output.status.success() {
|
||||
return Err(format!(
|
||||
"hdiutil attach failed: {}",
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
));
|
||||
}
|
||||
|
||||
let dev = String::from_utf8_lossy(&output.stdout).trim().to_string();
|
||||
|
||||
let fmt = std::process::Command::new("diskutil")
|
||||
.args(["erasevolume", "HFS+", "DonutEphemeral", &dev])
|
||||
.output()
|
||||
.map_err(|e| format!("diskutil erasevolume failed: {e}"))?;
|
||||
|
||||
if !fmt.status.success() {
|
||||
let _ = std::process::Command::new("hdiutil")
|
||||
.args(["detach", &dev])
|
||||
.output();
|
||||
return Err(format!(
|
||||
"diskutil erasevolume failed: {}",
|
||||
String::from_utf8_lossy(&fmt.stderr)
|
||||
));
|
||||
}
|
||||
|
||||
log::info!("Created macOS RAM disk at {}", mount_point.display());
|
||||
Ok(mount_point)
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
fn get_or_create_windows_ramdisk() -> Result<PathBuf, String> {
|
||||
// Check if a previous RAM disk with our directory already exists
|
||||
for letter in ['R', 'Q', 'P', 'O'] {
|
||||
let base = PathBuf::from(format!("{}:\\DonutEphemeral", letter));
|
||||
if base.exists() && base.is_dir() {
|
||||
return Ok(base);
|
||||
}
|
||||
}
|
||||
|
||||
// Try to create a RAM disk using imdisk (open-source RAM disk driver)
|
||||
for letter in ['R', 'Q', 'P', 'O'] {
|
||||
let drive = format!("{}:", letter);
|
||||
if PathBuf::from(format!("{}\\", drive)).exists() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let output = std::process::Command::new("imdisk")
|
||||
.args(["-a", "-s", "256M", "-m", &drive, "-p", "/fs:ntfs /q /y"])
|
||||
.output();
|
||||
|
||||
match output {
|
||||
Ok(out) if out.status.success() => {
|
||||
let base = PathBuf::from(format!("{}\\DonutEphemeral", drive));
|
||||
std::fs::create_dir_all(&base)
|
||||
.map_err(|e| format!("Failed to create dir on RAM disk: {e}"))?;
|
||||
log::info!("Created Windows RAM disk at {}", base.display());
|
||||
return Ok(base);
|
||||
}
|
||||
Ok(out) => {
|
||||
log::debug!(
|
||||
"imdisk failed for drive {}: {}",
|
||||
drive,
|
||||
String::from_utf8_lossy(&out.stderr)
|
||||
);
|
||||
}
|
||||
Err(e) => {
|
||||
return Err(format!("imdisk not available: {e}"));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err("Could not create Windows RAM disk".to_string())
|
||||
}
|
||||
|
||||
pub fn create_ephemeral_dir(profile_id: &str) -> Result<PathBuf, String> {
|
||||
let base = get_ephemeral_base_dir()?;
|
||||
let dir_path = base.join(profile_id);
|
||||
|
||||
std::fs::create_dir_all(&dir_path).map_err(|e| format!("Failed to create ephemeral dir: {e}"))?;
|
||||
|
||||
EPHEMERAL_DIRS
|
||||
.lock()
|
||||
.map_err(|e| format!("Failed to lock ephemeral dirs: {e}"))?
|
||||
.insert(profile_id.to_string(), dir_path.clone());
|
||||
|
||||
log::info!(
|
||||
"Created ephemeral dir for profile {}: {}",
|
||||
profile_id,
|
||||
dir_path.display()
|
||||
);
|
||||
|
||||
Ok(dir_path)
|
||||
}
|
||||
|
||||
pub fn get_ephemeral_dir(profile_id: &str) -> Option<PathBuf> {
|
||||
EPHEMERAL_DIRS.lock().ok()?.get(profile_id).cloned()
|
||||
}
|
||||
|
||||
pub fn remove_ephemeral_dir(profile_id: &str) {
|
||||
let dir = EPHEMERAL_DIRS
|
||||
.lock()
|
||||
.ok()
|
||||
.and_then(|mut map| map.remove(profile_id));
|
||||
|
||||
if let Some(dir_path) = dir {
|
||||
if dir_path.exists() {
|
||||
if let Err(e) = std::fs::remove_dir_all(&dir_path) {
|
||||
log::warn!("Failed to remove ephemeral dir {}: {e}", dir_path.display());
|
||||
} else {
|
||||
log::info!(
|
||||
"Removed ephemeral dir for profile {}: {}",
|
||||
profile_id,
|
||||
dir_path.display()
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Recover ephemeral dir mappings on startup by scanning the RAM-backed base dir.
|
||||
/// Dir names are profile UUIDs, so we re-populate the in-memory HashMap.
|
||||
/// Also cleans up old disk-based dirs from previous versions.
|
||||
pub fn recover_ephemeral_dirs() {
|
||||
cleanup_legacy_dirs();
|
||||
|
||||
let base = match get_ephemeral_base_dir() {
|
||||
Ok(base) => base,
|
||||
Err(e) => {
|
||||
log::warn!("Cannot recover ephemeral dirs: {e}");
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let entries = match std::fs::read_dir(&base) {
|
||||
Ok(entries) => entries,
|
||||
Err(_) => return,
|
||||
};
|
||||
|
||||
let mut dirs = match EPHEMERAL_DIRS.lock() {
|
||||
Ok(dirs) => dirs,
|
||||
Err(_) => return,
|
||||
};
|
||||
|
||||
for entry in entries.flatten() {
|
||||
if entry.path().is_dir() {
|
||||
if let Some(name) = entry.file_name().to_str() {
|
||||
if uuid::Uuid::parse_str(name).is_ok() {
|
||||
dirs.insert(name.to_string(), entry.path());
|
||||
log::info!("Recovered ephemeral dir for profile {}", name);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Remove old-format ephemeral dirs from /tmp (pre-tmpfs migration).
|
||||
fn cleanup_legacy_dirs() {
|
||||
let temp_dir = std::env::temp_dir();
|
||||
let entries = match std::fs::read_dir(&temp_dir) {
|
||||
Ok(entries) => entries,
|
||||
Err(_) => return,
|
||||
};
|
||||
|
||||
for entry in entries.flatten() {
|
||||
if let Some(name) = entry.file_name().to_str() {
|
||||
if name.starts_with("donut-ephemeral-") && entry.path().is_dir() {
|
||||
if let Err(e) = std::fs::remove_dir_all(entry.path()) {
|
||||
log::warn!("Failed to clean up legacy ephemeral dir: {e}");
|
||||
} else {
|
||||
log::info!(
|
||||
"Cleaned up legacy ephemeral dir: {}",
|
||||
entry.path().display()
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_effective_profile_path(profile: &BrowserProfile, profiles_dir: &Path) -> PathBuf {
|
||||
if profile.ephemeral {
|
||||
if let Some(dir) = get_ephemeral_dir(&profile.id.to_string()) {
|
||||
return dir;
|
||||
}
|
||||
}
|
||||
profile.get_profile_data_path(profiles_dir)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
fn make_test_profile(id: uuid::Uuid, ephemeral: bool) -> BrowserProfile {
|
||||
BrowserProfile {
|
||||
id,
|
||||
name: "test".to_string(),
|
||||
browser: "camoufox".to_string(),
|
||||
version: "1.0".to_string(),
|
||||
proxy_id: None,
|
||||
vpn_id: None,
|
||||
launch_hook: None,
|
||||
process_id: None,
|
||||
last_launch: None,
|
||||
release_type: "stable".to_string(),
|
||||
camoufox_config: None,
|
||||
wayfern_config: None,
|
||||
group_id: None,
|
||||
tags: Vec::new(),
|
||||
note: None,
|
||||
sync_mode: crate::profile::types::SyncMode::Disabled,
|
||||
encryption_salt: None,
|
||||
last_sync: None,
|
||||
host_os: None,
|
||||
ephemeral,
|
||||
extension_group_id: None,
|
||||
proxy_bypass_rules: Vec::new(),
|
||||
created_by_id: None,
|
||||
created_by_email: None,
|
||||
dns_blocklist: None,
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[serial_test::serial]
|
||||
fn test_ephemeral_dir_lifecycle() {
|
||||
// Clear global state to avoid interference from other tests
|
||||
EPHEMERAL_DIRS.lock().unwrap().clear();
|
||||
|
||||
let profile_id = uuid::Uuid::new_v4();
|
||||
let id_str = profile_id.to_string();
|
||||
|
||||
let dir = create_ephemeral_dir(&id_str).unwrap();
|
||||
assert!(dir.is_dir());
|
||||
assert_eq!(get_ephemeral_dir(&id_str), Some(dir.clone()));
|
||||
|
||||
let ephemeral_profile = make_test_profile(profile_id, true);
|
||||
let profiles_dir = std::env::temp_dir().join("test_profiles_ephemeral");
|
||||
assert_eq!(
|
||||
get_effective_profile_path(&ephemeral_profile, &profiles_dir),
|
||||
dir
|
||||
);
|
||||
|
||||
remove_ephemeral_dir(&id_str);
|
||||
assert!(!dir.exists());
|
||||
assert!(get_ephemeral_dir(&id_str).is_none());
|
||||
|
||||
let persistent_profile = make_test_profile(uuid::Uuid::new_v4(), false);
|
||||
let expected = persistent_profile.get_profile_data_path(&profiles_dir);
|
||||
assert_eq!(
|
||||
get_effective_profile_path(&persistent_profile, &profiles_dir),
|
||||
expected
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[serial_test::serial]
|
||||
fn test_recover_ephemeral_dirs() {
|
||||
let base = get_ephemeral_base_dir().unwrap();
|
||||
let test_id = uuid::Uuid::new_v4().to_string();
|
||||
let test_dir = base.join(&test_id);
|
||||
std::fs::create_dir_all(&test_dir).unwrap();
|
||||
|
||||
// Clear the HashMap so recovery has something to find
|
||||
EPHEMERAL_DIRS.lock().unwrap().remove(&test_id);
|
||||
assert!(get_ephemeral_dir(&test_id).is_none());
|
||||
|
||||
recover_ephemeral_dirs();
|
||||
assert_eq!(get_ephemeral_dir(&test_id), Some(test_dir.clone()));
|
||||
|
||||
// Clean up
|
||||
remove_ephemeral_dir(&test_id);
|
||||
}
|
||||
}
|
||||
@@ -6,8 +6,8 @@ use crate::browser::BrowserType;
|
||||
use crate::downloader::DownloadProgress;
|
||||
use crate::events;
|
||||
|
||||
#[cfg(any(target_os = "macos", target_os = "windows"))]
|
||||
use std::process::Command;
|
||||
#[cfg(target_os = "macos")]
|
||||
use tokio::process::Command;
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
use std::fs::create_dir_all;
|
||||
@@ -38,12 +38,7 @@ impl Extractor {
|
||||
"camoufox"
|
||||
} else if dest_dir.to_string_lossy().contains("wayfern") {
|
||||
"wayfern"
|
||||
} else if dest_dir.to_string_lossy().contains("firefox") {
|
||||
"firefox"
|
||||
} else if dest_dir.to_string_lossy().contains("zen") {
|
||||
"zen"
|
||||
} else {
|
||||
// For other browsers, assume the structure is already correct
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
@@ -212,6 +207,20 @@ impl Extractor {
|
||||
|
||||
match extraction_result {
|
||||
Ok(path) => {
|
||||
// Remove quarantine attributes on macOS to prevent
|
||||
// "app was prevented from modifying data" prompts
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
let _ = tokio::process::Command::new("xattr")
|
||||
.args([
|
||||
"-dr",
|
||||
"com.apple.quarantine",
|
||||
dest_dir.to_str().unwrap_or("."),
|
||||
])
|
||||
.output()
|
||||
.await;
|
||||
}
|
||||
|
||||
log::info!(
|
||||
"Successfully extracted {} {} to: {}",
|
||||
browser_type.as_str(),
|
||||
@@ -237,22 +246,21 @@ impl Extractor {
|
||||
&self,
|
||||
file_path: &Path,
|
||||
) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// First check file extension for DMG files since they're common on macOS
|
||||
// and can have misleading magic numbers
|
||||
// Check file extension first for container formats (DMG, MSI) whose internal
|
||||
// compression makes magic bytes unreliable
|
||||
if let Some(ext) = file_path.extension().and_then(|ext| ext.to_str()) {
|
||||
if ext.to_lowercase() == "dmg" {
|
||||
return Ok("dmg".to_string());
|
||||
}
|
||||
if ext.to_lowercase() == "msi" {
|
||||
return Ok("msi".to_string());
|
||||
match ext.to_lowercase().as_str() {
|
||||
"dmg" => return Ok("dmg".to_string()),
|
||||
"msi" => return Ok("msi".to_string()),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
let mut file = File::open(file_path)?;
|
||||
let mut buffer = [0u8; 12]; // Read first 12 bytes for magic number detection
|
||||
let mut buffer = [0u8; 12];
|
||||
file.read_exact(&mut buffer)?;
|
||||
|
||||
// Check magic numbers for different file types
|
||||
// Check magic numbers for other file types
|
||||
match &buffer[0..4] {
|
||||
[0x50, 0x4B, 0x03, 0x04] | [0x50, 0x4B, 0x05, 0x06] | [0x50, 0x4B, 0x07, 0x08] => {
|
||||
return Ok("zip".to_string())
|
||||
@@ -362,16 +370,20 @@ impl Extractor {
|
||||
.args([
|
||||
"attach",
|
||||
"-nobrowse",
|
||||
"-noverify",
|
||||
"-noautoopen",
|
||||
"-mountpoint",
|
||||
mount_point.to_str().unwrap(),
|
||||
dmg_path.to_str().unwrap(),
|
||||
])
|
||||
.output()?;
|
||||
.stdin(std::process::Stdio::null())
|
||||
.output()
|
||||
.await?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
log::info!("Failed to mount DMG. stdout: {stdout}, stderr: {stderr}");
|
||||
log::error!("Failed to mount DMG. stdout: {stdout}, stderr: {stderr}");
|
||||
|
||||
// Clean up mount point before returning error
|
||||
let _ = fs::remove_dir_all(&mount_point);
|
||||
@@ -387,12 +399,13 @@ impl Extractor {
|
||||
let app_entry = match app_result {
|
||||
Ok(app_path) => app_path,
|
||||
Err(e) => {
|
||||
log::info!("Failed to find .app in mount point: {e}");
|
||||
log::error!("Failed to find .app in mount point: {e}");
|
||||
|
||||
// Try to unmount before returning error
|
||||
let _ = Command::new("hdiutil")
|
||||
.args(["detach", "-force", mount_point.to_str().unwrap()])
|
||||
.output();
|
||||
.output()
|
||||
.await;
|
||||
let _ = fs::remove_dir_all(&mount_point);
|
||||
|
||||
return Err("No .app found after extraction".into());
|
||||
@@ -412,16 +425,18 @@ impl Extractor {
|
||||
app_entry.to_str().unwrap(),
|
||||
app_path.to_str().unwrap(),
|
||||
])
|
||||
.output()?;
|
||||
.output()
|
||||
.await?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
log::info!("Failed to copy app: {stderr}");
|
||||
log::error!("Failed to copy app: {stderr}");
|
||||
|
||||
// Unmount before returning error
|
||||
let _ = Command::new("hdiutil")
|
||||
.args(["detach", "-force", mount_point.to_str().unwrap()])
|
||||
.output();
|
||||
.output()
|
||||
.await;
|
||||
let _ = fs::remove_dir_all(&mount_point);
|
||||
|
||||
return Err(format!("Failed to copy app: {stderr}").into());
|
||||
@@ -432,18 +447,21 @@ impl Extractor {
|
||||
// Remove quarantine attributes
|
||||
let _ = Command::new("xattr")
|
||||
.args(["-dr", "com.apple.quarantine", app_path.to_str().unwrap()])
|
||||
.output();
|
||||
.output()
|
||||
.await;
|
||||
|
||||
let _ = Command::new("xattr")
|
||||
.args(["-cr", app_path.to_str().unwrap()])
|
||||
.output();
|
||||
.output()
|
||||
.await;
|
||||
|
||||
log::info!("Removed quarantine attributes");
|
||||
|
||||
// Unmount the DMG
|
||||
let output = Command::new("hdiutil")
|
||||
.args(["detach", mount_point.to_str().unwrap()])
|
||||
.output()?;
|
||||
.output()
|
||||
.await?;
|
||||
|
||||
if !output.status.success() {
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
@@ -593,7 +611,11 @@ impl Extractor {
|
||||
}
|
||||
}
|
||||
|
||||
log::info!("ZIP extraction completed. Searching for executable...");
|
||||
log::info!("ZIP extraction completed.");
|
||||
|
||||
self.flatten_single_directory_archive(dest_dir)?;
|
||||
|
||||
log::info!("Searching for executable...");
|
||||
self
|
||||
.find_extracted_executable(dest_dir)
|
||||
.await
|
||||
@@ -617,7 +639,9 @@ impl Extractor {
|
||||
// Set executable permissions for extracted files
|
||||
self.set_executable_permissions_recursive(dest_dir).await?;
|
||||
|
||||
log::info!("tar.gz extraction completed. Searching for executable...");
|
||||
log::info!("tar.gz extraction completed.");
|
||||
self.flatten_single_directory_archive(dest_dir)?;
|
||||
log::info!("Searching for executable...");
|
||||
self.find_extracted_executable(dest_dir).await
|
||||
}
|
||||
|
||||
@@ -638,7 +662,9 @@ impl Extractor {
|
||||
// Set executable permissions for extracted files
|
||||
self.set_executable_permissions_recursive(dest_dir).await?;
|
||||
|
||||
log::info!("tar.bz2 extraction completed. Searching for executable...");
|
||||
log::info!("tar.bz2 extraction completed.");
|
||||
self.flatten_single_directory_archive(dest_dir)?;
|
||||
log::info!("Searching for executable...");
|
||||
self.find_extracted_executable(dest_dir).await
|
||||
}
|
||||
|
||||
@@ -673,7 +699,9 @@ impl Extractor {
|
||||
// Set executable permissions for extracted files
|
||||
self.set_executable_permissions_recursive(dest_dir).await?;
|
||||
|
||||
log::info!("tar.xz extraction completed. Searching for executable...");
|
||||
log::info!("tar.xz extraction completed.");
|
||||
self.flatten_single_directory_archive(dest_dir)?;
|
||||
log::info!("Searching for executable...");
|
||||
self.find_extracted_executable(dest_dir).await
|
||||
}
|
||||
|
||||
@@ -691,7 +719,9 @@ impl Extractor {
|
||||
extractor.to(dest_dir);
|
||||
}
|
||||
|
||||
log::info!("MSI extraction completed. Searching for executable...");
|
||||
log::info!("MSI extraction completed.");
|
||||
self.flatten_single_directory_archive(dest_dir)?;
|
||||
log::info!("Searching for executable...");
|
||||
self.find_extracted_executable(dest_dir).await
|
||||
}
|
||||
|
||||
@@ -727,55 +757,91 @@ impl Extractor {
|
||||
dest_dir: &Path,
|
||||
browser_type: BrowserType,
|
||||
) -> Result<PathBuf, Box<dyn std::error::Error + Send + Sync>> {
|
||||
match browser_type {
|
||||
BrowserType::Zen => {
|
||||
// Zen installer EXE needs to be run to install
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
self.install_zen_windows(exe_path, dest_dir).await
|
||||
}
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
{
|
||||
Err("Zen EXE installation is only supported on Windows".into())
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
// For other browsers (Firefox, TOR, etc.), the EXE is typically just copied
|
||||
let exe_name = exe_path
|
||||
.file_name()
|
||||
.and_then(|name| name.to_str())
|
||||
.unwrap_or("browser.exe");
|
||||
{
|
||||
let _ = browser_type;
|
||||
let exe_name = exe_path
|
||||
.file_name()
|
||||
.and_then(|name| name.to_str())
|
||||
.unwrap_or("browser.exe");
|
||||
|
||||
let dest_path = dest_dir.join(exe_name);
|
||||
fs::copy(exe_path, &dest_path)?;
|
||||
Ok(dest_path)
|
||||
}
|
||||
let dest_path = dest_dir.join(exe_name);
|
||||
fs::copy(exe_path, &dest_path)?;
|
||||
Ok(dest_path)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
async fn install_zen_windows(
|
||||
fn flatten_single_directory_archive(
|
||||
&self,
|
||||
installer_path: &Path,
|
||||
dest_dir: &Path,
|
||||
) -> Result<PathBuf, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// For Zen installer, we need to run it silently
|
||||
let output = Command::new(installer_path)
|
||||
.args(["/S", &format!("/D={}", dest_dir.display())])
|
||||
.output()?;
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
let entries: Vec<_> = fs::read_dir(dest_dir)?.filter_map(|e| e.ok()).collect();
|
||||
|
||||
if !output.status.success() {
|
||||
return Err(
|
||||
format!(
|
||||
"Failed to install Zen: {}",
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
)
|
||||
.into(),
|
||||
);
|
||||
let archive_extensions = ["zip", "tar", "xz", "gz", "bz2", "dmg", "msi", "exe"];
|
||||
|
||||
let mut dirs = Vec::new();
|
||||
let mut has_non_archive_files = false;
|
||||
|
||||
for entry in &entries {
|
||||
let path = entry.path();
|
||||
if path.is_dir() {
|
||||
dirs.push(path);
|
||||
} else if let Some(ext) = path.extension().and_then(|e| e.to_str()) {
|
||||
if !archive_extensions.contains(&ext.to_lowercase().as_str()) {
|
||||
has_non_archive_files = true;
|
||||
}
|
||||
} else {
|
||||
has_non_archive_files = true;
|
||||
}
|
||||
}
|
||||
|
||||
// Find the installed executable
|
||||
self.find_extracted_executable(dest_dir).await
|
||||
if dirs.len() == 1 && !has_non_archive_files {
|
||||
let single_dir = &dirs[0];
|
||||
|
||||
if single_dir.extension().is_some_and(|ext| ext == "app") {
|
||||
log::info!(
|
||||
"Skipping flatten: {} is a macOS app bundle",
|
||||
single_dir.display()
|
||||
);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
log::info!(
|
||||
"Flattening single-directory archive: moving contents of {} to {}",
|
||||
single_dir.display(),
|
||||
dest_dir.display()
|
||||
);
|
||||
|
||||
let inner_entries: Vec<_> = fs::read_dir(single_dir)?.filter_map(|e| e.ok()).collect();
|
||||
|
||||
for entry in inner_entries {
|
||||
let source = entry.path();
|
||||
let file_name = match source.file_name() {
|
||||
Some(name) => name.to_owned(),
|
||||
None => continue,
|
||||
};
|
||||
let target = dest_dir.join(&file_name);
|
||||
fs::rename(&source, &target).map_err(|e| {
|
||||
format!(
|
||||
"Failed to move {} to {}: {}",
|
||||
source.display(),
|
||||
target.display(),
|
||||
e
|
||||
)
|
||||
})?;
|
||||
}
|
||||
|
||||
fs::remove_dir(single_dir).map_err(|e| {
|
||||
format!(
|
||||
"Failed to remove empty directory {}: {}",
|
||||
single_dir.display(),
|
||||
e
|
||||
)
|
||||
})?;
|
||||
|
||||
log::info!("Successfully flattened archive directory structure");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn find_extracted_executable(
|
||||
@@ -868,8 +934,6 @@ impl Extractor {
|
||||
"firefox.exe",
|
||||
"chrome.exe",
|
||||
"chromium.exe",
|
||||
"zen.exe",
|
||||
"brave.exe",
|
||||
"camoufox.exe",
|
||||
"wayfern.exe",
|
||||
];
|
||||
@@ -937,8 +1001,6 @@ impl Extractor {
|
||||
if file_name.contains("firefox")
|
||||
|| file_name.contains("chrome")
|
||||
|| file_name.contains("chromium")
|
||||
|| file_name.contains("zen")
|
||||
|| file_name.contains("brave")
|
||||
|| file_name.contains("browser")
|
||||
|| file_name.contains("camoufox")
|
||||
|| file_name.contains("wayfern")
|
||||
@@ -989,31 +1051,14 @@ impl Extractor {
|
||||
|
||||
// Enhanced list of common browser executable names
|
||||
let exe_names = [
|
||||
// Firefox variants
|
||||
// Firefox variants (used by Camoufox)
|
||||
"firefox",
|
||||
"firefox-bin",
|
||||
"firefox-esr",
|
||||
"firefox-trunk",
|
||||
// Chrome/Chromium variants
|
||||
// Chrome/Chromium variants (used by Wayfern)
|
||||
"chrome",
|
||||
"google-chrome",
|
||||
"google-chrome-stable",
|
||||
"google-chrome-beta",
|
||||
"google-chrome-unstable",
|
||||
"chromium",
|
||||
"chromium-browser",
|
||||
"chromium-bin",
|
||||
// Zen Browser
|
||||
"zen",
|
||||
"zen-browser",
|
||||
"zen-bin",
|
||||
// Brave variants
|
||||
"brave",
|
||||
"brave-browser",
|
||||
"brave-browser-stable",
|
||||
"brave-browser-beta",
|
||||
"brave-browser-dev",
|
||||
"brave-bin",
|
||||
// Camoufox variants
|
||||
"camoufox",
|
||||
"camoufox-bin",
|
||||
@@ -1044,17 +1089,12 @@ impl Extractor {
|
||||
"firefox",
|
||||
"chrome",
|
||||
"chromium",
|
||||
"brave",
|
||||
"zen",
|
||||
"camoufox",
|
||||
"wayfern",
|
||||
".",
|
||||
"./",
|
||||
"firefox",
|
||||
"Browser",
|
||||
"browser",
|
||||
"opt/google/chrome",
|
||||
"opt/brave.com/brave",
|
||||
"opt/camoufox",
|
||||
"usr/lib/firefox",
|
||||
"usr/lib/chromium",
|
||||
|
||||
@@ -5,6 +5,7 @@ use directories::BaseDirs;
|
||||
use reqwest::Client;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::PathBuf;
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
use tokio::fs;
|
||||
use tokio::io::AsyncWriteExt;
|
||||
|
||||
@@ -22,6 +23,8 @@ pub struct GeoIPDownloadProgress {
|
||||
pub eta_seconds: Option<f64>,
|
||||
}
|
||||
|
||||
static DOWNLOAD_IN_PROGRESS: AtomicBool = AtomicBool::new(false);
|
||||
|
||||
pub struct GeoIPDownloader {
|
||||
client: Client,
|
||||
}
|
||||
@@ -76,21 +79,39 @@ impl GeoIPDownloader {
|
||||
false
|
||||
}
|
||||
}
|
||||
/// Check if GeoIP database is missing for Camoufox profiles
|
||||
|
||||
fn get_timestamp_path() -> PathBuf {
|
||||
crate::app_dirs::cache_dir().join("geoip_last_download")
|
||||
}
|
||||
|
||||
fn is_geoip_stale() -> bool {
|
||||
let timestamp_path = Self::get_timestamp_path();
|
||||
let Ok(content) = std::fs::read_to_string(×tamp_path) else {
|
||||
return true;
|
||||
};
|
||||
let Ok(timestamp) = content.trim().parse::<u64>() else {
|
||||
return true;
|
||||
};
|
||||
let now = std::time::SystemTime::now()
|
||||
.duration_since(std::time::UNIX_EPOCH)
|
||||
.unwrap_or_default()
|
||||
.as_secs();
|
||||
const SEVEN_DAYS: u64 = 7 * 24 * 60 * 60;
|
||||
now.saturating_sub(timestamp) > SEVEN_DAYS
|
||||
}
|
||||
|
||||
/// Check if GeoIP database is missing or stale for Camoufox profiles
|
||||
pub fn check_missing_geoip_database(
|
||||
&self,
|
||||
) -> Result<bool, Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Get all profiles
|
||||
let profiles = ProfileManager::instance()
|
||||
.list_profiles()
|
||||
.map_err(|e| format!("Failed to list profiles: {e}"))?;
|
||||
|
||||
// Check if there are any Camoufox profiles
|
||||
let has_camoufox_profiles = profiles.iter().any(|profile| profile.browser == "camoufox");
|
||||
|
||||
if has_camoufox_profiles {
|
||||
// Check if GeoIP database is available
|
||||
return Ok(!Self::is_geoip_database_available());
|
||||
return Ok(!Self::is_geoip_database_available() || Self::is_geoip_stale());
|
||||
}
|
||||
|
||||
Ok(false)
|
||||
@@ -108,6 +129,22 @@ impl GeoIPDownloader {
|
||||
pub async fn download_geoip_database(
|
||||
&self,
|
||||
_app_handle: &tauri::AppHandle,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
if DOWNLOAD_IN_PROGRESS
|
||||
.compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst)
|
||||
.is_err()
|
||||
{
|
||||
log::info!("GeoIP database download already in progress, skipping");
|
||||
return Ok(());
|
||||
}
|
||||
let result = self.download_geoip_database_inner(_app_handle).await;
|
||||
DOWNLOAD_IN_PROGRESS.store(false, Ordering::SeqCst);
|
||||
result
|
||||
}
|
||||
|
||||
async fn download_geoip_database_inner(
|
||||
&self,
|
||||
_app_handle: &tauri::AppHandle,
|
||||
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
|
||||
// Emit initial progress
|
||||
let _ = events::emit(
|
||||
@@ -137,6 +174,13 @@ impl GeoIPDownloader {
|
||||
|
||||
let mmdb_path = Self::get_mmdb_file_path()?;
|
||||
|
||||
// Always download to a temp file first, then atomically rename.
|
||||
// This prevents corruption if the app is closed mid-download.
|
||||
let temp_path = mmdb_path.with_extension("mmdb.downloading");
|
||||
|
||||
// Remove any leftover temp file from a previous interrupted download
|
||||
let _ = fs::remove_file(&temp_path).await;
|
||||
|
||||
// Download the file
|
||||
let response = self.client.get(&download_url).send().await?;
|
||||
|
||||
@@ -152,7 +196,7 @@ impl GeoIPDownloader {
|
||||
|
||||
let total_size = response.content_length().unwrap_or(0);
|
||||
let mut downloaded: u64 = 0;
|
||||
let mut file = fs::File::create(&mmdb_path).await?;
|
||||
let mut file = fs::File::create(&temp_path).await?;
|
||||
let mut stream = response.bytes_stream();
|
||||
|
||||
use futures_util::StreamExt;
|
||||
@@ -200,6 +244,21 @@ impl GeoIPDownloader {
|
||||
}
|
||||
|
||||
file.flush().await?;
|
||||
drop(file);
|
||||
|
||||
// Atomically replace the old database with the new one
|
||||
fs::rename(&temp_path, &mmdb_path).await?;
|
||||
|
||||
// Write download timestamp
|
||||
let timestamp_path = Self::get_timestamp_path();
|
||||
if let Some(parent) = timestamp_path.parent() {
|
||||
let _ = fs::create_dir_all(parent).await;
|
||||
}
|
||||
let now = std::time::SystemTime::now()
|
||||
.duration_since(std::time::UNIX_EPOCH)
|
||||
.unwrap_or_default()
|
||||
.as_secs();
|
||||
let _ = fs::write(×tamp_path, now.to_string()).await;
|
||||
|
||||
// Emit completion
|
||||
let _ = events::emit(
|
||||
@@ -362,6 +421,31 @@ mod tests {
|
||||
assert!(path.to_string_lossy().ends_with("GeoLite2-City.mmdb"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_geoip_stale() {
|
||||
let tmp = tempfile::TempDir::new().unwrap();
|
||||
let _guard = crate::app_dirs::set_test_cache_dir(tmp.path().to_path_buf());
|
||||
|
||||
// No timestamp file → stale
|
||||
assert!(GeoIPDownloader::is_geoip_stale());
|
||||
|
||||
let timestamp_path = GeoIPDownloader::get_timestamp_path();
|
||||
std::fs::create_dir_all(timestamp_path.parent().unwrap()).unwrap();
|
||||
|
||||
// Recent timestamp → not stale
|
||||
let now = std::time::SystemTime::now()
|
||||
.duration_since(std::time::UNIX_EPOCH)
|
||||
.unwrap()
|
||||
.as_secs();
|
||||
std::fs::write(×tamp_path, now.to_string()).unwrap();
|
||||
assert!(!GeoIPDownloader::is_geoip_stale());
|
||||
|
||||
// 8 days ago → stale
|
||||
let eight_days_ago = now - 8 * 24 * 60 * 60;
|
||||
std::fs::write(×tamp_path, eight_days_ago.to_string()).unwrap();
|
||||
assert!(GeoIPDownloader::is_geoip_stale());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_geoip_database_available() {
|
||||
// Test that the function works correctly regardless of file system state.
|
||||
|
||||