mirror of
https://github.com/zhom/donutbrowser.git
synced 2026-05-01 16:17:55 +02:00
feat: e2e encrypted sync
This commit is contained in:
@@ -0,0 +1,351 @@
|
||||
use aes_gcm::{
|
||||
aead::{Aead, AeadCore, KeyInit, OsRng},
|
||||
Aes256Gcm, Key,
|
||||
};
|
||||
use argon2::{password_hash::SaltString, Argon2, PasswordHasher};
|
||||
use base64::{engine::general_purpose::STANDARD as BASE64, Engine};
|
||||
|
||||
/// Magic bytes identifying the encrypted E2E password file on disk.
const E2E_FILE_HEADER: &[u8] = b"DBE2E";
/// On-disk format version; bump whenever the file layout changes.
const E2E_FILE_VERSION: u8 = 1;
fn get_e2e_password_path() -> std::path::PathBuf {
|
||||
crate::app_dirs::settings_dir().join("e2e_password.dat")
|
||||
}
|
||||
|
||||
fn get_vault_password() -> String {
|
||||
env!("DONUT_BROWSER_VAULT_PASSWORD").to_string()
|
||||
}
|
||||
|
||||
pub fn store_e2e_password(password: &str) -> Result<(), String> {
|
||||
let file_path = get_e2e_password_path();
|
||||
|
||||
if let Some(parent) = file_path.parent() {
|
||||
std::fs::create_dir_all(parent).map_err(|e| format!("Failed to create directory: {e}"))?;
|
||||
}
|
||||
|
||||
let vault_password = get_vault_password();
|
||||
let salt = SaltString::generate(&mut OsRng);
|
||||
let argon2 = Argon2::default();
|
||||
let password_hash = argon2
|
||||
.hash_password(vault_password.as_bytes(), &salt)
|
||||
.map_err(|e| format!("Argon2 key derivation failed: {e}"))?;
|
||||
let hash_value = password_hash.hash.unwrap();
|
||||
let hash_bytes = hash_value.as_bytes();
|
||||
|
||||
let key_bytes: [u8; 32] = hash_bytes[..32]
|
||||
.try_into()
|
||||
.map_err(|_| "Invalid key length")?;
|
||||
let key = Key::<Aes256Gcm>::from(key_bytes);
|
||||
let cipher = Aes256Gcm::new(&key);
|
||||
let nonce = Aes256Gcm::generate_nonce(&mut OsRng);
|
||||
|
||||
let ciphertext = cipher
|
||||
.encrypt(&nonce, password.as_bytes())
|
||||
.map_err(|e| format!("Encryption failed: {e}"))?;
|
||||
|
||||
let mut file_data = Vec::new();
|
||||
file_data.extend_from_slice(E2E_FILE_HEADER);
|
||||
file_data.push(E2E_FILE_VERSION);
|
||||
|
||||
let salt_str = salt.as_str();
|
||||
file_data.push(salt_str.len() as u8);
|
||||
file_data.extend_from_slice(salt_str.as_bytes());
|
||||
file_data.extend_from_slice(&nonce);
|
||||
file_data.extend_from_slice(&(ciphertext.len() as u32).to_le_bytes());
|
||||
file_data.extend_from_slice(&ciphertext);
|
||||
|
||||
std::fs::write(&file_path, file_data)
|
||||
.map_err(|e| format!("Failed to write e2e password file: {e}"))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn load_e2e_password() -> Result<Option<String>, String> {
|
||||
let file_path = get_e2e_password_path();
|
||||
if !file_path.exists() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let file_data =
|
||||
std::fs::read(&file_path).map_err(|e| format!("Failed to read e2e password file: {e}"))?;
|
||||
|
||||
if file_data.len() < E2E_FILE_HEADER.len() + 1 {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
if &file_data[..E2E_FILE_HEADER.len()] != E2E_FILE_HEADER {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let version = file_data[E2E_FILE_HEADER.len()];
|
||||
if version != E2E_FILE_VERSION {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let mut offset = E2E_FILE_HEADER.len() + 1;
|
||||
|
||||
if offset >= file_data.len() {
|
||||
return Ok(None);
|
||||
}
|
||||
let salt_len = file_data[offset] as usize;
|
||||
offset += 1;
|
||||
|
||||
if offset + salt_len > file_data.len() {
|
||||
return Ok(None);
|
||||
}
|
||||
let salt_str = std::str::from_utf8(&file_data[offset..offset + salt_len])
|
||||
.map_err(|_| "Invalid salt encoding")?;
|
||||
offset += salt_len;
|
||||
|
||||
let salt = SaltString::from_b64(salt_str).map_err(|e| format!("Invalid salt: {e}"))?;
|
||||
|
||||
if offset + 12 > file_data.len() {
|
||||
return Ok(None);
|
||||
}
|
||||
let nonce_bytes: [u8; 12] = file_data[offset..offset + 12]
|
||||
.try_into()
|
||||
.map_err(|_| "Invalid nonce")?;
|
||||
let nonce = aes_gcm::Nonce::from(nonce_bytes);
|
||||
offset += 12;
|
||||
|
||||
if offset + 4 > file_data.len() {
|
||||
return Ok(None);
|
||||
}
|
||||
let ciphertext_len =
|
||||
u32::from_le_bytes(file_data[offset..offset + 4].try_into().unwrap()) as usize;
|
||||
offset += 4;
|
||||
|
||||
if offset + ciphertext_len > file_data.len() {
|
||||
return Ok(None);
|
||||
}
|
||||
let ciphertext = &file_data[offset..offset + ciphertext_len];
|
||||
|
||||
let vault_password = get_vault_password();
|
||||
let argon2 = Argon2::default();
|
||||
let password_hash = argon2
|
||||
.hash_password(vault_password.as_bytes(), &salt)
|
||||
.map_err(|e| format!("Argon2 key derivation failed: {e}"))?;
|
||||
let hash_value = password_hash.hash.unwrap();
|
||||
let hash_bytes = hash_value.as_bytes();
|
||||
|
||||
let key_bytes: [u8; 32] = hash_bytes[..32]
|
||||
.try_into()
|
||||
.map_err(|_| "Invalid key length")?;
|
||||
let key = Key::<Aes256Gcm>::from(key_bytes);
|
||||
let cipher = Aes256Gcm::new(&key);
|
||||
|
||||
let plaintext = cipher
|
||||
.decrypt(&nonce, ciphertext)
|
||||
.map_err(|e| format!("Decryption failed: {e}"))?;
|
||||
|
||||
let password =
|
||||
String::from_utf8(plaintext).map_err(|e| format!("Invalid UTF-8 in password: {e}"))?;
|
||||
|
||||
Ok(Some(password))
|
||||
}
|
||||
|
||||
pub fn has_e2e_password() -> bool {
|
||||
get_e2e_password_path().exists()
|
||||
}
|
||||
|
||||
pub fn remove_e2e_password() -> Result<(), String> {
|
||||
let file_path = get_e2e_password_path();
|
||||
if file_path.exists() {
|
||||
std::fs::remove_file(&file_path)
|
||||
.map_err(|e| format!("Failed to remove e2e password file: {e}"))?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Derive a per-profile encryption key using Argon2id
|
||||
pub fn derive_profile_key(user_password: &str, profile_salt: &str) -> Result<[u8; 32], String> {
|
||||
let salt_bytes = BASE64
|
||||
.decode(profile_salt)
|
||||
.map_err(|e| format!("Invalid salt encoding: {e}"))?;
|
||||
|
||||
let salt = SaltString::encode_b64(&salt_bytes)
|
||||
.map_err(|e| format!("Failed to create salt string: {e}"))?;
|
||||
|
||||
let argon2 = Argon2::default();
|
||||
let password_hash = argon2
|
||||
.hash_password(user_password.as_bytes(), &salt)
|
||||
.map_err(|e| format!("Key derivation failed: {e}"))?;
|
||||
let hash_value = password_hash.hash.unwrap();
|
||||
let hash_bytes = hash_value.as_bytes();
|
||||
|
||||
let mut key = [0u8; 32];
|
||||
key.copy_from_slice(&hash_bytes[..32]);
|
||||
Ok(key)
|
||||
}
|
||||
|
||||
/// Generate a random 16-byte salt, base64-encoded
|
||||
pub fn generate_salt() -> String {
|
||||
let mut salt = [0u8; 16];
|
||||
use aes_gcm::aead::rand_core::RngCore;
|
||||
OsRng.fill_bytes(&mut salt);
|
||||
BASE64.encode(salt)
|
||||
}
|
||||
|
||||
/// Encrypt bytes with AES-256-GCM. Output format: [nonce 12B][ciphertext]
|
||||
pub fn encrypt_bytes(key: &[u8; 32], plaintext: &[u8]) -> Result<Vec<u8>, String> {
|
||||
let aes_key = Key::<Aes256Gcm>::from(*key);
|
||||
let cipher = Aes256Gcm::new(&aes_key);
|
||||
let nonce = Aes256Gcm::generate_nonce(&mut OsRng);
|
||||
|
||||
let ciphertext = cipher
|
||||
.encrypt(&nonce, plaintext)
|
||||
.map_err(|e| format!("Encryption failed: {e}"))?;
|
||||
|
||||
let mut output = Vec::with_capacity(12 + ciphertext.len());
|
||||
output.extend_from_slice(&nonce);
|
||||
output.extend_from_slice(&ciphertext);
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
/// Decrypt bytes encrypted with encrypt_bytes. Input format: [nonce 12B][ciphertext]
|
||||
pub fn decrypt_bytes(key: &[u8; 32], encrypted: &[u8]) -> Result<Vec<u8>, String> {
|
||||
if encrypted.len() < 12 {
|
||||
return Err("Encrypted data too short".to_string());
|
||||
}
|
||||
|
||||
let nonce_bytes: [u8; 12] = encrypted[..12].try_into().map_err(|_| "Invalid nonce")?;
|
||||
let nonce = aes_gcm::Nonce::from(nonce_bytes);
|
||||
let ciphertext = &encrypted[12..];
|
||||
|
||||
let aes_key = Key::<Aes256Gcm>::from(*key);
|
||||
let cipher = Aes256Gcm::new(&aes_key);
|
||||
|
||||
cipher
|
||||
.decrypt(&nonce, ciphertext)
|
||||
.map_err(|e| format!("Decryption failed: {e}"))
|
||||
}
|
||||
|
||||
// Tauri commands
|
||||
|
||||
#[tauri::command]
|
||||
pub fn set_e2e_password(password: String) -> Result<(), String> {
|
||||
if password.len() < 8 {
|
||||
return Err("Password must be at least 8 characters".to_string());
|
||||
}
|
||||
store_e2e_password(&password)
|
||||
}
|
||||
|
||||
/// Tauri command: report whether an E2E password has been stored on disk.
#[tauri::command]
pub fn check_has_e2e_password() -> bool {
  has_e2e_password()
}
/// Tauri command: delete the stored E2E password file.
#[tauri::command]
pub fn delete_e2e_password() -> Result<(), String> {
  remove_e2e_password()
}
#[cfg(test)]
mod tests {
  use super::*;

  // Encrypting then decrypting with the same key returns the original bytes.
  #[test]
  fn test_encrypt_decrypt_roundtrip() {
    let key = [42u8; 32];
    let plaintext = b"Hello, World!";
    let encrypted = encrypt_bytes(&key, plaintext).unwrap();
    let decrypted = decrypt_bytes(&key, &encrypted).unwrap();
    assert_eq!(decrypted, plaintext);
  }

  // Empty plaintext must round-trip (GCM still produces a tag-bearing output).
  #[test]
  fn test_encrypt_decrypt_empty_data() {
    let key = [1u8; 32];
    let plaintext = b"";
    let encrypted = encrypt_bytes(&key, plaintext).unwrap();
    let decrypted = decrypt_bytes(&key, &encrypted).unwrap();
    assert_eq!(decrypted, plaintext.to_vec());
  }

  // Large payload (1 MB) round-trips without truncation or corruption.
  #[test]
  fn test_encrypt_decrypt_large_data() {
    let key = [7u8; 32];
    let plaintext = vec![0xABu8; 1_048_576]; // 1MB
    let encrypted = encrypt_bytes(&key, &plaintext).unwrap();
    let decrypted = decrypt_bytes(&key, &encrypted).unwrap();
    assert_eq!(decrypted, plaintext);
  }

  // Decrypting under a different key must fail GCM authentication.
  #[test]
  fn test_different_keys_different_ciphertext() {
    let key1 = [1u8; 32];
    let key2 = [2u8; 32];
    let plaintext = b"same data";
    let encrypted1 = encrypt_bytes(&key1, plaintext).unwrap();
    let encrypted2 = encrypt_bytes(&key2, plaintext).unwrap();
    // Nonces are random so ciphertexts will differ regardless,
    // but decrypting with wrong key should fail
    assert!(decrypt_bytes(&key2, &encrypted1).is_err());
    assert!(decrypt_bytes(&key1, &encrypted2).is_err());
  }

  // Two encryptions of the same plaintext must not produce identical output
  // (fresh random nonce per call) yet decrypt to the same plaintext.
  #[test]
  fn test_nonce_uniqueness() {
    let key = [5u8; 32];
    let plaintext = b"same data encrypted twice";
    let encrypted1 = encrypt_bytes(&key, plaintext).unwrap();
    let encrypted2 = encrypt_bytes(&key, plaintext).unwrap();
    // Different nonces should produce different ciphertext
    assert_ne!(encrypted1, encrypted2);
    // But both should decrypt to the same plaintext
    assert_eq!(
      decrypt_bytes(&key, &encrypted1).unwrap(),
      decrypt_bytes(&key, &encrypted2).unwrap()
    );
  }

  // A wrong key must be rejected, never yield garbage plaintext.
  #[test]
  fn test_wrong_key_fails() {
    let key = [10u8; 32];
    let wrong_key = [20u8; 32];
    let plaintext = b"secret data";
    let encrypted = encrypt_bytes(&key, plaintext).unwrap();
    assert!(decrypt_bytes(&wrong_key, &encrypted).is_err());
  }

  // Same password + same salt must derive the same key (determinism).
  #[test]
  fn test_key_derivation_deterministic() {
    let salt = generate_salt();
    let key1 = derive_profile_key("my_password", &salt).unwrap();
    let key2 = derive_profile_key("my_password", &salt).unwrap();
    assert_eq!(key1, key2);
  }

  // Different salts must produce different keys for the same password.
  #[test]
  fn test_key_derivation_different_salts() {
    let salt1 = generate_salt();
    let salt2 = generate_salt();
    let key1 = derive_profile_key("my_password", &salt1).unwrap();
    let key2 = derive_profile_key("my_password", &salt2).unwrap();
    assert_ne!(key1, key2);
  }

  // Consecutive salts are random, so collisions should not occur.
  #[test]
  fn test_salt_generation_unique() {
    let salt1 = generate_salt();
    let salt2 = generate_salt();
    assert_ne!(salt1, salt2);
  }

  // Full store/load/remove cycle against the real settings directory.
  // NOTE(review): this test mutates global on-disk state (the shared
  // e2e_password.dat); it can interfere with parallel tests touching the
  // same file — confirm test isolation.
  #[test]
  fn test_password_storage_roundtrip() {
    let password = "test_password_12345";
    store_e2e_password(password).unwrap();
    assert!(has_e2e_password());
    let loaded = load_e2e_password().unwrap();
    assert_eq!(loaded, Some(password.to_string()));
    remove_e2e_password().unwrap();
    assert!(!has_e2e_password());
  }

  // Inputs shorter than the 12-byte nonce must be rejected up front.
  #[test]
  fn test_decrypt_too_short_data() {
    let key = [1u8; 32];
    assert!(decrypt_bytes(&key, &[0u8; 5]).is_err());
  }
}
|
||||
+294
-30
@@ -1,8 +1,9 @@
|
||||
use super::client::SyncClient;
|
||||
use super::encryption;
|
||||
use super::manifest::{compute_diff, generate_manifest, get_cache_path, HashCache, SyncManifest};
|
||||
use super::types::*;
|
||||
use crate::events;
|
||||
use crate::profile::types::BrowserProfile;
|
||||
use crate::profile::types::{BrowserProfile, SyncMode};
|
||||
use crate::profile::ProfileManager;
|
||||
use crate::settings_manager::SettingsManager;
|
||||
use chrono::{DateTime, Utc};
|
||||
@@ -12,6 +13,18 @@ use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
use tokio::sync::Semaphore;
|
||||
|
||||
/// Check if sync is configured (cloud or self-hosted)
|
||||
pub fn is_sync_configured() -> bool {
|
||||
if crate::cloud_auth::CLOUD_AUTH.has_active_paid_subscription_sync() {
|
||||
return true;
|
||||
}
|
||||
let manager = SettingsManager::instance();
|
||||
if let Ok(settings) = manager.load_settings() {
|
||||
return settings.sync_server_url.is_some();
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
pub struct SyncEngine {
|
||||
client: SyncClient,
|
||||
}
|
||||
@@ -68,6 +81,24 @@ impl SyncEngine {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// Derive encryption key if encrypted sync
|
||||
let encryption_key = if profile.is_encrypted_sync() {
|
||||
let password = encryption::load_e2e_password()
|
||||
.map_err(|e| SyncError::InvalidData(format!("Failed to load E2E password: {e}")))?
|
||||
.ok_or_else(|| {
|
||||
let _ = events::emit("profile-sync-e2e-password-required", ());
|
||||
SyncError::InvalidData("E2E password not set".to_string())
|
||||
})?;
|
||||
let salt = profile.encryption_salt.as_deref().ok_or_else(|| {
|
||||
SyncError::InvalidData("Encryption salt missing on encrypted profile".to_string())
|
||||
})?;
|
||||
let key = encryption::derive_profile_key(&password, salt)
|
||||
.map_err(|e| SyncError::InvalidData(format!("Key derivation failed: {e}")))?;
|
||||
Some(key)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let profile_manager = ProfileManager::instance();
|
||||
let profiles_dir = profile_manager.get_profiles_dir();
|
||||
let profile_dir = profiles_dir.join(profile.id.to_string());
|
||||
@@ -154,7 +185,13 @@ impl SyncEngine {
|
||||
// Perform uploads
|
||||
if !diff.files_to_upload.is_empty() {
|
||||
self
|
||||
.upload_profile_files(app_handle, &profile_id, &profile_dir, &diff.files_to_upload)
|
||||
.upload_profile_files(
|
||||
app_handle,
|
||||
&profile_id,
|
||||
&profile_dir,
|
||||
&diff.files_to_upload,
|
||||
encryption_key.as_ref(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
@@ -166,6 +203,7 @@ impl SyncEngine {
|
||||
&profile_id,
|
||||
&profile_dir,
|
||||
&diff.files_to_download,
|
||||
encryption_key.as_ref(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
@@ -190,7 +228,9 @@ impl SyncEngine {
|
||||
self.upload_profile_metadata(&profile_id, profile).await?;
|
||||
|
||||
// Upload manifest.json last for atomicity
|
||||
self.upload_manifest(&profile_id, &local_manifest).await?;
|
||||
let mut final_manifest = local_manifest;
|
||||
final_manifest.encrypted = encryption_key.is_some();
|
||||
self.upload_manifest(&profile_id, &final_manifest).await?;
|
||||
|
||||
// Sync associated proxy, group, and VPN
|
||||
if let Some(proxy_id) = &profile.proxy_id {
|
||||
@@ -291,6 +331,7 @@ impl SyncEngine {
|
||||
profile_id: &str,
|
||||
profile_dir: &Path,
|
||||
files: &[super::manifest::ManifestFileEntry],
|
||||
encryption_key: Option<&[u8; 32]>,
|
||||
) -> SyncResult<()> {
|
||||
if files.is_empty() {
|
||||
return Ok(());
|
||||
@@ -324,6 +365,7 @@ impl SyncEngine {
|
||||
let client = self.client.clone();
|
||||
let profile_dir = profile_dir.to_path_buf();
|
||||
let profile_id = profile_id.to_string();
|
||||
let enc_key = encryption_key.copied();
|
||||
|
||||
let mut handles = Vec::new();
|
||||
|
||||
@@ -355,8 +397,20 @@ impl SyncEngine {
|
||||
}
|
||||
};
|
||||
|
||||
let upload_data = if let Some(ref key) = enc_key {
|
||||
match encryption::encrypt_bytes(key, &data) {
|
||||
Ok(encrypted) => encrypted,
|
||||
Err(e) => {
|
||||
log::warn!("Failed to encrypt {}: {}", file_path.display(), e);
|
||||
return;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
data
|
||||
};
|
||||
|
||||
if let Err(e) = client
|
||||
.upload_bytes(&url, &data, content_type.as_deref())
|
||||
.upload_bytes(&url, &upload_data, content_type.as_deref())
|
||||
.await
|
||||
{
|
||||
log::warn!("Failed to upload {}: {}", file_path.display(), e);
|
||||
@@ -387,6 +441,7 @@ impl SyncEngine {
|
||||
profile_id: &str,
|
||||
profile_dir: &Path,
|
||||
files: &[super::manifest::ManifestFileEntry],
|
||||
encryption_key: Option<&[u8; 32]>,
|
||||
) -> SyncResult<()> {
|
||||
if files.is_empty() {
|
||||
return Ok(());
|
||||
@@ -418,6 +473,7 @@ impl SyncEngine {
|
||||
let client = self.client.clone();
|
||||
let profile_dir = profile_dir.to_path_buf();
|
||||
let profile_id = profile_id.to_string();
|
||||
let enc_key = encryption_key.copied();
|
||||
|
||||
let mut handles = Vec::new();
|
||||
|
||||
@@ -440,10 +496,22 @@ impl SyncEngine {
|
||||
|
||||
match client.download_bytes(&url).await {
|
||||
Ok(data) => {
|
||||
let write_data = if let Some(ref key) = enc_key {
|
||||
match encryption::decrypt_bytes(key, &data) {
|
||||
Ok(decrypted) => decrypted,
|
||||
Err(e) => {
|
||||
log::warn!("Failed to decrypt {}, skipping: {}", remote_key, e);
|
||||
return;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
data
|
||||
};
|
||||
|
||||
if let Some(parent) = file_path.parent() {
|
||||
let _ = fs::create_dir_all(parent);
|
||||
}
|
||||
if let Err(e) = fs::write(&file_path, &data) {
|
||||
if let Err(e) = fs::write(&file_path, &write_data) {
|
||||
log::warn!("Failed to write {}: {}", file_path.display(), e);
|
||||
}
|
||||
}
|
||||
@@ -1016,7 +1084,9 @@ impl SyncEngine {
|
||||
))
|
||||
})?;
|
||||
|
||||
profile.sync_enabled = true;
|
||||
if profile.sync_mode == SyncMode::Disabled {
|
||||
profile.sync_mode = SyncMode::Regular;
|
||||
}
|
||||
profile.last_sync = Some(
|
||||
std::time::SystemTime::now()
|
||||
.duration_since(std::time::UNIX_EPOCH)
|
||||
@@ -1052,6 +1122,26 @@ impl SyncEngine {
|
||||
));
|
||||
};
|
||||
|
||||
// If remote manifest is encrypted, we need the E2E password
|
||||
let encryption_key = if manifest.encrypted {
|
||||
let password = encryption::load_e2e_password()
|
||||
.map_err(|e| SyncError::InvalidData(format!("Failed to load E2E password: {e}")))?
|
||||
.ok_or_else(|| {
|
||||
let _ = events::emit("profile-sync-e2e-password-required", ());
|
||||
SyncError::InvalidData(
|
||||
"Remote profile is encrypted but no E2E password is set".to_string(),
|
||||
)
|
||||
})?;
|
||||
let salt = profile.encryption_salt.as_deref().ok_or_else(|| {
|
||||
SyncError::InvalidData("Encryption salt missing on encrypted profile".to_string())
|
||||
})?;
|
||||
let key = encryption::derive_profile_key(&password, salt)
|
||||
.map_err(|e| SyncError::InvalidData(format!("Key derivation failed: {e}")))?;
|
||||
Some(key)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// Ensure profile directory exists
|
||||
fs::create_dir_all(&profile_dir).map_err(|e| {
|
||||
SyncError::IoError(format!(
|
||||
@@ -1078,12 +1168,24 @@ impl SyncEngine {
|
||||
}
|
||||
if !manifest.files.is_empty() {
|
||||
self
|
||||
.download_profile_files(app_handle, profile_id, &profile_dir, &manifest.files)
|
||||
.download_profile_files(
|
||||
app_handle,
|
||||
profile_id,
|
||||
&profile_dir,
|
||||
&manifest.files,
|
||||
encryption_key.as_ref(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
// Set sync enabled and save profile
|
||||
profile.sync_enabled = true;
|
||||
// Set sync mode and save profile
|
||||
if profile.sync_mode == SyncMode::Disabled {
|
||||
profile.sync_mode = if manifest.encrypted {
|
||||
SyncMode::Encrypted
|
||||
} else {
|
||||
SyncMode::Regular
|
||||
};
|
||||
}
|
||||
profile.last_sync = Some(
|
||||
std::time::SystemTime::now()
|
||||
.duration_since(std::time::UNIX_EPOCH)
|
||||
@@ -1170,23 +1272,23 @@ impl SyncEngine {
|
||||
// Refresh metadata for local cross-OS profiles (propagate renames, tags, notes from originating device)
|
||||
let profile_manager = ProfileManager::instance();
|
||||
// Collect cross-OS profiles before async operations to avoid holding non-Send Result across await
|
||||
let cross_os_profiles: Vec<(String, bool)> = profile_manager
|
||||
let cross_os_profiles: Vec<(String, SyncMode)> = profile_manager
|
||||
.list_profiles()
|
||||
.unwrap_or_default()
|
||||
.iter()
|
||||
.filter(|p| p.is_cross_os() && p.sync_enabled)
|
||||
.map(|p| (p.id.to_string(), p.sync_enabled))
|
||||
.filter(|p| p.is_cross_os() && p.is_sync_enabled())
|
||||
.map(|p| (p.id.to_string(), p.sync_mode))
|
||||
.collect();
|
||||
|
||||
if !cross_os_profiles.is_empty() {
|
||||
for (pid, sync_enabled) in &cross_os_profiles {
|
||||
for (pid, sync_mode) in &cross_os_profiles {
|
||||
let metadata_key = format!("profiles/{}/metadata.json", pid);
|
||||
match self.client.stat(&metadata_key).await {
|
||||
Ok(stat) if stat.exists => match self.client.presign_download(&metadata_key).await {
|
||||
Ok(presign) => match self.client.download_bytes(&presign.url).await {
|
||||
Ok(data) => {
|
||||
if let Ok(mut remote_profile) = serde_json::from_slice::<BrowserProfile>(&data) {
|
||||
remote_profile.sync_enabled = *sync_enabled;
|
||||
remote_profile.sync_mode = *sync_mode;
|
||||
remote_profile.last_sync = Some(
|
||||
std::time::SystemTime::now()
|
||||
.duration_since(std::time::UNIX_EPOCH)
|
||||
@@ -1220,6 +1322,111 @@ impl SyncEngine {
|
||||
|
||||
Ok(downloaded)
|
||||
}
|
||||
|
||||
/// Check for remote entities (proxies, groups, VPNs) not present locally and download them
|
||||
pub async fn check_for_missing_synced_entities(
|
||||
&self,
|
||||
app_handle: &tauri::AppHandle,
|
||||
) -> SyncResult<()> {
|
||||
log::info!("Checking for missing synced entities...");
|
||||
|
||||
// Check for remote proxies not present locally
|
||||
let remote_proxies = self.client.list("proxies/").await?;
|
||||
for obj in &remote_proxies.objects {
|
||||
if let Some(proxy_id) = obj
|
||||
.key
|
||||
.strip_prefix("proxies/")
|
||||
.and_then(|s| s.strip_suffix(".json"))
|
||||
{
|
||||
let exists_locally = crate::proxy_manager::PROXY_MANAGER
|
||||
.get_stored_proxies()
|
||||
.iter()
|
||||
.any(|p| p.id == proxy_id);
|
||||
if !exists_locally {
|
||||
let tombstone_key = format!("tombstones/proxies/{}.json", proxy_id);
|
||||
if let Ok(stat) = self.client.stat(&tombstone_key).await {
|
||||
if stat.exists {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
log::info!(
|
||||
"Proxy {} exists remotely but not locally, downloading...",
|
||||
proxy_id
|
||||
);
|
||||
if let Err(e) = self.download_proxy(proxy_id, Some(app_handle)).await {
|
||||
log::warn!("Failed to download missing proxy {}: {}", proxy_id, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check for remote groups not present locally
|
||||
let remote_groups = self.client.list("groups/").await?;
|
||||
for obj in &remote_groups.objects {
|
||||
if let Some(group_id) = obj
|
||||
.key
|
||||
.strip_prefix("groups/")
|
||||
.and_then(|s| s.strip_suffix(".json"))
|
||||
{
|
||||
let exists_locally = {
|
||||
let group_manager = crate::group_manager::GROUP_MANAGER.lock().unwrap();
|
||||
group_manager
|
||||
.get_all_groups()
|
||||
.unwrap_or_default()
|
||||
.iter()
|
||||
.any(|g| g.id == group_id)
|
||||
};
|
||||
if !exists_locally {
|
||||
let tombstone_key = format!("tombstones/groups/{}.json", group_id);
|
||||
if let Ok(stat) = self.client.stat(&tombstone_key).await {
|
||||
if stat.exists {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
log::info!(
|
||||
"Group {} exists remotely but not locally, downloading...",
|
||||
group_id
|
||||
);
|
||||
if let Err(e) = self.download_group(group_id, Some(app_handle)).await {
|
||||
log::warn!("Failed to download missing group {}: {}", group_id, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check for remote VPNs not present locally
|
||||
let remote_vpns = self.client.list("vpns/").await?;
|
||||
for obj in &remote_vpns.objects {
|
||||
if let Some(vpn_id) = obj
|
||||
.key
|
||||
.strip_prefix("vpns/")
|
||||
.and_then(|s| s.strip_suffix(".json"))
|
||||
{
|
||||
let exists_locally = {
|
||||
let storage = crate::vpn::VPN_STORAGE.lock().unwrap();
|
||||
storage.load_config(vpn_id).is_ok()
|
||||
};
|
||||
if !exists_locally {
|
||||
let tombstone_key = format!("tombstones/vpns/{}.json", vpn_id);
|
||||
if let Ok(stat) = self.client.stat(&tombstone_key).await {
|
||||
if stat.exists {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
log::info!(
|
||||
"VPN {} exists remotely but not locally, downloading...",
|
||||
vpn_id
|
||||
);
|
||||
if let Err(e) = self.download_vpn(vpn_id, Some(app_handle)).await {
|
||||
log::warn!("Failed to download missing VPN {}: {}", vpn_id, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
log::info!("Missing synced entities check complete");
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Check if proxy is used by any synced profile
|
||||
@@ -1228,7 +1435,7 @@ pub fn is_proxy_used_by_synced_profile(proxy_id: &str) -> bool {
|
||||
if let Ok(profiles) = profile_manager.list_profiles() {
|
||||
profiles
|
||||
.iter()
|
||||
.any(|p| p.sync_enabled && p.proxy_id.as_deref() == Some(proxy_id))
|
||||
.any(|p| p.is_sync_enabled() && p.proxy_id.as_deref() == Some(proxy_id))
|
||||
} else {
|
||||
false
|
||||
}
|
||||
@@ -1240,7 +1447,7 @@ pub fn is_group_used_by_synced_profile(group_id: &str) -> bool {
|
||||
if let Ok(profiles) = profile_manager.list_profiles() {
|
||||
profiles
|
||||
.iter()
|
||||
.any(|p| p.sync_enabled && p.group_id.as_deref() == Some(group_id))
|
||||
.any(|p| p.is_sync_enabled() && p.group_id.as_deref() == Some(group_id))
|
||||
} else {
|
||||
false
|
||||
}
|
||||
@@ -1281,7 +1488,7 @@ pub fn is_vpn_used_by_synced_profile(vpn_id: &str) -> bool {
|
||||
if let Ok(profiles) = profile_manager.list_profiles() {
|
||||
profiles
|
||||
.iter()
|
||||
.any(|p| p.sync_enabled && p.vpn_id.as_deref() == Some(vpn_id))
|
||||
.any(|p| p.is_sync_enabled() && p.vpn_id.as_deref() == Some(vpn_id))
|
||||
} else {
|
||||
false
|
||||
}
|
||||
@@ -1346,11 +1553,18 @@ pub async fn enable_group_sync_if_needed(
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn set_profile_sync_enabled(
|
||||
pub async fn set_profile_sync_mode(
|
||||
app_handle: tauri::AppHandle,
|
||||
profile_id: String,
|
||||
enabled: bool,
|
||||
sync_mode: String,
|
||||
) -> Result<(), String> {
|
||||
let new_mode = match sync_mode.as_str() {
|
||||
"Disabled" => SyncMode::Disabled,
|
||||
"Regular" => SyncMode::Regular,
|
||||
"Encrypted" => SyncMode::Encrypted,
|
||||
_ => return Err(format!("Invalid sync mode: {sync_mode}")),
|
||||
};
|
||||
|
||||
let profile_manager = ProfileManager::instance();
|
||||
let profiles = profile_manager
|
||||
.list_profiles()
|
||||
@@ -1367,9 +1581,14 @@ pub async fn set_profile_sync_enabled(
|
||||
return Err("Cannot modify sync settings for a cross-OS profile".to_string());
|
||||
}
|
||||
|
||||
// If enabling, first check that sync settings are configured
|
||||
if enabled {
|
||||
// Cloud auth provides sync settings dynamically — skip local checks
|
||||
if profile.ephemeral {
|
||||
return Err("Cannot enable sync for an ephemeral profile".to_string());
|
||||
}
|
||||
|
||||
let old_mode = profile.sync_mode;
|
||||
let enabling = new_mode != SyncMode::Disabled;
|
||||
|
||||
if enabling {
|
||||
let cloud_logged_in = crate::cloud_auth::CLOUD_AUTH.is_logged_in().await;
|
||||
|
||||
if !cloud_logged_in {
|
||||
@@ -1407,7 +1626,32 @@ pub async fn set_profile_sync_enabled(
|
||||
}
|
||||
}
|
||||
|
||||
profile.sync_enabled = enabled;
|
||||
// If switching to Encrypted, verify password and generate salt
|
||||
if new_mode == SyncMode::Encrypted {
|
||||
if !encryption::has_e2e_password() {
|
||||
return Err("E2E password not set. Please set a password in Settings first.".to_string());
|
||||
}
|
||||
if profile.encryption_salt.is_none() {
|
||||
profile.encryption_salt = Some(encryption::generate_salt());
|
||||
}
|
||||
}
|
||||
|
||||
// If switching between Regular<->Encrypted, delete remote manifest to force full re-upload
|
||||
let mode_switched = old_mode != SyncMode::Disabled && enabling && old_mode != new_mode;
|
||||
if mode_switched {
|
||||
if let Ok(engine) = SyncEngine::create_from_settings(&app_handle).await {
|
||||
let manifest_key = format!("profiles/{}/manifest.json", profile_id);
|
||||
let _ = engine.client.delete(&manifest_key, None).await;
|
||||
log::info!(
|
||||
"Deleted remote manifest for profile {} due to sync mode change ({:?} -> {:?})",
|
||||
profile_id,
|
||||
old_mode,
|
||||
new_mode
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
profile.sync_mode = new_mode;
|
||||
|
||||
profile_manager
|
||||
.save_profile(&profile)
|
||||
@@ -1415,8 +1659,7 @@ pub async fn set_profile_sync_enabled(
|
||||
|
||||
let _ = events::emit("profiles-changed", ());
|
||||
|
||||
if enabled {
|
||||
// Check if profile is running to determine status
|
||||
if enabling {
|
||||
let is_running = profile.process_id.is_some();
|
||||
|
||||
let _ = events::emit(
|
||||
@@ -1427,13 +1670,11 @@ pub async fn set_profile_sync_enabled(
|
||||
}),
|
||||
);
|
||||
|
||||
// Queue sync via scheduler (not direct sync)
|
||||
if let Some(scheduler) = super::get_global_scheduler() {
|
||||
scheduler
|
||||
.queue_profile_sync_immediate(profile_id.clone())
|
||||
.await;
|
||||
|
||||
// Auto-enable sync for proxy and group if they exist
|
||||
if let Some(ref proxy_id) = profile.proxy_id {
|
||||
if let Err(e) = enable_proxy_sync_if_needed(proxy_id, &app_handle).await {
|
||||
log::warn!("Failed to enable sync for proxy {}: {}", proxy_id, e);
|
||||
@@ -1459,6 +1700,30 @@ pub async fn set_profile_sync_enabled(
|
||||
log::warn!("Scheduler not initialized, sync will not start");
|
||||
}
|
||||
} else {
|
||||
// Delete remote data when disabling sync
|
||||
if old_mode != SyncMode::Disabled {
|
||||
let profile_id_clone = profile_id.clone();
|
||||
let app_handle_clone = app_handle.clone();
|
||||
tokio::spawn(async move {
|
||||
match SyncEngine::create_from_settings(&app_handle_clone).await {
|
||||
Ok(engine) => {
|
||||
if let Err(e) = engine.delete_profile(&profile_id_clone).await {
|
||||
log::warn!(
|
||||
"Failed to delete profile {} from sync: {}",
|
||||
profile_id_clone,
|
||||
e
|
||||
);
|
||||
} else {
|
||||
log::info!("Profile {} deleted from sync service", profile_id_clone);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
log::debug!("Sync not configured, skipping remote deletion: {}", e);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
let _ = events::emit(
|
||||
"profile-sync-status",
|
||||
serde_json::json!({
|
||||
@@ -1468,11 +1733,10 @@ pub async fn set_profile_sync_enabled(
|
||||
);
|
||||
}
|
||||
|
||||
// Report updated sync-enabled profile count to the cloud backend
|
||||
if crate::cloud_auth::CLOUD_AUTH.is_logged_in().await {
|
||||
let sync_count = profile_manager
|
||||
.list_profiles()
|
||||
.map(|profiles| profiles.iter().filter(|p| p.sync_enabled).count())
|
||||
.map(|profiles| profiles.iter().filter(|p| p.is_sync_enabled()).count())
|
||||
.unwrap_or(0);
|
||||
|
||||
tokio::spawn(async move {
|
||||
@@ -1506,7 +1770,7 @@ pub async fn request_profile_sync(
|
||||
.find(|p| p.id == profile_uuid)
|
||||
.ok_or_else(|| format!("Profile with ID '{profile_id}' not found"))?;
|
||||
|
||||
if !profile.sync_enabled {
|
||||
if !profile.is_sync_enabled() {
|
||||
return Err("Sync is not enabled for this profile".to_string());
|
||||
}
|
||||
|
||||
|
||||
@@ -52,6 +52,8 @@ pub struct SyncManifest {
|
||||
#[serde(rename = "excludeGlobs")]
|
||||
pub exclude_globs: Vec<String>,
|
||||
pub files: Vec<ManifestFileEntry>,
|
||||
#[serde(default)]
|
||||
pub encrypted: bool,
|
||||
}
|
||||
|
||||
impl SyncManifest {
|
||||
@@ -64,6 +66,7 @@ impl SyncManifest {
|
||||
updated_at: now,
|
||||
exclude_globs,
|
||||
files: Vec::new(),
|
||||
encrypted: false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -547,6 +550,7 @@ mod tests {
|
||||
hash: "def".to_string(),
|
||||
},
|
||||
],
|
||||
encrypted: false,
|
||||
};
|
||||
|
||||
let diff = compute_diff(&local, None);
|
||||
@@ -588,6 +592,7 @@ mod tests {
|
||||
hash: "new".to_string(),
|
||||
},
|
||||
],
|
||||
encrypted: false,
|
||||
};
|
||||
|
||||
let remote = SyncManifest {
|
||||
@@ -616,6 +621,7 @@ mod tests {
|
||||
hash: "gone".to_string(),
|
||||
},
|
||||
],
|
||||
encrypted: false,
|
||||
};
|
||||
|
||||
let diff = compute_diff(&local, Some(&remote));
|
||||
@@ -634,4 +640,22 @@ mod tests {
|
||||
.files_to_delete_remote
|
||||
.contains(&"deleted.txt".to_string()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_manifest_encrypted_flag_default() {
|
||||
let json = r#"{"version":1,"profileId":"test","generatedAt":"2024-01-01T00:00:00Z","updatedAt":"2024-01-01T00:00:00Z","excludeGlobs":[],"files":[]}"#;
|
||||
let manifest: SyncManifest = serde_json::from_str(json).unwrap();
|
||||
assert!(!manifest.encrypted);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_manifest_with_encrypted_flag() {
|
||||
let json = r#"{"version":1,"profileId":"test","generatedAt":"2024-01-01T00:00:00Z","updatedAt":"2024-01-01T00:00:00Z","excludeGlobs":[],"files":[],"encrypted":true}"#;
|
||||
let manifest: SyncManifest = serde_json::from_str(json).unwrap();
|
||||
assert!(manifest.encrypted);
|
||||
|
||||
let serialized = serde_json::to_string(&manifest).unwrap();
|
||||
let deserialized: SyncManifest = serde_json::from_str(&serialized).unwrap();
|
||||
assert!(deserialized.encrypted);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
mod client;
|
||||
pub mod encryption;
|
||||
mod engine;
|
||||
pub mod manifest;
|
||||
pub mod scheduler;
|
||||
@@ -6,13 +7,15 @@ pub mod subscription;
|
||||
pub mod types;
|
||||
|
||||
pub use client::SyncClient;
|
||||
pub use encryption::{check_has_e2e_password, delete_e2e_password, set_e2e_password};
|
||||
pub use engine::{
|
||||
enable_group_sync_if_needed, enable_proxy_sync_if_needed, enable_sync_for_all_entities,
|
||||
enable_vpn_sync_if_needed, get_unsynced_entity_counts, is_group_in_use_by_synced_profile,
|
||||
is_group_used_by_synced_profile, is_proxy_in_use_by_synced_profile,
|
||||
is_proxy_used_by_synced_profile, is_vpn_in_use_by_synced_profile, is_vpn_used_by_synced_profile,
|
||||
request_profile_sync, set_group_sync_enabled, set_profile_sync_enabled, set_proxy_sync_enabled,
|
||||
set_vpn_sync_enabled, sync_profile, trigger_sync_for_profile, SyncEngine,
|
||||
is_proxy_used_by_synced_profile, is_sync_configured, is_vpn_in_use_by_synced_profile,
|
||||
is_vpn_used_by_synced_profile, request_profile_sync, set_group_sync_enabled,
|
||||
set_profile_sync_mode, set_proxy_sync_enabled, set_vpn_sync_enabled, sync_profile,
|
||||
trigger_sync_for_profile, SyncEngine,
|
||||
};
|
||||
pub use manifest::{compute_diff, generate_manifest, HashCache, ManifestDiff, SyncManifest};
|
||||
pub use scheduler::{get_global_scheduler, set_global_scheduler, SyncScheduler};
|
||||
|
||||
@@ -232,7 +232,10 @@ impl SyncScheduler {
|
||||
}
|
||||
};
|
||||
|
||||
let sync_enabled_profiles: Vec<_> = profiles.into_iter().filter(|p| p.sync_enabled).collect();
|
||||
let sync_enabled_profiles: Vec<_> = profiles
|
||||
.into_iter()
|
||||
.filter(|p| p.is_sync_enabled())
|
||||
.collect();
|
||||
|
||||
if sync_enabled_profiles.is_empty() {
|
||||
log::debug!("No sync-enabled profiles found");
|
||||
@@ -353,7 +356,7 @@ impl SyncScheduler {
|
||||
profile_manager.list_profiles().ok().and_then(|profiles| {
|
||||
profiles
|
||||
.into_iter()
|
||||
.find(|p| p.id.to_string() == profile_id && p.sync_enabled)
|
||||
.find(|p| p.id.to_string() == profile_id && p.is_sync_enabled())
|
||||
})
|
||||
};
|
||||
|
||||
@@ -615,7 +618,7 @@ impl SyncScheduler {
|
||||
}
|
||||
}
|
||||
|
||||
async fn process_pending_tombstones(&self, app_handle: &tauri::AppHandle) {
|
||||
async fn process_pending_tombstones(&self, _app_handle: &tauri::AppHandle) {
|
||||
let tombstones: Vec<(String, String)> = {
|
||||
let mut pending = self.pending_tombstones.lock().await;
|
||||
std::mem::take(&mut *pending)
|
||||
@@ -629,67 +632,68 @@ impl SyncScheduler {
|
||||
log::info!("Processing tombstone for {} {}", entity_type, entity_id);
|
||||
match entity_type.as_str() {
|
||||
"profile" => {
|
||||
let exists_locally = {
|
||||
let profile_manager = ProfileManager::instance();
|
||||
let profile_manager = ProfileManager::instance();
|
||||
let profile_to_delete = {
|
||||
if let Ok(profiles) = profile_manager.list_profiles() {
|
||||
let profile_uuid = uuid::Uuid::parse_str(&entity_id).ok();
|
||||
profile_uuid
|
||||
.as_ref()
|
||||
.map(|uuid| profiles.iter().any(|p| p.id == *uuid))
|
||||
.unwrap_or(false)
|
||||
profile_uuid.and_then(|uuid| profiles.into_iter().find(|p| p.id == uuid))
|
||||
} else {
|
||||
false
|
||||
None
|
||||
}
|
||||
};
|
||||
|
||||
if exists_locally {
|
||||
// Profile exists locally but was deleted remotely - delete locally
|
||||
if let Some(mut profile) = profile_to_delete {
|
||||
log::info!(
|
||||
"Profile {} exists locally, deleting due to remote tombstone",
|
||||
"Profile {} was deleted remotely, disabling sync locally",
|
||||
entity_id
|
||||
);
|
||||
// Note: We don't actually delete here to avoid data loss.
|
||||
// The user should be notified or we could add a confirmation step.
|
||||
// For now, just log it.
|
||||
} else {
|
||||
// Profile doesn't exist locally - check if it still exists remotely
|
||||
// (tombstone might have been created but profile files still exist)
|
||||
// Try to download it
|
||||
match SyncEngine::create_from_settings(app_handle).await {
|
||||
Ok(engine) => {
|
||||
if let Ok(true) = engine
|
||||
.download_profile_if_missing(app_handle, &entity_id)
|
||||
.await
|
||||
{
|
||||
log::info!(
|
||||
"Downloaded missing profile {} from remote storage",
|
||||
entity_id
|
||||
);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
log::debug!("Sync not configured, skipping profile download: {}", e);
|
||||
}
|
||||
profile.sync_mode = crate::profile::types::SyncMode::Disabled;
|
||||
if let Err(e) = profile_manager.save_profile(&profile) {
|
||||
log::warn!("Failed to disable sync for profile {}: {}", entity_id, e);
|
||||
} else {
|
||||
log::info!(
|
||||
"Profile {} sync disabled due to remote tombstone (local copy kept)",
|
||||
entity_id
|
||||
);
|
||||
let _ = events::emit("profiles-changed", ());
|
||||
}
|
||||
}
|
||||
}
|
||||
"proxy" => {
|
||||
log::debug!(
|
||||
"Proxy tombstone for {} - local deletion not implemented",
|
||||
entity_id
|
||||
);
|
||||
let proxy_manager = &crate::proxy_manager::PROXY_MANAGER;
|
||||
let proxies = proxy_manager.get_stored_proxies();
|
||||
if let Some(proxy) = proxies.iter().find(|p| p.id == entity_id) {
|
||||
if proxy.sync_enabled {
|
||||
log::info!("Proxy {} was deleted remotely, deleting locally", entity_id);
|
||||
let proxy_file = proxy_manager.get_proxy_file_path(&entity_id);
|
||||
if proxy_file.exists() {
|
||||
let _ = std::fs::remove_file(&proxy_file);
|
||||
}
|
||||
proxy_manager.remove_from_memory(&entity_id);
|
||||
let _ = events::emit("stored-proxies-changed", ());
|
||||
}
|
||||
}
|
||||
}
|
||||
"group" => {
|
||||
log::debug!(
|
||||
"Group tombstone for {} - local deletion not implemented",
|
||||
entity_id
|
||||
);
|
||||
let group_manager = crate::group_manager::GROUP_MANAGER.lock().unwrap();
|
||||
let groups = group_manager.get_all_groups().unwrap_or_default();
|
||||
if let Some(group) = groups.iter().find(|g| g.id == entity_id) {
|
||||
if group.sync_enabled {
|
||||
log::info!("Group {} was deleted remotely, deleting locally", entity_id);
|
||||
let _ = group_manager.delete_group_internal(&entity_id);
|
||||
let _ = events::emit("groups-changed", ());
|
||||
}
|
||||
}
|
||||
}
|
||||
"vpn" => {
|
||||
log::debug!(
|
||||
"VPN tombstone for {} - local deletion not implemented",
|
||||
entity_id
|
||||
);
|
||||
let storage = crate::vpn::VPN_STORAGE.lock().unwrap();
|
||||
if let Ok(vpn) = storage.load_config(&entity_id) {
|
||||
if vpn.sync_enabled {
|
||||
log::info!("VPN {} was deleted remotely, deleting locally", entity_id);
|
||||
let _ = storage.delete_config(&entity_id);
|
||||
let _ = events::emit("vpn-configs-changed", ());
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user