fix: stabilize incremental library scan and fold 3.5.1 into 3.5.0

This commit is contained in:
zarzet
2026-02-07 13:11:23 +07:00
parent 03d29a73f7
commit ca136b8e17
6 changed files with 1833 additions and 1101 deletions

View File

@@ -17,6 +17,10 @@
- New settings fields for storage mode + SAF tree URI
- SAF platform bridge methods: pick tree, stat/exists/delete, open content URI, copy to temp, write back to SAF
- SAF library scan mode (DocumentFile traversal + metadata read)
- Incremental library scanning for filesystem and SAF paths (only scans new/modified files and detects removed files)
- Force Full Scan action in Library Settings to rescan all files on demand
- Downloaded files are now excluded from Local Library scan results to prevent duplicate entries
- Legacy library rows now support `file_mod_time` backfill before incremental scans (faster follow-up scans after upgrade)
- Library UI toggle to show SAF-repaired history items
- Scan cancelled banner + retry action for library scans
- Android DocumentFile dependency for SAF operations
@@ -24,6 +28,7 @@
- Donate page in Settings with Ko-fi and Buy Me a Coffee links
- Per-App Language support on Android 13+ (locale_config.xml)
- Interactive tutorial with working search bar simulation and clickable download buttons
- Tutorial completion state is persisted after onboarding
- Visual feedback animations for page transitions, entrance effects, and feature lists
- New dedicated welcome step in setup wizard with improved branding
@@ -33,6 +38,10 @@
- Tidal/Qobuz/Amazon/Extension downloads use SAF-aware output when enabled
- Post-processing hooks run for SAF content URIs (via temp file bridge)
- File operations in Library/Queue/Track screens now SAF-aware (`open`, `exists`, `delete`, `stat`)
- Local Library scan defaults to incremental mode; full rescan is available via Force Full Scan
- Local library database upgraded to schema v3 with `file_mod_time` tracking for incremental scan cache
- Platform channels expanded with incremental scan APIs (`scanLibraryFolderIncremental`) on Android and iOS
- Android platform channel adds `getSafFileModTimes` for SAF legacy cache backfill
- Android build tooling upgraded to Gradle 9.3.1 (wrapper)
- Android build path validated with Java 25 (Gradle/Kotlin/assemble debug)
- SAF tree picker flow in `MainActivity` migrated to Activity Result API (`registerForActivityResult`)
@@ -61,6 +70,10 @@
- Library scan hero card showing 0 tracks during scan (now shows scanned file count in real-time)
- Library folder picker no longer requires MANAGE_EXTERNAL_STORAGE on Android 10+ (uses SAF tree picker)
- One-time SAF migration prompt for users updating from pre-SAF versions
- Fixed `fileModTime` propagation across Go/Android/Dart so incremental scan cache is stored and reused correctly
- Fixed SAF incremental scan key mismatch (`lastModified` vs `fileModTime`) and normalized result fields (`skippedCount`, `totalFiles`)
- Fixed incremental scan progress when all files are skipped (`scanned_files` now reaches total files)
- Removed duplicate `"removeExtension"` branch in Android method channel handler (eliminates Kotlin duplicate-branch warning)
---

View File

@@ -606,7 +606,9 @@ class MainActivity: FlutterFragmentActivity() {
val metadataJson = Gobackend.readAudioMetadataJSON(tempPath)
if (metadataJson.isNotBlank()) {
val obj = JSONObject(metadataJson)
val lastModified = doc.lastModified()
obj.put("filePath", doc.uri.toString())
obj.put("fileModTime", lastModified)
results.put(obj)
} else {
errors++
@@ -637,6 +639,226 @@ class MainActivity: FlutterFragmentActivity() {
return results.toString()
}
/**
 * Incremental SAF tree scan - only scans new or modified files.
 *
 * Walks the SAF tree breadth-first, compares each audio file's lastModified
 * stamp against the caller-provided cache, and reads metadata (via a temp-file
 * bridge to the Go backend) only for new or changed files.
 *
 * @param treeUriStr The SAF tree URI to scan
 * @param existingFilesJson JSON object mapping file URI -> lastModified timestamp
 * @return JSON object with new/changed files, removed URIs, skippedCount and totalFiles
 */
private fun scanSafTreeIncremental(treeUriStr: String, existingFilesJson: String): String {
    if (treeUriStr.isBlank()) {
        return incrementalSafResultJson(JSONArray(), JSONArray(), 0, 0)
    }
    val treeUri = Uri.parse(treeUriStr)
    val root = DocumentFile.fromTreeUri(this, treeUri)
        ?: return incrementalSafResultJson(JSONArray(), JSONArray(), 0, 0)
    // Parse existing files map: URI -> lastModified. A malformed payload
    // degrades to an empty cache (everything gets rescanned) rather than failing.
    val existingFiles = mutableMapOf<String, Long>()
    try {
        val obj = JSONObject(existingFilesJson)
        val keys = obj.keys()
        while (keys.hasNext()) {
            val key = keys.next()
            existingFiles[key] = obj.optLong(key, 0)
        }
    } catch (_: Exception) {}
    resetSafScanProgress()
    safScanCancel = false
    safScanActive = true
    val supportedExt = setOf(".flac", ".m4a", ".mp3", ".opus", ".ogg")
    val audioFiles = mutableListOf<Triple<DocumentFile, String, Long>>() // doc, path, lastModified
    val currentUris = mutableSetOf<String>()
    // Breadth-first traversal collecting all audio files with their lastModified stamps.
    val queue: ArrayDeque<Pair<DocumentFile, String>> = ArrayDeque()
    queue.add(root to "")
    while (queue.isNotEmpty()) {
        if (safScanCancel) {
            updateSafScanProgress { it.isComplete = true }
            return incrementalSafResultJson(JSONArray(), JSONArray(), 0, 0, cancelled = true)
        }
        val (dir, path) = queue.removeFirst()
        for (child in dir.listFiles()) {
            if (safScanCancel) {
                updateSafScanProgress { it.isComplete = true }
                return incrementalSafResultJson(JSONArray(), JSONArray(), 0, 0, cancelled = true)
            }
            if (child.isDirectory) {
                val childName = child.name ?: continue
                val childPath = if (path.isBlank()) childName else "$path/$childName"
                queue.add(child to childPath)
            } else if (child.isFile) {
                val name = child.name ?: continue
                val ext = name.substringAfterLast('.', "").lowercase(Locale.ROOT)
                if (ext.isNotBlank() && supportedExt.contains(".$ext")) {
                    val uriStr = child.uri.toString()
                    val lastModified = child.lastModified()
                    currentUris.add(uriStr)
                    // New file, or lastModified differs from the cached value -> rescan it.
                    val existingModified = existingFiles[uriStr]
                    if (existingModified == null || existingModified != lastModified) {
                        audioFiles.add(Triple(child, path, lastModified))
                    }
                }
            }
        }
    }
    // URIs present in the cache but missing from the tree were removed.
    val removedUris = existingFiles.keys.filter { !currentUris.contains(it) }
    val totalFiles = currentUris.size
    val skippedCount = (totalFiles - audioFiles.size).coerceAtLeast(0)
    updateSafScanProgress {
        it.totalFiles = totalFiles
    }
    if (audioFiles.isEmpty()) {
        // Nothing changed: report 100% immediately so the UI does not sit at 0.
        updateSafScanProgress {
            it.isComplete = true
            it.scannedFiles = totalFiles
            it.progressPct = 100.0
        }
        return incrementalSafResultJson(JSONArray(), JSONArray(removedUris), skippedCount, totalFiles)
    }
    val results = JSONArray()
    var scanned = 0
    var errors = 0
    for ((doc, _, lastModified) in audioFiles) {
        if (safScanCancel) {
            updateSafScanProgress { it.isComplete = true }
            return incrementalSafResultJson(JSONArray(), JSONArray(), skippedCount, totalFiles, cancelled = true)
        }
        val name = doc.name ?: ""
        updateSafScanProgress {
            it.currentFile = name
        }
        val ext = name.substringAfterLast('.', "").lowercase(Locale.ROOT)
        val fallbackExt = if (ext.isNotBlank()) ".${ext}" else null
        // SAF content cannot be read by the Go backend directly; bridge via a temp copy.
        val tempPath = copyUriToTemp(doc.uri, fallbackExt)
        if (tempPath == null) {
            errors++
        } else {
            try {
                val metadataJson = Gobackend.readAudioMetadataJSON(tempPath)
                if (metadataJson.isNotBlank()) {
                    val obj = JSONObject(metadataJson)
                    obj.put("filePath", doc.uri.toString())
                    // Both keys are emitted so readers keyed on either name agree on the mod time.
                    obj.put("fileModTime", lastModified)
                    obj.put("lastModified", lastModified)
                    results.put(obj)
                } else {
                    errors++
                }
            } catch (_: Exception) {
                errors++
            } finally {
                try {
                    File(tempPath).delete()
                } catch (_: Exception) {}
            }
        }
        scanned++
        // Skipped files count as processed so the percentage reaches 100 on completion.
        val processed = skippedCount + scanned
        val pct = if (totalFiles > 0) {
            processed.toDouble() / totalFiles.toDouble() * 100.0
        } else {
            100.0
        }
        updateSafScanProgress {
            it.scannedFiles = processed
            it.errorCount = errors
            it.progressPct = pct
        }
    }
    updateSafScanProgress {
        it.isComplete = true
        it.progressPct = 100.0
    }
    return incrementalSafResultJson(results, JSONArray(removedUris), skippedCount, totalFiles)
}

/**
 * Builds the JSON payload returned by the incremental SAF scan.
 *
 * @param cancelled when true, adds `"cancelled": true` so the Dart side can
 *        show the scan-cancelled banner with a retry action
 */
private fun incrementalSafResultJson(
    files: JSONArray,
    removedUris: JSONArray,
    skippedCount: Int,
    totalFiles: Int,
    cancelled: Boolean = false,
): String {
    val result = JSONObject()
    result.put("files", files)
    result.put("removedUris", removedUris)
    result.put("skippedCount", skippedCount)
    result.put("totalFiles", totalFiles)
    if (cancelled) result.put("cancelled", true)
    return result.toString()
}
/**
 * Resolve SAF file last-modified values for a list of content URIs.
 * Returns JSON object mapping uri -> lastModified (unix millis).
 * URIs that are blank, unparsable, or point at missing documents are omitted.
 */
private fun getSafFileModTimes(urisJson: String): String {
    val parsed = try {
        JSONArray(urisJson)
    } catch (_: Exception) {
        JSONArray()
    }
    val out = JSONObject()
    (0 until parsed.length())
        .map { parsed.optString(it, "") }
        .filter { it.isNotBlank() }
        .forEach { uriStr ->
            try {
                val doc = DocumentFile.fromSingleUri(this, Uri.parse(uriStr))
                if (doc?.exists() == true) {
                    out.put(uriStr, doc.lastModified())
                }
            } catch (_: Exception) {
                // Unresolvable URI: leave it out of the result map.
            }
        }
    return out.toString()
}
private fun runPostProcessingSaf(fileUriStr: String, metadataJson: String): String {
val uri = Uri.parse(fileUriStr)
val doc = DocumentFile.fromSingleUri(this, uri)
@@ -1476,13 +1698,6 @@ class MainActivity: FlutterFragmentActivity() {
}
result.success(response)
}
"removeExtension" -> {
val extensionId = call.argument<String>("extension_id") ?: ""
withContext(Dispatchers.IO) {
Gobackend.removeExtensionByID(extensionId)
}
result.success(null)
}
"cleanupExtensions" -> {
withContext(Dispatchers.IO) {
Gobackend.cleanupExtensions()
@@ -1746,6 +1961,15 @@ class MainActivity: FlutterFragmentActivity() {
}
result.success(response)
}
"scanLibraryFolderIncremental" -> {
val folderPath = call.argument<String>("folder_path") ?: ""
val existingFiles = call.argument<String>("existing_files") ?: "{}"
val response = withContext(Dispatchers.IO) {
safScanActive = false
Gobackend.scanLibraryFolderIncrementalJSON(folderPath, existingFiles)
}
result.success(response)
}
"scanSafTree" -> {
val treeUri = call.argument<String>("tree_uri") ?: ""
val response = withContext(Dispatchers.IO) {
@@ -1753,6 +1977,21 @@ class MainActivity: FlutterFragmentActivity() {
}
result.success(response)
}
"scanSafTreeIncremental" -> {
val treeUri = call.argument<String>("tree_uri") ?: ""
val existingFiles = call.argument<String>("existing_files") ?: "{}"
val response = withContext(Dispatchers.IO) {
scanSafTreeIncremental(treeUri, existingFiles)
}
result.success(response)
}
"getSafFileModTimes" -> {
val uris = call.argument<String>("uris") ?: "[]"
val response = withContext(Dispatchers.IO) {
getSafFileModTimes(uris)
}
result.success(response)
}
"getLibraryScanProgress" -> {
val response = withContext(Dispatchers.IO) {
if (safScanActive) {

View File

@@ -20,6 +20,7 @@ type LibraryScanResult struct {
FilePath string `json:"filePath"`
CoverPath string `json:"coverPath,omitempty"`
ScannedAt string `json:"scannedAt"`
FileModTime int64 `json:"fileModTime,omitempty"` // Unix timestamp in milliseconds
ISRC string `json:"isrc,omitempty"`
TrackNumber int `json:"trackNumber,omitempty"`
DiscNumber int `json:"discNumber,omitempty"`
@@ -40,6 +41,14 @@ type LibraryScanProgress struct {
IsComplete bool `json:"is_complete"`
}
// IncrementalScanResult contains results of an incremental library scan.
// It is marshalled to JSON and returned to the caller, which upserts Scanned,
// deletes DeletedPaths, and uses the counts for progress/summary reporting.
type IncrementalScanResult struct {
	Scanned      []LibraryScanResult `json:"scanned"`      // New or updated files
	DeletedPaths []string            `json:"deletedPaths"` // Files that no longer exist
	SkippedCount int                 `json:"skippedCount"` // Files that were unchanged
	TotalFiles   int                 `json:"totalFiles"`   // Total files in folder
}
var (
libraryScanProgress LibraryScanProgress
libraryScanProgressMu sync.RWMutex
@@ -179,6 +188,11 @@ func scanAudioFile(filePath, scanTime string) (*LibraryScanResult, error) {
Format: strings.TrimPrefix(ext, "."),
}
// Get file modification time
if info, err := os.Stat(filePath); err == nil {
result.FileModTime = info.ModTime().UnixMilli()
}
libraryCoverCacheMu.RLock()
coverCacheDir := libraryCoverCacheDir
libraryCoverCacheMu.RUnlock()
@@ -413,3 +427,183 @@ func ReadAudioMetadata(filePath string) (string, error) {
return string(jsonBytes), nil
}
// ScanLibraryFolderIncremental performs an incremental scan of the library folder
// existingFilesJSON is a JSON object mapping filePath -> modTime (unix millis)
// Only files that are new or have changed modification time will be scanned
//
// The returned string is a marshalled IncrementalScanResult. Progress is
// published through the shared libraryScanProgress struct; skipped files are
// counted as already processed so the percentage reaches 100 even when
// nothing needed rescanning.
func ScanLibraryFolderIncremental(folderPath, existingFilesJSON string) (string, error) {
	if folderPath == "" {
		return "{}", fmt.Errorf("folder path is empty")
	}
	info, err := os.Stat(folderPath)
	if err != nil {
		return "{}", fmt.Errorf("folder not found: %w", err)
	}
	if !info.IsDir() {
		return "{}", fmt.Errorf("path is not a folder: %s", folderPath)
	}
	// Parse existing files map. A malformed payload degrades to an empty
	// cache (full scan of the folder) rather than failing the operation.
	existingFiles := make(map[string]int64)
	if existingFilesJSON != "" && existingFilesJSON != "{}" {
		if err := json.Unmarshal([]byte(existingFilesJSON), &existingFiles); err != nil {
			GoLog("[LibraryScan] Warning: failed to parse existing files JSON: %v\n", err)
		}
	}
	GoLog("[LibraryScan] Incremental scan starting, %d existing files in database\n", len(existingFiles))
	// Reset progress
	libraryScanProgressMu.Lock()
	libraryScanProgress = LibraryScanProgress{}
	libraryScanProgressMu.Unlock()
	// Setup cancellation: closing the previous channel releases any scan
	// still draining, then a fresh channel is installed for this run.
	libraryScanCancelMu.Lock()
	if libraryScanCancel != nil {
		close(libraryScanCancel)
	}
	libraryScanCancel = make(chan struct{})
	cancelCh := libraryScanCancel
	libraryScanCancelMu.Unlock()
	// Collect all audio files with their mod times
	type fileInfo struct {
		path    string
		modTime int64
	}
	var currentFiles []fileInfo
	currentPathSet := make(map[string]bool)
	err = filepath.Walk(folderPath, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			// Unreadable entries are skipped, not fatal.
			return nil
		}
		select {
		case <-cancelCh:
			return fmt.Errorf("scan cancelled")
		default:
		}
		if !info.IsDir() {
			ext := strings.ToLower(filepath.Ext(path))
			if supportedAudioFormats[ext] {
				currentFiles = append(currentFiles, fileInfo{
					path:    path,
					modTime: info.ModTime().UnixMilli(),
				})
				currentPathSet[path] = true
			}
		}
		return nil
	})
	if err != nil {
		return "{}", err
	}
	totalFiles := len(currentFiles)
	libraryScanProgressMu.Lock()
	libraryScanProgress.TotalFiles = totalFiles
	libraryScanProgressMu.Unlock()
	// Find files to scan (new or modified)
	var filesToScan []fileInfo
	skippedCount := 0
	for _, f := range currentFiles {
		existingModTime, exists := existingFiles[f.path]
		if !exists {
			// New file
			filesToScan = append(filesToScan, f)
		} else if f.modTime != existingModTime {
			// Modified file (any mod-time change triggers a rescan, not just newer)
			filesToScan = append(filesToScan, f)
		} else {
			// Unchanged file - skip
			skippedCount++
		}
	}
	// Find deleted files (present in cache, absent on disk)
	var deletedPaths []string
	for existingPath := range existingFiles {
		if !currentPathSet[existingPath] {
			deletedPaths = append(deletedPaths, existingPath)
		}
	}
	GoLog("[LibraryScan] Incremental: %d to scan, %d skipped, %d deleted\n",
		len(filesToScan), skippedCount, len(deletedPaths))
	if len(filesToScan) == 0 {
		// Everything is cached: mark completion immediately so the UI
		// does not sit at 0%.
		libraryScanProgressMu.Lock()
		libraryScanProgress.ScannedFiles = totalFiles
		libraryScanProgress.IsComplete = true
		libraryScanProgress.ProgressPct = 100
		libraryScanProgressMu.Unlock()
		result := IncrementalScanResult{
			Scanned:      []LibraryScanResult{},
			DeletedPaths: deletedPaths,
			SkippedCount: skippedCount,
			TotalFiles:   totalFiles,
		}
		jsonBytes, _ := json.Marshal(result)
		return string(jsonBytes), nil
	}
	// Scan the files that need scanning
	results := make([]LibraryScanResult, 0, len(filesToScan))
	scanTime := time.Now().UTC().Format(time.RFC3339)
	errorCount := 0
	for i, f := range filesToScan {
		select {
		case <-cancelCh:
			// NOTE(review): this cancel path returns without setting
			// libraryScanProgress.IsComplete — confirm the caller resets
			// progress state, otherwise pollers may never see completion.
			return "{}", fmt.Errorf("scan cancelled")
		default:
		}
		// Skipped files count toward ScannedFiles so the percentage is
		// relative to the whole folder, not just the changed subset.
		libraryScanProgressMu.Lock()
		libraryScanProgress.ScannedFiles = skippedCount + i + 1
		libraryScanProgress.CurrentFile = filepath.Base(f.path)
		libraryScanProgress.ProgressPct = float64(skippedCount+i+1) / float64(totalFiles) * 100
		libraryScanProgressMu.Unlock()
		result, err := scanAudioFile(f.path, scanTime)
		if err != nil {
			// Per-file failures are counted but do not abort the scan.
			errorCount++
			GoLog("[LibraryScan] Error scanning %s: %v\n", f.path, err)
			continue
		}
		results = append(results, *result)
	}
	libraryScanProgressMu.Lock()
	libraryScanProgress.ErrorCount = errorCount
	libraryScanProgress.IsComplete = true
	libraryScanProgress.ScannedFiles = totalFiles
	libraryScanProgress.ProgressPct = 100
	libraryScanProgressMu.Unlock()
	GoLog("[LibraryScan] Incremental scan complete: %d scanned, %d skipped, %d deleted, %d errors\n",
		len(results), skippedCount, len(deletedPaths), errorCount)
	scanResult := IncrementalScanResult{
		Scanned:      results,
		DeletedPaths: deletedPaths,
		SkippedCount: skippedCount,
		TotalFiles:   totalFiles,
	}
	jsonBytes, err := json.Marshal(scanResult)
	if err != nil {
		return "{}", fmt.Errorf("failed to marshal results: %w", err)
	}
	return string(jsonBytes), nil
}

View File

@@ -1,7 +1,9 @@
import 'dart:async';
import 'dart:io';
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:path_provider/path_provider.dart';
import 'package:shared_preferences/shared_preferences.dart';
import 'package:spotiflac_android/services/history_database.dart';
import 'package:spotiflac_android/services/library_database.dart';
import 'package:spotiflac_android/services/platform_bridge.dart';
import 'package:spotiflac_android/utils/logger.dart';
@@ -96,6 +98,7 @@ class LocalLibraryState {
class LocalLibraryNotifier extends Notifier<LocalLibraryState> {
final LibraryDatabase _db = LibraryDatabase.instance;
final HistoryDatabase _historyDb = HistoryDatabase.instance;
Timer? _progressTimer;
bool _isLoaded = false;
bool _scanCancelRequested = false;
@@ -145,14 +148,14 @@ class LocalLibraryNotifier extends Notifier<LocalLibraryState> {
await _loadFromDatabase();
}
Future<void> startScan(String folderPath) async {
Future<void> startScan(String folderPath, {bool forceFullScan = false}) async {
if (state.isScanning) {
_log.w('Scan already in progress');
return;
}
_scanCancelRequested = false;
_log.i('Starting library scan: $folderPath');
_log.i('Starting library scan: $folderPath (incremental: ${!forceFullScan})');
state = state.copyWith(
isScanning: true,
scanProgress: 0,
@@ -176,40 +179,163 @@ class LocalLibraryNotifier extends Notifier<LocalLibraryState> {
try {
final isSaf = folderPath.startsWith('content://');
final results = isSaf
? await PlatformBridge.scanSafTree(folderPath)
: await PlatformBridge.scanLibraryFolder(folderPath);
if (_scanCancelRequested) {
state = state.copyWith(isScanning: false, scanWasCancelled: true);
return;
}
final items = <LocalLibraryItem>[];
for (final json in results) {
final item = LocalLibraryItem.fromJson(json);
items.add(item);
// Get all file paths from download history to exclude them
final downloadedPaths = await _historyDb.getAllFilePaths();
_log.i('Excluding ${downloadedPaths.length} downloaded files from library scan');
if (forceFullScan) {
// Full scan path - ignores existing data
final results = isSaf
? await PlatformBridge.scanSafTree(folderPath)
: await PlatformBridge.scanLibraryFolder(folderPath);
if (_scanCancelRequested) {
state = state.copyWith(isScanning: false, scanWasCancelled: true);
return;
}
final items = <LocalLibraryItem>[];
int skippedDownloads = 0;
for (final json in results) {
final filePath = json['filePath'] as String?;
// Skip files that are already in download history
if (filePath != null && downloadedPaths.contains(filePath)) {
skippedDownloads++;
continue;
}
final item = LocalLibraryItem.fromJson(json);
items.add(item);
}
if (skippedDownloads > 0) {
_log.i('Skipped $skippedDownloads files already in download history');
}
await _db.upsertBatch(items.map((e) => e.toJson()).toList());
final now = DateTime.now();
try {
final prefs = await SharedPreferences.getInstance();
await prefs.setString(_lastScannedAtKey, now.toIso8601String());
_log.d('Saved lastScannedAt: $now');
} catch (e) {
_log.w('Failed to save lastScannedAt: $e');
}
state = state.copyWith(
items: items,
isScanning: false,
scanProgress: 100,
lastScannedAt: now,
scanWasCancelled: false,
);
_log.i('Full scan complete: ${items.length} tracks found');
} else {
// Incremental scan path - only scans new/modified files
final existingFiles = await _db.getFileModTimes();
_log.i('Incremental scan: ${existingFiles.length} existing files in database');
final backfilledModTimes = await _backfillLegacyFileModTimes(
isSaf: isSaf,
existingFiles: existingFiles,
);
if (backfilledModTimes.isNotEmpty) {
await _db.updateFileModTimes(backfilledModTimes);
existingFiles.addAll(backfilledModTimes);
_log.i('Backfilled ${backfilledModTimes.length} legacy mod times');
}
// Use appropriate incremental scan method based on SAF or not
final Map<String, dynamic> result;
if (isSaf) {
result = await PlatformBridge.scanSafTreeIncremental(
folderPath,
existingFiles,
);
} else {
result = await PlatformBridge.scanLibraryFolderIncremental(
folderPath,
existingFiles,
);
}
if (_scanCancelRequested) {
state = state.copyWith(isScanning: false, scanWasCancelled: true);
return;
}
// Parse incremental scan result
// SAF returns 'files' and 'removedUris', non-SAF returns 'scanned' and 'deletedPaths'
final scannedList = (result['files'] as List<dynamic>?)
?? (result['scanned'] as List<dynamic>?)
?? [];
final deletedPaths = (result['removedUris'] as List<dynamic>?)
?.map((e) => e as String)
.toList()
?? (result['deletedPaths'] as List<dynamic>?)
?.map((e) => e as String)
.toList()
?? [];
final skippedCount = result['skippedCount'] as int? ?? 0;
final totalFiles = result['totalFiles'] as int? ?? 0;
_log.i('Incremental result: ${scannedList.length} scanned, '
'$skippedCount skipped, ${deletedPaths.length} deleted, $totalFiles total');
// Upsert new/modified items (excluding downloaded files)
if (scannedList.isNotEmpty) {
final items = <LocalLibraryItem>[];
int skippedDownloads = 0;
for (final json in scannedList) {
final map = json as Map<String, dynamic>;
final filePath = map['filePath'] as String?;
// Skip files that are already in download history
if (filePath != null && downloadedPaths.contains(filePath)) {
skippedDownloads++;
continue;
}
items.add(LocalLibraryItem.fromJson(map));
}
if (items.isNotEmpty) {
await _db.upsertBatch(items.map((e) => e.toJson()).toList());
_log.i('Upserted ${items.length} items');
}
if (skippedDownloads > 0) {
_log.i('Skipped $skippedDownloads files already in download history');
}
}
// Delete removed items
if (deletedPaths.isNotEmpty) {
final deleteCount = await _db.deleteByPaths(deletedPaths);
_log.i('Deleted $deleteCount items from database');
}
// Reload all items from database to get complete list
final allItems = await _db.getAll();
final items = allItems.map((e) => LocalLibraryItem.fromJson(e)).toList();
final now = DateTime.now();
try {
final prefs = await SharedPreferences.getInstance();
await prefs.setString(_lastScannedAtKey, now.toIso8601String());
_log.d('Saved lastScannedAt: $now');
} catch (e) {
_log.w('Failed to save lastScannedAt: $e');
}
state = state.copyWith(
items: items,
isScanning: false,
scanProgress: 100,
lastScannedAt: now,
scanWasCancelled: false,
);
_log.i('Incremental scan complete: ${items.length} total tracks '
'(${scannedList.length} new/updated, $skippedCount unchanged, ${deletedPaths.length} removed)');
}
await _db.upsertBatch(items.map((e) => e.toJson()).toList());
final now = DateTime.now();
try {
final prefs = await SharedPreferences.getInstance();
await prefs.setString(_lastScannedAtKey, now.toIso8601String());
_log.d('Saved lastScannedAt: $now');
} catch (e) {
_log.w('Failed to save lastScannedAt: $e');
}
state = state.copyWith(
items: items,
isScanning: false,
scanProgress: 100,
lastScannedAt: now,
scanWasCancelled: false,
);
_log.i('Scan complete: ${items.length} tracks found');
} catch (e, stack) {
_log.e('Library scan failed: $e', e, stack);
state = state.copyWith(isScanning: false, scanWasCancelled: false);
@@ -316,6 +442,59 @@ class LocalLibraryNotifier extends Notifier<LocalLibraryState> {
/// Returns the number of tracks stored in the local library database.
Future<int> getCount() => _db.getCount();
/// Backfills `file_mod_time` for legacy library rows (stored value <= 0) so
/// the incremental scan can treat them as cached instead of rescanning them.
///
/// For SAF trees the platform bridge resolves lastModified in chunks of 500
/// URIs; for plain filesystem paths the file is stat'ed directly. Returns a
/// map of path/URI -> mod time (unix millis) for the rows that resolved.
Future<Map<String, int>> _backfillLegacyFileModTimes({
  required bool isSaf,
  required Map<String, int> existingFiles,
}) async {
  // Rows written before schema v3 carry no mod time (stored as 0/null).
  final legacyPaths = existingFiles.entries
      .where((entry) => entry.value <= 0)
      .map((entry) => entry.key)
      .toList();
  if (legacyPaths.isEmpty) {
    return const {};
  }
  if (isSaf) {
    final uris = legacyPaths
        .where((path) => path.startsWith('content://'))
        .toList();
    if (uris.isEmpty) {
      return const {};
    }
    const chunkSize = 500;
    final backfilled = <String, int>{};
    try {
      for (var i = 0; i < uris.length; i += chunkSize) {
        if (_scanCancelRequested) {
          break;
        }
        final end = (i + chunkSize < uris.length) ? i + chunkSize : uris.length;
        final chunk = uris.sublist(i, end);
        final chunkResult = await PlatformBridge.getSafFileModTimes(chunk);
        backfilled.addAll(chunkResult);
      }
    } catch (e) {
      // Keep the chunks that already resolved instead of discarding them;
      // any remaining legacy rows will simply be rescanned.
      _log.w('Failed to backfill SAF mod times: $e');
    }
    return backfilled;
  }
  final backfilled = <String, int>{};
  for (final path in legacyPaths) {
    if (_scanCancelRequested) {
      break; // Stop promptly on cancel instead of iterating the rest.
    }
    if (path.startsWith('content://')) {
      continue; // SAF URIs cannot be stat'ed via dart:io.
    }
    try {
      final stat = await File(path).stat();
      if (stat.type == FileSystemEntityType.file) {
        backfilled[path] = stat.modified.millisecondsSinceEpoch;
      }
    } catch (_) {
      // Missing/unreadable file: leave it for the scan to reconcile.
    }
  }
  return backfilled;
}
}
final localLibraryProvider =

View File

@@ -15,6 +15,7 @@ class LocalLibraryItem {
final String filePath;
final String? coverPath;
final DateTime scannedAt;
final int? fileModTime;
final String? isrc;
final int? trackNumber;
final int? discNumber;
@@ -34,6 +35,7 @@ class LocalLibraryItem {
required this.filePath,
this.coverPath,
required this.scannedAt,
this.fileModTime,
this.isrc,
this.trackNumber,
this.discNumber,
@@ -54,6 +56,7 @@ class LocalLibraryItem {
'filePath': filePath,
'coverPath': coverPath,
'scannedAt': scannedAt.toIso8601String(),
'fileModTime': fileModTime,
'isrc': isrc,
'trackNumber': trackNumber,
'discNumber': discNumber,
@@ -75,6 +78,7 @@ class LocalLibraryItem {
filePath: json['filePath'] as String,
coverPath: json['coverPath'] as String?,
scannedAt: DateTime.parse(json['scannedAt'] as String),
fileModTime: (json['fileModTime'] as num?)?.toInt(),
isrc: json['isrc'] as String?,
trackNumber: json['trackNumber'] as int?,
discNumber: json['discNumber'] as int?,
@@ -111,7 +115,7 @@ class LibraryDatabase {
return await openDatabase(
path,
version: 2, // Bumped version for cover_path migration
version: 3, // Bumped version for file_mod_time migration
onCreate: _createDB,
onUpgrade: _upgradeDB,
);
@@ -130,6 +134,7 @@ class LibraryDatabase {
file_path TEXT NOT NULL UNIQUE,
cover_path TEXT,
scanned_at TEXT NOT NULL,
file_mod_time INTEGER,
isrc TEXT,
track_number INTEGER,
disc_number INTEGER,
@@ -158,6 +163,12 @@ class LibraryDatabase {
await db.execute('ALTER TABLE library ADD COLUMN cover_path TEXT');
_log.i('Added cover_path column');
}
if (oldVersion < 3) {
// Add file_mod_time column for incremental scanning
await db.execute('ALTER TABLE library ADD COLUMN file_mod_time INTEGER');
_log.i('Added file_mod_time column for incremental scanning');
}
}
Map<String, dynamic> _jsonToDbRow(Map<String, dynamic> json) {
@@ -170,6 +181,7 @@ class LibraryDatabase {
'file_path': json['filePath'],
'cover_path': json['coverPath'],
'scanned_at': json['scannedAt'],
'file_mod_time': json['fileModTime'],
'isrc': json['isrc'],
'track_number': json['trackNumber'],
'disc_number': json['discNumber'],
@@ -192,6 +204,7 @@ class LibraryDatabase {
'filePath': row['file_path'],
'coverPath': row['cover_path'],
'scannedAt': row['scanned_at'],
'fileModTime': row['file_mod_time'],
'isrc': row['isrc'],
'trackNumber': row['track_number'],
'discNumber': row['disc_number'],
@@ -383,4 +396,58 @@ class LibraryDatabase {
await db.close();
_database = null;
}
/// Get all file paths with their modification times for incremental scanning.
/// Returns a map of filePath -> fileModTime (unix timestamp in milliseconds);
/// rows without a stored mod time map to 0 (treated as "legacy" by callers).
Future<Map<String, int>> getFileModTimes() async {
  final db = await database;
  final rows = await db.rawQuery(
    'SELECT file_path, COALESCE(file_mod_time, 0) AS file_mod_time FROM library'
  );
  return {
    for (final row in rows)
      row['file_path'] as String: (row['file_mod_time'] as num?)?.toInt() ?? 0,
  };
}
/// Update file_mod_time for existing rows using file_path as key.
/// All updates are committed in a single batch; no-op on an empty map.
Future<void> updateFileModTimes(Map<String, int> fileModTimes) async {
  if (fileModTimes.isEmpty) return;
  final db = await database;
  final batch = db.batch();
  fileModTimes.forEach((path, modTime) {
    batch.update(
      'library',
      {'file_mod_time': modTime},
      where: 'file_path = ?',
      whereArgs: [path],
    );
  });
  await batch.commit(noResult: true);
}
/// Get all file paths in the library (for detecting deleted files).
Future<Set<String>> getAllFilePaths() async {
  final db = await database;
  final rows = await db.rawQuery('SELECT file_path FROM library');
  final paths = <String>{};
  for (final row in rows) {
    paths.add(row['file_path'] as String);
  }
  return paths;
}
/// Delete multiple items by their file paths.
///
/// Deletes in chunks of 500 paths so the `IN (...)` placeholder list stays
/// under SQLite's host-parameter limit (999 on older SQLite builds), making
/// arbitrarily large removal lists safe. Returns the total rows deleted.
Future<int> deleteByPaths(List<String> filePaths) async {
  if (filePaths.isEmpty) return 0;
  final db = await database;
  const chunkSize = 500;
  var deleted = 0;
  for (var i = 0; i < filePaths.length; i += chunkSize) {
    final end =
        (i + chunkSize < filePaths.length) ? i + chunkSize : filePaths.length;
    final chunk = filePaths.sublist(i, end);
    final placeholders = List.filled(chunk.length, '?').join(',');
    deleted += await db.rawDelete(
      'DELETE FROM library WHERE file_path IN ($placeholders)',
      chunk,
    );
  }
  if (deleted > 0) {
    _log.i('Deleted $deleted items from library');
  }
  return deleted;
}
}

File diff suppressed because it is too large Load Diff