diff --git a/mobile/lib/services/machine_learning/ml_isolate.dart b/mobile/lib/services/machine_learning/ml_indexing_isolate.dart
similarity index 93%
rename from mobile/lib/services/machine_learning/ml_isolate.dart
rename to mobile/lib/services/machine_learning/ml_indexing_isolate.dart
index b450f1da72..fa435ac0eb 100644
--- a/mobile/lib/services/machine_learning/ml_isolate.dart
+++ b/mobile/lib/services/machine_learning/ml_indexing_isolate.dart
@@ -14,10 +14,10 @@ import "package:photos/services/machine_learning/semantic_search/clip/clip_image
 import "package:photos/utils/ml_util.dart";
 import "package:synchronized/synchronized.dart";
 
-enum MLOperation { analyzeImage, loadModels, releaseModels }
+enum MLIndexingOperation { analyzeImage, loadModels, releaseModels }
 
-class MLIsolate {
-  static final _logger = Logger("MLIsolate");
+class MLIndexingIsolate {
+  static final _logger = Logger("MLIndexingIsolate");
 
   Timer? _inactivityTimer;
   final Duration _inactivityDuration = const Duration(seconds: 120);
@@ -35,9 +35,9 @@ class MLIsolate {
   bool shouldPauseIndexingAndClustering = false;
 
   // Singleton pattern
-  MLIsolate._privateConstructor();
-  static final instance = MLIsolate._privateConstructor();
-  factory MLIsolate() => instance;
+  MLIndexingIsolate._privateConstructor();
+  static final instance = MLIndexingIsolate._privateConstructor();
+  factory MLIndexingIsolate() => instance;
 
   Future _initIsolate() async {
     return _initIsolateLock.synchronized(() async {
@@ -74,13 +74,13 @@ class MLIsolate {
     mainSendPort.send(receivePort.sendPort);
     receivePort.listen((message) async {
       final functionIndex = message[0] as int;
-      final function = MLOperation.values[functionIndex];
+      final function = MLIndexingOperation.values[functionIndex];
       final args = message[1] as Map;
       final sendPort = message[2] as SendPort;
 
       try {
         switch (function) {
-          case MLOperation.analyzeImage:
+          case MLIndexingOperation.analyzeImage:
             final time = DateTime.now();
             final MLResult result = await analyzeImageStatic(args);
             _logger.info(
@@ -88,7 +88,7 @@ class MLIsolate {
             );
             sendPort.send(result.toJsonString());
             break;
-          case MLOperation.loadModels:
+          case MLIndexingOperation.loadModels:
             final modelNames = args['modelNames'] as List;
             final modelPaths = args['modelPaths'] as List;
             final addresses = [];
@@ -101,7 +101,7 @@ class MLIsolate {
             }
             sendPort.send(List.from(addresses, growable: false));
             break;
-          case MLOperation.releaseModels:
+          case MLIndexingOperation.releaseModels:
            final modelNames = args['modelNames'] as List;
            final modelAddresses = args['modelAddresses'] as List;
            for (int i = 0; i < modelNames.length; i++) {
@@ -122,13 +122,13 @@ class MLIsolate {
 
   /// The common method to run any operation in the isolate. It sends the [message] to [_isolateMain] and waits for the result.
   Future _runInIsolate(
-    (MLOperation, Map) message,
+    (MLIndexingOperation, Map) message,
   ) async {
     await _initIsolate();
     return _functionLock.synchronized(() async {
       _resetInactivityTimer();
 
-      if (message.$1 == MLOperation.analyzeImage &&
+      if (message.$1 == MLIndexingOperation.analyzeImage &&
           shouldPauseIndexingAndClustering) {
         return null;
       }
@@ -198,7 +198,7 @@ class MLIsolate {
     try {
       final resultJsonString = await _runInIsolate(
         (
-          MLOperation.analyzeImage,
+          MLIndexingOperation.analyzeImage,
           {
             "enteFileID": instruction.enteFile.uploadedFileID ?? -1,
             "filePath": filePath,
@@ -271,7 +271,7 @@ class MLIsolate {
     try {
       final addresses = await _runInIsolate(
         (
-          MLOperation.loadModels,
+          MLIndexingOperation.loadModels,
           {
             "modelNames": modelNames,
             "modelPaths": modelPaths,
@@ -306,7 +306,7 @@ class MLIsolate {
     try {
       await _runInIsolate(
         (
-          MLOperation.releaseModels,
+          MLIndexingOperation.releaseModels,
           {
             "modelNames": modelNames,
             "modelAddresses": modelAddresses,
diff --git a/mobile/lib/services/machine_learning/ml_service.dart b/mobile/lib/services/machine_learning/ml_service.dart
index ed878cdba7..7ed1ddb8a3 100644
--- a/mobile/lib/services/machine_learning/ml_service.dart
+++ b/mobile/lib/services/machine_learning/ml_service.dart
@@ -26,7 +26,7 @@ import "package:photos/services/machine_learning/face_ml/person/person_service.d
 import 'package:photos/services/machine_learning/file_ml/file_ml.dart';
 import 'package:photos/services/machine_learning/file_ml/remote_fileml_service.dart';
 import 'package:photos/services/machine_learning/ml_exceptions.dart';
-import "package:photos/services/machine_learning/ml_isolate.dart";
+import "package:photos/services/machine_learning/ml_indexing_isolate.dart";
 import 'package:photos/services/machine_learning/ml_result.dart';
 import "package:photos/services/machine_learning/semantic_search/clip/clip_image_encoder.dart";
 import "package:photos/services/machine_learning/semantic_search/semantic_search_service.dart";
@@ -138,13 +138,13 @@ class MLService {
   void pauseIndexingAndClustering() {
     if (_isIndexingOrClusteringRunning) {
       _shouldPauseIndexingAndClustering = true;
-      MLIsolate.instance.shouldPauseIndexingAndClustering = true;
+      MLIndexingIsolate.instance.shouldPauseIndexingAndClustering = true;
     }
   }
 
   void _cancelPauseIndexingAndClustering() {
     _shouldPauseIndexingAndClustering = false;
-    MLIsolate.instance.shouldPauseIndexingAndClustering = false;
+    MLIndexingIsolate.instance.shouldPauseIndexingAndClustering = false;
   }
 
   /// Analyzes all the images in the database with the latest ml version and stores the results in the database.
@@ -380,7 +380,7 @@ class MLService {
     bool actuallyRanML = false;
 
     try {
-      final MLResult? result = await MLIsolate.instance.analyzeImage(
+      final MLResult? result = await MLIndexingIsolate.instance.analyzeImage(
        instruction,
      );
      if (result == null) {
@@ -518,7 +518,7 @@ class MLService {
     _logger.info(
       'Loading models. faces: $shouldLoadFaces, clip: $shouldLoadClip',
     );
-    await MLIsolate.instance
+    await MLIndexingIsolate.instance
         .loadModels(loadFaces: shouldLoadFaces, loadClip: shouldLoadClip);
     _logger.info('Models loaded');
     _logStatus();