[mob][photos] ML Lite initial commit

This commit is contained in:
laurenspriem 2024-10-23 22:43:28 +05:30
parent 44756e7268
commit 77dc0ba6f0
4 changed files with 58 additions and 17 deletions

View File

@ -9,6 +9,7 @@ import 'package:photos/services/machine_learning/face_ml/face_embedding/face_emb
import "package:photos/services/machine_learning/ml_models_overview.dart";
import 'package:photos/services/machine_learning/ml_result.dart';
import "package:photos/services/machine_learning/semantic_search/clip/clip_image_encoder.dart";
import "package:photos/services/remote_assets_service.dart";
import "package:photos/utils/ml_util.dart";
class MLIndexingIsolate extends SuperIsolate {
@ -25,6 +26,8 @@ class MLIndexingIsolate extends SuperIsolate {
@override
bool get shouldAutomaticDispose => true;
bool? indexingModelsCleanedLocally;
@override
Future<void> onDispose() async {
await _releaseModels();
@ -129,6 +132,23 @@ class MLIndexingIsolate extends SuperIsolate {
}
}
/// Cleans up the locally cached indexing models.
///
/// First releases the loaded models (via [_releaseModels]), then asks
/// [RemoteAssetsService] to delete the on-disk files for every model that
/// is used for indexing. Guarded by [indexingModelsCleanedLocally] so the
/// cleanup runs at most once.
Future<void> cleanupLocalIndexingModels() async {
  if (indexingModelsCleanedLocally == true) return;
  await _releaseModels();
  // Remote paths of all indexing models; non-indexing models are kept.
  final remoteModelPaths = <String>[
    for (final model in MLModels.values)
      if (model.isIndexingModel) model.model.modelRemotePath,
  ];
  await RemoteAssetsService.instance.cleanupSelectedModels(remoteModelPaths);
  indexingModelsCleanedLocally = true;
}
Future<void> _releaseModels() async {
final List<String> modelNames = [];
final List<int> modelAddresses = [];

View File

@ -43,6 +43,9 @@ class MLService {
bool _isInitialized = false;
bool areModelsDownloaded = false;
int? lastRemoteFetch;
static const int _kRemoteFetchCooldownOnLite = 1000 * 60 * 5;
late String client;
bool get isInitialized => _isInitialized;
@ -60,9 +63,7 @@ class MLService {
/// Only call this function once at app startup, after that you can directly call [runAllML]
Future<void> init({bool firstTime = false}) async {
if (localSettings.isMLIndexingEnabled == false || _isInitialized) {
return;
}
if (_isInitialized) return;
_logger.info("init called");
// Get client name
@ -72,9 +73,8 @@ class MLService {
// Listen on MachineLearningController
Bus.instance.on<MachineLearningControlEvent>().listen((event) {
if (localSettings.isMLIndexingEnabled == false) {
return;
}
// if (!canFetch()) return;
_mlControllerStatus = event.shouldRun;
if (_mlControllerStatus) {
if (_shouldPauseIndexingAndClustering) {
@ -100,6 +100,17 @@ class MLService {
_logger.info('init done');
}
/// Whether a remote fetch is currently allowed.
///
/// Always true when full ML indexing is enabled, or when no fetch has been
/// recorded yet. Otherwise (ML Lite mode), fetches are rate limited to one
/// per [_kRemoteFetchCooldownOnLite] milliseconds; when the cooldown has
/// elapsed, the current time is recorded as the last fetch.
bool canFetch() {
  if (localSettings.isMLIndexingEnabled) return true;
  if (lastRemoteFetch == null) return true;
  // Read the clock once so the timestamp we record is exactly the instant
  // used for the cooldown comparison (the original read it twice).
  final nowMs = DateTime.now().millisecondsSinceEpoch;
  final cooldownOver = (nowMs - lastRemoteFetch!) > _kRemoteFetchCooldownOnLite;
  if (cooldownOver) {
    lastRemoteFetch = nowMs;
  }
  return cooldownOver;
}
/// Syncs ML-related data by delegating to the face recognition service.
///
/// NOTE(review): only [faceRecognitionService.sync] is awaited here; what
/// exactly it syncs is defined elsewhere — confirm in its implementation.
Future<void> sync() async {
await faceRecognitionService.sync();
}
@ -126,7 +137,9 @@ class MLService {
// refresh discover section
magicCacheService.updateCache(forced: force).ignore();
}
await indexAllImages();
if (canFetch()) {
await fetchAndIndexAllImages();
}
if ((await MLDataDB.instance.getUnclusteredFaceCount()) > 0) {
await clusterAllImages();
}
@ -162,10 +175,11 @@ class MLService {
MLIndexingIsolate.instance.shouldPauseIndexingAndClustering = false;
}
/// Analyzes all the images in the database with the latest ml version and stores the results in the database.
/// Analyzes all the images in the user library with the latest ml version and stores the results in the database.
///
/// This function first checks if the image has already been analyzed with the lastest faceMlVersion and stored in the database. If so, it skips the image.
Future<void> indexAllImages() async {
/// This function first fetches from remote and checks if the image has already been analyzed
/// with the latest faceMlVersion and stored in the remote or local database. If so, it skips the image.
Future<void> fetchAndIndexAllImages() async {
if (_cannotRunMLFunction()) return;
try {
@ -179,7 +193,10 @@ class MLService {
stream:
await for (final chunk in instructionStream) {
if (!await canUseHighBandwidth()) {
if (!localSettings.isMLIndexingEnabled) {
await MLIndexingIsolate.instance.cleanupLocalIndexingModels();
continue;
} else if (!await canUseHighBandwidth()) {
_logger.info(
'stopping indexing because user is not connected to wifi',
);
@ -596,7 +613,7 @@ class MLService {
void _logStatus() {
final String status = '''
isInternalUser: ${flagService.internalUser}
isMLIndexingEnabled: ${localSettings.isMLIndexingEnabled}
ML Lite: ${!localSettings.isMLIndexingEnabled}
canRunMLController: $_mlControllerStatus
isIndexingOrClusteringRunning: $_isIndexingOrClusteringRunning
shouldPauseIndexingAndClustering: $_shouldPauseIndexingAndClustering

View File

@ -128,7 +128,14 @@ class RemoteAssetsService {
"https://models.ente.io/yolov5s_face_opset18_rgba_opt.onnx",
];
for (final remotePath in oldModelNames) {
await cleanupSelectedModels(oldModelNames);
checkRemovedOldAssets = true;
_logger.info("Old ML models cleaned up");
}
Future<void> cleanupSelectedModels(List<String> modelRemotePaths) async {
for (final remotePath in modelRemotePaths) {
final localPath = await _getLocalPath(remotePath);
if (File(localPath).existsSync()) {
_logger.info(
@ -137,8 +144,5 @@ class RemoteAssetsService {
await File(localPath).delete();
}
}
checkRemovedOldAssets = true;
_logger.info("Old ML models cleaned up");
}
}

View File

@ -190,7 +190,7 @@ class _MLDebugSectionWidgetState extends State<MLDebugSectionWidget> {
onTap: () async {
try {
MLService.instance.debugIndexingDisabled = false;
unawaited(MLService.instance.indexAllImages());
unawaited(MLService.instance.fetchAndIndexAllImages());
} catch (e, s) {
logger.warning('indexing failed ', e, s);
await showGenericErrorDialog(context: context, error: e);