Merge remote-tracking branch 'origin/main' into thumbnail_duration

Commit ce7451dbb4 in ente-io/ente (mirror of https://github.com/ente-io/ente.git)
@@ -4,7 +4,7 @@ on:
workflow_dispatch: # Allow manually running the action

env:
FLUTTER_VERSION: "3.19.4"
FLUTTER_VERSION: "3.22.0"

jobs:
build:
.github/workflows/mobile-lint.yml (3 changes, vendored)
@@ -9,7 +9,8 @@ on:
- ".github/workflows/mobile-lint.yml"

env:
FLUTTER_VERSION: "3.19.4"
FLUTTER_VERSION: "3.22.0"

jobs:
lint:
@@ -1,24 +1,28 @@
## Releases

Conceptually, the release is straightforward: We trigger a GitHub workflow that
creates a draft release with artifacts built. When ready, we publish that
release. The download links on our website, and existing apps already check the
latest GitHub release and update accordingly.

Conceptually, the release is straightforward:

1. We trigger a GitHub workflow that creates a draft release with the build.

2. When ready, we publish that release.

3. The download links on our website, and existing apps already check the
   latest GitHub release and update automatically (see the sketch below).
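The "check the latest GitHub release" step in item 3 boils down to polling the
releases API of the release repository. Below is a minimal TypeScript sketch of
such a check; the endpoint is the standard GitHub API, but the version
comparison and the `currentVersion` parameter are illustrative assumptions, not
the actual updater logic (the desktop app relies on electron-builder's auto
updater, as described next).

```ts
// Minimal sketch: ask GitHub for the latest published release of the release
// repository and report whether it differs from what we are running.
// Assumptions: plain semver-style tags like "v1.2.3"; no pre-releases.
interface LatestRelease {
    tag_name: string;
    html_url: string;
}

export const checkForNewerRelease = async (currentVersion: string) => {
    const res = await fetch(
        "https://api.github.com/repos/ente-io/photos-desktop/releases/latest",
    );
    if (!res.ok) throw new Error(`GitHub API returned HTTP ${res.status}`);
    const release = (await res.json()) as LatestRelease;
    // Strip the leading "v" from the tag to get a comparable version string.
    const latestVersion = release.tag_name.replace(/^v/, "");
    return {
        updateAvailable: latestVersion !== currentVersion,
        latestVersion,
        downloadURL: release.html_url,
    };
};
```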
The complication comes from the fact that electron-builder's auto updater (the
mechanism that we use for auto updates) doesn't work with monorepos. So we need
to keep a separate (non-mono) repository just for doing releases.
to keep a separate repository just for holding the releases.

- Source code lives here, in [ente-io/ente](https://github.com/ente-io/ente).

- Releases are done from
  [ente-io/photos-desktop](https://github.com/ente-io/photos-desktop).

## Workflow - Release Candidates
## Workflow - Release candidates

Nightly RC builds of `main` are published by a scheduled workflow automatically.
If needed, these builds can also be manually triggered, including specifying the
source repository branch to build:
If needed, these builds can also be manually triggered, and the branch of the
source repository to build (default "main") also specified:

```sh
gh workflow run desktop-release.yml --source=<branch>
@@ -46,17 +50,44 @@ Each such workflow run will update the artifacts attached to the same
```

This'll trigger the workflow and create a new draft release, which you can
publish after adding the release notes.
publish after adding the release notes. Once you publish, the release goes live.

The release is done at this point, and we can now start a new RC train for
subsequent nightly builds.
The release is done at this point, and we can now create a new pre-release to
host subsequent nightly builds.

1. Update `package.json` in the source repo to use version `1.x.x-rc`. Create a
   new draft release in the release repo with title `1.x.x-rc`. In the tag
   input enter `v1.x.x-rc` and select the option to "create a new tag on
   publish".
1. Update `package.json` in the source repo to use version `1.x.x-rc`, and
   merge these changes into `main`.

## Post build
2. In the release repo:

   ```sh
   git tag 1.x.x-rc
   git push origin 1.x.x-rc
   ```

3. Once the workflow finishes and the draft release is created, edit its
   description to "Nightly builds", set it as a pre-release and publish.

4. Delete the pre-release for the previous (already released) version.

## Workflow - Extra pre-releases

To create extra one-off pre-releases in addition to the nightly `1.x.x-rc` ones,

1. In your branch in the source repository, set the version in `package.json`
   to something different, say `1.x.x-my-test`.

2. Create a new draft release in the release repo with title `1.x.x-test`. In
   the tag input enter `v1.x.x-test` and select the option to "create a new tag
   on publish".

3. Trigger the workflow in the release repo:

   ```sh
   gh workflow run desktop-release.yml --source=my-branch
   ```

## Details

The GitHub Action runs on Windows, Linux and macOS. It produces the artifacts
defined in the `build` value in `package.json`.
@@ -46,7 +46,7 @@ You can alternatively install the build from PlayStore or F-Droid.

## 🧑‍💻 Building from source

1. [Install Flutter v3.19.4](https://flutter.dev/docs/get-started/install).
1. [Install Flutter v3.22.0](https://flutter.dev/docs/get-started/install).

2. Pull in all submodules with `git submodule update --init --recursive`
@@ -427,7 +427,7 @@ SPEC CHECKSUMS:
home_widget: 0434835a4c9a75704264feff6be17ea40e0f0d57
image_editor_common: d6f6644ae4a6de80481e89fe6d0a8c49e30b4b43
in_app_purchase_storekit: 0e4b3c2e43ba1e1281f4f46dd71b0593ce529892
integration_test: 13825b8a9334a850581300559b8839134b124670
integration_test: ce0a3ffa1de96d1a89ca0ac26fca7ea18a749ef4
libwebp: 1786c9f4ff8a279e4dac1e8f385004d5fc253009
local_auth_darwin: c7e464000a6a89e952235699e32b329457608d98
local_auth_ios: 5046a18c018dd973247a0564496c8898dbb5adf9
@@ -7,20 +7,19 @@ import "package:photos/ui/map/marker_image.dart";

Marker mapMarker(
ImageMarker imageMarker,
String key, {
ValueKey<int> key, {
Size markerSize = MapView.defaultMarkerSize,
}) {
return Marker(
//-6.5 is for taking in the height of the MarkerPointer
anchorPos: AnchorPos.exactly(Anchor(markerSize.height / 2, -6.5)),
key: Key(key),
alignment: Alignment.topCenter,
key: key,
width: markerSize.width,
height: markerSize.height,
point: LatLng(
imageMarker.latitude,
imageMarker.longitude,
),
builder: (context) => MarkerImage(
child: MarkerImage(
file: imageMarker.imageFile,
seperator: (MapView.defaultMarkerSize.height + 10) -
(MapView.defaultMarkerSize.height - markerSize.height),
@@ -114,7 +114,7 @@ class _MapScreenState extends State<MapScreen> {
);

Timer(Duration(milliseconds: debounceDuration), () {
calculateVisibleMarkers(mapController.bounds!);
calculateVisibleMarkers(mapController.camera.visibleBounds);
setState(() {
isLoading = false;
});
@@ -4,8 +4,8 @@ import "package:flutter_map_marker_cluster/flutter_map_marker_cluster.dart";
import "package:latlong2/latlong.dart";
import "package:photos/ui/map/image_marker.dart";
import "package:photos/ui/map/map_button.dart";
import 'package:photos/ui/map/map_gallery_tile.dart';
import 'package:photos/ui/map/map_gallery_tile_badge.dart';
import "package:photos/ui/map/map_gallery_tile.dart";
import "package:photos/ui/map/map_gallery_tile_badge.dart";
import "package:photos/ui/map/map_marker.dart";
import "package:photos/ui/map/tile/layers.dart";
import "package:photos/utils/debouncer.dart";
@@ -60,11 +60,6 @@ class _MapViewState extends State<MapView> {
_markers = _buildMakers();
}

@override
void dispose() {
super.dispose();
}

void onChange(LatLngBounds bounds) {
_debouncer.run(
() async {
@@ -85,55 +80,44 @@ class _MapViewState extends State<MapView> {
widget.onTap!.call();
}
: null,
center: widget.center,
initialCenter: widget.center,
minZoom: widget.minZoom,
maxZoom: widget.maxZoom,
enableMultiFingerGestureRace: true,
zoom: widget.initialZoom,
maxBounds: LatLngBounds(
const LatLng(-90, -180),
const LatLng(90, 180),
interactionOptions: InteractionOptions(
flags: widget.interactiveFlags,
enableMultiFingerGestureRace: true,
),
initialZoom: widget.initialZoom,
cameraConstraint: CameraConstraint.contain(
bounds: LatLngBounds(
const LatLng(-90, -180),
const LatLng(90, 180),
),
),
onPositionChanged: (position, hasGesture) {
if (position.bounds != null) {
onChange(position.bounds!);
}
},
interactiveFlags: widget.interactiveFlags,
),
nonRotatedChildren: [
Padding(
padding: EdgeInsets.only(
bottom: widget.bottomSheetDraggableAreaHeight,
),
child: OSMFranceTileAttributes(
options: widget.mapAttributionOptions,
),
),
],
children: [
const OSMFranceTileLayer(),
MarkerClusterLayerWidget(
options: MarkerClusterLayerOptions(
anchorPos: AnchorPos.align(AnchorAlign.top),
alignment: Alignment.topCenter,
maxClusterRadius: 100,
showPolygon: false,
size: widget.markerSize,
fitBoundsOptions: const FitBoundsOptions(
padding: EdgeInsets.all(80),
),
padding: const EdgeInsets.all(80),
markers: _markers,
onClusterTap: (_) {
onChange(widget.controller.bounds!);
onChange(widget.controller.camera.visibleBounds);
},
builder: (context, List<Marker> markers) {
final index = int.parse(
markers.first.key
.toString()
.replaceAll(RegExp(r'[^0-9]'), ''),
);
final String clusterKey =
'map-badge-$index-len-${markers.length}';
final valueKey = markers.first.key as ValueKey;
final index = valueKey.value as int;

final clusterKey = 'map-badge-$index-len-${markers.length}';

return Stack(
key: ValueKey(clusterKey),
@@ -148,6 +132,14 @@ class _MapViewState extends State<MapView> {
},
),
),
Padding(
padding: EdgeInsets.only(
bottom: widget.bottomSheetDraggableAreaHeight,
),
child: OSMFranceTileAttributes(
options: widget.mapAttributionOptions,
),
),
],
),
widget.showControls
@@ -175,8 +167,8 @@ class _MapViewState extends State<MapView> {
icon: Icons.add,
onPressed: () {
widget.controller.move(
widget.controller.center,
widget.controller.zoom + 1,
widget.controller.camera.center,
widget.controller.camera.zoom + 1,
);
},
heroTag: 'zoom-in',
@@ -185,8 +177,8 @@ class _MapViewState extends State<MapView> {
icon: Icons.remove,
onPressed: () {
widget.controller.move(
widget.controller.center,
widget.controller.zoom - 1,
widget.controller.camera.center,
widget.controller.camera.zoom - 1,
);
},
heroTag: 'zoom-out',
@@ -204,7 +196,7 @@ class _MapViewState extends State<MapView> {
final imageMarker = widget.imageMarkers[index];
return mapMarker(
imageMarker,
index.toString(),
ValueKey(index),
markerSize: widget.markerSize,
);
});
@@ -3,7 +3,7 @@

import "dart:async";

import "package:flutter/material.dart";
import "package:flutter_map/plugin_api.dart";
import "package:flutter_map/flutter_map.dart";
import "package:photos/extensions/list.dart";
import "package:photos/theme/colors.dart";
import "package:photos/theme/ente_theme.dart";
@@ -194,10 +194,8 @@ class MapAttributionWidgetState extends State<MapAttributionWidget> {
context,
() {
setState(() => popupExpanded = true);
mapEventSubscription = FlutterMapState.of(context)
.mapController
.mapEventStream
.listen((e) {
mapEventSubscription =
MapController().mapEventStream.listen((e) {
setState(() => popupExpanded = false);
mapEventSubscription?.cancel();
});
@@ -45,10 +45,10 @@ packages:
dependency: "direct main"
description:
name: animated_list_plus
sha256: fe66f9c300d715254727fbdf050487844d17b013fec344fa28081d29bddbdf1a
sha256: fb3d7f1fbaf5af84907f3c739236bacda8bf32cbe1f118dd51510752883ff50c
url: "https://pub.dev"
source: hosted
version: "0.4.5"
version: "0.5.2"
animated_stack_widget:
dependency: transitive
description:
@@ -764,26 +764,26 @@ packages:
dependency: "direct main"
description:
name: flutter_map
sha256: "5286f72f87deb132daa1489442d6cc46e986fc105cb727d9ae1b602b35b1d1f3"
sha256: "87cc8349b8fa5dccda5af50018c7374b6645334a0d680931c1fe11bce88fa5bb"
url: "https://pub.dev"
source: hosted
version: "5.0.0"
version: "6.2.1"
flutter_map_marker_cluster:
dependency: "direct main"
description:
name: flutter_map_marker_cluster
sha256: "14bb31b9dd3a759ab4a1ba320d19bbb554d8d7952c8812029c6f6b7bda956906"
sha256: a324f48da5ee83a3f29fd8d08b4b1e6e3114ff5c6cab910124d6a2e1f06f08cc
url: "https://pub.dev"
source: hosted
version: "1.2.0"
version: "1.3.6"
flutter_map_marker_popup:
dependency: transitive
description:
name: flutter_map_marker_popup
sha256: be209c68b19d4c10d9a2f5911e45f7c579624c43a353adb9bf0f2fec0cf30b8c
sha256: ec563bcbae24a18ac16815fb75ac5ab33ccba609e14db70e252a67de19c6639c
url: "https://pub.dev"
source: hosted
version: "5.2.0"
version: "6.1.2"
flutter_native_splash:
dependency: "direct main"
description:
@@ -971,10 +971,10 @@ packages:
dependency: "direct main"
description:
name: home_widget
sha256: "29565bfee4b32eaf9e7e8b998d504618b779a74b2b1ac62dd4dac7468e66f1a3"
sha256: "2a0fdd6267ff975bd07bedf74686bd5577200f504f5de36527ac1b56bdbe68e3"
url: "https://pub.dev"
source: hosted
version: "0.5.0"
version: "0.6.0"
html:
dependency: transitive
description:
@@ -1152,26 +1152,26 @@ packages:
dependency: transitive
description:
name: leak_tracker
sha256: "78eb209deea09858f5269f5a5b02be4049535f568c07b275096836f01ea323fa"
sha256: "7f0df31977cb2c0b88585095d168e689669a2cc9b97c309665e3386f3e9d341a"
url: "https://pub.dev"
source: hosted
version: "10.0.0"
version: "10.0.4"
leak_tracker_flutter_testing:
dependency: transitive
description:
name: leak_tracker_flutter_testing
sha256: b46c5e37c19120a8a01918cfaf293547f47269f7cb4b0058f21531c2465d6ef0
sha256: "06e98f569d004c1315b991ded39924b21af84cf14cc94791b8aea337d25b57f8"
url: "https://pub.dev"
source: hosted
version: "2.0.1"
version: "3.0.3"
leak_tracker_testing:
dependency: transitive
description:
name: leak_tracker_testing
sha256: a597f72a664dbd293f3bfc51f9ba69816f84dcd403cdac7066cb3f6003f3ab47
sha256: "6ba465d5d76e67ddf503e1161d1f4a6bc42306f9d66ca1e8f079a47290fb06d3"
url: "https://pub.dev"
source: hosted
version: "2.0.1"
version: "3.0.1"
like_button:
dependency: "direct main"
description:
@@ -1252,6 +1252,14 @@ packages:
url: "https://pub.dev"
source: hosted
version: "1.0.10"
logger:
dependency: transitive
description:
name: logger
sha256: af05cc8714f356fd1f3888fb6741cbe9fbe25cdb6eedbab80e1a6db21047d4a4
url: "https://pub.dev"
source: hosted
version: "2.3.0"
logging:
dependency: "direct main"
description:
@@ -1368,10 +1376,10 @@ packages:
dependency: transitive
description:
name: meta
sha256: d584fa6707a52763a52446f02cc621b077888fb63b93bbcb1143a7be5a0c0c04
sha256: "7687075e408b093f36e6bbf6c91878cc0d4cd10f409506f7bc996f68220b9136"
url: "https://pub.dev"
source: hosted
version: "1.11.0"
version: "1.12.0"
mgrs_dart:
dependency: transitive
description:
@@ -2144,10 +2152,10 @@ packages:
dependency: "direct main"
description:
name: styled_text
sha256: f72928d1ebe8cb149e3b34a689cb1ddca696b808187cf40ac3a0bd183dff379c
sha256: fd624172cf629751b4f171dd0ecf9acf02a06df3f8a81bb56c0caa4f1df706c3
url: "https://pub.dev"
source: hosted
version: "7.0.0"
version: "8.1.0"
sync_http:
dependency: transitive
description:
@@ -2160,18 +2168,18 @@ packages:
dependency: "direct main"
description:
name: syncfusion_flutter_core
sha256: "9be1bb9bbdb42823439a18da71484f1964c14dbe1c255ab1b931932b12fa96e8"
sha256: "63108a33f9b0d89f7b6b56cce908b8e519fe433dbbe0efcf41ad3e8bb2081bd9"
url: "https://pub.dev"
source: hosted
version: "19.4.56"
version: "25.2.5"
syncfusion_flutter_sliders:
dependency: "direct main"
description:
name: syncfusion_flutter_sliders
sha256: "1f6a63ccab4180b544074b9264a20f01ee80b553de154192fe1d7b434089d3c2"
sha256: f27310bedc0e96e84054f0a70ac593d1a3c38397c158c5226ba86027ad77b2c1
url: "https://pub.dev"
source: hosted
version: "19.4.56"
version: "25.2.5"
synchronized:
dependency: "direct main"
description:
@@ -2192,26 +2200,26 @@ packages:
dependency: "direct dev"
description:
name: test
sha256: a1f7595805820fcc05e5c52e3a231aedd0b72972cb333e8c738a8b1239448b6f
sha256: "7ee446762c2c50b3bd4ea96fe13ffac69919352bd3b4b17bac3f3465edc58073"
url: "https://pub.dev"
source: hosted
version: "1.24.9"
version: "1.25.2"
test_api:
dependency: transitive
description:
name: test_api
sha256: "5c2f730018264d276c20e4f1503fd1308dfbbae39ec8ee63c5236311ac06954b"
sha256: "9955ae474176f7ac8ee4e989dadfb411a58c30415bcfb648fa04b2b8a03afa7f"
url: "https://pub.dev"
source: hosted
version: "0.6.1"
version: "0.7.0"
test_core:
dependency: transitive
description:
name: test_core
sha256: a757b14fc47507060a162cc2530d9a4a2f92f5100a952c7443b5cad5ef5b106a
sha256: "2bc4b4ecddd75309300d8096f781c0e3280ca1ef85beda558d33fcbedc2eead4"
url: "https://pub.dev"
source: hosted
version: "0.5.9"
version: "0.6.0"
timezone:
dependency: transitive
description:
@@ -2441,10 +2449,10 @@ packages:
dependency: transitive
description:
name: vm_service
sha256: b3d56ff4341b8f182b96aceb2fa20e3dcb336b9f867bc0eafc0de10f1048e957
sha256: "3923c89304b715fb1eb6423f017651664a03bf5f4b29983627c4da791f74a4ec"
url: "https://pub.dev"
source: hosted
version: "13.0.0"
version: "14.2.1"
volume_controller:
dependency: transitive
description:
@@ -2591,4 +2599,4 @@ packages:
version: "3.1.2"
sdks:
dart: ">=3.3.0 <4.0.0"
flutter: ">=3.19.0"
flutter: ">=3.20.0-1.2.pre"
@@ -21,7 +21,7 @@ environment:
dependencies:
adaptive_theme: ^3.1.0
animate_do: ^2.0.0
animated_list_plus: ^0.4.5
animated_list_plus: ^0.5.2
archive: ^3.1.2
background_fetch: ^1.2.1
battery_info: ^1.1.1
@@ -83,8 +83,8 @@ dependencies:
flutter_local_notifications: ^17.0.0
flutter_localizations:
sdk: flutter
flutter_map: ^5.0.0
flutter_map_marker_cluster: ^1.2.0
flutter_map: ^6.2.0
flutter_map_marker_cluster: ^1.3.6
flutter_native_splash: ^2.2.0+1
flutter_password_strength: ^0.1.6
flutter_secure_storage: ^8.0.0
@@ -93,13 +93,13 @@ dependencies:
fluttertoast: ^8.0.6
freezed_annotation: ^2.4.1
google_nav_bar: ^5.0.5
home_widget: ^0.5.0
home_widget: ^0.6.0
html_unescape: ^2.0.0
http: ^1.1.0
image: ^4.0.17
image_editor: ^1.3.0
in_app_purchase: ^3.0.7
intl: ^0.18.0
intl: ^0.19.0
json_annotation: ^4.8.0
latlong2: ^0.9.0
like_button: ^2.0.5
@@ -152,9 +152,9 @@ dependencies:
sqlite3_flutter_libs: ^0.5.20
sqlite_async: ^0.6.1
step_progress_indicator: ^1.0.2
styled_text: ^7.0.0
syncfusion_flutter_core: ^19.2.49
syncfusion_flutter_sliders: ^19.2.49
styled_text: ^8.1.0
syncfusion_flutter_core: ^25.2.5
syncfusion_flutter_sliders: ^25.2.5
synchronized: ^3.1.0
tuple: ^2.0.0
uni_links: ^0.5.1
@@ -177,6 +177,7 @@ dependency_overrides:
# Remove this after removing dependency from flutter_sodium.
# Newer flutter packages depends on ffi > 2.0.0 while flutter_sodium depends on ffi < 2.0.0
ffi: 2.1.0
intl: 0.18.1
video_player:
git:
url: https://github.com/ente-io/packages.git
@@ -30,7 +30,7 @@ interface FaceDBSchema extends DBSchema {
"file-status": {
key: number;
value: FileStatus;
indexes: { isIndexable: number };
indexes: { status: FileStatus["status"] };
};
}

@@ -38,27 +38,25 @@ interface FileStatus {
/** The ID of the {@link EnteFile} whose indexing status we represent. */
fileID: number;
/**
* `1` if this file needs to be indexed, `0` otherwise.
* The status of the file.
*
* > Somewhat confusingly, we also have a (IndexedDB) "index" on this field.
* That (IDB) index allows us to efficiently select {@link fileIDs} that
* still need indexing (i.e. entries where {@link isIndexed} is `1`).
* - "indexable" - This file is something that we can index, but it is yet
* to be indexed.
*
* [Note: Boolean IndexedDB indexes].
* - "indexed" - We have a corresponding entry for this file in the
* "face-index" object (either indexed locally or fetched from remote).
*
* IndexedDB does not (currently) support indexes on boolean fields.
* https://github.com/w3c/IndexedDB/issues/76
* - "failed" - Indexing was attempted but failed.
*
* As a workaround, we use numeric fields where `0` denotes `false` and `1`
* denotes `true`.
* We also have a (IndexedDB) "index" on this field to allow us to
* efficiently select or count {@link fileIDs} that fall into various
* buckets.
*/
isIndexable: number;
status: "indexable" | "indexed" | "failed";
/**
* The number of times attempts to index this file failed.
*
* This is guaranteed to be `0` for files which have already been
* successfully indexed (i.e. files for which `isIndexable` is 0 and which
* have a corresponding entry in the "face-index" object store).
* This is guaranteed to be `0` for files with status "indexed".
*/
failureCount: number;
}
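The comment above describes the new string-valued `status` field and the
IndexedDB index on it (created in the next hunk via
`createIndex("status", "status")`). As a standalone illustration, here is a
minimal sketch, using the `idb` package that this module is built on, of how
such an index can be declared and then queried with `IDBKeyRange.only`; the
database and store names here are simplified stand-ins, not the module's
actual schema.

```ts
import { openDB, type DBSchema } from "idb";

// Simplified stand-in schema: one store keyed by fileID, with an index on the
// string-valued "status" field (IndexedDB cannot index booleans, which is why
// a string union or a numeric flag is used instead).
interface SketchSchema extends DBSchema {
    "file-status": {
        key: number;
        value: { fileID: number; status: "indexable" | "indexed" | "failed" };
        indexes: { status: string };
    };
}

const demo = async () => {
    const db = await openDB<SketchSchema>("status-index-sketch", 1, {
        upgrade(db) {
            const store = db.createObjectStore("file-status", {
                keyPath: "fileID",
            });
            // Index on the "status" property, so we can select/count by bucket.
            store.createIndex("status", "status");
        },
    });

    await db.put("file-status", { fileID: 1, status: "indexable" });
    await db.put("file-status", { fileID: 2, status: "indexed" });

    // Count and fetch entries in a particular bucket using the index.
    const indexableCount = await db.countFromIndex(
        "file-status",
        "status",
        IDBKeyRange.only("indexable"),
    );
    const indexableIDs = await db.getAllKeysFromIndex(
        "file-status",
        "status",
        IDBKeyRange.only("indexable"),
    );
    console.log({ indexableCount, indexableIDs });
};

void demo();
```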
@@ -91,7 +89,7 @@ const openFaceDB = async () => {
db.createObjectStore("face-index", { keyPath: "fileID" });
db.createObjectStore("file-status", {
keyPath: "fileID",
}).createIndex("isIndexable", "isIndexable");
}).createIndex("status", "status");
}
},
blocking() {
@@ -176,11 +174,11 @@ export const saveFaceIndex = async (faceIndex: FaceIndex) => {
indexStore.put(faceIndex),
statusStore.put({
fileID: faceIndex.fileID,
isIndexable: 0,
status: "indexed",
failureCount: 0,
}),
tx.done,
]);
]).then(() => {} /* convert result to void */);
};

/**
@@ -208,7 +206,7 @@ export const addFileEntry = async (fileID: number) => {
if ((await tx.store.getKey(fileID)) === undefined) {
await tx.store.put({
fileID,
isIndexable: 1,
status: "indexable",
failureCount: 0,
});
}
@@ -216,15 +214,16 @@ export const addFileEntry = async (fileID: number) => {
};

/**
* Sync entries in the face DB to align with the given list of local indexable
* file IDs.
* Sync entries in the face DB to align with the state of local files outside
* face DB.
*
* @param localFileIDs The IDs of all the files that the client is aware of,
* filtered to only keep the files that the user owns and the formats that can
* be indexed by our current face indexing pipeline.
* @param localFileIDs Local {@link EnteFile}s, keyed by their IDs. These are
* all the files that the client is aware of, filtered to only keep the files
* that the user owns and the formats that can be indexed by our current face
* indexing pipeline.
*
* This function syncs the state of file entries in face DB to the state of file
* entries stored otherwise by the local client.
* entries stored otherwise by the client locally.
*
* - Files (identified by their ID) that are present locally but are not yet in
* face DB get a fresh entry in face DB (and are marked as indexable).
@@ -232,7 +231,7 @@ export const addFileEntry = async (fileID: number) => {
* - Files that are not present locally but still exist in face DB are removed
* from face DB (including its face index, if any).
*/
export const syncWithLocalIndexableFileIDs = async (localFileIDs: number[]) => {
export const syncWithLocalFiles = async (localFileIDs: number[]) => {
const db = await faceDB();
const tx = db.transaction(["face-index", "file-status"], "readwrite");
const fdbFileIDs = await tx.objectStore("file-status").getAllKeys();
@@ -248,7 +247,7 @@ export const syncWithLocalIndexableFileIDs = async (localFileIDs: number[]) => {
newFileIDs.map((id) =>
tx.objectStore("file-status").put({
fileID: id,
isIndexable: 1,
status: "indexable",
failureCount: 0,
}),
),
@@ -258,21 +257,25 @@ export const syncWithLocalIndexableFileIDs = async (localFileIDs: number[]) => {
removedFileIDs.map((id) => tx.objectStore("face-index").delete(id)),
tx.done,
].flat(),
);
).then(() => {} /* convert result to void */);
};

/**
* Return the count of files that can be, and that have been, indexed.
*
* These counts are mutually exclusive. The total number of files that fall
* within the purview of the indexer is thus indexable + indexed.
*/
export const indexedAndIndexableCounts = async () => {
const db = await faceDB();
const tx = db.transaction(["face-index", "file-status"], "readwrite");
const indexedCount = await tx.objectStore("face-index").count();
const indexableCount = await tx
.objectStore("file-status")
.index("isIndexable")
.count(IDBKeyRange.only(1));
return { indexedCount, indexableCount };
const tx = db.transaction("file-status", "readwrite");
const indexableCount = await tx.store
.index("status")
.count(IDBKeyRange.only("indexable"));
const indexedCount = await tx.store
.index("status")
.count(IDBKeyRange.only("indexed"));
return { indexableCount, indexedCount };
};

/**
@@ -281,14 +284,16 @@ export const indexedAndIndexableCounts = async () => {
* This list is from the universe of the file IDs that the face DB knows about
* (can use {@link addFileEntry} to inform it about new files). From this
* universe, we filter out fileIDs the files corresponding to which have already
* been indexed, or for which we attempted indexing but failed.
* been indexed, or which should be ignored.
*
* @param count Limit the result to up to {@link count} items.
*/
export const unindexedFileIDs = async (count?: number) => {
export const indexableFileIDs = async (count?: number) => {
const db = await faceDB();
const tx = db.transaction("file-status", "readonly");
return tx.store.index("isIndexable").getAllKeys(IDBKeyRange.only(1), count);
return tx.store
.index("status")
.getAllKeys(IDBKeyRange.only("indexable"), count);
};

/**
@@ -303,10 +308,10 @@ export const unindexedFileIDs = async (count?: number) => {
export const markIndexingFailed = async (fileID: number) => {
const db = await faceDB();
const tx = db.transaction("file-status", "readwrite");
const failureCount = ((await tx.store.get(fileID)).failureCount ?? 0) + 1;
const failureCount = ((await tx.store.get(fileID))?.failureCount ?? 0) + 1;
await tx.store.put({
fileID,
isIndexable: 0,
status: "failed",
failureCount,
});
return tx.done;
@@ -45,8 +45,7 @@ import type { Box, Dimensions, Face, Point } from "./types";
* available. These are used when they are provided, otherwise the file is
* downloaded and decrypted from remote.
*
* @param userAgent The UA of the current client (the client that is generating
* the embedding).
* @param userAgent The UA of the client that is doing the indexing (us).
*/
export const indexFaces = async (
enteFile: EnteFile,
@@ -78,11 +77,20 @@ export const indexFaces = async (
/**
* Return a "renderable" image blob, using {@link file} if present otherwise
* downloading the source image corresponding to {@link enteFile} from remote.
*
* For videos their thumbnail is used.
*/
const renderableImageBlob = async (enteFile: EnteFile, file: File) =>
file
? getRenderableImage(enteFile.metadata.title, file)
: fetchRenderableBlob(enteFile);
const renderableImageBlob = async (enteFile: EnteFile, file: File) => {
const fileType = enteFile.metadata.fileType;
if (fileType == FILE_TYPE.VIDEO) {
const thumbnailData = await DownloadManager.getThumbnail(enteFile);
return new Blob([thumbnailData]);
} else {
return file
? getRenderableImage(enteFile.metadata.title, file)
: fetchRenderableBlob(enteFile);
}
};

const fetchRenderableBlob = async (enteFile: EnteFile) => {
const fileStream = await DownloadManager.getFile(enteFile);
@@ -1,18 +1,16 @@
import { FILE_TYPE } from "@/media/file-type";
import { ComlinkWorker } from "@/next/worker/comlink-worker";
import { ensure } from "@/utils/ensure";
import { wait } from "@/utils/promise";
import { type Remote } from "comlink";
import { getLocalFiles } from "services/fileService";
import machineLearningService from "services/machineLearning/machineLearningService";
import { getAllLocalFiles } from "services/fileService";
import mlWorkManager from "services/machineLearning/mlWorkManager";
import type { EnteFile } from "types/file";
import { isInternalUserForML } from "utils/user";
import {
faceIndex,
indexableFileIDs,
indexedAndIndexableCounts,
syncWithLocalIndexableFileIDs,
unindexedFileIDs,
syncWithLocalFiles,
} from "./db";
import { FaceIndexerWorker } from "./indexer.worker";

@@ -70,6 +68,8 @@ class FaceIndexer {
this.tick();
}

/* TODO-ML(MR): This code is not currently in use */

/**
* A promise for the lazily created singleton {@link FaceIndexerWorker} remote
* exposed by this module.
@@ -162,11 +162,11 @@ export interface FaceIndexingStatus {
}

export const faceIndexingStatus = async (): Promise<FaceIndexingStatus> => {
const isSyncing = machineLearningService.isSyncing;
const isSyncing = mlWorkManager.isSyncing;
const { indexedCount, indexableCount } = await indexedAndIndexableCounts();

let phase: FaceIndexingStatus["phase"];
if (indexedCount < indexableCount) {
if (indexableCount > 0) {
if (!isSyncing) {
phase = "scheduled";
} else {
@@ -179,7 +179,7 @@ export const faceIndexingStatus = async (): Promise<FaceIndexingStatus> => {
return {
phase,
nSyncedFiles: indexedCount,
nTotalFiles: indexableCount,
nTotalFiles: indexableCount + indexedCount,
};
};

@@ -221,28 +221,29 @@ export const setIsFaceIndexingEnabled = async (enabled: boolean) => {
};

/**
* Sync face DB with the local indexable files that we know about. Then return
* the next {@link count} files that still need to be indexed.
* Sync face DB with the local (and potentially indexable) files that we know
* about. Then return the next {@link count} files that still need to be
* indexed.
*
* For more specifics of what a "sync" entails, see
* {@link syncWithLocalIndexableFileIDs}.
* For more specifics of what a "sync" entails, see {@link syncWithLocalFiles}.
*
* @param userID Limit indexing to files owned by a {@link userID}.
* @param userID Sync only files owned by a {@link userID} with the face DB.
*
* @param count Limit the resulting list of files to {@link count}.
* @param count Limit the resulting list of indexable files to {@link count}.
*/
export const getFilesToIndex = async (userID: number, count: number) => {
const localFiles = await getLocalFiles();
const indexableTypes = [FILE_TYPE.IMAGE, FILE_TYPE.LIVE_PHOTO];
const indexableFiles = localFiles.filter(
(f) =>
f.ownerID == userID && indexableTypes.includes(f.metadata.fileType),
export const syncAndGetFilesToIndex = async (
userID: number,
count: number,
): Promise<EnteFile[]> => {
const isIndexable = (f: EnteFile) => f.ownerID == userID;

const localFiles = await getAllLocalFiles();
const localFilesByID = new Map(
localFiles.filter(isIndexable).map((f) => [f.id, f]),
);

const filesByID = new Map(indexableFiles.map((f) => [f.id, f]));
await syncWithLocalFiles([...localFilesByID.keys()]);

await syncWithLocalIndexableFileIDs([...filesByID.keys()]);

const fileIDsToIndex = await unindexedFileIDs(count);
return fileIDsToIndex.map((id) => ensure(filesByID.get(id)));
const fileIDsToIndex = await indexableFileIDs(count);
return fileIDsToIndex.map((id) => ensure(localFilesByID.get(id)));
};
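As context for the hunk above: `syncAndGetFilesToIndex` is the entry point the
ML sync loop uses to pick up work in batches (the caller appears in
`machineLearningService.ts` further down in this diff). Below is a minimal,
illustrative sketch of such a batch loop; `indexOne`, `indexPendingFiles`, and
the batch size are assumptions made for the example, not names from the
codebase.

```ts
// Illustrative batch loop, assuming syncAndGetFilesToIndex(userID, count)
// returns up to `count` files that still need indexing (and an empty array
// once everything has been indexed). `indexOne` is a hypothetical per-file
// indexing function standing in for the real worker call.
import type { EnteFile } from "types/file";
import { syncAndGetFilesToIndex } from "services/face/indexer";

const batchSize = 200;

const indexOne = async (file: EnteFile) => {
    // Placeholder for the actual per-file face indexing.
    console.log(`would index file ${file.id}`);
};

export const indexPendingFiles = async (userID: number) => {
    while (true) {
        const batch = await syncAndGetFilesToIndex(userID, batchSize);
        if (batch.length == 0) break; // nothing left to index
        for (const file of batch) await indexOne(file);
    }
};
```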
@@ -20,7 +20,7 @@ import type { FaceIndex } from "./types";
* comlink workers are structured.
*/
export class FaceIndexerWorker {
/*
/**
* Index faces in a file, persist the results locally, and put them
* on remote.
*
@@ -31,6 +31,8 @@ export class FaceIndexerWorker {
* cases, pass a web {@link File} object to use its data directly for
* face indexing. If this is not provided, then the file's contents will be
* downloaded and decrypted from remote.
*
* @param userAgent The UA of the client that is doing the indexing (us).
*/
async index(enteFile: EnteFile, file: File | undefined, userAgent: string) {
const f = fileLogID(enteFile);
@@ -32,6 +32,17 @@ const ENDPOINT = getEndpoint();
const FILES_TABLE = "files";
const HIDDEN_FILES_TABLE = "hidden-files";

/**
* Return all files that we know about locally, both "normal" and "hidden".
*/
export const getAllLocalFiles = async () =>
[].concat(await getLocalFiles("normal"), await getLocalFiles("hidden"));

/**
* Return all files that we know about locally. By default it returns only
* "normal" (i.e. non-"hidden") files, but it can be passed the {@link type}
* "hidden" to get it to instead return hidden files that we know about locally.
*/
export const getLocalFiles = async (type: "normal" | "hidden" = "normal") => {
const tableName = type === "normal" ? FILES_TABLE : HIDDEN_FILES_TABLE;
const files: Array<EnteFile> =
@@ -64,12 +75,6 @@ const setLocalFiles = async (type: "normal" | "hidden", files: EnteFile[]) => {
}
};

export const getAllLocalFiles = async () => {
const normalFiles = await getLocalFiles("normal");
const hiddenFiles = await getLocalFiles("hidden");
return [...normalFiles, ...hiddenFiles];
};

export const syncFiles = async (
type: "normal" | "hidden",
collections: Collection[],
@@ -1,7 +1,7 @@
import log from "@/next/log";
import { CustomError, parseUploadErrorCodes } from "@ente/shared/error";
import PQueue from "p-queue";
import { getFilesToIndex } from "services/face/indexer";
import { syncAndGetFilesToIndex } from "services/face/indexer";
import { FaceIndexerWorker } from "services/face/indexer.worker";
import { EnteFile } from "types/file";

@@ -45,8 +45,6 @@ class MachineLearningService {
private localSyncContext: Promise<MLSyncContext>;
private syncContext: Promise<MLSyncContext>;

public isSyncing = false;

public async sync(
token: string,
userID: number,
@@ -58,7 +56,10 @@ class MachineLearningService {

const syncContext = await this.getSyncContext(token, userID, userAgent);

syncContext.outOfSyncFiles = await getFilesToIndex(userID, batchSize);
syncContext.outOfSyncFiles = await syncAndGetFilesToIndex(
userID,
batchSize,
);

if (syncContext.outOfSyncFiles.length > 0) {
await this.syncFiles(syncContext);
@@ -70,7 +71,6 @@ class MachineLearningService {
}

private async syncFiles(syncContext: MLSyncContext) {
this.isSyncing = true;
try {
const functions = syncContext.outOfSyncFiles.map(
(outOfSyncfile) => async () => {
@@ -90,7 +90,6 @@ class MachineLearningService {
syncContext.error = error;
}
await syncContext.syncQueue.onIdle();
this.isSyncing = false;
}

private async getSyncContext(
@@ -139,12 +138,18 @@ class MachineLearningService {
}

public async syncLocalFile(
// eslint-disable-next-line @typescript-eslint/no-unused-vars
token: string,
// eslint-disable-next-line @typescript-eslint/no-unused-vars
userID: number,
// eslint-disable-next-line @typescript-eslint/no-unused-vars
userAgent: string,
// eslint-disable-next-line @typescript-eslint/no-unused-vars
enteFile: EnteFile,
// eslint-disable-next-line @typescript-eslint/no-unused-vars
localFile?: globalThis.File,
) {
/* TODO-ML(MR): Currently not used
const syncContext = await this.getLocalSyncContext(
token,
userID,
@@ -165,6 +170,7 @@ class MachineLearningService {
} catch (e) {
console.error("Error while syncing local file: ", enteFile.id, e);
}
*/
}

private async syncFileWithErrorHandler(
@@ -97,6 +97,8 @@ class MLWorkManager {
private liveSyncWorker: ComlinkWorker<typeof DedicatedMLWorker>;
private mlSearchEnabled: boolean;

public isSyncing = false;

constructor() {
this.liveSyncQueue = new PQueue({
concurrency: 1,
@@ -270,6 +272,7 @@ class MLWorkManager {
* things pending to process, so we should chug along at full speed.
*/
private async runMLSyncJob(): Promise<boolean> {
this.isSyncing = true;
try {
// TODO: skipping is not required if we are caching chunks through service worker
// currently worker chunk itself is not loaded when network is not there
@@ -290,6 +293,8 @@ class MLWorkManager {
// TODO: redirect/refresh to gallery in case of session_expired, stop ml sync job
} catch (e) {
log.error("Failed to run MLSync Job", e);
} finally {
this.isSyncing = false;
}
}
@@ -20,7 +20,7 @@ import { getFormattedDate } from "utils/search";
import { clipService, computeClipMatchScore } from "./clip-service";
import { localCLIPEmbeddings } from "./embeddingService";
import { getLatestEntities } from "./entityService";
import { faceIndexingStatus } from "./face/indexer";
import { faceIndexingStatus, isFaceIndexingEnabled } from "./face/indexer";
import locationSearchService, { City } from "./locationSearchService";

const DIGITS = new Set(["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]);
@@ -29,7 +29,11 @@ const CLIP_SCORE_THRESHOLD = 0.23;

export const getDefaultOptions = async () => {
return [
await getIndexStatusSuggestion(),
// TODO-ML(MR): Skip this for now if indexing is disabled (eventually
// the indexing status should not be tied to results).
...((await isFaceIndexingEnabled())
? [await getIndexStatusSuggestion()]
: []),
...(await convertSuggestionsToOptions(await getAllPeopleSuggestion())),
].filter((t) => !!t);
};
@@ -565,7 +565,7 @@
"VIDEO": "Video",
"LIVE_PHOTO": "Live foto",
"editor": {
"crop": ""
"crop": "Bijsnijden"
},
"CONVERT": "Converteren",
"CONFIRM_EDITOR_CLOSE_MESSAGE": "Weet u zeker dat u de editor wilt afsluiten?",