diff --git a/src/appConstants/paths.ts b/src/appConstants/paths.ts
index 9b4790c15..09a93c63c 100644
--- a/src/appConstants/paths.ts
+++ b/src/appConstants/paths.ts
@@ -8,4 +8,6 @@ export const photoUploadPath = `${RNFS.DocumentDirectoryPath}/photoUploads`;
export const rotatedOriginalPhotosPath = `${RNFS.DocumentDirectoryPath}/rotatedOriginalPhotos`;
+export const sentinelFilePath = `${RNFS.DocumentDirectoryPath}/sentinelFiles`;
+
export const soundUploadPath = `${RNFS.DocumentDirectoryPath}/soundUploads`;
diff --git a/src/components/App.js b/src/components/App.js
index f9f951616..48006f6fe 100644
--- a/src/components/App.js
+++ b/src/components/App.js
@@ -13,6 +13,7 @@ import Realm from "realm";
import clearCaches from "sharedHelpers/clearCaches.ts";
import { log } from "sharedHelpers/logger";
import { addARCameraFiles } from "sharedHelpers/mlModel.ts";
+import { findAndLogSentinelFiles } from "sharedHelpers/sentinelFiles.ts";
import {
useCurrentUser,
useIconicTaxa,
@@ -95,6 +96,7 @@ const App = ( { children }: Props ): Node => {
useEffect( ( ) => {
addARCameraFiles( );
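+    // Report and clean up any sentinel files left behind by an interrupted camera session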
+ findAndLogSentinelFiles( );
}, [] );
useEffect( ( ) => {
diff --git a/src/components/Camera/AICamera/AICamera.js b/src/components/Camera/AICamera/AICamera.js
index 5583ed6f6..4337bb898 100644
--- a/src/components/Camera/AICamera/AICamera.js
+++ b/src/components/Camera/AICamera/AICamera.js
@@ -1,5 +1,6 @@
// @flow
+import { useNavigation } from "@react-navigation/native";
import classnames from "classnames";
import FadeInOutView from "components/Camera/FadeInOutView";
import useRotation from "components/Camera/hooks/useRotation.ts";
@@ -14,11 +15,12 @@ import { useSafeAreaInsets } from "react-native-safe-area-context";
import { VolumeManager } from "react-native-volume-manager";
import { convertOfflineScoreToConfidence } from "sharedHelpers/convertScores.ts";
import { log } from "sharedHelpers/logger";
+import { deleteSentinelFile, logStage } from "sharedHelpers/sentinelFiles.ts";
import {
useDebugMode, usePerformance, useTranslation
} from "sharedHooks";
import { isDebugMode } from "sharedHooks/useDebugMode";
-// import type { UserLocation } from "sharedHooks/useWatchPosition";
+import useStore from "stores/useStore";
import colors from "styles/tailwindColors";
import {
@@ -69,6 +71,9 @@ const AICamera = ( {
setAiSuggestion,
userLocation
}: Props ): Node => {
+ const navigation = useNavigation( );
+ const sentinelFileName = useStore( state => state.sentinelFileName );
+
const hasFlash = device?.hasFlash;
const { isDebug } = useDebugMode( );
const {
@@ -127,6 +132,7 @@ const AICamera = ( {
};
const handleTakePhoto = useCallback( async ( ) => {
+ await logStage( sentinelFileName, "take_photo_start" );
setHasTakenPhoto( true );
setAiSuggestion( showPrediction && result );
await takePhotoAndStoreUri( {
@@ -135,7 +141,13 @@ const AICamera = ( {
navigateImmediately: true
} );
setHasTakenPhoto( false );
- }, [setAiSuggestion, takePhotoAndStoreUri, result, showPrediction] );
+ }, [
+ setAiSuggestion,
+ sentinelFileName,
+ takePhotoAndStoreUri,
+ result,
+ showPrediction
+ ] );
useEffect( () => {
if ( initialVolume === null ) {
@@ -165,6 +177,11 @@ const AICamera = ( {
};
}, [handleTakePhoto, hasTakenPhoto, initialVolume] );
+ const handleClose = async ( ) => {
+ await deleteSentinelFile( sentinelFileName );
+ navigation.goBack( );
+ };
+
return (
<>
{device && (
@@ -255,6 +272,7 @@ const AICamera = ( {
flipCamera={onFlipCamera}
fps={fps}
hasFlash={hasFlash}
+ handleClose={handleClose}
modelLoaded={modelLoaded}
numStoredResults={numStoredResults}
rotatableAnimatedStyle={rotatableAnimatedStyle}
diff --git a/src/components/Camera/AICamera/AICameraButtons.tsx b/src/components/Camera/AICamera/AICameraButtons.tsx
index 5d51dc081..99aaa8916 100644
--- a/src/components/Camera/AICamera/AICameraButtons.tsx
+++ b/src/components/Camera/AICamera/AICameraButtons.tsx
@@ -22,6 +22,7 @@ interface Props {
cropRatio?: string;
flipCamera: ( _event: GestureResponderEvent ) => void;
fps?: number;
+ handleClose: ( ) => void;
hasFlash: boolean;
modelLoaded: boolean;
numStoredResults?: number;
@@ -48,6 +49,7 @@ const AICameraButtons = ( {
cropRatio,
flipCamera,
fps,
+ handleClose,
hasFlash,
modelLoaded,
numStoredResults,
@@ -89,7 +91,7 @@ const AICameraButtons = ( {
className="absolute left-0 bottom-[17px] h-full justify-end flex gap-y-9"
pointerEvents="box-none"
>
-            <Close />
+            <Close handleClose={handleClose} />
diff --git a/src/components/Camera/AICamera/FrameProcessorCamera.js b/src/components/Camera/AICamera/FrameProcessorCamera.js
--- a/src/components/Camera/AICamera/FrameProcessorCamera.js
+++ b/src/components/Camera/AICamera/FrameProcessorCamera.js
}: Props ): Node => {
+ const sentinelFileName = useStore( state => state.sentinelFileName );
const { deviceOrientation } = useDeviceOrientation();
const [lastTimestamp, setLastTimestamp] = useState( undefined );
@@ -113,7 +115,7 @@ const FrameProcessorCamera = ( {
} );
return unsubscribeBlur;
- }, [navigation, resetCameraOnFocus] );
+ }, [navigation, resetCameraOnFocus, sentinelFileName] );
const handleResults = Worklets.createRunOnJS( ( result, timeTaken ) => {
setLastTimestamp( result.timestamp );
@@ -148,7 +150,7 @@ const FrameProcessorCamera = ( {
}
}
- patchedRunAsync( frame, () => {
+ patchedRunAsync( frame, ( ) => {
"worklet";
// Reminder: this is a worklet, running on a C++ thread. Make sure to check the
@@ -202,10 +204,22 @@ const FrameProcessorCamera = ( {
cameraRef={cameraRef}
device={device}
frameProcessor={frameProcessor}
- onCameraError={onCameraError}
- onCaptureError={onCaptureError}
- onClassifierError={onClassifierError}
- onDeviceNotSupported={onDeviceNotSupported}
+ onCameraError={async ( ) => {
+ await logStage( sentinelFileName, "fallback_camera_error" );
+ onCameraError( );
+ }}
+ onCaptureError={async ( ) => {
+ await logStage( sentinelFileName, "camera_capture_error" );
+ onCaptureError( );
+ }}
+ onClassifierError={async ( ) => {
+ await logStage( sentinelFileName, "camera_classifier_error" );
+ onClassifierError( );
+ }}
+ onDeviceNotSupported={async ( ) => {
+ await logStage( sentinelFileName, "camera_device_not_supported_error" );
+ onDeviceNotSupported( );
+ }}
pinchToZoom={pinchToZoom}
inactive={inactive}
/>
diff --git a/src/components/Camera/Buttons/Close.tsx b/src/components/Camera/Buttons/Close.tsx
index f81f1cddb..3df101157 100644
--- a/src/components/Camera/Buttons/Close.tsx
+++ b/src/components/Camera/Buttons/Close.tsx
@@ -1,15 +1,17 @@
-import { useNavigation } from "@react-navigation/native";
import { TransparentCircleButton } from "components/SharedComponents";
import React from "react";
import { useTranslation } from "sharedHooks";
-const Close = ( ) => {
+interface Props {
+ handleClose: ( ) => void;
+}
+
+const Close = ( { handleClose }: Props ) => {
const { t } = useTranslation( );
- const navigation = useNavigation( );
return (
    <TransparentCircleButton
-      onPress={( ) => navigation.goBack( )}
+ onPress={handleClose}
accessibilityLabel={t( "Close" )}
accessibilityHint={t( "Navigates-to-previous-screen" )}
icon="close"
diff --git a/src/components/Camera/CameraContainer.tsx b/src/components/Camera/CameraContainer.tsx
index 69f93751c..e14602e3a 100644
--- a/src/components/Camera/CameraContainer.tsx
+++ b/src/components/Camera/CameraContainer.tsx
@@ -4,6 +4,7 @@ import {
} from "components/Camera/helpers/visionCameraWrapper";
import React, {
useCallback,
+ useEffect,
useMemo,
useRef,
useState
@@ -12,6 +13,7 @@ import { Alert, StatusBar } from "react-native";
import type {
TakePhotoOptions
} from "react-native-vision-camera";
+import { createSentinelFile, deleteSentinelFile, logStage } from "sharedHelpers/sentinelFiles.ts";
import { useDeviceOrientation, useTranslation, useWatchPosition } from "sharedHooks";
import useLocationPermission from "sharedHooks/useLocationPermission.tsx";
import useStore from "stores/useStore";
@@ -28,6 +30,24 @@ const CameraContainer = ( ) => {
const setCameraState = useStore( state => state.setCameraState );
const evidenceToAdd = useStore( state => state.evidenceToAdd );
const cameraUris = useStore( state => state.cameraUris );
+ const sentinelFileName = useStore( state => state.sentinelFileName );
+ const setSentinelFileName = useStore( state => state.setSentinelFileName );
+
+ const { params } = useRoute( );
+ const cameraType = params?.camera;
+
+ const logStageIfAICamera = useCallback( async (
+ stageName: string,
+    stageData?: string
+ ) => {
+ if ( cameraType !== "AI" ) { return; }
+ await logStage( sentinelFileName, stageName, stageData );
+ }, [cameraType, sentinelFileName] );
+
+ const deleteStageIfAICamera = useCallback( async ( ) => {
+ if ( cameraType !== "AI" ) { return; }
+ await deleteSentinelFile( sentinelFileName );
+ }, [cameraType, sentinelFileName] );
const { deviceOrientation } = useDeviceOrientation( );
// Check if location permission granted b/c usePrepareStoreAndNavigate and
@@ -44,8 +64,6 @@ const CameraContainer = ( ) => {
} );
const navigation = useNavigation( );
const { t } = useTranslation( );
- const { params } = useRoute( );
- const cameraType = params?.camera;
const [cameraPosition, setCameraPosition] = useState<"front" | "back">( "back" );
// https://react-native-vision-camera.com/docs/guides/devices#selecting-multi-cams
@@ -71,6 +89,23 @@ const CameraContainer = ( ) => {
const camera = useRef( null );
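+  // For AI camera sessions, write a sentinel file up front; it is deleted on a clean
+  // exit, so any file that survives marks a session that never completed.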
+ useEffect( () => {
+ const generateSentinelFile = async ( ) => {
+ const fileName = await createSentinelFile( "AICamera" );
+ setSentinelFileName( fileName );
+ };
+ if ( cameraType !== "AI" ) { return; }
+ generateSentinelFile( );
+ }, [setSentinelFileName, cameraType] );
+
+ const logFetchingLocation = !!( hasPermissions && sentinelFileName );
+
+ useEffect( ( ) => {
+ if ( logFetchingLocation ) {
+ logStageIfAICamera( "fetch_user_location_start" );
+ }
+ }, [logStageIfAICamera, logFetchingLocation] );
+
const {
hasPermissions: hasSavePhotoPermission,
hasBlockedPermissions: hasBlockedSavePhotoPermission,
@@ -105,23 +140,29 @@ const CameraContainer = ( ) => {
const handleNavigation = useCallback( async ( newPhotoState = {} ) => {
await prepareStoreAndNavigate( {
...navigationOptions,
- newPhotoState
+ newPhotoState,
+ logStageIfAICamera,
+ deleteStageIfAICamera
} );
}, [
prepareStoreAndNavigate,
- navigationOptions
+ navigationOptions,
+ logStageIfAICamera,
+ deleteStageIfAICamera
] );
const handleCheckmarkPress = useCallback( async newPhotoState => {
if ( !showPhotoPermissionsGate ) {
await handleNavigation( newPhotoState );
} else {
+ await logStageIfAICamera( "request_save_photo_permission_start" );
requestSavePhotoPermission( );
}
}, [
handleNavigation,
requestSavePhotoPermission,
- showPhotoPermissionsGate
+ showPhotoPermissionsGate,
+ logStageIfAICamera
] );
const toggleFlash = ( ) => {
@@ -167,7 +208,9 @@ const CameraContainer = ( ) => {
// this does leave a short period of time where the camera preview is still active
// after taking the photo which we might to revisit if it doesn't look good.
const cameraPhoto = await camera?.current?.takePhoto( takePhotoOptions );
+ await logStageIfAICamera( "take_photo_complete" );
if ( !cameraPhoto ) {
+ await logStageIfAICamera( "take_photo_error" );
throw new Error( "Failed to take photo: missing camera" );
}
if ( options?.inactivateCallback ) options.inactivateCallback();
@@ -214,6 +257,7 @@ const CameraContainer = ( ) => {
onRequestGranted: ( ) => console.log( "granted in save photo permission gate" ),
onRequestBlocked: ( ) => console.log( "blocked in save photo permission gate" ),
onModalHide: async ( ) => {
+ await logStageIfAICamera( "request_save_photo_permission_complete" );
await handleNavigation( {
cameraUris,
evidenceToAdd
diff --git a/src/components/Camera/hooks/usePrepareStoreAndNavigate.ts b/src/components/Camera/hooks/usePrepareStoreAndNavigate.ts
index 4df995e1a..a821a064e 100644
--- a/src/components/Camera/hooks/usePrepareStoreAndNavigate.ts
+++ b/src/components/Camera/hooks/usePrepareStoreAndNavigate.ts
@@ -23,6 +23,7 @@ const usePrepareStoreAndNavigate = ( ): Function => {
const observations = useStore( state => state.observations );
const setSavingPhoto = useStore( state => state.setSavingPhoto );
const setCameraState = useStore( state => state.setCameraState );
+ const setSentinelFileName = useStore( state => state.setSentinelFileName );
const { deviceStorageFull, showStorageFullAlert } = useDeviceStorageFull( );
@@ -31,15 +32,22 @@ const usePrepareStoreAndNavigate = ( ): Function => {
const handleSavingToPhotoLibrary = useCallback( async (
uris,
addPhotoPermissionResult,
- userLocation
+ userLocation,
+ logStageIfAICamera
) => {
- if ( addPhotoPermissionResult !== "granted" ) return Promise.resolve( );
+ await logStageIfAICamera( "save_photos_to_photo_library_start" );
+ if ( addPhotoPermissionResult !== "granted" ) {
+ await logStageIfAICamera( "save_photos_to_photo_library_error" );
+ return Promise.resolve( );
+ }
if ( deviceStorageFull ) {
+ await logStageIfAICamera( "save_photos_to_photo_library_error" );
showStorageFullAlert( );
return Promise.resolve( );
}
setSavingPhoto( true );
const savedPhotoUris = await savePhotosToCameraGallery( uris, userLocation );
+ await logStageIfAICamera( "save_photos_to_photo_library_complete" );
if ( savedPhotoUris.length > 0 ) {
// Save these camera roll URIs, so later on observation editor can update
// the EXIF metadata of these photos, once we retrieve a location.
@@ -60,7 +68,8 @@ const usePrepareStoreAndNavigate = ( ): Function => {
const createObsWithCameraPhotos = useCallback( async (
uris,
addPhotoPermissionResult,
- userLocation
+ userLocation,
+ logStageIfAICamera
) => {
const newObservation = await Observation.new( );
@@ -81,13 +90,15 @@ const usePrepareStoreAndNavigate = ( ): Function => {
await handleSavingToPhotoLibrary(
uris,
addPhotoPermissionResult,
- userLocation
+ userLocation,
+ logStageIfAICamera
);
}, [setObservations, handleSavingToPhotoLibrary] );
const updateObsWithCameraPhotos = useCallback( async (
addPhotoPermissionResult,
- userLocation
+ userLocation,
+ logStageIfAICamera
) => {
const obsPhotos = await ObservationPhoto.createObsPhotosWithPosition(
evidenceToAdd,
@@ -103,7 +114,8 @@ const usePrepareStoreAndNavigate = ( ): Function => {
await handleSavingToPhotoLibrary(
evidenceToAdd,
addPhotoPermissionResult,
- userLocation
+ userLocation,
+ logStageIfAICamera
);
}, [
evidenceToAdd,
@@ -119,17 +131,31 @@ const usePrepareStoreAndNavigate = ( ): Function => {
visionResult,
addPhotoPermissionResult,
userLocation,
- newPhotoState
+ newPhotoState,
+ logStageIfAICamera,
+ deleteStageIfAICamera
} ) => {
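+    // A non-null location means the watcher has resolved, so mark the location fetch complete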
+ if ( userLocation !== null ) {
+ logStageIfAICamera( "fetch_user_location_complete" );
+ }
// when backing out from ObsEdit -> Suggestions -> Camera, create a
// new observation
const uris = newPhotoState?.cameraUris || cameraUris;
if ( addEvidence ) {
- await updateObsWithCameraPhotos( addPhotoPermissionResult, userLocation );
+ await updateObsWithCameraPhotos( addPhotoPermissionResult, userLocation, logStageIfAICamera );
+ await deleteStageIfAICamera( );
+ setSentinelFileName( null );
return navigation.goBack( );
}
- await createObsWithCameraPhotos( uris, addPhotoPermissionResult, userLocation );
+ await createObsWithCameraPhotos(
+ uris,
+ addPhotoPermissionResult,
+ userLocation,
+ logStageIfAICamera
+ );
+ await deleteStageIfAICamera( );
+ setSentinelFileName( null );
return navigation.push( "Suggestions", {
entryScreen: "CameraWithDevice",
lastScreen: "CameraWithDevice",
@@ -139,6 +165,7 @@ const usePrepareStoreAndNavigate = ( ): Function => {
addEvidence,
cameraUris,
createObsWithCameraPhotos,
+ setSentinelFileName,
navigation,
updateObsWithCameraPhotos
] );
diff --git a/src/i18n/l10n/en.ftl.json.orig b/src/i18n/l10n/en.ftl.json.orig
index 175572677..c50ab29b4 100644
--- a/src/i18n/l10n/en.ftl.json.orig
+++ b/src/i18n/l10n/en.ftl.json.orig
@@ -105,7 +105,6 @@
"Closes-withdraw-id-sheet": "Closes \"Withdraw ID\" sheet",
"COLLABORATORS": "COLLABORATORS",
"Collection-Project": "Collection Project",
-<<<<<<< HEAD
"Combine-Photos": {
"comment": "Button that combines multiple photos into a single observation",
"val": "Combine Photos"
@@ -137,7 +136,6 @@
"comment": "Onboarding slides",
"val": "Connect to Nature"
},
-=======
"Combine-Photos": "Combine Photos",
"COMMENT": "COMMENT",
"Comment-options": "Comment options",
@@ -145,7 +143,6 @@
"Community-Guidelines": "Community Guidelines",
"COMMUNITY-GUIDELINES": "COMMUNITY GUIDELINES",
"CONFIRM": "CONFIRM",
->>>>>>> main
"Connect-with-other-naturalists": "Connect with other naturalists and engage in conversations.",
"Connection-problem-Please-try-again-later": "Connection problem. Please try again later.",
"CONTACT-SUPPORT": "CONTACT SUPPORT",
@@ -154,7 +151,6 @@
"val": "CONTINUE"
},
"Continue-to-iNaturalist": "Continue to iNaturalist",
-<<<<<<< HEAD
"Contribute-to-Science": "Contribute to Science",
"Coordinates-copied-to-clipboard": {
"comment": "Notification when coordinates have been copied",
@@ -172,12 +168,10 @@
"comment": "Error message when no camera can be found",
"val": "Could not find a camera on this device"
},
-=======
"Coordinates-copied-to-clipboard": "Coordinates copied to clipboard",
"Copy-coordinates": "Copy Coordinates",
"Copyright": "Copyright",
"Could-not-find-a-camera-on-this-device": "Could not find a camera on this device",
->>>>>>> main
"Couldnt-create-comment": "Couldn't create comment",
"Couldnt-create-identification-error": "Couldn't create identification { $error }",
"Couldnt-create-identification-unknown-error": "Couldn't create identification, unknown error.",
@@ -363,15 +357,12 @@
"Import-Photos-From": "Import Photos From",
"IMPORT-X-OBSERVATIONS": "IMPORT { $count ->\n [one] 1 OBSERVATION\n *[other] { $count } OBSERVATIONS\n}",
"IMPROVE-THESE-SUGGESTIONS-BY-USING-YOUR-LOCATION": "IMPROVE THESE SUGGESTIONS BY USING YOUR LOCATION",
-<<<<<<< HEAD
"improving--identification": {
"comment": "Identification category",
"val": "Improving"
},
"iNat-is-global-community": "iNaturalist is a global community of naturalists creating open data for science by collectively observing & identifying organisms",
-=======
"improving--identification": "Improving",
->>>>>>> main
"INATURALIST-ACCOUNT-SETTINGS": "INATURALIST ACCOUNT SETTINGS",
"iNaturalist-AI-Camera": "iNaturalist AI Camera",
"iNaturalist-can-save-photos-you-take-in-the-app-to-your-devices-gallery": "iNaturalist can save photos you take in the app to your device’s gallery.",
diff --git a/src/i18n/l10n/en.ftl.orig b/src/i18n/l10n/en.ftl.orig
index 9333d2311..ef56466a6 100644
--- a/src/i18n/l10n/en.ftl.orig
+++ b/src/i18n/l10n/en.ftl.orig
@@ -146,12 +146,9 @@ Change-taxon-filter = Change taxon filter
Change-user = Change user
# Label for a button that cycles through zoom levels for the camera
Change-zoom = Change zoom
-<<<<<<< HEAD
# Notification that appears after pressing the reset password button
-=======
Check-this-box-if-you-want-to-apply-a-Creative-Commons = Check this box if you want to apply a Creative Commons
# After pressing the reset password button
->>>>>>> main
CHECK-YOUR-EMAIL = CHECK YOUR EMAIL!
# Text for a button prompting the user to grant access to the gallery
CHOOSE-PHOTOS = CHOOSE PHOTOS
@@ -186,14 +183,11 @@ CONFIRM = CONFIRM
Connect-with-other-naturalists = Connect with other naturalists and engage in conversations.
Connection-problem-Please-try-again-later = Connection problem. Please try again later.
CONTACT-SUPPORT = CONTACT SUPPORT
-<<<<<<< HEAD
# Notification when coordinates have been copied
Coordinates-copied-to-clipboard = Coordinates copied to clipboard
# Button that copies coordinates to the clipboard
-=======
CONTINUE = CONTINUE
Coordinates-copied-to-keyboard = Coordinates copied to keyboard
->>>>>>> main
Copy-coordinates = Copy Coordinates
# Right to control copies of a creative work; this string may be used as a
# heading to describe general information about rights, attribution, and
diff --git a/src/sharedHelpers/sentinelFiles.ts b/src/sharedHelpers/sentinelFiles.ts
new file mode 100644
index 000000000..ef7806e14
--- /dev/null
+++ b/src/sharedHelpers/sentinelFiles.ts
@@ -0,0 +1,86 @@
+import RNFS from "react-native-fs";
+import { log } from "sharedHelpers/logger";
+import { unlink } from "sharedHelpers/util.ts";
+
+import { sentinelFilePath } from "../appConstants/paths";
+
+const logger = log.extend( "sentinelFiles" );
+
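+// Resolve a sentinel file name to its absolute path inside the sentinel directory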
+const accessFullFilePath = fileName => `${sentinelFilePath}/${fileName}`;
+
+const generateSentinelFileName = ( screenName: string ): string => {
+ const timestamp = new Date().getTime();
+ return `sentinel_${screenName}_${timestamp}.log`;
+};
+
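+// Create a sentinel file for the given screen with an initial JSON payload;
+// returns the generated file name, or an empty string on failure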
+const createSentinelFile = async ( screenName: string ): Promise<string> => {
+ try {
+ await RNFS.mkdir( sentinelFilePath );
+ const sentinelFileName = generateSentinelFileName( screenName );
+
+ const logEntry = {
+ screenName,
+ entryTimestamp: new Date( ).toISOString( ),
+ stages: []
+ };
+
+ const initialContent = JSON.stringify( logEntry );
+
+ await RNFS.writeFile( accessFullFilePath( sentinelFileName ), initialContent, "utf8" );
+ return sentinelFileName;
+ } catch ( error ) {
+ console.error( "Failed to create sentinel file:", error );
+ return "";
+ }
+};
+
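+// Append a timestamped stage entry to the sentinel file's JSON log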
+const logStage = async (
+  sentinelFileName: string,
+  stageName: string,
+  stageData?: string
+): Promise<void> => {
+ const fullFilePath = accessFullFilePath( sentinelFileName );
+ try {
+ const existingContent = await RNFS.readFile( fullFilePath, "utf8" );
+ const sentinelData = JSON.parse( existingContent );
+
+    const stage = {
+      name: stageName,
+      timestamp: new Date( ).toISOString( ),
+      // Optional extra detail supplied by some call sites (e.g. permission results)
+      ...( stageData !== undefined ? { data: stageData } : {} )
+    };
+
+ sentinelData.stages.push( stage );
+
+ await RNFS.writeFile( fullFilePath, JSON.stringify( sentinelData ), "utf8" );
+ } catch ( error ) {
+ console.error( "Failed to log stage to sentinel file:", error, sentinelFileName, stageName );
+ }
+};
+
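+// Remove the sentinel file once the camera flow finishes cleanly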
+const deleteSentinelFile = async ( sentinelFileName: string ): Promise<void> => {
+ try {
+ const fullFilePath = accessFullFilePath( sentinelFileName );
+ await RNFS.unlink( fullFilePath );
+ } catch ( error ) {
+ console.error( "Failed to delete sentinel file:", error, sentinelFileName );
+ }
+};
+
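+// Called at app start: any sentinel files still on disk indicate a camera flow that
+// never completed, so log their contents as errors and delete them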
+const findAndLogSentinelFiles = async ( ) => {
+ const directoryExists = await RNFS.exists( sentinelFilePath );
+ if ( !directoryExists ) { return null; }
+ const files = await RNFS.readDir( sentinelFilePath );
+
+  // Await every read/log/delete so the cleanup has finished before returning
+  await Promise.all( files.map( async file => {
+    const existingContent = await RNFS.readFile( file.path, "utf8" );
+    logger.error( "Camera flow error: ", existingContent );
+    await unlink( file.path );
+  } ) );
+ return files;
+};
+
+export {
+ createSentinelFile,
+ deleteSentinelFile,
+ findAndLogSentinelFiles,
+ logStage
+};
diff --git a/src/sharedHooks/useWatchPosition.ts b/src/sharedHooks/useWatchPosition.ts
index 1ec581a3e..e5d5c25b3 100644
--- a/src/sharedHooks/useWatchPosition.ts
+++ b/src/sharedHooks/useWatchPosition.ts
@@ -35,6 +35,7 @@ const useWatchPosition = ( options: {
const [userLocation, setUserLocation] = useState( null );
const { shouldFetchLocation } = options;
const [hasFocus, setHasFocus] = useState( true );
+ const logStage = options?.logStage;
const stopWatch = useCallback( ( id: number ) => {
clearWatch( id );
@@ -111,7 +112,7 @@ const useWatchPosition = ( options: {
setHasFocus( false );
} );
return unsubscribe;
- }, [navigation, stopWatch, subscriptionId] );
+ }, [navigation, stopWatch, subscriptionId, logStage] );
// Listen for focus. We only want to fetch location when this screen has focus.
useEffect( ( ) => {
diff --git a/src/stores/createObservationFlowSlice.js b/src/stores/createObservationFlowSlice.js
index 01542d17c..5825a73cb 100644
--- a/src/stores/createObservationFlowSlice.js
+++ b/src/stores/createObservationFlowSlice.js
@@ -23,7 +23,8 @@ const DEFAULT_STATE = {
savingPhoto: false,
savedOrUploadedMultiObsFlow: false,
unsavedChanges: false,
- totalSavedObservations: 0
+ totalSavedObservations: 0,
+ sentinelFileName: null
};
const removeObsSoundFromObservation = ( currentObservation, uri ) => {
@@ -185,6 +186,9 @@ const createObservationFlowSlice = ( set, get ) => ( {
return ( {
totalSavedObservations: existingTotalSavedObservations + 1
} );
+ } ),
+ setSentinelFileName: sentinelFileName => set( {
+ sentinelFileName
} )
} );