Pankaj Bhardwaj

Reputation: 2131

After Expo's latest release (version 51.0.1), expo-camera stopped working | expo-camera video recording error

I have changed the implementation to the new API, but I am not getting the recorded video path.

Below are my package.json dependencies:

"dependencies": {
    "@react-native-async-storage/async-storage": "~1.23.1",
    "@react-native-community/checkbox": "^0.5.17",
    "@react-native-community/slider": "4.4.2",
    "@react-navigation/bottom-tabs": "^6.3.1",
    "@react-navigation/core": "^6.2.1",
    "@react-navigation/drawer": "^6.4.1",
    "@react-navigation/elements": "^1.3.3",
    "@react-navigation/material-bottom-tabs": "^6.2.1",
    "@react-navigation/material-top-tabs": "^6.2.1",
    "@react-navigation/native": "^6.0.10",
    "@react-navigation/native-stack": "^6.7.0",
    "@react-navigation/routers": "^6.1.0",
    "@react-navigation/stack": "^6.2.1",
    "@reduxjs/toolkit": "^2.2.2",
    "axios": "^1.6.8",
    "expo": "51.0.1",
    "expo-av": "~14.0.3",
    "expo-camera": "~15.0.5",
    "expo-file-system": "~17.0.1",
    "expo-font": "~12.0.4",
    "expo-image": "~1.12.8",
    "expo-linear-gradient": "~13.0.2",
    "expo-screen-orientation": "~7.0.3",
    "expo-status-bar": "~1.12.1",
    "expo-updates": "~0.25.9",
    "moment": "^2.30.1",
    "react": "18.2.0",
    "react-native": "0.74.1",
    "react-native-camera": "^4.2.1",
    "react-native-gesture-handler": "~2.16.1",
    "react-native-orientation-locker": "^1.6.0",
    "react-native-permissions": "^4.1.4",
    "react-native-push-notification": "^8.1.1",
    "react-native-reanimated": "3.10.0",
    "react-native-safe-area-context": "4.10.1",
    "react-native-screens": "3.31.1",
    "react-native-svg": "15.2.0-rc.0",
    "react-native-toast-message": "^2.2.0",
    "react-native-toastify": "^0.2.2",
    "react-native-video": "^5.2.1",
    "react-native-webview": "13.8.6",
    "react-redux": "^9.1.0",
    "redux": "^5.0.1",
    "redux-thunk": "^3.1.0",
    "expo-image-picker": "~15.0.4",
    "undefined": "babel/core"
  },

Below is my CameraScreen component:

import React, { useState, useRef } from 'react';
import { Button, StyleSheet, Text, TouchableOpacity, View, Alert } from 'react-native';
import { CameraView, useCameraPermissions } from 'expo-camera';
import { useNavigation } from '@react-navigation/native';

export default function CameraScreen() {
  const [facing, setFacing] = useState('back');
  const [isRecording, setIsRecording] = useState(false);
  const [permission, requestPermission] = useCameraPermissions();
  const cameraRef = useRef(null);
  const navigation = useNavigation();

  if (!permission) {
    // Camera permissions are still loading.
    return <View />;
  }

  // useEffect(() => {
  //   (async () => {
  //     const { status: cameraStatus } = await CameraView.requestCameraPermissionsAsync();
  //     console.log('Camera permission status:', cameraStatus);
  //     const { status: microphoneStatus } = await CameraView.requestMicrophonePermissionsAsync();
  //     console.log('Microphone permission status:', microphoneStatus);
  //   })();

  // }, []);

  if (!permission.granted) {
    // Camera permissions are not granted yet.
    abc()
    return (
      <View style={styles.container}>
        <Text style={{ textAlign: 'center' }}>We need your permission to show the camera</Text>
        <Button onPress={requestPermission} title="Grant Permission" />
      </View>
    );
  }

  const abc = async ()=>{
    const { status: cameraStatus } = await CameraView.requestCameraPermissionsAsync();
    console.log('Camera permission status:', cameraStatus);
    const { status: microphoneStatus } = await CameraView.requestMicrophonePermissionsAsync();
    console.log('Microphone permission status:', microphoneStatus);
}
  function toggleCameraFacing() {
    setFacing(current => (current === 'back' ? 'front' : 'back'));
  }

  const startRecording = async () => {
    console.log("1",cameraRef)

    
    if (cameraRef) {
      console.log("2",cameraRef.current)

      setIsRecording(true);
      try {
        const video = await cameraRef.current.recordAsync({
          quality: CameraView.Constants.VideoQuality['720p'],
        });
        console.log("Recording stopped, URI:", video);
        setIsRecording(false);
        handleVideoRecorded(video.uri); // Implement this function as needed
      } catch (error) {
        console.error("Recording failed:1", error);
        setIsRecording(false);
      }
    }
  };

  const stopRecording = async () => {
    console.log("3",cameraRef)

    if (cameraRef) {
      console.log("4")

      await cameraRef.current.stopRecording();
      // console.log("Recording stopped, URI:2", video);
      // setIsRecording(false);

    }
  };

  const handleVideoRecorded = (uri) => {
    Alert.alert(
      'Preview Video',
      'Do you want to preview the video?',
      [
        { text: 'Cancel', style: 'cancel' },
        { text: 'OK', onPress: () => navigation.navigate('Preview', { uri }) }
      ],
      { cancelable: false }
    );
  };

  return (
    <View style={styles.container}>
      <CameraView style={styles.camera} ref={cameraRef} facing={facing}>
        <View style={styles.buttonContainer}>
          {isRecording ? (
            <TouchableOpacity style={styles.button} onPress={stopRecording}>
              <Text style={styles.text}>Stop Recording</Text>
            </TouchableOpacity>
          ) : (
            <TouchableOpacity style={styles.button} onPress={startRecording}>
              <Text style={styles.text}>Start Recording</Text>
            </TouchableOpacity>
          )}
          <TouchableOpacity style={styles.button} onPress={toggleCameraFacing}>
            <Text style={styles.text}>Flip Camera</Text>
          </TouchableOpacity>
        </View>
      </CameraView>
    </View>
  );
}

const styles = StyleSheet.create({
  container: {
    flex: 1,
    justifyContent: 'center',
    alignItems: 'center',
  },
  camera: {
    width: '100%',
    height: '100%',
  },
  buttonContainer: {
    backgroundColor: 'transparent',
    position: 'absolute',
    bottom: 50,
    flexDirection: 'row',
    justifyContent: 'space-around',
    width: '100%',
  },
  button: {
    backgroundColor: 'white',
    padding: 10,
    borderRadius: 5,
  },
  text: {
    color: 'black',
  },
});

Initially I was getting this error:

ERROR Recording failed [Error: Video recording failed: Recording was stopped before any data could be produced.]

I tried adding a video quality option like this:

setIsRecording(true);
try {
  const video = await cameraRef.current.recordAsync({
    quality: CameraView.Constants.VideoQuality['720p'],
  });
  console.log("Recording stopped, URI:", video);
  setIsRecording(false);
  handleVideoRecorded(video.uri); // Implement this function as needed
} catch (error) {
  console.error("Recording failed:1", error);
  setIsRecording(false);
}

Then I get this error:

ERROR Recording failed:1 [TypeError: Cannot read property 'VideoQuality' of undefined]

Upvotes: 2

Views: 1133

Answers (2)

J. L. Muller

Reputation: 307

To support the v14 API, you should import components from expo-camera/legacy instead, as described in this migration guide.
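
For example, a minimal sketch assuming the SDK 51 legacy entry point (where the old Camera component and its constants, including Camera.Constants.VideoQuality, should still be available):

// v14-style (legacy) API in SDK 51 -- imported from the /legacy entry point
import { Camera, CameraType } from 'expo-camera/legacy';

// ...the old constants then keep working inside recordAsync:
// const video = await cameraRef.current.recordAsync({
//   quality: Camera.Constants.VideoQuality['720p'],
// });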

Upvotes: 0

a0m0rajab

Reputation: 455

If you read the error:

[TypeError: Cannot read property 'VideoQuality' of undefined]

you can understand that Constants is undefined, so you either need an alternative to it (the legacy VideoQuality['720p'] from the old API) or you need to pass the video quality as the string "720p".

The other thing to notice is that quality is not an option accepted by the recordAsync function: https://docs.expo.dev/versions/latest/sdk/camera/#camerarecordingoptions

You need to pass the video quality as a prop to the CameraView component instead: https://docs.expo.dev/versions/latest/sdk/camera/#videoquality

You can achieve that with:

<CameraView videoQuality="720p"> </CameraView>
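
Put together, a minimal sketch of the corrected flow could look like this (the mode="video" prop and the useMicrophonePermissions hook are my additions, not something from your snippet, but both are part of the current CameraView API):

import { CameraView, useCameraPermissions, useMicrophonePermissions } from 'expo-camera';

// Video quality and mode are configured on the component, not in recordAsync
<CameraView
  ref={cameraRef}
  style={styles.camera}
  facing={facing}
  mode="video"
  videoQuality="720p"
/>

// recordAsync only takes recording options such as maxDuration
const video = await cameraRef.current.recordAsync({ maxDuration: 60 });
console.log('Recording stopped, URI:', video?.uri);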

Upvotes: 0
