sam rafiei

Reputation: 71

How to play recording through the speaker in react native expo av

I'm trying to build a voice memo component for my app using expo-av. I've figured out how to create the recording, but on playback the audio only comes out of the earpiece speaker. Is there a way to play the recording back through the main phone speaker? I haven't tested this on Android yet, but on iPhone the audio only plays through the earpiece. Thank you.

import React from 'react';
import { Button, StyleSheet, Text, View, TouchableOpacity } from 'react-native';
import { Audio } from 'expo-av';
import * as Sharing from 'expo-sharing';
import { MaterialCommunityIcons } from '@expo/vector-icons';
import AppText from './AppText';
import Screen from './Screen';

export default function AppVoice() {
  const [recording, setRecording] = React.useState();
  const [recordings, setRecordings] = React.useState([]);
  const [message, setMessage] = React.useState("");

  async function startRecording() {
    try {
      const permission = await Audio.requestPermissionsAsync();

      if (permission.status === "granted") {
        await Audio.setAudioModeAsync({
          allowsRecordingIOS: true,
          playsInSilentModeIOS: true,
        });

        const { recording } = await Audio.Recording.createAsync(
          Audio.RECORDING_OPTIONS_PRESET_HIGH_QUALITY
        );

        setRecording(recording);
      } else {
        setMessage("Please grant permission to app to access microphone");
      }
    } catch (err) {
      console.error('Failed to start recording', err);
    }
  }

  async function stopRecording() {
    setRecording(undefined);
    await recording.stopAndUnloadAsync();

    let updatedRecordings = [...recordings];
    const { sound, status } = await recording.createNewLoadedSoundAsync();
    updatedRecordings.push({
      sound: sound,
      duration: getDurationFormatted(status.durationMillis),
      file: recording.getURI()
    });

    setRecordings(updatedRecordings);
  }

  function getDurationFormatted(millis) {
    const minutes = millis / 1000 / 60;
    const minutesDisplay = Math.floor(minutes);
    const seconds = Math.floor((minutes - minutesDisplay) * 60); // floor, so 59.9s shows as 0:59 rather than 0:60
    const secondsDisplay = seconds < 10 ? `0${seconds}` : seconds;
    return `${minutesDisplay}:${secondsDisplay}`;
  }

  function getRecordingLines() {
    return recordings.map((recordingLine, index) => {
      return (
        <View key={index} style={styles.row}>
          <Text style={styles.fill}>Recording {index + 1} - {recordingLine.duration}</Text>
          <Button style={styles.button} onPress={() => recordingLine.sound.replayAsync()} title="Play"></Button>
          <Button style={styles.button}  onPress={() => Sharing.shareAsync(recordingLine.file)} title="Share"></Button>
        </View>
      );
    });
  }

  return (
    <Screen style={{ flex: 1, backgroundColor: 'black' }}>
      <View style={styles.container}>
        <View style={styles.recorder}>
          <TouchableOpacity style={{ position: 'absolute', left: 10 }}>
            <MaterialCommunityIcons name="microphone" size={24} color="black" />
          </TouchableOpacity>
          <AppText style={{ color: 'black', textAlign: 'center' }}>Voice Memo</AppText>
          <TouchableOpacity onPress={recording ? stopRecording : startRecording} style={{ position: 'absolute', right: 10 }}>
            {recording ? <MaterialCommunityIcons name="pause" size={28} color="black" /> : <MaterialCommunityIcons name="record-circle-outline" size={28} color="red" />}
          </TouchableOpacity>
        </View>
        <View style={{ flex: 1 }}>
          {getRecordingLines()}
        </View>
      </View>
    </Screen>
  );
}

const styles = StyleSheet.create({
  recorder: {
    width: 300,
    backgroundColor: 'white',
    height: 50,
    borderRadius: 100,
    justifyContent: 'center'
  },
  container: {
    flex: 1,
  },
  row: {
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'center',
  },
  fill: {
    flex: 1,
    margin: 16,
    color: 'white'
  },
  button: {
    margin: 16
  }
});

Upvotes: 2

Views: 1934

Answers (2)

rahul vyas

Reputation: 11

Sure, here is working code for an Expo audio player in React Native. Note that a couple of the imports (TextInputconponent, AudioRecoder_main) are helpers specific to my own project:

import React, { useState, useEffect } from "react";
import {
  View,
  Text,
  StyleSheet,
  TouchableOpacity,
  ActivityIndicator,
  Platform,
} from "react-native";
import { Audio } from "expo-av";
import { height, width } from "./TextInputconponent";
import { ResetNavigationStack } from "../screens/AudioRecoder_main";

export default function AudioplayerComponent(props: any) {
  const [sound, setSound] = useState<Audio.Sound | null>(null);
  const [isPlaying, setIsPlaying] = useState(false);
  const [positionMillis, setPositionMillis] = useState(0);
  const [durationMillis, setDurationMillis] = useState(0);
  const [isLoading, setIsLoading] = useState(true);
  const [isLoaded, setIsLoaded] = useState(false);

  // Guard against division by zero before the duration is known
  const sliderValue = durationMillis > 0 ? positionMillis / durationMillis : 0;
  const Thumbimage = require("../assets/images/BlackDotthumb.png");

  const onPlaybackStatusUpdate = async (status: AVPlaybackStatus) => {
    if (status.isLoaded) {
      setIsPlaying(status.isPlaying);
      setPositionMillis(status.positionMillis);
      setDurationMillis(status.durationMillis ?? 0);

      if (status.didJustFinish) {
        // Reset the audio to the beginning
        console.log("Audio playback completed. Resetting.");
        // await seekTo(0);
        loadAudio();
        // Reset position and set not playing
        setPositionMillis(0);
        setIsPlaying(false);
      }

      setIsLoading(false); // Set isLoading to false once the sound is loaded
    }
  };

  useEffect(() => {
    loadAudio();
  }, []);

  useEffect(() => {
    if (props.onBackPressed === true) {
      pauseSound();
      // props.navigation.navigate("AudioListScreen");
      ResetNavigationStack({
        navigation: props.navigation,
        ScreenName: "AudioListScreen",
      });
    }
  }, [props.counter]);

  const audioFile = props?.audioFile?.filePathUrl ?? props?.audioFile;

  const loadAudio = async () => {
    try {
      // Load the audio file
      const { sound: audioSound } = await Audio.Sound.createAsync(
        {
          uri: audioFile,
        },
        { shouldPlay: false }
      );

      // Set the audio mode to play through the speaker
      await Audio.setAudioModeAsync({
        allowsRecordingIOS: false,
        playsInSilentModeIOS: true,
        playThroughEarpieceAndroid: false,
      });

      // Set the sound's playback status update callback
      audioSound.setOnPlaybackStatusUpdate(onPlaybackStatusUpdate);

      // Set the sound
      setSound(audioSound);

      // Mark the sound as loaded and return it so callers can use it
      // immediately (the sound state updates asynchronously)
      setIsLoaded(true);
      return audioSound;
    } catch (error) {
      console.error("Error loading audio:", error);
    }
  };

  const playSound = async () => {
    try {
      if (!sound) {
        // The sound state is not updated synchronously, so play the
        // instance returned by loadAudio() on the first press
        await Audio.setAudioModeAsync({
          allowsRecordingIOS: false,
          playsInSilentModeIOS: true,
        });
        const loadedSound = await loadAudio();
        await loadedSound?.playAsync();
      } else {
        await sound.playAsync();
      }
    } catch (error) {
      console.error("Error playing sound:", error);
    }
  };

  const pauseSound = async () => {
    if (sound) {
      try {
        const status = await sound.getStatusAsync();
        if (status.isLoaded && status.isPlaying) {
          await sound.pauseAsync();
        }
      } catch (error) {
        console.error("Error pausing sound:", error);
      }
    }
  };

  // ... (the rest of the component, including the slider UI, was cut off in the original answer)
}
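
The part that matters for speaker routing is the setAudioModeAsync call in loadAudio: allowsRecordingIOS: false restores speaker playback on iOS, and playThroughEarpieceAndroid: false keeps Android playback on the loudspeaker rather than the earpiece.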

Upvotes: 0

sam rafiei

Reputation: 71

I'm not sure how I stumbled on it, but the fix is: set "allowsRecordingIOS" to true while recording, then set it back to false when you stop the recording.
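
In isolation, the change is just this audio-mode toggle (a minimal sketch; on iOS, enabling recording switches the audio session into a play-and-record mode that routes output to the earpiece by default, so turning it off afterwards restores the main speaker):

// While recording: with allowsRecordingIOS enabled, iOS routes audio to the earpiece
await Audio.setAudioModeAsync({
  allowsRecordingIOS: true,
  playsInSilentModeIOS: true,
});

// ...record with Audio.Recording as usual...

// After stopping: disabling recording routes playback back to the main speaker
await Audio.setAudioModeAsync({
  allowsRecordingIOS: false,
  playsInSilentModeIOS: true,
});

Here's the full updated component: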

import React from 'react';
import { Button, StyleSheet, Text, View, TouchableOpacity } from 'react-native';
import { Audio } from 'expo-av';
import * as Sharing from 'expo-sharing';
import { MaterialCommunityIcons } from '@expo/vector-icons';
import AppText from './AppText';
import Screen from './Screen';

export default function AppVoice() {
  const [recording, setRecording] = React.useState();
  const [recordings, setRecordings] = React.useState([]);
  const [message, setMessage] = React.useState("");

  async function startRecording() {
    try {
      const permission = await Audio.requestPermissionsAsync();

      if (permission.status === "granted") {
        await Audio.setAudioModeAsync({
          allowsRecordingIOS: true,
          playsInSilentModeIOS: true,
        });

        const { recording } = await Audio.Recording.createAsync(
          Audio.RECORDING_OPTIONS_PRESET_HIGH_QUALITY
        );

        setRecording(recording);
      } else {
        setMessage("Please grant permission to app to access microphone");
      }
    } catch (err) {
      console.error('Failed to start recording', err);
    }
  }

  async function stopRecording() {
    setRecording(undefined);
    await recording.stopAndUnloadAsync();
    // Turning allowsRecordingIOS off here is what routes playback
    // to the main speaker instead of the earpiece
    await Audio.setAudioModeAsync({
      allowsRecordingIOS: false,
      playsInSilentModeIOS: true,
    });

    let updatedRecordings = [...recordings];
    const { sound, status } = await recording.createNewLoadedSoundAsync();
    updatedRecordings.push({
      sound: sound,
      duration: getDurationFormatted(status.durationMillis),
      file: recording.getURI()
    });

    setRecordings(updatedRecordings);
  }

  function getDurationFormatted(millis) {
    const minutes = millis / 1000 / 60;
    const minutesDisplay = Math.floor(minutes);
    const seconds = Math.floor((minutes - minutesDisplay) * 60); // floor, so 59.9s shows as 0:59 rather than 0:60
    const secondsDisplay = seconds < 10 ? `0${seconds}` : seconds;
    return `${minutesDisplay}:${secondsDisplay}`;
  }

  function getRecordingLines() {
    return recordings.map((recordingLine, index) => {
      return (
        <View key={index} style={styles.row}>
          <Text style={styles.fill}>Recording {index + 1} - {recordingLine.duration}</Text>
          <Button style={styles.button} onPress={() => recordingLine.sound.replayAsync()} title="Play"></Button>
          <Button style={styles.button}  onPress={() => Sharing.shareAsync(recordingLine.file)} title="Share"></Button>
        </View>
      );
    });
  }

  return (
    <Screen style={{ flex: 1, backgroundColor: 'black' }}>
      <View style={styles.container}>
        <View style={styles.recorder}>
          <TouchableOpacity style={{ position: 'absolute', left: 10 }}>
            <MaterialCommunityIcons name="microphone" size={24} color="black" />
          </TouchableOpacity>
          <AppText style={{ color: 'black', textAlign: 'center' }}>Voice Memo</AppText>
          <TouchableOpacity onPress={recording ? stopRecording : startRecording} style={{ position: 'absolute', right: 10 }}>
            {recording ? <MaterialCommunityIcons name="pause" size={28} color="black" /> : <MaterialCommunityIcons name="record-circle-outline" size={28} color="red" />}
          </TouchableOpacity>
        </View>
        <View style={{ flex: 1 }}>
          {getRecordingLines()}
        </View>
      </View>
    </Screen>
  );
}

const styles = StyleSheet.create({
  recorder: {
    width: 300,
    backgroundColor: 'white',
    height: 50,
    borderRadius: 100,
    justifyContent: 'center'
  },
  container: {
    flex: 1,
  },
  row: {
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'center',
  },
  fill: {
    flex: 1,
    margin: 16,
    color: 'white'
  },
  button: {
    margin: 16
  }
});

Upvotes: 3
