Reputation: 11755
I am fighting to make a web app written using Next.JS work.
At this stage the app allows the user to record his own voice and then play it back. And then make a new voice recording and play it back one or several times ... an so on.
This is my code:
'use client'
import {useState,useRef,useEffect} from "react";
const AudioRecorderPlayer = () => {
const [permission, setPermission] = useState(false);
const [recordComplete, setRcdComplete] = useState(false);
const [stream, setStream] = useState<MediaStream>();
const [recordingStatus, setRecordingStatus] = useState("inactive");
const mrRef = useRef<MediaRecorder | null>(null);
const audioRef = useRef<HTMLAudioElement | null>(null);
const audioChunksRef = useRef<Blob[]>([]);
const [audio, setAudio] = useState('');
const dataType = "video/webm";
const getMicrophonePermission = async () => {
if ("MediaRecorder" in window) {
try {
const streamData = await navigator.mediaDevices.getUserMedia({
audio: true,
video: false,
});
setPermission(true);
setStream(streamData);
} catch (err) {
alert("err.message");
}
} else {
alert("The MediaRecorder API is not supported in your browser.");
}
}; /* End of getMicrophonePermission */
const startRecording = async () => {
URL.revokeObjectURL(audio);
mrRef.current = null
audioChunksRef.current = []
setRecordingStatus("recording");
const mediaRecorder = new MediaRecorder(stream!, {
mimeType: dataType,
audioBitsPerSecond: 16*44100
});
mrRef.current = mediaRecorder;
let localAudioChunks = [];//[Blob];
mediaRecorder.start()
mediaRecorder.ondataavailable = (event) => {
if (typeof event.data === "undefined") return;
if (event.data.size === 0) return;
localAudioChunks.push(event.data);
audioChunksRef.current.push(event.data);
};
}; /* End of startRecording */
const stopRecording = () => {
setRecordingStatus("inactive");
if (!mrRef.current) return
mrRef.current?.stop();
mrRef.current.onstop = async () => {
console.log("Here dataType = ",dataType)
const audioBlob = new Blob(audioChunksRef.current, {
type: dataType});
const audioUrl = URL.createObjectURL(audioBlob);
setAudio(audioUrl);
setRcdComplete(true);
};
}; /* End of stopRecording */
const handlePlay = () => {
if (audioRef.current) {
console.log('currentTime-handlePlay(1) =',audioRef.current.currentTime)
audioRef.current.play();
console.log('currentTime-handlePlay(2) =',audioRef.current.currentTime)
}
}; /* End of handlePlay */
const handleEnd = () => {
console.log('Passage +',audioRef.current?'OK':'à vide')
if (audioRef.current) {
console.log('currentTime(1) =',audioRef.current.currentTime)
audioRef.current.currentTime = 0.0;
console.log('currentTime(2) =',audioRef.current.currentTime)
}
console.log('Passage +',audioRef.current?'OK':'à vide')
}; /* End of handleEnd */
return (
<div>
<main>
{!permission ? (
<button onClick={getMicrophonePermission} type="button">
Get Microphone
</button>
): null}
{permission && recordingStatus === "inactive" ? (
<button onClick={startRecording} type="button">
Start Recording
</button>
) : null}
{recordingStatus === "recording" ? (
<button onClick={stopRecording} type="button">
Stop Recording
</button>
) : null}
<audio src={audio}
ref={audioRef}
id="audio-player"
onPlay={handlePlay}
onEnded={handleEnd}
onEmptied = {(event) => {console.log('--onEmptied--')}}
onCanPlayThrough = {(event) => {console.log('--onCanPlayThrough--')}}
controls />
</main>
</div>
);
}; /* End of AudioRecorderPlayer */
export default AudioRecorderPlayer;
Eventhough the code above mainly works. At times I have the following problem:
When playing back a recording for the second or more times, the audio play stops to happen as usual (that is as soon as I click the play button) but starts playing only after a delay of almost 20 seconds.
Is there something wrong in the code ? I do not see any error message.
Upvotes: 5
Views: 2160