Reputation: 571
I'm using the hls.js project to view HLS content. I need to implement a 'recording' feature: once a user clicks the 'Start Recording' button, all the fragments starting from the current playback position are aggregated into a temporary buffer; once the user clicks 'Stop Recording', these fragments are saved into an mp4 file along with the prepended init segment.
So far I was able to listen for BUFFER_APPENDING events and push new segments into a temp array
// Collect raw segment data as hls.js appends it to the media source buffer.
// NOTE(review): BUFFER_APPENDING fires when data enters the *buffer*, not when
// it is *played* — hls.js buffers ahead, so these segments are 30s-60s ahead
// of the current playback position (the problem described below).
hls.on(Hls.Events.BUFFER_APPENDING, (event, data) => {
if (this.state.recording) {
// data.type distinguishes the stream kind (e.g. audio/video track buffers).
this.dataStream[data.type].push(data.data);
console.log("recording " + data.type + " data, " + data.data.length + " bytes ...")
}
});
But these are much newer segments that are being added to the cached buffer, so the actual recorded data is shifted about 30s-60s forward from the actual 'Start Recording' time.
I checked the FRAG_CHANGED event, and it seems it corresponds to the currently playing fragment — see the hls.js events documentation.
// FRAG_CHANGED fires when playback moves to a new fragment, so data.frag
// identifies the fragment currently being played — but this event only
// carries fragment metadata, not the fragment's media bytes.
hls.on(Hls.Events.FRAG_CHANGED, (event, data) => {
console.log("HLS Player Frag Changed")
console.log("Data = " + inspect(data))
})
How can I actually get the currently played video fragment's data array, so I can aggregate it and later use it for building the mp4 file?
Upvotes: 1
Views: 1629
Reputation: 571
this.dataStream = [];
this.fragments = []
...
hls.on(Hls.Events.FRAG_CHANGED, (event, data) => {
console.log("HLS Player Frag Changed")
//console.log("Data = " + inspect(data))
if (this.state.recording) {
let recdata = this.fragments[data.frag.sn]
let dataToRecord = [...recdata.data1,...recdata.data2]
this.dataStream.push(dataToRecord);
console.log("recording video data sn(" + data.frag.sn+ ") " + dataToRecord.length + " bytes ...")
}
})
hls.on(Hls.Events.FRAG_PARSING_DATA, (event, data) => {
console.log("HLS Player Frag Parsing Data")
//console.log("Data = " + inspect(data))
this.fragments[data.frag.sn]=data
console.log("saved sn(" + data.frag.sn + ") - fragment")
})
But there may be a memory issue, because I'm storing every fragment received in the FRAG_PARSING_DATA event without ever releasing them.
Upvotes: 1