Pawan Pillai

Reputation: 2065

How to use GStreamer to directly stream to a web browser?

There are many examples online that use a GStreamer pipeline with "tcpclientsink" or "udpsink" together with NodeJS to get the GStreamer output into a web browser.

But I could not find any example or documentation that clearly explains how to use the webrtcbin element with a NodeJS server to send a stream to a web browser. (An alternative to webrtcbin would be fine, too.)

I have the following GStreamer pipeline:

gst-launch-1.0 videotestsrc  \
! queue ! vp8enc ! rtpvp8pay \
! application/x-rtp,media=video,encoding-name=VP8,payload=96 \
! webrtcbin name=sendrecv

Can someone help me consume this pipeline with a NodeJS-based server so that the stream is displayed in a web browser?

Here is a similar example, but it uses tcpclientsink: https://tewarid.github.io/2011/04/26/stream-live-webm-video-to-browser-using-node.js-and-gstreamer.html

Upvotes: 9

Views: 30323

Answers (3)

Pawan Pillai

Reputation: 2065

Update: In the end, I was able to get GStreamer streaming to the browser using the NodeJS tutorial mentioned in the question. Here is proof-of-concept code that someone can use if needed (or in case the tutorial link disappears from the internet):

var express = require('express')
var http = require('http')
var net = require('net');
var child = require('child_process');
require('log-timestamp');   //adds timestamp in console.log()

var app = express();
app.use(express.static(__dirname + '/'));

var httpServer = http.createServer(app);
const port = 9001;  //change port number if required

var gstMuxer;   //handle to the spawned GStreamer child process; declared at module scope so /stop and /stream share it

//send the html page which holds the video tag
app.get('/', function (req, res) {
    res.sendFile(__dirname + '/index.html');
});

//stop the connection
app.post('/stop', function (req, res) {
    console.log('Connection closed using /stop endpoint.');

    if (gstMuxer != undefined) {
        gstMuxer.kill();    //killing GStreamer Pipeline
        console.log(`After gstkill in connection`);
    }
    gstMuxer = undefined;
    res.end();
});

//send the video stream
app.get('/stream', function (req, res) {

    res.writeHead(200, {
        'Content-Type': 'video/mp4',    //must match the container produced by the pipeline below (mp4mux)
    });

    var tcpServer = net.createServer(function (socket) {
        socket.on('data', function (data) {
            res.write(data);
        });
        socket.on('close', function (had_error) {
            console.log('Socket closed.');
            res.end();
        });
    });

    tcpServer.maxConnections = 1;

    tcpServer.listen(function () {
        console.log("Connection started.");
        if (gstMuxer == undefined) {
            console.log("inside gstMuxer == undefined");
            var cmd = 'gst-launch-1.0';
            var args = getGstPipelineArguments(this);
            gstMuxer = child.spawn(cmd, args);  //assign to the shared gstMuxer (no 'var' here, otherwise /stop cannot kill it)

            gstMuxer.stderr.on('data', onSpawnError);
            gstMuxer.on('exit', onSpawnExit);

        }
        else {
            console.log("New GST pipeline rejected because gstMuxer != undefined.");
        }
    });
});

httpServer.listen(port);
console.log(`Camera Stream App listening at http://localhost:${port}`)

process.on('uncaughtException', function (err) {
    console.log(err);
});

//functions
function onSpawnError(data) {
    console.log(data.toString());
}

function onSpawnExit(code) {
    if (code != null) {
        console.log('GStreamer error, exit code ' + code);
    }
}

function getGstPipelineArguments(tcpServer) {
    //Replace 'videotestsrc', 'pattern=ball' with your camera source in the GStreamer pipeline arguments below.
    //Note: every pipeline token must be passed as its own quoted string, as done below.
    var args =
        ['videotestsrc', 'pattern=ball',
            '!', 'video/x-raw,width=320,height=240,framerate=100/1',
            '!', 'vpuenc_h264', 'bitrate=2000',
            '!', 'mp4mux', 'fragment-duration=10',
            '!', 'tcpclientsink', 'host=localhost',
            'port=' + tcpServer.address().port];
    return args;
}

Here is the HTML code as well:

<!DOCTYPE html>
<html>

<head>
    <title>GStreamer with NodeJS Demo</title>
    <meta name="viewport" content="width=device-width, initial-scale=0.9">

    <style>
        html,
        body {
            overflow: hidden;
        }
    </style>
    
    <script>
        function buffer() {
            //Start playback as soon as possible to minimize latency at startup 
            var dStream = document.getElementById('vidStream');

            try {
                dStream.play();
            } catch (error) {
                console.log("Error in buffer() method.");
                console.log(error);
            }

        }
    </script>
</head>

<body onload="buffer();">
    <video id="vidStream" width="640" height="480" muted>
        <source src="/stream" type="video/mp4" />
        <source src="/stream" type="video/webm" />
        <source src="/stream" type="video/ogg" />
        <!-- fallback -->
        Your browser does not support the <code>video</code> element.
    </video>
</body>

</html>

Upvotes: 17

alexswerner

Reputation: 31

There is a nice integration test for GStreamer (and other applications such as browsers) available here: https://github.com/sipsorcery/webrtc-echoes/tree/master/gstreamer. It works with minimal quirks (at least in Chrome). It gets its data from this GStreamer pipeline:

  pipeline =
     gst_parse_launch ("webrtcbin bundle-policy=max-bundle name=sendonly "
       "videotestsrc is-live=true pattern=ball ! videoconvert ! queue ! vp8enc deadline=1 ! rtpvp8pay ! "
       "queue ! " RTP_CAPS_VP8 " ! sendonly. "
       , &error);

and opens a web server from which the browser can obtain the stream. You have to open index.html manually.
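
To give an idea of what the browser side of such a setup roughly looks like, here is a minimal sketch of the negotiation. It assumes the server accepts the SDP offer as JSON via an HTTP POST to an /offer endpoint and replies with the answer; the endpoint name, the element id, and which side creates the offer are my assumptions for illustration only, so check the linked repository for the actual signaling details:

// Hypothetical browser-side negotiation sketch: create an offer, POST it, apply the answer.
async function startReceiving() {
    const pc = new RTCPeerConnection();

    // Show the incoming video track in a <video id="remoteVideo" autoplay muted> element (assumed to exist).
    pc.ontrack = function (event) {
        document.getElementById('remoteVideo').srcObject = event.streams[0];
    };

    // We only want to receive video from the GStreamer pipeline.
    pc.addTransceiver('video', { direction: 'recvonly' });

    const offer = await pc.createOffer();
    await pc.setLocalDescription(offer);

    // Without a trickle-ICE channel, wait until ICE gathering is complete so the
    // offer that gets sent already contains all local candidates.
    await new Promise(function (resolve) {
        if (pc.iceGatheringState === 'complete') { resolve(); return; }
        pc.onicegatheringstatechange = function () {
            if (pc.iceGatheringState === 'complete') { resolve(); }
        };
    });

    // Assumed signaling endpoint; the real one may differ.
    const response = await fetch('/offer', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(pc.localDescription)
    });
    await pc.setRemoteDescription(await response.json());
}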

Upvotes: 3

RSATom

Reputation: 867

Unfortunately, it's not that simple. You need some way to interact with the browser in order to exchange the SDP offer/answer, and also to exchange ICE candidates.

You can look at an example here.
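
To make that point concrete, here is a minimal sketch of such a signaling channel in NodeJS. It assumes the `ws` npm package and a scheme where both the browser and the webrtcbin application connect to the same WebSocket server and exchange JSON messages carrying the SDP and the ICE candidates; the port and the message format are arbitrary choices for illustration, and the webrtcbin side has to be implemented separately to speak the same format:

// Minimal signaling relay sketch (assumption: using the 'ws' npm package).
// Both peers (the browser and the GStreamer/webrtcbin application) connect here,
// and every message (SDP offer/answer, ICE candidate) is forwarded to the other peer.
var WebSocket = require('ws');

var wss = new WebSocket.Server({ port: 8443 });   //arbitrary port for illustration

wss.on('connection', function (socket) {
    console.log('Peer connected, total peers: ' + wss.clients.size);

    socket.on('message', function (message) {
        //Relay the JSON message (e.g. {sdp: ...} or {ice: ...}) to every other connected peer.
        wss.clients.forEach(function (client) {
            if (client !== socket && client.readyState === WebSocket.OPEN) {
                client.send(message.toString());
            }
        });
    });

    socket.on('close', function () {
        console.log('Peer disconnected.');
    });
});

On the browser side, an RTCPeerConnection would push its local description and its onicecandidate events through this socket and apply whatever arrives from the other side with setRemoteDescription() / addIceCandidate().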

Upvotes: 3
