Reputation: 147
I've developed a video player based on Qt and QtGStreamer. It is used to play live streams (RTSP). I need to add the ability for the user to take snapshots while playing a live stream, without disturbing the video playback.
Here is the graph of the pipeline I've made:
uridecodebin --> videoflip --> tee --> queue --> autovideosink
      |                         |
      |                          --> queue --> videoconvert --> pngenc --> filesink
      |
       --> audioconvert --> autoaudiosink
I use the pad-added signal from uridecodebin to add and link my elements to the pipeline dynamically, depending on the received caps.
void Player::onPadAdded(const QGst::PadPtr &pad)
{
    QGst::CapsPtr caps = pad->currentCaps();
    if (caps->toString().startsWith("video/x-raw")) {
        qDebug("Received 'video/x-raw' caps");
        handleNewVideoPad(pad);
    }
    else if (caps->toString().startsWith("audio/x-raw")) {
        qDebug("Received 'audio/x-raw' caps");
        if (!m_audioEnabled) {
            qDebug("Audio is disabled in the player. Ignoring...");
            return;
        }
        handleNewAudioPad(pad);
    }
    else {
        qWarning("Unsupported caps, aborting...!");
        return;
    }
}
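The signal itself is connected roughly like this (a minimal sketch; the member name is assumed):

// Sketch only: hook the uridecodebin "pad-added" signal to the handler above.
QGlib::connect(m_player->uridecodebin, "pad-added",
               this, &Player::onPadAdded);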
[...]
void Player::handleNewVideoPad(QGst::PadPtr pad)
{
    m_player->videoTeeVideoSrcPad = m_player->videoTee->getRequestPad("src_%u");

    // Add video elements
    m_player->pipeline->add(m_player->videoFlip);
    m_player->pipeline->add(m_player->videoTee);
    m_player->pipeline->add(m_player->videoQueue);
    m_player->pipeline->add(m_player->videoSink);

    // Add snap elements
    m_player->pipeline->add(m_player->snapQueue);
    m_player->pipeline->add(m_player->snapConverter);
    m_player->pipeline->add(m_player->snapEncoder);
    m_player->pipeline->add(m_player->snapSink);

    // Link video elements
    m_player->videoFlip->link(m_player->videoTee);
    m_player->videoQueue->link(m_player->videoSink);

    // Link snap elements
    m_player->snapQueue->link(m_player->snapConverter);
    m_player->snapConverter->link(m_player->snapEncoder);
    m_player->snapEncoder->link(m_player->snapSink);

    // Lock snap elements
    m_player->snapQueue->setStateLocked(true);
    m_player->snapConverter->setStateLocked(true);
    m_player->snapEncoder->setStateLocked(true);
    m_player->snapSink->setStateLocked(true);

    m_player->videoFlip->setState(QGst::StatePlaying);
    m_player->videoTee->setState(QGst::StatePlaying);
    m_player->videoQueue->setState(QGst::StatePlaying);
    m_player->videoSink->setState(QGst::StatePlaying);

    // Link pads
    m_player->videoTeeVideoSrcPad->link(m_player->videoQueue->getStaticPad("sink"));
    pad->link(m_player->videoSinkPad);

    m_player->videoLinked = true;
}
The method to take a snapshot:
void Player::takeSnapshot()
{
    QDateTime dateTime = QDateTime::currentDateTime();
    QString snapLocation = QString("/%1/snap_%2.png").arg(m_snapDir).arg(dateTime.toString(Qt::ISODate));

    m_player->inSnapshotCapture = true;

    if (m_player->videoTeeSnapSrcPad) {
        m_player->videoTee->releaseRequestPad(m_player->videoTeeSnapSrcPad);
        m_player->videoTeeSnapSrcPad.clear();
    }
    m_player->videoTeeSnapSrcPad = m_player->videoTee->getRequestPad("src_%u");

    // Stop the snapshot branch
    m_player->snapQueue->setState(QGst::StateNull);
    m_player->snapConverter->setState(QGst::StateNull);
    m_player->snapEncoder->setState(QGst::StateNull);
    m_player->snapSink->setState(QGst::StateNull);

    // Link Tee src pad to snap queue sink pad
    m_player->videoTeeSnapSrcPad->link(m_player->snapQueue->getStaticPad("sink"));

    // Set the snapshot location property
    m_player->snapSink->setProperty("location", snapLocation);

    // Unlock snapshot branch
    m_player->snapQueue->setStateLocked(false);
    m_player->snapConverter->setStateLocked(false);
    m_player->snapEncoder->setStateLocked(false);
    m_player->snapSink->setStateLocked(false);

    m_player->videoTeeSnapSrcPad->setActive(true);

    // Sync snapshot branch state with parent
    m_player->snapQueue->syncStateWithParent();
    m_player->snapConverter->syncStateWithParent();
    m_player->snapEncoder->syncStateWithParent();
    m_player->snapSink->syncStateWithParent();
}
The bus message callback:
void Player::onBusMessage(const QGst::MessagePtr &message)
{
    QGst::ElementPtr source = message->source().staticCast<QGst::Element>();

    switch (message->type()) {
    case QGst::MessageEos: { // End of stream. We reached the end of the file.
        qDebug("Message End Of Stream");
        if (m_player->inSnapshotCapture) {
            blockSignals(true);
            pause();
            play();
            blockSignals(false);
            m_player->inSnapshotCapture = false;
        }
        else {
            m_eos = true;
            stop();
        }
        break;
    }
    [...]
}
The problem is:

- If I set the snapshot property of the pngenc element to true, I receive the EOS event, which stops my pipeline, so I need to restart it. That freezes the video playback for about half a second, which is not acceptable in my case.
- If I set the snapshot property of the pngenc element to false, I have no pipeline perturbation, but my PNG file keeps growing until I call the Player::takeSnapshot() method again.

Where am I wrong? Is there a better way to do it?
I've tried, unsuccessfully, to create a QGst::Bin element for my snapshot branch. What about a pad probe?
Thanks in advance
Upvotes: 2
Views: 5606
Reputation: 147
Thanks to @sebastian-droge's answer, I found the solution, using gst_video_convert_sample and the last-sample property of my video sink.
The solution I've implemented is:
void Player::takeSnapshot()
{
    QDateTime currentDate = QDateTime::currentDateTime();
    QString location = QString("%1/snap_%2.png").arg(QDir::homePath()).arg(currentDate.toString(Qt::ISODate));
    QImage snapShot;
    QImage::Format snapFormat;
    QGlib::Value val = m_videoSink->property("last-sample");
    GstSample *videoSample = (GstSample *)g_value_get_boxed(val);
    QGst::SamplePtr sample = QGst::SamplePtr::wrap(videoSample);
    QGst::SamplePtr convertedSample;
    QGst::BufferPtr buffer;
    QGst::CapsPtr caps = sample->caps();
    QGst::MapInfo mapInfo;
    GError *err = NULL;
    GstCaps *capsTo = NULL;
    const QGst::StructurePtr structure = caps->internalStructure(0);
    int width, height;

    width = structure.data()->value("width").get<int>();
    height = structure.data()->value("height").get<int>();
    qDebug() << "Sample caps:" << structure.data()->toString();

    /*
     * { QImage::Format_RGBX8888, GST_VIDEO_FORMAT_RGBx  },
     * { QImage::Format_RGBA8888, GST_VIDEO_FORMAT_RGBA  },
     * { QImage::Format_RGB888  , GST_VIDEO_FORMAT_RGB   },
     * { QImage::Format_RGB16   , GST_VIDEO_FORMAT_RGB16 }
     */
    snapFormat = QImage::Format_RGB888;
    capsTo = gst_caps_new_simple("video/x-raw",
                                 "format", G_TYPE_STRING, "RGB",
                                 "width", G_TYPE_INT, width,
                                 "height", G_TYPE_INT, height,
                                 NULL);

    convertedSample = QGst::SamplePtr::wrap(gst_video_convert_sample(videoSample, capsTo, GST_SECOND, &err));
    if (convertedSample.isNull()) {
        qWarning() << "gst_video_convert_sample Failed:" << err->message;
    }
    else {
        qDebug() << "Converted sample caps:" << convertedSample->caps()->toString();
        buffer = convertedSample->buffer();
        buffer->map(mapInfo, QGst::MapRead);
        snapShot = QImage((const uchar *)mapInfo.data(),
                          width,
                          height,
                          snapFormat);
        qDebug() << "Saving snap to" << location;
        snapShot.save(location);
        buffer->unmap(mapInfo);
    }

    val.clear();
    sample.clear();
    convertedSample.clear();
    buffer.clear();
    caps.clear();
    g_clear_error(&err);
    if (capsTo)
        gst_caps_unref(capsTo);
}
I've created a simple test application which implements this solution. The code is available on my GitHub.
Upvotes: 3
Reputation: 2143
You can read the last-sample property on any sink, e.g. your video sink. It contains a GstSample, whose buffer holds the very latest video frame. You can take that as a snapshot and convert it to PNG/JPEG/whatever, e.g. with gst_video_convert_sample() or its async variant.
See https://gstreamer.freedesktop.org/data/doc/gstreamer/head/gstreamer-libs/html/GstBaseSink.html#GstBaseSink--last-sample and https://gstreamer.freedesktop.org/data/doc/gstreamer/head/gst-plugins-base-libs/html/gst-plugins-base-libs-gstvideo.html#gst-video-convert-sample
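For reference, a minimal, untested sketch of that approach using the plain GStreamer C API (the sink variable and output path are assumptions):

#include <gst/gst.h>
#include <gst/video/video.h>

// Sketch: read the sink's last-sample and convert it to an encoded PNG
// with gst_video_convert_sample().
static gboolean save_snapshot(GstElement *videosink, const gchar *location)
{
    GstSample *sample = NULL;
    g_object_get(videosink, "last-sample", &sample, NULL); // GstBaseSink property
    if (!sample)
        return FALSE; // no frame has been rendered yet

    GError *error = NULL;
    GstCaps *png_caps = gst_caps_new_empty_simple("image/png");
    GstSample *converted = gst_video_convert_sample(sample, png_caps, GST_SECOND, &error);
    gst_sample_unref(sample);
    gst_caps_unref(png_caps);

    if (!converted) {
        g_printerr("Conversion failed: %s\n", error->message);
        g_clear_error(&error);
        return FALSE;
    }

    // The converted sample's buffer now holds the encoded PNG image.
    GstBuffer *buffer = gst_sample_get_buffer(converted);
    GstMapInfo map;
    gboolean ok = FALSE;
    if (gst_buffer_map(buffer, &map, GST_MAP_READ)) {
        ok = g_file_set_contents(location, (const gchar *)map.data, map.size, NULL);
        gst_buffer_unmap(buffer, &map);
    }
    gst_sample_unref(converted);
    return ok;
}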
Alternatively, you would have to shut down the filesink snapshot branch after the first frame, for example by using a pad probe to detect when the first frame has passed and then injecting an EOS event to prevent further PNG frames from being appended to the same file.
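A rough, untested sketch of that pad-probe variant (the tee's snapshot request pad name is assumed):

#include <gst/gst.h>

// Let exactly one buffer reach the snapshot branch, then push EOS into it so
// pngenc/filesink finalize the file, and starve the branch afterwards.
static GstPadProbeReturn
snapshot_probe(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
    static gint frames = 0; // sketch only; real code would keep this per-branch

    gint n = g_atomic_int_add(&frames, 1);
    if (n == 0)
        return GST_PAD_PROBE_OK;   // first frame goes through to pngenc/filesink

    if (n == 1) {
        // Second buffer: inject EOS downstream exactly once to close the file.
        GstPad *peer = gst_pad_get_peer(pad); // the snapshot queue's sink pad
        gst_pad_send_event(peer, gst_event_new_eos());
        gst_object_unref(peer);
    }
    return GST_PAD_PROBE_DROP;     // drop everything after the first frame
}

// Installed on the tee request pad feeding the snapshot branch:
// gst_pad_add_probe(tee_snap_src, GST_PAD_PROBE_TYPE_BUFFER,
//                   snapshot_probe, NULL, NULL);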
Upvotes: 3