naki
naki

Reputation: 77

Display ffmpeg frames on an OpenGL texture

I am using Dranger's tutorial01 (ffmpeg) to decode the video and get the AVFrames. I want to use OpenGL to display the video.

http://dranger.com/ffmpeg/tutorial01.html

The main function is as follows:

int main (int argc, char** argv) {
// opengl stuff
glutInit(&argc, argv);
glutInitDisplayMode(GLUT_RGBA);
glutInitWindowSize(800, 600);
glutCreateWindow("Hello GL");

glutReshapeFunc(changeViewport);
glutDisplayFunc(render);

GLenum err = glewInit();
if(GLEW_OK !=err){
    fprintf(stderr, "GLEW error");
    return 1;
}

glClear(GL_COLOR_BUFFER_BIT);


glEnable(GL_TEXTURE_2D);
GLuint texture;
glGenTextures(1, &texture); //Make room for our texture
glBindTexture(GL_TEXTURE_2D, texture);

//ffmpeg stuff

 AVFormatContext *pFormatCtx = NULL;
 int             i, videoStream;
 AVCodecContext  *pCodecCtx = NULL;
 AVCodec         *pCodec = NULL;
 AVFrame         *pFrame = NULL; 
 AVFrame         *pFrameRGB = NULL;
 AVPacket        packet;
 int             frameFinished;
 int             numBytes;
 uint8_t         *buffer = NULL;

 AVDictionary    *optionsDict = NULL;


 if(argc < 2) {
printf("Please provide a movie file\n");
return -1;
 }
 // Register all formats and codecs

av_register_all();

 // Open video file
 if(avformat_open_input(&pFormatCtx, argv[1], NULL, NULL)!=0)
   return -1; // Couldn't open file

// Retrieve stream information

if(avformat_find_stream_info(pFormatCtx, NULL)<0)
return -1; // Couldn't find stream information

// Dump information about file onto standard error
 av_dump_format(pFormatCtx, 0, argv[1], 0);

// Find the first video stream

videoStream=-1;
 for(i=0; i<pFormatCtx->nb_streams; i++)
if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO) {
  videoStream=i;
  break;
}
 if(videoStream==-1)
return -1; // Didn't find a video stream

 // Get a pointer to the codec context for the video stream
 pCodecCtx=pFormatCtx->streams[videoStream]->codec;

 // Find the decoder for the video stream
 pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
 if(pCodec==NULL) {
   fprintf(stderr, "Unsupported codec!\n");
   return -1; // Codec not found
 }
 // Open codec
if(avcodec_open2(pCodecCtx, pCodec, &optionsDict)<0)
   return -1; // Could not open codec

 // Allocate video frame
 pFrame=av_frame_alloc();

 // Allocate an AVFrame structure
pFrameRGB=av_frame_alloc();
if(pFrameRGB==NULL)
return -1;

 // Determine required buffer size and allocate buffer
 numBytes=avpicture_get_size(PIX_FMT_RGB24, pCodecCtx->width,
              pCodecCtx->height);
 buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));

struct SwsContext      *sws_ctx = sws_getContext(pCodecCtx->width,
           pCodecCtx->height, pCodecCtx->pix_fmt, 800,
           600, PIX_FMT_RGB24, SWS_BICUBIC, NULL,
           NULL, NULL);


 // Assign appropriate parts of buffer to image planes in pFrameRGB
 // Note that pFrameRGB is an AVFrame, but AVFrame is a superset
 // of AVPicture
 avpicture_fill((AVPicture *)pFrameRGB, buffer, PIX_FMT_RGB24,
     pCodecCtx->width, pCodecCtx->height);

 // Read frames and save first five frames to disk
 i=0;
 while(av_read_frame(pFormatCtx, &packet)>=0) {


// Is this a packet from the video stream?
if(packet.stream_index==videoStream) {
  // Decode video frame
  avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, 
           &packet);

  // Did we get a video frame?
  if(frameFinished) {
// Convert the image from its native format to RGB
  /*  sws_scale
    (
        sws_ctx,
        (uint8_t const * const *)pFrame->data,
        pFrame->linesize,
        0,
        pCodecCtx->height,
        pFrameRGB->data,
        pFrameRGB->linesize
    );
   */
sws_scale(sws_ctx, pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize);
  // additional opengl
    glBindTexture(GL_TEXTURE_2D, texture);

        //gluBuild2DMipmaps(GL_TEXTURE_2D, 3, pCodecCtx->width, pCodecCtx->height, GL_RGB, GL_UNSIGNED_INT, pFrameRGB->data[0]);
   // glTexSubImage2D(GL_TEXTURE_2D, 0, 0,0, 840, 460, GL_RGB, GL_UNSIGNED_BYTE, pFrameRGB->data[0]);

        glTexImage2D(GL_TEXTURE_2D,                //Always GL_TEXTURE_2D
            0,                            //0 for now
            GL_RGB,                       //Format OpenGL uses for image
            pCodecCtx->width, pCodecCtx->height,  //Width and height
            0,                            //The border of the image
            GL_RGB, //GL_RGB, because pixels are stored in RGB format
            GL_UNSIGNED_BYTE, //GL_UNSIGNED_BYTE, because pixels are stored
                            //as unsigned numbers
            pFrameRGB->data[0]);               //The actual pixel data
  // additional opengl end   

// Save the frame to disk
if(++i<=5)
  SaveFrame(pFrameRGB, pCodecCtx->width, pCodecCtx->height, 
        i);
  }
}

glColor3f(1,1,1);
glBindTexture(GL_TEXTURE_2D, texture);
glBegin(GL_QUADS);
    glTexCoord2f(0,1);
    glVertex3f(0,0,0);

    glTexCoord2f(1,1);
    glVertex3f(pCodecCtx->width,0,0);

    glTexCoord2f(1,0);
    glVertex3f(pCodecCtx->width, pCodecCtx->height,0);

    glTexCoord2f(0,0);
    glVertex3f(0,pCodecCtx->height,0);

glEnd();
// Free the packet that was allocated by av_read_frame
av_free_packet(&packet);
 }


  // Free the RGB image
 av_free(buffer);
 av_free(pFrameRGB);

 // Free the YUV frame
 av_free(pFrame);

 // Close the codec
 avcodec_close(pCodecCtx);

 // Close the video file
 avformat_close_input(&pFormatCtx);

 return 0;
}

Unfortunately i could not find my solution here

ffmpeg video to opengl texture

The program compiles but does not show any video on the texture. Just an empty OpenGL window is created.

Upvotes: 2

Views: 5016

Answers (2)

flownated
flownated

Reputation: 1

I was doing pretty much the same exact thing as you, and was running into an issue where what looked like an improperly decoded image appeared where my video should have been playing. I found this page while trying to solve that problem, and by referring to datenwolf's answer I was able to get the video playing by adding in the glTexSubImage2D call. However, when I tried to take the glTexImage2D call out — except for a single initial call at texture-creation time — my previously working video was replaced with a white rectangle. So, for some reason, it only works for me when calling glTexImage2D followed by glTexSubImage2D for every frame.

Upvotes: 0

datenwolf
datenwolf

Reputation: 162317

One problem is your use of a single buffered pixel format. Most modern operating systems use window composition which relies on double buffered pixel formats. Easy enough to change:

--- glutInitDisplayMode(GLUT_RGBA);
+++ glutInitDisplayMode(GLUT_RGBA | GLUT_DOUBLE);

At the end of the render function call glutSwapBuffers().

The other problem is that you never enter glutMainLoop so no events (like drawing requests from the OS) get processed. Also some parts of your code must go into the render function.

The frame decoding and texture upload must be placed in either an idle handler (you didn't create one) followed by a call of glutPostRedisplay() or directly in the render function:

 void render(void) {
 /* ... */

 --- while(av_read_frame(pFormatCtx, &packet)>=0) {
 +++ if(av_read_frame(pFormatCtx, &packet)>=0) {


// Is this a packet from the video stream?
if(packet.stream_index==videoStream) {
  // Decode video frame
  avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

  // Did we get a video frame?
  if(frameFinished) {
  // Convert the image from its native format to RGB
  /*  sws_scale
    (
        sws_ctx,
        (uint8_t const * const *)pFrame->data,
        pFrame->linesize,
        0,
        pCodecCtx->height,
        pFrameRGB->data,
        pFrameRGB->linesize
    );
   */
sws_scale(sws_ctx, pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize);
  // additional opengl
   glBindTexture(GL_TEXTURE_2D, texture);

At this point you should use glTexSubImage2D instead of glTexImage2D, because it's way faster. However you must create the texture with glTexImage2D first; do this once before calling glutMainLoop().

   glTexSubImage2D(GL_TEXTURE_2D, 0, 0,0, pCodecCtx->width, pCodecCtx->height, GL_RGB, GL_UNSIGNED_BYTE, pFrameRGB->data[0]);

/*
        glTexImage2D(GL_TEXTURE_2D,              //Always GL_TEXTURE_2D
            0,                                   //0 for now
            GL_RGB,                              //Format OpenGL uses for image
            pCodecCtx->width, pCodecCtx->height, //Width and height
            0,                                   //The border of the image
            GL_RGB,                              //GL_RGB, because pixels are stored 
                                                 //in RGB format
            GL_UNSIGNED_BYTE,                    //GL_UNSIGNED_BYTE, because pixels are 
                                                 //stored as unsigned numbers
            pFrameRGB->data[0]);                 //The actual pixel data
*/
  // additional opengl end   
}

glColor3f(1,1,1);
glBindTexture(GL_TEXTURE_2D, texture);
glBegin(GL_QUADS);
    glTexCoord2f(0,1);
    glVertex3f(0,0,0);

    glTexCoord2f(1,1);
    glVertex3f(pCodecCtx->width,0,0);

    glTexCoord2f(1,0);
    glVertex3f(pCodecCtx->width, pCodecCtx->height,0);

    glTexCoord2f(0,0);
    glVertex3f(0,pCodecCtx->height,0);

glEnd();
// Free the packet that was allocated by av_read_frame
av_free_packet(&packet);
 }

/* ... */

glutSwapBuffers();

}

Upvotes: 3

Related Questions