user1586208

Reputation: 101

OpenGL Calls corrupt memory

When I call glGetIntegerv, or any other OpenGL function, and step through it in gdb, gdb skips a few lines upon reaching the call and resumes stepping later in the code.

Below is the code for loading OpenGL and creating the window. It is the only code that runs before glGetIntegerv, the first OpenGL call.

HWND window;
HDC dev_context;

HGLRC rend_context;
//Creating the Window
    char const *name = "Opengl Test";
    HINSTANCE inst = (HINSTANCE)GetModuleHandle(0);
    WNDCLASS windowClass;
    DWORD dwExStyle = WS_EX_APPWINDOW | WS_EX_WINDOWEDGE;
    windowClass.style = CS_HREDRAW | CS_VREDRAW | CS_OWNDC;
    windowClass.lpfnWndProc = (WNDPROC) WndProcedure;
    windowClass.cbClsExtra = 0;
    windowClass.cbWndExtra = 0;
    windowClass.hInstance = inst;
    windowClass.hIcon = LoadIcon(NULL, IDI_WINLOGO);
    windowClass.hCursor = LoadCursor(NULL, IDC_ARROW);
    windowClass.hbrBackground = NULL;
    windowClass.lpszMenuName = NULL;
    windowClass.lpszClassName = name;
    RegisterClass(&windowClass);

    window = CreateWindowEx(dwExStyle, name, name, WS_OVERLAPPEDWINDOW,
      CW_USEDEFAULT, 0, 0, 0, NULL, NULL, inst, NULL);

//Context
    dev_context = GetDC( window );
    std::cout << dev_context << std::endl;
    //Get pixel format
    PIXELFORMATDESCRIPTOR pfd;
    memset(&pfd, 0, sizeof(PIXELFORMATDESCRIPTOR));
    pfd.nSize  = sizeof(PIXELFORMATDESCRIPTOR);
    pfd.nVersion   = 1;
    pfd.dwFlags    = PFD_DOUBLEBUFFER | PFD_SUPPORT_OPENGL | PFD_DRAW_TO_WINDOW;
    pfd.iPixelType = PFD_TYPE_RGBA;
    pfd.cColorBits = 32;
    pfd.cDepthBits = 32;
    pfd.iLayerType = PFD_MAIN_PLANE;

    int nPixelFormat = ChoosePixelFormat(dev_context, &pfd);
    SetPixelFormat( dev_context, nPixelFormat, &pfd );

    HGLRC temp_rend_context = wglCreateContext( dev_context );
    wglMakeCurrent( dev_context, temp_rend_context );

    typedef HGLRC (WINAPI *PFNWGLCREATECONTEXTATTRIBSARBPROC)(HDC hDC, HGLRC hShareContext, const int *attribList);
    PFNWGLCREATECONTEXTATTRIBSARBPROC wglCreateContextAttribsARB =
        (PFNWGLCREATECONTEXTATTRIBSARBPROC) gl3wGetProcAddress("wglCreateContextAttribsARB");

    const int attribs[] = {
        WGL_CONTEXT_MAJOR_VERSION_ARB, 3,
        WGL_CONTEXT_MINOR_VERSION_ARB, 0,
        WGL_CONTEXT_FLAGS_ARB, /*WGL_CONTEXT_FORWARD_COMPATIBLE_BIT_ARB*/ 0,
        0
    };

    rend_context = wglCreateContextAttribsARB(dev_context, 0, attribs);
    wglMakeCurrent(0,0);
    wglDeleteContext(temp_rend_context);
    wglMakeCurrent(dev_context, rend_context);

    gl3wInit();

    int glVersion[2] = {-1, -1};
    glGetIntegerv(GL_MAJOR_VERSION, &glVersion[0]); //First gl call
    glGetIntegerv(GL_MINOR_VERSION, &glVersion[1]);

Below is my WndProcedure function:

static LRESULT CALLBACK WndProcedure(HWND hWnd, UINT Msg, WPARAM wParam, LPARAM lParam){
    switch(Msg){
        case WM_DESTROY:
            PostQuitMessage(WM_QUIT);
            return 0;
        default:
            return DefWindowProc(hWnd, Msg, wParam, lParam);
    }
}

I am using the gl3w library to load the OpenGL functions.
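For reference, here is a minimal sketch of the check I can add before the first GL call (assuming gl3w's usual API, where gl3wInit() returns non-zero on failure and gl3wIsSupported() reports whether the requested version was loaded; the helper name and the exact header path are just for illustration and vary between gl3w versions):

    #include <GL/gl3w.h>   // header path differs between gl3w versions (sometimes <GL3/gl3w.h>)
    #include <cstdio>

    // Call after wglMakeCurrent(dev_context, rend_context) and before any gl* call.
    bool check_gl_setup()
    {
        if (gl3wInit() != 0) {            // non-zero means the loader failed
            std::fprintf(stderr, "gl3wInit failed\n");
            return false;
        }
        if (!gl3wIsSupported(3, 0)) {     // the version requested via the attrib list
            std::fprintf(stderr, "OpenGL 3.0 not available\n");
            return false;
        }
        int glVersion[2] = {-1, -1};
        glGetIntegerv(GL_MAJOR_VERSION, &glVersion[0]);
        glGetIntegerv(GL_MINOR_VERSION, &glVersion[1]);
        std::printf("OpenGL %d.%d\n", glVersion[0], glVersion[1]);
        return true;
    }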

Upvotes: 0

Views: 978

Answers (2)

zezba9000

Reputation: 3383

Here is the code I use to create a GL context and use GL3 features. Now I know this is C#, but you get the picture. There is no reason to create two GL contexts to use OpenGL 3... unless I'm totally missing what you're saying.

void Init(IntPtr handle, bool fullscreen, bool vSync)
{
    this.handle = handle;
    #if WINDOWS
    // Get DC
    dc = WGL.GetDC(handle);
    WGL.SwapBuffers(dc);

    // Set BackBuffer format
    WGL.PIXELFORMATDESCRIPTOR pfd = new WGL.PIXELFORMATDESCRIPTOR();
    WGL.ZeroPixelDescriptor(ref pfd);
    pfd.nVersion   = 1;
    pfd.dwFlags    = WGL.PFD_DRAW_TO_WINDOW | WGL.PFD_SUPPORT_OPENGL | WGL.PFD_DOUBLEBUFFER;
    pfd.iPixelType = (byte)WGL.PFD_TYPE_RGBA;
    pfd.cColorBits = 24;
    pfd.cAlphaBits = 8;
    pfd.cDepthBits = 16;
    pfd.iLayerType = (byte)WGL.PFD_MAIN_PLANE;
    unsafe { pfd.nSize = (ushort)sizeof(WGL.PIXELFORMATDESCRIPTOR); }

    int pixelFormatIndex = WGL.ChoosePixelFormat(dc, ref pfd);
    if (pixelFormatIndex == 0) Debug.ThrowError("Video", "ChoosePixelFormat failed");
    if (WGL.SetPixelFormat(dc, pixelFormatIndex, ref pfd) == 0) Debug.ThrowError("Video", "Failed to set PixelFormat");

    ctx = WGL.CreateContext(dc);
    if (ctx == IntPtr.Zero) Debug.ThrowError("Video", "Failed to create GL context");
    if (WGL.MakeCurrent(dc, ctx) == 0) Debug.ThrowError("Video", "Failed to make GL context current");

    WGL.Init(); // << loads 'wglSwapIntervalEXT'
    WGL.SwapInterval(vSync ? 1 : 0);
    #endif
}

And to load GL extensions:

public const string DLL = "opengl32";
[DllImport(DLL, EntryPoint = "wglGetProcAddress", ExactSpelling = true)]
private static extern IntPtr getProcAddress(string procedureName);

Upvotes: 0

dthorpe

Reputation: 36072

It sounds like you have a mismatch either in calling convention or parameter list, or both, which is corrupting the stack enough to screw up the call return address.

Double check that the opengl .h file(s) you're compiling with match the version of the opengl .dll(s) that you're calling. Double check that any conditional defines required for Windows are defined and enabled for the .h file. The norm for calling conventions in Win API calls is STDCALL. If you see no calling convention on the gl functions in your .h file, be suspicious.
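For reference, here is roughly what to look for (this is an assumption about a typical Windows <GL/gl.h>, not necessarily the exact headers in your build):

    #include <windows.h>   // defines APIENTRY (__stdcall) and WINGDIAPI
    #include <GL/gl.h>

    // A typical prototype from the system <GL/gl.h> carries an explicit
    // calling convention, e.g.:
    //
    //     WINGDIAPI void APIENTRY glGetIntegerv(GLenum pname, GLint *params);
    //
    // If the header you compile against omits APIENTRY, or a define strips it,
    // the compiler emits a cdecl call into what is really an stdcall export.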

I vaguely recall that STDCALL and cdecl calling conventions push the parameters onto the stack in the same order (right to left) but differ in who is responsible for adjusting the stack pointer after the call. I believe STDCALL expects the callee to pop the stack, whereas with cdecl the caller restores the stack pointer after the call returns.

What this means is that if the caller makes a cdecl call but the callee is actually STDCALL, the parameters will make it into the call just fine, but all hell breaks loose on the return. Depending on which way the mismatch runs, either the stack pointer won't be adjusted at all or it will be over-adjusted (adjusted twice).
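To make the failure mode concrete, here is a hedged illustration with hypothetical typedef names, assuming a 32-bit x86 build where the default convention is cdecl:

    #include <windows.h>

    // Correct: WINAPI (__stdcall) matches the convention the driver exports.
    typedef void (WINAPI *PFN_GLGETINTEGERV_GOOD)(unsigned int pname, int *params);

    // Mismatched: no WINAPI, so on 32-bit x86 this defaults to __cdecl.
    // The stdcall callee pops its 8 bytes of arguments with `ret 8`, then the
    // cdecl caller adjusts the stack again after the call returns. The stack
    // pointer ends up over-adjusted by the size of the arguments, so the
    // caller's locals, saved registers, and eventually its own return address
    // are read from the wrong place -- which a debugger shows as the program
    // "skipping" ahead.
    typedef void (*PFN_GLGETINTEGERV_BAD)(unsigned int pname, int *params);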

Upvotes: 1
