So I've stumbled upon something rather peculiar. After going through a project and fixing memory leaks (which were causing 100% CPU spikes once you closed the program), I noticed an incredibly short 100% GPU usage spike that also happens once you close the program.
Here's the minimal code I ran to get this:
#undef UNICODE
#include <Windows.h>
#define GLEW_STATIC
#include "include/GL/glew.h"
#include <string>
#include <chrono>
#include <thread>
#include <stdexcept>
//
// window
//
HWND window_hwnd = { };
WNDCLASS window_wndclass = { };
std::string wndclass_name = "class";
std::string window_name = "class";
LRESULT CALLBACK WindowProc(
_In_ HWND hWnd,
_In_ UINT uMsg,
_In_ WPARAM wParam,
_In_ LPARAM lParam
)
{
switch (
uMsg
)
{
default:
return DefWindowProc(
hWnd,
uMsg,
wParam,
lParam
);
}
return 0;
}
void window_create_class()
{
window_wndclass = { };
window_wndclass.lpfnWndProc = WindowProc;
window_wndclass.lpszClassName = wndclass_name.c_str();
window_wndclass.cbClsExtra = 0;
window_wndclass.cbWndExtra = 0;
window_wndclass.hInstance = 0;
RegisterClass(
&window_wndclass
);
}
void window_create()
{
window_create_class();
window_hwnd = CreateWindow(
wndclass_name.c_str(),
window_name.c_str(),
WS_OVERLAPPED | WS_CAPTION | WS_SYSMENU | WS_THICKFRAME | WS_MINIMIZEBOX | WS_MAXIMIZEBOX,
CW_USEDEFAULT,
CW_USEDEFAULT,
CW_USEDEFAULT,
CW_USEDEFAULT,
0,
0,
0,
0
);
}
int window_loop()
{
MSG msg;
BOOL get_message;
// assign the return value first, then compare, so get_message can actually be -1 on error
while (
(
get_message = GetMessage(
&msg,
window_hwnd,
0,
0
)
) != 0
)
{
if (
get_message == -1
)
{
// error handling
break;
}
else
{
TranslateMessage(
&msg
);
DispatchMessage(
&msg
);
}
}
return get_message;
}
//
// opengl
//
HDC gl_hdc = { };
HGLRC gl_hglrc = { };
PIXELFORMATDESCRIPTOR gl_pixel_format_descriptor = {
sizeof(
PIXELFORMATDESCRIPTOR
),
1,
// Flags
PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER,
// The kind of framebuffer. RGBA or palette.
PFD_TYPE_RGBA,
// Colordepth of the framebuffer.
32,
0, 0, 0, 0, 0, 0,
0,
0,
0,
0, 0, 0, 0,
// Number of bits for the depthbuffer
24,
// Number of bits for the stencilbuffer
8,
// Number of Aux buffers in the framebuffer.
0,
PFD_MAIN_PLANE,
0,
0, 0, 0
};
int gl_pixel_format = -1;
void gl_pixel_format_configure()
{
gl_pixel_format = ChoosePixelFormat(
gl_hdc,
&gl_pixel_format_descriptor
);
SetPixelFormat(
gl_hdc,
gl_pixel_format,
&gl_pixel_format_descriptor
);
}
void gl_create()
{
gl_pixel_format_configure();
gl_hglrc = wglCreateContext(
gl_hdc
);
wglMakeCurrent(
gl_hdc,
gl_hglrc
);
GLenum state = glewInit();
if (
state != GLEW_OK
)
{
throw std::runtime_error("glewInit failed");
}
}
void gl_draw()
{
RECT client_rect = { };
GetClientRect(
window_hwnd,
&client_rect
);
int window_width = client_rect.right - client_rect.left;
int window_height = client_rect.bottom - client_rect.top;
glViewport(
0,
0,
window_width,
window_height
);
glClearColor(
0.0f,
0.0f,
0.0f,
1.0f
);
glClear(
GL_COLOR_BUFFER_BIT
);
wglSwapLayerBuffers(
gl_hdc,
WGL_SWAP_MAIN_PLANE
);
}
int main()
{
window_create();
gl_hdc = GetDC(
window_hwnd
);
gl_create();
ShowWindow(
window_hwnd,
SW_SHOW
);
// simulate drawing frames
auto now = std::chrono::steady_clock::now();
auto interval = std::chrono::duration<
double,
std::chrono::seconds::period
>(
1.0 / 60.0
);
auto next = now + interval;
for (
int i = 0;
i < 400;
i++
)
{
gl_draw();
std::this_thread::sleep_until(
next
);
next += interval;
}
return window_loop();
}
This is on Windows, using the GLEW library, compiled with Visual Studio's compiler with Release optimizations. I tested it on my laptop too, which has a completely different GPU (Intel integrated graphics, actually), with the same effect.
This does not happen when wglSwapLayerBuffers is not called.
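(Side note: since only the main plane is ever swapped, I assume plain SwapBuffers would be an equivalent way to present; this is just a sketch of the variant worth testing, reusing the gl_hdc global from above, not something I've confirmed changes the spike.)
// hypothetical alternative to the wglSwapLayerBuffers call at the end of gl_draw():
// swap the main plane of gl_hdc's pixel format through GDI instead of the WGL layer call
void gl_swap()
{
    SwapBuffers(gl_hdc);
}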
Notice that it doesn't even draw anything, doesn't create any OpenGL objects, and no pointers are used anywhere. Which makes me wonder: am I using Windows' OpenGL context wrong? Or is this some peculiarity of GLEW? (By the way, I assume the number of drawn frames needed to trigger this might differ from GPU to GPU. 400 isn't the exact threshold, and I didn't bother pinning it down exactly, but it doesn't happen with 300, for example. Setting it to something like 1000+ should definitely work, and that's not even an unrealistic amount for that matter.)
To be honest, this isn't exactly an issue (after all, it happens after main returns), but it's still weird, and I'd like to avoid it. So why does it happen, or at the very least, how can I fix it?
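One thing I would try, to show what I mean by avoiding it (this is purely an assumption on my part, not a confirmed fix): tear everything down explicitly before main returns instead of leaving it to process shutdown, roughly like this, using the globals from the snippet above:
// hypothetical explicit cleanup, called after window_loop() returns
void gl_destroy()
{
    // detach the rendering context from this thread
    wglMakeCurrent(0, 0);
    // destroy the context created in gl_create()
    wglDeleteContext(gl_hglrc);
    // give back the device context obtained with GetDC()
    ReleaseDC(window_hwnd, gl_hdc);
    // destroy the window and unregister its class
    DestroyWindow(window_hwnd);
    UnregisterClass(wndclass_name.c_str(), 0);
}
so that main would end with something like: int result = window_loop(); gl_destroy(); return result;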
EDIT #1: I dabbled around with the OpenGL context creation a little more, creating a proper context as described on the official OpenGL Wiki, instead of the simple one as in the example I provided, with no success.
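For reference, by a "proper context" I mean roughly the wglCreateContextAttribsARB route. A sketch of what that looks like on top of the code above; it assumes #include "include/GL/wglew.h" next to glew.h, and that glewInit() has already run on the legacy context so the WGL extension entry points are loaded (the version/profile attributes are only an example):
// hypothetical upgrade from the legacy context to an attribute-based one,
// called right after gl_create() has succeeded
void gl_upgrade_context()
{
    if (!WGLEW_ARB_create_context)
    {
        // the driver does not expose WGL_ARB_create_context; keep the legacy context
        return;
    }
    const int attribs[] = {
        WGL_CONTEXT_MAJOR_VERSION_ARB, 3,
        WGL_CONTEXT_MINOR_VERSION_ARB, 3,
        WGL_CONTEXT_PROFILE_MASK_ARB, WGL_CONTEXT_CORE_PROFILE_BIT_ARB,
        0
    };
    HGLRC new_hglrc = wglCreateContextAttribsARB(gl_hdc, 0, attribs);
    if (new_hglrc != 0)
    {
        // switch to the new context and delete the legacy one
        wglMakeCurrent(gl_hdc, new_hglrc);
        wglDeleteContext(gl_hglrc);
        gl_hglrc = new_hglrc;
    }
}
Doing it this way made no difference to the spike.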
Curiosity also got the better of me, which made me go through my Steam library and just start up about a dozen games, waiting a few seconds in the main menu, letting them draw a few hundred, if not a few thousand frames.
The majority of games had the same 100% GPU spike once you quit the game. Notably, however, some AAA titles (namely DOOM Eternal and GTA V) did not exhibit this behavior.
I guess, for one, this further drives home the point that this isn't really something to be concerned about. Yet this also proves that it is avoidable, though how exactly to achieve this I still don't know.
I would think the engines running those AAA games have their own OpenGL wrappers, and maybe even their own lower-level OpenGL-to-OS interfaces, avoiding WGL (which is where the issue seems to stem from) altogether.
But that's just a guess, I definitely haven't found a concrete answer.
Upvotes: 2
Views: 1963
From what I can tell, the GPU will spike to 100% for a split second immediately after closing any program that uses it, including browsers and compilers. I guess this is just the effect of the GPU being cleaned up once it's done with a program.
As for why it's happening with your program as compared to other Win32 programs, I'm pretty sure it's just because you're using GLEW (I might be wrong, though).
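One way to check that guess: the snippet only calls glViewport, glClearColor and glClear, which are OpenGL 1.1 functions exported directly by opengl32.lib, so GLEW can be dropped from the repro entirely. A sketch of gl_create() without GLEW (just a test idea, assuming <gl/GL.h> is included after <Windows.h> instead of glew.h):
// hypothetical GLEW-free version of gl_create(); only opengl32.lib is needed
void gl_create_no_glew()
{
    gl_pixel_format_configure();
    gl_hglrc = wglCreateContext(gl_hdc);
    wglMakeCurrent(gl_hdc, gl_hglrc);
    // no glewInit() here; if the spike still appears, GLEW is not the cause
}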
Upvotes: 1