Reputation: 88
I started making a game in C using the SDL2 library. I just got the classic rect moving on a blank screen, and used the following to calculate delta_time:
Uint64 now = SDL_GetPerformanceCounter();
Uint64 last = 0;
double delta_time = 0;

while (running) {
    last = now;
    now = SDL_GetPerformanceCounter();
    delta_time = (double)(now - last) / SDL_GetPerformanceFrequency();
}
Is this approach correct? The fps fluctuation I am getting is insane:
385.733269
1021.394121
3567.873440
364.403997
276.115181
700.447376
393.435451
571.590903
494.964479
532.557362
1445.981040
593.772396
1938.300033
468.804132
1008.120410
195.745012
Here is the relevant code:
void renderContext(GraphicContext* to_render, const Entity* player) {
    if (to_render->renderer == NULL) {
        return;
    }
    SDL_SetRenderDrawColor(to_render->renderer, 255, 255, 255, 255);
    SDL_RenderClear(to_render->renderer);
    SDL_SetRenderDrawColor(to_render->renderer, 255, 255, 0, 255);
    SDL_RenderFillRect(to_render->renderer, (const SDL_Rect*)&player->dimensions);
    SDL_RenderPresent(to_render->renderer);
}

void quitSDL(GraphicContext* m_context) {
    destroyGraphicContext(m_context);
    SDL_Quit();
}

static void handleKeyboard(Entity* player, double delta_time) {
    const Uint8* keys = SDL_GetKeyboardState(NULL);
    double delta_dist = calculateDeltaDist(player, delta_time);
    printf("%lf\n", 1000.0 / delta_dist);
    fflush(stdout);
    if (keys[SDL_SCANCODE_A] || keys[SDL_SCANCODE_LEFT]) {
        player->dimensions.x -= delta_dist;
    }
    if (keys[SDL_SCANCODE_D] || keys[SDL_SCANCODE_RIGHT]) {
        player->dimensions.x += delta_dist;
    }
    if (keys[SDL_SCANCODE_W] || keys[SDL_SCANCODE_UP]) {
        player->dimensions.y -= delta_dist;
    }
    if (keys[SDL_SCANCODE_S] || keys[SDL_SCANCODE_DOWN]) {
        player->dimensions.y += delta_dist;
    }
}

static void handleMouse(SDL_Event* event) {
}

int handleEvents(Entity* player, double delta_time) {
    SDL_Event event;
    while (SDL_PollEvent(&event)) {
        if (event.type == SDL_QUIT) {
            return 1;
        }
    }
    //! Usage of this in this way can be a performance bottleneck.
    handleKeyboard(player, delta_time);
    return 0;
}

int main(int argc, char** argv) {
    GraphicContext m_context = createGraphicContext(SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, 1200, 800,
                                                    "My Game", SDL_WINDOW_ALLOW_HIGHDPI | SDL_WINDOW_ALWAYS_ON_TOP);
    int running = 1;
    Entity player = createEntity(100, 100, 32, 32, 100, 1000);
    Uint64 now = SDL_GetPerformanceCounter();
    Uint64 last = 0;
    double delta_time = 0;
    if (m_context.win == NULL) {
        puts("Unable to generate window. Exiting.");
        exit(0);
    }
    while (running) {
        last = now;
        now = SDL_GetPerformanceCounter();
        delta_time = (double)(now - last) / SDL_GetPerformanceFrequency();
        if (handleEvents(&player, delta_time)) {
            break;
        }
        renderContext(&m_context, &player);
    }
    quitSDL(&m_context);
    return 0;
}
Currently using SDL_RENDERER_PRESENTVSYNC as a render flag fixes the issue and I get a consistent 60 fps.
I use gcc -Wall -Wextra -Ofast src/main.c -lSDL2 -o build/Game to compile.
Is there any better way to do this, or am I doing something wrong? Anything will help.
Upvotes: 0
Views: 55
Reputation: 67447
Currently using SDL_RENDERER_PRESENTVSYNC as a render flag fixes the issue and I get a consistent 60 fps.
VSync synchronizes your loop frequency with your monitor's refresh rate. On your machine that's 60 fps; on mine it would be 280, and on others it could be as low as 15. VSync is a good option to give to the user, but not something to base your code on.
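For example, you can make the flag a user setting instead of hard-coding it (a minimal sketch; createRenderer and use_vsync are illustrative names, not from your code):

/* Sketch: let the user decide whether VSync is on.
   use_vsync would come from a config file or a command-line switch. */
SDL_Renderer* createRenderer(SDL_Window* window, int use_vsync) {
    Uint32 flags = SDL_RENDERER_ACCELERATED;
    if (use_vsync) {
        flags |= SDL_RENDERER_PRESENTVSYNC;  /* cap presents to the monitor's refresh rate */
    }
    return SDL_CreateRenderer(window, -1, flags);  /* -1 = first suitable driver */
}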
delta_time = (double)(now - last) / SDL_GetPerformanceFrequency();
If that's how you measure fps (1.0 / delta_time), that's far too unstable to use. Instead count the number of frames in a unit of time (I generally count them over 2 seconds) and only update your fps variable when the timeout triggers, resetting your frame counter back to 0.
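Something along these lines (a sketch only; the 2-second window and variable names are illustrative):

int running = 1;
Uint64 freq = SDL_GetPerformanceFrequency();
Uint64 window_start = SDL_GetPerformanceCounter();
Uint64 frame_count = 0;
double fps = 0.0;

while (running) {
    /* ... handle events, update, render one frame ... */
    frame_count++;

    Uint64 now = SDL_GetPerformanceCounter();
    double elapsed = (double)(now - window_start) / freq;
    if (elapsed >= 2.0) {               /* timeout triggered */
        fps = frame_count / elapsed;    /* average over the whole window */
        printf("%f fps\n", fps);
        frame_count = 0;                /* reset the counter */
        window_start = now;             /* start the next window */
    }
}

Averaging over a couple of seconds smooths out the per-frame jitter that makes the instantaneous 1.0 / delta_time value jump around.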
Upvotes: 2