Reputation: 347
I have a function which rotates the camera around the player by yaw and pitch angles.
void Camera::updateVectors() {
    GLfloat radius = glm::length(center - position);
    position.x = cos(glm::radians(this->yaw)) * cos(glm::radians(this->pitch));
    position.y = sin(glm::radians(this->pitch));
    position.z = sin(glm::radians(this->yaw)) * cos(glm::radians(this->pitch));
    position *= radius;
    this->front = glm::normalize(center - position);
    this->right = glm::normalize(glm::cross(this->front, this->worldUp));
    this->up = glm::normalize(glm::cross(this->right, this->front));
    lookAt = glm::lookAt(this->position, this->position + this->front, this->up);
}
When I move the player, the camera should move with it, by adding a translation vector to both the center and the position of the camera:
void Camera::Transform(glm::vec3& t) {
    this->position += t;
    this->center += t;
}
Before moving the player, the camera rotation works fine, and the player movement also works fine. But once I try to rotate the camera after the player has moved, it starts to change position unexpectedly.
After some debugging I noticed that the radius calculated in the first line (the distance between the center and the position of the camera) comes out as something like 49.888889 or 50.000079, while from the initialized values it should be exactly 50.0. This very small difference makes the result completely wrong. So how can I deal with this float precision, or is there a bug in my code or calculations?
Edit: positioning the player depends on its yaw and pitch, and it updates the center of the camera:
GLfloat velocity = this->movementSpeed * deltaTime;
if (direction == FORWARD) {
    glm::vec3 t = glm::vec3(sin(glm::radians(yaw)), sin(glm::radians(pitch)), cos(glm::radians(yaw))) * velocity;
    matrix = glm::translate(matrix, t);
    for (GLuint i = 0; i < this->m_Entries.size(); i++) {
        this->m_Entries[i].setModelMatrix(matrix);
    }
    glm::vec3 f(matrix[2][0], matrix[2][1], matrix[2][2]);
    f *= velocity;
    scene->getDefCamera()->Transform(f);
}
if (direction == BACKWARD) {
    glm::vec3 t = glm::vec3(sin(glm::radians(yaw)), 0.0, cos(glm::radians(yaw))) * velocity;
    matrix = glm::translate(matrix, -t);
    for (GLuint i = 0; i < this->m_Entries.size(); i++) {
        this->m_Entries[i].setModelMatrix(matrix);
    }
    glm::vec3 f(matrix[2][0], matrix[2][1], matrix[2][2]);
    f *= velocity;
    f = -f;
    scene->getDefCamera()->Transform(f);
}
Upvotes: 1
Views: 460
Reputation: 347
The solution is simply to apply the transformations to the camera view matrix directly, instead of building it with the lookAt function.
First, initialize the camera:
void Camera::initCamera(glm::vec3& pos, glm::vec3& center, GLfloat yaw, GLfloat pitch) {
    view = glm::translate(view, center - pos);
    view = glm::rotate(view, glm::radians(yaw), glm::vec3(0.0, 1.0, 0.0));
    view = glm::rotate(view, glm::radians(pitch), glm::vec3(1.0, 0.0, 0.0));
    view = glm::translate(view, pos - center);
}
Then the rotation function:
void Camera::Rotate(GLfloat xoffset, GLfloat yoffset, glm::vec3& c) {
    xoffset *= this->mouseSensitivity;
    yoffset *= this->mouseSensitivity;
    view = glm::translate(view, c);   // c is the player position
    view = glm::rotate(view, glm::radians(xoffset), glm::vec3(0.0, 1.0, 0.0));
    view = glm::rotate(view, glm::radians(yoffset), glm::vec3(1.0, 0.0, 0.0));
    view = glm::translate(view, -c);
}
And the camera move function:
void Camera::Translate(glm::vec3& t) {
    view = glm::translate(view, -t);
}
And in the player class, when the player moves, it pushes the camera to move in its direction with this code:
void Mesh::Move(Move_Directions direction, GLfloat deltaTime) {
    GLfloat velocity = 50.0f * this->movementSpeed * deltaTime;
    if (direction == FORWARD) {
        glm::vec3 t = glm::vec3(sin(glm::radians(yaw)), sin(glm::radians(pitch)), cos(glm::radians(yaw))) * velocity;
        matrix = glm::translate(matrix, t);
        for (GLuint i = 0; i < this->m_Entries.size(); i++) {
            this->m_Entries[i].setModelMatrix(matrix);
        }
        glm::vec3 f(matrix[2][0], matrix[2][1], matrix[2][2]);
        f *= velocity;
        scene->getDefCamera()->Translate(f);
    }
    if (direction == BACKWARD) {
        glm::vec3 t = glm::vec3(sin(glm::radians(yaw)), 0.0, cos(glm::radians(yaw))) * velocity;
        matrix = glm::translate(matrix, -t);
        for (GLuint i = 0; i < this->m_Entries.size(); i++) {
            this->m_Entries[i].setModelMatrix(matrix);
        }
        glm::vec3 f(matrix[2][0], matrix[2][1], matrix[2][2]);
        f *= velocity;
        f = -f;
        scene->getDefCamera()->Translate(f);
    }
    if (direction == RIGHT) {
        matrix = glm::rotate(matrix, (GLfloat) -M_PI * deltaTime, glm::vec3(0.0, 1.0, 0.0));
        for (GLuint i = 0; i < this->m_Entries.size(); i++) {
            this->m_Entries[i].setModelMatrix(matrix);
        }
    }
    if (direction == LEFT) {
        matrix = glm::rotate(matrix, (GLfloat) M_PI * deltaTime, glm::vec3(0.0, 1.0, 0.0));
        for (GLuint i = 0; i < this->m_Entries.size(); i++) {
            this->m_Entries[i].setModelMatrix(matrix);
        }
    }
}
Thanks to everybody who helped.
Upvotes: 0
Reputation: 473437
The main problem here is that you're rotating based on a position that is moving. But rotations are based on the origin of the coordinate system. So when you move the position, the rotation is still being done relative to the origin.
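To illustrate the point (a minimal sketch, not taken from either answer's code): rotating around a pivot such as the player means wrapping the rotation between two translations, so the pivot temporarily sits at the origin. With GLM that could look like this, where rotateAroundPivot is just an illustrative helper name:

#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>

// Minimal sketch: apply a yaw rotation to `m`, pivoting about `pivot`
// instead of the coordinate-system origin. Reading the composition right
// to left, points are shifted so the pivot lies at the origin, rotated,
// then shifted back.
glm::mat4 rotateAroundPivot(const glm::mat4& m, GLfloat angle, const glm::vec3& pivot) {
    glm::mat4 r = glm::translate(m, pivot);
    r = glm::rotate(r, angle, glm::vec3(0.0f, 1.0f, 0.0f));
    r = glm::translate(r, -pivot);
    return r;
}

This is essentially the translate/rotate/translate pattern used in the accepted answer's Rotate function above.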
Instead of having Transform offset the position, it should only offset the center. Indeed, storing position makes no sense; you compute the camera's position based on its current center point, the radius, and the angles of rotation. The radius is a property that should be stored, not computed.
Upvotes: 3