Reputation: 5325
Today I thought I'd modify a small example of how to use gl-rs (OpenGL bindings for Rust) adding a colour array and drawing some points instead of a triangle. Trivial, I thought...
However, my COLOUR_DATA somehow appears to be used for the vertex positions.
Given

static VERTEX_DATA: [GLfloat, ..6] = [
    0.2, 0.0,
    0.0, 0.2,
    0.0, 0.0];

static COLOUR_DATA: [GLfloat, ..12] = [
    0.0, 0.5, 0.0, 1.0,
    0.5, 0.0, 0.0, 1.0,
    0.0, 0.0, 5.0, 1.0];
it is obvious that the points in the screenshot below are the first 6 values of COLOUR_DATA, not VERTEX_DATA. The 'problem' disappears when I comment out the BindBuffer and BufferData calls pertaining to my colour buffer object. Source code is below the screenshot (of course, removing BindBuffer/BufferData also means removing the EnableVertexAttribArray and VertexAttribPointer calls for the colour attribute in order to compile, but they have no impact on the situation at hand).
Why is this happening and how can I avoid it? Am I just missing something obvious? Or am I dealing with something deeper (e.g. a bug in gl-rs) here?
#![feature(globs)]

extern crate gl;
extern crate glfw;
extern crate native;

use gl::types::*;
use glfw::Context;
use std::mem;
use std::ptr;
use std::str;

// Vertex data
static VERTEX_DATA: [GLfloat, ..6] = [
    0.2, 0.0,
    0.0, 0.2,
    0.0, 0.0];

static COLOUR_DATA: [GLfloat, ..12] = [
    0.0, 0.5, 0.0, 1.0,
    0.5, 0.0, 0.0, 1.0,
    0.0, 0.0, 5.0, 1.0];

// Shader sources
static VS_SRC: &'static str =
   "#version 150\n\
    in vec2 position;\n\
    in vec4 vertexColor;\n\
    out vec4 fragmentColor;\n\
    void main() {\n\
        gl_Position = vec4(position, 0.0, 1.0);\n\
        fragmentColor = vertexColor;\n\
    }";

static FS_SRC: &'static str =
   "#version 150\n\
    in vec4 fragmentColor;\n\
    out vec4 out_color;\n\
    void main() {\n\
        out_color = vec4(1.0, 0.0, 0.0, 1.0);\n\
    }";
    //out_color = fragmentColor;\n\
    // the above line removed from shader string for debugging this

fn compile_shader(src: &str, ty: GLenum) -> GLuint {
    let shader;
    unsafe {
        shader = gl::CreateShader(ty);
        // Attempt to compile the shader
        src.with_c_str(|ptr| gl::ShaderSource(shader, 1, &ptr, ptr::null()));
        gl::CompileShader(shader);

        // Get the compile status
        let mut status = gl::FALSE as GLint;
        gl::GetShaderiv(shader, gl::COMPILE_STATUS, &mut status);

        // Fail on error
        if status != (gl::TRUE as GLint) {
            let mut len = 0;
            gl::GetShaderiv(shader, gl::INFO_LOG_LENGTH, &mut len);
            let mut buf = Vec::from_elem(len as uint - 1, 0u8); // subtract 1 to skip the trailing null character
            gl::GetShaderInfoLog(shader, len, ptr::null_mut(), buf.as_mut_ptr() as *mut GLchar);
            panic!("{}", str::from_utf8(buf.as_slice()).expect("ShaderInfoLog not valid utf8"));
        }
    }
    shader
}

fn link_program(vs: GLuint, fs: GLuint) -> GLuint {
    unsafe {
        let program = gl::CreateProgram();
        gl::AttachShader(program, vs);
        gl::AttachShader(program, fs);
        gl::LinkProgram(program);

        // Get the link status
        let mut status = gl::FALSE as GLint;
        gl::GetProgramiv(program, gl::LINK_STATUS, &mut status);

        // Fail on error
        if status != (gl::TRUE as GLint) {
            let mut len: GLint = 0;
            gl::GetProgramiv(program, gl::INFO_LOG_LENGTH, &mut len);
            let mut buf = Vec::from_elem(len as uint - 1, 0u8); // subtract 1 to skip the trailing null character
            gl::GetProgramInfoLog(program, len, ptr::null_mut(), buf.as_mut_ptr() as *mut GLchar);
            panic!("{}", str::from_utf8(buf.as_slice()).expect("ProgramInfoLog not valid utf8"));
        }
        program
    }
}

#[start]
fn start(argc: int, argv: *const *const u8) -> int {
    native::start(argc, argv, main)
}

fn main() {
    let glfw = glfw::init(glfw::FAIL_ON_ERRORS).unwrap();

    // Choose a GL profile that is compatible with OS X 10.7+
    glfw.window_hint(glfw::ContextVersion(3, 2));
    glfw.window_hint(glfw::OpenglForwardCompat(true));
    glfw.window_hint(glfw::OpenglProfile(glfw::OpenGlCoreProfile));

    let (window, _) = glfw.create_window(800, 600, "OpenGL", glfw::Windowed)
        .expect("Failed to create GLFW window.");

    // It is essential to make the context current before calling `gl::load_with`.
    window.make_current();

    // Load the OpenGL function pointers
    gl::load_with(|s| window.get_proc_address(s));

    // Create GLSL shaders
    let vs = compile_shader(VS_SRC, gl::VERTEX_SHADER);
    let fs = compile_shader(FS_SRC, gl::FRAGMENT_SHADER);
    let program = link_program(vs, fs);

    let mut vao = 0;
    let mut vbo = 0;
    let mut cbo = 0;

    unsafe {
        // Create Vertex Array Object
        gl::GenVertexArrays(1, &mut vao);
        gl::BindVertexArray(vao);

        // Set up vertex buffer object
        gl::GenBuffers(1, &mut vbo);
        gl::BindBuffer(gl::ARRAY_BUFFER, vbo);
        gl::BufferData(gl::ARRAY_BUFFER,
                       (VERTEX_DATA.len() * mem::size_of::<GLfloat>()) as GLsizeiptr,
                       mem::transmute(&VERTEX_DATA[0]),
                       gl::STATIC_DRAW);

        // Set up colour buffer object
        gl::GenBuffers(1, &mut cbo);
        gl::BindBuffer(gl::ARRAY_BUFFER, cbo);
        gl::BufferData(gl::ARRAY_BUFFER,
                       (COLOUR_DATA.len() * mem::size_of::<GLfloat>()) as GLsizeiptr,
                       mem::transmute(&COLOUR_DATA[0]),
                       gl::STATIC_DRAW);

        gl::UseProgram(program);
        // Bind fragment shader
        "out_color".with_c_str(|ptr| gl::BindFragDataLocation(program, 0, ptr));

        // Configure vertex buffer
        let pos_attr = "position".with_c_str(|ptr| gl::GetAttribLocation(program, ptr));
        println!("{}", pos_attr);
        gl::EnableVertexAttribArray(pos_attr as GLuint);
        gl::VertexAttribPointer(pos_attr as GLuint, 2, gl::FLOAT,
                                gl::FALSE as GLboolean, 0, ptr::null());

        gl::PointSize(10.0);

        // Configure colour buffer
        let col_attr = "vertexColor".with_c_str(|ptr| gl::GetAttribLocation(program, ptr));
        println!("{}", col_attr);
        gl::EnableVertexAttribArray(col_attr as GLuint);
        gl::VertexAttribPointer(col_attr as GLuint, 4, gl::FLOAT,
                                gl::FALSE as GLboolean, 0, ptr::null());
    }

    while !window.should_close() {
        glfw.poll_events();
        unsafe {
            gl::ClearColor(0.3, 0.3, 0.3, 1.0);
            gl::Clear(gl::COLOR_BUFFER_BIT);
            gl::DrawArrays(gl::POINTS, 0, 3);
        }
        window.swap_buffers();
    }

    unsafe {
        gl::DeleteProgram(program);
        gl::DeleteShader(fs);
        gl::DeleteShader(vs);
        gl::DeleteBuffers(1, &cbo);
        gl::DeleteBuffers(1, &vbo);
        gl::DeleteVertexArrays(1, &vao);
    }
}
Note: Code depends on gl-rs and glfw-rs. Running Windows 8.1 and Rust 0.13 nightly (40fb87d40). gl-rs does not appear to have anything like this in its issue tracker.
Upvotes: 1
Views: 313
Reputation: 17266
Because you need to have the right buffer bound (BindBuffer) before calling VertexAttribPointer.
// Configure vertex buffer
let pos_attr = "position".with_c_str(|ptr| gl::GetAttribLocation(program, ptr));
println!("{}", pos_attr);
gl::EnableVertexAttribArray(pos_attr as GLuint);
gl::BindBuffer(gl::ARRAY_BUFFER, vbo);
gl::VertexAttribPointer(pos_attr as GLuint, 2, gl::FLOAT,
                        gl::FALSE as GLboolean, 0, ptr::null());

gl::PointSize(10.0);

// Configure colour buffer
let col_attr = "vertexColor".with_c_str(|ptr| gl::GetAttribLocation(program, ptr));
println!("{}", col_attr);
gl::EnableVertexAttribArray(col_attr as GLuint);
gl::BindBuffer(gl::ARRAY_BUFFER, cbo);
gl::VertexAttribPointer(col_attr as GLuint, 4, gl::FLOAT,
                        gl::FALSE as GLboolean, 0, ptr::null());
With no buffer bound, the final argument to VertexAttribPointer is a pointer into system memory. With vertex buffer objects, it becomes a byte offset into the buffer currently bound to ARRAY_BUFFER. Each VertexAttribPointer call records whichever buffer is bound at that moment, so the bind has to be repeated per attribute. In your case, the colour buffer was the last to be bound during initialization and was therefore being used as the source for both vertex attributes.
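That offset parameter is also what lets several attributes share a single buffer. Here is a minimal sketch of that idea in the style of the code above, assuming a hypothetical interleaved_bo buffer laid out as [x, y, r, g, b, a] per vertex (the buffer name, layout, and GLvoid cast are illustrative assumptions, not part of the original code):

// Hypothetical interleaved layout: [x, y, r, g, b, a] per vertex,
// so one record is six GLfloats. Stride is the record size in bytes.
let stride = (6 * mem::size_of::<GLfloat>()) as GLsizei;
gl::BindBuffer(gl::ARRAY_BUFFER, interleaved_bo); // bind once, point twice
gl::VertexAttribPointer(pos_attr as GLuint, 2, gl::FLOAT,
                        gl::FALSE as GLboolean, stride,
                        ptr::null()); // position: byte offset 0
gl::VertexAttribPointer(col_attr as GLuint, 4, gl::FLOAT,
                        gl::FALSE as GLboolean, stride,
                        (2 * mem::size_of::<GLfloat>()) as *const GLvoid); // colour: after the two position floats

Binding per attribute (as in the corrected code above) or binding once for an interleaved buffer both work; what doesn't work is configuring both attributes while only the last-created buffer is bound.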
Upvotes: 6