Sneha
Sneha

Reputation:

Creating buttons and icons in OpenGL

I'm trying to make an editor using OpenGL similar to MS Paint, with a panel and all. Where can I find material on how to create buttons and icons?

P.S - I did find out about GLUI that provides such entities, but I am not allowed to use anything other than GLUT.

Source code, if provided, will be very helpful.

Upvotes: 16

Views: 38291

Answers (2)

oriyon
oriyon

Reputation: 95

If you are programming in C only, what I can recommend is nuklear: a free, open-source GUI library that is surprisingly flexible to use, and you can even create your own custom widgets.

Initializing the library context/loading a single font & baking it into a texture:

#include <nuklear.h>

/* size of the font */
#define GUI_FONT_SIZE           16

/* ... */

/* Module-wide nuklear state shared by the init and draw code below. */
struct nk_context gui_context = {0};                /* nuklear UI context */
struct nk_font_atlas gui_font_atlas = {0};          /* font baker / atlas state */
struct nk_font *gui_font = NULL;                    /* baked default font */
struct nk_draw_null_texture gui_null_texture = {0}; /* texture handle for untextured draws */
GLuint gui_font_texture_id = 0;                     /* GL texture holding the baked atlas */

/* ... */

/*
 * Initializes the GUI: bakes a TTF font into a single RGBA32 atlas image,
 * uploads that image as an OpenGL texture, then creates the nuklear context
 * using the baked font.
 *
 * Returns 1 on success; error paths are left as placeholders in this excerpt.
 * (Fix vs. original: removed the unused local `GLint texwidth`.)
 */
int initialize_gui(void)
{
    struct nk_font_config cfg = {0};
    const void *pixels = NULL;
    int w = 0;
    int h = 0;

    /* initialize the font baker */
    nk_font_atlas_init_default(&gui_font_atlas);
    nk_font_atlas_begin(&gui_font_atlas);

    /* set font baker's parameters */
    memset(&cfg, 0, sizeof(cfg));
    cfg.size = (float)GUI_FONT_SIZE;
    cfg.merge_mode = nk_false;      /* bake as its own font, don't merge */
    cfg.pixel_snap = nk_false;
    cfg.oversample_h = 4;           /* 4x oversampling for crisper glyphs */
    cfg.oversample_v = 4;
    cfg.range = nk_font_default_glyph_ranges();
    cfg.coord_type = NK_COORD_UV;   /* normalized 0..1 texture coordinates */

    /* add fonts to bake */
    gui_font = nk_font_atlas_add_from_file(&gui_font_atlas,
         "font/DroidSans.ttf", (float)GUI_FONT_SIZE, &cfg);
    if (!gui_font) {
        /* error handling */
    }

    /* bake all the fonts into a single image; w/h receive the atlas size */
    pixels = nk_font_atlas_bake(&gui_font_atlas, &w, &h,
                        NK_FONT_ATLAS_RGBA32);
    if (!pixels) {
        /* error handling */
    }

    /* create a texture from the baked image and upload it to OpenGL */
    glGenTextures(1, &gui_font_texture_id);
    glBindTexture(GL_TEXTURE_2D, gui_font_texture_id);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, w, h, 0, GL_RGBA,
                    GL_UNSIGNED_BYTE, pixels);

    /* set texture filtering */
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

    /* clamp texture S,T coordinates in a range of 0.0~1.0 */
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    /* release temporary memory used by the font baker; after this the atlas
     * references the GL texture id and `pixels` must no longer be used */
    nk_font_atlas_end(&gui_font_atlas, nk_handle_id(gui_font_texture_id),
                        &gui_null_texture);
    nk_font_atlas_cleanup(&gui_font_atlas);

    /* initialize nuklear context with the baked font as the default */
    if (!nk_init_default(&gui_context, &gui_font->handle)) {
        /* error handling */
    }

    /* set default GUI theme */
    nk_style_default(&gui_context);

    /* set GUI font */
    nk_style_set_font(&gui_context, &gui_font->handle);

    /* ... */

    return 1;
}

Now, once we have a valid nuklear context and have created a font atlas texture, we can create windows/buttons/custom widgets etc..

/* creates a window containing a button
 * (fix vs. original: nk_begin() was passed an undefined `ctx`; every other
 * call in this snippet uses &gui_context, so use it consistently) */

if (nk_begin(&gui_context, "Window title", nk_rect(
    0, 0, 300, 300), NK_WINDOW_TITLE |
        NK_WINDOW_BORDER | NK_WINDOW_MOVABLE |
        NK_WINDOW_MINIMIZABLE | NK_WINDOW_CLOSABLE |
            NK_WINDOW_NO_SCROLLBAR)) {

    /* one static column per row: 24px tall, 100px wide */
    nk_layout_row_static(&gui_context, 24, 100, 1);
    if (nk_button_label(&gui_context, "Button")) {
         /* do stuff on click */
    }
}

/* nk_end() must be called even when nk_begin() returns false */
nk_end(&gui_context);

After that is done you will need the library to convert your widget commands into draw commands (using a call to nk_convert()) that will be used to actually draw all the stuff.

/* the maximum number of vertices allocated for use */
#define GUI_MAX_NUM_VERTICES        6000

/* the maximum number of indices allocated for use */
#define GUI_MAX_NUM_INDICES     6000

struct gui_vertex {
    struct vec2 position;   /* vertex position */
    struct vec2 texcoord;   /* vertex texture coordinate */
    struct color color; /* vertex RGBA color */
};

/* ... */

struct nk_buffer vbuf = {0};
struct nk_buffer ibuf = {0};
struct nk_buffer cbuf = {0};
struct nk_convert_config cfg = {0};
struct gui_vertex *vertices = NULL;
unsigned short *indices = NULL;

/* map your OpenGL vertex/index buffers to @vertices/@indices here */

/* initialize nuklear's vertex/index buffers for conversion */
nk_buffer_init_fixed(&vbuf, vertices, sizeof(*vertices) *
                        GUI_MAX_NUM_VERTICES);
nk_buffer_init_fixed(&ibuf, indices, sizeof(*indices) *
                        GUI_MAX_NUM_INDICES);
nk_buffer_init_default(&cbuf);

/* initialize parameters for @nk_convert() */
memset(&cfg, 0, sizeof(cfg));
cfg.vertex_layout = layout;
cfg.vertex_size = sizeof(struct gui_vertex);
cfg.vertex_alignment = NK_ALIGNOF(struct gui_vertex);
cfg.null = gui_null_texture;
cfg.circle_segment_count = 22;
cfg.curve_segment_count = 22;
cfg.arc_segment_count = 22;
cfg.global_alpha = 1.0f;
cfg.shape_AA = NK_ANTI_ALIASING_ON;
cfg.line_AA = NK_ANTI_ALIASING_ON;

nk_convert(&gui_context, &cbuf, &vbuf, &ibuf, &cfg);

/* unmap your OpenGL vertex/index buffers here */

How you interpret these draw commands is then up to you: you could draw them using immediate mode, or perhaps use buffer orphaning if you are using modern OpenGL. Either way, the approach is the same — you loop through every draw command and interpret it:

/* element offset into the bound index buffer for the current command */
const nk_draw_index *offset = NULL;

/* ... */

/* iterate every draw command nk_convert() produced into cbuf */
nk_draw_foreach(cmd, &gui_context, &cbuf) {
    /* if the current command doesn't draw anything, skip it */
    if (cmd->elem_count == 0)
        continue;

    /* bind each command's texture */
    glBindTexture(GL_TEXTURE_2D, (GLuint)cmd->texture.id);

    /* set each command's scissor rectangle; the Y coordinate is flipped
     * because GL's scissor origin is bottom-left while nuklear's clip
     * rects use a top-left origin. NOTE(review): this only has effect if
     * GL_SCISSOR_TEST has been enabled elsewhere — confirm. */
    glScissor((GLint)cmd->clip_rect.x, (GLint)window_client_h -
            (GLint)(cmd->clip_rect.y + cmd->clip_rect.h),
                (GLint)cmd->clip_rect.w,
                    (GLint)cmd->clip_rect.h);

    /* draw each command; GL_UNSIGNED_SHORT matches the 16-bit indices
     * declared in the conversion snippet */
    glDrawElements(GL_TRIANGLES, (GLsizei)cmd->elem_count,
                GL_UNSIGNED_SHORT, offset);

    /* advance past the indices consumed by this command */
    offset += cmd->elem_count;
}

/* reset and prepare the context for the next frame */
nk_clear(&gui_context);

/* free commands buffer */
nk_buffer_free(&cbuf);

Also to render the GUI you can use this vertex shader:

// GUI vertex shader: transforms flat 2D GUI vertices to clip space and
// forwards the texture coordinate and RGBA color to the fragment stage.
in vec2 position;
in vec2 texcoord;
in vec4 color;

uniform mat4 transform; // orthographic projection matrix

out vec2 frag_texcoord;
out vec4 frag_colorRGBA;

void main(void)
{
    frag_texcoord = texcoord;
    frag_colorRGBA = color;
    // GUI geometry is flat: z = 0.0, w = 1.0 (no perspective)
    gl_Position = transform * vec4(position, 0.0, 1.0);
}

And this fragment shader:

// GUI fragment shader: modulates the interpolated vertex color with a
// sample from the GUI texture (the baked font atlas / widget texture).
precision highp float;

uniform sampler2D gui_texture;

in vec2 frag_texcoord;
in vec4 frag_colorRGBA;

out vec4 frag_color;    // output fragment color

void main(void)
{
    frag_color = frag_colorRGBA * texture(gui_texture, frag_texcoord);
}

Upvotes: -1

G. Putnam
G. Putnam

Reputation: 1775

Since this question does not specify a language (other than OpenGL), then I can provide information / advice relative to OpenGL.

Effectively, you need:

  • A vertex shader: that will render screen facing quads or tiles.
  • A fragment shader: that will render tile locations from a tileset texture
void main(void) {
   // NOTE(review): the uniforms (uTilenum, uTileSize_pix, uTileset, uColor,
   // uInverseTilesetSize_pix) and varyings (vTexCoord, vPixelCoord) are
   // declared outside this excerpt — verify their types against the full shader.
   vec2 tile = texture2D( uTilenum, vTexCoord).xy;                  // Choose the specific tile to use from integer texture
   vec2 spriteOffset = floor( tile * 256.0 ) * uTileSize_pix; // Find the distance to the tile corner in pixels
   vec2 spriteCoord = floor( mod( vPixelCoord, uTileSize_pix ) ) + vec2( 0.5, 0.5 );         // Choose the pixel within the tile

   // Sample the tileset at (tile corner + intra-tile pixel), scaled to
   // normalized texture coordinates, tinted by uColor
   gl_FragColor = uColor * texture2D( uTileset, ( spriteOffset + spriteCoord ) * uInverseTilesetSize_pix );
}
  • A menu texture: Divided into tiles as a tileset that represent your button choices. Ex: from a Simcity game I made: https://i.sstatic.net/mgjav.jpg
  • Some form of structure that describes the menu. I personally used something similar to HTML as a format, since it's well known and clear. I then parse this structure with a text parser to create the menu structure. I list the source as "tile numbers" within the tileset.
<button name='button1' align='bottom left' onTouch='switchMovementMode'>
    <img name='btn1img' src='74,75|90,91' width='100' height='100'></img>
</button>
<button name='button2' align='bottom left'>
    <img name='btn2img' src='76,77|92,93' width='100' height='100'></img>
</button>
  • A User Interface object in whatever language you are using that acts as a collection of Menu Elements and has at least Create, CheckForInteractions, and Draw methods. The User Interface is the last Draw of each render pass, and only uses an identity matrix so there's no perspective (i.e., ortho or flat to the screen). How this draws is preference, although I loop through my Menu Elements calling a sub-Draw on each. With CheckForInteractions, I personally pass the mouse / finger location to each Menu Element to check for a touch/drag/etc. and have the Menu Element track its own position.
  • Menu Element object that keeps track of its location, state, and responses if interacted with. May also Draw itself if the User Interface is designed that way. In my case I have each Menu Element translate and scale its ModelViewProjection matrix prior to calling Draw. Ex: interactions from a phone.
    • touchAction = someTouchMethod();
    • holdAction = someHoldMethod();
    • dragAction = someDragMethod();
  • Drawable object (can be combined with Menu Element) that keeps track of data necessary to draw a quad or tile. I personally chose glDrawElements with a drawListBuffer, which means you need:
    • Four vertices put into a vertexCoordinateBuffer (Java example):
// Quad corner positions (screen-facing unit quad).
Vec3[] vC = new Vec3[]{
   new Vec3( -1.0f, 1.0f, 0.0f ),   // top left
   new Vec3( -1.0f, -1.0f, 0.0f ),  // bottom left
   new Vec3( 1.0f, -1.0f, 0.0f ),   // bottom right
   new Vec3( 1.0f, 1.0f, 0.0f )     // top right
};
// 4 vertices * 3 floats * 4 bytes, native byte order.
// (fix vs. original: size was taken from an undefined `vertexCoord` array)
ByteBuffer vertexCoordBuffer = ByteBuffer.allocateDirect( vC.length * 3 * 4 );
vertexCoordBuffer.order( ByteOrder.nativeOrder() );
for( Vec3 v : vC ){
   vertexCoordBuffer.putFloat( v.x ).putFloat( v.y ).putFloat( v.z );
}
vertexCoordBuffer.position( 0 );  // rewind so GL reads from the start

  • Four texture coordinates put into a textureCoordinateBuffer (Java example):
// Quad texture coordinates (texture origin at top-left).
Vec2[] tC = new Vec2[]{
   new Vec2( 0.0f, 0.0f ),  // top left
   new Vec2( 0.0f, 1.0f ),  // bottom left
   new Vec2( 1.0f, 1.0f ),  // bottom right
   new Vec2( 1.0f, 0.0f )   // top right
};
// 4 coordinates * 2 floats * 4 bytes, native byte order.
// (fix vs. original: size used an undefined `texCoord` array, and the
// for-each loop iterated a misspelled `Tc` instead of `tC`)
ByteBuffer texCoordBuffer = ByteBuffer.allocateDirect( tC.length * 2 * 4 );
texCoordBuffer.order( ByteOrder.nativeOrder() );
for( Vec2 c : tC ){
   texCoordBuffer.putFloat( c.x ).putFloat( c.y );
}
texCoordBuffer.position( 0 );  // rewind so GL reads from the start
  • A draw order list (6 items for two triangles of a quad)
// Index list: two triangles (0-1-2 and 0-2-3) forming the quad.
int[] drawOrder = new int[]{
   0, 1, 2, 0, 2, 3
};
ByteBuffer dlb = ByteBuffer.allocateDirect( drawOrder.length * 4 );  // 4 bytes per int
dlb.order( ByteOrder.nativeOrder() );
drawListBuffer = dlb.asIntBuffer();
drawListBuffer.put( drawOrder );
// fix vs. original: put() leaves the position at the end of the data;
// rewind or glDrawElements will read from past the indices
drawListBuffer.position( 0 );
  • A Draw command that chooses your compiled Vertex and Fragment shader program, performs all the pre-render assignments for Attributes and Uniforms, and then calls glDrawElements such as below:
glDrawElements( GLES30.GL_TRIANGLES, drawOrder.length, GLES30.GL_UNSIGNED_INT, drawListBuffer );

Upvotes: 0

Related Questions