
I'm a bit confused by the first example in the OpenGL Programming Guide.

The author uses this example:

///////////////////////////////////////////////////////////////////////
//
// triangles.cpp
//
///////////////////////////////////////////////////////////////////////
// g++ -g -o tri triangles.cpp LoadShader.cpp -lglut -lGLEW -lGLU -lGL -lX11 -lm
#include <iostream>
using namespace std;

#include "vgl.h"
#include "LoadShader.h"

enum VAO_IDs { Triangles, NumVAOs };
enum Buffer_IDs { ArrayBuffer, NumBuffers };
enum Attrib_IDs { vPosition = 0 };

GLuint  VAOs[NumVAOs];
GLuint  Buffers[NumBuffers];

const GLuint  NumVertices = 6;

//---------------------------------------------------------------------
//
// init
//

void
init(void)
{
    glGenVertexArrays(NumVAOs, VAOs);
    glBindVertexArray(VAOs[Triangles]);

    GLfloat  vertices[NumVertices][2] = {
        { -0.90, -0.90 },  // Triangle 1
        {  0.85, -0.90 },
        { -0.90,  0.85 },
        {  0.90, -0.85 },  // Triangle 2
        {  0.90,  0.90 },
        { -0.85,  0.90 }
    };

    glGenBuffers(NumBuffers, Buffers);
    glBindBuffer(GL_ARRAY_BUFFER, Buffers[ArrayBuffer]);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices),
                 vertices, GL_STATIC_DRAW);

    ShaderInfo  shaders[] = {
        { GL_VERTEX_SHADER, "triangles.vert" },
        { GL_FRAGMENT_SHADER, "triangles.frag" },
        { GL_NONE, NULL }
    };

    GLuint program = LoadShaders(shaders);
    glUseProgram(program);

    glVertexAttribPointer(vPosition, 2, GL_FLOAT,
                          GL_FALSE, 0, BUFFER_OFFSET(0));
    glEnableVertexAttribArray(vPosition);
}

//---------------------------------------------------------------------
//
// display
//

void
display(void)
{
    glClear(GL_COLOR_BUFFER_BIT);

    glBindVertexArray(VAOs[Triangles]);
    glDrawArrays(GL_TRIANGLES, 0, NumVertices);

    glFlush();
}

//---------------------------------------------------------------------
//
// main
//

int
main(int argc, char** argv)
{
     glutInit(&argc, argv);
     glutInitDisplayMode(GLUT_RGBA);
     glutInitWindowSize(512, 512);
     glutInitContextVersion(4, 3);
     glutInitContextProfile(GLUT_CORE_PROFILE);
     glutCreateWindow(argv[0]);

     glewExperimental = GL_TRUE;

     if (glewInit()) {
         cerr << "Unable to initialize GLEW ... exiting" << endl;
         exit(EXIT_FAILURE);
     }

     init();

     glutDisplayFunc(display);

     glutMainLoop();
}

And here is my implementation:

#include <GL/glew.h>
#include <GLFW/glfw3.h>

#include <cstdio>   // fprintf
#include <cstdlib>  // EXIT_FAILURE
#include <iostream>

#include <GLShaderLoader.hpp>

using namespace std;

// Reserved VAO ID indicies
enum VAO_IDs {TRIS, NUM_VAO};

// Reserved VBO ID indicies
enum VBO_IDs {BUFFER, NUM_BUFFER};

// Reserved names (for VAOs and VBOs)
GLuint VAOs[NUM_VAO];
GLuint buffers[NUM_BUFFER];

// Hard-coded vertex count
const GLuint numVerts = 6;

// Initialize VAOs and VBOs, populate VBOs, and provide OGL server memory access
void glInit() {
    glGenVertexArrays(NUM_VAO, VAOs);
    glBindVertexArray(VAOs[TRIS]);

    GLfloat verts[numVerts][2] = {
        { 0.0, 0.5 },
        { 0.5, -0.5 },
        { -0.5, -0.5},
        {-0.9, 0.9},
        {-0.9, 0.7},
        {-0.7, 0.7}
    };

    glGenBuffers(NUM_BUFFER, buffers);
    glBindBuffer(GL_ARRAY_BUFFER, buffers[BUFFER]);
    glBufferData(GL_ARRAY_BUFFER, sizeof(verts), verts, GL_STATIC_DRAW);

    GLuint prog = loadShaders("pass.vert", "pass.frag");

    glUseProgram(prog);
    glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, 0);

    glEnableVertexAttribArray(0);
}

// Main entry point
int main(int argc, char** argv) {
    glewExperimental = GL_TRUE;

    GLFWwindow* window;

    if (!glfwInit()) {
        return EXIT_FAILURE;
    }

    window = glfwCreateWindow(640, 480, "Hello Fam", NULL, NULL);

    if (!window) {
        glfwTerminate();
        return EXIT_FAILURE;
    }

    glfwMakeContextCurrent(window);

    GLenum err = glewInit();

    if (GLEW_OK != err) {
        fprintf(stderr, "Error: %s\n", glewGetErrorString(err));
        return EXIT_FAILURE;
    }

    glInit();

    //*************
    //  MAIN LOOP
    //*************

    while (!glfwWindowShouldClose(window)) {
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

        glBindVertexArray(VAOs[TRIS]);
        glDrawArrays(GL_TRIANGLES, 0, numVerts);

        glfwSwapBuffers(window);
        glfwPollEvents();
    }

    glfwTerminate();
}

Now, the thing is, my implementation compiles and runs just fine, but I'm confused. The book says glBindVertexArray() will not take 0, since 0 is reserved for the default state. However, the enum values are never explicitly initialized, so they default to their indices (TRIS == 0). That seems to mean OpenGL binds to a VAO name of 0 when I call glBindVertexArray(VAOs[TRIS]).

My question is this: is OpenGL really binding to a name that is 0, or am I missing something/understanding wrong?

Thank you in advance!

johnjohn

1 Answer

The previous line, glGenVertexArrays(NUM_VAO, VAOs);, fills the array VAOs with generated GLuint names, one of which is what you are passing into glBindVertexArray(VAOs[TRIS]);.

If you want to see what you are passing in, set a debugger breakpoint and inspect VAOs.

VAOs[TRIS] means take the first element of the VAOs array (as TRIS == 0). The index is 0, but the value stored at that index is a generated name, and generated names are never 0.
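
If you don't want to fire up the debugger, here is a minimal sketch that prints the generated names. The helper name dumpVaoNames is just for illustration; it assumes a current GL context already exists and GLEW has been initialized, as in your GLFW setup:

#include <GL/glew.h>
#include <iostream>

enum VAO_IDs { TRIS, NUM_VAO };
GLuint VAOs[NUM_VAO];

// Call after glfwMakeContextCurrent() and glewInit().
void dumpVaoNames() {
    // Fills VAOs with names that were previously unused, which can
    // never be 0, because 0 is reserved for the default/unbound state.
    glGenVertexArrays(NUM_VAO, VAOs);

    for (int i = 0; i < NUM_VAO; ++i) {
        std::cout << "VAOs[" << i << "] = " << VAOs[i] << std::endl;
    }
}

glGenVertexArrays follows the same convention as glGenBuffers and glGenTextures: it hands back unused, non-zero names, and 0 is reserved for unbinding.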

Dominique McDonnell
  • I'm aware that I'm accessing the 0th element of the array, but the element should still be 0. I'll set the breakpoint and take a peek! Thanks. – johnjohn Sep 08 '15 at 04:28
  • You were right: `VAOs[TRIS]`, i.e. `VAOs[0]`, actually has a value of 1. I'm not sure why, though. Do you happen to know? – johnjohn Sep 08 '15 at 04:34
  • @Xakota, have a look at the documentation link I just posted in the comment above. That call generates the names of the array. – Dominique McDonnell Sep 08 '15 at 04:35
  • BAH. I reread your answer. I didn't know the array gets filled with guaranteed unused ints. Thank you. Question answered. – johnjohn Sep 08 '15 at 04:36