Rendering a triangle using a Vertex Array object does not display anything (OpenGL)

I use OpenGL 3.2, GLFW and GLEW. I am trying to make a simple triangle using VAO and a simple shader on OS X (10.8.2), but nothing is displayed, only a white screen. Shaders compile ok, GLEW inits ok, glGetString (GL_VERSION) shows 3.2, tried to put glGetError after each line, it did not report any errors. I do not know what I am doing wrong. Here is the code:

#include "include/GL/glew.h"
#include "include/GL/glfw.h"
#include <cstdio>
#include <cstdlib>
#include <iostream>

// Linked shader program used by the render loop.
GLuint program;

// Reads an entire text file into a NUL-terminated buffer.
// Returns a malloc()'d string the caller must free(), or NULL when fn is
// NULL, the file cannot be opened, or the file is empty.
char *textFileRead(const char *fn) {
    char *content = NULL;
    if (fn == NULL)
        return NULL;
    FILE *fp = fopen(fn, "rt");
    if (fp == NULL)
        return NULL;
    fseek(fp, 0, SEEK_END);
    long count = ftell(fp);
    rewind(fp);
    if (count > 0) {
        content = (char *)malloc(count + 1);
        if (content != NULL) {
            // In "rt" mode fread may return fewer bytes than ftell reported
            // (CRLF translation), so terminate at the actual read length.
            size_t got = fread(content, 1, count, fp);
            content[got] = '\0';
        }
    }
    fclose(fp);
    return content;
}

// Prints the shader info log when compilation of shader `s` failed.
// Exits the process if the log buffer cannot be allocated.
void checkCompilationStatus(GLuint s) {
    GLint status = 0;
    glGetShaderiv(s, GL_COMPILE_STATUS, &status);
    if (status != 0)
        return;  // compiled successfully, nothing to report
    GLint infologLength = 0;
    glGetShaderiv(s, GL_INFO_LOG_LENGTH, &infologLength);
    if (infologLength > 0) {
        GLchar *infoLog = (GLchar *)malloc(infologLength);
        if (infoLog == NULL) {
            printf("ERROR: Could not allocate InfoLog buffer");
            exit(1);
        }
        GLsizei charsWritten = 0;
        glGetShaderInfoLog(s, infologLength, &charsWritten, infoLog);
        printf("Shader InfoLog:\n%s", infoLog);
        free(infoLog);
    }
}

// Prints the program info log when linking of `p` failed.
// The original code declared an unused `GLuint error` and never verified
// the link, so link failures went completely unnoticed.
static void checkLinkStatus(GLuint p) {
    GLint status = 0;
    glGetProgramiv(p, GL_LINK_STATUS, &status);
    if (status != 0)
        return;  // linked successfully
    GLint infologLength = 0;
    glGetProgramiv(p, GL_INFO_LOG_LENGTH, &infologLength);
    if (infologLength > 0) {
        GLchar *infoLog = (GLchar *)malloc(infologLength);
        if (infoLog != NULL) {
            GLsizei charsWritten = 0;
            glGetProgramInfoLog(p, infologLength, &charsWritten, infoLog);
            printf("Program InfoLog:\n%s", infoLog);
            free(infoLog);
        }
    }
}

// Loads, compiles and links minimal.vert / minimal.frag into the global
// `program`, then makes it the active program.
void setShaders() {
    GLuint v = glCreateShader(GL_VERTEX_SHADER);
    GLuint f = glCreateShader(GL_FRAGMENT_SHADER);

    char *vs = textFileRead("minimal.vert");
    char *fs = textFileRead("minimal.frag");
    if (vs == NULL || fs == NULL) {
        // Original passed a possibly-NULL pointer straight to glShaderSource.
        std::cerr << "Error: could not read shader source files" << std::endl;
        exit(1);
    }

    const char *vv = vs;
    const char *ff = fs;
    glShaderSource(v, 1, &vv, NULL);  // GL copies the source strings,
    glShaderSource(f, 1, &ff, NULL);  // so the buffers can be freed now
    free(vs);
    free(fs);

    glCompileShader(v);
    checkCompilationStatus(v);
    glCompileShader(f);
    checkCompilationStatus(f);

    program = glCreateProgram();
    glAttachShader(program, v);
    glAttachShader(program, f);
    glLinkProgram(program);
    checkLinkStatus(program);

    // The shader objects are no longer needed once linked into the program.
    glDeleteShader(v);
    glDeleteShader(f);

    glUseProgram(program);
}

int main(int argc, char *argv[]) {
    glfwInit();

    // Request an OpenGL 3.2 forward-compatible core profile context
    // (required to get a 3.2 context on OS X).
    glfwOpenWindowHint(GLFW_OPENGL_VERSION_MAJOR, 3);
    glfwOpenWindowHint(GLFW_OPENGL_VERSION_MINOR, 2);
    glfwOpenWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
    glfwOpenWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);

    if (!glfwOpenWindow(800, 600, 8, 8, 8, 8, 24, 8, GLFW_WINDOW)) {
        std::cerr << "Error: could not open window" << std::endl;
        glfwTerminate();
        return 1;
    }
    glViewport(0, 0, 800, 600);
    glfwSetWindowTitle("Triangle");

    // GLEW must be initialized *after* the context exists; the experimental
    // flag is needed so GLEW fetches core-profile entry points.
    glewExperimental = GL_TRUE;
    GLenum result = glewInit();
    if (result != GLEW_OK) {
        std::cout << "Error: " << glewGetErrorString(result) << std::endl;
    }

    std::cout << "VENDOR: " << glGetString(GL_VENDOR) << std::endl;
    std::cout << "RENDERER: " << glGetString(GL_RENDERER) << std::endl;
    std::cout << "VERSION: " << glGetString(GL_VERSION) << std::endl;
    std::cout << "GLSL: " << glGetString(GL_SHADING_LANGUAGE_VERSION) << std::endl;

    setShaders();

    GLfloat vertices[] = {
         1.0f,  1.0f, 0.f,
        -1.f,  -1.f, 0.f,
         1.f,  -1.f, 0.f,
    };

    // The core profile requires a VAO to hold the vertex attribute state.
    GLuint VertexArrayID;
    glGenVertexArrays(1, &VertexArrayID);
    glBindVertexArray(VertexArrayID);

    GLuint vertexbuffer;
    glGenBuffers(1, &vertexbuffer);
    glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);

    // glGetAttribLocation returns -1 for unknown names; the original stored
    // the result in a GLuint, silently wrapping the error to 0xFFFFFFFF.
    GLint pos = glGetAttribLocation(program, "position");
    if (pos < 0) {
        std::cerr << "Error: attribute 'position' not found" << std::endl;
    }
    glEnableVertexAttribArray((GLuint)pos);
    glVertexAttribPointer((GLuint)pos, 3, GL_FLOAT, GL_FALSE, 0, 0);

    glClearColor(1.0, 1.0, 1.0, 1.0);
    while (glfwGetWindowParam(GLFW_OPENED)) {
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        glDrawArrays(GL_TRIANGLES, 0, 3);
        glfwSwapBuffers();
        glfwSleep(0.001);
    }

    // Release GL resources and shut GLFW down cleanly.
    glDeleteBuffers(1, &vertexbuffer);
    glDeleteVertexArrays(1, &VertexArrayID);
    glDeleteProgram(program);
    glfwTerminate();
    return 0;
}

And here are the shaders, the vertex shader:

 #version 150 in vec3 position; void main() { gl_Position = vec4(position, 0); } 

and the fragment shader:

 #version 150 out vec4 out_color; void main() { out_color = vec4(1.0f, 0.0f, 0.0f, 1.0f); } 
+4
source share
1 answer

The parameter w in your vertex shader should be set to 1, not 0.

 gl_Position = vec4(position, 1.0); 

For more information, see the "Normalized Coordinates" subsection of the "Rasterization Overview" on this page.

... the X, Y and Z components of each vertex position are divided by W to obtain normalized device coordinates ...

So with w set to 0, your coordinates are being divided by zero — and division by zero yields an undefined result, which is why nothing is drawn.

+7
source

All Articles