Why does this crash when using the OpenGL core profile?

Posted 2019-05-10 16:22

When I try to run this simple OpenGL test program I get a segmentation fault. This only happens when I create the context using the core profile flag. If I use the compatibility profile flag, the program runs without issue.

Edit: I checked the function pointer glGenVertexArrays and it is NULL. If glfwCreateWindow doesn't return NULL, glGetString(GL_VERSION) confirms that the context is version 4.3, and glewInit returns GLEW_OK, then why is glGenVertexArrays == NULL?
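For reference, the check is just a pointer comparison (under GLEW, glGenVertexArrays is a function pointer filled in at runtime, so it can legitimately be NULL even after glewInit succeeds):

if(glGenVertexArrays == NULL)
    printf("glGenVertexArrays was not loaded!\n");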

My OS is Windows 7 64-bit and my GPU is an Nvidia GTX 760 with the 331.82 WHQL driver.

Code:

#include <GL/glew.h>
#include <GLFW/glfw3.h>
#include <stdlib.h>
#include <stdio.h>

#define GLSL(src) "#version 430 core\n" #src

void key_callback(GLFWwindow* window, int key, int scancode, int action, int mods)
{
    if(key == GLFW_KEY_ESCAPE && action == GLFW_PRESS)
        glfwSetWindowShouldClose(window, GL_TRUE);
}

GLuint create_program(const char* vertex_source, const char* fragment_source)
{
    GLuint vs = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(vs, 1, &vertex_source, NULL);
    glCompileShader(vs);
    GLuint fs = glCreateShader(GL_FRAGMENT_SHADER);
    glShaderSource(fs, 1, &fragment_source, NULL);
    glCompileShader(fs);

    GLuint shader_program = glCreateProgram();
    glAttachShader(shader_program, fs);
    glAttachShader(shader_program, vs);
    glLinkProgram(shader_program);

    return shader_program;
}

const char* vertex_shader = GLSL(
    layout(location = 0) in vec3 vertex_position;

    void main()
    {
        gl_Position = vec4(vertex_position, 1.0);
    }
);

const char* fragment_shader = GLSL(
    out vec4 frag_color;

    void main()
    {
        frag_color = vec4(1.0, 0.0, 0.0, 1.0);
    }
);

int main(int argc, char* argv[])
{
    if(!glfwInit())
        exit(EXIT_FAILURE);

    glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 4);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    //if we set GLFW_OPENGL_PROFILE to GLFW_OPENGL_CORE_PROFILE
    //instead of GLFW_OPENGL_COMPAT_PROFILE the program will
    //segfault at the call to glGenVertexArrays below
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);

    GLFWwindow* window = glfwCreateWindow(512, 512, "OpenGL", NULL, NULL);

    if(!window)
    {
        glfwTerminate();
        exit(EXIT_FAILURE);
    }

    glfwSetKeyCallback(window, key_callback);
    glfwMakeContextCurrent(window);

    GLenum glewError = glewInit();

    if(glewError != GLEW_OK)
    {
        glfwTerminate();
        exit(EXIT_FAILURE);
    }

    printf("OpenGL Version: %s\n\n", glGetString(GL_VERSION));

    float position[] = 
    {
        1.0f, 1.0f, 0.0f,
        -1.0f, 1.0f, 0.0f,
        1.0f, -1.0f, 0.0f,
        -1.0f, -1.0f, 0.0f
    };

    unsigned short indices[] = 
    {
        1, 0, 2,
        3, 1, 2
    };

    GLuint vao = 0;
    glGenVertexArrays(1, &vao);
    glBindVertexArray(vao);

    GLuint index_buffer = 0;
    GLuint vertex_buffer = 0;

    glGenBuffers(1, &index_buffer);
    glGenBuffers(1, &vertex_buffer);

    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, index_buffer);
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW);

    glBindBuffer(GL_ARRAY_BUFFER, vertex_buffer);
    glBufferData(GL_ARRAY_BUFFER, sizeof(position), position, GL_STATIC_DRAW);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, 0);
    glEnableVertexAttribArray(0);

    GLuint shader_program = create_program(vertex_shader, fragment_shader);
    glUseProgram(shader_program);

    while(!glfwWindowShouldClose(window))
    {
        glClear(GL_COLOR_BUFFER_BIT);
        glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, NULL);
        glfwSwapBuffers(window);
        glfwPollEvents();
    }

    glfwDestroyWindow(window);
    glfwTerminate();
    exit(EXIT_SUCCESS);
}

3 Answers
Fickle 薄情
#2 · 2019-05-10 16:50

Output the value of glGenVertexArrays; it is likely 0/NULL right now. I've found I have to set up certain function pointers first, even in the core profile.
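A quick way to do that (casting a function pointer to void* isn't strictly portable C, but it works on the usual desktop platforms):

printf("glGenVertexArrays = %p\n", (void*)glGenVertexArrays);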

Emotional °昔
#3 · 2019-05-10 16:50

I've encountered similar issues with GLEW not properly initializing all the function pointers when you create a core profile context. I've been meaning to look into the GLEW implementation to see why this is the case, but haven't gotten around to it, since the compatibility context is serving me OK.

However, one possibility you could try is to use GLFW to create a compatibility context, initialize GLEW, then destroy the GLFW window and create a new one with a core profile context.
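A sketch of that workaround (untested; note that GLFW window hints are sticky, so the profile hint has to be reset before the second glfwCreateWindow, and whether the loaded function pointers remain valid for the new context is driver-dependent):

//throwaway compatibility-profile window, used only to initialize GLEW
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_COMPAT_PROFILE);
GLFWwindow* dummy = glfwCreateWindow(1, 1, "dummy", NULL, NULL);
glfwMakeContextCurrent(dummy);
glewInit(); //loads the function pointers
glfwDestroyWindow(dummy);

//now create the real window with the core profile
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
GLFWwindow* window = glfwCreateWindow(512, 512, "OpenGL", NULL, NULL);
glfwMakeContextCurrent(window);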

疯言疯语
#4 · 2019-05-10 17:02

You're actually getting an Invalid Enum error (1280) from OpenGL after you call glewInit(). The easiest fix is to set

glewExperimental = GL_TRUE;

before you call glewInit(), like so:

glewExperimental = GL_TRUE;

GLenum glewError = glewInit();

if (glewError != GLEW_OK)
{
    glfwTerminate();
    exit(EXIT_FAILURE);
}

Why? It has to do with how GLEW loads extensions and function pointers. By default GLEW marks some functions as unsupported and never loads their entry points; to get around that you need to set glewExperimental = GL_TRUE, or else it will generate an error like the one you were getting.

Experimental Drivers

GLEW obtains information on the supported extensions from the graphics driver. Experimental or pre-release drivers, however, might not report every available extension through the standard mechanism, in which case GLEW will report it unsupported. To circumvent this situation, the glewExperimental global switch can be turned on by setting it to GL_TRUE before calling glewInit(), which ensures that all extensions with valid entry points will be exposed.

Source

Extra

Always remember to check for OpenGL errors; they usually tell you what's wrong and/or help you find the problem.

GLenum error = glGetError();

if (error != GL_NO_ERROR)
{
    printf("OpenGL Error: %u\n", error);
}
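Note that glGetError returns (and clears) one error flag at a time, so if several errors have queued up it's worth draining them in a loop; a small sketch:

GLenum error;

while((error = glGetError()) != GL_NO_ERROR)
    printf("OpenGL Error: %u\n", error);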

You can read about the different errors here.
