Draw a 2D Image using OpenGL ES 2.0

Posted 2019-08-01 06:09

I've been struggling to draw a 2D image from a JPG/PNG file on Android using OpenGL ES 2.0. Every tutorial I can find is about texturing 3D objects, so it has been rough figuring out how to draw a plain 2D sprite. I have a square drawing and rotating, but once it comes to texturing I must have messed something up somewhere, because I keep getting an error saying DrawElements is not bound to any data. If I comment out the texturing code, it works fine.

Any help would be greatly appreciated.

Here is the code for my Sprite class and my Renderer class:

public class Sprite
{
//Reference to Activity Context
private final Context mActivityContext;

//Added for Textures
private final FloatBuffer mCubeTextureCoordinates;
private int mTextureUniformHandle;
private int mTextureCoordinateHandle;
private final int mTextureCoordinateDataSize = 2;
private int mTextureDataHandle;

private final String vertexShaderCode =
//Test
"attribute vec2 a_TexCoordinate;" +
"varying vec2 v_TexCoordinate;" +
//End Test
"uniform mat4 uMVPMatrix;" +
"attribute vec4 vPosition;" +
"void main() {" +
"  gl_Position = vPosition * uMVPMatrix;" +
    //Test
    "v_TexCoordinate = a_TexCoordinate" +
    //End Test
"}";

private final String fragmentShaderCode =
"precision mediump float;" +
"uniform vec4 vColor;" +
//Test
"uniform sampler2D u_Texture;" +
"varying vec2 v_TexCoordinate;" +
//End Test
"void main() {" +
//"gl_FragColor = vColor;" +
"gl_FragColor = (v_Color * texture2D(u_Texture, v_TexCoordinate));" +
"}";

private final int shaderProgram;    
private final FloatBuffer vertexBuffer;
private final ShortBuffer drawListBuffer;
private int mPositionHandle;
private int mColorHandle;
private int mMVPMatrixHandle;

// number of coordinates per vertex in this array
static final int COORDS_PER_VERTEX = 2;
static float spriteCoords[] = { -0.5f,  0.5f,   // top left
                                -0.5f, -0.5f,   // bottom left
                                 0.5f, -0.5f,   // bottom right
                                 0.5f,  0.5f }; //top right

private short drawOrder[] = { 0, 1, 2, 0, 2, 3 }; //Order to draw vertices
private final int vertexStride = COORDS_PER_VERTEX * 4; //Bytes per vertex

// Set color with red, green, blue and alpha (opacity) values
float color[] = { 0.63671875f, 0.76953125f, 0.22265625f, 1.0f };

public Sprite(final Context activityContext)
{
    mActivityContext = activityContext;

    //Initialize Vertex Byte Buffer for Shape Coordinates / # of coordinate values * 4 bytes per float
    ByteBuffer bb = ByteBuffer.allocateDirect(spriteCoords.length * 4); 
    //Use the Device's Native Byte Order
    bb.order(ByteOrder.nativeOrder());
    //Create a floating point buffer from the ByteBuffer
    vertexBuffer = bb.asFloatBuffer();
    //Add the coordinates to the FloatBuffer
    vertexBuffer.put(spriteCoords);
    //Set the Buffer to Read the first coordinate
    vertexBuffer.position(0);

    // S, T (or X, Y)
    // Texture coordinate data.
    // Because images have a Y axis pointing downward (values increase as you move down the image) while
    // OpenGL has a Y axis pointing upward, we adjust for that here by flipping the Y axis.
    // What's more is that the texture coordinates are the same for every face.
    final float[] cubeTextureCoordinateData =
    {                                               
            //Front face
            /*0.0f, 0.0f,               
            0.0f, 1.0f,
            1.0f, 0.0f,
            0.0f, 1.0f,
            1.0f, 1.0f,
            1.0f, 0.0f*/

            -0.5f,  0.5f,
            -0.5f, -0.5f,
             0.5f, -0.5f,
             0.5f,  0.5f
    };

    mCubeTextureCoordinates = ByteBuffer.allocateDirect(cubeTextureCoordinateData.length * 4).order(ByteOrder.nativeOrder()).asFloatBuffer();
    mCubeTextureCoordinates.put(cubeTextureCoordinateData).position(0);

    //Initialize byte buffer for the draw list
    ByteBuffer dlb = ByteBuffer.allocateDirect(spriteCoords.length * 2);
    dlb.order(ByteOrder.nativeOrder());
    drawListBuffer = dlb.asShortBuffer();
    drawListBuffer.put(drawOrder);
    drawListBuffer.position(0);

    int vertexShader = MyGL20Renderer.loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
    int fragmentShader = MyGL20Renderer.loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);

    shaderProgram = GLES20.glCreateProgram();
    GLES20.glAttachShader(shaderProgram, vertexShader);
    GLES20.glAttachShader(shaderProgram, fragmentShader);

    //Texture Code
    GLES20.glBindAttribLocation(shaderProgram, 0, "a_TexCoordinate");

    GLES20.glLinkProgram(shaderProgram);

    //Load the texture
    mTextureDataHandle = loadTexture(mActivityContext, R.drawable.brick);
}

public void Draw(float[] mvpMatrix)
{
    //Add program to OpenGL ES Environment
    GLES20.glUseProgram(shaderProgram);

    //Get handle to vertex shader's vPosition member
    mPositionHandle = GLES20.glGetAttribLocation(shaderProgram, "vPosition");

    //Enable a handle to the triangle vertices
    GLES20.glEnableVertexAttribArray(mPositionHandle);

    //Prepare the triangle coordinate data
    GLES20.glVertexAttribPointer(mPositionHandle, COORDS_PER_VERTEX, GLES20.GL_FLOAT, false, vertexStride, vertexBuffer);

    //Get Handle to Fragment Shader's vColor member
    mColorHandle = GLES20.glGetUniformLocation(shaderProgram, "vColor");

    //Set the Color for drawing the triangle
    GLES20.glUniform4fv(mColorHandle, 1, color, 0);

    //Set Texture Handles and bind Texture
    mTextureUniformHandle = GLES20.glGetAttribLocation(shaderProgram, "u_Texture");
    mTextureCoordinateHandle = GLES20.glGetAttribLocation(shaderProgram, "a_TexCoordinate");

    //Set the active texture unit to texture unit 0.
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);

    //Bind the texture to this unit.
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureDataHandle);

    //Tell the texture uniform sampler to use this texture in the shader by binding to texture unit 0.
    GLES20.glUniform1i(mTextureUniformHandle, 0); 

    //Pass in the texture coordinate information
    mCubeTextureCoordinates.position(0);
    GLES20.glVertexAttribPointer(mTextureCoordinateHandle, mTextureCoordinateDataSize, GLES20.GL_FLOAT, false, 0, mCubeTextureCoordinates);
    GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);

    //Get Handle to Shape's Transformation Matrix
    mMVPMatrixHandle = GLES20.glGetUniformLocation(shaderProgram, "uMVPMatrix");

    //Apply the projection and view transformation
    GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);

    //Draw the triangle
    GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawOrder.length, GLES20.GL_UNSIGNED_SHORT, drawListBuffer);

    //Disable Vertex Array
    GLES20.glDisableVertexAttribArray(mPositionHandle);
}

public static int loadTexture(final Context context, final int resourceId)
{
    final int[] textureHandle = new int[1];

    GLES20.glGenTextures(1, textureHandle, 0);

    if (textureHandle[0] != 0)
    {
        final BitmapFactory.Options options = new BitmapFactory.Options();
        options.inScaled = false;   // No pre-scaling

        // Read in the resource
        final Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), resourceId, options);

        // Bind to the texture in OpenGL
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]);

        // Set filtering
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);

        // Load the bitmap into the bound texture.
        GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);

        // Recycle the bitmap, since its data has been loaded into OpenGL.
        bitmap.recycle();
    }

    if (textureHandle[0] == 0)
    {
        throw new RuntimeException("Error loading texture.");
    }

    return textureHandle[0];
}
}

My Renderer class:

public class MyGL20Renderer implements GLSurfaceView.Renderer
{
private final Context mActivityContext;

//Matrix Initializations
private final float[] mMVPMatrix = new float[16];
private final float[] mProjMatrix = new float[16];
private final float[] mVMatrix = new float[16];
private float[] mRotationMatrix = new float[16];

//Declare as volatile because we are updating it from another thread
public volatile float mAngle;

//private Triangle triangle;
private Sprite sprite;

public MyGL20Renderer(final Context activityContext)
{
    mActivityContext = activityContext;
}

public void onSurfaceCreated(GL10 unused, EGLConfig config)
{
    //Set the background frame color
    GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);

    //Initialize Shapes
    //triangle = new Triangle();
    sprite = new Sprite(mActivityContext);
}

public void onDrawFrame(GL10 unused)
{
    //Redraw background color
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);

    //Set the camera position (View Matrix)
    Matrix.setLookAtM(mVMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);

    //Calculate the projection and view transformation
    Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0);

    //Create a rotation transformation for the triangle
    Matrix.setRotateM(mRotationMatrix, 0, mAngle, 0, 0, -1.0f);

    //Combine the rotation matrix with the projection and camera view
    Matrix.multiplyMM(mMVPMatrix, 0, mRotationMatrix, 0, mMVPMatrix, 0);

    //Draw Shape
    //triangle.Draw(mMVPMatrix);
    sprite.Draw(mMVPMatrix);
}

public void onSurfaceChanged(GL10 unused, int width, int height)
{
    GLES20.glViewport(0, 0, width, height);

    float ratio = (float) width / height;

    //This Projection Matrix is applied to object coordinates in the onDrawFrame() method
    Matrix.frustumM(mProjMatrix, 0, -ratio, ratio, -1, 1, 3, 7);
}

public static int loadShader(int type, String shaderCode)
{
    //Create a Vertex Shader Type Or a Fragment Shader Type (GLES20.GL_VERTEX_SHADER OR GLES20.GL_FRAGMENT_SHADER)
    int shader = GLES20.glCreateShader(type);

    //Add The Source Code and Compile it
    GLES20.glShaderSource(shader, shaderCode);
    GLES20.glCompileShader(shader);

    return shader;
}
}

Answer 1:

"v_TexCoordinate = a_TexCoordinate" +

should have been

"v_TexCoordinate = a_TexCoordinate;" +

Apparently I forgot a semicolon. Now I know how much I rely on my IDE to tell me when I mess up silly things like this, haha.
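For reference, a sketch of the corrected block inside vertexShaderCode with only the semicolon added (everything else exactly as in the question):

    "void main() {" +
    "  gl_Position = vPosition * uMVPMatrix;" +
    "  v_TexCoordinate = a_TexCoordinate;" +
    "}";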



Answer 2:

There is a naming (or usage) error with the vColor variable in fragmentShaderCode. Here you declare the variable as vColor:

uniform vec4 vColor;

but in this line you refer to it as v_Color:

gl_FragColor = (v_Color * texture2D(u_Texture, v_TexCoordinate));
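A one-line sketch of the fix, keeping the uniform name vColor that the shader actually declares:

    "gl_FragColor = (vColor * texture2D(u_Texture, v_TexCoordinate));" +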


Answer 3:

Try using the following texture coordinates:

final float[] cubeTextureCoordinateData = {
        0.5f, -0.5f,   0.5f, 0.5f,   -0.5f, 0.5f,   -0.5f, -0.5f };

That works. Thank you very much.



Answer 4:

I would also change this in the shader, from

  gl_Position = vPosition * uMVPMatrix;

to

  gl_Position = uMVPMatrix * vPosition;

It will make a difference when you try to translate the image's position: with the column-vector convention the Android Matrix class uses when building uMVPMatrix, the matrix has to stand on the left of the vertex for the translation components to be applied.
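A minimal sketch of such a translation in the renderer's onDrawFrame(), assuming illustrative scratch arrays modelMatrix and mvMatrix that are not in the original code:

    //Hypothetical example: move the sprite 0.5 units to the right
    float[] modelMatrix = new float[16];
    float[] mvMatrix = new float[16];
    Matrix.setIdentityM(modelMatrix, 0);
    Matrix.translateM(modelMatrix, 0, 0.5f, 0.0f, 0.0f);
    Matrix.multiplyMM(mvMatrix, 0, mVMatrix, 0, modelMatrix, 0);   //View * Model
    Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mvMatrix, 0); //Projection * View * Model
    sprite.Draw(mMVPMatrix);

The translation only shows up on screen once the shader multiplies in the order uMVPMatrix * vPosition.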



Answer 5:

The solution could be as simple as enabling the texture coordinate attribute (mTextureCoordinateHandle) before the glVertexAttribPointer call; see the sketch below.
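A minimal sketch of that ordering inside Sprite.Draw(), using the same handles as the question's code (this only reorders the existing calls):

    //Look up and enable the texture coordinate attribute first
    mTextureCoordinateHandle = GLES20.glGetAttribLocation(shaderProgram, "a_TexCoordinate");
    GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);

    //Then point the enabled attribute at the texture coordinate buffer
    mCubeTextureCoordinates.position(0);
    GLES20.glVertexAttribPointer(mTextureCoordinateHandle, mTextureCoordinateDataSize,
            GLES20.GL_FLOAT, false, 0, mCubeTextureCoordinates);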



Source: Draw a 2D Image using OpenGL ES 2.0