How to draw Buffer[] to a TextureView on Android?

Asked 2019-08-15 11:20

I'm using JavaCV's FFmpegFrameGrabber to retrieve frames from a video file. FFmpegFrameGrabber returns a Frame, which basically contains a Buffer[] holding the image pixels for a video frame.
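
For reference, the grabbing side with JavaCV looks roughly like this (a minimal sketch; the file path is illustrative and error handling is omitted):

FFmpegFrameGrabber grabber = new FFmpegFrameGrabber("/path/to/video.mp4");
grabber.start();
Frame frame = grabber.grabImage(); // a video frame; frame.image is the Buffer[]
Buffer pixels = frame.image[0];    // pixel data for the first (packed) plane
grabber.stop();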

Because performance is my top priority, I would like to use OpenGL ES to display this Buffer[] directly, without converting it into a Bitmap.

The view to be displayed takes up less than half of the screen, and according to the OpenGL ES documentation:

Developers who want to incorporate OpenGL ES graphics in a small portion of their layouts should take a look at TextureView.

So I guess TextureView is the right choice for this task. However, I haven't found many resources about it (most of them are camera-preview examples).

I would like to ask how I can draw a Buffer[] to a TextureView. And if this is not the most efficient way to do it, I'm willing to try your alternatives.


Update: Currently I have it set up like this:

In my VideoActivity, I repeatedly extract the video's Frames, each of which contains a ByteBuffer, and send them to my MyGLRenderer2 to be converted into an OpenGL ES texture:

...
mGLSurfaceView = (GLSurfaceView)findViewById(R.id.gl_surface_view);
mGLSurfaceView.setEGLContextClientVersion(2);
mRenderer = new MyGLRenderer2(this);
mGLSurfaceView.setRenderer(mRenderer);
mGLSurfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY);
...

private void grabCurrentFrame(final long currentPosition){
    if (mCanSeek) {
        new AsyncTask() {
            @Override
            protected void onPreExecute() {
                super.onPreExecute();
                mCanSeek = false;
            }

            @Override
            protected Object doInBackground(Object[] params) {
                try {
                    Frame frame = mGrabber.grabImage();
                    setCurrentFrame((ByteBuffer) frame.image[0]);
                }
                catch (Exception e) {
                    e.printStackTrace();
                }
                return null;
            }

            @Override
            protected void onPostExecute(Object o) {
                super.onPostExecute(o);
                mCanSeek = true;
            }
        }.execute();
    }
}

private void setCurrentFrame(ByteBuffer buffer){
    mRenderer.setTexture(buffer);
}
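
Note that setCurrentFrame() is called from the AsyncTask's background thread, while onDrawFrame() runs on the GL thread, so mCurrentBuffer is shared between two threads. One way to serialize the handoff, sketched here under the assumption that mGLSurfaceView is reachable from this method, is GLSurfaceView.queueEvent():

private void setCurrentFrame(final ByteBuffer buffer){
    // Hand the buffer to the renderer on the GL thread so onDrawFrame()
    // never observes a half-updated reference.
    mGLSurfaceView.queueEvent(new Runnable() {
        @Override
        public void run() {
            mRenderer.setTexture(buffer);
        }
    });
}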

MyGLRenderer2 looks like this:

public class MyGLRenderer2 implements GLSurfaceView.Renderer {
    private static final String TAG = "MyGLRenderer2";

    private FullFrameTexture mFullFrameTexture;
    private ByteBuffer mCurrentBuffer;

    private int[] textureHandles = new int[1];
    private int textureHandle;

    public MyGLRenderer2(Context context){
        super();
    }

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
        GLES20.glClearColor(0, 0, 0, 1);
        mFullFrameTexture = new FullFrameTexture();
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        // Dimensions should not need to be a power of 2 since I use GL_CLAMP_TO_EDGE.
        createFrameTexture(mCurrentBuffer, 1280, 720, GLES20.GL_RGB);
        mFullFrameTexture.draw(textureHandle);
        if (mCurrentBuffer != null) {
            mCurrentBuffer.clear();
        }
    }

    public void setTexture(ByteBuffer buffer){
        mCurrentBuffer = buffer.duplicate();
        mCurrentBuffer.position(0);
    }

    public void createFrameTexture(ByteBuffer data, int width, int height, int format) {
        // NOTE: this generates a brand-new texture object on every call and never
        // deletes the previous one.
        GLES20.glGenTextures(1, textureHandles, 0);
        textureHandle = textureHandles[0];
        GlUtil.checkGlError("glGenTextures");

        // Bind the texture handle to the 2D texture target.
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle);

        // Configure min/mag filtering, i.e. what scaling method we use if what we're
        // rendering is smaller or larger than the source image.
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        GlUtil.checkGlError("loadImageTexture");

        // Load the data from the buffer into the texture handle.
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, /*level*/ 0, format,
                width, height, /*border*/ 0, format, GLES20.GL_UNSIGNED_BYTE, data);
        GlUtil.checkGlError("loadImageTexture");
    }
}
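
Since createFrameTexture() allocates a fresh texture on every frame, an alternative I'm considering is to allocate the texture once and re-upload pixels into it with glTexSubImage2D(). A minimal sketch (sizes hard-coded to match the code above; mTextureCreated is a new field):

private boolean mTextureCreated = false;

private void uploadFrameTexture(ByteBuffer data, int width, int height, int format) {
    if (!mTextureCreated) {
        // One-time allocation of the texture object and its storage.
        GLES20.glGenTextures(1, textureHandles, 0);
        textureHandle = textureHandles[0];
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, format, width, height, 0,
                format, GLES20.GL_UNSIGNED_BYTE, null); // null: allocate storage only
        mTextureCreated = true;
    }
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle);
    if (data != null) {
        // Overwrite the existing storage with this frame's pixels.
        GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, width, height,
                format, GLES20.GL_UNSIGNED_BYTE, data);
    }
}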

And FullFrameTexture looks like this:

public class FullFrameTexture {
    private static final String VERTEX_SHADER =
        "uniform mat4 uOrientationM;\n" +
        "uniform mat4 uTransformM;\n" +
        "attribute vec2 aPosition;\n" +
        "varying vec2 vTextureCoord;\n" +
        "void main() {\n" +
        "  gl_Position = vec4(aPosition, 0.0, 1.0);\n" +
        "  vTextureCoord = (uTransformM * ((uOrientationM * gl_Position + 1.0) * 0.5)).xy;\n" +
        "}";

    private static final String FRAGMENT_SHADER =
        "precision mediump float;\n" +
        "uniform sampler2D sTexture;\n" +
        "varying vec2 vTextureCoord;\n" +
        "void main() {\n" +
        "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
        "}";

    // A triangle strip covering the full clip-space quad.
    private final byte[] FULL_QUAD_COORDINATES = {-1, 1, -1, -1, 1, 1, 1, -1};

    private ShaderProgram shader;
    private ByteBuffer fullQuadVertices;

    private final float[] orientationMatrix = new float[16];
    private final float[] transformMatrix = new float[16];

    public FullFrameTexture() {
        shader = new ShaderProgram(EglUtil.getInstance());
        shader.create(VERTEX_SHADER, FRAGMENT_SHADER);

        // 4 vertices * 2 components * 1 byte each.
        fullQuadVertices = ByteBuffer.allocateDirect(4 * 2);
        fullQuadVertices.put(FULL_QUAD_COORDINATES).position(0);

        Matrix.setRotateM(orientationMatrix, 0, 0, 0f, 0f, 1f);
        Matrix.setIdentityM(transformMatrix, 0);
    }

    public void release() {
        shader = null;
        fullQuadVertices = null;
    }

    public void draw(int textureId) {
        shader.use();

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);

        // These are uniforms, so their locations must come from a uniform lookup
        // (glGetUniformLocation under the hood), not an attribute lookup --
        // assuming ShaderProgram exposes such a method.
        int uOrientationM = shader.getUniformLocation("uOrientationM");
        int uTransformM = shader.getUniformLocation("uTransformM");

        GLES20.glUniformMatrix4fv(uOrientationM, 1, false, orientationMatrix, 0);
        GLES20.glUniformMatrix4fv(uTransformM, 1, false, transformMatrix, 0);

        // Trigger actual rendering.
        renderQuad(shader.getAttributeLocation("aPosition"));

        shader.unUse();
    }

    private void renderQuad(int aPosition) {
        GLES20.glVertexAttribPointer(aPosition, 2, GLES20.GL_BYTE, false, 0, fullQuadVertices);
        GLES20.glEnableVertexAttribArray(aPosition);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    }
}

For now I can display some frames for a very brief moment before the app crashes (with wrong colors, too).
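
One suspicion about the colors: JavaCV's grabImage() typically delivers BGR-ordered pixels, while I upload the buffer as GL_RGB. If that is the cause, a quick workaround (a sketch, assuming BGR24 input) is to swizzle in the fragment shader:

private static final String FRAGMENT_SHADER_BGR =
    "precision mediump float;\n" +
    "uniform sampler2D sTexture;\n" +
    "varying vec2 vTextureCoord;\n" +
    "void main() {\n" +
    // .bgra swaps the red and blue channels read from the texture
    "  gl_FragColor = texture2D(sTexture, vTextureCoord).bgra;\n" +
    "}";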

1 Answer

冷血范 · answered 2019-08-15 11:47

The most efficient way to do what you're asking is to convert your pixels to an OpenGL ES texture and render that onto the TextureView. The function to use is glTexImage2D().

You can find some examples in Grafika, which uses the function to upload some generated textures. Take a look at createImageTexture(). Grafika's gles package may be of use if you don't already have GLES code in your app.

FWIW, it would be more efficient to decode video frames directly to a Surface created from the TextureView's SurfaceTexture, but I don't know if JavaCV supports that.
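
For reference, with MediaCodec (instead of JavaCV) that direct-to-Surface path would look roughly like this (a sketch; videoFormat would come from a MediaExtractor, and the TextureView's SurfaceTexture must already be available):

SurfaceTexture surfaceTexture = textureView.getSurfaceTexture();
Surface surface = new Surface(surfaceTexture);

MediaCodec decoder = MediaCodec.createDecoderByType("video/avc");
decoder.configure(videoFormat, surface, null, 0);
decoder.start();
// releaseOutputBuffer(index, true) then sends each decoded frame to the view.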

Edit: Another approach, if you don't mind working with the NDK, is to use ANativeWindow. Create a Surface for the TextureView's SurfaceTexture, pass it to native code, then call ANativeWindow_fromSurface() to get the ANativeWindow. Use ANativeWindow_setBuffersGeometry() to set the size and color format. Lock the buffer, copy the pixels in, unlock the buffer to post it. I don't think this requires an extra data copy internally, and potentially has some advantages over the glTexImage2D() approach.
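
The Java side of that handoff might look like this (nativeDrawFrame() is a hypothetical JNI method; its C implementation would do the ANativeWindow_fromSurface() / setBuffersGeometry / lock / copy / unlock sequence described above):

Surface surface = new Surface(textureView.getSurfaceTexture());
nativeDrawFrame(surface, frameBuffer, width, height);

// Hypothetical native entry point, implemented in C with the NDK.
private static native void nativeDrawFrame(Surface surface, ByteBuffer pixels,
        int width, int height);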
