Slow H264 1080P@60fps Decoding on Android Lollipop

Posted 2019-05-31 15:27

Question:

I'm developing a Java RTP streaming app for a company project. It should be capable of joining a multicast group and receiving RTP packets. I then use an H264 depacketizer to reassemble a complete frame from the NAL fragmentation units (appending data until the FU end bit and the RTP marker bit are set).
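For reference, the depacketizer follows the usual FU-A logic from RFC 6184, roughly like the sketch below (the FuAReassembler class is an illustrative stand-in, not my production code):

import java.io.ByteArrayOutputStream;

// Minimal sketch of FU-A (RFC 6184) reassembly. Feed one RTP payload whose
// NAL type is 28 (FU-A); a completed NAL unit (with Annex-B start code) is
// returned when the FU end bit is set, or null while still assembling.
class FuAReassembler {
    private final ByteArrayOutputStream frame = new ByteArrayOutputStream();

    byte[] feed(byte[] payload) {
        byte fuIndicator = payload[0];           // F, NRI, type = 28
        byte fuHeader = payload[1];              // S, E, R, original NAL type
        boolean start = (fuHeader & 0x80) != 0;  // S bit: first fragment
        boolean end = (fuHeader & 0x40) != 0;    // E bit: last fragment

        if (start) {
            frame.reset();
            frame.write(0); frame.write(0); frame.write(0); frame.write(1);
            // Rebuild the original NAL header from the indicator's F/NRI bits
            // and the FU header's type bits.
            frame.write((fuIndicator & 0xE0) | (fuHeader & 0x1F));
        }
        frame.write(payload, 2, payload.length - 2); // skip the two FU bytes
        return end ? frame.toByteArray() : null;
    }
}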

I want to decode and display a raw H264 video byte stream on Android, so I'm currently using the MediaCodec API configured with the hardware decoder.

The application is up and running on Jelly Bean (API 17). The resolutions I need to decode are:

  • 480p at 30/60 fps
  • 720p/i at 30/60 fps
  • 1080p/i at 30/60 fps

Recently, due to a system upgrade, we are porting the app to Android L (5.0.2). The app is no longer capable of playing high-resolution videos such as 720p@60fps and 1080p@60fps.

For debugging purposes I started feeding elementary H264 frames (with their sizes read from a dump file) to MediaCodec and found that the video lags: the sample video has burned-in timestamps, and the rendered video takes noticeably more than one second of wall-clock time to advance by one second of video time.
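To make the lag measurable I log the achieved output frame rate with a small helper along these lines (the FpsMeter class is illustrative; it is not part of the code below):

import android.os.SystemClock;
import android.util.Log;

// Counts rendered frames per wall-clock second; call tick() once per frame,
// right after releaseOutputBuffer(index, true), and watch logcat.
class FpsMeter {
    private static final String TAG = "FpsMeter";
    private long windowStartMs = SystemClock.elapsedRealtime();
    private int frames = 0;

    void tick() {
        frames++;
        long elapsedMs = SystemClock.elapsedRealtime() - windowStartMs;
        if (elapsedMs >= 1000) {
            Log.d(TAG, String.format("achieved fps: %.1f", frames * 1000f / elapsedMs));
            frames = 0;
            windowStartMs = SystemClock.elapsedRealtime();
        }
    }
}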
Below are my sample code and links to the sample video:
h264 video: https://www.dropbox.com/s/cocjhhovihm8q25/dump60fps.h264?dl=0
h264 frame sizes: https://www.dropbox.com/s/r146d5zederrne1/dump60fps.size?dl=0

Also, as this is my first question on Stack Overflow, please bear with me regarding the bad code formatting and direct references.

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.ByteBuffer;

import android.app.Activity;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.widget.FrameLayout;
import android.widget.RelativeLayout;
import android.widget.TextView;
// SpeedControlCallback comes from Google's Grafika sample code; the import
// path depends on where it was copied into the project.

public class MainActivity extends Activity {

    static final String TAG = "MainActivity";
    private PlayerThread mPlayer = null;
    private static final String MIME_TYPE = "video/avc";


    private byte[] mSPSPPSFrame = new byte[3000];
    private byte[] sps = new byte[37];
    File videoFile = null;
    File videoFile1 = null;
    TextView tv;

    FileInputStream videoFileStream = null;
    FileInputStream videoFileStream1 = null;
    int[] tall = null;
    SpeedControlCallback mspeed = new SpeedControlCallback();

    int mStreamLen = 0;
    FrameLayout game;
    RelativeLayout rl;
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);


        //mVideoSurfaceView = (SurfaceView)findViewById(R.id.videoSurfaceView);
        setContentView(R.layout.activity_main);

        SurfaceView first = (SurfaceView) findViewById(R.id.firstSurface);
        first.getHolder().addCallback(new SurfaceHolder.Callback() {
            @Override
            public void surfaceCreated(SurfaceHolder surfaceHolder) {
                Log.d(TAG, "First surface created!");
            }

            @Override
            public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i2, int i3) {
                Log.d(TAG, "surfaceChanged()");
                surfaceHolder.getSurface();

                if (mPlayer == null) {
                    mPlayer = new PlayerThread(surfaceHolder.getSurface());
                    mPlayer.start();

                }

            }

            @Override
            public void surfaceDestroyed(SurfaceHolder surfaceHolder) {
                Log.d(TAG, "First surface destroyed!");
            }
        });

        tv = (TextView) findViewById(R.id.textview);

        videoFile = new File("/data/local/tmp/dump60fps.h264");
        videoFile1 = new File("/data/local/tmp/dump60fps.size");

    }

    private class PlayerThread extends Thread {
        private Surface surface;

        public PlayerThread(Surface surface) {
            this.surface = surface;
        }

        @Override
        public void run() {
            try {
                decodeVideo(0, 1920, 1080, 50, surface);
            } catch (IOException e) {
                e.printStackTrace();
            } catch (InterruptedException e) {
                e.printStackTrace();
            } catch (Throwable e) {
                e.printStackTrace();
            }

        }
    }




    private void decodeVideo(int testinput, int width, int height,
            int threshold, Surface surface) throws Throwable {

        MediaCodec codec = null;
        MediaFormat mFormat;
        final long kTimeOutUs = 10000;
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        boolean sawInputEOS = false;
        boolean sawOutputEOS = false;
        MediaFormat oformat = null;
        int errors = -1;
        long presentationTimeUs = 0L; // note: never advanced; every frame is queued with PTS 0
        boolean mVideoStart = false;
        byte[] byteArray = new byte[65525*5*3];
        int i; 
        int sizeInBytes = 0, index, sampleSize = 0;


        try {

            byte[] bytes = new byte[(int) videoFile1.length()];
            FileInputStream fis = new FileInputStream(videoFile1);
            fis.read(bytes);
            fis.close();
            String[] valueStr = new String(bytes).trim().split("\\s+");
            tall = new int[valueStr.length];
            mStreamLen = valueStr.length;
            Log.e(TAG, "++++++ Total Frames ++++++"+mStreamLen);
            for ( i = 0; i < valueStr.length; i++) {
                tall[i] = Integer.parseInt(valueStr[i]);
            }
        } catch (IOException e1) {
            e1.printStackTrace();
        }

        index = 1;
        try {
            videoFileStream = new FileInputStream(videoFile);
        } catch (FileNotFoundException e1) {
            e1.printStackTrace();
        }



        if (!mVideoStart) {
            try {
                sizeInBytes = videoFileStream.read(mSPSPPSFrame, 0, 37);
                Log.e(TAG, "VideoEngine configure: read " + sizeInBytes + " bytes of SPS/PPS");
                //for (i = 0 ; i < sizeInBytes; i++){
                //  Log.e(TAG, "VideoEngine  ."+mSPSPPSFrame[i]);}



            } catch (IOException e1) {
                e1.printStackTrace();
            }
            sampleSize = sizeInBytes;
            index++;
            index++;

            mFormat = MediaFormat.createVideoFormat(MIME_TYPE, 1920, 1080);
            // Hand the SPS/PPS read above to the decoder as codec-specific data.
            mFormat.setByteBuffer("csd-0", ByteBuffer.wrap(mSPSPPSFrame, 0, sizeInBytes));
            codec = MediaCodec.createDecoderByType(MIME_TYPE);

            codec.configure(mFormat, surface /*surface*/ , null /* crypto */, 0 /* flags */);
            codec.start();
            codec.getInputBuffers();
            codec.getOutputBuffers();

        }

        //  index = 0;
        while (!sawOutputEOS && errors < 0) {


            if (!sawInputEOS) {
                int inputBufIndex = codec.dequeueInputBuffer(kTimeOutUs);
                //Log.d(TAG, String.format("Archana Dqing the input buffer with BufIndex #: %d",inputBufIndex));


                if (inputBufIndex >= 0) {
                    ByteBuffer dstBuf = codec.getInputBuffers()[inputBufIndex];


                    /*
                     * Read data from file and copy to the input ByteBuffer
                     */
                    try {
                        sizeInBytes = videoFileStream.read(byteArray, 0,
                                tall[index] /*+ 4*/);
                        sampleSize = tall[index]/*+ 4*/;
                        index++;

                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                    if (sizeInBytes <= 0) {
                        codec.queueInputBuffer(
                                inputBufIndex,
                                0 /* offset */,
                                0,
                                presentationTimeUs,
                                MediaCodec.BUFFER_FLAG_END_OF_STREAM );
                        sawInputEOS = true;
                    }
                    else {
                        dstBuf.put(byteArray, 0, sizeInBytes);

                        if (!mVideoStart) mVideoStart = true;

                        // mVideoStart was set just above, so this always queues
                        // with flags == 0; csd-0 was already supplied via the
                        // MediaFormat at configure time.
                        codec.queueInputBuffer(
                                inputBufIndex,
                                0 /* offset */,
                                sampleSize,
                                presentationTimeUs,
                                mVideoStart ? 0 : MediaCodec.BUFFER_FLAG_CODEC_CONFIG);
                        //Log.d(TAG, String.format(" After queueing the buffer to decoder with inputbufindex and samplesize #: %d ,%d ind %d",inputBufIndex,sampleSize,index));
                    }
                }
            }

            int res = codec.dequeueOutputBuffer(info, kTimeOutUs);
            //Log.d(TAG, String.format(" Getting the information about decoded output buffer flags,offset,PT,size #: %d %d %d %d",info.flags,info.offset,info.presentationTimeUs,info.size));
            //Log.d(TAG, String.format(" Getting the output of decoder in res #: %d",res));

            if (res >= 0) {
                int outputBufIndex = res;

                //Log.d(TAG, "Output PTS "+info.presentationTimeUs);


                //mspeed.preRender(info.presentationTimeUs);
                //mspeed.setFixedPlaybackRate(25);

                codec.releaseOutputBuffer(outputBufIndex, true /* render */);
                //Log.d(TAG, String.format(" releaseoutputbuffer index= #: %d",outputBufIndex));


                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    Log.d(TAG, "saw output EOS.");
                    sawOutputEOS = true;
                }

            } else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                codec.getOutputBuffers();
                Log.d(TAG, "output buffers have changed.");

            } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                oformat = codec.getOutputFormat();
                Log.d(TAG, "output format has changed to " + oformat);
            }

        }
        codec.stop();
        codec.release();
        // finish() must run on the UI thread; decodeVideo() runs on PlayerThread.
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                finish();
            }
        });

    }


    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.activity_main, menu);
        return true;
    }
}

Answer 1:

There are a couple of workarounds to the problem seen with the above sample test.

  • Instead of feeding one full frame to the decoder input, I fed a single NAL unit at a time (a splitting sketch follows this list). Playback was still too slow to keep up with 60 fps.
  • Google changed the Surface BufferQueue implementation from asynchronous to synchronous. Hence, when we call MediaCodec.dequeueOutputBuffer to get decoded data, the server side (SurfaceTexture::dequeueBuffer) waits for a buffer to be queued, and the client side waits for that, so SurfaceTextureClient::dequeueBuffer does not return until a buffer has actually been queued on the server side. In asynchronous mode, by contrast, a new GraphicBuffer is allocated (see the threading sketch after the splitter below for one way to keep this from stalling input).
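For the first workaround, the frames were split on Annex-B start codes before queueing, roughly like this illustrative helper (not the exact code used):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Split an Annex-B elementary-stream frame into individual NAL units on
// 00 00 01 / 00 00 00 01 start codes; each returned unit keeps its start
// code so it can be queued to MediaCodec directly.
class NalSplitter {
    static List<byte[]> split(byte[] frame) {
        List<byte[]> nals = new ArrayList<byte[]>();
        int start = -1;
        for (int i = 0; i + 2 < frame.length; i++) {
            if (frame[i] == 0 && frame[i + 1] == 0 && frame[i + 2] == 1) {
                // Include a preceding zero byte if this is a 4-byte start code.
                int codeStart = (i > 0 && frame[i - 1] == 0) ? i - 1 : i;
                if (start >= 0) {
                    nals.add(Arrays.copyOfRange(frame, start, codeStart));
                }
                start = codeStart;
                i += 2; // jump past the start code
            }
        }
        if (start >= 0) {
            nals.add(Arrays.copyOfRange(frame, start, frame.length));
        }
        return nals;
    }
}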
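For the second point, one mitigation (an assumption on my side, not verified on this exact device) is to stop running input and output in lock-step in a single loop: drain and render output on its own thread, so a dequeueOutputBuffer call that blocks on the synchronous BufferQueue cannot starve the input side. A rough sketch, assuming codec is a final reference to a started, Surface-bound decoder:

// Drains decoder output on a dedicated thread and renders each buffer.
Thread outputDrainer = new Thread(new Runnable() {
    @Override
    public void run() {
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        while (!Thread.interrupted()) {
            int index = codec.dequeueOutputBuffer(info, 10000 /* us */);
            if (index >= 0) {
                // With the synchronous BufferQueue this call can block until
                // the consumer frees a buffer; only this thread is held up.
                codec.releaseOutputBuffer(index, true /* render */);
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    break;
                }
            }
            // INFO_OUTPUT_FORMAT_CHANGED / INFO_TRY_AGAIN_LATER are ignored
            // here for brevity; a real loop should handle them.
        }
    }
}, "decoder-output-drain");
outputDrainer.start();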