
MediaCodec.dequeueOutputBuffer takes a very long time when encoding h264 on Android

I'm trying to encode h264 video on Android with MediaCodec for real-time video streaming, but dequeueOutputBuffer keeps taking a very long time (actually it is sometimes very fast, but very slow at other times; see the log output below). I've seen it take as long as 200 ms before an output buffer is ready. Is there something wrong with my code, or do you think this is an issue with OMX.Nvidia.h264.encoder?

Maybe I need to downscale the image from 1280x720 to something smaller? Or maybe I need to dequeue and queue more input buffers while I'm waiting for an output buffer? (There are 6 input and 6 output buffers available.) I'm using Android API 19, so I can't use the asynchronous MediaCodec processing methods. I'm actually streaming images from a Google Project Tango tablet, so my other guess is that Tango's background operations may be taking too long and slowing the encoder down. Any ideas what might be slowing this down?

01-20 23:36:30.728 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 0.400666ms. 
01-20 23:36:30.855 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 94.290667ms. 
01-20 23:36:30.880 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 0.57ms. 
01-20 23:36:30.929 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 4.878417ms. 
01-20 23:36:31.042 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 77.495417ms. 
01-20 23:36:31.064 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 0.3225ms. 
01-20 23:36:31.182 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 74.777583ms. 
01-20 23:36:31.195 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 0.23ms. 
01-20 23:36:31.246 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 17.243583ms. 
01-20 23:36:31.350 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 80.14725ms. 
01-20 23:36:31.373 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 2.493834ms. 
01-20 23:36:31.421 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 13.273ms. 
01-20 23:36:31.546 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 93.543667ms. 
01-20 23:36:31.576 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 5.309334ms. 
01-20 23:36:31.619 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 13.402583ms. 
01-20 23:36:31.686 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 22.5485ms. 
01-20 23:36:31.809 2920-3014/com.... D/StreamingThread: dequeueOutputBuffer took 91.392083ms. 

My relevant code follows:

public class StreamingThread extends Thread { 
    ... 

    // encoding 
    private MediaCodec mVideoEncoder = null; 
    private ByteBuffer[] mEncoderInputBuffers = null; 
    private ByteBuffer[] mEncoderOutputBuffers = null; 
    private NV21Convertor mNV21Converter = null; 

    public static native VideoFrame getNewFrame(); 

    public StreamingThread() 
    { 
     this.setPriority(MAX_PRIORITY); 
    } 

    @Override 
    public void run() 
    { 
     Looper.prepare(); 
     init(); 
     Looper.loop(); 
    } 

    private void init() 
    { 
     mHandler = new Handler() { 
      public void handleMessage(Message msg) { 
       // process incoming messages here 
       switch(msg.what) 
       { 
        case HAVE_NEW_FRAME: // new frame has arrived (signaled from main thread) 
         processBufferedFrames(); 
         break; 

        case CLOSE_THREAD: 
         close(); 
         break; 

        default: 
         Log.e(LOGTAG, "received unknown message!"); 
       } 
      } 
     }; 

     try { 
      ... 
      // set up video encoding 
      final String mime = "video/avc"; // H.264/AVC 
      listAvailableEncoders(mime); // (this creates some debug output only) 
      String codec = "OMX.Nvidia.h264.encoder"; // instead, hard-code the codec we want to use for now 

      mVideoEncoder = MediaCodec.createByCodecName(codec); 
      if(mVideoEncoder == null) 
       Log.e(LOGTAG, "Media codec " + codec + " is not available!"); 

      // TODO: change, based on what we're streaming... 
      int FRAME_WIDTH = 1280; 
      int FRAME_HEIGHT = 720; 

      // https://github.com/fyhertz/libstreaming/blob/ac44416d88ed3112869ef0f7eab151a184bbb78d/src/net/majorkernelpanic/streaming/hw/EncoderDebugger.java 
      mNV21Converter = new NV21Convertor(); 
      mNV21Converter.setSize(FRAME_WIDTH, FRAME_HEIGHT); 
      mNV21Converter.setEncoderColorFormat(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar); 
      mNV21Converter.setColorPanesReversed(true); 
      mNV21Converter.setYPadding(0); 

      MediaFormat format = MediaFormat.createVideoFormat(mime, FRAME_WIDTH, FRAME_HEIGHT); 
      format.setInteger(MediaFormat.KEY_FRAME_RATE, 25); 
      format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10); 
      format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar); 
      // TODO: optimize bit rate 
      format.setInteger(MediaFormat.KEY_BIT_RATE, 250000); // 250,000 bits/second ≈ 31 KB/s 

      mVideoEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); 
      mVideoEncoder.start(); 
      mEncoderInputBuffers = mVideoEncoder.getInputBuffers(); 
      mEncoderOutputBuffers = mVideoEncoder.getOutputBuffers(); 

      Log.d(LOGTAG, "Number of input buffers " + mEncoderInputBuffers.length); 
      Log.d(LOGTAG, "Number of output buffers " + mEncoderOutputBuffers.length); 

      initialized = true; 

     } catch (Exception e) { 
      e.printStackTrace(); 
     } 
    } 

    private void close() 
    { 
     Looper.myLooper().quit(); 
     mVideoEncoder.stop(); 
     mVideoEncoder.release(); 
     mVideoEncoder = null; 
    } 

    private void processBufferedFrames() 
    { 
     if (!initialized) 
      return; 
     VideoFrame frame = getNewFrame(); 

     try { 
      sendTCPFrame(frame); 

     } catch (Exception e) { 
      e.printStackTrace(); 
     } 
    } 
    private void sendTCPFrame(VideoFrame frame) 
    { 
     long start = System.nanoTime(); 

     long start2 = System.nanoTime(); 
     int inputBufferIndex = -1; 
     while((inputBufferIndex = mVideoEncoder.dequeueInputBuffer(-1)) < 0) { // -1: wait indefinitely for the buffer 
      switch(inputBufferIndex) { 
       default: 
        Log.e(LOGTAG, "dequeueInputBuffer returned unknown value: " + inputBufferIndex); 
      } 
     } 
     // fill in input (raw) data: 
     mEncoderInputBuffers[inputBufferIndex].clear(); 

     long stop2 = System.nanoTime(); 
     Log.d(LOGTAG, "dequeueInputBuffer took " + (stop2 - start2)/1e6 + "ms."); 

     start2 = System.nanoTime(); 
     byte[] pixels = mNV21Converter.convert(frame.pixels); 
     stop2 = System.nanoTime(); 
     Log.d(LOGTAG, "mNV21Converter.convert took " + (stop2-start2)/1e6 + "ms."); 

     start2 = System.nanoTime(); 
     mEncoderInputBuffers[inputBufferIndex].put(pixels); 
     stop2 = System.nanoTime(); 
     Log.d(LOGTAG, "mEncoderInputBuffers[inputBufferIndex].put(pixels) took " + (stop2 - start2)/1e6 + "ms."); 

     start2 = System.nanoTime(); 
     //mVideoEncoder.queueInputBuffer(inputBufferIndex, 0, pixels.length, 0, 0); 
     // note: presentationTimeUs is expected in microseconds, not nanoseconds 
     mVideoEncoder.queueInputBuffer(inputBufferIndex, 0, pixels.length, System.nanoTime() / 1000, 0); 
     stop2 = System.nanoTime(); 
     Log.d(LOGTAG, "queueInputBuffer took " + (stop2 - start2)/1e6 + "ms."); 

     start2 = System.nanoTime(); 
     // wait for encoded data to become available: 
     int outputBufferIndex = -1; 
     MediaCodec.BufferInfo bufInfo = new MediaCodec.BufferInfo(); 
     long timeoutUs = -1;//10000; // microseconds 
     while((outputBufferIndex = mVideoEncoder.dequeueOutputBuffer(bufInfo, timeoutUs)) < 0) { // -1: wait indefinitely for the buffer 
      Log.i(LOGTAG, "dequeueOutputBuffer returned value: " + outputBufferIndex); 
      switch(outputBufferIndex) { 
       case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED: 
        // output buffers have changed, move reference 
        mEncoderOutputBuffers = mVideoEncoder.getOutputBuffers(); 
        break; 
       case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED: 
        // Subsequent data will conform to new format. 
        //MediaFormat format = codec.getOutputFormat(); 
        Log.e(LOGTAG, "dequeueOutputBuffer returned INFO_OUTPUT_FORMAT_CHANGED ?!"); 
        break; 
       case MediaCodec.INFO_TRY_AGAIN_LATER: 
        Log.w(LOGTAG, "dequeueOutputBuffer return INFO_TRY_AGAIN_LATER"); 
        break; 
       default: 
        Log.e(LOGTAG, "dequeueOutputBuffer returned unknown value: " + outputBufferIndex); 
      } 
     } 
     stop2 = System.nanoTime(); 
     Log.d(LOGTAG, "dequeueOutputBuffer took " + (stop2 - start2)/1e6 + "ms."); 

     // output (encoded) data available! 
     Log.d(LOGTAG, "encoded buffer info: size = " + bufInfo.size + ", offset = " + bufInfo.offset + ", presentationTimeUs = " + bufInfo.presentationTimeUs + ", flags = " + bufInfo.flags); 
     ByteBuffer encodedData = mEncoderOutputBuffers[outputBufferIndex]; 
     final int sizeOfImageData = bufInfo.size; 

     long stop = System.nanoTime(); 
     Log.d(LOGTAG, "Encoding image took " + (stop-start)/1e6 + "ms."); 

     start = System.nanoTime(); 
     // assemble header: 
    ... 

     encodedData.rewind(); 
     // copy (!) the encoded data into a freshly allocated direct buffer: 
     ByteBuffer imageBuffer = ByteBuffer.allocateDirect(encodedData.remaining()); 
     imageBuffer.put(encodedData); // TODO: can this copy be avoided? 

     stop = System.nanoTime(); 
     Log.d(LOGTAG, "Preparing content for streaming took " + (stop - start)/1e6 + "ms."); 
     // do streaming via TCP 
     ... 
     mVideoEncoder.releaseOutputBuffer(outputBufferIndex, false); 
    } 

    // see http://developer.android.com/reference/android/media/MediaCodecInfo.html 
    private void listAvailableEncoders(String mimeType) 
    { 
     Log.d(LOGTAG, "Available encoders for mime type " + mimeType + ":"); 
     for (int i = 0; i < MediaCodecList.getCodecCount(); i++) { 
      MediaCodecInfo codec = MediaCodecList.getCodecInfoAt(i); 

      if (!codec.isEncoder()) 
       continue; 

      String[] types = codec.getSupportedTypes(); 
      for (int j = 0; j < types.length; j++) { 
       //if (types[j].equalsIgnoreCase(mimeType)) { 
       String msg = "- name: " + codec.getName() + ", supported color formats for " + mimeType + ":"; 
       MediaCodecInfo.CodecCapabilities cap = codec.getCapabilitiesForType(mimeType); 
       for(int k = 0; k < cap.colorFormats.length; ++k) msg = msg + " " + cap.colorFormats[k]; 
       Log.d(LOGTAG, msg); 
       // break; 
       //} 
      } 
     } 
    }
}

Answer


Yes, there is something wrong with your code - you are synchronously waiting for the current frame to come out of the encoder before you feed it the next frame. Most hardware codecs have more latency than you might expect, and to get the throughput the encoder is actually capable of, you need to use it asynchronously.

That is, after submitting one input buffer for encoding, you should not wait for the encoded output buffer, but only check whether output is available. You should then go ahead and queue the next input buffer, and again check whether any output is available. Only once you can't get a free input buffer immediately should you start waiting for output. This way the encoder always has more than one input buffer queued and can stay busy, so it actually achieves the frame rate it is capable of.
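To illustrate, here is a minimal sketch of that pattern using the API 19 synchronous interface and the fields from the question (mVideoEncoder, mEncoderInputBuffers, mEncoderOutputBuffers); drainEncoder and handleEncodedFrame are hypothetical helper names, not part of the original code:

private void feedFrame(byte[] pixels, long ptsUs) {
    // First drain whatever the encoder has already finished, without blocking.
    drainEncoder(0);

    // Try to get a free input buffer without blocking; only if none is
    // available is the encoder saturated, and only then do we wait on output.
    int inIndex = mVideoEncoder.dequeueInputBuffer(0);
    while (inIndex < 0) {
        drainEncoder(10000); // wait up to 10 ms for output, then retry
        inIndex = mVideoEncoder.dequeueInputBuffer(0);
    }
    ByteBuffer in = mEncoderInputBuffers[inIndex];
    in.clear();
    in.put(pixels);
    mVideoEncoder.queueInputBuffer(inIndex, 0, pixels.length, ptsUs, 0);
}

private void drainEncoder(long timeoutUs) {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    while (true) {
        int outIndex = mVideoEncoder.dequeueOutputBuffer(info, timeoutUs);
        if (outIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
            break; // nothing ready yet
        } else if (outIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            mEncoderOutputBuffers = mVideoEncoder.getOutputBuffers();
        } else if (outIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // the codec config (SPS/PPS) is available via getOutputFormat() if needed
        } else if (outIndex >= 0) {
            ByteBuffer encoded = mEncoderOutputBuffers[outIndex];
            handleEncodedFrame(encoded, info); // e.g. hand the frame to the TCP sender
            mVideoEncoder.releaseOutputBuffer(outIndex, false);
        }
        timeoutUs = 0; // after the first call, keep polling without waiting
    }
}

The important part is that dequeueOutputBuffer is normally called with a timeout of 0, so feeding the encoder never stalls just because the most recent frame has not finished encoding yet.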

(If you have no problem requiring Android 5.0, you could take a look at MediaCodec.setCallback, which makes it much easier to work asynchronously.)
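For reference, here is a minimal sketch of what that could look like on API 21+, assuming the same encoder, converter and format objects as in the question; sendOverTcp is a hypothetical placeholder for the streaming code:

// setCallback must be installed while the codec is still unconfigured,
// i.e. before configure()/start().
mVideoEncoder.setCallback(new MediaCodec.Callback() {
    @Override
    public void onInputBufferAvailable(MediaCodec codec, int index) {
        // assumes a new raw frame is available whenever the codec asks for input
        byte[] pixels = mNV21Converter.convert(getNewFrame().pixels);
        ByteBuffer in = codec.getInputBuffer(index);
        in.clear();
        in.put(pixels);
        codec.queueInputBuffer(index, 0, pixels.length, System.nanoTime() / 1000, 0);
    }

    @Override
    public void onOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo info) {
        ByteBuffer encoded = codec.getOutputBuffer(index);
        sendOverTcp(encoded, info); // hypothetical: hand the encoded NAL units to the TCP stream
        codec.releaseOutputBuffer(index, false);
    }

    @Override
    public void onOutputFormatChanged(MediaCodec codec, MediaFormat newFormat) {
        // SPS/PPS and the final output format arrive here
    }

    @Override
    public void onError(MediaCodec codec, MediaCodec.CodecException e) {
        Log.e(LOGTAG, "encoder error", e);
    }
});
mVideoEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mVideoEncoder.start();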

There are even some codecs (mainly decoders though, if I remember correctly) that won't output the first buffer at all until you have passed them a few more input buffers.