
I'm somewhat new when it comes to MediaCodec (and video encoding/decoding in general), so please correct me if anything I say here is wrong. How do I play the raw h264 produced by the MediaCodec encoder?

I want to play the raw h264 output of MediaCodec with VLC/ffplay. I need this to play because my end goal is to stream some live video to a computer, and MediaMuxer only produces a file on disk rather than something I can stream to a desktop with (very) low latency. (I'm open to other solutions, but I haven't found anything else that fits the latency requirement.)

Here is the code I'm using to encode the video and write it to a file (it's based on the MediaCodec example found here, with only the MediaMuxer part removed):

package com.jackos2500.droidtop; 

import android.media.MediaCodec; 
import android.media.MediaCodecInfo; 
import android.media.MediaFormat; 
import android.opengl.EGL14; 
import android.opengl.EGLConfig; 
import android.opengl.EGLContext; 
import android.opengl.EGLDisplay; 
import android.opengl.EGLExt; 
import android.opengl.EGLSurface; 
import android.opengl.GLES20; 
import android.os.Environment; 
import android.util.Log; 
import android.view.Surface; 

import java.io.BufferedOutputStream; 
import java.io.File; 
import java.io.FileOutputStream; 
import java.io.IOException; 
import java.nio.ByteBuffer; 

public class StreamH264 { 
    private static final String TAG = "StreamH264"; 
    private static final boolean VERBOSE = true;   // lots of logging 

    // where to put the output file (note: /sdcard requires WRITE_EXTERNAL_STORAGE permission) 
    private static final File OUTPUT_DIR = Environment.getExternalStorageDirectory(); 

    public static int MEGABIT = 1000 * 1000; 
    private static final int IFRAME_INTERVAL = 10; 

    private static final int TEST_R0 = 0; 
    private static final int TEST_G0 = 136; 
    private static final int TEST_B0 = 0; 
    private static final int TEST_R1 = 236; 
    private static final int TEST_G1 = 50; 
    private static final int TEST_B1 = 186; 

    private MediaCodec codec; 
    private CodecInputSurface inputSurface; 
    private BufferedOutputStream out; 

    private MediaCodec.BufferInfo bufferInfo; 
    public StreamH264() { 

    } 

    private void prepareEncoder() throws IOException { 
     bufferInfo = new MediaCodec.BufferInfo(); 

     MediaFormat format = MediaFormat.createVideoFormat("video/avc", 1280, 720); 
     format.setInteger(MediaFormat.KEY_BIT_RATE, 2 * MEGABIT); 
     format.setInteger(MediaFormat.KEY_FRAME_RATE, 30); 
     format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); 
     format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL); 

     codec = MediaCodec.createEncoderByType("video/avc"); 
     codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); 
     inputSurface = new CodecInputSurface(codec.createInputSurface()); 
     codec.start(); 

     File dst = new File(OUTPUT_DIR, "test.264"); 
     out = new BufferedOutputStream(new FileOutputStream(dst)); 
    } 
    private void releaseEncoder() throws IOException { 
     if (VERBOSE) Log.d(TAG, "releasing encoder objects"); 
     if (codec != null) { 
      codec.stop(); 
      codec.release(); 
      codec = null; 
     } 
     if (inputSurface != null) { 
      inputSurface.release(); 
      inputSurface = null; 
     } 
     if (out != null) { 
      out.flush(); 
      out.close(); 
      out = null; 
     } 
    } 
    public void stream() throws IOException { 
     try { 
      prepareEncoder(); 
      inputSurface.makeCurrent(); 
      for (int i = 0; i < (30 * 5); i++) { 
       // Feed any pending encoder output into the file. 
       drainEncoder(false); 

       // Generate a new frame of input. 
       generateSurfaceFrame(i); 
       inputSurface.setPresentationTime(computePresentationTimeNsec(i, 30)); 

       // Submit it to the encoder. The eglSwapBuffers call will block if the input 
       // is full, which would be bad if it stayed full until we dequeued an output 
       // buffer (which we can't do, since we're stuck here). So long as we fully drain 
       // the encoder before supplying additional input, the system guarantees that we 
       // can supply another frame without blocking. 
       if (VERBOSE) Log.d(TAG, "sending frame " + i + " to encoder"); 
       inputSurface.swapBuffers(); 
      } 
      // send end-of-stream to encoder, and drain remaining output 
      drainEncoder(true); 
     } finally { 
      // release encoder, muxer, and input Surface 
      releaseEncoder(); 
     } 
    } 

    private void drainEncoder(boolean endOfStream) throws IOException { 
     final int TIMEOUT_USEC = 10000; 
     if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")"); 

     if (endOfStream) { 
      if (VERBOSE) Log.d(TAG, "sending EOS to encoder"); 
      codec.signalEndOfInputStream(); 
     } 
     ByteBuffer[] outputBuffers = codec.getOutputBuffers(); 
     while (true) { 
      int encoderStatus = codec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC); 
      if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) { 
       // no output available yet 
       if (!endOfStream) { 
        break;  // out of while 
       } else { 
        if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS"); 
       } 
      } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { 
       // not expected for an encoder 
       outputBuffers = codec.getOutputBuffers(); 
      } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { 
       // should happen before receiving buffers, and should only happen once 
       MediaFormat newFormat = codec.getOutputFormat(); 
       Log.d(TAG, "encoder output format changed: " + newFormat); 
      } else if (encoderStatus < 0) { 
       Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus); 
       // let's ignore it 
      } else { 
       ByteBuffer encodedData = outputBuffers[encoderStatus]; 
       if (encodedData == null) { 
        throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null"); 
       } 

       if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { 
        // The codec config data was pulled out and fed to the muxer when we got 
        // the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it. 
        if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG"); 
        bufferInfo.size = 0; 
       } 

       if (bufferInfo.size != 0) { 
        // adjust the ByteBuffer values to match BufferInfo (not needed?) 
        encodedData.position(bufferInfo.offset); 
        encodedData.limit(bufferInfo.offset + bufferInfo.size); 

        byte[] data = new byte[bufferInfo.size]; 
        encodedData.get(data); 
        out.write(data); 
        if (VERBOSE) Log.d(TAG, "sent " + bufferInfo.size + " bytes to file"); 
       } 

       codec.releaseOutputBuffer(encoderStatus, false); 

       if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { 
        if (!endOfStream) { 
         Log.w(TAG, "reached end of stream unexpectedly"); 
        } else { 
         if (VERBOSE) Log.d(TAG, "end of stream reached"); 
        } 
        break;  // out of while 
       } 
      } 
     } 
    } 
    private void generateSurfaceFrame(int frameIndex) { 
     frameIndex %= 8; 

     int startX, startY; 
     if (frameIndex < 4) { 
      // (0,0) is bottom-left in GL 
      startX = frameIndex * (1280/4); 
      startY = 720/2; 
     } else { 
      startX = (7 - frameIndex) * (1280/4); 
      startY = 0; 
     } 

     GLES20.glClearColor(TEST_R0/255.0f, TEST_G0/255.0f, TEST_B0/255.0f, 1.0f); 
     GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); 

     GLES20.glEnable(GLES20.GL_SCISSOR_TEST); 
     GLES20.glScissor(startX, startY, 1280/4, 720/2); 
     GLES20.glClearColor(TEST_R1/255.0f, TEST_G1/255.0f, TEST_B1/255.0f, 1.0f); 
     GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); 
     GLES20.glDisable(GLES20.GL_SCISSOR_TEST); 
    } 
    private static long computePresentationTimeNsec(int frameIndex, int frameRate) { 
     final long ONE_BILLION = 1000000000; 
     return frameIndex * ONE_BILLION/frameRate; 
    } 

    /** 
    * Holds state associated with a Surface used for MediaCodec encoder input. 
    * <p> 
    * The constructor takes a Surface obtained from MediaCodec.createInputSurface(), and uses that 
    * to create an EGL window surface. Calls to eglSwapBuffers() cause a frame of data to be sent 
    * to the video encoder. 
    * <p> 
    * This object owns the Surface -- releasing this will release the Surface too. 
    */ 
    private static class CodecInputSurface { 
     private static final int EGL_RECORDABLE_ANDROID = 0x3142; 

     private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY; 
     private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT; 
     private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE; 

     private Surface mSurface; 

     /** 
     * Creates a CodecInputSurface from a Surface. 
     */ 
     public CodecInputSurface(Surface surface) { 
      if (surface == null) { 
       throw new NullPointerException(); 
      } 
      mSurface = surface; 

      eglSetup(); 
     } 

     /** 
     * Prepares EGL. We want a GLES 2.0 context and a surface that supports recording. 
     */ 
     private void eglSetup() { 
      mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY); 
      if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) { 
       throw new RuntimeException("unable to get EGL14 display"); 
      } 
      int[] version = new int[2]; 
      if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) { 
       throw new RuntimeException("unable to initialize EGL14"); 
      } 

      // Configure EGL for recording and OpenGL ES 2.0. 
      int[] attribList = { 
        EGL14.EGL_RED_SIZE, 8, 
        EGL14.EGL_GREEN_SIZE, 8, 
        EGL14.EGL_BLUE_SIZE, 8, 
        EGL14.EGL_ALPHA_SIZE, 8, 
        EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT, 
        EGL_RECORDABLE_ANDROID, 1, 
        EGL14.EGL_NONE 
      }; 
      EGLConfig[] configs = new EGLConfig[1]; 
      int[] numConfigs = new int[1]; 
      EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length, 
        numConfigs, 0); 
      checkEglError("eglCreateContext RGB888+recordable ES2"); 

      // Configure context for OpenGL ES 2.0. 
      int[] attrib_list = { 
        EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, 
        EGL14.EGL_NONE 
      }; 
      mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT, 
        attrib_list, 0); 
      checkEglError("eglCreateContext"); 

      // Create a window surface, and attach it to the Surface we received. 
      int[] surfaceAttribs = { 
        EGL14.EGL_NONE 
      }; 
      mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, configs[0], mSurface, 
        surfaceAttribs, 0); 
      checkEglError("eglCreateWindowSurface"); 
     } 

     /** 
     * Discards all resources held by this class, notably the EGL context. Also releases the 
     * Surface that was passed to our constructor. 
     */ 
     public void release() { 
      if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) { 
       EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, 
         EGL14.EGL_NO_CONTEXT); 
       EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface); 
       EGL14.eglDestroyContext(mEGLDisplay, mEGLContext); 
       EGL14.eglReleaseThread(); 
       EGL14.eglTerminate(mEGLDisplay); 
      } 

      mSurface.release(); 

      mEGLDisplay = EGL14.EGL_NO_DISPLAY; 
      mEGLContext = EGL14.EGL_NO_CONTEXT; 
      mEGLSurface = EGL14.EGL_NO_SURFACE; 

      mSurface = null; 
     } 

     /** 
     * Makes our EGL context and surface current. 
     */ 
     public void makeCurrent() { 
      EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext); 
      checkEglError("eglMakeCurrent"); 
     } 

     /** 
     * Calls eglSwapBuffers. Use this to "publish" the current frame. 
     */ 
     public boolean swapBuffers() { 
      boolean result = EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface); 
      checkEglError("eglSwapBuffers"); 
      return result; 
     } 

     /** 
     * Sends the presentation time stamp to EGL. Time is expressed in nanoseconds. 
     */ 
     public void setPresentationTime(long nsecs) { 
      EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurface, nsecs); 
      checkEglError("eglPresentationTimeANDROID"); 
     } 

     /** 
     * Checks for EGL errors. Throws an exception if one is found. 
     */ 
     private void checkEglError(String msg) { 
      int error; 
      if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) { 
       throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error)); 
      } 
     } 
    } 
} 

However, the file produced by this code does not play with VLC or ffplay. Can anyone tell me what I'm doing wrong? I believe it's due to an incorrect (or completely missing) header format required for playing raw h264, since I have successfully played .264 files downloaded from the internet with ffplay. Also, I'm not sure exactly how I'm going to stream this video to the computer, so if somebody could give me some suggestions as to how I might do that, I would be very grateful! Thanks!

Answers

Answer (score 5):

You should be able to play back a raw H264 stream (as you wrote, other raw .264 files play back fine with VLC or ffplay), but you are missing the parameter sets. These are passed in two different ways, and you happen to be missing both of them. First, they are returned in the MediaFormat when you get MediaCodec.INFO_OUTPUT_FORMAT_CHANGED (which you don't handle; you just log a message about it). Secondly, they are returned in a buffer with the MediaCodec.BUFFER_FLAG_CODEC_CONFIG flag set (which you ignore by setting its size to 0). The simplest solution here is to remove the special-case handling of MediaCodec.BUFFER_FLAG_CODEC_CONFIG, and it should all work fine.
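Concretely, that means deleting the block in drainEncoder above that zeroes out bufferInfo.size when BUFFER_FLAG_CODEC_CONFIG is set, so that the config buffer (which carries the SPS/PPS parameter sets) gets written out like any other buffer. A minimal sketch of the resulting write path, using only fields already defined in your class:

// Inside the final else branch of drainEncoder(), with the
// BUFFER_FLAG_CODEC_CONFIG special case removed: the config buffer
// (SPS/PPS parameter sets) now reaches the stream exactly like a frame.
if (bufferInfo.size != 0) {
    encodedData.position(bufferInfo.offset);
    encodedData.limit(bufferInfo.offset + bufferInfo.size);

    byte[] data = new byte[bufferInfo.size];
    encodedData.get(data);
    out.write(data);
    if (VERBOSE) Log.d(TAG, "sent " + bufferInfo.size + " bytes to file");
}
codec.releaseOutputBuffer(encoderStatus, false);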

The code you based this on does things that way in order to exercise all the different ways of doing things; where you copied it from, the parameter sets were carried in the MediaFormat delivered with MediaCodec.INFO_OUTPUT_FORMAT_CHANGED. If you wanted to go that route with a raw H264 bytestream in your case, you could write the byte buffers under the keys csd-0 and csd-1 from that MediaFormat, and then keep ignoring the buffers that have MediaCodec.BUFFER_FLAG_CODEC_CONFIG set.
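A sketch of that alternative, assuming the parameter sets are published under the standard csd-0 (SPS) and csd-1 (PPS) keys that video/avc encoders use:

// Inside the INFO_OUTPUT_FORMAT_CHANGED branch of drainEncoder():
// write the parameter sets to the stream once, up front, and keep
// ignoring the BUFFER_FLAG_CODEC_CONFIG buffer later, as the code already does.
MediaFormat newFormat = codec.getOutputFormat();
ByteBuffer sps = newFormat.getByteBuffer("csd-0"); // SPS, Annex-B start code included
ByteBuffer pps = newFormat.getByteBuffer("csd-1"); // PPS, Annex-B start code included
byte[] spsBytes = new byte[sps.remaining()];
sps.get(spsBytes);
byte[] ppsBytes = new byte[pps.remaining()];
pps.get(ppsBytes);
out.write(spsBytes);
out.write(ppsBytes);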

Sorry for the late reply, but removing the code that ignores those buffers by setting bufferInfo.size to 0 worked perfectly, thanks! – jackos2500 2014-11-02 19:28:13

I had been trying for days to find out how to generate the SPS/PPS header via MediaCodec. Thanks for your answer. – yorkw 2015-11-03 03:50:55

Answer (score 1):

You can't play pure h264; it doesn't carry any information about its format. You can also find several good examples here. For streaming, you need to implement some streaming protocol, such as RTSP (for real-time streaming) or the more flexible HLS (if real time isn't required).

OK, if I forget about playing the raw video, how could I send a frame to the computer, decode it, and display it on the screen? (preferably using Java) – jackos2500 2014-10-31 16:23:27

Well, you can send a frame over TCP or UDP (prefixing it with its size in bytes), and then on the other end feed it to an h264 decoder, telling the decoder about format details like bitrate, width/height, and so on. Since you're streaming to a PC, you need your own decoder application, probably using some library to help you decode the video and render it (usually with OpenGL). – 2014-10-31 16:26:52
