2016-09-20 50 views
6

我试图在视频的每一帧上绘制一个位图作为叠加层。我找到了一个关于如何解码和编码视频的例子,它是可以工作的。这个例子有一个 TextureRenderer 类,其中有一个 drawFrame 函数,我需要修改它来叠加位图。我是 OpenGL 的新手,但我知道我需要用位图创建一个纹理并将其绑定。我在下面的代码中尝试过,但它抛出了一个异常。(问题标题:OpenGL ES 2.0 在视频上绘制位图叠加层)

/* 
* Copyright (C) 2013 The Android Open Source Project 
* 
* Licensed under the Apache License, Version 2.0 (the "License"); 
* you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at 
* 
*  http://www.apache.org/licenses/LICENSE-2.0 
* 
* Unless required by applicable law or agreed to in writing, software 
* distributed under the License is distributed on an "AS IS" BASIS, 
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and 
* limitations under the License. 
*/ 
// from: https://android.googlesource.com/platform/cts/+/lollipop-release/tests/tests/media/src/android/media/cts/TextureRender.java 
// blob: 4125dcfcfed6ed7fddba5b71d657dec0d433da6a 
// modified: removed unused method bodies 
// modified: use GL_LINEAR for GL_TEXTURE_MIN_FILTER to improve quality. 

package com.example.name.videoeditortest; 
/** 
* Code for rendering a texture onto a surface using OpenGL ES 2.0. 
*/ 

import android.graphics.Bitmap; 
import android.graphics.SurfaceTexture; 
import android.opengl.GLES11Ext; 
import android.opengl.GLES20; 
import android.opengl.GLUtils; 
import android.opengl.Matrix; 
import android.util.Log; 

import java.io.FileOutputStream; 
import java.io.IOException; 
import java.nio.ByteBuffer; 
import java.nio.ByteOrder; 
import java.nio.FloatBuffer; 

/** 
* Code for rendering a texture onto a surface using OpenGL ES 2.0. 
*/ 
class TextureRender { 
    private Bitmap bitmap; 
    private static final String TAG = "TextureRender"; 
    private static final int FLOAT_SIZE_BYTES = 4; 
    private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES; 
    private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0; 
    private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3; 
    private final float[] mTriangleVerticesData = { 
      // X, Y, Z, U, V 
      -1.0f, -1.0f, 0, 0.f, 0.f, 
      1.0f, -1.0f, 0, 1.f, 0.f, 
      -1.0f, 1.0f, 0, 0.f, 1.f, 
      1.0f, 1.0f, 0, 1.f, 1.f, 
    }; 
    private FloatBuffer mTriangleVertices; 
    private static final String VERTEX_SHADER = 
      "uniform mat4 uMVPMatrix;\n" + 
        "uniform mat4 uSTMatrix;\n" + 
        "attribute vec4 aPosition;\n" + 
        "attribute vec4 aTextureCoord;\n" + 
        "varying vec2 vTextureCoord;\n" + 
        "void main() {\n" + 
        " gl_Position = uMVPMatrix * aPosition;\n" + 
        " vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" + 
        "}\n"; 
    private static final String FRAGMENT_SHADER = 
      "#extension GL_OES_EGL_image_external : require\n" + 
        "precision mediump float;\n" +  // highp here doesn't seem to matter 
        "varying vec2 vTextureCoord;\n" + 
        "uniform samplerExternalOES sTexture;\n" + 
        "void main() {\n" + 
        " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" + 
        "}\n"; 
    private float[] mMVPMatrix = new float[16]; 
    private float[] mSTMatrix = new float[16]; 
    private int mProgram; 
    private int mTextureID = -12345; 
    private int mTextureBitmapID = -12345; 
    private int muMVPMatrixHandle; 
    private int muSTMatrixHandle; 
    private int maPositionHandle; 
    private int maTextureHandle; 
    public TextureRender() { 
     mTriangleVertices = ByteBuffer.allocateDirect(
       mTriangleVerticesData.length * FLOAT_SIZE_BYTES) 
       .order(ByteOrder.nativeOrder()).asFloatBuffer(); 
     mTriangleVertices.put(mTriangleVerticesData).position(0); 
     Matrix.setIdentityM(mSTMatrix, 0); 
    } 
    public int getTextureId() { 
     return mTextureID; 
    } 
    public void drawFrame(SurfaceTexture st) { 
     checkGlError("onDrawFrame start"); 
     st.getTransformMatrix(mSTMatrix); 
     GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f); 
     GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT); 
     GLES20.glUseProgram(mProgram); 
     checkGlError("glUseProgram"); 
     //Bing textrues 
     GLES20.glActiveTexture(GLES20.GL_TEXTURE0); 
     GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID); 
     GLES20.glActiveTexture(GLES20.GL_TEXTURE_2D); 
     GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureBitmapID); 

     mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET); 
     GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, 
       TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices); 
     checkGlError("glVertexAttribPointer maPosition"); 
     GLES20.glEnableVertexAttribArray(maPositionHandle); 
     checkGlError("glEnableVertexAttribArray maPositionHandle"); 
     mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET); 
     GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false, 
       TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices); 
     checkGlError("glVertexAttribPointer maTextureHandle"); 
     GLES20.glEnableVertexAttribArray(maTextureHandle); 
     checkGlError("glEnableVertexAttribArray maTextureHandle"); 
     Matrix.setIdentityM(mMVPMatrix, 0); 
     GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0); 
     GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0); 
     GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); 
     checkGlError("glDrawArrays"); 
     GLES20.glFinish(); 
    } 
    /** 
    * Initializes GL state. Call this after the EGL surface has been created and made current. 
    */ 
    public void surfaceCreated() { 
     mProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER); 
     if (mProgram == 0) { 
      throw new RuntimeException("failed creating program"); 
     } 
     maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition"); 
     checkGlError("glGetAttribLocation aPosition"); 
     if (maPositionHandle == -1) { 
      throw new RuntimeException("Could not get attrib location for aPosition"); 
     } 
     maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord"); 
     checkGlError("glGetAttribLocation aTextureCoord"); 
     if (maTextureHandle == -1) { 
      throw new RuntimeException("Could not get attrib location for aTextureCoord"); 
     } 
     muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix"); 
     checkGlError("glGetUniformLocation uMVPMatrix"); 
     if (muMVPMatrixHandle == -1) { 
      throw new RuntimeException("Could not get attrib location for uMVPMatrix"); 
     } 
     muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix"); 
     checkGlError("glGetUniformLocation uSTMatrix"); 
     if (muSTMatrixHandle == -1) { 
      throw new RuntimeException("Could not get attrib location for uSTMatrix"); 
     } 
     int[] textures = new int[1]; 
     GLES20.glGenTextures(1, textures, 0); 
     mTextureID = textures[0]; 
     GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID); 
     checkGlError("glBindTexture mTextureID"); 
     GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, 
       GLES20.GL_NEAREST); 
     GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, 
       GLES20.GL_LINEAR); 
     GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, 
       GLES20.GL_CLAMP_TO_EDGE); 
     GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, 
       GLES20.GL_CLAMP_TO_EDGE); 
     checkGlError("glTexParameter"); 

     mTextureBitmapID = loadBitmapTexture(); 
    } 


    private int loadBitmapTexture() 
    { 
     final int[] textureHandle = new int[1]; 

     GLES20.glGenTextures(1, textureHandle, 0); 

     if (textureHandle[0] != 0) 
     { 
      // Bind to the texture in OpenGL 
      GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]); 

      // Set filtering 
      GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST); 
      GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST); 

      // Load the bitmap into the bound texture. 
      GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0); 
     } 

     if (textureHandle[0] == 0) 
     { 
      throw new RuntimeException("Error loading texture."); 
     } 

     return textureHandle[0]; 
    } 

    /** 
    * Replaces the fragment shader. 
    */ 
    public void changeFragmentShader(String fragmentShader) { 
     GLES20.glDeleteProgram(mProgram); 
     mProgram = createProgram(VERTEX_SHADER, fragmentShader); 
     if (mProgram == 0) { 
      throw new RuntimeException("failed creating program"); 
     } 
    } 
    private int loadShader(int shaderType, String source) { 
     int shader = GLES20.glCreateShader(shaderType); 
     checkGlError("glCreateShader type=" + shaderType); 
     GLES20.glShaderSource(shader, source); 
     GLES20.glCompileShader(shader); 
     int[] compiled = new int[1]; 
     GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0); 
     if (compiled[0] == 0) { 
      Log.e(TAG, "Could not compile shader " + shaderType + ":"); 
      Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader)); 
      GLES20.glDeleteShader(shader); 
      shader = 0; 
     } 
     return shader; 
    } 
    private int createProgram(String vertexSource, String fragmentSource) { 
     int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource); 
     if (vertexShader == 0) { 
      return 0; 
     } 
     int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource); 
     if (pixelShader == 0) { 
      return 0; 
     } 
     int program = GLES20.glCreateProgram(); 
     checkGlError("glCreateProgram"); 
     if (program == 0) { 
      Log.e(TAG, "Could not create program"); 
     } 
     GLES20.glAttachShader(program, vertexShader); 
     checkGlError("glAttachShader"); 
     GLES20.glAttachShader(program, pixelShader); 
     checkGlError("glAttachShader"); 
     GLES20.glLinkProgram(program); 
     int[] linkStatus = new int[1]; 
     GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0); 
     if (linkStatus[0] != GLES20.GL_TRUE) { 
      Log.e(TAG, "Could not link program: "); 
      Log.e(TAG, GLES20.glGetProgramInfoLog(program)); 
      GLES20.glDeleteProgram(program); 
      program = 0; 
     } 
     return program; 
    } 
    public void checkGlError(String op) { 
     int error; 
     while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) { 
      Log.e(TAG, op + ": glError " + error); 
      throw new RuntimeException(op + ": glError " + error); 
     } 
    } 

    public void setBitmap(Bitmap bitmap){ 
     this.bitmap = bitmap; 

    } 
    /** 
    * Saves the current frame to disk as a PNG image. Frame starts from (0,0). 
    * <p> 
    * Useful for debugging. 
    */ 
    public static void saveFrame(String filename, int width, int height) { 
     // glReadPixels gives us a ByteBuffer filled with what is essentially big-endian RGBA 
     // data (i.e. a byte of red, followed by a byte of green...). We need an int[] filled 
     // with native-order ARGB data to feed to Bitmap. 
     // 
     // If we implement this as a series of buf.get() calls, we can spend 2.5 seconds just 
     // copying data around for a 720p frame. It's better to do a bulk get() and then 
     // rearrange the data in memory. (For comparison, the PNG compress takes about 500ms 
     // for a trivial frame.) 
     // 
     // So... we set the ByteBuffer to little-endian, which should turn the bulk IntBuffer 
     // get() into a straight memcpy on most Android devices. Our ints will hold ABGR data. 
     // Swapping B and R gives us ARGB. We need about 30ms for the bulk get(), and another 
     // 270ms for the color swap. 
     // 
     // Making this even more interesting is the upside-down nature of GL, which means we 
     // may want to flip the image vertically here. 
     ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4); 
     buf.order(ByteOrder.LITTLE_ENDIAN); 
     GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf); 
     buf.rewind(); 
     int pixelCount = width * height; 
     int[] colors = new int[pixelCount]; 
     buf.asIntBuffer().get(colors); 
     for (int i = 0; i < pixelCount; i++) { 
      int c = colors[i]; 
      colors[i] = (c & 0xff00ff00) | ((c & 0x00ff0000) >> 16) | ((c & 0x000000ff) << 16); 
     } 
     FileOutputStream fos = null; 
     try { 
      fos = new FileOutputStream(filename); 
      Bitmap bmp = Bitmap.createBitmap(colors, width, height, Bitmap.Config.ARGB_8888); 
      bmp.compress(Bitmap.CompressFormat.PNG, 90, fos); 
      bmp.recycle(); 
     } catch (IOException ioe) { 
      throw new RuntimeException("Failed to write file " + filename, ioe); 
     } finally { 
      try { 
       if (fos != null) fos.close(); 
      } catch (IOException ioe2) { 
       throw new RuntimeException("Failed to close file " + filename, ioe2); 
      } 
     } 
     Log.d(TAG, "Saved " + width + "x" + height + " frame as '" + filename + "'"); 
    } 
} 

例外:

E/ExtractDecodeEditEncodeMuxTest: error while releasing muxer 
            java.lang.IllegalStateException: Can't stop due to wrong state. 
            at android.media.MediaMuxer.stop(MediaMuxer.java:231) 
            at com.example.name.videoeditortest.ExtractDecodeEditEncodeMuxTest.extractDecodeEditEncodeMux(ExtractDecodeEditEncodeMuxTest.java 434) 
            at com.example.name.videoeditortest.ExtractDecodeEditEncodeMuxTest.access$000(ExtractDecodeEditEncodeMuxTest.java:58) 
            at com.example.name.videoeditortest.ExtractDecodeEditEncodeMuxTest$TestWrapper.run(ExtractDecodeEditEncodeMuxTest.java:171) 
            at java.lang.Thread.run(Thread.java:841) 

如果我在 drawFrame 中注释掉 GLES20.glActiveTexture(GLES20.GL_TEXTURE_2D); 这一行,视频可以正确渲染,但位图没有显示出来。如果我注释掉 GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureBitmapID); 我得到以下异常:

java.lang.RuntimeException: glVertexAttribPointer maPosition: glError 1280 
at com.example.name.videoeditortest.TextureRender.checkGlError(TextureRender.java:259) 
at com.example.name.videoeditortest.TextureRender.drawFrame(TextureRender.java:111) 
at com.example.name.videoeditortest.OutputSurface.drawImage(OutputSurface.java:252) 
at com.example.name.videoeditortest.ExtractDecodeEditEncodeMuxTest.doExtractDecodeEditEncodeMux(ExtractDecodeEditEncodeMuxTest.java:793) 
at com.example.name.videoeditortest.ExtractDecodeEditEncodeMuxTest.extractDecodeEditEncodeMux(ExtractDecodeEditEncodeMuxTest.java:341) 
at com.example.name.videoeditortest.ExtractDecodeEditEncodeMuxTest.access$000(ExtractDecodeEditEncodeMuxTest.java:58) 
at com.example.name.videoeditortest.ExtractDecodeEditEncodeMuxTest$TestWrapper.run(ExtractDecodeEditEncodeMuxTest.java:171) 
at java.lang.Thread.run(Thread.java:841) 

回答

2

我看到两处似乎有问题的地方。

  1. 您试图在同一时间绑定所有内容,并希望一次调用GLES20.glDrawArrays()将绘制所有内容。

  2. 你只有一个着色器,你应该有两个着色器:一个用于做视频纹理渲染,另一个用于你的位图层渲染。

你必须知道,一帧画面可以由多次 glDrawArrays 调用绘制而成,每次调用只在之前已绘制的内容之上叠加画面的(基本上)一小部分。


在你的情况下,渲染一帧的流程大致应该是下面这样:

初始化

loadShaderForVideo()

loadShaderForBitmapLayer()

prepareYourArraysEtc()

...

循环

GLClear()

updateVideoTexture()

drawFrame(){ 

drawVideo(){ 

    bindYourActiveTextureToVideo() 

    setYourVertexAttribAndUniform() 

    GLES20.glDrawArrays() 

} 

drawBitmap() { 

    bindYourActiveTextureToBitmap() 

    setYourVertexAttribAndUniform() // This should be the same as above for video 
    // Considering you want to draw above your video, consider activating the blending for transparency : 

    GLES20.glEnable(GLES20.GL_BLEND); 
    GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA); 

    GLES20.glDrawArrays() 

} 

}


关于着色器,请看下面这些例子:

一个共同的顶点着色器两种:

public static final String vertexDefaultShaderCode = 
     "uniform mat4 uVPMatrix;" + 
       "uniform mat4 uModelMatrix;" + // uniform = input const 
       "attribute vec3 aPosition;" + // attribute = input property different for each vertex 
       "attribute vec2 aTexCoordinate;" + 
       "varying vec2 vTexCoordinate;" +// varying = output property different for each pixel 

       "void main() {" + 
       "vTexCoordinate = aTexCoordinate;" + 
       "gl_Position = uVPMatrix * uModelMatrix * vec4(aPosition,1.0);" + 
       "}"; 

然后一个基本的片段着色器(对位图2D纹理):

public static final String fragmentDefaultShaderCode = 
     "precision mediump float;" + 
       "uniform sampler2D uTexture;" + 
       "varying vec2 vTexCoordinate;" + 

       "void main() {" + 
       " gl_FragColor = texture2D(uTexture, vTexCoordinate);" + 
       "}"; 

然后,不同版本的视频渲染:

public static final String fragmentExternalShaderCode = 
     "#extension GL_OES_EGL_image_external : require\n" + 
       "precision mediump float;" + 
       "uniform samplerExternalOES sTexture;" + 
       "varying vec2 vTexCoordinate;" + 

       "void main() {" + 
       " gl_FragColor = texture2D(sTexture, vTexCoordinate);" + 
       "}"; 

因此,您将需要两个程序,一个使用defaultVertexShader + defaultFragmentShader,另一个使用defaultVertexShader + fragmentExternalShaderCode。

我希望只有这些修改才能解决您的问题。

问候

+0

谢谢你的答案,这是真的很有帮助。正如你可以看到我说的,我期望从赏金获胜者提供一个功能的代码。再次感谢! –

+0

是的,我看到你的条件。如果我有很多空闲时间,我会看到更深的一点,但现在不能做更多。 –