android - How to provide a shader function to perform an alpha mask on a video

Tags: android opengl-es glsl vertex-shader glsurfaceview

I am developing a VR app on Android with the virocore library and have to display a video on a sphere. What I need to render is actually not a normal video: each frame consists of two halves, a colour frame on the left and an alpha-mask frame on the right. I have not worked with OpenGL before, but it seems I need to supply a shader function that performs the alpha masking.

For the shader I have been following Adding transparency to a video from black and white (and gray) alpha information video images.

But how do I use it with OpenGL in my draw method? Or is there any way in virocore to do the alpha masking? I tried the chroma filtering method in virocore, but that makes the whole video transparent.
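The core of that approach, as far as I understand it, is to sample the left half of each frame for colour and the right half for the alpha mask inside the fragment shader. A minimal sketch of the idea (uv here stands for the incoming texture coordinate; my full attempt follows below):

// Sketch only: uv.x runs from 0 to 1 across the whole side-by-side frame.
lowp vec4 color = texture2D(inputImageTexture, vec2(uv.x * 0.5,       uv.y)); // left half: colour
lowp vec4 mask  = texture2D(inputImageTexture, vec2(uv.x * 0.5 + 0.5, uv.y)); // right half: alpha mask
gl_FragColor = vec4(color.rgb, mask.g);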

public class VideoSurfaceView   extends GLSurfaceView {

VideoRender mRenderer;
private MediaPlayer mMediaPlayer = null;

public VideoSurfaceView(Context context, MediaPlayer mp) {
    super(context);

    setEGLContextClientVersion(2);
    mMediaPlayer = mp;
    mRenderer = new VideoRender(context);

    this.getHolder().setFormat(PixelFormat.RGB_565);
    this.getHolder().setFormat(PixelFormat.TRANSPARENT);
    setEGLConfigChooser(8,8,8,8,16,0);
    setEGLContextClientVersion(2);

    setRenderer(mRenderer);

}

@Override
public void onResume() {
    Log.e("onResume ", "onResume");
    queueEvent(new Runnable(){
        public void run() {
            Log.e("runnable ", "runnable");
            mRenderer.setMediaPlayer(mMediaPlayer);
        }});

    super.onResume();
}

private static class VideoRender
        implements Renderer, SurfaceTexture.OnFrameAvailableListener, MediaPlayer.OnPreparedListener {
    private static String TAG = "VideoRender";

    private static final int FLOAT_SIZE_BYTES = 4;
    private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
    private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
    private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
    private final float[] mTriangleVerticesData = {
            // X, Y, Z, U, V
            -1.0f, -1.0f, 0, 0.f, 0.f,
            1.0f, -1.0f, 0, 1.f, 0.f,
            -1.0f,  1.0f, 0, 0.f, 1.f,
            1.0f,  1.0f, 0, 1.f, 1.f,
    };

    private FloatBuffer mTriangleVertices;


    private static final String mVertexShader =
            "uniform mat4 uMVPMatrix;\n" +
            "uniform mat4 uSTMatrix;\n" +
            "attribute vec4 position;\n" +
            "attribute vec4 inputTextureCoordinate;\n" +
            "\n" +
            "varying vec2 textureCoordinate;\n" +
            "varying vec2 textureCoordinate2;\n" +
            "\n" +
            "void main()\n" +
            "{\n" +
            "    gl_Position = uMVPMatrix * position;\n" +
            "    vec4 texCoord = uSTMatrix * inputTextureCoordinate;\n" +
            "    textureCoordinate  = vec2(inputTextureCoordinate.x * 0.5, inputTextureCoordinate.y);\n" +
            "    textureCoordinate2 = vec2(inputTextureCoordinate.x * 0.5 + 0.5, inputTextureCoordinate.y);\n" +
            "}";


    public static final String mFragmentShader = "#extension GL_OES_EGL_image_external : require\n"+
            "varying highp vec2 textureCoordinate;\n"+
            "varying highp vec2 textureCoordinate2;\n"+
            "uniform samplerExternalOES inputImageTexture;\n" +
            "void main() {\n"+
            "    lowp vec4 rgbcolor = texture2D(inputImageTexture, textureCoordinate);\n"+
            "    lowp vec4 alphaValue = texture2D(inputImageTexture, textureCoordinate2);\n"+
            "    if (alphaValue.g < 0.5)\n"+
            "    discard;\n"+
            "    gl_FragColor = vec4(rgbcolor.rgb, 1.0);\n"+
            "}";

    private float[] mMVPMatrix = new float[16];
    private float[] mSTMatrix = new float[16];

    private int mProgram;
    private int mTextureID;
    private int muMVPMatrixHandle;
    private int muSTMatrixHandle;
    private int maPositionHandle;
    private int maTextureHandle;

    private SurfaceTexture mSurface;
    private boolean updateSurface = false;

    private static int GL_TEXTURE_EXTERNAL_OES = 0x8D65;

    private MediaPlayer mMediaPlayer;

    public VideoRender(Context context) {
        mTriangleVertices = ByteBuffer.allocateDirect(
                mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
                .order(ByteOrder.nativeOrder()).asFloatBuffer();
        mTriangleVertices.put(mTriangleVerticesData).position(0);

        Matrix.setIdentityM(mSTMatrix, 0);

    }

    public void setMediaPlayer(MediaPlayer player) {
        mMediaPlayer = player;
    }

    @Override
    public void onDrawFrame(GL10 glUnused) {
        synchronized(this) {
            if (updateSurface) {
                mSurface.updateTexImage();
                mSurface.getTransformMatrix(mSTMatrix);
                updateSurface = false;
            }
        }

        GLES20.glClearColor(0.0f, 0.0f, 0.0f, .0f);
        GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT|GLES20.GL_COLOR_BUFFER_BIT);
        GLES20.glEnable(GLES20.GL_BLEND);
        GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);

        GLES20.glUseProgram(mProgram);
        checkGlError("glUseProgram");

        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);

        mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
        GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
                TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
        checkGlError("glVertexAttribPointer maPosition");
        GLES20.glEnableVertexAttribArray(maPositionHandle);
        checkGlError("glEnableVertexAttribArray maPositionHandle");

        mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
        GLES20.glVertexAttribPointer(maTextureHandle, 3, GLES20.GL_FLOAT, false,
                TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
        checkGlError("glVertexAttribPointer maTextureHandle");
        GLES20.glEnableVertexAttribArray(maTextureHandle);
        checkGlError("glEnableVertexAttribArray maTextureHandle");

        Matrix.setIdentityM(mMVPMatrix, 0);
        GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
        GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);

        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        checkGlError("glDrawArrays");
        GLES20.glFinish();

    }

    @Override
    public void onSurfaceChanged(GL10 glUnused, int width, int height) {

    }

    @Override
    public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
        mProgram = createProgram(mVertexShader, mFragmentShader);
        if (mProgram == 0) {
            return;
        }
        maPositionHandle = GLES20.glGetAttribLocation(mProgram, "position");
        checkGlError("glGetAttribLocation aPosition");
        if (maPositionHandle == -1) {
            throw new RuntimeException("Could not get attrib location for aPosition");
        }
        maTextureHandle = GLES20.glGetAttribLocation(mProgram, "inputTextureCoordinate");
        checkGlError("glGetAttribLocation aTextureCoord");
        if (maTextureHandle == -1) {
            throw new RuntimeException("Could not get attrib location for aTextureCoord");
        }

        muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
        checkGlError("glGetUniformLocation uMVPMatrix");
        if (muMVPMatrixHandle == -1) {
            throw new RuntimeException("Could not get attrib location for uMVPMatrix");
        }

        muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
        checkGlError("glGetUniformLocation uSTMatrix");
        if (muSTMatrixHandle == -1) {
            throw new RuntimeException("Could not get attrib location for uSTMatrix");
        }


        int[] textures = new int[1];
        GLES20.glGenTextures(1, textures, 0);

        mTextureID = textures[0];
        GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
        checkGlError("glBindTexture mTextureID");

        GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
                GLES20.GL_NEAREST);
        GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
                GLES20.GL_LINEAR);

        /*
         * Create the SurfaceTexture that will feed this textureID,
         * and pass it to the MediaPlayer
         */
        mSurface = new SurfaceTexture(mTextureID);
        mSurface.setOnFrameAvailableListener(this);
        Log.e("surface ", "surface");
        Surface surface = new Surface(mSurface);
        mMediaPlayer.setSurface(surface);
        mMediaPlayer.setScreenOnWhilePlaying(true);
        surface.release();
        mMediaPlayer.setOnPreparedListener(this);
        mMediaPlayer.prepareAsync();

        synchronized(this) {
            updateSurface = false;
        }
    }

    synchronized public void onFrameAvailable(SurfaceTexture surface) {
        updateSurface = true;
    }

    private int loadShader(int shaderType, String source) {
        int shader = GLES20.glCreateShader(shaderType);
        if (shader != 0) {
            GLES20.glShaderSource(shader, source);
            GLES20.glCompileShader(shader);
            int[] compiled = new int[1];
            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
            if (compiled[0] == 0) {
                Log.e(TAG, "Could not compile shader " + shaderType + ":");
                Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
                GLES20.glDeleteShader(shader);
                shader = 0;
            }
        }
        return shader;
    }

    private int createProgram(String vertexSource, String fragmentSource) {
        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
        if (vertexShader == 0) {
            return 0;
        }
        int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
        if (pixelShader == 0) {
            return 0;
        }

        int program = GLES20.glCreateProgram();
        if (program != 0) {
            GLES20.glAttachShader(program, vertexShader);
            checkGlError("glAttachShader");
            GLES20.glAttachShader(program, pixelShader);
            checkGlError("glAttachShader");
            GLES20.glLinkProgram(program);
            int[] linkStatus = new int[1];
            GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
            if (linkStatus[0] != GLES20.GL_TRUE) {
                Log.e(TAG, "Could not link program: ");
                Log.e(TAG, GLES20.glGetProgramInfoLog(program));
                GLES20.glDeleteProgram(program);
                program = 0;
            }
        }
        return program;
    }

    private void checkGlError(String op) {
        int error;
        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
            Log.e(TAG, op + ": glError " + error);
            throw new RuntimeException(op + ": glError " + error);
        }
    }

    @Override
    public void onPrepared(MediaPlayer mediaPlayer) {
        mediaPlayer.start();
    }
}  // End of class VideoRender.

}  // End of class VideoSurfaceView.

With this code, the video appears upside down.

Best answer

If you want to discard fragments, then you can use the discard keyword in the fragment shader.

For example, to discard all fragments whose alpha value is below 0.5:

void main()
{
    lowp vec4 rgbcolor   = texture2D(inputImageTexture, textureCoordinate);
    lowp vec4 alphaValue = texture2D(inputImageTexture, textureCoordinate2);

    if (alphaValue.g < 0.5)
        discard;   

    gl_FragColor = vec4(rgbcolor.rgb, 1.0);
}

See also OpenGL ES Shading Language 1.00 Specification; 6.4 Jumps; page 58:

The discard keyword is only allowed within fragment shaders. It can be used within a fragment shader to abandon the operation on the current fragment. This keyword causes the fragment to be discarded and no updates to any buffers will occur. It would typically be used within a conditional statement, for example:

if (intensity < 0.0)
    discard;
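
If the hard cut-out produced by discard looks too harsh at the mask edges, a possible alternative (a sketch only, assuming the same varying and samplerExternalOES declarations as the fragment shader in the question, and the GL_BLEND / glBlendFunc state that onDrawFrame already enables) is to write the mask value into the fragment's alpha and let blending do the masking:

void main()
{
    lowp vec4 rgbcolor   = texture2D(inputImageTexture, textureCoordinate);
    lowp vec4 alphaValue = texture2D(inputImageTexture, textureCoordinate2);

    // The green channel of the right-hand (mask) half drives transparency;
    // this only has a visible effect while blending is enabled.
    gl_FragColor = vec4(rgbcolor.rgb, alphaValue.g);
}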

In reply to the comment:

As there is no matrix, what will I use in GLES20.glGetUniformLocation?

Of course you can add the matrices to the vertex shader:

attribute vec4 position;
attribute vec4 inputTextureCoordinate;

varying vec2 textureCoordinate;
varying vec2 textureCoordinate2;

uniform mat4 matMVP;
uniform mat4 matST;

void main()
{
    gl_Position = matMVP * position;

    vec4 texCoord = matST * inputTextureCoordinate;

    textureCoordinate  = vec2(texCoord.x * 0.5,       1.0 - texCoord.y);
    textureCoordinate2 = vec2(texCoord.x * 0.5 + 0.5, 1.0 - texCoord.y);
}
Look up the uniforms and upload the matrices from the Java side as before:

// In onSurfaceCreated(), once the program has been linked:
muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "matMVP");
muSTMatrixHandle  = GLES20.glGetUniformLocation(mProgram, "matST");

// In onDrawFrame(), after updateTexImage()/getTransformMatrix():
GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
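
A note on the upside-down video mentioned in the question: the original vertex shader computed uSTMatrix * inputTextureCoordinate but never used the result, while the shader above samples with the transformed texCoord and also flips V via 1.0 - texCoord.y. If the orientation still comes out wrong for a particular source, that flip is the place to adjust; a hypothetical variant without the extra flip would be:

textureCoordinate  = vec2(texCoord.x * 0.5,       texCoord.y);
textureCoordinate2 = vec2(texCoord.x * 0.5 + 0.5, texCoord.y);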

On the topic of android - how to provide a shader function to perform an alpha mask on a video, a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/53985760/
