android - OpenGL ES 2.0 on Android: YUV to RGB and rendering with FFmpeg

Tags: android android-ndk ffmpeg opengl-es-2.0

Once the video starts displaying, my renderer dies within one or two frames:

fatal error 11: blabla... (it happens right inside glDrawElements, in the Y part)

I think the problem is glPixelStorei, or GL_RGB vs. GL_LUMINANCE, but... I don't understand it.

How I render:

  1. Decode the data received from the network (fetched in the SDK layer, decoded in the NDK) and enqueue it.

  2. Another thread dequeues it (synchronized, of course) and prepares the OpenGL ES 2.0 setup. (SDK; a minimal sketch of this queue hand-off is shown right after this list.)

  3. When onDrawFrame, onSurfaceCreated, or onSurfaceChanged is called, the call goes down into the NDK. (My renderer source in the NDK is attached below.)

  4. Render.
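Roughly, the queue between the decoder thread and the GL thread looks like this (a simplified sketch only; the FrameQueue type and its member names are illustrative, not my actual source):

#include <deque>
#include <mutex>
#include <utility>
#include <vector>

// One decoded YUV 420p frame: three separate planes.
struct YuvFrame {
    std::vector<unsigned char> y, u, v;
};

// Decoder thread pushes, GL thread pops.
class FrameQueue {
public:
    void push(YuvFrame frame) {
        std::lock_guard<std::mutex> lock(mutex_);
        frames_.push_back(std::move(frame));
    }
    bool pop(YuvFrame* out) {
        std::lock_guard<std::mutex> lock(mutex_);
        if (frames_.empty()) return false;   // nothing decoded yet, skip this draw
        *out = std::move(frames_.front());
        frames_.pop_front();
        return true;
    }
private:
    std::mutex mutex_;
    std::deque<YuvFrame> frames_;
};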

As you know, the fragment shader does the conversion. My data is YUV 420p (pix_fmt_yuv420p), 12 bits per pixel.
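For YUV 420p the chroma planes are subsampled by 2 in both directions, which is where the 12 bits per pixel figure comes from. The plane sizes I expect for one frame (using my 736x480 stream size as an example):

// Plane sizes for a YUV 420p frame of width x height pixels.
// Y is full resolution; U and V are half resolution in both directions,
// so the total is width * height * 3 / 2 bytes = 12 bits per pixel.
int width = 736, height = 480;
int y_size = width * height;              // 353280 bytes
int u_size = (width / 2) * (height / 2);  //  88320 bytes
int v_size = u_size;                      //  88320 bytes
int total  = y_size + u_size + v_size;    // 529920 bytes = 1.5 bytes per pixel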

Here is my full source.

I had no previous knowledge of OpenGL ES; this is my first time using it.

Also, please let me know what I can do to improve performance.

Which format parameters should I use in glTexImage2D, glTexSubImage2D, and glRenderbufferStorage??? GL_LUMINANCE? GL_RGBA? GL_RGB? (GL_LUMINANCE is what I am using now.)
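For a single-byte-per-texel plane, the GL_LUMINANCE variant of the two texture calls would look like this (a sketch only, reusing the variable names from my code below, with i as the plane index 0..2):

// Allocate a one-byte-per-texel texture for each plane.
// GL_LUMINANCE keeps the client data size at width * height bytes, whereas
// GL_RGBA makes GL read 4 bytes per texel from the client buffer.
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE,
             maximum_yuv_width_[i], maximum_yuv_height_[i], 0,
             GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);

// Per frame, upload the decoded plane with the same format.
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0,
                stream_yuv_width_[i], stream_yuv_height_[i],
                GL_LUMINANCE, GL_UNSIGNED_BYTE, yuv_data_[i]);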

void Renderer::set_draw_frame(JNIEnv* jenv, jbyteArray yData, jbyteArray uData, jbyteArray vData)
{
    // Release the planes of the previous frame.
    for (int i = 0; i < 3; i++) {
        if (yuv_data_[i] != NULL) {
            free(yuv_data_[i]);
            yuv_data_[i] = NULL;
        }
    }

    int YSIZE = -1;
    int USIZE = -1;
    int VSIZE = -1;

    if (yData != NULL) {
        YSIZE = (int)jenv->GetArrayLength(yData);
        LOG_DEBUG("YSIZE : %d", YSIZE);
        yuv_data_[0] = (unsigned char*)malloc(sizeof(unsigned char) * YSIZE);
        memset(yuv_data_[0], 0, YSIZE);
        jenv->GetByteArrayRegion(yData, 0, YSIZE, (jbyte*)yuv_data_[0]);
    } else {
        // No luma plane delivered: fall back to the configured stream size and a dummy fill.
        YSIZE = stream_yuv_width_[0] * stream_yuv_height_[0];
        yuv_data_[0] = (unsigned char*)malloc(sizeof(unsigned char) * YSIZE);
        memset(yuv_data_[0], 1, YSIZE);
    }

    if (uData != NULL) {
        USIZE = (int)jenv->GetArrayLength(uData);
        LOG_DEBUG("USIZE : %d", USIZE);
        yuv_data_[1] = (unsigned char*)malloc(sizeof(unsigned char) * USIZE);
        memset(yuv_data_[1], 0, USIZE);
        jenv->GetByteArrayRegion(uData, 0, USIZE, (jbyte*)yuv_data_[1]);
    } else {
        USIZE = YSIZE / 4;
        yuv_data_[1] = (unsigned char*)malloc(sizeof(unsigned char) * USIZE);
        memset(yuv_data_[1], 1, USIZE);
    }

    if (vData != NULL) {
        VSIZE = (int)jenv->GetArrayLength(vData);
        LOG_DEBUG("VSIZE : %d", VSIZE);
        yuv_data_[2] = (unsigned char*)malloc(sizeof(unsigned char) * VSIZE);
        memset(yuv_data_[2], 0, VSIZE);
        jenv->GetByteArrayRegion(vData, 0, VSIZE, (jbyte*)yuv_data_[2]);
    } else {
        VSIZE = YSIZE / 4;
        yuv_data_[2] = (unsigned char*)malloc(sizeof(unsigned char) * VSIZE);
        memset(yuv_data_[2], 1, VSIZE);
    }

    glClearColor(1.0F, 1.0F, 1.0F, 1.0F);
    check_gl_error("glClearColor");
    glClear(GL_COLOR_BUFFER_BIT);
    check_gl_error("glClear");
}

void Renderer::draw_frame()
{
    // Bind the created FBO
    glBindFramebuffer(GL_FRAMEBUFFER, frame_buffer_object_);
    check_gl_error("glBindFramebuffer");
    // Add program to OpenGL environment
    glUseProgram(program_object_);
    check_gl_error("glUseProgram");

    for (int i = 0; i < 3; i++) {
        LOG_DEBUG("Success");
        // Bind texture
        glActiveTexture(GL_TEXTURE0 + i);
        check_gl_error("glActiveTexture");
        glBindTexture(GL_TEXTURE_2D, yuv_texture_id_[i]);
        check_gl_error("glBindTexture");
        glUniform1i(yuv_texture_object_[i], i);
        check_gl_error("glUniform1i");
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, stream_yuv_width_[i], stream_yuv_height_[i], GL_RGBA, GL_UNSIGNED_BYTE, yuv_data_[i]);
        check_gl_error("glTexSubImage2D");
    }

    LOG_DEBUG("Success");
    // Load vertex information
    glVertexAttribPointer(position_object_, 2, GL_FLOAT, GL_FALSE, kStride, kVertexInformation);
    check_gl_error("glVertexAttribPointer");
    // Load texture information
    glVertexAttribPointer(texture_position_object_, 2, GL_SHORT, GL_FALSE, kStride, kTextureCoordinateInformation);
    check_gl_error("glVertexAttribPointer");

    LOG_DEBUG("9");
    glEnableVertexAttribArray(position_object_);
    check_gl_error("glEnableVertexAttribArray");
    glEnableVertexAttribArray(texture_position_object_);
    check_gl_error("glEnableVertexAttribArray");

    // Back to window buffer
    glBindFramebuffer(GL_FRAMEBUFFER, 0);
    check_gl_error("glBindFramebuffer");
    LOG_DEBUG("Success");
    // Draw the square
    glDrawElements(GL_TRIANGLE_STRIP, 6, GL_UNSIGNED_SHORT, kIndicesInformation);
    check_gl_error("glDrawElements");
}

void Renderer::setup_render_to_texture()
{
    glGenFramebuffers(1, &frame_buffer_object_);
    check_gl_error("glGenFramebuffers");
    glBindFramebuffer(GL_FRAMEBUFFER, frame_buffer_object_);
    check_gl_error("glBindFramebuffer");
    glGenRenderbuffers(1, &render_buffer_object_);
    check_gl_error("glGenRenderbuffers");
    glBindRenderbuffer(GL_RENDERBUFFER, render_buffer_object_);
    check_gl_error("glBindRenderbuffer");
    glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA4, stream_yuv_width_[0], stream_yuv_height_[0]);
    check_gl_error("glRenderbufferStorage");
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, render_buffer_object_);
    check_gl_error("glFramebufferRenderbuffer");
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, yuv_texture_id_[0], 0);
    check_gl_error("glFramebufferTexture2D");
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, yuv_texture_id_[1], 0);
    check_gl_error("glFramebufferTexture2D");
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, yuv_texture_id_[2], 0);
    check_gl_error("glFramebufferTexture2D");

    glBindFramebuffer(GL_FRAMEBUFFER, 0);
    check_gl_error("glBindFramebuffer");

    GLint status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
    if (status != GL_FRAMEBUFFER_COMPLETE) {
        print_log("renderer.cpp", "setup_graphics", "FBO setting fault.", LOGERROR);
        LOG_ERROR("%d\n", status);
        return;
    }
}

void Renderer::setup_yuv_texture()
{
    // Use tightly packed data
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    check_gl_error("glPixelStorei");

    for (int i = 0; i < 3; i++) {
        if (yuv_texture_id_[i]) {
            glDeleteTextures(1, &yuv_texture_id_[i]);
            check_gl_error("glDeleteTextures");
        }
        glActiveTexture(GL_TEXTURE0 + i);
        check_gl_error("glActiveTexture");
        // Generate texture object
        glGenTextures(1, &yuv_texture_id_[i]);
        check_gl_error("glGenTextures");
        glBindTexture(GL_TEXTURE_2D, yuv_texture_id_[i]);
        check_gl_error("glBindTexture");
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
        check_gl_error("glTexParameteri");
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
        check_gl_error("glTexParameteri");
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        check_gl_error("glTexParameterf");
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
        check_gl_error("glTexParameterf");
        glEnable(GL_TEXTURE_2D);
        check_gl_error("glEnable");
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, maximum_yuv_width_[i], maximum_yuv_height_[i], 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL);
        check_gl_error("glTexImage2D");
    }
}

void Renderer::setup_graphics()
{
    print_gl_string("Version", GL_VERSION);
    print_gl_string("Vendor", GL_VENDOR);
    print_gl_string("Renderer", GL_RENDERER);
    print_gl_string("Extensions", GL_EXTENSIONS);

    program_object_ = create_program(kVertexShader, kFragmentShader);
    if (!program_object_) {
        print_log("renderer.cpp", "setup_graphics", "Could not create program.", LOGERROR);
        return;
    }

    position_object_ = glGetAttribLocation(program_object_, "vPosition");
    check_gl_error("glGetAttribLocation");
    texture_position_object_ = glGetAttribLocation(program_object_, "vTexCoord");
    check_gl_error("glGetAttribLocation");

    yuv_texture_object_[0] = glGetUniformLocation(program_object_, "yTexture");
    check_gl_error("glGetUniformLocation");
    yuv_texture_object_[1] = glGetUniformLocation(program_object_, "uTexture");
    check_gl_error("glGetUniformLocation");
    yuv_texture_object_[2] = glGetUniformLocation(program_object_, "vTexture");
    check_gl_error("glGetUniformLocation");

    setup_yuv_texture();
    setup_render_to_texture();

    glViewport(0, 0, stream_yuv_width_[0], stream_yuv_height_[0]); //736, 480);//1920, 1080);//maximum_yuv_width_[0], maximum_yuv_height_[0]);
    check_gl_error("glViewport");
}

GLuint Renderer::create_program(const char* vertex_source, const char* fragment_source)
{
    GLuint vertexShader = load_shader(GL_VERTEX_SHADER, vertex_source);
    if (!vertexShader) {
        return 0;
    }

    GLuint pixelShader = load_shader(GL_FRAGMENT_SHADER, fragment_source);
    if (!pixelShader) {
        return 0;
    }

    GLuint program = glCreateProgram();
    if (program) {
        glAttachShader(program, vertexShader);
        check_gl_error("glAttachShader");
        glAttachShader(program, pixelShader);
        check_gl_error("glAttachShader");
        glLinkProgram(program);
        /* Get a Status */
        GLint linkStatus = GL_FALSE;
        glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
        if (linkStatus != GL_TRUE) {
            GLint bufLength = 0;
            glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);
            if (bufLength) {
                char* buf = (char*) malloc(bufLength);
                if (buf) {
                    glGetProgramInfoLog(program, bufLength, NULL, buf);
                    print_log("renderer.cpp", "create_program", "Could not link program.", LOGERROR);
                    LOG_ERROR("%s\n", buf);
                    free(buf);
                }
            }
            glDeleteProgram(program);
            program = 0;
        }
    }
    return program;
}

GLuint Renderer::load_shader(GLenum shaderType, const char* pSource)
{
    GLuint shader = glCreateShader(shaderType);
    if (shader) {
        glShaderSource(shader, 1, &pSource, NULL);
        glCompileShader(shader);
        /* Get a Status */
        GLint compiled = 0;
        glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
        if (!compiled) {
            GLint infoLen = 0;
            glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
            if (infoLen) {
                char* buf = (char*) malloc(infoLen);
                if (buf) {
                    glGetShaderInfoLog(shader, infoLen, NULL, buf);
                    print_log("renderer.cpp", "load_shader", "Could not compile shader.", LOGERROR);
                    LOG_ERROR("%d :: %s\n", shaderType, buf);
                    free(buf);
                }
            }
            glDeleteShader(shader);
            shader = 0;
        }
    }
    return shader;
}


void Renderer::onDrawFrame(JNIEnv* jenv, jbyteArray yData, jbyteArray uData, jbyteArray vData)
{
    set_draw_frame(jenv, yData, uData, vData);
    draw_frame();
    return;
}

void Renderer::setSize(int stream_width, int stream_height) {
  stream_yuv_width_[0] = stream_width;
  stream_yuv_width_[1] = stream_width/2;
  stream_yuv_width_[2] = stream_width/2;
  stream_yuv_height_[0] = stream_height;
  stream_yuv_height_[1] = stream_height/2;
  stream_yuv_height_[2] = stream_height/2;
}

void Renderer::onSurfaceChanged(int width, int height)
{
  mobile_yuv_width_[0] = width;
  mobile_yuv_width_[1] = width/2;
  mobile_yuv_width_[2] = width/2; 
  mobile_yuv_height_[0] = height;
  mobile_yuv_height_[1] = height/2;
  mobile_yuv_height_[2] = height/2;

  maximum_yuv_width_[0] = 1920;
  maximum_yuv_width_[1] = 1920/2;
  maximum_yuv_width_[2] = 1920/2;
  maximum_yuv_height_[0] = 1080;
  maximum_yuv_height_[1] = 1080/2;
  maximum_yuv_height_[2] = 1080/2;

  // If stream size not setting, default size D1
  //if (stream_yuv_width_[0] == 0) {
    stream_yuv_width_[0] = 736;
    stream_yuv_width_[1] = 736/2;
    stream_yuv_width_[2] = 736/2;
    stream_yuv_height_[0] = 480;
    stream_yuv_height_[1] = 480/2;
    stream_yuv_height_[2] = 480/2;
  //}

    setup_graphics();
    return;
}

Here are my fragment and vertex shader sources and the coordinates:

static const char kVertexShader[] =
    "attribute vec4 vPosition;      \n"
      "attribute vec2 vTexCoord;        \n"
      "varying vec2 v_vTexCoord;        \n"
    "void main() {                        \n"
        "gl_Position = vPosition;       \n"
        "v_vTexCoord = vTexCoord;       \n"
    "}                                          \n";

static const char kFragmentShader[] =
        "precision mediump float;               \n"
        "varying vec2 v_vTexCoord;          \n"
        "uniform sampler2D yTexture;        \n"
        "uniform sampler2D uTexture;        \n"
        "uniform sampler2D vTexture;        \n"
        "void main() {                      \n"
            "float y=texture2D(yTexture, v_vTexCoord).r;\n"
            "float u=texture2D(uTexture, v_vTexCoord).r - 0.5;\n"
            "float v=texture2D(vTexture, v_vTexCoord).r - 0.5;\n"
            "float r=y + 1.13983 * v;\n"
            "float g=y - 0.39465 * u - 0.58060 * v;\n"
            "float b=y + 2.03211 * u;\n"
            "gl_FragColor = vec4(r, g, b, 1.0);\n"
        "}\n";

static const GLfloat kVertexInformation[] =
{
    -1.0f,  1.0f,           // Vertex 0: top left
    -1.0f, -1.0f,           // Vertex 1: bottom left
     1.0f, -1.0f,           // Vertex 2: bottom right
     1.0f,  1.0f            // Vertex 3: top right
};
static const GLshort kTextureCoordinateInformation[] =
{
    0, 0,                   // TexCoord 0: top left
    0, 1,                   // TexCoord 1: bottom left
    1, 1,                   // TexCoord 2: bottom right
    1, 0                    // TexCoord 3: top right
};
static const GLuint kStride = 0;//COORDS_PER_VERTEX * 4;
static const GLshort kIndicesInformation[] =
{
    0, 1, 2, 
    0, 2, 3
};
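As a quick sanity check of the conversion constants in the fragment shader (illustrative numbers only): a neutral-chroma input (U = V = 0.5 before the -0.5 offset) should come out as a gray pixel with R = G = B = Y:

// Same arithmetic as the fragment shader, evaluated for a gray input.
float y = 0.75f;
float u = 0.5f - 0.5f;                      // 0.0
float v = 0.5f - 0.5f;                      // 0.0
float r = y + 1.13983f * v;                 // 0.75
float g = y - 0.39465f * u - 0.58060f * v;  // 0.75
float b = y + 2.03211f * u;                 // 0.75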

Best Answer

You cannot call glTexSubImage2D() and expect it to keep up with video frame rates. This is a very common mistake: glTexSubImage2D() and glTexImage2D() are simply too slow for video.

You should use the EGL image extensions instead. Here is an example that works on Android:

http://software.intel.com/en-us/articles/using-opengl-es-to-accelerate-apps-with-legacy-2d-guis
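In rough outline, that approach looks like this (a heavily abbreviated sketch, not taken verbatim from the linked article; it assumes the EGL_KHR_image_base, EGL_ANDROID_image_native_buffer and GL_OES_EGL_image extensions are available, and that native_buffer is an EGLClientBuffer wrapping the buffer your decoder writes into):

#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>

// The extension entry points are not exported by the core headers,
// so they are looked up at runtime.
PFNEGLCREATEIMAGEKHRPROC pEglCreateImageKHR =
    (PFNEGLCREATEIMAGEKHRPROC)eglGetProcAddress("eglCreateImageKHR");
PFNGLEGLIMAGETARGETTEXTURE2DOESPROC pGlEGLImageTargetTexture2DOES =
    (PFNGLEGLIMAGETARGETTEXTURE2DOESPROC)eglGetProcAddress("glEGLImageTargetTexture2DOES");

// Wrap the native buffer in an EGLImage and bind it as the texture's storage;
// after this, no per-frame glTexSubImage2D copy is needed.
EGLImageKHR image = pEglCreateImageKHR(eglGetCurrentDisplay(), EGL_NO_CONTEXT,
                                       EGL_NATIVE_BUFFER_ANDROID,
                                       native_buffer, NULL);
glBindTexture(GL_TEXTURE_2D, texture_id);
pGlEGLImageTargetTexture2DOES(GL_TEXTURE_2D, (GLeglImageOES)image);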

This post on OpenGL ES 2.0 on Android, YUV to RGB and rendering with FFmpeg is based on a similar question found on Stack Overflow: https://stackoverflow.com/questions/19355709/
