-4

I have get the h.264 data and decoder them to YUV buffers,and i have display the yup data by opengl on ios5 and ios6,but when I try to run it on my iPad(ios4.2.1),it can not display currently,just all the screen green color.I don't know why,here is my code:

-(void)playVideoData:(void *)data
{

if (!_textureY)
{
    glGenTextures(1, &_textureY);
    glGenTextures(1, &_textureU);
    glGenTextures(1, &_textureV);
}

glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, _textureY);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RED_EXT, _videoW, _videoH, 0, GL_RED_EXT, GL_UNSIGNED_BYTE, y);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);


glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, _textureU);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RED_EXT, _videoW/2, _videoH/2, 0, GL_RED_EXT, GL_UNSIGNED_BYTE, u);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, _textureV);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RED_EXT, _videoW/2, _videoH/2, 0, GL_RED_EXT, GL_UNSIGNED_BYTE, v);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

[self render];

}
- (void)render {

glViewport(viewportx,viewporty, VIEWWIDTH, VIEWHEIGHT);
glClearColor(0.0, 0, 0.0, 0);
glClear(GL_COLOR_BUFFER_BIT);

glUseProgram(programId);

// Update uniform value
//glUniform1f(uniforms[UNIFORM_TRANSLATE], 0.0f);
GLuint textureUniformY = glGetUniformLocation(programId, "SamplerY");
GLuint textureUniformU = glGetUniformLocation(programId, "SamplerU");
GLuint textureUniformV = glGetUniformLocation(programId, "SamplerV");

// Update attribute values
glVertexAttribPointer(ARDRONE_ATTRIB_POSITION, 2, GL_FLOAT, 0, 0, squareVertices);
glEnableVertexAttribArray(ARDRONE_ATTRIB_POSITION);

glVertexAttribPointer(ARDRONE_ATTRIB_TEXCOORD, 2, GL_FLOAT, 0, 0, coordVertices);
glEnableVertexAttribArray(ARDRONE_ATTRIB_TEXCOORD);




glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, _textureY);
glUniform1i(textureUniformY, 0);

glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, _textureU);
glUniform1i(textureUniformU, 1);

glActiveTexture(GL_TEXTURE2);
glBindTexture(GL_TEXTURE_2D, _textureV);
glUniform1i(textureUniformV, 2);

} -(void)drawFrame2
{

if (context != nil)
{
    //make it the current context for rendering
    [EAGLContext setCurrentContext:context];

    //if our framebuffers have not been created yet, do that now!
    if (!defaultFramebuffer)
        [self createFramebuffer];

    glBindFramebuffer(GL_FRAMEBUFFER, defaultFramebuffer);

    [self playVideoData];
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    glBindRenderbuffer(GL_RENDERBUFFER, colorRenderbuffer);




    [context presentRenderbuffer:GL_RENDERBUFFER];
}
else
    NSLog(@"Context not set!");

}

I have put the data to the y u v buffer here is my fsh and vsh:

    vsh:    

    attribute vec4 position; // 1

    //uniform float translate;

    attribute vec2 TexCoordIn; // New

    varying vec2 TexCoordOut; // New

    void main(void)

    {
    gl_Position = position; // 6

    TexCoordOut = TexCoordIn;

    }

    fsh:

    varying lowp vec2 TexCoordOut;

    uniform sampler2D SamplerY;

    uniform sampler2D SamplerU;

    uniform sampler2D SamplerV;


    void main(void)
   {

   mediump vec3 yuv;

   lowp vec3 rgb;

   yuv.x = texture2D(SamplerY, TexCoordOut).r;

   yuv.y = texture2D(SamplerU, TexCoordOut).r - 0.5;

   yuv.z = texture2D(SamplerV, TexCoordOut).r - 0.5;    

   rgb = mat3( 1,       1,         1,
           0,       -0.39465,  2.03211,
           1.13983, -0.58060,  0) * yuv;

   gl_FragColor = vec4(rgb, 1);

   }

iOs4.2 no erros ,the shaders Compile ok, but just can not display...

genpfault
  • 47,669
  • 9
  • 68
  • 119

1 Answers1

1

This is because the GL_RED_EXT extension that you are using to upload your textures was only added in iOS 5.0, and isn't present in iOS 4.2. You won't be able to use that to upload your YUV textures in this manner, so you'll need to rewrite that part of your code. Also, I believe this extension is only supported for iPad 2 and newer devices, not the original iPad and older iPhones, so you won't be able to use it on even iOS 5+ for the older ones.

Brad Larson
  • 168,330
  • 45
  • 388
  • 563
  • by the way,can we decoder the raw h264 data by the gpu?i know your gpuimage can display the camera data by gnu ,because the samplebuffer is supplyed by system,and i saw some topics about recording it just media format like mov mp4,if raw h264 data can not recorder it, do you have some ideas ? – user1586321 Dec 11 '12 at 11:15