BradLarson / GPUImage

An open source iOS framework for GPU-based image and video processing
http://www.sunsetlakesoftware.com/2012/02/12/introducing-gpuimage-framework
BSD 3-Clause "New" or "Revised" License
20.23k stars 4.61k forks source link

Enable depth test in GPUImageFilter #2473

Open hoangdado opened 7 years ago

hoangdado commented 7 years ago

Hi @BradLarson , I need to use GPUImage to implement some face filters like MSQRD. My approach is modifying GPUImageFilter so that it can overlay a face mask on camera texture. By following this tutorial series https://www.raywenderlich.com/3664/opengl-tutorial-for-ios-opengl-es-2-0, I changed GPUImageFilter.m as follow:

- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString andFrameSize:(CGSize)size
{
    if (!(self = [super init]))
    {
        return nil;
    }

    [self setupDepthBufferWithFrameSize:size];

    uniformStateRestorationBlocks = [NSMutableDictionary dictionaryWithCapacity:10];
    _preventRendering = NO;
    currentlyReceivingMonochromeInput = NO;
    inputRotation = kGPUImageNoRotation;
    backgroundColorRed = 0.0;
    backgroundColorGreen = 0.0;
    backgroundColorBlue = 0.0;
    backgroundColorAlpha = 0.0;
    imageCaptureSemaphore = dispatch_semaphore_create(0);
    dispatch_semaphore_signal(imageCaptureSemaphore);

    // MVKPicture is just a derived class of GPUImagePicture to ouput framebuffer to outside
    testPicture = [[MVKPicture alloc] initWithImage:[UIImage imageNamed:@"test_obj.png"]];

    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];

        filterProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:vertexShaderString fragmentShaderString:fragmentShaderString];

        if (!filterProgram.initialized)
        {
            [self initializeAttributes];

            if (![filterProgram link])
            {
                NSString *progLog = [filterProgram programLog];
                NSLog(@"Program link log: %@", progLog);
                NSString *fragLog = [filterProgram fragmentShaderLog];
                NSLog(@"Fragment shader compile log: %@", fragLog);
                NSString *vertLog = [filterProgram vertexShaderLog];
                NSLog(@"Vertex shader compile log: %@", vertLog);
                filterProgram = nil;
                NSAssert(NO, @"Filter shader link failed");
            }
        }

        filterPositionAttribute = [filterProgram attributeIndex:@"position"];
        filterTextureCoordinateAttribute = [filterProgram attributeIndex:@"inputTextureCoordinate"];
        filterInputTextureUniform = [filterProgram uniformIndex:@"inputImageTexture"];
        projectionUniform = [filterProgram uniformIndex:@"projection"];
        modelViewUniform = [filterProgram uniformIndex:@"modelview"];

        [GPUImageContext setActiveShaderProgram:filterProgram];

        glEnableVertexAttribArray(filterPositionAttribute);
        glEnableVertexAttribArray(filterTextureCoordinateAttribute);

        glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, _depthRenderBuffer);
    });

    CC3GLMatrix *projection = [CC3GLMatrix matrix];
    [projection populateFromFrustumLeft:-0.5 andRight:0.5 andBottom:-0.5 andTop:0.5 andNear:3 andFar:9];
    glUniformMatrix4fv(projectionUniform, 1, 0, projection.glMatrix);

    CC3GLMatrix *modelView = [CC3GLMatrix matrix];
    [modelView populateFromTranslation:CC3VectorMake(0, 0, -6)];
    glUniformMatrix4fv(modelViewUniform, 1, 0, modelView.glMatrix);

    return self;
}

- (void)setupDepthBufferWithFrameSize:(CGSize)size {
    glGenRenderbuffers(1, &_depthRenderBuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, _depthRenderBuffer);
    glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, size.width, size.height);
}

- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
{
    if (self.preventRendering)
    {
        [firstInputFramebuffer unlock];
        return;
    }

    [GPUImageContext setActiveShaderProgram:filterProgram];

    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:[self sizeOfFBO] textureOptions:self.outputTextureOptions onlyTexture:NO];
    [outputFramebuffer activateFramebuffer];
    if (usingNextFrameForImageCapture)
    {
        [outputFramebuffer lock];
    }

    [self setUniformsForProgramAtIndex:0];

    glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);
    glEnable(GL_BLEND);

    glClearColor(0, 104.0/255.0, 55.0/255.0, 1.0);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glEnable(GL_DEPTH_TEST);

    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, [firstInputFramebuffer texture]);
    glUniform1i(filterInputTextureUniform, 2);

    const GLvoid* pCamVertice = &camVertices[0].Position;
    const GLvoid* pCamTextCoord = &camVertices[0].TextCoord;
    GLsizei stride = sizeof(Vertex);
    glVertexAttribPointer(filterPositionAttribute, 3, GL_FLOAT, 0, stride, pCamVertice);
    glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, stride, pCamTextCoord);

    glDrawElements(GL_TRIANGLES, sizeof(camIndices)/sizeof(camIndices[0]), GL_UNSIGNED_BYTE, camIndices);

    // draw test texture
    glActiveTexture(GL_TEXTURE2);
    glBindTexture(GL_TEXTURE_2D, [[testPicture frameBuffer] texture]);
    glUniform1i(filterInputTextureUniform, 2);

    const GLvoid* pTestVertice = &testVertices[0].Position;
    const GLvoid* pTestTextCoord = &testVertices[0].TextCoord;

    glVertexAttribPointer(filterPositionAttribute, 3, GL_FLOAT, 0, stride, pTestVertice);
    glVertexAttribPointer(filterTextureCoordinateAttribute, 2, GL_FLOAT, 0, stride, pTestTextCoord);

    glDrawElements(GL_TRIANGLES, sizeof(testIndices)/sizeof(testIndices[0]), GL_UNSIGNED_BYTE, testIndices);

    [firstInputFramebuffer unlock];

    if (usingNextFrameForImageCapture)
    {
        dispatch_semaphore_signal(imageCaptureSemaphore);
    }
}

and modified Vertex shader as follow:

NSString *const kMVKFilterVertexShaderString = SHADER_STRING
(
 attribute vec4 position;
 attribute vec4 inputTextureCoordinate;

 uniform mat4 projection;
 uniform mat4 modelview;
 varying vec2 textureCoordinate;

 void main()
 {
     gl_Position = projection * modelview * position;
     textureCoordinate = inputTextureCoordinate.xy;
 }
 );

I successfully overlay a test AR scene on the camera texture but failed with depth testing. How can I fix this code so that OpenGL ES depth test enabled?