BradLarson / GPUImage2

GPUImage 2 is a BSD-licensed Swift framework for GPU-accelerated video and image processing.

How do you put a date/time overlay when recording? #176

Open EriFisher opened 7 years ago

EriFisher commented 7 years ago

I saw that there is something that does this in the original GPUImage. Does anyone know how to do it in GPUImage 2, so the recording looks like a security-camera feed?
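
For reference, GPUImage 1 handles this with GPUImageUIElement rendered into a texture and composited over the camera output through GPUImageAlphaBlendFilter. A rough Swift sketch of that GPUImage 1 wiring, from memory of that API (overlayView, videoCamera, and movieWriter are placeholders assumed to exist already):

let uiElement = GPUImageUIElement(view: overlayView)   // renders a UIView (e.g. a time label) into a texture
let blendFilter = GPUImageAlphaBlendFilter()
blendFilter.mix = 1.0                                  // show the overlay at full opacity
videoCamera.addTarget(blendFilter)
uiElement.addTarget(blendFilter)
blendFilter.addTarget(movieWriter)
uiElement.update()                                     // call again whenever the label text changes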

EriFisher commented 7 years ago

I recreated the UIElement class in Swift 3; however, the code doesn't work yet, because I think I'm not handling the targets correctly. If anyone wants to take the final step and fix this, we can all benefit and add UI objects on top of our recordings.

Edit: code updated. It works now, but CPU usage is extremely high and the output is fuzzy.


import UIKit
import OpenGLES
import AVFoundation
import GPUImage // needed for ImageSource, Framebuffer, etc. when this file lives in an app target rather than inside the framework

public class UIElementthingthing: ImageSource {
    private var view: UIView?
    private var layer: CALayer!
    private var previousLayerSizeInPixels = CGSize.zero
    private var time = CMTime()

    private var actualTimeOfLastUpdate = TimeInterval()
    var outputFramebuffer:Framebuffer!
    var hasProcessedImage:Bool = false

    public let targets = TargetContainer()

    // MARK: -
    // MARK: Initialization and teardown

    public init(view inputView: UIView) {
        view = inputView
        layer = inputView.layer
        previousLayerSizeInPixels = CGSize.zero
        update()
    }

    public init(layer inputLayer: CALayer) {
        view = nil
        layer = inputLayer
        previousLayerSizeInPixels = CGSize.zero
        update()
    }

    // Size of the backing layer in pixels (point size multiplied by the layer's contentsScale).
    func layerSizeInPixels() -> CGSize {
        let pointSize: CGSize = layer.bounds.size
        return CGSize(width: layer.contentsScale * pointSize.width, height: layer.contentsScale * pointSize.height)
    }

    // Re-renders the layer without a specific frame timestamp.
    func update() {
        update(withTimestamp: kCMTimeIndefinite)
    }

    // Advances the internal clock by the wall-clock time elapsed since the last update, then re-renders with that timestamp.
    func updateUsingCurrentTime() {
        if CMTIME_IS_INVALID(time) {
            time = CMTimeMakeWithSeconds(0, 600)
            actualTimeOfLastUpdate = Date.timeIntervalSinceReferenceDate
        }
        else {
            let now: TimeInterval = Date.timeIntervalSinceReferenceDate
            let diff: TimeInterval = now - actualTimeOfLastUpdate
            time = CMTimeAdd(time, CMTimeMakeWithSeconds(diff, 600))
            actualTimeOfLastUpdate = now
        }
        update(withTimestamp: time)
    }

    // Renders the layer into a CPU-side bitmap on the shared GPU context's queue, uploads it as a texture,
    // and pushes the resulting framebuffer to every attached target.
    func update(withTimestamp frameTime: CMTime) {
        sharedImageProcessingContext.runOperationAsynchronously {
            let layerPixelSize = self.layerSizeInPixels()
            let bytesPerRow = Int(layerPixelSize.width) * 4

            // Render the layer into a BGRA bitmap in main memory.
            let imageData = UnsafeMutablePointer<GLubyte>.allocate(capacity: Int(layerPixelSize.height) * bytesPerRow)
            defer { imageData.deallocate() }

            let genericRGBColorspace = CGColorSpaceCreateDeviceRGB()
            guard let imageContext = CGContext(data: imageData, width: Int(layerPixelSize.width), height: Int(layerPixelSize.height), bitsPerComponent: 8, bytesPerRow: bytesPerRow, space: genericRGBColorspace, bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue) else { return }

            // Flip vertically and scale from points to pixels, then draw the layer into the bitmap.
            imageContext.translateBy(x: 0.0, y: layerPixelSize.height)
            imageContext.scaleBy(x: self.layer.contentsScale, y: -self.layer.contentsScale)
            // CGContextSetBlendMode(imageContext, kCGBlendModeCopy) // From Technical Q&A QA1708: http://developer.apple.com/library/ios/#qa/qa1708/_index.html
            self.layer.render(in: imageContext)

            // Upload the bitmap into a texture-only framebuffer from the shared cache.
            let outputFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: GLSize(width: GLint(layerPixelSize.width), height: GLint(layerPixelSize.height)), textureOnly: true)
            glBindTexture(GLenum(GL_TEXTURE_2D), outputFramebuffer.texture)
            // No need to use self.outputTextureOptions here; these texture options are always what we want.
            glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_RGBA, GLsizei(Int(layerPixelSize.width)), GLsizei(Int(layerPixelSize.height)), 0, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), imageData)

            // Tag the framebuffer with the frame time and hand it to every attached target;
            // updateTargetsWithFramebuffer(_:) does the per-target bookkeeping, so no manual iteration is needed.
            outputFramebuffer.timingStyle = .videoFrame(timestamp: Timestamp(frameTime))
            self.updateTargetsWithFramebuffer(outputFramebuffer)
        }
    }
    public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) {
        // No previous framebuffer is retained; call update() after attaching a target to push a fresh frame.
    }
}
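
For anyone wiring this up, here is a minimal usage sketch under a few assumptions: GPUImage2's AlphaBlend operation is used for the compositing (its mix property is assumed to behave like GPUImage 1's), a RenderView named renderView and a writable outputURL exist elsewhere, and the overlay size, session preset, and one-second refresh interval are arbitrary choices. The overlay is re-rendered only when the timer fires, since the CPU cost of this class comes from redrawing the CALayer and re-uploading the texture on every update.

import UIKit
import AVFoundation
import GPUImage

// Assumed to exist elsewhere: renderView (a RenderView) and outputURL (destination for the recorded movie).
do {
    // On newer SDKs the preset is AVCaptureSession.Preset.hd1280x720 instead of this string constant.
    let camera = try Camera(sessionPreset: AVCaptureSessionPreset1280x720)

    // On a 2x device this 640x360-point view renders at 1280x720 pixels, matching the video frame,
    // so the blend doesn't have to stretch (and blur) the overlay texture.
    let overlayView = UIView(frame: CGRect(x: 0, y: 0, width: 640, height: 360))
    overlayView.backgroundColor = .clear
    let timeLabel = UILabel(frame: CGRect(x: 16, y: 16, width: 300, height: 30))
    timeLabel.textColor = .white
    overlayView.addSubview(timeLabel)

    let overlayInput = UIElementthingthing(view: overlayView)
    let blend = AlphaBlend()
    blend.mix = 1.0                 // assumed: full-opacity overlay, as with GPUImage 1's alpha blend filter
    let movieOutput = try MovieOutput(URL: outputURL, size: Size(width: 1280, height: 720), liveVideo: true)

    camera --> blend                // first input: live video
    overlayInput --> blend          // second input: the rendered UIKit layer
    blend --> renderView            // on-screen preview
    blend --> movieOutput           // recorded movie with the overlay burned in

    camera.startCapture()
    movieOutput.startRecording()

    // In a real app, keep camera, blend, overlayInput, and movieOutput in properties so they outlive this scope.
    // Refresh the overlay once per second; redrawing it on every video frame is what drives CPU usage up.
    Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true) { _ in
        timeLabel.text = DateFormatter.localizedString(from: Date(), dateStyle: .short, timeStyle: .medium)
        overlayInput.updateUsingCurrentTime()
    }
} catch {
    print("Couldn't set up the recording pipeline: \(error)")
}

Sizing the overlay so its pixel dimensions match the video frame, as above, should also help with the fuzziness, since the blend samples the overlay texture across the whole output.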