BradLarson / GPUImage2

GPUImage 2 is a BSD-licensed Swift framework for GPU-accelerated video and image processing.

UI Element #17

Open pocketwod opened 8 years ago

pocketwod commented 8 years ago

Hi Brad,

Thanks for making a Swift port, I'm learning so much studying it. Any plans to do a UI Element demo similar to the Obj-C version?

BradLarson commented 8 years ago

Yes, as soon as I get the UI element input added to the framework.

pocketwod commented 8 years ago

Thanks for getting back to me, looking forward to seeing it 👍

donaldpiret commented 8 years ago

+1 for this as well! Thank you for this amazing library

EriFisher commented 7 years ago

I almost have it; if anyone can finish it I would appreciate it. I think the issue is that targeting in GPUImage is run differently than in GPUImage2. Brad is, I imagine, too busy for this. But if anyone else could fix that last bit of code, I and many other people would appreciate it.

import UIKit
import GPUImage
import OpenGLES

import AVFoundation

// Attempted port of GPUImage's GPUImageUIElement for GPUImage2: renders a UIView or CALayer
// into a texture and pushes the result to its targets.
public class UIElementthingthing: ImageSource {
    private var view: UIView?
    private var layer: CALayer!
    private var previousLayerSizeInPixels = CGSize.zero
    private var time = CMTime()
    private var actualTimeOfLastUpdate = TimeInterval()
    var imageFramebuffer:Framebuffer!
    var hasProcessedImage:Bool = false

    public let targets = TargetContainer()

    // MARK: -
    // MARK: Initialization and teardown

    public init?(view inputView: UIView) {

        view = inputView
        layer = inputView.layer
        previousLayerSizeInPixels = CGSize.zero
        update()
    }

    public init(layer inputLayer: CALayer) {

        view = nil
        layer = inputLayer
        previousLayerSizeInPixels = CGSize.zero
        update()
    }

    func layerSizeInPixels() -> CGSize {

        let pointSize: CGSize = layer.bounds.size
        return CGSize(width: layer.contentsScale * pointSize.width, height: layer.contentsScale * pointSize.height)
    }

    func update() {
        update(withTimestamp: kCMTimeIndefinite)
    }

    func updateUsingCurrentTime() {
        if !time.isValid { // CMTIME_IS_INVALID is a C macro and isn't imported into Swift
            time = CMTimeMakeWithSeconds(0, 600)
            actualTimeOfLastUpdate = Date.timeIntervalSinceReferenceDate
        }
        else {
            let now: TimeInterval = Date.timeIntervalSinceReferenceDate
            let diff: TimeInterval = now - actualTimeOfLastUpdate
            time = CMTimeAdd(time, CMTimeMakeWithSeconds(diff, 600))
            actualTimeOfLastUpdate = now
        }
        update(withTimestamp: time)
    }

    func update(withTimestamp frameTime: CMTime) {
        sharedImageProcessingContext.runOperationSynchronously {

            let layerPixelSize: CGSize = layerSizeInPixels()
            let pixelWidth = Int(layerPixelSize.width)
            let pixelHeight = Int(layerPixelSize.height)
            // Render the layer into a BGRA byte buffer using Core Graphics.
            let imageData = UnsafeMutablePointer<GLubyte>.allocate(capacity: pixelWidth * pixelHeight * 4)

            let genericRGBColorspace = CGColorSpaceCreateDeviceRGB()
            let imageContext = CGContext(data: imageData, width: pixelWidth, height: pixelHeight, bitsPerComponent: 8, bytesPerRow: pixelWidth * 4, space: genericRGBColorspace, bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.premultipliedFirst.rawValue | CGBitmapInfo.byteOrder32Little.rawValue).rawValue)
            //    CGContextRotateCTM(imageContext, M_PI_2);
            // Flip vertically so the Core Graphics rendering matches OpenGL's texture orientation.
            imageContext?.translateBy(x: 0.0, y: layerPixelSize.height)
            imageContext?.scaleBy(x: layer.contentsScale, y: -layer.contentsScale)
            //        CGContextSetBlendMode(imageContext, kCGBlendModeCopy); // From Technical Q&A QA1708: http://developer.apple.com/library/ios/#qa/qa1708/_index.html
            layer.render(in: imageContext!)
            // CGContextRelease is not needed; CGContext is memory-managed in Swift.

            // TODO: This may not work
            let outputFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation:.portrait, size:GLSize(width:GLint(pixelWidth), height:GLint(pixelHeight)), textureOnly:true)

            // Upload the rendered bytes into the framebuffer's texture.
            glBindTexture(GLenum(GL_TEXTURE_2D), outputFramebuffer.texture)
            // no need to use self.outputTextureOptions here, we always need these texture options
            glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_RGBA, GLsizei(pixelWidth), GLsizei(pixelHeight), 0, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), imageData)
            imageData.deallocate() // pair allocate(capacity:) with deallocate(), not free()
            // We are already inside runOperationSynchronously, so push the new framebuffer
            // to targets directly rather than dispatching to the shared context a second time.
            self.updateTargetsWithFramebuffer(outputFramebuffer)
            self.hasProcessedImage = true
            /* GPUImage 1 style target update, kept for reference:
             for currentTarget: ImageSource in targets {
                 if currentTarget != targetToIgnoreForUpdates {
                     var indexOfObject: Int = (targets as NSArray).index(of: currentTarget)
                     var textureIndexOfTarget = CInt(Int(targetTextureIndices[indexOfObject]))
                     currentTarget.setInputSize(layerPixelSize, at: textureIndexOfTarget)
                     currentTarget.newFrameReady(at: frameTime, at: textureIndexOfTarget)
                 }
             }*/
        }
    }
    public func transmitPreviousImage(to target:ImageConsumer, atIndex:UInt) {
        // TODO: re-send the last rendered framebuffer to newly attached targets,
        // the way PictureInput does. Left as a no-op for now.
    }

}
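
For context, here is a rough, untested sketch of how a source like this might be wired into a GPUImage2 pipeline, mirroring the Obj-C UIElement example (camera feed blended with a rendered UILabel). `UIElementthingthing` is the class above; `Camera`, `RenderView`, the `-->` operator, and `AlphaBlend` are existing GPUImage2 APIs, but the exact wiring is an assumption and may need adjusting once an official UI element input lands in the framework.

import UIKit
import GPUImage
import AVFoundation

class OverlayViewController: UIViewController {
    var camera: Camera!
    var uiElementInput: UIElementthingthing!   // class from the comment above
    let blend = AlphaBlend()                   // assumes the AlphaBlend operation shipped with GPUImage2
    let overlayLabel = UILabel(frame: CGRect(x: 0, y: 0, width: 320, height: 60))

    override func viewDidLoad() {
        super.viewDidLoad()

        let renderView = RenderView(frame: view.bounds)
        view.addSubview(renderView)
        overlayLabel.text = "Hello from a UIView"

        do {
            camera = try Camera(sessionPreset: AVCaptureSessionPreset640x480)
            uiElementInput = UIElementthingthing(view: overlayLabel)

            // First input to the blend is the camera, second is the rendered UI element.
            camera --> blend
            uiElementInput --> blend
            blend --> renderView

            camera.startCapture()
        } catch {
            fatalError("Could not initialize camera: \(error)")
        }
    }
}

Note that the Obj-C example refreshed the element on every camera frame (via frameProcessingCompletionBlock); for animated overlays, something equivalent that calls updateUsingCurrentTime() whenever a new camera frame arrives would still be needed here.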