Open Trushangpatel3993 opened 3 years ago
Same as here. But if you need animated background you should not specify single image as a background, rather blurring sourceImage
e.g.:
sourceImage.composited(over: sourceImage.applyBlur())
applyBlur
is not a real method — it is just a placeholder name for the blur step.
And this is a possible implementation for image blurring, if that is what you needed (it is not clear from your question):
extension CIImage {
    /// Creates a solid-color image from a `UIColor`.
    convenience init(color: UIColor) {
        self.init(color: CIColor(cgColor: color.cgColor))
    }

    /// A small typed wrapper around the Core Image filters used in this file,
    /// pairing each filter name with its required input parameters.
    enum Filter {
        case darkenBlendMode(UIColor)
        case gaussianBlur(radius: Double)
        case overlayBlendMode(CIImage)

        /// The Core Image filter name for this case.
        var name: String {
            switch self {
            case .darkenBlendMode: return "CIDarkenBlendMode"
            case .gaussianBlur: return "CIGaussianBlur"
            case .overlayBlendMode: return "CIOverlayBlendMode"
            }
        }

        /// The input dictionary to pass alongside `name`.
        var parameters: [String: Any] {
            switch self {
            case .darkenBlendMode(let color):
                return [kCIInputBackgroundImageKey: CIImage(color: color)]
            case .gaussianBlur(let radius):
                return [kCIInputRadiusKey: radius]
            case .overlayBlendMode(let image):
                return [kCIInputBackgroundImageKey: image]
            }
        }
    }

    /// Applies `filter` to the receiver, or returns `self` unchanged when
    /// `shouldApply` is false (handy for conditional filter chains).
    func applying(_ filter: Filter, when shouldApply: Bool = true) -> CIImage {
        shouldApply ? applyingFilter(filter.name, parameters: filter.parameters) : self
    }
}
extension CIImage {
    /// Returns a blurred copy of the receiver, first overlaying `color`
    /// (via `CIOverlayBlendMode`) to lighten/tint it, then applying a
    /// Gaussian blur.
    ///
    /// - Parameters:
    ///   - radius: Gaussian blur radius. Defaults to 20.
    ///   - color: Color blended over the image before blurring.
    ///            Defaults to 40%-opacity white.
    func blurred(
        radius: Double = 20,
        color: UIColor = UIColor.white.withAlphaComponent(0.4)
    ) -> CIImage {
        // Hoisted: the original evaluated `color.rgba()` four times
        // (once per channel) for the same color.
        let rgba = color.rgba()
        return applying(
            .overlayBlendMode(CIImage(
                color: CIColor(red: rgba.r, green: rgba.g, blue: rgba.b, alpha: rgba.a)
            ))
        )
        .applying(.gaussianBlur(radius: radius))
    }
}
extension UIColor {
    /// Decomposes the color into RGBA components.
    ///
    /// Handles 4-component (RGBA) and 2-component (grayscale + alpha)
    /// color spaces; any other layout falls back to opaque black.
    ///
    /// - Returns: A `(r, g, b, a)` tuple of `CGFloat` channel values.
    func rgba() -> (r: CGFloat, g: CGFloat, b: CGFloat, a: CGFloat) {
        let components = cgColor.components ?? []
        // Switch on the actual array length rather than `numberOfComponents`:
        // the original indexed `components` based on a separately-obtained
        // count, so a mismatch (e.g. a nil `components` array with a nonzero
        // `numberOfComponents`) would crash out of bounds.
        switch components.count {
        case 4:
            return (components[0], components[1], components[2], components[3])
        case 2:
            // Grayscale: replicate the white value into R, G and B.
            return (components[0], components[0], components[0], components[1])
        default:
            return (0, 0, 0, 1)
        }
    }
}
I tried the Blurred method and it didn't work
I think you just don't see it, because the blurred image is hidden underneath the source image. I'm not a guru in AVFoundation,
so try playing with transforms. I tried to implement a quick fix to make the background image fill the render size, and it seems to work, at least for me:
/// Composites `sourceImage` over a scaled, blurred copy of itself so the
/// blurred copy acts as a background filling the render height.
func applyEffect(
    to sourceImage: CIImage,
    at time: CMTime,
    renderSize: CGSize
) -> CIImage {
    // Scale factor that makes the image height match the render height.
    let fillScale = renderSize.height / sourceImage.extent.height
    let enlarged = sourceImage.transformed(by: .init(scaleX: fillScale, y: fillScale))
    // Shift the enlarged copy so it sits centered behind the source frame.
    let background = enlarged.transformed(
        by: .init(
            translationX: -renderSize.width / 2,
            y: -(enlarged.extent.origin.y + renderSize.height) / 2
        )
    )
    return sourceImage.composited(over: background.blurred(radius: 50, color: #colorLiteral(red: 1, green: 1, blue: 1, alpha: 0.2)))
}
Thank you for your reply. It is indeed blocked by the source image. I changed the position of the blurred image with the method you mentioned, and the effect is great!
Here is an example VideoCompositionProvider
for blurring the background from the current video frame. Thanks to @Alexey-Matjuk for showing us the starting point.
/// A `VideoCompositionProvider` that composites each frame on top of a
/// blurred, aspect-filled copy of itself.
@available(iOS 10.0, *)
final class BlurredBackgroundComposition: VideoCompositionProvider {
    public enum Effect {
        case gaussianBlur(sigma: Double, isDark: Bool)
    }

    // `let` instead of the original implicitly-unwrapped optional `var`s:
    // both values are assigned exactly once in `init`, so no optionality
    // (and no potential nil crash) is needed.
    private let effect: Effect
    private let resizeFilter: CIFilter

    /// - Parameter appliedEffect: The background effect to apply per frame.
    init(appliedEffect: Effect) {
        self.effect = appliedEffect
        // "CILanczosScaleTransform" is a built-in Core Image filter name,
        // so this lookup cannot fail at runtime.
        self.resizeFilter = CIFilter(name: "CILanczosScaleTransform")!
    }

    /// Returns `sourceImage` composited over its blurred background.
    /// Falls back to the untouched frame if the scale filter yields nothing.
    func applyEffect(
        to sourceImage: CIImage,
        at time: CMTime,
        renderSize: CGSize
    ) -> CIImage {
        autoreleasepool {
            var backgroundImage: CIImage = sourceImage
            switch effect {
            case .gaussianBlur(let sigma, let isDark):
                // Scale/aspect values so the background aspect-fills renderSize.
                let scale = renderSize.height / sourceImage.extent.height
                let aspectRatio = renderSize.width / (sourceImage.extent.width * scale)
                // Pass parameters to the Lanczos scale filter.
                resizeFilter.setValue(scale, forKey: kCIInputScaleKey)
                resizeFilter.setValue(aspectRatio, forKey: kCIInputAspectRatioKey)
                resizeFilter.setValue(sourceImage, forKey: kCIInputImageKey)
                // Bail out with the untouched frame if scaling produced nothing.
                guard let scaledImage = resizeFilter.outputImage else {
                    return sourceImage
                }
                // First clamp to extent (extends the edges to infinity), then
                // blur and crop back to the scaled extent to avoid dark borders;
                // finally translate the image back to the origin and optionally
                // darken it.
                backgroundImage = scaledImage.clampedToExtent()
                    .applyingGaussianBlur(sigma: sigma)
                    .cropped(to: scaledImage.extent)
                    .transformed(by: CGAffineTransform(translationX: -scaledImage.extent.origin.x, y: -scaledImage.extent.origin.y))
                    .apply(color: isDark ? UIColor.black.withAlphaComponent(0.6) : nil)
            }
            // Composite the source frame over the blurred background.
            // (No force unwrap: backgroundImage is non-optional now, and the
            // unreachable `case .none` branch — only needed because `effect`
            // was an IUO — is gone.)
            return sourceImage.composited(over: backgroundImage)
        }
    }
}
CIImage extension for apply
function:
import CoreImage
import UIKit
extension CIImage {
    /// Blends the receiver with a solid `color` using `CIDarkenBlendMode`.
    ///
    /// Returns `self` untouched when `color` is nil or when the filter
    /// cannot be created or produces no output.
    func apply(color: UIColor? = nil) -> CIImage {
        guard
            let color = color,
            let filter = CIFilter(name: "CIDarkenBlendMode")
        else {
            return self
        }
        filter.setDefaults()
        filter.setValue(self, forKey: kCIInputImageKey)
        filter.setValue(CIImage(color: CIColor(color: color)), forKey: kCIInputBackgroundImageKey)
        return filter.outputImage ?? self
    }
}
And here is example of usage:
// Create your timeline
let timeline = ...
timeline.renderSize = renderSize
// Blurred background (will create from current visible frame)
timeline.passingThroughVideoCompositionProvider = BlurredBackgroundComposition(appliedEffect: .gaussianBlur(sigma: 25, isDark: false))
NOTE: When we use a custom VideoCompositionProvider,
item transitions look different — I don't know why. It looks like the background image appears later than the video frame. Any help will be appreciated.
I'm trying to do