Learning macOS Development (Part 17.2: Core Image) #261

As we go deeper into the series, my understanding of the various frameworks is as follows:

Now, down to business: let's look at a few Core Image scenarios.

Applying a Chroma Key Effect

This is what's commonly called green-screen (chroma key) matting; intelligent matting without a green screen needs the Vision framework, which comes later in the series.

Two steps:

  1. Use the CIColorCube filter to key out the target color
  2. Use the CISourceOverCompositing filter to composite the result over a background image

This time we'll implement it as a command-line (console) Swift program:

import Cocoa
import CoreGraphics
import CoreImage

run()

func run() {
  // Load the green-screen source image
  let foregroundImage = NSImage(byReferencingFile: "./assets/sample.jpg")
  guard let bitmapRep = foregroundImage?.representations.first as? NSBitmapImageRep else {
    return
  }
  let foregroundCIImage = CIImage(bitmapImageRep: bitmapRep)
  let chromaCIFilter = chromaKeyFilter(fromHue: 0.32, toHue: 0.4)
  chromaCIFilter?.setValue(foregroundCIImage, forKey: kCIInputImageKey)
  guard let sourceCIImageWithoutBackground = chromaCIFilter?.outputImage else {
    print("background remove FAILED")
    return
  }

  let outputRep = NSBitmapImageRep(ciImage: sourceCIImageWithoutBackground)
  if let data = outputRep.representation(using: .png, properties: [:]) {
    do { try data.write(to: URL(fileURLWithPath: "output.png")) } catch { print(error) }
  }
}

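// Convert an RGB triple (each component in 0...1) to its hue component via NSColor.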
func getHue(red: CGFloat, green: CGFloat, blue: CGFloat) -> CGFloat {
  let color = NSColor(red: red, green: green, blue: blue, alpha: 1)
  var hue: CGFloat = 0
  color.getHue(&hue, saturation: nil, brightness: nil, alpha: nil)
  return hue
}

// Build a CIColorCube lookup table that makes pixels whose hue falls within (fromHue, toHue) fully transparent
func chromaKeyFilter(fromHue: CGFloat, toHue: CGFloat) -> CIFilter? {
  let size = 64
  var cubeRGB = [Float]()

  for z in 0 ..< size {
    let blue = CGFloat(z) / CGFloat(size - 1)
    for y in 0 ..< size {
      let green = CGFloat(y) / CGFloat(size - 1)
      for x in 0 ..< size {
        let red = CGFloat(x) / CGFloat(size - 1)

        let hue = getHue(red: red, green: green, blue: blue)
        let alpha: CGFloat = (hue >= fromHue && hue <= toHue) ? 0 : 1

        cubeRGB.append(Float(red * alpha))
        cubeRGB.append(Float(green * alpha))
        cubeRGB.append(Float(blue * alpha))
        cubeRGB.append(Float(alpha))
      }
    }
  }

  let data = cubeRGB.withUnsafeBufferPointer { Data(buffer: $0) }

  let colorCubeFilter = CIFilter(name: "CIColorCube", parameters: ["inputCubeDimension": size, "inputCubeData": data])
  return colorCubeFilter
}

The result:

Next, overlay the keyed image onto a background:

let backgroundImage = NSImage(byReferencingFile: "./assets/back.jpg")
guard let backgroundBitmapRep = backgroundImage?.representations.first as? NSBitmapImageRep else {
  return
}

let backgroundCIImage = CIImage(bitmapImageRep: backgroundBitmapRep)
guard let compositor = CIFilter(name: "CISourceOverCompositing") else {
  print("create CISourceOverCompositing FAILED")
  return
}
compositor.setValue(sourceCIImageWithoutBackground, forKey: kCIInputImageKey)
compositor.setValue(backgroundCIImage, forKey: kCIInputBackgroundImageKey)
guard let compositedCIImage = compositor.outputImage else {
  print("composited FAILED")
  return
}

// Save the phase-2 (composited) result
let compositedRep = NSBitmapImageRep(ciImage: compositedCIImage)
if let data = compositedRep.representation(using: .png, properties: [:]) {
  do { try data.write(to: URL(fileURLWithPath: "phase2.png")) } catch { print(error) }
}

phase2.png (the composited result)

Comparing the two, the Core Graphics color-mask approach from earlier doesn't even come close.

Next, adjust the scale and the position at which the foreground is composited:

// Note: sourceCIImageWithoutBackground must be declared with `guard var` (not `guard let`) so it can be reassigned here
sourceCIImageWithoutBackground = scaleFilter(sourceCIImageWithoutBackground, aspectRatio: 1.0, scale: 0.35)
sourceCIImageWithoutBackground = perspectiveFilter(sourceCIImageWithoutBackground, pixelsWide: backgroundBitmapRep.pixelsWide, pixelsHigh: backgroundBitmapRep.pixelsHigh)

// Center `input` on a canvas of pixelsWide x pixelsHigh; the four corners form an axis-aligned
// rectangle, so CIPerspectiveTransform acts here as a simple translation.
func perspectiveFilter(_ input: CIImage, pixelsWide: Int, pixelsHigh: Int) -> CIImage {
  let filter = CIFilter(name: "CIPerspectiveTransform")!
  let w = Float(input.extent.size.width)
  let h = Float(input.extent.size.height)
  let centerX = Float(pixelsWide) / 2
  let centerY = Float(pixelsHigh) / 2

  print("\(w)x\(h)")
  print("center: \(centerX), \(centerY)")

  filter.setValue(CIVector(x: CGFloat(centerX - w / 2), y: CGFloat(centerY + h / 2)), forKey: "inputTopLeft")
  filter.setValue(CIVector(x: CGFloat(centerX + w / 2), y: CGFloat(centerY + h / 2)), forKey: "inputTopRight")
  filter.setValue(CIVector(x: CGFloat(centerX - w / 2), y: CGFloat(centerY - h / 2)), forKey: "inputBottomLeft")
  filter.setValue(CIVector(x: CGFloat(centerX + w / 2), y: CGFloat(centerY - h / 2)), forKey: "inputBottomRight")
  filter.setValue(input, forKey: kCIInputImageKey)
  return filter.outputImage!
}

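// Downscale the image with CILanczosScaleTransform (high-quality Lanczos resampling).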
func scaleFilter(_ input: CIImage, aspectRatio: Double, scale: Double) -> CIImage {
  let scaleFilter = CIFilter(name: "CILanczosScaleTransform")!
  scaleFilter.setValue(input, forKey: kCIInputImageKey)
  scaleFilter.setValue(scale, forKey: kCIInputScaleKey)
  scaleFilter.setValue(aspectRatio, forKey: kCIInputAspectRatioKey)
  return scaleFilter.outputImage!
}

Finally, filters can be chained by feeding one filter's outputImage into the next filter's input; nothing is actually rendered until you draw the final CIImage through a CIContext, which lets Core Image fuse the whole chain, avoid intermediate buffers, and harvest the result in a single pass.
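As a rough sketch of that last point, assuming the compositedCIImage produced above (final.png is just an illustrative output path), the single render pass might look like this:

let context = CIContext() // one shared context; reuse it across renders
// compositedCIImage is still only a recipe: chroma key -> scale -> perspective -> source-over.
// The actual rendering happens in createCGImage, in one pass, with Core Image free to
// concatenate the filters and skip intermediate buffers where possible.
if let cgImage = context.createCGImage(compositedCIImage, from: compositedCIImage.extent) {
  let rep = NSBitmapImageRep(cgImage: cgImage)
  if let data = rep.representation(using: .png, properties: [:]) {
    do { try data.write(to: URL(fileURLWithPath: "final.png")) } catch { print(error) }
  }
}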

Further reading