liufan321 / SwiftQRCode

Simple QRCode detector and generator in Swift
MIT License
504 stars 133 forks source link

Swift 4 #38

Open reni99 opened 6 years ago

reni99 commented 6 years ago

Hi there

First of all thank you for the framework. Do you plan to update it to Swift 4? I see open PRs concerning Swift 4 migration. Any chance for them to be merged?

dcaric commented 6 years ago

Hi, I have corrected the QRCode.swift file for my project, and it works for me with Swift 4.1 on Xcode 9.3 beta 4.

Dario QRCode.swift.zip

AresVampire commented 6 years ago

This is the latest code perfectly working for Swift 4. Others don't work.

//
//  QRCode.swift
//  QRCode
//
//  Created by 刘凡 on 15/5/15.
//  Copyright (c) 2015 joyios. All rights reserved.
//

import UIKit
import AVFoundation

/// QRCode scanner and generator.
///
/// Scanning: wraps an `AVCaptureSession` + `AVCaptureMetadataOutput`, draws the
/// detected code's corners into an overlay layer, and reports the decoded string
/// through a completion callback. Generation: renders a string into a QR image
/// via `CIQRCodeGenerator`, optionally tinted and with a centered avatar image.
open class QRCode: NSObject, AVCaptureMetadataOutputObjectsDelegate {

    /// corner line width
    var lineWidth: CGFloat
    /// corner stroke color
    var strokeColor: UIColor
    /// the max count for detection
    var maxDetectedCount: Int
    /// current count for detection
    var currentDetectedCount: Int = 0
    /// auto remove sub layers when detection completed
    var autoRemoveSubLayers: Bool
    /// completion call back
    var completedCallBack: ((_ stringValue: String) -> ())?
    /// the scan rect, default is the bounds of the scan view, can modify it if need
    open var scanFrame: CGRect = CGRect.zero

    ///  init function with default appearance (green 4pt corners, 20 detections)
    ///
    ///  - returns: the scanner object
    public override init() {
        self.lineWidth = 4
        self.strokeColor = UIColor.green
        self.maxDetectedCount = 20
        self.autoRemoveSubLayers = false

        super.init()
    }

    ///  init function
    ///
    ///  - parameter autoRemoveSubLayers: remove sub layers auto after detected code image
    ///  - parameter lineWidth:           line width, default is 4
    ///  - parameter strokeColor:         stroke color, default is Green
    ///  - parameter maxDetectedCount:    max detect count, default is 20
    ///
    ///  - returns: the scanner object
    public init(autoRemoveSubLayers: Bool, lineWidth: CGFloat = 4, strokeColor: UIColor = UIColor.green, maxDetectedCount: Int = 20) {

        self.lineWidth = lineWidth
        self.strokeColor = strokeColor
        self.maxDetectedCount = maxDetectedCount
        self.autoRemoveSubLayers = autoRemoveSubLayers

        // explicit super.init() for consistency with the designated initializer above
        super.init()
    }

    deinit {
        if session.isRunning {
            session.stopRunning()
        }

        removeAllLayers()
    }

    // MARK: - Generate QRCode Image

    ///  generate image with default black-on-white colors
    ///
    ///  - parameter stringValue: string value to encode
    ///  - parameter avatarImage: avatar image will display in the center of qrcode image
    ///  - parameter avatarScale: the scale for avatar image, default is 0.25
    ///
    ///  - returns: the generated image, or nil if the Core Image filters are unavailable
    open class func generateImage(_ stringValue: String, avatarImage: UIImage?, avatarScale: CGFloat = 0.25) -> UIImage? {
        return generateImage(stringValue, avatarImage: avatarImage, avatarScale: avatarScale, color: CIColor(color: UIColor.black), backColor: CIColor(color: UIColor.white))
    }

    ///  Generate QRCode Image
    ///
    ///  - parameter stringValue: string value to encode
    ///  - parameter avatarImage: avatar image will display in the center of qrcode image
    ///  - parameter avatarScale: the scale for avatar image, default is 0.25
    ///  - parameter color:       the CI color for foreground, default is black
    ///  - parameter backColor:   the CI color for background, default is white
    ///
    ///  - returns: the generated image, or nil if the Core Image filters are unavailable
    open class func generateImage(_ stringValue: String, avatarImage: UIImage?, avatarScale: CGFloat = 0.25, color: CIColor, backColor: CIColor) -> UIImage? {

        // generate qrcode image
        guard let qrFilter = CIFilter(name: "CIQRCodeGenerator") else { return nil }
        qrFilter.setDefaults()
        qrFilter.setValue(stringValue.data(using: String.Encoding.utf8, allowLossyConversion: false), forKey: "inputMessage")

        // recolor the qrcode image
        guard let colorFilter = CIFilter(name: "CIFalseColor") else { return nil }
        colorFilter.setDefaults()
        colorFilter.setValue(qrFilter.outputImage, forKey: "inputImage")
        colorFilter.setValue(color, forKey: "inputColor0")
        colorFilter.setValue(backColor, forKey: "inputColor1")

        // BUG FIX: the original scaled `qrFilter.outputImage`, discarding the
        // CIFalseColor pass — the `color`/`backColor` parameters had no effect.
        // Scale the *colored* output instead.
        guard let coloredImage = colorFilter.outputImage else { return nil }
        let transform = CGAffineTransform(scaleX: 10, y: 10)
        let transformedImage = coloredImage.transformed(by: transform)

        let image = UIImage(ciImage: transformedImage)

        if let avatarImage = avatarImage {
            return insertAvatarImage(image, avatarImage: avatarImage, scale: avatarScale)
        }

        return image
    }

    ///  Draw `avatarImage` centered over `codeImage`, sized to `scale` of the code image.
    ///
    ///  - returns: the composited image; falls back to the unmodified code image
    ///             if the graphics context fails to produce an image
    class func insertAvatarImage(_ codeImage: UIImage, avatarImage: UIImage, scale: CGFloat) -> UIImage {

        let rect = CGRect(origin: .zero, size: codeImage.size)
        UIGraphicsBeginImageContext(rect.size)
        // guarantee the context is balanced on every exit path
        defer { UIGraphicsEndImageContext() }

        codeImage.draw(in: rect)

        let avatarSize = CGSize(width: rect.width * scale, height: rect.height * scale)
        let x = (rect.width - avatarSize.width) * 0.5
        let y = (rect.height - avatarSize.height) * 0.5
        avatarImage.draw(in: CGRect(x: x, y: y, width: avatarSize.width, height: avatarSize.height))

        // avoid the original force unwrap: return the plain code image on failure
        return UIGraphicsGetImageFromCurrentImageContext() ?? codeImage
    }

    // MARK: - Video Scan

    ///  prepare scan
    ///
    ///  - parameter view:       the scan view, the preview layer and the drawing layer will be insert into this view
    ///  - parameter completion: the completion call back
    open func prepareScan(_ view: UIView, completion: @escaping (_ stringValue: String) -> ()) {

        scanFrame = view.bounds

        completedCallBack = completion
        currentDetectedCount = 0

        setupSession()
        setupLayers(view)
    }

    /// start scan
    open func startScan() {
        if session.isRunning {
            print("the  capture session is running")

            return
        }
        session.startRunning()
    }

    /// stop scan
    open func stopScan() {
        if !session.isRunning {
            print("the capture session is not running")

            return
        }
        session.stopRunning()
    }

    /// Insert the drawing overlay and the camera preview layer into `view`.
    func setupLayers(_ view: UIView) {
        drawLayer.frame = view.bounds
        view.layer.insertSublayer(drawLayer, at: 0)
        previewLayer.frame = view.bounds
        view.layer.insertSublayer(previewLayer, at: 0)
    }

    /// Wire the capture input/output into the session, if possible.
    func setupSession() {
        if session.isRunning {
            print("the capture session is running")
            return
        }

        // BUG FIX: the original force-unwrapped `videoInput!`, crashing on
        // devices without a camera (e.g. the Simulator). Bail out gracefully.
        guard let input = videoInput, session.canAddInput(input) else {
            print("can not add input device")
            return
        }

        guard session.canAddOutput(dataOutput) else {
            print("can not add output device")
            return
        }

        session.addInput(input)
        session.addOutput(dataOutput)

        dataOutput.metadataObjectTypes = dataOutput.availableMetadataObjectTypes
        dataOutput.setMetadataObjectsDelegate(self, queue: DispatchQueue.main)
    }

    /// AVCaptureMetadataOutputObjectsDelegate: draw corners for each detected
    /// code inside `scanFrame`, and deliver the decoded string once the
    /// detection count exceeds `maxDetectedCount`.
    open func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {

        clearDrawLayer()

        for dataObject in metadataObjects {

            // transform once and reuse — the original transformed the same
            // object a second time just to draw the corners
            guard let codeObject = dataObject as? AVMetadataMachineReadableCodeObject,
                let transformedObject = previewLayer.transformedMetadataObject(for: codeObject) as? AVMetadataMachineReadableCodeObject,
                scanFrame.contains(transformedObject.bounds) else {
                    continue
            }

            currentDetectedCount += 1
            if currentDetectedCount > maxDetectedCount {
                session.stopRunning()

                // avoid the original `completedCallBack!(codeObject.stringValue!)`
                // double force unwrap: only deliver when a payload exists
                if let stringValue = codeObject.stringValue {
                    completedCallBack?(stringValue)
                }

                if autoRemoveSubLayers {
                    removeAllLayers()
                }
            }

            drawCodeCorners(transformedObject)
        }
    }

    /// Remove both the preview and drawing layers from their superlayer.
    open func removeAllLayers() {
        previewLayer.removeFromSuperlayer()
        drawLayer.removeFromSuperlayer()
    }

    /// Remove all previously drawn corner shapes from the overlay.
    func clearDrawLayer() {
        guard let sublayers = drawLayer.sublayers else {
            return
        }

        for layer in sublayers {
            layer.removeFromSuperlayer()
        }
    }

    /// Stroke the corner outline of a detected code into the overlay layer.
    func drawCodeCorners(_ codeObject: AVMetadataMachineReadableCodeObject) {
        if codeObject.corners.isEmpty {
            return
        }

        let shapeLayer = CAShapeLayer()
        shapeLayer.lineWidth = lineWidth
        shapeLayer.strokeColor = strokeColor.cgColor
        shapeLayer.fillColor = UIColor.clear.cgColor
        shapeLayer.path = createPath(codeObject.corners as NSArray).cgPath

        drawLayer.addSublayer(shapeLayer)
    }

    /// Build a closed path through the given corner points.
    /// Returns an empty path if the array is empty or holds no CGPoints.
    func createPath(_ points: NSArray) -> UIBezierPath {
        let path = UIBezierPath()

        // avoid the original `as!` casts — skip anything that isn't a CGPoint
        guard points.count > 0, let start = points[0] as? CGPoint else {
            return path
        }
        path.move(to: start)

        for index in 1..<points.count {
            if let point = points[index] as? CGPoint {
                path.addLine(to: point)
            }
        }
        path.close()

        return path
    }

    /// previewLayer
    lazy var previewLayer: AVCaptureVideoPreviewLayer = {
        let layer = AVCaptureVideoPreviewLayer(session: self.session)
        layer.videoGravity = AVLayerVideoGravity.resizeAspectFill
        return layer
    }()

    /// drawLayer
    lazy var drawLayer = CALayer()
    /// session
    lazy var session = AVCaptureSession()
    /// input — nil when no video device is available (e.g. Simulator)
    lazy var videoInput: AVCaptureDeviceInput? = {

        if let device = AVCaptureDevice.default(for: AVMediaType.video) {
            return try? AVCaptureDeviceInput(device: device)
        }
        return nil
    }()

    /// output
    lazy var dataOutput = AVCaptureMetadataOutput()

}

jenthone commented 5 years ago

You can add this to your Podfile:

# Force the SwiftQRCode pod to build with a specific Swift language version.
post_install do |installer|
  installer.pods_project.targets.each do |target|
    if target.name == 'SwiftQRCode'
      target.build_configurations.each do |config|
        # BUG FIX: '3.4' is not a SWIFT_VERSION value Xcode accepts
        # (valid values of that era: 3.0, 4.0, 4.2). This thread is about
        # the Swift 4 migration, so pin 4.0.
        config.build_settings['SWIFT_VERSION'] = '4.0'
      end
    end
  end
end