Real-Time Face Detection in an iOS App [Code Only] [Swift 5]

The code below can be copied and used as-is (after the small setup note that follows this list).

1: Start the front camera
2: Detect faces
3: Draw a rectangular frame over the face
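
Setup note (not spelled out in the original): starting an AVCaptureSession requires usage-description keys in Info.plist, otherwise iOS terminates the app when the session starts. Because the code below also adds an audio input, you need both NSCameraUsageDescription and NSMicrophoneUsageDescription.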


import UIKit
import AVFoundation

class ViewController: UIViewController {
    
    var faceTracker: FaceTracker!

    var rectView = UIView()
    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        guard faceTracker == nil else { return } // viewDidAppear can fire more than once
        rectView.layer.borderWidth = 2 // rectangular frame (CALayer borders default to black)
        faceTracker = FaceTracker(view: view, findface: { arr in
            let rect = arr[0] // use only the first detected face
            print(rect)
            self.rectView.frame = rect // move the frame onto the face
        })
        view.addSubview(rectView) // added after the preview layer so the frame stays on top
    }
}
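
// Variation (not in the original article): the findface callback receives every
// detected face, so all of them can be outlined by keeping one view per rect:
//
//     var faceViews = [UIView]()
//     ...
//     faceTracker = FaceTracker(view: view, findface: { arr in
//         self.faceViews.forEach { $0.removeFromSuperview() }
//         self.faceViews = arr.map { rect -> UIView in
//             let v = UIView(frame: rect)
//             v.layer.borderWidth = 2
//             self.view.addSubview(v)
//             return v
//         }
//     })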


class FaceTracker: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
    
    let captureSession = AVCaptureSession()
    // .builtInWideAngleCamera exists on every device; the original
    // .builtInTrueDepthCamera returns nil on models without Face ID,
    // which crashes the force-unwrap in initialize()
    var videoDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front)
    let audioDevice = AVCaptureDevice.default(for: .audio)

    var videoOutput = AVCaptureVideoDataOutput()
    // build the detector once; constructing a CIDetector per frame is expensive
    private let detector = CIDetector(ofType: CIDetectorTypeFace, context: nil,
                                      options: [CIDetectorAccuracy: CIDetectorAccuracyLow])!
    var view: UIView
    private var findface: ([CGRect]) -> Void
    
    required init(view: UIView, findface: @escaping ([CGRect]) -> Void) {
        self.view = view
        self.findface = findface
        super.init()
        initialize()
    }

    func initialize() {

        // register the capture devices; AVCaptureDeviceInput requires the
        // Info.plist usage descriptions mentioned above
        let videoInput = try! AVCaptureDeviceInput(device: videoDevice!)
        captureSession.addInput(videoInput)

        // the audio input is not needed for face detection; remove these two
        // lines (and the microphone permission) if you only want video
        let audioInput = try! AVCaptureDeviceInput(device: audioDevice!)
        captureSession.addInput(audioInput)

        videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]

        // register the delegate that is called for every frame;
        // a sample-buffer delegate queue must be serial, not concurrent
        let queue = DispatchQueue(label: "myqueue")
        videoOutput.setSampleBufferDelegate(self, queue: queue)
        videoOutput.alwaysDiscardsLateVideoFrames = true

        captureSession.addOutput(videoOutput)

        let videoLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        videoLayer.frame = view.bounds
        videoLayer.videoGravity = .resizeAspectFill
        
        view.layer.addSublayer(videoLayer)

        // camera orientation
        for c in videoOutput.connections {
            if c.isVideoOrientationSupported {
                c.videoOrientation = .portrait
            }
        }
        // startRunning() blocks until the session is live, so keep it off the main thread
        DispatchQueue.global(qos: .userInitiated).async {
            self.captureSession.startRunning()
        }
    }

    func imageFromSampleBuffer(sampleBuffer: CMSampleBuffer) -> UIImage {
        // convert the sample buffer to a UIImage
        let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)!
        CVPixelBufferLockBaseAddress(imageBuffer, CVPixelBufferLockFlags(rawValue: 0))
        // 32BGRA buffers are non-planar, so use the plain base address
        let context = CGContext(data: CVPixelBufferGetBaseAddress(imageBuffer),
                                width: CVPixelBufferGetWidth(imageBuffer),
                                height: CVPixelBufferGetHeight(imageBuffer),
                                bitsPerComponent: 8,
                                bytesPerRow: CVPixelBufferGetBytesPerRow(imageBuffer),
                                space: CGColorSpaceCreateDeviceRGB(),
                                bitmapInfo: CGBitmapInfo.byteOrder32Little.rawValue | CGImageAlphaInfo.premultipliedFirst.rawValue)
        let imageRef = context!.makeImage()
        CVPixelBufferUnlockBaseAddress(imageBuffer, CVPixelBufferLockFlags(rawValue: 0))
        return UIImage(cgImage: imageRef!)
    }
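
    // Simpler alternative (not in the original article): for detection alone the
    // CGContext round-trip above is unnecessary, because a CIImage can be built
    // straight from the pixel buffer.
    func ciImageFromSampleBuffer(sampleBuffer: CMSampleBuffer) -> CIImage? {
        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return nil }
        return CIImage(cvPixelBuffer: imageBuffer)
    }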

    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {

        // .sync keeps the original structure: it blocks the capture queue while a
        // frame is processed, so late frames are discarded instead of piling up
        DispatchQueue.main.sync(execute: {

            // convert the buffer to a UIImage
            let image = self.imageFromSampleBuffer(sampleBuffer: sampleBuffer)
            let ciimage = CIImage(image: image)!

            let faces = self.detector.features(in: ciimage)
            
            if !faces.isEmpty {
                var rects = [CGRect]()
                for face in faces {

                    // coordinate conversion (CIFeature exposes bounds directly)
                    var faceR: CGRect = face.bounds
                    let widthPer = view.bounds.width / image.size.width
                    let heightPer = view.bounds.height / image.size.height

                    // Core Image puts the origin at the bottom-left, so flip to UIKit's top-left
                    faceR.origin.y = image.size.height - faceR.origin.y - faceR.size.height

                    // scale from image pixels to view points
                    faceR.origin.x *= widthPer
                    faceR.origin.y *= heightPer
                    faceR.size.width *= widthPer
                    faceR.size.height *= heightPer

                    rects.append(faceR)
                }
                findface(rects)
            }
        })
    }
}
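
CIDetector still works, but on iOS 11 and later the Vision framework is the more modern route for the same detection step. A minimal sketch of a drop-in detection function (my adaptation, not from the original article; converting Vision's normalized, bottom-left-origin boxes into view coordinates is left to the caller):

import Vision

func detectFaces(in pixelBuffer: CVPixelBuffer, completion: @escaping ([CGRect]) -> Void) {
    // Vision returns bounding boxes normalized to [0, 1] with a bottom-left origin
    let request = VNDetectFaceRectanglesRequest { request, _ in
        let boxes = (request.results as? [VNFaceObservation])?.map { $0.boundingBox } ?? []
        completion(boxes)
    }
    let handler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:])
    try? handler.perform([request])
}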

Reference
[コピペで使える]swift3/swift4/swift5でリアルタイム顔認識をする方法 ("Real-time face detection in swift3/swift4/swift5 [copy-paste ready]")
