
Real-Time Face Detection on iOS

Hi, I'm ren.

In this post, we'll build a real-time face detection app.
I put it together in about a week, so if you spot any mistakes, I'd appreciate it if you pointed them out.

Environment

macOS Catalina 10.15
Xcode 11.1
iOS 13
Swift 5

Flow

Tap the "Login" button

Launch the front-facing camera

Grab each frame from the video output and convert it to an image

Detect a face in the image

Display a frame at the detected face's coordinates

Preparation

Since this app uses the camera, add Privacy - Camera Usage Description to your Info.plist.
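
The original article stops at the Info.plist key, but it can also help to request camera permission explicitly before starting the session, so a denial can be handled gracefully. Here is a minimal sketch using AVFoundation's standard authorization API (the helper name is mine, not part of the article's code):

import AVFoundation

// Hypothetical helper: checks the current camera authorization status and
// prompts the user if it has not been determined yet.
func requestCameraPermission(_ completion: @escaping (Bool) -> Void) {
    switch AVCaptureDevice.authorizationStatus(for: .video) {
    case .authorized:
        completion(true)
    case .notDetermined:
        AVCaptureDevice.requestAccess(for: .video) { granted in
            DispatchQueue.main.async { completion(granted) }
        }
    default:
        // .denied or .restricted: the camera cannot be used
        completion(false)
    }
}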

Implementation

// Initial view controller
import UIKit

class ViewController: UIViewController {

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view.
    }

    @IBAction func tappedLogin(_ sender: Any) {
        self.performSegue(withIdentifier: "gotoFaceDetect", sender: nil)
    }

}
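
(This assumes the storyboard contains a segue with the identifier gotoFaceDetect leading from ViewController to the face detection screen.)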

// View controller that performs face detection
import UIKit

class FaceDetectViewController: UIViewController {

    @IBOutlet weak var cameraView: UIView!

    // Object that performs the face detection
    private var faceDetecter: FaceDetecter?
    // View that draws a frame around the detected face
    private let frameView = UIView()
    // The most recently captured frame as an image
    private var image = UIImage()

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        setup()
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        if let faceDetecter = faceDetecter {
            faceDetecter.stopRunning()
        }
        faceDetecter = nil
    }

    private func setup() {
        frameView.layer.borderWidth = 3
        view.addSubview(frameView)
        // Capture self weakly to avoid a retain cycle, since FaceDetecter
        // stores this closure for the lifetime of the session
        faceDetecter = FaceDetecter(view: cameraView, completion: { [weak self] faceRect, image in
            self?.frameView.frame = faceRect
            self?.image = image
        })
    }

    private func stopRunning() {
        guard let faceDetecter = faceDetecter else { return }
        faceDetecter.stopRunning()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    @IBAction func tappedBackButton(_ sender: UIButton) {
        dismiss(animated: true, completion: nil)
    }

}
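
The completion closure runs on the main thread (the capture delegate shown later dispatches to it with DispatchQueue.main.sync), so updating frameView.frame here directly is safe.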

// Class that performs face detection
import UIKit
import AVFoundation

final class FaceDetecter: NSObject {
    private let captureSession = AVCaptureSession()
    private var videoDataOutput = AVCaptureVideoDataOutput()
    // Build the CIDetector once up front; constructing it for every frame
    // in the capture callback would be needlessly expensive
    private let detector = CIDetector(ofType: CIDetectorTypeFace, context: nil, options: [CIDetectorAccuracy: CIDetectorAccuracyHigh])
    private var view: UIView
    private var completion: (_ rect: CGRect, _ image: UIImage) -> Void

    required init(view: UIView, completion: @escaping (_ rect: CGRect, _ image: UIImage) -> Void) {
        self.view = view
        self.completion = completion
        super.init()
        self.initialize()
    }

    private func initialize() {
        addCaptureSessionInput()
        registerDelegate()
        setVideoDataOutput()
        addCaptureSessionOutput()
        addVideoPreviewLayer()
        setCameraOrientation()
        startRunning()
    }

    private func addCaptureSessionInput() {
        do {
            guard let frontVideoCamera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) else { return }
            let frontVideoCameraInput = try AVCaptureDeviceInput(device: frontVideoCamera)
            if captureSession.canAddInput(frontVideoCameraInput) {
                captureSession.addInput(frontVideoCameraInput)
            }
        } catch let error {
            print(error)
        }
    }

    private func setVideoDataOutput() {
        videoDataOutput.alwaysDiscardsLateVideoFrames = true

        // Request BGRA frames so they can be rendered directly with CoreGraphics
        let pixelFormatTypeKey = kCVPixelBufferPixelFormatTypeKey as String
        let pixelFormatTypeValue = Int(kCVPixelFormatType_32BGRA)

        videoDataOutput.videoSettings = [pixelFormatTypeKey: pixelFormatTypeValue]
    }

    private func setCameraOrientation() {
        for connection in videoDataOutput.connections where connection.isVideoOrientationSupported {
            connection.videoOrientation = .portrait
            connection.isVideoMirrored = true
        }
    }

    private func registerDelegate() {
        // The sample buffer delegate requires a serial queue so frames are
        // delivered in order; a concurrent queue is not supported here
        let queue = DispatchQueue(label: "videoDataOutputQueue")
        videoDataOutput.setSampleBufferDelegate(self, queue: queue)
    }

    private func addCaptureSessionOutput() {
        if captureSession.canAddOutput(videoDataOutput) {
            captureSession.addOutput(videoDataOutput)
        }
    }

    private func addVideoPreviewLayer() {
        let videoPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        videoPreviewLayer.frame = view.bounds
        videoPreviewLayer.videoGravity = .resizeAspectFill

        view.layer.addSublayer(videoPreviewLayer)
    }

    func startRunning() {
        captureSession.startRunning()
    }

    func stopRunning() {
        captureSession.stopRunning()
    }

    private func convertToImage(from sampleBuffer: CMSampleBuffer) -> UIImage? {

        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return nil }

        CVPixelBufferLockBaseAddress(imageBuffer, .readOnly)
        // defer guarantees the buffer is unlocked even on the early return below
        defer { CVPixelBufferUnlockBaseAddress(imageBuffer, .readOnly) }

        let baseAddress = CVPixelBufferGetBaseAddress(imageBuffer)
        let width = CVPixelBufferGetWidth(imageBuffer)
        let height = CVPixelBufferGetHeight(imageBuffer)

        let bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer)
        let colorSpace = CGColorSpaceCreateDeviceRGB()
        // byteOrder32Little + premultipliedFirst matches the BGRA layout requested earlier
        let bitmapInfo = (CGBitmapInfo.byteOrder32Little.rawValue | CGImageAlphaInfo.premultipliedFirst.rawValue)
        let context = CGContext(data: baseAddress, width: width, height: height, bitsPerComponent: 8, bytesPerRow: bytesPerRow, space: colorSpace, bitmapInfo: bitmapInfo)

        guard let imageRef = context?.makeImage() else { return nil }

        return UIImage(cgImage: imageRef)
    }
}
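
One caveat: captureSession.startRunning() is a blocking call, and Apple's guidance is to start the session on a background queue rather than the main thread. The code above starts it from viewDidAppear via init on the main thread, which works for a small demo but can briefly stall the UI.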

extension FaceDetecter: AVCaptureVideoDataOutputSampleBufferDelegate {
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        // Hop to the main thread synchronously so the sample buffer remains
        // valid while the frame is converted and handed to the UI
        DispatchQueue.main.sync(execute: {

            guard let image = convertToImage(from: sampleBuffer), let ciimage = CIImage(image: image) else { return }
            guard let detector = detector else { return }
            guard let feature = detector.features(in: ciimage).first else { return }

            sendFaceRect(feature: feature, image: image)

        })
    }

    private func sendFaceRect(feature: CIFeature, image: UIImage) {
        var faceRect = feature.bounds

        let widthPer = view.bounds.width / image.size.width
        let heightPer = view.bounds.height / image.size.height

        // Align the origins: Core Image coordinates start at the bottom-left,
        // while UIKit coordinates start at the top-left, so flip the y-axis
        faceRect.origin.y = image.size.height - faceRect.origin.y - faceRect.size.height

        // Scale from image coordinates to the preview view's coordinates
        faceRect.origin.x *= widthPer
        faceRect.origin.y *= heightPer
        faceRect.size.width *= widthPer
        faceRect.size.height *= heightPer

        completion(faceRect, image)
    }
}
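
As an aside, not covered in the original article: on iOS 11 and later, the Vision framework can detect faces straight from the pixel buffer, skipping the UIImage round trip that CIDetector needs here. A minimal sketch of that alternative (the function name is mine, not part of the article's code):

import AVFoundation
import Vision

// Hedged sketch of a Vision-based detector. VNFaceObservation.boundingBox
// is normalized (0 to 1) with a bottom-left origin, so the results still
// need mapping into the preview view's coordinate space, as above.
func detectFaces(in sampleBuffer: CMSampleBuffer, completion: @escaping ([CGRect]) -> Void) {
    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
    let request = VNDetectFaceRectanglesRequest { request, _ in
        let boxes = (request.results as? [VNFaceObservation])?.map { $0.boundingBox } ?? []
        DispatchQueue.main.async { completion(boxes) }
    }
    let handler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:])
    try? handler.perform([request])
}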

This article draws on the following post:
[コピペで使える]swift3/swift4/swift5でリアルタイム顔認識をする方法
