
Processing and Displaying Camera Video on macOS



Camera Usage Permission

Add NSCameraUsageDescription to Info.plist.
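
On macOS 10.14 and later the user must also grant camera access at runtime. A minimal sketch of checking and requesting permission before starting the session (startSession and the delegate type come from the code below):

import AVFoundation

/// Ask for camera permission, then start the capture session.
func startCameraIfAuthorized(delegate: AVCaptureVideoDataOutputSampleBufferDelegate) {
    switch AVCaptureDevice.authorizationStatus(for: .video) {
    case .authorized:
        CameraManager.shared.startSession(delegate: delegate)
    case .notDetermined:
        // Shows the system prompt; the NSCameraUsageDescription text appears here
        AVCaptureDevice.requestAccess(for: .video) { granted in
            guard granted else { return }
            DispatchQueue.main.async {
                CameraManager.shared.startSession(delegate: delegate)
            }
        }
    default:
        print("Camera access denied or restricted")
    }
}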

Basic Code

This is almost the same as in an iOS app.

CameraManager.swift
import Cocoa
import AVFoundation

/// Handles all camera-related processing
class CameraManager {
    // Set the name of the target camera if you have one (falls back to the default device)
    private let targetDeviceName = ""
//    private let targetDeviceName = "FaceTime HDカメラ(ディスプレイ)"
//    private let targetDeviceName = "FaceTime HD Camera"

    // AVFoundation
    private let session = AVCaptureSession()
    private var captureDevice: AVCaptureDevice!
    private var videoOutput = AVCaptureVideoDataOutput()

    /// Start the capture session
    func startSession(delegate: AVCaptureVideoDataOutputSampleBufferDelegate) {

        let devices = AVCaptureDevice.devices()
        if devices.count > 0 {
            captureDevice = AVCaptureDevice.default(for: AVMediaType.video)
            // If a target camera is configured, select it instead of the default
            print("\n[Connected cameras]")
            for d in devices {
                if d.localizedName == targetDeviceName {
                    captureDevice = d
                }
                print(d.localizedName)
            }
            print("\n[Camera in use]\n\(captureDevice!.localizedName)\n\n")
            // Configure and start the session
            guard let videoInput = try? AVCaptureDeviceInput(device: captureDevice) else {
                print("Could not create an input from the camera")
                return
            }
            session.beginConfiguration()
            session.sessionPreset = .low
            session.addInput(videoInput)
            session.addOutput(videoOutput)
            session.commitConfiguration()
            session.startRunning()
            // Configure the output so we receive image buffers
            let queue = DispatchQueue(label: "videoOutput", attributes: .concurrent)
            videoOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
            videoOutput.setSampleBufferDelegate(delegate, queue: queue)
            videoOutput.alwaysDiscardsLateVideoFrames = true
        } else {
            print("No camera is connected")
        }
    }

}

// Singleton
extension CameraManager {
    static let shared = CameraManager()
}
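
Note that AVCaptureDevice.devices() used above is deprecated. On macOS 10.15 and later the recommended replacement is AVCaptureDevice.DiscoverySession; a sketch of the equivalent enumeration:

import AVFoundation

// Enumerate video devices without the deprecated devices() API.
// .externalUnknown covers external (USB) cameras on macOS 10.15+.
let discovery = AVCaptureDevice.DiscoverySession(
    deviceTypes: [.builtInWideAngleCamera, .externalUnknown],
    mediaType: .video,
    position: .unspecified)
for d in discovery.devices {
    print(d.localizedName)
}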

Receiving Camera Frames in an NSViewController

Set an AVCaptureVideoDataOutputSampleBufferDelegate and you will receive camera frames in captureOutput.

MainVC.swift
import Cocoa
import AVFoundation

/// View controller for the main screen
class MainVC: NSViewController {

    override func viewDidLoad() {
        super.viewDidLoad()
        // Start the capture session
        CameraManager.shared.startSession(delegate: self)
    }

    override var representedObject: Any? {
        didSet {}
    }

}

/// Receive and process camera frames
extension MainVC: AVCaptureVideoDataOutputSampleBufferDelegate {

    /// Called for every captured frame
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        // Hop to the main thread because the result is drawn into the UI
        DispatchQueue.main.sync(execute: {
            connection.videoOrientation = .portrait
            let pixelBuffer: CVImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)!
            // CIImage
            let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
            let w = CGFloat(CVPixelBufferGetWidth(pixelBuffer))
            let h = CGFloat(CVPixelBufferGetHeight(pixelBuffer))
            let rect = CGRect(x: 0, y: 0, width: w, height: h)
            let context = CIContext()
            // CGImage
            let cgImage = context.createCGImage(ciImage, from: rect)
            // NSImage
            let image = NSImage(cgImage: cgImage!, size: NSSize(width: w, height: h))
            // Process the image and hand it to an NSImageView, etc.
        })
    }

}
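
To actually display the result, assign the NSImage to an NSImageView at the end of captureOutput; we are already on the main thread there thanks to DispatchQueue.main.sync. A sketch, where imageView is a hypothetical @IBOutlet on MainVC (not part of the original code):

// Hypothetical outlet, connected in Interface Builder:
// @IBOutlet weak var imageView: NSImageView!

// At the end of captureOutput, after creating `image`:
imageView.image = image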

Processing Example

This article uses CIFilter, but combined with the Vision framework or OpenCV, the same frame pipeline can also be used for image analysis.
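
For example, a minimal Vision sketch that runs face detection on the CVPixelBuffer obtained in captureOutput (standard Vision API; wiring the call into captureOutput is left to you):

import Vision

/// Detect face bounding boxes in a captured frame.
func detectFaces(in pixelBuffer: CVPixelBuffer) {
    let request = VNDetectFaceRectanglesRequest { request, _ in
        guard let faces = request.results as? [VNFaceObservation] else { return }
        for face in faces {
            // boundingBox is in normalized coordinates (0...1)
            print("face at \(face.boundingBox)")
        }
    }
    let handler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:])
    try? handler.perform([request])
}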

MainVC.swift
// Apply the threshold filter; filterThresholdSlider is an NSSlider outlet
let filter = ThresouldFilter()
filter.inputImage = ciImage
filter.inputAmount = filterThresholdSlider.floatValue
ciImage = filter.outputImage()!  // ciImage must be declared with var here
ThresouldFilter.swift
import Cocoa

class ThresouldFilter: CIFilter {
    // CIKernel (Core Image Kernel Language): binarize by average brightness
    private let kernelStr = """
        kernel vec4 threshold(__sample image, float threshold) {
            vec3 col = image.rgb;
            float bright = 0.33333 * (col.r + col.g + col.b);
            float b = mix(0.0, 1.0, step(threshold, bright));
            return vec4(vec3(b), 1.0);
        }
        """

    private let kernel: CIColorKernel
    var inputImage: CIImage?
    var inputAmount: Float = 0.5

    override init() {
        kernel = CIColorKernel(source: kernelStr)!
        super.init()
    }

    required init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    /// Returns the thresholded image
    func outputImage() -> CIImage? {
        guard let inputImage = inputImage else { return nil }
        return kernel.apply(extent: inputImage.extent, arguments: [inputImage, inputAmount])
    }
}
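
The kernel simply averages the RGB channels to get a brightness value and uses step() to snap each pixel to black or white. One caveat: the string-based CIColorKernel(source:) API (Core Image Kernel Language) is deprecated as of macOS 10.14 in favor of Metal-based kernels, but it still works and keeps this example short.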