
A camera template in Swift using AVCaptureVideoDataOutput

Posted at 2014-12-12

This is a note-to-self. The code below targets the Swift 1.x / iOS 8-era AVFoundation APIs: it grabs raw camera frames with AVCaptureVideoDataOutput and draws them into a UIImageView.

import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    // The image view the captured frames are drawn into
    @IBOutlet weak var preview: UIImageView!

    var device: AVCaptureDevice!
    var session: AVCaptureSession!
    var adjustingExposure: Bool!

    override func viewWillAppear(animated: Bool) {
        super.viewWillAppear(animated)
        self.initCamera()
    }

    override func viewDidDisappear(animated: Bool) {
        super.viewDidDisappear(animated)

        // Remove the KVO observer before tearing the session down
        self.device.removeObserver(self, forKeyPath: "adjustingExposure")

        self.session.stopRunning()
        for output in self.session.outputs {
            self.session.removeOutput(output as AVCaptureOutput)
        }

        for input in self.session.inputs {
            self.session.removeInput(input as AVCaptureInput)
        }
        self.session = nil
        self.device = nil
    }

    private func initCamera() {
        // Pick the back-facing camera
        for captureDevice: AnyObject in AVCaptureDevice.devices() {
            if captureDevice.position == AVCaptureDevicePosition.Back {
                self.device = captureDevice as AVCaptureDevice
            }
        }

        // The device must be locked for configuration before
        // changing the frame duration (here: capped at 30 fps)
        if self.device.lockForConfiguration(nil) {
            self.device.activeVideoMinFrameDuration = CMTimeMake(1, 30)
            self.device.unlockForConfiguration()
        }

        let input = AVCaptureDeviceInput.deviceInputWithDevice(self.device, error: nil) as AVCaptureDeviceInput

        // Deliver frames as 32-bit BGRA on a dedicated serial queue,
        // dropping late frames instead of queueing them up
        let cameraQueue = dispatch_queue_create("cameraQueue", nil)
        let videoDataOutput = AVCaptureVideoDataOutput()
        videoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey : kCVPixelFormatType_32BGRA]
        videoDataOutput.setSampleBufferDelegate(self, queue: cameraQueue)
        videoDataOutput.alwaysDiscardsLateVideoFrames = true

        self.session = AVCaptureSession()

        if self.session.canAddInput(input) {
            self.session.addInput(input)
        }

        if self.session.canAddOutput(videoDataOutput) {
            self.session.addOutput(videoDataOutput)
        }

        self.session.sessionPreset = AVCaptureSessionPreset1920x1080
        self.session.startRunning()

        // Observe adjustingExposure so tap-to-expose can lock the
        // exposure once the automatic adjustment has settled
        self.adjustingExposure = false
        self.device.addObserver(self, forKeyPath: "adjustingExposure", options: NSKeyValueObservingOptions.New, context: nil)
    }

    // Called on cameraQueue for every frame; UIKit may only be
    // touched on the main queue, hence the dispatch_async
    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        let image = self.imageFromSampleBuffer(sampleBuffer)
        dispatch_async(dispatch_get_main_queue()) {
            self.preview.image = image
        }
    }

    private func imageFromSampleBuffer(sampleBuffer: CMSampleBufferRef) -> UIImage {
        let imageBuffer: CVImageBufferRef = CMSampleBufferGetImageBuffer(sampleBuffer)

        // The buffer must stay locked while its base address is read
        CVPixelBufferLockBaseAddress(imageBuffer, 0)

        // 32BGRA buffers are non-planar, so read the base address directly
        let baseAddress: UnsafeMutablePointer<Void> = CVPixelBufferGetBaseAddress(imageBuffer)

        let bytesPerRow: UInt = CVPixelBufferGetBytesPerRow(imageBuffer)
        let width: UInt = CVPixelBufferGetWidth(imageBuffer)
        let height: UInt = CVPixelBufferGetHeight(imageBuffer)

        let colorSpace: CGColorSpaceRef = CGColorSpaceCreateDeviceRGB()

        // BGRA is "32-bit little-endian, alpha first" in CoreGraphics terms
        let bitsPerComponent: UInt = 8
        let bitmapInfo = CGBitmapInfo((CGBitmapInfo.ByteOrder32Little.rawValue | CGImageAlphaInfo.PremultipliedFirst.rawValue) as UInt32)
        let newContext: CGContextRef = CGBitmapContextCreate(baseAddress, width, height, bitsPerComponent, bytesPerRow, colorSpace, bitmapInfo) as CGContextRef

        let imageRef: CGImageRef = CGBitmapContextCreateImage(newContext)
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0)

        // Rotate: the sensor delivers frames in landscape orientation
        return UIImage(CGImage: imageRef, scale: 1.0, orientation: UIImageOrientation.Right)!
    }

    // Example of deriving the focusPoint argument from a tap on the preview:
    // let anyTouch = sender as UIGestureRecognizer
    // let origin = anyTouch.locationInView(self.preview)
    // let focusPoint = CGPointMake(origin.y / self.preview.bounds.size.height, 1 - origin.x / self.preview.bounds.size.width)
    func setFocusAndExposure(focusPoint: CGPoint) {
        if self.device.lockForConfiguration(nil) {
            self.device.focusPointOfInterest = focusPoint
            self.device.focusMode = AVCaptureFocusMode.AutoFocus

            if self.device.isExposureModeSupported(AVCaptureExposureMode.AutoExpose) {
                // Flag the adjustment so the KVO handler below knows
                // to lock exposure once the device has settled
                self.adjustingExposure = true
                self.device.exposurePointOfInterest = focusPoint
                self.device.exposureMode = AVCaptureExposureMode.AutoExpose
            }
            self.device.unlockForConfiguration()
        }
    }
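
    // (Not in the original post) A minimal sketch of how the commented-out
    // lines above might be wired up as a tap-to-focus handler. The method
    // name "tapped:" and the gesture setup are assumptions; attach the
    // recognizer to the preview, e.g. in viewDidLoad:
    //     preview.userInteractionEnabled = true
    //     preview.addGestureRecognizer(UITapGestureRecognizer(target: self, action: "tapped:"))
    func tapped(sender: UITapGestureRecognizer) {
        let origin = sender.locationInView(self.preview)
        // Point-of-interest coordinates are normalized to (0,0)-(1,1) in the
        // sensor's landscape space, hence the swapped/flipped components
        let focusPoint = CGPointMake(origin.y / self.preview.bounds.size.height,
                                     1 - origin.x / self.preview.bounds.size.width)
        self.setFocusAndExposure(focusPoint)
    }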

    // Fires when the device's adjustingExposure flag changes; once the
    // automatic adjustment finishes, freeze the exposure at that value
    override func observeValueForKeyPath(keyPath: String, ofObject object: AnyObject, change: [NSObject : AnyObject], context: UnsafeMutablePointer<Void>) {
        if !self.adjustingExposure {
            return
        }

        if keyPath == "adjustingExposure" {
            let isNew = change[NSKeyValueChangeNewKey]! as Bool
            if !isNew {
                self.adjustingExposure = false

                if self.device.lockForConfiguration(nil) {
                    self.device.exposureMode = AVCaptureExposureMode.Locked
                    self.device.unlockForConfiguration()
                }
            }
        }
    }
}

Reference

Swiftでカメラを使う
http://mslgt.hatenablog.com/entry/2014/09/24/233459
