毎回調べている気がするので。
Info.plistにPrivacy - Camera Usage Description追加。
必要なプロパティ。
// Capture-session plumbing shared by both the video-data and still-photo paths.
var captureSession = AVCaptureSession()
var previewView = UIView()
var previewLayer: AVCaptureVideoPreviewLayer?
var videoOutput = AVCaptureVideoDataOutput() // used for the video-data (per-frame) path
var photoOutput = AVCapturePhotoOutput() // used for the still-photo path
設定。
動画の場合は、AVCaptureVideoDataOutputSampleBufferDelegateを、写真の場合はAVCapturePhotoCaptureDelegateを使用クラスに設定します。
class ViewController: UIViewController,AVCaptureVideoDataOutputSampleBufferDelegate,AVCapturePhotoCaptureDelegate {
// Lay out the preview container first so the preview layer can size to it.
previewView.frame = view.bounds
view.addSubview(previewView)

captureSession.beginConfiguration()

// Camera input. Avoid `device!` / `try!`: the default camera can be
// unavailable (Simulator, permission denied) and input creation can throw,
// so fail gracefully instead of crashing.
if let device = AVCaptureDevice.default(for: .video),
   let deviceInput = try? AVCaptureDeviceInput(device: device),
   captureSession.canAddInput(deviceInput) {
    captureSession.addInput(deviceInput)
}

//## Video-data case: deliver frames to the delegate on a background queue.
let queue = DispatchQueue(label: "VideoQueue")
videoOutput.setSampleBufferDelegate(self, queue: queue)
if captureSession.canAddOutput(videoOutput) {
    captureSession.addOutput(videoOutput)
}
if let videoConnection = videoOutput.connection(with: .video),
   videoConnection.isVideoOrientationSupported {
    videoConnection.videoOrientation = .portrait
}
//##

//** Photo case: still captures are requested later via capturePhoto(with:delegate:).
if captureSession.canAddOutput(photoOutput) {
    captureSession.addOutput(photoOutput)
}
//**

captureSession.commitConfiguration()

// Preview layer; keep a local so no force-unwrap of the optional property is needed.
let layer = AVCaptureVideoPreviewLayer(session: captureSession)
layer.frame = previewView.bounds
layer.connection?.videoOrientation = .portrait
previewView.layer.addSublayer(layer)
previewLayer = layer

// startRunning() blocks until the session starts, so keep it off the main thread.
DispatchQueue.global(qos: .userInitiated).async {
    self.captureSession.startRunning()
}
撮影。
VideoDataの場合、デリゲートメソッド内でフレームを取得。
// AVCaptureVideoDataOutputSampleBufferDelegate: called for every captured frame
// on the queue passed to setSampleBufferDelegate(_:queue:).
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    // CMSampleBufferGetImageBuffer returns an Optional — unwrap before use.
    guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
    // Process `pixelBuffer` here (e.g. hand it to Vision / Core ML).
    _ = pixelBuffer
}
Photoの場合、capturePhotoしてデリゲートメソッド内で処理。
// `photoOutput` is declared non-optional (var photoOutput = AVCapturePhotoOutput()),
// so optional chaining (`photoOutput?.`) does not compile here — call it directly.
self.photoOutput.capturePhoto(with: AVCapturePhotoSettings(), delegate: self)
// AVCapturePhotoCaptureDelegate: called once the captured photo has been processed.
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
    // Surface capture failures instead of silently ignoring `error`.
    if let error = error {
        print("Photo capture failed: \(error)")
        return
    }
    guard let imageData = photo.fileDataRepresentation(),
          let uiImage = UIImage(data: imageData) else { return }
    // Use `uiImage` here (display, save, feed to a model, …).
    _ = uiImage
}
お仕事のご相談はこちらまで
rockyshikoku@gmail.com
Core MLを使ったアプリを作っています。
機械学習関連の情報を発信しています。
Twitter
[MLBoysチャンネル](https://www.youtube.com/channel/UCbHff-wfjTnB3rtXIP6y0xg)
Medium