Real-Time Video Compositing with AVFoundation

Notes from implementing real-time video compositing with AVCaptureVideoDataOutput.

First, set up video capture as usual:

import AVFoundation

let session: AVCaptureSession = AVCaptureSession()

let device: AVCaptureDevice = AVCaptureDevice.defaultDevice(withDeviceType: .builtInWideAngleCamera, mediaType: AVMediaTypeVideo, position: .back)

// Frame-rate settings require locking the device for configuration first
try! device.lockForConfiguration()
device.activeVideoMinFrameDuration = CMTimeMake(1, 30)
device.unlockForConfiguration()

let videoInput: AVCaptureDeviceInput = try! AVCaptureDeviceInput.init(device: device)
session.addInput(videoInput)

let imageOutput: AVCapturePhotoOutput = AVCapturePhotoOutput()
session.addOutput(imageOutput)

let videoQueue: DispatchQueue = DispatchQueue(label: "videoqueue")

// Use a VideoDataOutput so each frame is delivered as a sample buffer
let videoDataOutput: AVCaptureVideoDataOutput = AVCaptureVideoDataOutput()
videoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable: kCVPixelFormatType_32BGRA]
videoDataOutput.setSampleBufferDelegate(self, queue: videoQueue)
videoDataOutput.alwaysDiscardsLateVideoFrames = true
session.addOutput(videoDataOutput)
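
With the input and outputs attached, nothing happens until the session is started; a minimal sketch:

// Frames start arriving at the delegate once the session runs
session.startRunning()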

Once capture starts, the method below is called for every frame, so the per-frame processing goes there:

func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
    // Per-frame processing goes here
}
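
For the setSampleBufferDelegate(self, ...) call above to work, the enclosing class has to adopt the delegate protocol; a sketch with a hypothetical class name:

// Hypothetical view controller; the name is illustrative
class CameraViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
    // session setup and the captureOutput callback go here
}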

Compositing follows this flow:

  1. Convert the CMSampleBuffer to a UIImage
  2. Process that UIImage to produce a new, composited UIImage
  3. Convert the resulting UIImage back to a CVPixelBuffer

// CMSampleBuffer → UIImage
func uiImageFromCMSampleBuffer(buffer: CMSampleBuffer) -> UIImage {
    let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(buffer)!
    let ciImage: CIImage = CIImage(cvPixelBuffer: pixelBuffer)
    let image: UIImage = UIImage(ciImage: ciImage)
    return image
}

// Compositing
// A minimal example: redraw the frame and draw overlays on top.
// Redrawing also produces a CGImage-backed UIImage, which the
// CVPixelBuffer conversion below requires.
func synthesis(image: UIImage) -> UIImage {
    UIGraphicsBeginImageContextWithOptions(image.size, true, 1.0)
    image.draw(in: CGRect(origin: .zero, size: image.size))
    // Draw watermark, text, etc. here
    let newImage: UIImage = UIGraphicsGetImageFromCurrentImageContext()!
    UIGraphicsEndImageContext()
    return newImage
}

// UIImage → CVPixelBuffer
func pixelBufferFromUIImage(image: UIImage) -> CVPixelBuffer {
    // Requires a CGImage-backed UIImage (a purely CIImage-backed one returns nil here)
    let cgImage: CGImage = image.cgImage!

    let options = [
        kCVPixelBufferCGImageCompatibilityKey as String: true,
        kCVPixelBufferCGBitmapContextCompatibilityKey as String: true
    ]

    var pxBuffer: CVPixelBuffer? = nil

    let width: Int = cgImage.width
    let height: Int = cgImage.height

    CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_32ARGB, options as CFDictionary?, &pxBuffer)

    CVPixelBufferLockBaseAddress(pxBuffer!, CVPixelBufferLockFlags(rawValue: 0))

    let pxData: UnsafeMutableRawPointer = CVPixelBufferGetBaseAddress(pxBuffer!)!

    let bitsPerComponent: size_t = 8
    let bytePerRow: size_t = 4 * width

    let rgbColorSpace: CGColorSpace = CGColorSpaceCreateDeviceRGB()
    let context: CGContext = CGContext(data: pxData, width: width, height: height, bitsPerComponent: bitsPerComponent, bytesPerRow: bytePerRow, space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue)!

    context.draw(cgImage, in: CGRect(x: 0, y: 0, width: CGFloat(width), height: CGFloat(height)))

    CVPixelBufferUnlockBaseAddress(pxBuffer!, CVPixelBufferLockFlags(rawValue: 0))

    return pxBuffer!
}
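
Chaining the three helpers gives the whole per-frame pipeline; a minimal sketch (the wrapper name is mine):

// Steps 1–3 in one pass: sample buffer → UIImage → composite → pixel buffer
func processedPixelBuffer(from sampleBuffer: CMSampleBuffer) -> CVPixelBuffer {
    let source: UIImage = uiImageFromCMSampleBuffer(buffer: sampleBuffer)
    return pixelBufferFromUIImage(image: synthesis(image: source))
}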

To display the result on screen, set the UIImage on a UIImageView at step 2.
To save it, carry on through step 3 and then write the frames out with AVAssetWriter.
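
Note that the delegate fires on videoQueue, while UIKit may only be touched on the main thread, so the step-2 display needs a hop; a sketch with a hypothetical image view:

// composited is the UIImage produced at step 2;
// previewImageView is a hypothetical UIImageView outlet
DispatchQueue.main.async {
    self.previewImageView.image = composited
}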

Preparing the AVAssetWriter
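
The snippet below uses fileUrl and size without defining them; possible definitions, purely as an assumption:

// Hypothetical output location and dimensions
let fileUrl: URL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("out.mov")
let size: CGSize = CGSize(width: 1920, height: 1080)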

let fileWriter: AVAssetWriter = try! AVAssetWriter(outputURL: fileUrl, fileType: AVFileTypeQuickTimeMovie)

let videoOutputSettings: Dictionary<String, Any> = [
    AVVideoCodecKey: AVVideoCodecH264 as Any,
    AVVideoWidthKey: size.width as Any,
    AVVideoHeightKey: size.height as Any
]
let videoInput: AVAssetWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoOutputSettings)
videoInput.expectsMediaDataInRealTime = true
fileWriter.add(videoInput)

let adaptor: AVAssetWriterInputPixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoInput, sourcePixelBufferAttributes: [
    kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32ARGB),
    kCVPixelBufferWidthKey as String: size.width,
    kCVPixelBufferHeightKey as String: size.height
])

var frameCount: Int = 0
var firstTime: CMTime = kCMTimeZero  // presentation time of the first frame

fileWriter.startWriting()
fileWriter.startSession(atSourceTime: kCMTimeZero)

Writing out frames

if CMSampleBufferDataIsReady(sampleBuffer) {
    if fileWriter.status == .writing {

        // Offset all frames against the first frame's timestamp
        if frameCount == 0 {
            firstTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
        }

        if adaptor.assetWriterInput.isReadyForMoreMediaData {
            let timeStamp: CMTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
            let frameTime: CMTime = CMTimeSubtract(timeStamp, firstTime)
            // Steps 1–3: convert, composite, convert back
            let image: UIImage = uiImageFromCMSampleBuffer(buffer: sampleBuffer)
            let pxBuffer: CVPixelBuffer = pixelBufferFromUIImage(image: synthesis(image: image))
            adaptor.append(pxBuffer, withPresentationTime: frameTime)
            frameCount += 1
        }
    }
}
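
This writing block is meant to run once per frame, i.e. inside the captureOutput callback shown earlier; a sketch of the wiring with a hypothetical helper:

func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
    appendFrame(sampleBuffer)  // hypothetical wrapper around the writing code above
}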

Cleanup

videoInput.markAsFinished()
fileWriter.endSession(atSourceTime: CMTimeMake(Int64(frameCount), 30))  // frameCount frames at an assumed 30 fps
fileWriter.finishWriting { /* the file at fileUrl is complete here */ }