By way of overview: this post is about turning a video into a timelapse with Swift and the AV Foundation Framework.
Extracting the frames
First, build an array of the times to extract. Where Objective-C has you write CMTimeCompare and CMTimeAdd, Swift lets you write < and +, which is convenient.
var times = [kCMTimeZero]
while let current = times.last, current < asset.duration {
    times.append(current + CMTimeMakeWithSeconds(interval, 100))
}
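To see what the Swift overlay buys you here (a self-contained illustration, not part of the tool):

import CoreMedia

let a = CMTimeMakeWithSeconds(1.5, 100)  // 150/100 s
let b = CMTimeMakeWithSeconds(0.5, 100)  //  50/100 s
print(a < b)                             // false — CMTimeCompare under the hood
print(CMTimeGetSeconds(a + b))           // 2.0 — CMTimeAdd under the hood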
Hand those times to an AVAssetImageGenerator and the image for each time comes back asynchronously.
AVAssetImageGenerator(asset: asset)
    .generateCGImagesAsynchronously(forTimes: times.map { NSValue(time: $0) }) { time, image, _, _, _ in
        ...
    }
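Note that by default the generator may hand back a frame near each requested time rather than exactly at it. If exact frames matter, keep a reference to the generator and tighten its tolerances (a minimal sketch; zero tolerances can make generation noticeably slower):

let generator = AVAssetImageGenerator(asset: asset)
generator.requestedTimeToleranceBefore = kCMTimeZero
generator.requestedTimeToleranceAfter = kCMTimeZero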
Writing out the video
First, create the AVAssetWriter, AVAssetWriterInput, and AVAssetWriterInputPixelBufferAdaptor you will need.
let writer: AVAssetWriter
do {
    writer = try AVAssetWriter(outputURL: outputUrl, fileType: AVFileTypeMPEG4)
} catch let error {
    fatalError(error.localizedDescription)
}
let input = AVAssetWriterInput(
    mediaType: AVMediaTypeVideo,
    outputSettings: [
        AVVideoCodecKey: AVVideoCodecH264,
        AVVideoWidthKey: asset.size.width,
        AVVideoHeightKey: asset.size.height,
    ])
let adaptor = AVAssetWriterInputPixelBufferAdaptor(
    assetWriterInput: input,
    sourcePixelBufferAttributes: [
        kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32ARGB),
        kCVPixelBufferWidthKey as String: asset.size.width,
        kCVPixelBufferHeightKey as String: asset.size.height,
    ])
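One cheap sanity check that the script below also skips: the writer can tell you up front whether it will accept the input at all, via canAdd(_:).

guard writer.canAdd(input) else {
    fatalError("Cannot add the input to the writer")
}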
Then append each image to display, together with its presentation time, to that AVAssetWriterInputPixelBufferAdaptor. The CGImage needs converting to a CVPixelBuffer along the way, so it is handy to graft a method like append(image:withPresentationTime:) onto the adaptor.
adaptor.append(image: image, withPresentationTime: CMTimeMake(Int64(times.index(of: time)!), frameRate))
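One thing this glosses over: an AVAssetWriterInput only accepts appends while its isReadyForMoreMediaData is true. A more defensive caller would wait for that flag before each append (a crude sketch; requestMediaDataWhenReady(on:using:) is the callback-based alternative):

while !input.isReadyForMoreMediaData {
    Thread.sleep(forTimeInterval: 0.01)  // spin until the writer can take another frame
}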
Then write it all out with the AVAssetWriter.
writer.add(input)
writer.startWriting()
writer.startSession(atSourceTime: kCMTimeZero)
...
input.markAsFinished()
writer.endSession(atSourceTime: CMTimeMake(Int64(times.count), frameRate))
writer.finishWriting {
    ...
}
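To make the timing concrete: frame i is stamped CMTimeMake(Int64(i), frameRate), i.e. i / frameRate seconds, so the output lasts times.count / frameRate seconds. For example, a 60-second input sampled every second at the default 15 fps (a self-contained illustration):

import CoreMedia

let sampleCount = 61                               // t = 0 s through t = 60 s, inclusive
let duration = CMTimeMake(Int64(sampleCount), 15)  // the value passed to endSession(atSourceTime:)
print(CMTimeGetSeconds(duration))                  // ≈ 4.07 s of timelapse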
Implementation
The whole thing is up as a Gist, so you can just git clone it, chmod a+x it, and go.
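For example, assuming the script is saved as timelapse.swift (the file and argument names here are illustrative; they follow the USAGE string in the script):

$ chmod a+x timelapse.swift
$ ./timelapse.swift input.mov output.mp4 1 15

The full script: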
#!/usr/bin/env xcrun swift
import AVFoundation
extension Array {
    // Safe lookup: nil instead of a crash for an out-of-range index.
    func nth(_ index: Int) -> Element? {
        return (self.indices ~= index) ? self[index] : nil
    }
}
extension CGImage {
    // The image's bounds, for drawing it at full size.
    var frame: CGRect {
        return CGRect(x: 0, y: 0, width: self.width, height: self.height)
    }
}
extension AVAsset {
    // Size of the first video track, or .zero if there is none.
    var size: CGSize {
        return self.tracks(withMediaType: AVMediaTypeVideo).nth(0)?.naturalSize ?? CGSize.zero
    }
}
extension AVAssetWriterInputPixelBufferAdaptor {
    // Converts a CGImage to a CVPixelBuffer from the adaptor's pool and appends it.
    func append(image: CGImage, withPresentationTime presentationTime: CMTime) -> Bool {
        guard let pixelBufferPool = self.pixelBufferPool else {
            fatalError("Failed to allocate the PixelBufferPool")
        }
        var pixelBufferOut: CVPixelBuffer? = nil
        CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pixelBufferPool, &pixelBufferOut)
        guard let pixelBuffer = pixelBufferOut else {
            fatalError("Failed to create the PixelBuffer")
        }
        CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
        let context = CGContext(
            data: CVPixelBufferGetBaseAddress(pixelBuffer),
            width: image.width,
            height: image.height,
            bitsPerComponent: image.bitsPerComponent,
            bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer), // the buffer's stride can differ from the image's bytesPerRow
            space: CGColorSpaceCreateDeviceRGB(),
            bitmapInfo: image.bitmapInfo.rawValue)
        context?.draw(image, in: image.frame)
        CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
        return self.append(pixelBuffer, withPresentationTime: presentationTime)
    }
}
// ₍₍ (ง╹◡╹)ว ⁾⁾
guard let inputPath = CommandLine.arguments.nth(1) else {
    print("USAGE: timelapse <input-path> [output-path] [sampling-interval] [frame-rate]")
    exit(0)
}
let outputPath = CommandLine.arguments.nth(2) ?? "output.mp4"
let interval = CommandLine.arguments.nth(3).flatMap { Double($0) } ?? 1
let frameRate = CommandLine.arguments.nth(4).flatMap { Int32($0) } ?? 15
let outputUrl = URL(fileURLWithPath: outputPath)
let semaphore = DispatchSemaphore(value: 0)
do {
    if FileManager.default.fileExists(atPath: outputPath) {
        try FileManager.default.removeItem(at: outputUrl)
    }
} catch let error {
    fatalError(error.localizedDescription)
}
let asset = AVAsset(url: URL(fileURLWithPath: inputPath))
let writer: AVAssetWriter
do {
    writer = try AVAssetWriter(outputURL: outputUrl, fileType: AVFileTypeMPEG4)
} catch let error {
    fatalError(error.localizedDescription)
}
let input = AVAssetWriterInput(
    mediaType: AVMediaTypeVideo,
    outputSettings: [
        AVVideoCodecKey: AVVideoCodecH264,
        AVVideoWidthKey: asset.size.width,
        AVVideoHeightKey: asset.size.height,
    ])
let adaptor = AVAssetWriterInputPixelBufferAdaptor(
    assetWriterInput: input,
    sourcePixelBufferAttributes: [
        kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32ARGB),
        kCVPixelBufferWidthKey as String: asset.size.width,
        kCVPixelBufferHeightKey as String: asset.size.height,
    ])
var times = [kCMTimeZero]
while let current = times.last, current < asset.duration {
    times.append(current + CMTimeMakeWithSeconds(interval, 100))
}
writer.add(input)
writer.startWriting()
writer.startSession(atSourceTime: kCMTimeZero)
AVAssetImageGenerator(asset: asset)
    .generateCGImagesAsynchronously(forTimes: times.map { NSValue(time: $0) }) { time, image, _, _, _ in
        if let image = image {
            let _ = adaptor.append(image: image, withPresentationTime: CMTimeMake(Int64(times.index(of: time)!), frameRate))
        }
        if times.last == time {
            input.markAsFinished()
            writer.endSession(atSourceTime: CMTimeMake(Int64(times.count), frameRate))
            writer.finishWriting {
                semaphore.signal()
            }
        }
    }
let _ = semaphore.wait(timeout: DispatchTime.distantFuture)
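If the output ever comes up empty, checking the writer's status in the completion handler will surface the reason; a hypothetical tweak to the handler above:

writer.finishWriting {
    if writer.status != .completed {
        print("Writing failed: \(String(describing: writer.error))")
    }
    semaphore.signal()
}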
Fun! ₍₍ (ง╹◡╹)ว ⁾⁾