I used this article as a reference.
Classes used (AVFoundation.framework)
- AVAssetWriter
- AVAssetWriterInput
- AVAssetWriterInputPixelBufferAdaptor
Setup
Set up the classes above to generate the movie.
Their names alone should give you a rough idea of what each class does.
AVAssetWriter
First, create the AVAssetWriter object that will do the writing.
// Specify an appropriate destination path for the output file
NSURL *url = [NSURL fileURLWithPath:path];
NSError *error = nil;
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:url fileType:AVFileTypeQuickTimeMovie error:&error];
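If you need a concrete value for path, one option (an assumption here, not part of the original setup) is to write into the temporary directory. Note that AVAssetWriter will fail to start if a file already exists at the destination URL, so remove any leftover file first:

// One possible way to build `path` (assumption: writing to the temp directory)
NSString *path = [NSTemporaryDirectory() stringByAppendingPathComponent:@"output.mov"];
// AVAssetWriter fails if a file already exists at the destination, so clear it
[[NSFileManager defaultManager] removeItemAtPath:path error:NULL];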
The values that can be passed to fileType: are defined in the framework headers as follows.
// File format UTIs
/*!
@constant AVFileTypeQuickTimeMovie
@abstract A UTI for the QuickTime movie file format.
@discussion
The value of this UTI is @"com.apple.quicktime-movie".
Files are identified with the .mov and .qt extensions.
*/
AVF_EXPORT NSString *const AVFileTypeQuickTimeMovie NS_AVAILABLE(10_7, 4_0);
/*!
@constant AVFileTypeMPEG4
@abstract A UTI for the MPEG-4 file format.
@discussion
The value of this UTI is @"public.mpeg-4".
Files are identified with the .mp4 extension.
*/
AVF_EXPORT NSString *const AVFileTypeMPEG4 NS_AVAILABLE(10_7, 4_0);
/*!
@constant AVFileTypeAppleM4V
@discussion
The value of this UTI is @"com.apple.m4v-video".
Files are identified with the .m4v extension.
*/
AVF_EXPORT NSString *const AVFileTypeAppleM4V NS_AVAILABLE(10_7, 4_0);
/*!
@constant AVFileTypeAppleM4A
@discussion
The value of this UTI is @"com.apple.m4a-audio".
Files are identified with the .m4a extension.
*/
AVF_EXPORT NSString *const AVFileTypeAppleM4A NS_AVAILABLE(10_7, 4_0);
#if TARGET_OS_IPHONE
/*!
@constant AVFileType3GPP
@abstract A UTI for the 3GPP file format.
@discussion
The value of this UTI is @"public.3gpp".
Files are identified with the .3gp, .3gpp, and .sdv extensions.
*/
AVF_EXPORT NSString *const AVFileType3GPP NS_AVAILABLE_IOS(4_0);
/*!
@constant AVFileType3GPP2
@abstract A UTI for the 3GPP file format.
@discussion
The value of this UTI is @"public.3gpp2".
Files are identified with the .3g2, .3gp2 extensions.
*/
AVF_EXPORT NSString *const AVFileType3GPP2 NS_AVAILABLE_IOS(7_0);
#endif // TARGET_OS_IPHONE
/*!
@constant AVFileTypeCoreAudioFormat
@abstract A UTI for the CoreAudio file format.
@discussion
The value of this UTI is @"com.apple.coreaudio-format".
Files are identified with the .caf extension.
*/
AVF_EXPORT NSString *const AVFileTypeCoreAudioFormat NS_AVAILABLE(10_7, 4_0);
/*!
@constant AVFileTypeWAVE
@abstract A UTI for the WAVE audio file format.
@discussion
The value of this UTI is @"com.microsoft.waveform-audio".
Files are identified with the .wav, .wave, and .bwf extensions.
*/
AVF_EXPORT NSString *const AVFileTypeWAVE NS_AVAILABLE(10_7, 4_0);
/*!
@constant AVFileTypeAIFF
@abstract A UTI for the AIFF audio file format.
@discussion
The value of this UTI is @"public.aiff-audio".
Files are identified with the .aif and .aiff extensions.
*/
AVF_EXPORT NSString *const AVFileTypeAIFF NS_AVAILABLE(10_7, 4_0);
/*!
@constant AVFileTypeAIFC
@abstract A UTI for the AIFC audio file format.
@discussion
The value of this UTI is @"public.aifc-audio".
Files are identified with the .aifc and .cdda extensions.
*/
AVF_EXPORT NSString *const AVFileTypeAIFC NS_AVAILABLE(10_7, 4_0);
/*!
@constant AVFileTypeAMR
@abstract A UTI for the adaptive multi-rate audio file format.
@discussion
The value of this UTI is @"org.3gpp.adaptive-multi-rate-audio".
Files are identified with the .amr extension.
*/
AVF_EXPORT NSString *const AVFileTypeAMR NS_AVAILABLE(10_7, 4_0);
/*!
@constant AVFileTypeMPEGLayer3
@abstract A UTI for the MPEG layer 3 audio file format.
@discussion
The value of this UTI is @"public.mp3".
Files are identified with the .mp3 extension.
*/
AVF_EXPORT NSString *const AVFileTypeMPEGLayer3 NS_AVAILABLE(10_9, 7_0);
/*!
@constant AVFileTypeSunAU
@abstract A UTI for the Sun/NeXT audio file format.
@discussion
The value of this UTI is @"public.au-audio".
Files are identified with the .au and .snd extensions.
*/
AVF_EXPORT NSString *const AVFileTypeSunAU NS_AVAILABLE(10_9, 7_0);
/*!
@constant AVFileTypeAC3
@abstract A UTI for the AC-3 audio file format.
@discussion
The value of this UTI is @"public.ac3-audio".
Files are identified with the .ac3 extension.
*/
AVF_EXPORT NSString *const AVFileTypeAC3 NS_AVAILABLE(10_9, 7_0);
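For example, if you want an .mp4 file rather than a .mov, pass AVFileTypeMPEG4 instead. The snippet below is a variation on the setup above, not from the original article; mp4Path is a hypothetical destination path:

// Variation: write an MPEG-4 file instead of a QuickTime movie
NSURL *mp4URL = [NSURL fileURLWithPath:mp4Path]; // mp4Path: hypothetical .mp4 destination
NSError *mp4Error = nil;
AVAssetWriter *mp4Writer = [[AVAssetWriter alloc] initWithURL:mp4URL fileType:AVFileTypeMPEG4 error:&mp4Error];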
AVAssetWriterInput
Next, create the AVAssetWriterInput.
// Output settings
NSDictionary *outputSettings =
@{
  AVVideoCodecKey : AVVideoCodecH264,
  AVVideoWidthKey : @(width),
  AVVideoHeightKey: @(height),
};
// Create the writer input
AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:outputSettings];
// Attach the writer input to the writer
[videoWriter addInput:writerInput];
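If you need finer control over encoding, outputSettings can also carry compression options. AVVideoCompressionPropertiesKey, AVVideoAverageBitRateKey, and AVVideoMaxKeyFrameIntervalKey are real AVFoundation keys, but the concrete values below are illustrative assumptions, not from the original article:

// Variation: output settings with explicit compression properties (illustrative values)
NSDictionary *compressedOutputSettings =
@{
  AVVideoCodecKey : AVVideoCodecH264,
  AVVideoWidthKey : @(width),
  AVVideoHeightKey: @(height),
  AVVideoCompressionPropertiesKey: @{
      AVVideoAverageBitRateKey: @(2000000),  // ~2 Mbps, an arbitrary example
      AVVideoMaxKeyFrameIntervalKey: @(24),  // one keyframe per second at 24 fps
  },
};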
AVAssetWriterInputPixelBufferAdaptor
Finally, create the AVAssetWriterInputPixelBufferAdaptor, which takes CVPixelBuffers and feeds them to the writer input.
// Set up the source pixel buffer attributes
NSDictionary *sourcePixelBufferAttributes =
@{
  (NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32ARGB),
  (NSString *)kCVPixelBufferWidthKey: @(width),
  (NSString *)kCVPixelBufferHeightKey: @(height),
};
// Create the writer input pixel buffer adaptor
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:sourcePixelBufferAttributes];
// Tell the input to expect media data to be supplied in real time
writerInput.expectsMediaDataInRealTime = YES;
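Since this sample generates frames offline rather than capturing them live, an alternative (not used in this article) is to set expectsMediaDataInRealTime to NO and let the input pull frames via requestMediaDataWhenReadyOnQueue:usingBlock:. A minimal sketch, assuming the same writerInput and adaptor, plus the images, fps, and durationForEachImage variables introduced in the next section:

// Sketch of the pull-style API (assumes `images`, `adaptor`, `fps`,
// and `durationForEachImage` as defined in the sample below)
dispatch_queue_t queue = dispatch_queue_create("mediaInputQueue", NULL);
__block int i = 0;
[writerInput requestMediaDataWhenReadyOnQueue:queue usingBlock:^{
    while (writerInput.isReadyForMoreMediaData && i < images.count) {
        CMTime t = CMTimeMake((int64_t)i * fps * durationForEachImage, fps);
        CVPixelBufferRef buf = [self pixelBufferFromCGImage:[images[i] CGImage]];
        [adaptor appendPixelBuffer:buf withPresentationTime:t];
        CVBufferRelease(buf);
        i++;
    }
    if (i >= images.count) {
        [writerInput markAsFinished];
    }
}];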
Starting movie generation
That completes the setup of the three classes.
Finally, call the methods that actually generate the movie.
// Make sure writing can start
if (![videoWriter startWriting]) {
    // Error!
}
// Start the writing session
[videoWriter startSessionAtSourceTime:kCMTimeZero];
// Declare the pixel buffer
CVPixelBufferRef buffer = NULL;
// Current frame count
int frameCount = 0;
// How long each image is displayed (in seconds)
int durationForEachImage = 1;
// FPS
int32_t fps = 24;
// Append every image to the writer
for (UIImage *image in images) {
    @autoreleasepool {
        if (!adaptor.assetWriterInput.readyForMoreMediaData) {
            break;
        }
        // Presentation time of this image (value / timescale seconds)
        CMTime frameTime = CMTimeMake((int64_t)frameCount * fps * durationForEachImage, fps);
        // Create a pixel buffer from the CGImage (see below)
        buffer = [self pixelBufferFromCGImage:image.CGImage];
        // Append the buffer
        if (![adaptor appendPixelBuffer:buffer withPresentationTime:frameTime]) {
            // Error!
        }
        if (buffer) {
            CVBufferRelease(buffer);
        }
        frameCount++;
    }
}
// Finish the movie
[writerInput markAsFinished];
// End the session after the last image's display time has elapsed
[videoWriter endSessionAtSourceTime:CMTimeMake((int64_t)frameCount * fps * durationForEachImage, fps)];
[videoWriter finishWritingWithCompletionHandler:^{
    // Finish!
}];
// No manual cleanup is needed here: the pixel buffer pool belongs to the adaptor
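Inside the completion handler it is worth checking how the write actually ended. Expanding the finishWritingWithCompletionHandler: call above, one might do the following (the status and error properties are standard AVAssetWriter API; the log messages are just placeholders):

[videoWriter finishWritingWithCompletionHandler:^{
    if (videoWriter.status == AVAssetWriterStatusCompleted) {
        NSLog(@"movie written to %@", url);
    } else {
        NSLog(@"writing failed: %@", videoWriter.error);
    }
}];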
CVPixelBufferRef
Create a CVPixelBufferRef from a CGImage.
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image
{
    NSDictionary *options = @{ (NSString *)kCVPixelBufferCGImageCompatibilityKey: @YES,
                               (NSString *)kCVPixelBufferCGBitmapContextCompatibilityKey: @YES, };
    CVPixelBufferRef pxbuffer = NULL;
    size_t width = CGImageGetWidth(image);
    size_t height = CGImageGetHeight(image);
    // Create an empty pixel buffer
    CVPixelBufferCreate(kCFAllocatorDefault,
                        width,
                        height,
                        kCVPixelFormatType_32ARGB,
                        (__bridge CFDictionaryRef)options,
                        &pxbuffer);
    // Lock the buffer and draw the CGImage into its backing memory
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    size_t bitsPerComponent = 8;
    // Use the buffer's actual bytes-per-row; rows may be padded beyond 4 * width
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pxbuffer);
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata,
                                                 width,
                                                 height,
                                                 bitsPerComponent,
                                                 bytesPerRow,
                                                 rgbColorSpace,
                                                 (CGBitmapInfo)kCGImageAlphaNoneSkipFirst);
    CGContextDrawImage(context, CGRectMake(0, 0, width, height), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    // The caller is responsible for releasing the returned buffer
    return pxbuffer;
}
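As a side note, instead of calling CVPixelBufferCreate for every frame, you can also allocate buffers from the adaptor's pool, which reuses memory across frames. CVPixelBufferPoolCreatePixelBuffer is the real Core Video call; how it would slot into this sample is only a sketch:

// Sketch: allocate from the adaptor's pool instead of CVPixelBufferCreate
// (assumes the adaptor has been set up and startWriting has already been
// called, since the pool is not available before writing starts)
CVPixelBufferRef pooledBuffer = NULL;
CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault,
                                   adaptor.pixelBufferPool,
                                   &pooledBuffer);
// ...lock, draw into it, and append as in pixelBufferFromCGImage...
CVBufferRelease(pooledBuffer);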