2017/02/14: Added the "CMSampleBuffer - UIImage" section.
I keep forgetting these conversions, so I collected them in one place.
This is only a rough memo, so please let me know if there is a better way to do any of these or if something leaks memory.
#CMSampleBuffer - UIImage
//CMSampleBufferRef to UIImage (assumes the buffer is kCVPixelFormatType_32BGRA)
-(UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the base address of the pixel buffer
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    // Get information about the image
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    // RGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress,
                                                    width,
                                                    height,
                                                    8,
                                                    bytesPerRow,
                                                    colorSpace,
                                                    kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef cgImage = CGBitmapContextCreateImage(newContext);
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);
    UIImage *rtnImage = [UIImage imageWithCGImage:cgImage scale:1.0 orientation:UIImageOrientationUp];
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    CGImageRelease(cgImage);
    return rtnImage;
}
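The method above assumes the capture output delivers 32-bit BGRA pixel buffers (that is what kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst corresponds to). Below is a minimal sketch of how it might be hooked up to an AVCaptureVideoDataOutput; the self.session and self.imageView properties are placeholders I made up for illustration.

#import <AVFoundation/AVFoundation.h>

// Sketch: configure the video output so the delegate receives BGRA frames
-(void)setupVideoOutput{
    AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    // kCVPixelFormatType_32BGRA matches the layout imageFromSampleBuffer: expects
    videoOutput.videoSettings = @{(__bridge NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
    [videoOutput setSampleBufferDelegate:self queue:dispatch_queue_create("videoQueue", DISPATCH_QUEUE_SERIAL)];
    if ([self.session canAddOutput:videoOutput]) {
        [self.session addOutput:videoOutput];
    }
}

// AVCaptureVideoDataOutputSampleBufferDelegate
-(void)captureOutput:(AVCaptureOutput *)output
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
      fromConnection:(AVCaptureConnection *)connection{
    UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
    dispatch_async(dispatch_get_main_queue(), ^{
        self.imageView.image = image;   // display the converted frame on the main thread
    });
}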
#UIImage - CGImage
uiimage-cgimage.m
//UIImage to CGImage
CGImageRef cgimage = uiimage.CGImage;
//CGImage to UIImage
UIImage *uiimage = [UIImage imageWithCGImage:cgimage];
//Note: only call CGImageRelease() on a CGImage you own (one obtained from a Create/Copy function).
//The CGImage from uiimage.CGImage above is not owned by the caller, so releasing it here would over-release.
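When you do create a CGImage yourself, the Create/Copy rule applies and the release is needed. A small sketch (cropping is just an arbitrary example of a call that hands you ownership):

//Sketch: CGImageCreateWithImageInRect returns a CGImage the caller owns
CGRect cropRect = CGRectMake(0, 0, 100, 100);                        // arbitrary example rect
CGImageRef cropped = CGImageCreateWithImageInRect(uiimage.CGImage, cropRect);
UIImage *croppedImage = [UIImage imageWithCGImage:cropped];          // UIImage retains the CGImage
CGImageRelease(cropped);                                             // so releasing here is safe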
#CIImage - CGImage
uiimage-ciimage.m
//CIImage to CGImage
//Do not use [CIContext new] (updated 2016/06/08)
CGImageRef cgimage = [[CIContext contextWithOptions:nil] createCGImage:ciimage fromRect:ciimage.extent];
//CGImage to CIImage
CIImage *ciimage = [CIImage imageWithCGImage:cgimage];
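Two caveats worth noting: createCGImage:fromRect: follows the Create rule, so the returned CGImage should be released when you are done with it, and building a CIContext is relatively expensive, so it is usually better to reuse one than to create a new one per call. A rough sketch:

//Sketch: reuse one CIContext and release the CGImage it creates
static CIContext *sharedContext = nil;
if (sharedContext == nil) {
    sharedContext = [CIContext contextWithOptions:nil];
}
CGImageRef cgimage = [sharedContext createCGImage:ciimage fromRect:ciimage.extent];
UIImage *uiimage = [UIImage imageWithCGImage:cgimage];
CGImageRelease(cgimage);   // we own it because it came from a "create" method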
#vImage - CGImage
vimage-cgimage.m
/*
 * format
 *
 * typedef struct vImage_CGImageFormat
 * {
 *     uint32_t                bitsPerComponent;
 *     uint32_t                bitsPerPixel;
 *     CGColorSpaceRef         colorSpace;
 *     CGBitmapInfo            bitmapInfo;
 *     uint32_t                version;
 *     const CGFloat          *decode;
 *     CGColorRenderingIntent  renderingIntent;
 * } vImage_CGImageFormat;
 *
 * Examples:
 *   ARGB8888 -> {8, 32, NULL, alpha first, 0, NULL, kCGRenderingIntentDefault}   alpha first = { kCGImageAlphaFirst, kCGImageAlphaPremultipliedFirst, kCGImageAlphaNoneSkipFirst }
 *   RGBA8888 -> {8, 32, NULL, alpha last, 0, NULL, kCGRenderingIntentDefault}    alpha last  = { kCGImageAlphaLast, kCGImageAlphaPremultipliedLast, kCGImageAlphaNoneSkipLast }
 *   BGRA8888 -> {8, 32, NULL, alpha first | kCGBitmapByteOrder32Little, 0, NULL, kCGRenderingIntentDefault}
 *   RGB888   -> {8, 24, NULL, kCGImageAlphaNone | kCGBitmapByteOrderDefault, 0, NULL, kCGRenderingIntentDefault}
 *   RGB565   -> {5, 16, NULL, kCGImageAlphaNone | kCGBitmapByteOrder16Little, 0, NULL, kCGRenderingIntentDefault}
 *   ARGB1555 -> {5, 16, NULL, alpha first | kCGBitmapByteOrder16Little, 0, NULL, kCGRenderingIntentDefault}
 *   RGBA16F  -> {16, 64, NULL, alpha last | kCGBitmapFloatComponents | kCGBitmapByteOrder16Little, 0, NULL, kCGRenderingIntentDefault}
 *   CMYK8888 -> {8, 32, CGColorSpaceCreateDeviceCMYK(), kCGImageAlphaNone, 0, NULL, kCGRenderingIntentDefault}
 *   ARGBFFFF premultiplied     -> {32, 128, NULL, kCGImageAlphaPremultipliedFirst | kCGBitmapFloatComponents | kCGBitmapByteOrder32Little, 0, NULL, kCGRenderingIntentDefault}
 *   ARGBFFFF not-premultiplied -> {32, 128, NULL, kCGImageAlphaFirst | kCGBitmapFloatComponents | kCGBitmapByteOrder32Little, 0, NULL, kCGRenderingIntentDefault}
 *   ARGBFFFF, alpha = 1        -> {32, 128, NULL, kCGImageAlphaNoneSkipFirst | kCGBitmapFloatComponents | kCGBitmapByteOrder32Little, 0, NULL, kCGRenderingIntentDefault}
 *
 * Format for displaying the result in a UIImageView:
 * vImage_CGImageFormat format = {
 *     .bitsPerComponent = 8,
 *     .bitsPerPixel = 32,
 *     .bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaLast, //RGBA8888
 * }; // .colorSpace, .version, .renderingIntent and .decode are all initialized to 0 per C rules
 */
//vImage to CGImage
-(CGImageRef)vImageToCGImage:(vImage_Buffer)vimage withFormat:(vImage_CGImageFormat)format{
    vImage_Error error;
    CGImageRef cgimage = vImageCreateCGImageFromBuffer(&vimage, &format, NULL, NULL, kvImageNoFlags, &error);
    if (error != kvImageNoError) {
        NSLog(@"[vImage ERROR][No %d]", (int)error);
    }
    return cgimage;   // created image: the caller is responsible for CGImageRelease()
}
//CGImage to vImage
-(vImage_Buffer)CGImageToVImage:(CGImageRef)cgimage withFormat:(vImage_CGImageFormat)format{
    vImage_Buffer vimage;
    vImage_Error error = vImageBuffer_InitWithCGImage(&vimage, &format, NULL, cgimage, kvImageNoFlags);
    if (error != kvImageNoError) {
        NSLog(@"[vImage ERROR][No %d]", (int)error);
    }
    return vimage;   // vimage.data is malloc'd by vImage: the caller must free() it when done
}
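As a usage note for the two helpers above: vImageBuffer_InitWithCGImage allocates the buffer's pixel memory, so the caller has to free it, and the CGImage returned by vImageCreateCGImageFromBuffer follows the Create rule. A rough round-trip sketch using the RGBA8888 format shown earlier (it assumes the two methods live in the same class as the caller):

//Sketch: UIImage -> vImage_Buffer -> CGImage -> UIImage round trip
vImage_CGImageFormat format = {
    .bitsPerComponent = 8,
    .bitsPerPixel = 32,
    .bitmapInfo = kCGBitmapByteOrderDefault | kCGImageAlphaLast, //RGBA8888
};
vImage_Buffer buffer = [self CGImageToVImage:uiimage.CGImage withFormat:format];
/* ... run vImage operations on buffer here ... */
CGImageRef cgimage = [self vImageToCGImage:buffer withFormat:format];
UIImage *result = [UIImage imageWithCGImage:cgimage];
CGImageRelease(cgimage);   // created by vImageCreateCGImageFromBuffer, so we own it
free(buffer.data);         // allocated by vImageBuffer_InitWithCGImage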
#GPUImagePicture(GPUImage) - UIImage
gpuimagepicture-uiimage.m
//UIImage to GPUImagePicture
GPUImagePicture *gpuImage = [[GPUImagePicture alloc] initWithImage:uiimage];
//GPUImagePicture to UIImage (pass it through a GPUImageFilter)
GPUImageFilter *filter = /* some filter that subclasses GPUImageFilter */;
[gpuImage addTarget:filter];
[filter useNextFrameForImageCapture];
[gpuImage processImage];
UIImage *dstuiimage = [filter imageFromCurrentFramebuffer];
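As a concrete instance of the placeholder above, GPUImageGrayscaleFilter (or any other GPUImageFilter subclass shipped with GPUImage) can be dropped in directly:

//Sketch: run a UIImage through a grayscale filter and get a UIImage back
GPUImagePicture *gpuImage = [[GPUImagePicture alloc] initWithImage:uiimage];
GPUImageGrayscaleFilter *filter = [[GPUImageGrayscaleFilter alloc] init];
[gpuImage addTarget:filter];
[filter useNextFrameForImageCapture];
[gpuImage processImage];
UIImage *dstuiimage = [filter imageFromCurrentFramebuffer];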
#cv::Mat(OpenCV) - UIImage
cvmat-uiimage.m
#import "opencv2/imgcodecs/ios.h"
//UIImage to cv::Mat
cv::Mat cvmat;
UIImageToMat(uiimage, cvmat);
//cv::Mat to UIImage
UIImage *uiimage = MatToUIImage(cvmat);
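One gotcha: UIImageToMat hands back a 4-channel RGBA Mat, while many OpenCV routines assume BGR channel order, so a cv::cvtColor round trip is often needed. A rough sketch; the processing step in the middle is just a placeholder:

#include <opencv2/imgproc.hpp>   // for cv::cvtColor
//Sketch: convert RGBA -> BGR before OpenCV processing, then back for display
cv::Mat cvmat, bgrmat;
UIImageToMat(uiimage, cvmat);                        // 4-channel RGBA
cv::cvtColor(cvmat, bgrmat, cv::COLOR_RGBA2BGR);
/* ... process bgrmat with OpenCV here ... */
cv::cvtColor(bgrmat, cvmat, cv::COLOR_BGR2RGBA);
UIImage *dstuiimage = MatToUIImage(cvmat);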
#(Bonus) A template for processing images with OpenCV
UtilOpenCVHelper.h
#ifndef UtilOpenCVHelper_h
#define UtilOpenCVHelper_h
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
@interface UtilOpenCVHelper : NSObject
+(UIImage *)testOpenCVMethodWithUIImage:(UIImage *)image;
@end
#endif /* UtilOpenCVHelper_h */
UtilOpenCVHelper.mm
#import "UtilOpenCVHelper.h"
#import "opencv2/imgcodecs/ios.h"
@interface UtilOpenCVHelper()
@end
@implementation UtilOpenCVHelper
typedef BOOL (^openCVAlgorithm)(const cv::Mat&, cv::Mat&);
//Template: process a UIImage with OpenCV and return the result as a UIImage
+(UIImage *)template:(UIImage *)uiimage withAlgorithm:(openCVAlgorithm)algorithm{
    cv::Mat cvmat;
    UIImageToMat(uiimage, cvmat);
    UIImage *dstuiimage = uiimage;   // fall back to the input image on failure
    cv::Mat cvdstmat;
    if(algorithm(cvmat, cvdstmat)){
        dstuiimage = MatToUIImage(cvdstmat);
    }
    else{
        NSLog(@"[OPENCV ERROR]");
    }
    return dstuiimage;
}
//The part that actually does the OpenCV processing
+(UIImage *)testOpenCVMethodWithUIImage:(UIImage *)image{
    return [UtilOpenCVHelper template:image
                        withAlgorithm:^BOOL (const cv::Mat &cvmat, cv::Mat &cvdstmat){
        /*
         Do the OpenCV image processing here;
         cvdstmat is the output
         */
        return !cvdstmat.empty();
    }];
}
@end
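To make the template concrete, here is one way the algorithm block could be filled in. This is only a sketch: the method name cannyEdgeImageWithUIImage: is made up, it would go inside the @implementation above (before the @end) with a matching declaration in the header, and the Canny thresholds are arbitrary.

#include <opencv2/imgproc.hpp>   // needed for cv::cvtColor / cv::Canny (top of UtilOpenCVHelper.mm)

//Sketch: grayscale + Canny edge detection via the template
+(UIImage *)cannyEdgeImageWithUIImage:(UIImage *)image{
    return [UtilOpenCVHelper template:image
                        withAlgorithm:^BOOL (const cv::Mat &cvmat, cv::Mat &cvdstmat){
        cv::Mat gray;
        cv::cvtColor(cvmat, gray, cv::COLOR_RGBA2GRAY);   // UIImageToMat gives an RGBA Mat
        cv::Canny(gray, cvdstmat, 50.0, 150.0);           // thresholds are arbitrary examples
        return !cvdstmat.empty();                         // MatToUIImage handles the 8UC1 result
    }];
}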