Displaying a camera preview with Metal in SwiftUI, using the bare minimum of code.
- Front camera only
- Portrait device orientation only
- No camera-permission check (a sketch of adding one follows this list)
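The permission check itself is small. A minimal sketch of one (my addition, not part of the article's code) would resolve authorization before the capture session is configured:

import AVFoundation

// Sketch: resolve camera authorization before touching AVCaptureSession.
func requestCameraAccess(_ completion: @escaping (Bool) -> Void) {
    switch AVCaptureDevice.authorizationStatus(for: .video) {
    case .authorized:
        completion(true)
    case .notDetermined:
        AVCaptureDevice.requestAccess(for: .video) { granted in
            DispatchQueue.main.async { completion(granted) }
        }
    default:
        // .denied / .restricted: the preview would simply stay black.
        completion(false)
    }
}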
Environment
- Swift: 5.4
- Xcode: 12.5 (12E262)
- macOS: Big Sur 11.3.1 (20E241)
Code
ContentView.swift
import SwiftUI
import AVFoundation
import MetalKit
struct ContentView: View {
    var body: some View {
        CameraView()
            .edgesIgnoringSafeArea(.all)
    }
}
struct CameraView: UIViewRepresentable {
    func makeUIView(context: Context) -> some UIView { BaseCameraView() }
    func updateUIView(_ uiView: UIViewType, context: Context) {}
}
class BaseCameraView: UIView, AVCaptureVideoDataOutputSampleBufferDelegate {
    let metalLayer = CAMetalLayer()
    let device = MTLCreateSystemDefaultDevice()!
    lazy var commandQueue = device.makeCommandQueue()
    let renderPassDescriptor = MTLRenderPassDescriptor()
    // Pipeline state built from the two functions in Shader.metal.
    lazy var renderPipelineState: MTLRenderPipelineState! = {
        guard let library = device.makeDefaultLibrary() else { return nil }
        let descriptor = MTLRenderPipelineDescriptor()
        descriptor.vertexFunction = library.makeFunction(name: "vertexShader")
        descriptor.fragmentFunction = library.makeFunction(name: "fragmentShader")
        descriptor.colorAttachments[0].pixelFormat = .bgra8Unorm
        return try? device.makeRenderPipelineState(descriptor: descriptor)
    }()
    let captureSession = AVCaptureSession()
    var lockFlag = false

    override func layoutSubviews() {
        super.layoutSubviews()
        // Evaluating the lazy var runs the setup exactly once, even though
        // layoutSubviews may be called repeatedly.
        _ = initMetalAndCaptureSession
        metalLayer.frame = layer.frame
    }

    lazy var initMetalAndCaptureSession: Void = {
        metalLayer.device = device
        metalLayer.isOpaque = false
        layer.addSublayer(metalLayer)
        renderPassDescriptor.colorAttachments[0].loadAction = .clear
        renderPassDescriptor.colorAttachments[0].storeAction = .store
        renderPassDescriptor.colorAttachments[0].clearColor = MTLClearColorMake(0, 0, 0, 0)
        guard let captureDevice = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera],
                                                                   mediaType: .video,
                                                                   position: .front).devices.first,
              let input = try? AVCaptureDeviceInput(device: captureDevice) else { return }
        let output = AVCaptureVideoDataOutput()
        output.videoSettings = [kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_32BGRA] as [String : Any]
        output.setSampleBufferDelegate(self, queue: DispatchQueue.main)
        captureSession.addInput(input)
        captureSession.addOutput(output)
        captureSession.startRunning()
        // Front camera in portrait: fix the orientation and mirror the image.
        captureSession.connections.forEach {
            $0.videoOrientation = .portrait
            $0.isVideoMirrored = true
        }
    }()
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        // Crude backpressure: skip frames that arrive while one is still being encoded.
        if lockFlag { return }
        lockFlag = true
        // defer also clears the flag on the early returns below; resetting it
        // only at the end of the method would freeze the preview after one failure.
        defer { lockFlag = false }
        guard let buffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        CVPixelBufferLockBaseAddress(buffer, .readOnly)
        defer { CVPixelBufferUnlockBaseAddress(buffer, .readOnly) }
        let width = CVPixelBufferGetWidth(buffer)
        let height = CVPixelBufferGetHeight(buffer)
        // Wrap the camera's pixel buffer in a Metal texture.
        var textureCache: CVMetalTextureCache!
        CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, device, nil, &textureCache)
        var texture: CVMetalTexture!
        _ = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, textureCache, buffer, nil, .bgra8Unorm, width, height, 0, &texture)
        guard let drawable = metalLayer.nextDrawable(),
              let commandBuffer = commandQueue?.makeCommandBuffer() else { return }
        renderPassDescriptor.colorAttachments[0].texture = drawable.texture
        guard let encoder = commandBuffer.makeRenderCommandEncoder(descriptor: renderPassDescriptor) else { return }
        encoder.setRenderPipelineState(renderPipelineState)
        // Shrink the quad vertically so the image keeps the buffer's aspect
        // ratio instead of being stretched to fill the layer.
        let aspect = Float(frame.width / frame.height) * Float(height) / Float(width)
        let vertexData: [[Float]] = [
            // 0: positions
            [
                -1, -aspect, 0, 1,
                -1,  aspect, 0, 1,
                 1, -aspect, 0, 1,
                 1,  aspect, 0, 1,
            ],
            // 1: texCoords
            [
                0, 1,
                0, 0,
                1, 1,
                1, 0,
            ],
        ]
        vertexData.enumerated().forEach { i, array in
            let size = array.count * MemoryLayout.size(ofValue: array[0])
            let buffer = device.makeBuffer(bytes: array, length: size)
            encoder.setVertexBuffer(buffer, offset: 0, index: i)
        }
        encoder.setFragmentTexture(CVMetalTextureGetTexture(texture), index: 0)
        encoder.drawPrimitives(type: .triangleStrip,
                               vertexStart: 0,
                               vertexCount: vertexData[0].count / 4)
        encoder.endEncoding()
        commandBuffer.present(drawable)
        commandBuffer.commit()
    }
}
struct ContentView_Previews: PreviewProvider {
    static var previews: some View {
        ContentView()
    }
}
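Two refinements worth noting, sketched below as my own assumptions rather than as part of the article's minimal code: captureOutput above creates a new CVMetalTextureCache on every frame, although a texture cache is designed to be created once and reused, and startRunning() is a blocking call that Apple's documentation recommends invoking off the main thread.

// Sketch only: properties to add to BaseCameraView.
lazy var textureCache: CVMetalTextureCache? = {
    var cache: CVMetalTextureCache?
    CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, device, nil, &cache)
    return cache
}()
let sessionQueue = DispatchQueue(label: "camera.session")  // arbitrary label for this sketch

// In captureOutput, the per-frame CVMetalTextureCacheCreate call then goes away:
// _ = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, textureCache!, buffer, nil, .bgra8Unorm, width, height, 0, &texture)

// In initMetalAndCaptureSession, start the session off the main thread:
// sessionQueue.async { [self] in captureSession.startRunning() }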
Shader.metal
#include <metal_stdlib>
using namespace metal;
struct RasterizerData
{
    float4 clipSpacePosition [[position]];
    float2 texCoord;
};

vertex RasterizerData
vertexShader(uint vertexID [[ vertex_id ]],
             const device float4 *position [[ buffer(0) ]],
             const device float2 *uv [[ buffer(1) ]])
{
    RasterizerData out;
    out.clipSpacePosition = position[vertexID];
    out.texCoord = uv[vertexID];
    return out;
}

fragment float4
fragmentShader(RasterizerData in [[ stage_in ]],
               texture2d<float, access::sample> texture [[ texture(0) ]])
{
    constexpr sampler sampler2d(coord::normalized, filter::linear, address::clamp_to_zero);
    return texture.sample(sampler2d, in.texCoord);
}
Info.plist
<key>NSCameraUsageDescription</key>
<string>This app uses the camera</string>
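Without this key, iOS terminates the app the moment it first accesses the camera; the string is what the system shows in the permission dialog.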
Repository