This Kodeco tutorial shows how to display an iOS live camera feed in SwiftUI using Combine. Here are the essential parts that do this (non-essential code lines stripped):
class FrameManager : NSObject, ObservableObject
{
    // Singleton referenced by ContentViewModel below (capture-session setup stripped).
    static let shared = FrameManager()

    @Published var current: CVPixelBuffer?

    let videoOutputQueue = DispatchQueue(label: "com.raywenderlich.VideoOutputQ",
                                         qos: .userInitiated,
                                         attributes: [],
                                         autoreleaseFrequency: .workItem)
}
extension FrameManager : AVCaptureVideoDataOutputSampleBufferDelegate
{
    func captureOutput(_ output: AVCaptureOutput,
                       didOutput sampleBuffer: CMSampleBuffer,
                       from connection: AVCaptureConnection)
    {
        if let buffer = sampleBuffer.imageBuffer
        {
            // Publish the latest pixel buffer on the main queue.
            DispatchQueue.main.async
            {
                self.current = buffer
            }
        }
    }
}
extension CGImage
{
    static func create(from cvPixelBuffer: CVPixelBuffer?) -> CGImage?
    {
        guard let pixelBuffer = cvPixelBuffer else
        {
            return nil
        }

        var image: CGImage?
        VTCreateCGImageFromCVPixelBuffer(pixelBuffer,
                                         options: nil,
                                         imageOut: &image)
        return image
    }
}
class ContentViewModel : ObservableObject
{
    @Published var frame: CGImage?

    private let context = CIContext()
    private let frameManager = FrameManager.shared

    init()
    {
        setupSubscriptions()
    }

    func setupSubscriptions()
    {
        frameManager.$current
            .receive(on: RunLoop.main)
            .compactMap
            { buffer in
                guard let image = CGImage.create(from: buffer) else
                {
                    return nil
                }
                // The full project applies Core Image filters here (stripped for brevity).
                var ciImage = CIImage(cgImage: image)
                return self.context.createCGImage(ciImage, from: ciImage.extent)
            }
            .assign(to: &$frame)
    }
}
struct ContentView : View
{
    @StateObject private var model = ContentViewModel()

    var body: some View
    {
        ZStack {
            FrameView(image: model.frame)
                .edgesIgnoringSafeArea(.all)
        }
    }
}
struct FrameView : View
{
    var image: CGImage?

    var body: some View
    {
        if let image = image
        {
            GeometryReader
            { geometry in
                Image(image, scale: 1.0, orientation: .up, label: Text("Video feed"))
                    .resizable()
                    .scaledToFill()
                    .frame(width: geometry.size.width,
                           height: geometry.size.height,
                           alignment: .center)
                    .clipped()
            }
        }
    }
}
Although this works, is converting each CVPixelBuffer to a SwiftUI Image and showing the result on screen via Combine bindings a good/efficient way to display a live camera feed?
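For reference, the baseline I'm comparing against is the plain AVCaptureVideoPreviewLayer approach, wrapped for SwiftUI roughly like the sketch below (my own code, not from the tutorial; it assumes an AVCaptureSession that is configured and started elsewhere, and it obviously couldn't show the Core Image filters the full project applies):

// Hypothetical alternative, not part of the tutorial: let AVFoundation render the
// frames itself via AVCaptureVideoPreviewLayer instead of publishing CGImages.
struct CameraPreview: UIViewRepresentable {
    let session: AVCaptureSession   // assumed to be configured and running elsewhere

    // A UIView whose backing layer is the preview layer, so it resizes automatically.
    final class PreviewView: UIView {
        override class var layerClass: AnyClass { AVCaptureVideoPreviewLayer.self }
        var previewLayer: AVCaptureVideoPreviewLayer { layer as! AVCaptureVideoPreviewLayer }
    }

    func makeUIView(context: Context) -> PreviewView {
        let view = PreviewView()
        view.previewLayer.session = session
        view.previewLayer.videoGravity = .resizeAspectFill
        return view
    }

    func updateUIView(_ uiView: PreviewView, context: Context) {}
}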
And what happens if the image processing becomes too slow to keep up with the AVCaptureVideoDataOutputSampleBufferDelegate feed: will out-of-date frames be skipped? (The full code applies a few Core Image filters that slow things down quite a lot.)
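The closest thing I've found for the second question is the alwaysDiscardsLateVideoFrames property on AVCaptureVideoDataOutput together with the captureOutput(_:didDrop:from:) delegate callback; if I read the documentation correctly, dropped frames could be surfaced with something like the sketch below (my own code, assuming the tutorial's FrameManager and a videoOutput created during the stripped session setup):

// Sketch only: assumes an AVCaptureVideoDataOutput named videoOutput created in the
// (stripped) session-setup code of FrameManager.
extension FrameManager {
    func configureVideoOutput(_ videoOutput: AVCaptureVideoDataOutput) {
        // When true (the default), frames that arrive while the delegate is still busy
        // are dropped instead of being queued up.
        videoOutput.alwaysDiscardsLateVideoFrames = true
        videoOutput.setSampleBufferDelegate(self, queue: videoOutputQueue)
    }

    // Optional companion to captureOutput(_:didOutput:from:), called when a frame is discarded.
    func captureOutput(_ output: AVCaptureOutput,
                       didDrop sampleBuffer: CMSampleBuffer,
                       from connection: AVCaptureConnection) {
        print("Dropped a frame")
    }
}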