I'm playing with SwiftUI and trying to build a custom camera with it. I found tutorials on how to use the system's built-in camera with SwiftUI (using UIImagePickerController) and how to build a custom camera with a storyboard.
I've already built a struct CameraViewController: UIViewControllerRepresentable that initializes the camera and sets up the capture session (using AVFoundation).
First, I'm not sure how to implement func makeUIViewController for the CameraViewController struct, since I don't know which view controller class to use.
I also don't know how to integrate my CameraViewController into the app with SwiftUI. Can someone help?
Thanks!
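(For reference, the generic shape of the wrapper in question looks like the sketch below; MyCameraViewController is a placeholder for whatever UIViewController subclass owns the AVCaptureSession, not a real class.)
struct CameraViewController: UIViewControllerRepresentable {
func makeUIViewController(context: Context) -> MyCameraViewController {
// Return the UIViewController subclass that configures the capture session
MyCameraViewController()
}
func updateUIViewController(_ uiViewController: MyCameraViewController, context: Context) {}
}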
SwiftUI Custom Camera View?
SwiftUI - Custom Camera Implementation Example
1. CustomCameraPhotoView / Main Screen - Photo Preview
2. CustomCameraView / Camera Screen - Combines SwiftUI View (Record Button) with UIKit ViewController
3. CustomCameraRepresentable / Custom Camera ViewController SwiftUI Wrapper
4. CustomCameraController / Custom Camera View Controller
5. CaptureButtonView / SwiftUI View - Capture Button
Note: to avoid a crash when the camera starts, add the Privacy - Camera Usage Description entry
to the Info.plist file.
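In source form (right-click Info.plist, Open As, Source Code), the entry looks like this; the description string below is only an example and should explain your app's actual use of the camera:
<key>NSCameraUsageDescription</key>
<string>This app uses the camera to take photos.</string>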
import SwiftUI
import AVFoundation
struct CustomCameraPhotoView: View {
@State private var image: Image?
@State private var showingCustomCamera = false
@State private var inputImage: UIImage?
var body: some View {
NavigationView {
VStack {
ZStack {
Rectangle().fill(Color.secondary)
if image != nil {
image?
.resizable()
.aspectRatio(contentMode: .fill)
} else {
Text("Take Photo").foregroundColor(.white).font(.headline)
}
}
.onTapGesture {
self.showingCustomCamera = true
}
}
.sheet(isPresented: $showingCustomCamera, onDismiss: loadImage) {
CustomCameraView(image: self.$inputImage)
}
.edgesIgnoringSafeArea(.all)
}
}
func loadImage() {
guard let inputImage = inputImage else { return }
image = Image(uiImage: inputImage)
}
}
struct CustomCameraView: View {
@Binding var image: UIImage?
@State var didTapCapture: Bool = false
var body: some View {
ZStack(alignment: .bottom) {
CustomCameraRepresentable(image: self.$image, didTapCapture: $didTapCapture)
CaptureButtonView().onTapGesture {
self.didTapCapture = true
}
}
}
}
struct CustomCameraRepresentable: UIViewControllerRepresentable {
@Environment(\.presentationMode) var presentationMode
@Binding var image: UIImage?
@Binding var didTapCapture: Bool
func makeUIViewController(context: Context) -> CustomCameraController {
let controller = CustomCameraController()
controller.delegate = context.coordinator
return controller
}
func updateUIViewController(_ cameraViewController: CustomCameraController, context: Context) {
if self.didTapCapture {
cameraViewController.didTapRecord()
}
}
func makeCoordinator() -> Coordinator {
Coordinator(self)
}
class Coordinator: NSObject, UINavigationControllerDelegate, AVCapturePhotoCaptureDelegate {
let parent: CustomCameraRepresentable
init(_ parent: CustomCameraRepresentable) {
self.parent = parent
}
func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
parent.didTapCapture = false
if let imageData = photo.fileDataRepresentation() {
parent.image = UIImage(data: imageData)
}
parent.presentationMode.wrappedValue.dismiss()
}
}
}
class CustomCameraController: UIViewController {
var image: UIImage?
var captureSession = AVCaptureSession()
var backCamera: AVCaptureDevice?
var frontCamera: AVCaptureDevice?
var currentCamera: AVCaptureDevice?
var photoOutput: AVCapturePhotoOutput?
var cameraPreviewLayer: AVCaptureVideoPreviewLayer?
// Delegate that receives the captured photo (the SwiftUI Coordinator)
var delegate: AVCapturePhotoCaptureDelegate?
func didTapRecord() {
let settings = AVCapturePhotoSettings()
// Avoid force-unwrapping: only capture when a delegate has been set
guard let delegate = delegate else { return }
photoOutput?.capturePhoto(with: settings, delegate: delegate)
}
override func viewDidLoad() {
super.viewDidLoad()
setup()
}
func setup() {
setupCaptureSession()
setupDevice()
setupInputOutput()
setupPreviewLayer()
startRunningCaptureSession()
}
func setupCaptureSession() {
captureSession.sessionPreset = AVCaptureSession.Preset.photo
}
func setupDevice() {
let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(deviceTypes: [AVCaptureDevice.DeviceType.builtInWideAngleCamera],
mediaType: AVMediaType.video,
position: AVCaptureDevice.Position.unspecified)
for device in deviceDiscoverySession.devices {
switch device.position {
case AVCaptureDevice.Position.front:
self.frontCamera = device
case AVCaptureDevice.Position.back:
self.backCamera = device
default:
break
}
}
self.currentCamera = self.backCamera
}
func setupInputOutput() {
do {
// Guard against a missing camera (e.g. on the Simulator) instead of force-unwrapping
guard let currentCamera = currentCamera else { return }
let captureDeviceInput = try AVCaptureDeviceInput(device: currentCamera)
captureSession.addInput(captureDeviceInput)
let photoOutput = AVCapturePhotoOutput()
photoOutput.setPreparedPhotoSettingsArray([AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])], completionHandler: nil)
captureSession.addOutput(photoOutput)
self.photoOutput = photoOutput
} catch {
print(error)
}
}
func setupPreviewLayer() {
self.cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
self.cameraPreviewLayer?.videoGravity = AVLayerVideoGravity.resizeAspectFill
self.cameraPreviewLayer?.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
self.cameraPreviewLayer?.frame = self.view.frame
self.view.layer.insertSublayer(cameraPreviewLayer!, at: 0)
}
func startRunningCaptureSession() {
captureSession.startRunning()
}
}
struct CaptureButtonView: View {
@State private var animationAmount: CGFloat = 1
var body: some View {
Image(systemName: "video").font(.largeTitle)
.padding(30)
.background(Color.red)
.foregroundColor(.white)
.clipShape(Circle())
.overlay(
Circle()
.stroke(Color.red)
.scaleEffect(animationAmount)
.opacity(Double(2 - animationAmount))
.animation(Animation.easeOut(duration: 1)
.repeatForever(autoreverses: false))
)
.onAppear {
self.animationAmount = 2
}
}
}
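To address the integration part of the question: with the SwiftUI app lifecycle (iOS 14+), CustomCameraPhotoView can simply be made the app's root view. A minimal sketch (the CameraApp type name is illustrative):
import SwiftUI
@main
struct CameraApp: App {
var body: some Scene {
WindowGroup {
CustomCameraPhotoView()
}
}
}
On the older SceneDelegate lifecycle, the equivalent is window.rootViewController = UIHostingController(rootView: CustomCameraPhotoView()).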
I stumbled upon this post when I was looking for help on using AVFoundation in SwiftUI. This worked like a charm. –
Extraterritorial
Hello @ozmpai. Your code works great for capturing still images. I was trying to get frame by frame data in my project using AVCaptureVideoDataOutputSampleBufferDelegate. Can you suggest modifications to the above code you posted? Thanks. –
Extraterritorial
Stumbled across this through google and it works perfectly, thanks! However, is there any way to get the View to be smaller to fit in the corner of the screen? I can't seem to change the size of it. –
Membranous
@Extraterritorial have you got your answer? I'm also looking for the same. –
Lapp
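Regarding the frame-by-frame question in the comments above: a minimal sketch of how a video data output could sit alongside the photo output in CustomCameraController (the FrameHandler class and queue label are illustrative additions, not part of the original answer):
import AVFoundation
final class FrameHandler: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
// Called once per frame; the pixel buffer can be handed to Vision, Core Image, etc.
guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
_ = pixelBuffer
}
}
// In setupInputOutput(), after adding the photo output (the handler must be retained,
// e.g. as a property on the controller):
// let videoDataOutput = AVCaptureVideoDataOutput()
// videoDataOutput.setSampleBufferDelegate(frameHandler, queue: DispatchQueue(label: "camera.frames"))
// if captureSession.canAddOutput(videoDataOutput) { captureSession.addOutput(videoDataOutput) }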
Here's a version where you can pass any frame size for the camera preview layer.
If you have a back button, ozmpai's answer does not work out of the box. I have edited ozmpai's answer, so all kudos still go to him.
I don't like the shared singleton, but I haven't figured out a better approach for adapting to the SwiftUI view lifecycle yet, as SwiftUI is probably doing some black magic behind it.
Also, passing a bool to take a photo is probably not the greatest approach, so I have refactored it to use a closure.
import SwiftUI
struct MyCameraView: View {
@State private var image: UIImage?
var customCameraRepresentable = CustomCameraRepresentable(
cameraFrame: .zero,
imageCompletion: { _ in }
)
var body: some View {
CustomCameraView(
customCameraRepresentable: customCameraRepresentable,
imageCompletion: { newImage in
self.image = newImage
}
)
.onAppear {
customCameraRepresentable.startRunningCaptureSession()
}
.onDisappear {
customCameraRepresentable.stopRunningCaptureSession()
}
if let image = image {
Image(uiImage: image)
.resizable()
.aspectRatio(contentMode: .fit)
}
}
}
import SwiftUI
struct CustomCameraView: View {
var customCameraRepresentable: CustomCameraRepresentable
var imageCompletion: ((UIImage) -> Void)
var body: some View {
GeometryReader { geometry in
VStack {
let frame = CGRect(x: 0, y: 0, width: geometry.size.width, height: geometry.size.height - 100)
cameraView(frame: frame)
HStack {
CameraControlsView(captureButtonAction: { [weak customCameraRepresentable] in
customCameraRepresentable?.takePhoto()
})
}
}
}
}
private func cameraView(frame: CGRect) -> CustomCameraRepresentable {
customCameraRepresentable.cameraFrame = frame
customCameraRepresentable.imageCompletion = imageCompletion
return customCameraRepresentable
}
}
import SwiftUI
struct CameraControlsView: View {
var captureButtonAction: (() -> Void)
var body: some View {
CaptureButtonView()
.onTapGesture {
captureButtonAction()
}
}
}
import SwiftUI
struct CaptureButtonView: View {
@Environment(\.colorScheme) var colorScheme
@State private var animationAmount: CGFloat = 1
var body: some View {
Image(systemName: "camera")
.font(.largeTitle)
.padding(20)
.background(colorScheme == .dark ? Color.white : Color.black)
.foregroundColor(colorScheme == .dark ? Color.black : Color.white)
.clipShape(Circle())
.overlay(
Circle()
.stroke(colorScheme == .dark ? Color.white : Color.black)
.scaleEffect(animationAmount)
.opacity(Double(2 - animationAmount))
.animation(
Animation.easeOut(duration: 1)
.repeatForever(autoreverses: false)
)
)
.onAppear {
animationAmount = 2
}
}
}
import SwiftUI
import AVFoundation
final class CustomCameraController: UIViewController {
static let shared = CustomCameraController()
private var captureSession = AVCaptureSession()
private var backCamera: AVCaptureDevice?
private var frontCamera: AVCaptureDevice?
private var currentCamera: AVCaptureDevice?
private var photoOutput: AVCapturePhotoOutput?
private var cameraPreviewLayer: AVCaptureVideoPreviewLayer?
weak var captureDelegate: AVCapturePhotoCaptureDelegate?
override func viewDidLoad() {
super.viewDidLoad()
setup()
}
func configurePreviewLayer(with frame: CGRect) {
let cameraPreviewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
cameraPreviewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
cameraPreviewLayer.connection?.videoOrientation = AVCaptureVideoOrientation.portrait
cameraPreviewLayer.frame = frame
view.layer.insertSublayer(cameraPreviewLayer, at: 0)
}
func startRunningCaptureSession() {
captureSession.startRunning()
}
func stopRunningCaptureSession() {
captureSession.stopRunning()
}
func takePhoto() {
let settings = AVCapturePhotoSettings()
guard let delegate = captureDelegate else {
print("delegate nil")
return
}
photoOutput?.capturePhoto(with: settings, delegate: delegate)
}
// MARK: Private
private func setup() {
setupCaptureSession()
setupDevice()
setupInputOutput()
}
private func setupCaptureSession() {
captureSession.sessionPreset = AVCaptureSession.Preset.photo
}
private func setupDevice() {
let deviceDiscoverySession = AVCaptureDevice.DiscoverySession(
deviceTypes: [.builtInWideAngleCamera],
mediaType: .video,
position: .unspecified
)
for device in deviceDiscoverySession.devices {
switch device.position {
case AVCaptureDevice.Position.front:
frontCamera = device
case AVCaptureDevice.Position.back:
backCamera = device
default:
break
}
}
self.currentCamera = self.backCamera
}
private func setupInputOutput() {
do {
guard let currentCamera = currentCamera else { return }
let captureDeviceInput = try AVCaptureDeviceInput(device: currentCamera)
captureSession.addInput(captureDeviceInput)
photoOutput = AVCapturePhotoOutput()
photoOutput?.setPreparedPhotoSettingsArray(
[AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.hevc])],
completionHandler: nil
)
guard let photoOutput = photoOutput else { return }
captureSession.addOutput(photoOutput)
} catch {
print(error)
}
}
}
struct CustomCameraRepresentable: UIViewControllerRepresentable {
// @Environment(\.presentationMode) var presentationMode
init(cameraFrame: CGRect, imageCompletion: @escaping ((UIImage) -> Void)) {
self.cameraFrame = cameraFrame
self.imageCompletion = imageCompletion
}
@State var cameraFrame: CGRect
@State var imageCompletion: ((UIImage) -> Void)
func makeCoordinator() -> Coordinator {
Coordinator(self)
}
func makeUIViewController(context: Context) -> CustomCameraController {
CustomCameraController.shared.configurePreviewLayer(with: cameraFrame)
CustomCameraController.shared.captureDelegate = context.coordinator
return CustomCameraController.shared
}
func updateUIViewController(_ cameraViewController: CustomCameraController, context: Context) {}
func takePhoto() {
CustomCameraController.shared.takePhoto()
}
func startRunningCaptureSession() {
CustomCameraController.shared.startRunningCaptureSession()
}
func stopRunningCaptureSession() {
CustomCameraController.shared.stopRunningCaptureSession()
}
}
extension CustomCameraRepresentable {
final class Coordinator: NSObject, AVCapturePhotoCaptureDelegate {
private let parent: CustomCameraRepresentable
init(_ parent: CustomCameraRepresentable) {
self.parent = parent
}
func photoOutput(_ output: AVCapturePhotoOutput,
didFinishProcessingPhoto photo: AVCapturePhoto,
error: Error?) {
if let imageData = photo.fileDataRepresentation() {
guard let newImage = UIImage(data: imageData) else { return }
parent.imageCompletion(newImage)
}
// parent.presentationMode.wrappedValue.dismiss()
}
}
}
Working like a charm! –
Nevile
Crashing on Xcode 14, iOS 16. –
Underquote
Fixed it, CustomCameraRepresentable should be a struct and all the vars should be @State vars. See if that works. –
Titanic
You can record video with this code:
import SwiftUI
import AVFoundation
import Photos
struct CameraView: View {
@State private var overlaidValue: String = "Initial Value"
@StateObject private var cameraViewModel = CameraViewModel()
@State private var isRecording = false
var body: some View {
ZStack {
CameraPreview(cameraViewModel: cameraViewModel)
.edgesIgnoringSafeArea(.all)
VStack {
Text(overlaidValue)
.font(.title)
.foregroundColor(.white)
.padding()
Spacer()
HStack {
if isRecording {
Button(action: {
cameraViewModel.stopRecording()
isRecording = false
}) {
Text("Stop Recording")
.foregroundColor(.white)
.padding()
.background(Color.red)
.cornerRadius(8)
}
} else {
Button(action: {
cameraViewModel.startRecording { success in
if success {
isRecording = true
}
}
}) {
Text("Start Recording")
.foregroundColor(.white)
.padding()
.background(Color.green)
.cornerRadius(8)
}
}
}
.padding()
}
}
.onTapGesture {
overlaidValue = "Updated Value"
}
}
}
class CameraViewModel: NSObject, ObservableObject, AVCaptureFileOutputRecordingDelegate {
let captureSession = AVCaptureSession()
var videoOutput: AVCaptureMovieFileOutput?
@Published private(set) var isRecording = false
var videoDataOutput: AVCaptureVideoDataOutput?
var assetWriter: AVAssetWriter?
var assetWriterInput: AVAssetWriterInput?
override init() {
super.init()
requestCameraPermission()
}
func setupAssetWriter(url: URL) {
do {
assetWriter = try AVAssetWriter(outputURL: url, fileType: .mov)
let videoSettings: [String: Any] = [
AVVideoCodecKey: AVVideoCodecType.h264,
AVVideoWidthKey: 1920,
AVVideoHeightKey: 1080
]
assetWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
assetWriterInput?.expectsMediaDataInRealTime = true
if assetWriter!.canAdd(assetWriterInput!) {
assetWriter!.add(assetWriterInput!)
}
} catch {
print("Error setting up video writer: \(error)")
}
}
private func requestCameraPermission() {
AVCaptureDevice.requestAccess(for: .video) { granted in
if granted {
DispatchQueue.main.async {
self.setupCamera()
}
} else {
print("Camera access denied")
}
}
}
func saveVideoToPhotos(url: URL) {
guard FileManager.default.fileExists(atPath: url.path) else {
print("File does not exist at path: \(url.path), cannot save to Photos.")
return
}
PHPhotoLibrary.requestAuthorization { status in
if status == .authorized {
PHPhotoLibrary.shared().performChanges({
PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: url)
}) { saved, error in
if saved {
print("Video saved to Photos")
} else {
print("Could not save video to Photos: \(String(describing: error))")
}
}
} else {
print("Photos permission not granted")
}
}
}
func setupCamera() {
guard let videoDevice = AVCaptureDevice.default(for: .video) else {
print("No video device available")
return
}
do {
let videoInput = try AVCaptureDeviceInput(device: videoDevice)
if captureSession.canAddInput(videoInput) {
captureSession.addInput(videoInput)
} else {
print("Cannot add video input")
return
}
let videoOutput = AVCaptureMovieFileOutput()
if captureSession.canAddOutput(videoOutput) {
captureSession.addOutput(videoOutput)
self.videoOutput = videoOutput
} else {
print("Cannot add video output")
return
}
// Move the startRunning call to a background thread
DispatchQueue.global(qos: .userInitiated).async {
self.captureSession.startRunning()
DispatchQueue.main.async {
print("Camera setup and session started")
}
}
} catch {
print("Failed to set up camera: \(error)")
}
videoDataOutput = AVCaptureVideoDataOutput()
videoDataOutput?.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
videoDataOutput?.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))
if captureSession.canAddOutput(videoDataOutput!) {
captureSession.addOutput(videoDataOutput!)
}
}
func startRecording(completion: @escaping (Bool) -> Void) {
guard let videoOutput = videoOutput, !isRecording else {
print("Recording is already in progress or output is not available.")
completion(false)
return
}
let tempDirectory = FileManager.default.temporaryDirectory
let fileURL = tempDirectory.appendingPathComponent(UUID().uuidString).appendingPathExtension("mov")
videoOutput.startRecording(to: fileURL, recordingDelegate: self)
isRecording = true
completion(true)
}
func stopRecording() {
guard isRecording, let videoOutput = videoOutput else { return }
videoOutput.stopRecording()
isRecording = false
}
}
extension CameraViewModel: AVCaptureVideoDataOutputSampleBufferDelegate {
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
let ciImage = CIImage(cvImageBuffer: imageBuffer)
let context = CIContext()
// Perform your drawing here
let overlayImage = drawOverlay(on: ciImage)
// Render the modified frame to a CGImage; actually writing it into a video
// file requires an AVAssetWriter pipeline (see the sketch after this answer)
if let cgImage = context.createCGImage(overlayImage, from: overlayImage.extent) {
_ = cgImage // placeholder: hand the rendered frame to an asset writer here
}
}
func drawOverlay(on image: CIImage) -> CIImage {
let overlay = CIImage(color: .red).cropped(to: CGRect(x: 0, y: 0, width: 100, height: 100))
return image.composited(over: overlay)
}
func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
isRecording = false
if let error = error {
print("Recording failed with error: \(error.localizedDescription)")
return
}
// Check if the file exists
if FileManager.default.fileExists(atPath: outputFileURL.path) {
print("File exists at path: \(outputFileURL.path), ready to be saved or used.")
// Optionally, save to Photos
saveVideoToPhotos(url: outputFileURL)
} else {
print("File does not exist at path: \(outputFileURL.path)")
}
}
}
struct CameraPreview: UIViewRepresentable {
@ObservedObject var cameraViewModel: CameraViewModel
func makeUIView(context: Context) -> UIView {
let view = UIView(frame: UIScreen.main.bounds)
let previewLayer = AVCaptureVideoPreviewLayer(session: cameraViewModel.captureSession)
previewLayer.frame = view.bounds
previewLayer.videoGravity = .resizeAspectFill
view.layer.addSublayer(previewLayer)
return view
}
func updateUIView(_ uiView: UIView, context: Context) {
// Update UI View if needed
}
}
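A caveat on this answer: setupAssetWriter is never called, and captureOutput only renders the overlay without writing it anywhere, so the movie saved by AVCaptureMovieFileOutput contains the raw camera frames, not the overlay. Wiring the composited frames into the asset writer would look roughly like the sketch below (assumptions: writer and writerInput stand for the unwrapped assetWriter and assetWriterInput, adaptor is stored as a property, and firstFrameTime is the timestamp of the first received frame; this is illustrative, not tested code):
// One-time setup, after setupAssetWriter(url:) has added the input:
let adaptor = AVAssetWriterInputPixelBufferAdaptor(
assetWriterInput: writerInput,
sourcePixelBufferAttributes: [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
)
writer.startWriting()
writer.startSession(atSourceTime: firstFrameTime)
// Per frame, inside captureOutput after building overlayImage:
let time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
if writerInput.isReadyForMoreMediaData, let pool = adaptor.pixelBufferPool {
var outBuffer: CVPixelBuffer?
CVPixelBufferPoolCreatePixelBuffer(nil, pool, &outBuffer)
if let outBuffer = outBuffer {
context.render(overlayImage, to: outBuffer) // draw the composited CIImage into the buffer
adaptor.append(outBuffer, withPresentationTime: time)
}
}
Also note that saving to Photos via PHPhotoLibrary.requestAuthorization requires an NSPhotoLibraryUsageDescription entry in Info.plist, alongside the camera key mentioned earlier.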
ZStack, VStack, HStack etc. as opposed to IBOutlet –
Quasimodo