Recording videos with real-time filters in Swift

I am new to Swift and am trying to build a camera app that can apply real-time filters and save the video with the filters applied.

So far I can preview the camera feed in real time with the filters applied, but when I save the video it's all black.

import UIKit
import AVFoundation
import AssetsLibrary
import CoreMedia
import Photos

class ViewController: UIViewController , AVCaptureVideoDataOutputSampleBufferDelegate {

    var captureSession: AVCaptureSession!

    @IBOutlet weak var previewView: UIView!
    @IBOutlet weak var recordButtton: UIButton!
    @IBOutlet weak var imageView: UIImageView!

    var assetWriter: AVAssetWriter?
    var assetWriterPixelBufferInput: AVAssetWriterInputPixelBufferAdaptor?
    var isWriting = false
    var currentSampleTime: CMTime?
    var currentVideoDimensions: CMVideoDimensions?

    override func viewDidLoad() {
        super.viewDidLoad()
        FilterVendor.register()
        setupCaptureSession()
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

    func setupCaptureSession() {
        let captureSession = AVCaptureSession()
        captureSession.sessionPreset = AVCaptureSessionPresetPhoto

        guard let captureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo), let input = try? AVCaptureDeviceInput(device: captureDevice) else {
            print("Can't access the camera")
            return
        }

        if captureSession.canAddInput(input) {
            captureSession.addInput(input)
        }

        let videoOutput = AVCaptureVideoDataOutput()

        videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main)
        if captureSession.canAddOutput(videoOutput) {
            captureSession.addOutput(videoOutput)
        }

        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        if((previewLayer) != nil) {
            view.layer.addSublayer(previewLayer!)
        }

        captureSession.startRunning()
    }

    @IBAction func record(_ sender: Any) {
        if isWriting {
            print("stop record")
            self.isWriting = false
            assetWriterPixelBufferInput = nil
            assetWriter?.finishWriting(completionHandler: {[unowned self] () -> Void in
                self.saveMovieToCameraRoll()
            })
        } else {
            print("start record")
            createWriter()
            assetWriter?.startWriting()
            assetWriter?.startSession(atSourceTime: currentSampleTime!)
            isWriting = true
        }
    }

    func saveMovieToCameraRoll() {
        PHPhotoLibrary.shared().performChanges({
            PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: self.movieURL() as URL)
        }) { saved, error in
            if saved {
                print("saved")
            }
        }
    }

    func movieURL() -> NSURL {
        let tempDir = NSTemporaryDirectory()
        let url = NSURL(fileURLWithPath: tempDir).appendingPathComponent("tmpMov.mov")
        return url! as NSURL
    }

    func checkForAndDeleteFile() {
        let fm = FileManager.default
        let url = movieURL()
        let exist = fm.fileExists(atPath: url.path!)

        if exist {
            do {
                try fm.removeItem(at: url as URL)
            } catch let error as NSError {
                print(error.localizedDescription)
            }
        }
    }

    func createWriter() {
        self.checkForAndDeleteFile()

        do {
            assetWriter = try AVAssetWriter(outputURL: movieURL() as URL, fileType: AVFileTypeQuickTimeMovie)
        } catch let error as NSError {
            print(error.localizedDescription)
            return
        }

        let outputSettings = [
            AVVideoCodecKey : AVVideoCodecH264,
            AVVideoWidthKey : Int(currentVideoDimensions!.width),
            AVVideoHeightKey : Int(currentVideoDimensions!.height)
        ] as [String : Any]

        let assetWriterVideoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings as? [String : AnyObject])
        assetWriterVideoInput.expectsMediaDataInRealTime = true
        assetWriterVideoInput.transform = CGAffineTransform(rotationAngle: CGFloat(M_PI / 2.0))

        let sourcePixelBufferAttributesDictionary = [
            String(kCVPixelBufferPixelFormatTypeKey) : Int(kCVPixelFormatType_32BGRA),
            String(kCVPixelBufferWidthKey) : Int(currentVideoDimensions!.width),
            String(kCVPixelBufferHeightKey) : Int(currentVideoDimensions!.height),
            String(kCVPixelFormatOpenGLESCompatibility) : kCFBooleanTrue
        ] as [String : Any]

        assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: assetWriterVideoInput,
                                                                           sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)

        if assetWriter!.canAdd(assetWriterVideoInput) {
            assetWriter!.add(assetWriterVideoInput)
        } else {
            print("no way\(assetWriterVideoInput)")
        }
    }

    func captureOutput(_ captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection) {
        autoreleasepool {

            connection.videoOrientation = AVCaptureVideoOrientation.landscapeLeft;

            guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
            let cameraImage = CIImage(cvPixelBuffer: pixelBuffer)

            let filter = CIFilter(name: "Fİlter")!
            filter.setValue(cameraImage, forKey: kCIInputImageKey)


            let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)!
            self.currentVideoDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription)
            self.currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)

            if self.isWriting {
                if self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true {
                    var newPixelBuffer: CVPixelBuffer? = nil

                    CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput!.pixelBufferPool!, &newPixelBuffer)

                    let success = self.assetWriterPixelBufferInput?.append(newPixelBuffer!, withPresentationTime: self.currentSampleTime!)

                    if success == false {
                        print("Pixel Buffer failed")
                    }
                }
            }

            DispatchQueue.main.async {

                if let outputValue = filter.value(forKey: kCIOutputImageKey) as? CIImage {
                    let filteredImage = UIImage(ciImage: outputValue)
                    self.imageView.image = filteredImage
                }
            }
        }
    }
}
Receivership asked 18/7, 2017 at 16:18 (2 comments)
Have you tried saving a video without a filter? - Broccoli
@Broccoli No difference :( - Receivership
Answer (score: 7)

I've added some comments to the critical part below:

func captureOutput(_ captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection) {
    autoreleasepool {

        connection.videoOrientation = AVCaptureVideoOrientation.landscapeLeft;

        // COMMENT: This line makes sense - this is your pixel buffer from the camera.
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }

        // COMMENT: OK, so you turn pixelBuffer into a CIImage...
        let cameraImage = CIImage(cvPixelBuffer: pixelBuffer)

        // COMMENT: And now you've created a new CIImage with a filter instruction...
        let filter = CIFilter(name: "Fİlter")!
        filter.setValue(cameraImage, forKey: kCIInputImageKey)


        let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)!
        self.currentVideoDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription)
        self.currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)

        if self.isWriting {
            if self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true {
                // COMMENT: Here's where it gets weird. You've declared a new, empty pixelBuffer... but you already have one (pixelBuffer) that contains the image you want to write...
                var newPixelBuffer: CVPixelBuffer? = nil

                // COMMENT: And you grabbed memory from the pool.
                CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput!.pixelBufferPool!, &newPixelBuffer)

                // COMMENT: And now you append that empty pixelBuffer <-- this is what's causing the black frames.
                let success = self.assetWriterPixelBufferInput?.append(newPixelBuffer!, withPresentationTime: self.currentSampleTime!)

                if success == false {
                    print("Pixel Buffer failed")
                }
            }
        }

        // COMMENT: And now you're sending the filtered image back to the screen.
        DispatchQueue.main.async {

            if let outputValue = filter.value(forKey: kCIOutputImageKey) as? CIImage {
                let filteredImage = UIImage(ciImage: outputValue)
                self.imageView.image = filteredImage
            }
        }
    }
}

It looks to me like you're grabbing the camera frame, creating a filtered copy of it for the preview, then making a NEW pixel buffer, which is empty, and writing that out.

If you append the pixelBuffer you grabbed instead of the new one you're creating, you should successfully write the frames (without the filter).
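
As a sanity check, that's a one-line change inside your isWriting block - a sketch using the names from your code (this writes a playable movie, but without the filter):

// Append the raw camera frame you already have, instead of the empty pooled buffer.
let success = self.assetWriterPixelBufferInput?.append(pixelBuffer, withPresentationTime: self.currentSampleTime!)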

What you need to successfully write out the filtered video is to render the filtered CIImage into a new CVPixelBuffer - that solution already exists here on Stack Overflow, I know because I needed that step myself! A sketch of the idea follows.
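
Here's a minimal sketch of that step, adapted to your code. It assumes you add a reusable CIContext as a property on the view controller (creating one per frame is expensive), and it renders the filter's output into the pooled buffer before appending:

// Assumption: declared once, e.g. as a property on the view controller.
let ciContext = CIContext()

// Inside captureOutput, replacing your isWriting block:
if self.isWriting {
    if self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true,
        let pool = self.assetWriterPixelBufferInput?.pixelBufferPool,
        let filteredImage = filter.outputImage {

        var newPixelBuffer: CVPixelBuffer? = nil
        CVPixelBufferPoolCreatePixelBuffer(nil, pool, &newPixelBuffer)

        if let buffer = newPixelBuffer {
            // Render the filtered CIImage into the pooled buffer so it is no longer empty.
            self.ciContext.render(filteredImage, to: buffer)

            let success = self.assetWriterPixelBufferInput?.append(buffer, withPresentationTime: self.currentSampleTime!)
            if success == false {
                print("Pixel Buffer failed")
            }
        }
    }
}

Rendering into the pooled buffer keeps the pixel format and dimensions consistent with the attributes you passed to the adaptor when you created it.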

Antiar answered 18/7, 2017 at 17:26 (2 comments)
Not Swift-based, but here's the relevant Objective-C code from a few years back: #22819837 - Antiar
Please check this: #51018011 - Unknit
