Swift 3: How to add a watermark to a video? AVVideoCompositionCoreAnimationTool iOS 10 issue
This code used to work on iOS 9 to add a watermark and text to a video, but since iOS 10 it no longer works. An iOS 10 bug has been filed, but there has been no answer from Apple. I have not been able to implement any workaround to add a watermark and text to a video. With this code, the video sometimes exports successfully, but most of the time it does not.

How should I use AVVideoCompositionCoreAnimationTool so that it works as it did on iOS 9?

let videoComposition: AVMutableVideoComposition = AVMutableVideoComposition()

videoComposition.frameDuration = CMTimeMake(1, 60)
videoComposition.renderSize = CGSize(width: clipVideoTrack.naturalSize.height, height: clipVideoTrack.naturalSize.height)


let instruction: AVMutableVideoCompositionInstruction = AVMutableVideoCompositionInstruction()

instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30))

// transformer is applied to set the video in portrait otherwise it is rotated by 90 degrees
let transformer: AVMutableVideoCompositionLayerInstruction =
    AVMutableVideoCompositionLayerInstruction(assetTrack: clipVideoTrack)

let t1: CGAffineTransform = CGAffineTransform(translationX: clipVideoTrack.naturalSize.height, y: -(clipVideoTrack.naturalSize.width - clipVideoTrack.naturalSize.height)/2)

let t2: CGAffineTransform = t1.rotated(by: CGFloat(M_PI_2))

let finalTransform: CGAffineTransform = t2

transformer.setTransform(finalTransform, at: kCMTimeZero)

instruction.layerInstructions = [transformer]

videoComposition.instructions = [instruction]



let mixComposition = AVMutableComposition()
let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)


do {
    try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, asset.duration), of: clipVideoTrack, at: kCMTimeZero)
} catch {
    print(error)
}


//Add watermark


let myImage = UIImage(named: "logo")

let aLayer = CALayer()
aLayer.contents = myImage!.cgImage
aLayer.frame = CGRect(x: (clipVideoTrack.naturalSize.height*(self.view.bounds.width-45))/self.view.bounds.width, y: (clipVideoTrack.naturalSize.height*(self.view.bounds.width-40))/self.view.bounds.width, width: (clipVideoTrack.naturalSize.height*40)/self.view.bounds.width, height: (clipVideoTrack.naturalSize.height*40)/self.view.bounds.width)

let titleLayer = CATextLayer()
titleLayer.string = "text"
titleLayer.font = "Helvetica" as CFTypeRef // CATextLayer takes a font name (or CGFont/CTFont); the size comes from fontSize below
titleLayer.fontSize = clipVideoTrack.naturalSize.height/16
titleLayer.shadowOpacity = 0.5
titleLayer.alignmentMode = kCAAlignmentCenter
titleLayer.frame = CGRect(x: 0, y: 0, width: clipVideoTrack.naturalSize.height, height: clipVideoTrack.naturalSize.height/6)
titleLayer.display()


let videoSize = asset.tracks(withMediaType: AVMediaTypeVideo)[0].naturalSize
let parentLayer = CALayer()
let videoLayer = CALayer()
parentLayer.frame = CGRect(x: 0, y: 0, width: videoSize.height, height: videoSize.height)
videoLayer.frame = CGRect(x: 0, y: 0, width: videoSize.height, height: videoSize.height)

parentLayer.addSublayer(videoLayer)
parentLayer.addSublayer(aLayer)
parentLayer.addSublayer(titleLayer)


videoComposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)



do { try FileManager.default.removeItem(at: filePath) }
catch let error as NSError {
    NSLog("\(error), \(error.localizedDescription)")
}



let exportUrl: URL = filePath
self.videoUrl = filePath as NSURL


let exporter = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetMediumQuality)

exporter!.videoComposition = videoComposition
exporter!.outputFileType = AVFileTypeQuickTimeMovie
exporter!.outputURL = URL(fileURLWithPath: exportUrl.path)


exporter!.exportAsynchronously(completionHandler: {

    DispatchQueue.main.async {


        self.view.layer.addSublayer(self.avPlayerLayer)

        let item = AVPlayerItem(url: exportUrl)
        self.player.replaceCurrentItem(with: item)

        if (self.player.currentItem != nil) {
            print("Starting playback!")
            self.player.play()
        }

    }

})

PLEASE NOTE: If I remove AVVideoCompositionCoreAnimationTool, the video is always exported, but then it has no watermark or text. How can I make this work so that AVVideoCompositionCoreAnimationTool does not conflict with AVAssetExportSession?

Some have implemented a workaround with customVideoCompositorClass and the AVVideoCompositing protocol, but that seems like a heavy workaround compared to how it used to work.
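For reference, a minimal sketch of that workaround follows. It is illustrative only, not code from the question: the class name, the "logo" asset, and the single-track assumption are placeholders, and a production compositor would need real cancellation handling.

import UIKit
import AVFoundation
import CoreImage

// Sketch of the customVideoCompositorClass workaround (assumes one video track,
// a bundled "logo" image, and BGRA pixel buffers).
class WatermarkCompositor: NSObject, AVVideoCompositing {

    private let ciContext = CIContext()
    private let renderQueue = DispatchQueue(label: "watermark.compositor.render")

    // Request BGRA buffers so Core Image can read and write them directly.
    let sourcePixelBufferAttributes: [String : Any]? =
        [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
    let requiredPixelBufferAttributesForRenderContext: [String : Any] =
        [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]

    func renderContextChanged(_ newRenderContext: AVVideoCompositionRenderContext) {
        // No per-context state is cached in this sketch.
    }

    func startRequest(_ request: AVAsynchronousVideoCompositionRequest) {
        renderQueue.async {
            guard
                let trackID = request.sourceTrackIDs.first?.int32Value,
                let sourceBuffer = request.sourceFrame(byTrackID: trackID),
                let outputBuffer = request.renderContext.newPixelBuffer()
            else {
                request.finish(with: NSError(domain: "WatermarkCompositor", code: -1, userInfo: nil))
                return
            }

            var frame = CIImage(cvPixelBuffer: sourceBuffer)
            if let logo = UIImage(named: "logo").flatMap({ CIImage(image: $0) }) {
                // Composite the logo over the frame; apply a transform first to position it.
                frame = logo.composited(over: frame)
            }
            self.ciContext.render(frame, to: outputBuffer)
            request.finish(withComposedVideoFrame: outputBuffer)
        }
    }

    func cancelAllPendingVideoCompositionRequests() {
        // Sketch only: a real compositor should track and cancel in-flight requests.
    }
}

You would then set videoComposition.customVideoCompositorClass = WatermarkCompositor.self instead of using the animation tool.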

Antechamber answered 10/11, 2016 at 14:45 Comment(0)

This solution works for me; it's super easy and super fast:

func addWatermark(inputURL: URL, outputURL: URL, handler:@escaping (_ exportSession: AVAssetExportSession?)-> Void) {
    let mixComposition = AVMutableComposition()
    let asset = AVAsset(url: inputURL)
    let videoTrack = asset.tracks(withMediaType: AVMediaType.video)[0]
    let timerange = CMTimeRangeMake(kCMTimeZero, asset.duration)

    // Note: this composition track is built, but the export below works on the original asset, not on mixComposition.
    let compositionVideoTrack: AVMutableCompositionTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: CMPersistentTrackID(kCMPersistentTrackID_Invalid))!

    do {
        try compositionVideoTrack.insertTimeRange(timerange, of: videoTrack, at: kCMTimeZero)
        compositionVideoTrack.preferredTransform = videoTrack.preferredTransform
    } catch {
        print(error)
    }

    let watermarkFilter = CIFilter(name: "CISourceOverCompositing")!
    let watermarkImage = CIImage(image: UIImage(named: "waterMark")!)
    let videoComposition = AVVideoComposition(asset: asset) { (filteringRequest) in
        let source = filteringRequest.sourceImage.clampedToExtent()
        watermarkFilter.setValue(source, forKey: "inputBackgroundImage")
        let transform = CGAffineTransform(translationX: filteringRequest.sourceImage.extent.width - (watermarkImage?.extent.width)! - 2, y: 0)
        watermarkFilter.setValue(watermarkImage?.transformed(by: transform), forKey: "inputImage")
        filteringRequest.finish(with: watermarkFilter.outputImage!, context: nil)
    }

    guard let exportSession = AVAssetExportSession(asset: asset, presetName: AVAssetExportPreset640x480) else {
        handler(nil)

        return
    }

    exportSession.outputURL = outputURL
    exportSession.outputFileType = AVFileType.mp4
    exportSession.shouldOptimizeForNetworkUse = true
    exportSession.videoComposition = videoComposition
    exportSession.exportAsynchronously { () -> Void in
        handler(exportSession)
    }
}

Sorry that I hadn't posted how to call this function. It's been a while since I touched that code, so I hope this will be enough:

let outputURL = NSURL.fileURL(withPath: "TempPath")
let inputURL = NSURL.fileURL(withPath: "VideoWithWatermarkPath")
addWatermark(inputURL: inputURL, outputURL: outputURL, handler: { (exportSession) in
    guard let session = exportSession else {
        // Error 
        return
    }
    switch session.status {
    case .completed:
        guard NSData(contentsOf: outputURL) != nil else {
            // Error: nothing was written to outputURL
            return
        }
        // Now you can find the video with the watermark at the location outputURL
    default:
        // Error: the export failed or was cancelled; inspect session.error
        break
    }
})
Xanthic answered 10/12, 2017 at 18:32 Comment(6)
How do you call the function?Calvary
I added an example of how to call this function.Xanthic
Add an example, pleaseGraniteware
How do I add it at a particular position using CGRect?Graniteware
To add it at a particular position, change this line: let transform = CGAffineTransform(translationX: filteringRequest.sourceImage.extent.width - (watermarkImage?.extent.width)! - 2, y: 0). Though for me, changing the position didn't work unless I commented out let source = filteringRequest.sourceImage.clampedToExtent(). Note that this method won't suit you if you want your watermark to have some alpha (at least, I failed to find out how to do that).Yandell
Update: you can change the alpha by switching CISourceOverCompositing for other CIFilters: developer.apple.com/library/archive/documentation/…Yandell
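Following up on the alpha discussion in the two comments above, a hedged sketch: Core Image works with premultiplied alpha, so fading a watermark means scaling all four channels with CIColorMatrix before handing it to CISourceOverCompositing. The helper name and the 0.5 factor are illustrative, not part of the original answer.

import CoreImage

// Fade a premultiplied CIImage by scaling R, G, B and A together.
func faded(_ image: CIImage, by alpha: CGFloat) -> CIImage {
    let filter = CIFilter(name: "CIColorMatrix")!
    filter.setValue(image, forKey: kCIInputImageKey)
    filter.setValue(CIVector(x: alpha, y: 0, z: 0, w: 0), forKey: "inputRVector")
    filter.setValue(CIVector(x: 0, y: alpha, z: 0, w: 0), forKey: "inputGVector")
    filter.setValue(CIVector(x: 0, y: 0, z: alpha, w: 0), forKey: "inputBVector")
    filter.setValue(CIVector(x: 0, y: 0, z: 0, w: alpha), forKey: "inputAVector")
    return filter.outputImage ?? image
}

// Inside the filtering handler of the answer above, use the faded image instead:
// watermarkFilter.setValue(faded(watermarkImage!, by: 0.5).transformed(by: transform), forKey: "inputImage")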

I got this answer from here and it works for me. See if it works for you.

import UIKit
import AssetsLibrary
import AVFoundation

enum QUWatermarkPosition {
    case TopLeft
    case TopRight
    case BottomLeft
    case BottomRight
    case Default
}

class QUWatermarkManager: NSObject {

    func watermark(video videoAsset:AVAsset, watermarkText text : String, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((status : AVAssetExportSessionStatus!, session: AVAssetExportSession!, outputURL : NSURL!) -> ())?) {
        self.watermark(video: videoAsset, watermarkText: text, imageName: nil, saveToLibrary: flag, watermarkPosition: position) { (status, session, outputURL) -> () in
            completion!(status: status, session: session, outputURL: outputURL)
        }
    }

    func watermark(video videoAsset:AVAsset, imageName name : String, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((status : AVAssetExportSessionStatus!, session: AVAssetExportSession!, outputURL : NSURL!) -> ())?) {
        self.watermark(video: videoAsset, watermarkText: nil, imageName: name, saveToLibrary: flag, watermarkPosition: position) { (status, session, outputURL) -> () in
            completion!(status: status, session: session, outputURL: outputURL)
        }
    }

    private func watermark(video videoAsset:AVAsset, watermarkText text : String!, imageName name : String!, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((status : AVAssetExportSessionStatus!, session: AVAssetExportSession!, outputURL : NSURL!) -> ())?) {

        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), { () -> Void in
            var mixComposition = AVMutableComposition()

            var compositionVideoTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
            var clipVideoTrack = videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack
            compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), ofTrack: clipVideoTrack, atTime: kCMTimeZero, error: nil)
            compositionVideoTrack.preferredTransform = clipVideoTrack.preferredTransform

            let videoSize = clipVideoTrack.naturalSize

            var parentLayer = CALayer()
            var videoLayer = CALayer()
            parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height)
            videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height)
            parentLayer.addSublayer(videoLayer)

            if text != nil {
                var titleLayer = CATextLayer()
                titleLayer.backgroundColor = UIColor.redColor().CGColor
                titleLayer.string = text
                titleLayer.font = "Helvetica"
                titleLayer.fontSize = 15
                titleLayer.alignmentMode = kCAAlignmentCenter
                titleLayer.bounds = CGRectMake(0, 0, videoSize.width, videoSize.height)
                parentLayer.addSublayer(titleLayer)
            } else if name != nil {
                var watermarkImage = UIImage(named: name)
                var imageLayer = CALayer()
                imageLayer.contents = watermarkImage?.CGImage

                var xPosition : CGFloat = 0.0
                var yPosition : CGFloat = 0.0
                let imageSize : CGFloat = 57.0

                switch (position) {
                case .TopLeft:
                    xPosition = 0
                    yPosition = 0
                    break
                case .TopRight:
                    xPosition = videoSize.width - imageSize
                    yPosition = 0
                    break
                case .BottomLeft:
                    xPosition = 0
                    yPosition = videoSize.height - imageSize
                    break
                case .BottomRight, .Default:
                    xPosition = videoSize.width - imageSize
                    yPosition = videoSize.height - imageSize
                    break
                }


                imageLayer.frame = CGRectMake(xPosition, yPosition, imageSize, imageSize)
                imageLayer.opacity = 0.65
                parentLayer.addSublayer(imageLayer)
            }

            var videoComp = AVMutableVideoComposition()
            videoComp.renderSize = videoSize
            videoComp.frameDuration = CMTimeMake(1, 30)
            videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, inLayer: parentLayer)

            var instruction = AVMutableVideoCompositionInstruction()
            instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)
            var videoTrack = mixComposition.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack

            let layerInstruction = self.videoCompositionInstructionForTrack(compositionVideoTrack, asset: videoAsset)

            instruction.layerInstructions = [layerInstruction]
            videoComp.instructions = [instruction]

            let documentDirectory = NSSearchPathForDirectoriesInDomains(.DocumentDirectory, .UserDomainMask, true)[0] as! String
            var dateFormatter = NSDateFormatter()
            dateFormatter.dateStyle = .LongStyle
            dateFormatter.timeStyle = .ShortStyle
            let date = dateFormatter.stringFromDate(NSDate())
            let savePath = documentDirectory.stringByAppendingPathComponent("watermarkVideo-\(date).mov")
            let url = NSURL(fileURLWithPath: savePath)

            let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
            exporter.outputURL = url
            exporter.outputFileType = AVFileTypeQuickTimeMovie
            exporter.shouldOptimizeForNetworkUse = true
            exporter.videoComposition = videoComp

            exporter.exportAsynchronouslyWithCompletionHandler() {
                dispatch_async(dispatch_get_main_queue(), { () -> Void in
                    if exporter.status == AVAssetExportSessionStatus.Completed {
                        let outputURL = exporter.outputURL
                        if flag {
                            // Save to library
                            let library = ALAssetsLibrary()
                            if library.videoAtPathIsCompatibleWithSavedPhotosAlbum(outputURL) {
                                library.writeVideoAtPathToSavedPhotosAlbum(outputURL,
                                    completionBlock: { (assetURL:NSURL!, error:NSError!) -> Void in
                                        completion!(status: AVAssetExportSessionStatus.Completed, session: exporter, outputURL: outputURL)
                                })
                            }
                        } else {
                            completion!(status: AVAssetExportSessionStatus.Completed, session: exporter, outputURL: outputURL)
                        }

                    } else {
                        // Error
                        completion!(status: exporter.status, session: exporter, outputURL: nil)
                    }
                })
            }
        })
    }


    private func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
        var assetOrientation = UIImageOrientation.Up
        var isPortrait = false
        if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
            assetOrientation = .Right
            isPortrait = true
        } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
            assetOrientation = .Left
            isPortrait = true
        } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
            assetOrientation = .Up
        } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
            assetOrientation = .Down
        }
        return (assetOrientation, isPortrait)
    }

    private func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
        let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
        let assetTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0] as! AVAssetTrack

        var transform = assetTrack.preferredTransform
        let assetInfo = orientationFromTransform(transform)

        var scaleToFitRatio = UIScreen.mainScreen().bounds.width / assetTrack.naturalSize.width
        if assetInfo.isPortrait {
            scaleToFitRatio = UIScreen.mainScreen().bounds.width / assetTrack.naturalSize.height
            let scaleFactor = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio)
            instruction.setTransform(CGAffineTransformConcat(assetTrack.preferredTransform, scaleFactor),
                atTime: kCMTimeZero)
        } else {
            let scaleFactor = CGAffineTransformMakeScale(scaleToFitRatio, scaleToFitRatio)
            var concat = CGAffineTransformConcat(CGAffineTransformConcat(assetTrack.preferredTransform, scaleFactor), CGAffineTransformMakeTranslation(0, UIScreen.mainScreen().bounds.width / 2))
            if assetInfo.orientation == .Down {
                let fixUpsideDown = CGAffineTransformMakeRotation(CGFloat(M_PI))
                let windowBounds = UIScreen.mainScreen().bounds
                let yFix = assetTrack.naturalSize.height + windowBounds.height
                let centerFix = CGAffineTransformMakeTranslation(assetTrack.naturalSize.width, yFix)
                concat = CGAffineTransformConcat(CGAffineTransformConcat(fixUpsideDown, centerFix), scaleFactor)
            }
            instruction.setTransform(concat, atTime: kCMTimeZero)
        }

        return instruction
    }
}
Metaphysic answered 18/11, 2016 at 11:3 Comment(3)
Hi, is there a way to place the video inside another image? For example, a frame image, exporting the video inside that frame (a sketch follows these comments).Nipper
please check #51018011Ardra
Is this not slow? The export session is really slow when videoComp is added, at least for me.Putandtake
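Replying to the frame-image question above, a hedged sketch of one way to do it with the same animation-tool approach: draw the frame image above the video layer and inset the video so it shows through the frame's transparent middle. The "frameImage" asset, the 40-point inset, and the videoSize variable (taken from the answer's code) are placeholders.

let parentLayer = CALayer()
parentLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)

let videoLayer = CALayer()
videoLayer.frame = parentLayer.bounds.insetBy(dx: 40, dy: 40) // video sits inside the frame

let frameLayer = CALayer()
frameLayer.contents = UIImage(named: "frameImage")?.cgImage   // must be transparent in the middle
frameLayer.frame = parentLayer.bounds

parentLayer.addSublayer(videoLayer)  // video underneath
parentLayer.addSublayer(frameLayer)  // frame image drawn on top

// Then hand these to the animation tool exactly as in the answer above (modern Swift spelling):
// videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)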

The answer of @User511 in Swift 3:

import UIKit
import AssetsLibrary
import AVFoundation
import Photos

enum QUWatermarkPosition {
    case TopLeft
    case TopRight
    case BottomLeft
    case BottomRight
    case Default
}

class QUWatermarkManager: NSObject {

    func watermark(video videoAsset:AVAsset, watermarkText text : String, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((_ status : AVAssetExportSessionStatus?, _ session: AVAssetExportSession?, _ outputURL : URL?) -> ())?) {
        self.watermark(video: videoAsset, watermarkText: text, imageName: nil, saveToLibrary: flag, watermarkPosition: position) { (status, session, outputURL) -> () in
            completion!(status, session, outputURL)
        }
    }

    func watermark(video videoAsset:AVAsset, imageName name : String, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((_ status : AVAssetExportSessionStatus?, _ session: AVAssetExportSession?, _ outputURL : URL?) -> ())?) {
        self.watermark(video: videoAsset, watermarkText: nil, imageName: name, saveToLibrary: flag, watermarkPosition: position) { (status, session, outputURL) -> () in
            completion!(status, session, outputURL)
        }
    }

    private func watermark(video videoAsset:AVAsset, watermarkText text : String!, imageName name : String!, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((_ status : AVAssetExportSessionStatus?, _ session: AVAssetExportSession?, _ outputURL : URL?) -> ())?) {
        DispatchQueue.global(qos: DispatchQoS.QoSClass.default).async {

            let mixComposition = AVMutableComposition()

            let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
            let clipVideoTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
            do {
                try compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration), of: clipVideoTrack, at: kCMTimeZero)
            }
            catch {
                print(error.localizedDescription)
            }

            let videoSize = clipVideoTrack.naturalSize

            let parentLayer = CALayer()
            let videoLayer = CALayer()
            parentLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
            videoLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
            parentLayer.addSublayer(videoLayer)

            if text != nil {
                let titleLayer = CATextLayer()
                titleLayer.backgroundColor = UIColor.red.cgColor
                titleLayer.string = text
                titleLayer.font = "Helvetica" as CFTypeRef
                titleLayer.fontSize = 15
                titleLayer.alignmentMode = kCAAlignmentCenter
                titleLayer.bounds = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
                parentLayer.addSublayer(titleLayer)
            } else if name != nil {
                let watermarkImage = UIImage(named: name)
                let imageLayer = CALayer()
                imageLayer.contents = watermarkImage?.cgImage

                var xPosition : CGFloat = 0.0
                var yPosition : CGFloat = 0.0
                let imageSize : CGFloat = 57.0

                switch (position) {
                case .TopLeft:
                    xPosition = 0
                    yPosition = 0
                    break
                case .TopRight:
                    xPosition = videoSize.width - imageSize
                    yPosition = 0
                    break
                case .BottomLeft:
                    xPosition = 0
                    yPosition = videoSize.height - imageSize
                    break
                case .BottomRight, .Default:
                    xPosition = videoSize.width - imageSize
                    yPosition = videoSize.height - imageSize
                    break
                }


                imageLayer.frame = CGRect(x: xPosition, y: yPosition, width: imageSize, height: imageSize)
                imageLayer.opacity = 0.65
                parentLayer.addSublayer(imageLayer)
            }

            let videoComp = AVMutableVideoComposition()
            videoComp.renderSize = videoSize
            videoComp.frameDuration = CMTimeMake(1, 30)
            videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)

            let instruction = AVMutableVideoCompositionInstruction()
            instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)
            _ = mixComposition.tracks(withMediaType: AVMediaTypeVideo)[0] as AVAssetTrack

            let layerInstruction = self.videoCompositionInstructionForTrack(track: compositionVideoTrack, asset: videoAsset)

            instruction.layerInstructions = [layerInstruction]
            videoComp.instructions = [instruction]

            let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] 
            let dateFormatter = DateFormatter()
            dateFormatter.dateStyle = .long
            dateFormatter.timeStyle = .short
            let date = dateFormatter.string(from: Date())
            let url = URL(fileURLWithPath: documentDirectory).appendingPathComponent("watermarkVideo-\(date).mov")

            let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
            exporter?.outputURL = url
            exporter?.outputFileType = AVFileTypeQuickTimeMovie
            exporter?.shouldOptimizeForNetworkUse = true
            exporter?.videoComposition = videoComp

            exporter?.exportAsynchronously() {
                DispatchQueue.main.async {

                    if exporter?.status == AVAssetExportSessionStatus.completed {
                        let outputURL = exporter?.outputURL
                        if flag {
                            // Save to library

                            if UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(outputURL!.path) {
                                PHPhotoLibrary.shared().performChanges({
                                    PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: outputURL!)
                                }) { saved, error in
                                    if saved {
                                        completion!(AVAssetExportSessionStatus.completed, exporter, outputURL)
                                    }
                                }
                            }

                        } else {
                            completion!(AVAssetExportSessionStatus.completed, exporter, outputURL)
                        }

                    } else {
                        // Error
                        completion!(exporter?.status, exporter, nil)
                    }
                }
            }
        }
    }


    private func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImageOrientation, isPortrait: Bool) {
        var assetOrientation = UIImageOrientation.up
        var isPortrait = false
        if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
            assetOrientation = .right
            isPortrait = true
        } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
            assetOrientation = .left
            isPortrait = true
        } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
            assetOrientation = .up
        } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
            assetOrientation = .down
        }
        return (assetOrientation, isPortrait)
    }

    private func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
        let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
        let assetTrack = asset.tracks(withMediaType: AVMediaTypeVideo)[0]

        let transform = assetTrack.preferredTransform
        let assetInfo = orientationFromTransform(transform: transform)

        var scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.width
        if assetInfo.isPortrait {
            scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.height
            let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
            instruction.setTransform(assetTrack.preferredTransform.concatenating(scaleFactor),
                                     at: kCMTimeZero)
        } else {
            let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
            var concat = assetTrack.preferredTransform.concatenating(scaleFactor).concatenating(CGAffineTransform(translationX: 0, y: UIScreen.main.bounds.width / 2))
            if assetInfo.orientation == .down {
                let fixUpsideDown = CGAffineTransform(rotationAngle: CGFloat(Double.pi))
                let windowBounds = UIScreen.main.bounds
                let yFix = assetTrack.naturalSize.height + windowBounds.height
                let centerFix = CGAffineTransform(translationX: assetTrack.naturalSize.width, y: yFix)
                concat = fixUpsideDown.concatenating(centerFix).concatenating(scaleFactor)
            }
            instruction.setTransform(concat, at: kCMTimeZero)
        }

        return instruction
    }
}
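
A minimal sketch of how this Swift 3 class might be called; videoURL and the "logo" asset name are placeholders:

let asset = AVAsset(url: videoURL)
QUWatermarkManager().watermark(video: asset,
                               imageName: "logo",
                               saveToLibrary: false,
                               watermarkPosition: .BottomRight) { status, session, outputURL in
    if status == .completed, let outputURL = outputURL {
        // The watermarked video is now at outputURL.
        print("Watermarked video written to \(outputURL)")
    } else {
        print("Export failed: \(String(describing: session?.error))")
    }
}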
Symposiarch answered 22/7, 2017 at 4:17 Comment(2)
@Seyed I tried your code; the watermark is added to the video, but the video does not come out at its actual size.Ideography
Please check this #51018011Ardra

The answer of @User511 in Swift 5:

import UIKit
import AssetsLibrary
import AVFoundation
import Photos

enum QUWatermarkPosition {
    case TopLeft
    case TopRight
    case BottomLeft
    case BottomRight
    case Default
}

class QUWatermarkManager: NSObject {

    func watermark(video videoAsset:AVAsset, watermarkText text : String, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((_ status : AVAssetExportSession.Status?, _ session: AVAssetExportSession?, _ outputURL : URL?) -> ())?) {
        self.watermark(video: videoAsset, watermarkText: text, imageName: nil, saveToLibrary: flag, watermarkPosition: position) { (status, session, outputURL) -> () in
            completion!(status, session, outputURL)
        }
    }

    func watermark(video videoAsset:AVAsset, imageName name : String, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((_ status : AVAssetExportSession.Status?, _ session: AVAssetExportSession?, _ outputURL : URL?) -> ())?) {
        self.watermark(video: videoAsset, watermarkText: nil, imageName: name, saveToLibrary: flag, watermarkPosition: position) { (status, session, outputURL) -> () in
            completion!(status, session, outputURL)
        }
    }

    private func watermark(video videoAsset:AVAsset, watermarkText text : String!, imageName name : String!, saveToLibrary flag : Bool, watermarkPosition position : QUWatermarkPosition, completion : ((_ status : AVAssetExportSession.Status?, _ session: AVAssetExportSession?, _ outputURL : URL?) -> ())?) {
        DispatchQueue.global(qos: DispatchQoS.QoSClass.default).async {

            let mixComposition = AVMutableComposition()

            let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaType.video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
            let clipVideoTrack = videoAsset.tracks(withMediaType: AVMediaType.video)[0]
            do {
                try compositionVideoTrack?.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: videoAsset.duration), of: clipVideoTrack, at: CMTime.zero)
            }
            catch {
                print(error.localizedDescription)
            }

            let videoSize = clipVideoTrack.naturalSize

            let parentLayer = CALayer()
            let videoLayer = CALayer()
            parentLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
            videoLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
            parentLayer.addSublayer(videoLayer)

            if text != nil {
                let titleLayer = CATextLayer()
                titleLayer.backgroundColor = UIColor.red.cgColor
                titleLayer.string = text
                titleLayer.font = "Helvetica" as CFTypeRef
                titleLayer.fontSize = 10
                titleLayer.alignmentMode = CATextLayerAlignmentMode.right
                titleLayer.bounds = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
                parentLayer.addSublayer(titleLayer)
            } else if name != nil {
                let watermarkImage = UIImage(named: name)
                let imageLayer = CALayer()
                imageLayer.contents = watermarkImage?.cgImage

                var xPosition : CGFloat = 0.0
                var yPosition : CGFloat = 0.0
                let imageSize : CGFloat = 57.0

                switch (position) {
                case .TopLeft:
                    xPosition = 0
                    yPosition = 0
                    break
                case .TopRight:
                    xPosition = videoSize.width - imageSize
                    yPosition = 0
                    break
                case .BottomLeft:
                    xPosition = 0
                    yPosition = videoSize.height - imageSize
                    break
                case .BottomRight, .Default:
                    xPosition = videoSize.width - imageSize
                    yPosition = videoSize.height - imageSize
                    break
                }


                imageLayer.frame = CGRect(x: xPosition, y: yPosition, width: imageSize, height: imageSize)
                imageLayer.opacity = 0.65
                parentLayer.addSublayer(imageLayer)
            }

            let videoComp = AVMutableVideoComposition()
            videoComp.renderSize = videoSize
            videoComp.frameDuration = CMTimeMake(value: 1, timescale: 30)
            videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, in: parentLayer)

            let instruction = AVMutableVideoCompositionInstruction()
            instruction.timeRange = CMTimeRange.init(start: .zero, end: mixComposition.duration)
            _ = mixComposition.tracks(withMediaType: AVMediaType.video)[0] as AVAssetTrack

            let layerInstruction = self.videoCompositionInstructionForTrack(track: compositionVideoTrack!, asset: videoAsset)

            instruction.layerInstructions = [layerInstruction]
            videoComp.instructions = [instruction]

            let documentDirectory = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0]
            let dateFormatter = DateFormatter()
            dateFormatter.dateStyle = .long
            dateFormatter.timeStyle = .short
            let date = dateFormatter.string(from: Date())
            let url = URL(fileURLWithPath: documentDirectory).appendingPathComponent("watermarkVideo-\(date).mov")

            let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
            exporter?.outputURL = url
            exporter?.outputFileType = AVFileType.mov
            exporter?.shouldOptimizeForNetworkUse = true
            exporter?.videoComposition = videoComp

            exporter?.exportAsynchronously() {
                DispatchQueue.main.async {

                    if exporter?.status == AVAssetExportSession.Status.completed {
                        let outputURL = exporter?.outputURL
                        completion!(AVAssetExportSession.Status.completed, exporter, outputURL)
                    } else {
                        // Error
                        completion!(exporter?.status, exporter, nil)
                    }
                }
            }
        }
    }


    private func orientationFromTransform(transform: CGAffineTransform) -> (orientation: UIImage.Orientation, isPortrait: Bool) {
        var assetOrientation = UIImage.Orientation.up
        var isPortrait = false
        if transform.a == 0 && transform.b == 1.0 && transform.c == -1.0 && transform.d == 0 {
            assetOrientation = .right
            isPortrait = true
        } else if transform.a == 0 && transform.b == -1.0 && transform.c == 1.0 && transform.d == 0 {
            assetOrientation = .left
            isPortrait = true
        } else if transform.a == 1.0 && transform.b == 0 && transform.c == 0 && transform.d == 1.0 {
            assetOrientation = .up
        } else if transform.a == -1.0 && transform.b == 0 && transform.c == 0 && transform.d == -1.0 {
            assetOrientation = .down
        }
        return (assetOrientation, isPortrait)
    }

    private func videoCompositionInstructionForTrack(track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
        let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
        let assetTrack = asset.tracks(withMediaType: AVMediaType.video)[0]

        let transform = assetTrack.preferredTransform
        let assetInfo = orientationFromTransform(transform: transform)

        var scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.width
        if assetInfo.isPortrait {
            scaleToFitRatio = UIScreen.main.bounds.width / assetTrack.naturalSize.height
            let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
            instruction.setTransform(assetTrack.preferredTransform.concatenating(scaleFactor),
                                     at: CMTime.zero)
        } else {
            let scaleFactor = CGAffineTransform(scaleX: scaleToFitRatio, y: scaleToFitRatio)
            var concat = assetTrack.preferredTransform.concatenating(scaleFactor).concatenating(CGAffineTransform(translationX: 0, y: UIScreen.main.bounds.width / 2))
            if assetInfo.orientation == .down {
                let fixUpsideDown = CGAffineTransform(rotationAngle: CGFloat(Double.pi))
                let windowBounds = UIScreen.main.bounds
                let yFix = assetTrack.naturalSize.height + windowBounds.height
                let centerFix = CGAffineTransform(translationX: assetTrack.naturalSize.width, y: yFix)
                concat = fixUpsideDown.concatenating(centerFix).concatenating(scaleFactor)
            }
            instruction.setTransform(concat, at: CMTime.zero)
        }

        return instruction
    }
}
Outdate answered 16/4, 2022 at 4:42 Comment(1)
This code doesn't work for me. I don't know why, but it just adds a red view at the bottom of the video.Vulcanite
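The red rectangle reported in the comment above most likely comes from the text layer's background, which the code sets to red over the full video size. A guess at the fix, not confirmed by the answerer:

// Use a clear background (or drop the text layer entirely) if the red box isn't wanted:
titleLayer.backgroundColor = UIColor.clear.cgColor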
