I am trying to export an AVMutableComposition using AVAssetExportSession:
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mutableComposition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL = url;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.videoComposition = mainCompositionInst;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^{
    switch (exporter.status)
    {
        case AVAssetExportSessionStatusCompleted:
            NSLog(@"Video merge successful");
            break;
        case AVAssetExportSessionStatusFailed:
            NSLog(@"Failed: %@", exporter.error.description);
            break;
        case AVAssetExportSessionStatusCancelled:
            NSLog(@"Cancelled: %@", exporter.error);
            break;
        case AVAssetExportSessionStatusExporting:
            NSLog(@"Exporting!");
            break;
        case AVAssetExportSessionStatusWaiting:
            NSLog(@"Waiting");
            break;
        default:
            break;
    }
}];
But exporting even a 1-minute video takes around 30 seconds, which is far too long considering that the iPad's built-in Camera app takes less than 2 seconds.
Also, if I remove the videoComposition from the exporter, the time drops to 7 seconds, which is still slow given that the video is only 1 minute long.
So, how can I reduce the export time to a minimum?
Also, does AVAssetExportSession generally take this long, or is it just my case?
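From what I understand, AVAssetExportPresetHighestQuality decodes and re-encodes every frame, while AVAssetExportPresetPassthrough just copies the compressed source samples, so it should be much faster whenever the videoComposition isn't actually needed and the inserted segments can be stitched without re-encoding. A minimal sketch of what I mean (same url and composition as above; I haven't verified this matches the camera app's speed):

AVAssetExportSession *passthroughExporter =
    [[AVAssetExportSession alloc] initWithAsset:mutableComposition
                                     presetName:AVAssetExportPresetPassthrough];
// Passthrough copies source samples without re-encoding, but it cannot
// apply a videoComposition (transforms, overlays, cropping, etc.).
passthroughExporter.outputURL = url;
passthroughExporter.outputFileType = AVFileTypeQuickTimeMovie;
[passthroughExporter exportAsynchronouslyWithCompletionHandler:^{
    NSLog(@"Passthrough export status: %ld", (long)passthroughExporter.status);
}];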
Update: Merge Code:
AVMutableComposition *mutableComposition = [AVMutableComposition composition];
AVMutableCompositionTrack *videoCompositionTrack =
    [mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
AVMutableCompositionTrack *audioCompositionTrack =
    [mutableComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                    preferredTrackID:kCMPersistentTrackID_Invalid];

AVMutableVideoCompositionLayerInstruction *videoTrackLayerInstruction =
    [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoCompositionTrack];
NSMutableArray *instructions = [NSMutableArray new];
CGSize size = CGSizeZero;
CMTime time = kCMTimeZero;

for (AVURLAsset *asset in assets)
{
    AVAssetTrack *assetTrack = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    AVAssetTrack *audioAssetTrack = [asset tracksWithMediaType:AVMediaTypeAudio].firstObject;

    NSError *error = nil;
    if (![videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, assetTrack.timeRange.duration)
                                        ofTrack:assetTrack
                                         atTime:time
                                          error:&error]) {
        NSLog(@"asset url :: %@", assetTrack.asset);
        NSLog(@"Error1 - %@", error.debugDescription);
    }
    [videoTrackLayerInstruction setTransform:assetTrack.preferredTransform atTime:time];

    // Guard against assets that have no audio track, and reset the error
    // so a failure from the video insert isn't reported twice.
    if (audioAssetTrack) {
        error = nil;
        if (![audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, audioAssetTrack.timeRange.duration)
                                            ofTrack:audioAssetTrack
                                             atTime:time
                                              error:&error]) {
            NSLog(@"Error2 - %@", error.debugDescription);
        }
    }

    time = CMTimeAdd(time, assetTrack.timeRange.duration);
    if (CGSizeEqualToSize(size, CGSizeZero)) {
        size = assetTrack.naturalSize;
    }
}

AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, time);
mainInstruction.layerInstructions = @[videoTrackLayerInstruction];

AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
mainCompositionInst.instructions = @[mainInstruction];
mainCompositionInst.frameDuration = CMTimeMake(1, 30); // 30 fps
mainCompositionInst.renderSize = size;
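Since the videoComposition alone accounts for roughly 23 of the 30 seconds, I am also wondering whether I can drop mainCompositionInst entirely in the common case where all clips share the same orientation, by carrying the rotation on the composition track itself instead of in a layer instruction. A rough, untested sketch of the idea (plainExporter is just a placeholder name; it assumes every asset has the first clip's preferredTransform):

// If all clips share one orientation, set the transform once on the
// composition track and skip the layer instruction / videoComposition.
AVAssetTrack *firstVideoTrack =
    [assets.firstObject tracksWithMediaType:AVMediaTypeVideo].firstObject;
videoCompositionTrack.preferredTransform = firstVideoTrack.preferredTransform;

// Export without a videoComposition, so no per-frame compositing pass runs.
AVAssetExportSession *plainExporter =
    [[AVAssetExportSession alloc] initWithAsset:mutableComposition
                                     presetName:AVAssetExportPresetHighestQuality];
plainExporter.outputURL = url;
plainExporter.outputFileType = AVFileTypeQuickTimeMovie;
plainExporter.shouldOptimizeForNetworkUse = YES;
[plainExporter exportAsynchronouslyWithCompletionHandler:^{
    NSLog(@"Export status: %ld", (long)plainExporter.status);
}];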