0x00 写在前面
- 需求是把两个或多个小视频合并成一个视频
- 合并效果为首尾相接,不是视频重叠
- 当前代码为示例代码,只展示了合并两个视频文件;若要把多个视频文件合并为一个,需在此基础上扩展:循环遍历所有素材,依次把每段插入到前一段的累计结束时间处
0x01 代码展示(Obj-c&Swift)
//Obj-c
/// Concatenates two bundled clips ("1.mp4" then "2.mp4") end to end and
/// exports the result to <Caches>/comp.mp4 as an MPEG-4 file.
/// Export runs asynchronously; progress is reported via NSLog only.
- (void)combVideos {
    NSBundle *mainBundle = [NSBundle mainBundle];
    NSString *firstVideo = [mainBundle pathForResource:@"1" ofType:@"mp4"];
    NSString *secondVideo = [mainBundle pathForResource:@"2" ofType:@"mp4"];
    if (!firstVideo || !secondVideo) {
        NSLog(@"source videos missing from bundle");
        return;
    }
    // Precise timing is not needed for simple concatenation; NO keeps loading fast.
    NSDictionary *optDict = @{AVURLAssetPreferPreciseDurationAndTimingKey : @NO};
    AVAsset *firstAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:firstVideo] options:optDict];
    AVAsset *secondAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:secondVideo] options:optDict];

    AVMutableComposition *composition = [AVMutableComposition composition];
    // One mutable video track receives both clips back to back.
    AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    CMTimeRange firstTimeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration);
    CMTimeRange secondTimeRange = CMTimeRangeMake(kCMTimeZero, secondAsset.duration);

    // Guard against assets with no video track before indexing [0].
    AVAssetTrack *firstVideoTrack = [firstAsset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    AVAssetTrack *secondVideoTrack = [secondAsset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    if (!firstVideoTrack || !secondVideoTrack) {
        NSLog(@"missing video track in source asset");
        return;
    }
    // Insert the first clip at zero, then append the second at the first's
    // duration — clearer than the original trick of inserting both at zero.
    NSError *insertError = nil;
    if (![videoTrack insertTimeRange:firstTimeRange ofTrack:firstVideoTrack atTime:kCMTimeZero error:&insertError] ||
        ![videoTrack insertTimeRange:secondTimeRange ofTrack:secondVideoTrack atTime:firstAsset.duration error:&insertError]) {
        NSLog(@"video insert failed: %@", insertError);
        return;
    }

    // Merging only video drops the sound, so audio must be inserted into the
    // composition on its own track, aligned with the video timeline.
    // BUG FIX: the second segment must come from secondAsset's audio track,
    // not firstAsset's, or the first clip's audio plays twice.
    AVAssetTrack *firstAudioTrack = [firstAsset tracksWithMediaType:AVMediaTypeAudio].firstObject;
    AVAssetTrack *secondAudioTrack = [secondAsset tracksWithMediaType:AVMediaTypeAudio].firstObject;
    if (firstAudioTrack && secondAudioTrack) {
        AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
        if (![audioTrack insertTimeRange:firstTimeRange ofTrack:firstAudioTrack atTime:kCMTimeZero error:&insertError] ||
            ![audioTrack insertTimeRange:secondTimeRange ofTrack:secondAudioTrack atTime:firstAsset.duration error:&insertError]) {
            NSLog(@"audio insert failed: %@", insertError);
            return;
        }
    }

    NSString *cachePath = [NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES) lastObject];
    NSString *filePath = [cachePath stringByAppendingPathComponent:@"comp.mp4"];
    // AVAssetExportSession fails when the destination already exists; remove
    // any stale output from a previous run first.
    if ([[NSFileManager defaultManager] fileExistsAtPath:filePath]) {
        [[NSFileManager defaultManager] removeItemAtPath:filePath error:nil];
    }
    AVAssetExportSession *exporterSession = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
    exporterSession.outputFileType = AVFileTypeMPEG4;
    exporterSession.outputURL = [NSURL fileURLWithPath:filePath];
    exporterSession.shouldOptimizeForNetworkUse = YES; // friendlier for streaming over the network
    [exporterSession exportAsynchronouslyWithCompletionHandler:^{
        switch (exporterSession.status) {
            case AVAssetExportSessionStatusUnknown:
                NSLog(@"exporter Unknown");
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"exporter Cancelled");
                break;
            case AVAssetExportSessionStatusFailed:
                // Surface the underlying error instead of a bare status line.
                NSLog(@"exporter Failed: %@", exporterSession.error);
                break;
            case AVAssetExportSessionStatusWaiting:
                NSLog(@"exporter Waiting");
                break;
            case AVAssetExportSessionStatusExporting:
                NSLog(@"exporter Exporting");
                break;
            case AVAssetExportSessionStatusCompleted:
                NSLog(@"exporter Completed");
                break;
        }
    }];
}
Swift代码:
需要先导入 import AVFoundation
*Swift版本注释可参考Obj-c版本*
/// Concatenates two bundled clips ("1.mp4" then "2.mp4") end to end and
/// exports the result to <Caches>/comp_sw.mp4 as an MPEG-4 file.
/// See the Objective-C version above for a line-by-line walkthrough.
func combVideos() {
    // Guard instead of force-unwrap: a missing bundle resource should not crash.
    guard let firstVideo = NSBundle.mainBundle().pathForResource("1", ofType: "mp4"),
          let secondVideo = NSBundle.mainBundle().pathForResource("2", ofType: "mp4") else {
        print("source videos missing from bundle")
        return
    }
    let optDict = [AVURLAssetPreferPreciseDurationAndTimingKey : NSNumber(bool: false)]
    let firstAsset = AVURLAsset(URL: NSURL(fileURLWithPath: firstVideo), options: optDict)
    let secondAsset = AVURLAsset(URL: NSURL(fileURLWithPath: secondVideo), options: optDict)
    let composition = AVMutableComposition()
    do {
        let videoTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
        let firstTimeRange = CMTimeRange(start: kCMTimeZero, duration: firstAsset.duration)
        let secondTimeRange = CMTimeRange(start: kCMTimeZero, duration: secondAsset.duration)
        // Guard against assets with no video track before using .first!.
        guard let firstVideoTrack = firstAsset.tracksWithMediaType(AVMediaTypeVideo).first,
              let secondVideoTrack = secondAsset.tracksWithMediaType(AVMediaTypeVideo).first else {
            print("missing video track in source asset")
            return
        }
        // Insert the first clip at zero, then append the second after it.
        try videoTrack.insertTimeRange(firstTimeRange, ofTrack: firstVideoTrack, atTime: kCMTimeZero)
        try videoTrack.insertTimeRange(secondTimeRange, ofTrack: secondVideoTrack, atTime: firstAsset.duration)
        // Audio lives on its own composition track, aligned with the video.
        // Skip gracefully when a clip has no audio track.
        if let firstAudioTrack = firstAsset.tracksWithMediaType(AVMediaTypeAudio).first,
           let secondAudioTrack = secondAsset.tracksWithMediaType(AVMediaTypeAudio).first {
            let audioTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
            try audioTrack.insertTimeRange(firstTimeRange, ofTrack: firstAudioTrack, atTime: kCMTimeZero)
            try audioTrack.insertTimeRange(secondTimeRange, ofTrack: secondAudioTrack, atTime: firstAsset.duration)
        }
        guard let cache = NSSearchPathForDirectoriesInDomains(.CachesDirectory, .UserDomainMask, true).last else {
            print("caches directory unavailable")
            return
        }
        let filePath = cache + "/comp_sw.mp4"
        // Export fails when the destination already exists; remove stale output.
        if NSFileManager.defaultManager().fileExistsAtPath(filePath) {
            try NSFileManager.defaultManager().removeItemAtPath(filePath)
        }
        // The export-session initializer is failable — guard once instead of
        // force-unwrapping on every use.
        guard let exporterSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else {
            print("could not create export session")
            return
        }
        exporterSession.outputFileType = AVFileTypeMPEG4
        exporterSession.outputURL = NSURL(fileURLWithPath: filePath)
        exporterSession.shouldOptimizeForNetworkUse = true
        exporterSession.exportAsynchronouslyWithCompletionHandler({ () -> Void in
            switch exporterSession.status {
            case .Unknown:
                print("unknown")
            case .Cancelled:
                print("cancelled")
            case .Failed:
                // Surface the underlying error instead of a bare status line.
                print("failed: \(exporterSession.error)")
            case .Waiting:
                print("waiting")
            case .Exporting:
                print("exporting")
            case .Completed:
                print("completed")
            }
        })
    } catch {
        print("\(error)")
    }
}
0x10 效果 (时间变化)
欢迎大家交流指正