Contents
1. Merging multiple images into a video
2. Merging audio and video
   Audio/video merging falls into two cases:
   1. Audio + a video with no audio track
   2. Audio + a video that already has an audio track
3. Merging multiple videos into one video

1. Merging multiple images into a video
Recommended library: https://github.com/HarrisonJackson/HJImagesToVideo
Warning: I ran into a problem with this approach (the last image always flashes by too quickly).
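One possible workaround for the flashing last image (my own guess, not something the library documents) is to pad the array with an extra copy of the final image before handing it over, so the last picture is held for one more frame interval:

// Hypothetical workaround (not verified against the library): repeat the last
// image once so the final picture stays on screen for one extra frame interval.
NSMutableArray *paddedImages = [imageArray mutableCopy]; // imageArray: the images you are about to pass in
if (paddedImages.count > 0) {
    [paddedImages addObject:[paddedImages lastObject]];
}
// ...then pass paddedImages instead of imageArray to HJImagesToVideo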
The library's method does not let you pass in the path where the video should be saved, so I added a wrapper that does:
/**
 Generate a video from images

 @param images        array of images
 @param path          path where the video is saved
 @param size          video size
 @param fps           number of images shown per second
 @param animate       whether to animate transitions
 @param callbackBlock completion callback
 */
+ (void)saveVideoToPhotosWithImages:(NSArray *)images
                          videoPath:(NSString *)path
                           withSize:(CGSize)size
                            withFPS:(int)fps
                 animateTransitions:(BOOL)animate
                  withCallbackBlock:(SuccessBlock)callbackBlock
{
    // Remove any existing file at the output path first
    [[NSFileManager defaultManager] removeItemAtPath:path error:NULL];
    [HJImagesToVideo videoFromImages:images
                              toPath:path
                            withSize:size
                             withFPS:fps
                  animateTransitions:animate
                   withCallbackBlock:^(BOOL success) {
        if (success) {
            // Save the generated video to the Photos album
            UISaveVideoAtPathToSavedPhotosAlbum(path, self, nil, nil);
        }
        if (callbackBlock) {
            callbackBlock(success);
        }
    }];
}
Usage:

NSString *path = [NSHomeDirectory() stringByAppendingPathComponent:
                  [NSString stringWithFormat:@"Documents/movie.mp4"]];
NSArray *testImageArray = @[[UIImage imageNamed:@"12.jpeg"],
                            [UIImage imageNamed:@"1.jpeg"],
                            [UIImage imageNamed:@"3.jpeg"],
                            [UIImage imageNamed:@"4.jpeg"],
                            [UIImage imageNamed:@"5.jpeg"],
                            [UIImage imageNamed:@"6.jpeg"],
                            [UIImage imageNamed:@"7.jpeg"],
                            [UIImage imageNamed:@"8.jpeg"],
                            [UIImage imageNamed:@"9.jpeg"],
                            [UIImage imageNamed:@"10.jpeg"],
                            [UIImage imageNamed:@"11.jpeg"],
                            [UIImage imageNamed:@"2.jpeg"]];
CGSize currentSize = CGSizeMake(320, 480);
NSMutableArray *imageArray = [NSMutableArray array];
for (int i = 0; i < testImageArray.count; i++) {
    UIImage *imageNew = testImageArray[i];
    // Scale and crop each image so they all end up with exactly the same size
    imageNew = [imageNew imageByScalingAndCroppingForSize:currentSize];
    [imageArray addObject:imageNew];
}
// Remove any file with the same name before generating the video
[[NSFileManager defaultManager] removeItemAtPath:path error:NULL];
NSLog(@"path:%@", path);
// Start composing
[HJImagesToVideo saveVideoToPhotosWithImages:imageArray
                                   videoPath:path
                                    withSize:currentSize
                                     withFPS:1
                          animateTransitions:YES
                           withCallbackBlock:^(BOOL success) {
    dispatch_async(dispatch_get_main_queue(), ^{
        if (success) {
            NSLog(@"success");
        }
    });
}];
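The usage above relies on a UIImage category method, imageByScalingAndCroppingForSize:, which is not shown. A minimal sketch of what that helper could look like (an aspect-fill scale followed by a center crop; the actual implementation in your project may differ):

@implementation UIImage (ScalingAndCropping)

// Scales the image so it completely fills targetSize, then crops the overflow,
// which guarantees every frame handed to the video writer has identical dimensions.
- (UIImage *)imageByScalingAndCroppingForSize:(CGSize)targetSize {
    CGSize imageSize = self.size;
    CGFloat widthFactor  = targetSize.width  / imageSize.width;
    CGFloat heightFactor = targetSize.height / imageSize.height;
    CGFloat scaleFactor  = MAX(widthFactor, heightFactor); // aspect fill
    CGFloat scaledWidth  = imageSize.width  * scaleFactor;
    CGFloat scaledHeight = imageSize.height * scaleFactor;
    // Center the scaled image so the crop removes equal amounts from both edges
    CGPoint origin = CGPointMake((targetSize.width  - scaledWidth)  / 2.0,
                                 (targetSize.height - scaledHeight) / 2.0);

    UIGraphicsBeginImageContextWithOptions(targetSize, NO, 0);
    [self drawInRect:CGRectMake(origin.x, origin.y, scaledWidth, scaledHeight)];
    UIImage *result = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return result;
}

@end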
An alternative approach, driving AVAssetWriter directly:
// Create the writer; path is where the video will be saved
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie error:nil];
// NSParameterAssert crashes right here if the condition fails; used purely as a safety check
NSParameterAssert(videoWriter);
// Settings for the generated video; size is the video's dimensions
NSDictionary *videoSetting = [NSDictionary dictionaryWithObjectsAndKeys:
                              AVVideoCodecH264, AVVideoCodecKey,
                              [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                              [NSNumber numberWithInt:size.height], AVVideoHeightKey, nil];
AVAssetWriterInput *writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSetting];
// Pixel format; must match the buffers produced by pixelBufferFromCGImage:size:
NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
                                                       [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
NSParameterAssert(writerInput);
NSParameterAssert([videoWriter canAddInput:writerInput]);
if ([videoWriter canAddInput:writerInput]) {
    NSLog(@"can add writer input");
} else {
    NSLog(@"cannot add writer input");
}
[videoWriter addInput:writerInput];
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];
// Compose the images into a single video file
dispatch_queue_t queue = dispatch_queue_create("mediaInputQueue", NULL);
int __block frame = 0;
[writerInput requestMediaDataWhenReadyOnQueue:queue usingBlock:^{
    while ([writerInput isReadyForMoreMediaData]) {
        // The factor 5 controls how long each image stays on screen; tune as needed
        if (++frame >= [array count] * 5) {
            [writerInput markAsFinished];
            [videoWriter finishWritingWithCompletionHandler:^{
                NSLog(@"finished writing");
                callbackBlock(YES);
            }];
            break;
        }
        int idx = frame / 5; // must use the same factor as above
        NSLog(@"idx == %d", idx);
        CVPixelBufferRef buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:idx] CGImage] size:size];
        if (buffer) {
            // Presentation time of this frame
            if ([adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, 5)]) {
                NSLog(@"append succeeded");
            } else {
                NSLog(@"append failed");
            }
            // Release the buffer in both cases to avoid leaking memory
            CVPixelBufferRelease(buffer);
        }
    }
}];
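The snippet above calls a helper, pixelBufferFromCGImage:size:, that is not listed. A minimal sketch of such a helper, assuming the kCVPixelFormatType_32ARGB format declared for the adaptor above:

// Sketch: render a CGImage into a newly created CVPixelBuffer of the given size.
+ (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size {
    NSDictionary *options = @{(id)kCVPixelBufferCGImageCompatibilityKey : @YES,
                              (id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES};
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                          size.width,
                                          size.height,
                                          kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef)options,
                                          &pxbuffer);
    if (status != kCVReturnSuccess || pxbuffer == NULL) {
        return NULL;
    }
    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata,
                                                 size.width,
                                                 size.height,
                                                 8,
                                                 CVPixelBufferGetBytesPerRow(pxbuffer),
                                                 rgbColorSpace,
                                                 (CGBitmapInfo)kCGImageAlphaNoneSkipFirst);
    // Draw the image into the buffer, scaling it to the requested size
    CGContextDrawImage(context, CGRectMake(0, 0, size.width, size.height), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer; // caller is responsible for releasing the buffer
}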
2. Merging audio and video
2.1 Audio + a video with no audio track
/**
 Add background music to a video that has no audio track

 @param musicPath    path of the background music
 @param videoPath    path of the video
 @param savePath     path where the merged video is saved
 @param successBlock called when merging succeeds
 */
+ (void)mergeVideoWithMusic:(NSString *)musicPath noBgMusicVideo:(NSString *)videoPath saveVideoPath:(NSString *)savePath success:(mergeVideoSuccessBlock)successBlock {
    // Audio source
    NSURL *audioInputUrl = [NSURL fileURLWithPath:musicPath];
    // Video source
    NSURL *videoInputUrl = [NSURL fileURLWithPath:videoPath];
    // Output URL for the merged file
    NSURL *outputFileUrl = [NSURL fileURLWithPath:savePath];
    // Insertion start time
    CMTime nextClipStartTime = kCMTimeZero;
    // Create a mutable audio/video composition
    AVMutableComposition *composition = [AVMutableComposition composition];
    // Load the video asset
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:videoInputUrl options:nil];
    // Time range of the video
    CMTimeRange videoTimeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    // Video track (kCMPersistentTrackID_Invalid == 0 lets the composition pick a track ID)
    AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    // Source video track
    AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    // Insert the source track's data into the mutable track
    [videoTrack insertTimeRange:videoTimeRange ofTrack:videoAssetTrack atTime:nextClipStartTime error:nil];
    // Load the audio asset
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audioInputUrl options:nil];
    // The video is the shorter clip here, so its duration is used directly;
    // add your own comparison if this needs to work in the general case
    CMTimeRange audioTimeRange = videoTimeRange;
    // Audio track
    AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    // Source audio track
    AVAssetTrack *audioAssetTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    // Insert it into the composition track
    [audioTrack insertTimeRange:audioTimeRange ofTrack:audioAssetTrack atTime:nextClipStartTime error:nil];
    // Create an export session
    AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetMediumQuality];
    // Output file type
    assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    // Output URL
    assetExport.outputURL = outputFileUrl;
    // Optimize for network playback
    assetExport.shouldOptimizeForNetworkUse = YES;
    // Export
    [assetExport exportAsynchronouslyWithCompletionHandler:^{
        // Back to the main thread (assetExport.status should ideally be checked here)
        dispatch_async(dispatch_get_main_queue(), ^{
            successBlock();
        });
    }];
}
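A minimal usage sketch. The file names and the class name VideoMergeTool are placeholders; substitute whatever utility class actually declares the method:

NSString *musicPath = [[NSBundle mainBundle] pathForResource:@"bgMusic" ofType:@"mp3"]; // placeholder resource
NSString *videoPath = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/movie.mp4"];
NSString *savePath  = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/mergedMovie.mov"];
// Remove a previous output if one exists
[[NSFileManager defaultManager] removeItemAtPath:savePath error:NULL];
[VideoMergeTool mergeVideoWithMusic:musicPath noBgMusicVideo:videoPath saveVideoPath:savePath success:^{
    NSLog(@"merge finished: %@", savePath);
}];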
Another approach:
- (IBAction)addBackgroundMusicAction:(UIButton *)sender {
    if (self.asset == nil) {
        return;
    }
    // Output path for the merged video
    NSString *outPutPath = [NSTemporaryDirectory() stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeVideo-%d.mov", arc4random() % 1000]];
    NSURL *outPutUrl = [NSURL fileURLWithPath:outPutPath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outPutPath]) {
        [[NSFileManager defaultManager] removeItemAtPath:outPutPath error:nil];
    }
    // Mutable audio/video composition
    AVMutableComposition *composition = [AVMutableComposition composition];
    // Time range of the video
    CMTimeRange videoTimeRange = CMTimeRangeMake(kCMTimeZero, self.asset.duration);
    // Mutable video track
    AVMutableCompositionTrack *videoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *videoAssetTrack = [[self.asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    // Insert the source track into the mutable video track
    [videoTrack insertTimeRange:videoTimeRange ofTrack:videoAssetTrack atTime:kCMTimeZero error:nil];
    // Load the audio asset
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"一个人的冬天" ofType:@"mp3"]] options:nil];
    // Audio track
    AVMutableCompositionTrack *audioTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    // Source audio track
    AVAssetTrack *audioAssetTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    [audioTrack insertTimeRange:videoTimeRange ofTrack:audioAssetTrack atTime:kCMTimeZero error:nil];
    // 3.1 - Create AVMutableVideoCompositionInstruction
    AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, self.asset.duration);
    // 3.2 - Create an AVMutableVideoCompositionLayerInstruction for the video track and fix the orientation.
    AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
    BOOL isVideoAssetPortrait_ = NO;
    CGAffineTransform videoTransform = videoAssetTrack.preferredTransform;
    if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) {
        videoAssetOrientation_ = UIImageOrientationRight;
        isVideoAssetPortrait_ = YES;
    }
    if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) {
        videoAssetOrientation_ = UIImageOrientationLeft;
        isVideoAssetPortrait_ = YES;
    }
    if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) {
        videoAssetOrientation_ = UIImageOrientationUp;
    }
    if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) {
        videoAssetOrientation_ = UIImageOrientationDown;
    }
    [videolayerInstruction setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
    [videolayerInstruction setOpacity:0.0 atTime:self.asset.duration];
    // 3.3 - Add instructions
    mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction, nil];
    AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
    CGSize naturalSize;
    if (isVideoAssetPortrait_) {
        naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
    } else {
        naturalSize = videoAssetTrack.naturalSize;
    }
    // Prevents the merged video from coming out rotated by 90 degrees
    float renderWidth, renderHeight;
    renderWidth = naturalSize.width;
    renderHeight = naturalSize.height;
    mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight);
    mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction];
    mainCompositionInst.frameDuration = CMTimeMake(1, 30);
    // Create an export session
    AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetMediumQuality];
    assetExport.outputURL = outPutUrl;                     // output path
    assetExport.outputFileType = AVFileTypeQuickTimeMovie; // output type
    assetExport.shouldOptimizeForNetworkUse = YES;
    assetExport.videoComposition = mainCompositionInst;
    [assetExport exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            [self exportDidFinish:assetExport];
        });
    }];
}
- (void)exportDidFinish:(AVAssetExportSession *)session {
    if (session.status == AVAssetExportSessionStatusCompleted) {
        NSURL *outputURL = session.outputURL;
        ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
        if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {
            [library writeVideoAtPathToSavedPhotosAlbum:outputURL completionBlock:^(NSURL *assetURL, NSError *error) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    if (error) {
                        UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Video Saving Failed"
                                                                       delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
                        [alert show];
                    } else {
                        AVPlayerItem *playeritem = [AVPlayerItem playerItemWithURL:outputURL];
                        [_player replaceCurrentItemWithPlayerItem:playeritem];
                        [_player play];
                    }
                });
            }];
        }
    }
}
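ALAssetsLibrary has been deprecated since iOS 9. If you target newer systems, a hedged sketch of the equivalent save using the Photos framework (requires photo library authorization) could look like this:

#import <Photos/Photos.h>

// Sketch: saving the exported file with PHPhotoLibrary instead of ALAssetsLibrary.
NSURL *outputURL = session.outputURL;
[[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
    [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:outputURL];
} completionHandler:^(BOOL success, NSError * _Nullable error) {
    dispatch_async(dispatch_get_main_queue(), ^{
        if (!success) {
            NSLog(@"Video saving failed: %@", error);
        } else {
            NSLog(@"Video saved to the photo library");
        }
    });
}];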
2.2 Audio + a video that already has an audio track
Approach:
1. Extract the audio from the video and mix it with the audio to be added, producing a mixed audio file.
2. Merge that mixed audio file with the video to get the new video file.
/**
 Merge audio with a video that has its own audio track

 @param musicPath    audio path
 @param videoPath    video path
 @param savePath     path where the merged video is saved
 @param successBlock called when merging succeeds
 */
+ (void)mergeVideoWithMusic:(NSString *)musicPath video:(NSString *)videoPath saveVideoPath:(NSString *)savePath success:(mergeVideoSuccessBlock)successBlock {
    // Step 1: mix the external audio with the video's own audio into a single audio file
    AVMutableComposition *composition = [AVMutableComposition composition];
    NSMutableArray *audioMixParams = [NSMutableArray array];
    // The recorded video
    NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoPath];
    AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:video_inputFileUrl options:nil];
    CMTime startTime = CMTimeMakeWithSeconds(0, songAsset.duration.timescale);
    CMTime trackDuration = songAsset.duration;
    // Pull the audio track out of the video
    // (note: the offset argument is not actually used by the helper below)
    [self setUpAndAddAudioAtPath:video_inputFileUrl toComposition:composition start:startTime dura:trackDuration offset:CMTimeMake(14 * 44100, 44100) addAudioParams:audioMixParams];
    // The local music to be mixed in
    NSURL *assetURL2 = [NSURL fileURLWithPath:musicPath];
    [self setUpAndAddAudioAtPath:assetURL2 toComposition:composition start:startTime dura:trackDuration offset:CMTimeMake(0, 44100) addAudioParams:audioMixParams];
    // Create a mutable audio mix
    AVMutableAudioMix *audioMix = [AVMutableAudioMix audioMix];
    audioMix.inputParameters = [NSArray arrayWithArray:audioMixParams]; // the per-track parameters collected above
    // Create an export session
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition
                                                                      presetName:AVAssetExportPresetAppleM4A];
    exporter.audioMix = audioMix;
    exporter.outputFileType = AVFileTypeAppleM4A; // i.e. @"com.apple.m4a-audio"; the output is an m4a file
    // Output path for the mixed audio (extension must match the type above)
    NSString *exportFile = [NSHomeDirectory() stringByAppendingPathComponent:
                            [NSString stringWithFormat:@"Documents/music.m4a"]];
    if ([[NSFileManager defaultManager] fileExistsAtPath:exportFile]) {
        [[NSFileManager defaultManager] removeItemAtPath:exportFile error:nil];
    }
    NSLog(@"mixed audio output path === %@", exportFile);
    NSURL *exportURL = [NSURL fileURLWithPath:exportFile];
    exporter.outputURL = exportURL;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        NSLog(@"audio mixing finished, now merging audio and video");
        if ([[NSFileManager defaultManager] fileExistsAtPath:exportFile]) {
            // Step 2: merge the mixed audio with the video
            [self theVideoWithMixMusic:exportFile videoPath:videoPath savePath:savePath success:successBlock];
        }
    }];
}
/**
 Merge the mixed audio with the video

 @param mixURLPath   path of the mixed audio
 @param videoPath    video path
 @param savePath     path where the merged video is saved
 @param successBlock called when merging succeeds
 */
+ (void)theVideoWithMixMusic:(NSString *)mixURLPath videoPath:(NSString *)videoPath savePath:(NSString *)savePath success:(mergeVideoSuccessBlock)successBlock
{
    // Audio source (the final mixed audio)
    NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:mixURLPath];
    // Video source
    NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoPath];
    // Final output path
    NSURL *outputFileUrl = [NSURL fileURLWithPath:savePath];
    CMTime nextClipStartTime = kCMTimeZero;
    // Create a mutable audio/video composition
    AVMutableComposition *mixComposition = [AVMutableComposition composition];
    // Load the video asset
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:video_timeRange ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil];
    // Load the audio asset
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl options:nil];
    CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration); // audio range is clipped to the video's length
    AVMutableCompositionTrack *b_compositionAudioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [b_compositionAudioTrack insertTimeRange:audio_timeRange ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:nextClipStartTime error:nil];
    // Create an export session
    AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality];
    _assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    _assetExport.outputURL = outputFileUrl;
    _assetExport.shouldOptimizeForNetworkUse = YES;
    [_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
        dispatch_async(dispatch_get_main_queue(), ^{
            NSLog(@"done! output path == %@", savePath);
            // Remove the temporary mixed-audio file
            if ([[NSFileManager defaultManager] fileExistsAtPath:mixURLPath]) {
                [[NSFileManager defaultManager] removeItemAtPath:mixURLPath error:nil];
            }
            successBlock();
        });
    }];
}
// Build and add an audio track to the composition from a file URL
+ (void)setUpAndAddAudioAtPath:(NSURL *)assetURL toComposition:(AVMutableComposition *)composition start:(CMTime)start dura:(CMTime)dura offset:(CMTime)offset addAudioParams:(NSMutableArray *)audioMixParams {
    AVURLAsset *songAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];
    AVMutableCompositionTrack *track = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *sourceAudioTrack = [[songAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    NSError *error = nil;
    BOOL ok = NO;
    CMTime startTime = start;
    CMTime trackDuration = dura;
    CMTimeRange tRange = CMTimeRangeMake(startTime, trackDuration);
    // Set the volume for this track
    // AVMutableAudioMixInputParameters holds the per-track mixing parameters
    AVMutableAudioMixInputParameters *trackMix = [AVMutableAudioMixInputParameters audioMixInputParametersWithTrack:track];
    [trackMix setVolume:0.8f atTime:startTime];
    // Collect the parameters for the audio mix
    [audioMixParams addObject:trackMix];
    // Insert the audio into the track; passing kCMTimeInvalid appends it at the end of the track
    // (note: the offset parameter is not used here)
    ok = [track insertTimeRange:tRange ofTrack:sourceAudioTrack atTime:kCMTimeInvalid error:&error];
}
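A minimal usage sketch for this second case. The paths and the class name VideoMergeTool are placeholders for whatever utility class actually declares the methods:

NSString *musicPath = [[NSBundle mainBundle] pathForResource:@"bgMusic" ofType:@"mp3"]; // placeholder resource
NSString *videoPath = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/recorded.mov"];
NSString *savePath  = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/mixedMovie.mov"];
// Remove a previous output if one exists
[[NSFileManager defaultManager] removeItemAtPath:savePath error:NULL];
[VideoMergeTool mergeVideoWithMusic:musicPath video:videoPath saveVideoPath:savePath success:^{
    NSLog(@"merge finished: %@", savePath);
}];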
3. Merging multiple videos into one video
// self.firstAsset and self.secondAsset are AVAsset objects for the two videos
// a sandbox file path can be converted to an AVURLAsset
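For instance, a sketch of loading the two assets from sandbox paths (the file names are placeholders; the real paths depend on where your clips live):

NSString *firstPath  = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/first.mov"];
NSString *secondPath = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/second.mov"];
self.firstAsset  = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:firstPath] options:nil];
self.secondAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:secondPath] options:nil];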
- (IBAction)mergeAction:(UIButton *)sender {
    if (self.firstAsset == nil || self.secondAsset == nil) {
        return;
    }
    AVMutableComposition *composition = [[AVMutableComposition alloc] init];
    AVMutableCompositionTrack *firstCompositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [firstCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, self.firstAsset.duration) ofTrack:[[self.firstAsset tracksWithMediaType:AVMediaTypeVideo] firstObject] atTime:kCMTimeZero error:nil];
    // Insert the second clip right after the first one (inserting it at kCMTimeZero would place it before the first clip)
    [firstCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, self.secondAsset.duration) ofTrack:[[self.secondAsset tracksWithMediaType:AVMediaTypeVideo] firstObject] atTime:self.firstAsset.duration error:nil];
    // ... more videos can be appended the same way
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentDirectory = [paths objectAtIndex:0];
    NSString *myPathDocs = [documentDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeVideo-%d.mov", arc4random() % 1000]];
    NSURL *url = [NSURL fileURLWithPath:myPathDocs];
    AVAssetExportSession *export = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
    export.outputURL = url;
    export.outputFileType = AVFileTypeQuickTimeMovie;
    export.shouldOptimizeForNetworkUse = YES;
    [export exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            [self exportDidFinish:export];
        });
    }];
}
- (void)exportDidFinish:(AVAssetExportSession *)session {
    if (session.status == AVAssetExportSessionStatusCompleted) {
        NSURL *outputURL = session.outputURL;
        ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
        if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {
            [library writeVideoAtPathToSavedPhotosAlbum:outputURL completionBlock:^(NSURL *assetURL, NSError *error) {
                dispatch_async(dispatch_get_main_queue(), ^{
                    if (error) {
                        UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Error" message:@"Video Saving Failed"
                                                                       delegate:nil cancelButtonTitle:@"OK" otherButtonTitles:nil];
                        [alert show];
                    } else {
                        AVPlayerItem *playeritem = [AVPlayerItem playerItemWithURL:outputURL];
                        [_player replaceCurrentItemWithPlayerItem:playeritem];
                        [_player play];
                        // UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Video Saved" message:@"Saved To Photo Album"
                        //                                                delegate:self cancelButtonTitle:@"OK" otherButtonTitles:nil];
                        // [alert show];
                    }
                });
            }];
        }
    }
}