核心代码
1,创建需要处理的视频素材,
#pragma mark - Video editing
/// Builds self.composition (trimmed video + audio) and self.videoComposition
/// (render size, frame rate, optional 90° rotation) from self.videoUrl.
/// The watermark is deliberately NOT added here: refreshing the player resets
/// the animation tool, so the watermark is attached just before export.
- (void)videoEdit {
    // 1. Load the source asset.
    AVAsset *asset = [AVAsset assetWithURL:self.videoUrl];

    // -firstObject is nil-safe; -objectAtIndex:0 throws on a track-less asset
    // (e.g. a video with no audio track).
    AVAssetTrack *videoAssetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    AVAssetTrack *audioAssetTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    if (!videoAssetTrack) {
        NSLog(@"videoEdit: asset at %@ has no video track", self.videoUrl);
        return;
    }

    // Duration of the user-selected range on the trim view.
    CMTime trimmedDuration = CMTimeSubtract(_videoView.newEndTime, _videoView.newStartTime);
    CMTimeShow(trimmedDuration);
    CMTimeRange trimRange = CMTimeRangeMake(_videoView.newStartTime, trimmedDuration);

    // 2. Insert the selected range of the source tracks into a new composition.
    self.composition = [AVMutableComposition composition];

    AVMutableCompositionTrack *videoCompositionTrack =
        [self.composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                      preferredTrackID:kCMPersistentTrackID_Invalid];
    NSError *error = nil;
    if (![videoCompositionTrack insertTimeRange:trimRange
                                        ofTrack:videoAssetTrack
                                         atTime:kCMTimeZero
                                          error:&error]) {
        NSLog(@"videoEdit: failed to insert video track: %@", error);
        return;
    }

    // Without an audio composition track the export is silent; skip gracefully
    // when the source itself has no audio.
    if (audioAssetTrack) {
        AVMutableCompositionTrack *audioCompositionTrack =
            [self.composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                          preferredTrackID:kCMPersistentTrackID_Invalid];
        if (![audioCompositionTrack insertTimeRange:trimRange
                                            ofTrack:audioAssetTrack
                                             atTime:kCMTimeZero
                                              error:&error]) {
            NSLog(@"videoEdit: failed to insert audio track: %@", error);
        }
    }

    // 3. Video composition: owns render size / frame rate / per-track transforms;
    // cropping and rotation happen here.
    self.videoComposition = [AVMutableVideoComposition videoComposition];
    self.videoComposition.frameDuration = CMTimeMake(1, 30);  // 30 fps
    self.videoComposition.renderSize = videoAssetTrack.naturalSize;

    // 3.1 A single instruction covering the whole trimmed range.
    AVMutableVideoCompositionInstruction *instruction =
        [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, trimmedDuration);

    // 3.2 Layer instruction for the composition's (single) video track.
    AVAssetTrack *videoTrack = [[self.composition tracksWithMediaType:AVMediaTypeVideo] firstObject];
    AVMutableVideoCompositionLayerInstruction *layerInstruction =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];

    // Optional 90° rotation: translate first so the rotated frame stays inside
    // the render rect, then swap width/height of the render size.
    if (self.isRotate) {
        CGAffineTransform translation =
            CGAffineTransformMakeTranslation(videoCompositionTrack.naturalSize.height, 0.0);
        CGAffineTransform rotation = CGAffineTransformRotate(translation, degreesToRadians(90.0));
        [layerInstruction setTransform:rotation atTime:kCMTimeZero];
        self.videoComposition.renderSize =
            CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width);
    }

    // 3.3 Wire instructions together.
    instruction.layerInstructions = @[ layerInstruction ];
    self.videoComposition.instructions = @[ instruction ];
    // Watermark is added at export time — refreshing the player would reset it here.
}
2,视频旋转,添加水印
视频处理后需要刷新 player,刷新前必须设置 self.videoComposition.animationTool = nil;
这会把水印清空,所以真正的水印要在导出前添加;显示在 playerLayer 上的水印只是"假水印"(预览用)。
// Attach the export watermark right before exporting: refreshing the player
// clears videoComposition.animationTool, so the layer shown on the player is
// only a preview copy.
if (self.isAddWaterMark) {
    CGSize renderSize = self.videoComposition.renderSize;
    CGRect renderRect = CGRectMake(0, 0, renderSize.width, renderSize.height);

    self.watermarkLayer = [self watermarkLayerForSize:renderSize];
    CALayer *exportWatermarkLayer = [self copyWatermarkLayer:self.watermarkLayer];

    // The Core Animation tool wants a parent layer that contains the video
    // layer plus any overlays.
    CALayer *containerLayer = [CALayer layer];
    containerLayer.frame = renderRect;
    CALayer *videoLayer = [CALayer layer];
    videoLayer.frame = renderRect;
    [containerLayer addSublayer:videoLayer];

    exportWatermarkLayer.position =
        CGPointMake(renderSize.width / 2, renderSize.height / 4);
    [containerLayer addSublayer:exportWatermarkLayer];

    // Fade the watermark out over the first 5 seconds, then keep it hidden.
    CABasicAnimation *fade = [CABasicAnimation animationWithKeyPath:@"opacity"];
    fade.fromValue = @1.0f;
    fade.toValue = @0.0f;
    fade.repeatCount = 0;
    fade.duration = 5.0f;  // gone after 5 s
    fade.removedOnCompletion = NO;
    fade.fillMode = kCAFillModeForwards;
    // AVCoreAnimationBeginTimeAtZero rather than 0 — in video compositions a
    // beginTime of 0 is treated as "now".
    fade.beginTime = AVCoreAnimationBeginTimeAtZero;
    [exportWatermarkLayer addAnimation:fade forKey:@"opacityAniamtion"];

    self.videoComposition.animationTool =
        [AVVideoCompositionCoreAnimationTool
            videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer
                                                                    inLayer:containerLayer];
}
3,导出视频
// Export the edited composition to Documents/Video/cyan.mp4 in the sandbox.
[self creatSandBoxFilePathIfNoExist];
NSString *pathDocuments =
    [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
NSString *videoPath = [pathDocuments stringByAppendingPathComponent:@"Video"];
NSString *urlPath = [videoPath stringByAppendingPathComponent:@"cyan.mp4"];

// AVAssetExportSession will not overwrite an existing file — remove any
// previous export first, and report (rather than swallow) a failure.
NSFileManager *manager = [NSFileManager defaultManager];
if ([manager fileExistsAtPath:urlPath]) {
    NSError *removeError = nil;
    if (![manager removeItemAtPath:urlPath error:&removeError]) {
        NSLog(@"Failed to remove previous export: %@", removeError);
    }
}

// AVAssetExportPresetPassthrough would avoid re-encoding, but a
// videoComposition requires a re-encoding preset such as HighestQuality.
self.exportSession = [[AVAssetExportSession alloc] initWithAsset:self.composition
                                                      presetName:AVAssetExportPresetHighestQuality];
self.exportSession.videoComposition = self.videoComposition;
self.exportSession.outputURL = [NSURL fileURLWithPath:urlPath];
self.exportSession.outputFileType = AVFileTypeMPEG4;
// self.exportSession.shouldOptimizeForNetworkUse = YES;

// weak/strong dance: self retains exportSession and exportSession retains its
// completion block, so capturing self strongly here would create a retain cycle.
__weak typeof(self) weakSelf = self;
[self.exportSession exportAsynchronouslyWithCompletionHandler:^{
    __strong typeof(weakSelf) strongSelf = weakSelf;
    if (!strongSelf) return;
    AVAssetExportSessionStatus exportStatus = strongSelf.exportSession.status;
    NSLog(@"exportStatus : %d", (int)exportStatus);
    switch (exportStatus) {
        case AVAssetExportSessionStatusCompleted: {
            // Save the finished file to the photo library.
            [strongSelf writeVideoToPhotoLibrary:[NSURL fileURLWithPath:urlPath]];
            NSLog(@"视频转码成功");
            break;
        }
        case AVAssetExportSessionStatusFailed: {
            NSError *exportError = strongSelf.exportSession.error;
            NSLog(@"AVAssetExportSessionStatusFailed: %@", exportError);
            break;
        }
        case AVAssetExportSessionStatusCancelled: {
            NSLog(@"AVAssetExportSessionStatusCancelled");
            break;
        }
        default:
            break;
    }
}];
读取并解析视频帧图片,用来可视化地裁剪视频。
#pragma mark - Extract video frame thumbnails
/// Generates one thumbnail per whole second of the video and lays them out in
/// a horizontal strip inside showImageViewBgView, used to visualise trimming.
- (void)analysisVideoFrames {
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:self.videoUrl options:nil];

    // Whole seconds of video = frame count / timescale (fractional tail dropped).
    long videoSumTime = videoAsset.duration.value / videoAsset.duration.timescale;
    if (videoSumTime <= 0) {
        // Guards the division below; a sub-second asset yields no thumbnails.
        NSLog(@"analysisVideoFrames: asset shorter than 1 s, nothing to do");
        return;
    }

    AVAssetImageGenerator *generator = [[AVAssetImageGenerator alloc] initWithAsset:videoAsset];
    generator.maximumSize = self.bottomView.frame.size;
    generator.appliesPreferredTrackTransform = YES;  // honour rotation metadata
    // Zero tolerance: exact frames, slower but each thumb matches its second.
    generator.requestedTimeToleranceBefore = kCMTimeZero;
    generator.requestedTimeToleranceAfter = kCMTimeZero;

    // One requested time per whole second.
    self.framesArray = [NSMutableArray array];
    for (NSInteger index = 0; index < videoSumTime; index++) {
        CMTime time = CMTimeMake(index * videoAsset.duration.timescale,
                                 videoAsset.duration.timescale);
        [self.framesArray addObject:[NSValue valueWithCMTime:time]];
    }

    __block long count = 0;
    __weak typeof(self) weakSelf = self;
    // NOTE(review): the x-offset below uses a literal 20 while this width is
    // derived from leftView's width — confirm these are meant to agree.
    CGFloat thumbWidth =
        (self.bottomView.frame.size.width - self.leftView.frame.size.width * 2) / videoSumTime;

    [generator generateCGImagesAsynchronouslyForTimes:self.framesArray
                                    completionHandler:^(CMTime requestedTime,
                                                        CGImageRef _Nullable image,
                                                        CMTime actualTime,
                                                        AVAssetImageGeneratorResult result,
                                                        NSError *_Nullable error) {
        switch (result) {
            case AVAssetImageGeneratorSucceeded: {
                // Capture the image in a block-local variable: the previous
                // shared __block UIImage could be overwritten by the next
                // callback before the main-queue block ran, showing the wrong
                // frame in the strip.
                UIImage *thumbImage = [UIImage imageWithCGImage:image];
                dispatch_async(dispatch_get_main_queue(), ^{
                    UIImageView *thumbImageView = [[UIImageView alloc]
                        initWithFrame:CGRectMake(20 + count * thumbWidth, 0, thumbWidth, 40)];
                    thumbImageView.image = thumbImage;
                    [weakSelf.showImageViewBgView addSubview:thumbImageView];
                    // Serialized on the main queue, so count stays consistent.
                    count++;
                });
                break;
            }
            case AVAssetImageGeneratorFailed:
                NSLog(@"Failed with error: %@", [error localizedDescription]);
                break;
            case AVAssetImageGeneratorCancelled:
                NSLog(@"AVAssetImageGeneratorCancelled");
                break;
        }
    }];
}