最近项目中需要用到视频拍摄和上传
拍摄界面参考的是微信朋友圈发视频的界面
拍摄完视频正常的逻辑应该是压缩,然后上传
但是在我压缩完并上传之后,在后台查看上传的视频,竟然是倒着的!旋转了90度
既然旋转了90度,那我们再把它旋转回来就可以了呗
直接上代码吧,也没有什么注释,因为懒(这个方法同时包含了压缩和旋转)
/// Compresses (and rotation-corrects, via the video composition) the video at
/// `inputURL` into a temporary MP4 file.
/// The handler is always invoked on the main queue:
///   success — YES only when the export completed;
///   session — the finished export session (nil on failure/cancel);
///   url     — file URL of the compressed video (nil on failure/cancel).
/// NOTE(review): method name keeps the existing "Quailty" spelling — it is the
/// public interface and callers depend on it.
+(void)lowQuailtyWithInputURL:(NSURL *)inputURL blockHandler:(void (^)(BOOL, AVAssetExportSession *, NSURL *))handler
{
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:inputURL options:nil];
AVAssetExportSession *session = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetMediumQuality];
// Same output location as before: <tmp>/VideoCompression/VideoCompressionTemp.mp4
NSString *path = [NSString stringWithFormat:@"%@VideoCompression/", NSTemporaryDirectory()];
NSString *outputPath = [NSString stringWithFormat:@"%@VideoCompressionTemp.mp4", path];
NSFileManager *fileManager = [[NSFileManager alloc] init];
// FIX: the directory check used to be wrapped in dispatch_once, so if the
// system purged the temp directory while the app was alive it was never
// recreated and every subsequent export failed. The plain existence check is
// cheap and always correct.
NSError *fsError = nil;
if (![fileManager fileExistsAtPath:path]) {
    if (![fileManager createDirectoryAtPath:path withIntermediateDirectories:YES attributes:nil error:&fsError]) {
        NSLog(@"Failed to create compression directory: %@", fsError);
        if (handler) handler(NO, nil, nil);
        return;
    }
}
// AVAssetExportSession refuses to write over an existing file, so remove any
// stale output from a previous run — and surface the error instead of
// passing nil and silently exporting into a doomed session.
if ([fileManager fileExistsAtPath:outputPath] && ![fileManager removeItemAtPath:outputPath error:&fsError]) {
    NSLog(@"Failed to remove stale output file: %@", fsError);
    if (handler) handler(NO, nil, nil);
    return;
}
NSURL *compressionVideoURL = [NSURL fileURLWithPath:outputPath];
session.outputURL = compressionVideoURL;
session.outputFileType = AVFileTypeMPEG4;
session.shouldOptimizeForNetworkUse = YES;
// Bakes the capture rotation into the pixels (see +getVideoComposition:).
session.videoComposition = [VideoEditingView getVideoComposition:asset];
[session exportAsynchronouslyWithCompletionHandler:^{
    // Hop to the main queue so callers can touch UI directly in the handler.
    dispatch_async(dispatch_get_main_queue(), ^{
        switch (session.status) {
            case AVAssetExportSessionStatusFailed: {
                NSLog(@"Export failed: %@ : %@", session.error.localizedDescription, session.error);
                if (handler) handler(NO, nil, nil);  // FIX: nil-check before invoking the block
                break;
            }
            case AVAssetExportSessionStatusCancelled: {
                NSLog(@"Export canceled");
                if (handler) handler(NO, nil, nil);
                break;
            }
            default:
                if (handler) handler(YES, session, compressionVideoURL);
                break;
        }
    });
}];
}
/// Builds a video composition that applies the video track's
/// preferredTransform, so the exported file plays upright instead of carrying
/// only rotation metadata (which some backends/players ignore).
/// @param asset The source asset. Must contain a video track.
/// @return A composition for AVAssetExportSession.videoComposition, or nil
///         when the asset has no video track.
+ (AVMutableVideoComposition *)getVideoComposition:(AVAsset *)asset {
// FIX: the original indexed tracks[0] *before* checking the track count, so an
// audio-only or empty asset crashed with an out-of-bounds exception.
// firstObject is nil-safe.
AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
if (!videoTrack) {
    return nil;
}
AVMutableComposition *composition = [AVMutableComposition composition];
AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
// A portrait recording still reports a landscape naturalSize; swap the sides
// so the render size matches the rotated frame.
// FIX: the original only matched one 90° transform (b==1, c==-1); this also
// covers the mirrored/270° portrait variant (b==-1, c==1), which otherwise
// rendered at the wrong dimensions.
CGSize videoSize = videoTrack.naturalSize;
CGAffineTransform t = videoTrack.preferredTransform;
BOOL isPortrait = (t.a == 0 && t.d == 0 && fabs(t.b) == 1.0 && fabs(t.c) == 1.0);
if (isPortrait) {
    videoSize = CGSizeMake(videoSize.height, videoSize.width);
}
composition.naturalSize = videoSize;
videoComposition.renderSize = videoSize;
// FIX: guard nominalFrameRate == 0 (some tracks report it), which divided by
// zero and produced an invalid frameDuration. Fall back to 30 fps.
float fps = (videoTrack.nominalFrameRate > 0) ? videoTrack.nominalFrameRate : 30.0f;
videoComposition.frameDuration = CMTimeMakeWithSeconds(1.0 / fps, 600);
AVMutableCompositionTrack *compositionVideoTrack =
    [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
// FIX: the original passed error:nil and ignored insertion failure; at least
// log it so a silently-empty export is diagnosable.
NSError *insertError = nil;
if (![compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                                    ofTrack:videoTrack
                                     atTime:kCMTimeZero
                                      error:&insertError]) {
    NSLog(@"Failed to insert video track into composition: %@", insertError);
}
// Apply the capture transform to the layer so the pixels themselves are
// rotated upright in the output.
AVMutableVideoCompositionLayerInstruction *layerInst =
    [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
[layerInst setTransform:videoTrack.preferredTransform atTime:kCMTimeZero];
AVMutableVideoCompositionInstruction *inst = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
inst.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);
inst.layerInstructions = @[layerInst];
videoComposition.instructions = @[inst];
return videoComposition;
}
就这样