iOS 视频合成(图片与视频的合成、视频与音频的合成)

这个 给我的感觉就像是 PPT 播放一样。这里找了一些资料,学习视频合成方面的知识。

一:图片和视频的合成:


// Demo controller: composes a sequence of still images into a video
// (AVAssetWriter) and muxes a video track with an audio track
// (AVMutableComposition).
@interface ViewController ()

// Source images that will be written out as video frames.
@property (nonatomic, strong) NSMutableArray *imageArr;

// Output path of the most recently composed video.
// `copy` (not `strong`): NSString has a mutable subclass, so copy
// semantics protect against the caller mutating it behind our back.
@property (nonatomic, copy) NSString *theVideoPath;

@end

@implementation ViewController

- (void)viewDidLoad {
    [super viewDidLoad];

    // Load source frames "1.jpg" ... "23.jpg".
    // The original listed all 23 literals inline and inconsistently
    // omitted the ".jpg" suffix on two of them ("7", "8"); a loop keeps
    // the naming uniform and is far easier to maintain.
    self.imageArr = [[NSMutableArray alloc] init];
    for (int i = 1; i <= 23; i++) {
        UIImage *image = [UIImage imageNamed:[NSString stringWithFormat:@"%d.jpg", i]];
        if (image) {  // guard: -addObject: throws on nil (missing asset)
            [self.imageArr addObject:image];
        }
    }

    // "合成" (compose) button — triggers the image-to-video composition.
    UIButton *composeButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    composeButton.frame = CGRectMake(100, 100, 100, 100);
    [composeButton setTitle:@"合成" forState:UIControlStateNormal];
    [composeButton addTarget:self
                      action:@selector(testCompressionSession)
            forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:composeButton];

    // "播放" (play) button — plays the composed video.
    UIButton *playButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
    playButton.frame = CGRectMake(100, 200, 100, 100);
    [playButton setTitle:@"播放" forState:UIControlStateNormal];
    [playButton addTarget:self
                   action:@selector(playAction)
         forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:playButton];
}

// Composes self.imageArr into an H.264 movie written to Documents.
// Each image is held for 10 frames at a timescale of 10, i.e. one
// second per image.
- (void)testCompressionSession {
    NSLog(@"开始");

    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *moviePath = [[paths objectAtIndex:0]
        stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mp4", @"2016全球三大超跑宣传片_超清"]];
    self.theVideoPath = moviePath;

    // Output dimensions of the generated video.
    CGSize size = CGSizeMake(320, 400);

    NSError *error = nil;

    // Remove any previous output; AVAssetWriter fails if the file exists.
    unlink([moviePath UTF8String]);
    NSLog(@"path->%@", moviePath);

    // ---- initialize the compression engine ----
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:moviePath]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);
    if (error) {
        NSLog(@"error =%@", [error localizedDescription]);
        return;  // was: logged and continued — writing cannot succeed without a writer
    }

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey, nil];
    AVAssetWriterInput *writerInput =
        [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];

    // The adaptor hands us a pixel-buffer pool in 32ARGB, matching the
    // format produced by -pixelBufferFromCGImage:size:.
    NSDictionary *sourcePixelBufferAttributesDictionary =
        [NSDictionary dictionaryWithObjectsAndKeys:
         [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
    AVAssetWriterInputPixelBufferAdaptor *adaptor =
        [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                         sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];

    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    if ([videoWriter canAddInput:writerInput]) {
        NSLog(@"11111");
        [videoWriter addInput:writerInput];
    } else {
        NSLog(@"22222");
        return;  // was: added the input anyway after logging the failure
    }

    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    // Feed the images as frames on a background queue.
    dispatch_queue_t dispatchQueue = dispatch_queue_create("mediaInputQueue", NULL);
    int __block frame = 0;
    [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
        while ([writerInput isReadyForMoreMediaData]) {
            if (++frame >= [self.imageArr count] * 10) {
                [writerInput markAsFinished];
                // -finishWriting is deprecated (iOS 6); the asynchronous
                // variant guarantees the file is complete before use.
                [videoWriter finishWritingWithCompletionHandler:^{
                    NSLog(@"finishWriting");
                }];
                break;
            }

            int idx = frame / 10;  // index of the source image for this frame
            NSLog(@"idx==%d", idx);

            // Returned +1 retained; released below after appending.
            CVPixelBufferRef buffer =
                [self pixelBufferFromCGImage:[[self.imageArr objectAtIndex:idx] CGImage] size:size];
            if (buffer) {
                if (![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame, 10)]) {
                    NSLog(@"FAIL");
                } else {
                    NSLog(@"OK");
                }
                CFRelease(buffer);
            }
        }
    }];
}

// Renders `image` into a newly created 32ARGB CVPixelBuffer of `size`.
// Returns a +1 retained buffer (caller must CFRelease), or NULL on failure.
// NOTE(review): the image is drawn at its own natural size, not scaled to
// `size`; images smaller/larger than the buffer will not fill it exactly —
// confirm this is the intended behavior.
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size {
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];

    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height,
                                          kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef)options, &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
    if (status != kCVReturnSuccess || pxbuffer == NULL) {
        // NSParameterAssert is compiled out in release builds; without this
        // guard a failed allocation would crash on the NULL base address.
        return NULL;
    }

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    // Use the buffer's actual bytes-per-row: CoreVideo may pad rows for
    // alignment, and hardcoding 4*width would skew every scanline.
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8,
                                                 CVPixelBufferGetBytesPerRow(pxbuffer),
                                                 rgbColorSpace,
                                                 kCGImageAlphaPremultipliedFirst);
    NSParameterAssert(context);
    if (context == NULL) {
        // Clean up everything created so far instead of leaking the buffer.
        CGColorSpaceRelease(rgbColorSpace);
        CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
        CVPixelBufferRelease(pxbuffer);
        return NULL;
    }

    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}

// Plays the most recently composed video.
// NOTE(review): MPMoviePlayerViewController is deprecated since iOS 9;
// AVKit's AVPlayerViewController is the modern replacement.
- (void)playAction {
    if (self.theVideoPath.length == 0) {
        // fileURLWithPath: throws on nil — guard against tapping "播放"
        // before anything has been composed.
        NSLog(@"no video to play yet");
        return;
    }
    MPMoviePlayerViewController *theMovie =
        [[MPMoviePlayerViewController alloc] initWithContentURL:[NSURL fileURLWithPath:self.theVideoPath]];
    // Configure the source type before presenting/playing (was set after).
    theMovie.moviePlayer.movieSourceType = MPMovieSourceTypeFile;
    [self presentMoviePlayerViewControllerAnimated:theMovie];
    [theMovie.moviePlayer play];
}

// Second approach: writes `imagesArray` into a movie at `path`, spreading
// `duration` seconds evenly across the images at `fps` frames per second.
// Each append is retried up to 30 times while the writer input catches up.
- (void)writeImages:(NSArray *)imagesArray
      ToMovieAtPath:(NSString *)path
           withSize:(CGSize)size
         inDuration:(float)duration
              byFPS:(int32_t)fps {
    if ([imagesArray count] == 0) {
        // was: divided duration by zero below when the array was empty
        NSLog(@"writeImages: no images to write");
        return;
    }

    // Wire the writer.
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey, nil];
    AVAssetWriterInput *videoWriterInput =
        [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    AVAssetWriterInputPixelBufferAdaptor *adaptor =
        [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                                         sourcePixelBufferAttributes:nil];
    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    [videoWriter addInput:videoWriterInput];

    // Start a session.
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    // Write the samples: each image occupies `averageFrame` frames so the
    // total presentation time adds up to roughly `duration` seconds.
    int frameCount = 0;
    int imagesCount = (int)[imagesArray count];
    float averageTime = duration / imagesCount;
    int averageFrame = (int)(averageTime * fps);

    for (UIImage *img in imagesArray) {
        // +1 retained; released below — the original leaked one pixel
        // buffer per image because CFRelease was never called.
        CVPixelBufferRef buffer = [self pixelBufferFromCGImage:[img CGImage] size:size];
        BOOL append_ok = NO;
        int j = 0;
        while (!append_ok && j <= 30) {
            if (adaptor.assetWriterInput.readyForMoreMediaData) {
                printf("appending %d attemp%d\n", frameCount, j);
                CMTime frameTime = CMTimeMake(frameCount, (int32_t)fps);
                float frameSeconds = CMTimeGetSeconds(frameTime);
                NSLog(@"frameCount:%d,kRecordingFPS:%d,frameSeconds:%f", frameCount, fps, frameSeconds);
                append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
                if (buffer) {
                    [NSThread sleepForTimeInterval:0.05];
                }
            } else {
                printf("adaptor not ready %d,%d\n", frameCount, j);
                [NSThread sleepForTimeInterval:0.1];
            }
            j++;
        }
        if (buffer) {
            CFRelease(buffer);
        }
        if (!append_ok) {
            printf("error appendingimage %d times %d\n", frameCount, j);
        }
        frameCount = frameCount + averageFrame;
    }

    // Finish the session.
    [videoWriterInput markAsFinished];
    // -finishWriting is deprecated (iOS 6); the asynchronous variant
    // guarantees the file is fully written before it is used.
    [videoWriter finishWritingWithCompletionHandler:^{
        NSLog(@"finishWriting");
    }];
}




二:视频跟音频的合成

// Muxes a bundled MP3 audio track onto a bundled MP4 video track using
// AVMutableComposition, exports the result to Documents/merge.mp4 with
// AVAssetExportSession, then plays it via -playWithUrl: (defined elsewhere).
// NOTE(review): the method's closing brace is missing in the transcribed
// article — the definition is truncated and will not compile as-is.
-(void)merge{

// progress HUD (disabled in the article)

//  [MBProgressHUD showMessage:@"正在处理中"];

// Documents directory path

NSString *documents = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents"];

// audio source (bundled mp3)

NSURL *audioInputUrl = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"蓝瘦香菇" ofType:@"mp3"]];

// video source (bundled mp4)

NSURL *videoInputUrl = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"2016全球三大超跑宣传片_超清" ofType:@"mp4"]];

// final merged output path

NSString *outPutFilePath = [documents stringByAppendingPathComponent:@"merge.mp4"];

// URL for the merged output

NSURL *outputFileUrl = [NSURL fileURLWithPath:outPutFilePath];

// insertion start time for both tracks

CMTime nextClistartTime = kCMTimeZero;

// mutable audio/video composition

AVMutableComposition *comosition = [AVMutableComposition composition];

// load the video asset

AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:videoInputUrl options:nil];

// time range covering the full video duration

CMTimeRange videoTimeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);

// video track; kCMPersistentTrackID_Invalid (== 0) lets AVFoundation pick the ID

AVMutableCompositionTrack *videoTrack = [comosition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];

// first video track of the source asset

AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];

// insert the source video track into the mutable composition track

[videoTrack insertTimeRange:videoTimeRange ofTrack:videoAssetTrack atTime:nextClistartTime error:nil];

// load the audio asset

AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audioInputUrl options:nil];

// the video is the shorter of the two here, so its range is reused for the
// audio; production code should compare the two durations explicitly

CMTimeRange audioTimeRange = videoTimeRange;

// audio track

AVMutableCompositionTrack *audioTrack = [comosition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

// first audio track of the source asset

AVAssetTrack *audioAssetTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];

// insert the audio into the composition

[audioTrack insertTimeRange:audioTimeRange ofTrack:audioAssetTrack atTime:nextClistartTime error:nil];

// create the export session from the composition

AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:comosition presetName:AVAssetExportPresetMediumQuality];

// output container type

assetExport.outputFileType = AVFileTypeQuickTimeMovie;

// output URL

assetExport.outputURL = outputFileUrl;

// optimize the file layout for streaming

assetExport.shouldOptimizeForNetworkUse = YES;

// export asynchronously, then play the merged file

[assetExport exportAsynchronouslyWithCompletionHandler:^{

// hop back to the main thread for UI work

dispatch_async(dispatch_get_main_queue(), ^{

// outputFileUrl is the merged audio+video file

[self playWithUrl:outputFileUrl];

});

}];

最后编辑于
©著作权归作者所有,转载或内容合作请联系作者
  • 序言:七十年代末,一起剥皮案震惊了整个滨河市,随后出现的几起案子,更是在滨河造成了极大的恐慌,老刑警刘岩,带你破解...
    沈念sama阅读 203,362评论 5 477
  • 序言:滨河连续发生了三起死亡事件,死亡现场离奇诡异,居然都是意外死亡,警方通过查阅死者的电脑和手机,发现死者居然都...
    沈念sama阅读 85,330评论 2 381
  • 文/潘晓璐 我一进店门,熙熙楼的掌柜王于贵愁眉苦脸地迎上来,“玉大人,你说我怎么就摊上这事。” “怎么了?”我有些...
    开封第一讲书人阅读 150,247评论 0 337
  • 文/不坏的土叔 我叫张陵,是天一观的道长。 经常有香客问我,道长,这世上最难降的妖魔是什么? 我笑而不...
    开封第一讲书人阅读 54,560评论 1 273
  • 正文 为了忘掉前任,我火速办了婚礼,结果婚礼上,老公的妹妹穿的比我还像新娘。我一直安慰自己,他们只是感情好,可当我...
    茶点故事阅读 63,580评论 5 365
  • 文/花漫 我一把揭开白布。 她就那样静静地躺着,像睡着了一般。 火红的嫁衣衬着肌肤如雪。 梳的纹丝不乱的头发上,一...
    开封第一讲书人阅读 48,569评论 1 281
  • 那天,我揣着相机与录音,去河边找鬼。 笑死,一个胖子当着我的面吹牛,可吹牛的内容都是我干的。 我是一名探鬼主播,决...
    沈念sama阅读 37,929评论 3 395
  • 文/苍兰香墨 我猛地睁开眼,长吁一口气:“原来是场噩梦啊……” “哼!你这毒妇竟也来了?” 一声冷哼从身侧响起,我...
    开封第一讲书人阅读 36,587评论 0 258
  • 序言:老挝万荣一对情侣失踪,失踪者是张志新(化名)和其女友刘颖,没想到半个月后,有当地人在树林里发现了一具尸体,经...
    沈念sama阅读 40,840评论 1 297
  • 正文 独居荒郊野岭守林人离奇死亡,尸身上长有42处带血的脓包…… 初始之章·张勋 以下内容为张勋视角 年9月15日...
    茶点故事阅读 35,596评论 2 321
  • 正文 我和宋清朗相恋三年,在试婚纱的时候发现自己被绿了。 大学时的朋友给我发了我未婚夫和他白月光在一起吃饭的照片。...
    茶点故事阅读 37,678评论 1 329
  • 序言:一个原本活蹦乱跳的男人离奇死亡,死状恐怖,灵堂内的尸体忽然破棺而出,到底是诈尸还是另有隐情,我是刑警宁泽,带...
    沈念sama阅读 33,366评论 4 318
  • 正文 年R本政府宣布,位于F岛的核电站,受9级特大地震影响,放射性物质发生泄漏。R本人自食恶果不足惜,却给世界环境...
    茶点故事阅读 38,945评论 3 307
  • 文/蒙蒙 一、第九天 我趴在偏房一处隐蔽的房顶上张望。 院中可真热闹,春花似锦、人声如沸。这庄子的主人今日做“春日...
    开封第一讲书人阅读 29,929评论 0 19
  • 文/苍兰香墨 我抬头看了看天上的太阳。三九已至,却和暖如春,着一层夹袄步出监牢的瞬间,已是汗流浃背。 一阵脚步声响...
    开封第一讲书人阅读 31,165评论 1 259
  • 我被黑心中介骗来泰国打工, 没想到刚下飞机就差点儿被人妖公主榨干…… 1. 我叫王不留,地道东北人。 一个月前我还...
    沈念sama阅读 43,271评论 2 349
  • 正文 我出身青楼,却偏偏与公主长得像,于是被迫代替她去往敌国和亲。 传闻我的和亲对象是个残疾皇子,可洞房花烛夜当晚...
    茶点故事阅读 42,403评论 2 342

推荐阅读更多精彩内容