/// Re-encodes the asset's video and audio tracks through the configured
/// AVAssetReader outputs into the AVAssetWriter inputs, reporting progress
/// from the video track's presentation timestamps and invoking
/// -completeWrite: once BOTH tracks have been fully written.
/// @param complete Passed through to -completeWrite: when writing finishes;
///                 receives the output file path. NOTE(review): assumed —
///                 -completeWrite: is defined elsewhere in this file.
/// @param progress Called repeatedly on the writer queue with the fraction
///                 (0..1) of video duration written. May be nil.
- (void)startWriteWithCompletionHandler:(void (^)(NSString *path))complete
                            andProgress:(void (^)(CGFloat progress))progress
{
    CMTime duration = [self.asset duration];
    CGFloat totalTime = CMTimeGetSeconds(duration);
    // Serial queue: both request blocks read AND write the two completion
    // flags below. On the original concurrent queue the unsynchronized
    // flag checks could fire -completeWrite: twice, or never.
    dispatch_queue_t queue = dispatch_queue_create("com.writequeue", DISPATCH_QUEUE_SERIAL);
    __block NSInteger count = 0;
    __block BOOL isCompleteVideo = NO;
    __block BOOL isCompleteAudio = NO;

    // Start the writing session; samples begin at time zero.
    [_assetWrite startWriting];
    [_assetWrite startSessionAtSourceTime:kCMTimeZero];

    WS(weakSelf)
    [_assetWriterInputVideo requestMediaDataWhenReadyOnQueue:queue usingBlock:^{
        while (!isCompleteVideo && weakSelf.assetWriterInputVideo.readyForMoreMediaData) {
            @autoreleasepool {
                CMSampleBufferRef buffer = [weakSelf.assetReaderTrackOutputVideo copyNextSampleBuffer];
                // NOTE: watermark/overlay compositing that used to live here was
                // dead (commented-out) code and has been removed.
                if (buffer) {
                    // Only touch the buffer AFTER the NULL check:
                    // CMSampleBufferGetPresentationTimeStamp crashes on NULL,
                    // which copyNextSampleBuffer returns at end-of-track.
                    if (progress && totalTime > 0) {
                        CMTime presentTime = CMSampleBufferGetPresentationTimeStamp(buffer);
                        progress(CMTimeGetSeconds(presentTime) / totalTime);
                    }
                    [weakSelf.assetWriterInputVideo appendSampleBuffer:buffer];
                    NSLog(@"%ld", (long)++count);
                    // copyNextSampleBuffer follows the Create rule; release it.
                    CFRelease(buffer);
                } else {
                    // End of track: mark the input finished so the writer's
                    // finishWriting can actually complete.
                    isCompleteVideo = YES;
                    [weakSelf.assetWriterInputVideo markAsFinished];
                    if (isCompleteAudio) {
                        [weakSelf completeWrite:complete];
                    }
                }
            }
        }
    }];

    [_assetWriterInputAudio requestMediaDataWhenReadyOnQueue:queue usingBlock:^{
        while (!isCompleteAudio && weakSelf.assetWriterInputAudio.readyForMoreMediaData) {
            @autoreleasepool {
                CMSampleBufferRef buffer = [weakSelf.assetReaderTrackOutputAudio copyNextSampleBuffer];
                if (buffer) {
                    [weakSelf.assetWriterInputAudio appendSampleBuffer:buffer];
                    // The original never released audio buffers — leaked every
                    // sample. copyNextSampleBuffer transfers ownership to us.
                    CFRelease(buffer);
                } else {
                    isCompleteAudio = YES;
                    [weakSelf.assetWriterInputAudio markAsFinished];
                    if (isCompleteVideo) {
                        [weakSelf completeWrite:complete];
                    }
                }
            }
        }
    }];
}