I needed screen recording in a project, so I did a bit of research on it.
I. Requirements
(1) The system version must be iOS 9.0 or later.
(2) The simulator does not support screen recording.
(3) [RPScreenRecorder sharedRecorder].available — the framework's own check for whether the current hardware and settings allow recording (see the sketch below).
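For example, condition (3) can be verified before showing any recording UI at all. A minimal sketch:

if (![RPScreenRecorder sharedRecorder].isAvailable) {
    // hardware or settings (e.g. parental restrictions) do not allow recording
    NSLog(@"screen recording unavailable");
    return;
}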
II. Recording APIs
1. Start recording
(1) - (void)startRecordingWithMicrophoneEnabled:(BOOL)microphoneEnabled handler:(nullable void(^)(NSError * _Nullable error))handler — lets you choose whether to record the microphone as well (deprecated since iOS 10 in favor of the microphoneEnabled property plus method (2)).
(2) - (void)startRecordingWithHandler:(nullable void(^)(NSError * _Nullable error))handler
(3) - (void)startCaptureWithHandler:(nullable void(^)(CMSampleBufferRef sampleBuffer, RPSampleBufferType bufferType, NSError * _Nullable error))captureHandler completionHandler:(nullable void(^)(NSError * _Nullable error))completionHandler — available from iOS 11 on. While recording, it hands you the raw sample buffers, so you can handle resolution and format yourself; in other words, you can push the stream straight out as a live broadcast without any third-party broadcasting app (provided your app has live-streaming capability). The default pixel format of sampleBuffer is 420f (kCVPixelFormatType_420YpCbCr8BiPlanarFullRange).
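A minimal call to (2) above, with error handling kept to a log for brevity:

[[RPScreenRecorder sharedRecorder] startRecordingWithHandler:^(NSError * _Nullable error) {
    if (error) {
        // e.g. the user declined the system permission alert
        NSLog(@"start recording failed: %@", error);
    } else {
        NSLog(@"recording started");
    }
}];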
2. Stop recording
(1) - (void)stopRecordingWithHandler:(nullable void(^)(RPPreviewViewController * _Nullable previewViewController, NSError * _Nullable error))handler — the counterpart of start methods (1) and (2).
previewViewController is the view controller that plays back what you just recorded. To show it when recording ends, present it as below; setting its delegate lets you react to save, share, cancel, and so on:
if (previewViewController) {
    // hand ourselves to the preview controller as its delegate
    previewViewController.previewControllerDelegate = self;
    [self presentViewController:previewViewController animated:YES completion:nil];
}
(2) - (void)stopCaptureWithHandler:(nullable void(^)(NSError * _Nullable error))handler — the counterpart of start method (3).
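Putting (1) together with the presentation snippet above, stopping looks roughly like this (a sketch; the hop to the main queue is defensive, since the handler's queue is not documented):

[[RPScreenRecorder sharedRecorder] stopRecordingWithHandler:^(RPPreviewViewController * _Nullable previewViewController, NSError * _Nullable error) {
    if (error) {
        NSLog(@"stop recording failed: %@", error);
        return;
    }
    dispatch_async(dispatch_get_main_queue(), ^{
        if (previewViewController) {
            previewViewController.previewControllerDelegate = self;
            [self presentViewController:previewViewController animated:YES completion:nil];
        }
    });
}];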
3. RPScreenRecorderDelegate
(1) - (void)screenRecorder:(RPScreenRecorder *)screenRecorder didStopRecordingWithError:(NSError *)error previewViewController:(nullable RPPreviewViewController *)previewViewController — marked API_DEPRECATED("No longer supported"); use (2) instead.
(2) - (void)screenRecorder:(RPScreenRecorder *)screenRecorder didStopRecordingWithPreviewViewController:(nullable RPPreviewViewController *)previewViewController error:(nullable NSError *)error
(3) - (void)screenRecorderDidChangeAvailability:(RPScreenRecorder *)screenRecorder; — notifies you when ReplayKit's availability changes, e.g. when the user jumps into Settings mid-recording and revokes the permission.
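A sketch of (3), assuming self is the recorder's delegate and recordButton is a hypothetical control of ours:

- (void)screenRecorderDidChangeAvailability:(RPScreenRecorder *)screenRecorder {
    dispatch_async(dispatch_get_main_queue(), ^{
        // grey out the (hypothetical) record button whenever ReplayKit becomes unavailable
        self.recordButton.enabled = screenRecorder.isAvailable;
    });
}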
4. RPPreviewViewControllerDelegate
// delegate methods for the playback preview controller
- (void)previewControllerDidFinish:(RPPreviewViewController *)previewController {
    // the user is done; return to the previous screen
    [previewController dismissViewControllerAnimated:YES completion:nil];
}
// called when the user picks an action such as share or save
- (void)previewController:(RPPreviewViewController *)previewController didFinishWithActivityTypes:(NSSet<NSString *> *)activityTypes {
    if ([activityTypes containsObject:@"com.apple.UIKit.activity.SaveToCameraRoll"]) {
        dispatch_async(dispatch_get_main_queue(), ^{
            [MBProgressHUD showSuccess:@"Saved to the photo library"];
        });
    }
    if ([activityTypes containsObject:@"com.apple.UIKit.activity.CopyToPasteboard"]) {
        dispatch_async(dispatch_get_main_queue(), ^{
            [MBProgressHUD showSuccess:@"Copied to the pasteboard"];
        });
    }
}
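A small aside: as far as I can tell, those two raw strings are the values of UIKit's exported constants, so the checks can also be written against the constants and be safe from typos:

if ([activityTypes containsObject:UIActivityTypeSaveToCameraRoll]) { /* saved */ }
if ([activityTypes containsObject:UIActivityTypeCopyToPasteboard]) { /* copied */ }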
III. Miscellaneous
1. (1) Checking the system version
- (BOOL)isSystemVersionOk {
    // ReplayKit needs iOS 9.0 or later
    return [[UIDevice currentDevice].systemVersion floatValue] >= 9.0;
}
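Parsing the version string as a float works here but is easy to get wrong in general; with Xcode 9 or later, an @available check is the safer sketch:

- (BOOL)isSystemVersionOk {
    if (@available(iOS 9.0, *)) {
        return YES;
    }
    return NO;
}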
(2) Detecting the simulator
#if TARGET_IPHONE_SIMULATOR
#define SIMULATOR 1
#elif TARGET_OS_IPHONE
#define SIMULATOR 0
#endif
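With those two helpers, the recording entry point can be gated up front, e.g.:

if (SIMULATOR || ![self isSystemVersionOk] || ![RPScreenRecorder sharedRecorder].isAvailable) {
    NSLog(@"screen recording unavailable on this device");
    return;
}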
2. Working with the captured stream
(1) Converting a video frame's CVPixelBufferRef to a UIImage (in color)
- (UIImage *)pixelBuffer2Image:(CVPixelBufferRef)pixelBuffer {
    CIImage *coreImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
    // force CPU (software) rendering
    CIContext *context = [CIContext contextWithOptions:@{kCIContextUseSoftwareRenderer : @YES}];
    CGImageRef cgimg = [context createCGImage:coreImage fromRect:[coreImage extent]];
    UIImage *image = [UIImage imageWithCGImage:cgimg];
    CGImageRelease(cgimg);
    return image;
}
3. Converting a video frame to an image (grayscale; I tested with the 420f format — it comes out gray because only plane 0, the luma plane of the bi-planar buffer, is drawn)
- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    // get the CVImageBuffer (pixel buffer) backing this sample buffer
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (imageBuffer == nil) {
        return nil;
    }
    // lock the pixel buffer's base address before touching its memory
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    // plane 0 of a 420f buffer is the Y (luma) plane, hence the grayscale result
    void *baseAddress = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);
    // width and height of the pixel buffer
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    if (width == 0 || height == 0) {
        // don't leave the buffer locked on the early-return path
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        return nil;
    }
    // create a device-dependent gray color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceGray();
    // create a bitmap graphics context over the luma plane's bytes
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                 bytesPerRow, colorSpace, kCGImageAlphaNone);
    // create a Quartz image from the pixel data in the bitmap context
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    // unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    // release the context and color space
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    // wrap the Quartz image in a UIImage
    UIImage *image = [UIImage imageWithCGImage:quartzImage];
    // release the Quartz image
    CGImageRelease(quartzImage);
    return image;
}
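For reference, both helpers above would be driven from the capture handler, roughly like this (a sketch; the bufferType check matters because audio buffers carry no pixels):

[[RPScreenRecorder sharedRecorder] startCaptureWithHandler:^(CMSampleBufferRef sampleBuffer, RPSampleBufferType bufferType, NSError * _Nullable error) {
    if (error || bufferType != RPSampleBufferTypeVideo) {
        return;
    }
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    UIImage *colorImage = [self pixelBuffer2Image:pixelBuffer];      // color, via Core Image
    UIImage *grayImage = [self imageFromSampleBuffer:sampleBuffer];  // grayscale, luma plane only
    // ...use the frames, e.g. feed them to a live-streaming pipeline
} completionHandler:nil];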
4. Writing the stream out as an MP4
- (void)startScreenRecording {
    self.screenRecorder = [RPScreenRecorder sharedRecorder];
    if (self.screenRecorder.isRecording) {
        return;
    }
    NSError *error = nil;
    // write to Documents/<random>.mp4
    NSArray *pathDocuments = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *outputURL = pathDocuments[0];
    NSString *videoOutPath = [[outputURL stringByAppendingPathComponent:[NSString stringWithFormat:@"%u", arc4random() % 1000]] stringByAppendingPathExtension:@"mp4"];
    self.assetWriter = [AVAssetWriter assetWriterWithURL:[NSURL fileURLWithPath:videoOutPath] fileType:AVFileTypeMPEG4 error:&error];
    NSDictionary *compressionProperties = @{
        AVVideoAverageBitRateKey : @(2000 * 1000), // ~2 Mbps
    };
    // note: these are point sizes; multiply by [UIScreen mainScreen].scale for full pixel resolution
    NSNumber *width = @(self.view.frame.size.width);
    NSNumber *height = @(self.view.frame.size.height);
    NSDictionary *videoSettings = @{
        AVVideoCompressionPropertiesKey : compressionProperties,
        AVVideoCodecKey : AVVideoCodecH264,
        AVVideoWidthKey : width,
        AVVideoHeightKey : height
    };
    self.assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
    // the input is fed in real time, so the writer must not stall waiting for data
    self.assetWriterInput.expectsMediaDataInRealTime = YES;
    if ([self.assetWriter canAddInput:self.assetWriterInput]) {
        [self.assetWriter addInput:self.assetWriterInput];
    }
    [self.screenRecorder startCaptureWithHandler:^(CMSampleBufferRef _Nonnull sampleBuffer, RPSampleBufferType bufferType, NSError * _Nullable error) {
        if (!CMSampleBufferDataIsReady(sampleBuffer)) {
            return;
        }
        if (self.assetWriter.status == AVAssetWriterStatusUnknown && bufferType == RPSampleBufferTypeVideo) {
            // drop frames with a bogus (negative) timestamp BEFORE starting the writer,
            // otherwise the session would never be started on the following frames
            CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
            if (CMTimeGetSeconds(pts) < 0) {
                return;
            }
            [self.assetWriter startWriting];
            [self.assetWriter startSessionAtSourceTime:pts];
        }
        if (self.assetWriter.status == AVAssetWriterStatusFailed) {
            NSLog(@"An error occurred: %@", self.assetWriter.error);
            [[RPScreenRecorder sharedRecorder] stopCaptureWithHandler:^(NSError * _Nullable error) {}];
            return;
        }
        if (bufferType == RPSampleBufferTypeVideo) {
            if (self.assetWriterInput.isReadyForMoreMediaData) {
                // append the video sample buffer to the writer input
                [self.assetWriterInput appendSampleBuffer:sampleBuffer];
            } else {
                NSLog(@"Not ready for video");
            }
        }
    } completionHandler:^(NSError * _Nullable error) {
        if (!error) {
            NSLog(@"Recording started successfully.");
        } else {
            NSLog(@"Recording start error: %@", error);
        }
    }];
}
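The writer has to be finished explicitly when capture stops, or the MP4 is left unplayable. A matching stop method, under the same property names as above, might look like this:

- (void)stopScreenRecording {
    [self.screenRecorder stopCaptureWithHandler:^(NSError * _Nullable error) {
        if (error) {
            NSLog(@"stop capture error: %@", error);
        }
        // close out the input and let the writer finalize the file's metadata
        [self.assetWriterInput markAsFinished];
        [self.assetWriter finishWritingWithCompletionHandler:^{
            NSLog(@"finished writing: %@", self.assetWriter.outputURL);
        }];
    }];
}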
I'm a beginner and there's still plenty I don't understand; I've only written down what I've figured out so far. Corrections are very welcome.