Scaling camera-captured NV12 data in iOS development:
For an explanation of YUV formats and of stride (pitch), see: https://www.jianshu.com/p/eace8c08b169
The CVImageBufferRef inside a camera-captured CMSampleBufferRef holds NV12 data. To scale it or add a watermark, you first convert it to I420, scale or watermark the I420 data, convert the result back to NV12, wrap the NV12 data in a CVImageBufferRef, and finally wrap that CVImageBufferRef in a CMSampleBufferRef.
Below is the conversion code, built on the libyuv library:
Pay special attention: buffers must be sized using the stride, not the width. Many samples online compute the size as width × height × 1.5, which only works when the target width is a multiple of the alignment (16 before iOS 13, 64 on iOS 13 and later). For example, scaling 886x1920 down to 720x1280 can use the width directly on iOS 12 and earlier, because 720 is a multiple of 16 (on iOS 13, 720 is not a multiple of 64, so the stride would round up to 768). Scaling to 710x1280 cannot use the width directly even pre-iOS 13; the stride must be rounded up to (710 / 16 + 1) × 16 = 720, as the code below does. In short, iOS 13 uses 64-byte alignment (the stride is a multiple of 64), while earlier versions use 16-byte alignment.
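Incidentally, the round-up step can be written as a tiny helper; this is just a sketch (align_up is my own name, it comes from neither libyuv nor the system SDK):

// Round width up to the next multiple of alignment (16 or 64 here);
// align_up(710, 16) == 720 and align_up(720, 16) == 720.
static inline int align_up(int width, int alignment) {
    return (width + alignment - 1) / alignment * alignment;
}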
After each conversion step the resulting YUV data can be dumped to a file (see the commented-out blocks in the code below), so you can open the dump in a YUV viewer to check whether the conversion succeeded.
YUV viewer download (Baidu Netdisk): https://pan.baidu.com/s/1A4Vt6NMedOT4ASVxiVmjcg extraction code: cyvb
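If FFmpeg happens to be installed on your Mac, ffplay can also display the dumped raw frames from the command line; pass the logged stride and height as the frame size, for example (assuming a 720-byte stride and 1280 rows):

ffplay -f rawvideo -pixel_format yuv420p -video_size 720x1280 scalei420.yuv

For the toNv12.yuv dump, use -pixel_format nv12 instead.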
1. Scale a CMSampleBufferRef to the specified size and return a CVPixelBufferRef:
/**
 * Downscale an NV12 (420f) stream buffer.
 * The sample buffer carries NV12 data, which is not scaled directly here;
 * it is first converted to I420, scaled, and then converted back to NV12.
 * A separate function below wraps the result in a sample buffer again.
 * Note: the target width and height must be even; odd values are bumped to even internally.
 */
+ (CVPixelBufferRef)convertNV12ToI420Scale:(CMSampleBufferRef)sampleBufRef scaleSize:(CGSize)scaleSize {
    int scale_width = scaleSize.width;
    int scale_height = scaleSize.height;
    // Make sure the target width and height are even
    if (scale_width % 2 != 0) {
        scale_width++;
    }
    if (scale_height % 2 != 0) {
        scale_height++;
    }
    // CVPixelBufferRef is a type alias of CVImageBufferRef; the two are used interchangeably.
    // Get the image buffer from the CMSampleBuffer
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBufRef);
    if (!pixelBuffer) {
        return NULL;
    }
    // Lock the base address before touching the pixel data
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    // Image width in pixels
    size_t buffer_width = CVPixelBufferGetWidth(pixelBuffer);
    // Image height in pixels
    size_t buffer_height = CVPixelBufferGetHeight(pixelBuffer);
    // Base address of the Y plane
    uint8_t *src_y_frame = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
    // Base address of the interleaved UV plane
    uint8_t *src_uv_frame = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
    // Y stride
    size_t plane1_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
    // UV stride
    size_t plane2_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
    // Y plane height
    size_t plane1_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
    // UV plane height
    size_t plane2_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1);
    // Y plane size in bytes
    size_t plane1_size = plane1_stride * plane1_height;
    // UV plane size in bytes
    size_t plane2_size = plane2_stride * plane2_height;
    // Total buffer size (note: stride-based, not width-based)
    size_t frame_size = plane1_size + plane2_size;
    size_t buffer_u_stride = plane2_stride / 2;
    size_t buffer_v_stride = plane2_stride / 2;
    // 1. Convert NV12 to I420
    // Allocate frame_size bytes to hold the converted I420 data
    // (assumes plane1_stride == plane2_stride, which holds for camera NV12 buffers)
    uint8* buffer_frame = (unsigned char *)malloc(frame_size);
    uint8* buffer_u = buffer_frame + plane1_size;
    uint8* buffer_v = buffer_u + plane1_size / 4;
    libyuv::NV12ToI420(/*const uint8 *src_y*/ src_y_frame,
                       /*int src_stride_y*/ (int)plane1_stride,
                       /*const uint8 *src_uv*/ src_uv_frame,
                       /*int src_stride_uv*/ (int)plane2_stride,
                       /*uint8 *dst_y*/ buffer_frame,
                       /*int dst_stride_y*/ (int)plane1_stride,
                       /*uint8 *dst_u*/ buffer_u,
                       /*int dst_stride_u*/ (int)buffer_u_stride,
                       /*uint8 *dst_v*/ buffer_v,
                       /*int dst_stride_v*/ (int)buffer_v_stride,
                       /*int width*/ (int)buffer_width,
                       /*int height*/ (int)buffer_height);
//    static NSInteger count = 0;
//    count++;
//    if (count == 1) {
//        NSData *dstData = [NSData dataWithBytes:buffer_frame length:frame_size];
//        NSString *dstPath = [NSString stringWithFormat:@"%@%@", NSHomeDirectory(), @"/Documents/i420.yuv"];
//        if ([[NSFileManager defaultManager] fileExistsAtPath:dstPath]) {
//            [[NSFileManager defaultManager] removeItemAtPath:dstPath error:nil];
//        }
//        [dstData writeToFile:dstPath atomically:NO];
//        NSLog(@"============buffer_size:%@ x %@, stride_width:%@, scale_size:%@ x %@", @(buffer_width), @(buffer_height), @(plane1_stride), @(scale_width), @(scale_height));
//    }
    // 2. Scale the I420 data to the target size
    int scale_plane1_stride = scale_width;
    // The stride must be a multiple of the alignment (byte-alignment requirement);
    // 16-byte alignment before iOS 13, 64-byte alignment on iOS 13 and later
    int stride_length = ([UIDevice currentDevice].systemVersion.floatValue >= 13.0) ? 64 : 16;
    if ((scale_width % stride_length) != 0) {
        scale_plane1_stride = (scale_width / stride_length + 1) * stride_length;
    }
    int scale_plane2_stride = scale_plane1_stride;
    int scale_plane1_height = scale_height;
    int scale_plane2_height = scale_height / 2;
    int scale_plane1_size = scale_plane1_stride * scale_plane1_height;
    int scale_plane2_size = scale_plane2_stride * scale_plane2_height;
    int scale_frame_size = scale_plane1_size + scale_plane2_size;
    uint8* scale_buffer = (unsigned char *)malloc(scale_frame_size);
    uint8* scale_buffer_u = scale_buffer + scale_plane1_size;
    uint8* scale_buffer_v = scale_buffer_u + scale_plane1_size / 4;
    libyuv::I420Scale(/*const uint8 *src_y*/ buffer_frame,
                      /*int src_stride_y*/ (int)plane1_stride,
                      /*const uint8 *src_u*/ buffer_u,
                      /*int src_stride_u*/ (int)plane2_stride >> 1,
                      /*const uint8 *src_v*/ buffer_v,
                      /*int src_stride_v*/ (int)plane2_stride >> 1,
                      /*int src_width*/ (int)buffer_width,
                      /*int src_height*/ (int)buffer_height,
                      /*uint8 *dst_y*/ scale_buffer,
                      /*int dst_stride_y*/ scale_plane1_stride,
                      /*uint8 *dst_u*/ scale_buffer_u,
                      /*int dst_stride_u*/ scale_plane1_stride >> 1,
                      /*uint8 *dst_v*/ scale_buffer_v,
                      /*int dst_stride_v*/ scale_plane1_stride >> 1,
                      /*int dst_width*/ scale_width,
                      /*int dst_height*/ scale_height,
                      /*enum FilterMode filtering*/ libyuv::kFilterNone);
//    if (count == 1) {
//        NSData *dstData = [NSData dataWithBytes:scale_buffer length:scale_frame_size];
//        NSString *dstPath = [NSString stringWithFormat:@"%@%@", NSHomeDirectory(), @"/Documents/scalei420.yuv"];
//        if ([[NSFileManager defaultManager] fileExistsAtPath:dstPath]) {
//            [[NSFileManager defaultManager] removeItemAtPath:dstPath error:nil];
//        }
//        [dstData writeToFile:dstPath atomically:NO];
//    }
    // 3. Convert the scaled I420 data back to NV12
    int nv12_plane1_stride = scale_plane1_stride;
    int nv12_width = scale_width;
    int nv12_height = scale_height;
    int nv12_frame_size = scale_frame_size;
    uint8 *nv12_dst_y = (uint8 *)malloc(nv12_frame_size);
    uint8 *nv12_dst_uv = nv12_dst_y + nv12_plane1_stride * nv12_height;
    libyuv::I420ToNV12(/*const uint8 *src_y*/ scale_buffer,
                       /*int src_stride_y*/ scale_plane1_stride,
                       /*const uint8 *src_u*/ scale_buffer_u,
                       /*int src_stride_u*/ scale_plane1_stride >> 1,
                       /*const uint8 *src_v*/ scale_buffer_v,
                       /*int src_stride_v*/ scale_plane1_stride >> 1,
                       /*uint8 *dst_y*/ nv12_dst_y,
                       /*int dst_stride_y*/ nv12_plane1_stride,
                       /*uint8 *dst_uv*/ nv12_dst_uv,
                       /*int dst_stride_uv*/ nv12_plane1_stride,
                       /*int width*/ nv12_width,
                       /*int height*/ nv12_height);
//    if (count == 1) {
//        NSData *dstData = [NSData dataWithBytes:nv12_dst_y length:nv12_frame_size];
//        NSString *dstPath = [NSString stringWithFormat:@"%@%@", NSHomeDirectory(), @"/Documents/toNv12.yuv"];
//        if ([[NSFileManager defaultManager] fileExistsAtPath:dstPath]) {
//            [[NSFileManager defaultManager] removeItemAtPath:dstPath error:nil];
//        }
//        [dstData writeToFile:dstPath atomically:NO];
//    }
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    free(buffer_frame);
    free(scale_buffer);
    // 4. Wrap the NV12 data in a CVPixelBufferRef
    NSDictionary *pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}};
    CVPixelBufferRef dstPixelBuffer = NULL;
    CVReturn result = CVPixelBufferCreate(kCFAllocatorDefault,
                                          nv12_width, nv12_height, kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
                                          (__bridge CFDictionaryRef)pixelAttributes, &dstPixelBuffer);
    // Check the result before writing into the buffer
    if (result != kCVReturnSuccess) {
        NSLog(@"Unable to create cvpixelbuffer %d", result);
        free(nv12_dst_y);
        return NULL;
    }
    CVPixelBufferLockBaseAddress(dstPixelBuffer, 0);
    // Copy row by row in case the destination stride differs from nv12_plane1_stride
    uint8_t *yDstPlane = (uint8 *)CVPixelBufferGetBaseAddressOfPlane(dstPixelBuffer, 0);
    size_t yDstStride = CVPixelBufferGetBytesPerRowOfPlane(dstPixelBuffer, 0);
    for (int row = 0; row < nv12_height; row++) {
        memcpy(yDstPlane + row * yDstStride, nv12_dst_y + row * nv12_plane1_stride, MIN((size_t)nv12_plane1_stride, yDstStride));
    }
    uint8_t *uvDstPlane = (uint8 *)CVPixelBufferGetBaseAddressOfPlane(dstPixelBuffer, 1);
    size_t uvDstStride = CVPixelBufferGetBytesPerRowOfPlane(dstPixelBuffer, 1);
    for (int row = 0; row < nv12_height / 2; row++) {
        memcpy(uvDstPlane + row * uvDstStride, nv12_dst_uv + row * nv12_plane1_stride, MIN((size_t)nv12_plane1_stride, uvDstStride));
    }
    CVPixelBufferUnlockBaseAddress(dstPixelBuffer, 0);
    free(nv12_dst_y);
    // The returned pixel buffer is +1 retained; pixelBufferToSampleBuffer: below releases it
    return dstPixelBuffer;
}
2. Convert a CVPixelBufferRef to a CMSampleBufferRef:
// Wrap an NV12 pixel buffer back into a sample buffer
+ (CMSampleBufferRef)pixelBufferToSampleBuffer:(CVPixelBufferRef)pixelBuffer {
    CMSampleBufferRef sampleBuffer = NULL;
    // Use the current wall-clock time as the presentation timestamp (nanosecond timescale)
    CMTime frameTime = CMTimeMakeWithSeconds([[NSDate date] timeIntervalSince1970], 1000000000);
    // {duration, presentationTimeStamp, decodeTimeStamp}
    CMSampleTimingInfo timing = {kCMTimeInvalid, frameTime, kCMTimeInvalid};
    CMVideoFormatDescriptionRef videoInfo = NULL;
    CMVideoFormatDescriptionCreateForImageBuffer(NULL, pixelBuffer, &videoInfo);
    OSStatus status = CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, videoInfo, &timing, &sampleBuffer);
    if (status != noErr) {
        NSLog(@"Failed to create sample buffer with error %d.", (int)status);
    }
    // Balances the +1 retain on the pixel buffer returned by convertNV12ToI420Scale:
    CVPixelBufferRelease(pixelBuffer);
    if (videoInfo) {
        CFRelease(videoInfo);
    }
    return sampleBuffer;
}
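Finally, a minimal call-site sketch for reference. It assumes the two methods above live on a hypothetical NV12Scaler class, and that the AVCaptureVideoDataOutput is configured to deliver kCVPixelFormatType_420YpCbCr8BiPlanarFullRange (420f) buffers:

// Hypothetical call site: scale each captured frame to 720x1280
- (void)captureOutput:(AVCaptureOutput *)output
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    CVPixelBufferRef scaled = [NV12Scaler convertNV12ToI420Scale:sampleBuffer
                                                       scaleSize:CGSizeMake(720, 1280)];
    if (!scaled) {
        return;
    }
    // pixelBufferToSampleBuffer: releases the scaled pixel buffer internally
    CMSampleBufferRef scaledSample = [NV12Scaler pixelBufferToSampleBuffer:scaled];
    if (scaledSample) {
        // ... hand the frame to an encoder or preview here ...
        CFRelease(scaledSample);
    }
}

Note the ownership convention: convertNV12ToI420Scale: returns a +1 pixel buffer and pixelBufferToSampleBuffer: releases it, so the call site only needs to release the resulting sample buffer.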