Scenario: you receive a pixelBuffer from an SDK or the camera and need to run your own processing on it (beauty filters, stickers, and so on).
// SNImagePixelBufferInput.h
#import <GPUImage/GPUImage.h>

NS_ASSUME_NONNULL_BEGIN

/// Feeds externally supplied CVPixelBuffers (e.g. frames from an SDK or the camera) into a GPUImage filter chain.
@interface SNImagePixelBufferInput : GPUImageOutput

- (void)processPixelBuffer:(CVPixelBufferRef)pixelBuffer time:(CMTime)time;

@end

NS_ASSUME_NONNULL_END
#import "SNImagePixelBufferInput.h"
@interface SNImagePixelBufferInput ()
@property (nonatomic) CVOpenGLESTextureRef textureRef;
@property (nonatomic, strong) dispatch_semaphore_t frameRenderingSemaphore;
@end
@implementation SNImagePixelBufferInput
- (instancetype)init {
if (self = [super init]) {
self.frameRenderingSemaphore = dispatch_semaphore_create(1);
}
return self;
}
- (void)dealloc {
    // Release the last wrapped texture on the video-processing queue so the GL context is current.
    runSynchronouslyOnVideoProcessingQueue(^{
        [GPUImageContext useImageProcessingContext];
        if (self.textureRef) {
            CFRelease(self.textureRef);
        }
    });
}
- (void)processPixelBuffer:(CVPixelBufferRef)pixelBuffer time:(CMTime)time {
    // Drop this frame if the previous one is still being rendered.
    if (dispatch_semaphore_wait(self.frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0) {
        return;
    }
    // Keep the buffer alive until the asynchronous block has consumed it.
    CFRetain(pixelBuffer);
    runAsynchronouslyOnVideoProcessingQueue(^{
        [self processVideoPixelBuffer:pixelBuffer time:time];
        CFRelease(pixelBuffer);
        dispatch_semaphore_signal(self.frameRenderingSemaphore);
    });
}
- (void)processVideoPixelBuffer:(CVPixelBufferRef)pixelBuffer time:(CMTime)time
{
    NSAssert(CVPixelBufferGetPixelFormatType(pixelBuffer) == kCVPixelFormatType_32BGRA,
             @"%@: only kCVPixelFormatType_32BGRA is supported currently.", self);

    size_t bufferWidth = CVPixelBufferGetWidth(pixelBuffer);
    size_t bufferHeight = CVPixelBufferGetHeight(pixelBuffer);

    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    [GPUImageContext useImageProcessingContext];

    // Release the texture created for the previous frame before wrapping the new one.
    if (self.textureRef) {
        CFRelease(self.textureRef);
    }

    // Wrap the pixel buffer in an OpenGL ES texture through the texture cache (no CPU copy).
    CVOpenGLESTextureRef textureRef = NULL;
    CVReturn result = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                                   [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache],
                                                                   pixelBuffer,
                                                                   NULL,
                                                                   GL_TEXTURE_2D,
                                                                   GL_RGBA,
                                                                   (GLsizei)bufferWidth,
                                                                   (GLsizei)bufferHeight,
                                                                   GL_BGRA,
                                                                   GL_UNSIGNED_BYTE,
                                                                   0,
                                                                   &textureRef);
    NSAssert(result == kCVReturnSuccess, @"CVOpenGLESTextureCacheCreateTextureFromImage error: %@", @(result));

    if (result == kCVReturnSuccess && textureRef) {
        self.textureRef = textureRef;

        glActiveTexture(GL_TEXTURE4);
        glBindTexture(CVOpenGLESTextureGetTarget(textureRef), CVOpenGLESTextureGetName(textureRef));
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

        // Hand the texture to the filter chain by wrapping it in a framebuffer whose texture is overridden.
        outputFramebuffer = [[GPUImageFramebuffer alloc] initWithSize:CGSizeMake(bufferWidth, bufferHeight)
                                                    overriddenTexture:CVOpenGLESTextureGetName(textureRef)];

        for (id<GPUImageInput> currentTarget in targets) {
            if ([currentTarget enabled]) {
                NSInteger indexOfObject = [targets indexOfObject:currentTarget];
                NSInteger targetTextureIndex = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];
                if (currentTarget != self.targetToIgnoreForUpdates) {
                    [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:targetTextureIndex];
                    [currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];
                    [currentTarget newFrameReadyAtTime:time atIndex:targetTextureIndex];
                } else {
                    [currentTarget setInputFramebuffer:outputFramebuffer atIndex:targetTextureIndex];
                }
            }
        }
    }

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);

    /*
     Alternative approach: copy the pixel data into a cached framebuffer with glTexImage2D
     (an extra copy, but no dependency on the CoreVideo texture cache):

     CVPixelBufferLockBaseAddress(pixelBuffer, 0);
     int bytesPerRow = (int)CVPixelBufferGetBytesPerRow(pixelBuffer);
     outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(bytesPerRow / 4, bufferHeight) onlyTexture:YES];
     [outputFramebuffer activateFramebuffer];
     glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
     // Using BGRA extension to pull in video frame data directly
     // The use of bytesPerRow / 4 accounts for a display glitch present in preview video frames when using the photo preset on the camera
     glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bytesPerRow / 4, (int)bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(pixelBuffer));
     [self updateTargetsForVideoCameraUsingCacheTextureAtWidth:bytesPerRow / 4 height:(int)bufferHeight time:time];
     CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
     */
}
- (void)updateTargetsForVideoCameraUsingCacheTextureAtWidth:(int)bufferWidth height:(int)bufferHeight time:(CMTime)currentTime
{
    // First, update all the framebuffers in the targets.
    // Unlike GPUImageVideoCamera, this class keeps no rotation state, so pass kGPUImageNoRotation
    // where the original camera code used its outputRotation ivar.
    for (id<GPUImageInput> currentTarget in targets)
    {
        if ([currentTarget enabled])
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];

            if (currentTarget != self.targetToIgnoreForUpdates)
            {
                [currentTarget setInputRotation:kGPUImageNoRotation atIndex:textureIndexOfTarget];
                [currentTarget setInputSize:CGSizeMake(bufferWidth, bufferHeight) atIndex:textureIndexOfTarget];

                if ([currentTarget wantsMonochromeInput])
                {
                    [currentTarget setCurrentlyReceivingMonochromeInput:YES];
                    // TODO: Replace optimization for monochrome output
                    [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
                }
                else
                {
                    [currentTarget setCurrentlyReceivingMonochromeInput:NO];
                    [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
                }
            }
            else
            {
                [currentTarget setInputRotation:kGPUImageNoRotation atIndex:textureIndexOfTarget];
                [currentTarget setInputFramebuffer:outputFramebuffer atIndex:textureIndexOfTarget];
            }
        }
    }

    // Then release our hold on the local framebuffer to send it back to the cache as soon as it's no longer needed.
    [outputFramebuffer unlock];
    outputFramebuffer = nil;

    // Finally, trigger rendering as needed.
    for (id<GPUImageInput> currentTarget in targets)
    {
        if ([currentTarget enabled])
        {
            NSInteger indexOfObject = [targets indexOfObject:currentTarget];
            NSInteger textureIndexOfTarget = [[targetTextureIndices objectAtIndex:indexOfObject] integerValue];

            if (currentTarget != self.targetToIgnoreForUpdates)
            {
                [currentTarget newFrameReadyAtTime:currentTime atIndex:textureIndexOfTarget];
            }
        }
    }
}
@end
Note: the incoming pixelBuffer must be BGRA (kCVPixelFormatType_32BGRA). If your frames arrive in another format, convert them to BGRA first, for example as sketched below.
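If you control the capture side, the easiest route is to request BGRA directly (for AVCaptureVideoDataOutput, set kCVPixelBufferPixelFormatTypeKey to kCVPixelFormatType_32BGRA in videoSettings). When an SDK hands you a fixed format such as NV12, one possible conversion is a Core Image render into a freshly created BGRA buffer. The helper below is only a sketch; createBGRABufferFrom: and self.ciContext are illustrative names, not part of the class above.

// Sketch only: convert a non-BGRA CVPixelBuffer (e.g. NV12) to BGRA with Core Image.
// Requires <CoreImage/CoreImage.h>; self.ciContext is an assumed, reusable CIContext.
- (CVPixelBufferRef)createBGRABufferFrom:(CVPixelBufferRef)sourceBuffer {
    NSDictionary *attributes = @{ (id)kCVPixelBufferIOSurfacePropertiesKey : @{} };
    CVPixelBufferRef bgraBuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                          CVPixelBufferGetWidth(sourceBuffer),
                                          CVPixelBufferGetHeight(sourceBuffer),
                                          kCVPixelFormatType_32BGRA,
                                          (__bridge CFDictionaryRef)attributes,
                                          &bgraBuffer);
    if (status != kCVReturnSuccess) {
        return NULL;
    }
    CIImage *image = [CIImage imageWithCVPixelBuffer:sourceBuffer];
    [self.ciContext render:image toCVPixelBuffer:bgraBuffer];
    return bgraBuffer; // Caller releases with CVPixelBufferRelease().
}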
Usage (the class and method names here match the SNImagePixelBufferInput defined above):

self.pixelBufferFilter = [[SNImagePixelBufferInput alloc] init];
self.outputFilter = [[GPUImageFilter alloc] init];
self.outputFilter.frameProcessingCompletionBlock = ^(GPUImageOutput *output, CMTime time) {
    // output.framebufferForOutput.pixelBuffer is the pixelBuffer after the filter chain has run.
};
[self.pixelBufferFilter addTarget:self.outputFilter];
[self.pixelBufferFilter processPixelBuffer:frame time:time];
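In a real pipeline, your beauty or sticker filters sit between pixelBufferFilter and outputFilter, chained with addTarget:. As for where frame and time come from, here is a hedged sketch for the camera case, using the standard AVCaptureVideoDataOutputSampleBufferDelegate callback; an SDK source is analogous (call processPixelBuffer:time: from whatever frame callback the SDK provides).

// Sketch, assuming this object is the sampleBufferDelegate of an AVCaptureVideoDataOutput
// configured to deliver kCVPixelFormatType_32BGRA frames.
- (void)captureOutput:(AVCaptureOutput *)output
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CMTime presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    [self.pixelBufferFilter processPixelBuffer:pixelBuffer time:presentationTime];
}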