滤镜接入过程
1.检查SDK是否可用;
2.初始化句柄,设置参数;
3.将选中的滤镜model设置到相机数据中;
4.输出。
商汤提供了两种校验方式:1. 网络校验——商汤提供的 Demo 使用的就是网络校验方式;2. 本地校验——即本文介绍的方式,由商汤相关人员提供一个 license 文件。本地校验代码如下:
/// Performs local (offline) license authentication using the bundled SENSEME.lic.
/// Loads the license file into self.licenseData and validates its activation code.
/// @return YES if the license file was found, read, and its activation code is valid.
- (BOOL)checkLicenseFromLocal {
    NSString *licensePath = [[NSBundle mainBundle] pathForResource:@"SENSEME" ofType:@"lic"];
    // Guard: a missing file would silently pass nil data to the checker.
    if (licensePath.length == 0) {
        NSLog(@"SENSEME.lic not found in main bundle");
        return NO;
    }
    self.licenseData = [NSData dataWithContentsOfFile:licensePath];
    if (self.licenseData.length == 0) {
        NSLog(@"Failed to read license data from %@", licensePath);
        return NO;
    }
    return [self checkActiveCodeWithData:self.licenseData];
}
/// Validates (or generates and caches) a SenseTime activation code for the given license.
/// The generated code is persisted in NSUserDefaults so later launches can re-check
/// without regenerating.
/// @param dataLicense Raw contents of the .lic file; must be non-empty.
/// @return YES if a cached or freshly generated activation code is valid.
- (BOOL)checkActiveCodeWithData:(NSData *)dataLicense
{
    if (dataLicense.length == 0) {
        return NO;
    }
    NSString *strKeyActiveCode = @"ACTIVE_CODE_ONLINE";
    NSUserDefaults *userDefaults = [NSUserDefaults standardUserDefaults];
    NSString *strActiveCode = [userDefaults objectForKey:strKeyActiveCode];
    // First try the cached activation code, if any.
    // Fix: the SDK expects the UTF-8 *byte* length; -[NSString length] counts
    // UTF-16 code units and diverges for non-ASCII content.
    if (strActiveCode.length > 0) {
        st_result_t iRet = st_mobile_check_activecode_from_buffer(
            [dataLicense bytes],
            (int)[dataLicense length],
            strActiveCode.UTF8String,
            (int)[strActiveCode lengthOfBytesUsingEncoding:NSUTF8StringEncoding]
        );
        if (ST_OK == iRet) {
            return YES;
        }
    }
    // No valid cached code: generate a fresh one from the license buffer.
    char active_code[1024] = {0};
    int active_code_len = (int)sizeof(active_code);
    st_result_t iRet = st_mobile_generate_activecode_from_buffer(
        [dataLicense bytes],
        (int)[dataLicense length],
        active_code,
        &active_code_len
    );
    // Fix: only read the output buffer on success — previously the buffer was
    // converted to a string even when generation failed (uninitialized memory),
    // and -initWithUTF8String: assumed NUL-termination the SDK does not promise.
    if (iRet != ST_OK) {
        return NO;
    }
    strActiveCode = [[NSString alloc] initWithBytes:active_code
                                             length:(NSUInteger)active_code_len
                                           encoding:NSUTF8StringEncoding];
    if (strActiveCode.length == 0) {
        return NO;
    }
    [userDefaults setObject:strActiveCode forKey:strKeyActiveCode];
    [userDefaults synchronize];
    return YES;
}
初始化商汤,设置参数
/// Instance variables (filter handle, GL textures, and detection results).
{
st_handle_t _hFilter; // SenseTime GL filter handle
CVOpenGLESTextureRef _cvTextureFilter; // CV texture wrapping the filter output buffer
CVPixelBufferRef _cvFilterBuffer; // pixel buffer that receives the filtered frame
GLuint _textureOriginInput; // GL texture of the raw camera frame (input)
GLuint _textureFilterOutput; // GL texture the filter renders into (output)
CVOpenGLESTextureRef _cvTextureOrigin; // CV texture wrapping the incoming camera frame
CVOpenGLESTextureCacheRef _cvTextureCache; // texture cache bound to the GL context
st_mobile_animal_face_t *_detectResult1; // animal-face detection result; NOTE(review): never written in this chunk — confirm it is populated elsewhere
st_mobile_human_action_t _detectResult; // human-action detection result; NOTE(review): appears unused here — confirm
}
////////////////////////////////////////////////
@property (nonatomic, strong) NSData *licenseData; // raw bytes of the SENSEME.lic license file
@property (nonatomic, readwrite, assign) BOOL bFilter; // whether filtering is currently enabled
@property (nonatomic, readwrite, assign) CGFloat imageWidth; // current frame width, including extended (padding) pixels
@property (nonatomic, readwrite, assign) CGFloat imageHeight; // current frame height, including extended (padding) pixels
@property (nonatomic) dispatch_queue_t renderQueue; // NOTE(review): declared but not referenced in this chunk — confirm usage
@property (nonatomic, assign) double lastTimeAttrDetected; // CFAbsoluteTime of the last attribute-detection pass
@property (nonatomic , assign) int iBufferedCount; // frames currently in flight; used as a backpressure guard
@property (nonatomic, strong) EAGLContext *glContext; // OpenGL ES context used for all filter rendering
////////////////////////////////////////////////
/// Sets up the camera preview and GL context, creates the result texture and
/// texture cache, then validates the SenseTime license before creating handles.
- (void)initResourceAndStartPreview
{
    ///ST_MOBILE: the EAGLContext must be current before any GL setup below.
    [self setupCameraAndPreview];
    [EAGLContext setCurrentContext:self.glContext];
    // Create the result texture and the texture cache bound to this context.
    CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, self.glContext, NULL, &_cvTextureCache);
    if (err) {
        NSLog(@"CVOpenGLESTextureCacheCreate %d" , err);
    }
    [self initResultTexture];
    ///ST_MOBILE: the license must be validated before creating SDK handles.
    // Fix: self.licenseData may never have been loaded; fall back to the
    // bundled license file instead of validating nil data.
    BOOL licenseOK = self.licenseData != nil
        ? [self checkActiveCodeWithData:self.licenseData]
        : [self checkLicenseFromLocal];
    if (licenseOK) {
        ///ST_MOBILE: create the SDK handles.
        [self setupHandle];
    } else {
        UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"错误提示" message:@"使用 license 文件生成激活码时失败,可能是授权文件过期。" delegate:nil cancelButtonTitle:@"好的" otherButtonTitles:nil, nil];
        [alert show];
    }
}
/// 项目中是在七牛云的基础上做的滤镜, 所以直接在七牛云回调的相机数据中做的处理,然后返回, 如果没有现成的相机数据可以参考商汤Demo中对相机的封装;
/// Creates the OpenGL ES 2 context used for all filter rendering.
/// (The camera itself is provided by the Qiniu streaming session.)
- (void)setupCameraAndPreview {
    EAGLContext *renderingContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
    self.glContext = renderingContext;
}
/// Creates the SenseTime GL filter handle and applies default parameters.
- (void)setupHandle {
    // Create the filter handle.
    st_result_t iRet = st_mobile_gl_filter_create(&_hFilter);
    if (ST_OK != iRet || !_hFilter) {
        NSLog(@"st mobile gl filter create failed: %d", iRet);
        UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"错误提示" message:@"滤镜SDK初始化失败,可能是SDK权限过期或与绑定包名不符" delegate:nil cancelButtonTitle:@"好的" otherButtonTitles:nil, nil];
        [alert show];
        // Fix: bail out — the original fell through and called
        // st_mobile_gl_filter_set_param on a NULL handle.
        return;
    }
    // Filter strength is fixed at full strength (1.0) for now.
    iRet = st_mobile_gl_filter_set_param(_hFilter, ST_GL_FILTER_STRENGTH, 1.0);
    if (ST_OK != iRet) {
        // Consistency: use NSLog like the rest of this method (was NXLog).
        NSLog(@"st_mobile_gl_filter_set_param %d" , iRet);
    }
}
收到七牛云的回调
/// @abstract Callback delivering raw camera frames for filtering. Runs on the
/// camera output thread — expensive work here drops the streaming frame rate.
/// Applies the selected SenseTime filter when enabled and returns the filtered
/// pixel buffer; otherwise returns the original buffer unchanged.
- (CVPixelBufferRef)mediaStreamingSession:(PLMediaStreamingSession *)session cameraSourceDidGetPixelBuffer:(CVPixelBufferRef)pixelBuffer {
// NXLog(@"HTLivePushViewController");
// Backpressure guard: if two frames are already in flight, pass through untouched.
if (self.iBufferedCount >= 2) {
return pixelBuffer;
}
// Read this frame's geometry.
CVPixelBufferLockBaseAddress(pixelBuffer, 0);
int iBytesPerRow = (int)CVPixelBufferGetBytesPerRow(pixelBuffer);
int iWidth = (int)CVPixelBufferGetWidth(pixelBuffer);
int iHeight = (int)CVPixelBufferGetHeight(pixelBuffer);
size_t iTop , iBottom , iLeft , iRight;
CVPixelBufferGetExtendedPixels(pixelBuffer, &iLeft, &iRight, &iTop, &iBottom);
// Include padding (extended pixels) in the effective dimensions.
iWidth = iWidth + (int)iLeft + (int)iRight;
iHeight = iHeight + (int)iTop + (int)iBottom;
// NOTE(review): adding *pixel* counts to a *byte* count is only valid for
// 1-byte-per-pixel formats — confirm this is intended for the BGRA path.
iBytesPerRow = iBytesPerRow + (int)iLeft + (int)iRight;
st_mobile_human_action_t detectResult;
memset(&detectResult, 0, sizeof(st_mobile_human_action_t));
// Attribute detection (if needed) is throttled to at most once per second.
double dTimeNow = CFAbsoluteTimeGetCurrent();
BOOL isAttributeTime = (dTimeNow - self.lastTimeAttrDetected) >= 1.0;
if (isAttributeTime) {
self.lastTimeAttrDetected = dTimeNow;
}
int catFaceCount = -1;
self.iBufferedCount ++;
CFRetain(pixelBuffer);
// NOTE(review): catFaceCount is always -1 here, so the face-copy branch below
// never runs — this looks like leftover scaffolding from the SenseTime demo;
// confirm before removing.
__block st_mobile_human_action_t newDetectResult;
memset(&newDetectResult, 0, sizeof(st_mobile_human_action_t));
st_mobile_human_action_copy(&detectResult, &newDetectResult);
int faceCount = catFaceCount;
st_mobile_animal_face_t *newDetectResult1 = NULL;
if (faceCount > 0) {
newDetectResult1 = malloc(sizeof(st_mobile_animal_face_t) * faceCount);
memset(newDetectResult1, 0, sizeof(st_mobile_animal_face_t) * faceCount);
copyCatFace(_detectResult1, faceCount, newDetectResult1);
}
st_result_t iRet = ST_E_FAIL;
// Make the GL context current — must match the one used when initializing the SDK.
if ([EAGLContext currentContext] != self.glContext) {
[EAGLContext setCurrentContext:self.glContext];
}
// When the frame size changes, recreate the result texture at the new size.
if (iWidth != self.imageWidth || iHeight != self.imageHeight) {
[self releaseResultTexture];
self.imageWidth = iWidth;
self.imageHeight = iHeight;
[self initResultTexture];
}
// Wrap the camera frame in a GL texture (_textureOriginInput).
[self setupOriginTextureWithPixelBuffer:pixelBuffer];
GLuint textureResult = _textureOriginInput;
CVPixelBufferRef resultPixelBufffer = pixelBuffer;
///ST_MOBILE: filter processing below.
if (_bFilter && _hFilter) {
// NOTE(review): the style is re-applied every frame; applying it only when
// self.selectPath changes would likely be cheaper — confirm with SDK docs.
iRet = st_mobile_gl_filter_set_style(_hFilter, self.selectPath.UTF8String);
if (iRet != ST_OK) {
NSLog(@"st mobile filter set style failed: %d", iRet);
}
NSLog(@"resullt - %d , output - %d", textureResult, _textureFilterOutput);
// Render the filtered frame into _textureFilterOutput (backed by _cvFilterBuffer).
iRet = st_mobile_gl_filter_process_texture(_hFilter, textureResult, iWidth, iHeight, _textureFilterOutput);
if (ST_OK != iRet) {
NXLog(@"st_mobile_gl_filter_process_texture %d" , iRet);
}
textureResult = _textureFilterOutput;
resultPixelBufffer = _cvFilterBuffer;
}
// Release the (unused) copied detection results — balances the copies above.
st_mobile_human_action_delete(&newDetectResult);
if (faceCount > 0) {
freeCatFace(newDetectResult1, faceCount);
}
// [self.glPreview renderTexture:textureResult];
// Balance setupOriginTextureWithPixelBuffer:'s texture creation.
if (_cvTextureOrigin) {
CFRelease(_cvTextureOrigin);
_cvTextureOrigin = NULL;
}
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
CVOpenGLESTextureCacheFlush(_cvTextureCache, 0);
// Balances the CFRetain above.
CFRelease(pixelBuffer);
self.iBufferedCount --;
return resultPixelBufffer;
}
/// Deep-copies faceCount animal-face results from src into dst.
/// dst must point to at least faceCount elements; each copied element receives
/// its own malloc'ed key-point array (release the result with freeCatFace).
void copyCatFace(st_mobile_animal_face_t *src, int faceCount, st_mobile_animal_face_t *dst) {
    if (src == NULL || dst == NULL || faceCount <= 0) {
        return;
    }
    memcpy(dst, src, sizeof(st_mobile_animal_face_t) * faceCount);
    for (int i = 0; i < faceCount; ++i) {
        // Guard against absent/empty key-point arrays: the original code would
        // memcpy from NULL (undefined behavior) and leave dst sharing a pointer.
        if (src[i].p_key_points == NULL || src[i].key_points_count <= 0) {
            dst[i].p_key_points = NULL;
            continue;
        }
        size_t key_points_size = sizeof(st_pointf_t) * src[i].key_points_count;
        st_pointf_t *p_key_points = malloc(key_points_size);
        memcpy(p_key_points, src[i].p_key_points, key_points_size);
        dst[i].p_key_points = p_key_points;
    }
}
/// Releases the per-face key-point arrays and the face array itself, as
/// allocated by copyCatFace. Safe to call with NULL / non-positive count.
void freeCatFace(st_mobile_animal_face_t *src, int faceCount) {
    // Guard: the original dereferenced src without a NULL check.
    if (src == NULL || faceCount <= 0) {
        return;
    }
    for (int i = 0; i < faceCount; ++i) {
        if (src[i].p_key_points != NULL) {
            free(src[i].p_key_points);
            src[i].p_key_points = NULL;
        }
    }
    free(src);
    // Note: the original assigned NULL to the local parameter here — a no-op
    // for the caller, so it was removed. Callers must clear their own pointer.
}
#pragma mark - Filter model lookup

/// Returns the full paths of all filter model files ("filter_style*.model")
/// for the given category, searching both the app bundle and Documents.
/// Also ensures the Documents-side filter directories exist.
/// @param type 0 = portrait filters; other values are currently unsupported.
/// @return Absolute model file paths; empty when the type is unsupported.
- (NSArray *)getFilterModelPathsByType:(NSInteger)type {
    NSString *strPrefix = nil;
    switch (type) {
        case 0:
            strPrefix = @"PortraitFilters";
            break;
        default:
            break;
    }
    // Fix: an unknown type left strPrefix nil, and
    // -stringByAppendingPathComponent:nil raises an exception.
    if (strPrefix == nil) {
        return @[];
    }
    NSFileManager *fileManger = [[NSFileManager alloc] init];
    NSMutableArray *arrFilterPaths = [NSMutableArray array];
    // 1) Models shipped inside <prefix>.bundle in the app bundle.
    NSString *strBundlePath = [[[NSBundle mainBundle] resourcePath] stringByAppendingPathComponent:[strPrefix stringByAppendingString:@".bundle"]];
    NSArray *arrFileNames = [fileManger contentsOfDirectoryAtPath:strBundlePath error:nil];
    for (NSString *strFileName in arrFileNames) {
        if ([strFileName hasSuffix:@"model"] && [strFileName hasPrefix:@"filter_style"]) {
            [arrFilterPaths addObject:[NSString pathWithComponents:@[strBundlePath, strFileName]]];
        }
    }
    // 2) Ensure the downloadable-filter directories exist under Documents.
    NSString *strDocumentsPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject];
    NSArray<NSString *> *filterDirNames = @[@"PortraitFilters", @"SceneryFilters", @"StillLifeFilters", @"DeliciousFoodFilters"];
    for (NSString *dirName in filterDirNames) {
        NSString *dirPath = [strDocumentsPath stringByAppendingPathComponent:dirName];
        if (![fileManger fileExistsAtPath:dirPath]) {
            [fileManger createDirectoryAtPath:dirPath withIntermediateDirectories:YES attributes:nil error:nil];
        }
    }
    // 3) Models previously downloaded into Documents/<prefix>.
    NSString *filterPath = [strDocumentsPath stringByAppendingPathComponent:strPrefix];
    arrFileNames = [fileManger contentsOfDirectoryAtPath:filterPath error:nil];
    for (NSString *strFileName in arrFileNames) {
        if ([strFileName hasSuffix:@"model"] && [strFileName hasPrefix:@"filter_style"]) {
            [arrFilterPaths addObject:[NSString pathWithComponents:@[filterPath, strFileName]]];
        }
    }
    return [arrFilterPaths copy];
}
/// Builds the list of selectable filter models, with a leading "none" entry.
/// Each model's thumbnail is the .png next to its model file, falling back to
/// the default icon when no thumbnail exists.
- (NSArray *)getFilterModels {
    NSMutableArray *models = [NSMutableArray array];
    // Leading entry: "none" (no filter applied).
    HTSenseTimeModel *noneEntry = [HTSenseTimeModel new];
    noneEntry.modelName = @"无";
    noneEntry.modelIcon = kGetImage(@"live_gb");
    noneEntry.modelPath = nil;
    noneEntry.select = false;
    [models addObject:noneEntry];
    // One entry per discovered portrait filter model.
    for (NSString *path in [self getFilterModelPathsByType:0]) {
        HTSenseTimeModel *entry = [[HTSenseTimeModel alloc] init];
        entry.modelPath = path;
        entry.modelName = [[path.lastPathComponent stringByDeletingPathExtension] stringByReplacingOccurrencesOfString:@"filter_style_" withString:@""];
        NSString *thumbPath = [[path stringByDeletingPathExtension] stringByAppendingPathExtension:@"png"];
        UIImage *thumb = [UIImage imageWithContentsOfFile:thumbPath];
        entry.modelIcon = thumb ?: kGetImage(@"live_gb");
        [models addObject:entry];
    }
    return [models copy];
}
/// Returns the first portrait filter model path, or nil when none exist.
/// Consistency fix: the original duplicated the entire scanning and
/// directory-creation logic of getFilterModelPathsByType: inline; delegating
/// keeps the two code paths from drifting apart.
- (NSString *)getFilterModelPath {
    return [[self getFilterModelPathsByType:0] firstObject];
}
/// Tears down the SenseTime filter handle and associated GL resources.
- (void)releaseResources
{
    // Teardown must run on the same context that created the resources.
    if ([EAGLContext currentContext] != self.glContext) {
        [EAGLContext setCurrentContext:self.glContext];
    }
    if (_hFilter) {
        st_mobile_gl_filter_destroy(_hFilter);
        _hFilter = NULL;
    }
    // Fix: the texture cache created in initResourceAndStartPreview was
    // previously never released (leak).
    if (_cvTextureCache) {
        CFRelease(_cvTextureCache);
        _cvTextureCache = NULL;
    }
    self.glContext = nil;
}
#pragma mark - handle texture

/// (Re)creates the filter output: a pixel buffer plus the GL texture bound to
/// it, sized to the current imageWidth/imageHeight.
- (void)initResultTexture {
    [self setupTextureWithPixelBuffer:&_cvFilterBuffer
                                    w:self.imageWidth
                                    h:self.imageHeight
                            glTexture:&_textureFilterOutput
                            cvTexture:&_cvTextureFilter];
}
/// Creates an IOSurface-backed BGRA pixel buffer of the given size and a GL
/// texture bound to it through the texture cache, so GL writes land directly
/// in the pixel buffer.
/// @param pixelBufferOut Receives the newly created pixel buffer.
/// @param iWidth Width in pixels.
/// @param iHeight Height in pixels.
/// @param glTexture Receives the GL texture name.
/// @param cvTexture Receives the owning CVOpenGLESTextureRef (caller releases).
/// @return YES on success.
- (BOOL)setupTextureWithPixelBuffer:(CVPixelBufferRef *)pixelBufferOut
                                  w:(int)iWidth
                                  h:(int)iHeight
                          glTexture:(GLuint *)glTexture
                          cvTexture:(CVOpenGLESTextureRef *)cvTexture {
    // An (empty) IOSurface properties dictionary makes the pixel buffer
    // IOSurface-backed, which CVOpenGLESTextureCache requires.
    CFDictionaryRef empty = CFDictionaryCreate(kCFAllocatorDefault,
                                               NULL,
                                               NULL,
                                               0,
                                               &kCFTypeDictionaryKeyCallBacks,
                                               &kCFTypeDictionaryValueCallBacks);
    CFMutableDictionaryRef attrs = CFDictionaryCreateMutable(kCFAllocatorDefault,
                                                             1,
                                                             &kCFTypeDictionaryKeyCallBacks,
                                                             &kCFTypeDictionaryValueCallBacks);
    CFDictionarySetValue(attrs, kCVPixelBufferIOSurfacePropertiesKey, empty);
    CVReturn cvRet = CVPixelBufferCreate(kCFAllocatorDefault,
                                         iWidth,
                                         iHeight,
                                         kCVPixelFormatType_32BGRA,
                                         attrs,
                                         pixelBufferOut);
    if (kCVReturnSuccess != cvRet) {
        // Fix: bail out early — the original continued and tried to create a
        // texture from a NULL pixel buffer.
        NSLog(@"CVPixelBufferCreate %d" , cvRet);
        CFRelease(attrs);
        CFRelease(empty);
        return NO;
    }
    // Fix: use the requested iWidth/iHeight — the original passed
    // self.imageWidth/self.imageHeight, which can be stale during a resize.
    cvRet = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                         _cvTextureCache,
                                                         *pixelBufferOut,
                                                         NULL,
                                                         GL_TEXTURE_2D,
                                                         GL_RGBA,
                                                         iWidth,
                                                         iHeight,
                                                         GL_BGRA,
                                                         GL_UNSIGNED_BYTE,
                                                         0,
                                                         cvTexture);
    CFRelease(attrs);
    CFRelease(empty);
    if (kCVReturnSuccess != cvRet) {
        NSLog(@"CVOpenGLESTextureCacheCreateTextureFromImage %d" , cvRet);
        return NO;
    }
    *glTexture = CVOpenGLESTextureGetName(*cvTexture);
    // Configure sampling/wrapping, then unbind to avoid leaking GL state.
    glBindTexture(CVOpenGLESTextureGetTarget(*cvTexture), *glTexture);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glBindTexture(GL_TEXTURE_2D, 0);
    return YES;
}
/// Wraps the incoming camera pixel buffer in a GL texture via the texture
/// cache and stores the texture name in _textureOriginInput.
/// Uses self.imageWidth/self.imageHeight, which the caller updates per-frame.
/// @return YES when the texture was created successfully.
- (BOOL)setupOriginTextureWithPixelBuffer:(CVPixelBufferRef)pixelBuffer {
    CVReturn result = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                                   _cvTextureCache,
                                                                   pixelBuffer,
                                                                   NULL,
                                                                   GL_TEXTURE_2D,
                                                                   GL_RGBA,
                                                                   self.imageWidth,
                                                                   self.imageHeight,
                                                                   GL_BGRA,
                                                                   GL_UNSIGNED_BYTE,
                                                                   0,
                                                                   &_cvTextureOrigin);
    BOOL creationFailed = (_cvTextureOrigin == NULL) || (kCVReturnSuccess != result);
    if (creationFailed) {
        NSLog(@"CVOpenGLESTextureCacheCreateTextureFromImage %d" , result);
        return NO;
    }
    _textureOriginInput = CVOpenGLESTextureGetName(_cvTextureOrigin);
    // Configure sampling/wrapping, then unbind to avoid leaking GL state.
    glBindTexture(GL_TEXTURE_2D , _textureOriginInput);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glBindTexture(GL_TEXTURE_2D, 0);
    return YES;
}
/// Releases the filter output texture and its backing pixel buffer so they
/// can be recreated at a new size. Safe to call repeatedly.
- (void)releaseResultTexture {
    _textureFilterOutput = 0;
    if (_cvTextureOrigin) {
        CFRelease(_cvTextureOrigin);
        _cvTextureOrigin = NULL;
    }
    // Fix: CFRelease(NULL) crashes — guard before releasing, and clear the
    // ivars so a second call cannot double-release stale pointers.
    if (_cvTextureFilter) {
        CFRelease(_cvTextureFilter);
        _cvTextureFilter = NULL;
    }
    if (_cvFilterBuffer) {
        CVPixelBufferRelease(_cvFilterBuffer);
        _cvFilterBuffer = NULL;
    }
}