直接上代码
一个简单的视频剪辑app
https://apps.apple.com/cn/app/id6689516466
1、初始化metalView
// 1. Create the MTKView that hosts all Metal rendering.
metalView = MTKView(frame: self.view.bounds)
// Acquire the system default GPU device.
metalView.device = MTLCreateSystemDefaultDevice()
// MetalRender owns the Metal rendering logic for this view.
// (The paste-mangled original had this statement fused onto a comment
// line, which made it dead code.)
metalRender = MetalRender(mtkView: metalView)
// MetalRender implements MTKViewDelegate and drives the draw loop.
metalView?.delegate = metalRender
self.view.addSubview(metalView)
2、metalRender的实现逻辑
// Designated initializer: captures the GPU device from the hosting
// MTKView and prepares every Metal resource the renderer needs.
init(mtkView: MTKView) {
    super.init()
    device = mtkView.device
    // Cache the drawable size; it is forwarded to the vertex shader so
    // pixel-space vertices can be normalized to clip space.
    self.videoPortSize = vector_float2(x: Float(mtkView.drawableSize.width),
                                       y: Float(mtkView.drawableSize.height))
    // Build the render pipeline (vertex + fragment functions).
    self.setupPipeline(mtkView: mtkView)
    // Upload the full-screen quad vertex data.
    self.setupVexterData()
    // Load the color-lookup (LUT) texture used by the filter effect.
    self.setupLutTexture()
    // Create the CVMetalTextureCache used to wrap decoded video pixel
    // buffers as Metal textures without extra copies. Check the status
    // instead of silently ignoring a failure.
    // NOTE(review): `device!` assumes the view already has a device —
    // confirm callers always set it before constructing MetalRender.
    let status = CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, device!, nil, &videoTextureCache)
    if status != kCVReturnSuccess {
        debugPrint("CVMetalTextureCacheCreate failed: \(status)")
    }
}
3、初始化渲染管道
// Builds the render pipeline state and the command queue.
// - Parameter mtkView: supplies the color pixel format the pipeline
//   must render into.
func setupPipeline(mtkView: MTKView) {
    // Default library compiled from the app's .metal sources
    // (a named library could be loaded instead).
    let library = device?.makeDefaultLibrary()
    // Shader entry points — names must match the .metal file.
    let vertexFunc = library?.makeFunction(name: "videoVertexShader")
    let fragmentFunc = library?.makeFunction(name: "videoFragmentShader")
    // Descriptor tying the shader functions to the view's pixel format.
    let pipelineDescription = MTLRenderPipelineDescriptor()
    pipelineDescription.vertexFunction = vertexFunc
    pipelineDescription.fragmentFunction = fragmentFunc
    pipelineDescription.colorAttachments[0].pixelFormat = mtkView.colorPixelFormat
    // Compile the pipeline state. Surface the compile error instead of
    // discarding it with `try?` — the error message explains *why* the
    // pipeline failed (e.g. a missing shader function).
    do {
        pipelineStatus = try device?.makeRenderPipelineState(descriptor: pipelineDescription)
    } catch {
        debugPrint("pipeline init fail: \(error)")
    }
    // Command queue all per-frame command buffers are taken from.
    self.commendQueue = self.device?.makeCommandQueue()
}
4、设置顶点数据
// Uploads the full-screen quad's vertex/texture-coordinate data and
// the default YUV→RGB conversion matrix to GPU buffers.
func setupVexterData() {
    // Metal clip space has its origin at the view center, so a quad
    // spanning ±(width/2, height/2) in pixel space fills the view once
    // the vertex shader divides by viewPortSize / 2.
    let x = self.videoPortSize.x / 2
    let y = self.videoPortSize.y / 2
    // Two triangles forming one full-screen quad.
    let vexterData: [VexterData] = [
        VexterData(position: [x, -y], textcoord: [1.0, 0.0]),
        VexterData(position: [-x, -y], textcoord: [0.0, 0.0]),
        VexterData(position: [-x, y], textcoord: [0.0, 1.0]),
        VexterData(position: [x, -y], textcoord: [1.0, 0.0]),
        VexterData(position: [-x, y], textcoord: [0.0, 1.0]),
        VexterData(position: [x, y], textcoord: [1.0, 1.0]),
    ]
    // Use .stride (size including alignment padding) and the actual
    // element count: MemoryLayout<T>.size under-allocates for arrays
    // of padded structs, and the hard-coded 6 can drift from the data.
    vexterBuffer = self.device?.makeBuffer(bytes: vexterData,
                                           length: MemoryLayout<VexterData>.stride * vexterData.count,
                                           options: .storageModeShared)
    // Default YUV→RGB conversion matrix (refreshed per-frame in setTexture).
    self.converMatrix = self.device?.makeBuffer(bytes: &colorConversionMatrix,
                                                length: MemoryLayout<matrix_float3x3>.stride,
                                                options: .storageModeShared)
}
5、设置lut纹理
// Loads the color-lookup image "lookup" into a read-only RGBA8 texture.
func setupLutTexture() {
    guard let image = UIImage(named: "lookup")?.cgImage else { return }
    // Texture description: dimensions from the image, RGBA8, shader-read.
    let textureDes = MTLTextureDescriptor()
    textureDes.width = image.width
    textureDes.height = image.height
    textureDes.pixelFormat = .rgba8Unorm
    textureDes.usage = .shaderRead
    self.textureLut = device?.makeTexture(descriptor: textureDes)
    // Decode the image into a raw RGBA byte buffer.
    let data = loadImageData(image: image)
    // loadImageData allocates and transfers ownership; free the buffer
    // after the bytes are copied into the texture — the original
    // leaked it on every call.
    defer { data.deallocate() }
    // Upload the whole image as mip level 0.
    let region = MTLRegionMake2D(0, 0, image.width, image.height)
    self.textureLut?.replace(region: region, mipmapLevel: 0,
                             withBytes: data, bytesPerRow: image.width * 4)
}
// Decodes `image` into a freshly allocated RGBA byte buffer of
// width * height * 4 bytes, flipped vertically.
// - Returns: an owned pointer; the caller must deallocate() it.
func loadImageData(image: CGImage) -> UnsafeMutablePointer<GLubyte> {
    let width = image.width
    let height = image.height
    // GLubyte is 1 byte, so capacity is simply width * height * 4.
    let data = UnsafeMutablePointer<GLubyte>.allocate(capacity: width * height * 4)
    // Draw directly into our buffer via a CGContext. The original
    // UIGraphicsBeginImageContext/EndImageContext pair was dead code:
    // the CGContext below never used that UIKit context.
    // NOTE(review): image.bitmapInfo must describe 32-bit RGBA to match
    // the rgba8Unorm texture — confirm for non-PNG assets.
    let context = CGContext(data: data, width: width, height: height,
                            bitsPerComponent: 8, bytesPerRow: width * 4,
                            space: image.colorSpace!,
                            bitmapInfo: image.bitmapInfo.rawValue)
    // Flip vertically so the buffer's row order matches what the
    // shaders expect.
    context?.translateBy(x: 0, y: CGFloat(height))
    context?.scaleBy(x: 1, y: -1)
    context?.draw(image, in: CGRect(x: 0, y: 0, width: width, height: height))
    return data
}
6、上传视频帧到GPU
// Wraps a decoded video frame (bi-planar YUV CVPixelBuffer) as Metal
// textures and refreshes the YUV→RGB matrix from the frame's color
// attachment.
func setTexture(pix: CVPixelBuffer) {
    // Bi-planar buffers have two planes: Y and interleaved CbCr.
    let planeCount = CVPixelBufferGetPlaneCount(pix)
    // Pick the conversion matrix from the frame's YCbCr matrix tag.
    // Fall back to BT.709 when the attachment is missing instead of
    // assert(false): release builds previously fell through silently
    // with a stale matrix.
    if let colorAttachments = CVBufferGetAttachment(pix, kCVImageBufferYCbCrMatrixKey, nil)?.takeUnretainedValue() as? String {
        if CFStringCompare(colorAttachments as CFString, kCVImageBufferYCbCrMatrix_ITU_R_601_4, .compareCaseInsensitive) == .compareEqualTo {
            // Color space is BT.601.
            colorConversionMatrix = kMetalColorConversion601
        } else {
            // Color space is BT.709.
            colorConversionMatrix = kMetalColorConversion709
        }
    } else {
        debugPrint("missing YCbCr matrix attachment, defaulting to 709")
        colorConversionMatrix = kMetalColorConversion709
    }
    self.converMatrix = self.device?.makeBuffer(bytes: &colorConversionMatrix,
                                                length: MemoryLayout<matrix_float3x3>.stride,
                                                options: .storageModeShared)
    // Plane 0: luma (Y) — full resolution, single channel (.r8Unorm).
    let plane0Width = CVPixelBufferGetWidthOfPlane(pix, 0)
    let plane0Height = CVPixelBufferGetHeightOfPlane(pix, 0)
    if let videoTextureCache = self.videoTextureCache {
        var texture: CVMetalTexture?
        let status = CVMetalTextureCacheCreateTextureFromImage(nil, videoTextureCache, pix, nil, .r8Unorm, plane0Width, plane0Height, 0, &texture)
        if status == kCVReturnSuccess, let texture = texture {
            self.textureY = CVMetalTextureGetTexture(texture)
        }
    }
    // Plane 1: interleaved chroma (CbCr) — half resolution, two
    // channels (.rg8Unorm).
    if planeCount == 2 {
        let plane1Width = CVPixelBufferGetWidthOfPlane(pix, 1)
        let plane1Height = CVPixelBufferGetHeightOfPlane(pix, 1)
        if let videoTextureCache = self.videoTextureCache {
            var texture: CVMetalTexture?
            let status = CVMetalTextureCacheCreateTextureFromImage(nil, videoTextureCache, pix, nil, .rg8Unorm, plane1Width, plane1Height, 1, &texture)
            if status == kCVReturnSuccess, let texture = texture {
                self.textureUV = CVMetalTextureGetTexture(texture)
            }
        }
    }
}
7、metalView的代理
//当视图大小改变时,会调用这个方法,可以在这个方法里更新绘制画面的大小
func mtkView(_ view: MTKView, drawableSizeWillChange size: CGSize) {
    // Keep the cached viewport in sync with the drawable so vertex
    // normalization stays correct after rotation or resize.
    videoPortSize = vector_float2(x: Float(size.width),
                                  y: Float(size.height))
}
// Per-frame draw: encodes one render pass drawing the video quad with
// the currently selected effect.
func draw(in view: MTKView) {
    // One command buffer per frame from the shared queue.
    let commendBuffer = self.commendQueue?.makeCommandBuffer()
    commendBuffer?.label = "metal render commendBuffer"
    // The view vends a render-pass descriptor targeting its drawable.
    // Bind the drawable safely: the original force-unwrapped
    // view.currentDrawable! and could crash when no drawable is
    // available (e.g. the view is off-screen).
    if let renderDescription = view.currentRenderPassDescriptor,
       let drawable = view.currentDrawable {
        // Clear to opaque black before drawing.
        renderDescription.colorAttachments[0].clearColor = MTLClearColorMake(0, 0, 0, 1.0)
        let encoder = commendBuffer?.makeRenderCommandEncoder(descriptor: renderDescription)
        encoder?.label = "metal encoder"
        // Viewport covering the whole drawable.
        encoder?.setViewport(MTLViewport(originX: 0, originY: 0,
                                         width: Double(self.videoPortSize.x),
                                         height: Double(self.videoPortSize.y),
                                         znear: -1, zfar: 1))
        if let pipelineStatus = pipelineStatus {
            encoder?.setRenderPipelineState(pipelineStatus)
        }
        // Vertex inputs: quad vertices at buffer(0), viewport size at buffer(1).
        encoder?.setVertexBuffer(self.vexterBuffer, offset: 0, index: 0)
        encoder?.setVertexBytes(&self.videoPortSize, length: MemoryLayout<vector_float2>.size, index: 1)
        // The shader declares `constant int type` (32-bit in MSL);
        // pass an Int32 instead of an 8-byte Swift Int so the sizes
        // match the shader-side layout.
        var shaderType = Int32(type)
        encoder?.setFragmentBytes(&shaderType, length: MemoryLayout<Int32>.size, index: 1)
        // Fragment inputs: conversion matrix, Y plane, UV plane, LUT.
        encoder?.setFragmentBuffer(self.converMatrix, offset: 0, index: 0)
        encoder?.setFragmentTexture(textureY, index: 0)
        encoder?.setFragmentTexture(textureUV, index: 1)
        encoder?.setFragmentTexture(textureLut, index: 2)
        // 6 vertices = 2 triangles = one full-screen quad.
        encoder?.drawPrimitives(type: .triangle, vertexStart: 0, vertexCount: 6)
        encoder?.endEncoding()
        // Schedule presentation of the drawable.
        commendBuffer?.present(drawable)
    }
    // Submit the frame's work to the GPU.
    commendBuffer?.commit()
}
8、metal shader
// Vertex shader output / rasterizer input: clip-space position plus
// the interpolated texture coordinate.
typedef struct {
float4 position [[position]];
float2 textcoord;
} VexterData;
// Vertex shader input: pixel-space position and texture coordinate,
// matching the layout uploaded by setupVexterData().
typedef struct {
float2 position;
float2 textcoord;
} VexterDataIn;
// Maps pixel-space quad vertices (origin at the view center) to clip
// space and passes the texture coordinate through to the rasterizer.
vertex VexterData videoVertexShader(uint vertexID [[vertex_id]],
                                    constant VexterDataIn *vertexData [[buffer(0)]],
                                    constant vector_float2 viewPortSizePointer [[buffer(1)]]) {
    VexterData out;
    // Start from a default position with w = 1.
    out.position = vector_float4(0, 0, 0, 1.0);
    // Vertex position in pixels.
    float2 pixelPosition = vertexData[vertexID].position.xy;
    // Actual view size in pixels.
    vector_float2 viewSize = vector_float2(viewPortSizePointer);
    // Normalize: ±(viewSize / 2) in pixels maps to ±1 in clip space.
    out.position.xy = pixelPosition / (viewSize / 2.0);
    // Pass the texture coordinate through unchanged.
    out.textcoord = vertexData[vertexID].textcoord;
    return out;
}
9、片元函数
// Fragment shader: converts the sampled YUV texel to RGB and applies
// the effect selected by `type`:
//   0 = normal, 1 = grayscale, 2 = 3x3 grid, 3 = LUT filter.
fragment float4 videoFragmentShader(VexterData input [[stage_in]],
                                    texture2d<float> textureY [[texture(0)]],
                                    texture2d<float> textureUV [[texture(1)]],
                                    texture2d<float> lut [[texture(2)]],
                                    constant float3x3 *colorMatrix [[buffer(0)]],
                                    constant int type [[buffer(1)]]) {
    constexpr sampler textureSampler(mag_filter::linear, min_filter::linear);
    // The video frame is stored upside down, so flip y when sampling.
    float2 textcoord = float2(input.textcoord.x, 1.0 - input.textcoord.y);
    // Y from plane 0; CbCr (biased by 0.5) from plane 1.
    float3 yuv = float3(textureY.sample(textureSampler, textcoord).r,
                        textureUV.sample(textureSampler, textcoord).rg - float2(0.5, 0.5));
    float4 rgba = float4(0.0, 0.0, 0.0, 1.0);
    switch (type) {
    case 0:
        // Normal playback.
        rgba = normalColor(yuv, *colorMatrix);
        break;
    case 1:
        // Grayscale playback.
        rgba = clearColor(yuv, *colorMatrix);
        break;
    case 2: {
        // 3x3 grid playback: fold the coordinate into one grid cell,
        // then sample with the same vertical flip as above.
        float2 cellCoord = calNighTextCoord(input.textcoord);
        cellCoord = float2(cellCoord.x, 1 - cellCoord.y);
        float3 cellYuv = float3(textureY.sample(textureSampler, cellCoord).r,
                                textureUV.sample(textureSampler, cellCoord).rg - float2(0.5, 0.5));
        rgba = float4(transformYUVToRGB(cellYuv, *colorMatrix), 1.0);
        break;
    }
    case 3: {
        // LUT filter playback.
        float3 baseRgb = transformYUVToRGB(yuv, *colorMatrix);
        rgba = filterPlay(baseRgb, lut);
        break;
    }
    default:
        break;
    }
    return rgba;
}
// Grayscale playback: zero the chroma components so only luma
// survives the YUV→RGB conversion.
float4 clearColor(float3 yuv, float3x3 matrix) {
    float3 lumaOnly = float3(yuv.x, 0, 0);
    return float4(matrix * lumaOnly, 1.0);
}
// Normal playback: full YUV→RGB conversion with opaque alpha.
float4 normalColor(float3 yuv, float3x3 matrix) {
    float3 rgb = matrix * yuv;
    return float4(rgb, 1.0);
}
// Applies the color-conversion matrix to a (chroma-biased) YUV sample.
float3 transformYUVToRGB(float3 yuv, float3x3 matrix) {
    float3 rgb = matrix * yuv;
    return rgb;
}
// Maps a full-quad texture coordinate into a repeating 3x3 grid cell:
// each axis is folded from one third of [0,1) back to [0,1).
float2 calNighTextCoord(float2 textCoord) {
    float2 tcd = textCoord;
    if (tcd.x < 1.0 / 3.0) {
        tcd.x = tcd.x * 3.0;
    } else if (tcd.x < 2.0 / 3.0) {
        tcd.x = (tcd.x - 1.0 / 3.0) * 3.0;
    } else {
        tcd.x = (tcd.x - 2.0 / 3.0) * 3.0;
    }
    // Use the same strict `<` boundaries as the x axis: the original
    // `<=` on the first y band mapped y == 1/3 to 1.0 instead of 0.0,
    // an inconsistent seam between the first and second rows.
    if (tcd.y < 1.0 / 3.0) {
        tcd.y = tcd.y * 3.0;
    } else if (tcd.y < 2.0 / 3.0) {
        tcd.y = (tcd.y - 1.0 / 3.0) * 3.0;
    } else {
        tcd.y = (tcd.y - 2.0 / 3.0) * 3.0;
    }
    return tcd;
}
// LUT filter playback: applies a 512x512 color-lookup texture (a
// 64^3 LUT laid out as an 8x8 grid of 64x64 tiles) to an RGB color.
float4 filterPlay(float3 rgb, texture2d<float> lut) {
    // The blue channel selects the pair of adjacent 64x64 tiles.
    float blueColor = rgb.b * 63.0;
    // Tile containing floor(blue).
    float2 quad1;
    quad1.y = floor(floor(blueColor) / 8.0);
    quad1.x = floor(blueColor) - (quad1.y * 8.0);
    // Tile containing ceil(blue).
    float2 quad2;
    quad2.y = floor(ceil(blueColor) / 8.0);
    quad2.x = ceil(blueColor) - (quad2.y * 8.0);
    // Red/green select the texel within each tile; the half-texel
    // offset keeps samples centered on texels.
    float2 texPos1;
    texPos1.x = (quad1.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * rgb.r);
    texPos1.y = (quad1.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * rgb.g);
    float2 texPos2;
    texPos2.x = (quad2.x * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * rgb.r);
    texPos2.y = (quad2.y * 0.125) + 0.5/512.0 + ((0.125 - 1.0/512.0) * rgb.g);
    constexpr sampler textureSampler(mag_filter::linear, min_filter::linear);
    float4 newColor1 = lut.sample(textureSampler, texPos1);
    // BUG FIX: the second sample must use texPos2 — the original
    // sampled texPos1 twice, making the blue interpolation a no-op.
    float4 newColor2 = lut.sample(textureSampler, texPos2);
    // Interpolate between the two blue slices.
    float4 newColor = mix(newColor1, newColor2, fract(blueColor));
    // Return opaque alpha, consistent with the other effect paths
    // (the original emitted alpha 0.0, which breaks blending setups).
    return float4(newColor.rgb, 1.0);
}