iOS: Implementing Transparent Video with CoreImage

Overall approach: use CoreImage to process each frame, and use AVVideoComposition to composite the filtered frames into the video. Each source frame carries the RGB picture and its alpha mask stacked vertically (picture in the top half, mask in the bottom half); the filter recombines the two halves into a single transparent frame.



一、Custom Filter

1、CIFilter

To subclass CIFilter, you need to do the following:

  1. Declare properties for the filter's input parameters. Each input parameter name must be prefixed with input, for example inputImage.
  2. Override the setDefaults method if necessary. (Not needed in this example, because the input parameters are assigned their values directly.)
  3. Override the outputImage property.
import CoreImage

class AlphaFrameFilter: CIFilter {
    private(set) var inputImage: CIImage?
    private(set) var maskImage: CIImage?

    override var outputImage: CIImage? {
        // Blend the input over a fully transparent background, taking the alpha
        // channel from the mask.
        guard let inputImage = inputImage, let maskImage = maskImage,
              let filter = CIFilter(name: "CIBlendWithMask") else { return nil }
        let outputExtent = inputImage.extent
        let backgroundImage = CIImage(color: CIColor(string: "0.0 0.0 0.0 0.0")).cropped(to: outputExtent)
        filter.setValue(backgroundImage, forKey: kCIInputBackgroundImageKey)
        filter.setValue(inputImage, forKey: kCIInputImageKey)
        filter.setValue(maskImage, forKey: kCIInputMaskImageKey)
        return filter.outputImage
    }
}
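
The video composition in section 二 drives the filter through a throwing process(inputImage:mask:) helper rather than setting the inputs via key-value coding. That helper is not shown above; here is a minimal sketch, assuming it sits in the same source file as AlphaFrameFilter (so it may assign the private(set) inputs) and that the error names are our own:

// Assumed helper, same file as AlphaFrameFilter. Error cases are illustrative.
extension AlphaFrameFilter {
    enum ProcessError: Swift.Error {
        case incompatibleExtents   // image and mask must cover the same rect
        case renderingFailed       // outputImage came back nil
    }

    func process(inputImage: CIImage, mask: CIImage) throws -> CIImage {
        guard inputImage.extent == mask.extent else { throw ProcessError.incompatibleExtents }
        self.inputImage = inputImage
        self.maskImage = mask
        guard let output = outputImage else { throw ProcessError.renderingFailed }
        return output
    }
}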

2、CIColorKernel

private static var colorKernel: CIColorKernel? = {
    // CIKL kernel: take rgb from the source sample and alpha from the mask's red channel.
    return CIColorKernel(source:
        "kernel vec4 alphaFrame(__sample s, __sample m) {" +
        "    return vec4(s.rgb, m.r);" +
        "}")
}()

// Inside outputImage:
guard let inputImage = inputImage, let maskImage = maskImage,
      let colorKernel = AlphaFrameFilter.colorKernel else { return nil }
let outputExtent = inputImage.extent
let arguments = [inputImage, maskImage]
return colorKernel.apply(extent: outputExtent, arguments: arguments)
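
To sanity-check any of these variants outside of video playback, you can push a single frame/mask pair through the filter and rasterize the result with a CIContext. A quick sketch, where frameImage and maskImage are hypothetical CIImage test inputs and process(inputImage:mask:) is the helper sketched above:

import CoreImage

// Hypothetical one-off check: blend one RGB frame with its mask and inspect the alpha.
// frameImage / maskImage: assumed CIImage test inputs.
let filter = AlphaFrameFilter()
let context = CIContext()

do {
    let rgbaImage = try filter.process(inputImage: frameImage, mask: maskImage)
    if let cgImage = context.createCGImage(rgbaImage, from: rgbaImage.extent) {
        // Expect a format that carries alpha, not one of the "none" variants.
        print("alphaInfo:", cgImage.alphaInfo.rawValue)
    }
} catch {
    print("AlphaFrameFilter failed: \(error)")
}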

3、CIKernel

private static var metalKernel: CIKernel? = {
    // Load the Metal CIKernel named "alphaFrame" (which returns float4(s.rgb, m.r),
    // like the CIKL version above) from the app's compiled default.metallib.
    // The .metal file must be built as a Core Image kernel (e.g. with the -fcikernel flags).
    do {
        guard let url = Bundle.main.url(forResource: "default", withExtension: "metallib") else { return nil }
        let data = try Data(contentsOf: url)
        return try CIKernel(functionName: "alphaFrame", fromMetalLibraryData: data)
    } catch {
        // Optionally record the error here for debugging.
        return nil
    }
}()

// Inside outputImage:
guard let inputImage = inputImage, let maskImage = maskImage,
      let metalKernel = AlphaFrameFilter.metalKernel else { return nil }
let outputExtent = inputImage.extent
let roiCallback: CIKernelROICallback = { _, rect in rect }
let arguments = [inputImage, maskImage]
return metalKernel.apply(extent: outputExtent, roiCallback: roiCallback, arguments: arguments)
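
Section 二 below constructs the filter as AlphaFrameFilter(renderingMode: .colorKernel), which none of the snippets above define. Here is a minimal consolidated sketch of how the class could store that mode and dispatch to the implementations shown in 1–3; only the colorKernel branch is spelled out, and the names and structure are assumptions rather than the author's verbatim class:

import CoreImage

final class AlphaFrameFilter: CIFilter {
    enum RenderingMode { case builtInFilter, colorKernel, metalKernel }

    let renderingMode: RenderingMode
    private(set) var inputImage: CIImage?
    private(set) var maskImage: CIImage?

    init(renderingMode: RenderingMode) {
        self.renderingMode = renderingMode
        super.init()
    }

    required init?(coder: NSCoder) { fatalError("init(coder:) has not been implemented") }

    // Same CIKL kernel as in 2、CIColorKernel.
    private static let colorKernel = CIColorKernel(source:
        "kernel vec4 alphaFrame(__sample s, __sample m) { return vec4(s.rgb, m.r); }")

    override var outputImage: CIImage? {
        guard let inputImage = inputImage, let maskImage = maskImage else { return nil }
        switch renderingMode {
        case .colorKernel:
            return AlphaFrameFilter.colorKernel?.apply(extent: inputImage.extent,
                                                       arguments: [inputImage, maskImage])
        case .builtInFilter, .metalKernel:
            // Use the corresponding bodies from 1、CIFilter and 3、CIKernel here.
            return nil
        }
    }
}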

二、Compositing the Video with AVVideoComposition

let playerItem = createTransparentItem(url: itemUrl)

func createTransparentItem(url: URL) -> AVPlayerItem {
    let asset = AVAsset(url: url)
    let playerItem = AVPlayerItem(asset: asset)
    // Make seeking wait for the CIFilter composition to render, so paused frames stay correct.
    playerItem.seekingWaitsForVideoCompositionRendering = true
    playerItem.videoComposition = createVideoComposition(for: asset)
    return playerItem
}

func createVideoComposition(for asset: AVAsset) -> AVVideoComposition {
    // renderingMode selects which of the three implementations from section 一 backs outputImage.
    let filter = AlphaFrameFilter(renderingMode: .colorKernel)
    let composition = AVMutableVideoComposition(asset: asset, applyingCIFiltersWithHandler: { request in
        do {
            // Each source frame stacks the RGB picture (top half) over its mask (bottom half).
            let (inputImage, maskImage) = request.sourceImage.verticalSplit()
            let outputImage = try filter.process(inputImage: inputImage, mask: maskImage)
            return request.finish(with: outputImage, context: nil)
        } catch {
            debugPrint("Video composition error: \(error)")
            return request.finish(with: error)
        }
    })
    // The rendered frame is half the height of the source, since picture and mask are stacked vertically.
    composition.renderSize = asset.videoSize.applying(CGAffineTransform(scaleX: 1.0, y: 0.5))
    return composition
}
extension AVAsset {
    var videoSize: CGSize {
        let videoTracks = tracks(withMediaType: AVMediaType.video)
        return videoTracks.first?.naturalSize ?? .zero
    }
}
extension CIImage {
    typealias VerticalSplit = (topImage: CIImage, bottomImage: CIImage)

    /// Splits the image into its top and bottom halves, shifting the top half down
    /// so both results share the same (bottom-half) extent.
    func verticalSplit() -> VerticalSplit {
        // Extent of the bottom half (CoreImage's origin is the bottom-left corner).
        let outputExtent = self.extent.applying(CGAffineTransform(scaleX: 1.0, y: 0.5))
        let translate = CGAffineTransform(translationX: 0, y: outputExtent.height)

        // Top half: crop the upper region, then move it down onto the bottom-half extent.
        let topRegion = outputExtent.applying(translate)
        var topImage = self.cropped(to: topRegion)
        topImage = topImage.transformed(by: translate.inverted())

        // Bottom half: already in place.
        let bottomRegion = outputExtent
        let bottomImage = self.cropped(to: bottomRegion)

        return (topImage, bottomImage)
    }
}
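
Finally, to actually see the transparency, the player item has to be displayed on a layer that preserves alpha. A minimal playback sketch, assuming it runs inside a view controller (view is the hosting view and playerItem is the item built above); requesting BGRA pixel buffers on AVPlayerLayer is what keeps the alpha produced by the composition from being dropped:

import AVFoundation
import UIKit

// Hypothetical wiring inside a view controller; playerItem comes from
// createTransparentItem(url:) above.
let player = AVPlayer(playerItem: playerItem)

let playerLayer = AVPlayerLayer(player: player)
playerLayer.frame = view.bounds
playerLayer.videoGravity = .resizeAspect
// Ask for BGRA buffers so the composited alpha survives on its way to the screen,
// and keep the layer itself transparent so the content behind it shows through.
playerLayer.pixelBufferAttributes = [
    kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
]
playerLayer.isOpaque = false
playerLayer.backgroundColor = UIColor.clear.cgColor
view.layer.addSublayer(playerLayer)

player.play()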