I've been learning Metal recently and wanted to try rendering video with it. A couple of approaches came to mind.
1. Reading data with AVAssetReader
In GPUImage3's Inputs/MovieInput.swift, an AVAssetReader reads the video as CMSampleBuffers, which are then rendered to a renderView. So the rough flow is: AVAssetReader → CMSampleBuffer → renderView. I've written rough code for this here: click to view.
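For reference, here is a minimal sketch of that reader loop, modeled loosely on MovieInput. The handler closure and the choice of NV12 output are my own; MovieInput additionally paces the loop against frame timestamps when playing at actual speed.

import AVFoundation

// A minimal sketch of the AVAssetReader loop. The `handler` closure
// stands in for the Metal upload-and-draw step.
func readFrames(from asset: AVAsset, handler: (CMSampleBuffer) -> Void) throws {
    let reader = try AVAssetReader(asset: asset)
    guard let videoTrack = asset.tracks(withMediaType: .video).first else { return }
    // Decode to NV12 so each frame maps directly onto Metal textures.
    let settings: [String: Any] = [
        kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
    ]
    let output = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: settings)
    reader.add(output)
    guard reader.startReading() else { return }
    // Pull sample buffers until the track runs out.
    while reader.status == .reading, let sampleBuffer = output.copyNextSampleBuffer() {
        handler(sampleBuffer)
    }
}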
2. Reading with AVPlayerItemVideoOutput
With the approach above, no audio plays while the video renders. I wanted the rendered frames to be accompanied by sound, and after digging through the documentation I found AVPlayerItemVideoOutput.
AVPlayerItemVideoOutput has a method, copyPixelBuffer(forItemTime:itemTimeForDisplay:), which retrieves an image suitable for display at the specified item time and marks that image as acquired.
That suggested the plan: let the player handle the audio while Metal renders the frames.
Without further ado, here's the code:
import AVFoundation
import UIKit

class XTVideoMovie: NSObject, AVPlayerItemOutputPullDelegate {

    weak var delegate: XTVideoMovieDelegate?

    var aqPlayer: AVQueuePlayer?
    var displayLink: CADisplayLink?
    var playerItems: [AVPlayerItem] = []
    var outputs: [AVPlayerItemVideoOutput] = []
    var playIndex = 0

    deinit {
        NotificationCenter.default.removeObserver(self)
        // CADisplayLink retains its target; invalidate it so this object can be released.
        displayLink?.invalidate()
    }

    init(items: [AVPlayerItem]) {
        super.init()
        initDisplayLink()
        setupItems(items: items)
    }

    func initDisplayLink() {
        displayLink = CADisplayLink(target: self, selector: #selector(displayLinkCallBack(_:)))
        displayLink?.add(to: .current, forMode: .common)
        displayLink?.isPaused = true
    }

    /// Pull a frame for the current item time on every screen refresh.
    @objc func displayLinkCallBack(_ displayLink: CADisplayLink) {
        processPixelBuffer(at: aqPlayer?.currentItem?.currentTime())
    }

    func setupItems(items: [AVPlayerItem]) {
        playerItems = items
        for item in items {
            NotificationCenter.default.addObserver(self, selector: #selector(playEnd(notification:)), name: .AVPlayerItemDidPlayToEndTime, object: item)
            // NV12 output: one plane for luma, one for chroma, easy to map onto Metal textures.
            let outputSettings: [String: Any] = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange]
            let output = AVPlayerItemVideoOutput(outputSettings: outputSettings)
            output.setDelegate(self, queue: DispatchQueue.main)
            item.add(output)
            outputs.append(output)
        }
        aqPlayer = AVQueuePlayer(items: items)
    }

    func play() {
        if playerItems.count > 0 {
            aqPlayer?.play()
            displayLink?.isPaused = false
        }
    }

    func pause() {
        if aqPlayer?.rate != 0 {
            aqPlayer?.pause()
            displayLink?.isPaused = true
        }
    }

    func reset() {
        playIndex = 0
        pause()
        aqPlayer?.seek(to: .zero)
        play()
    }

    func processPixelBuffer(at time: CMTime?) {
        guard let outputTime = time, playIndex < outputs.count else {
            return
        }
        guard outputs[playIndex].hasNewPixelBuffer(forItemTime: outputTime) else {
            return
        }
        // Offset the item time by the durations of the items already played,
        // so the delegate sees one continuous timeline across the whole queue.
        var currentTime = outputTime
        for i in 0..<playIndex {
            currentTime = CMTimeAdd(currentTime, playerItems[i].asset.duration)
        }
        // Copy the new pixel buffer; this also marks it as acquired.
        guard let pixelBuffer = outputs[playIndex].copyPixelBuffer(forItemTime: outputTime, itemTimeForDisplay: nil) else {
            return
        }
        // Hand the frame and its timestamp to the renderer.
        delegate?.prepare(at: pixelBuffer)
        delegate?.prepare(at: currentTime)
    }

    @objc func playEnd(notification: Notification) {
        playIndex += 1
        if playIndex >= playerItems.count {
            print("Playback finished")
            displayLink?.isPaused = true
        }
    }

    // MARK: - AVPlayerItemOutputPullDelegate

    func outputMediaDataWillChange(_ sender: AVPlayerItemOutput) {
        // New media data is about to arrive: resume the display link if it was paused.
        guard displayLink?.isPaused ?? false else {
            return
        }
        displayLink?.isPaused = false
    }

    func outputSequenceWasFlushed(_ output: AVPlayerItemOutput) {
    }
}
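The delegate here receives raw CVPixelBuffers. To actually draw one with Metal, the usual first step is to wrap the buffer's planes in MTLTextures through a CVMetalTextureCache. A minimal sketch of that step follows; the class name is mine. With the NV12 format above, plane 0 is luma as .r8Unorm and plane 1 is chroma as .rg8Unorm, which a fragment shader then converts to RGB.

import CoreVideo
import Metal

// Hypothetical helper that turns CVPixelBuffer planes into MTLTextures.
final class PixelBufferTextureConverter {
    private var textureCache: CVMetalTextureCache?

    init?(device: MTLDevice) {
        guard CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, device, nil, &textureCache) == kCVReturnSuccess else {
            return nil
        }
    }

    // For the NV12 buffers above: plane 0 with .r8Unorm (Y), plane 1 with
    // .rg8Unorm (CbCr). For a BGRA buffer: plane 0 with .bgra8Unorm.
    func texture(from pixelBuffer: CVPixelBuffer, plane: Int, format: MTLPixelFormat) -> MTLTexture? {
        guard let cache = textureCache else { return nil }
        let planar = CVPixelBufferIsPlanar(pixelBuffer)
        let width = planar ? CVPixelBufferGetWidthOfPlane(pixelBuffer, plane) : CVPixelBufferGetWidth(pixelBuffer)
        let height = planar ? CVPixelBufferGetHeightOfPlane(pixelBuffer, plane) : CVPixelBufferGetHeight(pixelBuffer)
        var cvTexture: CVMetalTexture?
        let status = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, cache, pixelBuffer, nil, format, width, height, plane, &cvTexture)
        guard status == kCVReturnSuccess, let cvTexture = cvTexture else { return nil }
        return CVMetalTextureGetTexture(cvTexture)
    }
}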
This gives us video playback with sound. We can also make this conform to GPUImage3's ImageSource protocol and use it as a data source, which lets us operate on individual video frames, and even switch filters based on the timestamp to treat different time ranges of the video differently.
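Here is a sketch of that idea. The ImageSource requirements (targets and transmitPreviousImage(to:atIndex:)), plus TargetContainer, Texture, updateTargetsWithTexture, and sharedMetalRenderingDevice, are as they appear in the GPUImage3 source and may differ between versions; the bridge class, its process method, and the switch to BGRA output are my own, reusing the PixelBufferTextureConverter sketched above.

import AVFoundation
import GPUImage
import Metal

// Hypothetical bridge that feeds frames from XTVideoMovie into a GPUImage3 chain.
class XTMovieSource: ImageSource {
    let targets = TargetContainer()
    private let converter = PixelBufferTextureConverter(device: sharedMetalRenderingDevice.device)

    // Call this from the XTVideoMovieDelegate callbacks. `time` is where a
    // per-range filter switch would hook in. For brevity this assumes the
    // output settings were changed to kCVPixelFormatType_32BGRA; the NV12
    // format above needs a YUV-to-RGB pass first (see GPUImage3's MovieInput).
    func process(pixelBuffer: CVPixelBuffer, at time: CMTime) {
        guard let mtlTexture = converter?.texture(from: pixelBuffer, plane: 0, format: .bgra8Unorm) else {
            return
        }
        updateTargetsWithTexture(Texture(orientation: .portrait, texture: mtlTexture))
    }

    func transmitPreviousImage(to target: ImageConsumer, atIndex: UInt) {
        // Live video source: there is no previous frame to replay for new targets.
    }
}

Filters and the renderView then attach with GPUImage3's usual --> chaining.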
The full code is in Metal-10 (video playback).
Love life, record life!