Original post: https://blog.lm1024.club/archives/202012021932
A utility class that extracts every frame from a video file, kept here for future reuse.
import Foundation
import AVFoundation

public class WJVideoReader: NSObject {

    private let reader: AVAssetReader?
    private var videoTrackOutput: AVAssetReaderTrackOutput?
    private var isReading: Bool = false
    public var delegate: WJVideoSourceOutputDelegate?
    public var pushThread: Thread?
    let pushVideoQueue = DispatchQueue(label: "com.push.video.queue", qos: .default)

    init(path: String) {
        // Build a reader for the local file and decode the video track as 32-bit ARGB pixel buffers.
        let asset = AVURLAsset(url: URL(fileURLWithPath: path))
        self.reader = try? AVAssetReader(asset: asset)
        let videoTrack = asset.tracks(withMediaType: .video).first
        let outputSettings: [String: Any] = [kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32ARGB)]
        self.videoTrackOutput = AVAssetReaderTrackOutput(track: videoTrack!, outputSettings: outputSettings)
        super.init()

        // Skip the extra copy of each sample; the buffer is only valid until the next copyNextSampleBuffer().
        self.videoTrackOutput!.alwaysCopiesSampleData = false
        if self.reader!.canAdd(self.videoTrackOutput!) {
            self.reader?.add(self.videoTrackOutput!)
        }

        // A dedicated thread with a live run loop; decoded frames are handed to the delegate on it.
        self.pushThread = Thread(target: self, selector: #selector(launchRunloop), object: nil)
        self.pushThread!.start()
    }
    public func startReader() {
        if self.reader!.status != AVAssetReader.Status.reading {
            self.goReader()
        }
    }

    public func stopReader() {
        if self.reader!.status == AVAssetReader.Status.reading {
            self.reader!.cancelReading()
        }
    }
    private func goReader() {
        if self.reader?.status != AVAssetReader.Status.reading {
            self.reader?.startReading()
        }
        var lastTime: CMTime = .zero
        DispatchQueue.global().async { [weak self] in
            guard let self__ = self else { return }
            while self__.reader!.status == AVAssetReader.Status.reading {
                if let sampleBuffer = self__.videoTrackOutput?.copyNextSampleBuffer() {
                    // Deliver the frame on the push thread, then sleep for the gap between this
                    // frame's presentation timestamp and the previous one, so frames come out
                    // at roughly the original playback rate.
                    self__.perform(#selector(self__.pushSample(sample:)), on: self__.pushThread!, with: sampleBuffer, waitUntilDone: false)
                    let bufferTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)
                    var pauseTime = CMTimeSubtract(bufferTime, lastTime)
                    if lastTime == .zero {
                        // First frame: fall back to a fixed 40 ms interval.
                        pauseTime = CMTime(value: 4, timescale: 100)
                    }
                    lastTime = bufferTime
                    print(" pauseTime = \(CMTimeGetSeconds(pauseTime))")
                    Thread.sleep(forTimeInterval: CMTimeGetSeconds(pauseTime))
                }
            }
        }
    }
    @objc internal func launchRunloop() {
        autoreleasepool {
            print("launchRunloop")
            let currentThread: Thread = Thread.current
            currentThread.name = "com.wuji.video.push"
            // Attach a port so the run loop has a source and keeps running.
            let currentRunLoop: RunLoop = RunLoop.current
            currentRunLoop.add(NSMachPort(), forMode: .common)
            currentRunLoop.run()
        }
    }
    @objc private func pushSample(sample: CMSampleBuffer) {
        if let outputDelegate = self.delegate {
            outputDelegate.output(sampleBuffer: sample)
        }
    }
}
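
The post does not include the `WJVideoSourceOutputDelegate` protocol that the reader calls back into, nor an example of driving the class. Below is a minimal sketch, assuming the delegate only needs a single `output(sampleBuffer:)` callback; the `FrameDumper` consumer and the file path are illustrative placeholders, not part of the original code.

import AVFoundation

// Assumed shape of the delegate protocol: one callback per decoded frame.
public protocol WJVideoSourceOutputDelegate: AnyObject {
    func output(sampleBuffer: CMSampleBuffer)
}

// Hypothetical consumer that counts frames and logs their presentation times.
final class FrameDumper: WJVideoSourceOutputDelegate {
    private var frameCount = 0

    func output(sampleBuffer: CMSampleBuffer) {
        frameCount += 1
        let pts = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)
        print("frame #\(frameCount) at \(CMTimeGetSeconds(pts))s")
        // CMSampleBufferGetImageBuffer(sampleBuffer) yields the CVPixelBuffer
        // if the raw ARGB pixels of this frame are needed.
    }
}

// Hold strong references to both objects; reading stops if either is deallocated.
let dumper = FrameDumper()
let videoReader = WJVideoReader(path: "/path/to/video.mp4")  // placeholder path
videoReader.delegate = dumper
videoReader.startReader()

If you want to pull frames as fast as possible instead of pacing them at playback rate, the Thread.sleep call in goReader() can simply be removed.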