Thread Explosion
In multithreaded programming, GCD makes it extremely easy to dispatch a block onto a queue. When a large number of blocks are submitted to a concurrent queue, the queue may spin up many worker threads. Some of those tasks take locks internally and end up sleeping or blocking, which prompts the concurrent queue to create even more threads to run the remaining tasks. All of these threads then compete for CPU time, and a large share of it is wasted on context switching. This is what is commonly called thread explosion.
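As a rough illustration (a hypothetical snippet, not from any particular project), the following dispatches a burst of blocks that block their threads onto a global concurrent queue. Because each block parks its worker thread, GCD keeps bringing up new threads for the pending blocks, and the thread count quickly climbs far beyond the number of CPU cores.

import Foundation

let queue = DispatchQueue.global(qos: .userInitiated)
for i in 0..<500 {
    queue.async {
        // Simulates a task that blocks its thread (a lock wait, disk or network I/O, ...).
        Thread.sleep(forTimeInterval: 1)
        print("task \(i) ran on \(Thread.current)")
    }
}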
Solution
The fix is simple: limit the degree of concurrency. There are two concrete approaches:
- Create several serial queues instead of a concurrent queue
This follows the approach used by YYDispatchQueuePool: replace GCD's concurrent queue with a pool of serial queues. When a new task needs to be scheduled, one of the pre-created serial queues is picked by round-robin and the task is submitted to it.
Below is a simple Swift version:
import Foundation

/// A thread-safe integer guarded by a binary semaphore.
public final class AtomicInteger {
    private let lock = DispatchSemaphore(value: 1)
    private var _value: Int

    public init(value initialValue: Int = 0) {
        _value = initialValue
    }

    public var value: Int {
        get {
            lock.wait()
            defer { lock.signal() }
            return _value
        }
        set {
            lock.wait()
            defer { lock.signal() }
            _value = newValue
        }
    }

    /// Atomically decrements the value and returns the new value.
    public func decrementAndGet() -> Int {
        lock.wait()
        defer { lock.signal() }
        _value -= 1
        return _value
    }

    /// Atomically increments the value and returns the new value.
    public func incrementAndGet() -> Int {
        lock.wait()
        defer { lock.signal() }
        _value += 1
        return _value
    }
}
public class DispatchQueuePool {
    private static let maxQueueCount = 32
    /// Round-robin counter; must be atomic because `queue` may be accessed from multiple threads.
    private let counter = AtomicInteger(value: 0)
    private var queueCount: Int = 0
    private var serialQueuePool: [DispatchQueue] = []

    private func createSerialQueue(label: String, qos: DispatchQoS) {
        for index in 0..<queueCount {
            let queue = DispatchQueue(label: "\(label).\(index)", qos: qos)
            serialQueuePool.append(queue)
        }
    }

    init(_ label: String, qos: DispatchQoS, queueCount: Int = 0) {
        if queueCount > DispatchQueuePool.maxQueueCount {
            fatalError("queueCount must not exceed \(DispatchQueuePool.maxQueueCount)")
        }
        self.queueCount = queueCount > 0 ? queueCount : ProcessInfo.processInfo.activeProcessorCount
        createSerialQueue(label: label, qos: qos)
    }

    convenience init(_ label: String) {
        self.init(label, qos: .default)
    }

    /// Returns the next serial queue by round-robin (note the modulo, not division).
    var queue: DispatchQueue {
        return serialQueuePool[counter.incrementAndGet() % queueCount]
    }
}
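A minimal usage sketch (the label and task count are made up for illustration). Because each serial queue runs one block at a time, the number of busy worker threads is bounded by `queueCount` regardless of how many tasks are enqueued:

let pool = DispatchQueuePool("com.example.pool", qos: .utility)
for i in 0..<200 {
    pool.queue.async {
        // At most `queueCount` of these run concurrently, one per serial queue.
        print("pooled task \(i)")
    }
}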
- Use a semaphore to cap the number of concurrent tasks
Two styles of API are provided. One requires calling the `leave()` callback inside `work`, which is convenient when `work` itself starts nested asynchronous calls. The other runs synchronously with respect to the block: the semaphore is released as soon as `work()` returns.
import Foundation

class LimitedDispatchQueue {
    /// Semaphore that caps the number of blocks running at once.
    private let lock: DispatchSemaphore
    /// The concurrent queue that actually executes the work.
    private let concurrentQueue: DispatchQueue
    /// A serial queue used to wait on the semaphore and dispatch the work,
    /// so the caller's thread is never blocked by the semaphore itself.
    private let dispatchQueue: DispatchQueue

    init(_ label: String, qos: DispatchQoS = .default, limitCount: Int = 5) {
        concurrentQueue = DispatchQueue(label: label, qos: qos, attributes: .concurrent)
        lock = DispatchSemaphore(value: limitCount)
        dispatchQueue = DispatchQueue(label: label + ".dispatch")
    }

    typealias LimitedLeave = () -> Void

    /**
     Asynchronously dispatches a block on the limited concurrent queue.
     - Parameters:
        - work: The block to execute.
     - Precondition: The `LimitedLeave` closure passed to `work` must be called
       once the work (including any nested asynchronous callbacks) has finished.
     */
    public func async(execute work: @escaping (LimitedLeave) -> Void) {
        dispatchQueue.async { [weak self] in
            self?.lock.wait()
            self?.concurrentQueue.async {
                work({
                    self?.lock.signal()
                })
            }
        }
    }

    /**
     Synchronously dispatches a block on the limited concurrent queue.
     This method may block the calling thread.
     - Parameters:
        - work: The block to execute.
     - Precondition: The `LimitedLeave` closure passed to `work` must be called.
     */
    public func sync(execute work: @escaping (LimitedLeave) -> Void) {
        dispatchQueue.sync { [weak self] in
            self?.lock.wait()
            self?.concurrentQueue.sync {
                work({
                    self?.lock.signal()
                })
            }
        }
    }

    /**
     Asynchronously dispatches a block on the limited concurrent queue.
     The semaphore is signaled as soon as `work()` returns.
     - Parameters:
        - work: The block to execute.
     */
    public func async(execute work: @escaping () -> Void) {
        dispatchQueue.async { [weak self] in
            self?.lock.wait()
            self?.concurrentQueue.async {
                work()
                self?.lock.signal()
            }
        }
    }

    /**
     Synchronously dispatches a block on the limited concurrent queue.
     This method may block the calling thread.
     - Parameters:
        - work: The block to execute.
     */
    public func sync(execute work: @escaping () -> Void) {
        dispatchQueue.sync { [weak self] in
            self?.lock.wait()
            self?.concurrentQueue.sync {
                work()
                self?.lock.signal()
            }
        }
    }
}
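A usage sketch for both styles (the label, limit, and URL are illustrative). The plain overload releases a slot as soon as the block returns, while the `LimitedLeave` overload keeps the slot occupied until `leave()` is called, which is what you want when the block kicks off its own asynchronous work:

let limited = LimitedDispatchQueue("com.example.limited", limitCount: 4)

// Plain overload: the semaphore is signaled right after the block returns.
limited.async {
    print("plain work done")
}

// LimitedLeave overload: call leave() only after the nested callback finishes,
// otherwise the slot is never released.
limited.async { leave in
    URLSession.shared.dataTask(with: URL(string: "https://example.com")!) { _, _, _ in
        print("request finished")
        leave()
    }.resume()
}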