Detailed Analysis of the ARKit Framework (Part 9) — Face Tracking with AR Face Tracking and the TrueDepth Camera: Source Code (Part 3)

Version History

Version  Date
V1.0     2018.08.22

Preface

ARKit is a framework Apple introduced with iOS 11, announced on June 6, 2017. It lets us add AR functionality to an app in the simplest and fastest way possible. In this series we analyze the ARKit framework in detail; if you are interested, see the earlier installments:
1. Detailed Analysis of the ARKit Framework (Part 1) — Overview
2. Detailed Analysis of the ARKit Framework (Part 2) — About Augmented Reality and ARKit
3. Detailed Analysis of the ARKit Framework (Part 3) — Starting Your First AR Experience
4. Detailed Analysis of the ARKit Framework (Part 4) — Handling 3D Interaction and UI Controls in Augmented Reality
5. Detailed Analysis of the ARKit Framework (Part 5) — Creating Face-Based AR Experiences
6. Detailed Analysis of the ARKit Framework (Part 6) — Displaying an AR Experience with Metal
7. Detailed Analysis of the ARKit Framework (Part 7) — Face Tracking with AR Face Tracking and the TrueDepth Camera (Part 1)
8. Detailed Analysis of the ARKit Framework (Part 8) — Face Tracking with AR Face Tracking and the TrueDepth Camera (Part 2)

Source Code

1. EmojiBlingViewController.swift
import UIKit
import ARKit

class EmojiBlingViewController: UIViewController {

  @IBOutlet var sceneView: ARSCNView!
  let noseOptions = ["👃", "🐽", "💧", " "]
  let eyeOptions = ["👁", "🌕", "🌟", "🔥", "⚽️", "🔎", " "]
  let mouthOptions = ["👄", "👅", "❤️", " "]
  let hatOptions = ["🎓", "🎩", "🧢", "⛑", "👒", " "]
  // Feature names paired with the ARFaceGeometry vertex indices used to position each emoji.
  let features = ["nose", "leftEye", "rightEye", "mouth", "hat"]
  let featureIndices = [[9], [1064], [42], [24, 25], [20]]
  
  override func viewDidLoad() {
    super.viewDidLoad()
    
    // Face tracking requires a device with a TrueDepth camera.
    guard ARFaceTrackingConfiguration.isSupported else { fatalError() }
    sceneView.delegate = self
    
  }
  override func viewWillAppear(_ animated: Bool) {
    super.viewWillAppear(animated)
    
    let configuration = ARFaceTrackingConfiguration()
    
    sceneView.session.run(configuration)
  }
  
  override func viewWillDisappear(_ animated: Bool) {
    super.viewWillDisappear(animated)
    
    sceneView.session.pause()
  }
  
  // Moves each emoji child node onto its face-mesh vertices and scales the eyes
  // and mouth from the anchor's blend shape coefficients (blinks and jaw opening).
  func updateFeatures(for node: SCNNode, using anchor: ARFaceAnchor) {
    for (feature, indices) in zip(features, featureIndices) {
      let child = node.childNode(withName: feature, recursively: false) as? EmojiNode
      let vertices = indices.map { anchor.geometry.vertices[$0] }
      child?.updatePosition(for: vertices)
      
      switch feature {
      case "leftEye":
        let scaleX = child?.scale.x ?? 1.0
        let eyeBlinkValue = anchor.blendShapes[.eyeBlinkLeft]?.floatValue ?? 0.0
        child?.scale = SCNVector3(scaleX, 1.0 - eyeBlinkValue, 1.0)
      case "rightEye":
        let scaleX = child?.scale.x ?? 1.0
        let eyeBlinkValue = anchor.blendShapes[.eyeBlinkRight]?.floatValue ?? 0.0
        child?.scale = SCNVector3(scaleX, 1.0 - eyeBlinkValue, 1.0)
      case "mouth":
        let jawOpenValue = anchor.blendShapes[.jawOpen]?.floatValue ?? 0.2
        child?.scale = SCNVector3(1.0, 0.8 + jawOpenValue, 1.0)
      default:
        break
      }
    }
  }
  
  @IBAction func handleTap(_ sender: UITapGestureRecognizer) {
    let location = sender.location(in: sceneView)
    let results = sceneView.hitTest(location, options: nil)
    if let result = results.first,
      let node = result.node as? EmojiNode {
      node.next()
    }
  }
}

extension EmojiBlingViewController: ARSCNViewDelegate {
  
  // Called when ARKit adds a face anchor: build the face mesh node and attach an
  // EmojiNode child for each tracked feature.
  func renderer(_ renderer: SCNSceneRenderer, nodeFor anchor: ARAnchor) -> SCNNode? {
    guard let faceAnchor = anchor as? ARFaceAnchor,
          let device = sceneView.device else { return nil }
    let faceGeometry = ARSCNFaceGeometry(device: device)
    let node = SCNNode(geometry: faceGeometry)
    // Draw the face mesh as a wireframe, then make it fully transparent so only
    // the emoji nodes attached to it are visible.
    node.geometry?.firstMaterial?.fillMode = .lines
    node.geometry?.firstMaterial?.transparency = 0.0
    let noseNode = EmojiNode(with: noseOptions)
    noseNode.name = "nose"
    node.addChildNode(noseNode)
    
    let leftEyeNode = EmojiNode(with: eyeOptions)
    leftEyeNode.name = "leftEye"
    leftEyeNode.rotation = SCNVector4(0, 1, 0, GLKMathDegreesToRadians(180.0))
    node.addChildNode(leftEyeNode)
    
    let rightEyeNode = EmojiNode(with: eyeOptions)
    rightEyeNode.name = "rightEye"
    node.addChildNode(rightEyeNode)
    
    let mouthNode = EmojiNode(with: mouthOptions)
    mouthNode.name = "mouth"
    node.addChildNode(mouthNode)
    
    let hatNode = EmojiNode(with: hatOptions)
    hatNode.name = "hat"
    node.addChildNode(hatNode)
    
    updateFeatures(for: node, using: faceAnchor)
    return node
  }
  
  // Called whenever ARKit updates the face anchor: refresh the mesh geometry and
  // re-run the feature layout.
  func renderer(_ renderer: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {
    guard let faceAnchor = anchor as? ARFaceAnchor,
          let faceGeometry = node.geometry as? ARSCNFaceGeometry else { return }
    
    faceGeometry.update(from: faceAnchor.geometry)
    updateFeatures(for: node, using: faceAnchor)
  }
}
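
The featureIndices array above hard-codes vertex indices of the ARKit face mesh (for example, vertex 9 is used here for the nose). If you want to experiment with other placements, a minimal debugging sketch like the following (a hypothetical helper, not part of the project) can log where a candidate index sits:

import ARKit

// Hypothetical helper for experimentation only: prints the size of the face mesh
// and the current position of a candidate vertex index, so featureIndices can be
// tuned by trial and error.
func logVertex(at index: Int, of anchor: ARFaceAnchor) {
  let vertices = anchor.geometry.vertices
  guard vertices.indices.contains(index) else {
    print("Index \(index) is out of range; the mesh has \(vertices.count) vertices")
    return
  }
  print("Vertex \(index) of \(vertices.count): \(vertices[index])")
}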
2. EmojiNode.swift
import SceneKit

class EmojiNode: SCNNode {
  
  var options: [String]
  var index = 0
  
  init(with options: [String], width: CGFloat = 0.06, height: CGFloat = 0.06) {
    self.options = options
    
    super.init()
    
    let plane = SCNPlane(width: width, height: height)
    plane.firstMaterial?.diffuse.contents = (options.first ?? " ").image()
    plane.firstMaterial?.isDoubleSided = true
    
    geometry = plane
  }
  
  required init?(coder aDecoder: NSCoder) {
    fatalError("init(coder:) has not been implemented")
  }
}

// MARK: - Custom functions

extension EmojiNode {
  
  // Place the node at the average position of the supplied face-mesh vertices.
  func updatePosition(for vectors: [vector_float3]) {
    let newPos = vectors.reduce(vector_float3(), +) / Float(vectors.count)
    position = SCNVector3(newPos)
  }
  
  func next() {
    index = (index + 1) % options.count
    
    if let plane = geometry as? SCNPlane {
      plane.firstMaterial?.diffuse.contents = options[index].image()
      plane.firstMaterial?.isDoubleSided = true
    }
  }
}
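
Because EmojiNode is just an SCNPlane textured with an emoji image, it can also be exercised on its own. A minimal usage sketch (the parent node here is illustrative, standing in for the face node built in renderer(_:nodeFor:)):

import SceneKit

// Illustrative usage only: attach an EmojiNode to a parent node and cycle its emoji.
let parentFaceNode = SCNNode()
let hatNode = EmojiNode(with: ["🎩", "🧢", " "])
hatNode.name = "hat"
parentFaceNode.addChildNode(hatNode)
hatNode.next() // swaps the plane's diffuse texture to the next emoji in the list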
3. StringExtension.swift
import UIKit

extension String {
  
  func image() -> UIImage? {
    
    let size = CGSize(width: 20, height: 22)
    
    UIGraphicsBeginImageContextWithOptions(size, false, 0)
    UIColor.clear.set()
    
    let rect = CGRect(origin: .zero, size: size)
    UIRectFill(CGRect(origin: .zero, size: size))
    
    // Draw the string (an emoji) into the context; NSString provides draw(in:withAttributes:).
    (self as NSString).draw(in: rect, withAttributes: [.font: UIFont.systemFont(ofSize: 15)])
    
    let image = UIGraphicsGetImageFromCurrentImageContext()
    
    UIGraphicsEndImageContext()
    
    return image
  }
}
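
The extension renders the string (here a single emoji) into a small transparent UIImage, which EmojiNode uses as the plane's diffuse texture. A quick sanity check, for example in a playground:

import UIKit

// The optional result is nil only if a graphics context could not be created.
let noseImage = "👃".image()
print(noseImage?.size ?? .zero) // expected to be 20 x 22 points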

Afterword

This article walked through the source code for face tracking with AR Face Tracking and the TrueDepth camera. If you found it useful, a like or a follow is appreciated~~~
