gpt4 book ai didi

ios - ARKit:在 Xcode 中跟踪头部上下运动

转载 作者:行者123 更新时间:2023-12-04 13:32:19 25 4
gpt4 key购买 nike

我已经实现了以下代码,它允许我跟踪一个人的不同面部表情。但是,使用此代码,我无法在 Xcode 中使用 ARKit 跟踪人的头部上下运动。如果有人可以让我知道如何使用 Xcode 使用 ARKit 跟踪人的头部上下运动,我将不胜感激!

import UIKit
import SceneKit
import ARKit

class ViewController: UIViewController, ARSCNViewDelegate {

    @IBOutlet var sceneView: ARSCNView!
    @IBOutlet weak var faceLabel: UILabel!
    @IBOutlet weak var labelView: UIView!

    /// Accumulated, human-readable description of the currently detected
    /// expression and head pose; rebuilt on every face-anchor update.
    var analysis = ""

    override func viewDidLoad() {
        super.viewDidLoad()

        labelView.layer.cornerRadius = 10

        sceneView.delegate = self
        sceneView.showsStatistics = true

        // Face tracking requires TrueDepth hardware; fail fast on
        // unsupported devices rather than running a dead session.
        guard ARFaceTrackingConfiguration.isSupported else {
            fatalError("Face tracking is not supported on this device")
        }
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)

        // Create a session configuration
        let configuration = ARFaceTrackingConfiguration()

        // Run the view's session
        sceneView.session.run(configuration)
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)

        // Pause the view's session
        sceneView.session.pause()
    }

    // MARK: - ARSCNViewDelegate

    /// Supplies a wireframe face-mesh node for each detected face anchor.
    ///
    /// `ARSCNFaceGeometry(device:)` is a *failable* initializer and
    /// `sceneView.device` is optional, so both are guarded instead of
    /// force-unwrapped; non-face anchors get no custom node.
    func renderer(_ renderer: SCNSceneRenderer, nodeFor anchor: ARAnchor) -> SCNNode? {
        guard anchor is ARFaceAnchor,
              let device = sceneView.device,
              let faceMesh = ARSCNFaceGeometry(device: device) else {
            return nil
        }
        let node = SCNNode(geometry: faceMesh)
        node.geometry?.firstMaterial?.fillMode = .lines
        return node
    }

    /// Keeps the rendered mesh in sync with the tracked face and refreshes
    /// the on-screen label with the latest analysis.
    func renderer(_ renderer: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {
        guard let faceAnchor = anchor as? ARFaceAnchor,
              let faceGeometry = node.geometry as? ARSCNFaceGeometry else {
            return
        }
        faceGeometry.update(from: faceAnchor.geometry)
        expression(anchor: faceAnchor)

        // The SceneKit renderer callback arrives off the main thread;
        // UIKit views must only be touched on the main queue.
        DispatchQueue.main.async {
            self.faceLabel.text = self.analysis
        }
    }

    /// Rebuilds `analysis` from the anchor's blend-shape coefficients and
    /// adds a head up/down estimate (the behavior the original code lacked).
    func expression(anchor: ARFaceAnchor) {
        let smileLeft = anchor.blendShapes[.mouthSmileLeft]
        let smileRight = anchor.blendShapes[.mouthSmileRight]
        let cheekPuff = anchor.blendShapes[.cheekPuff]
        let tongue = anchor.blendShapes[.tongueOut]
        self.analysis = ""

        if ((smileLeft?.decimalValue ?? 0.0) + (smileRight?.decimalValue ?? 0.0)) > 0.9 {
            self.analysis += "You are smiling. "
        }

        if cheekPuff?.decimalValue ?? 0.0 > 0.1 {
            self.analysis += "Your cheeks are puffed. "
        }

        if tongue?.decimalValue ?? 0.0 > 0.1 {
            self.analysis += "Don't stick your tongue out! "
        }

        // Head up/down: blend shapes do not include head pose, so read the
        // anchor's `lookAtPoint` (a point in face-anchor space the eyes are
        // estimated to look at). NOTE(review): this tracks *gaze*, which
        // usually follows head pitch but is not identical to it — for pure
        // head rotation, derive pitch from `faceAnchor.transform` instead.
        // The ±0.05 dead zone suppresses jitter around neutral.
        let verticalGaze = anchor.lookAtPoint.y
        if verticalGaze > 0.05 {
            self.analysis += "Head is tilted up. "
        } else if verticalGaze < -0.05 {
            self.analysis += "Head is tilted down. "
        }
    }
}

最佳答案

实例属性 blendShapes 是一个包含 52 个系数的字典,每个系数对应一种面部表情特征。点头(头部上下运动)并不在这些表情系数之内,因此检测点头最直接的方法是读取 ARFaceAnchor 的朝向信息(例如 lookAtPoint)或其对应节点 node 的旋转角度。

/// Delegate callback fired whenever SceneKit updates a node for an anchor.
/// Detects downward head/gaze direction and forward head pitch.
func renderer(_ renderer: SCNSceneRenderer,
              didUpdate node: SCNNode,
              for anchor: ARAnchor) {

    guard let faceAnchor = anchor as? ARFaceAnchor,
          let faceGeo = node.geometry as? ARSCNFaceGeometry else { return }

    // The original snippet bound `faceGeo` but never used it; keep the
    // rendered mesh in sync with the tracked face like the question's code.
    faceGeo.update(from: faceAnchor.geometry)

    // `lookAtPoint` is in face-anchor space: a non-positive y estimates the
    // gaze (and typically the head) is directed level or downward.
    if faceAnchor.lookAtPoint.y <= 0 {
        print("A head is...")
    }

    // BUG FIX: `node.orientation` is a quaternion, so its `x` component is
    // sin(angle/2), not radians — comparing it to pi/32 was meaningless.
    // `eulerAngles.x` is the node's pitch in radians and compares correctly.
    if node.eulerAngles.x >= Float.pi / 32 {
        print("A head is...")
    }
}

关于ios - ARKit:在 Xcode 中跟踪头部上下运动,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/64275941/

25 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com