The Apple ARKitVision sample has the following declaration in its ViewController.swift file:
// The view controller that displays the status and "restart experience" UI.
private lazy var statusViewController: StatusViewController = {
    return children.lazy.compactMap({ $0 as? StatusViewController }).first!
}()
However, if I copy the same views and source files and merge them into another test storyboard/project, I get the error "Instance member 'children' cannot be used on type 'StatusViewController'".
So why does this work in the ARKitVision sample but not when I set it up myself from scratch? What else does the ARKitVision sample do to make it work? Thanks 😊
The full class definition of StatusViewController is:
/*
See LICENSE folder for this sample’s licensing information.
Abstract:
Utility class for showing messages above the AR view.
*/
import Foundation
import ARKit
/**
Displayed at the top of the main interface of the app that allows users to see
the status of the AR experience, as well as the ability to control restarting
the experience altogether.
- Tag: StatusViewController
*/
class StatusViewController: UIViewController {
    // MARK: - Types

    enum MessageType {
        case trackingStateEscalation
        case planeEstimation
        case contentPlacement
        case focusSquare

        static var all: [MessageType] = [
            .trackingStateEscalation,
            .planeEstimation,
            .contentPlacement,
            .focusSquare
        ]
    }

    // MARK: - IBOutlets

    @IBOutlet weak private var messagePanel: UIVisualEffectView!
    @IBOutlet weak private var messageLabel: UILabel!
    @IBOutlet weak private var restartExperienceButton: UIButton!

    // MARK: - Properties
    /// Triggered when the "Restart Experience" button is tapped.
    var restartExperienceHandler: () -> Void = {}

    /// Seconds before the timer message should fade out. Adjust if the app needs longer transient messages.
    private let displayDuration: TimeInterval = 6

    // Timer for hiding messages.
    private var messageHideTimer: Timer?

    private var timers: [MessageType: Timer] = [:]

    // MARK: - Message Handling

    func showMessage(_ text: String, autoHide: Bool = true) {
        // Cancel any previous hide timer.
        messageHideTimer?.invalidate()

        messageLabel.text = text

        // Make sure status is showing.
        setMessageHidden(false, animated: true)

        if autoHide {
            messageHideTimer = Timer.scheduledTimer(withTimeInterval: displayDuration, repeats: false, block: { [weak self] _ in
                self?.setMessageHidden(true, animated: true)
            })
        }
    }

    func scheduleMessage(_ text: String, inSeconds seconds: TimeInterval, messageType: MessageType) {
        cancelScheduledMessage(for: messageType)

        let timer = Timer.scheduledTimer(withTimeInterval: seconds, repeats: false, block: { [weak self] timer in
            self?.showMessage(text)
            timer.invalidate()
        })

        timers[messageType] = timer
    }
    func cancelScheduledMessage(for messageType: MessageType) {
        timers[messageType]?.invalidate()
        timers[messageType] = nil
    }

    func cancelAllScheduledMessages() {
        for messageType in MessageType.all {
            cancelScheduledMessage(for: messageType)
        }
    }

    // MARK: - ARKit

    func showTrackingQualityInfo(for trackingState: ARCamera.TrackingState, autoHide: Bool) {
        showMessage(trackingState.presentationString, autoHide: autoHide)
    }

    func escalateFeedback(for trackingState: ARCamera.TrackingState, inSeconds seconds: TimeInterval) {
        cancelScheduledMessage(for: .trackingStateEscalation)

        let timer = Timer.scheduledTimer(withTimeInterval: seconds, repeats: false, block: { [unowned self] _ in
            self.cancelScheduledMessage(for: .trackingStateEscalation)

            var message = trackingState.presentationString
            if let recommendation = trackingState.recommendation {
                message.append(": \(recommendation)")
            }

            self.showMessage(message, autoHide: false)
        })

        timers[.trackingStateEscalation] = timer
    }

    // MARK: - IBActions

    @IBAction private func restartExperience(_ sender: UIButton) {
        restartExperienceHandler()
    }

    // MARK: - Panel Visibility

    private func setMessageHidden(_ hide: Bool, animated: Bool) {
        // The panel starts out hidden, so show it before animating opacity.
        messagePanel.isHidden = false

        guard animated else {
            messagePanel.alpha = hide ? 0 : 1
            return
        }

        UIView.animate(withDuration: 0.2, delay: 0, options: [.beginFromCurrentState], animations: {
            self.messagePanel.alpha = hide ? 0 : 1
        }, completion: nil)
    }
}
extension ARCamera.TrackingState {
    var presentationString: String {
        switch self {
        case .notAvailable:
            return "TRACKING UNAVAILABLE"
        case .normal:
            return "TRACKING NORMAL"
        case .limited(.excessiveMotion):
            return "TRACKING LIMITED\nExcessive motion"
        case .limited(.insufficientFeatures):
            return "TRACKING LIMITED\nLow detail"
        case .limited(.initializing):
            return "Initializing"
        case .limited(.relocalizing):
            return "Recovering from interruption"
        }
    }

    var recommendation: String? {
        switch self {
        case .limited(.excessiveMotion):
            return "Try slowing down your movement, or reset the session."
        case .limited(.insufficientFeatures):
            return "Try pointing at a flat surface, or reset the session."
        case .limited(.relocalizing):
            return "Return to the location where you left off or try resetting the session."
        default:
            return nil
        }
    }
}
The definition of the ViewController class is:
/*
See LICENSE folder for this sample’s licensing information.
Abstract:
Main view controller for the ARKitVision sample.
*/
import UIKit
import SpriteKit
import ARKit
import Vision
class ViewController: UIViewController, UIGestureRecognizerDelegate, ARSKViewDelegate, ARSessionDelegate {
    @IBOutlet weak var sceneView: ARSKView!

    // The view controller that displays the status and "restart experience" UI.
    private lazy var statusViewController: StatusViewController = {
        return children.lazy.compactMap({ $0 as? StatusViewController }).first!
    }()

    // MARK: - View controller lifecycle

    override func viewDidLoad() {
        super.viewDidLoad()

        // Configure and present the SpriteKit scene that draws overlay content.
        let overlayScene = SKScene()
        overlayScene.scaleMode = .aspectFill
        sceneView.delegate = self
        sceneView.presentScene(overlayScene)
        sceneView.session.delegate = self

        // Hook up status view controller callback.
        statusViewController.restartExperienceHandler = { [unowned self] in
            self.restartSession()
        }
    }
    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)

        // Create a session configuration
        let configuration = ARWorldTrackingConfiguration()

        // Run the view's session
        sceneView.session.run(configuration)
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)

        // Pause the view's session
        sceneView.session.pause()
    }

    // MARK: - ARSessionDelegate

    // Pass camera frames received from ARKit to Vision (when not already processing one)
    /// - Tag: ConsumeARFrames
    func session(_ session: ARSession, didUpdate frame: ARFrame) {
        // Do not enqueue other buffers for processing while another Vision task is still running.
        // The camera stream has only a finite amount of buffers available; holding too many buffers for analysis would starve the camera.
        guard currentBuffer == nil, case .normal = frame.camera.trackingState else {
            return
        }

        // Retain the image buffer for Vision processing.
        self.currentBuffer = frame.capturedImage
        classifyCurrentImage()
    }
    // MARK: - Vision classification

    // Vision classification request and model
    /// - Tag: ClassificationRequest
    private lazy var classificationRequest: VNCoreMLRequest = {
        do {
            // Instantiate the model from its generated Swift class.
            let model = try VNCoreMLModel(for: Inceptionv3().model)
            let request = VNCoreMLRequest(model: model, completionHandler: { [weak self] request, error in
                self?.processClassifications(for: request, error: error)
            })

            // Crop input images to square area at center, matching the way the ML model was trained.
            request.imageCropAndScaleOption = .centerCrop

            // Use CPU for Vision processing to ensure that there are adequate GPU resources for rendering.
            request.usesCPUOnly = true

            return request
        } catch {
            fatalError("Failed to load Vision ML model: \(error)")
        }
    }()

    // The pixel buffer being held for analysis; used to serialize Vision requests.
    private var currentBuffer: CVPixelBuffer?

    // Queue for dispatching vision classification requests
    private let visionQueue = DispatchQueue(label: "com.example.apple-samplecode.ARKitVision.serialVisionQueue")

    // Run the Vision+ML classifier on the current image buffer.
    /// - Tag: ClassifyCurrentImage
    private func classifyCurrentImage() {
        // Most computer vision tasks are not rotation agnostic so it is important to pass in the orientation of the image with respect to device.
        let orientation = CGImagePropertyOrientation(UIDevice.current.orientation)

        let requestHandler = VNImageRequestHandler(cvPixelBuffer: currentBuffer!, orientation: orientation)
        visionQueue.async {
            do {
                // Release the pixel buffer when done, allowing the next buffer to be processed.
                defer { self.currentBuffer = nil }
                try requestHandler.perform([self.classificationRequest])
            } catch {
                print("Error: Vision request failed with error \"\(error)\"")
            }
        }
    }
    // Classification results
    private var identifierString = ""
    private var confidence: VNConfidence = 0.0

    // Handle completion of the Vision request and choose results to display.
    /// - Tag: ProcessClassifications
    func processClassifications(for request: VNRequest, error: Error?) {
        guard let results = request.results else {
            print("Unable to classify image.\n\(error!.localizedDescription)")
            return
        }
        // The `results` will always be `VNClassificationObservation`s, as specified by the Core ML model in this project.
        let classifications = results as! [VNClassificationObservation]

        // Show a label for the highest-confidence result (but only above a minimum confidence threshold).
        if let bestResult = classifications.first(where: { result in result.confidence > 0.5 }),
            let label = bestResult.identifier.split(separator: ",").first {
            identifierString = String(label)
            confidence = bestResult.confidence
        } else {
            identifierString = ""
            confidence = 0
        }

        DispatchQueue.main.async { [weak self] in
            self?.displayClassifierResults()
        }
    }

    // Show the classification results in the UI.
    private func displayClassifierResults() {
        guard !self.identifierString.isEmpty else {
            return // No object was classified.
        }
        let message = String(format: "Detected \(self.identifierString) with %.2f", self.confidence * 100) + "% confidence"
        statusViewController.showMessage(message)
    }
    // MARK: - Tap gesture handler & ARSKViewDelegate

    // Labels for classified objects by ARAnchor UUID
    private var anchorLabels = [UUID: String]()

    // When the user taps, add an anchor associated with the current classification result.
    /// - Tag: PlaceLabelAtLocation
    @IBAction func placeLabelAtLocation(sender: UITapGestureRecognizer) {
        let hitLocationInView = sender.location(in: sceneView)
        let hitTestResults = sceneView.hitTest(hitLocationInView, types: [.featurePoint, .estimatedHorizontalPlane])
        if let result = hitTestResults.first {
            // Add a new anchor at the tap location.
            let anchor = ARAnchor(transform: result.worldTransform)
            sceneView.session.add(anchor: anchor)

            // Track anchor ID to associate text with the anchor after ARKit creates a corresponding SKNode.
            anchorLabels[anchor.identifier] = identifierString
        }
    }

    // When an anchor is added, provide a SpriteKit node for it and set its text to the classification label.
    /// - Tag: UpdateARContent
    func view(_ view: ARSKView, didAdd node: SKNode, for anchor: ARAnchor) {
        guard let labelText = anchorLabels[anchor.identifier] else {
            fatalError("missing expected associated label for anchor")
        }
        let label = TemplateLabelNode(text: labelText)
        node.addChild(label)
    }
    // MARK: - AR Session Handling

    func session(_ session: ARSession, cameraDidChangeTrackingState camera: ARCamera) {
        statusViewController.showTrackingQualityInfo(for: camera.trackingState, autoHide: true)

        switch camera.trackingState {
        case .notAvailable, .limited:
            statusViewController.escalateFeedback(for: camera.trackingState, inSeconds: 3.0)
        case .normal:
            statusViewController.cancelScheduledMessage(for: .trackingStateEscalation)
            // Unhide content after successful relocalization.
            setOverlaysHidden(false)
        }
    }

    func session(_ session: ARSession, didFailWithError error: Error) {
        guard error is ARError else { return }

        let errorWithInfo = error as NSError
        let messages = [
            errorWithInfo.localizedDescription,
            errorWithInfo.localizedFailureReason,
            errorWithInfo.localizedRecoverySuggestion
        ]

        // Filter out optional error messages.
        let errorMessage = messages.compactMap({ $0 }).joined(separator: "\n")
        DispatchQueue.main.async {
            self.displayErrorMessage(title: "The AR session failed.", message: errorMessage)
        }
    }

    func sessionWasInterrupted(_ session: ARSession) {
        setOverlaysHidden(true)
    }

    func sessionShouldAttemptRelocalization(_ session: ARSession) -> Bool {
        /*
         Allow the session to attempt to resume after an interruption.
         This process may not succeed, so the app must be prepared
         to reset the session if the relocalizing status continues
         for a long time -- see `escalateFeedback` in `StatusViewController`.
         */
        return true
    }

    private func setOverlaysHidden(_ shouldHide: Bool) {
        sceneView.scene!.children.forEach { node in
            if shouldHide {
                // Hide overlay content immediately during relocalization.
                node.alpha = 0
            } else {
                // Fade overlay content in after relocalization succeeds.
                node.run(.fadeIn(withDuration: 0.5))
            }
        }
    }

    private func restartSession() {
        statusViewController.cancelAllScheduledMessages()
        statusViewController.showMessage("RESTARTING SESSION")

        anchorLabels = [UUID: String]()

        let configuration = ARWorldTrackingConfiguration()
        sceneView.session.run(configuration, options: [.resetTracking, .removeExistingAnchors])
    }

    // MARK: - Error handling

    private func displayErrorMessage(title: String, message: String) {
        // Present an alert informing about the error that has occurred.
        let alertController = UIAlertController(title: title, message: message, preferredStyle: .alert)
        let restartAction = UIAlertAction(title: "Restart Session", style: .default) { _ in
            alertController.dismiss(animated: true, completion: nil)
            self.restartSession()
        }
        alertController.addAction(restartAction)
        present(alertController, animated: true, completion: nil)
    }
}
Best Answer
This suggests that your StatusViewController class does not inherit from UIViewController, since the children property has been available on UIViewController subclasses for some time now. Could you share how you set up StatusViewController?
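For illustration, here is a minimal sketch of that point. ContainerViewController is a hypothetical name standing in for the sample's ViewController, and the sketch assumes the StatusViewController class from the listing above.

import UIKit

// `children` is an instance property inherited from UIViewController, so this
// lazy lookup compiles only inside a UIViewController subclass.
class ContainerViewController: UIViewController {
    // Finds the embedded StatusViewController among the child view controllers.
    private lazy var statusViewController: StatusViewController = {
        return children.lazy.compactMap({ $0 as? StatusViewController }).first!
    }()
}

// If the enclosing class (or StatusViewController itself) does not descend
// from UIViewController, `children` cannot be resolved here and the
// declaration fails to compile.

Note that children only contains view controllers that are actually embedded, for example through a storyboard container view, which is how the ARKitVision storyboard attaches StatusViewController. If a copied storyboard is missing that embedding, the declaration compiles but the first! lookup crashes at runtime.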
Regarding "ios - Swift: Instance member cannot be used on type in the ARKitVision sample", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/50714737/