Apple's ARKitVision sample has the following declaration in its ViewController.swift file:
// The view controller that displays the status and "restart experience" UI.
private lazy var statusViewController: StatusViewController = {
return children.lazy.compactMap({ $0 as? StatusViewController }).first!
}()
However, if I copy the same views and source files and merge them into another test storyboard/project, I get the error "Instance member 'children' cannot be used on type 'StatusViewController'".
So why does this work in the ARKitVision sample, but not when I set it up from scratch myself? What else does the ARKitVision sample do to make it work? Thanks 😊
The complete class definition of StatusViewController is:
/*
See LICENSE folder for this sample’s licensing information.
Abstract:
Utility class for showing messages above the AR view.
*/
import Foundation
import ARKit
/**
Displayed at the top of the main interface of the app that allows users to see
the status of the AR experience, as well as the ability to control restarting
the experience altogether.
- Tag: StatusViewController
*/
class StatusViewController: UIViewController {
// MARK: - Types
enum MessageType {
case trackingStateEscalation
case planeEstimation
case contentPlacement
case focusSquare
static var all: [MessageType] = [
.trackingStateEscalation,
.planeEstimation,
.contentPlacement,
.focusSquare
]
}
// MARK: - IBOutlets
@IBOutlet weak private var messagePanel: UIVisualEffectView!
@IBOutlet weak private var messageLabel: UILabel!
@IBOutlet weak private var restartExperienceButton: UIButton!
// MARK: - Properties
/// Triggered when the "Restart Experience" button is tapped.
var restartExperienceHandler: () -> Void = {}
/// Seconds before the timer message should fade out. Adjust if the app needs longer transient messages.
private let displayDuration: TimeInterval = 6
// Timer for hiding messages.
private var messageHideTimer: Timer?
private var timers: [MessageType: Timer] = [:]
// MARK: - Message Handling
func showMessage(_ text: String, autoHide: Bool = true) {
// Cancel any previous hide timer.
messageHideTimer?.invalidate()
messageLabel.text = text
// Make sure status is showing.
setMessageHidden(false, animated: true)
if autoHide {
messageHideTimer = Timer.scheduledTimer(withTimeInterval: displayDuration, repeats: false, block: { [weak self] _ in
self?.setMessageHidden(true, animated: true)
})
}
}
func scheduleMessage(_ text: String, inSeconds seconds: TimeInterval, messageType: MessageType) {
cancelScheduledMessage(for: messageType)
let timer = Timer.scheduledTimer(withTimeInterval: seconds, repeats: false, block: { [weak self] timer in
self?.showMessage(text)
timer.invalidate()
})
timers[messageType] = timer
}
func cancelScheduledMessage(`for` messageType: MessageType) {
timers[messageType]?.invalidate()
timers[messageType] = nil
}
func cancelAllScheduledMessages() {
for messageType in MessageType.all {
cancelScheduledMessage(for: messageType)
}
}
// MARK: - ARKit
func showTrackingQualityInfo(for trackingState: ARCamera.TrackingState, autoHide: Bool) {
showMessage(trackingState.presentationString, autoHide: autoHide)
}
func escalateFeedback(for trackingState: ARCamera.TrackingState, inSeconds seconds: TimeInterval) {
cancelScheduledMessage(for: .trackingStateEscalation)
let timer = Timer.scheduledTimer(withTimeInterval: seconds, repeats: false, block: { [unowned self] _ in
self.cancelScheduledMessage(for: .trackingStateEscalation)
var message = trackingState.presentationString
if let recommendation = trackingState.recommendation {
message.append(": \(recommendation)")
}
self.showMessage(message, autoHide: false)
})
timers[.trackingStateEscalation] = timer
}
// MARK: - IBActions
@IBAction private func restartExperience(_ sender: UIButton) {
restartExperienceHandler()
}
// MARK: - Panel Visibility
private func setMessageHidden(_ hide: Bool, animated: Bool) {
// The panel starts out hidden, so show it before animating opacity.
messagePanel.isHidden = false
guard animated else {
messagePanel.alpha = hide ? 0 : 1
return
}
UIView.animate(withDuration: 0.2, delay: 0, options: [.beginFromCurrentState], animations: {
self.messagePanel.alpha = hide ? 0 : 1
}, completion: nil)
}
}
extension ARCamera.TrackingState {
var presentationString: String {
switch self {
case .notAvailable:
return "TRACKING UNAVAILABLE"
case .normal:
return "TRACKING NORMAL"
case .limited(.excessiveMotion):
return "TRACKING LIMITED\nExcessive motion"
case .limited(.insufficientFeatures):
return "TRACKING LIMITED\nLow detail"
case .limited(.initializing):
return "Initializing"
case .limited(.relocalizing):
return "Recovering from interruption"
}
}
var recommendation: String? {
switch self {
case .limited(.excessiveMotion):
return "Try slowing down your movement, or reset the session."
case .limited(.insufficientFeatures):
return "Try pointing at a flat surface, or reset the session."
case .limited(.relocalizing):
return "Return to the location where you left off or try resetting the session."
default:
return nil
}
}
}
The definition of the ViewController class is:
/*
See LICENSE folder for this sample’s licensing information.
Abstract:
Main view controller for the ARKitVision sample.
*/
import UIKit
import SpriteKit
import ARKit
import Vision
class ViewController: UIViewController, UIGestureRecognizerDelegate, ARSKViewDelegate, ARSessionDelegate {
@IBOutlet weak var sceneView: ARSKView!
// The view controller that displays the status and "restart experience" UI.
private lazy var statusViewController: StatusViewController = {
return children.lazy.compactMap({ $0 as? StatusViewController }).first!
}()
// MARK: - View controller lifecycle
override func viewDidLoad() {
super.viewDidLoad()
// Configure and present the SpriteKit scene that draws overlay content.
let overlayScene = SKScene()
overlayScene.scaleMode = .aspectFill
sceneView.delegate = self
sceneView.presentScene(overlayScene)
sceneView.session.delegate = self
// Hook up status view controller callback.
statusViewController.restartExperienceHandler = { [unowned self] in
self.restartSession()
}
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
// Create a session configuration
let configuration = ARWorldTrackingConfiguration()
// Run the view's session
sceneView.session.run(configuration)
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
// Pause the view's session
sceneView.session.pause()
}
// MARK: - ARSessionDelegate
// Pass camera frames received from ARKit to Vision (when not already processing one)
/// - Tag: ConsumeARFrames
func session(_ session: ARSession, didUpdate frame: ARFrame) {
// Do not enqueue other buffers for processing while another Vision task is still running.
// The camera stream has only a finite amount of buffers available; holding too many buffers for analysis would starve the camera.
guard currentBuffer == nil, case .normal = frame.camera.trackingState else {
return
}
// Retain the image buffer for Vision processing.
self.currentBuffer = frame.capturedImage
classifyCurrentImage()
}
// MARK: - Vision classification
// Vision classification request and model
/// - Tag: ClassificationRequest
private lazy var classificationRequest: VNCoreMLRequest = {
do {
// Instantiate the model from its generated Swift class.
let model = try VNCoreMLModel(for: Inceptionv3().model)
let request = VNCoreMLRequest(model: model, completionHandler: { [weak self] request, error in
self?.processClassifications(for: request, error: error)
})
// Crop input images to square area at center, matching the way the ML model was trained.
request.imageCropAndScaleOption = .centerCrop
// Use CPU for Vision processing to ensure that there are adequate GPU resources for rendering.
request.usesCPUOnly = true
return request
} catch {
fatalError("Failed to load Vision ML model: \(error)")
}
}()
// The pixel buffer being held for analysis; used to serialize Vision requests.
private var currentBuffer: CVPixelBuffer?
// Queue for dispatching vision classification requests
private let visionQueue = DispatchQueue(label: "com.example.apple-samplecode.ARKitVision.serialVisionQueue")
// Run the Vision+ML classifier on the current image buffer.
/// - Tag: ClassifyCurrentImage
private func classifyCurrentImage() {
// Most computer vision tasks are not rotation agnostic so it is important to pass in the orientation of the image with respect to device.
let orientation = CGImagePropertyOrientation(UIDevice.current.orientation)
let requestHandler = VNImageRequestHandler(cvPixelBuffer: currentBuffer!, orientation: orientation)
visionQueue.async {
do {
// Release the pixel buffer when done, allowing the next buffer to be processed.
defer { self.currentBuffer = nil }
try requestHandler.perform([self.classificationRequest])
} catch {
print("Error: Vision request failed with error \"\(error)\"")
}
}
}
// Classification results
private var identifierString = ""
private var confidence: VNConfidence = 0.0
// Handle completion of the Vision request and choose results to display.
/// - Tag: ProcessClassifications
func processClassifications(for request: VNRequest, error: Error?) {
guard let results = request.results else {
print("Unable to classify image.\n\(error!.localizedDescription)")
return
}
// The `results` will always be `VNClassificationObservation`s, as specified by the Core ML model in this project.
let classifications = results as! [VNClassificationObservation]
// Show a label for the highest-confidence result (but only above a minimum confidence threshold).
if let bestResult = classifications.first(where: { result in result.confidence > 0.5 }),
let label = bestResult.identifier.split(separator: ",").first {
identifierString = String(label)
confidence = bestResult.confidence
} else {
identifierString = ""
confidence = 0
}
DispatchQueue.main.async { [weak self] in
self?.displayClassifierResults()
}
}
// Show the classification results in the UI.
private func displayClassifierResults() {
guard !self.identifierString.isEmpty else {
return // No object was classified.
}
let message = String(format: "Detected \(self.identifierString) with %.2f", self.confidence * 100) + "% confidence"
statusViewController.showMessage(message)
}
// MARK: - Tap gesture handler & ARSKViewDelegate
// Labels for classified objects by ARAnchor UUID
private var anchorLabels = [UUID: String]()
// When the user taps, add an anchor associated with the current classification result.
/// - Tag: PlaceLabelAtLocation
@IBAction func placeLabelAtLocation(sender: UITapGestureRecognizer) {
let hitLocationInView = sender.location(in: sceneView)
let hitTestResults = sceneView.hitTest(hitLocationInView, types: [.featurePoint, .estimatedHorizontalPlane])
if let result = hitTestResults.first {
// Add a new anchor at the tap location.
let anchor = ARAnchor(transform: result.worldTransform)
sceneView.session.add(anchor: anchor)
// Track anchor ID to associate text with the anchor after ARKit creates a corresponding SKNode.
anchorLabels[anchor.identifier] = identifierString
}
}
// When an anchor is added, provide a SpriteKit node for it and set its text to the classification label.
/// - Tag: UpdateARContent
func view(_ view: ARSKView, didAdd node: SKNode, for anchor: ARAnchor) {
guard let labelText = anchorLabels[anchor.identifier] else {
fatalError("missing expected associated label for anchor")
}
let label = TemplateLabelNode(text: labelText)
node.addChild(label)
}
// MARK: - AR Session Handling
func session(_ session: ARSession, cameraDidChangeTrackingState camera: ARCamera) {
statusViewController.showTrackingQualityInfo(for: camera.trackingState, autoHide: true)
switch camera.trackingState {
case .notAvailable, .limited:
statusViewController.escalateFeedback(for: camera.trackingState, inSeconds: 3.0)
case .normal:
statusViewController.cancelScheduledMessage(for: .trackingStateEscalation)
// Unhide content after successful relocalization.
setOverlaysHidden(false)
}
}
func session(_ session: ARSession, didFailWithError error: Error) {
guard error is ARError else { return }
let errorWithInfo = error as NSError
let messages = [
errorWithInfo.localizedDescription,
errorWithInfo.localizedFailureReason,
errorWithInfo.localizedRecoverySuggestion
]
// Filter out optional error messages.
let errorMessage = messages.compactMap({ $0 }).joined(separator: "\n")
DispatchQueue.main.async {
self.displayErrorMessage(title: "The AR session failed.", message: errorMessage)
}
}
func sessionWasInterrupted(_ session: ARSession) {
setOverlaysHidden(true)
}
func sessionShouldAttemptRelocalization(_ session: ARSession) -> Bool {
/*
Allow the session to attempt to resume after an interruption.
This process may not succeed, so the app must be prepared
to reset the session if the relocalizing status continues
for a long time -- see `escalateFeedback` in `StatusViewController`.
*/
return true
}
private func setOverlaysHidden(_ shouldHide: Bool) {
sceneView.scene!.children.forEach { node in
if shouldHide {
// Hide overlay content immediately during relocalization.
node.alpha = 0
} else {
// Fade overlay content in after relocalization succeeds.
node.run(.fadeIn(withDuration: 0.5))
}
}
}
private func restartSession() {
statusViewController.cancelAllScheduledMessages()
statusViewController.showMessage("RESTARTING SESSION")
anchorLabels = [UUID: String]()
let configuration = ARWorldTrackingConfiguration()
sceneView.session.run(configuration, options: [.resetTracking, .removeExistingAnchors])
}
// MARK: - Error handling
private func displayErrorMessage(title: String, message: String) {
// Present an alert informing about the error that has occurred.
let alertController = UIAlertController(title: title, message: message, preferredStyle: .alert)
let restartAction = UIAlertAction(title: "Restart Session", style: .default) { _ in
alertController.dismiss(animated: true, completion: nil)
self.restartSession()
}
alertController.addAction(restartAction)
present(alertController, animated: true, completion: nil)
}
}
Best answer
This suggests that your StatusViewController class does not inherit from UIViewController, since the `children` property has been available on subclasses of UIViewController for quite some time now.
Could you share how you composed your StatusViewController?
Regarding "ios - Swift: Instance member cannot be used on type in the ARKitVision sample", a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/50714737/