gpt4 book ai didi

ios - NSGenericException,原因 : [AVCapturePhotoOutput capturePhotoWithSettings:delegate:] No active and enabled video connection

转载 作者:可可西里 更新时间:2023-10-31 23:13:20 31 4
gpt4 key购买 nike

我在尝试运行我的应用程序并拍摄照片时不断收到上述错误消息。

我是编程新手,所以如果有人能提供帮助那就太棒了。

这是我的代码

import UIKit
import AVFoundation
import Firebase


class CameraViewController: UIViewController, AVCapturePhotoCaptureDelegate, UITextViewDelegate, UITextFieldDelegate {

@IBOutlet weak var cameraView: UIView!
@IBOutlet weak var cancelButton: UIButton!
@IBOutlet weak var topButton: UIButton!
@IBOutlet weak var switchCamera: UIButton!
@IBOutlet weak var captionButton: UIButton!
@IBOutlet weak var takePhoto: UIButton!
@IBOutlet weak var postButton: UIButton!



var captureSession = AVCaptureSession()
var sessionOutput = AVCapturePhotoOutput()
var previewLayer = AVCaptureVideoPreviewLayer()
var settings = AVCapturePhotoSettings()


var ref = FIRDatabase.database().reference()
let storage = FIRStorage.storage()





override func viewDidLoad() {
super.viewDidLoad()

// Do any additional setup after loading the view.
}

override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)

let deviceSession = AVCaptureDeviceDiscoverySession(deviceTypes: [.builtInDualCamera, .builtInTelephotoCamera, .builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: .unspecified)

for device in (deviceSession?.devices)!{

if device.position == AVCaptureDevicePosition.front {

do{
let input = try AVCaptureDeviceInput(device: device)
if captureSession.canAddInput(input) {
captureSession.addInput(input)

if captureSession.canAddOutput(sessionOutput) {
captureSession.canAddOutput(sessionOutput)

previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
previewLayer.connection.videoOrientation = .portrait

cameraView.layer.addSublayer(previewLayer)


previewLayer.position = CGPoint (x: self.cameraView.frame.width / 2, y: self.cameraView.frame.height / 2)
previewLayer.bounds = cameraView.frame

captureSession.startRunning()
}
}

}catch let avError {
print (avError)
}
}
}
}



func capture(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhotoSampleBuffer photoSampleBuffer: CMSampleBuffer?, previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) {

if let error = error {
print(error.localizedDescription)
return
}

AppDelegate.instance().showActivityIndicator()

let uid = FIRAuth.auth()!.currentUser!.uid
let ref = FIRDatabase.database().reference()
let storage = FIRStorage.storage().reference(forURL: "my url")

if let sampleBuffer = photoSampleBuffer, let previewBuffer = photoSampleBuffer, let dataImage = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) {

let key = ref.child("posts").childByAutoId().key
let imageRef = storage.child("posts").child(uid).child("\(key).jpg")

let uploadTask = imageRef.put(dataImage, metadata: nil) {(metadata, error) in
if error != nil {
print(error!.localizedDescription)
AppDelegate.instance().dismissActivityIndicator()
return
}
imageRef.downloadURL(completion: {(url, error) in
if let url = url {
let feed = ["userId" : uid,
"pathToImage" : url.absoluteString,
"likes" : 0,
"author" : FIRAuth.auth()?.currentUser!.displayName! as Any,
"postId" : key] as [String : Any]

let postFeed = ["\(key)" : feed]

ref.child("posts").updateChildValues(postFeed)
AppDelegate.instance().dismissActivityIndicator()

self.dismiss(animated: true, completion: nil)
self.postButton.isHidden = false
}
})


}
uploadTask.resume()
}
captureSession.stopRunning()
previewLayer.removeFromSuperlayer()

}
@IBAction func takePhoto(_ sender: Any) {
let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first!
let previewFormat = [kCVPixelBufferPixelFormatTypeKey as String : previewPixelType, kCVPixelBufferWidthKey as String : 160, kCVPixelBufferHeightKey as String : 160]

settings.previewPhotoFormat = previewFormat
self.sessionOutput.capturePhoto(with: settings, delegate: self)
sessionOutput.isLivePhotoCaptureEnabled = true



}

@IBAction func cancelPressed(_ sender: Any) {

dismiss(animated: true, completion: nil)

}

override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) {
self.view.endEditing(true)
}
func textFieldShouldReturn(_ textField: UITextField) -> Bool {
textField.resignFirstResponder()
return false

}


}

我想错误发生在这条线的某处 self.sessionOutput.capturePhoto(with: settings, delegate: self)

不过我不确定。我真的很感激一些帮助。谢谢!

最佳答案

您从未添加 sessionOutput,您调用了两次 canAddOutput 函数:

if captureSession.canAddOutput(sessionOutput) {
captureSession.canAddOutput(sessionOutput)

在检查是否可以添加之后,您应该在添加输入时添加输出

if captureSession.canAddOutput(sessionOutput) {
captureSession.addOutput(sessionOutput)

希望对你有帮助

关于ios - NSGenericException,原因 : [AVCapturePhotoOutput capturePhotoWithSettings:delegate:] No active and enabled video connection,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/41574457/

31 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com