gpt4 book ai didi

ios - 从通过 AVCapturePhotoCaptureDelegate 获得的 AVCapturePhoto 中检索 CVPixelBuffer

转载 作者:行者123 更新时间:2023-11-29 11:38:52 25 4
gpt4 key购买 nike

如标题所示,我正在尝试从方法的输出中检索捕获的照片的 CVPixelBuffer:

AVCapturePhotoCaptureDelegate.photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?)

photo 参数的 pixelBuffer 在委托(delegate)方法调用中为 nil,我想将其用于一些低级别的图像处理。

我主要遵循示例代码,可以在以下位置找到:

https://developer.apple.com/library/content/samplecode/AVCam/Introduction/Intro.html

和 AVFoundation 文档。

由于 AVFoundation session 配置有点冗长并且可能会提供一些答案,我将只粘贴处理它的整个对象,它应该包含所有相关代码:

/// Receives the results of a photo capture performed by `CameraService`.
///
/// Declared class-bound so `CameraService` can hold it weakly.
/// FIX: `: class` is deprecated since Swift 5 — use `AnyObject` instead
/// (identical semantics, the modern spelling).
protocol CameraServiceDelegate: AnyObject {
    /// Called when a capture finished and a pixel buffer was retrieved.
    func cameraServiceDidCapturePhoto(withBuffer buffer: CVPixelBuffer)
    /// Called when the capture failed or no pixel buffer was available.
    /// - Parameter error: The underlying AVFoundation error, if one was provided.
    func cameraServiceEncounteredError(_ error: Error?)
}

/// Owns the AVFoundation still-photo pipeline: session configuration, preview
/// layer wiring, and capture. All session/output interaction is serialized on
/// a private dispatch queue, per Apple's AVCam sample pattern.
final class CameraService: NSObject {

    /// Raised when a finished capture contains no usable pixel buffer.
    struct BufferRetrievalFailure: Error {}

    weak var delegate: CameraServiceDelegate?

    private let session = AVCaptureSession()
    // NOTE(review): this discovery session is never read anywhere in the class —
    // device selection goes through `captureDevice` below. Kept to avoid changing
    // the stored-property layout, but it is a candidate for removal.
    private var discoverySession = AVCaptureDevice.DiscoverySession(
        deviceTypes: [.builtInDualCamera, .builtInWideAngleCamera],
        mediaType: .video,
        position: .back
    )
    // Implicitly unwrapped: populated once during setup; nil access before
    // `setupVideoInput()` is a programmer error.
    private var deviceInput: AVCaptureDeviceInput!
    private let photoOutput = AVCapturePhotoOutput()

    /// Serial queue that serializes every touch of `session`/`photoOutput`.
    private let sessionQueue = DispatchQueue(label: "av-capture-session.serial.queue")

    /// Best available camera, in preference order:
    /// back dual camera > back wide angle > front wide angle.
    private var captureDevice: AVCaptureDevice? {
        return .default(.builtInDualCamera, for: .video, position: .back)
            ?? .default(.builtInWideAngleCamera, for: .video, position: .back)
            ?? .default(.builtInWideAngleCamera, for: .video, position: .front)
    }

    /// Attaches the preview layer, resolves video authorization, and kicks off
    /// session configuration on the session queue.
    func setup(with layer: AVCaptureVideoPreviewLayer) {
        layer.session = session

        switch AVCaptureDevice.authorizationStatus(for: .video) {
        case .authorized:
            break
        case .notDetermined:
            requestVideoAuthorization()
        default:
            assertionFailure("Just enable video, this is not a real app.")
        }

        sessionQueue.async { [weak self] in
            self?.setupAVSession(with: layer)
        }
    }

    /// Starts the session (asynchronously; `startRunning` blocks its thread).
    func resume() {
        sessionQueue.async { [weak session] in
            session?.startRunning()
        }
    }

    /// Stops the session asynchronously.
    func suspend() {
        sessionQueue.async { [weak session] in
            session?.stopRunning()
        }
    }

    /// Triggers a single photo capture; results arrive via
    /// `AVCapturePhotoCaptureDelegate`.
    func capturePhoto() {
        sessionQueue.async { [weak self] in
            guard let strongSelf = self else {
                return
            }

            strongSelf.photoOutput.capturePhoto(with: strongSelf.capturePhotoSettings(), delegate: strongSelf)
        }
    }

    /// Suspends the session queue until the user answers the camera permission
    /// prompt, so configuration cannot run unauthorized.
    private func requestVideoAuthorization() {
        sessionQueue.suspend()

        AVCaptureDevice.requestAccess(for: .video) { [weak sessionQueue] isAuthorized in
            guard isAuthorized else {
                assertionFailure("Just enable video, this is not a real app.")
                return
            }

            sessionQueue?.resume()
        }
    }

    /// Runs the full session configuration inside a begin/commit pair.
    private func setupAVSession(with layer: AVCaptureVideoPreviewLayer) {
        session.beginConfiguration()

        session.sessionPreset = .photo

        setupVideoInput()
        setupVideoPreviewViewLayer(with: layer)
        setupPhotoOutput()

        session.commitConfiguration()
    }

    /// Wires the chosen camera into the session as the video input.
    private func setupVideoInput() {
        guard let videoDevice = captureDevice,
            let deviceInput = try? AVCaptureDeviceInput(device: videoDevice),
            session.canAddInput(deviceInput) else {
                fatalError("Could not retrieve suitable capture device or configure video device input.")
        }

        self.deviceInput = deviceInput
        session.addInput(deviceInput)
    }

    /// Orients the preview layer to match the status bar (UI work → main queue).
    private func setupVideoPreviewViewLayer(with layer: AVCaptureVideoPreviewLayer) {
        DispatchQueue.main.async {
            let statusBarOrientation = UIApplication.shared.statusBarOrientation

            layer.connection?.videoOrientation =
                statusBarOrientation != .unknown
                    ? AVCaptureVideoOrientation(rawValue: statusBarOrientation.rawValue)!
                    : .portrait
        }
    }

    /// Adds and configures the photo output (high-res on, Live Photo off,
    /// depth delivery when the hardware supports it).
    private func setupPhotoOutput() {
        guard session.canAddOutput(photoOutput) else {
            fatalError("Could not configure photo output.")
        }

        session.addOutput(photoOutput)

        photoOutput.isHighResolutionCaptureEnabled = true
        photoOutput.isLivePhotoCaptureEnabled = false
        photoOutput.isDepthDataDeliveryEnabled = photoOutput.isDepthDataDeliverySupported
    }

    /// Builds the per-capture settings. AVFoundation requires a fresh settings
    /// instance for every capture — never reuse one.
    private func capturePhotoSettings() -> AVCapturePhotoSettings {
        let settings: AVCapturePhotoSettings

        if photoOutput.availablePhotoCodecTypes.contains(.hevc) {
            settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.hevc])
        } else {
            settings = AVCapturePhotoSettings()
        }

        // FIX: `AVCapturePhoto.pixelBuffer` is nil for compressed (HEVC/JPEG)
        // captures; it is only populated for RAW/uncompressed formats. Request
        // a preview image in a supported pixel format so the delegate callback
        // receives an uncompressed buffer via `photo.previewPixelBuffer`.
        if let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first {
            settings.previewPhotoFormat = [
                kCVPixelBufferPixelFormatTypeKey as String: previewPixelType
            ]
        }

        settings.isHighResolutionPhotoEnabled = true
        settings.isDepthDataDeliveryEnabled = photoOutput.isDepthDataDeliveryEnabled

        return settings
    }
}

// MARK: - AVCapturePhotoCaptureDelegate

extension CameraService: AVCapturePhotoCaptureDelegate {

    /// Forwards the finished capture to `delegate`, preferring the
    /// full-resolution pixel buffer and falling back to the preview buffer.
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        guard error == nil else {
            delegate?.cameraServiceEncounteredError(error)
            return
        }

        // FIX: `photo.pixelBuffer` is non-nil only for RAW/uncompressed
        // captures. For compressed (HEVC/JPEG) captures fall back to
        // `previewPixelBuffer`, which is populated when the capture settings
        // request a `previewPhotoFormat`.
        guard let buffer = photo.pixelBuffer ?? photo.previewPixelBuffer else {
            delegate?.cameraServiceEncounteredError(BufferRetrievalFailure())
            return
        }

        delegate?.cameraServiceDidCapturePhoto(withBuffer: buffer)
    }
}

最佳答案

我没有适合您的代码示例,因为我是在 Xamarin 中工作的,但您需要在创建捕获时使用的 AVCapturePhotoSettings 对象上设置 previewPhotoFormat。下面是我在网上找到的一个例子:

// Request an uncompressed preview image alongside the capture so that a
// pixel buffer is delivered with the finished photo.
var settings = AVCapturePhotoSettings()
// Force-unwrap assumes at least one preview pixel format is always available —
// TODO confirm on target devices (answer author reports never seeing it empty).
let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first!
let previewFormat = [
kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
// NOTE(review): width/height here are CGFloat point values taken from a
// button's frame, not integer pixel dimensions — presumably sized for a
// thumbnail preview; verify this matches the intended buffer size.
kCVPixelBufferWidthKey as String: self.capturedButton.frame.width,
kCVPixelBufferHeightKey as String: self.capturedButton.frame.height
] as [String : Any]
settings.previewPhotoFormat = previewFormat

我亲自检查了 availablePreviewPhotoPixelFormatTypes,确认我的分析所需的格式 (kCVPixelFormatType_32BGRA) 是否在其中。到目前为止,我还没有遇到不支持它的设备。

关于ios - 从通过 AVCapturePhotoCaptureDelegate 获得的 AVCapturePhoto 中检索 CVPixelBuffer,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/47678894/

25 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com