
ios - GLKView.display() method sometimes causes a crash. EXC_BAD_ACCESS


I am trying to implement a real-time camera app using AVFoundation, GLKit, and Core Image (without using GPUImage).

So, I found this tutorial:
http://altitudelabs.com/blog/real-time-filter/
It is written in Objective-C, so I rewrote the code in Swift 4.0 with Xcode 9.

It seems to work fine, but sometimes (rarely) it crashes with the following error when GLKView's display() method is called:

EXC_BAD_ACCESS (code=1, address=0x********)

At the time of the crash the GLKView is non-nil, the EAGLContext is non-nil, and the CIContext is non-nil. My code is below:

import UIKit
import AVFoundation
import GLKit
import OpenGLES

class ViewController: UIViewController {
    var videoDevice: AVCaptureDevice!
    var captureSession: AVCaptureSession!
    var captureSessionQueue: DispatchQueue!
    var videoPreviewView: GLKView!
    var ciContext: CIContext!
    var eaglContext: EAGLContext!
    var videoPreviewViewBounds: CGRect = CGRect.zero

    override func viewDidLoad() {
        super.viewDidLoad()
        // Do any additional setup after loading the view, typically from a nib.

        // remove the view's background color; this allows us not to use the opaque property
        // (self.view.opaque = NO) since we remove the background color drawing altogether
        self.view.backgroundColor = UIColor.clear

        // setup the GLKView for video/image preview
        let window: UIView = UIApplication.shared.delegate!.window!!
        eaglContext = EAGLContext(api: .openGLES2)
        videoPreviewView = GLKView(frame: videoPreviewViewBounds, context: eaglContext)
        videoPreviewView.enableSetNeedsDisplay = false

        // because the native video image from the back camera is in UIDeviceOrientationLandscapeLeft
        // (i.e. the home button is on the right), we need to apply a clockwise 90 degree transform
        // so that we can draw the video preview as if we were in a landscape-oriented view;
        // if you're using the front camera and you want to have a mirrored preview (so that the user
        // is seeing themselves in the mirror), you need to apply an additional horizontal flip
        // (by concatenating CGAffineTransformMakeScale(-1.0, 1.0) to the rotation transform)
        videoPreviewView.transform = CGAffineTransform(rotationAngle: CGFloat.pi / 2.0)
        videoPreviewView.frame = window.bounds

        // we make our video preview view a subview of the window, and send it to the back;
        // this makes ViewController's view (and its UI elements) on top of the video preview,
        // and also makes video preview unaffected by device rotation
        window.addSubview(videoPreviewView)
        window.sendSubview(toBack: videoPreviewView)

        // bind the frame buffer to get the frame buffer width and height;
        // the bounds used by CIContext when drawing to a GLKView are in pixels (not points),
        // hence the need to read from the frame buffer's width and height;
        // in addition, since we will be accessing the bounds in another queue (_captureSessionQueue),
        // we want to obtain this piece of information so that we won't be
        // accessing _videoPreviewView's properties from another thread/queue
        videoPreviewView.bindDrawable()
        videoPreviewViewBounds = CGRect.zero
        videoPreviewViewBounds.size.width = CGFloat(videoPreviewView.drawableWidth)
        videoPreviewViewBounds.size.height = CGFloat(videoPreviewView.drawableHeight)

        // create the CIContext instance, note that this must be done after _videoPreviewView is properly set up
        ciContext = CIContext(eaglContext: eaglContext, options: [kCIContextWorkingColorSpace: NSNull()])

        if AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInDualCamera, .builtInTelephotoCamera, .builtInWideAngleCamera], mediaType: AVMediaType.video, position: .back).devices.count > 0 {
            start()
        } else {
            print("No device with AVMediaTypeVideo")
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }

    func start() {
        let videoDevices = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaType.video, position: .back).devices
        videoDevice = videoDevices.first

        var videoDeviceInput: AVCaptureInput!
        do {
            videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice)
        } catch let error {
            print("Unable to obtain video device input, error: \(error)")
            return
        }

        let preset = AVCaptureSession.Preset.high
        captureSession = AVCaptureSession()
        captureSession.sessionPreset = preset

        // Core Image wants the BGRA pixel format
        let outputSetting = [String(kCVPixelBufferPixelFormatTypeKey): kCVPixelFormatType_32BGRA]

        // create and configure the video data output
        let videoDataOutput = AVCaptureVideoDataOutput()
        videoDataOutput.videoSettings = outputSetting

        // create the dispatch queue for handling capture session delegate method calls
        captureSessionQueue = DispatchQueue(label: "capture_session_queue")
        videoDataOutput.setSampleBufferDelegate(self, queue: captureSessionQueue)
        videoDataOutput.alwaysDiscardsLateVideoFrames = true

        captureSession.beginConfiguration()
        if !captureSession.canAddOutput(videoDataOutput) {
            print("Cannot add video data output")
            captureSession = nil
            return
        }
        captureSession.addInput(videoDeviceInput)
        captureSession.addOutput(videoDataOutput)
        captureSession.commitConfiguration()

        captureSession.startRunning()
    }
}

extension ViewController: AVCaptureVideoDataOutputSampleBufferDelegate {
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        let imageBuffer: CVImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)!
        let sourceImage = CIImage(cvImageBuffer: imageBuffer, options: nil)
        let sourceExtent = sourceImage.extent

        let vignetteFilter = CIFilter(name: "CIVignetteEffect", withInputParameters: nil)
        vignetteFilter?.setValue(sourceImage, forKey: kCIInputImageKey)
        vignetteFilter?.setValue(CIVector(x: sourceExtent.size.width / 2.0, y: sourceExtent.size.height / 2.0), forKey: kCIInputCenterKey)
        vignetteFilter?.setValue(sourceExtent.width / 2.0, forKey: kCIInputRadiusKey)
        let filteredImage = vignetteFilter?.outputImage

        let sourceAspect = sourceExtent.width / sourceExtent.height
        let previewAspect = videoPreviewViewBounds.width / videoPreviewViewBounds.height

        // we want to maintain the aspect ratio of the screen size, so we clip the video image
        var drawRect = sourceExtent
        if sourceAspect > previewAspect {
            // use full height of the video image, and center crop the width
            drawRect.origin.x += (drawRect.size.width - drawRect.size.height * previewAspect) / 2.0
            drawRect.size.width = drawRect.size.height * previewAspect
        } else {
            // use full width of the video image, and center crop the height
            drawRect.origin.y += (drawRect.size.height - drawRect.size.width / previewAspect) / 2.0
            drawRect.size.height = drawRect.size.width / previewAspect
        }

        videoPreviewView.bindDrawable()
        if eaglContext != EAGLContext.current() {
            EAGLContext.setCurrent(eaglContext)
        }
        print("current thread \(Thread.current)")

        // clear the GLKView to grey
        glClearColor(0.5, 0.5, 0.5, 1.0)
        glClear(GLbitfield(GL_COLOR_BUFFER_BIT))

        // set the blend mode to "source over" so that CI will use that
        glEnable(GLenum(GL_BLEND))
        glBlendFunc(GLenum(GL_ONE), GLenum(GL_ONE_MINUS_SRC_ALPHA))

        if let filteredImage = filteredImage {
            ciContext.draw(filteredImage, in: videoPreviewViewBounds, from: drawRect)
        }
        videoPreviewView.display()
    }
}
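For readers following the cropping logic in captureOutput, here is the same center-crop computation pulled out into a self-contained snippet with illustrative numbers (the 1920x1080 source and 750x1334-pixel preview are assumptions for the example, not values from the question):

import CoreGraphics

// Worked example of the center-crop math from captureOutput above.
// Illustrative values: a 1920x1080 video frame drawn into a
// 750x1334-pixel drawable (an iPhone-sized portrait preview).
let sourceExtent = CGRect(x: 0, y: 0, width: 1920, height: 1080)
let previewBounds = CGRect(x: 0, y: 0, width: 750, height: 1334)

let sourceAspect = sourceExtent.width / sourceExtent.height    // ~1.78 (landscape)
let previewAspect = previewBounds.width / previewBounds.height // ~0.56 (portrait)

var drawRect = sourceExtent
if sourceAspect > previewAspect {
    // source is wider than the preview: keep the full height, center-crop the width
    drawRect.origin.x += (drawRect.width - drawRect.height * previewAspect) / 2.0
    drawRect.size.width = drawRect.height * previewAspect
} else {
    // source is taller than the preview: keep the full width, center-crop the height
    drawRect.origin.y += (drawRect.height - drawRect.width / previewAspect) / 2.0
    drawRect.size.height = drawRect.width / previewAspect
}

print(drawRect) // ~(656.4, 0.0, 607.2, 1080.0): a vertical slice from the middle of the frame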

The stack trace at the time of the crash is:

* thread #5, queue = 'com.apple.avfoundation.videodataoutput.bufferqueue', stop reason = EXC_BAD_ACCESS (code=1, address=0x8000000000000000)
    frame #0: 0x00000001a496f098 AGXGLDriver`___lldb_unnamed_symbol149$$AGXGLDriver + 332
    frame #1: 0x00000001923c029c OpenGLES`-[EAGLContext getParameter:to:] + 80
    frame #2: 0x000000010038bca4 libglInterpose.dylib`__clang_call_terminate + 1976832
    frame #3: 0x00000001001ab75c libglInterpose.dylib`__clang_call_terminate + 9400
    frame #4: 0x000000010038b8b4 libglInterpose.dylib`__clang_call_terminate + 1975824
    frame #5: 0x00000001001af098 libglInterpose.dylib`__clang_call_terminate + 24052
    frame #6: 0x00000001001abe5c libglInterpose.dylib`__clang_call_terminate + 11192
    frame #7: 0x000000010038f9dc libglInterpose.dylib`__clang_call_terminate + 1992504
    frame #8: 0x000000010038d5b8 libglInterpose.dylib`__clang_call_terminate + 1983252
    frame #9: 0x000000019a1e2a20 GLKit`-[GLKView _display:] + 308
  * frame #10: 0x0000000100065e78 RealTimeCameraPractice`ViewController.captureOutput(output=0x0000000174034820, sampleBuffer=0x0000000119e25e70, connection=0x0000000174008850, self=0x0000000119d032d0) at ViewController.swift:160
    frame #11: 0x00000001000662dc RealTimeCameraPractice`@objc ViewController.captureOutput(_:didOutput:from:) at ViewController.swift:0
    frame #12: 0x00000001977ec310 AVFoundation`-[AVCaptureVideoDataOutput _handleRemoteQueueOperation:] + 308
    frame #13: 0x00000001977ec14c AVFoundation`__47-[AVCaptureVideoDataOutput _updateRemoteQueue:]_block_invoke + 100
    frame #14: 0x00000001926bdf38 CoreMedia`__FigRemoteOperationReceiverCreateMessageReceiver_block_invoke + 260
    frame #15: 0x00000001926dce9c CoreMedia`__FigRemoteQueueReceiverSetHandler_block_invoke.2 + 224
    frame #16: 0x000000010111da10 libdispatch.dylib`_dispatch_client_callout + 16
    frame #17: 0x0000000101129a84 libdispatch.dylib`_dispatch_continuation_pop + 552
    frame #18: 0x00000001011381f8 libdispatch.dylib`_dispatch_source_latch_and_call + 204
    frame #19: 0x000000010111fa60 libdispatch.dylib`_dispatch_source_invoke + 828
    frame #20: 0x000000010112b128 libdispatch.dylib`_dispatch_queue_serial_drain + 692
    frame #21: 0x0000000101121634 libdispatch.dylib`_dispatch_queue_invoke + 852
    frame #22: 0x000000010112b128 libdispatch.dylib`_dispatch_queue_serial_drain + 692
    frame #23: 0x0000000101121634 libdispatch.dylib`_dispatch_queue_invoke + 852
    frame #24: 0x000000010112c358 libdispatch.dylib`_dispatch_root_queue_drain_deferred_item + 276
    frame #25: 0x000000010113457c libdispatch.dylib`_dispatch_kevent_worker_thread + 764
    frame #26: 0x000000018ee56fbc libsystem_pthread.dylib`_pthread_wqthread + 772
    frame #27: 0x000000018ee56cac libsystem_pthread.dylib`start_wqthread + 4

My project is on GitHub:
https://github.com/hegrecom/iOS-RealTimeCameraPractice

Best Answer

The solution is here: iOS 11 beta 4 presentRenderbuffer crash

Go to Manage Schemes -> Options -> GPU Frame Capture -> Disabled.

That setting lines up with the backtrace above: the libglInterpose.dylib frames appear to come from Xcode's GPU Frame Capture instrumentation, which interposes OpenGL ES calls while a debug session is attached; disabling the capture removes that layer.
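Independently of that scheme setting, a defensive guard at the top of captureOutput can keep a late frame from touching GL state during teardown. This is only a sketch under assumptions (the nil and isRunning checks are additions for illustration, not part of the accepted answer):

// Hedged sketch, not the accepted fix: skip frames that arrive while the
// preview infrastructure is missing or the session is no longer running.
guard let view = videoPreviewView,
      let context = eaglContext,
      captureSession?.isRunning == true else { return }

view.bindDrawable()
if EAGLContext.current() !== context {
    EAGLContext.setCurrent(context)
}
// ... filter and draw exactly as in the question's code, then:
view.display()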

Regarding "ios - GLKView.display() method sometimes causes a crash. EXC_BAD_ACCESS", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/46722455/
