I'm using ARVideoKit to record the screen (ReplayKit doesn't work for this). Sometimes I record and save with no problem; other times the app crashes when I record and save:
** Terminating app due to uncaught exception 'NSInternalInconsistencyException', reason: '*** -[AVAssetWriterInput appendSampleBuffer:] Cannot append sample buffer: Must start a session (using -[AVAssetWriter startSessionAtSourceTime:) first'
Looking at the stack trace, it is a __pthread_kill that occurs on Thread 83, specifically inside this DispatchQueue:
let audioBufferQueue = DispatchQueue(label: "com.ahmedbekhit.AudioBufferQueue")
How can I prevent this from happening?
Here is the code from the file:
import AVFoundation
import CoreImage
import UIKit

@available(iOS 11.0, *)
class WritAR: NSObject, AVCaptureAudioDataOutputSampleBufferDelegate {
    private var assetWriter: AVAssetWriter!
    private var videoInput: AVAssetWriterInput!
    private var audioInput: AVAssetWriterInput!
    private var session: AVCaptureSession!
    private var pixelBufferInput: AVAssetWriterInputPixelBufferAdaptor!
    private var videoOutputSettings: Dictionary<String, AnyObject>!
    private var audioSettings: [String: Any]?

    let audioBufferQueue = DispatchQueue(label: "com.ahmedbekhit.AudioBufferQueue")

    private var isRecording: Bool = false

    weak var delegate: RecordARDelegate?
    var videoInputOrientation: ARVideoOrientation = .auto

    init(output: URL, width: Int, height: Int, adjustForSharing: Bool, audioEnabled: Bool, orientaions: [ARInputViewOrientation], queue: DispatchQueue, allowMix: Bool) {
        super.init()
        do {
            assetWriter = try AVAssetWriter(outputURL: output, fileType: AVFileType.mp4)
        } catch {
            // FIXME: handle when failed to allocate AVAssetWriter.
            return
        }
        if audioEnabled {
            if allowMix {
                let audioOptions: AVAudioSession.CategoryOptions = [.mixWithOthers, .allowBluetooth, .defaultToSpeaker, .interruptSpokenAudioAndMixWithOthers]
                try? AVAudioSession.sharedInstance().setCategory(AVAudioSession.Category.playAndRecord, mode: AVAudioSession.Mode.spokenAudio, options: audioOptions)
                try? AVAudioSession.sharedInstance().setActive(true)
            }
            AVAudioSession.sharedInstance().requestRecordPermission({ permitted in
                if permitted {
                    self.prepareAudioDevice(with: queue)
                }
            })
        }

        // HEVC file format only supports A10 Fusion Chip or higher.
        // To support HEVC, make sure to check if the device is iPhone 7 or higher.
        videoOutputSettings = [
            AVVideoCodecKey: AVVideoCodecType.h264 as AnyObject,
            AVVideoWidthKey: width as AnyObject,
            AVVideoHeightKey: height as AnyObject
        ]

        let attributes: [String: Bool] = [
            kCVPixelBufferCGImageCompatibilityKey as String: true,
            kCVPixelBufferCGBitmapContextCompatibilityKey as String: true
        ]

        videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoOutputSettings)
        videoInput.expectsMediaDataInRealTime = true
        pixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoInput, sourcePixelBufferAttributes: nil)

        var angleEnabled: Bool {
            for v in orientaions {
                if UIDevice.current.orientation.rawValue == v.rawValue {
                    return true
                }
            }
            return false
        }

        var recentAngle: CGFloat = 0
        var rotationAngle: CGFloat = 0
        switch UIDevice.current.orientation {
        case .landscapeLeft:
            rotationAngle = -90
            recentAngle = -90
        case .landscapeRight:
            rotationAngle = 90
            recentAngle = 90
        case .faceUp, .faceDown, .portraitUpsideDown:
            rotationAngle = recentAngle
        default:
            rotationAngle = 0
            recentAngle = 0
        }

        if !angleEnabled {
            rotationAngle = 0
        }

        var t = CGAffineTransform.identity
        switch videoInputOrientation {
        case .auto:
            t = t.rotated(by: ((rotationAngle * CGFloat.pi) / 180))
        case .alwaysPortrait:
            t = t.rotated(by: 0)
        case .alwaysLandscape:
            if rotationAngle == 90 || rotationAngle == -90 {
                t = t.rotated(by: ((rotationAngle * CGFloat.pi) / 180))
            } else {
                t = t.rotated(by: ((-90 * CGFloat.pi) / 180))
            }
        }
        videoInput.transform = t

        if assetWriter.canAdd(videoInput) {
            assetWriter.add(videoInput)
        } else {
            delegate?.recorder(didFailRecording: assetWriter.error, and: "An error occurred while adding video input.")
            isWritingWithoutError = false
        }
        assetWriter.shouldOptimizeForNetworkUse = adjustForSharing
    }

    func prepareAudioDevice(with queue: DispatchQueue) {
        let device: AVCaptureDevice = AVCaptureDevice.default(for: .audio)!
        var audioDeviceInput: AVCaptureDeviceInput?
        do {
            audioDeviceInput = try AVCaptureDeviceInput(device: device)
        } catch {
            audioDeviceInput = nil
        }

        let audioDataOutput = AVCaptureAudioDataOutput()
        audioDataOutput.setSampleBufferDelegate(self, queue: queue)

        session = AVCaptureSession()
        session.sessionPreset = .medium
        session.usesApplicationAudioSession = true
        session.automaticallyConfiguresApplicationAudioSession = false

        if session.canAddInput(audioDeviceInput!) {
            session.addInput(audioDeviceInput!)
        }
        if session.canAddOutput(audioDataOutput) {
            session.addOutput(audioDataOutput)
        }

        audioSettings = audioDataOutput.recommendedAudioSettingsForAssetWriter(writingTo: .m4v) as? [String: Any]

        audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioSettings)
        audioInput.expectsMediaDataInRealTime = true

        audioBufferQueue.async {
            self.session.startRunning()
        }

        if assetWriter.canAdd(audioInput) {
            assetWriter.add(audioInput)
        }
    }

    var startingVideoTime: CMTime?
    var isWritingWithoutError: Bool?
    var currentDuration: TimeInterval = 0 // Seconds

    func insert(pixel buffer: CVPixelBuffer, with intervals: CFTimeInterval) {
        let time: CMTime = CMTime(seconds: intervals, preferredTimescale: 1000000)
        insert(pixel: buffer, with: time)
    }

    func insert(pixel buffer: CVPixelBuffer, with time: CMTime) {
        if assetWriter.status == .unknown {
            guard startingVideoTime == nil else {
                isWritingWithoutError = false
                return
            }
            startingVideoTime = time
            if assetWriter.startWriting() {
                assetWriter.startSession(atSourceTime: startingVideoTime!)
                currentDuration = 0
                isRecording = true
                isWritingWithoutError = true
            } else {
                delegate?.recorder(didFailRecording: assetWriter.error, and: "An error occurred while starting the video session.")
                currentDuration = 0
                isRecording = false
                isWritingWithoutError = false
            }
        } else if assetWriter.status == .failed {
            delegate?.recorder(didFailRecording: assetWriter.error, and: "Video session failed while recording.")
            logAR.message("An error occurred while recording the video, status: \(assetWriter.status.rawValue), error: \(assetWriter.error!.localizedDescription)")
            currentDuration = 0
            isRecording = false
            isWritingWithoutError = false
            return
        }

        if videoInput.isReadyForMoreMediaData {
            append(pixel: buffer, with: time)
            currentDuration = time.seconds - startingVideoTime!.seconds
            isRecording = true
            isWritingWithoutError = true
            delegate?.recorder?(didUpdateRecording: currentDuration)
        }
    }

    // AVCaptureAudioDataOutputSampleBufferDelegate callback for incoming audio buffers.
    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        if let input = audioInput {
            audioBufferQueue.async { [weak self] in
                if input.isReadyForMoreMediaData && (self?.isRecording)! {
                    input.append(sampleBuffer)
                }
            }
        }
    }

    func pause() {
        isRecording = false
    }

    func end(writing finished: @escaping () -> Void) {
        if let session = session {
            if session.isRunning {
                session.stopRunning()
            }
        }
        if assetWriter.status == .writing {
            assetWriter.finishWriting(completionHandler: finished)
        }
    }

    func cancel() {
        if let session = session {
            if session.isRunning {
                session.stopRunning()
            }
        }
        assetWriter.cancelWriting()
    }
}

@available(iOS 11.0, *)
private extension WritAR {
    func append(pixel buffer: CVPixelBuffer, with time: CMTime) {
        pixelBufferInput.append(buffer, withPresentationTime: time)
    }
}

// Simple logging to show logs only while debugging.
class logAR {
    class func message(_ message: String) {
        #if DEBUG
        print("ARVideoKit @ \(Date().timeIntervalSince1970):- \(message)")
        #endif
    }

    class func remove(from path: URL?) {
        if let file = path?.path {
            let manager = FileManager.default
            if manager.fileExists(atPath: file) {
                do {
                    try manager.removeItem(atPath: file)
                    self.message("Successfully deleted media file from cache after exporting to Camera Roll.")
                } catch let error {
                    self.message("An error occurred while deleting cached media: \(error)")
                }
            }
        }
    }
}
Best Answer
This is the code causing the error:
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    if let input = audioInput {
        audioBufferQueue.async { [weak self] in
            if input.isReadyForMoreMediaData && (self?.isRecording)! {
                input.append(sampleBuffer)
            }
        }
    }
}
Before calling input.append(...), you should make sure your AVAssetWriter session has been started. AVAssetWriter doesn't seem to expose a property for the session state, so you should add an isSessionStarted flag to the WritAR class. Then check this flag before calling input.append(...) and (re)start the session if needed.
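For reference, a minimal sketch of that flag; the name isSessionStarted comes from the answer, while placing it alongside WritAR's other state properties is an assumption:
// Tracks whether startSession(atSourceTime:) has already been called
// on the asset writer for the current recording.
private var isSessionStarted = false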
Edit: write a helper function that starts the session:
func startSessionIfNeeded(atSourceTime time: CMTime) {
    if isSessionStarted {
        return
    }
    assetWriter.startSession(atSourceTime: time)
    isSessionStarted = true
}
In your code:
func insert(pixel buffer: CVPixelBuffer, with time: CMTime) {
    if assetWriter.status == .unknown {
        guard startingVideoTime == nil else {
            isWritingWithoutError = false
            return
        }
        startingVideoTime = time
        if assetWriter.startWriting() {
            assetWriter.startSession(atSourceTime: startingVideoTime!)
            currentDuration = 0
            isRecording = true
            isWritingWithoutError = true
        } else {
            ...
        }
Replace the line assetWriter.startSession(atSourceTime: startingVideoTime!) with a call to the helper function, startSessionIfNeeded(atSourceTime: startingVideoTime!).
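For clarity, the startup branch of insert(pixel:with:) after that substitution would read roughly as follows (a sketch; the force unwrap of startingVideoTime mirrors the original code):
if assetWriter.startWriting() {
    // The helper performs startSession(atSourceTime:) at most once
    // and records that fact in isSessionStarted.
    startSessionIfNeeded(atSourceTime: startingVideoTime!)
    currentDuration = 0
    isRecording = true
    isWritingWithoutError = true
} else {
    ...
}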
Also, change your captureOutput method:
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    if let input = audioInput {
        audioBufferQueue.async { [weak self] in
            // Start the writer session before appending audio, if it
            // hasn't been started yet. Bail out safely if the video side
            // hasn't established a starting time.
            guard let self = self, let startTime = self.startingVideoTime else { return }
            self.startSessionIfNeeded(atSourceTime: startTime)
            if input.isReadyForMoreMediaData && self.isRecording {
                input.append(sampleBuffer)
            }
        }
    }
}
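One detail the answer leaves implicit: the flag should be cleared once writing finishes, otherwise a second recording would skip startSession(atSourceTime:) and hit the same crash. A hedged sketch, assuming the reset belongs in WritAR's existing end(writing:) and cancel() methods:
// Assumption: reset the flag whenever the writer is torn down,
// e.g. at the end of end(writing:) and cancel().
isSessionStarted = false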
Regarding ios - AVAssetWriterInput appendSampleBuffer: Cannot append sample buffer: Must start a session (using -[AVAssetWriter startSessionAtSourceTime:]) first, we found a similar question on Stack Overflow: https://stackoverflow.com/questions/60146678/