gpt4 book ai didi

ios - AudioFileWriteBytes (AudioToolbox) 失败,错误代码为 -38 kAudioFileNotOpenError

转载 作者:IT王子 更新时间:2023-10-29 05:37:52 25 4
gpt4 key购买 nike

我正在开发记录用户音频的应用程序,使用 AudioToolbox 将数据写入文件并处理这些数据。在分析中,我看到 AudioToolbox 录音中发生了很多错误。其中几乎一半是从 AudioFileWriteBytes() 调用返回的 -38 kAudioFileNotOpenError 错误。

现在这似乎是无缘无故发生的:用户开始记录,一切顺利,然后连续记录 10-80 次错误。在分析中,没有诸如转到后台或暂停记录之类的用户操作(我们有此功能)。

我在网上搜索了信息,但没有找到太多关于错误的信息。

我发布了所有的录音代码(剥离了未使用的代码或分析代码),因为除了 -38 kAudioFileNotOpenError 我还有很多其他错误,这可能意味着我可能正在使用 AudioToolbox 以错误的方式。以下是最常见的错误:

  1. AudioFileWriteBytes() 返回 -38 kAudioFileNotOpenError 错误:(代码中的 [1])- 约占所有错误的 50%
  2. AudioUnitRender() 返回 -10863 kAudioUnitErr_CannotDoInCurrentContext 错误(代码中的 [2])- ~5%
  3. AudioFileWriteBytes() 返回 1868981823 kAudioFileDoesNotAllow64BitDataSizeError 错误:(代码中的 [1])- ~4%
  4. AudioUnitRender() 返回 -1 错误(代码中的 [2])- ~3%

任何帮助、评论或建议都将非常有帮助!

这是代码(它也可以在 GitHub 上获得:https://github.com/derpoliuk/SO-AudioToolbox-error-quesion):

class Recorder {

static let shared = Recorder()
private static let sampleRate: Float64 = 16000

var processAudioData: ((Data) -> ())?

fileprivate var remoteIOUnit: AudioComponentInstance?
private var audioFile: AudioFileID?
private var startingByte: Int64 = 0
// Audio recording settings
private let formatId: AudioFormatID = kAudioFormatLinearPCM
private let bitsPerChannel: UInt32 = 16
private let channelsPerFrame: UInt32 = 1
private let bytesPerFrame: UInt32 = 2 // channelsPerFrame * 2
private let framesPerPacket: UInt32 = 1
private let encoderBitRate = 12800
private let formatFlags: AudioFormatFlags = kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked

func record(atURL url: URL) {
var status = openFileForWriting(fileURL: url)
startingByte = 0
status = prepareAudioToolbox()
status = startAudioToolboxRecording()
}

func openFileForWriting(fileURL: URL) -> OSStatus {
var asbd = AudioStreamBasicDescription()
memset(&asbd, 0, MemoryLayout<AudioStreamBasicDescription>.size)
asbd.mSampleRate = Recorder.sampleRate
asbd.mFormatID = formatId
asbd.mFormatFlags = formatFlags
asbd.mBitsPerChannel = bitsPerChannel
asbd.mChannelsPerFrame = channelsPerFrame
asbd.mFramesPerPacket = framesPerPacket
asbd.mBytesPerFrame = bytesPerFrame
asbd.mBytesPerPacket = framesPerPacket * bytesPerFrame
// Set up the file
var audioFile: AudioFileID?
var audioErr: OSStatus = noErr
audioErr = AudioFileCreateWithURL(fileURL as CFURL, AudioFileTypeID(kAudioFileWAVEType), &asbd, .eraseFile, &audioFile)
if audioErr == noErr {
self.audioFile = audioFile
}
return audioErr
}

func prepareAudioToolbox() -> OSStatus {
var status = noErr
// Describe the RemoteIO unit
var audioComponentDescription = AudioComponentDescription()
audioComponentDescription.componentType = kAudioUnitType_Output
audioComponentDescription.componentSubType = kAudioUnitSubType_RemoteIO
audioComponentDescription.componentManufacturer = kAudioUnitManufacturer_Apple
audioComponentDescription.componentFlags = 0
audioComponentDescription.componentFlagsMask = 0
// Get the RemoteIO unit
var ioUnit: AudioComponentInstance?
let remoteIOComponent = AudioComponentFindNext(nil, &audioComponentDescription)
status = AudioComponentInstanceNew(remoteIOComponent!, &ioUnit)
guard status == noErr else {
return status
}
guard let remoteIOUnit = ioUnit else {
return 656783
}
self.remoteIOUnit = remoteIOUnit
// Configure the RemoteIO unit for input
let bus1: AudioUnitElement = 1
var oneFlag: UInt32 = 1
status = AudioUnitSetProperty(remoteIOUnit,
kAudioOutputUnitProperty_EnableIO,
kAudioUnitScope_Input,
bus1,
&oneFlag,
UInt32(MemoryLayout<UInt32>.size));
guard status == noErr else {
return status
}
var asbd = AudioStreamBasicDescription()
asbd.mSampleRate = Recorder.sampleRate
asbd.mFormatID = formatId
asbd.mFormatFlags = formatFlags
asbd.mBitsPerChannel = bitsPerChannel
asbd.mChannelsPerFrame = channelsPerFrame
asbd.mFramesPerPacket = framesPerPacket
asbd.mBytesPerFrame = bytesPerFrame
asbd.mBytesPerPacket = framesPerPacket * bytesPerFrame
// Set format for mic input (bus 1) on RemoteIO's output scope
status = AudioUnitSetProperty(remoteIOUnit,
kAudioUnitProperty_StreamFormat,
kAudioUnitScope_Output,
bus1,
&asbd,
UInt32(MemoryLayout<AudioStreamBasicDescription>.size))
guard status == noErr else {
return status
}
// Set the recording callback
var callbackStruct = AURenderCallbackStruct()
callbackStruct.inputProc = recordingCallback
callbackStruct.inputProcRefCon = nil
status = AudioUnitSetProperty(remoteIOUnit,
kAudioOutputUnitProperty_SetInputCallback,
kAudioUnitScope_Global,
bus1,
&callbackStruct,
UInt32(MemoryLayout<AURenderCallbackStruct>.size));
guard status == noErr else {
return status
}
// Initialize the RemoteIO unit
return AudioUnitInitialize(remoteIOUnit)
}

func startAudioToolboxRecording() -> OSStatus {
guard let remoteIOUnit = remoteIOUnit else {
return 656783
}
return AudioOutputUnitStart(remoteIOUnit)
}

func writeDataToFile(audioBuffers: UnsafeMutableBufferPointer<AudioBuffer>) -> OSStatus {
guard let audioFile = audioFile else {
return 176136
}
var startingByte = self.startingByte
for audioBuffer in audioBuffers {
var numBytes = audioBuffer.mDataByteSize
guard let mData = audioBuffer.mData else {
continue
}
// [1] following call fails with `-38` error (`kAudioFileNotOpenError`). Less often it fails with `1868981823` error (`kAudioFileDoesNotAllow64BitDataSizeError`)
let audioErr = AudioFileWriteBytes(audioFile,
false,
startingByte,
&numBytes,
mData)
guard audioErr == noErr else {
return audioErr
}
let data = Data(bytes: mData, count: Int(numBytes))
processAudioData?(data)
startingByte += Int64(numBytes)
}
self.startingByte = startingByte
return noErr
}

}

private func recordingCallback(
inRefCon: UnsafeMutableRawPointer,
ioActionFlags: UnsafeMutablePointer<AudioUnitRenderActionFlags>,
inTimeStamp: UnsafePointer<AudioTimeStamp>,
inBusNumber: UInt32,
inNumberFrames: UInt32,
ioData: UnsafeMutablePointer<AudioBufferList>?) -> OSStatus {

guard let remoteIOUnit = Recorder.shared.remoteIOUnit else {
return 656783
}
var status = noErr
let channelCount: UInt32 = 1
var bufferList = AudioBufferList()
bufferList.mNumberBuffers = channelCount
let buffers = UnsafeMutableBufferPointer<AudioBuffer>(start: &bufferList.mBuffers,
count: Int(bufferList.mNumberBuffers))
buffers[0].mNumberChannels = 1
buffers[0].mDataByteSize = inNumberFrames * 2
buffers[0].mData = nil
// get the recorded samples
// [2] following call fails with `-10863` error (`kAudioUnitErr_CannotDoInCurrentContext`) and less often with `-1` error
status = AudioUnitRender(remoteIOUnit,
ioActionFlags,
inTimeStamp,
inBusNumber,
inNumberFrames,
UnsafeMutablePointer<AudioBufferList>(&bufferList))
guard status == noErr else {
return status
}
status = Recorder.shared.writeDataToFile(audioBuffers: buffers)
return status
}

最佳答案

好的,我希望这能有所帮助,我认为问题在于记录速度、写入速度和渲染速度,所以你可以用 block 来写,这样你就可以隔离方法,我在图形方面也有同样的问题,所以我总是阻塞(锁定和解锁缓冲区)。

Working with Blocks

关于ios - AudioFileWriteBytes (AudioToolbox) 失败,错误代码为 -38 kAudioFileNotOpenError,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/48344490/

25 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com