- html - 出于某种原因,IE8 对我的 Sass 文件中继承的 html5 CSS 不友好?
- JMeter 在响应断言中使用 span 标签的问题
- html - 在 :hover 和 :active 上具有不同效果的 CSS 动画?
- html - 相对于居中的 html 内容固定的 CSS 重复背景?
我正在尝试用 Swift 同步录制和播放。我需要分别在左声道和右声道播放。我使用 AudioUnit 在单个声道中成功录制和播放。但是当我尝试使用两个缓冲区来分别控制两个声道后,它们都静音了。下面是我设置格式的方法:
// Stream format: 32-bit, interleaved, 2-channel linear PCM.
var audioFormat = AudioStreamBasicDescription()
audioFormat.mSampleRate = Double(sampleRate)
audioFormat.mFormatID = kAudioFormatLinearPCM
// NOTE(review): these flags declare signed-integer samples, but the render
// callbacks below read and write the buffers as Float — the format flags and
// the sample type used in the callbacks must agree (e.g. use
// kLinearPCMFormatFlagIsFloat if the callbacks keep working in Float).
audioFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked
audioFormat.mChannelsPerFrame = 2   // interleaved stereo
audioFormat.mFramesPerPacket = 1    // always 1 for uncompressed PCM
audioFormat.mBitsPerChannel = 32
audioFormat.mBytesPerPacket = 8     // 2 channels * 4 bytes per sample
audioFormat.mBytesPerFrame = 8      // BUG FIX: was never set (stayed 0), leaving the ASBD inconsistent
audioFormat.mReserved = 0
这是我的输入回调
// Input-bus render callback: pulls the freshly captured frames out of the
// input AudioUnit into a temporary buffer and queues them for the output
// callback to consume.
private let inputCallback: AURenderCallback = {(
    inRefCon,
    ioActionFlags,
    inTimeStamp,
    inBusNumber,
    inNumberFrames,
    ioData) -> OSStatus in
    // Recover the owning object from the opaque context pointer.
    let audioRAP: AudioUnitSample = Unmanaged<AudioUnitSample>.fromOpaque(inRefCon).takeUnretainedValue()
    var status = noErr
    // NOTE(review): heap allocation inside a render callback violates Core
    // Audio's real-time rules (see the accepted answer below); buffers should
    // be pre-allocated outside the callback. Kept as the asker wrote it.
    // NOTE(review): inNumberFrames * 4 bytes only holds ONE Float channel;
    // two interleaved Float channels need inNumberFrames * 8 bytes — likely
    // part of why the stereo path produces silence.
    var buf = UnsafeMutableRawPointer.allocate(bytes: Int(inNumberFrames * 4),
                                               alignedTo: MemoryLayout<Int8>.alignment)
    let bindptr = buf.bindMemory(to: Float.self,
                                 capacity: Int(inNumberFrames * 4))
    bindptr.initialize(to: 0)
    var buffer: AudioBuffer = AudioBuffer(mNumberChannels: 2,
                                          mDataByteSize: inNumberFrames * 4,
                                          mData: buf)
    memset(buffer.mData, 0, Int(buffer.mDataByteSize))
    // FIX: this initializer was garbled in the original page text (a debug
    // print was fused into it); restored to a single-buffer AudioBufferList
    // wrapping `buffer`.
    var bufferList: AudioBufferList = AudioBufferList(mNumberBuffers: 1,
                                                      mBuffers: buffer)
    status = AudioUnitRender(audioRAP.newAudioUnit!,
                             ioActionFlags,
                             inTimeStamp,
                             inBusNumber,
                             inNumberFrames,
                             &bufferList)
    // Hand the rendered buffer (plus its byte size) over to the output side.
    audioRAP.audioBuffers.append((bufferList.mBuffers, Int(inNumberFrames * 4)))
    return status
}
这是我的输出回调:
// Output-bus render callback: copies the oldest queued capture buffer into
// both the left and right output channels, then releases it.
private let outputCallback:AURenderCallback = {
(inRefCon,
ioActionFlags,
inTimeStamp,
inBusNumber,
inNumberFrames,
ioData) -> OSStatus in
// Recover the owning object from the opaque context pointer.
let audioRAP:AudioUnitSample = Unmanaged<AudioUnitSample>.fromOpaque(inRefCon).takeUnretainedValue()
// No output buffer list supplied — nothing to fill.
if ioData == nil{
return noErr
}
// NOTE(review): overwriting mNumberBuffers does NOT allocate a second buffer;
// the actual buffer count is fixed by the stream format the unit was
// configured with. If the unit delivers interleaved stereo, abl![1] below
// reads past the list.
ioData!.pointee.mNumberBuffers = 2
var bufferCount = ioData!.pointee.mNumberBuffers
// NOTE(review): no emptiness check — audioBuffers[0] crashes if the input
// callback has not queued anything yet; also unsynchronized access to an
// array shared between two real-time callbacks.
var tempBuffer = audioRAP.audioBuffers[0]
var monoSamples = [Float]()
let ptr1 = tempBuffer.0.mData?.assumingMemoryBound(to: Float.self)
monoSamples.removeAll()
// Copy the captured samples into a Swift array (heap work that should not
// happen inside a render callback — see the accepted answer below).
monoSamples.append(contentsOf: UnsafeBufferPointer(start: ptr1, count: Int(inNumberFrames)))
let abl = UnsafeMutableAudioBufferListPointer(ioData)
let bufferLeft = abl![0]
let bufferRight = abl![1]
let pointerLeft: UnsafeMutableBufferPointer<Float32> = UnsafeMutableBufferPointer(bufferLeft)
let pointerRight: UnsafeMutableBufferPointer<Float32> = UnsafeMutableBufferPointer(bufferRight)
// Duplicate the captured samples into the left channel...
for frame in 0..<inNumberFrames {
let pointerIndex = pointerLeft.startIndex.advanced(by: Int(frame))
pointerLeft[pointerIndex] = monoSamples[Int(frame)]
}
// ...and into the right channel.
for frame in 0..<inNumberFrames {
let pointerIndex = pointerRight.startIndex.advanced(by: Int(frame))
pointerRight[pointerIndex] = monoSamples[Int(frame)]
}
// Deallocation inside the render callback — another real-time violation
// called out in the accepted answer.
tempBuffer.0.mData?.deallocate(bytes:tempBuffer.1, alignedTo: MemoryLayout<Int8>.alignment)
audioRAP.audioBuffers.removeFirst()
return noErr
}
这是音频缓冲区的声明:
private var audioBuffers = [(AudioBuffer, Int)]()
我是否错过了输出或输入部分的某些内容?任何帮助将不胜感激!
最佳答案
第一个大问题是你的代码正在内部进行内存分配 音频回调。 Apple 文档明确指出,不应在音频上下文内完成内存管理、同步甚至对象消息传递。在音频回调中,您可能只想坚持只将音频样本的数据复制到预分配的缓冲区或从预分配的缓冲区复制数据。其他所有事情(尤其是缓冲区创建和释放)都应该在音频回调之外完成。
关于swift - AudioUnit - 在 Swift 中控制左声道和右声道输出,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/47468717/
我想在我的 android 应用程序中播放 PCM 音频数据。网络上有很多示例,但仅用于单 channel ,我有 4 个 channel (如本问题标题所述)。 当我设置 AudioTrack au
我正在尝试通过 channelsplitter 将立体声音频路由到具有增益控制的 6 个 channel ,然后返回到 channelMerger,以控制 5.1 组的所有 6 个 channel .
我试图从 iPhone XS 的所谓立体声后置麦克风中获取两个 channel ,但在 AVAudioSession 的不同点上只能看到一个 channel 。和 AVAudioSessionPort
我是一名优秀的程序员,十分优秀!