
ios - How can I get AKSequencer to switch instrument sounds (SoundFonts)?


I am using the AudioKit API to build a feature where the user taps notes on the screen and each note is played back with the SoundFont they have chosen. I then let them collect a series of notes and play them back in the order they chose. The problem is that I am using AKSequencer to play the notes, and when AKSequencer plays them it does not sound like the SoundFont; it just beeps. Is there code that lets me change the sound that AKSequencer produces?

I am using AudioKit to do this.

Sample is an NSObject that contains the midisampler, the player, and so on. Here is the code:

import AVFoundation
import AudioToolbox
import AudioKit

class Sampler1: NSObject {
    var engine = AVAudioEngine()
    var sampler: AVAudioUnitSampler!
    var midisampler = AKMIDISampler()
    var octave = 4
    let midiChannel = 0
    var midiVelocity = UInt8(127)
    var audioGraph: AUGraph?
    var musicPlayer: MusicPlayer?
    var patch = UInt32(0)
    var synthUnit: AudioUnit?
    var synthNode = AUNode()
    var outputNode = AUNode()

    override init() {
        super.init()
        // engine = AVAudioEngine()
        sampler = AVAudioUnitSampler()

        engine.attach(sampler)
        engine.connect(sampler, to: engine.mainMixerNode, format: nil)
        loadSF2PresetIntoSampler(5)
        /* sampler2 = AVAudioUnitSampler()
        engine.attachNode(sampler2)
        engine.connect(sampler2, to: engine.mainMixerNode, format: nil)
        */
        addObservers()

        startEngine()

        setSessionPlayback()
        /* CheckError(NewAUGraph(&audioGraph))
        createOutputNode(audioGraph: audioGraph!, outputNode: &outputNode)
        createSynthNode()
        CheckError(AUGraphNodeInfo(audioGraph!, synthNode, nil, &synthUnit))
        let synthOutputElement: AudioUnitElement = 0
        let ioUnitInputElement: AudioUnitElement = 0
        CheckError(AUGraphConnectNodeInput(audioGraph!, synthNode, synthOutputElement,
                                           outputNode, ioUnitInputElement))
        CheckError(AUGraphInitialize(audioGraph!))
        CheckError(AUGraphStart(audioGraph!))
        loadnewSoundFont()
        loadPatch(patchNo: 0)*/
        setUpSequencer()
    }
    func createOutputNode(audioGraph: AUGraph, outputNode: UnsafeMutablePointer<AUNode>) {
        var cd = AudioComponentDescription(
            componentType: OSType(kAudioUnitType_Output),
            componentSubType: OSType(kAudioUnitSubType_RemoteIO),
            componentManufacturer: OSType(kAudioUnitManufacturer_Apple),
            componentFlags: 0, componentFlagsMask: 0)
        CheckError(AUGraphAddNode(audioGraph, &cd, outputNode))
    }

    func loadSF2PresetIntoSampler(_ preset: UInt8) {
        guard let bankURL = Bundle.main.url(forResource: "Arachno SoundFont - Version 1.0", withExtension: "sf2") else {
            print("could not load sound font")
            return
        }
        let folder = bankURL.path

        do {
            try self.sampler.loadSoundBankInstrument(at: bankURL,
                                                     program: preset,
                                                     bankMSB: UInt8(kAUSampler_DefaultMelodicBankMSB),
                                                     bankLSB: UInt8(kAUSampler_DefaultBankLSB))

            try midisampler.loadSoundFont(folder, preset: 0, bank: kAUSampler_DefaultBankLSB)
            // try midisampler.loadPath(bankURL.absoluteString)
        } catch {
            print("error loading sound bank instrument")
        }
    }

    func createSynthNode() {
        var cd = AudioComponentDescription(
            componentType: OSType(kAudioUnitType_MusicDevice),
            componentSubType: OSType(kAudioUnitSubType_MIDISynth),
            componentManufacturer: OSType(kAudioUnitManufacturer_Apple),
            componentFlags: 0, componentFlagsMask: 0)
        CheckError(AUGraphAddNode(audioGraph!, &cd, &synthNode))
    }
    func setSessionPlayback() {
        let audioSession = AVAudioSession.sharedInstance()
        do {
            try audioSession.setCategory(AVAudioSession.Category.playback,
                                         options: AVAudioSession.CategoryOptions.mixWithOthers)
        } catch {
            print("couldn't set category \(error)")
            return
        }

        do {
            try audioSession.setActive(true)
        } catch {
            print("couldn't set category active \(error)")
            return
        }
    }

    func startEngine() {
        if engine.isRunning {
            print("audio engine already started")
            return
        }

        do {
            try engine.start()
            print("audio engine started")
        } catch {
            print("oops \(error)")
            print("could not start audio engine")
        }
    }

    func addObservers() {
        // the notification handler methods are not shown in this snippet
        NotificationCenter.default.addObserver(self,
                                               selector: Selector("engineConfigurationChange:"),
                                               name: NSNotification.Name.AVAudioEngineConfigurationChange,
                                               object: engine)

        NotificationCenter.default.addObserver(self,
                                               selector: Selector("sessionInterrupted:"),
                                               name: AVAudioSession.interruptionNotification,
                                               object: engine)

        NotificationCenter.default.addObserver(self,
                                               selector: Selector("sessionRouteChange:"),
                                               name: AVAudioSession.routeChangeNotification,
                                               object: engine)
    }

    func removeObservers() {
        NotificationCenter.default.removeObserver(self,
                                                  name: NSNotification.Name.AVAudioEngineConfigurationChange,
                                                  object: nil)

        NotificationCenter.default.removeObserver(self,
                                                  name: AVAudioSession.interruptionNotification,
                                                  object: nil)

        NotificationCenter.default.removeObserver(self,
                                                  name: AVAudioSession.routeChangeNotification,
                                                  object: nil)
    }

    private func setUpSequencer() {
        // set the sequencer voice to storedPatch so we can play along with it using patch
        var status = NewMusicSequence(&musicSequence)
        if status != noErr {
            print("\(#line) bad status \(status) creating sequence")
        }

        status = MusicSequenceNewTrack(musicSequence!, &track)
        if status != noErr {
            print("error creating track \(status)")
        }

        // 0xB0 = bank select, first we do the most significant byte
        var chanmess = MIDIChannelMessage(status: 0xB0 | sequencerMidiChannel, data1: 0, data2: 0, reserved: 0)
        status = MusicTrackNewMIDIChannelEvent(track!, 0, &chanmess)
        if status != noErr {
            print("creating bank select event \(status)")
        }

        // then the least significant byte
        chanmess = MIDIChannelMessage(status: 0xB0 | sequencerMidiChannel, data1: 32, data2: 0, reserved: 0)
        status = MusicTrackNewMIDIChannelEvent(track!, 0, &chanmess)
        if status != noErr {
            print("creating bank select event \(status)")
        }

        // set the voice
        chanmess = MIDIChannelMessage(status: 0xC0 | sequencerMidiChannel, data1: UInt8(0), data2: 0, reserved: 0)
        status = MusicTrackNewMIDIChannelEvent(track!, 0, &chanmess)
        if status != noErr {
            print("creating program change event \(status)")
        }

        CheckError(MusicSequenceSetAUGraph(musicSequence!, audioGraph))
        CheckError(NewMusicPlayer(&musicPlayer))
        CheckError(MusicPlayerSetSequence(musicPlayer!, musicSequence))
        CheckError(MusicPlayerPreroll(musicPlayer!))
    }

    func loadnewSoundFont() {
        var bankURL = Bundle.main.url(forResource: "Arachno SoundFont - Version 1.0", withExtension: "sf2")
        CheckError(AudioUnitSetProperty(synthUnit!,
                                        AudioUnitPropertyID(kMusicDeviceProperty_SoundBankURL),
                                        AudioUnitScope(kAudioUnitScope_Global),
                                        0,
                                        &bankURL,
                                        UInt32(MemoryLayout<URL>.size)))
    }

    func loadPatch(patchNo: Int) {
        let channel = UInt32(0)
        var enabled = UInt32(1)
        var disabled = UInt32(0)
        patch = UInt32(patchNo)

        CheckError(AudioUnitSetProperty(
            synthUnit!,
            AudioUnitPropertyID(kAUMIDISynthProperty_EnablePreload),
            AudioUnitScope(kAudioUnitScope_Global),
            0,
            &enabled,
            UInt32(MemoryLayout<UInt32>.size)))

        let programChangeCommand = UInt32(0xC0 | channel)
        CheckError(MusicDeviceMIDIEvent(self.synthUnit!, programChangeCommand, patch, 0, 0))

        CheckError(AudioUnitSetProperty(
            synthUnit!,
            AudioUnitPropertyID(kAUMIDISynthProperty_EnablePreload),
            AudioUnitScope(kAudioUnitScope_Global),
            0,
            &disabled,
            UInt32(MemoryLayout<UInt32>.size)))

        // the previous programChangeCommand just triggered a preload
        // this one actually changes to the new voice
        CheckError(MusicDeviceMIDIEvent(synthUnit!, programChangeCommand, patch, 0, 0))
    }

    func play(number: UInt8) {
        sampler.startNote(number, withVelocity: 127, onChannel: 0)
    }

    func stop(number: UInt8) {
        sampler.stopNote(number, onChannel: 0)
    }

    func musicPlayerPlay() {
        var status = noErr
        var playing: DarwinBoolean = false
        CheckError(MusicPlayerIsPlaying(musicPlayer!, &playing))
        if playing != false {
            status = MusicPlayerStop(musicPlayer!)
            if status != noErr {
                print("Error stopping \(status)")
                CheckError(status)
                return
            }
        }

        CheckError(MusicPlayerSetTime(musicPlayer!, 0))
        CheckError(MusicPlayerStart(musicPlayer!))
    }



    var avsequencer: AVAudioSequencer!
    var sequencerMode = 1
    var sequenceStartTime: Date?
    var noteOnTimes = [Date](repeating: Date(), count: 128)
    var musicSequence: MusicSequence?
    var midisequencer = AKSequencer()
    // var musicPlayer: MusicPlayer?
    let sequencerMidiChannel = UInt8(1)
    var midisynthUnit: AudioUnit?

    // track is the variable the notes are written on
    var track: MusicTrack?
    var newtrack: AKMusicTrack?

    func setupSequencer(name: String) {
        self.avsequencer = AVAudioSequencer(audioEngine: self.engine)
        let options = AVMusicSequenceLoadOptions.smfChannelsToTracks

        if let fileURL = Bundle.main.url(forResource: name, withExtension: "mid") {
            do {
                try avsequencer.load(from: fileURL, options: options)
                print("loaded \(fileURL)")
            } catch {
                print("something screwed up \(error)")
                return
            }
        }
        avsequencer.prepareToPlay()
    }

    func playsequence() {
        if avsequencer.isPlaying {
            stopsequence()
        }

        avsequencer.currentPositionInBeats = TimeInterval(0)

        do {
            try avsequencer.start()
        } catch {
            print("cannot start \(error)")
        }
    }

    func creatnewtrck() {
        let sequencelegnth = AKDuration(beats: 8.0)
        newtrack = midisequencer.newTrack()
    }

    func addnotestotrack() {
        // AKMIDISampler
    }

    func stopsequence() {
        avsequencer.stop()
    }

    func setSequencerMode(mode: Int) {
        sequencerMode = mode
        switch sequencerMode {
        case SequencerMode.off.rawValue:
            print(mode)
            // CheckError(osstatus: MusicPlayerStop(musicPlayer!))
        case SequencerMode.recording.rawValue:
            print(mode)
        case SequencerMode.playing.rawValue:
            print(mode)
        default:
            break
        }
    }

    /* func noteOn(note: UInt8) {
        let noteCommand = UInt32(0x90 | midiChannel)
        let base = note - 48
        let octaveAdjust = (UInt8(octave) * 12) + base
        let pitch = UInt32(octaveAdjust)

        CheckError(MusicDeviceMIDIEvent(self.midisynthUnit!,
                                        noteCommand, pitch, UInt32(self.midiVelocity), 0))
    }

    func noteOff(note: UInt8) {
        let channel = UInt32(0)
        let noteCommand = UInt32(0x80 | channel)
        let base = note - 48
        let octaveAdjust = (UInt8(octave) * 12) + base
        let pitch = UInt32(octaveAdjust)

        CheckError(MusicDeviceMIDIEvent(self.midisynthUnit!,
                                        noteCommand, pitch, 0, 0))
    } */

    func noteOn(note: UInt8) {
        if sequencerMode == SequencerMode.recording.rawValue {
            print("recording sequence note")
            noteOnTimes[Int(note)] = Date()
        } else {
            print("no notes")
        }
    }

    func noteOff(note: UInt8, timestamp: Float64, sequencetime: Date) {
        if sequencerMode == SequencerMode.recording.rawValue {
            let duration: Double = Date().timeIntervalSince(noteOnTimes[Int(note)])
            let onset: Double = noteOnTimes[Int(note)].timeIntervalSince(sequencetime)
            // the order of the notes in the array
            var beat: MusicTimeStamp = 0

            CheckError(MusicSequenceGetBeatsForSeconds(musicSequence!, onset, &beat))
            var mess = MIDINoteMessage(channel: sequencerMidiChannel,
                                       note: note,
                                       velocity: midiVelocity,
                                       releaseVelocity: 0,
                                       duration: Float(duration))
            CheckError(MusicTrackNewMIDINoteEvent(track!, timestamp, &mess))
        }
    }
}




The code that plays the collection of notes:


_ = sample.midisequencer.newTrack()

let sequencelegnth = AKDuration(beats: 8.0)
sample.midisequencer.setLength(sequencelegnth)
sample.sequenceStartTime = format.date(from: format.string(from: NSDate() as Date))

sample.midisequencer.setTempo(160.0)

sample.midisequencer.enableLooping()
sample.midisequencer.play()

This is the code that changes the instrument sound:

    func loadSF2PresetIntoSampler(_ preset: UInt8) {
        guard let bankURL = Bundle.main.url(forResource: "Arachno SoundFont - Version 1.0", withExtension: "sf2") else {
            print("could not load sound font")
            return
        }
        let folder = bankURL.path

        do {
            try self.sampler.loadSoundBankInstrument(at: bankURL,
                                                     program: preset,
                                                     bankMSB: UInt8(kAUSampler_DefaultMelodicBankMSB),
                                                     bankLSB: UInt8(kAUSampler_DefaultBankLSB))

            try midisampler.loadSoundFont(folder, preset: 0, bank: kAUSampler_DefaultBankLSB)
            // try midisampler.loadPath(bankURL.absoluteString)
        } catch {
            print("error loading sound bank instrument")
        }
    }

midisampler is an AKMIDISampler.

Best Answer

At the very least, you need to connect the AKSequencer to some kind of output for it to make sound. With the older version (now called AKAppleSequencer), if you don't explicitly set the output you will hear the default (beepy) sampler.

For example, on AKAppleSequencer (in AudioKit 4.8, or AKSequencer in earlier versions):

let track = seq.newTrack()  
track!.setMIDIOutput(sampler.midiIn)

On the new AKSequencer:

let track = seq.newTrack()  // for the new AKSequencer, in AudioKit 4.8
track!.setTarget(node: sampler)

Also, make sure you have enabled the audio background mode in your project's capabilities, because missing this step will also leave you with the default sampler.
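Putting those pieces together for this question's use case, here is a minimal sketch of the routing (assuming AudioKit 4.x with the older AKSequencer/AKAppleSequencer API and an AKMIDISampler; the SoundFont name is taken from the question, and the preset and note values are placeholders):

let sampler = AKMIDISampler()
let seq = AKSequencer()          // AKAppleSequencer in AudioKit 4.8+

do {
    // load the SoundFont into the sampler that the sequencer will drive
    try sampler.loadSoundFont("Arachno SoundFont - Version 1.0", preset: 5, bank: 0)
    AudioKit.output = sampler    // the sampler itself must also reach the speakers
    try AudioKit.start()
} catch {
    print("setup failed: \(error)")
}

let track = seq.newTrack()
// without this line the track falls back to the default (beepy) sampler
track?.setMIDIOutput(sampler.midiIn)

// placeholder note; the question builds these from the user's collected notes
track?.add(noteNumber: 60, velocity: 127,
           position: AKDuration(beats: 0), duration: AKDuration(beats: 1))

seq.setLength(AKDuration(beats: 8))
seq.setTempo(160)
seq.play()

The essential line is track?.setMIDIOutput(sampler.midiIn); the rest mirrors what the question's code already does with setLength, setTempo and play().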

You have included a huge amount of code (I haven't tried to absorb everything that is going on here), but the fact that you are using both MusicSequence and AKSequencer (the older version, I suspect, which is now called AKAppleSequencer and is merely a wrapper around MusicSequence) is a red flag.

Regarding "ios - How can I get AKSequencer to switch instrument sounds?", a similar question was found on Stack Overflow: https://stackoverflow.com/questions/57479356/
