
ios - Audio player in Swift is not getting the volume and pitch values


I am trying to build an audio player in SwiftUI. The player should have these features:

  • Play/stop the audio
  • Loop the audio
  • Change the volume through a slider
  • Change the pitch of the audio through a slider

Currently I am facing two problems:

  • The audio player is not using the volume and pitch slider values.
  • When I stop, play again, and change the volume/pitch slider, the app crashes with the following message:

    2020-10-14 17:34:08.957709+0530 SwiftUIAudioPlayer[1369:24886] [avae] AVAEInternal.h:109 [AVAudioFile.mm:484:-[AVAudioFile readIntoBuffer:frameCount:error:]: (ExtAudioFileRead(_imp->_extAudioFile, &ioFrames, buffer.mutableAudioBufferList)): error -50


Here is a link to the project: https://github.com/varun-naharia/SwiftUIAudioPlayer

ContentView.swift
    import Foundation
    import SwiftUI

    struct ContentView: View {
        @State var volume:Double = 0.00
        @State var pitch:Double = 0.0
        @State var musicFiles:[SoundModel] = [SoundModel(file: "metro35", name: "Metronome", fileExtension: "wav"), SoundModel(file: "johnson_tone_down_5min", name: "Johnson", fileExtension: "wav"), SoundModel(file: "sine_140_6s_fade_ogg", name: "Sine wave", fileExtension: "wav")]
        @State var selectedMusicFile:SoundModel = SoundModel(file: "sine_140_6s_fade_ogg", name: "Sine wave", fileExtension: "wav")
        @State var showSoundPicker = false
        @State var selectedGraph = "skin_conductance"
        @State var iconSize:CGFloat = 0.124
        @State var iconSpace:CGFloat = 0.015
        @State var heart = false

        init() {
            Player.setPitch(pitch: Float(self.pitch))
            Player.setVolume(volume: Float(self.volume))
        }

        var body: some View {
            GeometryReader { geometry in
                ZStack {
                    VStack(alignment: .leading) {
                        Button(action: {
                            self.heart = !self.heart
                            self.selectedGraph = "heart"
                            if(self.heart)
                            {
                                Player.playMusic(musicfile: self.selectedMusicFile.file, fileExtension: self.selectedMusicFile.fileExtension)
                            }
                            else
                            {
                                Player.stopMusic()
                                self.selectedGraph = ""
                            }
                        })
                        {
                            Image(self.selectedGraph == "heart" ? "heart" : "heart_disabled")
                                .resizable()
                                .frame(width: geometry.size.height*self.iconSize, height: geometry.size.height*self.iconSize)
                        }
                        .frame(width: geometry.size.height*self.iconSize, height: geometry.size.height*self.iconSize)
                        .padding(.bottom, geometry.size.height*(self.iconSpace/2))

                        Button(action: {
                            self.showSoundPicker = !self.showSoundPicker
                        })
                        {
                            Image("tone")
                                .resizable()
                                .frame(width: geometry.size.height*self.iconSize, height: geometry.size.height*self.iconSize)
                        }
                        .frame(width: geometry.size.height*self.iconSize, height: geometry.size.height*self.iconSize)
                        .padding(.bottom, geometry.size.height*(self.iconSpace/2))

                        HStack {
                            SwiftUISlider(
                                thumbColor: .green,
                                thumbImage: "musicNote 2",
                                value: self.$volume
                            ).padding(.horizontal)
                            Button(action: {

                            })
                            {
                                Image("centerGraph")
                                    .resizable()
                                    .frame(width: geometry.size.width*0.05, height: geometry.size.width*0.05)
                            }
                            .frame(width: geometry.size.width*0.03, height: geometry.size.width*0.03)
                            SwiftUISlider(
                                thumbColor: .green,
                                thumbImage: "timerSlider 2",
                                minValue: 0,
                                maxValue: 20,
                                value: self.$pitch
                            )
                            .padding(.horizontal)
                            .frame(width: (geometry.size.width/2)-geometry.size.width*0.05, height: geometry.size.width*0.05)
                        }
                        .background(Color(UIColor.lightGray))
                        .frame(width: geometry.size.width, height: geometry.size.height*0.10)
                        if(self.showSoundPicker)
                        {
                            ChooseSoundView(
                                musicFiles: self.musicFiles,
                                selectedMusicFile: self.$selectedMusicFile,
                                showSoundPicker: self.$showSoundPicker,
                                isPlaying: self.selectedGraph != ""
                            )
                            .frame(width: geometry.size.width*0.6, height: geometry.size.height*0.7, alignment: .center)
                            .background(Color.white)
                        }
                    }
                    .frame(maxWidth: geometry.size.width,
                           maxHeight: geometry.size.height)
                    .background(Color(UIColor.lightGray))
                }
            }
        }
    }

    struct ContentView_Previews: PreviewProvider {
        static var previews: some View {
            ContentView()
        }
    }

    struct ChooseSoundView: View {
        @State var musicFiles:[SoundModel]
        @Binding var selectedMusicFile:SoundModel
        @Binding var showSoundPicker:Bool
        @State var isPlaying:Bool

        var body: some View {
            GeometryReader { geometry in
                VStack(alignment: .leading)
                {
                    List(self.musicFiles, id: \.name)
                    { item in
                        Image(self.selectedMusicFile.file == item.file ? "radio-button_on" : "radio-button_off")
                            .resizable()
                            .frame(width: 15, height: 15)
                        Button(action: {
                            print(item.name)
                            self.selectedMusicFile = item
                            self.showSoundPicker = false
                            if(self.isPlaying)
                            {
                                // Player.stopMusic()
                                // Player.playMusic(musicfile: self.selectedMusicFile.file, fileExtension: self.selectedMusicFile.fileExtension)
                            }
                        }){
                            Text(item.name)
                                .frame(width: geometry.size.width*90,
                                       height: 50.0,
                                       alignment: .leading)
                        }
                        .frame(width: geometry.size.width*90, height: 50.0)
                    }
                    HStack {
                        Button(action: {
                            self.showSoundPicker = false
                        }){
                            Text("Done")
                                .frame(width: geometry.size.width*0.45,
                                       height: 50.0,
                                       alignment: .center)
                        }
                        .frame(width: geometry.size.width*0.45, height: 50.0)
                        Button(action: {
                            self.showSoundPicker = false
                        }){
                            Text("Cancel")
                                .frame(width: geometry.size.width*0.45,
                                       height: 50.0,
                                       alignment: .center)
                        }
                        .frame(width: geometry.size.width*0.45, height: 50.0)
                    }
                    .background(Color.white)
                }
            }
        }
    }
Player.swift

    import Foundation
    import AVFoundation

    class Player {

        private static var breathAudioPlayer:AVAudioPlayer?
        private static var audioPlayerEngine = AVAudioEngine()
        private static let speedControl = AVAudioUnitVarispeed()
        private static var pitchControl = AVAudioUnitTimePitch()
        private static var audioPlayerNode = AVAudioPlayerNode()
        private static var volume:Float = 1.0

        private static func playSounds(soundfile: String) {
            if let path = Bundle.main.path(forResource: soundfile, ofType: "m4a") {
                do {
                    breathAudioPlayer = try AVAudioPlayer(contentsOf: URL(fileURLWithPath: path))
                    breathAudioPlayer?.volume = self.volume
                    breathAudioPlayer?.prepareToPlay()
                    breathAudioPlayer?.play()
                } catch {
                    print("Error")
                }
            }
        }

        static func playMusic(musicfile: String, fileExtension: String) {
            if let path = Bundle.main.path(forResource: musicfile, ofType: fileExtension) {
                do {
                    // 1: load the file
                    let audioPlayFile = try AVAudioFile(forReading: URL(fileURLWithPath: path))
                    let audioFileBuffer = AVAudioPCMBuffer(pcmFormat: audioPlayFile.fileFormat, frameCapacity: AVAudioFrameCount(audioPlayFile.length))
                    try? audioPlayFile.read(into: audioFileBuffer!)

                    // 2: create the audio player
                    audioPlayerNode = AVAudioPlayerNode()
                    audioPlayerEngine = AVAudioEngine()

                    // you can replace mp3 with anything else you like, just make sure you load it from our project
                    // making sure to clean up the audio hardware to avoid any damage and bugs
                    audioPlayerNode.stop()
                    audioPlayerEngine.stop()
                    audioPlayerEngine.reset()

                    audioPlayerEngine.attach(audioPlayerNode)

                    let pitchControl = AVAudioUnitTimePitch()

                    // assign the speed and pitch
                    audioPlayerEngine.attach(pitchControl)
                    audioPlayerEngine.connect(audioPlayerNode, to: pitchControl, format: nil)
                    audioPlayerEngine.connect(pitchControl, to: audioPlayerEngine.outputNode, format: nil)

                    audioPlayerNode.scheduleFile(audioPlayFile, at: nil, completionHandler: nil)

                    // try to start playing the audio
                    audioPlayerNode.scheduleBuffer(audioFileBuffer!, at: nil, options: .loops, completionHandler: nil)
                    do {
                        try audioPlayerEngine.start()
                    } catch {
                        print(error)
                    }

                    // play the audio
                    audioPlayerNode.play()
                } catch {
                    print("Error")
                }
            }
        }

        static func breathIn() {
            // Player.playSounds(soundfile: "breathin")
        }

        static func breathOut() {
            // Player.playSounds(soundfile: "breathout")
        }

        static func play(musicFile: String, fileExtension: String) {
            Player.playMusic(musicfile: musicFile, fileExtension: fileExtension)
        }

        static func stopMusic() {
            audioPlayerNode.pause()
            audioPlayerNode.stop()
        }

        static func setPitch(pitch: Float) {
            pitchControl.pitch = pitch
        }

        static func setVolume(volume: Float) {
            audioPlayerNode.volume = volume
        }
    }
SwiftUISlider.swift

    import Foundation
    import SwiftUI

    struct SwiftUISlider: UIViewRepresentable {
        var onChangeNotification:String = ""

        final class Coordinator: NSObject {
            // The class property value is a binding: It’s a reference to the SwiftUISlider
            // value, which receives a reference to a @State variable value in ContentView.
            var value: Binding<Double>

            // Create the binding when you initialize the Coordinator
            init(value: Binding<Double>) {
                self.value = value
            }

            // Create a valueChanged(_:) action
            @objc func valueChanged(_ sender: UISlider) {
                self.value.wrappedValue = Double(sender.value)
            }
        }

        var thumbColor: UIColor = .white
        var minTrackColor: UIColor?
        var maxTrackColor: UIColor?
        var thumbImage:String?
        var minValue:Float?
        var maxValue:Float?

        @Binding var value: Double

        func makeUIView(context: Context) -> UISlider {
            let slider = UISlider(frame: .zero)
            slider.thumbTintColor = thumbColor
            slider.minimumTrackTintColor = minTrackColor
            slider.maximumTrackTintColor = maxTrackColor
            slider.value = Float(value)
            if(self.minValue != nil)
            {
                slider.minimumValue = self.minValue!
            }
            if(self.maxValue != nil)
            {
                slider.maximumValue = self.maxValue!
            }
            slider.setThumbImage(UIImage(named: self.thumbImage ?? ""), for: .normal)
            slider.setThumbImage(UIImage(named: self.thumbImage ?? ""), for: .focused)
            slider.setThumbImage(UIImage(named: self.thumbImage ?? ""), for: .highlighted)

            slider.addTarget(
                context.coordinator,
                action: #selector(Coordinator.valueChanged(_:)),
                for: .valueChanged
            )

            return slider
        }

        func onValueChange(_ sender: UISlider) {

        }

        func updateUIView(_ uiView: UISlider, context: Context) {
            // Coordinating data between UIView and SwiftUI view
            uiView.value = Float(self.value)
        }

        func makeCoordinator() -> SwiftUISlider.Coordinator {
            Coordinator(value: $value)
        }
    }
SoundModel.swift

    import Foundation
    import Combine

    class SoundModel: ObservableObject, Identifiable
    {
        @Published var file:String
        @Published var name:String
        @Published var fileExtension:String

        init(file:String, name:String, fileExtension:String) {
            self.file = file
            self.name = name
            self.fileExtension = fileExtension
        }
    }

Best Answer

Your first problem is that you are not tracking changes to the volume/pitch values. To do that, move them into a class:

    class PlayerSetup: ObservableObject {
        @Published var volume: Double = 0.00 {
            didSet {
                Player.setVolume(volume: Float(self.volume))
            }
        }
        @Published var pitch: Double = 0.0 {
            didSet {
                Player.setPitch(pitch: Float(self.pitch))
            }
        }
    }
Declare it in your view:
    @ObservedObject var playerSetup = PlayerSetup()
And bind it to your views:
    SwiftUISlider(
        thumbColor: .green,
        thumbImage: "musicNote 2",
        value: $playerSetup.volume
    ).padding(.horizontal)
    SwiftUISlider(
        thumbColor: .green,
        thumbImage: "timerSlider 2",
        minValue: 0,
        maxValue: 20,
        value: $playerSetup.pitch
    )
It crashes when the file finishes playing because try? audioPlayFile.read(into: audioFileBuffer!) fails (likely because the buffer is created with the file's fileFormat, while read(into:) expects the processingFormat), so the buffer you schedule after the file is empty. The file plays through once only because of the scheduleFile call. If you want to loop a single file, call this function instead:
    static func scheduleNext(audioPlayFile: AVAudioFile) {
        audioPlayerNode.scheduleFile(audioPlayFile, at: nil) {
            DispatchQueue.main.async {
                scheduleNext(audioPlayFile: audioPlayFile)
            }
        }
    }
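For context, this is roughly how playMusic would call it; a minimal sketch assuming the rest of the node setup stays as in the question, with the read(into:) and scheduleBuffer(..., options: .loops, ...) pair removed entirely:

    // Inside playMusic, after attaching and connecting the nodes:
    // schedule the first pass of the file and let scheduleNext re-schedule it
    // from its completion handler, instead of looping an empty PCM buffer.
    scheduleNext(audioPlayFile: audioPlayFile)

    do {
        try audioPlayerEngine.start()
    } catch {
        print(error)
    }
    audioPlayerNode.play()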
pitchControl has no effect because you use a local value when you start playback; just remove the local declaration so the static pitchControl is the one connected to the engine.
Now for the volume. As you can see in the documentation, "This property is implemented only by the AVAudioEnvironmentNode and AVAudioMixerNode class mixers." So you can't use it on the player node; you need to create a mixer node, add it to the node chain, and change its volume.
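A minimal sketch of that change, assuming a static mixer node is added to Player (the property name mixerNode is an assumption of mine, not something from the original project):

    // Hypothetical static property, declared next to pitchControl and audioPlayerNode:
    private static let mixerNode = AVAudioMixerNode()

    // Volume changes now target the mixer that sits in the render chain,
    // rather than the player node, whose volume property is not honored here.
    static func setVolume(volume: Float) {
        mixerNode.outputVolume = volume
    }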
Also, to clean up the node-wiring code, I suggest the following:
    let nodes = [
        audioPlayerNode,
        pitchControl,
        mixerNode,
    ]
    nodes.forEach { node in
        audioPlayerEngine.attach(node)
    }
    zip(nodes, (nodes.dropFirst() + [audioPlayerEngine.outputNode]))
        .forEach { firstNode, secondNode in
            audioPlayerEngine.connect(firstNode, to: secondNode, format: nil)
        }
It connects all the nodes one by one.
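Putting the pieces together, a revised playMusic could look roughly like the sketch below. It is only an illustration of the points above, assuming the static mixerNode from the earlier sketch and a one-time configureEngineIfNeeded() helper of my own; the author's complete fix is in the fork linked below.

    private static var engineConfigured = false

    private static func configureEngineIfNeeded() {
        guard !engineConfigured else { return }
        engineConfigured = true

        // player -> pitch -> mixer -> output, built once from the static nodes,
        // so setPitch(pitch:) and setVolume(volume:) affect the nodes that actually render.
        let nodes: [AVAudioNode] = [audioPlayerNode, pitchControl, mixerNode]
        nodes.forEach { node in
            audioPlayerEngine.attach(node)
        }
        zip(nodes, nodes.dropFirst() + [audioPlayerEngine.outputNode])
            .forEach { firstNode, secondNode in
                audioPlayerEngine.connect(firstNode, to: secondNode, format: nil)
            }
    }

    static func playMusic(musicfile: String, fileExtension: String) {
        guard let url = Bundle.main.url(forResource: musicfile, withExtension: fileExtension),
              let audioPlayFile = try? AVAudioFile(forReading: url) else { return }

        configureEngineIfNeeded()
        if audioPlayerEngine.isRunning {
            audioPlayerNode.stop()
        }

        // Loop by re-scheduling the file on completion (scheduleNext above)
        // instead of reading it into a PCM buffer and using the .loops option.
        scheduleNext(audioPlayFile: audioPlayFile)

        do {
            try audioPlayerEngine.start()
            audioPlayerNode.play()
        } catch {
            print(error)
        }
    }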
    https://github.com/PhilipDukhov/SwiftUIAudioPlayer/tree/fixes

Regarding "ios - Audio player in Swift is not getting the volume and pitch values", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/64353091/
