2015-12-03 87 views
1

在我的代码中,我创建了两个声音,sound1和sound2。每个声音包含许多样本,可以同时播放相同的声音。问题是,如果我为每个AVAudioUnitTimePitch创建看起来多于6到8个AVAudioPlayerNodes,则音频会完全混乱。当我将样本数量增加太多时,我甚至无法播放单个声音。我不确定我的代码是否错误,或者AVAudioEngine的节点限制是什么。AVAudioEngine可以创建的节点数是否有限制?

/// Owns the shared AVAudioEngine and the sounds played through it.
/// NOTE(review): AVAudioEngine has no documented hard limit on node count,
/// but attaching many player/time-pitch node pairs degrades audio badly on
/// older devices; if the pool grows, detach nodes after playback finishes.
class AudioManager{
    var audioEngine:AVAudioEngine!;
    var mixer:AVAudioMixerNode!;
    var sound1:Sound!;
    var sound2:Sound!;

    init(){
        audioEngine = AVAudioEngine();
        // Reading mainMixerNode lazily creates the mixer and output nodes
        // and wires mixer -> output.
        mixer = audioEngine.mainMixerNode;

        do{
            try audioEngine.start();
        }catch let e as NSError{
            print("Error Starting AudioEngine \(e)");
        }

        sound1 = Sound(aManager: self, path: "assets/sounds/waterRefill", ofType: "mp3", numOfSamples: 7);
        sound2 = Sound(aManager: self, path: "assets/sounds/balloonCreate", ofType: "mp3", numOfSamples: 2);
    }

    /// Plays sound1 at normal volume and pitch.
    func playSound(){
        sound1.play(1.0, pitch: 1.0);
    }

    /// Plays sound2 at normal volume and pitch.
    func playSound2(){
        sound2.play(1.0, pitch: 1.0);
    }

    /// One audio asset plus a pool of player nodes ("samples") so the same
    /// sound can overlap with itself.
    class Sound {
        var audioManager:AudioManager!;
        var audioFileBuffer:AVAudioPCMBuffer!;
        var numSamples:Int = 1;
        var audioIndex:Int = 0;   // round-robin cursor into sampleList
        var sampleList:[Sample] = [Sample]();

        init(aManager:AudioManager, path:String, ofType:String, numOfSamples:Int){
            audioManager = aManager;
            // Clamp to at least one sample.
            numSamples = numOfSamples < 1 ? 1 : numOfSamples;
            audioFileBuffer = createAudioBuffer(path, ofType: ofType);
            for _ in 0 ..< numSamples {
                sampleList.append(Sample(sound: self));
            }
        }

        /// Loads the bundled audio file into a PCM buffer.
        /// Returns nil when the resource is missing or unreadable.
        /// BUGFIX: the original force-unwrapped pathForResource and crashed
        /// on a missing file even though this function already returns an
        /// optional for exactly that case.
        func createAudioBuffer(path:String, ofType:String)-> AVAudioPCMBuffer?{
            guard let filePath = NSBundle.mainBundle().pathForResource(path, ofType: ofType) else {
                print("Error: missing audio resource \(path).\(ofType)");
                return nil;
            }
            let fileURL: NSURL = NSURL(fileURLWithPath: filePath)
            do{
                let audioFile = try AVAudioFile(forReading: fileURL)
                let audioFormat = audioFile.processingFormat
                let audioFrameCount = UInt32(audioFile.length)
                let audioFileBuffer = AVAudioPCMBuffer(PCMFormat: audioFormat, frameCapacity: audioFrameCount)
                do{
                    try audioFile.readIntoBuffer(audioFileBuffer)
                    return audioFileBuffer;
                }catch let e as NSError{
                    print("Error loading Audio Into Buffer: \(e)");
                }
            }catch let e as NSError{
                print("Error loading Audio File: \(e)");
            }
            return nil;
        }

        // Advances the round-robin cursor, wrapping at numSamples.
        private func runIndex(){
            audioIndex = (audioIndex + 1) % numSamples;
        }

        /// Plays this sound on the first idle sample, if any.
        /// Scans at most numSamples players and silently drops the request
        /// when every player is busy (original behavior preserved).
        func play(volume:Float, pitch:Float){
            var scanned:Int = 0;
            while(scanned < numSamples){
                if(numSamples > 1){
                    runIndex();
                }
                let sample = sampleList[audioIndex];
                if (!sample.pitchPlayer.playing) {
                    sample.volume = volume;
                    sample.pitch = pitch;
                    sample.playSample();
                    break;
                }
                scanned += 1;
            }
        }

        /// One AVAudioPlayerNode -> AVAudioUnitTimePitch chain attached to
        /// the shared engine.
        class Sample{
            // BUGFIX: weak back-reference breaks the Sound <-> Sample
            // retain cycle (Sound.sampleList strongly holds Sample, Sample
            // strongly held Sound) that leaked every node pair.
            weak var parentSound:Sound!;
            var pitchPlayer:AVAudioPlayerNode!;
            var timePitch:AVAudioUnitTimePitch!;
            var volume:Float = 1.0
            var pitch:Float = 1.0

            init(sound:Sound){
                parentSound = sound;
                pitchPlayer = AVAudioPlayerNode();
                timePitch = AVAudioUnitTimePitch();

                parentSound.audioManager.audioEngine.attachNode(pitchPlayer);
                parentSound.audioManager.audioEngine.attachNode(timePitch);

                // player -> timePitch -> mixer, all in the file's own format.
                parentSound.audioManager.audioEngine.connect(pitchPlayer, to: timePitch, format: parentSound.audioFileBuffer.format);
                parentSound.audioManager.audioEngine.connect(timePitch, to: parentSound.audioManager.mixer, format: parentSound.audioFileBuffer.format);
            }

            /// Applies the pending volume/pitch and plays the buffer once.
            func playSample(){
                pitchPlayer.volume = volume;
                timePitch.pitch = pitch;
                print("Sample Play");

                // BUGFIX: schedule the buffer BEFORE calling play() so the
                // player has data queued when it starts rendering.
                pitchPlayer.scheduleBuffer(parentSound.audioFileBuffer, atTime: nil, options:.Interrupts, completionHandler: {[weak self]() in
                    // BUGFIX: the completion handler can fire while the
                    // engine is still using the node, so stop() is deferred
                    // to the main queue; [weak self] replaces [unowned self]
                    // so a late callback cannot crash on a freed Sample.
                    dispatch_async(dispatch_get_main_queue()){
                        guard let sample = self else { return; }
                        print("Is Stopped: \(sample.pitchPlayer.playing)");
                        sample.pitchPlayer.stop();
                        print("Is Stopped: \(sample.pitchPlayer.playing)");
                    }
                });
                pitchPlayer.play();
            }
        }
    }
}
+0

我的应用程序使用 AVAudioEngine 和 AVAudioPlayerNode 播放文件,但在某些设备上不播放声音(虽然在 iPhone 6s Plus 等新设备上工作正常)。当我减少引擎上的节点数量时,它可以在较旧的设备上运行。我正在研究如何调整我的代码,让它也能在这些较旧的设备上工作,因为节点数似乎正是问题所在。 – vikzilla

回答

0

我从来没有听说过 AVAudioEngine 图中节点数量有任何硬性限制,但我见过在添加几百个节点之后出现非常糟糕的性能。我找到的解决方案是在节点播放完成后将其从引擎中移除。

scheduleBuffer 的完成处理程序是执行移除操作的好地方,但我建议把移除节点的调用用 dispatch_async 包装到主队列上执行,因为音频引擎在调用完成处理程序时可能仍在使用该节点。

另一种选择是在播放完样本后重新使用播放器节点,而不是为下一个样本创建一个新节点,但这种方法可能稍微复杂一点。

相关问题