I have the following code for generating an audio tone of a given frequency and duration. It's loosely based on this answer for doing the same thing on Android (thanks: @Steve Pomeroy):
https://stackoverflow.com/a/3731075/973364
import Foundation
import CoreAudio
import AVFoundation
import Darwin

class AudioUtil {

    class func play(frequency: Int, durationMs: Int) -> Void {
        let sampleRateHz: Double = 8000.0
        let numberOfSamples = Int((Double(durationMs) / 1000 * sampleRateHz))
        let factor: Double = 2 * M_PI / (sampleRateHz / Double(frequency))

        // Generate an array of Doubles.
        var samples = [Double](count: numberOfSamples, repeatedValue: 0.0)

        for i in 1..<numberOfSamples {
            let sample = sin(factor * Double(i))
            samples[i] = sample
        }

        // Convert to a 16 bit PCM sound array.
        var index = 0
        var sound = [Byte](count: 2 * numberOfSamples, repeatedValue: 0)

        for doubleValue in samples {
            // Scale to maximum amplitude. Int16.max is 32,767.
            var value = Int16(doubleValue * Double(Int16.max))

            // In a 16 bit wav PCM, the first byte is the low order byte.
            var firstByte = Int16(value & 0x00ff)
            var secondByteHighOrderBits = Int32(value) & 0xff00
            var secondByte = Int16(secondByteHighOrderBits >> 8) // Right shift.

            // println("\(doubleValue) -> \(value) -> \(firstByte), \(secondByte)")

            sound[index++] = Byte(firstByte)
            sound[index++] = Byte(secondByte)
        }

        let format = AVAudioFormat(commonFormat: AVAudioCommonFormat.PCMFormatInt16, sampleRate: sampleRateHz, channels: AVAudioChannelCount(1), interleaved: false)
        let buffer = AudioBuffer(mNumberChannels: 1, mDataByteSize: UInt32(sound.count), mData: &sound)
        let pcmBuffer = AVAudioPCMBuffer(PCMFormat: format, frameCapacity: AVAudioFrameCount(sound.count))
        let audioEngine = AVAudioEngine()
        let audioPlayer = AVAudioPlayerNode()

        audioEngine.attachNode(audioPlayer)
        // Runtime error occurs here:
        audioEngine.connect(audioPlayer, to: audioEngine.mainMixerNode, format: format)
        audioEngine.startAndReturnError(nil)

        audioPlayer.play()
        audioPlayer.scheduleBuffer(pcmBuffer, atTime: nil, options: nil, completionHandler: nil)
    }
}
The error I get at runtime when calling connect() on the AVAudioEngine is this:
ERROR: [0x3bfcb9dc] AVAudioNode.mm:521: AUSetFormat: error -10868
*** Terminating app due to uncaught exception 'com.apple.coreaudio.avfaudio', reason: 'error -10868'
Aren't I generating AVAudioCommonFormat.PCMFormatInt16?
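(For reference: -10868 is kAudioUnitErr_FormatNotSupported. It seems the mainMixerNode's input bus only accepts the engine's canonical format, which is deinterleaved 32-bit float rather than Int16. A minimal diagnostic sketch, assuming the same Swift/AVFoundation versions as above: ask the mixer what it wants and connect with that format instead of a hand-built one.)

    let audioEngine = AVAudioEngine()
    let audioPlayer = AVAudioPlayerNode()
    audioEngine.attachNode(audioPlayer)

    let mixer = audioEngine.mainMixerNode
    // Inspect the format the mixer actually uses; typically deinterleaved Float32.
    let mixerFormat = mixer.outputFormatForBus(0)
    println("Mixer bus 0 format: \(mixerFormat)")

    // Connecting with the mixer's own format avoids AUSetFormat failing with -10868.
    audioEngine.connect(audioPlayer, to: mixer, format: mixerFormat)

(If the samples must start life as Int16, they'd have to be converted to Float32 before being scheduled into the buffer.)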
[Edit]
Here's another, simpler attempt that uses only a single buffer, as PCMFormatFloat32. There's no error, but there's no sound either.
import AVFoundation

class AudioManager: NSObject {
    let audioPlayer = AVAudioPlayerNode()

    lazy var audioEngine: AVAudioEngine = {
        let engine = AVAudioEngine()

        // Must happen only once.
        engine.attachNode(self.audioPlayer)

        return engine
    }()

    func play(frequency: Int, durationMs: Int, completionBlock: dispatch_block_t!) {
        var error: NSError?

        var mixer = audioEngine.mainMixerNode
        var sampleRateHz: Float = Float(mixer.outputFormatForBus(0).sampleRate)
        var numberOfSamples = AVAudioFrameCount((Float(durationMs) / 1000 * sampleRateHz))

        var format = AVAudioFormat(commonFormat: AVAudioCommonFormat.PCMFormatFloat32, sampleRate: Double(sampleRateHz), channels: AVAudioChannelCount(1), interleaved: false)

        var buffer = AVAudioPCMBuffer(PCMFormat: format, frameCapacity: numberOfSamples)
        buffer.frameLength = numberOfSamples

        // Generate sine wave
        for var i = 0; i < Int(buffer.frameLength); i++ {
            var val = sinf(Float(frequency) * Float(i) * 2 * Float(M_PI) / sampleRateHz)

            // log.debug("val: \(val)")
            buffer.floatChannelData.memory[i] = val * 0.5
        }

        // Audio engine
        audioEngine.connect(audioPlayer, to: mixer, format: format)

        log.debug("Sample rate: \(sampleRateHz), samples: \(numberOfSamples), format: \(format)")

        if !audioEngine.startAndReturnError(&error) {
            log.debug("Error: \(error)")
        }

        // Play player and buffer
        audioPlayer.play()
        audioPlayer.scheduleBuffer(buffer, atTime: nil, options: nil, completionHandler: completionBlock)
    }
}
Thanks to Thomas Royal (http://www.tmroyal.com/playing-sounds-in-swift-audioengine.html).
The problem was that the player was being cleaned up when execution fell out of the play() function, so it never finished (or barely started) playing. Here's a fairly clumsy solution: sleep for the duration of the samples before returning from play(). I'll accept a better answer that avoids this by not letting the player get cleaned up, if anyone wants to post one.
import AVFoundation

class AudioManager: NSObject, AVAudioPlayerDelegate {
    let audioPlayerNode = AVAudioPlayerNode()
    var waveAudioPlayer: AVAudioPlayer?
    var playing: Bool! = false

    lazy var audioEngine: AVAudioEngine = {
        let engine = AVAudioEngine()

        // Must happen only once.
        engine.attachNode(self.audioPlayerNode)

        return engine
    }()

    func playWaveFromBundle(filename: String, durationInSeconds: NSTimeInterval) -> Void {
        var error: NSError?
        var sound = NSURL(fileURLWithPath: NSBundle.mainBundle().pathForResource(filename, ofType: "wav")!)

        self.waveAudioPlayer = AVAudioPlayer(contentsOfURL: sound, error: &error)
        self.waveAudioPlayer!.delegate = self

        AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback, error: &error)

        if error != nil {
            log.error("Error: \(error)")
            return
        }

        log.verbose("Playing \(sound)")

        self.waveAudioPlayer!.prepareToPlay()

        playing = true

        if !self.waveAudioPlayer!.play() {
            log.error("Failed to play")
        }

        // If we don't block here, the player stops as soon as this function returns. While
        // we'd prefer to wait for audioPlayerDidFinishPlaying() to be called here, it's never
        // called if we block here. Instead, pass in the duration of the wave file and simply
        // sleep for that long.
        /*
        while (playing!) {
            NSThread.sleepForTimeInterval(0.1) // seconds
        }
        */
        NSThread.sleepForTimeInterval(durationInSeconds)

        log.verbose("Done")
    }

    func play(frequency: Int, durationInMillis: Int, completionBlock: dispatch_block_t!) -> Void {
        var session = AVAudioSession.sharedInstance()
        var error: NSError?

        if !session.setCategory(AVAudioSessionCategoryPlayAndRecord, error: &error) {
            log.error("Error: \(error)")
            return
        }

        var mixer = audioEngine.mainMixerNode
        var sampleRateHz: Float = Float(mixer.outputFormatForBus(0).sampleRate)
        var numberOfSamples = AVAudioFrameCount((Float(durationInMillis) / 1000 * sampleRateHz))

        var format = AVAudioFormat(commonFormat: AVAudioCommonFormat.PCMFormatFloat32, sampleRate: Double(sampleRateHz), channels: AVAudioChannelCount(1), interleaved: false)

        var buffer = AVAudioPCMBuffer(PCMFormat: format, frameCapacity: numberOfSamples)
        buffer.frameLength = numberOfSamples

        // Generate sine wave
        for var i = 0; i < Int(buffer.frameLength); i++ {
            var val = sinf(Float(frequency) * Float(i) * 2 * Float(M_PI) / sampleRateHz)

            // log.debug("val: \(val)")
            buffer.floatChannelData.memory[i] = val * 0.5
        }

        AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryPlayback, error: &error)

        if error != nil {
            log.error("Error: \(error)")
            return
        }

        // Audio engine
        audioEngine.connect(audioPlayerNode, to: mixer, format: format)

        log.debug("Sample rate: \(sampleRateHz), samples: \(numberOfSamples), format: \(format)")

        if !audioEngine.startAndReturnError(&error) {
            log.error("Error: \(error)")
            return
        }

        // TODO: Check we're not in the background. Attempting to play audio while in the
        // background throws:
        // *** Terminating app due to uncaught exception 'com.apple.coreaudio.avfaudio',
        //     reason: 'error 561015905'

        // Play player and schedule buffer
        audioPlayerNode.play()
        audioPlayerNode.scheduleBuffer(buffer, atTime: nil, options: nil, completionHandler: completionBlock)

        // If we don't block here, the player stops as soon as this function returns.
        // Note: sleepForTimeInterval() takes seconds, so divide the milliseconds by 1000.
        NSThread.sleepForTimeInterval(Double(durationInMillis) / 1000.0) // seconds
    }

    // MARK: AVAudioPlayerDelegate

    func audioPlayerDidFinishPlaying(player: AVAudioPlayer!, successfully flag: Bool) {
        log.verbose("Success: \(flag)")
        playing = false
    }

    func audioPlayerDecodeErrorDidOccur(player: AVAudioPlayer!, error: NSError!) {
        log.verbose("Error: \(error)")
        playing = false
    }

    // MARK: NSObject overrides

    deinit {
        log.verbose("deinit")
    }
}
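If it helps anyone drafting that better answer: one untested sketch is to block on a semaphore that scheduleBuffer()'s completion handler signals, so play() returns when the buffer has actually been consumed rather than after a guessed sleep. Only the end of play() would change, something like:

    // A sketch, not the accepted fix: wait on a semaphore instead of sleeping for a
    // fixed interval. Assumes the same AudioManager as above.
    let semaphore = dispatch_semaphore_create(0)

    audioPlayerNode.play()
    audioPlayerNode.scheduleBuffer(buffer, atTime: nil, options: nil) {
        // Called on an internal audio queue once the buffer has been consumed.
        dispatch_semaphore_signal(semaphore)
    }

    // Block the caller until the scheduled buffer finishes. Don't do this on the main
    // thread; call play() from a background queue instead.
    dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER)

(Caveat: the completion handler fires when the player has consumed the buffer, which can be slightly before the last samples leave the hardware, but it's far closer than a fixed sleep.)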
For context, this AudioManager is a lazily loaded property on my AppDelegate:
lazy var audioManager: AudioManager = {
    return AudioManager()
}()
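A hypothetical call site would then look something like this (the 440 Hz / 1000 ms values are only examples, and the cast assumes a standard UIKit AppDelegate). Since play() blocks until the tone finishes, it's dispatched off the main thread:

    let delegate = UIApplication.sharedApplication().delegate as AppDelegate

    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)) {
        delegate.audioManager.play(440, durationInMillis: 1000) {
            println("Tone finished")
        }
    }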