Crash when recording: "required condition is false: format.sampleRate == hwFormat.sampleRate" after a WebRTC call


Problem description

My recording works normally, but after a WebRTC call I get a crash:

required condition is false: format.sampleRate == hwFormat.sampleRate

Here is how I start recording and install the tap:

func startRecord() {
        self.filePath = nil
        
        print("last format: \(audioEngine.inputNode.inputFormat(forBus: 0).sampleRate)")
        let session = AVAudioSession.sharedInstance()
        do {
            try session.setCategory(.playAndRecord, options: .mixWithOthers)
        } catch {
            print("======== Error setting setCategory \(error.localizedDescription)")
        }
        do {
            try session.setPreferredSampleRate(44100.0)
        } catch {
            print("======== Error setting rate \(error.localizedDescription)")
        }
        do {
            try session.setPreferredIOBufferDuration(0.005)
        } catch {
            print("======== Error IOBufferDuration \(error.localizedDescription)")
        }
        do {
            try session.setActive(true, options: .notifyOthersOnDeactivation)
        } catch {
            print("========== Error starting session \(error.localizedDescription)")
        }
        let format = AVAudioFormat(commonFormat: AVAudioCommonFormat.pcmFormatInt16,
            sampleRate: 44100.0,
//            sampleRate: audioEngine.inputNode.inputFormat(forBus: 0).sampleRate,
            channels: 1,
            interleaved: true)
        audioEngine.connect(audioEngine.inputNode, to: mixer, format: format)
        audioEngine.connect(mixer, to: audioEngine.mainMixerNode, format: format)

        let dir = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true).first! as String
        filePath =  dir.appending("/\(UUID.init().uuidString).wav")

        _ = ExtAudioFileCreateWithURL(URL(fileURLWithPath: filePath!) as CFURL,
            kAudioFileWAVEType,(format?.streamDescription)!,nil,AudioFileFlags.eraseFile.rawValue,&outref)

        mixer.installTap(onBus: 0, bufferSize: AVAudioFrameCount((format?.sampleRate)!), format: format, block: { (buffer: AVAudioPCMBuffer!, time: AVAudioTime!) -> Void in

            let audioBuffer : AVAudioBuffer = buffer
            _ = ExtAudioFileWrite(self.outref!, buffer.frameLength, audioBuffer.audioBufferList)
        })

        try! audioEngine.start()
        startMP3Rec(path: filePath!, rate: 128)
    }

    func stopRecord() {

        self.audioFilePlayer.stop()
        self.audioEngine.stop()
        self.mixer.removeTap(onBus: 0)

        self.stopMP3Rec()
        ExtAudioFileDispose(self.outref!)

        try? AVAudioSession.sharedInstance().setActive(false)
    }
    
    func startMP3Rec(path: String, rate: Int32) {

        self.isMP3Active = true
        var total = 0
        var read = 0
        var write: Int32 = 0

        let mp3path = path.replacingOccurrences(of: "wav", with: "mp3")
        var pcm: UnsafeMutablePointer<FILE> = fopen(path, "rb")
        fseek(pcm, 4*1024, SEEK_CUR)
        let mp3: UnsafeMutablePointer<FILE> = fopen(mp3path, "wb")
        let PCM_SIZE: Int = 8192
        let MP3_SIZE: Int32 = 8192
        let pcmbuffer = UnsafeMutablePointer<Int16>.allocate(capacity: Int(PCM_SIZE*2))
        let mp3buffer = UnsafeMutablePointer<UInt8>.allocate(capacity: Int(MP3_SIZE))

        let lame = lame_init()
        lame_set_num_channels(lame, 1)
        lame_set_mode(lame, MONO)
        lame_set_in_samplerate(lame, 44100)
        lame_set_brate(lame, rate)
        lame_set_VBR(lame, vbr_off)
        lame_init_params(lame)

        DispatchQueue.global(qos: .default).async {
            while true {
                pcm = fopen(path, "rb")
                fseek(pcm, 4*1024 + total, SEEK_CUR)
                read = fread(pcmbuffer, MemoryLayout<Int16>.size, PCM_SIZE, pcm)
                if read != 0 {
                    write = lame_encode_buffer(lame, pcmbuffer, nil, Int32(read), mp3buffer, MP3_SIZE)
                    fwrite(mp3buffer, Int(write), 1, mp3)
                    total += read * MemoryLayout<Int16>.size
                    fclose(pcm)
                } else if !self.isMP3Active {
                    _ = lame_encode_flush(lame, mp3buffer, MP3_SIZE)
                    _ = fwrite(mp3buffer, Int(write), 1, mp3)
                    break
                } else {
                    fclose(pcm)
                    usleep(50)
                }
            }
            lame_close(lame)
            fclose(mp3)
            fclose(pcm)
            self.filePathMP3 = mp3path
        }
    }
    
    func stopMP3Rec() {
        self.isMP3Active = false
    }

The first time the app runs, I log the last format using:

print("last format: \(audioEngine.inputNode.inputFormat(forBus: 0).sampleRate)")

--> it returns 0 -> recording works normally; the next time it returns 44100 -> recording still works normally

But after a WebRTC call I get 48000, and then it crashes on this line:

self.audioEngine.connect(self.audioEngine.inputNode, to: self.mixer, format: format)
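
Just to illustrate what I think is going on (this check is my own sketch, not part of my recording code): the format passed to connect/installTap on the input node apparently has to match the input node's current hardware format, and after the WebRTC call that hardware format is 48000 Hz while I still ask for 44100:

    // Sketch (my assumption about the crash): the sample rate of the format
    // used with connect/installTap on the input node has to match the
    // hardware input format, which becomes 48000 Hz after the WebRTC call.
    let hwFormat = audioEngine.inputNode.inputFormat(forBus: 0)
    print("hardware rate: \(hwFormat.sampleRate), session rate: \(AVAudioSession.sharedInstance().sampleRate)")
    if hwFormat.sampleRate != 44100 {
        print("mismatch -> connecting with a hard-coded 44100 Hz format will hit the assertion")
    }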

I spent 4 hours on Stack Overflow, but none of the solutions there worked for me.

I don't want the 48000 format, because when I set the sample rate to

sampleRate: audioEngine.inputNode.inputFormat(forBus: 0).sampleRate,

--> the output is hard to hear; I can barely recognize my own voice :(

So I think 44100 is the best choice.

Can someone give me some advice? Thanks.

Answer

Here is the downsampling part, made concrete for your case: install the tap using the input node's own hardware format, then convert each captured buffer to 44100 Hz with an AVAudioConverter.

    let bus = 0
    let inputNode = audioEngine.inputNode

    // Tap the input node in its own hardware format (e.g. 48000 Hz after the WebRTC call)
    let inputFormat = inputNode.outputFormat(forBus: bus)

    // Target format: 44100 Hz, mono
    let outputFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32, sampleRate: 44100, channels: 1, interleaved: true)!

    let converter = AVAudioConverter(from: inputFormat, to: outputFormat)!

    inputNode.installTap(onBus: bus, bufferSize: 1024, format: inputFormat) { (buffer: AVAudioPCMBuffer, when: AVAudioTime) in

        var newBufferAvailable = true

        // Feed the captured buffer to the converter exactly once per tap callback
        let inputCallback: AVAudioConverterInputBlock = { inNumPackets, outStatus in
            if newBufferAvailable {
                outStatus.pointee = .haveData
                newBufferAvailable = false
                return buffer
            } else {
                outStatus.pointee = .noDataNow
                return nil
            }
        }

        let convertedBuffer = AVAudioPCMBuffer(pcmFormat: outputFormat,
            frameCapacity: AVAudioFrameCount(outputFormat.sampleRate) * buffer.frameLength / AVAudioFrameCount(buffer.format.sampleRate))!

        var error: NSError?
        let status = converter.convert(to: convertedBuffer, error: &error, withInputFrom: inputCallback)

        // convertedBuffer now holds 44100 Hz audio
        print(convertedBuffer.format)
    }
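
To tie this back to the recording code in the question, here is a minimal sketch (my own assembly, not part of the original answer; the helper name startConvertedRecording and its parameters are made up for illustration): it creates the WAV file with the converted 44100 Hz format and writes each converted buffer inside the tap.

    import AVFoundation
    import AudioToolbox

    // Minimal sketch, assuming the same AVAudioEngine-based setup as the question.
    // `startConvertedRecording` and its parameters are hypothetical names.
    func startConvertedRecording(audioEngine: AVAudioEngine, filePath: String) -> ExtAudioFileRef? {
        let inputNode = audioEngine.inputNode
        let inputFormat = inputNode.outputFormat(forBus: 0)   // hardware format, e.g. 48000 Hz after a WebRTC call
        let outputFormat = AVAudioFormat(commonFormat: .pcmFormatFloat32,
                                         sampleRate: 44100,
                                         channels: 1,
                                         interleaved: true)!
        let converter = AVAudioConverter(from: inputFormat, to: outputFormat)!

        // Create the WAV file with the *converted* 44100 Hz format, not the hardware format.
        var fileRef: ExtAudioFileRef?
        _ = ExtAudioFileCreateWithURL(URL(fileURLWithPath: filePath) as CFURL,
                                      kAudioFileWAVEType,
                                      outputFormat.streamDescription,
                                      nil,
                                      AudioFileFlags.eraseFile.rawValue,
                                      &fileRef)

        // Tap in the hardware format so the sample-rate assertion cannot fire.
        inputNode.installTap(onBus: 0, bufferSize: 1024, format: inputFormat) { buffer, _ in
            var newBufferAvailable = true
            let inputCallback: AVAudioConverterInputBlock = { _, outStatus in
                if newBufferAvailable {
                    outStatus.pointee = .haveData
                    newBufferAvailable = false
                    return buffer
                } else {
                    outStatus.pointee = .noDataNow
                    return nil
                }
            }

            let capacity = AVAudioFrameCount(outputFormat.sampleRate) * buffer.frameLength
                / AVAudioFrameCount(buffer.format.sampleRate)
            guard let convertedBuffer = AVAudioPCMBuffer(pcmFormat: outputFormat, frameCapacity: capacity) else { return }

            var error: NSError?
            let status = converter.convert(to: convertedBuffer, error: &error, withInputFrom: inputCallback)
            if status != .error, let file = fileRef {
                // Write the 44100 Hz buffer to the WAV file, as the question's tap block did.
                _ = ExtAudioFileWrite(file, convertedBuffer.frameLength, convertedBuffer.audioBufferList)
            }
        }

        try? audioEngine.start()
        return fileRef   // keep this around to call ExtAudioFileDispose(_:) when stopping
    }

The point of this arrangement is that the file writing no longer depends on the hardware rate: the input node keeps its 48000 Hz format, and the converter produces the 44100 Hz data for the file.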
