How do I convert an array of audio data into a wav file?


Problem description

I have a feature that records audio annotations for a user. It uses HTML5 with a flash fallback. I am able to get the audio data from the HTML5 version via getUserMedia(), but the flash fallback provides the data as an array of floats.

I need this data as a wav file, and I can't figure out how to do it. Any help much appreciated!

Solution

// Recorder worker: buffers Float32 audio chunks sent from the main thread
// and encodes them into a 16-bit PCM WAV blob on request.
var recLength = 0,
    recBuffersL = [],
    recBuffersR = [],
    sampleRate;

this.onmessage = function (e) {
switch (e.data.command) {
    case 'init':
        init(e.data.config);
        break;
    case 'record':
        record(e.data.buffer);
        break;
    case 'exportWAV':
        exportWAV(e.data.type);
        break;
    case 'getBuffer':
        getBuffer();
        break;
    case 'clear':
        clear();
        break;
}
};

function init(config) {
sampleRate = config.sampleRate;
}

function record(inputBuffer) {
recBuffersL.push(inputBuffer[0]);
recBuffersR.push(inputBuffer[1]);
recLength += inputBuffer[0].length;
}

function exportWAV(type) {
var bufferL = mergeBuffers(recBuffersL, recLength);
var bufferR = mergeBuffers(recBuffersR, recLength);
var interleaved = interleave(bufferL, bufferR);
var dataview = encodeWAV(interleaved);
var audioBlob = new Blob([dataview], {
    type: type
});

this.postMessage(audioBlob);
}

function getBuffer() {
var buffers = [];
buffers.push(mergeBuffers(recBuffersL, recLength));
buffers.push(mergeBuffers(recBuffersR, recLength));
this.postMessage(buffers);
}

function clear() {
recLength = 0;
recBuffersL = [];
recBuffersR = [];
}

function mergeBuffers(recBuffers, recLength) {
var result = new Float32Array(recLength);
var offset = 0;
for (var i = 0; i < recBuffers.length; i++) {
    result.set(recBuffers[i], offset);
    offset += recBuffers[i].length;
}
return result;
}

function interleave(inputL, inputR) {
var length = inputL.length + inputR.length;
var result = new Float32Array(length);

var index = 0,
    inputIndex = 0;

while (index < length) {
    result[index++] = inputL[inputIndex];
    result[index++] = inputR[inputIndex];
    inputIndex++;
}
return result;
}

function floatTo16BitPCM(output, offset, input) {
for (var i = 0; i < input.length; i++, offset += 2) {
    // clamp each float to [-1, 1] and scale it to a little-endian signed 16-bit integer
    var s = Math.max(-1, Math.min(1, input[i]));
    output.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
}
}

function writeString(view, offset, string) {
for (var i = 0; i < string.length; i++) {
    view.setUint8(offset + i, string.charCodeAt(i));
}
}

function encodeWAV(samples) {
// Allocate the 44-byte canonical WAV header plus 2 bytes per 16-bit sample.
var buffer = new ArrayBuffer(44 + samples.length * 2);
var view = new DataView(buffer);

// RIFF container header; chunk size = total file size minus the 8-byte RIFF header.
writeString(view, 0, 'RIFF');
view.setUint32(4, 36 + samples.length * 2, true);
writeString(view, 8, 'WAVE');

// "fmt " sub-chunk describing the sample format.
writeString(view, 12, 'fmt ');
view.setUint32(16, 16, true);              // fmt chunk length
view.setUint16(20, 1, true);               // audio format: 1 = linear PCM
view.setUint16(22, 2, true);               // channel count (stereo)
view.setUint32(24, sampleRate, true);      // sample rate
view.setUint32(28, sampleRate * 4, true);  // byte rate = sampleRate * channels * bytesPerSample
view.setUint16(32, 4, true);               // block align = channels * bytesPerSample
view.setUint16(34, 16, true);              // bits per sample

// "data" sub-chunk containing the interleaved 16-bit PCM samples.
writeString(view, 36, 'data');
view.setUint32(40, samples.length * 2, true);

floatTo16BitPCM(view, 44, samples);

return view;
}
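
The listing above is intended to run as a Web Worker (it is essentially Recorder.js's recorderWorker.js) and is driven entirely through postMessage commands. Below is a minimal sketch of how a main-thread page could feed it audio and request the WAV blob; the worker file name recorderWorker.js, the ScriptProcessorNode buffer size, and the stream variable are assumptions for illustration, not part of the original answer.

// Hypothetical main-thread wiring; names and buffer size are illustrative.
var worker = new Worker('recorderWorker.js');

var context = new AudioContext();
var source = context.createMediaStreamSource(stream); // stream obtained via getUserMedia()
var node = context.createScriptProcessor(4096, 2, 2); // deprecated but widely supported

// Tell the worker which sample rate to write into the WAV header.
worker.postMessage({ command: 'init', config: { sampleRate: context.sampleRate } });

// Push each audio callback's left/right channel data to the worker.
node.onaudioprocess = function (e) {
    worker.postMessage({
        command: 'record',
        buffer: [
            e.inputBuffer.getChannelData(0),
            e.inputBuffer.getChannelData(1)
        ]
    });
};

source.connect(node);
node.connect(context.destination);

// When recording is done, request the encoded file.
worker.onmessage = function (e) {
    var wavBlob = e.data; // the Blob posted back by exportWAV()
};
worker.postMessage({ command: 'exportWAV', type: 'audio/wav' });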

Usage:

var AudioContext = win.AudioContext || win.webkitAudioContext, // prefer the unprefixed constructor, fall back to the webkit-prefixed one
    recorder, audioContext;


function recordAudio() {
    if (!config.stream) {
        alert('No audio.');
        return;
    }
    initAudioRecorder(config.audioWorkerPath);
    audioContext = new AudioContext;
    var mediaStreamSource = audioContext.createMediaStreamSource(config.stream);

    mediaStreamSource.connect(audioContext.destination);
    recorder = new window.Recorder(mediaStreamSource);

    recorder && recorder.record();
}

function stopAudioRecording() {
    console.warn('Audio recording stopped');
    recorder && recorder.stop();
    recorder && recorder.exportWAV(function (blob) {
        fileType = 'wav';
        setBlob(blob);
    });
    recorder && recorder.clear();
}

var writer;

function setBlob(blob) {
    blobURL = blob;

    var config = {
        blob: blobURL,
        type: 'audio/wav',
        fileName: (Math.random() * 1000 << 1000) + '.' + fileType,
        size: blobURL.length
    };
    writer = RecordRTCFileWriter(config);

    var reader = new win.FileReader();
    reader.readAsDataURL(blobURL);
    reader.onload = function (event) {
        blobURL2 = event.target.result;
    };
}

return {
    stopAudio: stopAudioRecording,
    stopVideo: stopVideoRecording,
    recordVideo: recordVideo,
    recordAudio: recordAudio,
    save: saveToDisk,
    getBlob: function () {
        return blobURL2;
    },
    toURL: function () {
        return writer.toURL();
    }
};
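
Once exportWAV hands back the Blob, the browser can play it or save it as a .wav file without any server round trip. A small sketch, assuming the Blob is in a variable named wavBlob (that name is not from the original answer):

var url = URL.createObjectURL(wavBlob);

// Play it back in the page
var audio = new Audio();
audio.src = url;
audio.play();

// Or offer it as a download
var link = document.createElement('a');
link.href = url;
link.download = 'annotation.wav';
document.body.appendChild(link);
link.click();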
