WebRTC播放音频输入作为麦克风 [英] WebRTC Play Audio Input as Microphone

查看:888
本文介绍了WebRTC播放音频输入作为麦克风的处理方法,对大家解决问题具有一定的参考价值,需要的朋友们下面随着小编来一起学习吧!

问题描述

我想将我的音频文件作为麦克风输入播放(不发送我的实时声音,而是发送我的音频文件)给WebRTC连接的用户.有人可以告诉我怎么做吗?

I want to play my audio file as microphone input (without sending my live voice but my audio file) to the WebRTC connected user. Can anybody tell me how could it be done?

我已经在JS代码中进行了以下尝试,例如:

I have done some following tries in the JS code, like:

<script>
var base64string = "T2dnUwACAAAAAAA..";
var snd = new Audio("data:audio/wav;base64," + base64string);
snd.play();
var Sound = (function () {
var df = document.createDocumentFragment();
return function Sound(src) {
    var snd = new Audio(src);
    df.appendChild(snd); 
    snd.addEventListener('ended', function () {df.removeChild(snd);});
    snd.play();
    return snd;
}
}());

var snd = Sound("data:audio/wav;base64," + base64string);
</script>

2. AudioBuffer

window.AudioContext = window.AudioContext || window.webkitAudioContext;

var audioContext = new AudioContext();
var isPlaying = false;
var sourceNode = null;
var theBuffer = null;

// On page load, fetch the demo WAV as an ArrayBuffer and decode it into
// `theBuffer`, which togglePlayback() plays later.
window.onload = function () {
    var xhr = new XMLHttpRequest();
    xhr.open("GET", "sounds/DEMO_positive_resp.wav", true);
    xhr.responseType = "arraybuffer";
    xhr.onload = function () {
        audioContext.decodeAudioData(xhr.response, function (buffer) {
            theBuffer = buffer;
        });
    };
    xhr.send();
};

/**
 * Toggle looping playback of the previously decoded `theBuffer` through
 * an AnalyserNode into the speakers.
 * Returns the label the caller should show on its toggle button:
 * "stop" while playing, "start" after stopping.
 *
 * NOTE(review): `analyser` and `isLiveInput` are assigned without any
 * visible declaration, so they become implicit globals — they should be
 * declared with `var` alongside `sourceNode`/`isPlaying`.
 * NOTE(review): if the XHR in window.onload has not finished yet,
 * `theBuffer` is still null and start() plays nothing.
 */
function togglePlayback() {
        var now = audioContext.currentTime;

        if (isPlaying) {
            //stop playing and return
            sourceNode.stop( now );
            sourceNode = null;
            analyser = null;
            isPlaying = false;
            // Polyfill cancelAnimationFrame for older WebKit browsers.
            if (!window.cancelAnimationFrame)
                window.cancelAnimationFrame = window.webkitCancelAnimationFrame;
            //window.cancelAnimationFrame( rafID );
            return "start";
        }

        // Not playing: build a fresh looping source each time (buffer
        // sources are one-shot and cannot be restarted after stop()).
        sourceNode = audioContext.createBufferSource();
        sourceNode.buffer = theBuffer;
        sourceNode.loop = true;

        // Route source -> analyser -> speakers.
        analyser = audioContext.createAnalyser();
        analyser.fftSize = 2048;
        sourceNode.connect( analyser );
        analyser.connect( audioContext.destination );
        sourceNode.start( now );
        isPlaying = true;
        isLiveInput = true;
        return "stop";
    }

在这种情况下,请帮助我.这将是非常赞赏的.

Please help me out in this case. It would be highly appreciable.

推荐答案

以下是一个演示,可以帮助您使用chrome流mp3或wav:

  • https://www.webrtc-experiment.com/RTCMultiConnection/stream-mp3-live.html
  • Here is a demo that may help you stream mp3 or wav using chrome:

    • https://www.webrtc-experiment.com/RTCMultiConnection/stream-mp3-live.html
      • https://github.com/muaz-khan/RTCMultiConnection/blob/master/demos/stream-mp3-live.html
      • https://github.com/muaz-khan/WebRTC-Experiment/issues/222
      window.AudioContext = window.AudioContext || window.webkitAudioContext;

      var context = new AudioContext();
      var gainNode = context.createGain();
      gainNode.connect(context.destination);

      // Mute local monitoring so the sender does not hear the file himself;
      // the signal still reaches the MediaStream destination created below.
      gainNode.gain.value = 0;

      document.querySelector('input[type=file]').onchange = function() {
          this.disabled = true;

          var fileReader = new FileReader();
          fileReader.onload = function(e) {
              // Decode the raw file bytes into a PCM AudioBuffer.
              context.decodeAudioData(e.target.result, function(decoded) {
                  var source = context.createBufferSource();

                  source.buffer = decoded;
                  source.start(0, 0);
                  source.connect(gainNode);

                  // Route the same source into a MediaStream so WebRTC can
                  // send it as if it were a microphone track.
                  var streamDest = context.createMediaStreamDestination();
                  source.connect(streamDest);

                  createPeerConnection(streamDest.stream);
              });
          };

          fileReader.readAsArrayBuffer(this.files[0]);
      };

      function createPeerConnection(mp3Stream) {
          // you need to place 3rd party WebRTC code here
      }
      


      更新时间:2014年8月28日,星期四,下午5:55

      以下是从服务器获取mp3的方法:


      Updated at: 5:55 PM - Thursday, August 28, 2014

      Here is how to get mp3 from server:

      /**
       * Fetch `url` as an ArrayBuffer and pass it to `callback`.
       * On a non-200 status the user is alerted and `callback` is not invoked.
       * @param {string} url - resource to download (e.g. an mp3 file).
       * @param {function(ArrayBuffer)} callback - receives the response body.
       */
      function HTTP_GET(url, callback) {
          var xhr = new XMLHttpRequest();
          xhr.open('GET', url, true);
          xhr.responseType = 'arraybuffer';

          // Register handlers BEFORE send() so no event can ever be missed.
          xhr.onload = function(e) {
              if (xhr.status != 200) {
                  alert("Unexpected status code " + xhr.status + " for " + url);
                  return false;
              }

              callback(xhr.response); // return array-buffer
          };

          // Surface network-level failures instead of swallowing them silently.
          xhr.onerror = function() {
              alert("Network error while fetching " + url);
          };

          xhr.send();
      }
      
      // invoke above "HTTP_GET" method
      // to load mp3 as array-buffer

      HTTP_GET('http://domain.com/file.mp3', function(arrayBuffer) {

          // Decode the downloaded bytes into a PCM AudioBuffer.
          context.decodeAudioData(arrayBuffer, function(decoded) {
              var source = context.createBufferSource();

              source.buffer = decoded;
              source.start(0, 0);
              source.connect(gainNode);

              // Expose the playing source as a MediaStream for WebRTC.
              var streamDest = context.createMediaStreamDestination();
              source.connect(streamDest);

              createPeerConnection(streamDest.stream);
          });
      });
      

      这篇关于WebRTC播放音频输入作为麦克风的文章就介绍到这了,希望我们推荐的答案对大家有所帮助,也希望大家多多支持IT屋!

查看全文
登录 关闭
扫码关注1秒登录
发送“验证码”获取 | 15天全站免登陆