javascript - WebRTC and WebAudio integration

Tags: javascript, html, streaming, webrtc, web-audio-api

I am trying to integrate a WebRTC remote stream with WebAudio, using wavesurfer (https://github.com/katspaugh/wavesurfer.js) for the visualization. When I attach the local stream, it plays back fine. When I attach the remote stream, the buffer contents are all zeros and I see no events. How can I fix this?
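To rule out the stream itself, here is a minimal diagnostic sketch I run first (it assumes the remote stream is already stored in this.remoteStream_, as in the code below):

// Diagnostic sketch: confirm the remote MediaStream actually carries a live audio track.
var tracks = this.remoteStream_.getAudioTracks();
console.log('remote audio tracks:', tracks.length);
tracks.forEach(function(track) {
    // If readyState is "live" but the WebAudio buffers are still all zeros,
    // the problem is in the audio graph rather than in the stream itself.
    console.log('track', track.id, 'enabled:', track.enabled, 'readyState:', track.readyState);
});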

My code:

if (this.remoteStream_ != null) {
    if (this.wavesurfer_ == null) {
        // First call: create the wavesurfer instance and start streaming.
        var parent = this;
        this.wavesurfer_ = Object.create(WaveSurfer);
        this.wavesurfer_.init({
            container: '#waveform',
            waveColor: '#fff'
        });
        this.wavesurferStream_ = Object.create(WaveSurfer.Streamer);
        this.wavesurferStream_.init({
            wavesurfer: this.wavesurfer_
        });
        // Attach the remote stream to the streamer (the same call the plugin
        // uses for the local microphone stream).
        this.wavesurferStream_.start(this.remoteStream_);
        this.audioWaveIconSet_.on();
    } else {
        // Subsequent call: tear everything down.
        if (this.wavesurferStream_ != null) {
            this.wavesurferStream_.destroy();
            this.wavesurferStream_ = null;
        }
        this.wavesurfer_.destroy();
        this.wavesurfer_ = null;
        this.audioWaveIconSet_.off();
    }
}
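For context, this.remoteStream_ is the stream delivered by the peer connection. A rough sketch of how it gets set (the peerConnection_ name is purely illustrative here; onaddstream was the prevailing API at the time):

// Sketch: capturing the remote stream from the RTCPeerConnection.
// peerConnection_ is illustrative; onaddstream was the API in use at the time.
var self = this;
this.peerConnection_.onaddstream = function(event) {
    self.remoteStream_ = event.stream;
    // ...then the WaveSurfer setup above runs.
};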

The wavesurfer plugin used for streaming:

/*! wavesurfer.js 1.0.57 (Thu, 25 Feb 2016 17:09:20 GMT)
 * https://github.com/katspaugh/wavesurfer.js
 * @license CC-BY-3.0 */
! function(a, b) {
    "function" == typeof define && define.amd ? define(["wavesurfer"], function(a) {
        return b(a)
    }) : "object" == typeof exports ? module.exports = b(require("wavesurfer.js")) : b(WaveSurfer)
}(this, function(a) {
    "use strict";
    a.Streamer = {
        init: function(a) {
            this.params = a;
            this.wavesurfer = a.wavesurfer;
            if (!this.wavesurfer) throw new Error("No WaveSurfer instance provided");
            this.active = !1, this.paused = !1, this.reloadBufferFunction = this.reloadBuffer.bind(this);
            this.bufferSize = this.params.bufferSize || 4096, this.numberOfInputChannels = this.params.numberOfInputChannels || 1, this.numberOfOutputChannels = this.params.numberOfOutputChannels || 1, this.micContext = this.wavesurfer.backend.getAudioContext();
        },
        start: function(stream) {
            this.gotStream(stream);
        },
        togglePlay: function() {
            this.active ? (this.paused = !this.paused, this.paused ? this.pause() : this.play()) : this.start()
        },
        play: function() {
            this.paused = !1, this.connect()
        },
        pause: function() {
            this.paused = !0, this.disconnect()
        },
        stop: function() {
            this.active && (this.stopDevice(), this.wavesurfer.empty())
        },
        stopDevice: function() {},
        connect: function() {
            void 0 !== this.stream && (this.mediaStreamSource = this.micContext.createMediaStreamSource(this.stream), this.levelChecker = this.micContext.createScriptProcessor(this.bufferSize, this.numberOfInputChannels, this.numberOfOutputChannels), this.mediaStreamSource.connect(this.levelChecker), this.levelChecker.connect(this.micContext.destination), this.levelChecker.onaudioprocess = this.reloadBufferFunction)
        },
        disconnect: function() {
            void 0 !== this.mediaStreamSource && this.mediaStreamSource.disconnect(), void 0 !== this.levelChecker && (this.levelChecker.disconnect(), this.levelChecker.onaudioprocess = void 0)
        },
        reloadBuffer: function(a) {
            this.paused || (this.wavesurfer.empty(), this.wavesurfer.loadDecodedBuffer(a.inputBuffer))
        },
        gotStream: function(a) {
            this.stream = a, this.active = !0, this.play()
        },
        destroy: function(a) {
            this.paused = !0, this.stop()
        },
        deviceError: function(a) {},
        extractVersion: function(a, b, c) {
            var d = a.match(b);
            return d && d.length >= c && parseInt(d[c], 10)
        },
        detectBrowser: function() {
            var a = {};
            return a.browser = null, a.version = null, a.minVersion = null, "undefined" != typeof window && window.navigator ? navigator.mozGetUserMedia ? (a.browser = "firefox", a.version = this.extractVersion(navigator.userAgent, /Firefox\/([0-9]+)\./, 1), a.minVersion = 31, a) : navigator.webkitGetUserMedia && window.webkitRTCPeerConnection ? (a.browser = "chrome", a.version = this.extractVersion(navigator.userAgent, /Chrom(e|ium)\/([0-9]+)\./, 2), a.minVersion = 38, a) : navigator.mediaDevices && navigator.userAgent.match(/Edge\/(\d+).(\d+)$/) ? (a.browser = "edge", a.version = this.extractVersion(navigator.userAgent, /Edge\/(\d+).(\d+)$/, 2), a.minVersion = 10547, a) : (a.browser = "Not a supported browser.", a) : (a.browser = "Not a supported browser.", a)
        }
    }, a.util.extend(a.Streamer, a.Observer)
});

Best Answer

This is a known Chrome issue: Hook up Web Audio API with WebRTC for audio processing

After a long wait, I believe it was finally fixed in version 49. Please try updating your Chrome.

You can check whether it works for you with this demo app.
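If you cannot update right away, here is a minimal sketch to check whether your build is affected (remoteStream stands for the stream received from the RTCPeerConnection; an AnalyserNode stands in for the wavesurfer plumbing):

// Route the remote stream into an AnalyserNode and check whether the samples
// are still all zeros. On affected Chrome versions (before 49) a remote
// MediaStreamSource produced only silence.
var ctx = new (window.AudioContext || window.webkitAudioContext)();
var source = ctx.createMediaStreamSource(remoteStream); // remoteStream: from the RTCPeerConnection
var analyser = ctx.createAnalyser();
source.connect(analyser);

var data = new Float32Array(analyser.fftSize);
setInterval(function() {
    analyser.getFloatTimeDomainData(data);
    var silent = data.every(function(v) { return v === 0; });
    console.log(silent ? 'still all zeros (bug present)' : 'got audio samples (bug fixed)');
}, 1000);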

Regarding javascript - WebRTC and WebAudio integration, a similar question was found on Stack Overflow: https://stackoverflow.com/questions/35763226/
