javascript - How do I add audio with ffmpeg when recording video from the browser and streaming it to YouTube/Twitch?

Tags: javascript node.js ffmpeg live-streaming

I have a web app I'm working on that lets users stream video from their browser while relaying it to YouTube and Twitch at the same time with ffmpeg. The app works fine when I don't need to send any audio. Currently, when I try to record both video and audio, I get the following error. I'm new to ffmpeg, so any help would be much appreciated. Here is my repo if needed: https://github.com/toshvelaga/livestream

Node Server

Here is my node.js server with ffmpeg:

const child_process = require('child_process') // To be used later for running FFmpeg
const express = require('express')
const http = require('http')
const WebSocketServer = require('ws').Server
const NodeMediaServer = require('node-media-server')
const app = express()
const cors = require('cors')
const path = require('path')
const logger = require('morgan')
require('dotenv').config()

app.use(logger('dev'))
app.use(cors())

app.use(express.json({ limit: '200mb', extended: true }))
app.use(
  express.urlencoded({ limit: '200mb', extended: true, parameterLimit: 50000 })
)

var authRouter = require('./routes/auth')
var compareCodeRouter = require('./routes/compareCode')

app.use('/', authRouter)
app.use('/', compareCodeRouter)

if (process.env.NODE_ENV === 'production') {
  // serve static content
  // npm run build
  app.use(express.static(path.join(__dirname, 'client/build')))

  app.get('*', (req, res) => {
    res.sendFile(path.join(__dirname, 'client/build', 'index.html'))
  })
}

const PORT = process.env.PORT || 8080

app.listen(PORT, () => {
  console.log(`Server is starting on port ${PORT}`)
})

const server = http.createServer(app).listen(3000, () => {
  console.log('Listening on PORT 3000...')
})


const wss = new WebSocketServer({
  server: server,
})

wss.on('connection', (ws, req) => {
  const ffmpeg = child_process.spawn('ffmpeg', [
    // works fine when I use this but when I need audio problems arise
    // '-f',
    // 'lavfi',
    // '-i',
    // 'anullsrc',

    '-i',
    '-', // read the recorded WebM stream from stdin

    // first output: remux to FLV and copy the codecs to Twitch
    '-f',
    'flv',
    '-c',
    'copy',
    `${process.env.TWITCH_STREAM_ADDRESS}`,

    // second output: remux to FLV and copy the codecs to YouTube
    '-f',
    'flv',
    '-c',
    'copy',
    `${process.env.YOUTUBE_STREAM_ADDRESS}`,
    // '-f',
    // 'flv',
    // '-c',
    // 'copy',
    // `${process.env.FACEBOOK_STREAM_ADDRESS}`,
  ])

  ffmpeg.on('close', (code, signal) => {
    console.log(
      'FFmpeg child process closed, code ' + code + ', signal ' + signal
    )
    ws.terminate()
  })

  ffmpeg.stdin.on('error', (e) => {
    console.log('FFmpeg STDIN Error', e)
  })

  ffmpeg.stderr.on('data', (data) => {
    console.log('FFmpeg STDERR:', data.toString())
  })

  ws.on('message', (msg) => {
    console.log('DATA', msg)
    ffmpeg.stdin.write(msg)
  })

  ws.on('close', (e) => {
    console.log('kill: SIGINT')
    ffmpeg.kill('SIGINT')
  })
})

const config = {
  rtmp: {
    port: 1935,
    chunk_size: 60000,
    gop_cache: true,
    ping: 30,
    ping_timeout: 60,
  },
  http: {
    port: 8000,
    allow_origin: '*',
  },
}

var nms = new NodeMediaServer(config)
nms.run()
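
One detail of the ws.on('message') handler above that is easy to miss: ffmpeg.stdin.write() returns false once Node's internal buffer for the child's stdin fills up. A minimal, hedged sketch of honouring that backpressure inside the same wss.on('connection') callback (ws and ffmpeg are the objects from the code above; the pending queue is a hypothetical addition for illustration):

// Sketch only: queue chunks while ffmpeg's stdin buffer is full, flush them on 'drain'.
const pending = []
let stdinWritable = true

ffmpeg.stdin.on('drain', () => {
  stdinWritable = true
  while (pending.length && stdinWritable) {
    stdinWritable = ffmpeg.stdin.write(pending.shift())
  }
})

ws.on('message', (msg) => {
  if (stdinWritable) {
    // write() returns false when the internal buffer is full
    stdinWritable = ffmpeg.stdin.write(msg)
  } else {
    pending.push(msg)
  }
})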
Here is the frontend code that records the video/audio and sends it to the server:
import React, { useState, useEffect, useRef } from 'react'
import Navbar from '../../components/Navbar/Navbar'
import './Dashboard.css'

const CAPTURE_OPTIONS = {
  audio: true,
  video: true,
}

function Dashboard() {
  const [mute, setMute] = useState(false)
  const videoRef = useRef()
  const ws = useRef()
  const mediaStream = useUserMedia(CAPTURE_OPTIONS)

  let liveStream
  let liveStreamRecorder

  if (mediaStream && videoRef.current && !videoRef.current.srcObject) {
    videoRef.current.srcObject = mediaStream
  }

  const handleCanPlay = () => {
    videoRef.current.play()
  }

  useEffect(() => {
    ws.current = new WebSocket(
      window.location.protocol.replace('http', 'ws') +
        '//' + // http: -> ws:, https: -> wss:
        'localhost:3000'
    )

    ws.current.onopen = () => {
      console.log('WebSocket Open')
    }

    return () => {
      ws.current.close()
    }
  }, [])

  const startStream = () => {
    liveStream = videoRef.current.captureStream(30) // 30 FPS
    liveStreamRecorder = new MediaRecorder(liveStream, {
      mimeType: 'video/webm;codecs=h264',
      videoBitsPerSecond: 3 * 1024 * 1024,
    })
    liveStreamRecorder.ondataavailable = (e) => {
      ws.current.send(e.data)
      console.log('send data', e.data)
    }
    // Start recording, and dump data every second
    liveStreamRecorder.start(1000)
  }

  const stopStream = () => {
    liveStreamRecorder.stop()
    ws.current.close()
  }

  const toggleMute = () => {
    setMute(!mute)
  }

  return (
    <>
      <Navbar />
      <div style={{ marginTop: '5rem' }} className='main'>
        <div id='container'>
          <video
            ref={videoRef}
            onCanPlay={handleCanPlay}
            autoPlay
            playsInline
            muted={mute}
          />
        </div>
        <div className='button-container'>
          <button onClick={startStream}>Go Live</button>
          <button onClick={stopStream}>Stop Recording</button>
          <button>Share Screen</button>
          <button onClick={toggleMute}>Mute</button>
        </div>
      </div>
    </>
  )
}

const useUserMedia = (requestedMedia) => {
  const [mediaStream, setMediaStream] = useState(null)

  useEffect(() => {
    async function enableStream() {
      try {
        const stream = await navigator.mediaDevices.getUserMedia(requestedMedia)
        setMediaStream(stream)
      } catch (err) {
        console.log(err)
      }
    }

    if (!mediaStream) {
      enableStream()
    } else {
      return function cleanup() {
        mediaStream.getVideoTracks().forEach((track) => {
          track.stop()
        })
      }
    }
  }, [mediaStream, requestedMedia])

  return mediaStream
}

export default Dashboard
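
A note on the recorder settings in startStream above: 'video/webm;codecs=h264' is a Chrome-specific combination, and MediaRecorder in other browsers may reject it. A small sketch, assuming the rest of the component stays unchanged, of probing for a supported MIME type first (pickMimeType is a hypothetical helper, not part of the original code):

const pickMimeType = () => {
  // Candidate types in order of preference; h264+opus lets the server copy
  // the video track, while VP8/VP9 would need transcoding for FLV/RTMP.
  const candidates = [
    'video/webm;codecs=h264,opus',
    'video/webm;codecs=vp9,opus',
    'video/webm;codecs=vp8,opus',
    'video/webm',
  ]
  return candidates.find((type) => MediaRecorder.isTypeSupported(type)) || ''
}

// usage inside startStream:
// liveStreamRecorder = new MediaRecorder(liveStream, {
//   mimeType: pickMimeType(),
//   videoBitsPerSecond: 3 * 1024 * 1024,
// })
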

Best Answer

So after some trial and error with ffmpeg, I got the audio working. Not sure if this is the best approach, but it works for now.
Here is the full file as well: https://github.com/toshvelaga/livestream/blob/main/server/server.js

  const ffmpeg = child_process.spawn('ffmpeg', [
    '-i',
    '-',

    // video codec config: low latency, adaptive bitrate
    '-c:v',
    'libx264',
    '-preset',
    'veryfast',
    '-tune',
    'zerolatency',

    // audio codec config: sampling frequency (11025, 22050, 44100), bitrate 64 kbits
    '-c:a',
    'aac',
    '-strict',
    '-2',
    '-ar',
    '44100',
    '-b:a',
    '64k',

    //force to overwrite
    '-y',

    // used for audio sync
    '-use_wallclock_as_timestamps',
    '1',
    '-async',
    '1',

    //'-filter_complex', 'aresample=44100', // resample audio to 44100Hz, needed if input is not 44100
    //'-strict', 'experimental',
    '-bufsize',
    '1000',
    '-f',
    'flv',
    `${process.env.TWITCH_STREAM_ADDRESS}`,

    // video codec config: low latency, adaptive bitrate
    '-c:v',
    'libx264',
    '-preset',
    'veryfast',
    '-tune',
    'zerolatency',

    // audio codec config: sampling frequency (11025, 22050, 44100), bitrate 64 kbits
    '-c:a',
    'aac',
    '-strict',
    '-2',
    '-ar',
    '44100',
    '-b:a',
    '64k',

    //force to overwrite
    '-y',

    // used for audio sync
    '-use_wallclock_as_timestamps',
    '1',
    '-async',
    '1',

    '-f',
    'flv',
    `${process.env.YOUTUBE_STREAM_ADDRESS}`,
  ])
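
The likely reason '-c copy' broke once audio was added is that the browser records Opus audio inside the WebM container, and the FLV muxer used for RTMP cannot carry Opus, so the audio track has to be transcoded to AAC, which is exactly what the arguments above do. Also note that the command above runs a separate x264/AAC encode for each destination. A hedged alternative, sketched below on the assumption that Twitch and YouTube can accept the same encode, is ffmpeg's tee muxer, which encodes once and duplicates the result to both RTMP endpoints (the environment variables are the same ones used above):

  // Sketch only: one encode, fanned out to both RTMP targets via the tee muxer.
  const ffmpeg = child_process.spawn('ffmpeg', [
    '-i', '-',

    // single video/audio encode shared by every output
    '-c:v', 'libx264', '-preset', 'veryfast', '-tune', 'zerolatency',
    '-c:a', 'aac', '-ar', '44100', '-b:a', '64k',

    // the tee muxer does not select streams automatically, so map them explicitly
    '-map', '0:v', '-map', '0:a',

    '-f', 'tee',
    `[f=flv]${process.env.TWITCH_STREAM_ADDRESS}|[f=flv]${process.env.YOUTUBE_STREAM_ADDRESS}`,
  ])

Each output in the tee list carries its own [f=flv] option, so both endpoints still receive an FLV stream.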

The original question, "javascript - How do I add audio with ffmpeg when recording video from the browser and streaming it to YouTube/Twitch?", can be found on Stack Overflow: https://stackoverflow.com/questions/68524122/
