From 4211360eccafde00a733b1d7601ecdfcd704d293 Mon Sep 17 00:00:00 2001 From: David Halls Date: Thu, 3 Jun 2021 20:00:10 +0100 Subject: [PATCH] Use WebAudio to add missing audio track --- site/example.js | 45 ++++++++++++++++++++++++++++++++++----------- site/hls-worker.js | 9 +++++++++ 2 files changed, 43 insertions(+), 11 deletions(-) diff --git a/site/example.js b/site/example.js index 1c85e54..1143df6 100644 --- a/site/example.js +++ b/site/example.js @@ -81,17 +81,27 @@ async function start() { try { // capture video from webcam - camera_stream = await navigator.mediaDevices.getUserMedia({ - audio: true, - video: { - width: 4096, - height: 2160, - frameRate: { - ideal: 30, - max: 30 - } + const video_constraints = { + width: 4096, + height: 2160, + frameRate: { + ideal: 30, + max: 30 } - }); + }; + try { + camera_stream = await navigator.mediaDevices.getUserMedia({ + audio: true, + video: video_constraints + }); + } catch (ex) { + // retry in case audio isn't available + console.warn("Failed to get user media, retrying without audio"); + camera_stream = await navigator.mediaDevices.getUserMedia({ + audio: false, + video: video_constraints + }); + } // create video element which will be used for grabbing the frames to // write to a canvas so we can apply webgl shaders @@ -124,7 +134,20 @@ async function start() { // capture video from the canvas canvas_stream = gl_canvas.canvas.captureStream(30); - canvas_stream.addTrack(camera_stream.getAudioTracks()[0]); + + // add audio if present + let audio_tracks = camera_stream.getAudioTracks(); + if (audio_tracks.length === 0) { + // if audio isn't present, use silence + console.warn("No audio present, adding silence"); + const context = new AudioContext(); + const silence = context.createBufferSource(); + const dest = context.createMediaStreamDestination(); + silence.connect(dest); + silence.start(); + audio_tracks = dest.stream.getAudioTracks(); + } + canvas_stream.addTrack(audio_tracks[0]); // start HLS from the 
canvas stream to the ingestion URL hls_worker = new HlsWorker(canvas_stream, ingestion_url, ffmpeg_lib_url); diff --git a/site/hls-worker.js b/site/hls-worker.js index eb2bed0..1198a8d 100644 --- a/site/hls-worker.js +++ b/site/hls-worker.js @@ -12,6 +12,15 @@ export class HlsWorker extends EventTarget { } }; + // we should use VideoEncoder and AudioEncoder + // push data into worker, we'll need to be able to handle separate streams + // have a /inbound and async read from 2 files on there via queues + // - fd 3 and 4 should be used for the two streams (TODO: confirm mapping) + // start by implementing this for current system + // chrome supports mp4a.40.2 (AAC LC) and avc1.42001E (H264) + // so we can passthru both and don't need any codecs hopefully + //console.log(stream.getVideoTracks()[0].width, stream.getVideoTracks()[0].height); + // set up video recording from the stream // note we don't start recording until ffmpeg has started (below) let recorder;