Bring over changes from dash branch

README.adoc
@@ -3,7 +3,7 @@
== Description

Streamana is a Web page which streams your camera and microphone to YouTube Live
(or any other HLS receiver). It uses https://github.com/davedoesdev/webm-muxer.js[webm-muxer.js] and
(or any other HLS or DASH receiver). It uses https://github.com/davedoesdev/webm-muxer.js[webm-muxer.js] and
https://github.com/davedoesdev/ffmpeg.js[ffmpeg.js].

== Demo
@@ -15,7 +15,9 @@ Use Chrome 95 or later.
.. Click _CREATE_ and then select _Go Live_ from the drop-down menu.
.. Under _Select stream key_, select _Create new stream key_.
.. Give your key a name.
.. You must select _HLS_ as the streaming protocol.
.. You must select _HLS_ as the streaming protocol. Note: YouTube DASH ingestion is only available
by using the YouTube API. See https://developers.google.com/youtube/v3/live/guides/encoding-with-dash#url-structure[here]
for more details.
.. Click _CREATE_.
.. Make sure the key you created is selected.
.. Click _COPY_ next to _Stream URL_.
@@ -34,7 +36,7 @@ You can also change various options:
** Lock the camera to portrait mode (where available, e.g. mobile phones).
** Zoom the camera to fill the page.
** Select a different version of https://github.com/davedoesdev/ffmpeg.js[ffmpeg.js] to perform
the HLS encoding.
the HLS or DASH encoding.

== Customisation
@@ -46,16 +48,16 @@ so you can change this to add video effects or overlays. The shader already hand
resizing and rotating the video in `main()`. The optional greyscale conversion is in
the `tpix()` function.

The page's functionality is defined in link:site/streamana.js[] and link:site/hls.js[].
The page's functionality is defined in link:site/streamana.js[] and link:site/streamer.js[].

link:site/hls.js[] exports a class, `HLS`, which does the heavy lifting:
link:site/streamer.js[] exports a class, `Streamer`, which does the heavy lifting:

* The constructor takes the following arguments:
** The https://developer.mozilla.org/en-US/docs/Web/API/MediaStream[`MediaStream`]
containing your video and audio tracks. Note that link:site/streamana.js[] supplies
blank video when the camera is hidden and silent audio when the microphone is muted.
** The ingestion URL.
** The URL of `ffmpeg-worker-hls.js` in https://github.com/davedoesdev/ffmpeg.js[ffmpeg.js].
** The URL of `ffmpeg-worker-hls.js` or `ffmpeg-worker-dash.js` in https://github.com/davedoesdev/ffmpeg.js[ffmpeg.js].
This allows your application (or the end user if required) to supply its own version,
in accordance with LGPL.
** The desired video frame rate.
@@ -63,7 +65,7 @@ link:site/hls.js[] exports a class, `HLS`, which does the heavy lifting:
* Call the `async start()` function to start streaming.
* Call the `end()` function to stop streaming.

`HLS` extends from https://developer.mozilla.org/en-US/docs/Web/API/EventTarget[`EventTarget`]
`Streamer` extends from https://developer.mozilla.org/en-US/docs/Web/API/EventTarget[`EventTarget`]
and dispatches the following events:

* `start` when streaming has started.
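An editorial illustration of the API described above (not part of this commit). The getUserMedia capture and the ingestion URL below are placeholders for the canvas stream and YouTube URL that the real page uses:

[source,javascript]
----
import { Streamer, get_default_config_from_url } from './streamer.js';

// Placeholder inputs: a MediaStream with one video and one audio track,
// an AudioContext and an ingestion URL.
const stream = await navigator.mediaDevices.getUserMedia({ video: true, audio: true });
const audio_context = new AudioContext();
const ingestion_url = 'https://example.com/hls-ingest';

// Build a default config from the ffmpeg.js worker URL
// ('worker-dash' in the URL selects DASH, anything else HLS).
const config = get_default_config_from_url('./ffmpeg-worker-hls.js');

// Arguments: stream, audio context, ingestion URL, config, rotate.
const streamer = new Streamer(stream, audio_context, ingestion_url, config, false);
streamer.addEventListener('start', () => console.log('streaming started'));
streamer.addEventListener('update', () => { /* refresh UI */ });
streamer.addEventListener('error', ev => console.error(ev));
streamer.addEventListener('exit', ev => console.log(`exited with status ${ev.detail.code}`));

await streamer.start();
// ... later
streamer.end();
----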
@@ -80,7 +82,7 @@ Note that https://github.com/davedoesdev/ffmpeg.js[ffmpeg.js] is licensed under
Streamana runs it inside a Web Worker and communicates with it via message passing.
The end user can replace the version used by changing the URL in the user interface.

Note also that the https://github.com/davedoesdev/ffmpeg.js[ffmpeg.js] HLS distribution
contains no H.264 or MP4 code. All encoding is done by the browser using
Note also that the https://github.com/davedoesdev/ffmpeg.js[ffmpeg.js] HLS and DASH
distributions contain no H.264 or MP4 code. All encoding is done by the browser using
https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder[`MediaRecorder`] or
https://www.w3.org/TR/webcodecs/[WebCodecs].
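The browser-side encoding mentioned here is chosen by feature detection: the new `start()` logic in streamer.js tries a WebM `MediaRecorder`, then WebCodecs, then an MP4 `MediaRecorder`, subject to the `media_recorder.webm`/`mp4` and `webcodecs` flags in the config. A rough, stand-alone sketch of that capability probe (the `probe_encoders` helper is not part of the repo):

[source,javascript]
----
// Rough capability probe; codec strings as used in this commit.
function probe_encoders() {
    const has_recorder = typeof MediaRecorder !== 'undefined';
    return {
        webm_h264: has_recorder && MediaRecorder.isTypeSupported('video/webm;codecs=H264,opus'),
        webm_vp9: has_recorder && MediaRecorder.isTypeSupported('video/webm;codecs=vp9,opus'),
        mp4: has_recorder && MediaRecorder.isTypeSupported('video/mp4'),
        webcodecs: typeof VideoEncoder !== 'undefined' && typeof AudioEncoder !== 'undefined'
    };
}

console.log(probe_encoders());
----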
site/gl-canvas.js

@@ -29,11 +29,12 @@ export class GlCanvas extends Canvas {
}
}), options);
this.update_limiter = new UpdateLimiter();
this.destroyed = false;
}
// Allow rendering loop to be driven externally (e.g. by the audio encoder)
// to avoid requestAnimationFrame (or indeed setInterval) throttling.
onLoop() {
if (this.update_limiter.check()) {
if (this.update_limiter.check() && !this.destroyed) {
const now = Date.now();
this.checkRender();
// Make sure we don't hog the main thread. Software rendering will take
@@ -50,14 +51,25 @@ export class GlCanvas extends Canvas {
}
// Prevent errors after destruction
destroy() {
super.destroy();
this.destroyed = true;
if (this.gl) {
super.destroy();
}
this.uniforms = {
createTexture() {
return {};
},
create() {}
create() {},
update() {}
};
this.textures = {
createOrUpdate() {
return {
then() {}
};
},
values: {}
};
this.textures = {};
this.buffers = {
values: {}
};
site/mux-receiver.js

@@ -8,7 +8,7 @@ export class MuxReceiver extends EventTarget {
}, 0);
}

start({ ffmpeg_lib_url, ffmpeg_args, base_url }) {
start({ ffmpeg_lib_url, ffmpeg_args, base_url, protocol }) {
this.worker = new Worker(ffmpeg_lib_url);
this.worker.onerror = this.onerror.bind(this);
this.worker.onmessage = e => {
@@ -48,7 +48,8 @@ export class MuxReceiver extends EventTarget {
case 'start-stream':
this.worker.postMessage({
type: 'base-url',
data: base_url
data: base_url,
protocol
});
// falls through
case 'sending':
@@ -65,9 +65,22 @@
<input id="zoom-video" class="form-check-input" type="checkbox">
<label for="zoom-video" class="form-check-label">Minimize vertical bars in local video display</label>
</div>
<div class="pt-4">
<label class="form-label">Ingestion protocol</label>
<div>
<div class="form-check form-check-inline">
<input id="protocol-hls" name="protocol" class="form-check-input" type="radio" value="ffmpeg-worker-hls.js">
<label for="protocol-hls" class="form-check-label">HLS (H.264)</label>
</div>
<div class="form-check form-check-inline">
<input id="protocol-dash" name="protocol" class="form-check-input" type="radio" value="ffmpeg-worker-dash.js">
<label for="protocol-dash" class="form-check-label">DASH (VP9)</label>
</div>
</div>
</div>
<div class="pt-4">
<label for="ffmpeg-lib-url" class="form-label">FFmpeg library URL</label>
<input id="ffmpeg-lib-url" class="form-control" placeholder="ffmpeg-worker-hls.js" type="text">
<input id="ffmpeg-lib-url" class="form-control" type="text">
</div>
</div>
<div id="error-alert" class="alert alert-danger alert-dismissible fade mb-0 flex-grow-0" role="alert">
site/streamana.js

@@ -1,17 +1,14 @@
import { GlCanvas } from './gl-canvas.js';
import {
HLS,
video_encoder_codec,
videoBitsPerSecond
} from './hls.js';
get_default_config_from_url,
Streamer
} from './streamer.js';
import shader from './shader.js';
import {
supported_video_configs,
max_video_config,
} from './resolution.js';

const target_frame_rate = 30;

const ingestion_url_el = document.getElementById('ingestion-url');
ingestion_url_el.value = localStorage.getItem('streamana-ingestion-url');
@@ -39,25 +36,34 @@ const initial_ffmpeg_lib_url = (localStorage.getItem('streamana-ffmpeg-lib-url')
if (initial_ffmpeg_lib_url) {
ffmpeg_lib_url_el.value = initial_ffmpeg_lib_url;
}
ffmpeg_lib_url_el.addEventListener('input', function () {
localStorage.setItem('streamana-ffmpeg-lib-url', this.value);
ffmpeg_lib_url_el.addEventListener('change', function () {
const value = this.value.trim();
localStorage.setItem('streamana-ffmpeg-lib-url', value);
if (value) {
protocol_hls_el.disabled = true;
protocol_dash_el.disabled = true;
} else {
protocol_hls_el.disabled = false;
protocol_dash_el.disabled = false;
}
set_ingestion();
});

const zoom_video_el = document.getElementById('zoom-video');
zoom_video_el.checked = !!localStorage.getItem('streamana-zoom-video');
zoom_video_el.addEventListener('input', function () {
zoom_video_el.addEventListener('change', function () {
localStorage.setItem('streamana-zoom-video', this.checked ? 'true' : '');
});

const lock_portrait_el = document.getElementById('lock-portrait');
lock_portrait_el.checked = !!localStorage.getItem('streamana-lock-portrait');
lock_portrait_el.addEventListener('input', function () {
lock_portrait_el.addEventListener('change', function () {
localStorage.setItem('streamana-lock-portrait', this.checked ? 'true' : '');
});

const greyscale_el = document.getElementById('greyscale');
greyscale_el.checked = !!localStorage.getItem('streamana-greyscale');
greyscale_el.addEventListener('input', function () {
greyscale_el.addEventListener('change', function () {
localStorage.setItem('streamana-greyscale', this.checked ? 'true' : '');
});
@@ -106,37 +112,82 @@ camera_el.addEventListener('click', camera_save);

const camera_swap_el = document.getElementById('camera-swap');

let video_config;
let preferred_resolution = localStorage.getItem('streamana-resolution');
if (preferred_resolution) {
video_config = await max_video_config({
...JSON.parse(preferred_resolution),
codec: video_encoder_codec,
bitrate: videoBitsPerSecond
}, true);
}
if (!video_config) {
video_config = await max_video_config({
width: 1280,
height: 720,
ratio: 16/9,
codec: video_encoder_codec,
bitrate: videoBitsPerSecond
}, true);
}
const protocol_hls_el = document.getElementById('protocol-hls');
const protocol_dash_el = document.getElementById('protocol-dash');
const resolution_el = document.getElementById('resolution');

let streamer_config;
let video_config;
const video_configs = new Map();
for (let config of (await supported_video_configs({
codec: video_encoder_codec,
bitrate: videoBitsPerSecond
}, true)).filter(c => c.ratio >= 1)) {
const option = document.createElement('option');
option.innerHTML = `${config.width}x${config.height} — ${config.label}`;
option.selected = config.label === video_config.label;
resolution_el.appendChild(option);
video_configs.set(option.innerText, config);

function set_ingestion_protocol(protocol) {
if (protocol === 'dash') {
protocol_hls_el.checked = false;
protocol_dash_el.checked = true;
ffmpeg_lib_url_el.placeholder = protocol_dash_el.value;
} else {
protocol_hls_el.checked = true;
protocol_dash_el.checked = false;
ffmpeg_lib_url_el.placeholder = protocol_hls_el.value;
}
}
resolution_el.addEventListener('change', function (ev) {

set_ingestion_protocol(localStorage.getItem('streamana-ingestion-protocol'));

async function set_ingestion() {
const ffmpeg_lib_url = ffmpeg_lib_url_el.value.trim() ||
ffmpeg_lib_url_el.placeholder.trim();

streamer_config = get_default_config_from_url(ffmpeg_lib_url);

set_ingestion_protocol(streamer_config.protocol);
localStorage.setItem('streamana-ingestion-protocol', streamer_config.protocol);

video_config = null;
let preferred_resolution = localStorage.getItem('streamana-resolution');
if (preferred_resolution) {
video_config = await max_video_config({
...JSON.parse(preferred_resolution),
...streamer_config.video,
...streamer_config.webcodecs.video
}, true);
}
if (!video_config) {
video_config = await max_video_config({
width: 1280,
height: 720,
ratio: 16/9,
...streamer_config.video,
...streamer_config.webcodecs.video
}, true);
}

resolution_el.innerHTML = '';
for (let config of (await supported_video_configs({
...streamer_config.video,
...streamer_config.webcodecs.video
}, true)).filter(c => c.ratio >= 1)) {
const option = document.createElement('option');
option.innerHTML = `${config.width}x${config.height} — ${config.label}`;
option.selected = config.label === video_config.label;
resolution_el.appendChild(option);
video_configs.set(option.innerText, config);
}
}

await set_ingestion();

protocol_hls_el.addEventListener('change', function () {
ffmpeg_lib_url_el.placeholder = protocol_hls_el.value;
set_ingestion();
});

protocol_dash_el.addEventListener('change', function () {
ffmpeg_lib_url_el.placeholder = protocol_dash_el.value;
set_ingestion();
});

resolution_el.addEventListener('change', function () {
video_config = video_configs.get(this.value);
localStorage.setItem('streamana-resolution', JSON.stringify({
width: video_config.width,
@@ -145,7 +196,7 @@ resolution_el.addEventListener('change', function (ev) {
}));
});

let hls;
let streamer;

async function start() {
const ingestion_url = ingestion_url_el.value.trim();
@@ -172,6 +223,8 @@ async function start() {
lock_portrait_el.disabled = true;
zoom_video_el.disabled = true;
resolution_el.disabled = true;
protocol_hls_el.disabled = true;
protocol_dash_el.disabled = true;
waiting_el.classList.remove('d-none');
mic_el.removeEventListener('click', mic_save);
camera_el.removeEventListener('click', camera_save);
@@ -218,7 +271,7 @@ async function start() {
camera_icon_el.classList.add('off');
}
camera_el.addEventListener('click', camera_save);
greyscale_el.removeEventListener('input', greyscale);
greyscale_el.removeEventListener('change', greyscale);
camera_swap_el.classList.add('d-none');
camera_swap_el.removeEventListener('click', about_face);
canvas_el_parent.classList.add('mx-auto');
@@ -267,8 +320,9 @@ async function start() {
track.stop();
}
}
if (hls) {
hls.end(!!err);
if (streamer) {
streamer.end(!!err);
streamer = null;
}

go_live_el.checked = false;
@@ -279,6 +333,8 @@ async function start() {
lock_portrait_el.disabled = false;
zoom_video_el.disabled = false;
resolution_el.disabled = false;
protocol_hls_el.disabled = ffmpeg_lib_url_el.value.trim();
protocol_dash_el.disabled = ffmpeg_lib_url_el.value.trim();
waiting_el.classList.add('d-none');
canvas_el.classList.add('d-none');
}
@@ -356,7 +412,7 @@ async function start() {
camera_swap_el.removeEventListener('click', about_face);

async function finish() {
await hls.start();
await streamer.start();
mic_el.addEventListener('click', media_toggle);
camera_el.addEventListener('click', media_toggle);
camera_swap_el.addEventListener('click', about_face);
@@ -375,8 +431,8 @@ async function start() {
width: video_config.width,
height: video_config.height,
frameRate: {
ideal: target_frame_rate,
max: target_frame_rate
ideal: streamer_config.video.framerate,
max: streamer_config.video.framerate
},
facingMode: requested_facing_mode
};
@@ -534,7 +590,7 @@ async function start() {

// tell shader whether to greyscale
gl_canvas.setUniform('u_greyscale', greyscale_el.checked);
greyscale_el.addEventListener('input', greyscale);
greyscale_el.addEventListener('change', greyscale);

// tell shader camera hasn't started
gl_canvas.setUniform('u_active', false);
@@ -572,7 +628,7 @@ async function start() {
// capture video from the canvas
// Note: Safari on iOS doesn't get any data, might be related to
// https://bugs.webkit.org/show_bug.cgi?id=181663
canvas_stream = canvas_el.captureStream(target_frame_rate);
canvas_stream = canvas_el.captureStream(streamer_config.video.framerate);

// add audio to canvas stream
audio_dest = audio_context.createMediaStreamDestination();
@@ -593,11 +649,15 @@ async function start() {
audio_source = silence;
audio_source.connect(audio_dest);

// HLS from the canvas stream to the ingestion URL
hls = new HLS(canvas_stream, audio_context, ingestion_url, ffmpeg_lib_url, target_frame_rate, lock_portrait);
hls.addEventListener('run', () => console.log('HLS running'));
hls.addEventListener('exit', ev => {
const msg = `HLS exited with status ${ev.detail.code}`;
// Stream from the canvas stream to the ingestion URL
streamer = new Streamer(canvas_stream,
audio_context,
ingestion_url,
streamer_config,
lock_portrait);
streamer.addEventListener('run', () => console.log('Streamer running'));
streamer.addEventListener('exit', ev => {
const msg = `Streamer exited with status ${ev.detail.code}`;
if (ev.detail.code === 0) {
console.log(msg);
cleanup();
@@ -605,8 +665,8 @@ async function start() {
cleanup(msg);
}
});
hls.addEventListener('error', cleanup);
hls.addEventListener('start', function () {
streamer.addEventListener('error', cleanup);
streamer.addEventListener('start', function () {
if (done) {
this.end(true);
}
@@ -616,7 +676,7 @@ async function start() {
go_live_el.disabled = false;
update();
});
hls.addEventListener('update', update);
streamer.addEventListener('update', update);

await start_media(facing_mode);
} catch (ex) {
@@ -626,5 +686,5 @@ async function start() {

function stop() {
go_live_el.disabled = true;
hls.end();
streamer.end();
}
site/hls.js → site/streamer.js

@@ -1,20 +1,71 @@
import { UpdateLimiter } from './update-limiter.js';
import { MuxReceiver } from './mux-receiver.js';

const audioBitsPerSecond = 128 * 1000;
export const videoBitsPerSecond = 2500 * 1000;
const key_frame_interval = 3;

export const video_encoder_codec = 'avc1.42E01E' /*'avc1.42001E'*/;
export function get_default_config_from_url(ffmpeg_lib_url) {
const protocol = ffmpeg_lib_url.indexOf('worker-dash') >= 0 ? 'dash' : 'hls';
return {
ffmpeg_lib_url,
protocol,
video: {
bitrate: 2500 * 1000,
framerate: 30
},
audio: {
bitrate: 128 * 1000
},
media_recorder: {
video: {
codec: protocol === 'dash' ? 'vp9' : 'H264',
},
audio: {
codec: 'opus'
},
webm: true,
mp4: false // requires ffmpeg-worker-hls.js or ffmpeg-worker-dash.js
// to be configured with MP4 support (which is not the default)
},
webcodecs: {
video: {
...(protocol === 'dash' ? {
codec: 'vp09.00.10.08.01'
} : {
codec: 'avc1.42E01E' /*'avc1.42001E'*/,
avc: { format: 'annexb' }
})
},
audio: {
codec: 'opus' /*'pcm'*/,
},
webm_muxer: {
video: {
codec: protocol === 'dash' ? 'V_VP9' : 'V_MPEG4/ISO/AVC'
},
audio: {
codec: 'A_OPUS',
bit_depth: 0 // 32 for pcm
}
}
},
ffmpeg: {
video: {
codec: protocol === 'dash' ? 'libvpx-vp9' : 'libx264'
},
audio: {
codec: protocol === 'dash' ? 'libopus' : 'aac'
}
}
};
}

export class HLS extends EventTarget {
constructor(stream, audio_context, base_url, ffmpeg_lib_url, frame_rate, rotate) {
export class Streamer extends EventTarget {
constructor(stream, audio_context, base_url, config, rotate) {
super();
this.stream = stream;
this.audio_context = audio_context;
this.base_url = base_url;
this.ffmpeg_lib_url = ffmpeg_lib_url;
this.frame_rate = frame_rate;
this.config = config;
if (rotate) {
this.ffmpeg_metadata = ['-metadata:s:v:0', 'rotate=-90'];
} else {
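The defaults returned by `get_default_config_from_url` can be adjusted before being handed to `Streamer`; a small sketch (not part of this commit, with arbitrary example values):

[source,javascript]
----
import { get_default_config_from_url } from './streamer.js';

const config = get_default_config_from_url('./ffmpeg-worker-dash.js');
console.log(config.protocol);              // 'dash'
console.log(config.webcodecs.video.codec); // 'vp09.00.10.08.01'

// Adjust a few defaults before passing the object as the fourth argument
// of the Streamer constructor, as streamana.js does with streamer_config.
config.video.bitrate = 4000 * 1000; // 4 Mbps video
config.video.framerate = 25;
config.audio.bitrate = 96 * 1000;   // 96 kbps audio
----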
@@ -30,25 +81,47 @@ export class HLS extends EventTarget {
return;
}

try {
// first try WebM/H264 MediaRecorder - this should work on Chrome Linux and Windows
await this.media_recorder('video/webm;codecs=H264');
console.log("Using MediaRecorder WebM/h264");
} catch (ex) {
console.warn(ex.toString());
const mrcfg = this.config.media_recorder;

const mp4 = async () => {
if (mrcfg.mp4) {
// try MediaRecorder MP4 - this should work on Safari MacOS and iOS,
// producing H.264 video and AAC audio
await this.media_recorder('video/mp4');
console.log("Using MediaRecorder MP4 (H264,aac)");
} else {
throw new Error('no supported encoding methods');
}
};

const webcodecs = async () => {
const wccfg = this.config.webcodecs;
if (wccfg) {
try {
// try WebCodecs - this should work on Chrome including Android
await this.webcodecs();
console.log("Using WebCodecs");
} catch (ex) {
console.warn(ex.toString());
await mp4();
}
} else {
await mp4();
}
};

if (mrcfg.webm) {
try {
// next try WebCodecs - this should work on Chrome including Android
await this.webcodecs(video_encoder_codec,
'opus' /*'pcm'*/,
{ avc: { format: 'annexb' } });
console.log("Using WebCodecs");
// try MediaRecorder WebM - this should work on Chrome Linux and Windows
const codecs = `${mrcfg.video.codec},${mrcfg.audio.codec}`;
await this.media_recorder(`video/webm;codecs=${codecs}`);
console.log(`Using MediaRecorder WebM (${codecs})`);
} catch (ex) {
console.warn(ex.toString());
// finally try MP4 - this should work on Safari MacOS and iOS, producing H264
// this assumes ffmpeg-hls.js has been configured with MP4 support
await this.media_recorder('video/mp4');
console.log("Using MediaRecorder MP4");
await webcodecs();
}
} else {
await webcodecs();
}

this.started = true;
@@ -59,7 +132,7 @@ export class HLS extends EventTarget {
await this.audio_context.audioWorklet.addModule('./dummy-worklet.js');
this.dummy_processor = new AudioWorkletNode(this.audio_context, 'dummy-processor', {
processorOptions: {
update_rate: this.frame_rate
update_rate: this.config.video.framerate
}
});
this.dummy_processor.onerror = onerror;
@@ -73,6 +146,30 @@ export class HLS extends EventTarget {
this.dummy_processor.disconnect();
}

receiver_args(video_codec, audio_codec) {
return {
ffmpeg_lib_url: this.config.ffmpeg_lib_url,
ffmpeg_args: [
'-i', '/work/stream1',
'-map', '0:v',
'-map', '0:a',
...(video_codec === this.config.ffmpeg.video.codec ||
video_codec === 'copy' ?
['-c:v', 'copy'] : // pass through the video data (no decoding or encoding)
['-c:v', this.config.ffmpeg.video.codec, // re-encode video
'-b:v', this.config.video.bitrate.toString()]), // set video bitrate
...this.ffmpeg_metadata,
...(audio_codec === this.config.ffmpeg.audio.codec ||
audio_codec === 'copy' ?
['-c:a', 'copy'] : // pass through the audio data
['-c:a', this.config.ffmpeg.audio.codec, // re-encode audio
'-b:a', this.config.audio.bitrate.toString()]) // set audio bitrate
],
base_url: this.base_url,
protocol: this.config.protocol
};
}

async media_recorder(mimeType) {
const onerror = this.onerror.bind(this);
@@ -80,8 +177,8 @@ export class HLS extends EventTarget {
// note we don't start recording until ffmpeg has started (below)
const recorder = new MediaRecorder(this.stream, {
mimeType,
audioBitsPerSecond,
videoBitsPerSecond
videoBitsPerSecond: this.config.video.bitrate,
audioBitsPerSecond: this.config.audio.bitrate
});
recorder.onerror = onerror;
@@ -101,27 +198,71 @@ export class HLS extends EventTarget {

await this.start_dummy_processor();

let video_codec, audio_codec;
if (recorder.mimeType === 'video/mp4') {
video_codec = 'libx264';
audio_codec = 'aac';
} else {
switch (this.config.media_recorder.video.codec.toLowerCase()) {
case 'av1':
video_codec = 'libaom-av1';
break;

case 'h264':
video_codec = 'libx264';
break;

case 'vp8':
video_codec = 'libvpx';
break;

case 'vp9':
video_codec = 'libvpx-vp9';
break;

default:
video_codec = null;
break;
}

switch (this.config.media_recorder.audio.codec.toLowerCase()) {
case 'flac':
audio_codec = 'flac';
break;

case 'mp3':
audio_codec = 'libmp3lame';
break;

case 'opus':
audio_codec = 'libopus';
break;

case 'vorbis':
audio_codec = 'libvorbis';
break;

case 'pcm':
audio_codec = 'f32le';
break;

default:
if (this.config.media_recorder.audio.codec.toLowerCase().startsWith('mp4a')) {
audio_codec = 'aac';
} else {
audio_codec = null;
}
break;
}
}
// start the ffmpeg worker
this.receiver = new MuxReceiver();
this.receiver.addEventListener('message', e => {
const msg = e.detail;
switch (msg.type) {
case 'ready':
this.receiver.start({
ffmpeg_lib_url: this.ffmpeg_lib_url,
ffmpeg_args: [
'-i', '/work/stream1',
'-map', '0:v',
'-map', '0:a',
'-c:v', 'copy', // pass through the video data (h264, no decoding or encoding)
...this.ffmpeg_metadata,
...(recorder.mimeType === 'video/mp4' ?
['-c:a', 'copy'] : // assume already AAC
['-c:a', 'aac', // re-encode audio as AAC-LC
'-b:a', audioBitsPerSecond.toString()]) // set audio bitrate
],
base_url: this.base_url
});
this.receiver.start(this.receiver_args(video_codec, audio_codec));
break;

case 'error':
@@ -153,7 +294,7 @@ export class HLS extends EventTarget {
});
}

async webcodecs(video_codec, audio_codec, video_config, audio_config) {
async webcodecs() {
const onerror = this.onerror.bind(this);

const video_track = this.stream.getVideoTracks()[0];
@@ -209,13 +350,11 @@ export class HLS extends EventTarget {
readable: video_readable,
key_frame_interval,
config: {
codec: video_codec,
bitrate: videoBitsPerSecond,
framerate: this.frame_rate,
...this.config.video,
...this.config.webcodecs.video,
latencyMode: 'realtime',
width: video_settings.width,
height: video_settings.height,
...video_config
},
}, [video_readable]);
@@ -224,11 +363,10 @@ export class HLS extends EventTarget {
audio: true,
readable: audio_readable,
config: {
codec: audio_codec,
bitrate: audioBitsPerSecond,
...this.config.audio,
...this.config.webcodecs.audio,
sampleRate: audio_settings.sampleRate,
numberOfChannels: audio_settings.channelCount,
...audio_config
},
}, [audio_readable]);
@@ -257,6 +395,60 @@ export class HLS extends EventTarget {
}
};

let video_codec;
switch (this.config.webcodecs.webm_muxer.video.codec) {
case 'V_AV1':
video_codec = 'libaom-av1';
break;

case 'V_MPEG4/ISO/AVC':
video_codec = 'libx264';
break;

case 'V_VP8':
video_codec = 'libvpx';
break;

case 'V_VP9':
video_codec = 'libvpx-vp9';
break;

default:
video_codec = null;
break;
}

let audio_codec;
switch (this.config.webcodecs.webm_muxer.audio.codec) {
case 'A_FLAC':
audio_codec = 'flac';
break;

case 'A_MPEG/L3':
audio_codec = 'libmp3lame';
break;

case 'A_OPUS':
audio_codec = 'libopus';
break;

case 'A_VORBIS':
audio_codec = 'libvorbis';
break;

case 'A_PCM/FLOAT/IEEE':
audio_codec = 'f32le';
break;

default:
if (this.config.webcodecs.webm_muxer.audio.codec.startsWith('A_AAC')) {
audio_codec = 'aac';
} else {
audio_codec = null;
}
break;
}

this.worker.postMessage({
type: 'start',
webm_metadata: {
@@ -264,28 +456,19 @@ export class HLS extends EventTarget {
video: {
width: video_settings.width,
height: video_settings.height,
frame_rate: this.frame_rate,
codec_id: 'V_MPEG4/ISO/AVC'
frame_rate: this.config.video.framerate,
codec_id: this.config.webcodecs.webm_muxer.video.codec
},
audio: {
sample_rate: audio_settings.sampleRate,
channels: audio_settings.channelCount,
codec_id: 'A_OPUS'
bit_depth: this.config.webcodecs.webm_muxer.audio.bit_depth,
codec_id: this.config.webcodecs.webm_muxer.audio.codec
}
},
webm_receiver: './mux-receiver.js',
webm_receiver_data: { name: 'stream1' },
ffmpeg_lib_url: this.ffmpeg_lib_url,
base_url: this.base_url,
ffmpeg_args: [
'-i', '/work/stream1',
'-map', '0:v',
'-map', '0:a',
'-c:v', 'copy', // pass through the video data (h264, no decoding or encoding)
...this.ffmpeg_metadata,
'-c:a', 'aac', // re-encode audio as AAC-LC
'-b:a', audioBitsPerSecond.toString() // set audio bitrate
]
...this.receiver_args(video_codec, audio_codec)
});
}