Mirror of https://github.com/davedoesdev/streamana.git, synced 2025-12-24 13:28:19 +08:00

Move files to site/ and catch errors

example.html: 34 lines (deleted)
@@ -1,34 +0,0 @@
<html>
  <head>
    <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.0.1/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-+0n0xVW2eSR5OomGNYDnhzAbDsOXxcvSN1TPprVMTNDbiYZCxYbOOl7+AMvyTG2x" crossorigin="anonymous">
    </script>
    <script type="module" src="./example.js"></script>
  </head>
  <body>
    <div class="container">
      <div class="row">
        <div class="col">
          <div class="input-group">
            <input id="ingestion-url" type="text" class="form-control" placeholder="Youtube Stream URL">
            <div class="input-group-text">
              <div class="form-check form-switch">
                <input class="form-check-input" type="checkbox" id="go-live" disabled autocomplete="off">
                <label for="go-live">Go Live</label>
              </div>
            </div>
          </div>
        </div>
      </div>
      <div class="row">
        <div class="col position-relative">
          <video id="monitor" muted="true" class="w-100"></video>
          <div class="position-absolute top-50 start-50 translate-middle">
            <div id="waiting" class="text-primary spinner-border d-none" role="status">
              <span class="visually-hidden">Loading...</span>
            </div>
          </div>
        </div>
      </div>
    </div>
  </body>
</html>

example.js: 118 lines (deleted)
@@ -1,118 +0,0 @@
import { InvisibleGlCanvas } from './gl-canvas.js';
import { HlsWorker } from './hls-worker.js';
import shader from './greyscale-shader.js';

const ingestion_url_el = document.getElementById('ingestion-url');
ingestion_url_el.value = localStorage.getItem('streamana-example-ingestion-url');

const go_live_el = document.getElementById('go-live');
go_live_el.disabled = false;
go_live_el.addEventListener('click', function () {
    if (this.checked) {
        start();
    } else {
        stop();
    }
});

const monitor_el = document.getElementById('monitor');
const waiting_el = document.getElementById('waiting');

let hls_worker;

async function start() {
    const ingestion_url = ingestion_url_el.value.trim();
    if (!ingestion_url) {
        go_live_el.checked = false;
        return;
    }
    localStorage.setItem('streamana-example-ingestion-url', ingestion_url);

    go_live_el.disabled = true;
    waiting_el.classList.remove('d-none');

    // capture video from webcam
    const camera_stream = await navigator.mediaDevices.getUserMedia({
        audio: true,
        video: {
            width: 4096,
            height: 2160,
            frameRate: {
                ideal: 30,
                max: 30
            }
        }
    });

    // create video element which will be used for grabbing the frames to
    // write to a canvas so we can apply webgl shaders
    // also used to get the native video dimensions
    const video = document.createElement('video');
    video.muted = true;

    // use glsl-canvas to make managing webgl stuff easier
    // because it's not visible, client dimensions are zero so we
    // need to substitute actual dimensions instead
    const gl_canvas = new InvisibleGlCanvas(document);

    // as an example, greyscale the stream
    gl_canvas.load(shader);

    // tell canvas to use frames from video
    gl_canvas.setTexture('u_texture', video);

    // wait for video to load (must come after gl_canvas.setTexture() since it
    // registers a loadeddata handler which then registers a play handler)
    video.addEventListener('loadeddata', function () {
        // make canvas same size as native video dimensions so every pixel is seen
        gl_canvas.canvas.width = this.videoWidth;
        gl_canvas.canvas.height = this.videoHeight;

        // start the camera video
        this.play();

        // capture video from the canvas
        const canvas_stream = gl_canvas.canvas.captureStream(30);
        canvas_stream.addTrack(camera_stream.getAudioTracks()[0]);

        // start HLS from the canvas stream to the ingestion URL
        hls_worker = new HlsWorker(canvas_stream, ingestion_url);
        hls_worker.addEventListener('run', () => console.log('HLS running'));
        hls_worker.addEventListener('exit', ev => {
            console.log('HLS exited with code', ev.detail);
            for (let track of camera_stream.getTracks()) {
                track.stop();
            }
            gl_canvas.destroy();
            for (let track of canvas_stream.getTracks()) {
                track.stop();
            }
            monitor_el.srcObject = null;
            go_live_el.disabled = false;
        });
        hls_worker.addEventListener('error', ev => {
            console.error('HLS errored', ev.detail);
        });
        hls_worker.addEventListener('abort', ev => {
            console.error('HLS aborted', ev.detail);
        });
        hls_worker.addEventListener('start-video', () => {
            // display the video locally so we can see what's going on
            // note the video seems to set its height automatically to keep the
            // correct aspect ratio
            waiting_el.classList.add('d-none');
            monitor_el.srcObject = canvas_stream;
            monitor_el.play();
        });

        go_live_el.disabled = false;
    });

    // pass the stream from the camera to the video so it can render the frames
    video.srcObject = camera_stream;
}

function stop() {
    go_live_el.disabled = true;
    hls_worker.end();
}

site/example.html: 43 lines (new file)
@@ -0,0 +1,43 @@
<html>
  <head>
    <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.0.1/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-+0n0xVW2eSR5OomGNYDnhzAbDsOXxcvSN1TPprVMTNDbiYZCxYbOOl7+AMvyTG2x" crossorigin="anonymous">
    <script src="https://cdn.jsdelivr.net/npm/bootstrap@5.0.1/dist/js/bootstrap.bundle.min.js" integrity="sha384-gtEjrD/SeCtmISkJkNUaaKMoLD0//ElJ19smozuHV6z3Iehds+3Ulb9Bn9Plx0x4" crossorigin="anonymous"></script>
    <script type="module" src="./example.js"></script>
  </head>
  <body>
    <nav id="nav" class="navbar navbar-light bg-light">
      <div class="container-fluid">
        <button class="navbar-toggler" type="button" data-bs-toggle="collapse" data-bs-target="#navbarToggleExternalContent">
          <span class="navbar-toggler-icon"></span>
        </button>
        <span class="d-flex" style="flex: 1">
          <input id="ingestion-url" type="text" class="form-control ms-2 me-2" placeholder="Ingestion URL">
          <div class="input-group-text">
            <div class="form-check form-switch">
              <input class="form-check-input" type="checkbox" id="go-live" disabled autocomplete="off">
              <label for="go-live">Go Live</label>
            </div>
          </div>
        </span>
      </div>
    </nav>
    <div class="collapse" id="navbarToggleExternalContent">
      <div class="input-group-text">
        <label for="ffmpeg-lib-url">FFmpeg library URL:</label>
        <input id="ffmpeg-lib-url" class="form-control ms-2" placeholder="ffmpeg-worker-hls.js" type="text">
      </div>
    </div>
    <div id="error-alert" class="alert alert-danger alert-dismissible fade mb-0" role="alert">
      <strong>An error occurred!</strong> See the Developer Console for details.
      <button type="button" class="btn-close" data-bs-dismiss="alert" aria-label="Close"></button>
    </div>
    <div class="position-relative">
      <video id="monitor" muted="true" class="w-100"></video>
      <div class="position-absolute top-50 start-50 translate-middle">
        <div id="waiting" class="text-primary spinner-border d-none" role="status">
          <span class="visually-hidden">Loading...</span>
        </div>
      </div>
    </div>
  </body>
</html>

site/example.js: 167 lines (new file)
@@ -0,0 +1,167 @@
import { InvisibleGlCanvas } from './gl-canvas.js';
import { HlsWorker } from './hls-worker.js';
import shader from './greyscale-shader.js';

const ingestion_url_el = document.getElementById('ingestion-url');
ingestion_url_el.value = localStorage.getItem('streamana-example-ingestion-url');

const go_live_el = document.getElementById('go-live');
go_live_el.disabled = false;
go_live_el.addEventListener('click', function () {
    if (this.checked) {
        start();
    } else {
        stop();
    }
});

const monitor_el = document.getElementById('monitor');
const waiting_el = document.getElementById('waiting');
const error_alert_el = document.getElementById('error-alert');
const error_alert_el_parent = error_alert_el.parentNode;
const error_alert_el_nextSibling = error_alert_el.nextSibling;
error_alert_el_parent.removeChild(error_alert_el);

const ffmpeg_lib_url_el = document.getElementById('ffmpeg-lib-url');
ffmpeg_lib_url_el.value = localStorage.getItem('streamana-ffmpeg-lib-url');
ffmpeg_lib_url_el.addEventListener('input', function (e) {
    localStorage.setItem('streamana-ffmpeg-lib-url', this.value);
});

let hls_worker;

async function start() {
    const ingestion_url = ingestion_url_el.value.trim();
    if (!ingestion_url) {
        go_live_el.checked = false;
        return;
    }
    localStorage.setItem('streamana-example-ingestion-url', ingestion_url);

    const ffmpeg_lib_url = ffmpeg_lib_url_el.value.trim() ||
                           ffmpeg_lib_url_el.placeholder.trim();

    go_live_el.disabled = true;
    waiting_el.classList.remove('d-none');

    if (error_alert_el.parentNode) {
        error_alert_el_parent.removeChild(error_alert_el);
    }

    let camera_stream, gl_canvas, canvas_stream, done = false;
    function cleanup(err) {
        if (done) {
            return;
        }
        done = true;
        if (err) {
            console.error(err);
            error_alert_el_parent.insertBefore(error_alert_el, error_alert_el_nextSibling);
            error_alert_el.classList.add('show');
        }
        if (camera_stream) {
            for (let track of camera_stream.getTracks()) {
                track.stop();
            }
        }
        if (gl_canvas) {
            gl_canvas.destroy();
        }
        if (canvas_stream) {
            for (let track of canvas_stream.getTracks()) {
                track.stop();
            }
        }
        monitor_el.srcObject = null;
        go_live_el.checked = false;
        go_live_el.disabled = false;
        waiting_el.classList.add('d-none');
    }

    try {
        // capture video from webcam
        camera_stream = await navigator.mediaDevices.getUserMedia({
            audio: true,
            video: {
                width: 4096,
                height: 2160,
                frameRate: {
                    ideal: 30,
                    max: 30
                }
            }
        });

        // create video element which will be used for grabbing the frames to
        // write to a canvas so we can apply webgl shaders
        // also used to get the native video dimensions
        const video = document.createElement('video');
        video.muted = true;

        // use glsl-canvas to make managing webgl stuff easier
        // because it's not visible, client dimensions are zero so we
        // need to substitute actual dimensions instead
        gl_canvas = new InvisibleGlCanvas(document);

        // as an example, greyscale the stream
        gl_canvas.load(shader);

        // tell canvas to use frames from video
        gl_canvas.setTexture('u_texture', video);

        // wait for video to load (must come after gl_canvas.setTexture() since it
        // registers a loadeddata handler which then registers a play handler)
        video.addEventListener('loadeddata', function () {
            try {
                // make canvas same size as native video dimensions so every pixel is seen
                gl_canvas.canvas.width = this.videoWidth;
                gl_canvas.canvas.height = this.videoHeight;

                // start the camera video
                this.play();

                // capture video from the canvas
                canvas_stream = gl_canvas.canvas.captureStream(30);
                canvas_stream.addTrack(camera_stream.getAudioTracks()[0]);

                // start HLS from the canvas stream to the ingestion URL
                hls_worker = new HlsWorker(canvas_stream, ingestion_url, ffmpeg_lib_url);
                hls_worker.addEventListener('run', () => console.log('HLS running'));
                hls_worker.addEventListener('exit', ev => {
                    const msg = `HLS exited with status ${ev.detail}`;
                    if (ev.detail === 0) {
                        console.log(msg);
                        cleanup();
                    } else {
                        console.error(msg);
                        cleanup(msg);
                    }
                });
                hls_worker.addEventListener('error', cleanup);
                hls_worker.addEventListener('abort', cleanup);
                hls_worker.addEventListener('start-video', () => {
                    // display the video locally so we can see what's going on
                    // note the video seems to set its height automatically to keep the
                    // correct aspect ratio
                    waiting_el.classList.add('d-none');
                    monitor_el.srcObject = canvas_stream;
                    monitor_el.play();
                });

                go_live_el.disabled = false;
            } catch (ex) {
                cleanup(ex);
            }
        });

        // pass the stream from the camera to the video so it can render the frames
        video.srcObject = camera_stream;
    } catch (ex) {
        return cleanup(ex);
    }
}

function stop() {
    go_live_el.disabled = true;
    hls_worker.end();
}

site/ffmpeg-worker-hls.js: 1 line (new symbolic link)
@@ -0,0 +1 @@
../ffmpeg.js/ffmpeg-worker-hls.js

site/ffmpeg-worker-hls.wasm: 1 line (new symbolic link)
@@ -0,0 +1 @@
../ffmpeg.js/ffmpeg-worker-hls.wasm

@@ -1,6 +1,14 @@
 export class HlsWorker extends EventTarget {
-    constructor(stream, ingestion_url) {
+    constructor(stream, ingestion_url, ffmpeg_lib_url) {
         super();
+
+        let exited = false;
+        onerror = e => {
+            if (!exited) {
+                this.dispatchEvent(new CustomEvent('error', { detail: e }));
+            }
+        };
+
         // set up video recording from the stream
         // note we don't start recording until ffmpeg has started (below)
         const recorder = new MediaRecorder(stream, {
@@ -8,6 +16,7 @@ export class HlsWorker extends EventTarget {
             audioBitsPerSecond: 128 * 1000,
             videoBitsPerSecond: 2500 * 1000
         });
+        recorder.onerror = onerror;

         // push encoded data into the ffmpeg worker
         recorder.ondataavailable = async event => {
@@ -19,7 +28,8 @@
         };

         // start ffmpeg in a Web Worker
-        this.worker = new Worker('ffmpeg.js/ffmpeg-worker-hls.js');
+        this.worker = new Worker(ffmpeg_lib_url);
+        this.worker.onerror = onerror;
         this.worker.onmessage = e => {
             const msg = e.data;
             switch (msg.type) {
@@ -55,6 +65,7 @@
                     recorder.start(1000);
                     break;
                 case 'exit':
+                    exited = true;
                     this.worker.terminate();
                     if (recorder.state !== 'inactive') {
                         recorder.stop();