Files
streamana/test.html
2021-05-23 22:47:07 +01:00

215 lines
6.4 KiB
HTML

<html>
<head>
<!-- Import map pinning the ES-module dependencies to exact unpkg versions.
     In production you probably want to use a bundler with resource integrity
     instead of loading unpinned code from a CDN at runtime. -->
<script type="importmap">
{
"imports": {
"glsl-canvas-js": "https://unpkg.com/glsl-canvas-js@0.2.6/dist/esm/glsl.js",
"promise-polyfill": "https://unpkg.com/promise-polyfill@8.2.0/src/index.js",
"gl-matrix": "https://unpkg.com/gl-matrix@3.3.0/esm/index.js"
}
}
</script>
<script type="module">
import { InvisibleGlCanvas } from './gl-canvas.js';
let stream_url_el, start_el, stop_el, monitor_el;
let camera_stream, gl_canvas, canvas_stream, recorder, ffmpeg_hls;
// Start streaming: capture the webcam, run each frame through a greyscale
// WebGL shader on a hidden canvas, encode the canvas stream with
// MediaRecorder (H.264/WebM) and pipe the encoded chunks into an ffmpeg
// worker that remuxes them into HLS segments for upload to stream_url.
// NOTE(review): there is no try/catch around the awaits — if getUserMedia
// rejects (permission denied, no camera), the start button stays disabled.
async function start() {
const stream_url = stream_url_el.value.trim();
if (!stream_url) {
// no destination URL entered; do nothing
return;
}
// remember the URL so the 'load' handler can restore it next visit
localStorage.setItem('streamana-example-streamurl', stream_url);
start_el.disabled = true;
// capture video from webcam
// NOTE(review): bare width/height constraints are treated as ideal values,
// so the browser picks the closest resolution it can actually deliver
camera_stream = await navigator.mediaDevices.getUserMedia({
audio: true,
video: {
width: 4096,
height: 2160,
frameRate: {
ideal: 30,
max: 30
}
}
});
// create video element which will be used for grabbing the frames to
// write to a canvas so we can apply webgl shaders
// also used to get the native video dimensions
const video = document.createElement("video");
// mute the local decode element so the mic audio isn't played back
video.muted = true;
// use glsl-canvas to make managing webgl stuff easier
// because it's not visible, client dimensions are zero so we
// need to substitute actual dimensions instead
gl_canvas = new InvisibleGlCanvas(document);
// sample greyscale fragment shader (Rec. 601 luma weights)
const fragmentShader = `
precision highp float;
uniform sampler2D u_texture;
uniform vec2 u_resolution;
void main() {
vec2 st = gl_FragCoord.xy / u_resolution.xy;
vec3 color = texture2D(u_texture, st).rgb;
float grey = dot(color, vec3(0.299, 0.587, 0.114));
gl_FragColor = vec4(vec3(grey), 1.0);
}`;
gl_canvas.load(fragmentShader);
// tell canvas to use frames from video
gl_canvas.setTexture('u_texture', video);
// wait for video to load (must come after gl_canvas.setTexture since it
// registers a loadeddata handler which then registers a play handler)
video.addEventListener('loadeddata', function () {
// make canvas same size as native video dimensions so every pixel is seen
gl_canvas.canvas.width = this.videoWidth;
gl_canvas.canvas.height = this.videoHeight;
// start the camera video
this.play();
// capture video from the canvas
canvas_stream = gl_canvas.canvas.captureStream(30);
// the canvas only carries video; add the mic audio track from the camera
canvas_stream.addTrack(camera_stream.getAudioTracks()[0]);
// set up video recording from the canvas; note we don't start
// recording until ffmpeg has started (below)
recorder = new MediaRecorder(canvas_stream, {
mimeType: "video/webm;codecs=H264",
audioBitsPerSecond: 128 * 1000,
videoBitsPerSecond: 2500 * 1000
});
// push encoded data into the ffmpeg worker that we start below
// the ArrayBuffer is passed in the transfer list so it is moved,
// not copied, into the worker
recorder.ondataavailable = async function (event) {
const data = await event.data.arrayBuffer();
ffmpeg_hls.postMessage({
type: 'video-data',
data: data
}, [data]);
};
// start ffmpeg in a worker
ffmpeg_hls = new Worker('ffmpeg.js/ffmpeg-worker-hls.js');
// note: inside this handler `this` is the worker itself
ffmpeg_hls.onmessage = function (e) {
const msg = e.data;
switch (msg.type) {
case 'ready':
// worker has loaded; hand it the ffmpeg command line
this.postMessage({
type: 'run',
arguments: [
'-i', '-', // our worker will simulate stdin
'-f', 'hls', // use hls encoder
'-c:v', 'copy', // pass through the video data (h264, no decoding or encoding)
'-c:a', 'aac', // re-encode audio as AAC-LC
'-b:a', '128k', // set audio bitrate
'-hls_time', '2', // 2 second HLS chunks
'-hls_segment_type', 'mpegts', // MPEG2-TS muxer
'-hls_list_size', '2', // two chunks in the list at a time
'/outbound/output.m3u8' // path to media playlist file in virtual FS,
// must be under /outbound
]
});
break;
case 'run':
console.log("RUNNING");
break;
case 'stdout':
console.log(msg.data);
break;
case 'stderr':
console.error(msg.data);
break;
case 'exit':
// ffmpeg finished: stop recording (if still active), kill the
// worker, clear the preview and re-enable the start button
console.log("EXITED", msg.data);
if (recorder.state !== 'inactive') {
recorder.stop();
}
this.terminate();
monitor_el.srcObject = null;
start_el.disabled = false;
break;
case 'done':
// due to async stdin read, we get this message immediately
//console.log("DONE", msg.data);
break;
case 'error':
console.error("ERROR", msg.data);
break;
case 'abort':
console.error("ABORT", msg.data);
break;
case 'start-video':
// ffmpeg is ready for media: tell it where to upload, then
// begin feeding it encoded data
this.postMessage({
type: 'base-url',
data: stream_url
});
// start recording; produce data every second, we'll be chunking it anyway
recorder.start(1000);
break;
}
};
// display the video locally so we can see what's going on
// note the video seems to set its height automatically to keep the
// correct aspect ratio
monitor_el.srcObject = canvas_stream;
monitor_el.play();
stop_el.disabled = false;
});
// pass the stream from the camera to the video so it can render the frames
video.srcObject = camera_stream;
}
// Stop streaming: end the capture tracks, tear down the WebGL canvas and
// tell the ffmpeg worker that no more media data is coming. The worker's
// 'exit' message (handled in start()) then finishes the cleanup and
// re-enables the start button.
function stop() {
  stop_el.disabled = true;
  // end the webcam capture first
  camera_stream.getTracks().forEach((track) => track.stop());
  // release the WebGL resources behind the invisible canvas
  gl_canvas.destroy();
  // then end the canvas-derived stream that feeds the recorder
  canvas_stream.getTracks().forEach((track) => track.stop());
  // signal end-of-stream to the ffmpeg worker
  ffmpeg_hls.postMessage({ type: 'video-ended' });
}
// Wire up the UI once the DOM is ready: grab the elements the module-level
// variables refer to, restore the previously used stream URL and attach the
// start/stop click handlers.
window.addEventListener('load', function () {
  stream_url_el = document.getElementById('stream_url');
  start_el = document.getElementById('start');
  stop_el = document.getElementById('stop');
  monitor_el = document.getElementById('monitor');
  // Bug fix: use the element variable, not the implicit `stream_url` global
  // the browser creates from the element id. Also, getItem() returns null
  // when the key has never been set, so default to an empty string.
  stream_url_el.value = localStorage.getItem('streamana-example-streamurl') ?? '';
  start_el.disabled = false;
  start_el.addEventListener('click', start);
  stop_el.addEventListener('click', stop);
});
</script>
</head>
<body>
<!-- Controls: destination URL plus start/stop buttons.
     Both buttons start disabled; the script enables Start on window load
     and Stop once streaming has begun. -->
<p>
<input id=stream_url type="text" placeholder="Youtube Stream URL">
<input id=start type="button" value="Start" disabled>
<input id=stop type="button" value="Stop" disabled>
</p>
<!-- Local preview of the greyscaled canvas stream; muted so the captured
     microphone audio is not played back through the speakers. -->
<video id="monitor" style="width:100%" muted="true"></video>
</body>
</html>