Mirror of https://github.com/AlexxIT/go2rtc.git, synced 2025-09-27 20:52:08 +08:00
Compare commits
35 Commits
Commit SHAs:

368562c540, 6d6e7010b4, 4157a53dd8, bdf5654c01, 66f729aa0e, 96d1ef2d2c, 9739f7f416, 654fa32b3a, db2263c7fe, e6c36f1cf7, 110f90cb34, aca3bab238, 4df44645d7, 097fdfbbb8, dc21a04da7, db255b476a, 464ea417ef, c1fac66329, a6057a2eca, 7c69ba13b0, 2b8bfe8bd9, 0bd54da456, 9f6af1c9e4, c9dd0e37e4, 562872beb8, 46a278c067, 270fc7c1b6, 6feb635522, 6f48131e4d, f120db71a3, 72823af9d0, 15d9d4ebf4, b09bbd79c4, 1830273f02, 07f3972794
.github/workflows/builder.yml (vendored, deleted, 59 lines)

```yaml
@@ -1,59 +0,0 @@
# https://github.com/home-assistant/builder
name: 'Builder'

on:
  push:
    tags: [ 'v*' ]
  workflow_dispatch:

jobs:
  hassio:
    name: Hassio Addon
    runs-on: ubuntu-latest
    steps:
      - name: Checkout the repository
        uses: actions/checkout@v3

      - name: Login to DockerHub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Branch name
        run: |
          VERSION="${GITHUB_REF#refs/tags/v}"
          echo "REPO=alexxit/go2rtc" >> $GITHUB_ENV
          echo "TAG=${VERSION}" >> $GITHUB_ENV
          echo "IMAGE=alexxit/go2rtc:${VERSION}" >> $GITHUB_ENV

      - name: Build amd64
        uses: home-assistant/builder@master
        with:
          args: --amd64 --target build/hassio --version $TAG-amd64 --no-latest --docker-hub-check

      - name: Build i386
        uses: home-assistant/builder@master
        with:
          args: --i386 --target build/hassio --version $TAG-i386 --no-latest --docker-hub-check

      - name: Build aarch64
        uses: home-assistant/builder@master
        with:
          args: --aarch64 --target build/hassio --version $TAG-aarch64 --no-latest --docker-hub-check

      - name: Build armv7
        uses: home-assistant/builder@master
        with:
          args: --armv7 --target build/hassio --version $TAG-armv7 --no-latest --docker-hub-check

      - name: Docker manifest
        run: |
          # thanks to https://github.com/aler9/rtsp-simple-server/blob/main/Makefile
          docker manifest create "${IMAGE}" \
            "${IMAGE}-amd64" "${IMAGE}-i386" "${IMAGE}-aarch64" "${IMAGE}-armv7"
          docker manifest push "${IMAGE}"

          docker manifest create "${REPO}:latest" \
            "${IMAGE}-amd64" "${IMAGE}-i386" "${IMAGE}-aarch64" "${IMAGE}-armv7"
          docker manifest push "${REPO}:latest"
```
.github/workflows/ci.yml (vendored, new file, 75 lines)

```yaml
@@ -0,0 +1,75 @@
name: ci

on:
  workflow_dispatch:
  push:
    branches:
      - 'master'
    tags:
      - 'v*'

jobs:
  build-and-push:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v4
        with:
          images: alexxit/go2rtc
          tags: |
            type=ref,event=branch
            type=semver,pattern={{version}},enable=false
            type=match,pattern=v(.*),group=1

      - name: Docker meta Hardware
        id: meta-hw
        uses: docker/metadata-action@v4
        with:
          images: alexxit/go2rtc
          flavor: |
            suffix=-hardware
            latest=false
          tags: |
            type=ref,event=branch
            type=semver,pattern={{version}},enable=false
            type=match,pattern=v(.*),group=1

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v2

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2

      - name: Login to DockerHub
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}

      - name: Build and push
        uses: docker/build-push-action@v3
        with:
          context: .
          platforms: |
            linux/amd64
            linux/386
            linux/arm/v7
            linux/arm64/v8
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}

      - name: Build and push Hardware
        uses: docker/build-push-action@v3
        with:
          context: .
          file: hardware.Dockerfile
          platforms: linux/amd64
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta-hw.outputs.tags }}
          labels: ${{ steps.meta-hw.outputs.labels }}
```
Dockerfile (new file, 55 lines)

```dockerfile
@@ -0,0 +1,55 @@
# 0. Prepare images
ARG PYTHON_VERSION="3.11"
ARG GO_VERSION="1.19"
ARG NGROK_VERSION="3"

FROM python:${PYTHON_VERSION}-alpine AS base
FROM golang:${GO_VERSION}-alpine AS go
FROM ngrok/ngrok:${NGROK_VERSION}-alpine AS ngrok


# 1. Build go2rtc binary
FROM go AS build

WORKDIR /build

# Cache dependencies
COPY go.mod go.sum ./
RUN go mod download

COPY . .
RUN CGO_ENABLED=0 go build -ldflags "-s -w" -trimpath


# 2. Collect all files
FROM scratch AS rootfs

COPY --from=build /build/go2rtc /usr/local/bin/
COPY --from=ngrok /bin/ngrok /usr/local/bin/
COPY ./build/docker/run.sh /


# 3. Final image
FROM base

# Install ffmpeg, bash (for run.sh), tini (for signal handling),
# and other common tools for the echo source.
RUN apk add --no-cache tini ffmpeg bash curl jq

# Hardware Acceleration for Intel CPU (+50MB)
ARG TARGETARCH

RUN if [ "${TARGETARCH}" = "amd64" ]; then apk add --no-cache libva-intel-driver intel-media-driver; fi

# Hardware: AMD and NVidia VAAPI (not sure about this)
# RUN libva-glx mesa-va-gallium
# Hardware: AMD and NVidia VDPAU (not sure about this)
# RUN libva-vdpau-driver mesa-vdpau-gallium (+150MB total)

COPY --from=rootfs / /

RUN chmod a+x /run.sh && mkdir -p /config

ENTRYPOINT ["/sbin/tini", "--"]

CMD ["/run.sh"]
```
README.md (53 lines)

````
@@ -6,7 +6,7 @@ Ultimate camera streaming application with support RTSP, WebRTC, HomeKit, FFmpeg

- zero-dependency and zero-config [small app](#go2rtc-binary) for all OS (Windows, macOS, Linux, ARM)
- zero-delay for many supported protocols (lowest possible streaming latency)
- streaming from [RTSP](#source-rtsp), [RTMP](#source-rtmp), [MJPEG](#source-ffmpeg), [HLS/HTTP](#source-ffmpeg), [USB Cameras](#source-ffmpeg-device) and [other sources](#module-streams)
- streaming from [RTSP](#source-rtsp), [RTMP](#source-rtmp), [HTTP](#source-http) (FLV/MJPEG/JPEG), [FFmpeg](#source-ffmpeg), [USB Cameras](#source-ffmpeg-device) and [other sources](#module-streams)
- streaming to [RTSP](#module-rtsp), [WebRTC](#module-webrtc), [MSE/MP4](#module-mp4) or [MJPEG](#module-mjpeg)
- first project in the World with support streaming from [HomeKit Cameras](#source-homekit)
- first project in the World with support H265 for WebRTC in browser ([read more](https://github.com/AlexxIT/Blog/issues/5))
@@ -50,13 +50,14 @@ Download binary for your OS from [latest release](https://github.com/AlexxIT/go2

- `go2rtc_win64.zip` - Windows 64-bit
- `go2rtc_win32.zip` - Windows 32-bit
- `go2rtc_win_arm64.zip` - Windows ARM 64-bit
- `go2rtc_linux_amd64` - Linux 64-bit
- `go2rtc_linux_i386` - Linux 32-bit
- `go2rtc_linux_arm64` - Linux ARM 64-bit (ex. Raspberry 64-bit OS)
- `go2rtc_linux_arm` - Linux ARM 32-bit (ex. Raspberry 32-bit OS)
- `go2rtc_linux_mipsel` - Linux on MIPS (ex. [Xiaomi Gateway 3](https://github.com/AlexxIT/XiaomiGateway3))
- `go2rtc_mac_amd64` - Mac with Intel
- `go2rtc_mac_arm64` - Mac with M1
- `go2rtc_linux_mipsel` - Linux MIPS (ex. [Xiaomi Gateway 3](https://github.com/AlexxIT/XiaomiGateway3))
- `go2rtc_mac_amd64.zip` - Mac Intel 64-bit
- `go2rtc_mac_arm64.zip` - Mac ARM 64-bit

Don't forget to fix the rights `chmod +x go2rtc_xxx_xxx` on Linux and Mac.

@@ -329,7 +330,34 @@ More cameras, like [Tuya](https://www.home-assistant.io/integrations/tuya/), [ONVIF

The HTTP API is the main part for interacting with the application. Default address: `http://127.0.0.1:1984/`.

- you can use WebRTC only when HTTP API enabled
go2rtc has its own JS video player (`video-rtc.js`) with:

- support technologies:
- WebRTC over UDP or TCP
- MSE or MP4 or MJPEG over WebSocket
- automatic selection best technology according on:
- codecs inside your stream
- current browser capabilities
- current network configuration
- automatic stop stream while browser or page not active
- automatic stop stream while player not inside page viewport
- automatic reconnection

Technology selection based on priorities:

1. Video and Audio better than just Video
2. H265 better than H264
3. WebRTC better than MSE, than MP4, than MJPEG

go2rtc has simple HTML page (`stream.html`) with support params in URL:

- multiple streams on page `src=camera1&src=camera2...`
- stream technology autoselection `mode=webrtc,mse,mp4,mjpeg`
- stream technology comparison `src=camera1&mode=webrtc&mode=mse&mode=mp4`
- player width setting in pixels `width=320px` or percents `width=50%`

**Module config**

- you can disable HTTP API with `listen: ""` and use, for example, only RTSP client/server protocol
- you can enable HTTP API only on localhost with `listen: "127.0.0.1:1984"` setting
- you can change API `base_path` and host go2rtc on your main app webserver suburl
@@ -337,16 +365,20 @@ The HTTP API is the main part for interacting with the application. Default addr

```yaml
api:
listen: ":1984" # HTTP API port ("" - disabled)
base_path: "/rtc" # API prefix for serve on suburl (/api => /rtc/api)
static_dir: "www" # folder for static files (custom web interface)
origin: "*" # allow CORS requests (only * supported)
listen: ":1984" # default ":1984", HTTP API port ("" - disabled)
base_path: "/rtc" # default "", API prefix for serve on suburl (/api => /rtc/api)
static_dir: "www" # default "", folder for static files (custom web interface)
origin: "*" # default "", allow CORS requests (only * supported)
```

**PS. go2rtc** doesn't provide HTTPS or password protection. Use [Nginx](https://nginx.org/) or [Ngrok](#module-ngrok) or [Home Assistant Add-on](#go2rtc-home-assistant-add-on) for this tasks.

**PS2.** You can access microphone (for 2-way audio) only with HTTPS ([read more](https://stackoverflow.com/questions/52759992/how-to-access-camera-and-microphone-in-chrome-without-https)).

**PS3.** MJPEG over WebSocket plays better than native MJPEG because Chrome [bug](https://bugs.chromium.org/p/chromium/issues/detail?id=527446).

**PS4.** MP4 over WebSocket was created only for Apple iOS because it doesn't support MSE and native MP4.

### Module: RTSP

You can get any stream as RTSP-stream: `rtsp://192.168.1.123:8554/{stream_name}`

@@ -501,7 +533,8 @@ View almost any Hass camera using `WebRTC` technology, supported codecs `H264`/`
When the stream starts - the camera `entity_id` will be added to go2rtc "on the fly". You don't need to add cameras manually to [go2rtc config](#configuration). Some cameras (like [Nest](https://www.home-assistant.io/integrations/nest/)) have a dynamic link to the stream, it will be updated each time a stream is started from the Hass interface.

1. Hass > Settings > Integrations > Add Integration > [RTSPtoWebRTC](https://my.home-assistant.io/redirect/config_flow_start/?domain=rtsp_to_webrtc) > `http://127.0.0.1:1984/`
2. Use Picture Entity or Picture Glance lovelace card
2. RTSPtoWebRTC > Configure > STUN server: `stun.l.google.com:19302`
3. Use Picture Entity or Picture Glance lovelace card

You can add camera `entity_id` to [go2rtc config](#configuration) if you need transcoding:
````
build/docker/run.sh (new file, 19 lines)

```bash
@@ -0,0 +1,19 @@
#!/bin/bash

set -euo pipefail

echo "Starting go2rtc..." >&2

readonly config_path="/config"

if [[ -x "${config_path}/go2rtc" ]]; then
  readonly binary_path="${config_path}/go2rtc"
  echo "Using go2rtc binary from '${binary_path}' instead of the embedded one" >&2
else
  readonly binary_path="/usr/local/bin/go2rtc"
fi

# set cwd for go2rtc (for config file, Hass integration, etc)
cd "${config_path}" || echo "Could not change working directory to '${config_path}'" >&2

exec "${binary_path}"
```
```dockerfile
@@ -1,41 +0,0 @@
ARG BUILD_FROM

FROM $BUILD_FROM as build

# 1. Build go2rtc
RUN apk add --no-cache git go

RUN git clone https://github.com/AlexxIT/go2rtc \
    && cd go2rtc \
    && CGO_ENABLED=0 go build -ldflags "-s -w" -trimpath

# 2. Download ngrok
ARG BUILD_ARCH

# https://github.com/home-assistant/docker-base/blob/master/alpine/Dockerfile
RUN if [ "${BUILD_ARCH}" = "aarch64" ]; then BUILD_ARCH="arm64"; \
    elif [ "${BUILD_ARCH}" = "armv7" ]; then BUILD_ARCH="arm"; fi \
    && cd go2rtc \
    && curl $(curl -s "https://raw.githubusercontent.com/ngrok/docker-ngrok/main/releases.json" | jq -r ".${BUILD_ARCH}.url") -o ngrok.zip \
    && unzip ngrok



# https://devopscube.com/reduce-docker-image-size/
FROM $BUILD_FROM

# 3. Copy go2rtc and ngrok to release
COPY --from=build /go2rtc/go2rtc /usr/local/bin
COPY --from=build /go2rtc/ngrok /usr/local/bin

# 4. Install ffmpeg
# apk base OK: 22 MiB in 40 packages
# ffmpeg OK: 113 MiB in 110 packages
# python3 OK: 161 MiB in 114 packages
RUN apk add --no-cache ffmpeg python3

# 5. Copy run to release
COPY run.sh /
RUN chmod a+x /run.sh

CMD [ "/run.sh" ]
```
```yaml
@@ -1,6 +0,0 @@
# https://github.com/home-assistant/builder/blob/master/builder.sh
name: go2rtc
description: Ultimate camera streaming application
url: https://github.com/AlexxIT/go2rtc
image: alexxit/go2rtc
arch: [ amd64, aarch64, i386, armv7 ]
```
```bash
@@ -1,14 +0,0 @@
#!/usr/bin/with-contenv bashio

set +e

# set cwd for go2rtc (for config file, Hass integration, etc)
cd /config

# add the feature to override go2rtc binary from Hass config folder
export PATH="/config:$PATH"

while true; do
    go2rtc
    sleep 5
done
```
```go
@@ -25,7 +25,7 @@ var wsHandlers = make(map[string]WSHandler)
func initWS(origin string) {
wsUp = &websocket.Upgrader{
ReadBufferSize: 1024,
WriteBufferSize: 512000,
WriteBufferSize: 2028,
}

switch origin {
```
```go
@@ -10,7 +10,7 @@ import (
"runtime"
)

var Version = "0.1-rc.5"
var Version = "0.1-rc.6"
var UserAgent = "go2rtc/" + Version

func Init() {
```
```go
@@ -1,6 +1,8 @@
package ffmpeg

import (
"bytes"
"errors"
"github.com/AlexxIT/go2rtc/cmd/app"
"github.com/AlexxIT/go2rtc/cmd/exec"
"github.com/AlexxIT/go2rtc/cmd/ffmpeg/device"
@@ -17,10 +19,29 @@ func Init() {
Mod map[string]string `yaml:"ffmpeg"`
}

// defaults
cfg.Mod = defaults // will be overriden from yaml

cfg.Mod = map[string]string{
app.LoadConfig(&cfg)

if app.GetLogger("exec").GetLevel() >= 0 {
defaults["global"] += " -v error"
}

streams.HandleFunc("ffmpeg", func(url string) (streamer.Producer, error) {
args := parseArgs(url[7:]) // remove `ffmpeg:`
if args == nil {
return nil, errors.New("can't generate ffmpeg command")
}
return exec.Handle("exec:" + args.String())
})

device.Bin = defaults["bin"]
device.Init()
}

var defaults = map[string]string{
"bin": "ffmpeg",
"global": "-hide_banner",

// inputs
"file": "-re -stream_loop -1 -i {input}",
@@ -30,16 +51,13 @@ func Init() {
// output
"output": "-user_agent ffmpeg/go2rtc -rtsp_transport tcp -f rtsp {output}",

// `-g 30` - group of picture, GOP, keyframe interval
// `-preset superfast` - we can't use ultrafast because it doesn't support `-profile main -level 4.1`
// `-tune zerolatency` - for minimal latency
// `-profile main -level 4.1` - most used streaming profile
// `-pix_fmt yuv420p` - if input pix format 4:2:2
"h264": "-c:v libx264 -g:v 30 -preset:v superfast -tune:v zerolatency -profile:v main -level:v 4.1 -pix_fmt:v yuv420p",
"h264/ultra": "-c:v libx264 -g:v 30 -preset:v ultrafast -tune:v zerolatency",
"h264/high": "-c:v libx264 -g:v 30 -preset:v superfast -tune:v zerolatency",
"h265": "-c:v libx265 -g:v 30 -preset:v superfast -tune:v zerolatency -profile:v main -level:v 5.1 -pix_fmt:v yuv420p",
// `-profile high -level 4.1` - most used streaming profile
"h264": "-c:v libx264 -g 50 -profile:v high -level:v 4.1 -preset:v superfast -tune:v zerolatency",
"h265": "-c:v libx265 -g 50 -profile:v high -level:v 5.1 -preset:v superfast -tune:v zerolatency",
"mjpeg": "-c:v mjpeg -force_duplicated_matrix:v 1 -huffman:v 0 -pix_fmt:v yuvj420p",

"opus": "-c:a libopus -ar:a 48000 -ac:a 2",
"pcmu": "-c:a pcm_mulaw -ar:a 8000 -ac:a 1",
"pcmu/16000": "-c:a pcm_mulaw -ar:a 16000 -ac:a 1",
@@ -49,156 +67,176 @@ func Init() {
"pcma/48000": "-c:a pcm_alaw -ar:a 48000 -ac:a 1",
"aac": "-c:a aac", // keep sample rate and channels
"aac/16000": "-c:a aac -ar:a 16000 -ac:a 1",

// hardware Intel and AMD on Linux
// better not to set `-async_depth:v 1` like for QSV, because framedrops
// `-bf 0` - disable B-frames is very important
"h264/vaapi": "-c:v h264_vaapi -g 50 -bf 0 -profile:v high -level:v 4.1 -sei:v 0",
"h265/vaapi": "-c:v hevc_vaapi -g 50 -bf 0 -profile:v high -level:v 5.1 -sei:v 0",
"mjpeg/vaapi": "-c:v mjpeg_vaapi",

// hardware Raspberry
"h264/v4l2m2m": "-c:v h264_v4l2m2m -g 50 -bf 0",
"h265/v4l2m2m": "-c:v hevc_v4l2m2m -g 50 -bf 0",

// hardware NVidia on Linux and Windows
// preset=p2 - faster, tune=ll - low latency
"h264/cuda": "-c:v h264_nvenc -g 50 -profile:v high -level:v auto -preset:v p2 -tune:v ll",
"h265/cuda": "-c:v hevc_nvenc -g 50 -profile:v high -level:v auto",

// hardware Intel on Windows
"h264/dxva2": "-c:v h264_qsv -g 50 -bf 0 -profile:v high -level:v 4.1 -async_depth:v 1",
"h265/dxva2": "-c:v hevc_qsv -g 50 -bf 0 -profile:v high -level:v 5.1 -async_depth:v 1",
"mjpeg/dxva2": "-c:v mjpeg_qsv -profile:v high -level:v 5.1",

// hardware macOS
"h264/videotoolbox": "-c:v h264_videotoolbox -g 50 -bf 0 -profile:v high -level:v 4.1",
"h265/videotoolbox": "-c:v hevc_videotoolbox -g 50 -bf 0 -profile:v high -level:v 5.1",
}

func parseArgs(s string) *Args {
// init FFmpeg arguments
args := &Args{
bin: defaults["bin"],
global: defaults["global"],
output: defaults["output"],
}

app.LoadConfig(&cfg)

tpl := cfg.Mod

cmd := "exec:" + tpl["bin"] + " -hide_banner "

if app.GetLogger("exec").GetLevel() >= 0 {
cmd += "-v error "
}

streams.HandleFunc("ffmpeg", func(s string) (streamer.Producer, error) {
s = s[7:] // remove `ffmpeg:`

var query url.Values
var queryVideo, queryAudio bool

if i := strings.IndexByte(s, '#'); i > 0 {
query = parseQuery(s[i+1:])
queryVideo = query["video"] != nil
queryAudio = query["audio"] != nil
args.video = len(query["video"])
args.audio = len(query["audio"])
s = s[:i]
} else {
// by default query both video and audio
queryVideo = true
queryAudio = true
}

var input string
// Parse input:
// 1. Input as xxxx:// link (http or rtsp or any other)
// 2. Input as stream name
// 3. Input as FFmpeg device (local USB camera)
if i := strings.Index(s, "://"); i > 0 {
switch s[:i] {
case "http", "https", "rtmp":
input = strings.Replace(tpl["http"], "{input}", s, 1)
args.input = strings.Replace(defaults["http"], "{input}", s, 1)
case "rtsp", "rtsps":
// https://ffmpeg.org/ffmpeg-protocols.html#rtsp
// skip unnecessary input tracks
switch {
case queryVideo && queryAudio:
input = "-allowed_media_types video+audio "
case queryVideo:
input = "-allowed_media_types video "
case queryAudio:
input = "-allowed_media_types audio "
case (args.video > 0 && args.audio > 0) || (args.video == 0 && args.audio == 0):
args.input = "-allowed_media_types video+audio "
case args.video > 0:
args.input = "-allowed_media_types video "
case args.audio > 0:
args.input = "-allowed_media_types audio "
}

input += strings.Replace(tpl["rtsp"], "{input}", s, 1)
args.input += strings.Replace(defaults["rtsp"], "{input}", s, 1)
default:
input = "-i " + s
args.input = "-i " + s
}
} else if streams.Get(s) != nil {
s = "rtsp://localhost:" + rtsp.Port + "/" + s
switch {
case queryVideo && !queryAudio:
case args.video > 0 && args.audio == 0:
s += "?video"
case queryAudio && !queryVideo:
case args.audio > 0 && args.video == 0:
s += "?audio"
}
input = strings.Replace(tpl["rtsp"], "{input}", s, 1)
args.input = strings.Replace(defaults["rtsp"], "{input}", s, 1)
} else if strings.HasPrefix(s, "device?") {
var err error
input, err = device.GetInput(s)
args.input, err = device.GetInput(s)
if err != nil {
return nil, err
return nil
}
} else {
input = strings.Replace(tpl["file"], "{input}", s, 1)
args.input = strings.Replace(defaults["file"], "{input}", s, 1)
}

if _, ok := query["async"]; ok {
input = "-use_wallclock_as_timestamps 1 -async 1 " + input
if query["async"] != nil {
args.input = "-use_wallclock_as_timestamps 1 -async 1 " + args.input
}

s = cmd + input

// Parse query params:
// 1. `width`/`height` params
// 2. `rotate` param
// 3. `video` params (support multiple)
// 4. `audio` params (support multiple)
// 5. `hardware` param
if query != nil {
// 1. Process raw params for FFmpeg
for _, raw := range query["raw"] {
s += " " + raw
args.AddCodec(raw)
}

for _, rotate := range query["rotate"] {
switch rotate {
// 2. Process video filters (resize and rotation)
if query["width"] != nil || query["height"] != nil {
filter := "scale="
if query["width"] != nil {
filter += query["width"][0]
} else {
filter += "-1"
}
filter += ":"
if query["height"] != nil {
filter += query["height"][0]
} else {
filter += "-1"
}
args.AddFilter(filter)
}

if query["rotate"] != nil {
var filter string
switch query["rotate"][0] {
case "90":
s += " -vf transpose=1" // 90 degrees clockwise
filter = "transpose=1" // 90 degrees clockwise
case "180":
s += " -vf transpose=1,transpose=1"
filter = "transpose=1,transpose=1"
case "-90", "270":
s += " -vf transpose=2" // 90 degrees counterclockwise
filter = "transpose=2" // 90 degrees counterclockwise
}
if filter != "" {
args.AddFilter(filter)
}
break
}

switch len(query["video"]) {
case 0:
s += " -vn"
case 1:
if len(query["audio"]) > 1 {
s += " -map 0:v:0?"
}
// 3. Process video codecs
if args.video > 0 {
for _, video := range query["video"] {
if video == "copy" {
s += " -c:v copy"
if video != "copy" {
args.AddCodec(defaults[video])
} else {
s += " " + tpl[video]
args.AddCodec("-c:v copy")
}
}
default:
for i, video := range query["video"] {
if video == "copy" {
s += " -map 0:v:0? -c:v:" + strconv.Itoa(i) + " copy"
} else {
s += " -map 0:v:0? " + strings.ReplaceAll(tpl[video], ":v ", ":v:"+strconv.Itoa(i)+" ")
}
}
args.AddCodec("-vn")
}

switch len(query["audio"]) {
case 0:
s += " -an"
case 1:
if len(query["video"]) > 1 {
s += " -map 0:a:0?"
}
// 4. Process audio codecs
if args.audio > 0 {
for _, audio := range query["audio"] {
if audio == "copy" {
s += " -c:a copy"
if audio != "copy" {
args.AddCodec(defaults[audio])
} else {
s += " " + tpl[audio]
}
}
default:
for i, audio := range query["audio"] {
if audio == "copy" {
s += " -map 0:a:0? -c:a:" + strconv.Itoa(i) + " copy"
} else {
s += " -map 0:a:0? " + strings.ReplaceAll(tpl[audio], ":a ", ":a:"+strconv.Itoa(i)+" ")
}
args.AddCodec("-c:a copy")
}
}
} else {
s += " -c copy"
args.AddCodec("-an")
}

s += " " + tpl["output"]
if query["hardware"] != nil {
MakeHardware(args, query["hardware"][0])
}
}

return exec.Handle(s)
})
if args.codecs == nil {
args.AddCodec("-c copy")
}

device.Bin = cfg.Mod["bin"]
device.Init()
return args
}

func parseQuery(s string) map[string][]string {
@@ -213,3 +251,76 @@ func parseQuery(s string) map[string][]string {
}
return query
}

type Args struct {
bin string // ffmpeg
global string // -hide_banner -v error
input string // -re -stream_loop -1 -i /media/bunny.mp4
codecs []string // -c:v libx264 -g:v 30 -preset:v ultrafast -tune:v zerolatency
filters []string // scale=1920:1080
output string // -f rtsp {output}

video, audio int // count of video and audio params
}

func (a *Args) AddCodec(codec string) {
a.codecs = append(a.codecs, codec)
}

func (a *Args) AddFilter(filter string) {
a.filters = append(a.filters, filter)
}

func (a *Args) InsertFilter(filter string) {
a.filters = append([]string{filter}, a.filters...)
}

func (a *Args) String() string {
b := bytes.NewBuffer(make([]byte, 0, 512))

b.WriteString(a.bin)

if a.global != "" {
b.WriteByte(' ')
b.WriteString(a.global)
}

b.WriteByte(' ')
b.WriteString(a.input)

multimode := a.video > 1 || a.audio > 1
var iv, ia int

for _, codec := range a.codecs {
// support multiple video and/or audio codecs
if multimode && len(codec) >= 5 {
switch codec[:5] {
case "-c:v ":
codec = "-map 0:v:0? " + strings.ReplaceAll(codec, ":v ", ":v:"+strconv.Itoa(iv)+" ")
iv++
case "-c:a ":
codec = "-map 0:a:0? " + strings.ReplaceAll(codec, ":a ", ":a:"+strconv.Itoa(ia)+" ")
ia++
}
}

b.WriteByte(' ')
b.WriteString(codec)
}

if a.filters != nil {
for i, filter := range a.filters {
if i == 0 {
b.WriteString(" -vf ")
} else {
b.WriteByte(',')
}
b.WriteString(filter)
}
}

b.WriteByte(' ')
b.WriteString(a.output)

return b.String()
}
```
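For orientation, here is a minimal standalone sketch of what the new `Args` builder assembles for a simple file source such as `ffmpeg:/media/bunny.mp4#video=h264`, using only the `bin`, `global`, `file`, `h264` and `output` templates shown in this diff. The multi-track `-map` rewriting, filters and `#hardware` handling from the real `parseArgs`/`String` above are omitted, and the `{output}` placeholder is left unresolved exactly as in the template:

```go
package main

import (
	"fmt"
	"strings"
)

// Values copied from the defaults map in the diff above.
var defaults = map[string]string{
	"bin":    "ffmpeg",
	"global": "-hide_banner",
	"file":   "-re -stream_loop -1 -i {input}",
	"h264":   "-c:v libx264 -g 50 -profile:v high -level:v 4.1 -preset:v superfast -tune:v zerolatency",
	"output": "-user_agent ffmpeg/go2rtc -rtsp_transport tcp -f rtsp {output}",
}

// args mirrors the single-track path of Args.String(): bin, global, input, codecs, output.
type args struct {
	bin, global, input, output string
	codecs                     []string
}

func (a *args) String() string {
	parts := append([]string{a.bin, a.global, a.input}, a.codecs...)
	return strings.Join(append(parts, a.output), " ")
}

func main() {
	a := &args{
		bin:    defaults["bin"],
		global: defaults["global"],
		input:  strings.Replace(defaults["file"], "{input}", "/media/bunny.mp4", 1),
		output: defaults["output"],
	}
	a.codecs = append(a.codecs, defaults["h264"]) // the source string had "#video=h264"

	// Prints one line:
	// ffmpeg -hide_banner -re -stream_loop -1 -i /media/bunny.mp4 -c:v libx264 -g 50
	//   -profile:v high -level:v 4.1 -preset:v superfast -tune:v zerolatency
	//   -user_agent ffmpeg/go2rtc -rtsp_transport tcp -f rtsp {output}
	fmt.Println(a.String())
}
```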
cmd/ffmpeg/hardware.go (new file, 112 lines)

```go
@@ -0,0 +1,112 @@
package ffmpeg

import (
	"github.com/rs/zerolog/log"
	"os/exec"
	"strings"
)

const (
	EngineSoftware     = "software"
	EngineVAAPI        = "vaapi"        // Intel iGPU and AMD GPU
	EngineV4L2M2M      = "v4l2m2m"      // Raspberry Pi 3 and 4
	EngineCUDA         = "cuda"         // NVidia on Windows and Linux
	EngineDXVA2        = "dxva2"        // Intel on Windows
	EngineVideoToolbox = "videotoolbox" // macOS
)

var cache = map[string]string{}

// MakeHardware converts software FFmpeg args to hardware args
// empty engine for autoselect
func MakeHardware(args *Args, engine string) {
	for i, codec := range args.codecs {
		if len(codec) < 12 {
			continue // skip short line (-c:v libx264...)
		}

		// get current codec name
		name := cut(codec, ' ', 1)
		switch name {
		case "libx264":
			name = "h264"
		case "libx265":
			name = "h265"
		case "mjpeg":
		default:
			continue // skip unsupported codec
		}

		// temporary disable probe for H265 and MJPEG
		if engine == "" && name == "h264" {
			if engine = cache[name]; engine == "" {
				engine = ProbeHardware(name)
				cache[name] = engine
			}
		}

		switch engine {
		case EngineVAAPI:
			args.input = "-hwaccel vaapi -hwaccel_output_format vaapi " + args.input
			args.codecs[i] = defaults[name+"/"+engine]

			for i, filter := range args.filters {
				if strings.HasPrefix(filter, "scale=") {
					args.filters[i] = "scale_vaapi=" + filter[6:]
				}
			}

			// fix if input doesn't support hwaccel, do nothing when support
			args.InsertFilter("format=vaapi|nv12,hwupload")

		case EngineCUDA:
			args.input = "-hwaccel cuda -hwaccel_output_format cuda -extra_hw_frames 2 " + args.input
			args.codecs[i] = defaults[name+"/"+engine]

			for i, filter := range args.filters {
				if strings.HasPrefix(filter, "scale=") {
					args.filters[i] = "scale_cuda=" + filter[6:]
				}
			}

		case EngineDXVA2:
			args.input = "-hwaccel dxva2 -hwaccel_output_format dxva2_vld " + args.input
			args.codecs[i] = defaults[name+"/"+engine]

			for i, filter := range args.filters {
				if strings.HasPrefix(filter, "scale=") {
					args.filters[i] = "scale_qsv=" + filter[6:]
				}
			}

			args.InsertFilter("hwmap=derive_device=qsv,format=qsv")

		case EngineVideoToolbox:
			args.input = "-hwaccel videotoolbox -hwaccel_output_format videotoolbox_vld " + args.input
			args.codecs[i] = defaults[name+"/"+engine]

		case EngineV4L2M2M:
			args.codecs[i] = defaults[name+"/"+engine]
		}
	}
}

func run(arg ...string) bool {
	err := exec.Command(defaults["bin"], arg...).Run()
	log.Printf("%v %v", arg, err)
	return err == nil
}

func cut(s string, sep byte, pos int) string {
	for n := 0; n < pos; n++ {
		if i := strings.IndexByte(s, sep); i > 0 {
			s = s[i+1:]
		} else {
			return ""
		}
	}
	if i := strings.IndexByte(s, sep); i > 0 {
		return s[:i]
	}
	return s
}
```
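As a quick reference, the `cut` helper above returns the field at position `pos` in a `sep`-separated string, which is how `MakeHardware` pulls the encoder name out of a codec argument. A standalone copy with a worked example:

```go
package main

import (
	"fmt"
	"strings"
)

// cut is copied from the new cmd/ffmpeg/hardware.go above.
func cut(s string, sep byte, pos int) string {
	for n := 0; n < pos; n++ {
		if i := strings.IndexByte(s, sep); i > 0 {
			s = s[i+1:]
		} else {
			return ""
		}
	}
	if i := strings.IndexByte(s, sep); i > 0 {
		return s[:i]
	}
	return s
}

func main() {
	codec := "-c:v libx264 -g 50 -preset:v superfast"
	fmt.Println(cut(codec, ' ', 1)) // "libx264", which MakeHardware then maps to "h264"
}
```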
cmd/ffmpeg/hardware_darwin.go (new file, 21 lines)

```go
@@ -0,0 +1,21 @@
package ffmpeg

func ProbeHardware(name string) string {
	switch name {
	case "h264":
		if run(
			"-f", "lavfi", "-i", "testsrc2", "-t", "1",
			"-c", "h264_videotoolbox", "-f", "null", "-") {
			return EngineVideoToolbox
		}

	case "h265":
		if run(
			"-f", "lavfi", "-i", "testsrc2", "-t", "1",
			"-c", "hevc_videotoolbox", "-f", "null", "-") {
			return EngineVideoToolbox
		}
	}

	return EngineSoftware
}
```
cmd/ffmpeg/hardware_linux.go (new file, 67 lines)

```go
@@ -0,0 +1,67 @@
package ffmpeg

import (
	"runtime"
)

func ProbeHardware(name string) string {
	if runtime.GOARCH == "arm64" || runtime.GOARCH == "arm" {
		switch name {
		case "h264":
			if run(
				"-f", "lavfi", "-i", "testsrc2", "-t", "1",
				"-c", "h264_v4l2m2m", "-f", "null", "-") {
				return EngineV4L2M2M
			}

		case "h265":
			if run(
				"-f", "lavfi", "-i", "testsrc2", "-t", "1",
				"-c", "hevc_v4l2m2m", "-f", "null", "-") {
				return EngineV4L2M2M
			}
		}

		return EngineSoftware
	}

	switch name {
	case "h264":
		if run("-init_hw_device", "cuda",
			"-f", "lavfi", "-i", "testsrc2", "-t", "1",
			"-c", "h264_nvenc", "-f", "null", "-") {
			return EngineCUDA
		}

		if run("-init_hw_device", "vaapi",
			"-f", "lavfi", "-i", "testsrc2", "-t", "1",
			"-vf", "format=nv12,hwupload",
			"-c", "h264_vaapi", "-f", "null", "-") {
			return EngineVAAPI
		}

	case "h265":
		if run("-init_hw_device", "cuda",
			"-f", "lavfi", "-i", "testsrc2", "-t", "1",
			"-c", "hevc_nvenc", "-f", "null", "-") {
			return EngineCUDA
		}

		if run("-init_hw_device", "vaapi",
			"-f", "lavfi", "-i", "testsrc2", "-t", "1",
			"-vf", "format=nv12,hwupload",
			"-c", "hevc_vaapi", "-f", "null", "-") {
			return EngineVAAPI
		}

	case "mjpeg":
		if run("-init_hw_device", "vaapi",
			"-f", "lavfi", "-i", "testsrc2", "-t", "1",
			"-vf", "format=nv12,hwupload",
			"-c", "mjpeg_vaapi", "-f", "null", "-") {
			return EngineVAAPI
		}
	}

	return EngineSoftware
}
```
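These probes simply ask FFmpeg to encode one second of the `testsrc2` test pattern with the candidate hardware encoder and treat a zero exit code as support. Roughly equivalent, as a standalone sketch that assumes an `ffmpeg` binary on PATH:

```go
package main

import (
	"fmt"
	"os/exec"
)

func main() {
	// Same arguments the VAAPI H264 probe in ProbeHardware passes to run().
	cmd := exec.Command("ffmpeg",
		"-init_hw_device", "vaapi",
		"-f", "lavfi", "-i", "testsrc2", "-t", "1",
		"-vf", "format=nv12,hwupload",
		"-c", "h264_vaapi", "-f", "null", "-")
	if err := cmd.Run(); err == nil {
		fmt.Println("vaapi") // EngineVAAPI would be selected
	} else {
		fmt.Println("software fallback:", err)
	}
}
```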
cmd/ffmpeg/hardware_windows.go (new file, 40 lines)

```go
@@ -0,0 +1,40 @@
package ffmpeg

func ProbeHardware(name string) string {
	switch name {
	case "h264":
		if run("-init_hw_device", "cuda",
			"-f", "lavfi", "-i", "testsrc2", "-t", "1",
			"-c", "h264_nvenc", "-f", "null", "-") {
			return EngineCUDA
		}

		if run("-init_hw_device", "dxva2",
			"-f", "lavfi", "-i", "testsrc2", "-t", "1",
			"-c", "h264_qsv", "-f", "null", "-") {
			return EngineDXVA2
		}

	case "h265":
		if run("-init_hw_device", "cuda",
			"-f", "lavfi", "-i", "testsrc2", "-t", "1",
			"-c", "hevc_nvenc", "-f", "null", "-") {
			return EngineCUDA
		}

		if run("-init_hw_device", "dxva2",
			"-f", "lavfi", "-i", "testsrc2", "-t", "1",
			"-c", "hevc_qsv", "-f", "null", "-") {
			return EngineDXVA2
		}

	case "mjpeg":
		if run("-init_hw_device", "dxva2",
			"-f", "lavfi", "-i", "testsrc2", "-t", "1",
			"-c", "mjpeg_qsv", "-f", "null", "-") {
			return EngineDXVA2
		}
	}

	return EngineSoftware
}
```
```go
@@ -121,6 +121,8 @@ func handlerWS(tr *api.Transport, _ *api.Message) error {
return err
}

tr.Write(&api.Message{Type: "mjpeg"})

tr.OnClose(func() {
stream.RemoveConsumer(cons)
})
```
```go
@@ -9,7 +9,7 @@ import (
"strings"
)

const packetSize = 8192
const packetSize = 1400

func handlerWSMSE(tr *api.Transport, msg *api.Message) error {
src := tr.Request.URL.Query().Get("src")
```
```go
@@ -86,6 +86,8 @@ func handlerWSMP4(tr *api.Transport, msg *api.Message) error {
return err
}

tr.Write(&api.Message{Type: "mp4", Value: cons.MimeType})

tr.OnClose(func() {
stream.RemoveConsumer(cons)
})
```
```go
@@ -24,6 +24,7 @@ type Producer struct {
template string

element streamer.Producer
lastErr error
tracks []*streamer.Track

state state
```
```go
@@ -45,10 +46,9 @@ func (p *Producer) GetMedias() []*streamer.Media {
if p.state == stateNone {
log.Debug().Msgf("[streams] probe producer url=%s", p.url)

var err error
p.element, err = GetProducer(p.url)
if err != nil || p.element == nil {
log.Error().Err(err).Caller().Send()
p.element, p.lastErr = GetProducer(p.url)
if p.lastErr != nil || p.element == nil {
log.Error().Err(p.lastErr).Caller().Send()
return nil
}
```
```go
@@ -99,11 +99,18 @@ func (s *Stream) AddConsumer(cons streamer.Consumer) (err error) {

if len(producers) == 0 {
s.stopProducers()

if len(codecs) > 0 {
return errors.New("codecs not match: " + codecs)
} else {
return fmt.Errorf("sources unavailable: %d", len(s.producers))
}

for i, producer := range s.producers {
if producer.lastErr != nil {
return fmt.Errorf("source %d error: %w", i, producer.lastErr)
}
}

return fmt.Errorf("sources unavailable: %d", len(s.producers))
}

s.mu.Lock()
```
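A side note on the new error reporting in `AddConsumer`: wrapping the producer's `lastErr` with `%w` keeps the original error inspectable by callers. A small sketch of that behavior (the error values here are hypothetical, not from go2rtc):

```go
package main

import (
	"errors"
	"fmt"
)

func main() {
	lastErr := errors.New("connection refused") // hypothetical producer error
	err := fmt.Errorf("source %d error: %w", 0, lastErr)

	fmt.Println(err)                           // "source 0 error: connection refused"
	fmt.Println(errors.Is(err, lastErr))       // true, thanks to %w
	fmt.Println(errors.Unwrap(err) == lastErr) // true
}
```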
```go
@@ -68,7 +68,7 @@ var NewPConn func() (*pion.PeerConnection, error)

func asyncHandler(tr *api.Transport, msg *api.Message) error {
src := tr.Request.URL.Query().Get("src")
stream := streams.Get(src)
stream := streams.GetOrNew(src)
if stream == nil {
return errors.New(api.StreamNotFound)
}
```
hardware.Dockerfile (new file, 52 lines)

```dockerfile
@@ -0,0 +1,52 @@
# 0. Prepare images
# only debian 12 (bookworm) has latest ffmpeg
ARG DEBIAN_VERSION="bookworm-slim"
ARG GO_VERSION="1.19-buster"
ARG NGROK_VERSION="3"

FROM debian:${DEBIAN_VERSION} AS base
FROM golang:${GO_VERSION} AS go
FROM ngrok/ngrok:${NGROK_VERSION} AS ngrok


# 1. Build go2rtc binary
FROM go AS build

WORKDIR /build

# Cache dependencies
COPY go.mod go.sum ./
RUN go mod download

COPY . .
RUN CGO_ENABLED=0 go build -ldflags "-s -w" -trimpath


# 2. Collect all files
FROM scratch AS rootfs

COPY --from=build /build/go2rtc /usr/local/bin/
COPY --from=ngrok /bin/ngrok /usr/local/bin/
COPY ./build/docker/run.sh /


# 3. Final image
FROM base

# Install ffmpeg, bash (for run.sh), tini (for signal handling),
# and other common tools for the echo source.
# non-free for Intel QSV support (not used by go2rtc, just for tests)
RUN echo 'deb http://deb.debian.org/debian bookworm non-free' > /etc/apt/sources.list.d/debian-non-free.list && \
    apt-get -y update && apt-get -y install tini ffmpeg python3 curl jq intel-media-va-driver-non-free

COPY --from=rootfs / /

RUN chmod a+x /run.sh && mkdir -p /config

ENTRYPOINT ["/usr/bin/tini", "--"]

# https://github.com/NVIDIA/nvidia-docker/wiki/Installation-(Native-GPU-Support)
ENV NVIDIA_VISIBLE_DEVICES all
ENV NVIDIA_DRIVER_CAPABILITIES compute,video,utility

CMD ["/run.sh"]
```
```go
@@ -36,6 +36,7 @@ func RTPDepay(track *streamer.Track) streamer.WrapperFunc {
}

if len(buf) == 0 {
for {
// Amcrest IP4M-1051: 9, 7, 8, 6, 28...
// Amcrest IP4M-1051: 9, 6, 1
switch NALUType(payload) {
@@ -52,10 +53,9 @@ func RTPDepay(track *streamer.Track) streamer.WrapperFunc {
}

payload = payload[i:]

if NALUType(payload) == NALUTypeIFrame {
buf = append(buf, ps...)
continue
}
break
}
}

```
```go
@@ -138,9 +138,9 @@ var chm_ac_symbols = []byte{
0xf9, 0xfa,
}

func MakeHeaders(t byte, w, h uint16, lqt, cqt []byte) []byte {
func MakeHeaders(p []byte, t byte, w, h uint16, lqt, cqt []byte) []byte {
// Appendix A from https://www.rfc-editor.org/rfc/rfc2435
p := []byte{0xFF, 0xD8}
p = append(p, 0xFF, 0xD8)

p = MakeQuantHeader(p, lqt, 0)
p = MakeQuantHeader(p, cqt, 1)
```
```go
@@ -6,7 +6,7 @@ import (
)

func RTPDepay(track *streamer.Track) streamer.WrapperFunc {
var header, payload []byte
buf := make([]byte, 0, 512*1024) // 512K

return func(push streamer.WriterFunc) streamer.WriterFunc {
return func(packet *rtp.Packet) error {
```
```go
@@ -25,7 +25,7 @@ func RTPDepay(track *streamer.Track) streamer.WrapperFunc {
b = b[8:]
}

if header == nil {
if len(buf) == 0 {
var lqt, cqt []byte

// 3.1.8. Quantization Table header
```
```go
@@ -49,26 +49,26 @@ func RTPDepay(track *streamer.Track) streamer.WrapperFunc {
}

//fmt.Printf("t: %d, q: %d, w: %d, h: %d\n", t, q, w, h)
header = MakeHeaders(t, w, h, lqt, cqt)
buf = MakeHeaders(buf, t, w, h, lqt, cqt)
}

// 3.1.9. JPEG Payload
payload = append(payload, b...)
buf = append(buf, b...)

if !packet.Marker {
return nil
}

b = append(header, payload...)
if end := b[len(b)-2:]; end[0] != 0xFF && end[1] != 0xD9 {
b = append(b, 0xFF, 0xD9)
if end := buf[len(buf)-2:]; end[0] != 0xFF && end[1] != 0xD9 {
buf = append(buf, 0xFF, 0xD9)
}

header = nil
payload = nil
clone := *packet
clone.Payload = buf

packet.Payload = b
return push(packet)
buf = buf[:0] // clear buffer

return push(&clone)
}
}
}
```
```bat
@@ -3,17 +3,17 @@
@SET GOOS=windows
@SET GOARCH=amd64
@SET FILENAME=go2rtc_win64.zip
go build -ldflags "-s -w" -trimpath && 7z a -sdel %FILENAME% go2rtc.exe
go build -ldflags "-s -w" -trimpath && 7z a -mx9 -sdel %FILENAME% go2rtc.exe

@SET GOOS=windows
@SET GOARCH=386
@SET FILENAME=go2rtc_win32.zip
go build -ldflags "-s -w" -trimpath && 7z a -sdel %FILENAME% go2rtc.exe
go build -ldflags "-s -w" -trimpath && 7z a -mx9 -sdel %FILENAME% go2rtc.exe

@SET GOOS=windows
@SET GOARCH=arm64
@SET FILENAME=go2rtc_win_arm64.zip
go build -ldflags "-s -w" -trimpath && 7z a -sdel %FILENAME% go2rtc.exe
go build -ldflags "-s -w" -trimpath && 7z a -mx9 -sdel %FILENAME% go2rtc.exe

@SET GOOS=linux
@SET GOARCH=amd64
@@ -44,9 +44,9 @@ go build -ldflags "-s -w" -trimpath -o %FILENAME% && upx %FILENAME%
@SET GOOS=darwin
@SET GOARCH=amd64
@SET FILENAME=go2rtc_mac_amd64.zip
go build -ldflags "-s -w" -trimpath && 7z a -sdel %FILENAME% go2rtc
go build -ldflags "-s -w" -trimpath && 7z a -mx9 -sdel %FILENAME% go2rtc

@SET GOOS=darwin
@SET GOARCH=arm64
@SET FILENAME=go2rtc_mac_arm64.zip
go build -ldflags "-s -w" -trimpath && 7z a -sdel %FILENAME% go2rtc
go build -ldflags "-s -w" -trimpath && 7z a -mx9 -sdel %FILENAME% go2rtc
```
```html
@@ -3,13 +3,13 @@
<head>
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>go2rtc - Stream</title>
<script src="video-rtc.js"></script>
<style>
body {
background: black;
margin: 0;
padding: 0;
display: flex;
font-family: Arial, Helvetica, sans-serif;
}

html, body {
@@ -25,7 +25,8 @@
</style>
</head>
<body>
<script>
<script type="module" src="./video-stream.js"></script>
<script type="module">
const params = new URLSearchParams(location.search);

// support multiple streams and multiple modes
@@ -44,16 +45,16 @@
document.body.className = "flex";
}

const background = params.get("background") === "true";
const background = params.get("background") !== "false";
const width = "1 0 " + (params.get("width") || "320px");

for (let i = 0; i < streams.length; i++) {
/** @type {VideoRTC} */
const video = document.createElement("video-rtc");
/** @type {VideoStream} */
const video = document.createElement("video-stream");
video.background = background;
video.mode = modes[i] || video.mode;
video.style.flex = width;
video.src = new URL("api/ws?src=" + streams[i], location.href);
video.src = new URL("api/ws?src=" + encodeURIComponent(streams[i]), location.href);
document.body.appendChild(video);
}
</script>
```
www/video-rtc.js (343 lines)

```js
@@ -1,5 +1,5 @@
/**
 * Video player for MSE and WebRTC connections.
 * Video player for go2rtc streaming application.
 *
 * All modern web technologies are supported in almost any browser except Apple Safari.
 *
@@ -12,7 +12,7 @@
 * - Customized built-in elements (extends HTMLVideoElement) because all Safari
 * - Public class fields because old Safari (before 14.0)
 */
class VideoRTC extends HTMLElement {
export class VideoRTC extends HTMLElement {
constructor() {
super();

@@ -31,32 +31,50 @@ class VideoRTC extends HTMLElement {
];

/**
 * Supported modes (webrtc, mse, mp4, mjpeg).
 * [config] Supported modes (webrtc, mse, mp4, mjpeg).
 * @type {string}
 */
this.mode = "webrtc,mse,mp4,mjpeg";

/**
 * Run stream when not displayed on the screen. Default `false`.
 * [config] Run stream when not displayed on the screen. Default `false`.
 * @type {boolean}
 */
this.background = false;

/**
 * Run stream only when player in the viewport. Stop when user scroll out player.
 * [config] Run stream only when player in the viewport. Stop when user scroll out player.
 * Value is percentage of visibility from `0` (not visible) to `1` (full visible).
 * Default `0` - disable;
 * @type {number}
 */
this.intersectionThreshold = 0;
this.visibilityThreshold = 0;

/**
 * Run stream only when browser page on the screen. Stop when user change browser
 * [config] Run stream only when browser page on the screen. Stop when user change browser
 * tab or minimise browser windows.
 * @type {boolean}
 */
this.visibilityCheck = true;

/**
 * [config] WebRTC configuration
 * @type {RTCConfiguration}
 */
this.pcConfig = {iceServers: [{urls: "stun:stun.l.google.com:19302"}]};

/**
 * [info] WebSocket connection state. Values: CONNECTING, OPEN, CLOSED
 * @type {number}
 */
this.wsState = WebSocket.CLOSED;

/**
 * [info] WebRTC connection state.
 * @type {number}
 */
this.pcState = WebSocket.CLOSED;

/**
 * @type {HTMLVideoElement}
 */
@@ -68,16 +86,9 @@ class VideoRTC extends HTMLElement {
this.ws = null;

/**
 * Internal WebSocket connection state. Values: CONNECTING, OPEN, CLOSED
 * @type {number}
 */
this.wsState = WebSocket.CLOSED;

/**
 * Internal WebSocket URL.
 * @type {string|URL}
 */
this.url = "";
this.wsURL = "";

/**
 * @type {RTCPeerConnection}
@@ -87,37 +98,38 @@ class VideoRTC extends HTMLElement {
/**
 * @type {number}
 */
this.pcState = WebSocket.CLOSED;

this.pcConfig = {iceServers: [{urls: "stun:stun.l.google.com:19302"}]};
this.connectTS = 0;

/**
 * Internal disconnect TimeoutID.
 * @type {string}
 */
this.mseCodecs = "";

/**
 * [internal] Disconnect TimeoutID.
 * @type {number}
 */
this.disconnectTimeout = 0;
this.disconnectTID = 0;

/**
 * Internal reconnect TimeoutID.
 * [internal] Reconnect TimeoutID.
 * @type {number}
 */
this.reconnectTimeout = 0;
this.reconnectTID = 0;

/**
 * Handler for receiving Binary from WebSocket
 * [internal] Handler for receiving Binary from WebSocket.
 * @type {Function}
 */
this.ondata = null;

/**
 * Handlers list for receiving JSON from WebSocket
 * [internal] Handlers list for receiving JSON from WebSocket
 * @type {Object.<string,Function>}}
 */
this.onmessage = null;
}

/** public properties **/

/**
 * Set video source (WebSocket URL). Support relative path.
 * @param {string|URL} value
@@ -130,9 +142,9 @@ class VideoRTC extends HTMLElement {
value = "ws" + location.origin.substring(4) + value;
}

this.url = value;
this.wsURL = value;

if (this.isConnected) this.connectedCallback();
this.onconnect();
}

/**
@@ -156,10 +168,6 @@ class VideoRTC extends HTMLElement {
if (this.ws) this.ws.send(JSON.stringify(value));
}

get closed() {
return this.wsState === WebSocket.CLOSED && this.pcState === WebSocket.CLOSED;
}

codecs(type) {
const test = type === "mse"
? codec => MediaSource.isTypeSupported(`video/mp4; codecs="${codec}"`)
@@ -172,11 +180,9 @@ class VideoRTC extends HTMLElement {
 * document-connected element.
 */
connectedCallback() {
console.debug("VideoRTC.connectedCallback", this.wsState, this.pcState);

if (this.disconnectTimeout) {
clearTimeout(this.disconnectTimeout);
this.disconnectTimeout = 0;
if (this.disconnectTID) {
clearTimeout(this.disconnectTID);
this.disconnectTID = 0;
}

// because video autopause on disconnected from DOM
@@ -186,15 +192,11 @@ class VideoRTC extends HTMLElement {
this.video.currentTime = seek.end(seek.length - 1);
this.play();
}
} else {
this.oninit();
}

if (!this.url || !this.closed) return;

// CLOSED => CONNECTING
this.wsState = WebSocket.CONNECTING;

this.internalInit();
this.internalWS();
this.onconnect();
}

/**
@@ -202,35 +204,25 @@ class VideoRTC extends HTMLElement {
 * document's DOM.
 */
disconnectedCallback() {
console.debug("VideoRTC.disconnectedCallback", this.wsState, this.pcState);
if (this.background || this.disconnectTID) return;
if (this.wsState === WebSocket.CLOSED && this.pcState === WebSocket.CLOSED) return;

if (this.background || this.disconnectTimeout || this.closed) return;

this.disconnectTimeout = setTimeout(() => {
if (this.reconnectTimeout) {
clearTimeout(this.reconnectTimeout);
this.reconnectTimeout = 0;
this.disconnectTID = setTimeout(() => {
if (this.reconnectTID) {
clearTimeout(this.reconnectTID);
this.reconnectTID = 0;
}

this.disconnectTimeout = 0;
this.disconnectTID = 0;

this.wsState = WebSocket.CLOSED;
if (this.ws) {
this.ws.close();
this.ws = null;
}

this.pcState = WebSocket.CLOSED;
if (this.pc) {
this.pc.close();
this.pc = null;
}
this.ondisconnect();
}, this.DISCONNECT_TIMEOUT);
}

internalInit() {
if (this.childElementCount) return;

/**
 * Creates child DOM elements. Called automatically once on `connectedCallback`.
 */
oninit() {
this.video = document.createElement("video");
this.video.controls = true;
this.video.playsInline = true;
@@ -258,7 +250,7 @@ class VideoRTC extends HTMLElement {
})
}

if ("IntersectionObserver" in window && this.intersectionThreshold) {
if ("IntersectionObserver" in window && this.visibilityThreshold) {
const observer = new IntersectionObserver(entries => {
entries.forEach(entry => {
if (!entry.isIntersecting) {
@@ -267,23 +259,49 @@ class VideoRTC extends HTMLElement {
this.connectedCallback();
}
});
}, {threshold: this.intersectionThreshold});
}, {threshold: this.visibilityThreshold});
observer.observe(this);
}
}

internalWS() {
if (this.wsState !== WebSocket.CONNECTING) return;
if (this.ws) throw "connect with non null WebSocket";
/**
 * Connect to WebSocket. Called automatically on `connectedCallback`.
 * @return {boolean} true if the connection has started.
 */
onconnect() {
if (!this.isConnected || !this.wsURL || this.ws || this.pc) return false;

const ts = Date.now();
// CLOSED or CONNECTING => CONNECTING
this.wsState = WebSocket.CONNECTING;

this.ws = new WebSocket(this.url);
this.connectTS = Date.now();

this.ws = new WebSocket(this.wsURL);
this.ws.binaryType = "arraybuffer";
this.ws.addEventListener("open", ev => this.onopen(ev));
this.ws.addEventListener("close", ev => this.onclose(ev));

this.ws.addEventListener("open", () => {
console.debug("VideoRTC.ws.open", this.wsState);
return true;
}

ondisconnect() {
this.wsState = WebSocket.CLOSED;
if (this.ws) {
this.ws.close();
this.ws = null;
}

this.pcState = WebSocket.CLOSED;
if (this.pc) {
this.pc.close();
this.pc = null;
}
}

/**
 * @returns {Array.<string>} of modes (mse, webrtc, etc.)
 */
onopen() {
// CONNECTING => OPEN
this.wsState = WebSocket.OPEN;

@@ -301,58 +319,60 @@ class VideoRTC extends HTMLElement {
this.ondata = null;
this.onmessage = {};

let firstMode = "";
const modes = [];

if (this.mode.indexOf("mse") >= 0 && "MediaSource" in window) { // iPhone
firstMode ||= "mse";
this.internalMSE();
modes.push("mse");
this.onmse();
} else if (this.mode.indexOf("mp4") >= 0) {
firstMode ||= "mp4";
this.internalMP4();
modes.push("mp4");
this.onmp4();
}

if (this.mode.indexOf("webrtc") >= 0 && "RTCPeerConnection" in window) { // macOS Desktop app
firstMode ||= "webrtc";
this.internalRTC();
modes.push("webrtc");
this.onwebrtc();
}

if (this.mode.indexOf("mjpeg") >= 0) {
if (firstMode) {
if (modes.length) {
this.onmessage["mjpeg"] = msg => {
if (msg.type !== "error" || msg.value.indexOf(firstMode) !== 0) return;
this.internalMJPEG();
if (msg.type !== "error" || msg.value.indexOf(modes[0]) !== 0) return;
this.onmjpeg();
}
} else {
this.internalMJPEG();
modes.push("mjpeg");
this.onmjpeg();
}
}
});

this.ws.addEventListener("close", () => {
console.debug("VideoRTC.ws.close", this.wsState);
return modes;
}

if (this.wsState === WebSocket.CLOSED) return;
/**
 * @return {boolean} true if reconnection has started.
 */
onclose() {
if (this.wsState === WebSocket.CLOSED) return false;

// CONNECTING, OPEN => CONNECTING
this.wsState = WebSocket.CONNECTING;
this.ws = null;

// reconnect no more than once every X seconds
const delay = Math.max(this.RECONNECT_TIMEOUT - (Date.now() - ts), 0);
const delay = Math.max(this.RECONNECT_TIMEOUT - (Date.now() - this.connectTS), 0);

this.reconnectTimeout = setTimeout(() => {
this.reconnectTimeout = 0;
this.internalWS();
this.reconnectTID = setTimeout(() => {
this.reconnectTID = 0;
this.onconnect();
}, delay);
});

return true;
}

internalMSE() {
console.debug("VideoRTC.internalMSE");

onmse() {
const ms = new MediaSource();
ms.addEventListener("sourceopen", () => {
console.debug("VideoRTC.ms.sourceopen");
URL.revokeObjectURL(this.video.src);
this.send({type: "mse", value: this.codecs("mse")});
}, {once: true});
@@ -361,9 +381,13 @@ class VideoRTC extends HTMLElement {
this.video.srcObject = null;
this.play();

this.mseCodecs = "";

this.onmessage["mse"] = msg => {
if (msg.type !== "mse") return;

this.mseCodecs = msg.value;

const sb = ms.addSourceBuffer(msg.value);
sb.mode = "segments"; // segments or sequence
sb.addEventListener("updateend", () => {
@@ -372,10 +396,10 @@ class VideoRTC extends HTMLElement {
try {
sb.appendBuffer(buf.slice(0, bufLen));
} catch (e) {
console.debug(e);
// console.debug(e);
}
bufLen = 0;
} else if (sb.buffered.length) {
} else if (sb.buffered && sb.buffered.length) {
const end = sb.buffered.end(sb.buffered.length - 1) - 5;
const start = sb.buffered.start(0);
if (end > start) {
@@ -406,55 +430,12 @@ class VideoRTC extends HTMLElement {
}
}

internalRTC() {
console.debug("VideoRTC.internalRTC");

onwebrtc() {
const pc = new RTCPeerConnection(this.pcConfig);

let mseCodecs = "";

/** @type {HTMLVideoElement} */
const video2 = document.createElement("video");
video2.addEventListener("loadeddata", () => {
console.debug("VideoRTC.video.loadeddata", video2.readyState, pc.connectionState);

if (pc.connectionState === "connected" || pc.connectionState === "connecting") {
// Video+Audio > Video, H265 > H264, Video > Audio, WebRTC > MSE
let rtcPriority = 0, msePriority = 0;

/** @type {MediaStream} */
const rtc = video2.srcObject;
if (rtc.getVideoTracks().length > 0) rtcPriority += 0x220;
```
|
||||
if (rtc.getAudioTracks().length > 0) rtcPriority += 0x102;
|
||||
|
||||
if (mseCodecs.indexOf("hvc1.") >= 0) msePriority += 0x230;
|
||||
if (mseCodecs.indexOf("avc1.") >= 0) msePriority += 0x210;
|
||||
if (mseCodecs.indexOf("mp4a.") >= 0) msePriority += 0x101;
|
||||
|
||||
if (rtcPriority >= msePriority) {
|
||||
console.debug("VideoRTC.select RTC mode", rtcPriority, msePriority);
|
||||
|
||||
this.video.controls = true;
|
||||
this.video.srcObject = rtc;
|
||||
this.play();
|
||||
|
||||
this.pcState = WebSocket.OPEN;
|
||||
|
||||
this.wsState = WebSocket.CLOSED;
|
||||
this.ws.close();
|
||||
this.ws = null;
|
||||
} else {
|
||||
console.debug("VideoRTC.select MSE mode", rtcPriority, msePriority);
|
||||
|
||||
pc.close();
|
||||
|
||||
this.pcState = WebSocket.CLOSED;
|
||||
this.pc = null;
|
||||
}
|
||||
}
|
||||
|
||||
video2.srcObject = null;
|
||||
}, {once: true});
|
||||
video2.addEventListener("loadeddata", ev => this.onpcvideo(ev), {once: true});
|
||||
|
||||
pc.addEventListener("icecandidate", ev => {
|
||||
const candidate = ev.candidate ? ev.candidate.toJSON().candidate : "";
|
||||
@@ -462,8 +443,6 @@ class VideoRTC extends HTMLElement {
|
||||
});
|
||||
|
||||
pc.addEventListener("track", ev => {
|
||||
console.debug("VideoRTC.pc.track", ev.streams.length);
|
||||
|
||||
// when stream already init
|
||||
if (video2.srcObject !== null) return;
|
||||
|
||||
@@ -477,30 +456,29 @@ class VideoRTC extends HTMLElement {
|
||||
});
|
||||
|
||||
pc.addEventListener("connectionstatechange", () => {
|
||||
console.debug("VideoRTC.pc.connectionstatechange", this.pc.connectionState);
|
||||
|
||||
if (pc.connectionState === "failed" || pc.connectionState === "disconnected") {
|
||||
pc.close(); // stop next events
|
||||
|
||||
this.pcState = WebSocket.CLOSED;
|
||||
this.pc = null;
|
||||
|
||||
if (this.wsState === WebSocket.CLOSED && this.isConnected) {
|
||||
this.connectedCallback();
|
||||
}
|
||||
this.onconnect();
|
||||
}
|
||||
});
|
||||
|
||||
this.onmessage["webrtc"] = msg => {
|
||||
switch (msg.type) {
|
||||
case "webrtc/candidate":
|
||||
pc.addIceCandidate({candidate: msg.value, sdpMid: ""}).catch(() => console.debug);
|
||||
pc.addIceCandidate({
|
||||
candidate: msg.value,
|
||||
sdpMid: "0"
|
||||
}).catch(() => console.debug);
|
||||
break;
|
||||
case "webrtc/answer":
|
||||
pc.setRemoteDescription({type: "answer", sdp: msg.value}).catch(() => console.debug);
|
||||
break;
|
||||
case "mse":
|
||||
mseCodecs = msg.value;
|
||||
pc.setRemoteDescription({
|
||||
type: "answer",
|
||||
sdp: msg.value
|
||||
}).catch(() => console.debug);
|
||||
break;
|
||||
case "error":
|
||||
if (msg.value.indexOf("webrtc/offer") < 0) return;
|
||||
@@ -522,9 +500,48 @@ class VideoRTC extends HTMLElement {
|
||||
this.pc = pc;
|
||||
}
|
||||
|
||||
internalMJPEG() {
|
||||
console.debug("VideoRTC.internalMJPEG");
|
||||
/**
|
||||
* @param ev {Event}
|
||||
*/
|
||||
onpcvideo(ev) {
|
||||
/** @type {HTMLVideoElement} */
|
||||
const video2 = ev.target;
|
||||
const state = this.pc.connectionState;
|
||||
|
||||
// Firefox doesn't support pc.connectionState
|
||||
if (state === "connected" || state === "connecting" || !state) {
|
||||
// Video+Audio > Video, H265 > H264, Video > Audio, WebRTC > MSE
|
||||
let rtcPriority = 0, msePriority = 0;
|
||||
|
||||
/** @type {MediaStream} */
|
||||
const ms = video2.srcObject;
|
||||
if (ms.getVideoTracks().length > 0) rtcPriority += 0x220;
|
||||
if (ms.getAudioTracks().length > 0) rtcPriority += 0x102;
|
||||
|
||||
if (this.mseCodecs.indexOf("hvc1.") >= 0) msePriority += 0x230;
|
||||
if (this.mseCodecs.indexOf("avc1.") >= 0) msePriority += 0x210;
|
||||
if (this.mseCodecs.indexOf("mp4a.") >= 0) msePriority += 0x101;
|
||||
|
||||
if (rtcPriority >= msePriority) {
|
||||
this.video.srcObject = ms;
|
||||
this.play();
|
||||
|
||||
this.pcState = WebSocket.OPEN;
|
||||
|
||||
this.wsState = WebSocket.CLOSED;
|
||||
this.ws.close();
|
||||
this.ws = null;
|
||||
} else {
|
||||
this.pcState = WebSocket.CLOSED;
|
||||
this.pc.close();
|
||||
this.pc = null;
|
||||
}
|
||||
}
|
||||
|
||||
video2.srcObject = null;
|
||||
}
|
||||
|
||||
onmjpeg() {
|
||||
this.ondata = data => {
|
||||
this.video.poster = "data:image/jpeg;base64," + VideoRTC.btoa(data);
|
||||
};
|
||||
@@ -533,9 +550,7 @@ class VideoRTC extends HTMLElement {
|
||||
this.video.controls = false;
|
||||
}
|
||||
|
||||
internalMP4() {
|
||||
console.debug("VideoRTC.internalMP4");
|
||||
|
||||
onmp4() {
|
||||
/** @type {HTMLVideoElement} */
|
||||
let video2;
|
||||
|
||||
@@ -580,5 +595,3 @@ class VideoRTC extends HTMLElement {
|
||||
return window.btoa(binary);
|
||||
}
|
||||
}
|
||||
|
||||
customElements.define("video-rtc", VideoRTC);
|
||||
|
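For orientation only (not part of the diff): the WebSocket frames consumed by the handlers above — onmessage["mse"], onmessage["webrtc"] and the MJPEG error fallback registered in onopen() — have roughly the shapes sketched below. The payload values are hypothetical examples; only the field names and the way each frame is used follow from the code.

// Illustrative server-to-client signaling frames (hypothetical values).
const exampleFrames = [
    // onmessage["mse"]: the value is a MIME/codec string passed straight to ms.addSourceBuffer()
    {type: "mse", value: 'video/mp4; codecs="avc1.640029,mp4a.40.2"'},
    // onmessage["webrtc"]: SDP answer applied via pc.setRemoteDescription({type: "answer", sdp: msg.value})
    {type: "webrtc/answer", value: "v=0\r\no=- 0 0 IN IP4 0.0.0.0\r\n..."},
    // trickled ICE candidate applied via pc.addIceCandidate({candidate: msg.value, sdpMid: "0"})
    {type: "webrtc/candidate", value: "candidate:1 1 UDP 2122252543 192.168.1.10 53421 typ host"},
    // an error whose value starts with the first active mode triggers the MJPEG fallback
    {type: "error", value: "webrtc/offer: connection refused"},
];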
85 www/video-stream.js Normal file
@@ -0,0 +1,85 @@
import {VideoRTC} from "./video-rtc.js";

class VideoStream extends VideoRTC {
constructor() {
super();

/** @type {HTMLDivElement} */
this.divMode = null;
/** @type {HTMLDivElement} */
this.divStatus = null;
}

/**
* Custom GUI
*/
oninit() {
super.oninit();

this.innerHTML = `
<style>
.info {
position: absolute;
top: 0;
left: 0;
right: 0;
padding: 12px;
color: white;
display: flex;
justify-content: space-between;
pointer-events: none;
}
</style>
<div class="info">
<div class="status"></div>
<div class="mode"></div>
</div>
`;

this.divStatus = this.querySelector(".status");
this.divMode = this.querySelector(".mode");

const info = this.querySelector(".info")
this.insertBefore(this.video, info);
}

onconnect() {
const result = super.onconnect();
if (result) {
this.divMode.innerText = "loading";
}
return result;
}

onopen() {
const result = super.onopen();

this.onmessage["stream"] = msg => {
switch (msg.type) {
case "error":
this.divMode.innerText = "error";
this.divStatus.innerText = msg.value;
break;
case "mse":
case "mp4":
case "mjpeg":
this.divMode.innerText = msg.type.toUpperCase();
this.divStatus.innerText = "";
break;
}
}

return result;
}

onpcvideo(ev) {
super.onpcvideo(ev);

if (this.pcState !== WebSocket.CLOSED) {
this.divMode.innerText = "RTC";
this.divStatus.innerText = "";
}
}
}

customElements.define("video-stream", VideoStream);
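A minimal usage sketch of the new element (again, not part of the diff). It assumes the page sets wsURL and mode directly before attaching the element, since onconnect() above reads both once connectedCallback fires; the WebSocket endpoint shown is a hypothetical go2rtc URL.

// Hypothetical page-side usage of <video-stream>; the endpoint and stream name are placeholders.
import "./video-stream.js"; // side-effect import: registers the <video-stream> custom element

const player = document.createElement("video-stream");
player.wsURL = "ws://192.168.1.10:1984/api/ws?src=camera1"; // assumed go2rtc WebSocket API path
player.mode = "webrtc,mse,mp4,mjpeg"; // any string containing the modes probed in onopen()
document.body.appendChild(player); // connectedCallback -> onconnect() -> "loading" badge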
@@ -48,7 +48,7 @@
console.debug('ws.onmessage', msg);

if (msg.type === 'webrtc/candidate') {
pc.addIceCandidate({candidate: msg.value, sdpMid: ''});
pc.addIceCandidate({candidate: msg.value, sdpMid: '0'});
} else if (msg.type === 'webrtc/answer') {
pc.setRemoteDescription({type: 'answer', sdp: msg.value});
}