diff --git a/docs/docs/configuration/index.md b/docs/docs/configuration/index.md index fa3a6f2fa..6c31ca514 100644 --- a/docs/docs/configuration/index.md +++ b/docs/docs/configuration/index.md @@ -356,6 +356,9 @@ restream: enabled: True # Optional: Force audio compatibility with browsers (default: shown below) force_audio: True + # Optional: Restream birdseye via RTSP (default: shown below) + # NOTE: Enabling this will set birdseye to run 24/7 which may increase CPU usage somewhat. + birdseye: False # Optional: jsmpeg stream configuration for WebUI jsmpeg: # Optional: Set the height of the jsmpeg stream. (default: 720) diff --git a/docs/docs/configuration/live.md b/docs/docs/configuration/live.md index e537c4ff1..a6c48ce51 100644 --- a/docs/docs/configuration/live.md +++ b/docs/docs/configuration/live.md @@ -12,7 +12,7 @@ Live view options can be selected while viewing the live stream. The options are | Source | Latency | Frame Rate | Resolution | Audio | Requires Restream | Other Limitations | | ------ | ------- | -------------------------------------- | -------------- | ---------------------------- | ----------------- | --------------------- | | jsmpeg | low | same as `detect -> fps`, capped at 10 | same as detect | no | no | none | -| mse | low | native | native | yes (depends on audio codec) | yes | none | +| mse | low | native | native | yes (depends on audio codec) | yes | not supported on iOS | | webrtc | lowest | native | native | yes (depends on audio codec) | yes | requires extra config | ### WebRTC extra configuration: diff --git a/docs/docs/configuration/restream.md b/docs/docs/configuration/restream.md index 1f21e8fdc..2a43616db 100644 --- a/docs/docs/configuration/restream.md +++ b/docs/docs/configuration/restream.md @@ -7,6 +7,14 @@ title: Restream Frigate can restream your video feed as an RTSP feed for other applications such as Home Assistant to utilize it at `rtsp://:8554/`. Port 8554 must be open. 
[This allows you to use a video feed for detection in frigate and Home Assistant live view at the same time without having to make two separate connections to the camera](#reduce-connections-to-camera). The video feed is copied from the original video feed directly to avoid re-encoding. This feed does not include any annotation by Frigate. +#### Force Audio + +Different live view technologies (ex: MSE, WebRTC) support different audio codecs. The `restream -> force_audio` flag tells the restream to make multiple streams available so that all live view technologies are supported. Some camera streams don't work well with this, in which case `restream -> force_audio` should be disabled. + +#### Birdseye Restream + +Birdseye RTSP restream can be enabled at `restream -> birdseye` and accessed at `rtsp://:8554/birdseye`. Enabling the restream will cause birdseye to run 24/7 which may increase CPU usage somewhat. + ### RTMP (Deprecated) In previous Frigate versions RTMP was used for re-streaming. RTMP has disadvantages however including being incompatible with H.265, high bitrates, and certain audio codecs. RTMP is deprecated and it is recommended to move to the new restream role. diff --git a/frigate/config.py b/frigate/config.py index e95e892f3..6b97b2e1a 100644 --- a/frigate/config.py +++ b/frigate/config.py @@ -519,6 +519,7 @@ class RestreamConfig(FrigateBaseModel): force_audio: bool = Field( default=True, title="Force audio compatibility with the browser." ) + birdseye: bool = Field(default=False, title="Restream the birdseye feed via RTSP.") jsmpeg: JsmpegStreamConfig = Field( default_factory=JsmpegStreamConfig, title="Jsmpeg Stream Configuration." 
) diff --git a/frigate/const.py b/frigate/const.py index 86be952f4..6fd336166 100644 --- a/frigate/const.py +++ b/frigate/const.py @@ -1,6 +1,7 @@ BASE_DIR = "/media/frigate" CLIPS_DIR = f"{BASE_DIR}/clips" RECORD_DIR = f"{BASE_DIR}/recordings" +BIRDSEYE_PIPE = "/tmp/cache/birdseye" CACHE_DIR = "/tmp/cache" YAML_EXT = (".yaml", ".yml") PLUS_ENV_VAR = "PLUS_API_KEY" diff --git a/frigate/ffmpeg_presets.py b/frigate/ffmpeg_presets.py index e15730530..c45cab7f1 100644 --- a/frigate/ffmpeg_presets.py +++ b/frigate/ffmpeg_presets.py @@ -129,6 +129,107 @@ PRESETS_HW_ACCEL_SCALE = { ], } +PRESETS_HW_ACCEL_ENCODE = { + "preset-intel-vaapi": [ + "-c:v", + "h264_vaapi", + "-g", + "50", + "-bf", + "0", + "-profile:v", + "high", + "-level:v", + "4.1", + "-sei:v", + "0", + ], + "preset-intel-qsv-h264": [ + "-c:v", + "h264_qsv", + "-g", + "50", + "-bf", + "0", + "-profile:v", + "high", + "-level:v", + "4.1", + "-async_depth:v", + "1", + ], + "preset-intel-qsv-h265": [ + "-c:v", + "h264_qsv", + "-g", + "50", + "-bf", + "0", + "-profile:v", + "high", + "-level:v", + "4.1", + "-async_depth:v", + "1", + ], + "preset-amd-vaapi": [ + "-c:v", + "h264_vaapi", + "-g", + "50", + "-bf", + "0", + "-profile:v", + "high", + "-level:v", + "4.1", + "-sei:v", + "0", + ], + "preset-nvidia-h264": [ + "-c:v", + "h264_nvenc", + "-g", + "50", + "-profile:v", + "high", + "-level:v", + "auto", + "-preset:v", + "p2", + "-tune:v", + "ll", + ], + "preset-nvidia-h265": [ + "-c:v", + "h264_nvenc", + "-g", + "50", + "-profile:v", + "high", + "-level:v", + "auto", + "-preset:v", + "p2", + "-tune:v", + "ll", + ], + "default": [ + "-c:v", + "libx264", + "-g", + "50", + "-profile:v", + "high", + "-level:v", + "4.1", + "-preset:v", + "superfast", + "-tune:v", + "zerolatency", + ], +} + def parse_preset_hardware_acceleration_decode(arg: Any) -> list[str]: """Return the correct preset if in preset format otherwise return None.""" @@ -158,6 +259,14 @@ def parse_preset_hardware_acceleration_scale( return scale +def 
parse_preset_hardware_acceleration_encode(arg: Any) -> list[str]: + """Return the correct encoding preset or default preset if none is set.""" + if not isinstance(arg, str): + return PRESETS_HW_ACCEL_ENCODE["default"] + + return PRESETS_HW_ACCEL_ENCODE.get(arg, PRESETS_HW_ACCEL_ENCODE["default"]) + + PRESETS_INPUT = { "preset-http-jpeg-generic": _user_agent_args + [ diff --git a/frigate/http.py b/frigate/http.py index ce7e3338c..1df1d5fcd 100644 --- a/frigate/http.py +++ b/frigate/http.py @@ -825,6 +825,24 @@ def latest_frame(camera_name): frame = cv2.resize(frame, dsize=(width, height), interpolation=cv2.INTER_AREA) + ret, jpg = cv2.imencode( + ".jpg", frame, [int(cv2.IMWRITE_JPEG_QUALITY), resize_quality] + ) + response = make_response(jpg.tobytes()) + response.headers["Content-Type"] = "image/jpeg" + response.headers["Cache-Control"] = "no-store" + return response + elif camera_name == "birdseye" and current_app.frigate_config.restream.birdseye: + frame = cv2.cvtColor( + current_app.detected_frames_processor.get_current_frame(camera_name), + cv2.COLOR_YUV2BGR_I420, + ) + + height = int(request.args.get("h", str(frame.shape[0]))) + width = int(height * frame.shape[1] / frame.shape[0]) + + frame = cv2.resize(frame, dsize=(width, height), interpolation=cv2.INTER_AREA) + ret, jpg = cv2.imencode( ".jpg", frame, [int(cv2.IMWRITE_JPEG_QUALITY), resize_quality] ) diff --git a/frigate/object_processing.py b/frigate/object_processing.py index 49679f9b0..5477f57b9 100644 --- a/frigate/object_processing.py +++ b/frigate/object_processing.py @@ -880,6 +880,12 @@ class TrackedObjectProcessor(threading.Thread): return {} def get_current_frame(self, camera, draw_options={}): + if camera == "birdseye": + return self.frame_manager.get( + "birdseye", + (self.config.birdseye.height * 3 // 2, self.config.birdseye.width), + ) + return self.camera_states[camera].get_current_frame(draw_options) def get_current_frame_time(self, camera) -> int: diff --git a/frigate/output.py 
b/frigate/output.py index 865a8d367..6d2c7177b 100644 --- a/frigate/output.py +++ b/frigate/output.py @@ -3,6 +3,7 @@ import glob import logging import math import multiprocessing as mp +import os import queue import signal import subprocess as sp @@ -21,17 +22,56 @@ from ws4py.server.wsgiutils import WebSocketWSGIApplication from ws4py.websocket import WebSocket from frigate.config import BirdseyeModeEnum, FrigateConfig -from frigate.const import BASE_DIR +from frigate.const import BASE_DIR, BIRDSEYE_PIPE from frigate.util import SharedMemoryFrameManager, copy_yuv_to_position, get_yuv_crop logger = logging.getLogger(__name__) class FFMpegConverter: - def __init__(self, in_width, in_height, out_width, out_height, quality): - ffmpeg_cmd = f"ffmpeg -f rawvideo -pix_fmt yuv420p -video_size {in_width}x{in_height} -i pipe: -f mpegts -s {out_width}x{out_height} -codec:v mpeg1video -q {quality} -bf 0 pipe:".split( - " " - ) + def __init__( + self, + in_width: int, + in_height: int, + out_width: int, + out_height: int, + quality: int, + birdseye_rtsp: bool = False, + ): + if birdseye_rtsp: + if os.path.exists(BIRDSEYE_PIPE): + os.remove(BIRDSEYE_PIPE) + + os.mkfifo(BIRDSEYE_PIPE, mode=0o777) + stdin = os.open(BIRDSEYE_PIPE, os.O_RDONLY | os.O_NONBLOCK) + self.bd_pipe = os.open(BIRDSEYE_PIPE, os.O_WRONLY) + os.close(stdin) + else: + self.bd_pipe = None + + ffmpeg_cmd = [ + "ffmpeg", + "-f", + "rawvideo", + "-pix_fmt", + "yuv420p", + "-video_size", + f"{in_width}x{in_height}", + "-i", + "pipe:", + "-f", + "mpegts", + "-s", + f"{out_width}x{out_height}", + "-codec:v", + "mpeg1video", + "-q", + f"{quality}", + "-bf", + "0", + "pipe:", + ] + self.process = sp.Popen( ffmpeg_cmd, stdout=sp.PIPE, @@ -40,9 +80,16 @@ class FFMpegConverter: start_new_session=True, ) - def write(self, b): + def write(self, b) -> None: self.process.stdin.write(b) + if self.bd_pipe: + try: + os.write(self.bd_pipe, b) + except BrokenPipeError: + # catch error when no one is listening + return + def 
read(self, length): try: return self.process.stdout.read1(length) @@ -50,6 +97,9 @@ class FFMpegConverter: return False def exit(self): + if self.bd_pipe: + os.close(self.bd_pipe) + self.process.terminate() try: self.process.communicate(timeout=30) @@ -88,7 +138,7 @@ class BroadcastThread(threading.Thread): class BirdsEyeFrameManager: - def __init__(self, config, frame_manager: SharedMemoryFrameManager): + def __init__(self, config: FrigateConfig, frame_manager: SharedMemoryFrameManager): self.config = config self.mode = config.birdseye.mode self.frame_manager = frame_manager @@ -386,6 +436,7 @@ def output_frames(config: FrigateConfig, video_output_queue): config.birdseye.width, config.birdseye.height, config.birdseye.quality, + config.restream.birdseye, ) broadcasters["birdseye"] = BroadcastThread( "birdseye", converters["birdseye"], websocket_server @@ -398,6 +449,12 @@ def output_frames(config: FrigateConfig, video_output_queue): birdseye_manager = BirdsEyeFrameManager(config, frame_manager) + if config.restream.birdseye: + birdseye_buffer = frame_manager.create( + "birdseye", + birdseye_manager.yuv_shape[0] * birdseye_manager.yuv_shape[1], + ) + while not stop_event.is_set(): try: ( @@ -421,10 +478,12 @@ def output_frames(config: FrigateConfig, video_output_queue): # write to the converter for the camera if clients are listening to the specific camera converters[camera].write(frame.tobytes()) - # update birdseye if websockets are connected - if config.birdseye.enabled and any( - ws.environ["PATH_INFO"].endswith("birdseye") - for ws in websocket_server.manager + if config.birdseye.enabled and ( + config.restream.birdseye + or any( + ws.environ["PATH_INFO"].endswith("birdseye") + for ws in websocket_server.manager + ) ): if birdseye_manager.update( camera, @@ -433,7 +492,12 @@ def output_frames(config: FrigateConfig, video_output_queue): frame_time, frame, ): - converters["birdseye"].write(birdseye_manager.frame.tobytes()) + frame_bytes = 
birdseye_manager.frame.tobytes() + + if config.restream.birdseye: + birdseye_buffer[:] = frame_bytes + + converters["birdseye"].write(frame_bytes) if camera in previous_frames: frame_manager.delete(f"{camera}{previous_frames[camera]}") diff --git a/frigate/restream.py b/frigate/restream.py index 0d72c3f5a..6642fbeb4 100644 --- a/frigate/restream.py +++ b/frigate/restream.py @@ -6,6 +6,8 @@ import requests from frigate.util import escape_special_characters from frigate.config import FrigateConfig +from frigate.const import BIRDSEYE_PIPE +from frigate.ffmpeg_presets import parse_preset_hardware_acceleration_encode logger = logging.getLogger(__name__) @@ -42,6 +44,11 @@ class RestreamApi: escape_special_characters(input.path) ) + if self.config.restream.birdseye: + self.relays[ + "birdseye" + ] = f"exec:ffmpeg -hide_banner -f rawvideo -pix_fmt yuv420p -video_size {self.config.birdseye.width}x{self.config.birdseye.height} -r 10 -i {BIRDSEYE_PIPE} {' '.join(parse_preset_hardware_acceleration_encode(self.config.ffmpeg.hwaccel_args))} -rtsp_transport tcp -f rtsp {{output}}" + for name, path in self.relays.items(): params = {"src": path, "name": name} requests.put("http://127.0.0.1:1984/api/streams", params=params) diff --git a/web/src/routes/Birdseye.jsx b/web/src/routes/Birdseye.jsx index 75b356054..1097286d3 100644 --- a/web/src/routes/Birdseye.jsx +++ b/web/src/routes/Birdseye.jsx @@ -1,14 +1,83 @@ -import { h } from 'preact'; +import { h, Fragment } from 'preact'; +import { usePersistence } from '../context'; +import ActivityIndicator from '../components/ActivityIndicator'; import JSMpegPlayer from '../components/JSMpegPlayer'; import Heading from '../components/Heading'; +import WebRtcPlayer from '../components/WebRtcPlayer'; +import MsePlayer from '../components/MsePlayer'; +import useSWR from 'swr'; +import videojs from 'video.js'; export default function Birdseye() { + const { data: config } = useSWR('config'); + + const [viewSource, setViewSource, sourceIsLoaded] 
= usePersistence('birdseye-source', 'mse'); + const sourceValues = ['mse', 'webrtc', 'jsmpeg']; + + if (!config || !sourceIsLoaded) { + return ; + } + + let player; + if (viewSource == 'mse' && config.restream.birdseye) { + if (videojs.browser.IS_IOS) { + player = ( + +
+ MSE is not supported on iOS devices. You'll need to use jsmpeg or webRTC. See the docs for more info. +
+
+ ); + } else { + player = ( + +
+ +
+
+ ); + } + } else if (viewSource == 'webrtc' && config.restream.birdseye) { + player = ( + +
+ +
+
+ ); + } else { + player = ( + +
+ +
+
+ ); + } + return (
- Birdseye -
- +
+ + Birdseye + + + {config.restream.birdseye && ( + + )}
+ + {player}
); }