mirror of
https://github.com/blakeblackshear/frigate.git
synced 2025-09-30 21:33:01 +08:00
Compare commits
1 Commits
v0.15.1
...
release_wo
Author | SHA1 | Date | |
---|---|---|---|
![]() |
1d58e419f4 |
@@ -1,311 +0,0 @@
|
||||
aarch
|
||||
absdiff
|
||||
airockchip
|
||||
Alloc
|
||||
Amcrest
|
||||
amdgpu
|
||||
analyzeduration
|
||||
Annke
|
||||
apexcharts
|
||||
arange
|
||||
argmax
|
||||
argmin
|
||||
argpartition
|
||||
ascontiguousarray
|
||||
astype
|
||||
authelia
|
||||
authentik
|
||||
autodetected
|
||||
automations
|
||||
autotrack
|
||||
autotracked
|
||||
autotracker
|
||||
autotracking
|
||||
balena
|
||||
Beelink
|
||||
BGRA
|
||||
BHWC
|
||||
blackshear
|
||||
blakeblackshear
|
||||
bottombar
|
||||
buildx
|
||||
castable
|
||||
cdist
|
||||
Celeron
|
||||
cgroups
|
||||
chipset
|
||||
chromadb
|
||||
Chromecast
|
||||
cmdline
|
||||
codeowner
|
||||
CODEOWNERS
|
||||
codeproject
|
||||
colormap
|
||||
colorspace
|
||||
comms
|
||||
coro
|
||||
ctypeslib
|
||||
CUDA
|
||||
Cuvid
|
||||
Dahua
|
||||
datasheet
|
||||
debconf
|
||||
deci
|
||||
deepstack
|
||||
defragment
|
||||
devcontainer
|
||||
DEVICEMAP
|
||||
discardcorrupt
|
||||
dpkg
|
||||
dsize
|
||||
dtype
|
||||
ECONNRESET
|
||||
edgetpu
|
||||
fastapi
|
||||
faststart
|
||||
fflags
|
||||
ffprobe
|
||||
fillna
|
||||
flac
|
||||
foscam
|
||||
fourcc
|
||||
framebuffer
|
||||
fregate
|
||||
frégate
|
||||
fromarray
|
||||
frombuffer
|
||||
frontdoor
|
||||
fstype
|
||||
fullchain
|
||||
fullscreen
|
||||
genai
|
||||
generativeai
|
||||
genpts
|
||||
getpid
|
||||
gpuload
|
||||
HACS
|
||||
Hailo
|
||||
hass
|
||||
hconcat
|
||||
healthcheck
|
||||
hideable
|
||||
Hikvision
|
||||
homeassistant
|
||||
homekit
|
||||
homography
|
||||
hsize
|
||||
hstack
|
||||
httpx
|
||||
hwaccel
|
||||
hwdownload
|
||||
hwmap
|
||||
hwupload
|
||||
iloc
|
||||
imagestream
|
||||
imdecode
|
||||
imencode
|
||||
imread
|
||||
imutils
|
||||
imwrite
|
||||
interp
|
||||
iostat
|
||||
iotop
|
||||
itemsize
|
||||
Jellyfin
|
||||
jetson
|
||||
jetsons
|
||||
joserfc
|
||||
jsmpeg
|
||||
jsonify
|
||||
Kalman
|
||||
keepalive
|
||||
keepdims
|
||||
labelmap
|
||||
letsencrypt
|
||||
levelname
|
||||
LIBAVFORMAT
|
||||
libedgetpu
|
||||
libnvinfer
|
||||
libva
|
||||
libwebp
|
||||
libx
|
||||
libyolo
|
||||
linalg
|
||||
localzone
|
||||
logpipe
|
||||
Loryta
|
||||
lstsq
|
||||
lsusb
|
||||
markupsafe
|
||||
maxsplit
|
||||
MEMHOSTALLOC
|
||||
memlimit
|
||||
meshgrid
|
||||
metadatas
|
||||
migraphx
|
||||
minilm
|
||||
mjpeg
|
||||
mkfifo
|
||||
mobiledet
|
||||
mobilenet
|
||||
modelpath
|
||||
mosquitto
|
||||
mountpoint
|
||||
movflags
|
||||
mpegts
|
||||
mqtt
|
||||
mse
|
||||
msenc
|
||||
namedtuples
|
||||
nbytes
|
||||
nchw
|
||||
ndarray
|
||||
ndimage
|
||||
nethogs
|
||||
newaxis
|
||||
nhwc
|
||||
NOBLOCK
|
||||
nobuffer
|
||||
nokey
|
||||
NONBLOCK
|
||||
noninteractive
|
||||
noprint
|
||||
Norfair
|
||||
nptype
|
||||
NTSC
|
||||
numpy
|
||||
nvenc
|
||||
nvhost
|
||||
nvml
|
||||
nvmpi
|
||||
ollama
|
||||
onnx
|
||||
onnxruntime
|
||||
onvif
|
||||
ONVIF
|
||||
openai
|
||||
opencv
|
||||
openvino
|
||||
OWASP
|
||||
paho
|
||||
passwordless
|
||||
popleft
|
||||
posthog
|
||||
postprocess
|
||||
poweroff
|
||||
preexec
|
||||
probesize
|
||||
protobuf
|
||||
pstate
|
||||
psutil
|
||||
pubkey
|
||||
putenv
|
||||
pycache
|
||||
pydantic
|
||||
pyobj
|
||||
pysqlite
|
||||
pytz
|
||||
pywebpush
|
||||
qnap
|
||||
quantisation
|
||||
Radeon
|
||||
radeonsi
|
||||
radeontop
|
||||
rawvideo
|
||||
rcond
|
||||
RDONLY
|
||||
rebranded
|
||||
referer
|
||||
reindex
|
||||
Reolink
|
||||
restream
|
||||
restreamed
|
||||
restreaming
|
||||
rkmpp
|
||||
rknn
|
||||
rkrga
|
||||
rockchip
|
||||
rocm
|
||||
rocminfo
|
||||
rootfs
|
||||
rtmp
|
||||
RTSP
|
||||
ruamel
|
||||
scroller
|
||||
setproctitle
|
||||
setpts
|
||||
shms
|
||||
SIGUSR
|
||||
skylake
|
||||
sleeptime
|
||||
SNDMORE
|
||||
socs
|
||||
sqliteq
|
||||
sqlitevecq
|
||||
ssdlite
|
||||
statm
|
||||
stimeout
|
||||
stylelint
|
||||
subclassing
|
||||
substream
|
||||
superfast
|
||||
surveillance
|
||||
svscan
|
||||
Swipeable
|
||||
sysconf
|
||||
tailscale
|
||||
Tapo
|
||||
tensorrt
|
||||
tflite
|
||||
thresholded
|
||||
timelapse
|
||||
tmpfs
|
||||
tobytes
|
||||
toggleable
|
||||
traefik
|
||||
tzlocal
|
||||
Ubiquiti
|
||||
udev
|
||||
udevadm
|
||||
ultrafast
|
||||
unichip
|
||||
unidecode
|
||||
Unifi
|
||||
unixepoch
|
||||
unraid
|
||||
unreviewed
|
||||
userdata
|
||||
usermod
|
||||
uvicorn
|
||||
vaapi
|
||||
vainfo
|
||||
variations
|
||||
vbios
|
||||
vconcat
|
||||
vitb
|
||||
vstream
|
||||
vsync
|
||||
wallclock
|
||||
webp
|
||||
webpush
|
||||
webrtc
|
||||
websockets
|
||||
webui
|
||||
werkzeug
|
||||
workdir
|
||||
WRONLY
|
||||
wsgirefserver
|
||||
wsgiutils
|
||||
wsize
|
||||
xaddr
|
||||
xmaxs
|
||||
xmins
|
||||
XPUB
|
||||
XSUB
|
||||
ymaxs
|
||||
ymins
|
||||
yolo
|
||||
yolonas
|
||||
yolox
|
||||
zeep
|
||||
zerolatency
|
@@ -10,14 +10,10 @@
|
||||
"features": {
|
||||
"ghcr.io/devcontainers/features/common-utils:1": {}
|
||||
},
|
||||
"forwardPorts": [8971, 5000, 5001, 5173, 8554, 8555],
|
||||
"forwardPorts": [5000, 5001, 5173, 1935, 8554, 8555],
|
||||
"portsAttributes": {
|
||||
"8971": {
|
||||
"label": "External NGINX",
|
||||
"onAutoForward": "silent"
|
||||
},
|
||||
"5000": {
|
||||
"label": "Internal NGINX",
|
||||
"label": "NGINX",
|
||||
"onAutoForward": "silent"
|
||||
},
|
||||
"5001": {
|
||||
@@ -28,6 +24,10 @@
|
||||
"label": "Vite Server",
|
||||
"onAutoForward": "silent"
|
||||
},
|
||||
"1935": {
|
||||
"label": "RTMP",
|
||||
"onAutoForward": "silent"
|
||||
},
|
||||
"8554": {
|
||||
"label": "gortc RTSP",
|
||||
"onAutoForward": "silent"
|
||||
@@ -42,6 +42,7 @@
|
||||
"extensions": [
|
||||
"ms-python.python",
|
||||
"ms-python.vscode-pylance",
|
||||
"ms-python.black-formatter",
|
||||
"visualstudioexptteam.vscodeintellicode",
|
||||
"mhutchie.git-graph",
|
||||
"ms-azuretools.vscode-docker",
|
||||
@@ -52,11 +53,13 @@
|
||||
"csstools.postcss",
|
||||
"blanu.vscode-styled-jsx",
|
||||
"bradlc.vscode-tailwindcss",
|
||||
"charliermarsh.ruff",
|
||||
"eamodio.gitlens"
|
||||
"ms-python.isort",
|
||||
"charliermarsh.ruff"
|
||||
],
|
||||
"settings": {
|
||||
"remote.autoForwardPorts": false,
|
||||
"python.linting.pylintEnabled": true,
|
||||
"python.linting.enabled": true,
|
||||
"python.formatting.provider": "none",
|
||||
"python.languageServer": "Pylance",
|
||||
"editor.formatOnPaste": false,
|
||||
@@ -69,7 +72,7 @@
|
||||
"eslint.workingDirectories": ["./web"],
|
||||
"isort.args": ["--settings-path=./pyproject.toml"],
|
||||
"[python]": {
|
||||
"editor.defaultFormatter": "charliermarsh.ruff",
|
||||
"editor.defaultFormatter": "ms-python.black-formatter",
|
||||
"editor.formatOnSave": true,
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.fixAll": true,
|
||||
|
@@ -3,12 +3,10 @@
|
||||
set -euxo pipefail
|
||||
|
||||
# Cleanup the old github host key
|
||||
if [[ -f ~/.ssh/known_hosts ]]; then
|
||||
# Add new github host key
|
||||
sed -i -e '/AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31\/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi\/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ==/d' ~/.ssh/known_hosts
|
||||
curl -L https://api.github.com/meta | jq -r '.ssh_keys | .[]' | \
|
||||
sed -e 's/^/github.com /' >> ~/.ssh/known_hosts
|
||||
fi
|
||||
sed -i -e '/AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31\/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi\/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ==/d' ~/.ssh/known_hosts
|
||||
# Add new github host key
|
||||
curl -L https://api.github.com/meta | jq -r '.ssh_keys | .[]' | \
|
||||
sed -e 's/^/github.com /' >> ~/.ssh/known_hosts
|
||||
|
||||
# Frigate normal container runs as root, so it have permission to create
|
||||
# the folders. But the devcontainer runs as the host user, so we need to
|
||||
@@ -19,7 +17,7 @@ sudo chown -R "$(id -u):$(id -g)" /media/frigate
|
||||
# When started as a service, LIBAVFORMAT_VERSION_MAJOR is defined in the
|
||||
# s6 service file. For dev, where frigate is started from an interactive
|
||||
# shell, we define it in .bashrc instead.
|
||||
echo 'export LIBAVFORMAT_VERSION_MAJOR=$(/usr/lib/ffmpeg/7.0/bin/ffmpeg -version | grep -Po "libavformat\W+\K\d+")' >> $HOME/.bashrc
|
||||
echo 'export LIBAVFORMAT_VERSION_MAJOR=$(ffmpeg -version | grep -Po "libavformat\W+\K\d+")' >> $HOME/.bashrc
|
||||
|
||||
make version
|
||||
|
||||
|
138
.github/DISCUSSION_TEMPLATE/camera-support.yml
vendored
138
.github/DISCUSSION_TEMPLATE/camera-support.yml
vendored
@@ -1,138 +0,0 @@
|
||||
title: "[Camera Support]: "
|
||||
labels: ["support", "triage"]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Use this form for support or questions for an issue with your cameras.
|
||||
|
||||
Before submitting your support request, please [search the discussions][discussions], read the [official Frigate documentation][docs], and read the [Frigate FAQ][faq] pinned at the Discussion page to see if your question has already been answered by the community.
|
||||
|
||||
[discussions]: https://www.github.com/blakeblackshear/frigate/discussions
|
||||
[docs]: https://docs.frigate.video
|
||||
[faq]: https://github.com/blakeblackshear/frigate/discussions/12724
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
label: Describe the problem you are having
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: version
|
||||
attributes:
|
||||
label: Version
|
||||
description: Visible on the System page in the Web UI. Please include the full version including the build identifier (eg. 0.14.0-ea36ds1)
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
attributes:
|
||||
label: What browser(s) are you using?
|
||||
placeholder: Google Chrome 88.0.4324.150
|
||||
description: >
|
||||
Provide the full name and don't forget to add the version!
|
||||
- type: textarea
|
||||
id: config
|
||||
attributes:
|
||||
label: Frigate config file
|
||||
description: This will be automatically formatted into code, so no need for backticks.
|
||||
render: yaml
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: frigatelogs
|
||||
attributes:
|
||||
label: Relevant Frigate log output
|
||||
description: Please copy and paste any relevant Frigate log output. Include logs before and after your exact error when possible. This will be automatically formatted into code, so no need for backticks.
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: go2rtclogs
|
||||
attributes:
|
||||
label: Relevant go2rtc log output
|
||||
description: Please copy and paste any relevant go2rtc log output. Include logs before and after your exact error when possible. Logs can be viewed via the Frigate UI, Docker, or the go2rtc dashboard. This will be automatically formatted into code, so no need for backticks.
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: ffprobe
|
||||
attributes:
|
||||
label: FFprobe output from your camera
|
||||
description: Run `ffprobe <camera_url>` from within the Frigate container if possible, and provide output below
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: stats
|
||||
attributes:
|
||||
label: Frigate stats
|
||||
description: Output from frigate's /api/stats endpoint
|
||||
render: json
|
||||
- type: dropdown
|
||||
id: os
|
||||
attributes:
|
||||
label: Operating system
|
||||
options:
|
||||
- HassOS
|
||||
- Debian
|
||||
- Other Linux
|
||||
- Proxmox
|
||||
- UNRAID
|
||||
- Windows
|
||||
- Other
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: install-method
|
||||
attributes:
|
||||
label: Install method
|
||||
options:
|
||||
- HassOS Addon
|
||||
- Docker Compose
|
||||
- Docker CLI
|
||||
- Proxmox via Docker
|
||||
- Proxmox via TTeck Script
|
||||
- Windows WSL2
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: object-detector
|
||||
attributes:
|
||||
label: Object Detector
|
||||
options:
|
||||
- Coral
|
||||
- OpenVino
|
||||
- TensorRT
|
||||
- RKNN
|
||||
- Other
|
||||
- CPU (no coral)
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: network
|
||||
attributes:
|
||||
label: Network connection
|
||||
options:
|
||||
- Wired
|
||||
- Wireless
|
||||
- Mixed
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: camera
|
||||
attributes:
|
||||
label: Camera make and model
|
||||
description: Dahua, hikvision, amcrest, reolink, etc and model number
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: screenshots
|
||||
attributes:
|
||||
label: Screenshots of the Frigate UI's System metrics pages
|
||||
description: Drag and drop for images is possible in this field. Please post screenshots of at least General and Cameras tabs.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: other
|
||||
attributes:
|
||||
label: Any other information that may be helpful
|
113
.github/DISCUSSION_TEMPLATE/config-support.yml
vendored
113
.github/DISCUSSION_TEMPLATE/config-support.yml
vendored
@@ -1,113 +0,0 @@
|
||||
title: "[Config Support]: "
|
||||
labels: ["support", "triage"]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Use this form for support or questions related to Frigate's configuration and config file.
|
||||
|
||||
Before submitting your support request, please [search the discussions][discussions], read the [official Frigate documentation][docs], and read the [Frigate FAQ][faq] pinned at the Discussion page to see if your question has already been answered by the community.
|
||||
|
||||
[discussions]: https://www.github.com/blakeblackshear/frigate/discussions
|
||||
[docs]: https://docs.frigate.video
|
||||
[faq]: https://github.com/blakeblackshear/frigate/discussions/12724
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
label: Describe the problem you are having
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: version
|
||||
attributes:
|
||||
label: Version
|
||||
description: Visible on the System page in the Web UI. Please include the full version including the build identifier (eg. 0.14.0-ea36ds1)
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: config
|
||||
attributes:
|
||||
label: Frigate config file
|
||||
description: This will be automatically formatted into code, so no need for backticks.
|
||||
render: yaml
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: frigatelogs
|
||||
attributes:
|
||||
label: Relevant Frigate log output
|
||||
description: Please copy and paste any relevant Frigate log output. Include logs before and after your exact error when possible. This will be automatically formatted into code, so no need for backticks.
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: go2rtclogs
|
||||
attributes:
|
||||
label: Relevant go2rtc log output
|
||||
description: Please copy and paste any relevant go2rtc log output. Include logs before and after your exact error when possible. This will be automatically formatted into code, so no need for backticks.
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: stats
|
||||
attributes:
|
||||
label: Frigate stats
|
||||
description: Output from frigate's /api/stats endpoint
|
||||
render: json
|
||||
- type: dropdown
|
||||
id: os
|
||||
attributes:
|
||||
label: Operating system
|
||||
options:
|
||||
- HassOS
|
||||
- Debian
|
||||
- Other Linux
|
||||
- Proxmox
|
||||
- UNRAID
|
||||
- Windows
|
||||
- Other
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: install-method
|
||||
attributes:
|
||||
label: Install method
|
||||
options:
|
||||
- HassOS Addon
|
||||
- Docker Compose
|
||||
- Docker CLI
|
||||
- Proxmox via Docker
|
||||
- Proxmox via TTeck Script
|
||||
- Windows WSL2
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: docker
|
||||
attributes:
|
||||
label: docker-compose file or Docker CLI command
|
||||
description: This will be automatically formatted into code, so no need for backticks.
|
||||
render: yaml
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: object-detector
|
||||
attributes:
|
||||
label: Object Detector
|
||||
options:
|
||||
- Coral
|
||||
- OpenVino
|
||||
- TensorRT
|
||||
- RKNN
|
||||
- Other
|
||||
- CPU (no coral)
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: screenshots
|
||||
attributes:
|
||||
label: Screenshots of the Frigate UI's System metrics pages
|
||||
description: Drag and drop or simple cut/paste is possible in this field
|
||||
- type: textarea
|
||||
id: other
|
||||
attributes:
|
||||
label: Any other information that may be helpful
|
87
.github/DISCUSSION_TEMPLATE/detector-support.yml
vendored
87
.github/DISCUSSION_TEMPLATE/detector-support.yml
vendored
@@ -1,87 +0,0 @@
|
||||
title: "[Detector Support]: "
|
||||
labels: ["support", "triage"]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Use this form for support or questions related to Frigate's object detectors.
|
||||
|
||||
Before submitting your support request, please [search the discussions][discussions], read the [official Frigate documentation][docs], and read the [Frigate FAQ][faq] pinned at the Discussion page to see if your question has already been answered by the community.
|
||||
|
||||
[discussions]: https://www.github.com/blakeblackshear/frigate/discussions
|
||||
[docs]: https://docs.frigate.video
|
||||
[faq]: https://github.com/blakeblackshear/frigate/discussions/12724
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
label: Describe the problem you are having
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: version
|
||||
attributes:
|
||||
label: Version
|
||||
description: Visible on the System page in the Web UI. Please include the full version including the build identifier (eg. 0.14.0-ea36ds1)
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: config
|
||||
attributes:
|
||||
label: Frigate config file
|
||||
description: This will be automatically formatted into code, so no need for backticks.
|
||||
render: yaml
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: docker
|
||||
attributes:
|
||||
label: docker-compose file or Docker CLI command
|
||||
description: This will be automatically formatted into code, so no need for backticks.
|
||||
render: yaml
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: frigatelogs
|
||||
attributes:
|
||||
label: Relevant Frigate log output
|
||||
description: Please copy and paste any relevant Frigate log output. Include logs before and after your exact error when possible. This will be automatically formatted into code, so no need for backticks.
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: install-method
|
||||
attributes:
|
||||
label: Install method
|
||||
options:
|
||||
- HassOS Addon
|
||||
- Docker Compose
|
||||
- Docker CLI
|
||||
- Proxmox via Docker
|
||||
- Proxmox via TTeck Script
|
||||
- Windows WSL2
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: object-detector
|
||||
attributes:
|
||||
label: Object Detector
|
||||
options:
|
||||
- Coral
|
||||
- OpenVino
|
||||
- TensorRT
|
||||
- RKNN
|
||||
- Other
|
||||
- CPU (no coral)
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: screenshots
|
||||
attributes:
|
||||
label: Screenshots of the Frigate UI's System metrics pages
|
||||
description: Drag and drop for images is possible in this field. Please post screenshots of at least General and Cameras tabs.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: other
|
||||
attributes:
|
||||
label: Any other information that may be helpful
|
130
.github/DISCUSSION_TEMPLATE/general-support.yml
vendored
130
.github/DISCUSSION_TEMPLATE/general-support.yml
vendored
@@ -1,130 +0,0 @@
|
||||
title: "[Support]: "
|
||||
labels: ["support", "triage"]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Use this form for support for issues that don't fall into any specific category.
|
||||
|
||||
Before submitting your support request, please [search the discussions][discussions], read the [official Frigate documentation][docs], and read the [Frigate FAQ][faq] pinned at the Discussion page to see if your question has already been answered by the community.
|
||||
|
||||
[discussions]: https://www.github.com/blakeblackshear/frigate/discussions
|
||||
[docs]: https://docs.frigate.video
|
||||
[faq]: https://github.com/blakeblackshear/frigate/discussions/12724
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
label: Describe the problem you are having
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: version
|
||||
attributes:
|
||||
label: Version
|
||||
description: Visible on the System page in the Web UI. Please include the full version including the build identifier (eg. 0.14.0-ea36ds1)
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
attributes:
|
||||
label: What browser(s) are you using?
|
||||
placeholder: Google Chrome 88.0.4324.150
|
||||
description: >
|
||||
Provide the full name and don't forget to add the version!
|
||||
- type: textarea
|
||||
id: config
|
||||
attributes:
|
||||
label: Frigate config file
|
||||
description: This will be automatically formatted into code, so no need for backticks.
|
||||
render: yaml
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: frigatelogs
|
||||
attributes:
|
||||
label: Relevant Frigate log output
|
||||
description: Please copy and paste any relevant Frigate log output. Include logs before and after your exact error when possible. This will be automatically formatted into code, so no need for backticks.
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: go2rtclogs
|
||||
attributes:
|
||||
label: Relevant go2rtc log output
|
||||
description: Please copy and paste any relevant go2rtc log output. Include logs before and after your exact error when possible. Logs can be viewed via the Frigate UI, Docker, or the go2rtc dashboard. This will be automatically formatted into code, so no need for backticks.
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: ffprobe
|
||||
attributes:
|
||||
label: FFprobe output from your camera
|
||||
description: Run `ffprobe <camera_url>` from within the Frigate container if possible, and provide output below
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: stats
|
||||
attributes:
|
||||
label: Frigate stats
|
||||
description: Output from frigate's /api/stats endpoint
|
||||
render: json
|
||||
- type: dropdown
|
||||
id: install-method
|
||||
attributes:
|
||||
label: Install method
|
||||
options:
|
||||
- HassOS Addon
|
||||
- Docker Compose
|
||||
- Docker CLI
|
||||
- Proxmox via Docker
|
||||
- Proxmox via TTeck Script
|
||||
- Windows WSL2
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: docker
|
||||
attributes:
|
||||
label: docker-compose file or Docker CLI command
|
||||
description: This will be automatically formatted into code, so no need for backticks.
|
||||
render: yaml
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: object-detector
|
||||
attributes:
|
||||
label: Object Detector
|
||||
options:
|
||||
- Coral
|
||||
- OpenVino
|
||||
- TensorRT
|
||||
- RKNN
|
||||
- Other
|
||||
- CPU (no coral)
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: network
|
||||
attributes:
|
||||
label: Network connection
|
||||
options:
|
||||
- Wired
|
||||
- Wireless
|
||||
- Mixed
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: camera
|
||||
attributes:
|
||||
label: Camera make and model
|
||||
description: Dahua, hikvision, amcrest, reolink, etc and model number
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: screenshots
|
||||
attributes:
|
||||
label: Screenshots of the Frigate UI's System metrics pages
|
||||
description: Drag and drop for images is possible in this field
|
||||
- type: textarea
|
||||
id: other
|
||||
attributes:
|
||||
label: Any other information that may be helpful
|
@@ -1,120 +0,0 @@
|
||||
title: "[HW Accel Support]: "
|
||||
labels: ["support", "triage"]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Use this form to submit a support request for hardware acceleration issues.
|
||||
|
||||
Before submitting your support request, please [search the discussions][discussions], read the [official Frigate documentation][docs], and read the [Frigate FAQ][faq] pinned at the Discussion page to see if your question has already been answered by the community.
|
||||
|
||||
[discussions]: https://www.github.com/blakeblackshear/frigate/discussions
|
||||
[docs]: https://docs.frigate.video
|
||||
[faq]: https://github.com/blakeblackshear/frigate/discussions/12724
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
label: Describe the problem you are having
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: version
|
||||
attributes:
|
||||
label: Version
|
||||
description: Visible on the System page in the Web UI. Please include the full version including the build identifier (eg. 0.14.0-ea36ds1)
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: config
|
||||
attributes:
|
||||
label: Frigate config file
|
||||
description: This will be automatically formatted into code, so no need for backticks.
|
||||
render: yaml
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: docker
|
||||
attributes:
|
||||
label: docker-compose file or Docker CLI command
|
||||
description: This will be automatically formatted into code, so no need for backticks.
|
||||
render: yaml
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: frigatelogs
|
||||
attributes:
|
||||
label: Relevant Frigate log output
|
||||
description: Please copy and paste any relevant Frigate log output. Include logs before and after your exact error when possible. This will be automatically formatted into code, so no need for backticks.
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: go2rtclogs
|
||||
attributes:
|
||||
label: Relevant go2rtc log output
|
||||
description: Please copy and paste any relevant go2rtc log output. Include logs before and after your exact error when possible. Logs can be viewed via the Frigate UI, Docker, or the go2rtc dashboard. This will be automatically formatted into code, so no need for backticks.
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: ffprobe
|
||||
attributes:
|
||||
label: FFprobe output from your camera
|
||||
description: Run `ffprobe <camera_url>` from within the Frigate container if possible, and provide output below
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: install-method
|
||||
attributes:
|
||||
label: Install method
|
||||
options:
|
||||
- HassOS Addon
|
||||
- Docker Compose
|
||||
- Docker CLI
|
||||
- Proxmox via Docker
|
||||
- Proxmox via TTeck Script
|
||||
- Windows WSL2
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: object-detector
|
||||
attributes:
|
||||
label: Object Detector
|
||||
options:
|
||||
- Coral
|
||||
- OpenVino
|
||||
- TensorRT
|
||||
- RKNN
|
||||
- Other
|
||||
- CPU (no coral)
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: network
|
||||
attributes:
|
||||
label: Network connection
|
||||
options:
|
||||
- Wired
|
||||
- Wireless
|
||||
- Mixed
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: camera
|
||||
attributes:
|
||||
label: Camera make and model
|
||||
description: Dahua, hikvision, amcrest, reolink, etc and model number
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: screenshots
|
||||
attributes:
|
||||
label: Screenshots of the Frigate UI's System metrics pages
|
||||
description: Drag and drop for images is possible in this field. Please post screenshots of at least General and Cameras tabs.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: other
|
||||
attributes:
|
||||
label: Any other information that may be helpful
|
21
.github/DISCUSSION_TEMPLATE/question.yml
vendored
21
.github/DISCUSSION_TEMPLATE/question.yml
vendored
@@ -1,21 +0,0 @@
|
||||
title: "[Question]: "
|
||||
labels: ["question"]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Use this form for questions you have about Frigate.
|
||||
|
||||
Before submitting your question, please [search the discussions][discussions], read the [official Frigate documentation][docs], and read the [Frigate FAQ][faq] pinned at the Discussion page to see if your question has already been answered by the community.
|
||||
|
||||
**If you are looking for support, start a new discussion and use a support category.**
|
||||
|
||||
[discussions]: https://www.github.com/blakeblackshear/frigate/discussions
|
||||
[docs]: https://docs.frigate.video
|
||||
[faq]: https://github.com/blakeblackshear/frigate/discussions/12724
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
label: "What is your question?"
|
||||
validations:
|
||||
required: true
|
146
.github/DISCUSSION_TEMPLATE/report-a-bug.yml
vendored
146
.github/DISCUSSION_TEMPLATE/report-a-bug.yml
vendored
@@ -1,146 +0,0 @@
|
||||
title: "[Bug]: "
|
||||
labels: ["bug", "triage"]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Use this form to submit a reproducible bug in Frigate or Frigate's UI.
|
||||
|
||||
Before submitting your bug report, please [search the discussions][discussions], look at recent open and closed [pull requests][prs], read the [official Frigate documentation][docs], and read the [Frigate FAQ][faq] pinned at the Discussion page to see if your bug has already been fixed by the developers or reported by the community.
|
||||
|
||||
**If you are unsure if your issue is actually a bug or not, please submit a support request first.**
|
||||
|
||||
[discussions]: https://www.github.com/blakeblackshear/frigate/discussions
|
||||
[prs]: https://www.github.com/blakeblackshear/frigate/pulls
|
||||
[docs]: https://docs.frigate.video
|
||||
[faq]: https://github.com/blakeblackshear/frigate/discussions/12724
|
||||
- type: checkboxes
|
||||
attributes:
|
||||
label: Checklist
|
||||
description: Please verify that you've followed these steps
|
||||
options:
|
||||
- label: I have updated to the latest available Frigate version.
|
||||
required: true
|
||||
- label: I have cleared the cache of my browser.
|
||||
required: true
|
||||
- label: I have tried a different browser to see if it is related to my browser.
|
||||
required: true
|
||||
- label: I have tried reproducing the issue in [incognito mode](https://www.computerworld.com/article/1719851/how-to-go-incognito-in-chrome-firefox-safari-and-edge.html) to rule out problems with any third party extensions or plugins I have installed.
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
label: Describe the problem you are having
|
||||
description: Provide a clear and concise description of what the bug is.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: steps
|
||||
attributes:
|
||||
label: Steps to reproduce
|
||||
description: |
|
||||
Please tell us exactly how to reproduce your issue.
|
||||
Provide clear and concise step by step instructions and add code snippets if needed.
|
||||
value: |
|
||||
1.
|
||||
2.
|
||||
3.
|
||||
...
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: version
|
||||
attributes:
|
||||
label: Version
|
||||
description: Visible on the System page in the Web UI. Please include the full version including the build identifier (eg. 0.14.0-ea36ds1)
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
attributes:
|
||||
label: In which browser(s) are you experiencing the issue with?
|
||||
placeholder: Google Chrome 88.0.4324.150
|
||||
description: >
|
||||
Provide the full name and don't forget to add the version!
|
||||
- type: textarea
|
||||
id: config
|
||||
attributes:
|
||||
label: Frigate config file
|
||||
description: This will be automatically formatted into code, so no need for backticks.
|
||||
render: yaml
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: docker
|
||||
attributes:
|
||||
label: docker-compose file or Docker CLI command
|
||||
description: This will be automatically formatted into code, so no need for backticks.
|
||||
render: yaml
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: frigatelogs
|
||||
attributes:
|
||||
label: Relevant Frigate log output
|
||||
description: Please copy and paste any relevant Frigate log output. Include logs before and after your exact error when possible. This will be automatically formatted into code, so no need for backticks.
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: go2rtclogs
|
||||
attributes:
|
||||
label: Relevant go2rtc log output
|
||||
description: Please copy and paste any relevant go2rtc log output. Include logs before and after your exact error when possible. Logs can be viewed via the Frigate UI, Docker, or the go2rtc dashboard. This will be automatically formatted into code, so no need for backticks.
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: os
|
||||
attributes:
|
||||
label: Operating system
|
||||
options:
|
||||
- HassOS
|
||||
- Debian
|
||||
- Other Linux
|
||||
- Proxmox
|
||||
- UNRAID
|
||||
- Windows
|
||||
- Other
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: install-method
|
||||
attributes:
|
||||
label: Install method
|
||||
options:
|
||||
- HassOS Addon
|
||||
- Docker Compose
|
||||
- Docker CLI
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: network
|
||||
attributes:
|
||||
label: Network connection
|
||||
options:
|
||||
- Wired
|
||||
- Wireless
|
||||
- Mixed
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: camera
|
||||
attributes:
|
||||
label: Camera make and model
|
||||
description: Dahua, hikvision, amcrest, reolink, etc and model number
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: screenshots
|
||||
attributes:
|
||||
label: Screenshots of the Frigate UI's System metrics pages
|
||||
description: Drag and drop for images is possible in this field. Please post screenshots of all tabs.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: other
|
||||
attributes:
|
||||
label: Any other information that may be helpful
|
1
.github/FUNDING.yml
vendored
1
.github/FUNDING.yml
vendored
@@ -1,4 +1,3 @@
|
||||
github:
|
||||
- blakeblackshear
|
||||
- NickM-27
|
||||
- hawkeye217
|
||||
|
107
.github/ISSUE_TEMPLATE/camera_support_request.yml
vendored
Normal file
107
.github/ISSUE_TEMPLATE/camera_support_request.yml
vendored
Normal file
@@ -0,0 +1,107 @@
|
||||
name: Camera Support Request
|
||||
description: Support for setting up cameras in Frigate
|
||||
title: "[Camera Support]: "
|
||||
labels: ["support", "triage"]
|
||||
assignees: []
|
||||
body:
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
label: Describe the problem you are having
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: version
|
||||
attributes:
|
||||
label: Version
|
||||
description: Visible on the Debug page in the Web UI
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: config
|
||||
attributes:
|
||||
label: Frigate config file
|
||||
description: This will be automatically formatted into code, so no need for backticks.
|
||||
render: yaml
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
label: Relevant log output
|
||||
description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: ffprobe
|
||||
attributes:
|
||||
label: FFprobe output from your camera
|
||||
description: Run `ffprobe <camera_url>` and provide output below
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: stats
|
||||
attributes:
|
||||
label: Frigate stats
|
||||
description: Output from frigate's /api/stats endpoint
|
||||
render: json
|
||||
- type: dropdown
|
||||
id: os
|
||||
attributes:
|
||||
label: Operating system
|
||||
options:
|
||||
- HassOS
|
||||
- Debian
|
||||
- Other Linux
|
||||
- Proxmox
|
||||
- UNRAID
|
||||
- Windows
|
||||
- Other
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: install-method
|
||||
attributes:
|
||||
label: Install method
|
||||
options:
|
||||
- HassOS Addon
|
||||
- Docker Compose
|
||||
- Docker CLI
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: coral
|
||||
attributes:
|
||||
label: Coral version
|
||||
options:
|
||||
- USB
|
||||
- PCIe
|
||||
- M.2
|
||||
- Dev Board
|
||||
- Other
|
||||
- CPU (no coral)
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: network
|
||||
attributes:
|
||||
label: Network connection
|
||||
options:
|
||||
- Wired
|
||||
- Wireless
|
||||
- Mixed
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: camera
|
||||
attributes:
|
||||
label: Camera make and model
|
||||
description: Dahua, hikvision, amcrest, reolink, etc and model number
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: other
|
||||
attributes:
|
||||
label: Any other information that may be helpful
|
7
.github/ISSUE_TEMPLATE/config.yml
vendored
7
.github/ISSUE_TEMPLATE/config.yml
vendored
@@ -1,8 +1 @@
|
||||
blank_issues_enabled: false
|
||||
contact_links:
|
||||
- name: Frigate Support
|
||||
url: https://github.com/blakeblackshear/frigate/discussions/new/choose
|
||||
about: Get support for setting up or troubleshooting Frigate.
|
||||
- name: Frigate Bug Report
|
||||
url: https://github.com/blakeblackshear/frigate/discussions/new/choose
|
||||
about: Report a specific UI or backend bug.
|
||||
|
82
.github/ISSUE_TEMPLATE/config_support_request.yml
vendored
Normal file
82
.github/ISSUE_TEMPLATE/config_support_request.yml
vendored
Normal file
@@ -0,0 +1,82 @@
|
||||
name: Config Support Request
|
||||
description: Support for Frigate configuration
|
||||
title: "[Config Support]: "
|
||||
labels: ["support", "triage"]
|
||||
assignees: []
|
||||
body:
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
label: Describe the problem you are having
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: version
|
||||
attributes:
|
||||
label: Version
|
||||
description: Visible on the Debug page in the Web UI
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: config
|
||||
attributes:
|
||||
label: Frigate config file
|
||||
description: This will be automatically formatted into code, so no need for backticks.
|
||||
render: yaml
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
label: Relevant log output
|
||||
description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: stats
|
||||
attributes:
|
||||
label: Frigate stats
|
||||
description: Output from frigate's /api/stats endpoint
|
||||
render: json
|
||||
- type: dropdown
|
||||
id: os
|
||||
attributes:
|
||||
label: Operating system
|
||||
options:
|
||||
- HassOS
|
||||
- Debian
|
||||
- Other Linux
|
||||
- Proxmox
|
||||
- UNRAID
|
||||
- Windows
|
||||
- Other
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: install-method
|
||||
attributes:
|
||||
label: Install method
|
||||
options:
|
||||
- HassOS Addon
|
||||
- Docker Compose
|
||||
- Docker CLI
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: coral
|
||||
attributes:
|
||||
label: Coral version
|
||||
options:
|
||||
- USB
|
||||
- PCIe
|
||||
- M.2
|
||||
- Dev Board
|
||||
- Other
|
||||
- CPU (no coral)
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: other
|
||||
attributes:
|
||||
label: Any other information that may be helpful
|
84
.github/ISSUE_TEMPLATE/detector_support_request.yml
vendored
Normal file
84
.github/ISSUE_TEMPLATE/detector_support_request.yml
vendored
Normal file
@@ -0,0 +1,84 @@
|
||||
name: Detector Support Request
|
||||
description: Support for setting up object detector in Frigate (Coral, OpenVINO, TensorRT, etc.)
|
||||
title: "[Detector Support]: "
|
||||
labels: ["support", "triage"]
|
||||
assignees: []
|
||||
body:
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
label: Describe the problem you are having
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: version
|
||||
attributes:
|
||||
label: Version
|
||||
description: Visible on the Debug page in the Web UI
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: config
|
||||
attributes:
|
||||
label: Frigate config file
|
||||
description: This will be automatically formatted into code, so no need for backticks.
|
||||
render: yaml
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: docker
|
||||
attributes:
|
||||
label: docker-compose file or Docker CLI command
|
||||
description: This will be automatically formatted into code, so no need for backticks.
|
||||
render: yaml
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
label: Relevant log output
|
||||
description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: os
|
||||
attributes:
|
||||
label: Operating system
|
||||
options:
|
||||
- HassOS
|
||||
- Debian
|
||||
- Other Linux
|
||||
- Proxmox
|
||||
- UNRAID
|
||||
- Windows
|
||||
- Other
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: install-method
|
||||
attributes:
|
||||
label: Install method
|
||||
options:
|
||||
- HassOS Addon
|
||||
- Docker Compose
|
||||
- Docker CLI
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: coral
|
||||
attributes:
|
||||
label: Coral version
|
||||
options:
|
||||
- USB
|
||||
- PCIe
|
||||
- M.2
|
||||
- Dev Board
|
||||
- Other
|
||||
- CPU (no coral)
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: other
|
||||
attributes:
|
||||
label: Any other information that may be helpful
|
107
.github/ISSUE_TEMPLATE/general_support_request.yml
vendored
Normal file
107
.github/ISSUE_TEMPLATE/general_support_request.yml
vendored
Normal file
@@ -0,0 +1,107 @@
|
||||
name: General Support Request
|
||||
description: General support request for Frigate
|
||||
title: "[Support]: "
|
||||
labels: ["support", "triage"]
|
||||
assignees: []
|
||||
body:
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
label: Describe the problem you are having
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: version
|
||||
attributes:
|
||||
label: Version
|
||||
description: Visible on the Debug page in the Web UI
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: config
|
||||
attributes:
|
||||
label: Frigate config file
|
||||
description: This will be automatically formatted into code, so no need for backticks.
|
||||
render: yaml
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
label: Relevant log output
|
||||
description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: ffprobe
|
||||
attributes:
|
||||
label: FFprobe output from your camera
|
||||
description: Run `ffprobe <camera_url>` and provide output below
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: stats
|
||||
attributes:
|
||||
label: Frigate stats
|
||||
description: Output from frigate's /api/stats endpoint
|
||||
render: json
|
||||
- type: dropdown
|
||||
id: os
|
||||
attributes:
|
||||
label: Operating system
|
||||
options:
|
||||
- HassOS
|
||||
- Debian
|
||||
- Other Linux
|
||||
- Proxmox
|
||||
- UNRAID
|
||||
- Windows
|
||||
- Other
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: install-method
|
||||
attributes:
|
||||
label: Install method
|
||||
options:
|
||||
- HassOS Addon
|
||||
- Docker Compose
|
||||
- Docker CLI
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: coral
|
||||
attributes:
|
||||
label: Coral version
|
||||
options:
|
||||
- USB
|
||||
- PCIe
|
||||
- M.2
|
||||
- Dev Board
|
||||
- Other
|
||||
- CPU (no coral)
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: network
|
||||
attributes:
|
||||
label: Network connection
|
||||
options:
|
||||
- Wired
|
||||
- Wireless
|
||||
- Mixed
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: camera
|
||||
attributes:
|
||||
label: Camera make and model
|
||||
description: Dahua, hikvision, amcrest, reolink, etc and model number
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: other
|
||||
attributes:
|
||||
label: Any other information that may be helpful
|
96
.github/ISSUE_TEMPLATE/hwaccel_support_request.yml
vendored
Normal file
96
.github/ISSUE_TEMPLATE/hwaccel_support_request.yml
vendored
Normal file
@@ -0,0 +1,96 @@
|
||||
name: Hardware Acceleration Support Request
|
||||
description: Support for setting up GPU hardware acceleration in Frigate
|
||||
title: "[HW Accel Support]: "
|
||||
labels: ["support", "triage"]
|
||||
assignees: []
|
||||
body:
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
label: Describe the problem you are having
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: version
|
||||
attributes:
|
||||
label: Version
|
||||
description: Visible on the Debug page in the Web UI
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: config
|
||||
attributes:
|
||||
label: Frigate config file
|
||||
description: This will be automatically formatted into code, so no need for backticks.
|
||||
render: yaml
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: docker
|
||||
attributes:
|
||||
label: docker-compose file or Docker CLI command
|
||||
description: This will be automatically formatted into code, so no need for backticks.
|
||||
render: yaml
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: logs
|
||||
attributes:
|
||||
label: Relevant log output
|
||||
description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks.
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: ffprobe
|
||||
attributes:
|
||||
label: FFprobe output from your camera
|
||||
description: Run `ffprobe <camera_url>` and provide output below
|
||||
render: shell
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: os
|
||||
attributes:
|
||||
label: Operating system
|
||||
options:
|
||||
- HassOS
|
||||
- Debian
|
||||
- Other Linux
|
||||
- Proxmox
|
||||
- UNRAID
|
||||
- Windows
|
||||
- Other
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: install-method
|
||||
attributes:
|
||||
label: Install method
|
||||
options:
|
||||
- HassOS Addon
|
||||
- Docker Compose
|
||||
- Docker CLI
|
||||
validations:
|
||||
required: true
|
||||
- type: dropdown
|
||||
id: network
|
||||
attributes:
|
||||
label: Network connection
|
||||
options:
|
||||
- Wired
|
||||
- Wireless
|
||||
- Mixed
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: camera
|
||||
attributes:
|
||||
label: Camera make and model
|
||||
description: Dahua, hikvision, amcrest, reolink, etc and model number
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: other
|
||||
attributes:
|
||||
label: Any other information that may be helpful
|
27
.github/actions/setup/action.yml
vendored
27
.github/actions/setup/action.yml
vendored
@@ -5,37 +5,26 @@ inputs:
|
||||
required: true
|
||||
outputs:
|
||||
image-name:
|
||||
value: ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}:${{ steps.create-short-sha.outputs.SHORT_SHA }}
|
||||
value: ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}:${{ github.ref_name }}-${{ steps.create-short-sha.outputs.SHORT_SHA }}
|
||||
cache-name:
|
||||
value: ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}:cache
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
# Stop docker so we can mount more space at /var/lib/docker
|
||||
- name: Stop docker
|
||||
run: sudo systemctl stop docker
|
||||
shell: bash
|
||||
# This creates a virtual volume at /var/lib/docker to maximize the size
|
||||
# As of 2/14/2024, this results in 97G for docker images
|
||||
- name: Maximize build space
|
||||
uses: easimon/maximize-build-space@master
|
||||
with:
|
||||
remove-dotnet: 'true'
|
||||
remove-android: 'true'
|
||||
remove-haskell: 'true'
|
||||
remove-codeql: 'true'
|
||||
build-mount-path: '/var/lib/docker'
|
||||
- name: Start docker
|
||||
run: sudo systemctl start docker
|
||||
- name: Remove unnecessary files
|
||||
run: |
|
||||
sudo rm -rf /usr/share/dotnet
|
||||
sudo rm -rf /usr/local/lib/android
|
||||
sudo rm -rf /opt/ghc
|
||||
shell: bash
|
||||
- id: lowercaseRepo
|
||||
uses: ASzc/change-string-case-action@v5
|
||||
with:
|
||||
string: ${{ github.repository }}
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
uses: docker/setup-qemu-action@v2
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
uses: docker/setup-buildx-action@v2
|
||||
- name: Log in to the Container registry
|
||||
uses: docker/login-action@465a07811f14bebb1938fbed4728c6a1ff8901fc
|
||||
with:
|
||||
|
8
.github/dependabot.yml
vendored
8
.github/dependabot.yml
vendored
@@ -18,12 +18,6 @@ updates:
|
||||
interval: daily
|
||||
open-pull-requests-limit: 10
|
||||
target-branch: dev
|
||||
- package-ecosystem: "pip"
|
||||
directory: "/docker/tensorrt"
|
||||
schedule:
|
||||
interval: daily
|
||||
open-pull-requests-limit: 10
|
||||
target-branch: dev
|
||||
- package-ecosystem: "npm"
|
||||
directory: "/web"
|
||||
schedule:
|
||||
@@ -34,7 +28,5 @@ updates:
|
||||
directory: "/docs"
|
||||
schedule:
|
||||
interval: daily
|
||||
allow:
|
||||
- dependency-name: "@docusaurus/*"
|
||||
open-pull-requests-limit: 10
|
||||
target-branch: dev
|
||||
|
32
.github/pull_request_template.md
vendored
32
.github/pull_request_template.md
vendored
@@ -1,32 +0,0 @@
|
||||
## Proposed change
|
||||
<!--
|
||||
Describe what this pull request does and how it will benefit users of Frigate.
|
||||
Please describe in detail any considerations, breaking changes, etc. that are
|
||||
made in this pull request.
|
||||
-->
|
||||
|
||||
|
||||
## Type of change
|
||||
|
||||
- [ ] Dependency upgrade
|
||||
- [ ] Bugfix (non-breaking change which fixes an issue)
|
||||
- [ ] New feature
|
||||
- [ ] Breaking change (fix/feature causing existing functionality to break)
|
||||
- [ ] Code quality improvements to existing code
|
||||
- [ ] Documentation Update
|
||||
|
||||
## Additional information
|
||||
|
||||
- This PR fixes or closes issue: fixes #
|
||||
- This PR is related to issue:
|
||||
|
||||
## Checklist
|
||||
|
||||
<!--
|
||||
Put an `x` in the boxes that apply.
|
||||
-->
|
||||
|
||||
- [ ] The code change is tested and works locally.
|
||||
- [ ] Local tests pass. **Your PR cannot be merged unless tests pass**
|
||||
- [ ] There is no commented out code in this PR.
|
||||
- [ ] The code has been formatted using Ruff (`ruff format frigate`)
|
146
.github/workflows/ci.yml
vendored
146
.github/workflows/ci.yml
vendored
@@ -6,8 +6,6 @@ on:
|
||||
branches:
|
||||
- dev
|
||||
- master
|
||||
paths-ignore:
|
||||
- "docs/**"
|
||||
|
||||
# only run the latest commit to avoid cache overwrites
|
||||
concurrency:
|
||||
@@ -19,13 +17,11 @@ env:
|
||||
|
||||
jobs:
|
||||
amd64_build:
|
||||
runs-on: ubuntu-22.04
|
||||
runs-on: ubuntu-latest
|
||||
name: AMD64 Build
|
||||
steps:
|
||||
- name: Check out code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Set up QEMU and Buildx
|
||||
id: setup
|
||||
uses: ./.github/actions/setup
|
||||
@@ -41,14 +37,22 @@ jobs:
|
||||
target: frigate
|
||||
tags: ${{ steps.setup.outputs.image-name }}-amd64
|
||||
cache-from: type=registry,ref=${{ steps.setup.outputs.cache-name }}-amd64
|
||||
- name: Build and push TensorRT (x86 GPU)
|
||||
uses: docker/bake-action@v4
|
||||
with:
|
||||
push: true
|
||||
targets: tensorrt
|
||||
files: docker/tensorrt/trt.hcl
|
||||
set: |
|
||||
tensorrt.tags=${{ steps.setup.outputs.image-name }}-tensorrt
|
||||
*.cache-from=type=registry,ref=${{ steps.setup.outputs.cache-name }}-amd64
|
||||
*.cache-to=type=registry,ref=${{ steps.setup.outputs.cache-name }}-amd64,mode=max
|
||||
arm64_build:
|
||||
runs-on: ubuntu-22.04
|
||||
runs-on: ubuntu-latest
|
||||
name: ARM Build
|
||||
steps:
|
||||
- name: Check out code
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
- name: Set up QEMU and Buildx
|
||||
id: setup
|
||||
uses: ./.github/actions/setup
|
||||
@@ -66,9 +70,8 @@ jobs:
|
||||
${{ steps.setup.outputs.image-name }}-standard-arm64
|
||||
cache-from: type=registry,ref=${{ steps.setup.outputs.cache-name }}-arm64
|
||||
- name: Build and push RPi build
|
||||
uses: docker/bake-action@v6
|
||||
uses: docker/bake-action@v4
|
||||
with:
|
||||
source: .
|
||||
push: true
|
||||
targets: rpi
|
||||
files: docker/rpi/rpi.hcl
|
||||
@@ -77,13 +80,11 @@ jobs:
|
||||
*.cache-from=type=registry,ref=${{ steps.setup.outputs.cache-name }}-arm64
|
||||
*.cache-to=type=registry,ref=${{ steps.setup.outputs.cache-name }}-arm64,mode=max
|
||||
jetson_jp4_build:
runs-on: ubuntu-22.04
runs-on: ubuntu-latest
name: Jetson Jetpack 4
steps:
- name: Check out code
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Set up QEMU and Buildx
id: setup
uses: ./.github/actions/setup
@@ -95,9 +96,8 @@ jobs:
BASE_IMAGE: timongentzsch/l4t-ubuntu20-opencv:latest
SLIM_BASE: timongentzsch/l4t-ubuntu20-opencv:latest
TRT_BASE: timongentzsch/l4t-ubuntu20-opencv:latest
uses: docker/bake-action@v6
uses: docker/bake-action@v4
with:
source: .
push: true
targets: tensorrt
files: docker/tensorrt/trt.hcl
@@ -106,13 +106,11 @@ jobs:
*.cache-from=type=registry,ref=${{ steps.setup.outputs.cache-name }}-jp4
*.cache-to=type=registry,ref=${{ steps.setup.outputs.cache-name }}-jp4,mode=max
jetson_jp5_build:
runs-on: ubuntu-22.04
runs-on: ubuntu-latest
name: Jetson Jetpack 5
steps:
- name: Check out code
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Set up QEMU and Buildx
id: setup
uses: ./.github/actions/setup
@@ -124,9 +122,8 @@ jobs:
BASE_IMAGE: nvcr.io/nvidia/l4t-tensorrt:r8.5.2-runtime
SLIM_BASE: nvcr.io/nvidia/l4t-tensorrt:r8.5.2-runtime
TRT_BASE: nvcr.io/nvidia/l4t-tensorrt:r8.5.2-runtime
uses: docker/bake-action@v6
uses: docker/bake-action@v4
with:
source: .
push: true
targets: tensorrt
files: docker/tensorrt/trt.hcl
@@ -134,123 +131,30 @@ jobs:
tensorrt.tags=${{ steps.setup.outputs.image-name }}-tensorrt-jp5
*.cache-from=type=registry,ref=${{ steps.setup.outputs.cache-name }}-jp5
*.cache-to=type=registry,ref=${{ steps.setup.outputs.cache-name }}-jp5,mode=max
amd64_extra_builds:
runs-on: ubuntu-22.04
name: AMD64 Extra Build
needs:
- amd64_build
steps:
- name: Check out code
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Set up QEMU and Buildx
id: setup
uses: ./.github/actions/setup
with:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push TensorRT (x86 GPU)
env:
COMPUTE_LEVEL: "50 60 70 80 90"
uses: docker/bake-action@v6
with:
source: .
push: true
targets: tensorrt
files: docker/tensorrt/trt.hcl
set: |
tensorrt.tags=${{ steps.setup.outputs.image-name }}-tensorrt
*.cache-from=type=registry,ref=${{ steps.setup.outputs.cache-name }}-amd64
*.cache-to=type=registry,ref=${{ steps.setup.outputs.cache-name }}-amd64,mode=max
arm64_extra_builds:
runs-on: ubuntu-22.04
name: ARM Extra Build
needs:
- arm64_build
steps:
- name: Check out code
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Set up QEMU and Buildx
id: setup
uses: ./.github/actions/setup
with:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push Rockchip build
uses: docker/bake-action@v6
with:
source: .
push: true
targets: rk
files: docker/rockchip/rk.hcl
set: |
rk.tags=${{ steps.setup.outputs.image-name }}-rk
*.cache-from=type=gha
combined_extra_builds:
runs-on: ubuntu-22.04
name: Combined Extra Builds
needs:
- amd64_build
- arm64_build
steps:
- name: Check out code
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Set up QEMU and Buildx
id: setup
uses: ./.github/actions/setup
with:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push Hailo-8l build
uses: docker/bake-action@v6
with:
source: .
push: true
targets: h8l
files: docker/hailo8l/h8l.hcl
set: |
h8l.tags=${{ steps.setup.outputs.image-name }}-h8l
*.cache-from=type=registry,ref=${{ steps.setup.outputs.cache-name }}-h8l
*.cache-to=type=registry,ref=${{ steps.setup.outputs.cache-name }}-h8l,mode=max
- name: AMD/ROCm general build
env:
AMDGPU: gfx
HSA_OVERRIDE: 0
uses: docker/bake-action@v6
with:
source: .
push: true
targets: rocm
files: docker/rocm/rocm.hcl
set: |
rocm.tags=${{ steps.setup.outputs.image-name }}-rocm
*.cache-from=type=gha
# The majority of users running arm64 are rpi users, so the rpi
# build should be the primary arm64 image
assemble_default_build:
runs-on: ubuntu-22.04
runs-on: ubuntu-latest
name: Assemble and push default build
needs:
- amd64_build
- arm64_build
steps:
- id: lowercaseRepo
uses: ASzc/change-string-case-action@v6
uses: ASzc/change-string-case-action@v5
with:
string: ${{ github.repository }}
- name: Log in to the Container registry
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Create short sha
run: echo "SHORT_SHA=${GITHUB_SHA::7}" >> $GITHUB_ENV
- uses: int128/docker-manifest-create-action@v2
- uses: int128/docker-manifest-create-action@v1
with:
tags: ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}:${{ env.SHORT_SHA }}
sources: |
ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}:${{ env.SHORT_SHA }}-amd64
ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}:${{ env.SHORT_SHA }}-rpi
tags: ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}:${{ github.ref_name }}-${{ env.SHORT_SHA }}
suffixes: |
-amd64
-rpi
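To make the assemble step above concrete: it publishes a single default tag that points at the amd64 image and the rpi arm64 image. A rough equivalent using plain Docker tooling (an illustration only, not the manifest action the workflow actually uses; the short sha value is hypothetical):

REPO=ghcr.io/blakeblackshear/frigate
SHORT_SHA=abc1234   # hypothetical value of ${GITHUB_SHA::7}
docker buildx imagetools create \
  --tag "${REPO}:${SHORT_SHA}" \
  "${REPO}:${SHORT_SHA}-amd64" \
  "${REPO}:${SHORT_SHA}-rpi"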
.github/workflows/dependabot-auto-merge.yaml (new file, 24 lines)
@@ -0,0 +1,24 @@
name: dependabot-auto-merge
on: pull_request

permissions:
contents: write

jobs:
dependabot-auto-merge:
runs-on: ubuntu-latest
if: github.actor == 'dependabot[bot]'
steps:
- name: Get Dependabot metadata
id: metadata
uses: dependabot/fetch-metadata@v1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Enable auto-merge for Dependabot PRs
if: steps.metadata.outputs.dependency-type == 'direct:development' && (steps.metadata.outputs.update-type == 'version-update:semver-minor' || steps.metadata.outputs.update-type == 'version-update:semver-patch')
run: |
gh pr review --approve "$PR_URL"
gh pr merge --auto --squash "$PR_URL"
env:
PR_URL: ${{ github.event.pull_request.html_url }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/pull_request.yml (34 changed lines)
@@ -1,9 +1,6 @@
name: On pull request

on:
pull_request:
paths-ignore:
- "docs/**"
on: pull_request

env:
DEFAULT_PYTHON: 3.9
@@ -19,8 +16,6 @@ jobs:
DOCKER_BUILDKIT: "1"
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/setup-node@master
with:
node-version: 16.x
@@ -40,8 +35,6 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/setup-node@master
with:
node-version: 16.x
@@ -56,16 +49,14 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/setup-node@master
with:
node-version: 20.x
node-version: 16.x
- run: npm install
working-directory: ./web
# - name: Test
# run: npm run test
# working-directory: ./web
- name: Test
run: npm run test
working-directory: ./web

python_checks:
runs-on: ubuntu-latest
@@ -73,20 +64,21 @@ jobs:
steps:
- name: Check out the repository
uses: actions/checkout@v4
with:
persist-credentials: false
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v5.3.0
uses: actions/setup-python@v4.7.1
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Install requirements
run: |
python3 -m pip install -U pip
python3 -m pip install -r docker/main/requirements-dev.txt
- name: Check formatting
- name: Check black
run: |
ruff format --check --diff frigate migrations docker *.py
- name: Check lint
black --check --diff frigate migrations docker *.py
- name: Check isort
run: |
isort --check --diff frigate migrations docker *.py
- name: Check ruff
run: |
ruff check frigate migrations docker *.py

@@ -96,8 +88,6 @@ jobs:
steps:
- name: Check out code
uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/setup-node@master
with:
node-version: 16.x
.github/workflows/release.yml (60 changed lines)
@@ -1,7 +1,6 @@
name: On release

on:
workflow_dispatch:
release:
types: [published]

@@ -11,42 +10,53 @@ jobs:

steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- id: lowercaseRepo
uses: ASzc/change-string-case-action@v6
uses: ASzc/change-string-case-action@v5
with:
string: ${{ github.repository }}
- name: Log in to the Container registry
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567
uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Create tag variables
env:
TAG: ${{ github.ref_name }}
LOWERCASE_REPO: ${{ steps.lowercaseRepo.outputs.lowercase }}
run: |
BUILD_TYPE=$([[ "${TAG}" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]] && echo "stable" || echo "beta")
echo "BUILD_TYPE=${BUILD_TYPE}" >> $GITHUB_ENV
echo "BASE=ghcr.io/${LOWERCASE_REPO}" >> $GITHUB_ENV
echo "BUILD_TAG=${GITHUB_SHA::7}" >> $GITHUB_ENV
echo "BASE=ghcr.io/${{ steps.lowercaseRepo.outputs.lowercase }}" >> $GITHUB_ENV
echo "BUILD_TAG=${{ github.ref_name }}-${GITHUB_SHA::7}" >> $GITHUB_ENV
echo "CLEAN_VERSION=$(echo ${GITHUB_REF##*/} | tr '[:upper:]' '[:lower:]' | sed 's/^[v]//')" >> $GITHUB_ENV
- name: Tag and push the main image
run: |
VERSION_TAG=${BASE}:${CLEAN_VERSION}
STABLE_TAG=${BASE}:stable
PULL_TAG=${BASE}:${BUILD_TAG}
docker run --rm -v $HOME/.docker/config.json:/config.json quay.io/skopeo/stable:latest copy --authfile /config.json --multi-arch all docker://${PULL_TAG} docker://${VERSION_TAG}
for variant in standard-arm64 tensorrt tensorrt-jp4 tensorrt-jp5 rk h8l rocm; do
docker run --rm -v $HOME/.docker/config.json:/config.json quay.io/skopeo/stable:latest copy --authfile /config.json --multi-arch all docker://${PULL_TAG}-${variant} docker://${VERSION_TAG}-${variant}
done

# stable tag
if [[ "${BUILD_TYPE}" == "stable" ]]; then
docker run --rm -v $HOME/.docker/config.json:/config.json quay.io/skopeo/stable:latest copy --authfile /config.json --multi-arch all docker://${PULL_TAG} docker://${STABLE_TAG}
for variant in standard-arm64 tensorrt tensorrt-jp4 tensorrt-jp5 rk h8l rocm; do
docker run --rm -v $HOME/.docker/config.json:/config.json quay.io/skopeo/stable:latest copy --authfile /config.json --multi-arch all docker://${PULL_TAG}-${variant} docker://${STABLE_TAG}-${variant}
done
fi
docker pull ${PULL_TAG}
docker tag ${PULL_TAG} ${VERSION_TAG}
docker push ${VERSION_TAG}
- name: Tag and push standard arm64
run: |
VERSION_TAG=${BASE}:${CLEAN_VERSION}-standard-arm64
PULL_TAG=${BASE}:${BUILD_TAG}-standard-arm64
docker pull ${PULL_TAG}
docker tag ${PULL_TAG} ${VERSION_TAG}
docker push ${VERSION_TAG}
- name: Tag and push tensorrt
run: |
VERSION_TAG=${BASE}:${CLEAN_VERSION}-tensorrt
PULL_TAG=${BASE}:${BUILD_TAG}-tensorrt
docker pull ${PULL_TAG}
docker tag ${PULL_TAG} ${VERSION_TAG}
docker push ${VERSION_TAG}
- name: Tag and push tensorrt-jp4
run: |
VERSION_TAG=${BASE}:${CLEAN_VERSION}-tensorrt-jp4
PULL_TAG=${BASE}:${BUILD_TAG}-tensorrt-jp4
docker pull ${PULL_TAG}
docker tag ${PULL_TAG} ${VERSION_TAG}
docker push ${VERSION_TAG}
- name: Tag and push tensorrt-jp5
run: |
VERSION_TAG=${BASE}:${CLEAN_VERSION}-tensorrt-jp5
PULL_TAG=${BASE}:${BUILD_TAG}-tensorrt-jp5
docker pull ${PULL_TAG}
docker tag ${PULL_TAG} ${VERSION_TAG}
docker push ${VERSION_TAG}
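The `Create tag variables` step above decides between a stable and a beta release purely from the tag name; a standalone sketch of that check with illustrative values:

TAG="v0.15.0"   # example of a pushed release tag
if [[ "${TAG}" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
  echo "stable"   # plain vX.Y.Z tags also move the :stable tag
else
  echo "beta"     # anything else (e.g. a -beta suffix) only gets its version tag
fi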
.github/workflows/stale.yml (18 changed lines)
@@ -23,20 +23,4 @@ jobs:
exempt-pr-labels: "pinned,security,dependencies"
operations-per-run: 120
- name: Print outputs
env:
STALE_OUTPUT: ${{ join(steps.stale.outputs.*, ',') }}
run: echo "$STALE_OUTPUT"

# clean_ghcr:
# name: Delete outdated dev container images
# runs-on: ubuntu-latest
# steps:
# - name: Delete old images
# uses: snok/container-retention-policy@v2
# with:
# image-names: dev-*
# cut-off: 60 days ago UTC
# keep-at-least: 5
# account-type: personal
# token: ${{ secrets.GITHUB_TOKEN }}
# token-type: github-token
run: echo ${{ join(steps.stale.outputs.*, ',') }}
.gitignore (7 changed lines)
@@ -1,6 +1,5 @@
|
||||
.DS_Store
|
||||
__pycache__
|
||||
.mypy_cache
|
||||
*.pyc
|
||||
*.swp
|
||||
debug
|
||||
.vscode/*
|
||||
@@ -9,6 +8,7 @@ config/*
|
||||
!config/*.example
|
||||
models
|
||||
*.mp4
|
||||
*.ts
|
||||
*.db
|
||||
*.csv
|
||||
frigate/version.py
|
||||
@@ -17,5 +17,4 @@ web/node_modules
|
||||
web/coverage
|
||||
core
|
||||
!/web/**/*.ts
|
||||
.idea/*
|
||||
.ipynb_checkpoints
|
||||
.idea/*
|
.vscode/launch.json (5 changed lines)
@@ -3,9 +3,10 @@
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Python: Launch Frigate",
|
||||
"type": "debugpy",
|
||||
"type": "python",
|
||||
"request": "launch",
|
||||
"module": "frigate"
|
||||
"module": "frigate",
|
||||
"justMyCode": true
|
||||
}
|
||||
]
|
||||
}
|
||||
|
@@ -2,6 +2,3 @@
|
||||
/docker/tensorrt/ @madsciencetist @NateMeyer
|
||||
/docker/tensorrt/*arm64* @madsciencetist
|
||||
/docker/tensorrt/*jetson* @madsciencetist
|
||||
/docker/rockchip/ @MarcA711
|
||||
/docker/rocm/ @harakas
|
||||
/docker/hailo8l/ @spanner3003
|
||||
|
Makefile (31 changed lines)
@@ -1,9 +1,11 @@
|
||||
default_target: local
|
||||
|
||||
COMMIT_HASH := $(shell git log -1 --pretty=format:"%h"|tail -1)
|
||||
VERSION = 0.15.0
|
||||
VERSION = 0.13.0
|
||||
IMAGE_REPO ?= ghcr.io/blakeblackshear/frigate
|
||||
GITHUB_REF_NAME ?= $(shell git rev-parse --abbrev-ref HEAD)
|
||||
CURRENT_UID := $(shell id -u)
|
||||
CURRENT_GID := $(shell id -g)
|
||||
BOARDS= #Initialized empty
|
||||
|
||||
include docker/*/*.mk
|
||||
@@ -16,38 +18,25 @@ version:
|
||||
echo 'VERSION = "$(VERSION)-$(COMMIT_HASH)"' > frigate/version.py
|
||||
|
||||
local: version
|
||||
docker buildx build --target=frigate --file docker/main/Dockerfile . \
|
||||
--tag frigate:latest \
|
||||
--load
|
||||
docker buildx build --target=frigate --tag frigate:latest --load --file docker/main/Dockerfile .
|
||||
|
||||
amd64:
|
||||
docker buildx build --target=frigate --file docker/main/Dockerfile . \
|
||||
--tag $(IMAGE_REPO):$(VERSION)-$(COMMIT_HASH) \
|
||||
--platform linux/amd64
|
||||
docker buildx build --platform linux/amd64 --target=frigate --tag $(IMAGE_REPO):$(VERSION)-$(COMMIT_HASH) --file docker/main/Dockerfile .
|
||||
|
||||
arm64:
|
||||
docker buildx build --target=frigate --file docker/main/Dockerfile . \
|
||||
--tag $(IMAGE_REPO):$(VERSION)-$(COMMIT_HASH) \
|
||||
--platform linux/arm64
|
||||
docker buildx build --platform linux/arm64 --target=frigate --tag $(IMAGE_REPO):$(VERSION)-$(COMMIT_HASH) --file docker/main/Dockerfile .
|
||||
|
||||
build: version amd64 arm64
|
||||
docker buildx build --target=frigate --file docker/main/Dockerfile . \
|
||||
--tag $(IMAGE_REPO):$(VERSION)-$(COMMIT_HASH) \
|
||||
--platform linux/arm64/v8,linux/amd64
|
||||
docker buildx build --platform linux/arm64/v8,linux/amd64 --target=frigate --tag $(IMAGE_REPO):$(VERSION)-$(COMMIT_HASH) --file docker/main/Dockerfile .
|
||||
|
||||
push: push-boards
|
||||
docker buildx build --target=frigate --file docker/main/Dockerfile . \
|
||||
--tag $(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH) \
|
||||
--platform linux/arm64/v8,linux/amd64 \
|
||||
--push
|
||||
docker buildx build --push --platform linux/arm64/v8,linux/amd64 --target=frigate --tag $(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH) --file docker/main/Dockerfile .
|
||||
|
||||
run: local
|
||||
docker run --rm --publish=5000:5000 --volume=${PWD}/config:/config frigate:latest
|
||||
|
||||
run_tests: local
|
||||
docker run --rm --workdir=/opt/frigate --entrypoint= frigate:latest \
|
||||
python3 -u -m unittest
|
||||
docker run --rm --workdir=/opt/frigate --entrypoint= frigate:latest \
|
||||
python3 -u -m mypy --config-file frigate/mypy.ini frigate
|
||||
docker run --rm --workdir=/opt/frigate --entrypoint= frigate:latest python3 -u -m unittest
|
||||
docker run --rm --workdir=/opt/frigate --entrypoint= frigate:latest python3 -u -m mypy --config-file frigate/mypy.ini frigate
|
||||
|
||||
.PHONY: run_tests
|
||||
|
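A typical local loop with the Makefile targets above (a sketch; it assumes Docker with buildx is available and a config directory exists at ./config):

make local       # writes frigate/version.py and builds the frigate:latest image
make run         # runs the image with ./config mounted at /config, port 5000
make run_tests   # runs unittest and mypy inside the built image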
README.md (22 changed lines)
@@ -29,22 +29,18 @@ If you would like to make a donation to support development, please use [Github
|
||||
|
||||
## Screenshots
|
||||
|
||||
### Live dashboard
|
||||
Integration into Home Assistant
|
||||
|
||||
<div>
|
||||
<img width="800" alt="Live dashboard" src="https://github.com/blakeblackshear/frigate/assets/569905/5e713cb9-9db5-41dc-947a-6937c3bc376e">
|
||||
<a href="docs/static/img/media_browser.png"><img src="docs/static/img/media_browser.png" height=400></a>
|
||||
<a href="docs/static/img/notification.png"><img src="docs/static/img/notification.png" height=400></a>
|
||||
</div>
|
||||
|
||||
### Streamlined review workflow
|
||||
Also comes with a builtin UI:
|
||||
|
||||
<div>
|
||||
<img width="800" alt="Streamlined review workflow" src="https://github.com/blakeblackshear/frigate/assets/569905/6fed96e8-3b18-40e5-9ddc-31e6f3c9f2ff">
|
||||
<a href="docs/static/img/home-ui.png"><img src="docs/static/img/home-ui.png" height=400></a>
|
||||
<a href="docs/static/img/camera-ui.png"><img src="docs/static/img/camera-ui.png" height=400></a>
|
||||
</div>
|
||||
|
||||
### Multi-camera scrubbing
|
||||
<div>
|
||||
<img width="800" alt="Multi-camera scrubbing" src="https://github.com/blakeblackshear/frigate/assets/569905/d6788a15-0eeb-4427-a8d4-80b93cae3d74">
|
||||
</div>
|
||||
|
||||
### Built-in mask and zone editor
|
||||
<div>
|
||||
<img width="800" alt="Multi-camera scrubbing" src="https://github.com/blakeblackshear/frigate/assets/569905/d7885fc3-bfe6-452f-b7d0-d957cb3e31f5">
|
||||
</div>
|
||||

|
||||
|
@@ -4,7 +4,6 @@ from statistics import mean
|
||||
|
||||
import numpy as np
|
||||
|
||||
import frigate.util as util
|
||||
from frigate.config import DetectorTypeEnum
|
||||
from frigate.object_detection import (
|
||||
ObjectDetectProcess,
|
||||
@@ -61,7 +60,7 @@ def start(id, num_detections, detection_queue, event):
|
||||
object_detector.cleanup()
|
||||
print(f"{id} - Processed for {duration:.2f} seconds.")
|
||||
print(f"{id} - FPS: {object_detector.fps.eps():.2f}")
|
||||
print(f"{id} - Average frame processing time: {mean(frame_times) * 1000:.2f}ms")
|
||||
print(f"{id} - Average frame processing time: {mean(frame_times)*1000:.2f}ms")
|
||||
|
||||
|
||||
######
|
||||
@@ -91,7 +90,7 @@ edgetpu_process_2 = ObjectDetectProcess(
|
||||
)
|
||||
|
||||
for x in range(0, 10):
|
||||
camera_process = util.Process(
|
||||
camera_process = mp.Process(
|
||||
target=start, args=(x, 300, detection_queue, events[str(x)])
|
||||
)
|
||||
camera_process.daemon = True
|
||||
|
cspell.json (22 changed lines)
@@ -1,22 +0,0 @@
|
||||
{
|
||||
"version": "0.2",
|
||||
"ignorePaths": [
|
||||
"Dockerfile",
|
||||
"Dockerfile.*",
|
||||
"CMakeLists.txt",
|
||||
"*.db",
|
||||
"node_modules",
|
||||
"__pycache__",
|
||||
"dist",
|
||||
"/audio-labelmap.txt"
|
||||
],
|
||||
"language": "en",
|
||||
"dictionaryDefinitions": [
|
||||
{
|
||||
"name": "frigate-dictionary",
|
||||
"path": "./.cspell/frigate-dictionary.txt",
|
||||
"addWords": true
|
||||
}
|
||||
],
|
||||
"dictionaries": ["frigate-dictionary"]
|
||||
}
|
@@ -14,16 +14,15 @@ services:
|
||||
dockerfile: docker/main/Dockerfile
|
||||
# Use target devcontainer-trt for TensorRT dev
|
||||
target: devcontainer
|
||||
## Uncomment this block for nvidia gpu support
|
||||
# deploy:
|
||||
# resources:
|
||||
# reservations:
|
||||
# devices:
|
||||
# - driver: nvidia
|
||||
# count: 1
|
||||
# capabilities: [gpu]
|
||||
deploy:
|
||||
resources:
|
||||
reservations:
|
||||
devices:
|
||||
- driver: nvidia
|
||||
count: 1
|
||||
capabilities: [gpu]
|
||||
environment:
|
||||
YOLO_MODELS: ""
|
||||
YOLO_MODELS: yolov7-320
|
||||
devices:
|
||||
- /dev/bus/usb:/dev/bus/usb
|
||||
# - /dev/dri:/dev/dri # for intel hwaccel, needs to be updated for your hardware
|
||||
|
@@ -1,40 +0,0 @@
|
||||
# syntax=docker/dockerfile:1.6
|
||||
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# Build Python wheels
|
||||
FROM wheels AS h8l-wheels
|
||||
|
||||
COPY docker/main/requirements-wheels.txt /requirements-wheels.txt
|
||||
COPY docker/hailo8l/requirements-wheels-h8l.txt /requirements-wheels-h8l.txt
|
||||
|
||||
RUN sed -i "/https:\/\//d" /requirements-wheels.txt
|
||||
|
||||
# Create a directory to store the built wheels
|
||||
RUN mkdir /h8l-wheels
|
||||
|
||||
# Build the wheels
|
||||
RUN pip3 wheel --wheel-dir=/h8l-wheels -c /requirements-wheels.txt -r /requirements-wheels-h8l.txt
|
||||
|
||||
FROM wget AS hailort
|
||||
ARG TARGETARCH
|
||||
RUN --mount=type=bind,source=docker/hailo8l/install_hailort.sh,target=/deps/install_hailort.sh \
|
||||
/deps/install_hailort.sh
|
||||
|
||||
# Use deps as the base image
|
||||
FROM deps AS h8l-frigate
|
||||
|
||||
# Copy the wheels from the wheels stage
|
||||
COPY --from=h8l-wheels /h8l-wheels /deps/h8l-wheels
|
||||
COPY --from=hailort /hailo-wheels /deps/hailo-wheels
|
||||
COPY --from=hailort /rootfs/ /
|
||||
|
||||
# Install the wheels
|
||||
RUN pip3 install -U /deps/h8l-wheels/*.whl
|
||||
RUN pip3 install -U /deps/hailo-wheels/*.whl
|
||||
|
||||
# Copy base files from the rootfs stage
|
||||
COPY --from=rootfs / /
|
||||
|
||||
# Set workdir
|
||||
WORKDIR /opt/frigate/
|
@@ -1,34 +0,0 @@
|
||||
target wget {
|
||||
dockerfile = "docker/main/Dockerfile"
|
||||
platforms = ["linux/arm64","linux/amd64"]
|
||||
target = "wget"
|
||||
}
|
||||
|
||||
target wheels {
|
||||
dockerfile = "docker/main/Dockerfile"
|
||||
platforms = ["linux/arm64","linux/amd64"]
|
||||
target = "wheels"
|
||||
}
|
||||
|
||||
target deps {
|
||||
dockerfile = "docker/main/Dockerfile"
|
||||
platforms = ["linux/arm64","linux/amd64"]
|
||||
target = "deps"
|
||||
}
|
||||
|
||||
target rootfs {
|
||||
dockerfile = "docker/main/Dockerfile"
|
||||
platforms = ["linux/arm64","linux/amd64"]
|
||||
target = "rootfs"
|
||||
}
|
||||
|
||||
target h8l {
|
||||
dockerfile = "docker/hailo8l/Dockerfile"
|
||||
contexts = {
|
||||
wget = "target:wget"
|
||||
wheels = "target:wheels"
|
||||
deps = "target:deps"
|
||||
rootfs = "target:rootfs"
|
||||
}
|
||||
platforms = ["linux/arm64","linux/amd64"]
|
||||
}
|
@@ -1,15 +0,0 @@
|
||||
BOARDS += h8l
|
||||
|
||||
local-h8l: version
|
||||
docker buildx bake --file=docker/hailo8l/h8l.hcl h8l \
|
||||
--set h8l.tags=frigate:latest-h8l \
|
||||
--load
|
||||
|
||||
build-h8l: version
|
||||
docker buildx bake --file=docker/hailo8l/h8l.hcl h8l \
|
||||
--set h8l.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-h8l
|
||||
|
||||
push-h8l: build-h8l
|
||||
docker buildx bake --file=docker/hailo8l/h8l.hcl h8l \
|
||||
--set h8l.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-h8l \
|
||||
--push
|
@@ -1,19 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
hailo_version="4.19.0"
|
||||
|
||||
if [[ "${TARGETARCH}" == "amd64" ]]; then
|
||||
arch="x86_64"
|
||||
elif [[ "${TARGETARCH}" == "arm64" ]]; then
|
||||
arch="aarch64"
|
||||
fi
|
||||
|
||||
wget -qO- "https://github.com/frigate-nvr/hailort/releases/download/v${hailo_version}/hailort-${TARGETARCH}.tar.gz" |
|
||||
tar -C / -xzf -
|
||||
|
||||
mkdir -p /hailo-wheels
|
||||
|
||||
wget -P /hailo-wheels/ "https://github.com/frigate-nvr/hailort/releases/download/v${hailo_version}/hailort-${hailo_version}-cp39-cp39-linux_${arch}.whl"
|
||||
|
@@ -1,12 +0,0 @@
|
||||
appdirs==1.4.*
|
||||
argcomplete==2.0.*
|
||||
contextlib2==0.6.*
|
||||
distlib==0.3.*
|
||||
filelock==3.8.*
|
||||
future==0.18.*
|
||||
importlib-metadata==5.1.*
|
||||
importlib-resources==5.1.*
|
||||
netaddr==0.8.*
|
||||
netifaces==0.10.*
|
||||
verboselogs==1.7.*
|
||||
virtualenv==20.17.*
|
@@ -1,48 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Update package list and install dependencies
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y build-essential cmake git wget
|
||||
|
||||
arch=$(uname -m)
|
||||
|
||||
if [[ $arch == "x86_64" ]]; then
|
||||
sudo apt install -y linux-headers-$(uname -r);
|
||||
else
|
||||
sudo apt install -y linux-modules-extra-$(uname -r);
|
||||
fi
|
||||
|
||||
# Clone the HailoRT driver repository
|
||||
git clone --depth 1 --branch v4.19.0 https://github.com/hailo-ai/hailort-drivers.git
|
||||
|
||||
# Build and install the HailoRT driver
|
||||
cd hailort-drivers/linux/pcie
|
||||
sudo make all
|
||||
sudo make install
|
||||
|
||||
# Load the Hailo PCI driver
|
||||
sudo modprobe hailo_pci
|
||||
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "Unable to load hailo_pci module, common reasons for this are:"
|
||||
echo "- Key was rejected by service: Secure Boot is enabling disallowing install."
|
||||
echo "- Permissions are not setup correctly."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Download and install the firmware
|
||||
cd ../../
|
||||
./download_firmware.sh
|
||||
|
||||
# verify the firmware folder is present
|
||||
if [ ! -d /lib/firmware/hailo ]; then
|
||||
sudo mkdir /lib/firmware/hailo
|
||||
fi
|
||||
sudo mv hailo8_fw.*.bin /lib/firmware/hailo/hailo8_fw.bin
|
||||
|
||||
# Install udev rules
|
||||
sudo cp ./linux/pcie/51-hailo-udev.rules /etc/udev/rules.d/
|
||||
sudo udevadm control --reload-rules && sudo udevadm trigger
|
||||
|
||||
echo "HailoRT driver installation complete."
|
||||
echo "reboot your system to load the firmware!"
|
@@ -30,31 +30,18 @@ RUN --mount=type=tmpfs,target=/tmp --mount=type=tmpfs,target=/var/cache/apt \
|
||||
--mount=type=cache,target=/root/.ccache \
|
||||
/deps/build_nginx.sh
|
||||
|
||||
FROM wget AS sqlite-vec
|
||||
ARG DEBIAN_FRONTEND
|
||||
|
||||
# Build sqlite_vec from source
|
||||
COPY docker/main/build_sqlite_vec.sh /deps/build_sqlite_vec.sh
|
||||
RUN --mount=type=tmpfs,target=/tmp --mount=type=tmpfs,target=/var/cache/apt \
|
||||
--mount=type=bind,source=docker/main/build_sqlite_vec.sh,target=/deps/build_sqlite_vec.sh \
|
||||
--mount=type=cache,target=/root/.ccache \
|
||||
/deps/build_sqlite_vec.sh
|
||||
|
||||
FROM scratch AS go2rtc
|
||||
ARG TARGETARCH
|
||||
WORKDIR /rootfs/usr/local/go2rtc/bin
|
||||
ADD --link --chmod=755 "https://github.com/AlexxIT/go2rtc/releases/download/v1.9.2/go2rtc_linux_${TARGETARCH}" go2rtc
|
||||
ADD --link --chmod=755 "https://github.com/AlexxIT/go2rtc/releases/download/v1.8.1/go2rtc_linux_${TARGETARCH}" go2rtc
|
||||
|
||||
FROM wget AS tempio
|
||||
ARG TARGETARCH
|
||||
RUN --mount=type=bind,source=docker/main/install_tempio.sh,target=/deps/install_tempio.sh \
|
||||
/deps/install_tempio.sh
|
||||
|
||||
####
|
||||
#
|
||||
# OpenVino Support
|
||||
#
|
||||
# 1. Download and convert a model from Intel's Public Open Model Zoo
|
||||
# 2. Build libUSB without udev to handle NCS2 enumeration
|
||||
#
|
||||
####
|
||||
# Download and Convert OpenVino model
|
||||
@@ -64,24 +51,17 @@ ARG DEBIAN_FRONTEND
|
||||
# Install OpenVino Runtime and Dev library
|
||||
COPY docker/main/requirements-ov.txt /requirements-ov.txt
|
||||
RUN apt-get -qq update \
|
||||
&& apt-get -qq install -y wget python3 python3-dev python3-distutils gcc pkg-config libhdf5-dev \
|
||||
&& apt-get -qq install -y wget python3 python3-distutils \
|
||||
&& wget -q https://bootstrap.pypa.io/get-pip.py -O get-pip.py \
|
||||
&& python3 get-pip.py "pip" \
|
||||
&& pip install -r /requirements-ov.txt
|
||||
|
||||
# Get OpenVino Model
|
||||
RUN --mount=type=bind,source=docker/main/build_ov_model.py,target=/build_ov_model.py \
|
||||
mkdir /models && cd /models \
|
||||
&& wget http://download.tensorflow.org/models/object_detection/ssdlite_mobilenet_v2_coco_2018_05_09.tar.gz \
|
||||
&& tar -xvf ssdlite_mobilenet_v2_coco_2018_05_09.tar.gz \
|
||||
&& python3 /build_ov_model.py
|
||||
RUN mkdir /models \
|
||||
&& cd /models && omz_downloader --name ssdlite_mobilenet_v2 \
|
||||
&& cd /models && omz_converter --name ssdlite_mobilenet_v2 --precision FP16
|
||||
|
||||
|
||||
####
|
||||
#
|
||||
# Coral Compatibility
|
||||
#
|
||||
# Builds libusb without udev. Needed for synology and other devices with USB coral
|
||||
####
|
||||
# libUSB - No Udev
|
||||
FROM wget as libusb-build
|
||||
ARG TARGETARCH
|
||||
@@ -117,12 +97,11 @@ RUN wget -qO edgetpu_model.tflite https://github.com/google-coral/test_data/raw/
|
||||
RUN wget -qO cpu_model.tflite https://github.com/google-coral/test_data/raw/release-frogfish/ssdlite_mobiledet_coco_qat_postprocess.tflite
|
||||
COPY labelmap.txt .
|
||||
# Copy OpenVino model
|
||||
COPY --from=ov-converter /models/ssdlite_mobilenet_v2.xml openvino-model/
|
||||
COPY --from=ov-converter /models/ssdlite_mobilenet_v2.bin openvino-model/
|
||||
COPY --from=ov-converter /models/public/ssdlite_mobilenet_v2/FP16 openvino-model
|
||||
RUN wget -q https://github.com/openvinotoolkit/open_model_zoo/raw/master/data/dataset_classes/coco_91cl_bkgr.txt -O openvino-model/coco_91cl_bkgr.txt && \
|
||||
sed -i 's/truck/car/g' openvino-model/coco_91cl_bkgr.txt
|
||||
# Get Audio Model and labels
|
||||
RUN wget -qO - https://www.kaggle.com/api/v1/models/google/yamnet/tfLite/classification-tflite/1/download | tar xvz && mv 1.tflite cpu_audio_model.tflite
|
||||
RUN wget -qO cpu_audio_model.tflite https://tfhub.dev/google/lite-model/yamnet/classification/tflite/1?lite-format=tflite
|
||||
COPY audio-labelmap.txt .
|
||||
|
||||
|
||||
@@ -158,8 +137,6 @@ RUN apt-get -qq update \
|
||||
gfortran openexr libatlas-base-dev libssl-dev\
|
||||
libtbb2 libtbb-dev libdc1394-22-dev libopenexr-dev \
|
||||
libgstreamer-plugins-base1.0-dev libgstreamer1.0-dev \
|
||||
# sqlite3 dependencies
|
||||
tclsh \
|
||||
# scipy dependencies
|
||||
gcc gfortran libopenblas-dev liblapack-dev && \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
@@ -173,10 +150,6 @@ RUN wget -q https://bootstrap.pypa.io/get-pip.py -O get-pip.py \
|
||||
COPY docker/main/requirements.txt /requirements.txt
|
||||
RUN pip3 install -r /requirements.txt
|
||||
|
||||
# Build pysqlite3 from source
|
||||
COPY docker/main/build_pysqlite3.sh /build_pysqlite3.sh
|
||||
RUN /build_pysqlite3.sh
|
||||
|
||||
COPY docker/main/requirements-wheels.txt /requirements-wheels.txt
|
||||
RUN pip3 wheel --wheel-dir=/wheels -r /requirements-wheels.txt
|
||||
|
||||
@@ -184,10 +157,8 @@ RUN pip3 wheel --wheel-dir=/wheels -r /requirements-wheels.txt
|
||||
# Collect deps in a single layer
|
||||
FROM scratch AS deps-rootfs
|
||||
COPY --from=nginx /usr/local/nginx/ /usr/local/nginx/
|
||||
COPY --from=sqlite-vec /usr/local/lib/ /usr/local/lib/
|
||||
COPY --from=go2rtc /rootfs/ /
|
||||
COPY --from=libusb-build /usr/local/lib /usr/local/lib
|
||||
COPY --from=tempio /rootfs/ /
|
||||
COPY --from=s6-overlay /rootfs/ /
|
||||
COPY --from=models /rootfs/ /
|
||||
COPY docker/main/rootfs/ /
|
||||
@@ -205,16 +176,7 @@ ARG APT_KEY_DONT_WARN_ON_DANGEROUS_USAGE=DontWarn
|
||||
ENV NVIDIA_VISIBLE_DEVICES=all
|
||||
ENV NVIDIA_DRIVER_CAPABILITIES="compute,video,utility"
|
||||
|
||||
# Disable tokenizer parallelism warning
|
||||
# https://stackoverflow.com/questions/62691279/how-to-disable-tokenizers-parallelism-true-false-warning/72926996#72926996
|
||||
ENV TOKENIZERS_PARALLELISM=true
|
||||
# https://github.com/huggingface/transformers/issues/27214
|
||||
ENV TRANSFORMERS_NO_ADVISORY_WARNINGS=1
|
||||
|
||||
# Set OpenCV ffmpeg loglevel to fatal: https://ffmpeg.org/doxygen/trunk/log_8h.html
|
||||
ENV OPENCV_FFMPEG_LOGLEVEL=8
|
||||
|
||||
ENV PATH="/usr/local/go2rtc/bin:/usr/local/tempio/bin:/usr/local/nginx/sbin:${PATH}"
|
||||
ENV PATH="/usr/lib/btbn-ffmpeg/bin:/usr/local/go2rtc/bin:/usr/local/nginx/sbin:${PATH}"
|
||||
|
||||
# Install dependencies
|
||||
RUN --mount=type=bind,source=docker/main/install_deps.sh,target=/deps/install_deps.sh \
|
||||
@@ -229,18 +191,17 @@ COPY --from=deps-rootfs / /
|
||||
RUN ldconfig
|
||||
|
||||
EXPOSE 5000
|
||||
EXPOSE 1935
|
||||
EXPOSE 8554
|
||||
EXPOSE 8555/tcp 8555/udp
|
||||
|
||||
# Configure logging to prepend timestamps, log to stdout, keep 0 archives and rotate on 10MB
|
||||
ENV S6_LOGGING_SCRIPT="T 1 n0 s10000000 T"
|
||||
# Do not fail on long-running download scripts
|
||||
ENV S6_CMD_WAIT_FOR_SERVICES_MAXTIME=0
|
||||
|
||||
ENTRYPOINT ["/init"]
|
||||
CMD []
|
||||
|
||||
HEALTHCHECK --start-period=300s --start-interval=5s --interval=15s --timeout=5s --retries=3 \
|
||||
HEALTHCHECK --start-period=120s --start-interval=5s --interval=15s --timeout=5s --retries=3 \
|
||||
CMD curl --fail --silent --show-error http://127.0.0.1:5000/api/version || exit 1
|
||||
|
||||
# Frigate deps with Node.js and NPM for devcontainer
|
||||
@@ -254,13 +215,13 @@ COPY docker/main/fake_frigate_run /etc/s6-overlay/s6-rc.d/frigate/run
|
||||
RUN mkdir -p /opt/frigate \
|
||||
&& ln -svf /workspace/frigate/frigate /opt/frigate/frigate
|
||||
|
||||
# Install Node 20
|
||||
RUN curl -SLO https://deb.nodesource.com/nsolid_setup_deb.sh && \
|
||||
chmod 500 nsolid_setup_deb.sh && \
|
||||
./nsolid_setup_deb.sh 20 && \
|
||||
apt-get install nodejs -y \
|
||||
# Install Node 16
|
||||
RUN apt-get update \
|
||||
&& apt-get install wget -y \
|
||||
&& wget -qO- https://deb.nodesource.com/setup_16.x | bash - \
|
||||
&& apt-get install -y nodejs \
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
&& npm install -g npm@10
|
||||
&& npm install -g npm@9
|
||||
|
||||
WORKDIR /workspace/frigate
|
||||
|
||||
@@ -271,14 +232,12 @@ RUN apt-get update \
|
||||
RUN --mount=type=bind,source=./docker/main/requirements-dev.txt,target=/workspace/frigate/requirements-dev.txt \
|
||||
pip3 install -r requirements-dev.txt
|
||||
|
||||
HEALTHCHECK NONE
|
||||
|
||||
CMD ["sleep", "infinity"]
|
||||
|
||||
|
||||
# Frigate web build
|
||||
# This should be architecture agnostic, so speed up the build on multiarch by not using QEMU.
|
||||
FROM --platform=$BUILDPLATFORM node:20 AS web-build
|
||||
FROM --platform=$BUILDPLATFORM node:16 AS web-build
|
||||
|
||||
WORKDIR /work
|
||||
COPY web/package.json web/package-lock.json ./
|
||||
|
@@ -2,11 +2,10 @@
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
NGINX_VERSION="1.25.3"
|
||||
NGINX_VERSION="1.25.2"
|
||||
VOD_MODULE_VERSION="1.31"
|
||||
SECURE_TOKEN_MODULE_VERSION="1.5"
|
||||
SET_MISC_MODULE_VERSION="v0.33"
|
||||
NGX_DEVEL_KIT_VERSION="v0.3.3"
|
||||
RTMP_MODULE_VERSION="1.2.2"
|
||||
|
||||
cp /etc/apt/sources.list /etc/apt/sources.list.d/sources-src.list
|
||||
sed -i 's|deb http|deb-src http|g' /etc/apt/sources.list.d/sources-src.list
|
||||
@@ -50,16 +49,10 @@ mkdir /tmp/nginx-secure-token-module
|
||||
wget https://github.com/kaltura/nginx-secure-token-module/archive/refs/tags/${SECURE_TOKEN_MODULE_VERSION}.tar.gz
|
||||
tar -zxf ${SECURE_TOKEN_MODULE_VERSION}.tar.gz -C /tmp/nginx-secure-token-module --strip-components=1
|
||||
rm ${SECURE_TOKEN_MODULE_VERSION}.tar.gz
|
||||
|
||||
mkdir /tmp/ngx_devel_kit
|
||||
wget https://github.com/vision5/ngx_devel_kit/archive/refs/tags/${NGX_DEVEL_KIT_VERSION}.tar.gz
|
||||
tar -zxf ${NGX_DEVEL_KIT_VERSION}.tar.gz -C /tmp/ngx_devel_kit --strip-components=1
|
||||
rm ${NGX_DEVEL_KIT_VERSION}.tar.gz
|
||||
|
||||
mkdir /tmp/nginx-set-misc-module
|
||||
wget https://github.com/openresty/set-misc-nginx-module/archive/refs/tags/${SET_MISC_MODULE_VERSION}.tar.gz
|
||||
tar -zxf ${SET_MISC_MODULE_VERSION}.tar.gz -C /tmp/nginx-set-misc-module --strip-components=1
|
||||
rm ${SET_MISC_MODULE_VERSION}.tar.gz
|
||||
mkdir /tmp/nginx-rtmp-module
|
||||
wget -nv https://github.com/arut/nginx-rtmp-module/archive/refs/tags/v${RTMP_MODULE_VERSION}.tar.gz
|
||||
tar -zxf v${RTMP_MODULE_VERSION}.tar.gz -C /tmp/nginx-rtmp-module --strip-components=1
|
||||
rm v${RTMP_MODULE_VERSION}.tar.gz
|
||||
|
||||
cd /tmp/nginx
|
||||
|
||||
@@ -67,13 +60,10 @@ cd /tmp/nginx
|
||||
--with-file-aio \
|
||||
--with-http_sub_module \
|
||||
--with-http_ssl_module \
|
||||
--with-http_auth_request_module \
|
||||
--with-http_realip_module \
|
||||
--with-threads \
|
||||
--add-module=../ngx_devel_kit \
|
||||
--add-module=../nginx-set-misc-module \
|
||||
--add-module=../nginx-vod-module \
|
||||
--add-module=../nginx-secure-token-module \
|
||||
--add-module=../nginx-rtmp-module \
|
||||
--with-cc-opt="-O3 -Wno-error=implicit-fallthrough"
|
||||
|
||||
make CC="ccache gcc" -j$(nproc) && make install
|
||||
|
@@ -1,11 +0,0 @@
|
||||
import openvino as ov
|
||||
from openvino.tools import mo
|
||||
|
||||
ov_model = mo.convert_model(
|
||||
"/models/ssdlite_mobilenet_v2_coco_2018_05_09/frozen_inference_graph.pb",
|
||||
compress_to_fp16=True,
|
||||
transformations_config="/usr/local/lib/python3.9/dist-packages/openvino/tools/mo/front/tf/ssd_v2_support.json",
|
||||
tensorflow_object_detection_api_pipeline_config="/models/ssdlite_mobilenet_v2_coco_2018_05_09/pipeline.config",
|
||||
reverse_input_channels=True,
|
||||
)
|
||||
ov.save_model(ov_model, "/models/ssdlite_mobilenet_v2.xml")
|
@@ -1,35 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
SQLITE3_VERSION="96c92aba00c8375bc32fafcdf12429c58bd8aabfcadab6683e35bbb9cdebf19e" # 3.46.0
|
||||
PYSQLITE3_VERSION="0.5.3"
|
||||
|
||||
# Fetch the source code for the latest release of Sqlite.
|
||||
if [[ ! -d "sqlite" ]]; then
|
||||
wget https://www.sqlite.org/src/tarball/sqlite.tar.gz?r=${SQLITE3_VERSION} -O sqlite.tar.gz
|
||||
tar xzf sqlite.tar.gz
|
||||
cd sqlite/
|
||||
LIBS="-lm" ./configure --disable-tcl --enable-tempstore=always
|
||||
make sqlite3.c
|
||||
cd ../
|
||||
rm sqlite.tar.gz
|
||||
fi
|
||||
|
||||
# Grab the pysqlite3 source code.
|
||||
if [[ ! -d "./pysqlite3" ]]; then
|
||||
git clone https://github.com/coleifer/pysqlite3.git
|
||||
fi
|
||||
|
||||
cd pysqlite3/
|
||||
git checkout ${PYSQLITE3_VERSION}
|
||||
|
||||
# Copy the sqlite3 source amalgamation into the pysqlite3 directory so we can
|
||||
# create a self-contained extension module.
|
||||
cp "../sqlite/sqlite3.c" ./
|
||||
cp "../sqlite/sqlite3.h" ./
|
||||
|
||||
# Create the wheel and put it in the /wheels dir.
|
||||
sed -i "s|name='pysqlite3-binary'|name=PACKAGE_NAME|g" setup.py
|
||||
python3 setup.py build_static
|
||||
pip3 wheel . -w /wheels
|
@@ -1,31 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
SQLITE_VEC_VERSION="0.1.3"
|
||||
|
||||
cp /etc/apt/sources.list /etc/apt/sources.list.d/sources-src.list
|
||||
sed -i 's|deb http|deb-src http|g' /etc/apt/sources.list.d/sources-src.list
|
||||
apt-get update
|
||||
apt-get -yqq build-dep sqlite3 gettext git
|
||||
|
||||
mkdir /tmp/sqlite_vec
|
||||
# Grab the sqlite_vec source code.
|
||||
wget -nv https://github.com/asg017/sqlite-vec/archive/refs/tags/v${SQLITE_VEC_VERSION}.tar.gz
|
||||
tar -zxf v${SQLITE_VEC_VERSION}.tar.gz -C /tmp/sqlite_vec
|
||||
|
||||
cd /tmp/sqlite_vec/sqlite-vec-${SQLITE_VEC_VERSION}
|
||||
|
||||
mkdir -p vendor
|
||||
wget -O sqlite-amalgamation.zip https://www.sqlite.org/2024/sqlite-amalgamation-3450300.zip
|
||||
unzip sqlite-amalgamation.zip
|
||||
mv sqlite-amalgamation-3450300/* vendor/
|
||||
rmdir sqlite-amalgamation-3450300
|
||||
rm sqlite-amalgamation.zip
|
||||
|
||||
# build loadable module
|
||||
make loadable
|
||||
|
||||
# install it
|
||||
cp dist/vec0.* /usr/local/lib
|
||||
|
@@ -8,13 +8,11 @@ apt-get -qq install --no-install-recommends -y \
|
||||
apt-transport-https \
|
||||
gnupg \
|
||||
wget \
|
||||
lbzip2 \
|
||||
procps vainfo \
|
||||
unzip locales tzdata libxml2 xz-utils \
|
||||
python3.9 \
|
||||
python3-pip \
|
||||
curl \
|
||||
lsof \
|
||||
jq \
|
||||
nethogs
|
||||
|
||||
@@ -41,68 +39,39 @@ apt-get -qq install --no-install-recommends --no-install-suggests -y \
|
||||
|
||||
# btbn-ffmpeg -> amd64
|
||||
if [[ "${TARGETARCH}" == "amd64" ]]; then
|
||||
mkdir -p /usr/lib/ffmpeg/5.0
|
||||
mkdir -p /usr/lib/ffmpeg/7.0
|
||||
wget -qO btbn-ffmpeg.tar.xz "https://github.com/NickM-27/FFmpeg-Builds/releases/download/autobuild-2022-07-31-12-37/ffmpeg-n5.1-2-g915ef932a3-linux64-gpl-5.1.tar.xz"
|
||||
tar -xf btbn-ffmpeg.tar.xz -C /usr/lib/ffmpeg/5.0 --strip-components 1
|
||||
rm -rf btbn-ffmpeg.tar.xz /usr/lib/ffmpeg/5.0/doc /usr/lib/ffmpeg/5.0/bin/ffplay
|
||||
wget -qO btbn-ffmpeg.tar.xz "https://github.com/NickM-27/FFmpeg-Builds/releases/download/autobuild-2024-09-19-12-51/ffmpeg-n7.0.2-18-g3e6cec1286-linux64-gpl-7.0.tar.xz"
|
||||
tar -xf btbn-ffmpeg.tar.xz -C /usr/lib/ffmpeg/7.0 --strip-components 1
|
||||
rm -rf btbn-ffmpeg.tar.xz /usr/lib/ffmpeg/7.0/doc /usr/lib/ffmpeg/7.0/bin/ffplay
|
||||
mkdir -p /usr/lib/btbn-ffmpeg
|
||||
wget -qO btbn-ffmpeg.tar.xz "https://github.com/BtbN/FFmpeg-Builds/releases/download/autobuild-2022-07-31-12-37/ffmpeg-n5.1-2-g915ef932a3-linux64-gpl-5.1.tar.xz"
|
||||
tar -xf btbn-ffmpeg.tar.xz -C /usr/lib/btbn-ffmpeg --strip-components 1
|
||||
rm -rf btbn-ffmpeg.tar.xz /usr/lib/btbn-ffmpeg/doc /usr/lib/btbn-ffmpeg/bin/ffplay
|
||||
fi
|
||||
|
||||
# ffmpeg -> arm64
|
||||
if [[ "${TARGETARCH}" == "arm64" ]]; then
|
||||
mkdir -p /usr/lib/ffmpeg/5.0
|
||||
mkdir -p /usr/lib/ffmpeg/7.0
|
||||
wget -qO btbn-ffmpeg.tar.xz "https://github.com/NickM-27/FFmpeg-Builds/releases/download/autobuild-2022-07-31-12-37/ffmpeg-n5.1-2-g915ef932a3-linuxarm64-gpl-5.1.tar.xz"
|
||||
tar -xf btbn-ffmpeg.tar.xz -C /usr/lib/ffmpeg/5.0 --strip-components 1
|
||||
rm -rf btbn-ffmpeg.tar.xz /usr/lib/ffmpeg/5.0/doc /usr/lib/ffmpeg/5.0/bin/ffplay
|
||||
wget -qO btbn-ffmpeg.tar.xz "https://github.com/NickM-27/FFmpeg-Builds/releases/download/autobuild-2024-09-19-12-51/ffmpeg-n7.0.2-18-g3e6cec1286-linuxarm64-gpl-7.0.tar.xz"
|
||||
tar -xf btbn-ffmpeg.tar.xz -C /usr/lib/ffmpeg/7.0 --strip-components 1
|
||||
rm -rf btbn-ffmpeg.tar.xz /usr/lib/ffmpeg/7.0/doc /usr/lib/ffmpeg/7.0/bin/ffplay
|
||||
mkdir -p /usr/lib/btbn-ffmpeg
|
||||
wget -qO btbn-ffmpeg.tar.xz "https://github.com/BtbN/FFmpeg-Builds/releases/download/autobuild-2022-07-31-12-37/ffmpeg-n5.1-2-g915ef932a3-linuxarm64-gpl-5.1.tar.xz"
|
||||
tar -xf btbn-ffmpeg.tar.xz -C /usr/lib/btbn-ffmpeg --strip-components 1
|
||||
rm -rf btbn-ffmpeg.tar.xz /usr/lib/btbn-ffmpeg/doc /usr/lib/btbn-ffmpeg/bin/ffplay
|
||||
fi
|
||||
|
||||
# arch specific packages
|
||||
if [[ "${TARGETARCH}" == "amd64" ]]; then
|
||||
# use debian bookworm for amd / intel-i965 driver packages
|
||||
# use debian bookworm for hwaccel packages
|
||||
echo 'deb https://deb.debian.org/debian bookworm main contrib non-free' >/etc/apt/sources.list.d/debian-bookworm.list
|
||||
apt-get -qq update
|
||||
apt-get -qq install --no-install-recommends --no-install-suggests -y \
|
||||
i965-va-driver intel-gpu-tools onevpl-tools \
|
||||
libva-drm2 \
|
||||
mesa-va-drivers radeontop
|
||||
|
||||
intel-opencl-icd \
|
||||
mesa-va-drivers radeontop libva-drm2 intel-media-va-driver-non-free i965-va-driver libmfx1 intel-gpu-tools
|
||||
# something about this dependency requires it to be installed in a separate call rather than in the line above
|
||||
apt-get -qq install --no-install-recommends --no-install-suggests -y \
|
||||
i965-va-driver-shaders
|
||||
|
||||
# intel packages use zst compression so we need to update dpkg
|
||||
apt-get install -y dpkg
|
||||
|
||||
rm -f /etc/apt/sources.list.d/debian-bookworm.list
|
||||
|
||||
# use intel apt intel packages
|
||||
wget -qO - https://repositories.intel.com/gpu/intel-graphics.key | gpg --yes --dearmor --output /usr/share/keyrings/intel-graphics.gpg
|
||||
echo "deb [arch=amd64 signed-by=/usr/share/keyrings/intel-graphics.gpg] https://repositories.intel.com/gpu/ubuntu jammy client" | tee /etc/apt/sources.list.d/intel-gpu-jammy.list
|
||||
apt-get -qq update
|
||||
apt-get -qq install --no-install-recommends --no-install-suggests -y \
|
||||
intel-opencl-icd=24.35.30872.31-996~22.04 intel-level-zero-gpu=1.3.29735.27-914~22.04 intel-media-va-driver-non-free=24.3.3-996~22.04 \
|
||||
libmfx1=23.2.2-880~22.04 libmfxgen1=24.2.4-914~22.04 libvpl2=1:2.13.0.0-996~22.04
|
||||
|
||||
rm -f /usr/share/keyrings/intel-graphics.gpg
|
||||
rm -f /etc/apt/sources.list.d/intel-gpu-jammy.list
|
||||
fi
|
||||
|
||||
if [[ "${TARGETARCH}" == "arm64" ]]; then
|
||||
apt-get -qq install --no-install-recommends --no-install-suggests -y \
|
||||
libva-drm2 mesa-va-drivers radeontop
|
||||
libva-drm2 mesa-va-drivers
|
||||
fi
|
||||
|
||||
# install vulkan
|
||||
apt-get -qq install --no-install-recommends --no-install-suggests -y \
|
||||
libvulkan1 mesa-vulkan-drivers
|
||||
|
||||
apt-get purge gnupg apt-transport-https xz-utils -y
|
||||
apt-get clean autoclean -y
|
||||
apt-get autoremove --purge -y
|
||||
|
@@ -1,16 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -euxo pipefail
|
||||
|
||||
tempio_version="2021.09.0"
|
||||
|
||||
if [[ "${TARGETARCH}" == "amd64" ]]; then
|
||||
arch="amd64"
|
||||
elif [[ "${TARGETARCH}" == "arm64" ]]; then
|
||||
arch="aarch64"
|
||||
fi
|
||||
|
||||
mkdir -p /rootfs/usr/local/tempio/bin
|
||||
|
||||
wget -q -O /rootfs/usr/local/tempio/bin/tempio "https://github.com/home-assistant/tempio/releases/download/${tempio_version}/tempio_${arch}"
|
||||
chmod 755 /rootfs/usr/local/tempio/bin/tempio
|
@@ -1 +1,3 @@
|
||||
black == 23.10.*
|
||||
isort
|
||||
ruff
|
||||
|
@@ -1,3 +1,5 @@
|
||||
numpy
|
||||
tensorflow
|
||||
openvino-dev>=2024.0.0
|
||||
# Openvino Library - Custom built with MYRIAD support
|
||||
openvino @ https://github.com/NateMeyer/openvino-wheels/releases/download/multi-arch_2022.3.1/openvino-2022.3.1-1-cp39-cp39-manylinux_2_31_x86_64.whl; platform_machine == 'x86_64'
|
||||
openvino @ https://github.com/NateMeyer/openvino-wheels/releases/download/multi-arch_2022.3.1/openvino-2022.3.1-1-cp39-cp39-linux_aarch64.whl; platform_machine == 'aarch64'
|
||||
openvino-dev[tensorflow2] @ https://github.com/NateMeyer/openvino-wheels/releases/download/multi-arch_2022.3.1/openvino_dev-2022.3.1-1-py3-none-any.whl
|
||||
|
@@ -1,47 +1,28 @@
|
||||
click == 8.1.*
|
||||
# FastAPI
|
||||
aiohttp == 3.11.2
|
||||
starlette == 0.41.2
|
||||
starlette-context == 0.3.6
|
||||
fastapi == 0.115.*
|
||||
uvicorn == 0.30.*
|
||||
slowapi == 0.1.*
|
||||
Flask == 2.3.*
|
||||
imutils == 0.5.*
|
||||
joserfc == 1.0.*
|
||||
pathvalidate == 3.2.*
|
||||
markupsafe == 2.1.*
|
||||
matplotlib == 3.7.*
|
||||
mypy == 1.6.1
|
||||
numpy == 1.26.*
|
||||
numpy == 1.23.*
|
||||
onvif_zeep == 0.2.12
|
||||
opencv-python-headless == 4.9.0.*
|
||||
paho-mqtt == 2.1.*
|
||||
pandas == 2.2.*
|
||||
opencv-python-headless == 4.7.0.*
|
||||
paho-mqtt == 1.6.*
|
||||
peewee == 3.17.*
|
||||
peewee_migrate == 1.13.*
|
||||
psutil == 6.1.*
|
||||
pydantic == 2.8.*
|
||||
peewee_migrate == 1.12.*
|
||||
psutil == 5.9.*
|
||||
pydantic == 1.10.*
|
||||
git+https://github.com/fbcotter/py3nvml#egg=py3nvml
|
||||
pytz == 2024.*
|
||||
pyzmq == 26.2.*
|
||||
ruamel.yaml == 0.18.*
|
||||
tzlocal == 5.2
|
||||
requests == 2.32.*
|
||||
types-requests == 2.32.*
|
||||
scipy == 1.13.*
|
||||
PyYAML == 6.0.*
|
||||
pytz == 2023.3
|
||||
ruamel.yaml == 0.17.*
|
||||
tzlocal == 5.1
|
||||
types-PyYAML == 6.0.*
|
||||
requests == 2.31.*
|
||||
types-requests == 2.31.*
|
||||
scipy == 1.11.*
|
||||
norfair == 2.2.*
|
||||
setproctitle == 1.3.*
|
||||
ws4py == 0.5.*
|
||||
unidecode == 1.3.*
|
||||
# OpenVino & ONNX
|
||||
openvino == 2024.3.*
|
||||
onnxruntime-openvino == 1.19.* ; platform_machine == 'x86_64'
|
||||
onnxruntime == 1.19.* ; platform_machine == 'aarch64'
|
||||
# Embeddings
|
||||
transformers == 4.45.*
|
||||
# Generative AI
|
||||
google-generativeai == 0.8.*
|
||||
ollama == 0.3.*
|
||||
openai == 1.51.*
|
||||
# push notifications
|
||||
py-vapid == 1.9.*
|
||||
pywebpush == 2.0.*
|
||||
# Openvino Library - Custom built with MYRIAD support
|
||||
openvino @ https://github.com/NateMeyer/openvino-wheels/releases/download/multi-arch_2022.3.1/openvino-2022.3.1-1-cp39-cp39-manylinux_2_31_x86_64.whl; platform_machine == 'x86_64'
|
||||
openvino @ https://github.com/NateMeyer/openvino-wheels/releases/download/multi-arch_2022.3.1/openvino-2022.3.1-1-cp39-cp39-linux_aarch64.whl; platform_machine == 'aarch64'
|
||||
|
@@ -1 +0,0 @@
|
||||
certsync
|
@@ -1 +0,0 @@
|
||||
certsync-pipeline
|
@@ -1,4 +0,0 @@
|
||||
#!/command/with-contenv bash
|
||||
# shellcheck shell=bash
|
||||
|
||||
exec logutil-service /dev/shm/logs/certsync
|
@@ -1 +0,0 @@
|
||||
longrun
|
@@ -1,30 +0,0 @@
|
||||
#!/command/with-contenv bash
|
||||
# shellcheck shell=bash
|
||||
# Take down the S6 supervision tree when the service fails
|
||||
|
||||
set -o errexit -o nounset -o pipefail
|
||||
|
||||
# Logs should be sent to stdout so that s6 can collect them
|
||||
|
||||
declare exit_code_container
|
||||
exit_code_container=$(cat /run/s6-linux-init-container-results/exitcode)
|
||||
readonly exit_code_container
|
||||
readonly exit_code_service="${1}"
|
||||
readonly exit_code_signal="${2}"
|
||||
readonly service="CERTSYNC"
|
||||
|
||||
echo "[INFO] Service ${service} exited with code ${exit_code_service} (by signal ${exit_code_signal})"
|
||||
|
||||
if [[ "${exit_code_service}" -eq 256 ]]; then
|
||||
if [[ "${exit_code_container}" -eq 0 ]]; then
|
||||
echo $((128 + exit_code_signal)) >/run/s6-linux-init-container-results/exitcode
|
||||
fi
|
||||
if [[ "${exit_code_signal}" -eq 15 ]]; then
|
||||
exec /run/s6/basedir/bin/halt
|
||||
fi
|
||||
elif [[ "${exit_code_service}" -ne 0 ]]; then
|
||||
if [[ "${exit_code_container}" -eq 0 ]]; then
|
||||
echo "${exit_code_service}" >/run/s6-linux-init-container-results/exitcode
|
||||
fi
|
||||
exec /run/s6/basedir/bin/halt
|
||||
fi
|
@@ -1 +0,0 @@
|
||||
certsync-log
|
@@ -1,58 +0,0 @@
|
||||
#!/command/with-contenv bash
|
||||
# shellcheck shell=bash
|
||||
# Start the CERTSYNC service
|
||||
|
||||
set -o errexit -o nounset -o pipefail
|
||||
|
||||
# Logs should be sent to stdout so that s6 can collect them
|
||||
|
||||
echo "[INFO] Starting certsync..."
|
||||
|
||||
lefile="/etc/letsencrypt/live/frigate/fullchain.pem"
|
||||
|
||||
tls_enabled=`python3 /usr/local/nginx/get_tls_settings.py | jq -r .enabled`
|
||||
|
||||
while true
|
||||
do
|
||||
if [[ "$tls_enabled" == 'false' ]]; then
|
||||
sleep 9999
|
||||
continue
|
||||
fi
|
||||
|
||||
if [ ! -e $lefile ]
|
||||
then
|
||||
echo "[ERROR] TLS certificate does not exist: $lefile"
|
||||
fi
|
||||
|
||||
leprint=`openssl x509 -in $lefile -fingerprint -noout 2>&1 || echo 'failed'`
|
||||
|
||||
case "$leprint" in
|
||||
*Fingerprint*)
|
||||
;;
|
||||
*)
|
||||
echo "[ERROR] Missing fingerprint from $lefile"
|
||||
;;
|
||||
esac
|
||||
|
||||
liveprint=`echo | openssl s_client -showcerts -connect 127.0.0.1:8971 2>&1 | openssl x509 -fingerprint 2>&1 | grep -i fingerprint || echo 'failed'`
|
||||
|
||||
case "$liveprint" in
|
||||
*Fingerprint*)
|
||||
;;
|
||||
*)
|
||||
echo "[ERROR] Missing fingerprint from current nginx TLS cert"
|
||||
;;
|
||||
esac
|
||||
|
||||
if [[ "$leprint" != "failed" && "$liveprint" != "failed" && "$leprint" != "$liveprint" ]]
|
||||
then
|
||||
echo "[INFO] Reloading nginx to refresh TLS certificate"
|
||||
echo "$lefile: $leprint"
|
||||
/usr/local/nginx/sbin/nginx -s reload
|
||||
fi
|
||||
|
||||
sleep 60
|
||||
|
||||
done
|
||||
|
||||
exit 0
|
@@ -1 +0,0 @@
|
||||
30000
|
@@ -1 +0,0 @@
|
||||
longrun
|
@@ -16,8 +16,8 @@ function migrate_db_path() {
|
||||
if [[ -f "${config_file_yaml}" ]]; then
|
||||
config_file="${config_file_yaml}"
|
||||
elif [[ ! -f "${config_file}" ]]; then
|
||||
# Frigate will create the config file on startup
|
||||
return 0
|
||||
echo "[ERROR] Frigate config file not found"
|
||||
return 1
|
||||
fi
|
||||
unset config_file_yaml
|
||||
|
||||
@@ -42,14 +42,10 @@ function migrate_db_path() {
|
||||
fi
|
||||
}
|
||||
|
||||
function set_libva_version() {
|
||||
local ffmpeg_path=$(python3 /usr/local/ffmpeg/get_ffmpeg_path.py)
|
||||
export LIBAVFORMAT_VERSION_MAJOR=$($ffmpeg_path -version | grep -Po "libavformat\W+\K\d+")
|
||||
}
|
||||
|
||||
echo "[INFO] Preparing Frigate..."
|
||||
migrate_db_path
|
||||
set_libva_version
|
||||
export LIBAVFORMAT_VERSION_MAJOR=$(ffmpeg -version | grep -Po 'libavformat\W+\K\d+')
|
||||
|
||||
echo "[INFO] Starting Frigate..."
|
||||
|
||||
cd /opt/frigate || echo "[ERROR] Failed to change working directory to /opt/frigate"
|
||||
|
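The startup script above derives LIBAVFORMAT_VERSION_MAJOR by parsing ffmpeg's version banner rather than resolving the ffmpeg path first; run on its own the extraction looks like this (the printed number is just an example):

ffmpeg -version | grep -Po 'libavformat\W+\K\d+'   # prints e.g. 59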
@@ -43,18 +43,10 @@ function get_ip_and_port_from_supervisor() {
|
||||
export FRIGATE_GO2RTC_WEBRTC_CANDIDATE_INTERNAL="${ip_address}:${webrtc_port}"
|
||||
}
|
||||
|
||||
function set_libva_version() {
|
||||
local ffmpeg_path=$(python3 /usr/local/ffmpeg/get_ffmpeg_path.py)
|
||||
export LIBAVFORMAT_VERSION_MAJOR=$($ffmpeg_path -version | grep -Po "libavformat\W+\K\d+")
|
||||
}
|
||||
|
||||
if [[ -f "/dev/shm/go2rtc.yaml" ]]; then
|
||||
echo "[INFO] Removing stale config from last run..."
|
||||
rm /dev/shm/go2rtc.yaml
|
||||
fi
|
||||
export LIBAVFORMAT_VERSION_MAJOR=$(ffmpeg -version | grep -Po 'libavformat\W+\K\d+')
|
||||
|
||||
if [[ ! -f "/dev/shm/go2rtc.yaml" ]]; then
|
||||
echo "[INFO] Preparing new go2rtc config..."
|
||||
echo "[INFO] Preparing go2rtc config..."
|
||||
|
||||
if [[ -n "${SUPERVISOR_TOKEN:-}" ]]; then
|
||||
# Running as a Home Assistant add-on, infer the IP address and port
|
||||
@@ -62,12 +54,8 @@ if [[ ! -f "/dev/shm/go2rtc.yaml" ]]; then
|
||||
fi
|
||||
|
||||
python3 /usr/local/go2rtc/create_config.py
|
||||
else
|
||||
echo "[WARNING] Unable to remove existing go2rtc config. Changes made to your frigate config file may not be recognized. Please remove the /dev/shm/go2rtc.yaml from your docker host manually."
|
||||
fi
|
||||
|
||||
set_libva_version
|
||||
|
||||
readonly config_path="/config"
|
||||
|
||||
if [[ -x "${config_path}/go2rtc" ]]; then
|
||||
|
@@ -4,7 +4,7 @@
|
||||
|
||||
set -o errexit -o nounset -o pipefail
|
||||
|
||||
dirs=(/dev/shm/logs/frigate /dev/shm/logs/go2rtc /dev/shm/logs/nginx /dev/shm/logs/certsync)
|
||||
dirs=(/dev/shm/logs/frigate /dev/shm/logs/go2rtc /dev/shm/logs/nginx)
|
||||
|
||||
mkdir -p "${dirs[@]}"
|
||||
chown nobody:nogroup "${dirs[@]}"
|
||||
|
@@ -1,5 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
|
||||
# Wait for PID file to exist.
|
||||
while ! test -f /run/nginx.pid; do sleep 1; done
|
@@ -1 +0,0 @@
|
||||
3
|
@@ -8,84 +8,6 @@ set -o errexit -o nounset -o pipefail

echo "[INFO] Starting NGINX..."

# Taken from https://github.com/felipecrs/cgroup-scripts/commits/master/get_cpus.sh
function get_cpus() {
    local quota=""
    local period=""

    if [ -f /sys/fs/cgroup/cgroup.controllers ]; then
        if [ -f /sys/fs/cgroup/cpu.max ]; then
            read -r quota period </sys/fs/cgroup/cpu.max
            if [ "$quota" = "max" ]; then
                quota=""
                period=""
            fi
        else
            echo "[WARN] /sys/fs/cgroup/cpu.max not found. Falling back to /proc/cpuinfo." >&2
        fi
    else
        if [ -f /sys/fs/cgroup/cpu/cpu.cfs_quota_us ] && [ -f /sys/fs/cgroup/cpu/cpu.cfs_period_us ]; then
            quota=$(cat /sys/fs/cgroup/cpu/cpu.cfs_quota_us)
            period=$(cat /sys/fs/cgroup/cpu/cpu.cfs_period_us)

            if [ "$quota" = "-1" ]; then
                quota=""
                period=""
            fi
        else
            echo "[WARN] /sys/fs/cgroup/cpu/cpu.cfs_quota_us or /sys/fs/cgroup/cpu/cpu.cfs_period_us not found. Falling back to /proc/cpuinfo." >&2
        fi
    fi

    local cpus
    if [ "${period}" != "0" ] && [ -n "${quota}" ] && [ -n "${period}" ]; then
        cpus=$((quota / period))
        if [ "$cpus" -eq 0 ]; then
            cpus=1
        fi
    else
        cpus=$(grep -c ^processor /proc/cpuinfo)
    fi

    printf '%s' "$cpus"
}

function set_worker_processes() {
    # Capture number of assigned CPUs to calculate worker processes
    local cpus

    cpus=$(get_cpus)
    if [[ "${cpus}" -gt 4 ]]; then
        cpus=4
    fi

    # we need to catch any errors because sed will fail if user has bind mounted a custom nginx file
    sed -i "s/worker_processes auto;/worker_processes ${cpus};/" /usr/local/nginx/conf/nginx.conf || true
}

set_worker_processes

# ensure the directory for ACME challenges exists
mkdir -p /etc/letsencrypt/www

# Create self signed certs if needed
letsencrypt_path=/etc/letsencrypt/live/frigate
mkdir -p $letsencrypt_path

if [ ! \( -f "$letsencrypt_path/privkey.pem" -a -f "$letsencrypt_path/fullchain.pem" \) ]; then
    echo "[INFO] No TLS certificate found. Generating a self signed certificate..."
    openssl req -new -newkey rsa:4096 -days 365 -nodes -x509 \
        -subj "/O=FRIGATE DEFAULT CERT/CN=*" \
        -keyout "$letsencrypt_path/privkey.pem" -out "$letsencrypt_path/fullchain.pem" 2>/dev/null
fi

# build templates for optional TLS support
python3 /usr/local/nginx/get_tls_settings.py | \
    tempio -template /usr/local/nginx/templates/listen.gotmpl \
        -out /usr/local/nginx/conf/listen.conf

# Replace the bash process with the NGINX process, redirecting stderr to stdout
exec 2>&1
exec \
    s6-notifyoncheck -t 30000 -n 1 \
    nginx
exec nginx
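set_worker_processes() above caps NGINX workers at min(assigned CPUs, 4) and falls back to /proc/cpuinfo when no cgroup quota is set. A small Python sketch of the same arithmetic, assuming the cgroup v2 cpu.max format (function and parameter names are illustrative):

def workers_from_cpu_max(cpu_max: str, nproc: int) -> int:
    # cgroup v2 cpu.max holds "max" or "<quota> <period>" in microseconds
    quota, _, period = cpu_max.partition(" ")
    if quota == "max" or not period or period == "0":
        cpus = nproc  # no quota set: fall back to counting processors
    else:
        cpus = max(int(quota) // int(period), 1)
    return min(cpus, 4)  # worker_processes is capped at 4, matching set_worker_processes()

# a container limited to 150000/100000 on an 8-core host gets one worker:
# workers_from_cpu_max("150000 100000", nproc=8) == 1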
@@ -1,80 +0,0 @@
|
||||
0 person
|
||||
1 bicycle
|
||||
2 car
|
||||
3 motorcycle
|
||||
4 airplane
|
||||
5 car
|
||||
6 train
|
||||
7 car
|
||||
8 boat
|
||||
9 traffic light
|
||||
10 fire hydrant
|
||||
11 stop sign
|
||||
12 parking meter
|
||||
13 bench
|
||||
14 bird
|
||||
15 cat
|
||||
16 dog
|
||||
17 horse
|
||||
18 sheep
|
||||
19 cow
|
||||
20 elephant
|
||||
21 bear
|
||||
22 zebra
|
||||
23 giraffe
|
||||
24 backpack
|
||||
25 umbrella
|
||||
26 handbag
|
||||
27 tie
|
||||
28 suitcase
|
||||
29 frisbee
|
||||
30 skis
|
||||
31 snowboard
|
||||
32 sports ball
|
||||
33 kite
|
||||
34 baseball bat
|
||||
35 baseball glove
|
||||
36 skateboard
|
||||
37 surfboard
|
||||
38 tennis racket
|
||||
39 bottle
|
||||
40 wine glass
|
||||
41 cup
|
||||
42 fork
|
||||
43 knife
|
||||
44 spoon
|
||||
45 bowl
|
||||
46 banana
|
||||
47 apple
|
||||
48 sandwich
|
||||
49 orange
|
||||
50 broccoli
|
||||
51 carrot
|
||||
52 hot dog
|
||||
53 pizza
|
||||
54 donut
|
||||
55 cake
|
||||
56 chair
|
||||
57 couch
|
||||
58 potted plant
|
||||
59 bed
|
||||
60 dining table
|
||||
61 toilet
|
||||
62 tv
|
||||
63 laptop
|
||||
64 mouse
|
||||
65 remote
|
||||
66 keyboard
|
||||
67 cell phone
|
||||
68 microwave
|
||||
69 oven
|
||||
70 toaster
|
||||
71 sink
|
||||
72 refrigerator
|
||||
73 book
|
||||
74 clock
|
||||
75 vase
|
||||
76 scissors
|
||||
77 teddy bear
|
||||
78 hair drier
|
||||
79 toothbrush
|
@@ -1,91 +0,0 @@
|
||||
0 person
|
||||
1 bicycle
|
||||
2 car
|
||||
3 motorcycle
|
||||
4 airplane
|
||||
5 bus
|
||||
6 train
|
||||
7 car
|
||||
8 boat
|
||||
9 traffic light
|
||||
10 fire hydrant
|
||||
11 street sign
|
||||
12 stop sign
|
||||
13 parking meter
|
||||
14 bench
|
||||
15 bird
|
||||
16 cat
|
||||
17 dog
|
||||
18 horse
|
||||
19 sheep
|
||||
20 cow
|
||||
21 elephant
|
||||
22 bear
|
||||
23 zebra
|
||||
24 giraffe
|
||||
25 hat
|
||||
26 backpack
|
||||
27 umbrella
|
||||
28 shoe
|
||||
29 eye glasses
|
||||
30 handbag
|
||||
31 tie
|
||||
32 suitcase
|
||||
33 frisbee
|
||||
34 skis
|
||||
35 snowboard
|
||||
36 sports ball
|
||||
37 kite
|
||||
38 baseball bat
|
||||
39 baseball glove
|
||||
40 skateboard
|
||||
41 surfboard
|
||||
42 tennis racket
|
||||
43 bottle
|
||||
44 plate
|
||||
45 wine glass
|
||||
46 cup
|
||||
47 fork
|
||||
48 knife
|
||||
49 spoon
|
||||
50 bowl
|
||||
51 banana
|
||||
52 apple
|
||||
53 sandwich
|
||||
54 orange
|
||||
55 broccoli
|
||||
56 carrot
|
||||
57 hot dog
|
||||
58 pizza
|
||||
59 donut
|
||||
60 cake
|
||||
61 chair
|
||||
62 couch
|
||||
63 potted plant
|
||||
64 bed
|
||||
65 mirror
|
||||
66 dining table
|
||||
67 window
|
||||
68 desk
|
||||
69 toilet
|
||||
70 door
|
||||
71 tv
|
||||
72 laptop
|
||||
73 mouse
|
||||
74 remote
|
||||
75 keyboard
|
||||
76 cell phone
|
||||
77 microwave
|
||||
78 oven
|
||||
79 toaster
|
||||
80 sink
|
||||
81 refrigerator
|
||||
82 blender
|
||||
83 book
|
||||
84 clock
|
||||
85 vase
|
||||
86 scissors
|
||||
87 teddy bear
|
||||
88 hair drier
|
||||
89 toothbrush
|
||||
90 hair brush
|
@@ -1,45 +0,0 @@
import json
import os
import shutil
import sys

from ruamel.yaml import YAML

sys.path.insert(0, "/opt/frigate")
from frigate.const import (
    DEFAULT_FFMPEG_VERSION,
    INCLUDED_FFMPEG_VERSIONS,
)

sys.path.remove("/opt/frigate")

yaml = YAML()

config_file = os.environ.get("CONFIG_FILE", "/config/config.yml")

# Check if we can use .yaml instead of .yml
config_file_yaml = config_file.replace(".yml", ".yaml")
if os.path.isfile(config_file_yaml):
    config_file = config_file_yaml

try:
    with open(config_file) as f:
        raw_config = f.read()

    if config_file.endswith((".yaml", ".yml")):
        config: dict[str, any] = yaml.load(raw_config)
    elif config_file.endswith(".json"):
        config: dict[str, any] = json.loads(raw_config)
except FileNotFoundError:
    config: dict[str, any] = {}

path = config.get("ffmpeg", {}).get("path", "default")
if path == "default":
    if shutil.which("ffmpeg") is None:
        print(f"/usr/lib/ffmpeg/{DEFAULT_FFMPEG_VERSION}/bin/ffmpeg")
    else:
        print("ffmpeg")
elif path in INCLUDED_FFMPEG_VERSIONS:
    print(f"/usr/lib/ffmpeg/{path}/bin/ffmpeg")
else:
    print(f"{path}/bin/ffmpeg")
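The deleted get_ffmpeg_path.py resolves which ffmpeg binary the run scripts should use: the configured path, a bundled version, or whatever is on PATH. A hedged sketch of that decision order as a standalone function (the arguments stand in for the frigate.const values; the function name is hypothetical):

import shutil

def resolve_ffmpeg(path: str, default_version: str, included_versions: set[str]) -> str:
    # Same decision order as the script above.
    if path == "default":
        return "ffmpeg" if shutil.which("ffmpeg") else f"/usr/lib/ffmpeg/{default_version}/bin/ffmpeg"
    if path in included_versions:
        return f"/usr/lib/ffmpeg/{path}/bin/ffmpeg"
    return f"{path}/bin/ffmpeg"

# e.g. resolve_ffmpeg("/opt/custom", "7.0", {"5.0", "7.0"}) -> "/opt/custom/bin/ffmpeg"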
@@ -2,33 +2,20 @@
|
||||
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
from ruamel.yaml import YAML
|
||||
import yaml
|
||||
|
||||
sys.path.insert(0, "/opt/frigate")
|
||||
from frigate.const import (
|
||||
BIRDSEYE_PIPE,
|
||||
DEFAULT_FFMPEG_VERSION,
|
||||
INCLUDED_FFMPEG_VERSIONS,
|
||||
from frigate.const import BIRDSEYE_PIPE # noqa: E402
|
||||
from frigate.ffmpeg_presets import ( # noqa: E402
|
||||
parse_preset_hardware_acceleration_encode,
|
||||
)
|
||||
from frigate.ffmpeg_presets import parse_preset_hardware_acceleration_encode
|
||||
|
||||
sys.path.remove("/opt/frigate")
|
||||
|
||||
yaml = YAML()
|
||||
|
||||
FRIGATE_ENV_VARS = {k: v for k, v in os.environ.items() if k.startswith("FRIGATE_")}
|
||||
# read docker secret files as env vars too
|
||||
if os.path.isdir("/run/secrets"):
|
||||
for secret_file in os.listdir("/run/secrets"):
|
||||
if secret_file.startswith("FRIGATE_"):
|
||||
FRIGATE_ENV_VARS[secret_file] = (
|
||||
Path(os.path.join("/run/secrets", secret_file)).read_text().strip()
|
||||
)
|
||||
|
||||
config_file = os.environ.get("CONFIG_FILE", "/config/config.yml")
|
||||
|
||||
# Check if we can use .yaml instead of .yml
|
||||
@@ -36,16 +23,13 @@ config_file_yaml = config_file.replace(".yml", ".yaml")
|
||||
if os.path.isfile(config_file_yaml):
|
||||
config_file = config_file_yaml
|
||||
|
||||
try:
|
||||
with open(config_file) as f:
|
||||
raw_config = f.read()
|
||||
with open(config_file) as f:
|
||||
raw_config = f.read()
|
||||
|
||||
if config_file.endswith((".yaml", ".yml")):
|
||||
config: dict[str, any] = yaml.load(raw_config)
|
||||
elif config_file.endswith(".json"):
|
||||
config: dict[str, any] = json.loads(raw_config)
|
||||
except FileNotFoundError:
|
||||
config: dict[str, any] = {}
|
||||
if config_file.endswith((".yaml", ".yml")):
|
||||
config: dict[str, any] = yaml.safe_load(raw_config)
|
||||
elif config_file.endswith(".json"):
|
||||
config: dict[str, any] = json.loads(raw_config)
|
||||
|
||||
go2rtc_config: dict[str, any] = config.get("go2rtc", {})
|
||||
|
||||
@@ -65,15 +49,7 @@ if go2rtc_config.get("log") is None:
|
||||
elif go2rtc_config["log"].get("format") is None:
|
||||
go2rtc_config["log"]["format"] = "text"
|
||||
|
||||
# ensure there is a default webrtc config
|
||||
if not go2rtc_config.get("webrtc"):
|
||||
go2rtc_config["webrtc"] = {}
|
||||
|
||||
# go2rtc should listen on 8555 tcp & udp by default
|
||||
if not go2rtc_config["webrtc"].get("listen"):
|
||||
go2rtc_config["webrtc"]["listen"] = ":8555"
|
||||
|
||||
if not go2rtc_config["webrtc"].get("candidates", []):
|
||||
if not go2rtc_config.get("webrtc", {}).get("candidates", []):
|
||||
default_candidates = []
|
||||
# use internal candidate if it was discovered when running through the add-on
|
||||
internal_candidate = os.environ.get(
|
||||
@@ -109,32 +85,16 @@ else:
|
||||
**FRIGATE_ENV_VARS
|
||||
)
|
||||
|
||||
# ensure ffmpeg path is set correctly
|
||||
path = config.get("ffmpeg", {}).get("path", "default")
|
||||
if path == "default":
|
||||
if shutil.which("ffmpeg") is None:
|
||||
ffmpeg_path = f"/usr/lib/ffmpeg/{DEFAULT_FFMPEG_VERSION}/bin/ffmpeg"
|
||||
else:
|
||||
ffmpeg_path = "ffmpeg"
|
||||
elif path in INCLUDED_FFMPEG_VERSIONS:
|
||||
ffmpeg_path = f"/usr/lib/ffmpeg/{path}/bin/ffmpeg"
|
||||
else:
|
||||
ffmpeg_path = f"{path}/bin/ffmpeg"
|
||||
|
||||
if go2rtc_config.get("ffmpeg") is None:
|
||||
go2rtc_config["ffmpeg"] = {"bin": ffmpeg_path}
|
||||
elif go2rtc_config["ffmpeg"].get("bin") is None:
|
||||
go2rtc_config["ffmpeg"]["bin"] = ffmpeg_path
|
||||
|
||||
# need to replace ffmpeg command when using ffmpeg4
|
||||
if int(os.environ.get("LIBAVFORMAT_VERSION_MAJOR", "59") or "59") < 59:
|
||||
if go2rtc_config["ffmpeg"].get("rtsp") is None:
|
||||
go2rtc_config["ffmpeg"]["rtsp"] = (
|
||||
"-fflags nobuffer -flags low_delay -stimeout 5000000 -user_agent go2rtc/ffmpeg -rtsp_transport tcp -i {input}"
|
||||
)
|
||||
else:
|
||||
if int(os.environ["LIBAVFORMAT_VERSION_MAJOR"]) < 59:
|
||||
if go2rtc_config.get("ffmpeg") is None:
|
||||
go2rtc_config["ffmpeg"] = {"path": ""}
|
||||
go2rtc_config["ffmpeg"] = {
|
||||
"rtsp": "-fflags nobuffer -flags low_delay -stimeout 5000000 -user_agent go2rtc/ffmpeg -rtsp_transport tcp -i {input}"
|
||||
}
|
||||
elif go2rtc_config["ffmpeg"].get("rtsp") is None:
|
||||
go2rtc_config["ffmpeg"][
|
||||
"rtsp"
|
||||
] = "-fflags nobuffer -flags low_delay -stimeout 5000000 -user_agent go2rtc/ffmpeg -rtsp_transport tcp -i {input}"
|
||||
|
||||
for name in go2rtc_config.get("streams", {}):
|
||||
stream = go2rtc_config["streams"][name]
|
||||
@@ -165,7 +125,7 @@ if config.get("birdseye", {}).get("restream", False):
|
||||
birdseye: dict[str, any] = config.get("birdseye")
|
||||
|
||||
input = f"-f rawvideo -pix_fmt yuv420p -video_size {birdseye.get('width', 1280)}x{birdseye.get('height', 720)} -r 10 -i {BIRDSEYE_PIPE}"
|
||||
ffmpeg_cmd = f"exec:{parse_preset_hardware_acceleration_encode(ffmpeg_path, config.get('ffmpeg', {}).get('hwaccel_args', ''), input, '-rtsp_transport tcp -f rtsp {output}')}"
|
||||
ffmpeg_cmd = f"exec:{parse_preset_hardware_acceleration_encode(config.get('ffmpeg', {}).get('hwaccel_args'), input, '-rtsp_transport tcp -f rtsp {output}')}"
|
||||
|
||||
if go2rtc_config.get("streams"):
|
||||
go2rtc_config["streams"]["birdseye"] = ffmpeg_cmd
|
||||
|
@@ -1,43 +0,0 @@
|
||||
set $upstream_auth http://127.0.0.1:5001/auth;
|
||||
|
||||
## Virtual endpoint created by nginx to forward auth requests.
|
||||
location /auth {
|
||||
## Essential Proxy Configuration
|
||||
internal;
|
||||
proxy_pass $upstream_auth;
|
||||
|
||||
## Headers
|
||||
|
||||
# First strip out all the request headers
|
||||
# Note: This is important to ensure that upgrade requests for secure
|
||||
# websockets dont cause the backend to fail
|
||||
proxy_pass_request_headers off;
|
||||
# Pass info about the request
|
||||
proxy_set_header X-Original-Method $request_method;
|
||||
proxy_set_header X-Original-URL $scheme://$http_host$request_uri;
|
||||
proxy_set_header X-Server-Port $server_port;
|
||||
proxy_set_header Content-Length "";
|
||||
# Pass along auth related info
|
||||
proxy_set_header Authorization $http_authorization;
|
||||
proxy_set_header Cookie $http_cookie;
|
||||
proxy_set_header X-CSRF-TOKEN "1";
|
||||
|
||||
# include headers from common auth proxies
|
||||
include proxy_trusted_headers.conf;
|
||||
|
||||
## Basic Proxy Configuration
|
||||
proxy_pass_request_body off;
|
||||
proxy_next_upstream error timeout invalid_header http_500 http_502 http_503; # Timeout if the real server is dead
|
||||
proxy_redirect http:// $scheme://;
|
||||
proxy_http_version 1.1;
|
||||
proxy_cache_bypass $cookie_session;
|
||||
proxy_no_cache $cookie_session;
|
||||
proxy_buffers 4 32k;
|
||||
client_body_buffer_size 128k;
|
||||
|
||||
## Advanced Proxy Configuration
|
||||
send_timeout 5m;
|
||||
proxy_read_timeout 240;
|
||||
proxy_send_timeout 240;
|
||||
proxy_connect_timeout 240;
|
||||
}
|
@@ -1,22 +0,0 @@
|
||||
## Send a subrequest to verify if the user is authenticated and has permission to access the resource.
|
||||
auth_request /auth;
|
||||
|
||||
## Save the upstream metadata response headers from Authelia to variables.
|
||||
auth_request_set $user $upstream_http_remote_user;
|
||||
auth_request_set $groups $upstream_http_remote_groups;
|
||||
auth_request_set $name $upstream_http_remote_name;
|
||||
auth_request_set $email $upstream_http_remote_email;
|
||||
|
||||
## Inject the metadata response headers from the variables into the request made to the backend.
|
||||
proxy_set_header Remote-User $user;
|
||||
proxy_set_header Remote-Groups $groups;
|
||||
proxy_set_header Remote-Email $email;
|
||||
proxy_set_header Remote-Name $name;
|
||||
|
||||
## Refresh the cookie as needed
|
||||
auth_request_set $auth_cookie $upstream_http_set_cookie;
|
||||
add_header Set-Cookie $auth_cookie;
|
||||
|
||||
## Pass the location header back up if it exists
|
||||
auth_request_set $redirection_url $upstream_http_location;
|
||||
add_header Location $redirection_url;
|
@@ -1,4 +0,0 @@
upstream go2rtc {
    server 127.0.0.1:1984;
    keepalive 1024;
}
@@ -10,8 +10,6 @@ events {
|
||||
}
|
||||
|
||||
http {
|
||||
map_hash_bucket_size 256;
|
||||
|
||||
include mime.types;
|
||||
default_type application/octet-stream;
|
||||
|
||||
@@ -34,13 +32,6 @@ http {
|
||||
gzip_proxied no-cache no-store private expired auth;
|
||||
gzip_vary on;
|
||||
|
||||
proxy_cache_path /dev/shm/nginx_cache levels=1:2 keys_zone=api_cache:10m max_size=10m inactive=1m use_temp_path=off;
|
||||
|
||||
map $sent_http_content_type $should_not_cache {
|
||||
'application/json' 0;
|
||||
default 1;
|
||||
}
|
||||
|
||||
upstream frigate_api {
|
||||
server 127.0.0.1:5001;
|
||||
keepalive 1024;
|
||||
@@ -56,10 +47,13 @@ http {
|
||||
keepalive 1024;
|
||||
}
|
||||
|
||||
include go2rtc_upstream.conf;
|
||||
upstream go2rtc {
|
||||
server 127.0.0.1:1984;
|
||||
keepalive 1024;
|
||||
}
|
||||
|
||||
server {
|
||||
include listen.conf;
|
||||
listen 5000;
|
||||
|
||||
# vod settings
|
||||
vod_base_url '';
|
||||
@@ -92,10 +86,7 @@ http {
|
||||
gzip on;
|
||||
gzip_types application/vnd.apple.mpegurl;
|
||||
|
||||
include auth_location.conf;
|
||||
|
||||
location /vod/ {
|
||||
include auth_request.conf;
|
||||
aio threads;
|
||||
vod hls;
|
||||
|
||||
@@ -104,12 +95,9 @@ http {
|
||||
|
||||
add_header Cache-Control "no-store";
|
||||
expires off;
|
||||
|
||||
keepalive_disable safari;
|
||||
}
|
||||
|
||||
location /stream/ {
|
||||
include auth_request.conf;
|
||||
add_header Cache-Control "no-store";
|
||||
expires off;
|
||||
|
||||
@@ -124,14 +112,12 @@ http {
|
||||
}
|
||||
|
||||
location /clips/ {
|
||||
include auth_request.conf;
|
||||
|
||||
types {
|
||||
video/mp4 mp4;
|
||||
image/jpeg jpg;
|
||||
}
|
||||
|
||||
expires 7d;
|
||||
add_header Cache-Control "public";
|
||||
autoindex on;
|
||||
root /media/frigate;
|
||||
}
|
||||
@@ -142,7 +128,6 @@ http {
|
||||
}
|
||||
|
||||
location /recordings/ {
|
||||
include auth_request.conf;
|
||||
types {
|
||||
video/mp4 mp4;
|
||||
}
|
||||
@@ -153,7 +138,6 @@ http {
|
||||
}
|
||||
|
||||
location /exports/ {
|
||||
include auth_request.conf;
|
||||
types {
|
||||
video/mp4 mp4;
|
||||
}
|
||||
@@ -164,121 +148,59 @@ http {
|
||||
}
|
||||
|
||||
location /ws {
|
||||
include auth_request.conf;
|
||||
proxy_pass http://mqtt_ws/;
|
||||
include proxy.conf;
|
||||
}
|
||||
|
||||
location /live/jsmpeg/ {
|
||||
include auth_request.conf;
|
||||
proxy_pass http://jsmpeg/;
|
||||
include proxy.conf;
|
||||
}
|
||||
|
||||
# frigate lovelace card uses this path
|
||||
location /live/mse/api/ws {
|
||||
include auth_request.conf;
|
||||
limit_except GET {
|
||||
deny all;
|
||||
}
|
||||
proxy_pass http://go2rtc/api/ws;
|
||||
location /live/mse/ {
|
||||
proxy_pass http://go2rtc/;
|
||||
include proxy.conf;
|
||||
}
|
||||
|
||||
location /live/webrtc/api/ws {
|
||||
include auth_request.conf;
|
||||
limit_except GET {
|
||||
deny all;
|
||||
}
|
||||
proxy_pass http://go2rtc/api/ws;
|
||||
location /live/webrtc/ {
|
||||
proxy_pass http://go2rtc/;
|
||||
include proxy.conf;
|
||||
}
|
||||
|
||||
# pass through go2rtc player
|
||||
location /live/webrtc/webrtc.html {
|
||||
include auth_request.conf;
|
||||
limit_except GET {
|
||||
deny all;
|
||||
}
|
||||
proxy_pass http://go2rtc/webrtc.html;
|
||||
location ~* /api/go2rtc([/]?.*)$ {
|
||||
proxy_pass http://go2rtc;
|
||||
rewrite ^/api/go2rtc(.*)$ /api$1 break;
|
||||
include proxy.conf;
|
||||
}
|
||||
|
||||
# frontend uses this to fetch the version
|
||||
location /api/go2rtc/api {
|
||||
include auth_request.conf;
|
||||
limit_except GET {
|
||||
deny all;
|
||||
}
|
||||
proxy_pass http://go2rtc/api;
|
||||
include proxy.conf;
|
||||
}
|
||||
|
||||
# integration uses this to add webrtc candidate
|
||||
location /api/go2rtc/webrtc {
|
||||
include auth_request.conf;
|
||||
limit_except POST {
|
||||
deny all;
|
||||
}
|
||||
proxy_pass http://go2rtc/api/webrtc;
|
||||
include proxy.conf;
|
||||
}
|
||||
|
||||
location ~* /api/.*\.(jpg|jpeg|png|webp|gif)$ {
|
||||
include auth_request.conf;
|
||||
rewrite ^/api/(.*)$ /$1 break;
|
||||
location ~* /api/.*\.(jpg|jpeg|png)$ {
|
||||
rewrite ^/api/(.*)$ $1 break;
|
||||
proxy_pass http://frigate_api;
|
||||
include proxy.conf;
|
||||
}
|
||||
|
||||
location /api/ {
|
||||
include auth_request.conf;
|
||||
add_header Cache-Control "no-store";
|
||||
expires off;
|
||||
proxy_pass http://frigate_api/;
|
||||
include proxy.conf;
|
||||
|
||||
proxy_cache api_cache;
|
||||
proxy_cache_lock on;
|
||||
proxy_cache_use_stale updating;
|
||||
proxy_cache_valid 200 5s;
|
||||
proxy_cache_bypass $http_x_cache_bypass;
|
||||
proxy_no_cache $should_not_cache;
|
||||
add_header X-Cache-Status $upstream_cache_status;
|
||||
|
||||
location /api/vod/ {
|
||||
include auth_request.conf;
|
||||
proxy_pass http://frigate_api/vod/;
|
||||
include proxy.conf;
|
||||
proxy_cache off;
|
||||
}
|
||||
|
||||
location /api/login {
|
||||
auth_request off;
|
||||
rewrite ^/api(/.*)$ $1 break;
|
||||
proxy_pass http://frigate_api;
|
||||
include proxy.conf;
|
||||
}
|
||||
|
||||
location /api/stats {
|
||||
include auth_request.conf;
|
||||
access_log off;
|
||||
rewrite ^/api(/.*)$ $1 break;
|
||||
rewrite ^/api/(.*)$ $1 break;
|
||||
proxy_pass http://frigate_api;
|
||||
include proxy.conf;
|
||||
}
|
||||
|
||||
location /api/version {
|
||||
include auth_request.conf;
|
||||
access_log off;
|
||||
rewrite ^/api(/.*)$ $1 break;
|
||||
rewrite ^/api/(.*)$ $1 break;
|
||||
proxy_pass http://frigate_api;
|
||||
include proxy.conf;
|
||||
}
|
||||
}
|
||||
|
||||
location / {
|
||||
# do not require auth for static assets
|
||||
add_header Cache-Control "no-store";
|
||||
expires off;
|
||||
|
||||
@@ -300,7 +222,22 @@ http {
|
||||
sub_filter_once off;
|
||||
|
||||
root /opt/frigate/web;
|
||||
try_files $uri $uri.html $uri/ /index.html;
|
||||
try_files $uri $uri/ /index.html;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
rtmp {
|
||||
server {
|
||||
listen 1935;
|
||||
chunk_size 4096;
|
||||
allow publish 127.0.0.1;
|
||||
deny publish all;
|
||||
allow play all;
|
||||
application live {
|
||||
live on;
|
||||
record off;
|
||||
meta copy;
|
||||
}
|
||||
}
|
||||
}
|
@@ -1,26 +1,4 @@
|
||||
## Headers
|
||||
proxy_set_header Host $host;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection "Upgrade";
|
||||
proxy_set_header X-Original-URL $scheme://$http_host$request_uri;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_set_header X-Forwarded-Host $http_host;
|
||||
proxy_set_header X-Forwarded-URI $request_uri;
|
||||
proxy_set_header X-Forwarded-Ssl on;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
|
||||
## Basic Proxy Configuration
|
||||
client_body_buffer_size 128k;
|
||||
proxy_next_upstream error timeout invalid_header http_500 http_502 http_503; ## Timeout if the real server is dead.
|
||||
proxy_redirect http:// $scheme://;
|
||||
proxy_http_version 1.1;
|
||||
proxy_cache_bypass $cookie_session;
|
||||
proxy_no_cache $cookie_session;
|
||||
proxy_buffers 64 256k;
|
||||
|
||||
## Advanced Proxy Configuration
|
||||
send_timeout 5m;
|
||||
proxy_read_timeout 360;
|
||||
proxy_send_timeout 360;
|
||||
proxy_connect_timeout 360;
|
||||
proxy_set_header Host $host;
|
@@ -1,25 +0,0 @@
|
||||
# Header used to validate reverse proxy trust
|
||||
proxy_set_header X-Proxy-Secret $http_x_proxy_secret;
|
||||
|
||||
# these headers will be copied to the /auth request and are available
|
||||
# to be mapped in the config to Frigate's remote-user header
|
||||
|
||||
# List of headers sent by common authentication proxies:
|
||||
# - Authelia
|
||||
# - Traefik forward auth
|
||||
# - oauth2_proxy
|
||||
# - Authentik
|
||||
|
||||
proxy_set_header Remote-User $http_remote_user;
|
||||
proxy_set_header Remote-Groups $http_remote_groups;
|
||||
proxy_set_header Remote-Email $http_remote_email;
|
||||
proxy_set_header Remote-Name $http_remote_name;
|
||||
proxy_set_header X-Forwarded-User $http_x_forwarded_user;
|
||||
proxy_set_header X-Forwarded-Groups $http_x_forwarded_groups;
|
||||
proxy_set_header X-Forwarded-Email $http_x_forwarded_email;
|
||||
proxy_set_header X-Forwarded-Preferred-Username $http_x_forwarded_preferred_username;
|
||||
proxy_set_header X-authentik-username $http_x_authentik_username;
|
||||
proxy_set_header X-authentik-groups $http_x_authentik_groups;
|
||||
proxy_set_header X-authentik-email $http_x_authentik_email;
|
||||
proxy_set_header X-authentik-name $http_x_authentik_name;
|
||||
proxy_set_header X-authentik-uid $http_x_authentik_uid;
|
@@ -1,30 +0,0 @@
"""Prints the tls config as json to stdout."""

import json
import os

from ruamel.yaml import YAML

yaml = YAML()

config_file = os.environ.get("CONFIG_FILE", "/config/config.yml")

# Check if we can use .yaml instead of .yml
config_file_yaml = config_file.replace(".yml", ".yaml")
if os.path.isfile(config_file_yaml):
    config_file = config_file_yaml

try:
    with open(config_file) as f:
        raw_config = f.read()

    if config_file.endswith((".yaml", ".yml")):
        config: dict[str, any] = yaml.load(raw_config)
    elif config_file.endswith(".json"):
        config: dict[str, any] = json.loads(raw_config)
except FileNotFoundError:
    config: dict[str, any] = {}

tls_config: dict[str, any] = config.get("tls", {"enabled": True})

print(json.dumps(tls_config))
@@ -1,33 +0,0 @@
# intended for internal traffic, not protected by auth
listen 5000;

{{ if not .enabled }}
# intended for external traffic, protected by auth
listen 8971;
{{ else }}
# intended for external traffic, protected by auth
listen 8971 ssl;

ssl_certificate /etc/letsencrypt/live/frigate/fullchain.pem;
ssl_certificate_key /etc/letsencrypt/live/frigate/privkey.pem;

# generated 2024-06-01, Mozilla Guideline v5.7, nginx 1.25.3, OpenSSL 1.1.1w, modern configuration, no OCSP
# https://ssl-config.mozilla.org/#server=nginx&version=1.25.3&config=modern&openssl=1.1.1w&ocsp=false&guideline=5.7
ssl_session_timeout 1d;
ssl_session_cache shared:MozSSL:10m; # about 40000 sessions
ssl_session_tickets off;

# modern configuration
ssl_protocols TLSv1.3;
ssl_prefer_server_ciphers off;

# HSTS (ngx_http_headers_module is required) (63072000 seconds)
add_header Strict-Transport-Security "max-age=63072000" always;

# ACME challenge location
location /.well-known/acme-challenge/ {
    default_type "text/plain";
    root /etc/letsencrypt/www;
}
{{ end }}
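get_tls_settings.py prints the `tls` section (defaulting to {"enabled": true}) as JSON, and the NGINX run script pipes it into tempio to render the listen.gotmpl template above into listen.conf. tempio itself is a Go templating tool, so the following is only an illustrative Python sketch of the branch the template takes (function name is hypothetical):

import json

def listen_directives(tls_settings_json: str) -> list[str]:
    tls = json.loads(tls_settings_json)
    directives = ["listen 5000;"]  # internal listener, not protected by auth
    if tls.get("enabled", True):
        directives.append("listen 8971 ssl;")  # external listener with TLS
    else:
        directives.append("listen 8971;")
    return directives

# e.g. listen_directives('{"enabled": false}') -> ["listen 5000;", "listen 8971;"]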
@@ -1,27 +0,0 @@
|
||||
# syntax=docker/dockerfile:1.6
|
||||
|
||||
# https://askubuntu.com/questions/972516/debian-frontend-environment-variable
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
FROM wheels as rk-wheels
|
||||
COPY docker/main/requirements-wheels.txt /requirements-wheels.txt
|
||||
COPY docker/rockchip/requirements-wheels-rk.txt /requirements-wheels-rk.txt
|
||||
RUN sed -i "/https:\/\//d" /requirements-wheels.txt
|
||||
RUN pip3 wheel --wheel-dir=/rk-wheels -c /requirements-wheels.txt -r /requirements-wheels-rk.txt
|
||||
|
||||
FROM deps AS rk-frigate
|
||||
ARG TARGETARCH
|
||||
|
||||
RUN --mount=type=bind,from=rk-wheels,source=/rk-wheels,target=/deps/rk-wheels \
|
||||
pip3 install -U /deps/rk-wheels/*.whl
|
||||
|
||||
WORKDIR /opt/frigate/
|
||||
COPY --from=rootfs / /
|
||||
|
||||
ADD https://github.com/MarcA711/rknn-toolkit2/releases/download/v2.0.0/librknnrt.so /usr/lib/
|
||||
|
||||
RUN rm -rf /usr/lib/btbn-ffmpeg/bin/ffmpeg
|
||||
RUN rm -rf /usr/lib/btbn-ffmpeg/bin/ffprobe
|
||||
ADD --chmod=111 https://github.com/MarcA711/Rockchip-FFmpeg-Builds/releases/download/6.1-7/ffmpeg /usr/lib/ffmpeg/6.0/bin/
|
||||
ADD --chmod=111 https://github.com/MarcA711/Rockchip-FFmpeg-Builds/releases/download/6.1-7/ffprobe /usr/lib/ffmpeg/6.0/bin/
|
||||
ENV PATH="/usr/lib/ffmpeg/6.0/bin/:${PATH}"
|
@@ -1 +0,0 @@
|
||||
rknn-toolkit-lite2 @ https://github.com/MarcA711/rknn-toolkit2/releases/download/v2.0.0/rknn_toolkit_lite2-2.0.0b0-cp39-cp39-linux_aarch64.whl
|
@@ -1,27 +0,0 @@
|
||||
target wheels {
|
||||
dockerfile = "docker/main/Dockerfile"
|
||||
platforms = ["linux/arm64"]
|
||||
target = "wheels"
|
||||
}
|
||||
|
||||
target deps {
|
||||
dockerfile = "docker/main/Dockerfile"
|
||||
platforms = ["linux/arm64"]
|
||||
target = "deps"
|
||||
}
|
||||
|
||||
target rootfs {
|
||||
dockerfile = "docker/main/Dockerfile"
|
||||
platforms = ["linux/arm64"]
|
||||
target = "rootfs"
|
||||
}
|
||||
|
||||
target rk {
|
||||
dockerfile = "docker/rockchip/Dockerfile"
|
||||
contexts = {
|
||||
wheels = "target:wheels",
|
||||
deps = "target:deps",
|
||||
rootfs = "target:rootfs"
|
||||
}
|
||||
platforms = ["linux/arm64"]
|
||||
}
|
@@ -1,15 +0,0 @@
|
||||
BOARDS += rk
|
||||
|
||||
local-rk: version
|
||||
docker buildx bake --file=docker/rockchip/rk.hcl rk \
|
||||
--set rk.tags=frigate:latest-rk \
|
||||
--load
|
||||
|
||||
build-rk: version
|
||||
docker buildx bake --file=docker/rockchip/rk.hcl rk \
|
||||
--set rk.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-rk
|
||||
|
||||
push-rk: build-rk
|
||||
docker buildx bake --file=docker/rockchip/rk.hcl rk \
|
||||
--set rk.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-rk \
|
||||
--push
|
@@ -1,108 +0,0 @@
|
||||
# syntax=docker/dockerfile:1.4
|
||||
|
||||
# https://askubuntu.com/questions/972516/debian-frontend-environment-variable
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG ROCM=5.7.3
|
||||
ARG AMDGPU=gfx900
|
||||
ARG HSA_OVERRIDE_GFX_VERSION
|
||||
ARG HSA_OVERRIDE
|
||||
|
||||
#######################################################################
|
||||
FROM ubuntu:focal as rocm
|
||||
|
||||
ARG ROCM
|
||||
|
||||
RUN apt-get update && apt-get -y upgrade
|
||||
RUN apt-get -y install gnupg wget
|
||||
|
||||
RUN mkdir --parents --mode=0755 /etc/apt/keyrings
|
||||
|
||||
RUN wget https://repo.radeon.com/rocm/rocm.gpg.key -O - | gpg --dearmor | tee /etc/apt/keyrings/rocm.gpg > /dev/null
|
||||
COPY docker/rocm/rocm.list /etc/apt/sources.list.d/
|
||||
COPY docker/rocm/rocm-pin-600 /etc/apt/preferences.d/
|
||||
|
||||
RUN apt-get update
|
||||
|
||||
RUN apt-get -y install --no-install-recommends migraphx hipfft roctracer
|
||||
RUN apt-get -y install --no-install-recommends migraphx-dev
|
||||
|
||||
RUN mkdir -p /opt/rocm-dist/opt/rocm-$ROCM/lib
|
||||
RUN cd /opt/rocm-$ROCM/lib && cp -dpr libMIOpen*.so* libamd*.so* libhip*.so* libhsa*.so* libmigraphx*.so* librocm*.so* librocblas*.so* libroctracer*.so* librocfft*.so* /opt/rocm-dist/opt/rocm-$ROCM/lib/
|
||||
RUN cd /opt/rocm-dist/opt/ && ln -s rocm-$ROCM rocm
|
||||
|
||||
RUN mkdir -p /opt/rocm-dist/etc/ld.so.conf.d/
|
||||
RUN echo /opt/rocm/lib|tee /opt/rocm-dist/etc/ld.so.conf.d/rocm.conf
|
||||
|
||||
#######################################################################
|
||||
FROM --platform=linux/amd64 debian:11 as debian-base
|
||||
|
||||
RUN apt-get update && apt-get -y upgrade
|
||||
RUN apt-get -y install --no-install-recommends libelf1 libdrm2 libdrm-amdgpu1 libnuma1 kmod
|
||||
|
||||
RUN apt-get -y install python3
|
||||
|
||||
#######################################################################
|
||||
# ROCm does not come with migraphx wrappers for python 3.9, so we build it here
|
||||
FROM debian-base as debian-build
|
||||
|
||||
ARG ROCM
|
||||
|
||||
COPY --from=rocm /opt/rocm-$ROCM /opt/rocm-$ROCM
|
||||
RUN ln -s /opt/rocm-$ROCM /opt/rocm
|
||||
|
||||
RUN apt-get -y install g++ cmake
|
||||
RUN apt-get -y install python3-pybind11 python3.9-distutils python3-dev
|
||||
|
||||
WORKDIR /opt/build
|
||||
|
||||
COPY docker/rocm/migraphx .
|
||||
|
||||
RUN mkdir build && cd build && cmake .. && make install
|
||||
|
||||
#######################################################################
|
||||
FROM deps AS deps-prelim
|
||||
|
||||
# need this to install libnuma1
|
||||
RUN apt-get update
|
||||
# no ugprade?!?!
|
||||
RUN apt-get -y install libnuma1
|
||||
|
||||
WORKDIR /opt/frigate/
|
||||
COPY --from=rootfs / /
|
||||
|
||||
COPY docker/rocm/requirements-wheels-rocm.txt /requirements.txt
|
||||
RUN python3 -m pip install --upgrade pip \
|
||||
&& pip3 uninstall -y onnxruntime-openvino \
|
||||
&& pip3 install -r /requirements.txt
|
||||
|
||||
#######################################################################
|
||||
FROM scratch AS rocm-dist
|
||||
|
||||
ARG ROCM
|
||||
ARG AMDGPU
|
||||
|
||||
COPY --from=rocm /opt/rocm-$ROCM/bin/rocminfo /opt/rocm-$ROCM/bin/migraphx-driver /opt/rocm-$ROCM/bin/
|
||||
COPY --from=rocm /opt/rocm-$ROCM/share/miopen/db/*$AMDGPU* /opt/rocm-$ROCM/share/miopen/db/
|
||||
COPY --from=rocm /opt/rocm-$ROCM/share/miopen/db/*gfx908* /opt/rocm-$ROCM/share/miopen/db/
|
||||
COPY --from=rocm /opt/rocm-$ROCM/lib/rocblas/library/*$AMDGPU* /opt/rocm-$ROCM/lib/rocblas/library/
|
||||
COPY --from=rocm /opt/rocm-dist/ /
|
||||
COPY --from=debian-build /opt/rocm/lib/migraphx.cpython-39-x86_64-linux-gnu.so /opt/rocm-$ROCM/lib/
|
||||
|
||||
#######################################################################
|
||||
FROM deps-prelim AS rocm-prelim-hsa-override0
|
||||
|
||||
ENV HSA_ENABLE_SDMA=0
|
||||
|
||||
COPY --from=rocm-dist / /
|
||||
|
||||
RUN ldconfig
|
||||
|
||||
#######################################################################
|
||||
FROM rocm-prelim-hsa-override0 as rocm-prelim-hsa-override1
|
||||
|
||||
ARG HSA_OVERRIDE_GFX_VERSION
|
||||
ENV HSA_OVERRIDE_GFX_VERSION=$HSA_OVERRIDE_GFX_VERSION
|
||||
|
||||
#######################################################################
|
||||
FROM rocm-prelim-hsa-override$HSA_OVERRIDE as rocm-deps
|
||||
|
@@ -1,26 +0,0 @@
|
||||
|
||||
cmake_minimum_required(VERSION 3.1)
|
||||
|
||||
set(CMAKE_CXX_STANDARD 17)
|
||||
set(CMAKE_CXX_STANDARD_REQUIRED ON)
|
||||
set(CMAKE_CXX_EXTENSIONS OFF)
|
||||
|
||||
if(NOT CMAKE_BUILD_TYPE)
|
||||
set(CMAKE_BUILD_TYPE Release)
|
||||
endif()
|
||||
|
||||
SET(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE)
|
||||
|
||||
project(migraphx_py)
|
||||
|
||||
include_directories(/opt/rocm/include)
|
||||
|
||||
find_package(pybind11 REQUIRED)
|
||||
pybind11_add_module(migraphx migraphx_py.cpp)
|
||||
|
||||
target_link_libraries(migraphx PRIVATE /opt/rocm/lib/libmigraphx.so /opt/rocm/lib/libmigraphx_tf.so /opt/rocm/lib/libmigraphx_onnx.so)
|
||||
|
||||
install(TARGETS migraphx
|
||||
COMPONENT python
|
||||
LIBRARY DESTINATION /opt/rocm/lib
|
||||
)
|
@@ -1,582 +0,0 @@
|
||||
/*
|
||||
* The MIT License (MIT)
|
||||
*
|
||||
* Copyright (c) 2015-2022 Advanced Micro Devices, Inc. All rights reserved.
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in
|
||||
* all copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
* THE SOFTWARE.
|
||||
*/
|
||||
|
||||
#include <pybind11/pybind11.h>
|
||||
#include <pybind11/stl.h>
|
||||
#include <pybind11/numpy.h>
|
||||
#include <migraphx/program.hpp>
|
||||
#include <migraphx/instruction_ref.hpp>
|
||||
#include <migraphx/operation.hpp>
|
||||
#include <migraphx/quantization.hpp>
|
||||
#include <migraphx/generate.hpp>
|
||||
#include <migraphx/instruction.hpp>
|
||||
#include <migraphx/ref/target.hpp>
|
||||
#include <migraphx/stringutils.hpp>
|
||||
#include <migraphx/tf.hpp>
|
||||
#include <migraphx/onnx.hpp>
|
||||
#include <migraphx/load_save.hpp>
|
||||
#include <migraphx/register_target.hpp>
|
||||
#include <migraphx/json.hpp>
|
||||
#include <migraphx/make_op.hpp>
|
||||
#include <migraphx/op/common.hpp>
|
||||
|
||||
#ifdef HAVE_GPU
|
||||
#include <migraphx/gpu/hip.hpp>
|
||||
#endif
|
||||
|
||||
using half = half_float::half;
|
||||
namespace py = pybind11;
|
||||
|
||||
#ifdef __clang__
|
||||
#define MIGRAPHX_PUSH_UNUSED_WARNING \
|
||||
_Pragma("clang diagnostic push") \
|
||||
_Pragma("clang diagnostic ignored \"-Wused-but-marked-unused\"")
|
||||
#define MIGRAPHX_POP_WARNING _Pragma("clang diagnostic pop")
|
||||
#else
|
||||
#define MIGRAPHX_PUSH_UNUSED_WARNING
|
||||
#define MIGRAPHX_POP_WARNING
|
||||
#endif
|
||||
#define MIGRAPHX_PYBIND11_MODULE(...) \
|
||||
MIGRAPHX_PUSH_UNUSED_WARNING \
|
||||
PYBIND11_MODULE(__VA_ARGS__) \
|
||||
MIGRAPHX_POP_WARNING
|
||||
|
||||
#define MIGRAPHX_PYTHON_GENERATE_SHAPE_ENUM(x, t) .value(#x, migraphx::shape::type_t::x)
|
||||
namespace migraphx {
|
||||
|
||||
migraphx::value to_value(py::kwargs kwargs);
|
||||
migraphx::value to_value(py::list lst);
|
||||
|
||||
template <class T, class F>
|
||||
void visit_py(T x, F f)
|
||||
{
|
||||
if(py::isinstance<py::kwargs>(x))
|
||||
{
|
||||
f(to_value(x.template cast<py::kwargs>()));
|
||||
}
|
||||
else if(py::isinstance<py::list>(x))
|
||||
{
|
||||
f(to_value(x.template cast<py::list>()));
|
||||
}
|
||||
else if(py::isinstance<py::bool_>(x))
|
||||
{
|
||||
f(x.template cast<bool>());
|
||||
}
|
||||
else if(py::isinstance<py::int_>(x) or py::hasattr(x, "__index__"))
|
||||
{
|
||||
f(x.template cast<int>());
|
||||
}
|
||||
else if(py::isinstance<py::float_>(x))
|
||||
{
|
||||
f(x.template cast<float>());
|
||||
}
|
||||
else if(py::isinstance<py::str>(x))
|
||||
{
|
||||
f(x.template cast<std::string>());
|
||||
}
|
||||
else if(py::isinstance<migraphx::shape::dynamic_dimension>(x))
|
||||
{
|
||||
f(migraphx::to_value(x.template cast<migraphx::shape::dynamic_dimension>()));
|
||||
}
|
||||
else
|
||||
{
|
||||
MIGRAPHX_THROW("VISIT_PY: Unsupported data type!");
|
||||
}
|
||||
}
|
||||
|
||||
migraphx::value to_value(py::list lst)
|
||||
{
|
||||
migraphx::value v = migraphx::value::array{};
|
||||
for(auto val : lst)
|
||||
{
|
||||
visit_py(val, [&](auto py_val) { v.push_back(py_val); });
|
||||
}
|
||||
|
||||
return v;
|
||||
}
|
||||
|
||||
migraphx::value to_value(py::kwargs kwargs)
|
||||
{
|
||||
migraphx::value v = migraphx::value::object{};
|
||||
|
||||
for(auto arg : kwargs)
|
||||
{
|
||||
auto&& key = py::str(arg.first);
|
||||
auto&& val = arg.second;
|
||||
visit_py(val, [&](auto py_val) { v[key] = py_val; });
|
||||
}
|
||||
return v;
|
||||
}
|
||||
} // namespace migraphx
|
||||
|
||||
namespace pybind11 {
|
||||
namespace detail {
|
||||
|
||||
template <>
|
||||
struct npy_format_descriptor<half>
|
||||
{
|
||||
static std::string format()
|
||||
{
|
||||
// following: https://docs.python.org/3/library/struct.html#format-characters
|
||||
return "e";
|
||||
}
|
||||
static constexpr auto name() { return _("half"); }
|
||||
};
|
||||
|
||||
} // namespace detail
|
||||
} // namespace pybind11
|
||||
|
||||
template <class F>
|
||||
void visit_type(const migraphx::shape& s, F f)
|
||||
{
|
||||
s.visit_type(f);
|
||||
}
|
||||
|
||||
template <class T, class F>
|
||||
void visit(const migraphx::raw_data<T>& x, F f)
|
||||
{
|
||||
x.visit(f);
|
||||
}
|
||||
|
||||
template <class F>
|
||||
void visit_types(F f)
|
||||
{
|
||||
migraphx::shape::visit_types(f);
|
||||
}
|
||||
|
||||
template <class T>
|
||||
py::buffer_info to_buffer_info(T& x)
|
||||
{
|
||||
migraphx::shape s = x.get_shape();
|
||||
assert(s.type() != migraphx::shape::tuple_type);
|
||||
if(s.dynamic())
|
||||
MIGRAPHX_THROW("MIGRAPHX PYTHON: dynamic shape argument passed to to_buffer_info");
|
||||
auto strides = s.strides();
|
||||
std::transform(
|
||||
strides.begin(), strides.end(), strides.begin(), [&](auto i) { return i * s.type_size(); });
|
||||
py::buffer_info b;
|
||||
visit_type(s, [&](auto as) {
|
||||
// migraphx use int8_t data to store bool type, we need to
|
||||
// explicitly specify the data type as bool for python
|
||||
if(s.type() == migraphx::shape::bool_type)
|
||||
{
|
||||
b = py::buffer_info(x.data(),
|
||||
as.size(),
|
||||
py::format_descriptor<bool>::format(),
|
||||
s.ndim(),
|
||||
s.lens(),
|
||||
strides);
|
||||
}
|
||||
else
|
||||
{
|
||||
b = py::buffer_info(x.data(),
|
||||
as.size(),
|
||||
py::format_descriptor<decltype(as())>::format(),
|
||||
s.ndim(),
|
||||
s.lens(),
|
||||
strides);
|
||||
}
|
||||
});
|
||||
return b;
|
||||
}
|
||||
|
||||
migraphx::shape to_shape(const py::buffer_info& info)
|
||||
{
|
||||
migraphx::shape::type_t t;
|
||||
std::size_t n = 0;
|
||||
visit_types([&](auto as) {
|
||||
if(info.format == py::format_descriptor<decltype(as())>::format() or
|
||||
(info.format == "l" and py::format_descriptor<decltype(as())>::format() == "q") or
|
||||
(info.format == "L" and py::format_descriptor<decltype(as())>::format() == "Q"))
|
||||
{
|
||||
t = as.type_enum();
|
||||
n = sizeof(as());
|
||||
}
|
||||
else if(info.format == "?" and py::format_descriptor<decltype(as())>::format() == "b")
|
||||
{
|
||||
t = migraphx::shape::bool_type;
|
||||
n = sizeof(bool);
|
||||
}
|
||||
});
|
||||
|
||||
if(n == 0)
|
||||
{
|
||||
MIGRAPHX_THROW("MIGRAPHX PYTHON: Unsupported data type " + info.format);
|
||||
}
|
||||
|
||||
auto strides = info.strides;
|
||||
std::transform(strides.begin(), strides.end(), strides.begin(), [&](auto i) -> std::size_t {
|
||||
return n > 0 ? i / n : 0;
|
||||
});
|
||||
|
||||
// scalar support
|
||||
if(info.shape.empty())
|
||||
{
|
||||
return migraphx::shape{t};
|
||||
}
|
||||
else
|
||||
{
|
||||
return migraphx::shape{t, info.shape, strides};
|
||||
}
|
||||
}
|
||||
|
||||
MIGRAPHX_PYBIND11_MODULE(migraphx, m)
|
||||
{
|
||||
py::class_<migraphx::shape> shape_cls(m, "shape");
|
||||
shape_cls
|
||||
.def(py::init([](py::kwargs kwargs) {
|
||||
auto v = migraphx::to_value(kwargs);
|
||||
auto t = migraphx::shape::parse_type(v.get("type", "float"));
|
||||
if(v.contains("dyn_dims"))
|
||||
{
|
||||
auto dyn_dims =
|
||||
migraphx::from_value<std::vector<migraphx::shape::dynamic_dimension>>(
|
||||
v.at("dyn_dims"));
|
||||
return migraphx::shape(t, dyn_dims);
|
||||
}
|
||||
auto lens = v.get<std::size_t>("lens", {1});
|
||||
if(v.contains("strides"))
|
||||
return migraphx::shape(t, lens, v.at("strides").to_vector<std::size_t>());
|
||||
else
|
||||
return migraphx::shape(t, lens);
|
||||
}))
|
||||
.def("type", &migraphx::shape::type)
|
||||
.def("lens", &migraphx::shape::lens)
|
||||
.def("strides", &migraphx::shape::strides)
|
||||
.def("ndim", &migraphx::shape::ndim)
|
||||
.def("elements", &migraphx::shape::elements)
|
||||
.def("bytes", &migraphx::shape::bytes)
|
||||
.def("type_string", &migraphx::shape::type_string)
|
||||
.def("type_size", &migraphx::shape::type_size)
|
||||
.def("dyn_dims", &migraphx::shape::dyn_dims)
|
||||
.def("packed", &migraphx::shape::packed)
|
||||
.def("transposed", &migraphx::shape::transposed)
|
||||
.def("broadcasted", &migraphx::shape::broadcasted)
|
||||
.def("standard", &migraphx::shape::standard)
|
||||
.def("scalar", &migraphx::shape::scalar)
|
||||
.def("dynamic", &migraphx::shape::dynamic)
|
||||
.def("__eq__", std::equal_to<migraphx::shape>{})
|
||||
.def("__ne__", std::not_equal_to<migraphx::shape>{})
|
||||
.def("__repr__", [](const migraphx::shape& s) { return migraphx::to_string(s); });
|
||||
|
||||
py::enum_<migraphx::shape::type_t>(shape_cls, "type_t")
|
||||
MIGRAPHX_SHAPE_VISIT_TYPES(MIGRAPHX_PYTHON_GENERATE_SHAPE_ENUM);
|
||||
|
||||
py::class_<migraphx::shape::dynamic_dimension>(shape_cls, "dynamic_dimension")
|
||||
.def(py::init<>())
|
||||
.def(py::init<std::size_t, std::size_t>())
|
||||
.def(py::init<std::size_t, std::size_t, std::set<std::size_t>>())
|
||||
.def_readwrite("min", &migraphx::shape::dynamic_dimension::min)
|
||||
.def_readwrite("max", &migraphx::shape::dynamic_dimension::max)
|
||||
.def_readwrite("optimals", &migraphx::shape::dynamic_dimension::optimals)
|
||||
.def("is_fixed", &migraphx::shape::dynamic_dimension::is_fixed);
|
||||
|
||||
py::class_<migraphx::argument>(m, "argument", py::buffer_protocol())
|
||||
.def_buffer([](migraphx::argument& x) -> py::buffer_info { return to_buffer_info(x); })
|
||||
.def(py::init([](py::buffer b) {
|
||||
py::buffer_info info = b.request();
|
||||
return migraphx::argument(to_shape(info), info.ptr);
|
||||
}))
|
||||
.def("get_shape", &migraphx::argument::get_shape)
|
||||
.def("data_ptr",
|
||||
[](migraphx::argument& x) { return reinterpret_cast<std::uintptr_t>(x.data()); })
|
||||
.def("tolist",
|
||||
[](migraphx::argument& x) {
|
||||
py::list l{x.get_shape().elements()};
|
||||
visit(x, [&](auto data) { l = py::cast(data.to_vector()); });
|
||||
return l;
|
||||
})
|
||||
.def("__eq__", std::equal_to<migraphx::argument>{})
|
||||
.def("__ne__", std::not_equal_to<migraphx::argument>{})
|
||||
.def("__repr__", [](const migraphx::argument& x) { return migraphx::to_string(x); });
|
||||
|
||||
py::class_<migraphx::target>(m, "target");
|
||||
|
||||
py::class_<migraphx::instruction_ref>(m, "instruction_ref")
|
||||
.def("shape", [](migraphx::instruction_ref i) { return i->get_shape(); })
|
||||
.def("op", [](migraphx::instruction_ref i) { return i->get_operator(); });
|
||||
|
||||
py::class_<migraphx::module, std::unique_ptr<migraphx::module, py::nodelete>>(m, "module")
|
||||
.def("print", [](const migraphx::module& mm) { std::cout << mm << std::endl; })
|
||||
.def(
|
||||
"add_instruction",
|
||||
[](migraphx::module& mm,
|
||||
const migraphx::operation& op,
|
||||
std::vector<migraphx::instruction_ref>& args,
|
||||
std::vector<migraphx::module*>& mod_args) {
|
||||
return mm.add_instruction(op, args, mod_args);
|
||||
},
|
||||
py::arg("op"),
|
||||
py::arg("args"),
|
||||
py::arg("mod_args") = std::vector<migraphx::module*>{})
|
||||
.def(
|
||||
"add_literal",
|
||||
[](migraphx::module& mm, py::buffer data) {
|
||||
py::buffer_info info = data.request();
|
||||
auto literal_shape = to_shape(info);
|
||||
return mm.add_literal(literal_shape, reinterpret_cast<char*>(info.ptr));
|
||||
},
|
||||
py::arg("data"))
|
||||
.def(
|
||||
"add_parameter",
|
||||
[](migraphx::module& mm, const std::string& name, const migraphx::shape shape) {
|
||||
return mm.add_parameter(name, shape);
|
||||
},
|
||||
py::arg("name"),
|
||||
py::arg("shape"))
|
||||
.def(
|
||||
"add_return",
|
||||
[](migraphx::module& mm, std::vector<migraphx::instruction_ref>& args) {
|
||||
return mm.add_return(args);
|
||||
},
|
||||
py::arg("args"))
|
||||
.def("__repr__", [](const migraphx::module& mm) { return migraphx::to_string(mm); });
|
||||
|
||||
py::class_<migraphx::program>(m, "program")
|
||||
.def(py::init([]() { return migraphx::program(); }))
|
||||
.def("get_parameter_names", &migraphx::program::get_parameter_names)
|
||||
.def("get_parameter_shapes", &migraphx::program::get_parameter_shapes)
|
||||
.def("get_output_shapes", &migraphx::program::get_output_shapes)
|
||||
.def("is_compiled", &migraphx::program::is_compiled)
|
||||
.def(
|
||||
"compile",
|
||||
[](migraphx::program& p,
|
||||
const migraphx::target& t,
|
||||
bool offload_copy,
|
||||
bool fast_math,
|
||||
bool exhaustive_tune) {
|
||||
migraphx::compile_options options;
|
||||
options.offload_copy = offload_copy;
|
||||
options.fast_math = fast_math;
|
||||
options.exhaustive_tune = exhaustive_tune;
|
||||
p.compile(t, options);
|
||||
},
|
||||
py::arg("t"),
|
||||
py::arg("offload_copy") = true,
|
||||
py::arg("fast_math") = true,
|
||||
py::arg("exhaustive_tune") = false)
|
||||
.def("get_main_module", [](const migraphx::program& p) { return p.get_main_module(); })
|
||||
.def(
|
||||
"create_module",
|
||||
[](migraphx::program& p, const std::string& name) { return p.create_module(name); },
|
||||
py::arg("name"))
|
||||
.def("run",
|
||||
[](migraphx::program& p, py::dict params) {
|
||||
migraphx::parameter_map pm;
|
||||
for(auto x : params)
|
||||
{
|
||||
std::string key = x.first.cast<std::string>();
|
||||
py::buffer b = x.second.cast<py::buffer>();
|
||||
py::buffer_info info = b.request();
|
||||
pm[key] = migraphx::argument(to_shape(info), info.ptr);
|
||||
}
|
||||
return p.eval(pm);
|
||||
})
|
||||
.def("run_async",
|
||||
[](migraphx::program& p,
|
||||
py::dict params,
|
||||
std::uintptr_t stream,
|
||||
std::string stream_name) {
|
||||
migraphx::parameter_map pm;
|
||||
for(auto x : params)
|
||||
{
|
||||
std::string key = x.first.cast<std::string>();
|
||||
py::buffer b = x.second.cast<py::buffer>();
|
||||
py::buffer_info info = b.request();
|
||||
pm[key] = migraphx::argument(to_shape(info), info.ptr);
|
||||
}
|
||||
migraphx::execution_environment exec_env{
|
||||
migraphx::any_ptr(reinterpret_cast<void*>(stream), stream_name), true};
|
||||
return p.eval(pm, exec_env);
|
||||
})
|
||||
.def("sort", &migraphx::program::sort)
|
||||
.def("print", [](const migraphx::program& p) { std::cout << p << std::endl; })
|
||||
.def("__eq__", std::equal_to<migraphx::program>{})
|
||||
.def("__ne__", std::not_equal_to<migraphx::program>{})
|
||||
.def("__repr__", [](const migraphx::program& p) { return migraphx::to_string(p); });
|
||||
|
||||
py::class_<migraphx::operation> op(m, "op");
|
||||
op.def(py::init([](const std::string& name, py::kwargs kwargs) {
|
||||
migraphx::value v = migraphx::value::object{};
|
||||
if(kwargs)
|
||||
{
|
||||
v = migraphx::to_value(kwargs);
|
||||
}
|
||||
return migraphx::make_op(name, v);
|
||||
}))
|
||||
.def("name", &migraphx::operation::name);
|
||||
|
||||
py::enum_<migraphx::op::pooling_mode>(op, "pooling_mode")
|
||||
.value("average", migraphx::op::pooling_mode::average)
|
||||
.value("max", migraphx::op::pooling_mode::max)
|
||||
.value("lpnorm", migraphx::op::pooling_mode::lpnorm);
|
||||
|
||||
py::enum_<migraphx::op::rnn_direction>(op, "rnn_direction")
|
||||
.value("forward", migraphx::op::rnn_direction::forward)
|
||||
.value("reverse", migraphx::op::rnn_direction::reverse)
|
||||
.value("bidirectional", migraphx::op::rnn_direction::bidirectional);
|
||||
|
||||
m.def(
|
||||
"argument_from_pointer",
|
||||
[](const migraphx::shape shape, const int64_t address) {
|
||||
return migraphx::argument(shape, reinterpret_cast<void*>(address));
|
||||
},
|
||||
py::arg("shape"),
|
||||
py::arg("address"));
|
||||
|
||||
m.def(
|
||||
"parse_tf",
|
||||
[](const std::string& filename,
|
||||
bool is_nhwc,
|
||||
unsigned int batch_size,
|
||||
std::unordered_map<std::string, std::vector<std::size_t>> map_input_dims,
|
||||
std::vector<std::string> output_names) {
|
||||
return migraphx::parse_tf(
|
||||
filename, migraphx::tf_options{is_nhwc, batch_size, map_input_dims, output_names});
|
||||
},
|
||||
"Parse tf protobuf (default format is nhwc)",
|
||||
py::arg("filename"),
|
||||
py::arg("is_nhwc") = true,
|
||||
py::arg("batch_size") = 1,
|
||||
py::arg("map_input_dims") = std::unordered_map<std::string, std::vector<std::size_t>>(),
|
||||
py::arg("output_names") = std::vector<std::string>());
|
||||
|
||||
m.def(
|
||||
"parse_onnx",
|
||||
[](const std::string& filename,
|
||||
unsigned int default_dim_value,
|
||||
migraphx::shape::dynamic_dimension default_dyn_dim_value,
|
||||
std::unordered_map<std::string, std::vector<std::size_t>> map_input_dims,
|
||||
std::unordered_map<std::string, std::vector<migraphx::shape::dynamic_dimension>>
|
||||
map_dyn_input_dims,
|
||||
bool skip_unknown_operators,
|
||||
bool print_program_on_error,
|
||||
int64_t max_loop_iterations) {
|
||||
migraphx::onnx_options options;
|
||||
options.default_dim_value = default_dim_value;
|
||||
options.default_dyn_dim_value = default_dyn_dim_value;
|
||||
options.map_input_dims = map_input_dims;
|
||||
options.map_dyn_input_dims = map_dyn_input_dims;
|
||||
options.skip_unknown_operators = skip_unknown_operators;
|
||||
options.print_program_on_error = print_program_on_error;
|
||||
options.max_loop_iterations = max_loop_iterations;
|
||||
return migraphx::parse_onnx(filename, options);
|
||||
},
|
||||
"Parse onnx file",
|
||||
py::arg("filename"),
|
||||
py::arg("default_dim_value") = 0,
|
||||
py::arg("default_dyn_dim_value") = migraphx::shape::dynamic_dimension{1, 1},
|
||||
py::arg("map_input_dims") = std::unordered_map<std::string, std::vector<std::size_t>>(),
|
||||
py::arg("map_dyn_input_dims") =
|
||||
std::unordered_map<std::string, std::vector<migraphx::shape::dynamic_dimension>>(),
|
||||
py::arg("skip_unknown_operators") = false,
|
||||
py::arg("print_program_on_error") = false,
|
||||
py::arg("max_loop_iterations") = 10);
|
||||
|
||||
m.def(
|
||||
"parse_onnx_buffer",
|
||||
[](const std::string& onnx_buffer,
|
||||
unsigned int default_dim_value,
|
||||
migraphx::shape::dynamic_dimension default_dyn_dim_value,
|
||||
std::unordered_map<std::string, std::vector<std::size_t>> map_input_dims,
|
||||
std::unordered_map<std::string, std::vector<migraphx::shape::dynamic_dimension>>
|
||||
map_dyn_input_dims,
|
||||
bool skip_unknown_operators,
|
||||
bool print_program_on_error) {
|
||||
migraphx::onnx_options options;
|
||||
options.default_dim_value = default_dim_value;
|
||||
options.default_dyn_dim_value = default_dyn_dim_value;
|
||||
options.map_input_dims = map_input_dims;
|
||||
options.map_dyn_input_dims = map_dyn_input_dims;
|
||||
options.skip_unknown_operators = skip_unknown_operators;
|
||||
options.print_program_on_error = print_program_on_error;
|
||||
return migraphx::parse_onnx_buffer(onnx_buffer, options);
|
||||
},
|
||||
"Parse onnx file",
|
||||
py::arg("filename"),
|
||||
py::arg("default_dim_value") = 0,
|
||||
py::arg("default_dyn_dim_value") = migraphx::shape::dynamic_dimension{1, 1},
|
||||
py::arg("map_input_dims") = std::unordered_map<std::string, std::vector<std::size_t>>(),
|
||||
py::arg("map_dyn_input_dims") =
|
||||
std::unordered_map<std::string, std::vector<migraphx::shape::dynamic_dimension>>(),
|
||||
py::arg("skip_unknown_operators") = false,
|
||||
py::arg("print_program_on_error") = false);
|
||||
|
||||
m.def(
|
||||
"load",
|
||||
[](const std::string& name, const std::string& format) {
|
||||
migraphx::file_options options;
|
||||
options.format = format;
|
||||
return migraphx::load(name, options);
|
||||
},
|
||||
"Load MIGraphX program",
|
||||
py::arg("filename"),
|
||||
py::arg("format") = "msgpack");
|
||||
|
||||
m.def(
|
||||
"save",
|
||||
[](const migraphx::program& p, const std::string& name, const std::string& format) {
|
||||
migraphx::file_options options;
|
||||
options.format = format;
|
||||
return migraphx::save(p, name, options);
|
||||
},
|
||||
"Save MIGraphX program",
|
||||
py::arg("p"),
|
||||
py::arg("filename"),
|
||||
py::arg("format") = "msgpack");
|
||||
|
||||
m.def("get_target", &migraphx::make_target);
|
||||
m.def("create_argument", [](const migraphx::shape& s, const std::vector<double>& values) {
|
||||
if(values.size() != s.elements())
|
||||
MIGRAPHX_THROW("Values and shape elements do not match");
|
||||
migraphx::argument a{s};
|
||||
a.fill(values.begin(), values.end());
|
||||
return a;
|
||||
});
|
||||
m.def("generate_argument", &migraphx::generate_argument, py::arg("s"), py::arg("seed") = 0);
|
||||
m.def("fill_argument", &migraphx::fill_argument, py::arg("s"), py::arg("value"));
|
||||
m.def("quantize_fp16",
|
||||
&migraphx::quantize_fp16,
|
||||
py::arg("prog"),
|
||||
py::arg("ins_names") = std::vector<std::string>{"all"});
|
||||
m.def("quantize_int8",
|
||||
&migraphx::quantize_int8,
|
||||
py::arg("prog"),
|
||||
py::arg("t"),
|
||||
py::arg("calibration") = std::vector<migraphx::parameter_map>{},
|
||||
py::arg("ins_names") = std::vector<std::string>{"dot", "convolution"});
|
||||
|
||||
#ifdef HAVE_GPU
|
||||
m.def("allocate_gpu", &migraphx::gpu::allocate_gpu, py::arg("s"), py::arg("host") = false);
|
||||
m.def("to_gpu", &migraphx::gpu::to_gpu, py::arg("arg"), py::arg("host") = false);
|
||||
m.def("from_gpu", &migraphx::gpu::from_gpu);
|
||||
m.def("gpu_sync", [] { migraphx::gpu::gpu_sync(); });
|
||||
#endif
|
||||
|
||||
#ifdef VERSION_INFO
|
||||
m.attr("__version__") = VERSION_INFO;
|
||||
#else
|
||||
m.attr("__version__") = "dev";
|
||||
#endif
|
||||
}
|
@@ -1 +0,0 @@
onnxruntime-rocm @ https://github.com/NickM-27/frigate-onnxruntime-rocm/releases/download/v1.0.0/onnxruntime_rocm-1.17.3-cp39-cp39-linux_x86_64.whl
@@ -1,3 +0,0 @@
Package: *
Pin: release o=repo.radeon.com
Pin-Priority: 600
@@ -1,38 +0,0 @@
|
||||
variable "AMDGPU" {
|
||||
default = "gfx900"
|
||||
}
|
||||
variable "ROCM" {
|
||||
default = "5.7.3"
|
||||
}
|
||||
variable "HSA_OVERRIDE_GFX_VERSION" {
|
||||
default = ""
|
||||
}
|
||||
variable "HSA_OVERRIDE" {
|
||||
default = "1"
|
||||
}
|
||||
target deps {
|
||||
dockerfile = "docker/main/Dockerfile"
|
||||
platforms = ["linux/amd64"]
|
||||
target = "deps"
|
||||
}
|
||||
|
||||
target rootfs {
|
||||
dockerfile = "docker/main/Dockerfile"
|
||||
platforms = ["linux/amd64"]
|
||||
target = "rootfs"
|
||||
}
|
||||
|
||||
target rocm {
|
||||
dockerfile = "docker/rocm/Dockerfile"
|
||||
contexts = {
|
||||
deps = "target:deps",
|
||||
rootfs = "target:rootfs"
|
||||
}
|
||||
platforms = ["linux/amd64"]
|
||||
args = {
|
||||
AMDGPU = AMDGPU,
|
||||
ROCM = ROCM,
|
||||
HSA_OVERRIDE_GFX_VERSION = HSA_OVERRIDE_GFX_VERSION,
|
||||
HSA_OVERRIDE = HSA_OVERRIDE
|
||||
}
|
||||
}
|
@@ -1 +0,0 @@
deb [arch=amd64 signed-by=/etc/apt/keyrings/rocm.gpg] https://repo.radeon.com/rocm/apt/5.7.3 focal main
@@ -1,53 +0,0 @@
BOARDS += rocm

# AMD/ROCm is chunky, so we build a couple of smaller images for specific chipsets
ROCM_CHIPSETS:=gfx900:9.0.0 gfx1030:10.3.0 gfx1100:11.0.0

local-rocm: version
	$(foreach chipset,$(ROCM_CHIPSETS), \
		AMDGPU=$(word 1,$(subst :, ,$(chipset))) \
		HSA_OVERRIDE_GFX_VERSION=$(word 2,$(subst :, ,$(chipset))) \
		HSA_OVERRIDE=1 \
		docker buildx bake --file=docker/rocm/rocm.hcl rocm \
		--set rocm.tags=frigate:latest-rocm-$(word 1,$(subst :, ,$(chipset))) \
		--load \
	&&) true

	unset HSA_OVERRIDE_GFX_VERSION && \
		HSA_OVERRIDE=0 \
		AMDGPU=gfx \
		docker buildx bake --file=docker/rocm/rocm.hcl rocm \
		--set rocm.tags=frigate:latest-rocm \
		--load

build-rocm: version
	$(foreach chipset,$(ROCM_CHIPSETS), \
		AMDGPU=$(word 1,$(subst :, ,$(chipset))) \
		HSA_OVERRIDE_GFX_VERSION=$(word 2,$(subst :, ,$(chipset))) \
		HSA_OVERRIDE=1 \
		docker buildx bake --file=docker/rocm/rocm.hcl rocm \
		--set rocm.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-rocm-$(chipset) \
	&&) true

	unset HSA_OVERRIDE_GFX_VERSION && \
		HSA_OVERRIDE=0 \
		AMDGPU=gfx \
		docker buildx bake --file=docker/rocm/rocm.hcl rocm \
		--set rocm.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-rocm

push-rocm: build-rocm
	$(foreach chipset,$(ROCM_CHIPSETS), \
		AMDGPU=$(word 1,$(subst :, ,$(chipset))) \
		HSA_OVERRIDE_GFX_VERSION=$(word 2,$(subst :, ,$(chipset))) \
		HSA_OVERRIDE=1 \
		docker buildx bake --file=docker/rocm/rocm.hcl rocm \
		--set rocm.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-rocm-$(chipset) \
		--push \
	&&) true

	unset HSA_OVERRIDE_GFX_VERSION && \
		HSA_OVERRIDE=0 \
		AMDGPU=gfx \
		docker buildx bake --file=docker/rocm/rocm.hcl rocm \
		--set rocm.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-rocm \
		--push
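The $(foreach ...) recipes above are dense, but each one boils down to a single buildx bake invocation per AMDGPU:HSA_OVERRIDE_GFX_VERSION pair, followed by one generic build with HSA_OVERRIDE=0 and AMDGPU=gfx. The Python sketch below is illustrative only and is not part of the diff; it mirrors the local-rocm target and assumes Docker with buildx is available and that bake picks up the HCL variables from the environment.

# Illustrative expansion of the local-rocm recipe above.
import os
import subprocess

ROCM_CHIPSETS = ["gfx900:9.0.0", "gfx1030:10.3.0", "gfx1100:11.0.0"]

def bake_rocm(tag, env):
    """Run docker buildx bake against docker/rocm/rocm.hcl; variable values come from env."""
    cmd = [
        "docker", "buildx", "bake",
        "--file=docker/rocm/rocm.hcl", "rocm",
        "--set", f"rocm.tags={tag}",
        "--load",
    ]
    subprocess.run(cmd, check=True, env=env)

# Equivalent of `unset HSA_OVERRIDE_GFX_VERSION` for the generic build.
base_env = {k: v for k, v in os.environ.items() if k != "HSA_OVERRIDE_GFX_VERSION"}

# One image per chipset, with the matching HSA_OVERRIDE_GFX_VERSION set.
for chipset in ROCM_CHIPSETS:
    amdgpu, gfx_version = chipset.split(":")
    bake_rocm(
        f"frigate:latest-rocm-{amdgpu}",
        {**base_env, "AMDGPU": amdgpu, "HSA_OVERRIDE_GFX_VERSION": gfx_version, "HSA_OVERRIDE": "1"},
    )

# Final generic image: no GFX version override.
bake_rocm("frigate:latest-rocm", {**base_env, "HSA_OVERRIDE": "0", "AMDGPU": "gfx"})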
@@ -1,15 +1,10 @@
BOARDS += rpi

local-rpi: version
	docker buildx bake --file=docker/rpi/rpi.hcl rpi \
		--set rpi.tags=frigate:latest-rpi \
		--load
	docker buildx bake --load --file=docker/rpi/rpi.hcl --set rpi.tags=frigate:latest-rpi rpi

build-rpi: version
	docker buildx bake --file=docker/rpi/rpi.hcl rpi \
		--set rpi.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-rpi
	docker buildx bake --file=docker/rpi/rpi.hcl --set rpi.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-rpi rpi

push-rpi: build-rpi
	docker buildx bake --file=docker/rpi/rpi.hcl rpi \
		--set rpi.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-rpi \
		--push
	docker buildx bake --push --file=docker/rpi/rpi.hcl --set rpi.tags=$(IMAGE_REPO):${GITHUB_REF_NAME}-$(COMMIT_HASH)-rpi rpi
@@ -18,7 +18,6 @@ RUN --mount=type=bind,from=trt-wheels,source=/trt-wheels,target=/deps/trt-wheels
    pip3 install -U /deps/trt-wheels/*.whl && \
    ldconfig

ENV LD_LIBRARY_PATH=/usr/local/lib/python3.9/dist-packages/tensorrt:/usr/local/cuda/lib64:/usr/local/lib/python3.9/dist-packages/nvidia/cufft/lib
WORKDIR /opt/frigate/
COPY --from=rootfs / /

@@ -27,7 +26,6 @@ FROM devcontainer AS devcontainer-trt

COPY --from=trt-deps /usr/local/lib/libyolo_layer.so /usr/local/lib/libyolo_layer.so
COPY --from=trt-deps /usr/local/src/tensorrt_demos /usr/local/src/tensorrt_demos
COPY --from=trt-deps /usr/local/cuda-12.1 /usr/local/cuda
COPY docker/tensorrt/detector/rootfs/ /
COPY --from=trt-deps /usr/local/lib/libyolo_layer.so /usr/local/lib/libyolo_layer.so
RUN --mount=type=bind,from=trt-wheels,source=/trt-wheels,target=/deps/trt-wheels \

@@ -10,8 +10,8 @@ ARG DEBIAN_FRONTEND
# Use a separate container to build wheels to prevent build dependencies in final image
RUN apt-get -qq update \
    && apt-get -qq install -y --no-install-recommends \
    python3.9 python3.9-dev \
    wget build-essential cmake git \
    python3.9 python3.9-dev \
    wget build-essential cmake git \
    && rm -rf /var/lib/apt/lists/*

# Ensure python3 defaults to python3.9
@@ -41,11 +41,7 @@ RUN --mount=type=bind,source=docker/tensorrt/detector/build_python_tensorrt.sh,t
    && TENSORRT_VER=$(cat /etc/TENSORRT_VER) /deps/build_python_tensorrt.sh

COPY docker/tensorrt/requirements-arm64.txt /requirements-tensorrt.txt
ADD https://nvidia.box.com/shared/static/9aemm4grzbbkfaesg5l7fplgjtmswhj8.whl /tmp/onnxruntime_gpu-1.15.1-cp39-cp39-linux_aarch64.whl

RUN pip3 uninstall -y onnxruntime-openvino \
    && pip3 wheel --wheel-dir=/trt-wheels -r /requirements-tensorrt.txt \
    && pip3 install --no-deps /tmp/onnxruntime_gpu-1.15.1-cp39-cp39-linux_aarch64.whl
RUN pip3 wheel --wheel-dir=/trt-wheels -r /requirements-tensorrt.txt

FROM build-wheels AS trt-model-wheels
ARG DEBIAN_FRONTEND

@@ -8,8 +8,6 @@ ARG TRT_BASE=nvcr.io/nvidia/tensorrt:23.03-py3
# Build TensorRT-specific library
FROM ${TRT_BASE} AS trt-deps

ARG COMPUTE_LEVEL

RUN apt-get update \
    && apt-get install -y git build-essential cuda-nvcc-* cuda-nvtx-* libnvinfer-dev libnvinfer-plugin-dev libnvparsers-dev libnvonnxparsers-dev \
    && rm -rf /var/lib/apt/lists/*
@@ -24,9 +22,8 @@ ENV S6_CMD_WAIT_FOR_SERVICES_MAXTIME=0

COPY --from=trt-deps /usr/local/lib/libyolo_layer.so /usr/local/lib/libyolo_layer.so
COPY --from=trt-deps /usr/local/src/tensorrt_demos /usr/local/src/tensorrt_demos
COPY --from=trt-deps /usr/local/cuda-12.* /usr/local/cuda
COPY docker/tensorrt/detector/rootfs/ /
ENV YOLO_MODELS=""
ENV YOLO_MODELS="yolov7-320"

HEALTHCHECK --start-period=600s --start-interval=5s --interval=15s --timeout=5s --retries=3 \
    CMD curl --fail --silent --show-error http://127.0.0.1:5000/api/version || exit 1

@@ -23,8 +23,8 @@ else
fi
tar xaf jetson_multimedia_api.tbz2 -C / && rm jetson_multimedia_api.tbz2

wget -q https://github.com/AndBobsYourUncle/jetson-ffmpeg/archive/9c17b09.zip -O jetson-ffmpeg.zip
unzip jetson-ffmpeg.zip && rm jetson-ffmpeg.zip && mv jetson-ffmpeg-* jetson-ffmpeg && cd jetson-ffmpeg
wget -q https://github.com/madsciencetist/jetson-ffmpeg/archive/refs/heads/master.zip
unzip master.zip && rm master.zip && cd jetson-ffmpeg-master
LD_LIBRARY_PATH=$(pwd)/stubs:$LD_LIBRARY_PATH # tegra multimedia libs aren't available in image, so use stubs for ffmpeg build
mkdir build
cd build
@@ -42,7 +42,7 @@ cd ../ && rm -rf nv-codec-headers-master
# Build ffmpeg with nvmpi patch
wget -q https://ffmpeg.org/releases/ffmpeg-6.0.tar.xz
tar xaf ffmpeg-*.tar.xz && rm ffmpeg-*.tar.xz && cd ffmpeg-*
patch -p1 < ../jetson-ffmpeg/ffmpeg_patches/ffmpeg6.0_nvmpi.patch
patch -p1 < ../jetson-ffmpeg-master/ffmpeg_patches/ffmpeg6.0_nvmpi.patch
export PKG_CONFIG_PATH=$INSTALL_PREFIX/lib/pkgconfig
# enable Jetson codecs but disable dGPU codecs
./configure --cc='ccache gcc' --cxx='ccache g++' \

@@ -11,7 +11,6 @@ set -o errexit -o nounset -o pipefail
MODEL_CACHE_DIR=${MODEL_CACHE_DIR:-"/config/model_cache/tensorrt"}
TRT_VER=${TRT_VER:-$(cat /etc/TENSORRT_VER)}
OUTPUT_FOLDER="${MODEL_CACHE_DIR}/${TRT_VER}"
YOLO_MODELS=${YOLO_MODELS:-""}

# Create output folder
mkdir -p ${OUTPUT_FOLDER}
@@ -20,11 +19,6 @@ FIRST_MODEL=true
MODEL_DOWNLOAD=""
MODEL_CONVERT=""

if [ -z "$YOLO_MODELS" ]; then
    echo "tensorrt model preparation disabled"
    exit 0
fi

for model in ${YOLO_MODELS//,/ }
do
    # Remove old link in case path/version changed

@@ -11,7 +11,7 @@ git clone --depth 1 https://github.com/NateMeyer/tensorrt_demos.git -b condition
if [ ! -e /usr/local/cuda ]; then
    ln -s /usr/local/cuda-* /usr/local/cuda
fi
cd ./tensorrt_demos/plugins && make all -j$(nproc) computes="${COMPUTE_LEVEL:-}"
cd ./tensorrt_demos/plugins && make all -j$(nproc)
cp libyolo_layer.so /usr/local/lib/libyolo_layer.so

# Store yolo scripts for later conversion

@@ -8,7 +8,5 @@ nvidia-cuda-runtime-cu12 == 12.1.*; platform_machine == 'x86_64'
nvidia-cuda-runtime-cu11 == 11.8.*; platform_machine == 'x86_64'
nvidia-cublas-cu11 == 11.11.3.6; platform_machine == 'x86_64'
nvidia-cudnn-cu11 == 8.6.0.*; platform_machine == 'x86_64'
nvidia-cufft-cu11==10.*; platform_machine == 'x86_64'
onnx==1.16.*; platform_machine == 'x86_64'
onnxruntime-gpu==1.18.*; platform_machine == 'x86_64'
protobuf==3.20.3; platform_machine == 'x86_64'
onnx==1.14.0; platform_machine == 'x86_64'
protobuf==3.20.3; platform_machine == 'x86_64'
@@ -1 +1 @@
cuda-python == 11.7; platform_machine == 'aarch64'
cuda-python == 11.7; platform_machine == 'aarch64'
Some files were not shown because too many files have changed in this diff.