Compare commits

...

7 Commits

Author SHA1 Message Date
langhuihui
78c9201552 feat: add loop arg to pull api 2025-11-04 17:27:43 +08:00
pggiroro
f0666f43db fix: port recycle 2025-10-30 22:46:29 +08:00
pggiroro
15f9d420d9 feat: download gb28181 history record 2025-10-30 22:06:59 +08:00
langhuihui
4e5552484d fix: port range use 2025-10-30 18:35:36 +08:00
pggiroro
f5fe7c7542 fix: recycle tcp port when streamMode is tcppassive 2025-10-21 22:29:30 +08:00
langhuihui
331b61c5ff fix: rtmp h265 ctx panic 2025-10-21 17:07:03 +08:00
cto-new[bot]
de348725b7 feat(codec): Add unified AV1 raw format and protocol mux/demux support (#354)
* cherry-pick 95191a3: AV1 raw format support and protocol mux/demux integration

* feat(rtp/av1): improve AV1 RTP packetization, fragmentation, and keyframe detection

- Implements RTP packetization for AV1 with OBU fragmentation per RFC9304
- Adds accurate detection of AV1 keyframes using OBU inspection
- Updates AV1 RTP demuxing to reconstruct fragmented OBUs
- Ensures keyframe (IDR) flag is set correctly throughout mux/demux pipeline
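
For context, both fragmentation and type detection start from the one-byte OBU header: bit 7 is the forbidden bit, bits 6-3 carry obu_type, bit 2 the extension flag, and bit 1 the has_size flag. Below is a minimal, self-contained sketch of that header split — not the project's actual demuxer code — using the same header byte the new tests construct further down.

```go
package main

import "fmt"

// decodeOBUHeader splits the first byte of an AV1 OBU header:
// bit 7 forbidden, bits 6-3 obu_type, bit 2 extension flag, bit 1 has_size flag.
func decodeOBUHeader(b byte) (obuType uint8, hasExtension, hasSize bool) {
	obuType = (b >> 3) & 0x0F
	hasExtension = b&0x04 != 0
	hasSize = b&0x02 != 0
	return
}

func main() {
	// 0b00001010: obu_type=1 (OBU_SEQUENCE_HEADER), has_size=1 —
	// the same header byte used in pkg/av1_parse_test.go below.
	t, ext, size := decodeOBUHeader(0b00001010)
	fmt.Println(t, ext, size) // 1 false true
}
```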

---------

Co-authored-by: engine-labs-app[bot] <140088366+engine-labs-app[bot]@users.noreply.github.com>
2025-10-21 09:38:00 +08:00
33 changed files with 3129 additions and 345 deletions

3
api.go
View File

@@ -1080,6 +1080,9 @@ func (s *Server) StartPull(ctx context.Context, req *pb.GlobalPullRequest) (res
if req.PubType != nil {
pubConfig.PubType = *req.PubType
}
if req.Loop != nil {
pullConfig.Loop = int(*req.Loop)
}
if req.Dump != nil {
pubConfig.Dump = *req.Dump
}
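
Because loop is declared as an optional proto3 field (see the GlobalPullRequest changes further down), the generated Go struct uses a pointer, and StartPull above only overrides the pull config when the field was actually sent. A hedged client-side sketch — the field names come from the generated code shown below, everything else is illustrative:

```go
loop := int32(-1) // -1: loop the pull indefinitely
req := &pb.GlobalPullRequest{
	RemoteURL:  "rtmp://example.com/live/test", // illustrative URL
	StreamPath: "live/test",
	Loop:       &loop, // optional field -> pointer in the generated struct
}
// Server side: req.GetLoop() returns 0 when the field is unset, and the
// branch above only touches pullConfig.Loop when req.Loop != nil.
_ = req
```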

View File

@@ -3,7 +3,8 @@ snap:
transform:
.+:
output:
- watermark:
- conf:
watermark:
text: "abcd" # 水印文字内容
fontpath: /Users/dexter/Library/Fonts/MapleMono-NF-CN-Medium.ttf # 水印字体文件路径
fontcolor: "rgba(255,165,0,1)" # 水印字体颜色支持rgba格式

View File

@@ -128,17 +128,19 @@ snap:
transform:
.+:
output:
- watermark:
text: "abcd" # 水印文字内容
fontpath: /Users/dexter/Library/Fonts/MapleMono-NF-CN-Medium.ttf # 水印字体文件路径
fontcolor: "rgba(255,165,0,1)" # 水印字体颜色支持rgba格式
fontsize: 36 # 水印字体大小
offsetx: 0 # 水印位置X偏移
offsety: 0 # 水印位置Y偏移
timeinterval: 1s # 截图时间间隔
savepath: "snaps" # 截图保存路径
iframeinterval: 3 # 间隔多少帧截图
querytimedelta: 3 # 查询截图时允许的最大时间差(秒)
-
conf:
watermark:
text: "abcd" # 水印文字内容
fontpath: /Users/dexter/Library/Fonts/MapleMono-NF-CN-Medium.ttf # 水印字体文件路径
fontcolor: "rgba(255,165,0,1)" # 水印字体颜色支持rgba格式
fontsize: 36 # 水印字体大小
offsetx: 0 # 水印位置X偏移
offsety: 0 # 水印位置Y偏移
timeinterval: 1s # 截图时间间隔
savepath: "snaps" # 截图保存路径
iframeinterval: 3 # 间隔多少帧截图
querytimedelta: 3 # 查询截图时允许的最大时间差(秒)
onvif:
enable: false
discoverinterval: 3 # device discovery interval in seconds, default 30; should be larger than the rtsp plugin's reconnect interval

View File

@@ -10,6 +10,8 @@ builds:
- CGO_ENABLED=0
tags:
- sqlite
- mysql
- postgres
ldflags:
- -s -w -X m7s.live/v5.Version={{.Tag}}
goos:

View File

@@ -1,6 +1,6 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.36.6
// protoc-gen-go v1.36.10
// protoc v6.31.1
// source: auth.proto

View File

@@ -1,6 +1,6 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.36.6
// protoc-gen-go v1.36.10
// protoc v6.31.1
// source: global.proto
@@ -5568,6 +5568,7 @@ type GlobalPullRequest struct {
Protocol string `protobuf:"bytes,2,opt,name=protocol,proto3" json:"protocol,omitempty"`
TestMode int32 `protobuf:"varint,3,opt,name=testMode,proto3" json:"testMode,omitempty"` // 0: pull, 1: pull without publish
StreamPath string `protobuf:"bytes,4,opt,name=streamPath,proto3" json:"streamPath,omitempty"` // stream path
Loop *int32 `protobuf:"varint,22,opt,name=loop,proto3,oneof" json:"loop,omitempty"` // number of pull loops, -1 means loop forever
// Publish configuration
PubAudio *bool `protobuf:"varint,5,opt,name=pubAudio,proto3,oneof" json:"pubAudio,omitempty"`
PubVideo *bool `protobuf:"varint,6,opt,name=pubVideo,proto3,oneof" json:"pubVideo,omitempty"`
@@ -5648,6 +5649,13 @@ func (x *GlobalPullRequest) GetStreamPath() string {
return ""
}
func (x *GlobalPullRequest) GetLoop() int32 {
if x != nil && x.Loop != nil {
return *x.Loop
}
return 0
}
func (x *GlobalPullRequest) GetPubAudio() bool {
if x != nil && x.PubAudio != nil {
return *x.PubAudio
@@ -6366,35 +6374,37 @@ const file_global_proto_rawDesc = "" +
"\x1cSubscriptionProgressResponse\x12\x12\n" +
"\x04code\x18\x01 \x01(\x05R\x04code\x12\x18\n" +
"\amessage\x18\x02 \x01(\tR\amessage\x124\n" +
"\x04data\x18\x03 \x01(\v2 .global.SubscriptionProgressDataR\x04data\"\xe9\b\n" +
"\x04data\x18\x03 \x01(\v2 .global.SubscriptionProgressDataR\x04data\"\x8b\t\n" +
"\x11GlobalPullRequest\x12\x1c\n" +
"\tremoteURL\x18\x01 \x01(\tR\tremoteURL\x12\x1a\n" +
"\bprotocol\x18\x02 \x01(\tR\bprotocol\x12\x1a\n" +
"\btestMode\x18\x03 \x01(\x05R\btestMode\x12\x1e\n" +
"\n" +
"streamPath\x18\x04 \x01(\tR\n" +
"streamPath\x12\x1f\n" +
"\bpubAudio\x18\x05 \x01(\bH\x00R\bpubAudio\x88\x01\x01\x12\x1f\n" +
"\bpubVideo\x18\x06 \x01(\bH\x01R\bpubVideo\x88\x01\x01\x12L\n" +
"\x11delayCloseTimeout\x18\a \x01(\v2\x19.google.protobuf.DurationH\x02R\x11delayCloseTimeout\x88\x01\x01\x12\x19\n" +
"\x05speed\x18\b \x01(\x01H\x03R\x05speed\x88\x01\x01\x12\x1f\n" +
"\bmaxCount\x18\t \x01(\x05H\x04R\bmaxCount\x88\x01\x01\x12!\n" +
"streamPath\x12\x17\n" +
"\x04loop\x18\x16 \x01(\x05H\x00R\x04loop\x88\x01\x01\x12\x1f\n" +
"\bpubAudio\x18\x05 \x01(\bH\x01R\bpubAudio\x88\x01\x01\x12\x1f\n" +
"\bpubVideo\x18\x06 \x01(\bH\x02R\bpubVideo\x88\x01\x01\x12L\n" +
"\x11delayCloseTimeout\x18\a \x01(\v2\x19.google.protobuf.DurationH\x03R\x11delayCloseTimeout\x88\x01\x01\x12\x19\n" +
"\x05speed\x18\b \x01(\x01H\x04R\x05speed\x88\x01\x01\x12\x1f\n" +
"\bmaxCount\x18\t \x01(\x05H\x05R\bmaxCount\x88\x01\x01\x12!\n" +
"\tkickExist\x18\n" +
" \x01(\bH\x05R\tkickExist\x88\x01\x01\x12F\n" +
"\x0epublishTimeout\x18\v \x01(\v2\x19.google.protobuf.DurationH\x06R\x0epublishTimeout\x88\x01\x01\x12J\n" +
"\x10waitCloseTimeout\x18\f \x01(\v2\x19.google.protobuf.DurationH\aR\x10waitCloseTimeout\x88\x01\x01\x12@\n" +
"\vidleTimeout\x18\r \x01(\v2\x19.google.protobuf.DurationH\bR\vidleTimeout\x88\x01\x01\x12B\n" +
"\fpauseTimeout\x18\x0e \x01(\v2\x19.google.protobuf.DurationH\tR\fpauseTimeout\x88\x01\x01\x12>\n" +
" \x01(\bH\x06R\tkickExist\x88\x01\x01\x12F\n" +
"\x0epublishTimeout\x18\v \x01(\v2\x19.google.protobuf.DurationH\aR\x0epublishTimeout\x88\x01\x01\x12J\n" +
"\x10waitCloseTimeout\x18\f \x01(\v2\x19.google.protobuf.DurationH\bR\x10waitCloseTimeout\x88\x01\x01\x12@\n" +
"\vidleTimeout\x18\r \x01(\v2\x19.google.protobuf.DurationH\tR\vidleTimeout\x88\x01\x01\x12B\n" +
"\fpauseTimeout\x18\x0e \x01(\v2\x19.google.protobuf.DurationH\n" +
"R\fpauseTimeout\x88\x01\x01\x12>\n" +
"\n" +
"bufferTime\x18\x0f \x01(\v2\x19.google.protobuf.DurationH\n" +
"R\n" +
"bufferTime\x18\x0f \x01(\v2\x19.google.protobuf.DurationH\vR\n" +
"bufferTime\x88\x01\x01\x12\x19\n" +
"\x05scale\x18\x10 \x01(\x01H\vR\x05scale\x88\x01\x01\x12\x1b\n" +
"\x06maxFPS\x18\x11 \x01(\x05H\fR\x06maxFPS\x88\x01\x01\x12\x15\n" +
"\x03key\x18\x12 \x01(\tH\rR\x03key\x88\x01\x01\x12!\n" +
"\trelayMode\x18\x13 \x01(\tH\x0eR\trelayMode\x88\x01\x01\x12\x1d\n" +
"\apubType\x18\x14 \x01(\tH\x0fR\apubType\x88\x01\x01\x12\x17\n" +
"\x04dump\x18\x15 \x01(\bH\x10R\x04dump\x88\x01\x01B\v\n" +
"\x05scale\x18\x10 \x01(\x01H\fR\x05scale\x88\x01\x01\x12\x1b\n" +
"\x06maxFPS\x18\x11 \x01(\x05H\rR\x06maxFPS\x88\x01\x01\x12\x15\n" +
"\x03key\x18\x12 \x01(\tH\x0eR\x03key\x88\x01\x01\x12!\n" +
"\trelayMode\x18\x13 \x01(\tH\x0fR\trelayMode\x88\x01\x01\x12\x1d\n" +
"\apubType\x18\x14 \x01(\tH\x10R\apubType\x88\x01\x01\x12\x17\n" +
"\x04dump\x18\x15 \x01(\bH\x11R\x04dump\x88\x01\x01B\a\n" +
"\x05_loopB\v\n" +
"\t_pubAudioB\v\n" +
"\t_pubVideoB\x14\n" +
"\x12_delayCloseTimeoutB\b\n" +

View File

@@ -852,6 +852,7 @@ message GlobalPullRequest {
string protocol = 2;
int32 testMode = 3; // 0: pull, 1: pull without publish
string streamPath = 4; // stream path
optional int32 loop = 22; // number of pull loops, -1 means loop forever
// Publish configuration
optional bool pubAudio = 5;

274
pkg/av1_parse_test.go Normal file
View File

@@ -0,0 +1,274 @@
package pkg
import (
"testing"
"github.com/bluenviron/mediacommon/pkg/codecs/av1"
"github.com/langhuihui/gomem"
"m7s.live/v5/pkg/codec"
)
// TestParseAV1OBUs tests the ParseAV1OBUs method
func TestParseAV1OBUs(t *testing.T) {
t.Run("empty reader", func(t *testing.T) {
sample := &BaseSample{}
mem := gomem.Memory{}
reader := mem.NewReader()
err := sample.ParseAV1OBUs(&reader)
if err != nil {
t.Errorf("Expected no error for empty reader, got: %v", err)
}
})
t.Run("single OBU - Sequence Header", func(t *testing.T) {
sample := &BaseSample{}
// Create a simple AV1 OBU (Sequence Header)
// OBU Header: type=1 (SEQUENCE_HEADER), extension_flag=0, has_size_field=1
obuHeader := byte(0b00001010) // type=1, has_size=1
obuSize := byte(4) // Size of OBU payload
payload := []byte{0x08, 0x0C, 0x00, 0x00}
mem := gomem.Memory{}
mem.PushOne([]byte{obuHeader, obuSize})
mem.PushOne(payload)
reader := mem.NewReader()
err := sample.ParseAV1OBUs(&reader)
if err != nil {
t.Errorf("ParseAV1OBUs failed: %v", err)
}
nalus := sample.Raw.(*Nalus)
if nalus.Count() != 1 {
t.Errorf("Expected 1 OBU, got %d", nalus.Count())
}
})
t.Run("multiple OBUs", func(t *testing.T) {
sample := &BaseSample{}
mem := gomem.Memory{}
// First OBU - Temporal Delimiter
obuHeader1 := byte(0b00010010) // type=2 (TEMPORAL_DELIMITER), has_size=1
obuSize1 := byte(0)
mem.PushOne([]byte{obuHeader1, obuSize1})
// Second OBU - Frame Header with some payload
obuHeader2 := byte(0b00011010) // type=3 (FRAME_HEADER), has_size=1
obuSize2 := byte(3)
payload2 := []byte{0x01, 0x02, 0x03}
mem.PushOne([]byte{obuHeader2, obuSize2})
mem.PushOne(payload2)
reader := mem.NewReader()
err := sample.ParseAV1OBUs(&reader)
if err != nil {
t.Errorf("ParseAV1OBUs failed: %v", err)
}
nalus := sample.Raw.(*Nalus)
if nalus.Count() != 2 {
t.Errorf("Expected 2 OBUs, got %d", nalus.Count())
}
})
}
// TestGetOBUs tests the GetOBUs method
func TestGetOBUs(t *testing.T) {
t.Run("initialize empty OBUs", func(t *testing.T) {
sample := &BaseSample{}
obus := sample.GetOBUs()
if obus == nil {
t.Error("GetOBUs should return non-nil OBUs")
}
if sample.Raw != obus {
t.Error("Raw should be set to the returned OBUs")
}
})
t.Run("return existing OBUs", func(t *testing.T) {
existingOBUs := &OBUs{}
sample := &BaseSample{
Raw: existingOBUs,
}
obus := sample.GetOBUs()
if obus != existingOBUs {
t.Error("GetOBUs should return the existing OBUs")
}
})
}
// TestAV1OBUTypes tests all AV1 OBU type constants
func TestAV1OBUTypes(t *testing.T) {
tests := []struct {
name string
obuType int
expected int
}{
{"SEQUENCE_HEADER", codec.AV1_OBU_SEQUENCE_HEADER, 1},
{"TEMPORAL_DELIMITER", codec.AV1_OBU_TEMPORAL_DELIMITER, 2},
{"FRAME_HEADER", codec.AV1_OBU_FRAME_HEADER, 3},
{"TILE_GROUP", codec.AV1_OBU_TILE_GROUP, 4},
{"METADATA", codec.AV1_OBU_METADATA, 5},
{"FRAME", codec.AV1_OBU_FRAME, 6},
{"REDUNDANT_FRAME_HEADER", codec.AV1_OBU_REDUNDANT_FRAME_HEADER, 7},
{"TILE_LIST", codec.AV1_OBU_TILE_LIST, 8},
{"PADDING", codec.AV1_OBU_PADDING, 15},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if tt.obuType != tt.expected {
t.Errorf("OBU type %s: expected %d, got %d", tt.name, tt.expected, tt.obuType)
}
})
}
}
// TestAV1Integration tests the full integration of AV1 codec
func TestAV1Integration(t *testing.T) {
t.Run("create AV1 context and parse OBUs", func(t *testing.T) {
// Create AV1 codec context
ctx := &codec.AV1Ctx{
ConfigOBUs: []byte{0x0A, 0x0B, 0x00},
}
// Verify context properties
if ctx.GetInfo() != "AV1" {
t.Errorf("Expected 'AV1', got '%s'", ctx.GetInfo())
}
if ctx.FourCC() != codec.FourCC_AV1 {
t.Error("FourCC should be AV1")
}
// Create a sample with OBUs
sample := &Sample{
ICodecCtx: ctx,
BaseSample: &BaseSample{},
}
// Add some OBUs
obus := sample.GetOBUs()
obu := obus.GetNextPointer()
obu.PushOne([]byte{0x0A, 0x01, 0x02, 0x03})
// Verify OBU count
if obus.Count() != 1 {
t.Errorf("Expected 1 OBU, got %d", obus.Count())
}
})
}
// TestAV1OBUHeaderParsing tests parsing of actual AV1 OBU headers
func TestAV1OBUHeaderParsing(t *testing.T) {
tests := []struct {
name string
headerByte byte
obuType uint
hasSize bool
}{
{
name: "Sequence Header with size",
headerByte: 0b00001010, // type=1, has_size=1
obuType: 1,
hasSize: true,
},
{
name: "Frame with size",
headerByte: 0b00110010, // type=6, has_size=1
obuType: 6,
hasSize: true,
},
{
name: "Temporal Delimiter with size",
headerByte: 0b00010010, // type=2, has_size=1
obuType: 2,
hasSize: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
var header av1.OBUHeader
err := header.Unmarshal([]byte{tt.headerByte})
if err != nil {
t.Fatalf("Failed to unmarshal OBU header: %v", err)
}
if uint(header.Type) != tt.obuType {
t.Errorf("Expected OBU type %d, got %d", tt.obuType, header.Type)
}
if header.HasSize != tt.hasSize {
t.Errorf("Expected HasSize %v, got %v", tt.hasSize, header.HasSize)
}
})
}
}
// BenchmarkParseAV1OBUs benchmarks the OBU parsing performance
func BenchmarkParseAV1OBUs(b *testing.B) {
// Prepare test data
mem := gomem.Memory{}
for i := 0; i < 10; i++ {
obuHeader := byte(0b00110010) // Frame OBU
obuSize := byte(10)
payload := make([]byte, 10)
for j := range payload {
payload[j] = byte(j)
}
mem.PushOne([]byte{obuHeader, obuSize})
mem.PushOne(payload)
}
b.ResetTimer()
for i := 0; i < b.N; i++ {
sample := &BaseSample{}
reader := mem.NewReader()
_ = sample.ParseAV1OBUs(&reader)
}
}
// TestOBUsReuseArray tests the reuse array functionality with OBUs
func TestOBUsReuseArray(t *testing.T) {
t.Run("reuse OBU memory", func(t *testing.T) {
obus := &OBUs{}
// First allocation
obu1 := obus.GetNextPointer()
obu1.PushOne([]byte{1, 2, 3})
if obus.Count() != 1 {
t.Errorf("Expected count 1, got %d", obus.Count())
}
// Second allocation
obu2 := obus.GetNextPointer()
obu2.PushOne([]byte{4, 5, 6})
if obus.Count() != 2 {
t.Errorf("Expected count 2, got %d", obus.Count())
}
// Reset and reuse
obus.Reset()
if obus.Count() != 0 {
t.Errorf("Expected count 0 after reset, got %d", obus.Count())
}
// Reuse memory
obu3 := obus.GetNextPointer()
obu3.PushOne([]byte{7, 8, 9})
if obus.Count() != 1 {
t.Errorf("Expected count 1 after reuse, got %d", obus.Count())
}
})
}

View File

@@ -51,7 +51,7 @@ type (
AudioData = gomem.Memory
OBUs AudioData
OBUs = util.ReuseArray[gomem.Memory]
AVFrame struct {
DataFrame
@@ -148,6 +148,13 @@ func (b *BaseSample) GetNalus() *Nalus {
return b.Raw.(*Nalus)
}
func (b *BaseSample) GetOBUs() *OBUs {
if b.Raw == nil {
b.Raw = &OBUs{}
}
return b.Raw.(*OBUs)
}
func (b *BaseSample) GetAudioData() *AudioData {
if b.Raw == nil {
b.Raw = &AudioData{}
@@ -203,21 +210,21 @@ func (df *DataFrame) Ready() {
df.Unlock()
}
func (obus *OBUs) ParseAVCC(reader *gomem.MemoryReader) error {
func (b *BaseSample) ParseAV1OBUs(reader *gomem.MemoryReader) error {
var obuHeader av1.OBUHeader
startLen := reader.Length
for reader.Length > 0 {
offset := reader.Size - reader.Length
b, err := reader.ReadByte()
b0, err := reader.ReadByte()
if err != nil {
return err
}
err = obuHeader.Unmarshal([]byte{b})
err = obuHeader.Unmarshal([]byte{b0})
if err != nil {
return err
}
// if log.Trace {
// vt.Trace("obu", zap.Any("type", obuHeader.Type), zap.Bool("iframe", vt.Value.IFrame))
// vt.Trace("obu", zap.Any("type", obuHeader.Type), zap.Bool("iframe", vt.Value.IFrame))
// }
obuSize, _, _ := reader.LEB128Unmarshal()
end := reader.Size - reader.Length
@@ -227,15 +234,7 @@ func (obus *OBUs) ParseAVCC(reader *gomem.MemoryReader) error {
if err != nil {
return err
}
(*AudioData)(obus).PushOne(obu)
b.GetNalus().GetNextPointer().PushOne(obu)
}
return nil
}
func (obus *OBUs) Reset() {
((*gomem.Memory)(obus)).Reset()
}
func (obus *OBUs) Count() int {
return (*gomem.Memory)(obus).Count()
}
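
The obu_size value consumed by reader.LEB128Unmarshal() above is an unsigned LEB128 integer: seven payload bits per byte, least significant group first, with the high bit marking continuation. A small decoding sketch (not the gomem implementation) with a worked example:

```go
// leb128 decodes an unsigned LEB128 value, as used for obu_size.
func leb128(buf []byte) (v uint64, n int) {
	for i, b := range buf {
		v |= uint64(b&0x7F) << (7 * i)
		n++
		if b&0x80 == 0 {
			break
		}
	}
	return
}

// Example: 300 is encoded as 0xAC 0x02, since 0x2C + (0x02 << 7) = 44 + 256 = 300.
```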

187
pkg/codec/av1_test.go Normal file
View File

@@ -0,0 +1,187 @@
package codec
import (
"testing"
)
func TestAV1Ctx_GetInfo(t *testing.T) {
ctx := &AV1Ctx{
ConfigOBUs: []byte{0x0A, 0x0B, 0x00},
}
info := ctx.GetInfo()
if info != "AV1" {
t.Errorf("Expected 'AV1', got '%s'", info)
}
}
func TestAV1Ctx_GetBase(t *testing.T) {
ctx := &AV1Ctx{
ConfigOBUs: []byte{0x0A, 0x0B, 0x00},
}
base := ctx.GetBase()
if base != ctx {
t.Error("GetBase should return itself")
}
}
func TestAV1Ctx_Width(t *testing.T) {
ctx := &AV1Ctx{
ConfigOBUs: []byte{0x0A, 0x0B, 0x00},
}
width := ctx.Width()
if width != 0 {
t.Errorf("Expected width 0, got %d", width)
}
}
func TestAV1Ctx_Height(t *testing.T) {
ctx := &AV1Ctx{
ConfigOBUs: []byte{0x0A, 0x0B, 0x00},
}
height := ctx.Height()
if height != 0 {
t.Errorf("Expected height 0, got %d", height)
}
}
func TestAV1Ctx_FourCC(t *testing.T) {
ctx := &AV1Ctx{}
fourcc := ctx.FourCC()
expected := FourCC_AV1
if fourcc != expected {
t.Errorf("Expected %v, got %v", expected, fourcc)
}
// Verify the actual FourCC string
if fourcc.String() != "av01" {
t.Errorf("Expected 'av01', got '%s'", fourcc.String())
}
}
func TestAV1Ctx_GetRecord(t *testing.T) {
configOBUs := []byte{0x0A, 0x0B, 0x00, 0x01, 0x02}
ctx := &AV1Ctx{
ConfigOBUs: configOBUs,
}
record := ctx.GetRecord()
if len(record) != len(configOBUs) {
t.Errorf("Expected record length %d, got %d", len(configOBUs), len(record))
}
for i, b := range record {
if b != configOBUs[i] {
t.Errorf("Byte mismatch at index %d: expected %02X, got %02X", i, configOBUs[i], b)
}
}
}
func TestAV1Ctx_String(t *testing.T) {
tests := []struct {
name string
configOBUs []byte
expected string
}{
{
name: "Standard config",
configOBUs: []byte{0x0A, 0x0B, 0x00},
expected: "av01.0A0B00",
},
{
name: "Different config",
configOBUs: []byte{0x08, 0x0C, 0x00},
expected: "av01.080C00",
},
{
name: "High profile config",
configOBUs: []byte{0x0C, 0x10, 0x00},
expected: "av01.0C1000",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
ctx := &AV1Ctx{
ConfigOBUs: tt.configOBUs,
}
result := ctx.String()
if result != tt.expected {
t.Errorf("Expected '%s', got '%s'", tt.expected, result)
}
})
}
}
func TestAV1Ctx_EmptyConfigOBUs(t *testing.T) {
ctx := &AV1Ctx{
ConfigOBUs: []byte{},
}
// Should not panic when calling methods with empty ConfigOBUs
defer func() {
if r := recover(); r != nil {
t.Errorf("Panic occurred with empty ConfigOBUs: %v", r)
}
}()
_ = ctx.GetInfo()
_ = ctx.GetBase()
_ = ctx.FourCC()
_ = ctx.GetRecord()
// Note: String() will panic with empty ConfigOBUs due to array indexing
}
func TestAV1Ctx_NilConfigOBUs(t *testing.T) {
ctx := &AV1Ctx{
ConfigOBUs: nil,
}
// Should not panic for most methods
defer func() {
if r := recover(); r != nil {
t.Errorf("Panic occurred with nil ConfigOBUs: %v", r)
}
}()
_ = ctx.GetInfo()
_ = ctx.GetBase()
_ = ctx.FourCC()
record := ctx.GetRecord()
if record != nil {
t.Error("Expected nil record for nil ConfigOBUs")
}
}
// Test AV1 OBU Type Constants
func TestAV1_OBUTypeConstants(t *testing.T) {
tests := []struct {
name string
obuType int
expected int
}{
{"SEQUENCE_HEADER", AV1_OBU_SEQUENCE_HEADER, 1},
{"TEMPORAL_DELIMITER", AV1_OBU_TEMPORAL_DELIMITER, 2},
{"FRAME_HEADER", AV1_OBU_FRAME_HEADER, 3},
{"TILE_GROUP", AV1_OBU_TILE_GROUP, 4},
{"METADATA", AV1_OBU_METADATA, 5},
{"FRAME", AV1_OBU_FRAME, 6},
{"REDUNDANT_FRAME_HEADER", AV1_OBU_REDUNDANT_FRAME_HEADER, 7},
{"TILE_LIST", AV1_OBU_TILE_LIST, 8},
{"PADDING", AV1_OBU_PADDING, 15},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if tt.obuType != tt.expected {
t.Errorf("Expected OBU type %d, got %d", tt.expected, tt.obuType)
}
})
}
}

View File

@@ -1,3 +1,97 @@
/*
Package config provides a flexible, multi-source configuration system with priority-based value resolution.
## Overview
The config package implements a hierarchical configuration system that allows values to be set from
multiple sources with a defined priority order. This enables powerful features like:
- Environment variable overrides
- Dynamic runtime modifications
- Global and per-instance defaults
- Type-safe configuration using Go structs
## Configuration Priority
The system resolves values using the following priority order (highest to lowest):
1. Modify - Dynamic runtime modifications
2. Env - Environment variables
3. File - Values from config file
4. defaultYaml - Embedded default YAML configs
5. Global - Global/shared configuration
6. Default - Struct tag defaults or zero values
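
For example (illustrative only; the struct and the environment variable name below are not taken from a real plugin):

```go
type HTTPConf struct {
	ListenAddr string `yaml:"listenaddr" default:":8080"`
}
// Suppose the config file sets listenaddr: ":8081" and an environment
// variable such as GLOBAL_HTTP_LISTENADDR=":9090" is also present. Env sits
// above File in the priority list, so GetValue() resolves ListenAddr to
// ":9090"; removing the env var falls back to ":8081", and removing both
// yields the ":8080" struct-tag default.
```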
## Core Workflow
The configuration resolution follows a 5-step initialization process:
### Step 1: Parse
- Initialize the configuration tree from Go struct definitions
- Apply default values using struct tags
- Build the property map for all exported fields
- Set up environment variable prefixes
### Step 2: ParseGlobal
- Apply global/shared configuration values
- Useful for settings that should be consistent across instances
### Step 3: ParseDefaultYaml
- Load embedded default YAML configurations
- Provides sensible defaults without hardcoding in Go
### Step 4: ParseUserFile
- Read and apply user-provided configuration files
- Normalizes key names (removes hyphens, underscores, lowercases)
- Handles both struct mappings and single-value assignments
### Step 5: ParseModifyFile
- Apply dynamic runtime modifications
- Tracks changes separately for API purposes
- Automatically cleans up empty/unchanged values
## Key Features
### Type Conversion
The unmarshal function handles automatic conversion between different types:
- Basic types (int, string, bool, etc.)
- Duration strings with unit validation
- Regexp patterns
- Nested structs (with special handling for single non-struct values)
- Pointers, maps, slices, and arrays
- Fallback to YAML marshaling for unknown types
### Special Behaviors
- Single non-struct values are automatically assigned to the first field of struct types
- Key names are normalized (lowercase, remove hyphens/underscores)
- Environment variables use underscore-separated uppercase prefixes
- The "plugin" field is always skipped during parsing
- Fields with yaml:"-" tag are ignored
## Usage Example
```go
type ServerConf struct {
Host string `yaml:"host" default:"localhost"`
Port int `yaml:"port" default:"8080"`
Timeout time.Duration `yaml:"timeout" default:"30s"`
}
cfg := &ServerConf{}
var c Config // the Config node defined in this package
c.Parse(cfg)
// Load from various sources...
resolved := c.GetValue().(*ServerConf)
```
## API Structure
The main types and functions:
- Config: Core configuration node with value priority tracking
- Parse: Initialize configuration from struct
- ParseGlobal/ParseDefaultYaml/ParseUserFile/ParseModifyFile: Load from sources
- GetValue/GetMap: Retrieve resolved values
- MarshalJSON: Serialize configuration for API responses
*/
package config
import (
@@ -16,17 +110,17 @@ import (
)
type Config struct {
Ptr reflect.Value //指向配置结构体值,优先级:动态修改值>环境变量>配置文件>defaultYaml>全局配置>默认值
Modify any //动态修改的值
Env any //环境变量中的值
File any //配置文件中的值
Global *Config //全局配置中的值,指针类型
Default any //默认值
Ptr reflect.Value // Points to config struct value, priority: Modify > Env > File > defaultYaml > Global > Default
Modify any // Dynamic modified value
Env any // Value from environment variable
File any // Value from config file
Global *Config // Value from global config (pointer type)
Default any // Default value
Enum []struct {
Label string `json:"label"`
Value any `json:"value"`
}
name string // 小写
name string // Lowercase key name
propsMap map[string]*Config
props []*Config
tag reflect.StructTag
@@ -102,7 +196,7 @@ func (config *Config) GetValue() any {
return config.Ptr.Interface()
}
// Parse 第一步读取配置结构体的默认值
// Parse step 1: Read default values from config struct
func (config *Config) Parse(s any, prefix ...string) {
var t reflect.Type
var v reflect.Value
@@ -123,15 +217,14 @@ func (config *Config) Parse(s any, prefix ...string) {
fmt.Println("parse to ", prefix, config.name, s, "is not valid")
return
}
if l := len(prefix); l > 0 { // 读取环境变量
name := strings.ToLower(prefix[l-1])
if l := len(prefix); l > 0 { // Read environment variables
_, isUnmarshaler := v.Addr().Interface().(yaml.Unmarshaler)
tag := config.tag.Get("default")
if tag != "" && isUnmarshaler {
v.Set(config.assign(name, tag))
v.Set(config.assign(tag))
}
if envValue := os.Getenv(strings.Join(prefix, "_")); envValue != "" {
v.Set(config.assign(name, envValue))
v.Set(config.assign(envValue))
config.Env = v.Interface()
}
}
@@ -145,23 +238,23 @@ func (config *Config) Parse(s any, prefix ...string) {
}
name := strings.ToLower(ft.Name)
if name == "plugin" {
continue
continue // Skip plugin field
}
if tag := ft.Tag.Get("yaml"); tag != "" {
if tag == "-" {
continue
continue // Skip field if tag is "-"
}
name, _, _ = strings.Cut(tag, ",")
name, _, _ = strings.Cut(tag, ",") // Use yaml tag name, ignore options
}
prop := config.Get(name)
prop.tag = ft.Tag
if len(prefix) > 0 {
prop.Parse(fv, append(prefix, strings.ToUpper(ft.Name))...)
prop.Parse(fv, append(prefix, strings.ToUpper(ft.Name))...) // Recursive parse with env prefix
} else {
prop.Parse(fv)
}
for _, kv := range strings.Split(ft.Tag.Get("enum"), ",") {
for _, kv := range strings.Split(ft.Tag.Get("enum"), ",") { // Parse enum options from tag
kvs := strings.Split(kv, ":")
if len(kvs) != 2 {
continue
@@ -182,7 +275,7 @@ func (config *Config) Parse(s any, prefix ...string) {
}
}
// ParseDefaultYaml 第二步读取全局配置
// ParseGlobal step 2: Read global config
func (config *Config) ParseGlobal(g *Config) {
config.Global = g
if config.propsMap != nil {
@@ -190,11 +283,11 @@ func (config *Config) ParseGlobal(g *Config) {
v.ParseGlobal(g.Get(k))
}
} else {
config.Ptr.Set(g.Ptr)
config.Ptr.Set(g.Ptr) // If no sub-properties, copy value directly
}
}
// ParseDefaultYaml 第三步读取内嵌默认配置
// ParseDefaultYaml step 3: Read embedded default config
func (config *Config) ParseDefaultYaml(defaultYaml map[string]any) {
if defaultYaml == nil {
return
@@ -206,9 +299,9 @@ func (config *Config) ParseDefaultYaml(defaultYaml map[string]any) {
prop.ParseDefaultYaml(v.(map[string]any))
}
} else {
dv := prop.assign(k, v)
dv := prop.assign(v)
prop.Default = dv.Interface()
if prop.Env == nil {
if prop.Env == nil { // Only set if no env var override
prop.Ptr.Set(dv)
}
}
@@ -216,15 +309,15 @@ func (config *Config) ParseDefaultYaml(defaultYaml map[string]any) {
}
}
// ParseFile 第四步读取用户配置文件
// ParseFile step 4: Read user config file
func (config *Config) ParseUserFile(conf map[string]any) {
if conf == nil {
return
}
config.File = conf
for k, v := range conf {
k = strings.ReplaceAll(k, "-", "")
k = strings.ReplaceAll(k, "_", "")
k = strings.ReplaceAll(k, "-", "") // Normalize key name: remove hyphens
k = strings.ReplaceAll(k, "_", "") // Normalize key name: remove underscores
k = strings.ToLower(k)
if config.Has(k) {
if prop := config.Get(k); prop.props != nil {
@@ -233,18 +326,19 @@ func (config *Config) ParseUserFile(conf map[string]any) {
case map[string]any:
prop.ParseUserFile(vv)
default:
// If the value is not a map (single non-struct value), assign it to the first field
prop.props[0].Ptr.Set(reflect.ValueOf(v))
}
}
} else {
fv := prop.assign(k, v)
fv := prop.assign(v)
if fv.IsValid() {
prop.File = fv.Interface()
if prop.Env == nil {
if prop.Env == nil { // Only set if no env var override
prop.Ptr.Set(fv)
}
} else {
// continue invalid field
// Continue with invalid field
slog.Error("Attempted to access invalid field during config parsing: %s", v)
}
}
@@ -252,7 +346,7 @@ func (config *Config) ParseUserFile(conf map[string]any) {
}
}
// ParseModifyFile 第五步读取动态修改配置文件
// ParseModifyFile step 5: Read dynamic modified config
func (config *Config) ParseModifyFile(conf map[string]any) {
if conf == nil {
return
@@ -264,15 +358,15 @@ func (config *Config) ParseModifyFile(conf map[string]any) {
if v != nil {
vmap := v.(map[string]any)
prop.ParseModifyFile(vmap)
if len(vmap) == 0 {
if len(vmap) == 0 { // Remove empty map
delete(conf, k)
}
}
} else {
mv := prop.assign(k, v)
mv := prop.assign(v)
v = mv.Interface()
vwm := prop.valueWithoutModify()
if equal(vwm, v) {
vwm := prop.valueWithoutModify() // Get value without modify
if equal(vwm, v) { // No change, remove from modify
delete(conf, k)
if prop.Modify != nil {
prop.Modify = nil
@@ -285,12 +379,13 @@ func (config *Config) ParseModifyFile(conf map[string]any) {
}
}
}
if len(conf) == 0 {
if len(conf) == 0 { // Clear modify if empty
config.Modify = nil
}
}
func (config *Config) valueWithoutModify() any {
// Return value with priority: Env > File > Global > Default (excluding Modify)
if config.Env != nil {
return config.Env
}
@@ -317,13 +412,14 @@ func equal(vwm, v any) bool {
}
func (config *Config) GetMap() map[string]any {
// Convert config tree to map representation
m := make(map[string]any)
for k, v := range config.propsMap {
if v.props != nil {
if v.props != nil { // Has sub-properties
if vv := v.GetMap(); vv != nil {
m[k] = vv
}
} else if v.GetValue() != nil {
} else if v.GetValue() != nil { // Leaf value
m[k] = v.GetValue()
}
}
@@ -337,6 +433,7 @@ var regexPureNumber = regexp.MustCompile(`^\d+$`)
func unmarshal(ft reflect.Type, v any) (target reflect.Value) {
source := reflect.ValueOf(v)
// Fast path: directly return if both are basic types
for _, t := range basicTypes {
if source.Kind() == t && ft.Kind() == t {
return source
@@ -351,6 +448,7 @@ func unmarshal(ft reflect.Type, v any) (target reflect.Value) {
target.SetInt(0)
} else {
timeStr := source.String()
// Parse duration string, but reject pure numbers (must have unit)
if d, err := time.ParseDuration(timeStr); err == nil && !regexPureNumber.MatchString(timeStr) {
target.SetInt(int64(d))
} else {
@@ -365,11 +463,12 @@ func unmarshal(ft reflect.Type, v any) (target reflect.Value) {
default:
switch ft.Kind() {
case reflect.Pointer:
return unmarshal(ft.Elem(), v).Addr()
return unmarshal(ft.Elem(), v).Addr() // Recurse to element type
case reflect.Struct:
newStruct := reflect.New(ft)
defaults.SetDefaults(newStruct.Interface())
if value, ok := v.(map[string]any); ok {
// If the value is a map, unmarshal each field by matching keys
for i := 0; i < ft.NumField(); i++ {
key := strings.ToLower(ft.Field(i).Name)
if vv, ok := value[key]; ok {
@@ -377,6 +476,7 @@ func unmarshal(ft reflect.Type, v any) (target reflect.Value) {
}
}
} else {
// If the value is not a map (single non-struct value), assign it to the first field
newStruct.Elem().Field(0).Set(unmarshal(ft.Field(0).Type, v))
}
return newStruct.Elem()
@@ -384,6 +484,7 @@ func unmarshal(ft reflect.Type, v any) (target reflect.Value) {
if v != nil {
target = reflect.MakeMap(ft)
for k, v := range v.(map[string]any) {
// Unmarshal key and value recursively
target.SetMapIndex(unmarshal(ft.Key(), k), unmarshal(ft.Elem(), v))
}
}
@@ -392,11 +493,12 @@ func unmarshal(ft reflect.Type, v any) (target reflect.Value) {
s := v.([]any)
target = reflect.MakeSlice(ft, len(s), len(s))
for i, v := range s {
target.Index(i).Set(unmarshal(ft.Elem(), v))
target.Index(i).Set(unmarshal(ft.Elem(), v)) // Unmarshal each element
}
}
default:
if v != nil {
// For unknown types, use YAML marshal/unmarshal as fallback
var out []byte
var err error
if vv, ok := v.(string); ok {
@@ -407,6 +509,7 @@ func unmarshal(ft reflect.Type, v any) (target reflect.Value) {
panic(err)
}
}
// Create temporary struct with single Value field
tmpValue := reflect.New(reflect.StructOf([]reflect.StructField{
{
Name: "Value",
@@ -424,7 +527,8 @@ func unmarshal(ft reflect.Type, v any) (target reflect.Value) {
return
}
func (config *Config) assign(k string, v any) reflect.Value {
func (config *Config) assign(v any) reflect.Value {
// Convert value to the same type as Ptr
return unmarshal(config.Ptr.Type(), v)
}

254
pkg/format/av1_test.go Normal file
View File

@@ -0,0 +1,254 @@
package format
import (
"testing"
"time"
"github.com/langhuihui/gomem"
"m7s.live/v5/pkg"
"m7s.live/v5/pkg/codec"
)
func TestAV1Frame_CheckCodecChange(t *testing.T) {
// Test with nil codec context - should return error
t.Run("nil codec context", func(t *testing.T) {
frame := &AV1Frame{}
err := frame.CheckCodecChange()
if err != pkg.ErrUnsupportCodec {
t.Errorf("Expected ErrUnsupportCodec, got %v", err)
}
})
// Test with valid AV1 codec context
t.Run("valid codec context", func(t *testing.T) {
frame := &AV1Frame{
Sample: pkg.Sample{
ICodecCtx: &codec.AV1Ctx{
ConfigOBUs: []byte{0x0A, 0x0B, 0x00},
},
},
}
err := frame.CheckCodecChange()
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
})
}
func TestAV1Frame_GetSize(t *testing.T) {
t.Run("empty OBUs", func(t *testing.T) {
frame := &AV1Frame{
Sample: pkg.Sample{
BaseSample: &pkg.BaseSample{
Raw: &pkg.OBUs{},
},
},
}
size := frame.GetSize()
if size != 0 {
t.Errorf("Expected size 0, got %d", size)
}
})
t.Run("with OBUs", func(t *testing.T) {
obus := &pkg.OBUs{}
// Add first OBU
obu1 := obus.GetNextPointer()
obu1.PushOne([]byte{1, 2, 3, 4})
// Add second OBU
obu2 := obus.GetNextPointer()
obu2.PushOne([]byte{5, 6, 7, 8, 9})
frame := &AV1Frame{
Sample: pkg.Sample{
BaseSample: &pkg.BaseSample{
Raw: obus,
},
},
}
size := frame.GetSize()
expectedSize := 4 + 5 // Total bytes in both OBUs
if size != expectedSize {
t.Errorf("Expected size %d, got %d", expectedSize, size)
}
})
t.Run("non-OBUs raw data", func(t *testing.T) {
frame := &AV1Frame{
Sample: pkg.Sample{
BaseSample: &pkg.BaseSample{
Raw: &gomem.Memory{},
},
},
}
size := frame.GetSize()
if size != 0 {
t.Errorf("Expected size 0 for non-OBUs raw data, got %d", size)
}
})
}
func TestAV1Frame_Demux(t *testing.T) {
mem := gomem.Memory{}
mem.PushOne([]byte{1, 2, 3, 4, 5})
frame := &AV1Frame{
Sample: pkg.Sample{
RecyclableMemory: gomem.RecyclableMemory{
Memory: mem,
},
BaseSample: &pkg.BaseSample{},
},
}
err := frame.Demux()
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
// After demux, Raw should point to the Memory
if frame.Sample.BaseSample.Raw != &frame.Sample.RecyclableMemory.Memory {
t.Error("Raw should point to Memory after Demux")
}
}
func TestAV1Frame_Mux(t *testing.T) {
// Create source sample with OBUs
obus := &pkg.OBUs{}
obu1 := obus.GetNextPointer()
obu1.PushOne([]byte{1, 2, 3})
obu2 := obus.GetNextPointer()
obu2.PushOne([]byte{4, 5, 6, 7})
ctx := &codec.AV1Ctx{
ConfigOBUs: []byte{0x0A, 0x0B, 0x00},
}
sourceSample := &pkg.Sample{
ICodecCtx: ctx,
BaseSample: &pkg.BaseSample{
Raw: obus,
Timestamp: time.Second,
CTS: 100 * time.Millisecond,
},
}
// Create destination frame
destFrame := &AV1Frame{
Sample: pkg.Sample{
BaseSample: &pkg.BaseSample{},
},
}
// Perform mux
err := destFrame.Mux(sourceSample)
if err != nil {
t.Errorf("Unexpected error: %v", err)
}
// Verify codec context is set
if destFrame.ICodecCtx != ctx {
t.Error("Codec context not set correctly")
}
// Verify data was copied
if destFrame.Memory.Size != 7 { // 3 + 4 bytes
t.Errorf("Expected memory size 7, got %d", destFrame.Memory.Size)
}
}
func TestAV1Frame_String(t *testing.T) {
frame := &AV1Frame{
Sample: pkg.Sample{
ICodecCtx: &codec.AV1Ctx{
ConfigOBUs: []byte{0x0A, 0x0B, 0x00},
},
BaseSample: &pkg.BaseSample{
Timestamp: time.Second,
CTS: 100 * time.Millisecond,
},
},
}
str := frame.String()
// Should contain AV1Frame, FourCC, Timestamp, and CTS
if len(str) == 0 {
t.Error("String() should not return empty string")
}
// The string should contain key information
t.Logf("AV1Frame.String() output: %s", str)
}
func TestAV1Frame_Workflow(t *testing.T) {
// Test the complete workflow: create -> demux -> mux
t.Run("complete workflow", func(t *testing.T) {
// Step 1: Create a frame with sample data
mem := gomem.Memory{}
mem.PushOne([]byte{1, 2, 3, 4, 5})
ctx := &codec.AV1Ctx{
ConfigOBUs: []byte{0x0A, 0x0B, 0x00},
}
originalFrame := &AV1Frame{
Sample: pkg.Sample{
ICodecCtx: ctx,
RecyclableMemory: gomem.RecyclableMemory{
Memory: mem,
},
BaseSample: &pkg.BaseSample{
Timestamp: time.Second,
CTS: 100 * time.Millisecond,
IDR: true,
},
},
}
// Step 2: Demux
err := originalFrame.Demux()
if err != nil {
t.Fatalf("Demux failed: %v", err)
}
// Step 3: Create OBUs for muxing
obus := &pkg.OBUs{}
obu := obus.GetNextPointer()
obu.PushOne([]byte{10, 20, 30})
sourceSample := &pkg.Sample{
ICodecCtx: ctx,
BaseSample: &pkg.BaseSample{
Raw: obus,
},
}
// Step 4: Mux into new frame
newFrame := &AV1Frame{
Sample: pkg.Sample{
BaseSample: &pkg.BaseSample{},
},
}
err = newFrame.Mux(sourceSample)
if err != nil {
t.Fatalf("Mux failed: %v", err)
}
// Step 5: Verify codec context
if newFrame.ICodecCtx != ctx {
t.Error("Codec context not preserved")
}
// Step 6: Check codec change should not return error
err = newFrame.CheckCodecChange()
if err != nil {
t.Errorf("CheckCodecChange failed: %v", err)
}
})
}

View File

@@ -127,5 +127,46 @@ func (r *H26xFrame) GetSize() (ret int) {
}
func (h *H26xFrame) String() string {
return fmt.Sprintf("H26xFrame{FourCC: %s, Timestamp: %s, CTS: %s}", h.FourCC, h.Timestamp, h.CTS)
return fmt.Sprintf("H26xFrame{FourCC: %s, Timestamp: %s, CTS: %s}", h.FourCC(), h.Timestamp, h.CTS)
}
var _ pkg.IAVFrame = (*AV1Frame)(nil)
type AV1Frame struct {
pkg.Sample
}
func (a *AV1Frame) CheckCodecChange() (err error) {
if a.ICodecCtx == nil {
return pkg.ErrUnsupportCodec
}
return nil
}
func (a *AV1Frame) GetSize() (ret int) {
if obus, ok := a.Raw.(*pkg.OBUs); ok {
for obu := range obus.RangePoint {
ret += obu.Size
}
}
return
}
func (a *AV1Frame) Demux() error {
a.Raw = &a.Memory
return nil
}
func (a *AV1Frame) Mux(from *pkg.Sample) (err error) {
a.InitRecycleIndexes(0)
obus := from.Raw.(*pkg.OBUs)
for obu := range obus.RangePoint {
a.Push(obu.Buffers...)
}
a.ICodecCtx = from.GetBase()
return
}
func (a *AV1Frame) String() string {
return fmt.Sprintf("AV1Frame{FourCC: %s, Timestamp: %s, CTS: %s}", a.FourCC(), a.Timestamp, a.CTS)
}

View File

@@ -1225,9 +1225,9 @@ func (gb *GB28181Plugin) QueryRecord(ctx context.Context, req *pb.QueryRecordReq
resp.Code = 0
resp.Message = fmt.Sprintf("success, received %d/%d records", recordReq.ReceivedNum, recordReq.SumNum)
// 排序录像列表按StartTime序排序
// Sort the recording list by StartTime in descending order (newest first)
sort.Slice(resp.Data, func(i, j int) bool {
return resp.Data[i].StartTime < resp.Data[j].StartTime
return resp.Data[i].StartTime > resp.Data[j].StartTime
})
// Clean up the request
@@ -3353,3 +3353,142 @@ func (gb *GB28181Plugin) DeleteChannelWithProxy(ctx context.Context, req *pb.Del
resp.Message = "通道删除成功"
return resp, nil
}
// StartDownload implements the API for starting a recording download
func (gb *GB28181Plugin) StartDownload(ctx context.Context, req *pb.StartDownloadRequest) (*pb.StartDownloadResponse, error) {
resp := &pb.StartDownloadResponse{}
// 1. Validate parameters
if req.DeviceId == "" || req.ChannelId == "" {
resp.Code = 400
resp.Message = "deviceId 和 channelId 不能为空"
return resp, nil
}
if req.Start == "" || req.End == "" {
resp.Code = 400
resp.Message = "start 和 end 时间不能为空"
return resp, nil
}
// 2. Parse the time range
startTime, endTime, err := util.TimeRangeQueryParse(url.Values{
"start": []string{req.Start},
"end": []string{req.End},
})
if err != nil {
resp.Code = 400
resp.Message = fmt.Sprintf("时间解析失败: %v", err)
return resp, nil
}
// 3. Verify that the device and channel exist
device, ok := gb.devices.Get(req.DeviceId)
if !ok {
resp.Code = 404
resp.Message = "设备不存在"
return resp, nil
}
channelKey := req.DeviceId + "_" + req.ChannelId
_, ok = device.channels.Get(channelKey)
if !ok {
resp.Code = 404
resp.Message = "通道不存在"
return resp, nil
}
// 4. Generate the download task ID
downloadId := fmt.Sprintf("%d_%d_%s_%s", startTime.Unix(), endTime.Unix(), req.DeviceId, req.ChannelId)
// 5. Check whether the task already exists
if existingDialog, exists := gb.downloadDialogs.Get(downloadId); exists {
resp.Code = 200
resp.Message = "下载任务已存在"
resp.Total = 0
resp.Data = &pb.StartDownloadData{
DownloadId: downloadId,
Status: existingDialog.Status,
DownloadUrl: existingDialog.DownloadUrl,
}
return resp, nil
}
// 6. The download URL is generated dynamically once recording starts;
// initially empty, it is filled from the database on progress updates
downloadUrl := ""
// 7. Create the download dialog
downloadSpeed := int(req.DownloadSpeed)
if downloadSpeed <= 0 || downloadSpeed > 4 {
downloadSpeed = 1 // default to 1x speed to avoid frame loss
}
dialog := &DownloadDialog{
gb: gb,
DownloadId: downloadId,
DeviceId: req.DeviceId,
ChannelId: req.ChannelId,
StartTime: startTime,
EndTime: endTime,
DownloadSpeed: downloadSpeed,
DownloadUrl: downloadUrl,
Status: "pending",
Progress: 0,
}
dialog.Task.Context = ctx
// 8. Add to the download dialog collection (Start is invoked automatically)
gb.downloadDialogs.AddTask(dialog)
resp.Code = 0
resp.Message = "下载任务已创建"
resp.Total = 0
resp.Data = &pb.StartDownloadData{
DownloadId: downloadId,
Status: "pending",
DownloadUrl: downloadUrl,
}
return resp, nil
}
// GetDownloadProgress implements the API for querying download progress
func (gb *GB28181Plugin) GetDownloadProgress(ctx context.Context, req *pb.GetDownloadProgressRequest) (*pb.DownloadProgressResponse, error) {
resp := &pb.DownloadProgressResponse{}
// 1. Validate parameters
if req.DownloadId == "" {
resp.Code = 400
resp.Message = "downloadId 不能为空"
return resp, nil
}
// 2. Look up the task
dialog, exists := gb.downloadDialogs.Get(req.DownloadId)
if !exists {
resp.Code = 404
resp.Message = "下载任务不存在"
return resp, nil
}
// 3. Build the response
resp.Code = 0
resp.Message = "success"
resp.Total = 0
resp.Data = &pb.DownloadProgressData{
DownloadId: dialog.DownloadId,
Status: dialog.Status,
Progress: int32(dialog.Progress),
FilePath: dialog.FilePath,
DownloadUrl: dialog.DownloadUrl,
Error: dialog.Error,
DownloadedBytes: dialog.DownloadedBytes,
TotalBytes: dialog.TotalBytes,
StartedAt: timestamppb.New(dialog.StartedAt),
}
if !dialog.CompletedAt.IsZero() {
resp.Data.CompletedAt = timestamppb.New(dialog.CompletedAt)
}
return resp, nil
}
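
A hedged sketch of how a client might drive these two endpoints — the request and response field names follow the pb usage above, while the client handle, ID values, and time format are illustrative assumptions:

```go
resp, err := client.StartDownload(ctx, &pb.StartDownloadRequest{
	DeviceId:      "34020000001320000001", // illustrative GB28181 IDs
	ChannelId:     "34020000001310000001",
	Start:         "2025-10-30 10:00:00", // must satisfy util.TimeRangeQueryParse
	End:           "2025-10-30 10:30:00",
	DownloadSpeed: 4, // values outside 1-4 fall back to 1 on the server
})
if err == nil && resp.Code == 0 {
	// Poll GetDownloadProgress with resp.Data.DownloadId until Status is
	// "completed", then fetch the file via resp.Data.DownloadUrl.
}
```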

View File

@@ -2,6 +2,7 @@ package plugin_gb28181pro
import (
"context"
"errors"
"fmt"
"net"
"net/http"
@@ -131,7 +132,7 @@ func (d *Device) Dispose() {
// Save the channels currently held in memory
if d.channels.Length > 0 {
d.channels.Range(func(channel *Channel) bool {
if err := d.plugin.DB.Create(channel.DeviceChannel).Error; err != nil {
if err := d.plugin.DB.Save(channel.DeviceChannel).Error; err != nil {
d.Error("保存设备通道记录失败", "error", err)
}
if channel.PullProxyTask != nil {
@@ -144,6 +145,9 @@ func (d *Device) Dispose() {
}
// Save the device info
d.plugin.DB.Save(d)
if deviceRegisterQueueTask, ok := d.plugin.deviceRegisterManager.Get(d.DeviceId); ok {
deviceRegisterQueueTask.Stop(errors.New("设备注销"))
}
}
}

View File

@@ -11,7 +11,7 @@ import (
sipgo "github.com/emiago/sipgo"
"github.com/emiago/sipgo/sip"
"github.com/langhuihui/gotask"
task "github.com/langhuihui/gotask"
m7s "m7s.live/v5"
pkg "m7s.live/v5/pkg"
"m7s.live/v5/pkg/util"
@@ -141,9 +141,9 @@ func (d *Dialog) Start() (err error) {
d.MediaPort = d.gb.tcpPort
} else {
if d.gb.MediaPort.Valid() {
select {
case d.MediaPort = <-d.gb.tcpPorts:
default:
var ok bool
d.MediaPort, ok = d.gb.tcpPB.Allocate()
if !ok {
d.pullCtx.Fail("no available tcp port")
return fmt.Errorf("no available tcp port")
}
@@ -156,9 +156,10 @@ func (d *Dialog) Start() (err error) {
d.MediaPort = d.gb.udpPort
} else {
if d.gb.MediaPort.Valid() {
select {
case d.MediaPort = <-d.gb.udpPorts:
default:
var ok bool
d.MediaPort, ok = d.gb.udpPB.Allocate()
if !ok {
d.pullCtx.Fail("no available udp port")
return fmt.Errorf("no available udp port")
}
} else {
@@ -424,15 +425,20 @@ func (d *Dialog) GetKey() string {
}
func (d *Dialog) Dispose() {
if d.StreamMode == mrtp.StreamModeUDP {
if d.gb.udpPort == 0 { //多端口
// 如果没有设置udp端口则将MediaPort设置为0表示不再使用
d.gb.udpPorts <- d.MediaPort
switch d.StreamMode {
case mrtp.StreamModeUDP:
if d.gb.udpPort == 0 { // multi-port mode
// Recycle the port, guarding against double release
if !d.gb.udpPB.Release(d.MediaPort) {
d.Warn("port already released or not allocated", "port", d.MediaPort, "type", "udp")
}
}
} else {
if d.gb.tcpPort == 0 {
// 如果没有设置tcp端口则将MediaPort设置为0表示不再使用
d.gb.tcpPorts <- d.MediaPort
case mrtp.StreamModeTCPPassive:
if d.gb.tcpPort == 0 { // multi-port mode
// Recycle the port, guarding against double release
if !d.gb.tcpPB.Release(d.MediaPort) {
d.Warn("port already released or not allocated", "port", d.MediaPort, "type", "tcp")
}
}
}
d.Info("dialog dispose", "ssrc", d.SSRC, "mediaPort", d.MediaPort, "streamMode", d.StreamMode, "deviceId", d.Channel.DeviceId, "channelId", d.Channel.ChannelId)

View File

@@ -0,0 +1,76 @@
package plugin_gb28181pro
import (
"net/http"
"os"
"path/filepath"
"strconv"
"strings"
)
// handleDownloadFile handles file download requests
func (gb *GB28181Plugin) handleDownloadFile(w http.ResponseWriter, r *http.Request) {
// Extract parameters from the URL path
// Path format: /gb28181/download/{deviceId}/{channelId}/{filename}
pathParts := strings.Split(strings.TrimPrefix(r.URL.Path, "/"), "/")
if len(pathParts) < 3 {
http.Error(w, "Invalid path", http.StatusBadRequest)
return
}
deviceId := pathParts[len(pathParts)-3]
channelId := pathParts[len(pathParts)-2]
filename := pathParts[len(pathParts)-1]
// Validate the filename (prevent path traversal attacks)
if strings.Contains(filename, "..") || strings.Contains(filename, "/") {
http.Error(w, "Invalid filename", http.StatusBadRequest)
return
}
// Build the file path
filePath := filepath.Join("download", deviceId, channelId, filename)
// Check whether the file exists
fileInfo, err := os.Stat(filePath)
if err != nil {
if os.IsNotExist(err) {
http.Error(w, "File not found", http.StatusNotFound)
} else {
http.Error(w, "Internal server error", http.StatusInternalServerError)
}
gb.Error("文件访问失败", "filePath", filePath, "error", err)
return
}
// Make sure it is a file, not a directory
if fileInfo.IsDir() {
http.Error(w, "Path is a directory", http.StatusBadRequest)
return
}
// Open the file
file, err := os.Open(filePath)
if err != nil {
http.Error(w, "Failed to open file", http.StatusInternalServerError)
gb.Error("打开文件失败", "filePath", filePath, "error", err)
return
}
defer file.Close()
// Set response headers
w.Header().Set("Content-Type", "video/mp4")
w.Header().Set("Content-Disposition", "attachment; filename="+filename)
w.Header().Set("Content-Length", strconv.FormatInt(fileInfo.Size(), 10))
w.Header().Set("Accept-Ranges", "bytes")
// Support resumable downloads (range requests)
http.ServeContent(w, r, filename, fileInfo.ModTime(), file)
gb.Info("文件下载",
"deviceId", deviceId,
"channelId", channelId,
"filename", filename,
"size", fileInfo.Size(),
"remote", r.RemoteAddr)
}
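
Since the handler advertises Accept-Ranges and serves the file through http.ServeContent, an interrupted download can be resumed with a Range request. A hedged client sketch — the /gb28181/ mount prefix, IDs, and filename are assumptions; only the path shape comes from the comment at the top of the handler:

```go
package main

import (
	"fmt"
	"net/http"
)

func main() {
	url := "http://localhost:8080/gb28181/download/" +
		"34020000001320000001/34020000001310000001/record.mp4"
	req, _ := http.NewRequest("GET", url, nil)
	req.Header.Set("Range", "bytes=1048576-") // resume from the 1 MiB mark
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status) // expect "206 Partial Content"
}
```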

View File

@@ -0,0 +1,512 @@
package plugin_gb28181pro
import (
"fmt"
"io"
"os"
"path/filepath"
"strconv"
"strings"
"time"
"m7s.live/v5"
sipgo "github.com/emiago/sipgo"
"github.com/emiago/sipgo/sip"
task "github.com/langhuihui/gotask"
"m7s.live/v5/pkg/config"
gb28181 "m7s.live/v5/plugin/gb28181/pkg"
mrtp "m7s.live/v5/plugin/rtp/pkg"
)
// DownloadDialog is a recording download session
type DownloadDialog struct {
task.Task
gb28181.InviteOptions
gb *GB28181Plugin
session *sipgo.DialogClientSession
device *Device
channel *Channel
MediaPort uint16
SSRC uint32
targetIP string
targetPort uint16
// Task info
DownloadId string
DeviceId string
ChannelId string
StartTime time.Time
EndTime time.Time
DownloadSpeed int // download speed multiplier, 1-4x, default 1x
// Status info
Status string // pending/downloading/completed/failed
Progress int // 0-100
FilePath string
DownloadUrl string // download URL
Error string
DownloadedBytes int64
TotalBytes int64
StartedAt time.Time
CompletedAt time.Time
}
// GetKey returns the unique identifier of the download task
func (d *DownloadDialog) GetKey() string {
return d.DownloadId
}
// Start starts the download session
func (d *DownloadDialog) Start() error {
// Update the status
d.Status = "downloading"
d.StartedAt = time.Now()
// 1. Get the device and channel
device, ok := d.gb.devices.Get(d.DeviceId)
if !ok {
d.Status = "failed"
d.Error = fmt.Sprintf("设备不存在: %s", d.DeviceId)
return fmt.Errorf(d.Error)
}
d.device = device
channelKey := d.DeviceId + "_" + d.ChannelId
channel, ok := device.channels.Get(channelKey)
if !ok {
d.Status = "failed"
d.Error = fmt.Sprintf("通道不存在: %s", d.ChannelId)
return fmt.Errorf(d.Error)
}
d.channel = channel
// 2. Allocate a media port
switch d.device.StreamMode {
case mrtp.StreamModeTCPPassive:
if d.gb.tcpPort > 0 {
d.MediaPort = d.gb.tcpPort
} else {
if d.gb.MediaPort.Valid() {
var ok bool
d.MediaPort, ok = d.gb.tcpPB.Allocate()
if !ok {
return fmt.Errorf("no available tcp port")
}
} else {
d.MediaPort = d.gb.MediaPort[0]
}
}
case mrtp.StreamModeUDP:
if d.gb.udpPort > 0 {
d.MediaPort = d.gb.udpPort
} else {
if d.gb.MediaPort.Valid() {
var ok bool
d.MediaPort, ok = d.gb.udpPB.Allocate()
if !ok {
return fmt.Errorf("no available udp port")
}
} else {
d.MediaPort = d.gb.MediaPort[0]
}
}
}
// 3. Generate the SSRC
d.SSRC = device.CreateSSRC(d.gb.Serial)
// 4. Build the SDP
startTimestamp := d.StartTime.Unix()
endTimestamp := d.EndTime.Unix()
sdpInfo := []string{
"v=0",
fmt.Sprintf("o=%s 0 0 IN IP4 %s", device.DeviceId, device.MediaIp),
"s=Download", // 下载模式
fmt.Sprintf("u=%s:0", d.ChannelId),
"c=IN IP4 " + device.MediaIp,
fmt.Sprintf("t=%d %d", startTimestamp, endTimestamp),
}
// Add the media line
var mediaLine string
switch device.StreamMode {
case mrtp.StreamModeTCPPassive, mrtp.StreamModeTCPActive:
mediaLine = fmt.Sprintf("m=video %d TCP/RTP/AVP 96", d.MediaPort)
case mrtp.StreamModeUDP:
mediaLine = fmt.Sprintf("m=video %d RTP/AVP 96", d.MediaPort)
default:
mediaLine = fmt.Sprintf("m=video %d TCP/RTP/AVP 96", d.MediaPort)
}
sdpInfo = append(sdpInfo, mediaLine)
sdpInfo = append(sdpInfo, "a=recvonly")
sdpInfo = append(sdpInfo, "a=rtpmap:96 PS/90000")
// Add setup and connection attributes based on the transport mode
switch device.StreamMode {
case mrtp.StreamModeTCPPassive:
sdpInfo = append(sdpInfo, "a=setup:passive", "a=connection:new")
case mrtp.StreamModeTCPActive:
sdpInfo = append(sdpInfo, "a=setup:active", "a=connection:new")
case mrtp.StreamModeUDP:
sdpInfo = append(sdpInfo, "a=setup:active", "a=connection:new")
default:
sdpInfo = append(sdpInfo, "a=setup:passive", "a=connection:new")
}
// Add the download speed attribute
downloadSpeed := d.DownloadSpeed
if downloadSpeed <= 0 || downloadSpeed > 4 {
downloadSpeed = 4 // out-of-range values fall back to 4x
}
sdpInfo = append(sdpInfo, fmt.Sprintf("a=downloadspeed:%d", downloadSpeed))
// Add the SSRC
ssrcStr := strconv.FormatUint(uint64(d.SSRC), 10)
sdpInfo = append(sdpInfo, fmt.Sprintf("y=%s", ssrcStr))
// 5. Create the INVITE request
request := sip.NewRequest(sip.INVITE, sip.Uri{User: d.ChannelId, Host: device.IP})
subject := fmt.Sprintf("%s:%s,%s:0", d.ChannelId, ssrcStr, device.DeviceId)
contentTypeHeader := sip.ContentTypeHeader("APPLICATION/SDP")
subjectHeader := sip.NewHeader("Subject", subject)
request.SetBody([]byte(strings.Join(sdpInfo, "\r\n") + "\r\n"))
recipient := device.Recipient
recipient.User = d.ChannelId
fromHDR := sip.FromHeader{
Address: sip.Uri{
User: d.gb.Serial,
Host: device.MediaIp,
Port: device.LocalPort,
},
Params: sip.NewParams(),
}
toHeader := sip.ToHeader{
Address: sip.Uri{User: d.ChannelId, Host: d.ChannelId[0:10]},
}
fromHDR.Params.Add("tag", sip.GenerateTagN(16))
// 6. Create the session and send the INVITE
dialogClientCache := sipgo.NewDialogClientCache(device.client, device.contactHDR)
d.gb.Info("发送 INVITE 请求",
"deviceId", d.DeviceId,
"channelId", d.ChannelId,
"startTime", d.StartTime,
"endTime", d.EndTime,
"ssrc", ssrcStr)
session, err := dialogClientCache.Invite(d.gb, recipient, request.Body(), &fromHDR, &toHeader, subjectHeader, &contentTypeHeader)
if err != nil {
return fmt.Errorf("发送 INVITE 失败: %w", err)
}
d.session = session
return nil
}
// Go runs the download session (asynchronous, supports concurrency)
func (d *DownloadDialog) Go() error {
// 1. Wait for the 200 OK response
err := d.session.WaitAnswer(d.gb, sipgo.AnswerOptions{})
if err != nil {
d.Status = "failed"
d.Error = fmt.Sprintf("等待响应失败: %v", err)
return fmt.Errorf("等待响应失败: %w", err)
}
// 2. Parse the response
inviteResponseBody := string(d.session.InviteResponse.Body())
d.gb.Info("收到 INVITE 响应", "body", inviteResponseBody)
ds := strings.Split(inviteResponseBody, "\r\n")
for _, l := range ds {
if ls := strings.Split(l, "="); len(ls) > 1 {
switch ls[0] {
case "y":
if len(ls[1]) > 0 {
if _ssrc, err := strconv.ParseInt(ls[1], 10, 0); err == nil {
d.SSRC = uint32(_ssrc)
}
}
case "c":
parts := strings.Split(ls[1], " ")
if len(parts) >= 3 {
d.targetIP = parts[len(parts)-1]
}
case "m":
if d.device.StreamMode == mrtp.StreamModeTCPActive {
parts := strings.Split(ls[1], " ")
if len(parts) >= 2 {
if port, err := strconv.Atoi(parts[1]); err == nil {
d.targetPort = uint16(port)
}
}
} else {
d.targetPort = d.MediaPort
}
}
}
}
// 3. Send the ACK
err = d.session.Ack(d.gb)
if err != nil {
d.Status = "failed"
d.Error = fmt.Sprintf("发送 ACK 失败: %v", err)
return fmt.Errorf("发送 ACK 失败: %w", err)
}
d.gb.Info("下载会话已建立",
"ssrc", d.SSRC,
"targetIP", d.targetIP,
"targetPort", d.targetPort)
// 4. Use a concise stream path format
// Format: gbdownload_{timestamp}/{deviceId}/{channelId}
streamPath := fmt.Sprintf("%s%s/%s/%s", "gbdownload_", time.Now().Local().Format("20060102150405"), d.DeviceId, d.ChannelId)
// 5. Create a temporary Publisher for the download
// Use larger buffers to support high-speed downloads and avoid dropping frames
pubConf := d.gb.GetCommonConf().Publish
pubConf.RingSize[0] = 1024 // larger minimum ring size
pubConf.RingSize[1] = 4096 // larger maximum ring size
pubConf.MaxFPS = 0 // disable the FPS limit to avoid dropping frames
pubConf.PubType = m7s.PublishTypeVod
publisher, err := d.gb.PublishWithConfig(d, streamPath, pubConf)
if err != nil {
d.Status = "failed"
d.Error = fmt.Sprintf("创建 Publisher 失败: %v", err)
return fmt.Errorf("创建 Publisher 失败: %w", err)
}
// 6. Create a PSReceiver to receive RTP and parse the PS stream
var psReceiver mrtp.PSReceiver
psReceiver.Publisher = publisher
// Listen for the Publisher stop event and proactively stop the PSReceiver,
// so it does not keep blocking on data after the Publisher times out
publisher.OnStop(func() {
d.gb.Info("Publisher 已停止,主动停止 PSReceiver",
"downloadId", d.DownloadId,
"progress", d.Progress)
psReceiver.Stop(io.EOF)
})
// Configure the receiver
switch d.device.StreamMode {
case mrtp.StreamModeTCPActive:
psReceiver.ListenAddr = fmt.Sprintf("%s:%d", d.targetIP, d.targetPort)
case mrtp.StreamModeTCPPassive:
if d.gb.tcpPort > 0 {
// single-port mode
reader := &gb28181.SinglePortReader{
SSRC: d.SSRC,
Mouth: make(chan []byte, 1),
Context: d,
}
var loaded bool
reader, loaded = d.gb.singlePorts.LoadOrStore(reader)
if loaded {
reader.Context = d
}
psReceiver.SinglePort = reader
d.OnStop(func() {
reader.Close()
d.gb.singlePorts.Remove(reader)
})
}
psReceiver.ListenAddr = fmt.Sprintf(":%d", d.MediaPort)
case mrtp.StreamModeUDP:
if d.gb.udpPort > 0 {
// single-port mode
reader := &gb28181.SinglePortReader{
SSRC: d.SSRC,
Mouth: make(chan []byte, 100),
Context: d,
}
var loaded bool
reader, loaded = d.gb.singlePorts.LoadOrStore(reader)
if loaded {
reader.Context = d
}
psReceiver.SinglePort = reader
d.OnStop(func() {
reader.Close()
d.gb.singlePorts.Remove(reader)
})
}
psReceiver.ListenAddr = fmt.Sprintf(":%d", d.MediaPort)
}
psReceiver.StreamMode = d.device.StreamMode
// 7. Create a Recorder that subscribes to the Publisher and records
// Use the MP4 plugin's standard recording configuration
if mp4Plugin, ok := d.gb.Server.Plugins.Get("MP4"); ok && mp4Plugin.Meta.NewRecorder != nil {
// Generate the file path: record/{deviceId}/{channelId}/{timestamp}
// Fragment=0 means no fragmentation; FilePath is the full path (without the .mp4 extension)
filePath := filepath.Join("record", streamPath)
recConf := config.Record{
Fragment: 0, // no fragmentation, single file
FilePath: filePath, // full path (without extension)
}
// Create the recording task via Plugin.Record
mp4Plugin.Record(publisher, recConf, nil)
d.gb.Info("MP4 录制器已创建", "streamPath", streamPath, "filePath", filePath)
} else {
d.gb.Warn("MP4 插件未加载,无法录制")
}
d.gb.Info("开始接收 RTP 数据并录制", "streamPath", streamPath)
// 8. Set the progress-update callback (fired inside the RTP read loop, no separate goroutine needed)
totalDuration := d.EndTime.Sub(d.StartTime).Seconds()
psReceiver.OnProgressUpdate = func() {
d.updateProgress(&psReceiver, totalDuration)
}
// 9. Run the PSReceiver via RunTask (blocks until it finishes)
err = d.RunTask(&psReceiver)
// 10. Task finished, update the status
if err != nil {
// Decide whether this is a normal end: EOF/timeout while the RTP timestamp has stabilized (the stream really ended)
errStr := err.Error()
isNormalEnd := err == io.EOF ||
strings.Contains(errStr, "EOF") ||
strings.Contains(errStr, "timeout")
// A stable timestamp means the device has stopped sending data and the stream has truly ended
timestampStable := psReceiver.IsTimestampStable()
if isNormalEnd && timestampStable {
d.gb.Info("下载完成RTP 时间戳已稳定,视为成功",
"downloadId", d.DownloadId,
"progress", d.Progress,
"error", errStr)
d.Status = "completed"
d.Progress = 100
d.Error = "" // 清除错误信息
} else {
d.Status = "failed"
d.Error = err.Error()
d.gb.Warn("下载失败",
"downloadId", d.DownloadId,
"progress", d.Progress,
"timestampStable", timestampStable,
"error", errStr)
}
} else {
d.Status = "completed"
d.Progress = 100
}
d.CompletedAt = time.Now()
// 11. Wait 5 seconds before returning so the frontend can still poll the 100% state
if d.Status == "completed" {
d.gb.Info("下载任务已完成,延迟 5 秒后释放资源(确保前端获取到 100% 状态)",
"downloadId", d.DownloadId,
"progress", d.Progress)
time.Sleep(5 * time.Second)
d.gb.Info("延迟时间到,准备释放资源", "downloadId", d.DownloadId)
}
return err
}
// updateProgress updates the download progress (invoked via callback inside the RTP read loop)
func (d *DownloadDialog) updateProgress(psReceiver *mrtp.PSReceiver, totalDuration float64) {
// Progress is computed from the RTP timestamp (independent of the playback speed)
elapsedSeconds := psReceiver.GetElapsedSeconds()
progress := int(elapsedSeconds / totalDuration * 100)
if progress > 100 {
progress = 100
}
if progress < 0 {
progress = 0
}
d.Progress = progress
// Try to fetch file info from the MP4 plugin's database
if mp4Plugin, ok := d.gb.Server.Plugins.Get("MP4"); ok {
if mp4Plugin.DB != nil {
var record m7s.RecordStream
// Query the most recent recording record
if err := mp4Plugin.DB.Where("stream_path = ? AND type = ?", psReceiver.Publisher.StreamPath, "mp4").
Order("start_time DESC").First(&record).Error; err == nil {
d.FilePath = record.FilePath
// Build the download URL from record.ID (single-file download),
// so the file can be downloaded whether or not recording has finished
d.DownloadUrl = fmt.Sprintf("/mp4/download/%s.mp4?id=%d",
psReceiver.Publisher.StreamPath,
record.ID)
// Get the file size
if fileInfo, err := os.Stat(record.FilePath); err == nil {
d.DownloadedBytes = fileInfo.Size()
// Estimate the total size from the current progress
if progress > 0 && progress < 100 {
d.TotalBytes = d.DownloadedBytes * 100 / int64(progress)
} else if progress >= 100 {
d.TotalBytes = d.DownloadedBytes
}
}
}
}
}
d.gb.Info("下载进度更新",
"downloadId", d.DownloadId,
"elapsedSeconds", elapsedSeconds,
"totalDuration", totalDuration,
"progress", progress,
"downloadedBytes", d.DownloadedBytes,
"totalBytes", d.TotalBytes,
"filePath", d.FilePath)
}
// Dispose releases resources
func (d *DownloadDialog) Dispose() {
switch d.device.StreamMode {
case mrtp.StreamModeUDP:
if d.gb.udpPort == 0 { // multi-port mode
// Recycle the port, guarding against double release
if !d.gb.udpPB.Release(d.MediaPort) {
d.Warn("port already released or not allocated", "port", d.MediaPort, "type", "udp")
}
}
case mrtp.StreamModeTCPPassive:
if d.gb.tcpPort == 0 { // multi-port mode
// Recycle the port, guarding against double release
if !d.gb.tcpPB.Release(d.MediaPort) {
d.Warn("port already released or not allocated", "port", d.MediaPort, "type", "tcp")
}
}
}
// 2. Log the dispose
d.gb.Info("download dialog dispose",
"downloadId", d.DownloadId,
"ssrc", d.SSRC,
"mediaPort", d.MediaPort,
"deviceId", d.DeviceId,
"channelId", d.ChannelId,
"status", d.Status)
// 3. Send BYE to terminate the session
if d.session != nil && d.session.InviteResponse != nil {
err := d.session.Bye(d)
if err != nil {
d.gb.Error("发送 BYE 失败", "error", err)
}
}
}
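Allocate and Release here come from the new PortBitmap type that replaces the former channel-based port pools; its implementation is not shown in this diff. A minimal sketch consistent with the usage visible here — Init(start, size), Allocate returning (port, ok), Release returning false on a double release — with names and bodies that are illustrative rather than the actual m7s code:

package gb28181sketch

import "sync"

// portBitmap hands out ports from [start, start+size) and tracks them in a bitmap,
// so Release can detect double frees — the behaviour the Dispose methods above rely on.
type portBitmap struct {
	mu    sync.Mutex
	start uint16
	used  []bool
}

func (p *portBitmap) Init(start, size uint16) {
	p.start = start
	p.used = make([]bool, size)
}

// Allocate returns the first free port, or ok=false when the range is exhausted.
func (p *portBitmap) Allocate() (port uint16, ok bool) {
	p.mu.Lock()
	defer p.mu.Unlock()
	for i, inUse := range p.used {
		if !inUse {
			p.used[i] = true
			return p.start + uint16(i), true
		}
	}
	return 0, false
}

// Release frees a port and reports false if it was out of range or not allocated.
func (p *portBitmap) Release(port uint16) bool {
	p.mu.Lock()
	defer p.mu.Unlock()
	i := int(port) - int(p.start)
	if i < 0 || i >= len(p.used) || !p.used[i] {
		return false
	}
	p.used[i] = false
	return true
}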

View File

@@ -79,12 +79,9 @@ func (d *ForwardDialog) Start() (err error) {
if device.StreamMode != mrtp.StreamModeTCPActive {
if d.gb.MediaPort.Valid() {
select {
case d.MediaPort = <-d.gb.tcpPorts:
defer func() {
d.gb.tcpPorts <- d.MediaPort
}()
default:
var ok bool
d.MediaPort, ok = d.gb.tcpPB.Allocate()
if !ok {
return fmt.Errorf("no available tcp port")
}
} else {
@@ -283,6 +280,12 @@ func (d *ForwardDialog) Run() (err error) {
// Dispose 释放会话资源
func (d *ForwardDialog) Dispose() {
// Release the port (when in multi-port mode)
if d.MediaPort > 0 && d.gb.tcpPort == 0 {
if !d.gb.tcpPB.Release(d.MediaPort) {
d.Warn("port already released or not allocated", "port", d.MediaPort, "type", "tcp")
}
}
if d.session != nil && d.session.InviteResponse != nil {
err := d.session.Bye(d)
if err != nil {

View File

@@ -47,31 +47,33 @@ type PositionConfig struct {
type GB28181Plugin struct {
pb.UnimplementedApiServer
m7s.Plugin
Serial string `default:"34020000002000000001" desc:"sip 服务 id"` //sip 服务器 id, 默认 34020000002000000001
Realm string `default:"3402000000" desc:"sip 服务域"` //sip 服务器域,默认 3402000000
Password string
Sip SipConfig
MediaPort util.Range[uint16] `default:"10001-20000" desc:"媒体端口范围"` //媒体端口范围
Position PositionConfig
Parent string `desc:"父级设备"`
AutoMigrate bool `default:"true" desc:"自动迁移数据库结构并初始化根组织"`
ua *sipgo.UserAgent
server *sipgo.Server
devices task.WorkCollection[string, *Device]
dialogs util.Collection[string, *Dialog]
forwardDialogs util.Collection[uint32, *ForwardDialog]
platforms task.WorkCollection[string, *Platform]
tcpPorts chan uint16
tcpPort uint16
Serial string `default:"34020000002000000001" desc:"sip 服务 id"` //sip 服务器 id, 默认 34020000002000000001
Realm string `default:"3402000000" desc:"sip 服务域"` //sip 服务器域,默认 3402000000
Password string
Sip SipConfig
MediaPort util.Range[uint16] `default:"10001-20000" desc:"媒体端口范围"` //媒体端口范围
Position PositionConfig
Parent string `desc:"父级设备"`
AutoMigrate bool `default:"true" desc:"自动迁移数据库结构并初始化根组织"`
ua *sipgo.UserAgent
server *sipgo.Server
devices task.WorkCollection[string, *Device]
dialogs util.Collection[string, *Dialog]
forwardDialogs util.Collection[uint32, *ForwardDialog]
platforms task.WorkCollection[string, *Platform]
tcpPort uint16 // TCP port in single-port mode
udpPort uint16 // UDP port in single-port mode
// port bitmaps (multi-port mode)
tcpPB PortBitmap
udpPB PortBitmap
sipPorts []int
SipIP string `desc:"sip发送命令的IP一般是本地IP多网卡时需要配置正确的IP"`
MediaIP string `desc:"流媒体IP用于接收流"`
deviceRegisterManager task.WorkCollection[string, *DeviceRegisterQueueTask]
Platforms []*gb28181.PlatformModel
channels util.Collection[string, *Channel]
udpPorts chan uint16
udpPort uint16
singlePorts util.Collection[uint32, *gb28181.SinglePortReader]
downloadDialogs task.WorkCollection[string, *DownloadDialog]
}
var _ = m7s.InstallPlugin[GB28181Plugin](m7s.PluginMeta{
@@ -86,6 +88,13 @@ var _ = m7s.InstallPlugin[GB28181Plugin](m7s.PluginMeta{
NewPullProxy: NewPullProxy,
})
// RegisterHandler registers custom HTTP routes
func (gb *GB28181Plugin) RegisterHandler() map[string]http.HandlerFunc {
return map[string]http.HandlerFunc{
"/download/{deviceId}/{channelId}/{filename}": gb.handleDownloadFile,
}
}
func init() {
sip.SIPDebug = true
}
@@ -160,6 +169,7 @@ func (gb *GB28181Plugin) Start() (err error) {
gb.AddTask(&gb.devices)
gb.AddTask(&gb.platforms)
gb.AddTask(&gb.deviceRegisterManager)
gb.AddTask(&gb.downloadDialogs)
gb.dialogs.L = new(sync.RWMutex)
gb.forwardDialogs.L = new(sync.RWMutex)
gb.singlePorts.L = new(sync.RWMutex)
@@ -185,12 +195,9 @@ func (gb *GB28181Plugin) Start() (err error) {
Collection: &gb.singlePorts,
})
} else {
gb.tcpPorts = make(chan uint16, gb.MediaPort.Size())
gb.udpPorts = make(chan uint16, gb.MediaPort.Size())
for i := range gb.MediaPort.Size() {
gb.tcpPorts <- gb.MediaPort[0] + i
gb.udpPorts <- gb.MediaPort[0] + i
}
// Initialize the port bitmaps
gb.tcpPB.Init(gb.MediaPort[0], uint16(gb.MediaPort.Size()))
gb.udpPB.Init(gb.MediaPort[0], uint16(gb.MediaPort.Size()))
}
} else {
gb.SetDescription("tcp", fmt.Sprintf("%d", gb.MediaPort[0]))
@@ -849,15 +856,14 @@ func (gb *GB28181Plugin) OnInvite(req *sip.Request, tx sip.ServerTransaction) {
mediaPort := uint16(0)
if inviteInfo.StreamMode != mrtp.StreamModeTCPPassive {
if gb.MediaPort.Valid() {
select {
case port := <-gb.tcpPorts:
mediaPort = port
gb.Debug("OnInvite", "action", "allocate port", "port", port)
default:
var ok bool
mediaPort, ok = gb.tcpPB.Allocate()
if !ok {
gb.Error("OnInvite", "error", "no available port")
_ = tx.Respond(sip.NewResponseFromRequest(req, sip.StatusServiceUnavailable, "No Available Port", nil))
return
}
gb.Debug("OnInvite", "action", "allocate port", "port", mediaPort)
} else {
mediaPort = gb.MediaPort[0]
gb.Debug("OnInvite", "action", "use default port", "port", mediaPort)

View File

@@ -6915,6 +6915,444 @@ func (x *DeleteChannelWithProxyRequest) GetChannelId() string {
return ""
}
// StartDownloadRequest is the request to start a recording download
type StartDownloadRequest struct {
state protoimpl.MessageState `protogen:"open.v1"`
DeviceId string `protobuf:"bytes,1,opt,name=deviceId,proto3" json:"deviceId,omitempty"` // device ID (URL path parameter)
ChannelId string `protobuf:"bytes,2,opt,name=channelId,proto3" json:"channelId,omitempty"` // channel ID (URL path parameter)
Start string `protobuf:"bytes,3,opt,name=start,proto3" json:"start,omitempty"` // start time: UTC timestamp in seconds or RFC3339 format
End string `protobuf:"bytes,4,opt,name=end,proto3" json:"end,omitempty"` // end time: UTC timestamp in seconds or RFC3339 format
DownloadSpeed int32 `protobuf:"varint,5,opt,name=downloadSpeed,proto3" json:"downloadSpeed,omitempty"` // download speed multiplier, 1-4x; defaults to 1x to avoid frame loss
unknownFields protoimpl.UnknownFields
sizeCache protoimpl.SizeCache
}
func (x *StartDownloadRequest) Reset() {
*x = StartDownloadRequest{}
mi := &file_gb28181_proto_msgTypes[91]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
func (x *StartDownloadRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*StartDownloadRequest) ProtoMessage() {}
func (x *StartDownloadRequest) ProtoReflect() protoreflect.Message {
mi := &file_gb28181_proto_msgTypes[91]
if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use StartDownloadRequest.ProtoReflect.Descriptor instead.
func (*StartDownloadRequest) Descriptor() ([]byte, []int) {
return file_gb28181_proto_rawDescGZIP(), []int{91}
}
func (x *StartDownloadRequest) GetDeviceId() string {
if x != nil {
return x.DeviceId
}
return ""
}
func (x *StartDownloadRequest) GetChannelId() string {
if x != nil {
return x.ChannelId
}
return ""
}
func (x *StartDownloadRequest) GetStart() string {
if x != nil {
return x.Start
}
return ""
}
func (x *StartDownloadRequest) GetEnd() string {
if x != nil {
return x.End
}
return ""
}
func (x *StartDownloadRequest) GetDownloadSpeed() int32 {
if x != nil {
return x.DownloadSpeed
}
return 0
}
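The start/end fields above are documented to accept either a UTC Unix timestamp in seconds or an RFC3339 string. One possible way to parse such a dual-format value (an illustrative helper, not necessarily how the plugin does it):

package gb28181sketch

import (
	"strconv"
	"time"
)

// parseStartEnd accepts either a Unix timestamp in seconds ("1730246400") or an
// RFC3339 string ("2024-10-30T00:00:00Z"), matching the formats the start/end
// fields are documented to carry.
func parseStartEnd(s string) (time.Time, error) {
	if secs, err := strconv.ParseInt(s, 10, 64); err == nil {
		return time.Unix(secs, 0).UTC(), nil
	}
	return time.Parse(time.RFC3339, s)
}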
// StartDownloadData carries the download task data
type StartDownloadData struct {
state protoimpl.MessageState `protogen:"open.v1"`
DownloadId string `protobuf:"bytes,1,opt,name=downloadId,proto3" json:"downloadId,omitempty"` // download task ID, format: startTime_endTime_deviceId_channelId
Status string `protobuf:"bytes,2,opt,name=status,proto3" json:"status,omitempty"` // initial status: pending
DownloadUrl string `protobuf:"bytes,3,opt,name=downloadUrl,proto3" json:"downloadUrl,omitempty"` // download link (directly accessible once completed)
unknownFields protoimpl.UnknownFields
sizeCache protoimpl.SizeCache
}
func (x *StartDownloadData) Reset() {
*x = StartDownloadData{}
mi := &file_gb28181_proto_msgTypes[92]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
func (x *StartDownloadData) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*StartDownloadData) ProtoMessage() {}
func (x *StartDownloadData) ProtoReflect() protoreflect.Message {
mi := &file_gb28181_proto_msgTypes[92]
if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use StartDownloadData.ProtoReflect.Descriptor instead.
func (*StartDownloadData) Descriptor() ([]byte, []int) {
return file_gb28181_proto_rawDescGZIP(), []int{92}
}
func (x *StartDownloadData) GetDownloadId() string {
if x != nil {
return x.DownloadId
}
return ""
}
func (x *StartDownloadData) GetStatus() string {
if x != nil {
return x.Status
}
return ""
}
func (x *StartDownloadData) GetDownloadUrl() string {
if x != nil {
return x.DownloadUrl
}
return ""
}
// StartDownloadResponse is the response to a start-download request
type StartDownloadResponse struct {
state protoimpl.MessageState `protogen:"open.v1"`
Code int32 `protobuf:"varint,1,opt,name=code,proto3" json:"code,omitempty"` // response code
Message string `protobuf:"bytes,2,opt,name=message,proto3" json:"message,omitempty"` // response message
Total int32 `protobuf:"varint,3,opt,name=total,proto3" json:"total,omitempty"` // total count (kept for the unified response format; 0 here)
Data *StartDownloadData `protobuf:"bytes,4,opt,name=data,proto3" json:"data,omitempty"` // download task data
unknownFields protoimpl.UnknownFields
sizeCache protoimpl.SizeCache
}
func (x *StartDownloadResponse) Reset() {
*x = StartDownloadResponse{}
mi := &file_gb28181_proto_msgTypes[93]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
func (x *StartDownloadResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*StartDownloadResponse) ProtoMessage() {}
func (x *StartDownloadResponse) ProtoReflect() protoreflect.Message {
mi := &file_gb28181_proto_msgTypes[93]
if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use StartDownloadResponse.ProtoReflect.Descriptor instead.
func (*StartDownloadResponse) Descriptor() ([]byte, []int) {
return file_gb28181_proto_rawDescGZIP(), []int{93}
}
func (x *StartDownloadResponse) GetCode() int32 {
if x != nil {
return x.Code
}
return 0
}
func (x *StartDownloadResponse) GetMessage() string {
if x != nil {
return x.Message
}
return ""
}
func (x *StartDownloadResponse) GetTotal() int32 {
if x != nil {
return x.Total
}
return 0
}
func (x *StartDownloadResponse) GetData() *StartDownloadData {
if x != nil {
return x.Data
}
return nil
}
// GetDownloadProgressRequest is the request to query download progress
type GetDownloadProgressRequest struct {
state protoimpl.MessageState `protogen:"open.v1"`
DownloadId string `protobuf:"bytes,1,opt,name=downloadId,proto3" json:"downloadId,omitempty"` // download task ID (URL path parameter)
unknownFields protoimpl.UnknownFields
sizeCache protoimpl.SizeCache
}
func (x *GetDownloadProgressRequest) Reset() {
*x = GetDownloadProgressRequest{}
mi := &file_gb28181_proto_msgTypes[94]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
func (x *GetDownloadProgressRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*GetDownloadProgressRequest) ProtoMessage() {}
func (x *GetDownloadProgressRequest) ProtoReflect() protoreflect.Message {
mi := &file_gb28181_proto_msgTypes[94]
if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use GetDownloadProgressRequest.ProtoReflect.Descriptor instead.
func (*GetDownloadProgressRequest) Descriptor() ([]byte, []int) {
return file_gb28181_proto_rawDescGZIP(), []int{94}
}
func (x *GetDownloadProgressRequest) GetDownloadId() string {
if x != nil {
return x.DownloadId
}
return ""
}
// DownloadProgressData carries the download progress data
type DownloadProgressData struct {
state protoimpl.MessageState `protogen:"open.v1"`
DownloadId string `protobuf:"bytes,1,opt,name=downloadId,proto3" json:"downloadId,omitempty"` // download task ID
Status string `protobuf:"bytes,2,opt,name=status,proto3" json:"status,omitempty"` // status: pending/downloading/completed/failed
Progress int32 `protobuf:"varint,3,opt,name=progress,proto3" json:"progress,omitempty"` // download progress, 0-100
FilePath string `protobuf:"bytes,4,opt,name=filePath,proto3" json:"filePath,omitempty"` // file path once completed
Error string `protobuf:"bytes,5,opt,name=error,proto3" json:"error,omitempty"` // error message (if failed)
DownloadedBytes int64 `protobuf:"varint,6,opt,name=downloadedBytes,proto3" json:"downloadedBytes,omitempty"` // bytes downloaded so far
TotalBytes int64 `protobuf:"varint,7,opt,name=totalBytes,proto3" json:"totalBytes,omitempty"` // total bytes (estimated)
StartedAt *timestamppb.Timestamp `protobuf:"bytes,8,opt,name=startedAt,proto3" json:"startedAt,omitempty"` // start time
CompletedAt *timestamppb.Timestamp `protobuf:"bytes,9,opt,name=completedAt,proto3" json:"completedAt,omitempty"` // completion time
DownloadUrl string `protobuf:"bytes,10,opt,name=downloadUrl,proto3" json:"downloadUrl,omitempty"` // download link (directly accessible once completed)
unknownFields protoimpl.UnknownFields
sizeCache protoimpl.SizeCache
}
func (x *DownloadProgressData) Reset() {
*x = DownloadProgressData{}
mi := &file_gb28181_proto_msgTypes[95]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
func (x *DownloadProgressData) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*DownloadProgressData) ProtoMessage() {}
func (x *DownloadProgressData) ProtoReflect() protoreflect.Message {
mi := &file_gb28181_proto_msgTypes[95]
if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use DownloadProgressData.ProtoReflect.Descriptor instead.
func (*DownloadProgressData) Descriptor() ([]byte, []int) {
return file_gb28181_proto_rawDescGZIP(), []int{95}
}
func (x *DownloadProgressData) GetDownloadId() string {
if x != nil {
return x.DownloadId
}
return ""
}
func (x *DownloadProgressData) GetStatus() string {
if x != nil {
return x.Status
}
return ""
}
func (x *DownloadProgressData) GetProgress() int32 {
if x != nil {
return x.Progress
}
return 0
}
func (x *DownloadProgressData) GetFilePath() string {
if x != nil {
return x.FilePath
}
return ""
}
func (x *DownloadProgressData) GetError() string {
if x != nil {
return x.Error
}
return ""
}
func (x *DownloadProgressData) GetDownloadedBytes() int64 {
if x != nil {
return x.DownloadedBytes
}
return 0
}
func (x *DownloadProgressData) GetTotalBytes() int64 {
if x != nil {
return x.TotalBytes
}
return 0
}
func (x *DownloadProgressData) GetStartedAt() *timestamppb.Timestamp {
if x != nil {
return x.StartedAt
}
return nil
}
func (x *DownloadProgressData) GetCompletedAt() *timestamppb.Timestamp {
if x != nil {
return x.CompletedAt
}
return nil
}
func (x *DownloadProgressData) GetDownloadUrl() string {
if x != nil {
return x.DownloadUrl
}
return ""
}
// DownloadProgressResponse is the response to a download progress query
type DownloadProgressResponse struct {
state protoimpl.MessageState `protogen:"open.v1"`
Code int32 `protobuf:"varint,1,opt,name=code,proto3" json:"code,omitempty"` // response code
Message string `protobuf:"bytes,2,opt,name=message,proto3" json:"message,omitempty"` // response message
Total int32 `protobuf:"varint,3,opt,name=total,proto3" json:"total,omitempty"` // total count (kept for the unified response format; 0 here)
Data *DownloadProgressData `protobuf:"bytes,4,opt,name=data,proto3" json:"data,omitempty"` // download progress data
unknownFields protoimpl.UnknownFields
sizeCache protoimpl.SizeCache
}
func (x *DownloadProgressResponse) Reset() {
*x = DownloadProgressResponse{}
mi := &file_gb28181_proto_msgTypes[96]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
func (x *DownloadProgressResponse) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*DownloadProgressResponse) ProtoMessage() {}
func (x *DownloadProgressResponse) ProtoReflect() protoreflect.Message {
mi := &file_gb28181_proto_msgTypes[96]
if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use DownloadProgressResponse.ProtoReflect.Descriptor instead.
func (*DownloadProgressResponse) Descriptor() ([]byte, []int) {
return file_gb28181_proto_rawDescGZIP(), []int{96}
}
func (x *DownloadProgressResponse) GetCode() int32 {
if x != nil {
return x.Code
}
return 0
}
func (x *DownloadProgressResponse) GetMessage() string {
if x != nil {
return x.Message
}
return ""
}
func (x *DownloadProgressResponse) GetTotal() int32 {
if x != nil {
return x.Total
}
return 0
}
func (x *DownloadProgressResponse) GetData() *DownloadProgressData {
if x != nil {
return x.Data
}
return nil
}
type AddGroupChannelRequest_Channel struct {
state protoimpl.MessageState `protogen:"open.v1"`
ChannelId string `protobuf:"bytes,1,opt,name=channelId,proto3" json:"channelId,omitempty"` // channel ID
@@ -6925,7 +7363,7 @@ type AddGroupChannelRequest_Channel struct {
func (x *AddGroupChannelRequest_Channel) Reset() {
*x = AddGroupChannelRequest_Channel{}
mi := &file_gb28181_proto_msgTypes[92]
mi := &file_gb28181_proto_msgTypes[98]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -6937,7 +7375,7 @@ func (x *AddGroupChannelRequest_Channel) String() string {
func (*AddGroupChannelRequest_Channel) ProtoMessage() {}
func (x *AddGroupChannelRequest_Channel) ProtoReflect() protoreflect.Message {
mi := &file_gb28181_proto_msgTypes[92]
mi := &file_gb28181_proto_msgTypes[98]
if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -7588,7 +8026,49 @@ const file_gb28181_proto_rawDesc = "" +
"\tlongitude\x18\" \x01(\tR\tlongitude\x12\x1a\n" +
"\blatitude\x18# \x01(\tR\blatitude\"=\n" +
"\x1dDeleteChannelWithProxyRequest\x12\x1c\n" +
"\tchannelId\x18\x01 \x01(\tR\tchannelId2\xc9C\n" +
"\tchannelId\x18\x01 \x01(\tR\tchannelId\"\x9e\x01\n" +
"\x14StartDownloadRequest\x12\x1a\n" +
"\bdeviceId\x18\x01 \x01(\tR\bdeviceId\x12\x1c\n" +
"\tchannelId\x18\x02 \x01(\tR\tchannelId\x12\x14\n" +
"\x05start\x18\x03 \x01(\tR\x05start\x12\x10\n" +
"\x03end\x18\x04 \x01(\tR\x03end\x12$\n" +
"\rdownloadSpeed\x18\x05 \x01(\x05R\rdownloadSpeed\"m\n" +
"\x11StartDownloadData\x12\x1e\n" +
"\n" +
"downloadId\x18\x01 \x01(\tR\n" +
"downloadId\x12\x16\n" +
"\x06status\x18\x02 \x01(\tR\x06status\x12 \n" +
"\vdownloadUrl\x18\x03 \x01(\tR\vdownloadUrl\"\x8e\x01\n" +
"\x15StartDownloadResponse\x12\x12\n" +
"\x04code\x18\x01 \x01(\x05R\x04code\x12\x18\n" +
"\amessage\x18\x02 \x01(\tR\amessage\x12\x14\n" +
"\x05total\x18\x03 \x01(\x05R\x05total\x121\n" +
"\x04data\x18\x04 \x01(\v2\x1d.gb28181pro.StartDownloadDataR\x04data\"<\n" +
"\x1aGetDownloadProgressRequest\x12\x1e\n" +
"\n" +
"downloadId\x18\x01 \x01(\tR\n" +
"downloadId\"\x80\x03\n" +
"\x14DownloadProgressData\x12\x1e\n" +
"\n" +
"downloadId\x18\x01 \x01(\tR\n" +
"downloadId\x12\x16\n" +
"\x06status\x18\x02 \x01(\tR\x06status\x12\x1a\n" +
"\bprogress\x18\x03 \x01(\x05R\bprogress\x12\x1a\n" +
"\bfilePath\x18\x04 \x01(\tR\bfilePath\x12\x14\n" +
"\x05error\x18\x05 \x01(\tR\x05error\x12(\n" +
"\x0fdownloadedBytes\x18\x06 \x01(\x03R\x0fdownloadedBytes\x12\x1e\n" +
"\n" +
"totalBytes\x18\a \x01(\x03R\n" +
"totalBytes\x128\n" +
"\tstartedAt\x18\b \x01(\v2\x1a.google.protobuf.TimestampR\tstartedAt\x12<\n" +
"\vcompletedAt\x18\t \x01(\v2\x1a.google.protobuf.TimestampR\vcompletedAt\x12 \n" +
"\vdownloadUrl\x18\n" +
" \x01(\tR\vdownloadUrl\"\x94\x01\n" +
"\x18DownloadProgressResponse\x12\x12\n" +
"\x04code\x18\x01 \x01(\x05R\x04code\x12\x18\n" +
"\amessage\x18\x02 \x01(\tR\amessage\x12\x14\n" +
"\x05total\x18\x03 \x01(\x05R\x05total\x124\n" +
"\x04data\x18\x04 \x01(\v2 .gb28181pro.DownloadProgressDataR\x04data2\xf1E\n" +
"\x03api\x12]\n" +
"\x04List\x12\x1d.gb28181pro.GetDevicesRequest\x1a\x1b.gb28181pro.DevicesPageInfo\"\x19\x82\xd3\xe4\x93\x02\x13\x12\x11/gb28181/api/list\x12n\n" +
"\tGetDevice\x12\x1c.gb28181pro.GetDeviceRequest\x1a\x1a.gb28181pro.DeviceResponse\"'\x82\xd3\xe4\x93\x02!\x12\x1f/gb28181/api/devices/{deviceId}\x12f\n" +
@@ -7666,7 +8146,9 @@ const file_gb28181_proto_rawDesc = "" +
"\fReceiveAlarm\x12\x1c.gb28181pro.AlarmInfoRequest\x1a\x18.gb28181pro.BaseResponse\"%\x82\xd3\xe4\x93\x02\x1f:\x01*\"\x1a/gb28181/api/alarm/receive\x12\x97\x01\n" +
"\x13AddChannelWithProxy\x12&.gb28181pro.AddChannelWithProxyRequest\x1a\x18.gb28181pro.BaseResponse\">\x82\xd3\xe4\x93\x028:\x01*\"3/gb28181/api/channel/add_with_proxy/{streamPath=**}\x12\x9c\x01\n" +
"\x16UpdateChannelWithProxy\x12).gb28181pro.UpdateChannelWithProxyRequest\x1a\x18.gb28181pro.BaseResponse\"=\x82\xd3\xe4\x93\x027:\x01*\"2/gb28181/api/channel/update_with_proxy/{channelId}\x12\x99\x01\n" +
"\x16DeleteChannelWithProxy\x12).gb28181pro.DeleteChannelWithProxyRequest\x1a\x18.gb28181pro.BaseResponse\":\x82\xd3\xe4\x93\x024\"2/gb28181/api/channel/delete_with_proxy/{channelId}B\x1fZ\x1dm7s.live/v5/plugin/gb28181/pbb\x06proto3"
"\x16DeleteChannelWithProxy\x12).gb28181pro.DeleteChannelWithProxyRequest\x1a\x18.gb28181pro.BaseResponse\":\x82\xd3\xe4\x93\x024\"2/gb28181/api/channel/delete_with_proxy/{channelId}\x12\x8a\x01\n" +
"\rStartDownload\x12 .gb28181pro.StartDownloadRequest\x1a!.gb28181pro.StartDownloadResponse\"4\x82\xd3\xe4\x93\x02.\x12,/gb28181/api/download/{deviceId}/{channelId}\x12\x98\x01\n" +
"\x13GetDownloadProgress\x12&.gb28181pro.GetDownloadProgressRequest\x1a$.gb28181pro.DownloadProgressResponse\"3\x82\xd3\xe4\x93\x02-\x12+/gb28181/api/download/progress/{downloadId}B\x1fZ\x1dm7s.live/v5/plugin/gb28181/pbb\x06proto3"
var (
file_gb28181_proto_rawDescOnce sync.Once
@@ -7680,7 +8162,7 @@ func file_gb28181_proto_rawDescGZIP() []byte {
return file_gb28181_proto_rawDescData
}
var file_gb28181_proto_msgTypes = make([]protoimpl.MessageInfo, 93)
var file_gb28181_proto_msgTypes = make([]protoimpl.MessageInfo, 99)
var file_gb28181_proto_goTypes = []any{
(*BaseResponse)(nil), // 0: gb28181pro.BaseResponse
(*GetDeviceRequest)(nil), // 1: gb28181pro.GetDeviceRequest
@@ -7773,23 +8255,29 @@ var file_gb28181_proto_goTypes = []any{
(*AddChannelWithProxyRequest)(nil), // 88: gb28181pro.AddChannelWithProxyRequest
(*UpdateChannelWithProxyRequest)(nil), // 89: gb28181pro.UpdateChannelWithProxyRequest
(*DeleteChannelWithProxyRequest)(nil), // 90: gb28181pro.DeleteChannelWithProxyRequest
nil, // 91: gb28181pro.SubscribeInfoResponse.DialogStateEntry
(*AddGroupChannelRequest_Channel)(nil), // 92: gb28181pro.AddGroupChannelRequest.Channel
(*timestamppb.Timestamp)(nil), // 93: google.protobuf.Timestamp
(*emptypb.Empty)(nil), // 94: google.protobuf.Empty
(*StartDownloadRequest)(nil), // 91: gb28181pro.StartDownloadRequest
(*StartDownloadData)(nil), // 92: gb28181pro.StartDownloadData
(*StartDownloadResponse)(nil), // 93: gb28181pro.StartDownloadResponse
(*GetDownloadProgressRequest)(nil), // 94: gb28181pro.GetDownloadProgressRequest
(*DownloadProgressData)(nil), // 95: gb28181pro.DownloadProgressData
(*DownloadProgressResponse)(nil), // 96: gb28181pro.DownloadProgressResponse
nil, // 97: gb28181pro.SubscribeInfoResponse.DialogStateEntry
(*AddGroupChannelRequest_Channel)(nil), // 98: gb28181pro.AddGroupChannelRequest.Channel
(*timestamppb.Timestamp)(nil), // 99: google.protobuf.Timestamp
(*emptypb.Empty)(nil), // 100: google.protobuf.Empty
}
var file_gb28181_proto_depIdxs = []int32{
12, // 0: gb28181pro.DevicesPageInfo.data:type_name -> gb28181pro.Device
11, // 1: gb28181pro.ChannelsPageInfo.list:type_name -> gb28181pro.Channel
93, // 2: gb28181pro.Channel.gpsTime:type_name -> google.protobuf.Timestamp
93, // 3: gb28181pro.Device.registerTime:type_name -> google.protobuf.Timestamp
93, // 4: gb28181pro.Device.updateTime:type_name -> google.protobuf.Timestamp
93, // 5: gb28181pro.Device.keepAliveTime:type_name -> google.protobuf.Timestamp
99, // 2: gb28181pro.Channel.gpsTime:type_name -> google.protobuf.Timestamp
99, // 3: gb28181pro.Device.registerTime:type_name -> google.protobuf.Timestamp
99, // 4: gb28181pro.Device.updateTime:type_name -> google.protobuf.Timestamp
99, // 5: gb28181pro.Device.keepAliveTime:type_name -> google.protobuf.Timestamp
11, // 6: gb28181pro.Device.channels:type_name -> gb28181pro.Channel
12, // 7: gb28181pro.ResponseList.data:type_name -> gb28181pro.Device
20, // 8: gb28181pro.DeviceAlarmResponse.data:type_name -> gb28181pro.AlarmInfo
11, // 9: gb28181pro.UpdateChannelRequest.channel:type_name -> gb28181pro.Channel
91, // 10: gb28181pro.SubscribeInfoResponse.dialogState:type_name -> gb28181pro.SubscribeInfoResponse.DialogStateEntry
97, // 10: gb28181pro.SubscribeInfoResponse.dialogState:type_name -> gb28181pro.SubscribeInfoResponse.DialogStateEntry
12, // 11: gb28181pro.DeviceResponse.data:type_name -> gb28181pro.Device
11, // 12: gb28181pro.ChannelResponse.data:type_name -> gb28181pro.Channel
33, // 13: gb28181pro.PlayResponse.stream_info:type_name -> gb28181pro.StreamInfo
@@ -7797,167 +8285,175 @@ var file_gb28181_proto_depIdxs = []int32{
39, // 15: gb28181pro.PlatformResponse.data:type_name -> gb28181pro.Platform
39, // 16: gb28181pro.PlatformsPageInfo.list:type_name -> gb28181pro.Platform
47, // 17: gb28181pro.QueryRecordResponse.data:type_name -> gb28181pro.RecordItem
93, // 18: gb28181pro.QueryRecordResponse.last_time:type_name -> google.protobuf.Timestamp
99, // 18: gb28181pro.QueryRecordResponse.last_time:type_name -> google.protobuf.Timestamp
65, // 19: gb28181pro.SearchAlarmsResponse.data:type_name -> gb28181pro.AlarmRecord
93, // 20: gb28181pro.AlarmRecord.alarmTime:type_name -> google.protobuf.Timestamp
93, // 21: gb28181pro.AlarmRecord.createTime:type_name -> google.protobuf.Timestamp
93, // 22: gb28181pro.Group.createTime:type_name -> google.protobuf.Timestamp
93, // 23: gb28181pro.Group.updateTime:type_name -> google.protobuf.Timestamp
99, // 20: gb28181pro.AlarmRecord.alarmTime:type_name -> google.protobuf.Timestamp
99, // 21: gb28181pro.AlarmRecord.createTime:type_name -> google.protobuf.Timestamp
99, // 22: gb28181pro.Group.createTime:type_name -> google.protobuf.Timestamp
99, // 23: gb28181pro.Group.updateTime:type_name -> google.protobuf.Timestamp
69, // 24: gb28181pro.Group.children:type_name -> gb28181pro.Group
79, // 25: gb28181pro.Group.channels:type_name -> gb28181pro.GroupChannel
69, // 26: gb28181pro.GroupResponse.data:type_name -> gb28181pro.Group
69, // 27: gb28181pro.GroupsListResponse.data:type_name -> gb28181pro.Group
69, // 28: gb28181pro.GroupsPageInfo.data:type_name -> gb28181pro.Group
92, // 29: gb28181pro.AddGroupChannelRequest.channels:type_name -> gb28181pro.AddGroupChannelRequest.Channel
98, // 29: gb28181pro.AddGroupChannelRequest.channels:type_name -> gb28181pro.AddGroupChannelRequest.Channel
81, // 30: gb28181pro.GroupChannelsResponse.data:type_name -> gb28181pro.GroupChannelsData
79, // 31: gb28181pro.GroupChannelsData.list:type_name -> gb28181pro.GroupChannel
79, // 32: gb28181pro.GroupChannelsData.channels:type_name -> gb28181pro.GroupChannel
93, // 33: gb28181pro.AlarmInfoRequest.createAt:type_name -> google.protobuf.Timestamp
2, // 34: gb28181pro.api.List:input_type -> gb28181pro.GetDevicesRequest
1, // 35: gb28181pro.api.GetDevice:input_type -> gb28181pro.GetDeviceRequest
2, // 36: gb28181pro.api.GetDevices:input_type -> gb28181pro.GetDevicesRequest
4, // 37: gb28181pro.api.GetChannels:input_type -> gb28181pro.GetChannelsRequest
6, // 38: gb28181pro.api.SyncDevice:input_type -> gb28181pro.SyncDeviceRequest
8, // 39: gb28181pro.api.DeleteDevice:input_type -> gb28181pro.DeleteDeviceRequest
10, // 40: gb28181pro.api.GetSubChannels:input_type -> gb28181pro.GetSubChannelsRequest
14, // 41: gb28181pro.api.ChangeAudio:input_type -> gb28181pro.ChangeAudioRequest
11, // 42: gb28181pro.api.UpdateChannelStreamIdentification:input_type -> gb28181pro.Channel
15, // 43: gb28181pro.api.UpdateTransport:input_type -> gb28181pro.UpdateTransportRequest
12, // 44: gb28181pro.api.AddDevice:input_type -> gb28181pro.Device
12, // 45: gb28181pro.api.UpdateDevice:input_type -> gb28181pro.Device
16, // 46: gb28181pro.api.GetDeviceStatus:input_type -> gb28181pro.GetDeviceStatusRequest
18, // 47: gb28181pro.api.GetDeviceAlarm:input_type -> gb28181pro.GetDeviceAlarmRequest
22, // 48: gb28181pro.api.GetSyncStatus:input_type -> gb28181pro.GetSyncStatusRequest
23, // 49: gb28181pro.api.GetSubscribeInfo:input_type -> gb28181pro.GetSubscribeInfoRequest
25, // 50: gb28181pro.api.GetSnap:input_type -> gb28181pro.GetSnapRequest
34, // 51: gb28181pro.api.StopConvert:input_type -> gb28181pro.ConvertStopRequest
35, // 52: gb28181pro.api.StartBroadcast:input_type -> gb28181pro.BroadcastRequest
35, // 53: gb28181pro.api.StopBroadcast:input_type -> gb28181pro.BroadcastRequest
94, // 54: gb28181pro.api.GetAllSSRC:input_type -> google.protobuf.Empty
27, // 55: gb28181pro.api.GetRawChannel:input_type -> gb28181pro.GetRawChannelRequest
39, // 56: gb28181pro.api.AddPlatform:input_type -> gb28181pro.Platform
40, // 57: gb28181pro.api.GetPlatform:input_type -> gb28181pro.GetPlatformRequest
39, // 58: gb28181pro.api.UpdatePlatform:input_type -> gb28181pro.Platform
41, // 59: gb28181pro.api.DeletePlatform:input_type -> gb28181pro.DeletePlatformRequest
42, // 60: gb28181pro.api.ListPlatforms:input_type -> gb28181pro.ListPlatformsRequest
45, // 61: gb28181pro.api.QueryRecord:input_type -> gb28181pro.QueryRecordRequest
48, // 62: gb28181pro.api.PtzControl:input_type -> gb28181pro.PtzControlRequest
49, // 63: gb28181pro.api.IrisControl:input_type -> gb28181pro.IrisControlRequest
50, // 64: gb28181pro.api.FocusControl:input_type -> gb28181pro.FocusControlRequest
51, // 65: gb28181pro.api.QueryPreset:input_type -> gb28181pro.PresetRequest
51, // 66: gb28181pro.api.AddPreset:input_type -> gb28181pro.PresetRequest
51, // 67: gb28181pro.api.CallPreset:input_type -> gb28181pro.PresetRequest
51, // 68: gb28181pro.api.DeletePreset:input_type -> gb28181pro.PresetRequest
53, // 69: gb28181pro.api.AddCruisePoint:input_type -> gb28181pro.CruisePointRequest
53, // 70: gb28181pro.api.DeleteCruisePoint:input_type -> gb28181pro.CruisePointRequest
54, // 71: gb28181pro.api.SetCruiseSpeed:input_type -> gb28181pro.CruiseSpeedRequest
55, // 72: gb28181pro.api.SetCruiseTime:input_type -> gb28181pro.CruiseTimeRequest
56, // 73: gb28181pro.api.StartCruise:input_type -> gb28181pro.CruiseRequest
56, // 74: gb28181pro.api.StopCruise:input_type -> gb28181pro.CruiseRequest
57, // 75: gb28181pro.api.StartScan:input_type -> gb28181pro.ScanRequest
57, // 76: gb28181pro.api.StopScan:input_type -> gb28181pro.ScanRequest
57, // 77: gb28181pro.api.SetScanLeft:input_type -> gb28181pro.ScanRequest
57, // 78: gb28181pro.api.SetScanRight:input_type -> gb28181pro.ScanRequest
58, // 79: gb28181pro.api.SetScanSpeed:input_type -> gb28181pro.ScanSpeedRequest
59, // 80: gb28181pro.api.WiperControl:input_type -> gb28181pro.WiperControlRequest
60, // 81: gb28181pro.api.AuxiliaryControl:input_type -> gb28181pro.AuxiliaryControlRequest
61, // 82: gb28181pro.api.TestSip:input_type -> gb28181pro.TestSipRequest
63, // 83: gb28181pro.api.SearchAlarms:input_type -> gb28181pro.SearchAlarmsRequest
66, // 84: gb28181pro.api.AddPlatformChannel:input_type -> gb28181pro.AddPlatformChannelRequest
67, // 85: gb28181pro.api.Recording:input_type -> gb28181pro.RecordingRequest
68, // 86: gb28181pro.api.UploadJpeg:input_type -> gb28181pro.UploadJpegRequest
21, // 87: gb28181pro.api.UpdateChannel:input_type -> gb28181pro.UpdateChannelRequest
82, // 88: gb28181pro.api.PlaybackPause:input_type -> gb28181pro.PlaybackPauseRequest
83, // 89: gb28181pro.api.PlaybackResume:input_type -> gb28181pro.PlaybackResumeRequest
84, // 90: gb28181pro.api.PlaybackSeek:input_type -> gb28181pro.PlaybackSeekRequest
85, // 91: gb28181pro.api.PlaybackSpeed:input_type -> gb28181pro.PlaybackSpeedRequest
70, // 92: gb28181pro.api.GetGroups:input_type -> gb28181pro.GetGroupsRequest
69, // 93: gb28181pro.api.AddGroup:input_type -> gb28181pro.Group
69, // 94: gb28181pro.api.UpdateGroup:input_type -> gb28181pro.Group
71, // 95: gb28181pro.api.DeleteGroup:input_type -> gb28181pro.DeleteGroupRequest
76, // 96: gb28181pro.api.AddGroupChannel:input_type -> gb28181pro.AddGroupChannelRequest
77, // 97: gb28181pro.api.DeleteGroupChannel:input_type -> gb28181pro.DeleteGroupChannelRequest
78, // 98: gb28181pro.api.GetGroupChannels:input_type -> gb28181pro.GetGroupChannelsRequest
86, // 99: gb28181pro.api.RemoveDevice:input_type -> gb28181pro.RemoveDeviceRequest
87, // 100: gb28181pro.api.ReceiveAlarm:input_type -> gb28181pro.AlarmInfoRequest
88, // 101: gb28181pro.api.AddChannelWithProxy:input_type -> gb28181pro.AddChannelWithProxyRequest
89, // 102: gb28181pro.api.UpdateChannelWithProxy:input_type -> gb28181pro.UpdateChannelWithProxyRequest
90, // 103: gb28181pro.api.DeleteChannelWithProxy:input_type -> gb28181pro.DeleteChannelWithProxyRequest
3, // 104: gb28181pro.api.List:output_type -> gb28181pro.DevicesPageInfo
28, // 105: gb28181pro.api.GetDevice:output_type -> gb28181pro.DeviceResponse
3, // 106: gb28181pro.api.GetDevices:output_type -> gb28181pro.DevicesPageInfo
5, // 107: gb28181pro.api.GetChannels:output_type -> gb28181pro.ChannelsPageInfo
7, // 108: gb28181pro.api.SyncDevice:output_type -> gb28181pro.SyncStatus
9, // 109: gb28181pro.api.DeleteDevice:output_type -> gb28181pro.DeleteDeviceResponse
5, // 110: gb28181pro.api.GetSubChannels:output_type -> gb28181pro.ChannelsPageInfo
0, // 111: gb28181pro.api.ChangeAudio:output_type -> gb28181pro.BaseResponse
0, // 112: gb28181pro.api.UpdateChannelStreamIdentification:output_type -> gb28181pro.BaseResponse
0, // 113: gb28181pro.api.UpdateTransport:output_type -> gb28181pro.BaseResponse
0, // 114: gb28181pro.api.AddDevice:output_type -> gb28181pro.BaseResponse
0, // 115: gb28181pro.api.UpdateDevice:output_type -> gb28181pro.BaseResponse
17, // 116: gb28181pro.api.GetDeviceStatus:output_type -> gb28181pro.DeviceStatusResponse
19, // 117: gb28181pro.api.GetDeviceAlarm:output_type -> gb28181pro.DeviceAlarmResponse
7, // 118: gb28181pro.api.GetSyncStatus:output_type -> gb28181pro.SyncStatus
24, // 119: gb28181pro.api.GetSubscribeInfo:output_type -> gb28181pro.SubscribeInfoResponse
26, // 120: gb28181pro.api.GetSnap:output_type -> gb28181pro.SnapResponse
0, // 121: gb28181pro.api.StopConvert:output_type -> gb28181pro.BaseResponse
36, // 122: gb28181pro.api.StartBroadcast:output_type -> gb28181pro.BroadcastResponse
0, // 123: gb28181pro.api.StopBroadcast:output_type -> gb28181pro.BaseResponse
38, // 124: gb28181pro.api.GetAllSSRC:output_type -> gb28181pro.SSRCListResponse
11, // 125: gb28181pro.api.GetRawChannel:output_type -> gb28181pro.Channel
0, // 126: gb28181pro.api.AddPlatform:output_type -> gb28181pro.BaseResponse
43, // 127: gb28181pro.api.GetPlatform:output_type -> gb28181pro.PlatformResponse
0, // 128: gb28181pro.api.UpdatePlatform:output_type -> gb28181pro.BaseResponse
0, // 129: gb28181pro.api.DeletePlatform:output_type -> gb28181pro.BaseResponse
44, // 130: gb28181pro.api.ListPlatforms:output_type -> gb28181pro.PlatformsPageInfo
46, // 131: gb28181pro.api.QueryRecord:output_type -> gb28181pro.QueryRecordResponse
0, // 132: gb28181pro.api.PtzControl:output_type -> gb28181pro.BaseResponse
0, // 133: gb28181pro.api.IrisControl:output_type -> gb28181pro.BaseResponse
0, // 134: gb28181pro.api.FocusControl:output_type -> gb28181pro.BaseResponse
52, // 135: gb28181pro.api.QueryPreset:output_type -> gb28181pro.PresetResponse
0, // 136: gb28181pro.api.AddPreset:output_type -> gb28181pro.BaseResponse
0, // 137: gb28181pro.api.CallPreset:output_type -> gb28181pro.BaseResponse
0, // 138: gb28181pro.api.DeletePreset:output_type -> gb28181pro.BaseResponse
0, // 139: gb28181pro.api.AddCruisePoint:output_type -> gb28181pro.BaseResponse
0, // 140: gb28181pro.api.DeleteCruisePoint:output_type -> gb28181pro.BaseResponse
0, // 141: gb28181pro.api.SetCruiseSpeed:output_type -> gb28181pro.BaseResponse
0, // 142: gb28181pro.api.SetCruiseTime:output_type -> gb28181pro.BaseResponse
0, // 143: gb28181pro.api.StartCruise:output_type -> gb28181pro.BaseResponse
0, // 144: gb28181pro.api.StopCruise:output_type -> gb28181pro.BaseResponse
0, // 145: gb28181pro.api.StartScan:output_type -> gb28181pro.BaseResponse
0, // 146: gb28181pro.api.StopScan:output_type -> gb28181pro.BaseResponse
0, // 147: gb28181pro.api.SetScanLeft:output_type -> gb28181pro.BaseResponse
0, // 148: gb28181pro.api.SetScanRight:output_type -> gb28181pro.BaseResponse
0, // 149: gb28181pro.api.SetScanSpeed:output_type -> gb28181pro.BaseResponse
0, // 150: gb28181pro.api.WiperControl:output_type -> gb28181pro.BaseResponse
0, // 151: gb28181pro.api.AuxiliaryControl:output_type -> gb28181pro.BaseResponse
62, // 152: gb28181pro.api.TestSip:output_type -> gb28181pro.TestSipResponse
64, // 153: gb28181pro.api.SearchAlarms:output_type -> gb28181pro.SearchAlarmsResponse
0, // 154: gb28181pro.api.AddPlatformChannel:output_type -> gb28181pro.BaseResponse
0, // 155: gb28181pro.api.Recording:output_type -> gb28181pro.BaseResponse
0, // 156: gb28181pro.api.UploadJpeg:output_type -> gb28181pro.BaseResponse
0, // 157: gb28181pro.api.UpdateChannel:output_type -> gb28181pro.BaseResponse
0, // 158: gb28181pro.api.PlaybackPause:output_type -> gb28181pro.BaseResponse
0, // 159: gb28181pro.api.PlaybackResume:output_type -> gb28181pro.BaseResponse
0, // 160: gb28181pro.api.PlaybackSeek:output_type -> gb28181pro.BaseResponse
0, // 161: gb28181pro.api.PlaybackSpeed:output_type -> gb28181pro.BaseResponse
73, // 162: gb28181pro.api.GetGroups:output_type -> gb28181pro.GroupsListResponse
0, // 163: gb28181pro.api.AddGroup:output_type -> gb28181pro.BaseResponse
0, // 164: gb28181pro.api.UpdateGroup:output_type -> gb28181pro.BaseResponse
0, // 165: gb28181pro.api.DeleteGroup:output_type -> gb28181pro.BaseResponse
0, // 166: gb28181pro.api.AddGroupChannel:output_type -> gb28181pro.BaseResponse
0, // 167: gb28181pro.api.DeleteGroupChannel:output_type -> gb28181pro.BaseResponse
80, // 168: gb28181pro.api.GetGroupChannels:output_type -> gb28181pro.GroupChannelsResponse
0, // 169: gb28181pro.api.RemoveDevice:output_type -> gb28181pro.BaseResponse
0, // 170: gb28181pro.api.ReceiveAlarm:output_type -> gb28181pro.BaseResponse
0, // 171: gb28181pro.api.AddChannelWithProxy:output_type -> gb28181pro.BaseResponse
0, // 172: gb28181pro.api.UpdateChannelWithProxy:output_type -> gb28181pro.BaseResponse
0, // 173: gb28181pro.api.DeleteChannelWithProxy:output_type -> gb28181pro.BaseResponse
104, // [104:174] is the sub-list for method output_type
34, // [34:104] is the sub-list for method input_type
34, // [34:34] is the sub-list for extension type_name
34, // [34:34] is the sub-list for extension extendee
0, // [0:34] is the sub-list for field type_name
99, // 33: gb28181pro.AlarmInfoRequest.createAt:type_name -> google.protobuf.Timestamp
92, // 34: gb28181pro.StartDownloadResponse.data:type_name -> gb28181pro.StartDownloadData
99, // 35: gb28181pro.DownloadProgressData.startedAt:type_name -> google.protobuf.Timestamp
99, // 36: gb28181pro.DownloadProgressData.completedAt:type_name -> google.protobuf.Timestamp
95, // 37: gb28181pro.DownloadProgressResponse.data:type_name -> gb28181pro.DownloadProgressData
2, // 38: gb28181pro.api.List:input_type -> gb28181pro.GetDevicesRequest
1, // 39: gb28181pro.api.GetDevice:input_type -> gb28181pro.GetDeviceRequest
2, // 40: gb28181pro.api.GetDevices:input_type -> gb28181pro.GetDevicesRequest
4, // 41: gb28181pro.api.GetChannels:input_type -> gb28181pro.GetChannelsRequest
6, // 42: gb28181pro.api.SyncDevice:input_type -> gb28181pro.SyncDeviceRequest
8, // 43: gb28181pro.api.DeleteDevice:input_type -> gb28181pro.DeleteDeviceRequest
10, // 44: gb28181pro.api.GetSubChannels:input_type -> gb28181pro.GetSubChannelsRequest
14, // 45: gb28181pro.api.ChangeAudio:input_type -> gb28181pro.ChangeAudioRequest
11, // 46: gb28181pro.api.UpdateChannelStreamIdentification:input_type -> gb28181pro.Channel
15, // 47: gb28181pro.api.UpdateTransport:input_type -> gb28181pro.UpdateTransportRequest
12, // 48: gb28181pro.api.AddDevice:input_type -> gb28181pro.Device
12, // 49: gb28181pro.api.UpdateDevice:input_type -> gb28181pro.Device
16, // 50: gb28181pro.api.GetDeviceStatus:input_type -> gb28181pro.GetDeviceStatusRequest
18, // 51: gb28181pro.api.GetDeviceAlarm:input_type -> gb28181pro.GetDeviceAlarmRequest
22, // 52: gb28181pro.api.GetSyncStatus:input_type -> gb28181pro.GetSyncStatusRequest
23, // 53: gb28181pro.api.GetSubscribeInfo:input_type -> gb28181pro.GetSubscribeInfoRequest
25, // 54: gb28181pro.api.GetSnap:input_type -> gb28181pro.GetSnapRequest
34, // 55: gb28181pro.api.StopConvert:input_type -> gb28181pro.ConvertStopRequest
35, // 56: gb28181pro.api.StartBroadcast:input_type -> gb28181pro.BroadcastRequest
35, // 57: gb28181pro.api.StopBroadcast:input_type -> gb28181pro.BroadcastRequest
100, // 58: gb28181pro.api.GetAllSSRC:input_type -> google.protobuf.Empty
27, // 59: gb28181pro.api.GetRawChannel:input_type -> gb28181pro.GetRawChannelRequest
39, // 60: gb28181pro.api.AddPlatform:input_type -> gb28181pro.Platform
40, // 61: gb28181pro.api.GetPlatform:input_type -> gb28181pro.GetPlatformRequest
39, // 62: gb28181pro.api.UpdatePlatform:input_type -> gb28181pro.Platform
41, // 63: gb28181pro.api.DeletePlatform:input_type -> gb28181pro.DeletePlatformRequest
42, // 64: gb28181pro.api.ListPlatforms:input_type -> gb28181pro.ListPlatformsRequest
45, // 65: gb28181pro.api.QueryRecord:input_type -> gb28181pro.QueryRecordRequest
48, // 66: gb28181pro.api.PtzControl:input_type -> gb28181pro.PtzControlRequest
49, // 67: gb28181pro.api.IrisControl:input_type -> gb28181pro.IrisControlRequest
50, // 68: gb28181pro.api.FocusControl:input_type -> gb28181pro.FocusControlRequest
51, // 69: gb28181pro.api.QueryPreset:input_type -> gb28181pro.PresetRequest
51, // 70: gb28181pro.api.AddPreset:input_type -> gb28181pro.PresetRequest
51, // 71: gb28181pro.api.CallPreset:input_type -> gb28181pro.PresetRequest
51, // 72: gb28181pro.api.DeletePreset:input_type -> gb28181pro.PresetRequest
53, // 73: gb28181pro.api.AddCruisePoint:input_type -> gb28181pro.CruisePointRequest
53, // 74: gb28181pro.api.DeleteCruisePoint:input_type -> gb28181pro.CruisePointRequest
54, // 75: gb28181pro.api.SetCruiseSpeed:input_type -> gb28181pro.CruiseSpeedRequest
55, // 76: gb28181pro.api.SetCruiseTime:input_type -> gb28181pro.CruiseTimeRequest
56, // 77: gb28181pro.api.StartCruise:input_type -> gb28181pro.CruiseRequest
56, // 78: gb28181pro.api.StopCruise:input_type -> gb28181pro.CruiseRequest
57, // 79: gb28181pro.api.StartScan:input_type -> gb28181pro.ScanRequest
57, // 80: gb28181pro.api.StopScan:input_type -> gb28181pro.ScanRequest
57, // 81: gb28181pro.api.SetScanLeft:input_type -> gb28181pro.ScanRequest
57, // 82: gb28181pro.api.SetScanRight:input_type -> gb28181pro.ScanRequest
58, // 83: gb28181pro.api.SetScanSpeed:input_type -> gb28181pro.ScanSpeedRequest
59, // 84: gb28181pro.api.WiperControl:input_type -> gb28181pro.WiperControlRequest
60, // 85: gb28181pro.api.AuxiliaryControl:input_type -> gb28181pro.AuxiliaryControlRequest
61, // 86: gb28181pro.api.TestSip:input_type -> gb28181pro.TestSipRequest
63, // 87: gb28181pro.api.SearchAlarms:input_type -> gb28181pro.SearchAlarmsRequest
66, // 88: gb28181pro.api.AddPlatformChannel:input_type -> gb28181pro.AddPlatformChannelRequest
67, // 89: gb28181pro.api.Recording:input_type -> gb28181pro.RecordingRequest
68, // 90: gb28181pro.api.UploadJpeg:input_type -> gb28181pro.UploadJpegRequest
21, // 91: gb28181pro.api.UpdateChannel:input_type -> gb28181pro.UpdateChannelRequest
82, // 92: gb28181pro.api.PlaybackPause:input_type -> gb28181pro.PlaybackPauseRequest
83, // 93: gb28181pro.api.PlaybackResume:input_type -> gb28181pro.PlaybackResumeRequest
84, // 94: gb28181pro.api.PlaybackSeek:input_type -> gb28181pro.PlaybackSeekRequest
85, // 95: gb28181pro.api.PlaybackSpeed:input_type -> gb28181pro.PlaybackSpeedRequest
70, // 96: gb28181pro.api.GetGroups:input_type -> gb28181pro.GetGroupsRequest
69, // 97: gb28181pro.api.AddGroup:input_type -> gb28181pro.Group
69, // 98: gb28181pro.api.UpdateGroup:input_type -> gb28181pro.Group
71, // 99: gb28181pro.api.DeleteGroup:input_type -> gb28181pro.DeleteGroupRequest
76, // 100: gb28181pro.api.AddGroupChannel:input_type -> gb28181pro.AddGroupChannelRequest
77, // 101: gb28181pro.api.DeleteGroupChannel:input_type -> gb28181pro.DeleteGroupChannelRequest
78, // 102: gb28181pro.api.GetGroupChannels:input_type -> gb28181pro.GetGroupChannelsRequest
86, // 103: gb28181pro.api.RemoveDevice:input_type -> gb28181pro.RemoveDeviceRequest
87, // 104: gb28181pro.api.ReceiveAlarm:input_type -> gb28181pro.AlarmInfoRequest
88, // 105: gb28181pro.api.AddChannelWithProxy:input_type -> gb28181pro.AddChannelWithProxyRequest
89, // 106: gb28181pro.api.UpdateChannelWithProxy:input_type -> gb28181pro.UpdateChannelWithProxyRequest
90, // 107: gb28181pro.api.DeleteChannelWithProxy:input_type -> gb28181pro.DeleteChannelWithProxyRequest
91, // 108: gb28181pro.api.StartDownload:input_type -> gb28181pro.StartDownloadRequest
94, // 109: gb28181pro.api.GetDownloadProgress:input_type -> gb28181pro.GetDownloadProgressRequest
3, // 110: gb28181pro.api.List:output_type -> gb28181pro.DevicesPageInfo
28, // 111: gb28181pro.api.GetDevice:output_type -> gb28181pro.DeviceResponse
3, // 112: gb28181pro.api.GetDevices:output_type -> gb28181pro.DevicesPageInfo
5, // 113: gb28181pro.api.GetChannels:output_type -> gb28181pro.ChannelsPageInfo
7, // 114: gb28181pro.api.SyncDevice:output_type -> gb28181pro.SyncStatus
9, // 115: gb28181pro.api.DeleteDevice:output_type -> gb28181pro.DeleteDeviceResponse
5, // 116: gb28181pro.api.GetSubChannels:output_type -> gb28181pro.ChannelsPageInfo
0, // 117: gb28181pro.api.ChangeAudio:output_type -> gb28181pro.BaseResponse
0, // 118: gb28181pro.api.UpdateChannelStreamIdentification:output_type -> gb28181pro.BaseResponse
0, // 119: gb28181pro.api.UpdateTransport:output_type -> gb28181pro.BaseResponse
0, // 120: gb28181pro.api.AddDevice:output_type -> gb28181pro.BaseResponse
0, // 121: gb28181pro.api.UpdateDevice:output_type -> gb28181pro.BaseResponse
17, // 122: gb28181pro.api.GetDeviceStatus:output_type -> gb28181pro.DeviceStatusResponse
19, // 123: gb28181pro.api.GetDeviceAlarm:output_type -> gb28181pro.DeviceAlarmResponse
7, // 124: gb28181pro.api.GetSyncStatus:output_type -> gb28181pro.SyncStatus
24, // 125: gb28181pro.api.GetSubscribeInfo:output_type -> gb28181pro.SubscribeInfoResponse
26, // 126: gb28181pro.api.GetSnap:output_type -> gb28181pro.SnapResponse
0, // 127: gb28181pro.api.StopConvert:output_type -> gb28181pro.BaseResponse
36, // 128: gb28181pro.api.StartBroadcast:output_type -> gb28181pro.BroadcastResponse
0, // 129: gb28181pro.api.StopBroadcast:output_type -> gb28181pro.BaseResponse
38, // 130: gb28181pro.api.GetAllSSRC:output_type -> gb28181pro.SSRCListResponse
11, // 131: gb28181pro.api.GetRawChannel:output_type -> gb28181pro.Channel
0, // 132: gb28181pro.api.AddPlatform:output_type -> gb28181pro.BaseResponse
43, // 133: gb28181pro.api.GetPlatform:output_type -> gb28181pro.PlatformResponse
0, // 134: gb28181pro.api.UpdatePlatform:output_type -> gb28181pro.BaseResponse
0, // 135: gb28181pro.api.DeletePlatform:output_type -> gb28181pro.BaseResponse
44, // 136: gb28181pro.api.ListPlatforms:output_type -> gb28181pro.PlatformsPageInfo
46, // 137: gb28181pro.api.QueryRecord:output_type -> gb28181pro.QueryRecordResponse
0, // 138: gb28181pro.api.PtzControl:output_type -> gb28181pro.BaseResponse
0, // 139: gb28181pro.api.IrisControl:output_type -> gb28181pro.BaseResponse
0, // 140: gb28181pro.api.FocusControl:output_type -> gb28181pro.BaseResponse
52, // 141: gb28181pro.api.QueryPreset:output_type -> gb28181pro.PresetResponse
0, // 142: gb28181pro.api.AddPreset:output_type -> gb28181pro.BaseResponse
0, // 143: gb28181pro.api.CallPreset:output_type -> gb28181pro.BaseResponse
0, // 144: gb28181pro.api.DeletePreset:output_type -> gb28181pro.BaseResponse
0, // 145: gb28181pro.api.AddCruisePoint:output_type -> gb28181pro.BaseResponse
0, // 146: gb28181pro.api.DeleteCruisePoint:output_type -> gb28181pro.BaseResponse
0, // 147: gb28181pro.api.SetCruiseSpeed:output_type -> gb28181pro.BaseResponse
0, // 148: gb28181pro.api.SetCruiseTime:output_type -> gb28181pro.BaseResponse
0, // 149: gb28181pro.api.StartCruise:output_type -> gb28181pro.BaseResponse
0, // 150: gb28181pro.api.StopCruise:output_type -> gb28181pro.BaseResponse
0, // 151: gb28181pro.api.StartScan:output_type -> gb28181pro.BaseResponse
0, // 152: gb28181pro.api.StopScan:output_type -> gb28181pro.BaseResponse
0, // 153: gb28181pro.api.SetScanLeft:output_type -> gb28181pro.BaseResponse
0, // 154: gb28181pro.api.SetScanRight:output_type -> gb28181pro.BaseResponse
0, // 155: gb28181pro.api.SetScanSpeed:output_type -> gb28181pro.BaseResponse
0, // 156: gb28181pro.api.WiperControl:output_type -> gb28181pro.BaseResponse
0, // 157: gb28181pro.api.AuxiliaryControl:output_type -> gb28181pro.BaseResponse
62, // 158: gb28181pro.api.TestSip:output_type -> gb28181pro.TestSipResponse
64, // 159: gb28181pro.api.SearchAlarms:output_type -> gb28181pro.SearchAlarmsResponse
0, // 160: gb28181pro.api.AddPlatformChannel:output_type -> gb28181pro.BaseResponse
0, // 161: gb28181pro.api.Recording:output_type -> gb28181pro.BaseResponse
0, // 162: gb28181pro.api.UploadJpeg:output_type -> gb28181pro.BaseResponse
0, // 163: gb28181pro.api.UpdateChannel:output_type -> gb28181pro.BaseResponse
0, // 164: gb28181pro.api.PlaybackPause:output_type -> gb28181pro.BaseResponse
0, // 165: gb28181pro.api.PlaybackResume:output_type -> gb28181pro.BaseResponse
0, // 166: gb28181pro.api.PlaybackSeek:output_type -> gb28181pro.BaseResponse
0, // 167: gb28181pro.api.PlaybackSpeed:output_type -> gb28181pro.BaseResponse
73, // 168: gb28181pro.api.GetGroups:output_type -> gb28181pro.GroupsListResponse
0, // 169: gb28181pro.api.AddGroup:output_type -> gb28181pro.BaseResponse
0, // 170: gb28181pro.api.UpdateGroup:output_type -> gb28181pro.BaseResponse
0, // 171: gb28181pro.api.DeleteGroup:output_type -> gb28181pro.BaseResponse
0, // 172: gb28181pro.api.AddGroupChannel:output_type -> gb28181pro.BaseResponse
0, // 173: gb28181pro.api.DeleteGroupChannel:output_type -> gb28181pro.BaseResponse
80, // 174: gb28181pro.api.GetGroupChannels:output_type -> gb28181pro.GroupChannelsResponse
0, // 175: gb28181pro.api.RemoveDevice:output_type -> gb28181pro.BaseResponse
0, // 176: gb28181pro.api.ReceiveAlarm:output_type -> gb28181pro.BaseResponse
0, // 177: gb28181pro.api.AddChannelWithProxy:output_type -> gb28181pro.BaseResponse
0, // 178: gb28181pro.api.UpdateChannelWithProxy:output_type -> gb28181pro.BaseResponse
0, // 179: gb28181pro.api.DeleteChannelWithProxy:output_type -> gb28181pro.BaseResponse
93, // 180: gb28181pro.api.StartDownload:output_type -> gb28181pro.StartDownloadResponse
96, // 181: gb28181pro.api.GetDownloadProgress:output_type -> gb28181pro.DownloadProgressResponse
110, // [110:182] is the sub-list for method output_type
38, // [38:110] is the sub-list for method input_type
38, // [38:38] is the sub-list for extension type_name
38, // [38:38] is the sub-list for extension extendee
0, // [0:38] is the sub-list for field type_name
}
func init() { file_gb28181_proto_init() }
@@ -7971,7 +8467,7 @@ func file_gb28181_proto_init() {
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: unsafe.Slice(unsafe.StringData(file_gb28181_proto_rawDesc), len(file_gb28181_proto_rawDesc)),
NumEnums: 0,
NumMessages: 93,
NumMessages: 99,
NumExtensions: 0,
NumServices: 1,
},

View File

@@ -3322,6 +3322,110 @@ func local_request_Api_DeleteChannelWithProxy_0(ctx context.Context, marshaler r
return msg, metadata, err
}
var filter_Api_StartDownload_0 = &utilities.DoubleArray{Encoding: map[string]int{"deviceId": 0, "channelId": 1}, Base: []int{1, 1, 2, 0, 0}, Check: []int{0, 1, 1, 2, 3}}
func request_Api_StartDownload_0(ctx context.Context, marshaler runtime.Marshaler, client ApiClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var (
protoReq StartDownloadRequest
metadata runtime.ServerMetadata
err error
)
io.Copy(io.Discard, req.Body)
val, ok := pathParams["deviceId"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "deviceId")
}
protoReq.DeviceId, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "deviceId", err)
}
val, ok = pathParams["channelId"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "channelId")
}
protoReq.ChannelId, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "channelId", err)
}
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Api_StartDownload_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := client.StartDownload(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_Api_StartDownload_0(ctx context.Context, marshaler runtime.Marshaler, server ApiServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var (
protoReq StartDownloadRequest
metadata runtime.ServerMetadata
err error
)
val, ok := pathParams["deviceId"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "deviceId")
}
protoReq.DeviceId, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "deviceId", err)
}
val, ok = pathParams["channelId"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "channelId")
}
protoReq.ChannelId, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "channelId", err)
}
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Api_StartDownload_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := server.StartDownload(ctx, &protoReq)
return msg, metadata, err
}
func request_Api_GetDownloadProgress_0(ctx context.Context, marshaler runtime.Marshaler, client ApiClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var (
protoReq GetDownloadProgressRequest
metadata runtime.ServerMetadata
err error
)
io.Copy(io.Discard, req.Body)
val, ok := pathParams["downloadId"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "downloadId")
}
protoReq.DownloadId, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "downloadId", err)
}
msg, err := client.GetDownloadProgress(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_Api_GetDownloadProgress_0(ctx context.Context, marshaler runtime.Marshaler, server ApiServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var (
protoReq GetDownloadProgressRequest
metadata runtime.ServerMetadata
err error
)
val, ok := pathParams["downloadId"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "downloadId")
}
protoReq.DownloadId, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "downloadId", err)
}
msg, err := server.GetDownloadProgress(ctx, &protoReq)
return msg, metadata, err
}
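The handlers above take deviceId/channelId (or downloadId) from the URL path and fill the remaining StartDownloadRequest fields from the query string, and the registrations below map them to GET /gb28181/api/download/{deviceId}/{channelId} and GET /gb28181/api/download/progress/{downloadId}. A client-side sketch of driving those two routes, assuming a local server on :8080 and placeholder device/channel/download IDs:

package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
)

func main() {
	base := "http://localhost:8080" // assumed m7s HTTP listen address
	q := url.Values{}
	q.Set("start", "2024-10-30T00:00:00Z") // UTC timestamp in seconds or RFC3339
	q.Set("end", "2024-10-30T01:00:00Z")
	q.Set("downloadSpeed", "1") // 1-4x

	// Start the download: GET /gb28181/api/download/{deviceId}/{channelId}?start=...&end=...
	resp, err := http.Get(base + "/gb28181/api/download/34020000001320000001/34020000001320000001?" + q.Encode())
	if err != nil {
		panic(err)
	}
	body, _ := io.ReadAll(resp.Body)
	resp.Body.Close()
	fmt.Println(string(body)) // response data contains downloadId, status, downloadUrl

	// Poll progress: GET /gb28181/api/download/progress/{downloadId}
	downloadId := "startTime_endTime_deviceId_channelId" // placeholder; use the value returned above
	resp2, err := http.Get(base + "/gb28181/api/download/progress/" + downloadId)
	if err != nil {
		panic(err)
	}
	body2, _ := io.ReadAll(resp2.Body)
	resp2.Body.Close()
	fmt.Println(string(body2)) // status, progress, downloadedBytes, downloadUrl
}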
// RegisterApiHandlerServer registers the http handlers for service Api to "mux".
// UnaryRPC :call ApiServer directly.
// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906.
@@ -4728,6 +4832,46 @@ func RegisterApiHandlerServer(ctx context.Context, mux *runtime.ServeMux, server
}
forward_Api_DeleteChannelWithProxy_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle(http.MethodGet, pattern_Api_StartDownload_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
var stream runtime.ServerTransportStream
ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/gb28181pro.Api/StartDownload", runtime.WithHTTPPathPattern("/gb28181/api/download/{deviceId}/{channelId}"))
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_Api_StartDownload_0(annotatedContext, inboundMarshaler, server, req, pathParams)
md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md)
if err != nil {
runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err)
return
}
forward_Api_StartDownload_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle(http.MethodGet, pattern_Api_GetDownloadProgress_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
var stream runtime.ServerTransportStream
ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
annotatedContext, err := runtime.AnnotateIncomingContext(ctx, mux, req, "/gb28181pro.Api/GetDownloadProgress", runtime.WithHTTPPathPattern("/gb28181/api/download/progress/{downloadId}"))
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_Api_GetDownloadProgress_0(annotatedContext, inboundMarshaler, server, req, pathParams)
md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md)
if err != nil {
runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err)
return
}
forward_Api_GetDownloadProgress_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
return nil
}
@@ -5958,6 +6102,40 @@ func RegisterApiHandlerClient(ctx context.Context, mux *runtime.ServeMux, client
}
forward_Api_DeleteChannelWithProxy_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle(http.MethodGet, pattern_Api_StartDownload_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/gb28181pro.Api/StartDownload", runtime.WithHTTPPathPattern("/gb28181/api/download/{deviceId}/{channelId}"))
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_Api_StartDownload_0(annotatedContext, inboundMarshaler, client, req, pathParams)
annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md)
if err != nil {
runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err)
return
}
forward_Api_StartDownload_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle(http.MethodGet, pattern_Api_GetDownloadProgress_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
annotatedContext, err := runtime.AnnotateContext(ctx, mux, req, "/gb28181pro.Api/GetDownloadProgress", runtime.WithHTTPPathPattern("/gb28181/api/download/progress/{downloadId}"))
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_Api_GetDownloadProgress_0(annotatedContext, inboundMarshaler, client, req, pathParams)
annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md)
if err != nil {
runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err)
return
}
forward_Api_GetDownloadProgress_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
return nil
}
@@ -6032,6 +6210,8 @@ var (
pattern_Api_AddChannelWithProxy_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 3, 0, 4, 1, 5, 4}, []string{"gb28181", "api", "channel", "add_with_proxy", "streamPath"}, ""))
pattern_Api_UpdateChannelWithProxy_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4}, []string{"gb28181", "api", "channel", "update_with_proxy", "channelId"}, ""))
pattern_Api_DeleteChannelWithProxy_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4}, []string{"gb28181", "api", "channel", "delete_with_proxy", "channelId"}, ""))
pattern_Api_StartDownload_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 1, 0, 4, 1, 5, 3, 1, 0, 4, 1, 5, 4}, []string{"gb28181", "api", "download", "deviceId", "channelId"}, ""))
pattern_Api_GetDownloadProgress_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 3, 1, 0, 4, 1, 5, 4}, []string{"gb28181", "api", "download", "progress", "downloadId"}, ""))
)
var (
@@ -6105,4 +6285,6 @@ var (
forward_Api_AddChannelWithProxy_0 = runtime.ForwardResponseMessage
forward_Api_UpdateChannelWithProxy_0 = runtime.ForwardResponseMessage
forward_Api_DeleteChannelWithProxy_0 = runtime.ForwardResponseMessage
forward_Api_StartDownload_0 = runtime.ForwardResponseMessage
forward_Api_GetDownloadProgress_0 = runtime.ForwardResponseMessage
)

View File

@@ -518,6 +518,20 @@ service api {
post: "/gb28181/api/channel/delete_with_proxy/{channelId}"
};
}
// Start a recording download
rpc StartDownload (StartDownloadRequest) returns (StartDownloadResponse) {
option (google.api.http) = {
get: "/gb28181/api/download/{deviceId}/{channelId}"
};
}
// Query download progress
rpc GetDownloadProgress (GetDownloadProgressRequest) returns (DownloadProgressResponse) {
option (google.api.http) = {
get: "/gb28181/api/download/progress/{downloadId}"
};
}
}
// Request and response message definitions
@@ -1275,3 +1289,54 @@ message DeleteChannelWithProxyRequest {
string channelId = 1; // Channel ID (URL path parameter)
}
// StartDownloadRequest: request to start a recording download
message StartDownloadRequest {
string deviceId = 1; // Device ID (URL path parameter)
string channelId = 2; // Channel ID (URL path parameter)
string start = 3; // Start time: UTC timestamp in seconds, or RFC3339
string end = 4; // End time: UTC timestamp in seconds, or RFC3339
int32 downloadSpeed = 5; // Download speed multiplier, 1-4x; default 1x to avoid frame loss
}
// StartDownloadData: download task data
message StartDownloadData {
string downloadId = 1; // Download task ID, formatted as startTime_endTime_deviceId_channelId
string status = 2; // Initial status: pending
string downloadUrl = 3; // Download URL (directly accessible once completed)
}
// StartDownloadResponse: response to starting a recording download
message StartDownloadResponse {
int32 code = 1; // Response code
string message = 2; // Response message
int32 total = 3; // Total count (unified response format; 0 here)
StartDownloadData data = 4; // Download task data
}
// GetDownloadProgressRequest: request to query download progress
message GetDownloadProgressRequest {
string downloadId = 1; // Download task ID (URL path parameter)
}
// DownloadProgressData: download progress data
message DownloadProgressData {
string downloadId = 1; // Download task ID
string status = 2; // Status: pending/downloading/completed/failed
int32 progress = 3; // Download progress, 0-100
string filePath = 4; // File path once completed
string error = 5; // Error message (if failed)
int64 downloadedBytes = 6; // Bytes downloaded so far
int64 totalBytes = 7; // Total bytes (estimated)
google.protobuf.Timestamp startedAt = 8; // Start time
google.protobuf.Timestamp completedAt = 9; // Completion time
string downloadUrl = 10; // Download URL (directly accessible once completed)
}
// DownloadProgressResponse: download progress response
message DownloadProgressResponse {
int32 code = 1; // Response code
string message = 2; // Response message
int32 total = 3; // Total count (unified response format; 0 here)
DownloadProgressData data = 4; // Download progress data
}
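Taken together, the two RPCs map onto plain HTTP GETs through the gateway handlers above: deviceId/channelId and downloadId ride in the URL path, while start, end and downloadSpeed become query parameters. The sketch below drives both endpoints; the host/port, the example device and channel IDs, and the exact JSON field casing are assumptions, not part of this change.

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

// Hypothetical endpoint and IDs; adjust to the actual deployment.
const (
	base      = "http://localhost:8080"
	deviceID  = "34020000001320000001"
	channelID = "34020000001310000001"
)

func main() {
	// Start the download; non-path fields travel as query parameters.
	startURL := fmt.Sprintf("%s/gb28181/api/download/%s/%s?start=%s&end=%s&downloadSpeed=4",
		base, deviceID, channelID, "2025-10-30T10:00:00Z", "2025-10-30T10:05:00Z")
	resp, err := http.Get(startURL)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	var started struct {
		Code int `json:"code"`
		Data struct {
			DownloadId string `json:"downloadId"`
		} `json:"data"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&started); err != nil {
		panic(err)
	}
	// Poll progress with the returned downloadId until status is completed or failed.
	prog, err := http.Get(base + "/gb28181/api/download/progress/" + started.Data.DownloadId)
	if err != nil {
		panic(err)
	}
	defer prog.Body.Close()
	var progress struct {
		Data struct {
			Status   string `json:"status"`
			Progress int    `json:"progress"`
		} `json:"data"`
	}
	if err := json.NewDecoder(prog.Body).Decode(&progress); err != nil {
		panic(err)
	}
	fmt.Println(progress.Data.Status, progress.Data.Progress)
}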

View File

@@ -92,6 +92,8 @@ const (
Api_AddChannelWithProxy_FullMethodName = "/gb28181pro.api/AddChannelWithProxy"
Api_UpdateChannelWithProxy_FullMethodName = "/gb28181pro.api/UpdateChannelWithProxy"
Api_DeleteChannelWithProxy_FullMethodName = "/gb28181pro.api/DeleteChannelWithProxy"
Api_StartDownload_FullMethodName = "/gb28181pro.api/StartDownload"
Api_GetDownloadProgress_FullMethodName = "/gb28181pro.api/GetDownloadProgress"
)
// ApiClient is the client API for Api service.
@@ -238,6 +240,10 @@ type ApiClient interface {
UpdateChannelWithProxy(ctx context.Context, in *UpdateChannelWithProxyRequest, opts ...grpc.CallOption) (*BaseResponse, error)
// Delete a channel
DeleteChannelWithProxy(ctx context.Context, in *DeleteChannelWithProxyRequest, opts ...grpc.CallOption) (*BaseResponse, error)
// Start a recording download
StartDownload(ctx context.Context, in *StartDownloadRequest, opts ...grpc.CallOption) (*StartDownloadResponse, error)
// Query download progress
GetDownloadProgress(ctx context.Context, in *GetDownloadProgressRequest, opts ...grpc.CallOption) (*DownloadProgressResponse, error)
}
type apiClient struct {
@@ -948,6 +954,26 @@ func (c *apiClient) DeleteChannelWithProxy(ctx context.Context, in *DeleteChanne
return out, nil
}
func (c *apiClient) StartDownload(ctx context.Context, in *StartDownloadRequest, opts ...grpc.CallOption) (*StartDownloadResponse, error) {
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(StartDownloadResponse)
err := c.cc.Invoke(ctx, Api_StartDownload_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *apiClient) GetDownloadProgress(ctx context.Context, in *GetDownloadProgressRequest, opts ...grpc.CallOption) (*DownloadProgressResponse, error) {
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(DownloadProgressResponse)
err := c.cc.Invoke(ctx, Api_GetDownloadProgress_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
return out, nil
}
// ApiServer is the server API for Api service.
// All implementations must embed UnimplementedApiServer
// for forward compatibility.
@@ -1092,6 +1118,10 @@ type ApiServer interface {
UpdateChannelWithProxy(context.Context, *UpdateChannelWithProxyRequest) (*BaseResponse, error)
// Delete a channel
DeleteChannelWithProxy(context.Context, *DeleteChannelWithProxyRequest) (*BaseResponse, error)
// Start a recording download
StartDownload(context.Context, *StartDownloadRequest) (*StartDownloadResponse, error)
// Query download progress
GetDownloadProgress(context.Context, *GetDownloadProgressRequest) (*DownloadProgressResponse, error)
mustEmbedUnimplementedApiServer()
}
@@ -1312,6 +1342,12 @@ func (UnimplementedApiServer) UpdateChannelWithProxy(context.Context, *UpdateCha
func (UnimplementedApiServer) DeleteChannelWithProxy(context.Context, *DeleteChannelWithProxyRequest) (*BaseResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method DeleteChannelWithProxy not implemented")
}
func (UnimplementedApiServer) StartDownload(context.Context, *StartDownloadRequest) (*StartDownloadResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method StartDownload not implemented")
}
func (UnimplementedApiServer) GetDownloadProgress(context.Context, *GetDownloadProgressRequest) (*DownloadProgressResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method GetDownloadProgress not implemented")
}
func (UnimplementedApiServer) mustEmbedUnimplementedApiServer() {}
func (UnimplementedApiServer) testEmbeddedByValue() {}
@@ -2593,6 +2629,42 @@ func _Api_DeleteChannelWithProxy_Handler(srv interface{}, ctx context.Context, d
return interceptor(ctx, in, info, handler)
}
func _Api_StartDownload_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(StartDownloadRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(ApiServer).StartDownload(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: Api_StartDownload_FullMethodName,
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ApiServer).StartDownload(ctx, req.(*StartDownloadRequest))
}
return interceptor(ctx, in, info, handler)
}
func _Api_GetDownloadProgress_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(GetDownloadProgressRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(ApiServer).GetDownloadProgress(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: Api_GetDownloadProgress_FullMethodName,
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ApiServer).GetDownloadProgress(ctx, req.(*GetDownloadProgressRequest))
}
return interceptor(ctx, in, info, handler)
}
// Api_ServiceDesc is the grpc.ServiceDesc for Api service.
// It's only intended for direct use with grpc.RegisterService,
// and not to be introspected or modified (even as a copy)
@@ -2880,6 +2952,14 @@ var Api_ServiceDesc = grpc.ServiceDesc{
MethodName: "DeleteChannelWithProxy",
Handler: _Api_DeleteChannelWithProxy_Handler,
},
{
MethodName: "StartDownload",
Handler: _Api_StartDownload_Handler,
},
{
MethodName: "GetDownloadProgress",
Handler: _Api_GetDownloadProgress_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "gb28181.proto",

View File

@@ -0,0 +1,88 @@
package plugin_gb28181pro
import (
"math/bits"
"sync/atomic"
)
// PortBitmap allocates and recycles ports using an atomic bitmap
type PortBitmap struct {
base uint16
size uint16
bitmap []uint64
cursor uint32
}
func (pb *PortBitmap) Init(base uint16, size uint16) {
pb.base = base
pb.size = size
words := int((uint32(size) + 63) / 64)
pb.bitmap = make([]uint64, words)
atomic.StoreUint32(&pb.cursor, 0)
}
func (pb *PortBitmap) Allocate() (uint16, bool) {
if pb.size == 0 || len(pb.bitmap) == 0 {
return 0, false
}
words := len(pb.bitmap)
start := int(atomic.LoadUint32(&pb.cursor) % uint32(words))
for i := 0; i < words; i++ {
widx := (start + i) % words
for {
old := atomic.LoadUint64(&pb.bitmap[widx])
free := ^old
if free == 0 {
break
}
pick := free & -free
newv := old | pick
if atomic.CompareAndSwapUint64(&pb.bitmap[widx], old, newv) {
bit := uint64(bits.TrailingZeros64(pick))
idx := uint64(widx)*64 + bit
if idx >= uint64(pb.size) {
// roll back the out-of-range bit
for {
cur := atomic.LoadUint64(&pb.bitmap[widx])
reverted := cur &^ pick
if atomic.CompareAndSwapUint64(&pb.bitmap[widx], cur, reverted) {
break
}
}
break
}
atomic.StoreUint32(&pb.cursor, uint32(widx))
return pb.base + uint16(idx), true
}
}
}
return 0, false
}
func (pb *PortBitmap) Release(port uint16) bool {
if pb.size == 0 || len(pb.bitmap) == 0 {
return false
}
if port < pb.base {
return false
}
idx := uint32(port - pb.base)
if idx >= uint32(pb.size) {
return false
}
widx := idx / 64
bit := idx % 64
mask := uint64(1) << bit
for {
old := atomic.LoadUint64(&pb.bitmap[widx])
if old&mask == 0 {
return false
}
newv := old &^ mask
if atomic.CompareAndSwapUint64(&pb.bitmap[widx], old, newv) {
return true
}
}
}
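A minimal usage sketch in the same package: Allocate scans the bitmap from the cursor for a free bit, Release clears it so the port returns to the pool. The 30000/100 range below is only an illustrative assumption.

func examplePortPool() {
	var ports PortBitmap
	ports.Init(30000, 100) // manage ports 30000..30099
	port, ok := ports.Allocate()
	if !ok {
		return // pool exhausted
	}
	defer ports.Release(port) // recycle the port when the media session ends
	_ = port                  // bind the RTP/RTCP listener here
}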

View File

@@ -327,7 +327,7 @@ func (task *registerHandlerTask) RecoverDevice(d *Device, req *sip.Request) {
//}
task.gb.DB.Save(d)
}
//d.catalog()
d.catalog()
return
}

View File

@@ -135,13 +135,8 @@ func (r *Recorder) writeTailer(end time.Time) {
}
var CustomFileName = func(job *m7s.RecordJob) string {
if job.RecConf.Fragment == 0 {
return fmt.Sprintf("%s.mp4", job.RecConf.FilePath)
}
// Use a nanosecond timestamp to avoid filename collisions when several fragments are cut within the same second
// Format: seconds_nanoseconds.mp4 (e.g. 1760431346_123456789.mp4)
now := time.Now()
return filepath.Join(job.RecConf.FilePath, fmt.Sprintf("%d_%09d.mp4", now.Unix(), now.Nanosecond()))
return filepath.Join(job.RecConf.FilePath, fmt.Sprintf("%s_%09d.mp4", time.Now().Local().Format("2006-01-02-15-04-05"), now.Nanosecond()))
}
func (r *Recorder) createStream(start time.Time) (err error) {

View File

@@ -32,6 +32,10 @@ func (v *VideoFrame) Demux() (err error) {
if err := v.ParseAVCC(&reader, int(ctx.RecordInfo.LengthSizeMinusOne)+1); err != nil {
return fmt.Errorf("failed to parse H.265 AVCC: %w", err)
}
case *codec.AV1Ctx:
if err := v.ParseAV1OBUs(&reader); err != nil {
return fmt.Errorf("failed to parse AV1 OBUs: %w", err)
}
default:
// For other formats, fall back to the default AVCC parsing (4-byte length prefix)
if err := v.ParseAVCC(&reader, 4); err != nil {
@@ -48,17 +52,21 @@ func (v *VideoFrame) Mux(sample *pkg.Sample) (err error) {
v.ICodecCtx = sample.GetBase()
switch rawData := sample.Raw.(type) {
case *pkg.Nalus:
// Determine the NALU length field size from the codec type
var naluSizeLen int = 4 // default to 4 bytes
var naluSizeLen int = 4
switch ctx := sample.ICodecCtx.(type) {
case *codec.AV1Ctx:
for obu := range rawData.RangePoint {
util.PutBE(v.NextN(4), obu.Size)
v.Push(obu.Buffers...)
}
return
case *codec.H264Ctx:
naluSizeLen = int(ctx.RecordInfo.LengthSizeMinusOne) + 1
case *codec.H265Ctx:
naluSizeLen = int(ctx.RecordInfo.LengthSizeMinusOne) + 1
}
// Add a length prefix to each NALU
for nalu := range rawData.RangePoint {
util.PutBE(v.NextN(naluSizeLen), nalu.Size) // write the NALU length
util.PutBE(v.NextN(naluSizeLen), nalu.Size)
v.Push(nalu.Buffers...)
}
}

View File

@@ -3,7 +3,6 @@ package rtmp
import (
"bytes"
"encoding/binary"
"io"
"net"
"time"
@@ -86,7 +85,7 @@ func (avcc *VideoFrame) filterH265(naluSizeLen int) {
func (avcc *VideoFrame) CheckCodecChange() (err error) {
old := avcc.ICodecCtx
if avcc.Size <= 10 {
err = io.ErrShortBuffer
err = ErrSkip
return
}
reader := avcc.NewReader()
@@ -108,12 +107,12 @@ func (avcc *VideoFrame) CheckCodecChange() (err error) {
avcc.ICodecCtx = old
break
}
newCtx := &H264Ctx{}
newCtx := H264Ctx{}
newCtx.SequenceFrame.CopyFrom(&avcc.Memory)
newCtx.SequenceFrame.BaseSample = &BaseSample{}
newCtx.H264Ctx, err = codec.NewH264CtxFromRecord(newCtx.SequenceFrame.Buffers[0][reader.Offset():])
if err == nil {
avcc.ICodecCtx = newCtx
avcc.ICodecCtx = &newCtx
} else {
return
}
@@ -129,7 +128,7 @@ func (avcc *VideoFrame) CheckCodecChange() (err error) {
newCtx.SequenceFrame.BaseSample = &BaseSample{}
newCtx.H265Ctx, err = codec.NewH265CtxFromRecord(newCtx.SequenceFrame.Buffers[0][reader.Offset():])
if err == nil {
avcc.ICodecCtx = newCtx
avcc.ICodecCtx = &newCtx
} else {
return
}
@@ -187,12 +186,12 @@ func (avcc *VideoFrame) CheckCodecChange() (err error) {
} else {
// switch ctx := old.(type) {
// case *codec.H264Ctx:
// avcc.filterH264(int(ctx.RecordInfo.LengthSizeMinusOne) + 1)
// avcc.filterH264(int(ctx.RecordInfo.LengthSizeMinusOne) + 1)
// case *H265Ctx:
// avcc.filterH265(int(ctx.RecordInfo.LengthSizeMinusOne) + 1)
// avcc.filterH265(int(ctx.RecordInfo.LengthSizeMinusOne) + 1)
// }
// if avcc.Size <= 5 {
// return old, ErrSkip
// return old, ErrSkip
// }
}
}
@@ -208,12 +207,7 @@ func (avcc *VideoFrame) parseH265(ctx *H265Ctx, reader *gomem.MemoryReader) (err
}
func (avcc *VideoFrame) parseAV1(reader *gomem.MemoryReader) error {
var obus OBUs
if err := obus.ParseAVCC(reader); err != nil {
return err
}
avcc.Raw = &obus
return nil
return avcc.ParseAV1OBUs(reader)
}
func (avcc *VideoFrame) Demux() error {
@@ -298,7 +292,7 @@ func (avcc *VideoFrame) muxOld26x(codecID VideoCodecID, fromBase *Sample) {
naluLen := uint32(nalu.Size)
binary.BigEndian.PutUint32(naluLenM, naluLen)
// if nalu.Size != len(util.ConcatBuffers(nalu.Buffers)) {
// panic("nalu size mismatch")
// panic("nalu size mismatch")
// }
avcc.Push(nalu.Buffers...)
}
@@ -306,8 +300,29 @@ func (avcc *VideoFrame) muxOld26x(codecID VideoCodecID, fromBase *Sample) {
func (avcc *VideoFrame) Mux(fromBase *Sample) (err error) {
switch c := fromBase.GetBase().(type) {
case *AV1Ctx:
panic(c)
case *codec.AV1Ctx:
if avcc.ICodecCtx == nil || avcc.GetBase() != c {
ctx := &AV1Ctx{AV1Ctx: c}
configBytes := make([]byte, 5+len(c.ConfigOBUs))
configBytes[0] = 0b1001_0000 | byte(PacketTypeSequenceStart)
copy(configBytes[1:], codec.FourCC_AV1[:])
copy(configBytes[5:], c.ConfigOBUs)
ctx.SequenceFrame.PushOne(configBytes)
ctx.SequenceFrame.BaseSample = &BaseSample{}
avcc.ICodecCtx = ctx
}
obus := fromBase.Raw.(*OBUs)
avcc.InitRecycleIndexes(obus.Count())
head := avcc.NextN(5)
if fromBase.IDR {
head[0] = 0b1001_0000 | byte(PacketTypeCodedFrames)
} else {
head[0] = 0b1010_0000 | byte(PacketTypeCodedFrames)
}
copy(head[1:], codec.FourCC_AV1[:])
for obu := range obus.RangePoint {
avcc.Push(obu.Buffers...)
}
case *codec.H264Ctx:
if avcc.ICodecCtx == nil || avcc.GetBase() != c {
ctx := &H264Ctx{H264Ctx: c}
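The first header byte written for AV1 above packs a type flag into the high bits and the packet type into the low nibble, followed by the four-byte FourCC. A small decoding sketch of that 5-byte layout, under the extended-header convention these bytes appear to follow (the bit-field interpretation is an assumption on my part, and the function name is illustrative):

// Reads back the 5-byte extended video tag header produced above (sketch).
func parseExVideoTagHeader(b []byte) (frameType, packetType byte, fourCC string, ok bool) {
	if len(b) < 5 || b[0]&0x80 == 0 { // bit 7 flags the extended header form
		return 0, 0, "", false
	}
	frameType = (b[0] >> 4) & 0x07 // 1 = key frame, 2 = inter frame
	packetType = b[0] & 0x0F       // e.g. sequence start, coded frames
	fourCC = string(b[1:5])        // "av01" for AV1
	return frameType, packetType, fourCC, true
}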

View File

@@ -83,8 +83,9 @@ func (r *RTPTCPReader) Read(packet *rtp.Packet) (err error) {
type RTPPayloadReader struct {
IRTPReader
rtp.Packet
SSRC uint32 // RTP SSRC
buffer gomem.MemoryReader
SSRC uint32 // RTP SSRC
buffer gomem.MemoryReader
onTimestampUpdate func(uint32) // timestamp update callback
}
// func NewTCPRTPPayloadReaderForFeed() *RTPPayloadReader {
@@ -125,6 +126,11 @@ func (r *RTPPayloadReader) Read(buf []byte) (n int, err error) {
continue
}
// update the timestamp
if r.onTimestampUpdate != nil {
r.onTimestampUpdate(r.Timestamp)
}
// check whether the sequence number is contiguous
if lastSeq == 0 || r.SequenceNumber == lastSeq+1 {
// sequence is contiguous: process the current packet's data

View File

@@ -6,6 +6,7 @@ import (
"io"
"net"
"strings"
"time"
"github.com/langhuihui/gomem"
task "github.com/langhuihui/gotask"
@@ -60,17 +61,29 @@ type Receiver struct {
StreamMode StreamMode
RTPMouth chan []byte
SinglePort io.ReadCloser
rtpReader *RTPPayloadReader // keep a reference to the RTP reader
}
type PSReceiver struct {
Receiver
mpegps.MpegPsDemuxer
firstRtpTimestamp uint32 // timestamp of the first RTP packet
currentRtpTimestamp uint32 // timestamp of the current RTP packet
hasFirstTimestamp bool // whether the first timestamp has been recorded
lastTimestampUpdate time.Time // time of the last timestamp update
OnProgressUpdate func() // progress update callback (optional, exported for external use)
lastProgressUpdate time.Time // time of the last progress update
ProgressUpdatePeriod time.Duration // progress update period, default 1s (exported for external configuration)
}
func (p *PSReceiver) Start() error {
err := p.Receiver.Start()
if err == nil {
p.Using(p.Publisher)
// set the RTP timestamp update callback
if p.rtpReader != nil {
p.rtpReader.onTimestampUpdate = p.UpdateRtpTimestamp
}
}
return err
}
@@ -81,6 +94,61 @@ func (p *PSReceiver) Run() error {
return p.MpegPsDemuxer.Feed(p.BufReader)
}
// UpdateRtpTimestamp updates the RTP timestamp (called from the RTP packet path)
func (p *PSReceiver) UpdateRtpTimestamp(timestamp uint32) {
now := time.Now()
if !p.hasFirstTimestamp {
p.firstRtpTimestamp = timestamp
p.hasFirstTimestamp = true
p.lastTimestampUpdate = now
p.lastProgressUpdate = now
// default progress update period is 1 second
if p.ProgressUpdatePeriod == 0 {
p.ProgressUpdatePeriod = time.Second
}
}
// detect whether the timestamp has changed
if timestamp != p.currentRtpTimestamp {
p.currentRtpTimestamp = timestamp
p.lastTimestampUpdate = now
// periodically trigger the progress callback (avoid firing too often)
if p.OnProgressUpdate != nil && now.Sub(p.lastProgressUpdate) >= p.ProgressUpdatePeriod {
p.lastProgressUpdate = now
p.OnProgressUpdate()
}
}
}
// GetElapsedSeconds returns the elapsed playback time in seconds, based on RTP timestamps
// RTP timestamps use the 90kHz clock (the standard video clock rate)
func (p *PSReceiver) GetElapsedSeconds() float64 {
if !p.hasFirstTimestamp {
return 0
}
// compute the timestamp delta (handling wrap-around)
var diff uint32
if p.currentRtpTimestamp >= p.firstRtpTimestamp {
diff = p.currentRtpTimestamp - p.firstRtpTimestamp
} else {
// 32-bit wrap-around
diff = (0xFFFFFFFF - p.firstRtpTimestamp) + p.currentRtpTimestamp + 1
}
// convert to seconds: timestamp / 90000
return float64(diff) / 90000.0
}
// IsTimestampStable reports whether the RTP timestamp has stabilized (stopped advancing)
// The timestamp is considered stable if it has not changed for more than 2 seconds
func (p *PSReceiver) IsTimestampStable() bool {
if !p.hasFirstTimestamp {
return false
}
return time.Since(p.lastTimestampUpdate) > 2*time.Second
}
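With these helpers, a percentage for the download-progress API falls out of dividing the elapsed 90kHz time by the requested time span. A sketch, assuming it sits in the same package as PSReceiver and that the caller supplies the total duration and a reporting callback (both names are illustrative):

func trackDownloadProgress(p *PSReceiver, totalSeconds float64, report func(percent int)) {
	p.ProgressUpdatePeriod = time.Second // throttle callbacks to roughly once per second
	p.OnProgressUpdate = func() {
		if totalSeconds <= 0 {
			return
		}
		percent := int(p.GetElapsedSeconds() / totalSeconds * 100)
		if percent > 100 {
			percent = 100
		}
		report(percent)
	}
}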
func (p *Receiver) Start() (err error) {
var rtpReader *RTPPayloadReader
switch p.StreamMode {
@@ -104,6 +172,7 @@ func (p *Receiver) Start() (err error) {
}
p.OnStop(conn.Close)
rtpReader = NewRTPPayloadReader(NewRTPTCPReader(conn))
p.rtpReader = rtpReader
p.BufReader = util.NewBufReader(rtpReader)
case StreamModeTCPPassive:
var conn io.ReadCloser
@@ -125,6 +194,7 @@ func (p *Receiver) Start() (err error) {
}
p.OnStop(conn.Close)
rtpReader = NewRTPPayloadReader(NewRTPTCPReader(conn))
p.rtpReader = rtpReader
p.BufReader = util.NewBufReader(rtpReader)
case StreamModeUDP:
var conn io.ReadCloser
@@ -143,10 +213,12 @@ func (p *Receiver) Start() (err error) {
}
p.OnStop(conn.Close)
rtpReader = NewRTPPayloadReader(NewRTPUDPReader(conn))
p.rtpReader = rtpReader
p.BufReader = util.NewBufReader(rtpReader)
case StreamModeManual:
p.RTPMouth = make(chan []byte)
rtpReader = NewRTPPayloadReader((RTPChanReader)(p.RTPMouth))
p.rtpReader = rtpReader
p.BufReader = util.NewBufReader(rtpReader)
}
p.Using(rtpReader, p.BufReader)

View File

@@ -9,6 +9,8 @@ import (
"time"
"unsafe"
"github.com/bluenviron/mediacommon/pkg/bits"
"github.com/bluenviron/mediacommon/pkg/codecs/av1"
"github.com/deepch/vdk/codec/h264parser"
"github.com/deepch/vdk/codec/h265parser"
"github.com/langhuihui/gomem"
@@ -37,6 +39,7 @@ type (
}
AV1Ctx struct {
RTPCtx
seq uint16
*codec.AV1Ctx
}
VP9Ctx struct {
@@ -61,6 +64,10 @@ const (
endBit = 1 << 6
MTUSize = 1460
ReceiveMTU = 1500
// AV1 RTP payload descriptor bits (subset used)
av1ZBit = 1 << 7 // start of OBU
av1YBit = 1 << 6 // end of OBU
)
func (r *VideoFrame) Recycle() {
@@ -79,9 +86,9 @@ func (r *VideoFrame) CheckCodecChange() (err error) {
}
// handle timestamp and sequence numbers
pts := r.Packets[0].Timestamp
nalus := r.Raw.(*Nalus)
switch ctx := old.(type) {
case *H264Ctx:
nalus := r.Raw.(*Nalus)
dts := ctx.dtsEst.Feed(pts)
r.SetDTS(time.Duration(dts))
r.SetPTS(time.Duration(pts))
@@ -153,10 +160,10 @@ func (r *VideoFrame) CheckCodecChange() (err error) {
ctx.seq++
}
case *H265Ctx:
nalus := r.Raw.(*Nalus)
dts := ctx.dtsEst.Feed(pts)
r.SetDTS(time.Duration(dts))
r.SetPTS(time.Duration(pts))
// check for VPS, SPS, PPS and IDR frames
var vps, sps, pps []byte
var hasVPSSPSPPS bool
for nalu := range nalus.RangePoint {
@@ -179,8 +186,6 @@ func (r *VideoFrame) CheckCodecChange() (err error) {
r.IDR = true
}
}
// if a new VPS/SPS/PPS is found, update the codec context
if hasVPSSPSPPS = vps != nil && sps != nil && pps != nil; hasVPSSPSPPS && (len(ctx.Record) == 0 || !bytes.Equal(vps, ctx.VPS()) || !bytes.Equal(sps, ctx.SPS()) || !bytes.Equal(pps, ctx.PPS())) {
var newCodecData h265parser.CodecData
if newCodecData, err = h265parser.NewCodecDataFromVPSAndSPSAndPPS(vps, sps, pps); err != nil {
@@ -192,13 +197,11 @@ func (r *VideoFrame) CheckCodecChange() (err error) {
CodecData: newCodecData,
},
}
// keep the original RTP parameters
if oldCtx, ok := old.(*H265Ctx); ok {
newCtx.RTPCtx = oldCtx.RTPCtx
}
r.ICodecCtx = newCtx
} else {
// an IDR frame without VPS/SPS/PPS needs them inserted
if r.IDR && len(ctx.VPS()) > 0 && len(ctx.SPS()) > 0 && len(ctx.PPS()) > 0 {
vpsRTP := rtp.Packet{
Header: rtp.Header{
@@ -233,7 +236,17 @@ func (r *VideoFrame) CheckCodecChange() (err error) {
r.Packets = slices.Insert(r.Packets, 0, vpsRTP, spsRTP, ppsRTP)
}
}
for p := range r.Packets.RangePoint {
p.SequenceNumber = ctx.seq
ctx.seq++
}
case *AV1Ctx:
r.SetPTS(time.Duration(pts))
r.SetDTS(time.Duration(pts))
// detect keyframe from OBUs
if obus, ok := r.Raw.(*OBUs); ok {
r.IDR = ctx.IsKeyFrame(obus)
}
// update sequence numbers
for p := range r.Packets.RangePoint {
p.SequenceNumber = ctx.seq
@@ -243,6 +256,72 @@ func (r *VideoFrame) CheckCodecChange() (err error) {
return
}
// AV1 helper to detect keyframe (KEY_FRAME or INTRA_ONLY)
func (av1Ctx *AV1Ctx) IsKeyFrame(obus *OBUs) bool {
for o := range obus.RangePoint {
reader := o.NewReader()
if reader.Length < 2 { // need at least header + leb
continue
}
var first byte
if b, err := reader.ReadByte(); err == nil {
first = b
} else {
continue
}
var header av1.OBUHeader
if err := header.Unmarshal([]byte{first}); err != nil {
continue
}
// read leb128 size to move to payload start
_, _, _ = reader.LEB128Unmarshal()
// only inspect frame header or frame obu
// OBU_FRAME_HEADER = 3, OBU_FRAME = 6
switch header.Type {
case 3, 6:
// try parse a minimal frame header: show_existing_frame (1), frame_type (2)
payload := reader
var pos int
// read show_existing_frame
showExisting, ok := utilReadBits(&payload, &pos, 1)
if !ok {
continue
}
if showExisting == 1 {
return false
}
// attempt to read frame_type (2 bits)
ft, ok := utilReadBits(&payload, &pos, 2)
if !ok {
continue
}
if ft == 0 || ft == 2 { // KEY_FRAME(0) or INTRA_ONLY(2)
return true
}
case av1.OBUTypeSequenceHeader:
// sequence header often precedes keyframes; treat as keyframe
return true
}
}
return false
}
// utilReadBits reads nbits from MemoryReader, returns value and ok
func utilReadBits(r *gomem.MemoryReader, pos *int, nbits int) (uint64, bool) {
// use mediacommon bits reader on a copy of remaining bytes
data, err := r.ReadBytes(r.Length)
if err != nil {
return 0, false
}
v, err2 := av1ReadBits(data, pos, nbits)
return v, err2 == nil
}
// av1ReadBits uses mediacommon bits helper
func av1ReadBits(buf []byte, pos *int, nbits int) (uint64, error) {
return bits.ReadBits(buf, pos, nbits)
}
func (h264 *H264Ctx) GetInfo() string {
return h264.SDPFmtpLine
}
@@ -362,6 +441,43 @@ func (r *VideoFrame) Mux(baseFrame *Sample) error {
}
}
lastPacket.Header.Marker = true
case *AV1Ctx:
ctx := &c.RTPCtx
var lastPacket *rtp.Packet
for obu := range baseFrame.Raw.(*OBUs).RangePoint {
reader := obu.NewReader()
payloadCap := MTUSize - 1
if reader.Length+1 <= MTUSize {
mem := r.NextN(reader.Length + 1)
mem[0] = av1ZBit | av1YBit
reader.Read(mem[1:])
lastPacket = r.Append(ctx, pts, mem)
continue
}
// fragmented OBU
first := true
for reader.Length > 0 {
chunk := payloadCap
if reader.Length < chunk {
chunk = reader.Length
}
mem := r.NextN(chunk + 1)
head := byte(0)
if first {
head |= av1ZBit
first = false
}
reader.Read(mem[1:])
if reader.Length == 0 {
head |= av1YBit
}
mem[0] = head
lastPacket = r.Append(ctx, pts, mem)
}
}
if lastPacket != nil {
lastPacket.Header.Marker = true
}
}
return nil
}
@@ -471,6 +587,28 @@ func (r *VideoFrame) Demux() (err error) {
}
}
return nil
case *AV1Ctx:
obus := r.GetOBUs()
obus.Reset()
var cur *gomem.Memory
for _, packet := range r.Packets {
if len(packet.Payload) <= 1 {
continue
}
desc := packet.Payload[0]
payload := packet.Payload[1:]
if desc&av1ZBit != 0 {
// start of OBU
cur = obus.GetNextPointer()
}
if cur != nil {
cur.PushOne(payload)
if desc&av1YBit != 0 {
cur = nil
}
}
}
return nil
}
return ErrUnsupportCodec
}
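The AV1 mux and demux paths above agree on a one-byte descriptor per packet: bit 7 (av1ZBit) marks the first fragment of an OBU and bit 6 (av1YBit) the last, so an OBU larger than the MTU is spread across several packets and stitched back together on receive. A self-contained sketch of that round trip under the same convention (the function names, mtu value and OBU bytes are arbitrary illustrations):

const (
	obuStartBit = 1 << 7 // first fragment of an OBU
	obuEndBit   = 1 << 6 // last fragment of an OBU
)

// fragmentOBU splits one OBU into descriptor-prefixed payloads no larger than mtu.
func fragmentOBU(obu []byte, mtu int) [][]byte {
	payloadCap := mtu - 1 // one byte reserved for the descriptor
	var out [][]byte
	for first := true; len(obu) > 0; first = false {
		n := payloadCap
		if len(obu) < n {
			n = len(obu)
		}
		head := byte(0)
		if first {
			head |= obuStartBit
		}
		if n == len(obu) {
			head |= obuEndBit
		}
		out = append(out, append([]byte{head}, obu[:n]...))
		obu = obu[n:]
	}
	return out
}

// reassembleOBU concatenates fragments between a start and an end marker.
func reassembleOBU(pkts [][]byte) []byte {
	var cur []byte
	for _, p := range pkts {
		if len(p) <= 1 {
			continue
		}
		if p[0]&obuStartBit != 0 {
			cur = cur[:0] // a new OBU begins
		}
		cur = append(cur, p[1:]...)
		if p[0]&obuEndBit != 0 {
			return cur // OBU complete
		}
	}
	return nil
}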

View File

@@ -5,6 +5,7 @@ import (
"context"
"errors"
"fmt"
"io"
"log/slog"
"net/http"
"net/url"
@@ -677,6 +678,20 @@ func (s *Server) ServeHTTP(w http.ResponseWriter, r *http.Request) {
return
}
// For .map files, set correct content-type before serving
if strings.HasSuffix(r.URL.Path, ".map") {
filePath := strings.TrimPrefix(r.URL.Path, "/admin/")
file, err := s.Admin.zipReader.Open(filePath)
if err != nil {
http.NotFound(w, r)
return
}
defer file.Close()
w.Header().Set("Content-Type", "application/json")
io.Copy(w, file)
return
}
http.ServeFileFS(w, r, s.Admin.zipReader, strings.TrimPrefix(r.URL.Path, "/admin"))
return
}