Compare commits

..

18 Commits

Author SHA1 Message Date
langhuihui
01fa1f3ed8 fix: replay cap script 2025-06-17 23:51:19 +08:00
langhuihui
830da3aaab fix: mp4 demuxer 2025-06-17 20:22:51 +08:00
langhuihui
5a04dc814d fix: event record check 2025-06-17 19:32:53 +08:00
langhuihui
af5d2bc1f2 fix: set record type 2025-06-17 18:34:10 +08:00
langhuihui
a3e0c1864e feat: add ping pong to batchv2 2025-06-17 14:03:37 +08:00
langhuihui
33d385d2bf fix: record bug 2025-06-17 11:36:32 +08:00
langhuihui
29c47a8d08 fix: hls demo page 2025-06-17 11:26:11 +08:00
langhuihui
5bf5e7bb20 feat: mp4 convert to ts format 2025-06-17 11:09:35 +08:00
langhuihui
4b74ea5841 doc: auth 2025-06-17 09:41:36 +08:00
langhuihui
43710fb017 fix: record 2025-06-16 22:41:55 +08:00
langhuihui
962dda8d08 refactor: mp4 and record system 2025-06-16 20:28:49 +08:00
erroot
ec56bba75a Erroot v5 (#286)
* When the plugin's database differs, create a new DB object and assign it to the plugin

* MP4 plugin adds extraction, clips, images, compressed video, GOP clip

* remove mp4/util panic code
2025-06-16 08:29:14 +08:00
pggiroro
b2b511d755 fix: user.LastLogin set gorm type:timestamp, gb28181 api GetGroupChannels modify 2025-06-15 22:19:14 +08:00
pggiroro
42acf47250 feature: gb28181 support single mediaport 2025-06-15 16:58:52 +08:00
langhuihui
6206ee847d fix: record table fit pg database 2025-06-15 15:58:12 +08:00
langhuihui
6cfdc03e4a fix: user mode fit pg database 2025-06-15 15:21:21 +08:00
pggiroro
b425b8da1f fix: ignore RecordEvent in gorm 2025-06-13 12:52:57 +08:00
langhuihui
e105243cd5 refactor: record 2025-06-13 12:52:57 +08:00
53 changed files with 4243 additions and 1791 deletions

api.go

@@ -96,22 +96,14 @@ func (s *Server) api_Stream_AnnexB_(rw http.ResponseWriter, r *http.Request) {
return
}
defer reader.StopRead()
if reader.Value.Raw == nil {
if err = reader.Value.Demux(publisher.VideoTrack.ICodecCtx); err != nil {
http.Error(rw, err.Error(), http.StatusInternalServerError)
return
}
}
var annexb pkg.AnnexB
var t pkg.AVTrack
t.ICodecCtx, t.SequenceFrame, err = annexb.ConvertCtx(publisher.VideoTrack.ICodecCtx)
if t.ICodecCtx == nil {
http.Error(rw, "unsupported codec", http.StatusInternalServerError)
var annexb *pkg.AnnexB
var converter = pkg.NewAVFrameConvert[*pkg.AnnexB](publisher.VideoTrack.AVTrack, nil)
annexb, err = converter.ConvertFromAVFrame(&reader.Value)
if err != nil {
http.Error(rw, err.Error(), http.StatusInternalServerError)
return
}
annexb.Mux(t.ICodecCtx, &reader.Value)
_, err = annexb.WriteTo(rw)
annexb.WriteTo(rw)
}
func (s *Server) getStreamInfo(pub *Publisher) (res *pb.StreamInfoResponse, err error) {
@@ -736,7 +728,63 @@ func (s *Server) GetConfig(_ context.Context, req *pb.GetConfigRequest) (res *pb
return
}
func (s *Server) GetRecordList(ctx context.Context, req *pb.ReqRecordList) (resp *pb.ResponseList, err error) {
func (s *Server) GetRecordList(ctx context.Context, req *pb.ReqRecordList) (resp *pb.RecordResponseList, err error) {
if s.DB == nil {
err = pkg.ErrNoDB
return
}
if req.PageSize == 0 {
req.PageSize = 10
}
if req.PageNum == 0 {
req.PageNum = 1
}
offset := (req.PageNum - 1) * req.PageSize // calculate the offset
var totalCount int64 // total record count
var result []*RecordStream
query := s.DB.Model(&RecordStream{})
if strings.Contains(req.StreamPath, "*") {
query = query.Where("stream_path like ?", strings.ReplaceAll(req.StreamPath, "*", "%"))
} else if req.StreamPath != "" {
query = query.Where("stream_path = ?", req.StreamPath)
}
if req.Type != "" {
query = query.Where("type = ?", req.Type)
}
startTime, endTime, err := util.TimeRangeQueryParse(url.Values{"range": []string{req.Range}, "start": []string{req.Start}, "end": []string{req.End}})
if err == nil {
if !startTime.IsZero() {
query = query.Where("start_time >= ?", startTime)
}
if !endTime.IsZero() {
query = query.Where("end_time <= ?", endTime)
}
}
query.Count(&totalCount)
err = query.Offset(int(offset)).Limit(int(req.PageSize)).Order("start_time desc").Find(&result).Error
if err != nil {
return
}
resp = &pb.RecordResponseList{
Total: uint32(totalCount),
PageNum: req.PageNum,
PageSize: req.PageSize,
}
for _, recordFile := range result {
resp.Data = append(resp.Data, &pb.RecordFile{
Id: uint32(recordFile.ID),
StartTime: timestamppb.New(recordFile.StartTime),
EndTime: timestamppb.New(recordFile.EndTime),
FilePath: recordFile.FilePath,
StreamPath: recordFile.StreamPath,
})
}
return
}
func (s *Server) GetEventRecordList(ctx context.Context, req *pb.ReqRecordList) (resp *pb.EventRecordResponseList, err error) {
if s.DB == nil {
err = pkg.ErrNoDB
return
@@ -751,15 +799,12 @@ func (s *Server) GetRecordList(ctx context.Context, req *pb.ReqRecordList) (resp
var totalCount int64 // total record count
var result []*EventRecordStream
query := s.DB.Model(&RecordStream{})
query := s.DB.Model(&EventRecordStream{})
if strings.Contains(req.StreamPath, "*") {
query = query.Where("stream_path like ?", strings.ReplaceAll(req.StreamPath, "*", "%"))
} else if req.StreamPath != "" {
query = query.Where("stream_path = ?", req.StreamPath)
}
if req.Mode != "" {
query = query.Where("mode = ?", req.Mode)
}
if req.Type != "" {
query = query.Where("type = ?", req.Type)
}
@@ -781,21 +826,22 @@ func (s *Server) GetRecordList(ctx context.Context, req *pb.ReqRecordList) (resp
if err != nil {
return
}
resp = &pb.ResponseList{
resp = &pb.EventRecordResponseList{
Total: uint32(totalCount),
PageNum: req.PageNum,
PageSize: req.PageSize,
}
for _, recordFile := range result {
resp.Data = append(resp.Data, &pb.RecordFile{
resp.Data = append(resp.Data, &pb.EventRecordFile{
Id: uint32(recordFile.ID),
StartTime: timestamppb.New(recordFile.StartTime),
EndTime: timestamppb.New(recordFile.EndTime),
FilePath: recordFile.FilePath,
StreamPath: recordFile.StreamPath,
EventLevel: recordFile.EventLevel,
EventDesc: recordFile.EventDesc,
EventId: recordFile.EventId,
EventName: recordFile.EventName,
EventDesc: recordFile.EventDesc,
})
}
return

doc/arch/auth.md (new file)

@@ -0,0 +1,279 @@
# Stream Authentication Mechanism
Monibuca V5 provides a comprehensive stream authentication mechanism to control access permissions for publishing and subscribing to streams. The authentication mechanism supports multiple methods, including key-based signature authentication and custom authentication handlers.
## Authentication Principles
### 1. Authentication Flow Sequence Diagrams
#### Publishing Authentication Sequence Diagram
```mermaid
sequenceDiagram
participant Client as Publishing Client
participant Plugin as Plugin
participant AuthHandler as Auth Handler
participant Server as Server
Client->>Plugin: Publishing Request (streamPath, args)
Plugin->>Plugin: Check EnableAuth && Type == PublishTypeServer
alt Authentication Enabled
Plugin->>Plugin: Look for custom auth handler
alt Custom Handler Exists
Plugin->>AuthHandler: onAuthPub(publisher)
AuthHandler->>AuthHandler: Execute custom auth logic
AuthHandler-->>Plugin: Auth result
else Use Key-based Auth
Plugin->>Plugin: Check if conf.Key exists
alt Key Configured
Plugin->>Plugin: auth(streamPath, key, secret, expire)
Plugin->>Plugin: Validate timestamp
Plugin->>Plugin: Validate secret length
Plugin->>Plugin: Calculate MD5 signature
Plugin->>Plugin: Compare signatures
Plugin-->>Plugin: Auth result
end
end
alt Auth Failed
Plugin-->>Client: Auth failed, reject publishing
else Auth Success
Plugin->>Server: Create Publisher and add to stream management
Server-->>Plugin: Publishing successful
Plugin-->>Client: Publishing established successfully
end
else Auth Disabled
Plugin->>Server: Create Publisher directly
Server-->>Plugin: Publishing successful
Plugin-->>Client: Publishing established successfully
end
```
#### Subscribing Authentication Sequence Diagram
```mermaid
sequenceDiagram
participant Client as Subscribing Client
participant Plugin as Plugin
participant AuthHandler as Auth Handler
participant Server as Server
Client->>Plugin: Subscribing Request (streamPath, args)
Plugin->>Plugin: Check EnableAuth && Type == SubscribeTypeServer
alt Authentication Enabled
Plugin->>Plugin: Look for custom auth handler
alt Custom Handler Exists
Plugin->>AuthHandler: onAuthSub(subscriber)
AuthHandler->>AuthHandler: Execute custom auth logic
AuthHandler-->>Plugin: Auth result
else Use Key-based Auth
Plugin->>Plugin: Check if conf.Key exists
alt Key Configured
Plugin->>Plugin: auth(streamPath, key, secret, expire)
Plugin->>Plugin: Validate timestamp
Plugin->>Plugin: Validate secret length
Plugin->>Plugin: Calculate MD5 signature
Plugin->>Plugin: Compare signatures
Plugin-->>Plugin: Auth result
end
end
alt Auth Failed
Plugin-->>Client: Auth failed, reject subscribing
else Auth Success
Plugin->>Server: Create Subscriber and wait for Publisher
Server->>Server: Wait for stream publishing and track ready
Server-->>Plugin: Subscribing ready
Plugin-->>Client: Start streaming data transmission
end
else Auth Disabled
Plugin->>Server: Create Subscriber directly
Server-->>Plugin: Subscribing successful
Plugin-->>Client: Start streaming data transmission
end
```
### 2. Authentication Trigger Points
Authentication is triggered in the following two scenarios:
- **Publishing Authentication**: Triggered by the `PublishWithConfig` method when a publishing request arrives
- **Subscribing Authentication**: Triggered by the `SubscribeWithConfig` method when a subscribing request arrives
### 3. Authentication Condition Checks
Authentication is executed only when both of the following conditions are met:
```go
if p.config.EnableAuth && publisher.Type == PublishTypeServer
```
- `EnableAuth`: Authentication is enabled in the plugin configuration
- `Type == PublishTypeServer/SubscribeTypeServer`: Only authenticate server-type publishing/subscribing
### 4. Authentication Method Priority
The system executes authentication in the following priority order:
1. **Custom Authentication Handler** (Highest priority)
2. **Key-based Signature Authentication**
3. **No Authentication** (Default pass)
## Custom Authentication Handlers
### Publishing Authentication Handler
```go
onAuthPub := p.Meta.OnAuthPub
if onAuthPub == nil {
onAuthPub = p.Server.Meta.OnAuthPub
}
if onAuthPub != nil {
if err = onAuthPub(publisher).Await(); err != nil {
p.Warn("auth failed", "error", err)
return
}
}
```
Authentication handler lookup order:
1. Plugin-level authentication handler `p.Meta.OnAuthPub`
2. Server-level authentication handler `p.Server.Meta.OnAuthPub`
### Subscribing Authentication Handler
```go
onAuthSub := p.Meta.OnAuthSub
if onAuthSub == nil {
onAuthSub = p.Server.Meta.OnAuthSub
}
if onAuthSub != nil {
if err = onAuthSub(subscriber).Await(); err != nil {
p.Warn("auth failed", "error", err)
return
}
}
```
## Key-based Signature Authentication
When no custom authentication handler is registered and a Key is configured, the system falls back to an MD5-based signature authentication mechanism.
### Authentication Algorithm
```go
func (p *Plugin) auth(streamPath string, key string, secret string, expire string) (err error) {
// 1. Validate expiration time
if unixTime, err := strconv.ParseInt(expire, 16, 64); err != nil || time.Now().Unix() > unixTime {
return fmt.Errorf("auth failed expired")
}
// 2. Validate secret length
if len(secret) != 32 {
return fmt.Errorf("auth failed secret length must be 32")
}
// 3. Calculate the true secret
trueSecret := md5.Sum([]byte(key + streamPath + expire))
// 4. Compare secrets
if secret == hex.EncodeToString(trueSecret[:]) {
return nil
}
return fmt.Errorf("auth failed invalid secret")
}
```
### Signature Calculation Steps
1. **Construct the signature string**: `key + streamPath + expire`
2. **MD5 hashing**: Compute the MD5 hash of the signature string
3. **Hexadecimal encoding**: Convert the MD5 digest to a 32-character hexadecimal string
4. **Verify the signature**: Compare the computed value with the client-provided secret (see the sketch below)
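To make these steps concrete, here is a minimal client-side sketch of the calculation in Go. It mirrors the server-side `auth` function shown above rather than calling any Monibuca API; the key, stream path, and expire values are the illustrative ones from the parameter table below.
```go
package main

import (
	"crypto/md5"
	"encoding/hex"
	"fmt"
)

// computeSecret mirrors the server-side check: MD5(key + streamPath + expire),
// hex-encoded into a 32-character string.
func computeSecret(key, streamPath, expire string) string {
	sum := md5.Sum([]byte(key + streamPath + expire))
	return hex.EncodeToString(sum[:])
}

func main() {
	// Example values only, taken from the parameter table in this document.
	secret := computeSecret("mySecretKey", "live/test", "64a1b2c3")
	fmt.Println(secret) // append as ?secret=...&expire=64a1b2c3 to the stream URL
}
```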
### Parameter Description
| Parameter | Type | Description | Example |
|-----------|------|-------------|---------|
| key | string | Secret key set in configuration file | "mySecretKey" |
| streamPath | string | Stream path | "live/test" |
| expire | string | Expiration timestamp (hexadecimal) | "64a1b2c3" |
| secret | string | Client-calculated signature (32-char hex) | "5d41402abc4b2a76b9719d911017c592" |
### Timestamp Handling
- The expiration time is a Unix timestamp encoded in hexadecimal
- The system checks whether the current time has passed the expiration time
- If the timestamp cannot be parsed or has already expired, authentication fails (see the sketch below)
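As an illustration of this encoding, the sketch below generates an `expire` value one hour in the future (the one-hour window is an arbitrary choice for the example) and then applies the same parse-and-compare check the `auth` function uses.
```go
package main

import (
	"fmt"
	"strconv"
	"time"
)

func main() {
	// Client side: encode an expiration time one hour from now
	// as a hexadecimal Unix timestamp.
	expire := strconv.FormatInt(time.Now().Add(time.Hour).Unix(), 16)
	fmt.Println("expire =", expire)

	// Server side: parse the hex timestamp and reject it once it is in the past,
	// matching the check in the auth function above.
	unixTime, err := strconv.ParseInt(expire, 16, 64)
	if err != nil || time.Now().Unix() > unixTime {
		fmt.Println("auth failed expired")
		return
	}
	fmt.Println("timestamp valid until", time.Unix(unixTime, 0))
}
```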
## API Key Generation
The system also provides an API endpoint for key generation to support the authentication needs of the admin dashboard (a client-side usage sketch follows the handler below):
```go
p.handle("/api/secret/{type}/{streamPath...}", http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
// JWT Token validation
authHeader := r.Header.Get("Authorization")
tokenString := strings.TrimPrefix(authHeader, "Bearer ")
_, err := p.Server.ValidateToken(tokenString)
// Generate publishing or subscribing key
streamPath := r.PathValue("streamPath")
t := r.PathValue("type")
expire := r.URL.Query().Get("expire")
if t == "publish" {
secret := md5.Sum([]byte(p.config.Publish.Key + streamPath + expire))
rw.Write([]byte(hex.EncodeToString(secret[:])))
} else if t == "subscribe" {
secret := md5.Sum([]byte(p.config.Subscribe.Key + streamPath + expire))
rw.Write([]byte(hex.EncodeToString(secret[:])))
}
}))
```
## Configuration Examples
### Enable Authentication
```yaml
# Plugin configuration
rtmp:
enableAuth: true
publish:
key: "your-publish-key"
subscribe:
key: "your-subscribe-key"
```
### Publishing URL Example
```
rtmp://localhost/live/test?secret=5d41402abc4b2a76b9719d911017c592&expire=64a1b2c3
```
### Subscribing URL Example
```
http://localhost:8080/flv/live/test.flv?secret=a1b2c3d4e5f6789012345678901234ab&expire=64a1b2c3
```
## Security Considerations
1. **Key Protection**: Keys in configuration files should be properly secured to prevent leakage
2. **Time Window**: Set reasonable expiration times to balance security and usability
3. **HTTPS Transport**: Use HTTPS for transmitting authentication parameters in production
4. **Logging**: Authentication failures are logged as warnings for security auditing
## Error Handling
Common causes of authentication failure:
- `auth failed expired`: Timestamp expired or format error
- `auth failed secret length must be 32`: Incorrect secret length
- `auth failed invalid secret`: Signature verification failed
- `invalid token`: JWT verification failed during API key generation


@@ -26,7 +26,7 @@
### Plugin Development
[plugin/README.md](../plugin/README.md)
[plugin/README.md](../../plugin/README.md)
## Task System


@@ -0,0 +1,279 @@
# 流鉴权机制
Monibuca V5 提供了完善的流鉴权机制,用于控制推流和拉流的访问权限。鉴权机制支持多种方式,包括基于密钥的签名鉴权和自定义鉴权处理器。
## 鉴权原理
### 1. 鉴权流程时序图
#### 推流鉴权时序图
```mermaid
sequenceDiagram
participant Client as 推流客户端
participant Plugin as 插件
participant AuthHandler as 鉴权处理器
participant Server as 服务器
Client->>Plugin: 推流请求 (streamPath, args)
Plugin->>Plugin: 检查 EnableAuth && Type == PublishTypeServer
alt 启用鉴权
Plugin->>Plugin: 查找自定义鉴权处理器
alt 存在自定义处理器
Plugin->>AuthHandler: onAuthPub(publisher)
AuthHandler->>AuthHandler: 执行自定义鉴权逻辑
AuthHandler-->>Plugin: 鉴权结果
else 使用密钥鉴权
Plugin->>Plugin: 检查 conf.Key 是否存在
alt 配置了Key
Plugin->>Plugin: auth(streamPath, key, secret, expire)
Plugin->>Plugin: 验证时间戳
Plugin->>Plugin: 验证secret长度
Plugin->>Plugin: 计算MD5签名
Plugin->>Plugin: 比较签名
Plugin-->>Plugin: 鉴权结果
end
end
alt 鉴权失败
Plugin-->>Client: 鉴权失败,拒绝推流
else 鉴权成功
Plugin->>Server: 创建Publisher并添加到流管理
Server-->>Plugin: 推流成功
Plugin-->>Client: 推流建立成功
end
else 未启用鉴权
Plugin->>Server: 直接创建Publisher
Server-->>Plugin: 推流成功
Plugin-->>Client: 推流建立成功
end
```
#### 拉流鉴权时序图
```mermaid
sequenceDiagram
participant Client as 拉流客户端
participant Plugin as 插件
participant AuthHandler as 鉴权处理器
participant Server as 服务器
Client->>Plugin: 拉流请求 (streamPath, args)
Plugin->>Plugin: 检查 EnableAuth && Type == SubscribeTypeServer
alt 启用鉴权
Plugin->>Plugin: 查找自定义鉴权处理器
alt 存在自定义处理器
Plugin->>AuthHandler: onAuthSub(subscriber)
AuthHandler->>AuthHandler: 执行自定义鉴权逻辑
AuthHandler-->>Plugin: 鉴权结果
else 使用密钥鉴权
Plugin->>Plugin: 检查 conf.Key 是否存在
alt 配置了Key
Plugin->>Plugin: auth(streamPath, key, secret, expire)
Plugin->>Plugin: 验证时间戳
Plugin->>Plugin: 验证secret长度
Plugin->>Plugin: 计算MD5签名
Plugin->>Plugin: 比较签名
Plugin-->>Plugin: 鉴权结果
end
end
alt 鉴权失败
Plugin-->>Client: 鉴权失败,拒绝拉流
else 鉴权成功
Plugin->>Server: 创建Subscriber并等待Publisher
Server->>Server: 等待流发布和轨道就绪
Server-->>Plugin: 拉流准备就绪
Plugin-->>Client: 开始传输流数据
end
else 未启用鉴权
Plugin->>Server: 直接创建Subscriber
Server-->>Plugin: 拉流成功
Plugin-->>Client: 开始传输流数据
end
```
### 2. 鉴权触发时机
鉴权在以下两种情况下触发:
- **推流鉴权**:当有推流请求时,在`PublishWithConfig`方法中触发
- **拉流鉴权**:当有拉流请求时,在`SubscribeWithConfig`方法中触发
### 3. 鉴权条件判断
鉴权只在以下条件同时满足时才会执行:
```go
if p.config.EnableAuth && publisher.Type == PublishTypeServer
```
- `EnableAuth`:插件配置中启用了鉴权
- `Type == PublishTypeServer/SubscribeTypeServer`:只对服务端类型的推流/拉流进行鉴权
### 4. 鉴权方式优先级
系统按以下优先级执行鉴权:
1. **自定义鉴权处理器**(最高优先级)
2. **基于密钥的签名鉴权**
3. **无鉴权**(默认通过)
## 自定义鉴权处理器
### 推流鉴权处理器
```go
onAuthPub := p.Meta.OnAuthPub
if onAuthPub == nil {
onAuthPub = p.Server.Meta.OnAuthPub
}
if onAuthPub != nil {
if err = onAuthPub(publisher).Await(); err != nil {
p.Warn("auth failed", "error", err)
return
}
}
```
鉴权处理器查找顺序:
1. 插件级别的鉴权处理器 `p.Meta.OnAuthPub`
2. 服务器级别的鉴权处理器 `p.Server.Meta.OnAuthPub`
### 拉流鉴权处理器
```go
onAuthSub := p.Meta.OnAuthSub
if onAuthSub == nil {
onAuthSub = p.Server.Meta.OnAuthSub
}
if onAuthSub != nil {
if err = onAuthSub(subscriber).Await(); err != nil {
p.Warn("auth failed", "error", err)
return
}
}
```
## 基于密钥的签名鉴权
当没有自定义鉴权处理器时,如果配置了Key,系统将使用基于MD5的签名鉴权机制。
### 鉴权算法
```go
func (p *Plugin) auth(streamPath string, key string, secret string, expire string) (err error) {
// 1. 验证过期时间
if unixTime, err := strconv.ParseInt(expire, 16, 64); err != nil || time.Now().Unix() > unixTime {
return fmt.Errorf("auth failed expired")
}
// 2. 验证secret长度
if len(secret) != 32 {
return fmt.Errorf("auth failed secret length must be 32")
}
// 3. 计算真实的secret
trueSecret := md5.Sum([]byte(key + streamPath + expire))
// 4. 比较secret
if secret == hex.EncodeToString(trueSecret[:]) {
return nil
}
return fmt.Errorf("auth failed invalid secret")
}
```
### 签名计算步骤
1. **构造签名字符串**:`key + streamPath + expire`
2. **MD5加密**:对签名字符串进行MD5哈希
3. **十六进制编码**:将MD5结果转换为32位十六进制字符串
4. **验证签名**:比较计算结果与客户端提供的secret
### 参数说明
| 参数 | 类型 | 说明 | 示例 |
|------|------|------|------|
| key | string | 密钥,在配置文件中设置 | "mySecretKey" |
| streamPath | string | 流路径 | "live/test" |
| expire | string | 过期时间戳(16进制) | "64a1b2c3" |
| secret | string | 客户端计算的签名(32位十六进制) | "5d41402abc4b2a76b9719d911017c592" |
### 时间戳处理
- 过期时间使用16进制Unix时间戳
- 系统会验证当前时间是否超过过期时间
- 时间戳解析失败或已过期都会导致鉴权失败
## API密钥生成
系统还提供了API接口用于生成密钥,支持管理后台的鉴权需求:
```go
p.handle("/api/secret/{type}/{streamPath...}", http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
// JWT Token验证
authHeader := r.Header.Get("Authorization")
tokenString := strings.TrimPrefix(authHeader, "Bearer ")
_, err := p.Server.ValidateToken(tokenString)
// 生成推流或拉流密钥
streamPath := r.PathValue("streamPath")
t := r.PathValue("type")
expire := r.URL.Query().Get("expire")
if t == "publish" {
secret := md5.Sum([]byte(p.config.Publish.Key + streamPath + expire))
rw.Write([]byte(hex.EncodeToString(secret[:])))
} else if t == "subscribe" {
secret := md5.Sum([]byte(p.config.Subscribe.Key + streamPath + expire))
rw.Write([]byte(hex.EncodeToString(secret[:])))
}
}))
```
## 配置示例
### 启用鉴权
```yaml
# 插件配置
rtmp:
enableAuth: true
publish:
key: "your-publish-key"
subscribe:
key: "your-subscribe-key"
```
### 推流URL示例
```
rtmp://localhost/live/test?secret=5d41402abc4b2a76b9719d911017c592&expire=64a1b2c3
```
### 拉流URL示例
```
http://localhost:8080/flv/live/test.flv?secret=a1b2c3d4e5f6789012345678901234ab&expire=64a1b2c3
```
## 安全考虑
1. **密钥保护**:配置文件中的key应当妥善保管,避免泄露
2. **时间窗口**:合理设置过期时间,平衡安全性和可用性
3. **HTTPS传输**:生产环境建议使用HTTPS传输鉴权参数
4. **日志记录**:鉴权失败会记录警告日志,便于安全审计
## 错误处理
鉴权失败的常见原因:
- `auth failed expired`:时间戳已过期或格式错误
- `auth failed secret length must be 32`:secret长度不正确
- `auth failed invalid secret`:签名验证失败
- `invalid token`:API密钥生成时JWT验证失败


@@ -4010,9 +4010,8 @@ type ReqRecordList struct {
End string `protobuf:"bytes,4,opt,name=end,proto3" json:"end,omitempty"`
PageNum uint32 `protobuf:"varint,5,opt,name=pageNum,proto3" json:"pageNum,omitempty"`
PageSize uint32 `protobuf:"varint,6,opt,name=pageSize,proto3" json:"pageSize,omitempty"`
Mode string `protobuf:"bytes,7,opt,name=mode,proto3" json:"mode,omitempty"`
Type string `protobuf:"bytes,8,opt,name=type,proto3" json:"type,omitempty"`
EventLevel string `protobuf:"bytes,9,opt,name=eventLevel,proto3" json:"eventLevel,omitempty"`
Type string `protobuf:"bytes,7,opt,name=type,proto3" json:"type,omitempty"`
EventLevel string `protobuf:"bytes,8,opt,name=eventLevel,proto3" json:"eventLevel,omitempty"`
unknownFields protoimpl.UnknownFields
sizeCache protoimpl.SizeCache
}
@@ -4089,13 +4088,6 @@ func (x *ReqRecordList) GetPageSize() uint32 {
return 0
}
func (x *ReqRecordList) GetMode() string {
if x != nil {
return x.Mode
}
return ""
}
func (x *ReqRecordList) GetType() string {
if x != nil {
return x.Type
@@ -4117,9 +4109,6 @@ type RecordFile struct {
StreamPath string `protobuf:"bytes,3,opt,name=streamPath,proto3" json:"streamPath,omitempty"`
StartTime *timestamppb.Timestamp `protobuf:"bytes,4,opt,name=startTime,proto3" json:"startTime,omitempty"`
EndTime *timestamppb.Timestamp `protobuf:"bytes,5,opt,name=endTime,proto3" json:"endTime,omitempty"`
EventLevel string `protobuf:"bytes,6,opt,name=eventLevel,proto3" json:"eventLevel,omitempty"`
EventName string `protobuf:"bytes,7,opt,name=eventName,proto3" json:"eventName,omitempty"`
EventDesc string `protobuf:"bytes,8,opt,name=eventDesc,proto3" json:"eventDesc,omitempty"`
unknownFields protoimpl.UnknownFields
sizeCache protoimpl.SizeCache
}
@@ -4189,53 +4178,35 @@ func (x *RecordFile) GetEndTime() *timestamppb.Timestamp {
return nil
}
func (x *RecordFile) GetEventLevel() string {
if x != nil {
return x.EventLevel
}
return ""
}
func (x *RecordFile) GetEventName() string {
if x != nil {
return x.EventName
}
return ""
}
func (x *RecordFile) GetEventDesc() string {
if x != nil {
return x.EventDesc
}
return ""
}
type ResponseList struct {
type EventRecordFile struct {
state protoimpl.MessageState `protogen:"open.v1"`
Code int32 `protobuf:"varint,1,opt,name=code,proto3" json:"code,omitempty"`
Message string `protobuf:"bytes,2,opt,name=message,proto3" json:"message,omitempty"`
Total uint32 `protobuf:"varint,3,opt,name=total,proto3" json:"total,omitempty"`
PageNum uint32 `protobuf:"varint,4,opt,name=pageNum,proto3" json:"pageNum,omitempty"`
PageSize uint32 `protobuf:"varint,5,opt,name=pageSize,proto3" json:"pageSize,omitempty"`
Data []*RecordFile `protobuf:"bytes,6,rep,name=data,proto3" json:"data,omitempty"`
Id uint32 `protobuf:"varint,1,opt,name=id,proto3" json:"id,omitempty"`
FilePath string `protobuf:"bytes,2,opt,name=filePath,proto3" json:"filePath,omitempty"`
StreamPath string `protobuf:"bytes,3,opt,name=streamPath,proto3" json:"streamPath,omitempty"`
StartTime *timestamppb.Timestamp `protobuf:"bytes,4,opt,name=startTime,proto3" json:"startTime,omitempty"`
EndTime *timestamppb.Timestamp `protobuf:"bytes,5,opt,name=endTime,proto3" json:"endTime,omitempty"`
EventId string `protobuf:"bytes,6,opt,name=eventId,proto3" json:"eventId,omitempty"`
EventLevel string `protobuf:"bytes,7,opt,name=eventLevel,proto3" json:"eventLevel,omitempty"`
EventName string `protobuf:"bytes,8,opt,name=eventName,proto3" json:"eventName,omitempty"`
EventDesc string `protobuf:"bytes,9,opt,name=eventDesc,proto3" json:"eventDesc,omitempty"`
unknownFields protoimpl.UnknownFields
sizeCache protoimpl.SizeCache
}
func (x *ResponseList) Reset() {
*x = ResponseList{}
func (x *EventRecordFile) Reset() {
*x = EventRecordFile{}
mi := &file_global_proto_msgTypes[58]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
func (x *ResponseList) String() string {
func (x *EventRecordFile) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*ResponseList) ProtoMessage() {}
func (*EventRecordFile) ProtoMessage() {}
func (x *ResponseList) ProtoReflect() protoreflect.Message {
func (x *EventRecordFile) ProtoReflect() protoreflect.Message {
mi := &file_global_proto_msgTypes[58]
if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
@@ -4247,47 +4218,236 @@ func (x *ResponseList) ProtoReflect() protoreflect.Message {
return mi.MessageOf(x)
}
// Deprecated: Use ResponseList.ProtoReflect.Descriptor instead.
func (*ResponseList) Descriptor() ([]byte, []int) {
// Deprecated: Use EventRecordFile.ProtoReflect.Descriptor instead.
func (*EventRecordFile) Descriptor() ([]byte, []int) {
return file_global_proto_rawDescGZIP(), []int{58}
}
func (x *ResponseList) GetCode() int32 {
func (x *EventRecordFile) GetId() uint32 {
if x != nil {
return x.Id
}
return 0
}
func (x *EventRecordFile) GetFilePath() string {
if x != nil {
return x.FilePath
}
return ""
}
func (x *EventRecordFile) GetStreamPath() string {
if x != nil {
return x.StreamPath
}
return ""
}
func (x *EventRecordFile) GetStartTime() *timestamppb.Timestamp {
if x != nil {
return x.StartTime
}
return nil
}
func (x *EventRecordFile) GetEndTime() *timestamppb.Timestamp {
if x != nil {
return x.EndTime
}
return nil
}
func (x *EventRecordFile) GetEventId() string {
if x != nil {
return x.EventId
}
return ""
}
func (x *EventRecordFile) GetEventLevel() string {
if x != nil {
return x.EventLevel
}
return ""
}
func (x *EventRecordFile) GetEventName() string {
if x != nil {
return x.EventName
}
return ""
}
func (x *EventRecordFile) GetEventDesc() string {
if x != nil {
return x.EventDesc
}
return ""
}
type RecordResponseList struct {
state protoimpl.MessageState `protogen:"open.v1"`
Code int32 `protobuf:"varint,1,opt,name=code,proto3" json:"code,omitempty"`
Message string `protobuf:"bytes,2,opt,name=message,proto3" json:"message,omitempty"`
Total uint32 `protobuf:"varint,3,opt,name=total,proto3" json:"total,omitempty"`
PageNum uint32 `protobuf:"varint,4,opt,name=pageNum,proto3" json:"pageNum,omitempty"`
PageSize uint32 `protobuf:"varint,5,opt,name=pageSize,proto3" json:"pageSize,omitempty"`
Data []*RecordFile `protobuf:"bytes,6,rep,name=data,proto3" json:"data,omitempty"`
unknownFields protoimpl.UnknownFields
sizeCache protoimpl.SizeCache
}
func (x *RecordResponseList) Reset() {
*x = RecordResponseList{}
mi := &file_global_proto_msgTypes[59]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
func (x *RecordResponseList) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*RecordResponseList) ProtoMessage() {}
func (x *RecordResponseList) ProtoReflect() protoreflect.Message {
mi := &file_global_proto_msgTypes[59]
if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use RecordResponseList.ProtoReflect.Descriptor instead.
func (*RecordResponseList) Descriptor() ([]byte, []int) {
return file_global_proto_rawDescGZIP(), []int{59}
}
func (x *RecordResponseList) GetCode() int32 {
if x != nil {
return x.Code
}
return 0
}
func (x *ResponseList) GetMessage() string {
func (x *RecordResponseList) GetMessage() string {
if x != nil {
return x.Message
}
return ""
}
func (x *ResponseList) GetTotal() uint32 {
func (x *RecordResponseList) GetTotal() uint32 {
if x != nil {
return x.Total
}
return 0
}
func (x *ResponseList) GetPageNum() uint32 {
func (x *RecordResponseList) GetPageNum() uint32 {
if x != nil {
return x.PageNum
}
return 0
}
func (x *ResponseList) GetPageSize() uint32 {
func (x *RecordResponseList) GetPageSize() uint32 {
if x != nil {
return x.PageSize
}
return 0
}
func (x *ResponseList) GetData() []*RecordFile {
func (x *RecordResponseList) GetData() []*RecordFile {
if x != nil {
return x.Data
}
return nil
}
type EventRecordResponseList struct {
state protoimpl.MessageState `protogen:"open.v1"`
Code int32 `protobuf:"varint,1,opt,name=code,proto3" json:"code,omitempty"`
Message string `protobuf:"bytes,2,opt,name=message,proto3" json:"message,omitempty"`
Total uint32 `protobuf:"varint,3,opt,name=total,proto3" json:"total,omitempty"`
PageNum uint32 `protobuf:"varint,4,opt,name=pageNum,proto3" json:"pageNum,omitempty"`
PageSize uint32 `protobuf:"varint,5,opt,name=pageSize,proto3" json:"pageSize,omitempty"`
Data []*EventRecordFile `protobuf:"bytes,6,rep,name=data,proto3" json:"data,omitempty"`
unknownFields protoimpl.UnknownFields
sizeCache protoimpl.SizeCache
}
func (x *EventRecordResponseList) Reset() {
*x = EventRecordResponseList{}
mi := &file_global_proto_msgTypes[60]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
func (x *EventRecordResponseList) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*EventRecordResponseList) ProtoMessage() {}
func (x *EventRecordResponseList) ProtoReflect() protoreflect.Message {
mi := &file_global_proto_msgTypes[60]
if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use EventRecordResponseList.ProtoReflect.Descriptor instead.
func (*EventRecordResponseList) Descriptor() ([]byte, []int) {
return file_global_proto_rawDescGZIP(), []int{60}
}
func (x *EventRecordResponseList) GetCode() int32 {
if x != nil {
return x.Code
}
return 0
}
func (x *EventRecordResponseList) GetMessage() string {
if x != nil {
return x.Message
}
return ""
}
func (x *EventRecordResponseList) GetTotal() uint32 {
if x != nil {
return x.Total
}
return 0
}
func (x *EventRecordResponseList) GetPageNum() uint32 {
if x != nil {
return x.PageNum
}
return 0
}
func (x *EventRecordResponseList) GetPageSize() uint32 {
if x != nil {
return x.PageSize
}
return 0
}
func (x *EventRecordResponseList) GetData() []*EventRecordFile {
if x != nil {
return x.Data
}
@@ -4306,7 +4466,7 @@ type Catalog struct {
func (x *Catalog) Reset() {
*x = Catalog{}
mi := &file_global_proto_msgTypes[59]
mi := &file_global_proto_msgTypes[61]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -4318,7 +4478,7 @@ func (x *Catalog) String() string {
func (*Catalog) ProtoMessage() {}
func (x *Catalog) ProtoReflect() protoreflect.Message {
mi := &file_global_proto_msgTypes[59]
mi := &file_global_proto_msgTypes[61]
if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -4331,7 +4491,7 @@ func (x *Catalog) ProtoReflect() protoreflect.Message {
// Deprecated: Use Catalog.ProtoReflect.Descriptor instead.
func (*Catalog) Descriptor() ([]byte, []int) {
return file_global_proto_rawDescGZIP(), []int{59}
return file_global_proto_rawDescGZIP(), []int{61}
}
func (x *Catalog) GetStreamPath() string {
@@ -4373,7 +4533,7 @@ type ResponseCatalog struct {
func (x *ResponseCatalog) Reset() {
*x = ResponseCatalog{}
mi := &file_global_proto_msgTypes[60]
mi := &file_global_proto_msgTypes[62]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -4385,7 +4545,7 @@ func (x *ResponseCatalog) String() string {
func (*ResponseCatalog) ProtoMessage() {}
func (x *ResponseCatalog) ProtoReflect() protoreflect.Message {
mi := &file_global_proto_msgTypes[60]
mi := &file_global_proto_msgTypes[62]
if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -4398,7 +4558,7 @@ func (x *ResponseCatalog) ProtoReflect() protoreflect.Message {
// Deprecated: Use ResponseCatalog.ProtoReflect.Descriptor instead.
func (*ResponseCatalog) Descriptor() ([]byte, []int) {
return file_global_proto_rawDescGZIP(), []int{60}
return file_global_proto_rawDescGZIP(), []int{62}
}
func (x *ResponseCatalog) GetCode() int32 {
@@ -4436,7 +4596,7 @@ type ReqRecordDelete struct {
func (x *ReqRecordDelete) Reset() {
*x = ReqRecordDelete{}
mi := &file_global_proto_msgTypes[61]
mi := &file_global_proto_msgTypes[63]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -4448,7 +4608,7 @@ func (x *ReqRecordDelete) String() string {
func (*ReqRecordDelete) ProtoMessage() {}
func (x *ReqRecordDelete) ProtoReflect() protoreflect.Message {
mi := &file_global_proto_msgTypes[61]
mi := &file_global_proto_msgTypes[63]
if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -4461,7 +4621,7 @@ func (x *ReqRecordDelete) ProtoReflect() protoreflect.Message {
// Deprecated: Use ReqRecordDelete.ProtoReflect.Descriptor instead.
func (*ReqRecordDelete) Descriptor() ([]byte, []int) {
return file_global_proto_rawDescGZIP(), []int{61}
return file_global_proto_rawDescGZIP(), []int{63}
}
func (x *ReqRecordDelete) GetStreamPath() string {
@@ -4517,7 +4677,7 @@ type ResponseDelete struct {
func (x *ResponseDelete) Reset() {
*x = ResponseDelete{}
mi := &file_global_proto_msgTypes[62]
mi := &file_global_proto_msgTypes[64]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -4529,7 +4689,7 @@ func (x *ResponseDelete) String() string {
func (*ResponseDelete) ProtoMessage() {}
func (x *ResponseDelete) ProtoReflect() protoreflect.Message {
mi := &file_global_proto_msgTypes[62]
mi := &file_global_proto_msgTypes[64]
if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -4542,7 +4702,7 @@ func (x *ResponseDelete) ProtoReflect() protoreflect.Message {
// Deprecated: Use ResponseDelete.ProtoReflect.Descriptor instead.
func (*ResponseDelete) Descriptor() ([]byte, []int) {
return file_global_proto_rawDescGZIP(), []int{62}
return file_global_proto_rawDescGZIP(), []int{64}
}
func (x *ResponseDelete) GetCode() int32 {
@@ -4575,7 +4735,7 @@ type ReqRecordCatalog struct {
func (x *ReqRecordCatalog) Reset() {
*x = ReqRecordCatalog{}
mi := &file_global_proto_msgTypes[63]
mi := &file_global_proto_msgTypes[65]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
@@ -4587,7 +4747,7 @@ func (x *ReqRecordCatalog) String() string {
func (*ReqRecordCatalog) ProtoMessage() {}
func (x *ReqRecordCatalog) ProtoReflect() protoreflect.Message {
mi := &file_global_proto_msgTypes[63]
mi := &file_global_proto_msgTypes[65]
if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
@@ -4600,7 +4760,7 @@ func (x *ReqRecordCatalog) ProtoReflect() protoreflect.Message {
// Deprecated: Use ReqRecordCatalog.ProtoReflect.Descriptor instead.
func (*ReqRecordCatalog) Descriptor() ([]byte, []int) {
return file_global_proto_rawDescGZIP(), []int{63}
return file_global_proto_rawDescGZIP(), []int{65}
}
func (x *ReqRecordCatalog) GetType() string {
@@ -5015,7 +5175,7 @@ const file_global_proto_rawDesc = "" +
"\x15TransformListResponse\x12\x12\n" +
"\x04code\x18\x01 \x01(\x05R\x04code\x12\x18\n" +
"\amessage\x18\x02 \x01(\tR\amessage\x12%\n" +
"\x04data\x18\x03 \x03(\v2\x11.global.TransformR\x04data\"\xeb\x01\n" +
"\x04data\x18\x03 \x03(\v2\x11.global.TransformR\x04data\"\xd7\x01\n" +
"\rReqRecordList\x12\x1e\n" +
"\n" +
"streamPath\x18\x01 \x01(\tR\n" +
@@ -5025,11 +5185,10 @@ const file_global_proto_rawDesc = "" +
"\x03end\x18\x04 \x01(\tR\x03end\x12\x18\n" +
"\apageNum\x18\x05 \x01(\rR\apageNum\x12\x1a\n" +
"\bpageSize\x18\x06 \x01(\rR\bpageSize\x12\x12\n" +
"\x04mode\x18\a \x01(\tR\x04mode\x12\x12\n" +
"\x04type\x18\b \x01(\tR\x04type\x12\x1e\n" +
"\x04type\x18\a \x01(\tR\x04type\x12\x1e\n" +
"\n" +
"eventLevel\x18\t \x01(\tR\n" +
"eventLevel\"\xa4\x02\n" +
"eventLevel\x18\b \x01(\tR\n" +
"eventLevel\"\xc8\x01\n" +
"\n" +
"RecordFile\x12\x0e\n" +
"\x02id\x18\x01 \x01(\rR\x02id\x12\x1a\n" +
@@ -5038,19 +5197,35 @@ const file_global_proto_rawDesc = "" +
"streamPath\x18\x03 \x01(\tR\n" +
"streamPath\x128\n" +
"\tstartTime\x18\x04 \x01(\v2\x1a.google.protobuf.TimestampR\tstartTime\x124\n" +
"\aendTime\x18\x05 \x01(\v2\x1a.google.protobuf.TimestampR\aendTime\x12\x1e\n" +
"\aendTime\x18\x05 \x01(\v2\x1a.google.protobuf.TimestampR\aendTime\"\xc3\x02\n" +
"\x0fEventRecordFile\x12\x0e\n" +
"\x02id\x18\x01 \x01(\rR\x02id\x12\x1a\n" +
"\bfilePath\x18\x02 \x01(\tR\bfilePath\x12\x1e\n" +
"\n" +
"eventLevel\x18\x06 \x01(\tR\n" +
"streamPath\x18\x03 \x01(\tR\n" +
"streamPath\x128\n" +
"\tstartTime\x18\x04 \x01(\v2\x1a.google.protobuf.TimestampR\tstartTime\x124\n" +
"\aendTime\x18\x05 \x01(\v2\x1a.google.protobuf.TimestampR\aendTime\x12\x18\n" +
"\aeventId\x18\x06 \x01(\tR\aeventId\x12\x1e\n" +
"\n" +
"eventLevel\x18\a \x01(\tR\n" +
"eventLevel\x12\x1c\n" +
"\teventName\x18\a \x01(\tR\teventName\x12\x1c\n" +
"\teventDesc\x18\b \x01(\tR\teventDesc\"\xb0\x01\n" +
"\fResponseList\x12\x12\n" +
"\teventName\x18\b \x01(\tR\teventName\x12\x1c\n" +
"\teventDesc\x18\t \x01(\tR\teventDesc\"\xb6\x01\n" +
"\x12RecordResponseList\x12\x12\n" +
"\x04code\x18\x01 \x01(\x05R\x04code\x12\x18\n" +
"\amessage\x18\x02 \x01(\tR\amessage\x12\x14\n" +
"\x05total\x18\x03 \x01(\rR\x05total\x12\x18\n" +
"\apageNum\x18\x04 \x01(\rR\apageNum\x12\x1a\n" +
"\bpageSize\x18\x05 \x01(\rR\bpageSize\x12&\n" +
"\x04data\x18\x06 \x03(\v2\x12.global.RecordFileR\x04data\"\xaf\x01\n" +
"\x04data\x18\x06 \x03(\v2\x12.global.RecordFileR\x04data\"\xc0\x01\n" +
"\x17EventRecordResponseList\x12\x12\n" +
"\x04code\x18\x01 \x01(\x05R\x04code\x12\x18\n" +
"\amessage\x18\x02 \x01(\tR\amessage\x12\x14\n" +
"\x05total\x18\x03 \x01(\rR\x05total\x12\x18\n" +
"\apageNum\x18\x04 \x01(\rR\apageNum\x12\x1a\n" +
"\bpageSize\x18\x05 \x01(\rR\bpageSize\x12+\n" +
"\x04data\x18\x06 \x03(\v2\x17.global.EventRecordFileR\x04data\"\xaf\x01\n" +
"\aCatalog\x12\x1e\n" +
"\n" +
"streamPath\x18\x01 \x01(\tR\n" +
@@ -5076,7 +5251,7 @@ const file_global_proto_rawDesc = "" +
"\amessage\x18\x02 \x01(\tR\amessage\x12&\n" +
"\x04data\x18\x03 \x03(\v2\x12.global.RecordFileR\x04data\"&\n" +
"\x10ReqRecordCatalog\x12\x12\n" +
"\x04type\x18\x01 \x01(\tR\x04type2\xae!\n" +
"\x04type\x18\x01 \x01(\tR\x04type2\xba\"\n" +
"\x03api\x12P\n" +
"\aSysInfo\x12\x16.google.protobuf.Empty\x1a\x17.global.SysInfoResponse\"\x14\x82\xd3\xe4\x93\x02\x0e\x12\f/api/sysinfo\x12i\n" +
"\x0fDisabledPlugins\x12\x16.google.protobuf.Empty\x1a\x1f.global.DisabledPluginsResponse\"\x1d\x82\xd3\xe4\x93\x02\x17\x12\x15/api/plugins/disabled\x12P\n" +
@@ -5118,8 +5293,9 @@ const file_global_proto_rawDesc = "" +
"\x0fRemovePushProxy\x12\x15.global.RequestWithId\x1a\x17.global.SuccessResponse\"&\x82\xd3\xe4\x93\x02 :\x01*\"\x1b/api/proxy/push/remove/{id}\x12d\n" +
"\x0fUpdatePushProxy\x12\x15.global.PushProxyInfo\x1a\x17.global.SuccessResponse\"!\x82\xd3\xe4\x93\x02\x1b:\x01*\"\x16/api/proxy/push/update\x12_\n" +
"\fGetRecording\x12\x16.google.protobuf.Empty\x1a\x1d.global.RecordingListResponse\"\x18\x82\xd3\xe4\x93\x02\x12\x12\x10/api/record/list\x12f\n" +
"\x10GetTransformList\x12\x16.google.protobuf.Empty\x1a\x1d.global.TransformListResponse\"\x1b\x82\xd3\xe4\x93\x02\x15\x12\x13/api/transform/list\x12m\n" +
"\rGetRecordList\x12\x15.global.ReqRecordList\x1a\x14.global.ResponseList\"/\x82\xd3\xe4\x93\x02)\x12'/api/record/{type}/list/{streamPath=**}\x12i\n" +
"\x10GetTransformList\x12\x16.google.protobuf.Empty\x1a\x1d.global.TransformListResponse\"\x1b\x82\xd3\xe4\x93\x02\x15\x12\x13/api/transform/list\x12s\n" +
"\rGetRecordList\x12\x15.global.ReqRecordList\x1a\x1a.global.RecordResponseList\"/\x82\xd3\xe4\x93\x02)\x12'/api/record/{type}/list/{streamPath=**}\x12\x83\x01\n" +
"\x12GetEventRecordList\x12\x15.global.ReqRecordList\x1a\x1f.global.EventRecordResponseList\"5\x82\xd3\xe4\x93\x02/\x12-/api/record/{type}/event/list/{streamPath=**}\x12i\n" +
"\x10GetRecordCatalog\x12\x18.global.ReqRecordCatalog\x1a\x17.global.ResponseCatalog\"\"\x82\xd3\xe4\x93\x02\x1c\x12\x1a/api/record/{type}/catalog\x12u\n" +
"\fDeleteRecord\x12\x17.global.ReqRecordDelete\x1a\x16.global.ResponseDelete\"4\x82\xd3\xe4\x93\x02.:\x01*\")/api/record/{type}/delete/{streamPath=**}B\x10Z\x0em7s.live/v5/pbb\x06proto3"
@@ -5135,7 +5311,7 @@ func file_global_proto_rawDescGZIP() []byte {
return file_global_proto_rawDescData
}
var file_global_proto_msgTypes = make([]protoimpl.MessageInfo, 71)
var file_global_proto_msgTypes = make([]protoimpl.MessageInfo, 73)
var file_global_proto_goTypes = []any{
(*DisabledPluginsResponse)(nil), // 0: global.DisabledPluginsResponse
(*GetConfigRequest)(nil), // 1: global.GetConfigRequest
@@ -5195,171 +5371,178 @@ var file_global_proto_goTypes = []any{
(*TransformListResponse)(nil), // 55: global.TransformListResponse
(*ReqRecordList)(nil), // 56: global.ReqRecordList
(*RecordFile)(nil), // 57: global.RecordFile
(*ResponseList)(nil), // 58: global.ResponseList
(*Catalog)(nil), // 59: global.Catalog
(*ResponseCatalog)(nil), // 60: global.ResponseCatalog
(*ReqRecordDelete)(nil), // 61: global.ReqRecordDelete
(*ResponseDelete)(nil), // 62: global.ResponseDelete
(*ReqRecordCatalog)(nil), // 63: global.ReqRecordCatalog
nil, // 64: global.Formily.PropertiesEntry
nil, // 65: global.Formily.ComponentPropsEntry
nil, // 66: global.FormilyResponse.PropertiesEntry
nil, // 67: global.PluginInfo.DescriptionEntry
nil, // 68: global.TaskTreeData.DescriptionEntry
nil, // 69: global.StreamWaitListResponse.ListEntry
nil, // 70: global.TrackSnapShotData.ReaderEntry
(*timestamppb.Timestamp)(nil), // 71: google.protobuf.Timestamp
(*durationpb.Duration)(nil), // 72: google.protobuf.Duration
(*anypb.Any)(nil), // 73: google.protobuf.Any
(*emptypb.Empty)(nil), // 74: google.protobuf.Empty
(*EventRecordFile)(nil), // 58: global.EventRecordFile
(*RecordResponseList)(nil), // 59: global.RecordResponseList
(*EventRecordResponseList)(nil), // 60: global.EventRecordResponseList
(*Catalog)(nil), // 61: global.Catalog
(*ResponseCatalog)(nil), // 62: global.ResponseCatalog
(*ReqRecordDelete)(nil), // 63: global.ReqRecordDelete
(*ResponseDelete)(nil), // 64: global.ResponseDelete
(*ReqRecordCatalog)(nil), // 65: global.ReqRecordCatalog
nil, // 66: global.Formily.PropertiesEntry
nil, // 67: global.Formily.ComponentPropsEntry
nil, // 68: global.FormilyResponse.PropertiesEntry
nil, // 69: global.PluginInfo.DescriptionEntry
nil, // 70: global.TaskTreeData.DescriptionEntry
nil, // 71: global.StreamWaitListResponse.ListEntry
nil, // 72: global.TrackSnapShotData.ReaderEntry
(*timestamppb.Timestamp)(nil), // 73: google.protobuf.Timestamp
(*durationpb.Duration)(nil), // 74: google.protobuf.Duration
(*anypb.Any)(nil), // 75: google.protobuf.Any
(*emptypb.Empty)(nil), // 76: google.protobuf.Empty
}
var file_global_proto_depIdxs = []int32{
12, // 0: global.DisabledPluginsResponse.data:type_name -> global.PluginInfo
64, // 1: global.Formily.properties:type_name -> global.Formily.PropertiesEntry
65, // 2: global.Formily.componentProps:type_name -> global.Formily.ComponentPropsEntry
66, // 3: global.FormilyResponse.properties:type_name -> global.FormilyResponse.PropertiesEntry
66, // 1: global.Formily.properties:type_name -> global.Formily.PropertiesEntry
67, // 2: global.Formily.componentProps:type_name -> global.Formily.ComponentPropsEntry
68, // 3: global.FormilyResponse.properties:type_name -> global.FormilyResponse.PropertiesEntry
4, // 4: global.GetConfigResponse.data:type_name -> global.ConfigData
10, // 5: global.SummaryResponse.memory:type_name -> global.Usage
10, // 6: global.SummaryResponse.hardDisk:type_name -> global.Usage
9, // 7: global.SummaryResponse.netWork:type_name -> global.NetWorkInfo
67, // 8: global.PluginInfo.description:type_name -> global.PluginInfo.DescriptionEntry
71, // 9: global.SysInfoData.startTime:type_name -> google.protobuf.Timestamp
69, // 8: global.PluginInfo.description:type_name -> global.PluginInfo.DescriptionEntry
73, // 9: global.SysInfoData.startTime:type_name -> google.protobuf.Timestamp
12, // 10: global.SysInfoData.plugins:type_name -> global.PluginInfo
13, // 11: global.SysInfoResponse.data:type_name -> global.SysInfoData
71, // 12: global.TaskTreeData.startTime:type_name -> google.protobuf.Timestamp
68, // 13: global.TaskTreeData.description:type_name -> global.TaskTreeData.DescriptionEntry
73, // 12: global.TaskTreeData.startTime:type_name -> google.protobuf.Timestamp
70, // 13: global.TaskTreeData.description:type_name -> global.TaskTreeData.DescriptionEntry
15, // 14: global.TaskTreeData.children:type_name -> global.TaskTreeData
15, // 15: global.TaskTreeData.blocked:type_name -> global.TaskTreeData
15, // 16: global.TaskTreeResponse.data:type_name -> global.TaskTreeData
22, // 17: global.StreamListResponse.data:type_name -> global.StreamInfo
69, // 18: global.StreamWaitListResponse.list:type_name -> global.StreamWaitListResponse.ListEntry
71, // 18: global.StreamWaitListResponse.list:type_name -> global.StreamWaitListResponse.ListEntry
22, // 19: global.StreamInfoResponse.data:type_name -> global.StreamInfo
28, // 20: global.StreamInfo.audioTrack:type_name -> global.AudioTrackInfo
31, // 21: global.StreamInfo.videoTrack:type_name -> global.VideoTrackInfo
71, // 22: global.StreamInfo.startTime:type_name -> google.protobuf.Timestamp
72, // 23: global.StreamInfo.bufferTime:type_name -> google.protobuf.Duration
73, // 22: global.StreamInfo.startTime:type_name -> google.protobuf.Timestamp
74, // 23: global.StreamInfo.bufferTime:type_name -> google.protobuf.Duration
23, // 24: global.StreamInfo.recording:type_name -> global.RecordingDetail
72, // 25: global.RecordingDetail.fragment:type_name -> google.protobuf.Duration
71, // 26: global.TrackSnapShot.writeTime:type_name -> google.protobuf.Timestamp
74, // 25: global.RecordingDetail.fragment:type_name -> google.protobuf.Duration
73, // 26: global.TrackSnapShot.writeTime:type_name -> google.protobuf.Timestamp
24, // 27: global.TrackSnapShot.wrap:type_name -> global.Wrap
26, // 28: global.MemoryBlockGroup.list:type_name -> global.MemoryBlock
25, // 29: global.TrackSnapShotData.ring:type_name -> global.TrackSnapShot
70, // 30: global.TrackSnapShotData.reader:type_name -> global.TrackSnapShotData.ReaderEntry
72, // 30: global.TrackSnapShotData.reader:type_name -> global.TrackSnapShotData.ReaderEntry
27, // 31: global.TrackSnapShotData.memory:type_name -> global.MemoryBlockGroup
29, // 32: global.TrackSnapShotResponse.data:type_name -> global.TrackSnapShotData
71, // 33: global.SubscriberSnapShot.startTime:type_name -> google.protobuf.Timestamp
73, // 33: global.SubscriberSnapShot.startTime:type_name -> google.protobuf.Timestamp
37, // 34: global.SubscriberSnapShot.audioReader:type_name -> global.RingReaderSnapShot
37, // 35: global.SubscriberSnapShot.videoReader:type_name -> global.RingReaderSnapShot
72, // 36: global.SubscriberSnapShot.bufferTime:type_name -> google.protobuf.Duration
74, // 36: global.SubscriberSnapShot.bufferTime:type_name -> google.protobuf.Duration
38, // 37: global.SubscribersResponse.data:type_name -> global.SubscriberSnapShot
41, // 38: global.PullProxyListResponse.data:type_name -> global.PullProxyInfo
71, // 39: global.PullProxyInfo.createTime:type_name -> google.protobuf.Timestamp
71, // 40: global.PullProxyInfo.updateTime:type_name -> google.protobuf.Timestamp
72, // 41: global.PullProxyInfo.recordFragment:type_name -> google.protobuf.Duration
71, // 42: global.PushProxyInfo.createTime:type_name -> google.protobuf.Timestamp
71, // 43: global.PushProxyInfo.updateTime:type_name -> google.protobuf.Timestamp
73, // 39: global.PullProxyInfo.createTime:type_name -> google.protobuf.Timestamp
73, // 40: global.PullProxyInfo.updateTime:type_name -> google.protobuf.Timestamp
74, // 41: global.PullProxyInfo.recordFragment:type_name -> google.protobuf.Duration
73, // 42: global.PushProxyInfo.createTime:type_name -> google.protobuf.Timestamp
73, // 43: global.PushProxyInfo.updateTime:type_name -> google.protobuf.Timestamp
42, // 44: global.PushProxyListResponse.data:type_name -> global.PushProxyInfo
45, // 45: global.StreamAliasListResponse.data:type_name -> global.StreamAlias
71, // 46: global.Recording.startTime:type_name -> google.protobuf.Timestamp
73, // 46: global.Recording.startTime:type_name -> google.protobuf.Timestamp
49, // 47: global.RecordingListResponse.data:type_name -> global.Recording
71, // 48: global.PushInfo.startTime:type_name -> google.protobuf.Timestamp
73, // 48: global.PushInfo.startTime:type_name -> google.protobuf.Timestamp
51, // 49: global.PushListResponse.data:type_name -> global.PushInfo
54, // 50: global.TransformListResponse.data:type_name -> global.Transform
71, // 51: global.RecordFile.startTime:type_name -> google.protobuf.Timestamp
71, // 52: global.RecordFile.endTime:type_name -> google.protobuf.Timestamp
57, // 53: global.ResponseList.data:type_name -> global.RecordFile
71, // 54: global.Catalog.startTime:type_name -> google.protobuf.Timestamp
71, // 55: global.Catalog.endTime:type_name -> google.protobuf.Timestamp
59, // 56: global.ResponseCatalog.data:type_name -> global.Catalog
57, // 57: global.ResponseDelete.data:type_name -> global.RecordFile
2, // 58: global.Formily.PropertiesEntry.value:type_name -> global.Formily
73, // 59: global.Formily.ComponentPropsEntry.value:type_name -> google.protobuf.Any
2, // 60: global.FormilyResponse.PropertiesEntry.value:type_name -> global.Formily
74, // 61: global.api.SysInfo:input_type -> google.protobuf.Empty
74, // 62: global.api.DisabledPlugins:input_type -> google.protobuf.Empty
74, // 63: global.api.Summary:input_type -> google.protobuf.Empty
33, // 64: global.api.Shutdown:input_type -> global.RequestWithId
33, // 65: global.api.Restart:input_type -> global.RequestWithId
74, // 66: global.api.TaskTree:input_type -> google.protobuf.Empty
34, // 67: global.api.StopTask:input_type -> global.RequestWithId64
34, // 68: global.api.RestartTask:input_type -> global.RequestWithId64
17, // 69: global.api.StreamList:input_type -> global.StreamListRequest
74, // 70: global.api.WaitList:input_type -> google.protobuf.Empty
20, // 71: global.api.StreamInfo:input_type -> global.StreamSnapRequest
20, // 72: global.api.PauseStream:input_type -> global.StreamSnapRequest
20, // 73: global.api.ResumeStream:input_type -> global.StreamSnapRequest
47, // 74: global.api.SetStreamSpeed:input_type -> global.SetStreamSpeedRequest
48, // 75: global.api.SeekStream:input_type -> global.SeekStreamRequest
36, // 76: global.api.GetSubscribers:input_type -> global.SubscribersRequest
20, // 77: global.api.AudioTrackSnap:input_type -> global.StreamSnapRequest
20, // 78: global.api.VideoTrackSnap:input_type -> global.StreamSnapRequest
35, // 79: global.api.ChangeSubscribe:input_type -> global.ChangeSubscribeRequest
74, // 80: global.api.GetStreamAlias:input_type -> google.protobuf.Empty
44, // 81: global.api.SetStreamAlias:input_type -> global.SetStreamAliasRequest
20, // 82: global.api.StopPublish:input_type -> global.StreamSnapRequest
33, // 83: global.api.StopSubscribe:input_type -> global.RequestWithId
74, // 84: global.api.GetConfigFile:input_type -> google.protobuf.Empty
7, // 85: global.api.UpdateConfigFile:input_type -> global.UpdateConfigFileRequest
1, // 86: global.api.GetConfig:input_type -> global.GetConfigRequest
1, // 87: global.api.GetFormily:input_type -> global.GetConfigRequest
74, // 88: global.api.GetPullProxyList:input_type -> google.protobuf.Empty
41, // 89: global.api.AddPullProxy:input_type -> global.PullProxyInfo
33, // 90: global.api.RemovePullProxy:input_type -> global.RequestWithId
41, // 91: global.api.UpdatePullProxy:input_type -> global.PullProxyInfo
74, // 92: global.api.GetPushProxyList:input_type -> google.protobuf.Empty
42, // 93: global.api.AddPushProxy:input_type -> global.PushProxyInfo
33, // 94: global.api.RemovePushProxy:input_type -> global.RequestWithId
42, // 95: global.api.UpdatePushProxy:input_type -> global.PushProxyInfo
74, // 96: global.api.GetRecording:input_type -> google.protobuf.Empty
74, // 97: global.api.GetTransformList:input_type -> google.protobuf.Empty
56, // 98: global.api.GetRecordList:input_type -> global.ReqRecordList
63, // 99: global.api.GetRecordCatalog:input_type -> global.ReqRecordCatalog
61, // 100: global.api.DeleteRecord:input_type -> global.ReqRecordDelete
14, // 101: global.api.SysInfo:output_type -> global.SysInfoResponse
0, // 102: global.api.DisabledPlugins:output_type -> global.DisabledPluginsResponse
11, // 103: global.api.Summary:output_type -> global.SummaryResponse
32, // 104: global.api.Shutdown:output_type -> global.SuccessResponse
32, // 105: global.api.Restart:output_type -> global.SuccessResponse
16, // 106: global.api.TaskTree:output_type -> global.TaskTreeResponse
32, // 107: global.api.StopTask:output_type -> global.SuccessResponse
32, // 108: global.api.RestartTask:output_type -> global.SuccessResponse
18, // 109: global.api.StreamList:output_type -> global.StreamListResponse
19, // 110: global.api.WaitList:output_type -> global.StreamWaitListResponse
21, // 111: global.api.StreamInfo:output_type -> global.StreamInfoResponse
32, // 112: global.api.PauseStream:output_type -> global.SuccessResponse
32, // 113: global.api.ResumeStream:output_type -> global.SuccessResponse
32, // 114: global.api.SetStreamSpeed:output_type -> global.SuccessResponse
32, // 115: global.api.SeekStream:output_type -> global.SuccessResponse
39, // 116: global.api.GetSubscribers:output_type -> global.SubscribersResponse
30, // 117: global.api.AudioTrackSnap:output_type -> global.TrackSnapShotResponse
30, // 118: global.api.VideoTrackSnap:output_type -> global.TrackSnapShotResponse
32, // 119: global.api.ChangeSubscribe:output_type -> global.SuccessResponse
46, // 120: global.api.GetStreamAlias:output_type -> global.StreamAliasListResponse
32, // 121: global.api.SetStreamAlias:output_type -> global.SuccessResponse
32, // 122: global.api.StopPublish:output_type -> global.SuccessResponse
32, // 123: global.api.StopSubscribe:output_type -> global.SuccessResponse
5, // 124: global.api.GetConfigFile:output_type -> global.GetConfigFileResponse
32, // 125: global.api.UpdateConfigFile:output_type -> global.SuccessResponse
6, // 126: global.api.GetConfig:output_type -> global.GetConfigResponse
6, // 127: global.api.GetFormily:output_type -> global.GetConfigResponse
40, // 128: global.api.GetPullProxyList:output_type -> global.PullProxyListResponse
32, // 129: global.api.AddPullProxy:output_type -> global.SuccessResponse
32, // 130: global.api.RemovePullProxy:output_type -> global.SuccessResponse
32, // 131: global.api.UpdatePullProxy:output_type -> global.SuccessResponse
43, // 132: global.api.GetPushProxyList:output_type -> global.PushProxyListResponse
32, // 133: global.api.AddPushProxy:output_type -> global.SuccessResponse
32, // 134: global.api.RemovePushProxy:output_type -> global.SuccessResponse
32, // 135: global.api.UpdatePushProxy:output_type -> global.SuccessResponse
50, // 136: global.api.GetRecording:output_type -> global.RecordingListResponse
55, // 137: global.api.GetTransformList:output_type -> global.TransformListResponse
58, // 138: global.api.GetRecordList:output_type -> global.ResponseList
60, // 139: global.api.GetRecordCatalog:output_type -> global.ResponseCatalog
62, // 140: global.api.DeleteRecord:output_type -> global.ResponseDelete
101, // [101:141] is the sub-list for method output_type
61, // [61:101] is the sub-list for method input_type
61, // [61:61] is the sub-list for extension type_name
61, // [61:61] is the sub-list for extension extendee
0, // [0:61] is the sub-list for field type_name
73, // 51: global.RecordFile.startTime:type_name -> google.protobuf.Timestamp
73, // 52: global.RecordFile.endTime:type_name -> google.protobuf.Timestamp
73, // 53: global.EventRecordFile.startTime:type_name -> google.protobuf.Timestamp
73, // 54: global.EventRecordFile.endTime:type_name -> google.protobuf.Timestamp
57, // 55: global.RecordResponseList.data:type_name -> global.RecordFile
58, // 56: global.EventRecordResponseList.data:type_name -> global.EventRecordFile
73, // 57: global.Catalog.startTime:type_name -> google.protobuf.Timestamp
73, // 58: global.Catalog.endTime:type_name -> google.protobuf.Timestamp
61, // 59: global.ResponseCatalog.data:type_name -> global.Catalog
57, // 60: global.ResponseDelete.data:type_name -> global.RecordFile
2, // 61: global.Formily.PropertiesEntry.value:type_name -> global.Formily
75, // 62: global.Formily.ComponentPropsEntry.value:type_name -> google.protobuf.Any
2, // 63: global.FormilyResponse.PropertiesEntry.value:type_name -> global.Formily
76, // 64: global.api.SysInfo:input_type -> google.protobuf.Empty
76, // 65: global.api.DisabledPlugins:input_type -> google.protobuf.Empty
76, // 66: global.api.Summary:input_type -> google.protobuf.Empty
33, // 67: global.api.Shutdown:input_type -> global.RequestWithId
33, // 68: global.api.Restart:input_type -> global.RequestWithId
76, // 69: global.api.TaskTree:input_type -> google.protobuf.Empty
34, // 70: global.api.StopTask:input_type -> global.RequestWithId64
34, // 71: global.api.RestartTask:input_type -> global.RequestWithId64
17, // 72: global.api.StreamList:input_type -> global.StreamListRequest
76, // 73: global.api.WaitList:input_type -> google.protobuf.Empty
20, // 74: global.api.StreamInfo:input_type -> global.StreamSnapRequest
20, // 75: global.api.PauseStream:input_type -> global.StreamSnapRequest
20, // 76: global.api.ResumeStream:input_type -> global.StreamSnapRequest
47, // 77: global.api.SetStreamSpeed:input_type -> global.SetStreamSpeedRequest
48, // 78: global.api.SeekStream:input_type -> global.SeekStreamRequest
36, // 79: global.api.GetSubscribers:input_type -> global.SubscribersRequest
20, // 80: global.api.AudioTrackSnap:input_type -> global.StreamSnapRequest
20, // 81: global.api.VideoTrackSnap:input_type -> global.StreamSnapRequest
35, // 82: global.api.ChangeSubscribe:input_type -> global.ChangeSubscribeRequest
76, // 83: global.api.GetStreamAlias:input_type -> google.protobuf.Empty
44, // 84: global.api.SetStreamAlias:input_type -> global.SetStreamAliasRequest
20, // 85: global.api.StopPublish:input_type -> global.StreamSnapRequest
33, // 86: global.api.StopSubscribe:input_type -> global.RequestWithId
76, // 87: global.api.GetConfigFile:input_type -> google.protobuf.Empty
7, // 88: global.api.UpdateConfigFile:input_type -> global.UpdateConfigFileRequest
1, // 89: global.api.GetConfig:input_type -> global.GetConfigRequest
1, // 90: global.api.GetFormily:input_type -> global.GetConfigRequest
76, // 91: global.api.GetPullProxyList:input_type -> google.protobuf.Empty
41, // 92: global.api.AddPullProxy:input_type -> global.PullProxyInfo
33, // 93: global.api.RemovePullProxy:input_type -> global.RequestWithId
41, // 94: global.api.UpdatePullProxy:input_type -> global.PullProxyInfo
76, // 95: global.api.GetPushProxyList:input_type -> google.protobuf.Empty
42, // 96: global.api.AddPushProxy:input_type -> global.PushProxyInfo
33, // 97: global.api.RemovePushProxy:input_type -> global.RequestWithId
42, // 98: global.api.UpdatePushProxy:input_type -> global.PushProxyInfo
76, // 99: global.api.GetRecording:input_type -> google.protobuf.Empty
76, // 100: global.api.GetTransformList:input_type -> google.protobuf.Empty
56, // 101: global.api.GetRecordList:input_type -> global.ReqRecordList
56, // 102: global.api.GetEventRecordList:input_type -> global.ReqRecordList
65, // 103: global.api.GetRecordCatalog:input_type -> global.ReqRecordCatalog
63, // 104: global.api.DeleteRecord:input_type -> global.ReqRecordDelete
14, // 105: global.api.SysInfo:output_type -> global.SysInfoResponse
0, // 106: global.api.DisabledPlugins:output_type -> global.DisabledPluginsResponse
11, // 107: global.api.Summary:output_type -> global.SummaryResponse
32, // 108: global.api.Shutdown:output_type -> global.SuccessResponse
32, // 109: global.api.Restart:output_type -> global.SuccessResponse
16, // 110: global.api.TaskTree:output_type -> global.TaskTreeResponse
32, // 111: global.api.StopTask:output_type -> global.SuccessResponse
32, // 112: global.api.RestartTask:output_type -> global.SuccessResponse
18, // 113: global.api.StreamList:output_type -> global.StreamListResponse
19, // 114: global.api.WaitList:output_type -> global.StreamWaitListResponse
21, // 115: global.api.StreamInfo:output_type -> global.StreamInfoResponse
32, // 116: global.api.PauseStream:output_type -> global.SuccessResponse
32, // 117: global.api.ResumeStream:output_type -> global.SuccessResponse
32, // 118: global.api.SetStreamSpeed:output_type -> global.SuccessResponse
32, // 119: global.api.SeekStream:output_type -> global.SuccessResponse
39, // 120: global.api.GetSubscribers:output_type -> global.SubscribersResponse
30, // 121: global.api.AudioTrackSnap:output_type -> global.TrackSnapShotResponse
30, // 122: global.api.VideoTrackSnap:output_type -> global.TrackSnapShotResponse
32, // 123: global.api.ChangeSubscribe:output_type -> global.SuccessResponse
46, // 124: global.api.GetStreamAlias:output_type -> global.StreamAliasListResponse
32, // 125: global.api.SetStreamAlias:output_type -> global.SuccessResponse
32, // 126: global.api.StopPublish:output_type -> global.SuccessResponse
32, // 127: global.api.StopSubscribe:output_type -> global.SuccessResponse
5, // 128: global.api.GetConfigFile:output_type -> global.GetConfigFileResponse
32, // 129: global.api.UpdateConfigFile:output_type -> global.SuccessResponse
6, // 130: global.api.GetConfig:output_type -> global.GetConfigResponse
6, // 131: global.api.GetFormily:output_type -> global.GetConfigResponse
40, // 132: global.api.GetPullProxyList:output_type -> global.PullProxyListResponse
32, // 133: global.api.AddPullProxy:output_type -> global.SuccessResponse
32, // 134: global.api.RemovePullProxy:output_type -> global.SuccessResponse
32, // 135: global.api.UpdatePullProxy:output_type -> global.SuccessResponse
43, // 136: global.api.GetPushProxyList:output_type -> global.PushProxyListResponse
32, // 137: global.api.AddPushProxy:output_type -> global.SuccessResponse
32, // 138: global.api.RemovePushProxy:output_type -> global.SuccessResponse
32, // 139: global.api.UpdatePushProxy:output_type -> global.SuccessResponse
50, // 140: global.api.GetRecording:output_type -> global.RecordingListResponse
55, // 141: global.api.GetTransformList:output_type -> global.TransformListResponse
59, // 142: global.api.GetRecordList:output_type -> global.RecordResponseList
60, // 143: global.api.GetEventRecordList:output_type -> global.EventRecordResponseList
62, // 144: global.api.GetRecordCatalog:output_type -> global.ResponseCatalog
64, // 145: global.api.DeleteRecord:output_type -> global.ResponseDelete
105, // [105:146] is the sub-list for method output_type
64, // [64:105] is the sub-list for method input_type
64, // [64:64] is the sub-list for extension type_name
64, // [64:64] is the sub-list for extension extendee
0, // [0:64] is the sub-list for field type_name
}
func init() { file_global_proto_init() }
@@ -5373,7 +5556,7 @@ func file_global_proto_init() {
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: unsafe.Slice(unsafe.StringData(file_global_proto_rawDesc), len(file_global_proto_rawDesc)),
NumEnums: 0,
NumMessages: 71,
NumMessages: 73,
NumExtensions: 0,
NumServices: 1,
},

View File

@@ -1708,6 +1708,96 @@ func local_request_Api_GetRecordList_0(ctx context.Context, marshaler runtime.Ma
}
var (
filter_Api_GetEventRecordList_0 = &utilities.DoubleArray{Encoding: map[string]int{"type": 0, "streamPath": 1}, Base: []int{1, 1, 2, 0, 0}, Check: []int{0, 1, 1, 2, 3}}
)
func request_Api_GetEventRecordList_0(ctx context.Context, marshaler runtime.Marshaler, client ApiClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq ReqRecordList
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["type"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "type")
}
protoReq.Type, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "type", err)
}
val, ok = pathParams["streamPath"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "streamPath")
}
protoReq.StreamPath, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "streamPath", err)
}
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Api_GetEventRecordList_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := client.GetEventRecordList(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
return msg, metadata, err
}
func local_request_Api_GetEventRecordList_0(ctx context.Context, marshaler runtime.Marshaler, server ApiServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq ReqRecordList
var metadata runtime.ServerMetadata
var (
val string
ok bool
err error
_ = err
)
val, ok = pathParams["type"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "type")
}
protoReq.Type, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "type", err)
}
val, ok = pathParams["streamPath"]
if !ok {
return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "streamPath")
}
protoReq.StreamPath, err = runtime.String(val)
if err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "streamPath", err)
}
if err := req.ParseForm(); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Api_GetEventRecordList_0); err != nil {
return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
}
msg, err := server.GetEventRecordList(ctx, &protoReq)
return msg, metadata, err
}
func request_Api_GetRecordCatalog_0(ctx context.Context, marshaler runtime.Marshaler, client ApiClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
var protoReq ReqRecordCatalog
var metadata runtime.ServerMetadata
@@ -2896,6 +2986,31 @@ func RegisterApiHandlerServer(ctx context.Context, mux *runtime.ServeMux, server
})
mux.Handle("GET", pattern_Api_GetEventRecordList_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
var stream runtime.ServerTransportStream
ctx = grpc.NewContextWithServerTransportStream(ctx, &stream)
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
var err error
var annotatedContext context.Context
annotatedContext, err = runtime.AnnotateIncomingContext(ctx, mux, req, "/global.Api/GetEventRecordList", runtime.WithHTTPPathPattern("/api/record/{type}/event/list/{streamPath=**}"))
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := local_request_Api_GetEventRecordList_0(annotatedContext, inboundMarshaler, server, req, pathParams)
md.HeaderMD, md.TrailerMD = metadata.Join(md.HeaderMD, stream.Header()), metadata.Join(md.TrailerMD, stream.Trailer())
annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md)
if err != nil {
runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err)
return
}
forward_Api_GetEventRecordList_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("GET", pattern_Api_GetRecordCatalog_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
@@ -3911,6 +4026,28 @@ func RegisterApiHandlerClient(ctx context.Context, mux *runtime.ServeMux, client
})
mux.Handle("GET", pattern_Api_GetEventRecordList_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
var err error
var annotatedContext context.Context
annotatedContext, err = runtime.AnnotateContext(ctx, mux, req, "/global.Api/GetEventRecordList", runtime.WithHTTPPathPattern("/api/record/{type}/event/list/{streamPath=**}"))
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_Api_GetEventRecordList_0(annotatedContext, inboundMarshaler, client, req, pathParams)
annotatedContext = runtime.NewServerMetadataContext(annotatedContext, md)
if err != nil {
runtime.HTTPError(annotatedContext, mux, outboundMarshaler, w, req, err)
return
}
forward_Api_GetEventRecordList_0(annotatedContext, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("GET", pattern_Api_GetRecordCatalog_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
@@ -4043,6 +4180,8 @@ var (
pattern_Api_GetRecordList_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 3, 0, 4, 1, 5, 4}, []string{"api", "record", "type", "list", "streamPath"}, ""))
pattern_Api_GetEventRecordList_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 2, 4, 3, 0, 4, 1, 5, 5}, []string{"api", "record", "type", "event", "list", "streamPath"}, ""))
pattern_Api_GetRecordCatalog_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3}, []string{"api", "record", "type", "catalog"}, ""))
pattern_Api_DeleteRecord_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 1, 0, 4, 1, 5, 2, 2, 3, 3, 0, 4, 1, 5, 4}, []string{"api", "record", "type", "delete", "streamPath"}, ""))
@@ -4133,6 +4272,8 @@ var (
forward_Api_GetRecordList_0 = runtime.ForwardResponseMessage
forward_Api_GetEventRecordList_0 = runtime.ForwardResponseMessage
forward_Api_GetRecordCatalog_0 = runtime.ForwardResponseMessage
forward_Api_DeleteRecord_0 = runtime.ForwardResponseMessage
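With the pattern and forwarder registered, the new event-record listing is reachable over plain HTTP through the gateway as well as over gRPC. A minimal caller sketch, assuming the gateway listens on localhost:8080 and that an mp4 recording exists under live/test (host, port, record type and streamPath are illustrative assumptions, not values from this change):

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Matches pattern_Api_GetEventRecordList_0: /api/record/{type}/event/list/{streamPath=**}
	// All concrete values below are placeholders for illustration only.
	resp, err := http.Get("http://localhost:8080/api/record/mp4/event/list/live/test?pageNum=1&pageSize=10")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(string(body)) // JSON-encoded EventRecordResponseList
}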

View File

@@ -224,11 +224,16 @@ service api {
get: "/api/transform/list"
};
}
rpc GetRecordList (ReqRecordList) returns (ResponseList) {
rpc GetRecordList (ReqRecordList) returns (RecordResponseList) {
option (google.api.http) = {
get: "/api/record/{type}/list/{streamPath=**}"
};
}
rpc GetEventRecordList (ReqRecordList) returns (EventRecordResponseList) {
option (google.api.http) = {
get: "/api/record/{type}/event/list/{streamPath=**}"
};
}
rpc GetRecordCatalog (ReqRecordCatalog) returns (ResponseCatalog) {
option (google.api.http) = {
get: "/api/record/{type}/catalog"
@@ -664,9 +669,8 @@ message ReqRecordList {
string end = 4;
uint32 pageNum = 5;
uint32 pageSize = 6;
string eventId = 7;
string type = 8;
string eventLevel = 9;
string type = 7;
string eventLevel = 8;
}
message RecordFile {
@@ -675,12 +679,21 @@ message RecordFile {
string streamPath = 3;
google.protobuf.Timestamp startTime = 4;
google.protobuf.Timestamp endTime = 5;
string eventLevel = 6;
string eventName = 7;
string eventDesc = 8;
}
message ResponseList {
message EventRecordFile {
uint32 id = 1;
string filePath = 2;
string streamPath = 3;
google.protobuf.Timestamp startTime = 4;
google.protobuf.Timestamp endTime = 5;
string eventId = 6;
string eventLevel = 7;
string eventName = 8;
string eventDesc = 9;
}
message RecordResponseList {
int32 code = 1;
string message = 2;
uint32 total = 3;
@@ -689,6 +702,15 @@ message ResponseList {
repeated RecordFile data = 6;
}
message EventRecordResponseList {
int32 code = 1;
string message = 2;
uint32 total = 3;
uint32 pageNum = 4;
uint32 pageSize = 5;
repeated EventRecordFile data = 6;
}
message Catalog {
string streamPath = 1;
uint32 count = 2;
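A sketch of how a server implementation might populate the new event-record messages, assuming the Go types generated from this proto (all concrete values are illustrative):

import (
	"time"

	"google.golang.org/protobuf/types/known/timestamppb"
	pb "m7s.live/v5/pb"
)

// buildEventRecordResponse shows the shape of EventRecordResponseList; values are hypothetical.
func buildEventRecordResponse() *pb.EventRecordResponseList {
	now := time.Now()
	return &pb.EventRecordResponseList{
		Code:     0,
		Message:  "ok",
		Total:    1,
		PageNum:  1,
		PageSize: 10,
		Data: []*pb.EventRecordFile{{
			Id:         1,
			FilePath:   "record/live/demo/0.mp4", // hypothetical file path
			StreamPath: "live/demo",
			StartTime:  timestamppb.New(now.Add(-time.Minute)),
			EndTime:    timestamppb.New(now),
			EventId:    "evt-001",   // hypothetical event metadata
			EventLevel: "high",
			EventName:  "motion",
			EventDesc:  "motion detected",
		}},
	}
}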

View File

@@ -20,46 +20,47 @@ import (
const _ = grpc.SupportPackageIsVersion9
const (
Api_SysInfo_FullMethodName = "/global.api/SysInfo"
Api_DisabledPlugins_FullMethodName = "/global.api/DisabledPlugins"
Api_Summary_FullMethodName = "/global.api/Summary"
Api_Shutdown_FullMethodName = "/global.api/Shutdown"
Api_Restart_FullMethodName = "/global.api/Restart"
Api_TaskTree_FullMethodName = "/global.api/TaskTree"
Api_StopTask_FullMethodName = "/global.api/StopTask"
Api_RestartTask_FullMethodName = "/global.api/RestartTask"
Api_StreamList_FullMethodName = "/global.api/StreamList"
Api_WaitList_FullMethodName = "/global.api/WaitList"
Api_StreamInfo_FullMethodName = "/global.api/StreamInfo"
Api_PauseStream_FullMethodName = "/global.api/PauseStream"
Api_ResumeStream_FullMethodName = "/global.api/ResumeStream"
Api_SetStreamSpeed_FullMethodName = "/global.api/SetStreamSpeed"
Api_SeekStream_FullMethodName = "/global.api/SeekStream"
Api_GetSubscribers_FullMethodName = "/global.api/GetSubscribers"
Api_AudioTrackSnap_FullMethodName = "/global.api/AudioTrackSnap"
Api_VideoTrackSnap_FullMethodName = "/global.api/VideoTrackSnap"
Api_ChangeSubscribe_FullMethodName = "/global.api/ChangeSubscribe"
Api_GetStreamAlias_FullMethodName = "/global.api/GetStreamAlias"
Api_SetStreamAlias_FullMethodName = "/global.api/SetStreamAlias"
Api_StopPublish_FullMethodName = "/global.api/StopPublish"
Api_StopSubscribe_FullMethodName = "/global.api/StopSubscribe"
Api_GetConfigFile_FullMethodName = "/global.api/GetConfigFile"
Api_UpdateConfigFile_FullMethodName = "/global.api/UpdateConfigFile"
Api_GetConfig_FullMethodName = "/global.api/GetConfig"
Api_GetFormily_FullMethodName = "/global.api/GetFormily"
Api_GetPullProxyList_FullMethodName = "/global.api/GetPullProxyList"
Api_AddPullProxy_FullMethodName = "/global.api/AddPullProxy"
Api_RemovePullProxy_FullMethodName = "/global.api/RemovePullProxy"
Api_UpdatePullProxy_FullMethodName = "/global.api/UpdatePullProxy"
Api_GetPushProxyList_FullMethodName = "/global.api/GetPushProxyList"
Api_AddPushProxy_FullMethodName = "/global.api/AddPushProxy"
Api_RemovePushProxy_FullMethodName = "/global.api/RemovePushProxy"
Api_UpdatePushProxy_FullMethodName = "/global.api/UpdatePushProxy"
Api_GetRecording_FullMethodName = "/global.api/GetRecording"
Api_GetTransformList_FullMethodName = "/global.api/GetTransformList"
Api_GetRecordList_FullMethodName = "/global.api/GetRecordList"
Api_GetRecordCatalog_FullMethodName = "/global.api/GetRecordCatalog"
Api_DeleteRecord_FullMethodName = "/global.api/DeleteRecord"
Api_SysInfo_FullMethodName = "/global.api/SysInfo"
Api_DisabledPlugins_FullMethodName = "/global.api/DisabledPlugins"
Api_Summary_FullMethodName = "/global.api/Summary"
Api_Shutdown_FullMethodName = "/global.api/Shutdown"
Api_Restart_FullMethodName = "/global.api/Restart"
Api_TaskTree_FullMethodName = "/global.api/TaskTree"
Api_StopTask_FullMethodName = "/global.api/StopTask"
Api_RestartTask_FullMethodName = "/global.api/RestartTask"
Api_StreamList_FullMethodName = "/global.api/StreamList"
Api_WaitList_FullMethodName = "/global.api/WaitList"
Api_StreamInfo_FullMethodName = "/global.api/StreamInfo"
Api_PauseStream_FullMethodName = "/global.api/PauseStream"
Api_ResumeStream_FullMethodName = "/global.api/ResumeStream"
Api_SetStreamSpeed_FullMethodName = "/global.api/SetStreamSpeed"
Api_SeekStream_FullMethodName = "/global.api/SeekStream"
Api_GetSubscribers_FullMethodName = "/global.api/GetSubscribers"
Api_AudioTrackSnap_FullMethodName = "/global.api/AudioTrackSnap"
Api_VideoTrackSnap_FullMethodName = "/global.api/VideoTrackSnap"
Api_ChangeSubscribe_FullMethodName = "/global.api/ChangeSubscribe"
Api_GetStreamAlias_FullMethodName = "/global.api/GetStreamAlias"
Api_SetStreamAlias_FullMethodName = "/global.api/SetStreamAlias"
Api_StopPublish_FullMethodName = "/global.api/StopPublish"
Api_StopSubscribe_FullMethodName = "/global.api/StopSubscribe"
Api_GetConfigFile_FullMethodName = "/global.api/GetConfigFile"
Api_UpdateConfigFile_FullMethodName = "/global.api/UpdateConfigFile"
Api_GetConfig_FullMethodName = "/global.api/GetConfig"
Api_GetFormily_FullMethodName = "/global.api/GetFormily"
Api_GetPullProxyList_FullMethodName = "/global.api/GetPullProxyList"
Api_AddPullProxy_FullMethodName = "/global.api/AddPullProxy"
Api_RemovePullProxy_FullMethodName = "/global.api/RemovePullProxy"
Api_UpdatePullProxy_FullMethodName = "/global.api/UpdatePullProxy"
Api_GetPushProxyList_FullMethodName = "/global.api/GetPushProxyList"
Api_AddPushProxy_FullMethodName = "/global.api/AddPushProxy"
Api_RemovePushProxy_FullMethodName = "/global.api/RemovePushProxy"
Api_UpdatePushProxy_FullMethodName = "/global.api/UpdatePushProxy"
Api_GetRecording_FullMethodName = "/global.api/GetRecording"
Api_GetTransformList_FullMethodName = "/global.api/GetTransformList"
Api_GetRecordList_FullMethodName = "/global.api/GetRecordList"
Api_GetEventRecordList_FullMethodName = "/global.api/GetEventRecordList"
Api_GetRecordCatalog_FullMethodName = "/global.api/GetRecordCatalog"
Api_DeleteRecord_FullMethodName = "/global.api/DeleteRecord"
)
// ApiClient is the client API for Api service.
@@ -103,7 +104,8 @@ type ApiClient interface {
UpdatePushProxy(ctx context.Context, in *PushProxyInfo, opts ...grpc.CallOption) (*SuccessResponse, error)
GetRecording(ctx context.Context, in *emptypb.Empty, opts ...grpc.CallOption) (*RecordingListResponse, error)
GetTransformList(ctx context.Context, in *emptypb.Empty, opts ...grpc.CallOption) (*TransformListResponse, error)
GetRecordList(ctx context.Context, in *ReqRecordList, opts ...grpc.CallOption) (*ResponseList, error)
GetRecordList(ctx context.Context, in *ReqRecordList, opts ...grpc.CallOption) (*RecordResponseList, error)
GetEventRecordList(ctx context.Context, in *ReqRecordList, opts ...grpc.CallOption) (*EventRecordResponseList, error)
GetRecordCatalog(ctx context.Context, in *ReqRecordCatalog, opts ...grpc.CallOption) (*ResponseCatalog, error)
DeleteRecord(ctx context.Context, in *ReqRecordDelete, opts ...grpc.CallOption) (*ResponseDelete, error)
}
@@ -486,9 +488,9 @@ func (c *apiClient) GetTransformList(ctx context.Context, in *emptypb.Empty, opt
return out, nil
}
func (c *apiClient) GetRecordList(ctx context.Context, in *ReqRecordList, opts ...grpc.CallOption) (*ResponseList, error) {
func (c *apiClient) GetRecordList(ctx context.Context, in *ReqRecordList, opts ...grpc.CallOption) (*RecordResponseList, error) {
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(ResponseList)
out := new(RecordResponseList)
err := c.cc.Invoke(ctx, Api_GetRecordList_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
@@ -496,6 +498,16 @@ func (c *apiClient) GetRecordList(ctx context.Context, in *ReqRecordList, opts .
return out, nil
}
func (c *apiClient) GetEventRecordList(ctx context.Context, in *ReqRecordList, opts ...grpc.CallOption) (*EventRecordResponseList, error) {
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(EventRecordResponseList)
err := c.cc.Invoke(ctx, Api_GetEventRecordList_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
return out, nil
}
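A minimal client-side sketch of invoking the new RPC through the generated stub, assuming a reachable gRPC endpoint (the address, stream path and record type are placeholders):

import (
	"context"
	"log"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"
	pb "m7s.live/v5/pb"
)

func listEventRecords() {
	// Illustrative only: target address and request values are assumptions.
	conn, err := grpc.NewClient("localhost:50051", grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()
	client := pb.NewApiClient(conn)
	resp, err := client.GetEventRecordList(context.Background(), &pb.ReqRecordList{
		StreamPath: "live/demo",
		Type:       "mp4",
		PageNum:    1,
		PageSize:   10,
	})
	if err != nil {
		log.Fatal(err)
	}
	for _, f := range resp.Data {
		log.Println(f.FilePath, f.EventName, f.EventLevel)
	}
}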
func (c *apiClient) GetRecordCatalog(ctx context.Context, in *ReqRecordCatalog, opts ...grpc.CallOption) (*ResponseCatalog, error) {
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(ResponseCatalog)
@@ -557,7 +569,8 @@ type ApiServer interface {
UpdatePushProxy(context.Context, *PushProxyInfo) (*SuccessResponse, error)
GetRecording(context.Context, *emptypb.Empty) (*RecordingListResponse, error)
GetTransformList(context.Context, *emptypb.Empty) (*TransformListResponse, error)
GetRecordList(context.Context, *ReqRecordList) (*ResponseList, error)
GetRecordList(context.Context, *ReqRecordList) (*RecordResponseList, error)
GetEventRecordList(context.Context, *ReqRecordList) (*EventRecordResponseList, error)
GetRecordCatalog(context.Context, *ReqRecordCatalog) (*ResponseCatalog, error)
DeleteRecord(context.Context, *ReqRecordDelete) (*ResponseDelete, error)
mustEmbedUnimplementedApiServer()
@@ -681,9 +694,12 @@ func (UnimplementedApiServer) GetRecording(context.Context, *emptypb.Empty) (*Re
func (UnimplementedApiServer) GetTransformList(context.Context, *emptypb.Empty) (*TransformListResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method GetTransformList not implemented")
}
func (UnimplementedApiServer) GetRecordList(context.Context, *ReqRecordList) (*ResponseList, error) {
func (UnimplementedApiServer) GetRecordList(context.Context, *ReqRecordList) (*RecordResponseList, error) {
return nil, status.Errorf(codes.Unimplemented, "method GetRecordList not implemented")
}
func (UnimplementedApiServer) GetEventRecordList(context.Context, *ReqRecordList) (*EventRecordResponseList, error) {
return nil, status.Errorf(codes.Unimplemented, "method GetEventRecordList not implemented")
}
func (UnimplementedApiServer) GetRecordCatalog(context.Context, *ReqRecordCatalog) (*ResponseCatalog, error) {
return nil, status.Errorf(codes.Unimplemented, "method GetRecordCatalog not implemented")
}
@@ -1395,6 +1411,24 @@ func _Api_GetRecordList_Handler(srv interface{}, ctx context.Context, dec func(i
return interceptor(ctx, in, info, handler)
}
func _Api_GetEventRecordList_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(ReqRecordList)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(ApiServer).GetEventRecordList(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: Api_GetEventRecordList_FullMethodName,
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ApiServer).GetEventRecordList(ctx, req.(*ReqRecordList))
}
return interceptor(ctx, in, info, handler)
}
func _Api_GetRecordCatalog_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(ReqRecordCatalog)
if err := dec(in); err != nil {
@@ -1590,6 +1624,10 @@ var Api_ServiceDesc = grpc.ServiceDesc{
MethodName: "GetRecordList",
Handler: _Api_GetRecordList_Handler,
},
{
MethodName: "GetEventRecordList",
Handler: _Api_GetEventRecordList_Handler,
},
{
MethodName: "GetRecordCatalog",
Handler: _Api_GetRecordCatalog_Handler,

View File

@@ -65,8 +65,6 @@ type (
}
)
var _ IAVFrame = (*AnnexB)(nil)
func (frame *AVFrame) Clone() {
}

74
pkg/avframe_convert.go Normal file
View File

@@ -0,0 +1,74 @@
package pkg
import (
"reflect"
"m7s.live/v5/pkg/codec"
"m7s.live/v5/pkg/util"
)
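// AVFrameConvert converts frames between two AVTrack formats; the target frame
// type T (an IAVFrame implementation such as AnnexB) determines the output container.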
type AVFrameConvert[T IAVFrame] struct {
FromTrack, ToTrack *AVTrack
lastFromCodecCtx codec.ICodecCtx
}
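// NewAVFrameConvert builds a converter between fromTrack and toTrack; nil tracks are
// replaced with single-slot placeholder tracks, and for a nil toTrack the target frame
// type is recorded via reflection so output frames can be allocated later.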
func NewAVFrameConvert[T IAVFrame](fromTrack *AVTrack, toTrack *AVTrack) *AVFrameConvert[T] {
ret := &AVFrameConvert[T]{}
ret.FromTrack = fromTrack
ret.ToTrack = toTrack
if ret.FromTrack == nil {
ret.FromTrack = &AVTrack{
RingWriter: &RingWriter{
Ring: util.NewRing[AVFrame](1),
},
}
}
if ret.ToTrack == nil {
ret.ToTrack = &AVTrack{
RingWriter: &RingWriter{
Ring: util.NewRing[AVFrame](1),
},
}
var to T
ret.ToTrack.FrameType = reflect.TypeOf(to).Elem()
}
return ret
}
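// ConvertFromAVFrame demuxes an AVFrame from the source track against the source codec
// context and re-muxes it into a new frame of type T, lazily deriving the target codec
// context on first use.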
func (c *AVFrameConvert[T]) ConvertFromAVFrame(avFrame *AVFrame) (to T, err error) {
to = reflect.New(c.ToTrack.FrameType).Interface().(T)
if c.ToTrack.ICodecCtx == nil {
if c.ToTrack.ICodecCtx, c.ToTrack.SequenceFrame, err = to.ConvertCtx(c.FromTrack.ICodecCtx); err != nil {
return
}
}
if err = avFrame.Demux(c.FromTrack.ICodecCtx); err != nil {
return
}
to.SetAllocator(avFrame.Wraps[0].GetAllocator())
to.Mux(c.ToTrack.ICodecCtx, avFrame)
return
}
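// Convert converts an arbitrary IAVFrame into type T: it parses the source track when it
// has not been fed by a publisher, refreshes the target codec context whenever the source
// context changes, then demuxes the input and muxes the result into a new frame of type T.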
func (c *AVFrameConvert[T]) Convert(frame IAVFrame) (to T, err error) {
to = reflect.New(c.ToTrack.FrameType).Interface().(T)
// Not From Publisher
if c.FromTrack.LastValue == nil {
err = frame.Parse(c.FromTrack)
if err != nil {
return
}
}
if c.ToTrack.ICodecCtx == nil || c.lastFromCodecCtx != c.FromTrack.ICodecCtx {
if c.ToTrack.ICodecCtx, c.ToTrack.SequenceFrame, err = to.ConvertCtx(c.FromTrack.ICodecCtx); err != nil {
return
}
}
c.lastFromCodecCtx = c.FromTrack.ICodecCtx
if c.FromTrack.Value.Raw, err = frame.Demux(c.FromTrack.ICodecCtx); err != nil {
return
}
to.SetAllocator(frame.GetAllocator())
to.Mux(c.ToTrack.ICodecCtx, &c.FromTrack.Value)
return
}
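A minimal usage sketch of the converter, assuming a source *AVTrack whose codec context is already initialized and an AVFrame read from it (videoTrack, frame and the choice of AnnexB as the target are illustrative):

// Illustrative only: convert one frame from a video track into AnnexB form.
converter := pkg.NewAVFrameConvert[*pkg.AnnexB](videoTrack, nil) // videoTrack: *pkg.AVTrack
annexb, err := converter.ConvertFromAVFrame(frame)               // frame: *pkg.AVFrame carrying wrapped data
if err != nil {
	// handle the conversion error
}
_ = annexb // hand the AnnexB frame to whatever sink consumes raw NALUs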

View File

@@ -9,14 +9,11 @@ import (
// User represents a user in the system
type User struct {
ID uint `gorm:"primarykey"`
CreatedAt time.Time
UpdatedAt time.Time
DeletedAt gorm.DeletedAt `gorm:"index"`
Username string `gorm:"uniqueIndex;size:64"`
Password string `gorm:"size:60"` // bcrypt hash
Role string `gorm:"size:20;default:'user'"` // admin or user
LastLogin time.Time `gorm:"type:datetime;default:CURRENT_TIMESTAMP"`
gorm.Model
Username string `gorm:"uniqueIndex;size:64"`
Password string `gorm:"size:60"` // bcrypt hash
Role string `gorm:"size:20;default:'user'"` // admin or user
LastLogin time.Time `gorm:"type:timestamp;default:CURRENT_TIMESTAMP"`
}
// BeforeCreate hook to hash password before saving
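Since the model now embeds gorm.Model and stores LastLogin as a plain timestamp, schema migration stays a one-liner; a short sketch, assuming an already-opened *gorm.DB (the Postgres driver is only an example of a compatible backend):

// Illustrative only: create or update the users table after the model change.
if err := db.AutoMigrate(&User{}); err != nil { // db: *gorm.DB, e.g. opened via gorm.Open(postgres.Open(dsn))
	log.Fatal(err)
}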

View File

@@ -13,7 +13,6 @@ type (
Port struct {
Protocol string
Ports [2]int
Map [2]int // mapped port range, typically used for NAT or port forwarding
}
IPort interface {
IsTCP() bool
@@ -23,23 +22,10 @@ type (
)
func (p Port) String() string {
var result string
if p.Ports[0] == p.Ports[1] {
result = p.Protocol + ":" + strconv.Itoa(p.Ports[0])
} else {
result = p.Protocol + ":" + strconv.Itoa(p.Ports[0]) + "-" + strconv.Itoa(p.Ports[1])
return p.Protocol + ":" + strconv.Itoa(p.Ports[0])
}
// if a port mapping is configured, append the mapping info
if p.HasMapping() {
if p.Map[0] == p.Map[1] {
result += ":" + strconv.Itoa(p.Map[0])
} else {
result += ":" + strconv.Itoa(p.Map[0]) + "-" + strconv.Itoa(p.Map[1])
}
}
return result
return p.Protocol + ":" + strconv.Itoa(p.Ports[0]) + "-" + strconv.Itoa(p.Ports[1])
}
func (p Port) IsTCP() bool {
@@ -54,36 +40,6 @@ func (p Port) IsRange() bool {
return p.Ports[0] != p.Ports[1]
}
func (p Port) HasMapping() bool {
return p.Map[0] > 0 || p.Map[1] > 0
}
func (p Port) IsRangeMapping() bool {
return p.HasMapping() && p.Map[0] != p.Map[1]
}
// ParsePort2 parses a port configuration string and returns a typed port instance.
// Depending on the protocol and whether a range is given it returns:
// - TCP single port: TCPPort
// - TCP port range: TCPRangePort
// - UDP single port: UDPPort
// - UDP port range: UDPRangePort
//
// Parameters:
//
// conf - port configuration string, format: protocol:port or protocol:port1-port2
//
// Returns:
//
// ret - port instance (TCPPort/UDPPort/TCPRangePort/UDPRangePort)
// err - parse error
//
// Examples:
//
// ParsePort2("tcp:8080") // returns TCPPort(8080)
// ParsePort2("tcp:8080-8090") // returns TCPRangePort([2]int{8080, 8090})
// ParsePort2("udp:5000") // returns UDPPort(5000)
// ParsePort2("udp:5000-5010") // returns UDPRangePort([2]int{5000, 5010})
func ParsePort2(conf string) (ret any, err error) {
var port Port
port, err = ParsePort(conf)
@@ -102,84 +58,10 @@ func ParsePort2(conf string) (ret any, err error) {
return UDPPort(port.Ports[0]), nil
}
// ParsePort parses a port configuration string into a Port struct.
// It supports a protocol prefix, a single port or port range, and port mappings.
//
// Parameters:
//
// conf - port configuration string, in one of the formats:
// - "protocol:port" single port, e.g. "tcp:8080"
// - "protocol:port1-port2" port range, e.g. "tcp:8080-8090"
// - "protocol:port:mapPort" single port mapped to a single port, e.g. "tcp:8080:9090"
// - "protocol:port:mapPort1-mapPort2" single port mapped to a port range, e.g. "tcp:8080:9000-9010"
// - "protocol:port1-port2:mapPort1-mapPort2" port range mapped to a port range, e.g. "tcp:8080-8090:9000-9010"
//
// Returns:
//
// ret - Port struct holding the protocol, ports and mapped ports
// err - parse error
//
// Notes:
// - if min > max in a range, the two values are swapped automatically
// - for a single port, Ports[0] and Ports[1] hold the same value
// - for a mapping, Map[0] and Map[1] hold the mapped target port range
// - for a single mapped port, Map[0] and Map[1] hold the same value
//
// Examples:
//
// ParsePort("tcp:8080") // Port{Protocol:"tcp", Ports:[2]int{8080, 8080}, Map:[2]int{0, 0}}
// ParsePort("tcp:8080-8090") // Port{Protocol:"tcp", Ports:[2]int{8080, 8090}, Map:[2]int{0, 0}}
// ParsePort("tcp:8080:9090") // Port{Protocol:"tcp", Ports:[2]int{8080, 8080}, Map:[2]int{9090, 9090}}
// ParsePort("tcp:8080:9000-9010") // Port{Protocol:"tcp", Ports:[2]int{8080, 8080}, Map:[2]int{9000, 9010}}
// ParsePort("tcp:8080-8090:9000-9010") // Port{Protocol:"tcp", Ports:[2]int{8080, 8090}, Map:[2]int{9000, 9010}}
// ParsePort("udp:5000") // Port{Protocol:"udp", Ports:[2]int{5000, 5000}, Map:[2]int{0, 0}}
// ParsePort("udp:5010-5000") // Port{Protocol:"udp", Ports:[2]int{5000, 5010}, Map:[2]int{0, 0}}
func ParsePort(conf string) (ret Port, err error) {
var port, mapPort string
var port string
var min, max int
// split on colons; port mapping is supported
parts := strings.Split(conf, ":")
if len(parts) < 2 || len(parts) > 3 {
err = strconv.ErrSyntax
return
}
ret.Protocol = parts[0]
port = parts[1]
// handle the port mapping
if len(parts) == 3 {
mapPort = parts[2]
// parse the mapped port, which may be a single port or a port range
if mapRange := strings.Split(mapPort, "-"); len(mapRange) == 2 {
// mapped port range
var mapMin, mapMax int
mapMin, err = strconv.Atoi(mapRange[0])
if err != nil {
return
}
mapMax, err = strconv.Atoi(mapRange[1])
if err != nil {
return
}
if mapMin < mapMax {
ret.Map[0], ret.Map[1] = mapMin, mapMax
} else {
ret.Map[0], ret.Map[1] = mapMax, mapMin
}
} else {
// single mapped port
var mapPortNum int
mapPortNum, err = strconv.Atoi(mapPort)
if err != nil {
return
}
ret.Map[0], ret.Map[1] = mapPortNum, mapPortNum
}
}
// handle the port range
ret.Protocol, port, _ = strings.Cut(conf, ":")
if r := strings.Split(port, "-"); len(r) == 2 {
min, err = strconv.Atoi(r[0])
if err != nil {
@@ -194,12 +76,7 @@ func ParsePort(conf string) (ret Port, err error) {
} else {
ret.Ports[0], ret.Ports[1] = max, min
}
} else {
var p int
p, err = strconv.Atoi(port)
if err != nil {
return
}
} else if p, err := strconv.Atoi(port); err == nil {
ret.Ports[0], ret.Ports[1] = p, p
}
return
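After this simplification ParsePort only understands a protocol prefix followed by a single port or a port range; a short sketch of the surviving forms (values are illustrative, error handling elided):

// Illustrative only: the two input shapes ParsePort still accepts.
single, _ := ParsePort("tcp:8080")      // Port{Protocol: "tcp", Ports: [2]int{8080, 8080}}
ranged, _ := ParsePort("udp:5000-5010") // Port{Protocol: "udp", Ports: [2]int{5000, 5010}}
fmt.Println(single.String(), ranged.String()) // tcp:8080 udp:5000-5010

Note that the old mapping form such as "tcp:8080:9090" is no longer split into Map: with strings.Cut the trailing ":9090" stays in the port segment, the Atoi fails, and the ports are left zeroed.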

View File

@@ -1,370 +0,0 @@
package pkg
import (
"testing"
)
func TestParsePort(t *testing.T) {
tests := []struct {
name string
input string
expected Port
hasError bool
}{
{
name: "TCP单端口",
input: "tcp:8080",
expected: Port{
Protocol: "tcp",
Ports: [2]int{8080, 8080},
Map: [2]int{0, 0},
},
hasError: false,
},
{
name: "TCP端口范围",
input: "tcp:8080-8090",
expected: Port{
Protocol: "tcp",
Ports: [2]int{8080, 8090},
Map: [2]int{0, 0},
},
hasError: false,
},
{
name: "TCP端口范围反序",
input: "tcp:8090-8080",
expected: Port{
Protocol: "tcp",
Ports: [2]int{8080, 8090},
Map: [2]int{0, 0},
},
hasError: false,
},
{
name: "TCP单端口映射到单端口",
input: "tcp:8080:9090",
expected: Port{
Protocol: "tcp",
Ports: [2]int{8080, 8080},
Map: [2]int{9090, 9090},
},
hasError: false,
},
{
name: "TCP单端口映射到端口范围",
input: "tcp:8080:9000-9010",
expected: Port{
Protocol: "tcp",
Ports: [2]int{8080, 8080},
Map: [2]int{9000, 9010},
},
hasError: false,
},
{
name: "TCP端口范围映射到端口范围",
input: "tcp:8080-8090:9000-9010",
expected: Port{
Protocol: "tcp",
Ports: [2]int{8080, 8090},
Map: [2]int{9000, 9010},
},
hasError: false,
},
{
name: "UDP单端口",
input: "udp:5000",
expected: Port{
Protocol: "udp",
Ports: [2]int{5000, 5000},
Map: [2]int{0, 0},
},
hasError: false,
},
{
name: "UDP端口范围",
input: "udp:5000-5010",
expected: Port{
Protocol: "udp",
Ports: [2]int{5000, 5010},
Map: [2]int{0, 0},
},
hasError: false,
},
{
name: "UDP端口映射",
input: "udp:5000:6000",
expected: Port{
Protocol: "udp",
Ports: [2]int{5000, 5000},
Map: [2]int{6000, 6000},
},
hasError: false,
},
{
name: "UDP端口范围映射映射范围反序",
input: "udp:5000-5010:6010-6000",
expected: Port{
Protocol: "udp",
Ports: [2]int{5000, 5010},
Map: [2]int{6000, 6010},
},
hasError: false,
},
// error cases
{
name: "缺少协议",
input: "8080",
expected: Port{},
hasError: true,
},
{
name: "过多冒号",
input: "tcp:8080:9090:extra",
expected: Port{},
hasError: true,
},
{
name: "无效端口号",
input: "tcp:abc",
expected: Port{},
hasError: true,
},
{
name: "无效映射端口号",
input: "tcp:8080:abc",
expected: Port{},
hasError: true,
},
{
name: "无效端口范围",
input: "tcp:8080-abc",
expected: Port{},
hasError: true,
},
{
name: "无效映射端口范围",
input: "tcp:8080:9000-abc",
expected: Port{},
hasError: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result, err := ParsePort(tt.input)
if tt.hasError {
if err == nil {
t.Errorf("期望有错误,但没有错误")
}
return
}
if err != nil {
t.Errorf("意外的错误: %v", err)
return
}
if result.Protocol != tt.expected.Protocol {
t.Errorf("协议不匹配: 期望 %s, 得到 %s", tt.expected.Protocol, result.Protocol)
}
if result.Ports != tt.expected.Ports {
t.Errorf("端口不匹配: 期望 %v, 得到 %v", tt.expected.Ports, result.Ports)
}
if result.Map != tt.expected.Map {
t.Errorf("映射端口不匹配: 期望 %v, 得到 %v", tt.expected.Map, result.Map)
}
})
}
}
func TestPortMethods(t *testing.T) {
tests := []struct {
name string
port Port
expectTCP bool
expectUDP bool
expectRange bool
expectMapping bool
expectRangeMap bool
expectString string
}{
{
name: "TCP单端口",
port: Port{
Protocol: "tcp",
Ports: [2]int{8080, 8080},
Map: [2]int{0, 0},
},
expectTCP: true,
expectUDP: false,
expectRange: false,
expectMapping: false,
expectRangeMap: false,
expectString: "tcp:8080",
},
{
name: "TCP端口范围",
port: Port{
Protocol: "tcp",
Ports: [2]int{8080, 8090},
Map: [2]int{0, 0},
},
expectTCP: true,
expectUDP: false,
expectRange: true,
expectMapping: false,
expectRangeMap: false,
expectString: "tcp:8080-8090",
},
{
name: "TCP单端口映射",
port: Port{
Protocol: "tcp",
Ports: [2]int{8080, 8080},
Map: [2]int{9090, 9090},
},
expectTCP: true,
expectUDP: false,
expectRange: false,
expectMapping: true,
expectRangeMap: false,
expectString: "tcp:8080:9090",
},
{
name: "TCP端口范围映射",
port: Port{
Protocol: "tcp",
Ports: [2]int{8080, 8090},
Map: [2]int{9000, 9010},
},
expectTCP: true,
expectUDP: false,
expectRange: true,
expectMapping: true,
expectRangeMap: true,
expectString: "tcp:8080-8090:9000-9010",
},
{
name: "UDP单端口映射到端口范围",
port: Port{
Protocol: "udp",
Ports: [2]int{5000, 5000},
Map: [2]int{6000, 6010},
},
expectTCP: false,
expectUDP: true,
expectRange: false,
expectMapping: true,
expectRangeMap: true,
expectString: "udp:5000:6000-6010",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if tt.port.IsTCP() != tt.expectTCP {
t.Errorf("IsTCP(): 期望 %v, 得到 %v", tt.expectTCP, tt.port.IsTCP())
}
if tt.port.IsUDP() != tt.expectUDP {
t.Errorf("IsUDP(): 期望 %v, 得到 %v", tt.expectUDP, tt.port.IsUDP())
}
if tt.port.IsRange() != tt.expectRange {
t.Errorf("IsRange(): 期望 %v, 得到 %v", tt.expectRange, tt.port.IsRange())
}
if tt.port.HasMapping() != tt.expectMapping {
t.Errorf("HasMapping(): 期望 %v, 得到 %v", tt.expectMapping, tt.port.HasMapping())
}
if tt.port.IsRangeMapping() != tt.expectRangeMap {
t.Errorf("IsRangeMapping(): 期望 %v, 得到 %v", tt.expectRangeMap, tt.port.IsRangeMapping())
}
if tt.port.String() != tt.expectString {
t.Errorf("String(): 期望 %s, 得到 %s", tt.expectString, tt.port.String())
}
})
}
}
func TestParsePort2(t *testing.T) {
tests := []struct {
name string
input string
expectedType string
hasError bool
}{
{
name: "TCP单端口",
input: "tcp:8080",
expectedType: "TCPPort",
hasError: false,
},
{
name: "TCP端口范围",
input: "tcp:8080-8090",
expectedType: "TCPRangePort",
hasError: false,
},
{
name: "UDP单端口",
input: "udp:5000",
expectedType: "UDPPort",
hasError: false,
},
{
name: "UDP端口范围",
input: "udp:5000-5010",
expectedType: "UDPRangePort",
hasError: false,
},
{
name: "无效输入",
input: "invalid",
hasError: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result, err := ParsePort2(tt.input)
if tt.hasError {
if err == nil {
t.Errorf("期望有错误,但没有错误")
}
return
}
if err != nil {
t.Errorf("意外的错误: %v", err)
return
}
switch tt.expectedType {
case "TCPPort":
if _, ok := result.(TCPPort); !ok {
t.Errorf("期望类型 TCPPort, 得到 %T", result)
}
case "TCPRangePort":
if _, ok := result.(TCPRangePort); !ok {
t.Errorf("期望类型 TCPRangePort, 得到 %T", result)
}
case "UDPPort":
if _, ok := result.(UDPPort); !ok {
t.Errorf("期望类型 UDPPort, 得到 %T", result)
}
case "UDPRangePort":
if _, ok := result.(UDPRangePort); !ok {
t.Errorf("期望类型 UDPRangePort, 得到 %T", result)
}
}
})
}
}

View File

@@ -9,7 +9,7 @@ import (
flvpb "m7s.live/v5/plugin/flv/pb"
)
func (p *FLVPlugin) List(ctx context.Context, req *flvpb.ReqRecordList) (resp *pb.ResponseList, err error) {
func (p *FLVPlugin) List(ctx context.Context, req *flvpb.ReqRecordList) (resp *pb.RecordResponseList, err error) {
globalReq := &pb.ReqRecordList{
StreamPath: req.StreamPath,
Range: req.Range,
@@ -17,7 +17,6 @@ func (p *FLVPlugin) List(ctx context.Context, req *flvpb.ReqRecordList) (resp *p
End: req.End,
PageNum: req.PageNum,
PageSize: req.PageSize,
Mode: req.Mode,
Type: "flv",
}
return p.Server.GetRecordList(ctx, globalReq)

View File

@@ -12,11 +12,9 @@ import (
"time"
m7s "m7s.live/v5"
codec "m7s.live/v5/pkg/codec"
"m7s.live/v5/pkg/util"
flv "m7s.live/v5/plugin/flv/pkg"
mp4 "m7s.live/v5/plugin/mp4/pkg"
"m7s.live/v5/plugin/mp4/pkg/box"
rtmp "m7s.live/v5/plugin/rtmp/pkg"
)
@@ -198,31 +196,51 @@ func (plugin *FLVPlugin) processMp4ToFlv(w http.ResponseWriter, r *http.Request,
})
}
// create a DemuxerRange to demux the MP4
demuxer := &mp4.DemuxerRange{
StartTime: params.startTime,
EndTime: params.endTime,
Streams: mp4Streams,
// create a DemuxerConverterRange to demux the MP4 and convert the samples
demuxer := &mp4.DemuxerConverterRange[*rtmp.RTMPAudio, *rtmp.RTMPVideo]{
DemuxerRange: mp4.DemuxerRange{
StartTime: params.startTime,
EndTime: params.endTime,
Streams: mp4Streams,
Logger: plugin.Logger.With("demuxer", "mp4_flv"),
},
}
// create the FLV writer state
flvWriter := &flvMp4Writer{
FlvWriter: flv.NewFlvWriter(w),
plugin: plugin,
hasWritten: false,
}
// set the callback functions
demuxer.OnVideoExtraData = flvWriter.onVideoExtraData
demuxer.OnAudioExtraData = flvWriter.onAudioExtraData
demuxer.OnVideoSample = flvWriter.onVideoSample
demuxer.OnAudioSample = flvWriter.onAudioSample
flvWriter := flv.NewFlvWriter(w)
hasWritten := false
ts := int64(0) // current timestamp
tsOffset := int64(0) // timestamp offset
// run the demuxing and conversion
err := demuxer.Demux(r.Context())
err := demuxer.Demux(r.Context(),
func(audio *rtmp.RTMPAudio) error {
if !hasWritten {
if err := flvWriter.WriteHeader(demuxer.AudioTrack != nil, demuxer.VideoTrack != nil); err != nil {
return err
}
}
// compute the adjusted timestamp
ts = int64(audio.Timestamp) + tsOffset
timestamp := uint32(ts)
// write the audio data frame
return flvWriter.WriteTag(flv.FLV_TAG_TYPE_AUDIO, timestamp, uint32(audio.Size), audio.Buffers...)
}, func(frame *rtmp.RTMPVideo) error {
if !hasWritten {
if err := flvWriter.WriteHeader(demuxer.AudioTrack != nil, demuxer.VideoTrack != nil); err != nil {
return err
}
}
// compute the adjusted timestamp
ts = int64(frame.Timestamp) + tsOffset
timestamp := uint32(ts)
// write the video data frame
return flvWriter.WriteTag(flv.FLV_TAG_TYPE_VIDEO, timestamp, uint32(frame.Size), frame.Buffers...)
})
if err != nil {
plugin.Error("MP4 to FLV conversion failed", "err", err)
if !flvWriter.hasWritten {
if !hasWritten {
http.Error(w, "Conversion failed", http.StatusInternalServerError)
}
return
@@ -231,160 +249,6 @@ func (plugin *FLVPlugin) processMp4ToFlv(w http.ResponseWriter, r *http.Request,
plugin.Info("MP4 to FLV conversion completed")
}
type ExtraDataInfo struct {
CodecType box.MP4_CODEC_TYPE
Data []byte
}
// flvMp4Writer handles writing the MP4-to-FLV conversion output
type flvMp4Writer struct {
*flv.FlvWriter
plugin *FLVPlugin
audioExtra, videoExtra *ExtraDataInfo
hasWritten bool // whether the FLV header has already been written
ts int64 // current timestamp
tsOffset int64 // timestamp offset, used for continuous playback across multiple files
}
// writeFlvHeader writes the FLV file header
func (w *flvMp4Writer) writeFlvHeader() error {
if w.hasWritten {
return nil
}
// use FlvWriter's WriteHeader method
err := w.FlvWriter.WriteHeader(w.audioExtra != nil, w.videoExtra != nil) // has audio / has video
if err != nil {
return err
}
w.hasWritten = true
if w.videoExtra != nil {
w.onVideoExtraData(w.videoExtra.CodecType, w.videoExtra.Data)
}
if w.audioExtra != nil {
w.onAudioExtraData(w.audioExtra.CodecType, w.audioExtra.Data)
}
return nil
}
// onVideoExtraData handles the video sequence header
func (w *flvMp4Writer) onVideoExtraData(codecType box.MP4_CODEC_TYPE, data []byte) error {
if !w.hasWritten {
w.videoExtra = &ExtraDataInfo{
CodecType: codecType,
Data: data,
}
return nil
}
switch codecType {
case box.MP4_CODEC_H264:
return w.WriteTag(flv.FLV_TAG_TYPE_VIDEO, uint32(w.ts), uint32(len(data)+5), []byte{(1 << 4) | 7, 0, 0, 0, 0}, data)
case box.MP4_CODEC_H265:
return w.WriteTag(flv.FLV_TAG_TYPE_VIDEO, uint32(w.ts), uint32(len(data)+5), []byte{0b1001_0000 | rtmp.PacketTypeSequenceStart, codec.FourCC_H265[0], codec.FourCC_H265[1], codec.FourCC_H265[2], codec.FourCC_H265[3]}, data)
default:
return fmt.Errorf("unsupported video codec: %v", codecType)
}
}
// onAudioExtraData handles the audio sequence header
func (w *flvMp4Writer) onAudioExtraData(codecType box.MP4_CODEC_TYPE, data []byte) error {
if !w.hasWritten {
w.audioExtra = &ExtraDataInfo{
CodecType: codecType,
Data: data,
}
return nil
}
var flvCodec byte
switch codecType {
case box.MP4_CODEC_AAC:
flvCodec = 10 // AAC
case box.MP4_CODEC_G711A:
flvCodec = 7 // G.711 A-law
case box.MP4_CODEC_G711U:
flvCodec = 8 // G.711 μ-law
default:
return fmt.Errorf("unsupported audio codec: %v", codecType)
}
// build the FLV audio tag - sequence header
if flvCodec == 10 { // AAC needs a two-byte header
return w.WriteTag(flv.FLV_TAG_TYPE_AUDIO, uint32(w.ts), uint32(len(data)+2), []byte{(flvCodec << 4) | (3 << 2) | (1 << 1) | 1, 0}, data)
} else {
return w.WriteTag(flv.FLV_TAG_TYPE_AUDIO, uint32(w.ts), uint32(len(data)+1), []byte{(flvCodec << 4) | (3 << 2) | (1 << 1) | 1}, data)
}
}
// onVideoSample handles a video sample
func (w *flvMp4Writer) onVideoSample(codecType box.MP4_CODEC_TYPE, sample box.Sample) error {
if !w.hasWritten {
if err := w.writeFlvHeader(); err != nil {
return err
}
}
// compute the adjusted timestamp
w.ts = int64(sample.Timestamp) + w.tsOffset
timestamp := uint32(w.ts)
switch codecType {
case box.MP4_CODEC_H264:
frameType := byte(2) // P-frame
if sample.KeyFrame {
frameType = 1 // I-frame
}
return w.WriteTag(flv.FLV_TAG_TYPE_VIDEO, timestamp, uint32(len(sample.Data)+5), []byte{(frameType << 4) | 7, 1, byte(sample.CTS >> 16), byte(sample.CTS >> 8), byte(sample.CTS)}, sample.Data)
case box.MP4_CODEC_H265:
// Enhanced RTMP format for H.265
var b0 byte = 0b1010_0000 // P-frame flag
if sample.KeyFrame {
b0 = 0b1001_0000 // keyframe flag
}
if sample.CTS == 0 {
// when CTS is 0, use PacketTypeCodedFramesX (5-byte header)
return w.WriteTag(flv.FLV_TAG_TYPE_VIDEO, timestamp, uint32(len(sample.Data)+5), []byte{b0 | rtmp.PacketTypeCodedFramesX, codec.FourCC_H265[0], codec.FourCC_H265[1], codec.FourCC_H265[2], codec.FourCC_H265[3]}, sample.Data)
} else {
// when CTS is non-zero, use PacketTypeCodedFrames (8-byte header including CTS)
return w.WriteTag(flv.FLV_TAG_TYPE_VIDEO, timestamp, uint32(len(sample.Data)+8), []byte{b0 | rtmp.PacketTypeCodedFrames, codec.FourCC_H265[0], codec.FourCC_H265[1], codec.FourCC_H265[2], codec.FourCC_H265[3], byte(sample.CTS >> 16), byte(sample.CTS >> 8), byte(sample.CTS)}, sample.Data)
}
default:
return fmt.Errorf("unsupported video codec: %v", codecType)
}
}
// onAudioSample handles an audio sample
func (w *flvMp4Writer) onAudioSample(codec box.MP4_CODEC_TYPE, sample box.Sample) error {
if !w.hasWritten {
if err := w.writeFlvHeader(); err != nil {
return err
}
}
// compute the adjusted timestamp
w.ts = int64(sample.Timestamp) + w.tsOffset
timestamp := uint32(w.ts)
var flvCodec byte
switch codec {
case box.MP4_CODEC_AAC:
flvCodec = 10 // AAC
case box.MP4_CODEC_G711A:
flvCodec = 7 // G.711 A-law
case box.MP4_CODEC_G711U:
flvCodec = 8 // G.711 μ-law
default:
return fmt.Errorf("unsupported audio codec: %v", codec)
}
// build the FLV audio tag - audio frame
if flvCodec == 10 { // AAC needs a two-byte header
return w.WriteTag(flv.FLV_TAG_TYPE_AUDIO, timestamp, uint32(len(sample.Data)+2), []byte{(flvCodec << 4) | (3 << 2) | (1 << 1) | 1, 1}, sample.Data)
} else {
// non-AAC codecs (such as G.711) only need a one-byte header
return w.WriteTag(flv.FLV_TAG_TYPE_AUDIO, timestamp, uint32(len(sample.Data)+1), []byte{(flvCodec << 4) | (3 << 2) | (1 << 1) | 1}, sample.Data)
}
}
// processFlvFiles handles native FLV files
func (plugin *FLVPlugin) processFlvFiles(w http.ResponseWriter, r *http.Request, fileInfoList []*fileInfo, params *requestParams) {
plugin.Info("Processing FLV files", "count", len(fileInfoList))

View File

@@ -1,7 +1,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.28.1
// protoc v3.19.1
// protoc-gen-go v1.36.6
// protoc v5.29.3
// source: flv.proto
package pb
@@ -16,6 +16,7 @@ import (
pb "m7s.live/v5/pb"
reflect "reflect"
sync "sync"
unsafe "unsafe"
)
const (
@@ -26,26 +27,23 @@ const (
)
type ReqRecordList struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
state protoimpl.MessageState `protogen:"open.v1"`
StreamPath string `protobuf:"bytes,1,opt,name=streamPath,proto3" json:"streamPath,omitempty"`
Range string `protobuf:"bytes,2,opt,name=range,proto3" json:"range,omitempty"`
Start string `protobuf:"bytes,3,opt,name=start,proto3" json:"start,omitempty"`
End string `protobuf:"bytes,4,opt,name=end,proto3" json:"end,omitempty"`
PageNum uint32 `protobuf:"varint,5,opt,name=pageNum,proto3" json:"pageNum,omitempty"`
PageSize uint32 `protobuf:"varint,6,opt,name=pageSize,proto3" json:"pageSize,omitempty"`
Mode string `protobuf:"bytes,7,opt,name=mode,proto3" json:"mode,omitempty"`
unknownFields protoimpl.UnknownFields
StreamPath string `protobuf:"bytes,1,opt,name=streamPath,proto3" json:"streamPath,omitempty"`
Range string `protobuf:"bytes,2,opt,name=range,proto3" json:"range,omitempty"`
Start string `protobuf:"bytes,3,opt,name=start,proto3" json:"start,omitempty"`
End string `protobuf:"bytes,4,opt,name=end,proto3" json:"end,omitempty"`
PageNum uint32 `protobuf:"varint,5,opt,name=pageNum,proto3" json:"pageNum,omitempty"`
PageSize uint32 `protobuf:"varint,6,opt,name=pageSize,proto3" json:"pageSize,omitempty"`
Mode string `protobuf:"bytes,7,opt,name=mode,proto3" json:"mode,omitempty"`
sizeCache protoimpl.SizeCache
}
func (x *ReqRecordList) Reset() {
*x = ReqRecordList{}
if protoimpl.UnsafeEnabled {
mi := &file_flv_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
mi := &file_flv_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
func (x *ReqRecordList) String() string {
@@ -56,7 +54,7 @@ func (*ReqRecordList) ProtoMessage() {}
func (x *ReqRecordList) ProtoReflect() protoreflect.Message {
mi := &file_flv_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -121,24 +119,21 @@ func (x *ReqRecordList) GetMode() string {
}
type ReqRecordDelete struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
state protoimpl.MessageState `protogen:"open.v1"`
StreamPath string `protobuf:"bytes,1,opt,name=streamPath,proto3" json:"streamPath,omitempty"`
Ids []uint32 `protobuf:"varint,2,rep,packed,name=ids,proto3" json:"ids,omitempty"`
StartTime string `protobuf:"bytes,3,opt,name=startTime,proto3" json:"startTime,omitempty"`
EndTime string `protobuf:"bytes,4,opt,name=endTime,proto3" json:"endTime,omitempty"`
Range string `protobuf:"bytes,5,opt,name=range,proto3" json:"range,omitempty"`
unknownFields protoimpl.UnknownFields
StreamPath string `protobuf:"bytes,1,opt,name=streamPath,proto3" json:"streamPath,omitempty"`
Ids []uint32 `protobuf:"varint,2,rep,packed,name=ids,proto3" json:"ids,omitempty"`
StartTime string `protobuf:"bytes,3,opt,name=startTime,proto3" json:"startTime,omitempty"`
EndTime string `protobuf:"bytes,4,opt,name=endTime,proto3" json:"endTime,omitempty"`
Range string `protobuf:"bytes,5,opt,name=range,proto3" json:"range,omitempty"`
sizeCache protoimpl.SizeCache
}
func (x *ReqRecordDelete) Reset() {
*x = ReqRecordDelete{}
if protoimpl.UnsafeEnabled {
mi := &file_flv_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
mi := &file_flv_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
func (x *ReqRecordDelete) String() string {
@@ -149,7 +144,7 @@ func (*ReqRecordDelete) ProtoMessage() {}
func (x *ReqRecordDelete) ProtoReflect() protoreflect.Message {
mi := &file_flv_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
if x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -201,86 +196,58 @@ func (x *ReqRecordDelete) GetRange() string {
var File_flv_proto protoreflect.FileDescriptor
var file_flv_proto_rawDesc = []byte{
0x0a, 0x09, 0x66, 0x6c, 0x76, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x03, 0x66, 0x6c, 0x76,
0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e,
0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1b,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f,
0x65, 0x6d, 0x70, 0x74, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d,
0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1e, 0x67, 0x6f,
0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x64, 0x75,
0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x0c, 0x67, 0x6c,
0x6f, 0x62, 0x61, 0x6c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xb7, 0x01, 0x0a, 0x0d, 0x52,
0x65, 0x71, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x1e, 0x0a, 0x0a,
0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x50, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09,
0x52, 0x0a, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x50, 0x61, 0x74, 0x68, 0x12, 0x14, 0x0a, 0x05,
0x72, 0x61, 0x6e, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x72, 0x61, 0x6e,
0x67, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28,
0x09, 0x52, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x65, 0x6e, 0x64, 0x18,
0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x65, 0x6e, 0x64, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x61,
0x67, 0x65, 0x4e, 0x75, 0x6d, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x07, 0x70, 0x61, 0x67,
0x65, 0x4e, 0x75, 0x6d, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a, 0x65,
0x18, 0x06, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x08, 0x70, 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a, 0x65,
0x12, 0x12, 0x0a, 0x04, 0x6d, 0x6f, 0x64, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04,
0x6d, 0x6f, 0x64, 0x65, 0x22, 0x91, 0x01, 0x0a, 0x0f, 0x52, 0x65, 0x71, 0x52, 0x65, 0x63, 0x6f,
0x72, 0x64, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x73, 0x74, 0x72, 0x65,
0x61, 0x6d, 0x50, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x73, 0x74,
0x72, 0x65, 0x61, 0x6d, 0x50, 0x61, 0x74, 0x68, 0x12, 0x10, 0x0a, 0x03, 0x69, 0x64, 0x73, 0x18,
0x02, 0x20, 0x03, 0x28, 0x0d, 0x52, 0x03, 0x69, 0x64, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x73, 0x74,
0x61, 0x72, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73,
0x74, 0x61, 0x72, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x65, 0x6e, 0x64, 0x54,
0x69, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x65, 0x6e, 0x64, 0x54, 0x69,
0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28,
0x09, 0x52, 0x05, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x32, 0x98, 0x02, 0x0a, 0x03, 0x61, 0x70, 0x69,
0x12, 0x57, 0x0a, 0x04, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x12, 0x2e, 0x66, 0x6c, 0x76, 0x2e, 0x52,
0x65, 0x71, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x1a, 0x14, 0x2e, 0x67,
0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x4c, 0x69,
0x73, 0x74, 0x22, 0x25, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1f, 0x12, 0x1d, 0x2f, 0x66, 0x6c, 0x76,
0x2f, 0x61, 0x70, 0x69, 0x2f, 0x6c, 0x69, 0x73, 0x74, 0x2f, 0x7b, 0x73, 0x74, 0x72, 0x65, 0x61,
0x6d, 0x50, 0x61, 0x74, 0x68, 0x3d, 0x2a, 0x2a, 0x7d, 0x12, 0x54, 0x0a, 0x07, 0x43, 0x61, 0x74,
0x61, 0x6c, 0x6f, 0x67, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x17, 0x2e, 0x67,
0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x43, 0x61,
0x74, 0x61, 0x6c, 0x6f, 0x67, 0x22, 0x18, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x12, 0x12, 0x10, 0x2f,
0x66, 0x6c, 0x76, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x63, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x12,
0x62, 0x0a, 0x06, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x14, 0x2e, 0x66, 0x6c, 0x76, 0x2e,
0x52, 0x65, 0x71, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x1a,
0x16, 0x2e, 0x67, 0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
0x65, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x22, 0x2a, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x24, 0x22,
0x1f, 0x2f, 0x66, 0x6c, 0x76, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x64, 0x65, 0x6c, 0x65, 0x74, 0x65,
0x2f, 0x7b, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x50, 0x61, 0x74, 0x68, 0x3d, 0x2a, 0x2a, 0x7d,
0x3a, 0x01, 0x2a, 0x42, 0x1b, 0x5a, 0x19, 0x6d, 0x37, 0x73, 0x2e, 0x6c, 0x69, 0x76, 0x65, 0x2f,
0x76, 0x35, 0x2f, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2f, 0x66, 0x6c, 0x76, 0x2f, 0x70, 0x62,
0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
const file_flv_proto_rawDesc = "" +
"\n" +
"\tflv.proto\x12\x03flv\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\fglobal.proto\"\xb7\x01\n" +
"\rReqRecordList\x12\x1e\n" +
"\n" +
"streamPath\x18\x01 \x01(\tR\n" +
"streamPath\x12\x14\n" +
"\x05range\x18\x02 \x01(\tR\x05range\x12\x14\n" +
"\x05start\x18\x03 \x01(\tR\x05start\x12\x10\n" +
"\x03end\x18\x04 \x01(\tR\x03end\x12\x18\n" +
"\apageNum\x18\x05 \x01(\rR\apageNum\x12\x1a\n" +
"\bpageSize\x18\x06 \x01(\rR\bpageSize\x12\x12\n" +
"\x04mode\x18\a \x01(\tR\x04mode\"\x91\x01\n" +
"\x0fReqRecordDelete\x12\x1e\n" +
"\n" +
"streamPath\x18\x01 \x01(\tR\n" +
"streamPath\x12\x10\n" +
"\x03ids\x18\x02 \x03(\rR\x03ids\x12\x1c\n" +
"\tstartTime\x18\x03 \x01(\tR\tstartTime\x12\x18\n" +
"\aendTime\x18\x04 \x01(\tR\aendTime\x12\x14\n" +
"\x05range\x18\x05 \x01(\tR\x05range2\x9e\x02\n" +
"\x03api\x12]\n" +
"\x04List\x12\x12.flv.ReqRecordList\x1a\x1a.global.RecordResponseList\"%\x82\xd3\xe4\x93\x02\x1f\x12\x1d/flv/api/list/{streamPath=**}\x12T\n" +
"\aCatalog\x12\x16.google.protobuf.Empty\x1a\x17.global.ResponseCatalog\"\x18\x82\xd3\xe4\x93\x02\x12\x12\x10/flv/api/catalog\x12b\n" +
"\x06Delete\x12\x14.flv.ReqRecordDelete\x1a\x16.global.ResponseDelete\"*\x82\xd3\xe4\x93\x02$:\x01*\"\x1f/flv/api/delete/{streamPath=**}B\x1bZ\x19m7s.live/v5/plugin/flv/pbb\x06proto3"
var (
file_flv_proto_rawDescOnce sync.Once
file_flv_proto_rawDescData = file_flv_proto_rawDesc
file_flv_proto_rawDescData []byte
)
func file_flv_proto_rawDescGZIP() []byte {
file_flv_proto_rawDescOnce.Do(func() {
file_flv_proto_rawDescData = protoimpl.X.CompressGZIP(file_flv_proto_rawDescData)
file_flv_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_flv_proto_rawDesc), len(file_flv_proto_rawDesc)))
})
return file_flv_proto_rawDescData
}
var file_flv_proto_msgTypes = make([]protoimpl.MessageInfo, 2)
var file_flv_proto_goTypes = []interface{}{
(*ReqRecordList)(nil), // 0: flv.ReqRecordList
(*ReqRecordDelete)(nil), // 1: flv.ReqRecordDelete
(*emptypb.Empty)(nil), // 2: google.protobuf.Empty
(*pb.ResponseList)(nil), // 3: global.ResponseList
(*pb.ResponseCatalog)(nil), // 4: global.ResponseCatalog
(*pb.ResponseDelete)(nil), // 5: global.ResponseDelete
var file_flv_proto_goTypes = []any{
(*ReqRecordList)(nil), // 0: flv.ReqRecordList
(*ReqRecordDelete)(nil), // 1: flv.ReqRecordDelete
(*emptypb.Empty)(nil), // 2: google.protobuf.Empty
(*pb.RecordResponseList)(nil), // 3: global.RecordResponseList
(*pb.ResponseCatalog)(nil), // 4: global.ResponseCatalog
(*pb.ResponseDelete)(nil), // 5: global.ResponseDelete
}
var file_flv_proto_depIdxs = []int32{
0, // 0: flv.api.List:input_type -> flv.ReqRecordList
2, // 1: flv.api.Catalog:input_type -> google.protobuf.Empty
1, // 2: flv.api.Delete:input_type -> flv.ReqRecordDelete
3, // 3: flv.api.List:output_type -> global.ResponseList
3, // 3: flv.api.List:output_type -> global.RecordResponseList
4, // 4: flv.api.Catalog:output_type -> global.ResponseCatalog
5, // 5: flv.api.Delete:output_type -> global.ResponseDelete
3, // [3:6] is the sub-list for method output_type
@@ -295,37 +262,11 @@ func file_flv_proto_init() {
if File_flv_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_flv_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ReqRecordList); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_flv_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ReqRecordDelete); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_flv_proto_rawDesc,
RawDescriptor: unsafe.Slice(unsafe.StringData(file_flv_proto_rawDesc), len(file_flv_proto_rawDesc)),
NumEnums: 0,
NumMessages: 2,
NumExtensions: 0,
@@ -336,7 +277,6 @@ func file_flv_proto_init() {
MessageInfos: file_flv_proto_msgTypes,
}.Build()
File_flv_proto = out.File
file_flv_proto_rawDesc = nil
file_flv_proto_goTypes = nil
file_flv_proto_depIdxs = nil
}

View File

@@ -8,7 +8,7 @@ package flv;
option go_package="m7s.live/v5/plugin/flv/pb";
service api {
rpc List (ReqRecordList) returns (global.ResponseList) {
rpc List (ReqRecordList) returns (global.RecordResponseList) {
option (google.api.http) = {
get: "/flv/api/list/{streamPath=**}"
};

View File

@@ -1,7 +1,7 @@
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
// versions:
// - protoc-gen-go-grpc v1.2.0
// - protoc v3.19.1
// - protoc-gen-go-grpc v1.5.1
// - protoc v5.29.3
// source: flv.proto
package pb
@@ -17,14 +17,20 @@ import (
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
// Requires gRPC-Go v1.32.0 or later.
const _ = grpc.SupportPackageIsVersion7
// Requires gRPC-Go v1.64.0 or later.
const _ = grpc.SupportPackageIsVersion9
const (
Api_List_FullMethodName = "/flv.api/List"
Api_Catalog_FullMethodName = "/flv.api/Catalog"
Api_Delete_FullMethodName = "/flv.api/Delete"
)
// ApiClient is the client API for Api service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type ApiClient interface {
List(ctx context.Context, in *ReqRecordList, opts ...grpc.CallOption) (*pb.ResponseList, error)
List(ctx context.Context, in *ReqRecordList, opts ...grpc.CallOption) (*pb.RecordResponseList, error)
Catalog(ctx context.Context, in *emptypb.Empty, opts ...grpc.CallOption) (*pb.ResponseCatalog, error)
Delete(ctx context.Context, in *ReqRecordDelete, opts ...grpc.CallOption) (*pb.ResponseDelete, error)
}
@@ -37,9 +43,10 @@ func NewApiClient(cc grpc.ClientConnInterface) ApiClient {
return &apiClient{cc}
}
func (c *apiClient) List(ctx context.Context, in *ReqRecordList, opts ...grpc.CallOption) (*pb.ResponseList, error) {
out := new(pb.ResponseList)
err := c.cc.Invoke(ctx, "/flv.api/List", in, out, opts...)
func (c *apiClient) List(ctx context.Context, in *ReqRecordList, opts ...grpc.CallOption) (*pb.RecordResponseList, error) {
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(pb.RecordResponseList)
err := c.cc.Invoke(ctx, Api_List_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -47,8 +54,9 @@ func (c *apiClient) List(ctx context.Context, in *ReqRecordList, opts ...grpc.Ca
}
func (c *apiClient) Catalog(ctx context.Context, in *emptypb.Empty, opts ...grpc.CallOption) (*pb.ResponseCatalog, error) {
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(pb.ResponseCatalog)
err := c.cc.Invoke(ctx, "/flv.api/Catalog", in, out, opts...)
err := c.cc.Invoke(ctx, Api_Catalog_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -56,8 +64,9 @@ func (c *apiClient) Catalog(ctx context.Context, in *emptypb.Empty, opts ...grpc
}
func (c *apiClient) Delete(ctx context.Context, in *ReqRecordDelete, opts ...grpc.CallOption) (*pb.ResponseDelete, error) {
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(pb.ResponseDelete)
err := c.cc.Invoke(ctx, "/flv.api/Delete", in, out, opts...)
err := c.cc.Invoke(ctx, Api_Delete_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
@@ -66,19 +75,22 @@ func (c *apiClient) Delete(ctx context.Context, in *ReqRecordDelete, opts ...grp
// ApiServer is the server API for Api service.
// All implementations must embed UnimplementedApiServer
// for forward compatibility
// for forward compatibility.
type ApiServer interface {
List(context.Context, *ReqRecordList) (*pb.ResponseList, error)
List(context.Context, *ReqRecordList) (*pb.RecordResponseList, error)
Catalog(context.Context, *emptypb.Empty) (*pb.ResponseCatalog, error)
Delete(context.Context, *ReqRecordDelete) (*pb.ResponseDelete, error)
mustEmbedUnimplementedApiServer()
}
// UnimplementedApiServer must be embedded to have forward compatible implementations.
type UnimplementedApiServer struct {
}
// UnimplementedApiServer must be embedded to have
// forward compatible implementations.
//
// NOTE: this should be embedded by value instead of pointer to avoid a nil
// pointer dereference when methods are called.
type UnimplementedApiServer struct{}
func (UnimplementedApiServer) List(context.Context, *ReqRecordList) (*pb.ResponseList, error) {
func (UnimplementedApiServer) List(context.Context, *ReqRecordList) (*pb.RecordResponseList, error) {
return nil, status.Errorf(codes.Unimplemented, "method List not implemented")
}
func (UnimplementedApiServer) Catalog(context.Context, *emptypb.Empty) (*pb.ResponseCatalog, error) {
@@ -88,6 +100,7 @@ func (UnimplementedApiServer) Delete(context.Context, *ReqRecordDelete) (*pb.Res
return nil, status.Errorf(codes.Unimplemented, "method Delete not implemented")
}
func (UnimplementedApiServer) mustEmbedUnimplementedApiServer() {}
func (UnimplementedApiServer) testEmbeddedByValue() {}
// UnsafeApiServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to ApiServer will
@@ -97,6 +110,13 @@ type UnsafeApiServer interface {
}
func RegisterApiServer(s grpc.ServiceRegistrar, srv ApiServer) {
	// If the following call panics, it indicates UnimplementedApiServer was
// embedded by pointer and is nil. This will cause panics if an
// unimplemented method is ever invoked, so we test this at initialization
// time to prevent it from happening at runtime later due to I/O.
if t, ok := srv.(interface{ testEmbeddedByValue() }); ok {
t.testEmbeddedByValue()
}
s.RegisterService(&Api_ServiceDesc, srv)
}
@@ -110,7 +130,7 @@ func _Api_List_Handler(srv interface{}, ctx context.Context, dec func(interface{
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/flv.api/List",
FullMethod: Api_List_FullMethodName,
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ApiServer).List(ctx, req.(*ReqRecordList))
@@ -128,7 +148,7 @@ func _Api_Catalog_Handler(srv interface{}, ctx context.Context, dec func(interfa
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/flv.api/Catalog",
FullMethod: Api_Catalog_FullMethodName,
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ApiServer).Catalog(ctx, req.(*emptypb.Empty))
@@ -146,7 +166,7 @@ func _Api_Delete_Handler(srv interface{}, ctx context.Context, dec func(interfac
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/flv.api/Delete",
FullMethod: Api_Delete_FullMethodName,
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(ApiServer).Delete(ctx, req.(*ReqRecordDelete))

View File

@@ -153,6 +153,7 @@ var CustomFileName = func(job *m7s.RecordJob) string {
}
func (r *Recorder) createStream(start time.Time) (err error) {
r.RecordJob.RecConf.Type = "flv"
return r.CreateStream(start, CustomFileName)
}

View File

@@ -1873,8 +1873,8 @@ func (gb *GB28181Plugin) GetGroupChannels(ctx context.Context, req *pb.GetGroupC
Select(`
IFNULL(gc.id, 0) AS id,
dc.channel_id,
dc.device_id,
dc.name AS channel_name,
d.device_id AS device_id,
d.name AS device_name,
dc.status AS status,
CASE
@@ -1883,11 +1883,11 @@ func (gb *GB28181Plugin) GetGroupChannels(ctx context.Context, req *pb.GetGroupC
END AS in_group
`).
Joins("LEFT JOIN "+deviceTable+" AS d ON dc.device_id = d.device_id").
Joins("LEFT JOIN "+groupsChannelTable+" AS gc ON dc.channel_id = gc.channel_id AND gc.group_id = ?", req.GroupId)
Joins("LEFT JOIN "+groupsChannelTable+" AS gc ON dc.channel_id = gc.channel_id AND dc.device_id = gc.device_id AND gc.group_id = ?", req.GroupId)
	// Apply the optional device ID filter
if req.DeviceId != "" {
baseQuery = baseQuery.Where("d.device_id = ?", req.DeviceId)
baseQuery = baseQuery.Where("dc.device_id = ?", req.DeviceId)
}
	// Count the total number of matching channels
@@ -1903,7 +1903,7 @@ func (gb *GB28181Plugin) GetGroupChannels(ctx context.Context, req *pb.GetGroupC
query := baseQuery
	// Apply ordering
query = query.Order("channel_id ASC")
query = query.Order("dc.device_id ASC, dc.channel_id ASC")
	// Apply pagination when page parameters are provided
if req.Page > 0 && req.Count > 0 {
@@ -1922,12 +1922,14 @@ func (gb *GB28181Plugin) GetGroupChannels(ctx context.Context, req *pb.GetGroupC
var pbGroupChannels []*pb.GroupChannel
for _, result := range results {
channelInfo := &pb.GroupChannel{
Id: int32(result.ID),
GroupId: req.GroupId,
ChannelId: result.ChannelID,
DeviceId: result.DeviceID,
ChannelName: result.ChannelName,
DeviceName: result.DeviceName,
Status: result.Status,
InGroup: result.InGroup, // 设置inGroup字段
InGroup: result.InGroup,
}
	// Look up the device in memory to obtain its transport protocol (stream mode)
@@ -1935,13 +1937,6 @@ func (gb *GB28181Plugin) GetGroupChannels(ctx context.Context, req *pb.GetGroupC
channelInfo.StreamMode = device.StreamMode
}
if result.InGroup {
channelInfo.Id = int32(result.ID)
channelInfo.GroupId = int32(req.GroupId)
} else {
channelInfo.Id = 0
}
pbGroupChannels = append(pbGroupChannels, channelInfo)
}
@@ -2082,19 +2077,19 @@ func (gb *GB28181Plugin) getGroupChannels(groupId int32) ([]*pb.GroupChannel, er
InGroup bool `gorm:"column:in_group"`
}
// 构建查询
// 构建优化后的查询
query := gb.DB.Table(groupsChannelTable+" AS gc").
Select(`
gc.id AS id,
gc.channel_id AS channel_id,
gc.device_id AS device_id,
dc.name AS channel_name,
d.name AS device_name,
dc.status AS status,
ch.name AS channel_name,
dev.name AS device_name,
ch.status AS status,
true AS in_group
`).
Joins("LEFT JOIN "+deviceChannelTable+" AS dc ON gc.channel_id = dc.channel_id").
Joins("LEFT JOIN "+deviceTable+" AS d ON gc.device_id = d.device_id").
Joins("LEFT JOIN "+deviceChannelTable+" AS ch ON gc.device_id = ch.device_id AND gc.channel_id = ch.channel_id").
Joins("LEFT JOIN "+deviceTable+" AS dev ON ch.device_id = dev.device_id").
Where("gc.group_id = ?", groupId)
var results []Result
@@ -2107,7 +2102,7 @@ func (gb *GB28181Plugin) getGroupChannels(groupId int32) ([]*pb.GroupChannel, er
for _, result := range results {
channelInfo := &pb.GroupChannel{
Id: int32(result.ID),
GroupId: groupId,
GroupId: groupId, // 使用函数参数 groupId
ChannelId: result.ChannelID,
DeviceId: result.DeviceID,
ChannelName: result.ChannelName,
@@ -2868,7 +2863,7 @@ func (gb *GB28181Plugin) RemoveDevice(ctx context.Context, req *pb.RemoveDeviceR
}
// 删除设备关联的通道
if err := tx.Delete(&gb28181.DeviceChannel{DeviceID: req.Id}).Error; err != nil {
if err := tx.Where("device_id = ?", req.Id).Delete(&gb28181.DeviceChannel{}).Error; err != nil {
tx.Rollback()
resp.Code = 500
resp.Message = "删除设备通道失败"

View File

@@ -4,6 +4,7 @@ import (
"errors"
"fmt"
"math/rand"
"net"
"net/url"
"strconv"
"strings"
@@ -99,15 +100,20 @@ func (d *Dialog) Start() (err error) {
d.gb.dialogs.Set(d)
//defer d.gb.dialogs.Remove(d)
if d.gb.MediaPort.Valid() {
select {
case d.MediaPort = <-d.gb.tcpPorts:
default:
return fmt.Errorf("no available tcp port")
}
if d.gb.tcpPort > 0 {
d.MediaPort = d.gb.tcpPort
} else {
d.MediaPort = d.gb.MediaPort[0]
if d.gb.MediaPort.Valid() {
select {
case d.MediaPort = <-d.gb.tcpPorts:
default:
return fmt.Errorf("no available tcp port")
}
} else {
d.MediaPort = d.gb.MediaPort[0]
}
}
ssrc := d.CreateSSRC(d.gb.Serial)
d.Info("MediaIp is ", device.MediaIp)
@@ -266,7 +272,7 @@ func (d *Dialog) Run() (err error) {
if _ssrc, err := strconv.ParseInt(ls[1], 10, 0); err == nil {
d.SSRC = uint32(_ssrc)
} else {
d.gb.Error("read invite response y ", "err", err)
				return errors.New("read invite response y error: " + err.Error())
}
}
case "c":
@@ -299,6 +305,18 @@ func (d *Dialog) Run() (err error) {
if d.StreamMode == "TCP-ACTIVE" {
pub.Receiver.ListenAddr = fmt.Sprintf("%s:%d", d.targetIP, d.targetPort)
} else {
if d.gb.tcpPort > 0 {
d.Info("into single port mode,use gb.tcpPort", d.gb.tcpPort)
if d.gb.netListener != nil {
d.Info("use gb.netListener", d.gb.netListener.Addr())
pub.Receiver.Listener = d.gb.netListener
} else {
d.Info("listen tcp4", fmt.Sprintf(":%d", d.gb.tcpPort))
pub.Receiver.Listener, _ = net.Listen("tcp4", fmt.Sprintf(":%d", d.gb.tcpPort))
d.gb.netListener = pub.Receiver.Listener
}
pub.Receiver.SSRC = d.SSRC
}
pub.Receiver.ListenAddr = fmt.Sprintf(":%d", d.MediaPort)
}
pub.Receiver.StreamMode = d.StreamMode
@@ -316,7 +334,11 @@ func (d *Dialog) GetKey() uint32 {
}
func (d *Dialog) Dispose() {
d.gb.tcpPorts <- d.MediaPort
if d.gb.tcpPort == 0 {
		// Multi-port mode (no fixed tcp port configured): return the MediaPort to the pool for reuse
d.gb.tcpPorts <- d.MediaPort
}
d.Info("dialog dispose", "ssrc", d.SSRC, "mediaPort", d.MediaPort, "streamMode", d.StreamMode, "deviceId", d.Channel.DeviceID, "channelId", d.Channel.ChannelID)
if d.session != nil {
err := d.session.Bye(d)
if err != nil {

View File

@@ -3,9 +3,9 @@ package plugin_gb28181pro
import (
"errors"
"fmt"
"net"
"net/http"
"os"
"regexp"
"slices"
"strconv"
"strings"
@@ -41,7 +41,7 @@ type GB28181Plugin struct {
pb.UnimplementedApiServer
m7s.Plugin
Serial string `default:"34020000002000000001" desc:"sip 服务 id"` //sip 服务器 id, 默认 34020000002000000001
Realm string `default:"3402000000" desc:"sip 服务域"` //sip 服务器域,默认 3402000000
Realm string `default:"3402000000" desc:"sip 服务域"` //sip 服务器域,默认 3402000000
Password string
Sip SipConfig
MediaPort util.Range[uint16] `default:"10001-20000" desc:"媒体端口范围"` //媒体端口范围
@@ -55,12 +55,14 @@ type GB28181Plugin struct {
forwardDialogs util.Collection[uint32, *ForwardDialog]
platforms util.Collection[string, *Platform]
tcpPorts chan uint16
tcpPort uint16
sipPorts []int
SipIP string `desc:"sip发送命令的IP一般是本地IP多网卡时需要配置正确的IP"`
MediaIP string `desc:"流媒体IP用于接收流"`
deviceManager task.Manager[string, *DeviceRegisterQueueTask]
Platforms []*gb28181.PlatformModel
channels util.Collection[string, *gb28181.DeviceChannel]
netListener net.Listener
}
var _ = m7s.InstallPlugin[GB28181Plugin](m7s.PluginMeta{
@@ -75,6 +77,18 @@ var _ = m7s.InstallPlugin[GB28181Plugin](m7s.PluginMeta{
NewPullProxy: NewPullProxy,
})
func (gb *GB28181Plugin) Dispose() {
if gb.netListener != nil {
gb.Info("gb28181 plugin dispose")
err := gb.netListener.Close()
if err != nil {
gb.Error("Close netListener error", "error", err)
} else {
gb.Info("netListener closed")
}
}
}
func init() {
sip.SIPDebug = true
}
@@ -153,8 +167,16 @@ func (gb *GB28181Plugin) OnInit() (err error) {
if gb.MediaPort.Valid() {
gb.SetDescription("tcp", fmt.Sprintf("%d-%d", gb.MediaPort[0], gb.MediaPort[1]))
gb.tcpPorts = make(chan uint16, gb.MediaPort.Size())
for i := range gb.MediaPort.Size() {
gb.tcpPorts <- gb.MediaPort[0] + i
if gb.MediaPort.Size() == 0 {
gb.tcpPort = gb.MediaPort[0]
gb.netListener, _ = net.Listen("tcp4", fmt.Sprintf(":%d", gb.tcpPort))
} else if gb.MediaPort.Size() == 1 {
gb.tcpPort = gb.MediaPort[0] + 1
gb.netListener, _ = net.Listen("tcp4", fmt.Sprintf(":%d", gb.tcpPort))
} else {
for i := range gb.MediaPort.Size() {
gb.tcpPorts <- gb.MediaPort[0] + i
}
}
} else {
gb.SetDescription("tcp", fmt.Sprintf("%d", gb.MediaPort[0]))
@@ -438,22 +460,9 @@ func (gb *GB28181Plugin) OnRegister(req *sip.Request, tx sip.ServerTransaction)
from := req.From()
if from == nil || from.Address.User == "" {
gb.Error("OnRegister", "error", "no user")
response := sip.NewResponseFromRequest(req, sip.StatusBadRequest, "Invalid sip from format", nil)
if err := tx.Respond(response); err != nil {
gb.Error("respond BadRequest", "error", err.Error())
}
return
}
deviceId := from.Address.User
// 验证设备ID是否符合GB28181规范(20位数字)
if match, _ := regexp.MatchString(`^\d{20}$`, deviceId); !match {
gb.Error("OnRegister", "error", "invalid device id format, must be 20 digits", "deviceId", deviceId)
response := sip.NewResponseFromRequest(req, sip.StatusBadRequest, "Invalid device ID format", nil)
if err := tx.Respond(response); err != nil {
gb.Error("respond BadRequest", "error", err.Error())
}
return
}
registerHandlerTask := registerHandlerTask{
gb: gb,
req: req,

View File

@@ -44,8 +44,9 @@ type Receiver struct {
psAudio PSAudio
RTPReader *rtp2.TCP
ListenAddr string
listener net.Listener
Listener net.Listener
	StreamMode string // stream transport mode: UDP / TCP-ACTIVE (tcp active mode) / TCP-PASSIVE (tcp passive mode)
SSRC uint32 // RTP SSRC
}
func NewPSPublisher(puber *m7s.Publisher) *PSPublisher {
@@ -147,9 +148,19 @@ func (p *Receiver) ReadRTP(rtp util.Buffer) (err error) {
p.Error("unmarshal error", "err", err)
return
}
	// When SSRC filtering is set (single media port mode), only process packets whose SSRC matches
if p.SSRC != 0 && p.SSRC != p.Packet.SSRC {
p.Info("into single port mode, ssrc mismatch", "expected", p.SSRC, "actual", p.Packet.SSRC)
if p.TraceEnabled() {
p.Trace("rtp ssrc mismatch, skip", "expected", p.SSRC, "actual", p.Packet.SSRC)
}
return nil
}
if lastSeq == 0 || p.SequenceNumber == lastSeq+1 {
if p.TraceEnabled() {
p.Trace("rtp", "len", rtp.Len(), "seq", p.SequenceNumber, "payloadType", p.PayloadType, "ssrc", p.SSRC)
p.Trace("rtp", "len", rtp.Len(), "seq", p.SequenceNumber, "payloadType", p.PayloadType, "ssrc", p.Packet.SSRC)
}
copyData := make([]byte, len(p.Payload))
copy(copyData, p.Payload)
@@ -172,18 +183,24 @@ func (p *Receiver) Start() (err error) {
return nil
}
// TCP被动模式
p.listener, err = net.Listen("tcp4", p.ListenAddr)
if err != nil {
p.Error("start listen", "err", err)
return errors.New("start listen,err" + err.Error())
if p.Listener == nil {
p.Info("start new listener", "addr", p.ListenAddr)
p.Listener, err = net.Listen("tcp4", p.ListenAddr)
if err != nil {
p.Error("start listen", "err", err)
return errors.New("start listen,err" + err.Error())
}
}
p.Info("start listen", "addr", p.ListenAddr)
return
}
func (p *Receiver) Dispose() {
if p.listener != nil {
p.listener.Close()
if p.SSRC == 0 {
p.Info("into multiport mode ,close listener ", p.SSRC)
if p.Listener != nil {
p.Listener.Close()
}
}
if p.RTPReader != nil {
p.RTPReader.Close()
@@ -216,7 +233,7 @@ func (p *Receiver) Go() error {
}
// TCP被动模式
p.Info("start accept")
conn, err := p.listener.Accept()
conn, err := p.Listener.Accept()
if err != nil {
p.Error("accept", "err", err)
return err

422
plugin/hls/download.go Normal file
View File

@@ -0,0 +1,422 @@
package plugin_hls
import (
"bufio"
"fmt"
"io"
"net/http"
"os"
"strconv"
"strings"
"time"
m7s "m7s.live/v5"
"m7s.live/v5/pkg"
"m7s.live/v5/pkg/codec"
"m7s.live/v5/pkg/util"
hls "m7s.live/v5/plugin/hls/pkg"
mpegts "m7s.live/v5/plugin/hls/pkg/ts"
mp4 "m7s.live/v5/plugin/mp4/pkg"
)
// requestParams 包含请求解析后的参数
type requestParams struct {
streamPath string
startTime time.Time
endTime time.Time
timeRange time.Duration
}
// fileInfo 包含文件信息
type fileInfo struct {
filePath string
startTime time.Time
endTime time.Time
startOffsetTime time.Duration
	recordType      string // "hls", "mp4", "fmp4"
}
// parseRequestParams 解析请求参数
func (plugin *HLSPlugin) parseRequestParams(r *http.Request) (*requestParams, error) {
// 从URL路径中提取流路径去除前缀 "/download/" 和后缀 ".ts"
streamPath := strings.TrimSuffix(strings.TrimPrefix(r.URL.Path, "/download/"), ".ts")
// 解析URL查询参数中的时间范围start和end参数
startTime, endTime, err := util.TimeRangeQueryParse(r.URL.Query())
if err != nil {
return nil, err
}
return &requestParams{
streamPath: streamPath,
startTime: startTime,
endTime: endTime,
timeRange: endTime.Sub(startTime),
}, nil
}
// queryRecordStreams 从数据库查询录像记录
func (plugin *HLSPlugin) queryRecordStreams(params *requestParams) ([]m7s.RecordStream, error) {
// 检查数据库是否可用
if plugin.DB == nil {
return nil, fmt.Errorf("database not available")
}
var recordStreams []m7s.RecordStream
// 首先查询HLS记录 (ts)
query := plugin.DB.Model(&m7s.RecordStream{}).Where("stream_path = ? AND type = ?", params.streamPath, "hls")
// 添加时间范围查询条件
if !params.startTime.IsZero() && !params.endTime.IsZero() {
query = query.Where("(start_time <= ? AND end_time >= ?) OR (start_time >= ? AND start_time <= ?)",
params.endTime, params.startTime, params.startTime, params.endTime)
}
err := query.Order("start_time ASC").Find(&recordStreams).Error
if err != nil {
return nil, err
}
// 如果没有找到HLS记录尝试查询MP4记录
if len(recordStreams) == 0 {
query = plugin.DB.Model(&m7s.RecordStream{}).Where("stream_path = ? AND type IN (?)", params.streamPath, []string{"mp4", "fmp4"})
if !params.startTime.IsZero() && !params.endTime.IsZero() {
query = query.Where("(start_time <= ? AND end_time >= ?) OR (start_time >= ? AND start_time <= ?)",
params.endTime, params.startTime, params.startTime, params.endTime)
}
err = query.Order("start_time ASC").Find(&recordStreams).Error
if err != nil {
return nil, err
}
}
return recordStreams, nil
}
// buildFileInfoList 构建文件信息列表
func (plugin *HLSPlugin) buildFileInfoList(recordStreams []m7s.RecordStream, startTime, endTime time.Time) ([]*fileInfo, bool) {
var fileInfoList []*fileInfo
var found bool
for _, record := range recordStreams {
// 检查文件是否存在
if !util.Exist(record.FilePath) {
plugin.Warn("Record file not found", "filePath", record.FilePath)
continue
}
var startOffsetTime time.Duration
recordStartTime := record.StartTime
recordEndTime := record.EndTime
// 计算文件内的偏移时间
if startTime.After(recordStartTime) {
startOffsetTime = startTime.Sub(recordStartTime)
}
// 检查是否在时间范围内
if recordEndTime.Before(startTime) || recordStartTime.After(endTime) {
continue
}
fileInfoList = append(fileInfoList, &fileInfo{
filePath: record.FilePath,
startTime: recordStartTime,
endTime: recordEndTime,
startOffsetTime: startOffsetTime,
recordType: record.Type,
})
found = true
}
return fileInfoList, found
}
// hasOnlyMp4Records 检查是否只有MP4记录
func (plugin *HLSPlugin) hasOnlyMp4Records(fileInfoList []*fileInfo) bool {
if len(fileInfoList) == 0 {
return false
}
for _, info := range fileInfoList {
if info.recordType == "hls" {
return false
}
}
return true
}
// filterTsFiles 过滤HLS TS文件
func (plugin *HLSPlugin) filterTsFiles(fileInfoList []*fileInfo) []*fileInfo {
var filteredList []*fileInfo
for _, info := range fileInfoList {
if info.recordType == "hls" {
filteredList = append(filteredList, info)
}
}
plugin.Debug("TS files filtered", "original", len(fileInfoList), "filtered", len(filteredList))
return filteredList
}
// filterMp4Files 过滤MP4文件
func (plugin *HLSPlugin) filterMp4Files(fileInfoList []*fileInfo) []*fileInfo {
var filteredList []*fileInfo
for _, info := range fileInfoList {
if info.recordType == "mp4" || info.recordType == "fmp4" {
filteredList = append(filteredList, info)
}
}
plugin.Debug("MP4 files filtered", "original", len(fileInfoList), "filtered", len(filteredList))
return filteredList
}
// processMp4ToTs 将MP4记录转换为TS输出
func (plugin *HLSPlugin) processMp4ToTs(w http.ResponseWriter, r *http.Request, fileInfoList []*fileInfo, params *requestParams) {
plugin.Info("Converting MP4 records to TS", "count", len(fileInfoList))
// 设置HTTP响应头
w.Header().Set("Content-Type", "video/mp2t")
w.Header().Set("Content-Disposition", "attachment")
// 创建MP4流列表
var mp4Streams []m7s.RecordStream
for _, info := range fileInfoList {
plugin.Debug("Processing MP4 file", "path", info.filePath, "startTime", info.startTime, "endTime", info.endTime)
mp4Streams = append(mp4Streams, m7s.RecordStream{
FilePath: info.filePath,
StartTime: info.startTime,
EndTime: info.endTime,
Type: info.recordType,
})
}
// 创建DemuxerConverterRange进行MP4解复用和转换
demuxer := &mp4.DemuxerConverterRange[*pkg.ADTS, *pkg.AnnexB]{
DemuxerRange: mp4.DemuxerRange{
StartTime: params.startTime,
EndTime: params.endTime,
Streams: mp4Streams,
Logger: plugin.Logger.With("demuxer", "mp4_Ts"),
},
}
// 创建TS编码器状态
tsWriter := &hls.TsInMemory{}
hasWritten := false
// 写入PMT头的辅助函数
writePMTHeader := func() {
if !hasWritten {
var audio, video codec.FourCC
if demuxer.AudioTrack != nil && demuxer.AudioTrack.ICodecCtx != nil {
audio = demuxer.AudioTrack.ICodecCtx.FourCC()
}
if demuxer.VideoTrack != nil && demuxer.VideoTrack.ICodecCtx != nil {
video = demuxer.VideoTrack.ICodecCtx.FourCC()
}
tsWriter.WritePMTPacket(audio, video)
hasWritten = true
}
}
// 创建音频帧结构
audioFrame := mpegts.MpegtsPESFrame{
Pid: mpegts.PID_AUDIO,
}
// 创建视频帧结构
videoFrame := mpegts.MpegtsPESFrame{
Pid: mpegts.PID_VIDEO,
}
// 执行解复用和转换
err := demuxer.Demux(r.Context(),
func(audio *pkg.ADTS) error {
writePMTHeader()
// 写入音频帧
return tsWriter.WriteAudioFrame(audio, &audioFrame)
}, func(video *pkg.AnnexB) error {
writePMTHeader()
videoFrame.IsKeyFrame = demuxer.VideoTrack.Value.IDR
// 写入视频帧
return tsWriter.WriteVideoFrame(video, &videoFrame)
})
if err != nil {
plugin.Error("MP4 to TS conversion failed", "err", err)
if !hasWritten {
http.Error(w, "Conversion failed", http.StatusInternalServerError)
}
return
}
// 将所有累积的 TsInMemory 内容写入到响应
w.WriteHeader(http.StatusOK)
_, err = tsWriter.WriteTo(w)
if err != nil {
plugin.Error("Failed to write TS data to response", "error", err)
return
}
plugin.Info("MP4 to TS conversion completed")
}
// processTsFiles 处理原生TS文件拼接
func (plugin *HLSPlugin) processTsFiles(w http.ResponseWriter, r *http.Request, fileInfoList []*fileInfo, params *requestParams) {
plugin.Info("Processing TS files", "count", len(fileInfoList))
// 设置HTTP响应头
w.Header().Set("Content-Type", "video/mp2t")
w.Header().Set("Content-Disposition", "attachment")
var writer io.Writer = w
var totalSize uint64
// 第一次遍历:计算总大小
for _, info := range fileInfoList {
if r.Context().Err() != nil {
return
}
fileInfo, err := os.Stat(info.filePath)
if err != nil {
plugin.Error("Failed to stat file", "path", info.filePath, "err", err)
continue
}
totalSize += uint64(fileInfo.Size())
}
// 设置内容长度
w.Header().Set("Content-Length", strconv.FormatUint(totalSize, 10))
w.WriteHeader(http.StatusOK)
// 第二次遍历:写入数据
for i, info := range fileInfoList {
if r.Context().Err() != nil {
return
}
plugin.Debug("Processing TS file", "path", info.filePath)
file, err := os.Open(info.filePath)
if err != nil {
plugin.Error("Failed to open file", "path", info.filePath, "err", err)
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
reader := bufio.NewReader(file)
if i == 0 {
// 第一个文件,直接拷贝
_, err = io.Copy(writer, reader)
} else {
// 后续文件跳过PAT/PMT包只拷贝媒体数据
err = plugin.copyTsFileSkipHeaders(writer, reader)
}
file.Close()
if err != nil {
plugin.Error("Failed to copy file", "path", info.filePath, "err", err)
return
}
}
plugin.Info("TS download completed")
}
// copyTsFileSkipHeaders 拷贝TS文件跳过PAT/PMT包
func (plugin *HLSPlugin) copyTsFileSkipHeaders(writer io.Writer, reader *bufio.Reader) error {
buffer := make([]byte, mpegts.TS_PACKET_SIZE)
for {
n, err := io.ReadFull(reader, buffer)
if err != nil {
if err == io.EOF || err == io.ErrUnexpectedEOF {
break
}
return err
}
if n != mpegts.TS_PACKET_SIZE {
continue
}
// 检查同步字节
if buffer[0] != 0x47 {
continue
}
// 提取PID
pid := uint16(buffer[1]&0x1f)<<8 | uint16(buffer[2])
// 跳过PAT(PID=0)和PMT(PID=256)包
if pid == mpegts.PID_PAT || pid == mpegts.PID_PMT {
continue
}
// 写入媒体数据包
_, err = writer.Write(buffer)
if err != nil {
return err
}
}
return nil
}
// download 下载处理函数
func (plugin *HLSPlugin) download(w http.ResponseWriter, r *http.Request) {
// 解析请求参数
params, err := plugin.parseRequestParams(r)
if err != nil {
plugin.Error("Failed to parse request params", "err", err)
http.Error(w, "Invalid parameters", http.StatusBadRequest)
return
}
plugin.Info("TS download request", "streamPath", params.streamPath, "timeRange", params.timeRange)
// 查询录像记录
recordStreams, err := plugin.queryRecordStreams(params)
if err != nil {
plugin.Error("Failed to query record streams", "err", err)
http.Error(w, "Database error", http.StatusInternalServerError)
return
}
if len(recordStreams) == 0 {
plugin.Warn("No records found", "streamPath", params.streamPath)
http.Error(w, "No records found", http.StatusNotFound)
return
}
// 构建文件信息列表
fileInfoList, found := plugin.buildFileInfoList(recordStreams, params.startTime, params.endTime)
if !found {
plugin.Warn("No valid files found", "streamPath", params.streamPath)
http.Error(w, "No valid files found", http.StatusNotFound)
return
}
// 检查文件类型并处理
if plugin.hasOnlyMp4Records(fileInfoList) {
// 只有MP4记录转换为TS
mp4Files := plugin.filterMp4Files(fileInfoList)
plugin.processMp4ToTs(w, r, mp4Files, params)
} else {
// 有TS记录优先使用TS文件
tsFiles := plugin.filterTsFiles(fileInfoList)
if len(tsFiles) > 0 {
plugin.processTsFiles(w, r, tsFiles, params)
} else {
// 没有TS文件使用MP4转换
mp4Files := plugin.filterMp4Files(fileInfoList)
plugin.processMp4ToTs(w, r, mp4Files, params)
}
}
}
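
// Example (hypothetical host and stream): with this handler registered under
// "/download/{streamPath...}" and the HLS plugin mounted at /hls, a request
// such as
//
//	GET /hls/download/live/test.ts?start=1748620153&end=1748620213
//
// stitches the matching TS recordings for that window into a single .ts
// response, or converts MP4/fMP4 recordings to TS when no TS recordings exist.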

View File

@@ -59,6 +59,7 @@ func (p *HLSPlugin) OnInit() (err error) {
func (p *HLSPlugin) RegisterHandler() map[string]http.HandlerFunc {
return map[string]http.HandlerFunc{
"/vod/{streamPath...}": p.vod,
"/download/{streamPath...}": p.download,
"/api/record/start/{streamPath...}": p.API_record_start,
"/api/record/stop/{id}": p.API_record_stop,
}
@@ -73,6 +74,9 @@ func (config *HLSPlugin) vod(w http.ResponseWriter, r *http.Request) {
}
query := r.URL.Query()
fileName := query.Get("streamPath")
if fileName == "" {
fileName = r.PathValue("streamPath")
}
waitTimeout, err := time.ParseDuration(query.Get("timeout"))
if err == nil {
config.Debug("request", "fileName", fileName, "timeout", waitTimeout)
@@ -113,6 +117,25 @@ func (config *HLSPlugin) vod(w http.ResponseWriter, r *http.Request) {
plBuffer.WriteString("#EXT-X-ENDLIST\n")
w.Write(plBuffer)
return
} else if recordType == "ts" {
playlist := hls.Playlist{
Version: 3,
Sequence: 0,
Targetduration: 10,
}
var plBuffer util.Buffer
playlist.Writer = &plBuffer
playlist.Init()
for i := startTime; i.Before(endTime); i = i.Add(10 * time.Second) {
playlist.WriteInf(hls.PlaylistInf{
Duration: 10,
URL: fmt.Sprintf("/hls/download/%s.ts?start=%d&end=%d", streamPath, i.Unix(), i.Add(10*time.Second).Unix()),
Title: i.Format(time.RFC3339),
})
}
plBuffer.WriteString("#EXT-X-ENDLIST\n")
w.Write(plBuffer)
return
}
query := `stream_path = ? AND type = ? AND start_time IS NOT NULL AND end_time IS NOT NULL AND ? <= end_time AND ? >= start_time`
config.DB.Where(query, streamPath, recordType, startTime, endTime).Find(&records)
@@ -272,7 +295,7 @@ func (config *HLSPlugin) ServeHTTP(w http.ResponseWriter, r *http.Request) {
}
}
} else {
http.ServeFileFS(w, r, zipReader, strings.TrimPrefix(r.URL.Path, "/hls.js"))
http.ServeFileFS(w, r, zipReader, r.URL.Path)
}
}

View File

@@ -35,6 +35,7 @@ var CustomFileName = func(job *m7s.RecordJob) string {
}
func (r *Recorder) createStream(start time.Time) (err error) {
r.RecordJob.RecConf.Type = "ts"
return r.CreateStream(start, CustomFileName)
}

View File

@@ -1,9 +1,11 @@
package mpegts
import (
"bytes"
"errors"
"fmt"
"io"
"m7s.live/v5/pkg/util"
)
@@ -179,50 +181,56 @@ func WritePSI(w io.Writer, pt uint32, psi MpegTsPSI, data []byte) (err error) {
return
}
cw := &util.Crc32Writer{W: w, Crc32: 0xffffffff}
// 使用buffer收集所有需要计算CRC32的数据
bw := &bytes.Buffer{}
// table id(8)
if err = util.WriteUint8ToByte(cw, tableId); err != nil {
if err = util.WriteUint8ToByte(bw, tableId); err != nil {
return
}
// sectionSyntaxIndicator(1) + zero(1) + reserved1(2) + sectionLength(12)
// sectionLength 前两个字节固定为00
// 1 0 11 sectionLength
if err = util.WriteUint16ToByte(cw, sectionSyntaxIndicatorAndSectionLength, true); err != nil {
if err = util.WriteUint16ToByte(bw, sectionSyntaxIndicatorAndSectionLength, true); err != nil {
return
}
// PAT TransportStreamID(16) or PMT ProgramNumber(16)
if err = util.WriteUint16ToByte(cw, transportStreamIdOrProgramNumber, true); err != nil {
if err = util.WriteUint16ToByte(bw, transportStreamIdOrProgramNumber, true); err != nil {
return
}
// reserved2(2) + versionNumber(5) + currentNextIndicator(1)
// 0x3 << 6 -> 1100 0000
// 0x3 << 6 | 1 -> 1100 0001
if err = util.WriteUint8ToByte(cw, versionNumberAndCurrentNextIndicator); err != nil {
if err = util.WriteUint8ToByte(bw, versionNumberAndCurrentNextIndicator); err != nil {
return
}
// sectionNumber(8)
if err = util.WriteUint8ToByte(cw, sectionNumber); err != nil {
if err = util.WriteUint8ToByte(bw, sectionNumber); err != nil {
return
}
// lastSectionNumber(8)
if err = util.WriteUint8ToByte(cw, lastSectionNumber); err != nil {
if err = util.WriteUint8ToByte(bw, lastSectionNumber); err != nil {
return
}
// data
if _, err = cw.Write(data); err != nil {
if _, err = bw.Write(data); err != nil {
return
}
// crc32
crc32 := util.BigLittleSwap(uint(cw.Crc32))
if err = util.WriteUint32ToByte(cw, uint32(crc32), true); err != nil {
// 写入PSI数据
if _, err = w.Write(bw.Bytes()); err != nil {
return
}
// 使用MPEG-TS CRC32算法计算CRC32
crc32 := GetCRC32(bw.Bytes())
if err = util.WriteUint32ToByte(w, crc32, true); err != nil {
return
}
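
// For reference, a stand-alone sketch of the CRC-32 variant used by MPEG-TS
// PSI sections (CRC-32/MPEG-2: polynomial 0x04C11DB7, initial value
// 0xFFFFFFFF, no bit reflection, no final XOR). GetCRC32 above is assumed to
// compute the same value, typically via a lookup table; this bitwise form is
// only illustrative.
func mpeg2CRC32(data []byte) uint32 {
	crc := uint32(0xFFFFFFFF)
	for _, b := range data {
		crc ^= uint32(b) << 24
		for i := 0; i < 8; i++ {
			if crc&0x80000000 != 0 {
				crc = crc<<1 ^ 0x04C11DB7
			} else {
				crc <<= 1
			}
		}
	}
	return crc
}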

View File

@@ -77,3 +77,100 @@ mp4:
At this point, if someone subscribes to the stream vod/test/123, the recordings whose streamPath is `live/test` are queried from the database and then filtered by the start parameter carried in the pull request; a sketch of that lookup follows.
Here, 123 is simply the unique identifier of that particular subscriber.
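
A rough sketch of the lookup (illustrative only, assuming the gorm-backed `RecordStream` model used throughout this repository; the actual plugins build their own variants of this query, and the package name, stream path and record type are placeholders):

```go
package vodsketch // illustrative sketch, not part of the plugin

import (
	"time"

	"gorm.io/gorm"
	m7s "m7s.live/v5"
)

// findVodRecords loosely mirrors what a vod/test/123 subscription triggers:
// select recordings of live/test whose end_time lies after the requested
// start position, oldest first.
func findVodRecords(db *gorm.DB, start time.Time) (records []m7s.RecordStream, err error) {
	err = db.Where(&m7s.RecordStream{Type: "mp4"}).
		Order("start_time ASC").
		Find(&records, "end_time > ? AND stream_path = ?", start, "live/test").Error
	return
}
```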
## Assembly Logic
```mermaid
sequenceDiagram
    participant Client
    participant Handler as download()
    participant DB as Database
    participant Muxer as MP4Muxer
    participant File1 as RecordFile1
    participant File2 as RecordFile2
    participant FileN as RecordFileN
    participant Writer as ResponseWriter
    Client->>Handler: GET /download?start=xxx&end=xxx
    Handler->>Handler: Parse the time-range parameters
    Handler->>DB: Query recording files within the time range
    DB-->>Handler: Return streams[]
    Handler->>Muxer: NewMuxer(flag)
    Handler->>Muxer: CreateFTYPBox()
    Note over Handler: Initialize variables: lastTs, tsOffset, parts[], audioTrack, videoTrack
    loop For each recording file
        Handler->>File1: os.Open(stream.FilePath)
        File1-->>Handler: file handle
        Handler->>File1: NewDemuxer(file)
        Handler->>File1: demuxer.Demux()
        File1-->>Handler: parsing finished
        alt First file
            Handler->>Handler: Handle the start-time offset
            loop For each track
                Handler->>Muxer: AddTrack(track.Cid)
                Muxer-->>Handler: new track
            end
        end
        Note over Handler: Set tsOffset = lastTs
        loop For each sample (RangeSample)
            alt Last file && past the end time
                Handler->>Handler: break (exit the loop)
            else
                Handler->>Handler: Create ContentPart
                Handler->>Handler: Compute the adjusted timestamp
                alt flag == 0 (regular MP4)
                    Handler->>Handler: Adjust the sample offset
                    Handler->>Muxer: AddSampleEntry(fixSample)
                else flag == FLAG_FRAGMENT (fragmented MP4)
                    Handler->>File1: Read the sample data
                    File1-->>Handler: sample.Data
                    Handler->>Muxer: CreateFlagment(track, sample)
                    Muxer-->>Handler: moof, mdat boxes
                    Handler->>Handler: Append to part.boxies
                end
                Handler->>Handler: Update lastTs
            end
        end
        Handler->>Handler: Append part to parts[]
        Handler->>File1: Close()
    end
    alt flag == 0 (regular MP4 mode)
        Handler->>Muxer: MakeMoov()
        Muxer-->>Handler: moov box
        Handler->>Handler: Compute the total size
        Handler->>Writer: Set Content-Length header
        Handler->>Writer: Adjust the sample offsets
        Handler->>Muxer: CreateBaseBox(MDAT)
        Muxer-->>Handler: mdat box
        Handler->>Writer: WriteTo(ftyp, moov, free, mdat header)
        loop Write every content part
            Handler->>Handler: part.Seek(part.Start)
            Handler->>Writer: io.CopyN(writer, part.File, part.Size)
            Handler->>Handler: part.Close()
        end
    else flag == FLAG_FRAGMENT (fragmented MP4 mode)
        Handler->>Handler: Assemble all children boxes
        Handler->>Handler: Compute the total size
        Handler->>Writer: Set Content-Length header
        Handler->>Writer: WriteTo(all boxes)
        loop Close all files
            Handler->>Handler: part.Close()
        end
    end
    Handler-->>Client: MP4 file stream
```
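
For completeness, a minimal client-side sketch of driving the download endpoint described above (the host, port, stream path and the exact encoding of `start`/`end` are placeholders; the MP4 plugin is assumed to be mounted under `/mp4`, as in the extraction examples elsewhere in this repository):

```go
package main

import (
	"io"
	"net/http"
	"os"
)

func main() {
	// Request a stitched MP4 for the chosen time window and stream it to disk.
	resp, err := http.Get("http://localhost:8080/mp4/download/live/test?start=1748620153000&end=1748620453000")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	out, err := os.Create("clip.mp4")
	if err != nil {
		panic(err)
	}
	defer out.Close()

	if _, err := io.Copy(out, resp.Body); err != nil {
		panic(err)
	}
}
```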

View File

@@ -576,7 +576,7 @@ func (p *MP4Plugin) EventStart(ctx context.Context, req *mp4pb.ReqEventRecord) (
return res, err
}
func (p *MP4Plugin) List(ctx context.Context, req *mp4pb.ReqRecordList) (resp *pb.ResponseList, err error) {
func (p *MP4Plugin) List(ctx context.Context, req *mp4pb.ReqRecordList) (resp *pb.RecordResponseList, err error) {
globalReq := &pb.ReqRecordList{
StreamPath: req.StreamPath,
Range: req.Range,
@@ -584,7 +584,6 @@ func (p *MP4Plugin) List(ctx context.Context, req *mp4pb.ReqRecordList) (resp *p
End: req.End,
PageNum: req.PageNum,
PageSize: req.PageSize,
Mode: req.Mode,
Type: "mp4",
EventLevel: req.EventLevel,
}

874
plugin/mp4/api_extract.go Normal file
View File

@@ -0,0 +1,874 @@
/**
 * @file api_extract.go
 * @brief MP4 file query/extraction features: GOP extraction, extraction of new MP4 clips, snapshots, compressed (fast-forward) video, etc.; verified with H264 and H265
 * @author erroot
 * @date 250614
 * @version 1.0.0
 */
package plugin_mp4
import (
"bytes"
"fmt"
"io"
"net/http"
"os"
"strconv"
"strings"
"time"
m7s "m7s.live/v5"
"m7s.live/v5/pkg"
"m7s.live/v5/pkg/util"
mp4 "m7s.live/v5/plugin/mp4/pkg"
"m7s.live/v5/plugin/mp4/pkg/box"
)
// Bytes2HexStr converts the first n bytes of a byte slice into a hex string.
// data: source byte slice
// length: number of bytes to convert (truncated if it exceeds the actual length)
func Bytes2HexStr(data []byte, length int) string {
if length > len(data) {
length = len(data)
}
var builder strings.Builder
for i := 0; i < length; i++ {
if i > 0 {
builder.WriteString(" ")
}
builder.WriteString(fmt.Sprintf("%02X", data[i]))
}
return builder.String()
}
/*
Extract compressed (fast-forward) video.

Example request:

	njtv/glgc.mp4?
	start=1748620153000&
	end=1748620453000&
	outputPath=/opt/njtv/1748620153000.mp4&
	gopSeconds=1&
	gopInterval=1

FLAG_FRAGMENT is not supported yet (not debugged).

Assume a native frame rate of 25 fps and a GOP of 50 frames.
Time range: endTime-startTime = 300 s = 7500 frames = 150 GOPs.
With gopSeconds=0.2 (6 frames per GOP) and gopInterval=10,
the extraction yields 15 GOPs, 90 frames, 90/25 = 3.6 s.

Working backwards: to compress a 5-minute range into about 15 s of playback,
gopSeconds=0.1 implies gopInterval=1, and gopSeconds=0.2 implies gopInterval=2.
*/
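// Rough sizing rule implied by the example above (assuming a constant source
// frame rate fps and a fixed GOP length gopDuration, in seconds):
//
//	selected GOPs  ~= (endTime - startTime) / gopDuration / gopInterval
//	output frames  ~= selected GOPs * frames kept per GOP (about gopSeconds*fps)
//	output seconds ~= output frames / fps
//
// e.g. 300 s of 25 fps video with 2 s GOPs, gopInterval=10 and gopSeconds=0.2
// keeps 15 GOPs x 6 frames = 90 frames, i.e. roughly 3.6 s of output.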
func (p *MP4Plugin) extractCompressedVideo(streamPath string, startTime, endTime time.Time, writer io.Writer, gopSeconds float64, gopInterval int) error {
if p.DB == nil {
return pkg.ErrNoDB
}
var flag mp4.Flag
if strings.HasSuffix(streamPath, ".fmp4") {
flag = mp4.FLAG_FRAGMENT
streamPath = strings.TrimSuffix(streamPath, ".fmp4")
} else {
streamPath = strings.TrimSuffix(streamPath, ".mp4")
}
// 查询数据库获取符合条件的片段
queryRecord := m7s.RecordStream{
Type: "mp4",
}
var streams []m7s.RecordStream
p.DB.Where(&queryRecord).Find(&streams, "end_time>? AND start_time<? AND stream_path=?", startTime, endTime, streamPath)
if len(streams) == 0 {
return fmt.Errorf("no matching MP4 segments found")
}
// 创建输出文件
outputFile := writer
p.Info("extracting compressed video", "streamPath", streamPath, "start", startTime, "end", endTime,
"gopSeconds", gopSeconds, "gopInterval", gopInterval)
muxer := mp4.NewMuxer(flag)
ftyp := muxer.CreateFTYPBox()
n := ftyp.Size()
muxer.CurrentOffset = int64(n)
var videoTrack *mp4.Track
sampleOffset := muxer.CurrentOffset + mp4.BeforeMdatData
mdatOffset := sampleOffset
//var audioTrack *mp4.Track
var extraData []byte
// 压缩相关变量
currentGOPCount := -1
inGOP := false
targetFrameInterval := 40 // 25fps对应的毫秒间隔 (1000/25=40ms)
var filteredSamples []box.Sample
//var lastVideoTimestamp uint32
var timescale uint32 = 1000 // 默认时间刻度为1000 (毫秒)
var currentGopStartTime int64 = -1
// 仅处理视频轨道
for i, stream := range streams {
file, err := os.Open(stream.FilePath)
if err != nil {
return fmt.Errorf("failed to open file %s: %v", stream.FilePath, err)
}
defer file.Close()
p.Info("processing segment", "file", file.Name())
demuxer := mp4.NewDemuxer(file)
err = demuxer.Demux()
if err != nil {
p.Warn("demux error, skipping segment", "error", err, "file", stream.FilePath)
continue
}
// 确保有视频轨道
var hasVideo bool
for _, track := range demuxer.Tracks {
if track.Cid.IsVideo() {
hasVideo = true
// 只在第一个片段或关键帧变化时更新extraData
if extraData == nil || !bytes.Equal(extraData, track.ExtraData) {
extraData = track.ExtraData
if videoTrack == nil {
videoTrack = muxer.AddTrack(track.Cid)
videoTrack.ExtraData = extraData
videoTrack.Width = track.Width
videoTrack.Height = track.Height
}
}
break
}
}
if !hasVideo {
p.Warn("no video track found in segment", "file", stream.FilePath)
continue
}
// 处理起始时间边界
var tsOffset int64
if i == 0 {
startTimestamp := startTime.Sub(stream.StartTime).Milliseconds()
if startTimestamp < 0 {
startTimestamp = 0
}
startSample, err := demuxer.SeekTime(uint64(startTimestamp))
if err == nil {
tsOffset = -int64(startSample.Timestamp)
}
}
// 处理样本
for track, sample := range demuxer.RangeSample {
if !track.Cid.IsVideo() {
continue
}
//for _, sample := range samples {
adjustedTimestamp := sample.Timestamp + uint32(tsOffset)
// 处理GOP逻辑
if sample.KeyFrame {
currentGOPCount++
inGOP = false
if currentGOPCount%gopInterval == 0 {
currentGopStartTime = int64(sample.Timestamp)
inGOP = true
}
}
// 跳过不在当前GOP的帧
if !inGOP {
currentGopStartTime = -1
continue
}
// 如果不在有效的GOP中跳过
if currentGopStartTime == -1 {
continue
}
// 检查是否超过gopSeconds限制
currentTime := int64(sample.Timestamp)
gopElapsed := float64(currentTime-currentGopStartTime) / float64(timescale)
if gopSeconds > 0 && gopElapsed > gopSeconds {
continue
}
// 处理结束时间边界
if i == len(streams)-1 && int64(adjustedTimestamp) > endTime.Sub(streams[0].StartTime).Milliseconds() {
continue
}
// 确保样本数据有效
if sample.Size <= 0 || sample.Size > 10*1024*1024 { // 10MB限制
p.Warn("invalid sample size", "size", sample.Size, "timestamp", sample.Timestamp)
continue
}
// 读取样本数据
if _, err := file.Seek(sample.Offset, io.SeekStart); err != nil {
p.Warn("seek error", "error", err, "offset", sample.Offset)
continue
}
data := make([]byte, sample.Size)
if _, err := io.ReadFull(file, data); err != nil {
p.Warn("read sample error", "error", err, "size", sample.Size)
continue
}
// 创建新的样本
newSample := box.Sample{
KeyFrame: sample.KeyFrame,
Data: data,
Timestamp: adjustedTimestamp,
Offset: sampleOffset,
Size: sample.Size,
Duration: sample.Duration,
}
// p.Info("Compressed", "KeyFrame", newSample.KeyFrame,
// "CTS", newSample.CTS,
// "Timestamp", newSample.Timestamp,
// "Offset", newSample.Offset,
// "Size", newSample.Size,
// "Duration", newSample.Duration,
// "Data", Bytes2HexStr(newSample.Data, 16))
sampleOffset += int64(newSample.Size)
filteredSamples = append(filteredSamples, newSample)
}
}
if len(filteredSamples) == 0 {
return fmt.Errorf("no valid video samples found")
}
// 按25fps重新计算时间戳
for i := range filteredSamples {
filteredSamples[i].Timestamp = uint32(i * targetFrameInterval)
}
// 添加样本到轨道
for _, sample := range filteredSamples {
videoTrack.AddSampleEntry(sample)
}
// 计算视频时长
videoDuration := uint32(len(filteredSamples) * targetFrameInterval)
// 写入输出文件
if flag == 0 {
// 非分片MP4处理
moovSize := muxer.MakeMoov().Size()
dataSize := uint64(sampleOffset - mdatOffset)
// 调整sample偏移量
for _, track := range muxer.Tracks {
for i := range track.Samplelist {
track.Samplelist[i].Offset += int64(moovSize)
}
}
// 创建MDAT盒子 (添加8字节头)
mdatHeaderSize := uint64(8)
mdatBox := box.CreateBaseBox(box.TypeMDAT, dataSize+mdatHeaderSize)
var freeBox *box.FreeBox
if mdatBox.HeaderSize() == box.BasicBoxLen {
freeBox = box.CreateFreeBox(nil)
}
// 写入文件头
_, err := box.WriteTo(outputFile, ftyp, muxer.MakeMoov(), freeBox, mdatBox)
if err != nil {
return fmt.Errorf("failed to write header: %v", err)
}
for _, track := range muxer.Tracks {
for i := range track.Samplelist {
track.Samplelist[i].Offset += int64(moovSize)
if _, err := outputFile.Write(track.Samplelist[i].Data); err != nil {
return err
}
}
}
} else {
// 分片MP4处理
var children []box.IBox
moov := muxer.MakeMoov()
children = append(children, ftyp, moov)
// 创建分片
for _, sample := range filteredSamples {
moof, mdat := muxer.CreateFlagment(videoTrack, sample)
children = append(children, moof, mdat)
}
_, err := box.WriteTo(outputFile, children...)
if err != nil {
return fmt.Errorf("failed to write fragmented MP4: %v", err)
}
}
p.Info("compressed video saved",
"originalDuration", (endTime.Sub(startTime)).Milliseconds(),
"compressedDuration", videoDuration,
"frameCount", len(filteredSamples),
"fps", 25)
return nil
}
/*
Extract a video clip for a given point in time.

Example request:

	njtv/glgc.mp4?
	timest=1748620153000&
	outputPath=/opt/njtv/gop_tmp_1748620153000.mp4

How it works: the timestamp is used to locate the nearest MP4 recording, then the
nearest GOP inside that file, and a new MP4 file is generated from that GOP.
*/
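// Example request (hypothetical host; the route is registered below as
// /extract/gop/{streamPath...} under the MP4 plugin prefix):
//
//	GET http://localhost:8080/mp4/extract/gop/njtv/glgc.mp4?targetTime=1748620153000
//
// targetTime is parsed by util.UnixTimeQueryParse in extractGopVideoHandel.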
func (p *MP4Plugin) extractGopVideo(streamPath string, targetTime time.Time, writer io.Writer) (float64, error) {
if p.DB == nil {
return 0, pkg.ErrNoDB
}
var flag mp4.Flag
if strings.HasSuffix(streamPath, ".fmp4") {
flag = mp4.FLAG_FRAGMENT
streamPath = strings.TrimSuffix(streamPath, ".fmp4")
} else {
streamPath = strings.TrimSuffix(streamPath, ".mp4")
}
// 查询数据库获取符合条件的片段
queryRecord := m7s.RecordStream{
Type: "mp4",
}
var streams []m7s.RecordStream
p.DB.Where(&queryRecord).Find(&streams, "end_time>=? AND start_time<=? AND stream_path=?", targetTime, targetTime, streamPath)
if len(streams) == 0 {
return 0, fmt.Errorf("no matching MP4 segments found")
}
// 创建输出文件
outputFile := writer
p.Info("extracting compressed video", "streamPath", streamPath, "targetTime", targetTime)
muxer := mp4.NewMuxer(flag)
ftyp := muxer.CreateFTYPBox()
n := ftyp.Size()
muxer.CurrentOffset = int64(n)
var videoTrack *mp4.Track
sampleOffset := muxer.CurrentOffset + mp4.BeforeMdatData
mdatOffset := sampleOffset
//var audioTrack *mp4.Track
var extraData []byte
// 压缩相关变量
findGOP := false
targetFrameInterval := 40 // 25fps对应的毫秒间隔 (1000/25=40ms)
var filteredSamples []box.Sample
//var lastVideoTimestamp uint32
var timescale uint32 = 1000 // 默认时间刻度为1000 (毫秒)
var currentGopStartTime int64 = -1
var gopElapsed float64 = 0
// 仅处理视频轨道
for _, stream := range streams {
file, err := os.Open(stream.FilePath)
if err != nil {
return 0, fmt.Errorf("failed to open file %s: %v", stream.FilePath, err)
}
defer file.Close()
p.Info("processing segment", "file", file.Name())
demuxer := mp4.NewDemuxer(file)
err = demuxer.Demux()
if err != nil {
p.Warn("demux error, skipping segment", "error", err, "file", stream.FilePath)
continue
}
// 确保有视频轨道
var hasVideo bool
for _, track := range demuxer.Tracks {
if track.Cid.IsVideo() {
hasVideo = true
// 只在第一个片段或关键帧变化时更新extraData
if extraData == nil || !bytes.Equal(extraData, track.ExtraData) {
extraData = track.ExtraData
if videoTrack == nil {
videoTrack = muxer.AddTrack(track.Cid)
videoTrack.ExtraData = extraData
videoTrack.Width = track.Width
videoTrack.Height = track.Height
}
}
break
}
}
if !hasVideo {
p.Warn("no video track found in segment", "file", stream.FilePath)
continue
}
// 处理起始时间边界
var tsOffset int64
startTimestamp := targetTime.Sub(stream.StartTime).Milliseconds()
// p.Info("extractGop", "targetTime", targetTime,
// "stream.StartTime", stream.StartTime,
// "startTimestamp", startTimestamp)
if startTimestamp < 0 {
startTimestamp = 0
}
//通过时间戳定位到最近的关键帧如视频IDR帧返回的startSample是该关键帧对应的样本
startSample, err := demuxer.SeekTime(uint64(startTimestamp))
if err == nil {
tsOffset = -int64(startSample.Timestamp)
}
//p.Info("extractGop", "startSample", startSample)
// 处理样本
//RangeSample迭代的是当前时间范围内的所有样本可能包含非关键帧顺序取决于MP4文件中样本的物理存储顺序
for track, sample := range demuxer.RangeSample {
if !track.Cid.IsVideo() {
continue
}
if sample.Timestamp < startSample.Timestamp {
continue
}
//for _, sample := range samples {
adjustedTimestamp := sample.Timestamp + uint32(tsOffset)
// 处理GOP逻辑,已经处理完上一个gop
if sample.KeyFrame && findGOP {
break
}
// 处理GOP逻辑
if sample.KeyFrame && !findGOP {
findGOP = true
currentGopStartTime = int64(sample.Timestamp)
}
// 跳过不在当前GOP的帧
if !findGOP {
currentGopStartTime = -1
continue
}
// 检查是否超过gopSeconds限制
currentTime := int64(sample.Timestamp)
gopElapsed = float64(currentTime-currentGopStartTime) / float64(timescale)
// 确保样本数据有效
if sample.Size <= 0 || sample.Size > 10*1024*1024 { // 10MB限制
p.Warn("invalid sample size", "size", sample.Size, "timestamp", sample.Timestamp)
continue
}
// 读取样本数据
if _, err := file.Seek(sample.Offset, io.SeekStart); err != nil {
p.Warn("seek error", "error", err, "offset", sample.Offset)
continue
}
data := make([]byte, sample.Size)
if _, err := io.ReadFull(file, data); err != nil {
p.Warn("read sample error", "error", err, "size", sample.Size)
continue
}
// 创建新的样本
newSample := box.Sample{
KeyFrame: sample.KeyFrame,
Data: data,
Timestamp: adjustedTimestamp,
Offset: sampleOffset,
Size: sample.Size,
Duration: sample.Duration,
}
// p.Info("extractGop", "KeyFrame", newSample.KeyFrame,
// "CTS", newSample.CTS,
// "Timestamp", newSample.Timestamp,
// "Offset", newSample.Offset,
// "Size", newSample.Size,
// "Duration", newSample.Duration,
// "Data", Bytes2HexStr(newSample.Data, 16))
sampleOffset += int64(newSample.Size)
filteredSamples = append(filteredSamples, newSample)
}
}
if len(filteredSamples) == 0 {
return 0, fmt.Errorf("no valid video samples found")
}
// 按25fps重新计算时间戳
for i := range filteredSamples {
filteredSamples[i].Timestamp = uint32(i * targetFrameInterval)
}
// 添加样本到轨道
for _, sample := range filteredSamples {
videoTrack.AddSampleEntry(sample)
}
// 计算视频时长
videoDuration := uint32(len(filteredSamples) * targetFrameInterval)
// 写入输出文件
if flag == 0 {
// 非分片MP4处理
moovSize := muxer.MakeMoov().Size()
dataSize := uint64(sampleOffset - mdatOffset)
// 调整sample偏移量
for _, track := range muxer.Tracks {
for i := range track.Samplelist {
track.Samplelist[i].Offset += int64(moovSize)
}
}
// 创建MDAT盒子 (添加8字节头)
mdatHeaderSize := uint64(8)
mdatBox := box.CreateBaseBox(box.TypeMDAT, dataSize+mdatHeaderSize)
var freeBox *box.FreeBox
if mdatBox.HeaderSize() == box.BasicBoxLen {
freeBox = box.CreateFreeBox(nil)
}
// 写入文件头
_, err := box.WriteTo(outputFile, ftyp, muxer.MakeMoov(), freeBox, mdatBox)
if err != nil {
return 0, fmt.Errorf("failed to write header: %v", err)
}
for _, track := range muxer.Tracks {
for i := range track.Samplelist {
track.Samplelist[i].Offset += int64(moovSize)
if _, err := outputFile.Write(track.Samplelist[i].Data); err != nil {
return 0, err
}
}
}
} else {
// 分片MP4处理
var children []box.IBox
moov := muxer.MakeMoov()
children = append(children, ftyp, moov)
// 创建分片
for _, sample := range filteredSamples {
moof, mdat := muxer.CreateFlagment(videoTrack, sample)
children = append(children, moof, mdat)
}
_, err := box.WriteTo(outputFile, children...)
if err != nil {
return 0, fmt.Errorf("failed to write fragmented MP4: %v", err)
}
}
p.Info("extract gop video saved",
"targetTime", targetTime,
"compressedDuration", videoDuration,
"gopElapsed", gopElapsed,
"frameCount", len(filteredSamples),
"fps", 25)
return gopElapsed, nil
}
/*
Extract compressed (fast-forward) video.

	GET http://192.168.0.238:8080/mp4/extract/compressed/
	njtv/glgc.mp4?
	start=1748620153000&
	end=1748620453000&
	outputPath=/opt/njtv/1748620153000.mp4&
	gopSeconds=1&
	gopInterval=1
*/
func (p *MP4Plugin) extractCompressedVideoHandel(w http.ResponseWriter, r *http.Request) {
streamPath := r.PathValue("streamPath")
query := r.URL.Query()
// 合并多个 mp4
startTime, endTime, err := util.TimeRangeQueryParse(query)
if err != nil {
http.Error(w, err.Error(), http.StatusBadRequest)
return
}
p.Info("extractCompressedVideoHandel", "streamPath", streamPath, "start", startTime, "end", endTime)
gopSeconds, _ := strconv.ParseFloat(query.Get("gopSeconds"), 64)
gopInterval, _ := strconv.Atoi(query.Get("gopInterval"))
if gopSeconds == 0 {
gopSeconds = 1
}
if gopInterval == 0 {
gopInterval = 1
}
// 设置响应头
w.Header().Set("Content-Type", "video/mp4")
w.Header().Set("Content-Disposition", "attachment; filename=\"compressed_video.mp4\"")
err = p.extractCompressedVideo(streamPath, startTime, endTime, w, gopSeconds, gopInterval)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
}
func (p *MP4Plugin) extractGopVideoHandel(w http.ResponseWriter, r *http.Request) {
streamPath := r.PathValue("streamPath")
query := r.URL.Query()
targetTimeString := query.Get("targetTime")
// 合并多个 mp4
targetTime, err := util.UnixTimeQueryParse(targetTimeString)
if err != nil {
http.Error(w, err.Error(), http.StatusBadRequest)
return
}
p.Info("extractGopVideoHandel", "streamPath", streamPath, "targetTime", targetTime)
// 设置响应头
w.Header().Set("Content-Type", "video/mp4")
w.Header().Set("Content-Disposition", "attachment; filename=\"gop_video.mp4\"")
_, err = p.extractGopVideo(streamPath, targetTime, w)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
}
func (p *MP4Plugin) snapHandel(w http.ResponseWriter, r *http.Request) {
streamPath := r.PathValue("streamPath")
query := r.URL.Query()
targetTimeString := query.Get("targetTime")
// 合并多个 mp4
targetTime, err := util.UnixTimeQueryParse(targetTimeString)
if err != nil {
http.Error(w, err.Error(), http.StatusBadRequest)
return
}
p.Info("snapHandel", "streamPath", streamPath, "targetTime", targetTime)
// 设置响应头
w.Header().Set("Content-Type", "image/jpeg")
w.Header().Set("Content-Disposition", "attachment; filename=\"snapshot.jpg\"")
err = p.snapToWriter(streamPath, targetTime, w)
if err != nil {
p.Info("snapHandel", "err", err)
http.Error(w, err.Error(), http.StatusBadRequest)
return
}
}
func (p *MP4Plugin) snapToWriter(streamPath string, targetTime time.Time, writer io.Writer) error {
if p.DB == nil {
return pkg.ErrNoDB
}
var flag mp4.Flag
if strings.HasSuffix(streamPath, ".fmp4") {
flag = mp4.FLAG_FRAGMENT
streamPath = strings.TrimSuffix(streamPath, ".fmp4")
} else {
streamPath = strings.TrimSuffix(streamPath, ".mp4")
}
// 查询数据库获取符合条件的片段
queryRecord := m7s.RecordStream{
Type: "mp4",
}
var streams []m7s.RecordStream
p.DB.Where(&queryRecord).Find(&streams, "end_time>=? AND start_time<=? AND stream_path=?", targetTime, targetTime, streamPath)
if len(streams) == 0 {
return fmt.Errorf("no matching MP4 segments found")
}
muxer := mp4.NewMuxer(flag)
ftyp := muxer.CreateFTYPBox()
n := ftyp.Size()
muxer.CurrentOffset = int64(n)
var videoTrack *mp4.Track
sampleOffset := muxer.CurrentOffset + mp4.BeforeMdatData
//var audioTrack *mp4.Track
var extraData []byte
// 压缩相关变量
findGOP := false
var filteredSamples []box.Sample
var sampleIdx = 0
// 仅处理视频轨道
for _, stream := range streams {
file, err := os.Open(stream.FilePath)
if err != nil {
return fmt.Errorf("failed to open file %s: %v", stream.FilePath, err)
}
defer file.Close()
p.Info("processing segment", "file", file.Name())
demuxer := mp4.NewDemuxer(file)
err = demuxer.Demux()
if err != nil {
p.Warn("demux error, skipping segment", "error", err, "file", stream.FilePath)
continue
}
// 确保有视频轨道
var hasVideo bool
for _, track := range demuxer.Tracks {
if track.Cid.IsVideo() {
hasVideo = true
// 只在第一个片段或关键帧变化时更新extraData
if extraData == nil || !bytes.Equal(extraData, track.ExtraData) {
extraData = track.ExtraData
if videoTrack == nil {
videoTrack = muxer.AddTrack(track.Cid)
videoTrack.ExtraData = extraData
videoTrack.Width = track.Width
videoTrack.Height = track.Height
}
}
break
}
}
if !hasVideo {
p.Warn("no video track found in segment", "file", stream.FilePath)
continue
}
// 处理起始时间边界
var tsOffset int64
startTimestamp := targetTime.Sub(stream.StartTime).Milliseconds()
if startTimestamp < 0 {
startTimestamp = 0
}
//通过时间戳定位到最近的关键帧如视频IDR帧返回的startSample是该关键帧对应的样本
startSample, err := demuxer.SeekTime(uint64(startTimestamp))
if err == nil {
tsOffset = -int64(startSample.Timestamp)
}
// 处理样本
//RangeSample迭代的是当前时间范围内的所有样本可能包含非关键帧顺序取决于MP4文件中样本的物理存储顺序
for track, sample := range demuxer.RangeSample {
if !track.Cid.IsVideo() {
continue
}
if sample.Timestamp < startSample.Timestamp {
continue
}
//记录GOP内帧的序号没有考虑B帧的情况
if sample.Timestamp < uint32(startTimestamp) {
sampleIdx++
}
adjustedTimestamp := sample.Timestamp + uint32(tsOffset)
// 处理GOP逻辑,已经处理完上一个gop
if sample.KeyFrame && findGOP {
break
}
// 处理GOP逻辑
if sample.KeyFrame && !findGOP {
findGOP = true
}
// 跳过不在当前GOP的帧
if !findGOP {
continue
}
// 确保样本数据有效
if sample.Size <= 0 || sample.Size > 10*1024*1024 { // 10MB限制
p.Warn("invalid sample size", "size", sample.Size, "timestamp", sample.Timestamp)
continue
}
// 读取样本数据
if _, err := file.Seek(sample.Offset, io.SeekStart); err != nil {
p.Warn("seek error", "error", err, "offset", sample.Offset)
continue
}
data := make([]byte, sample.Size)
if _, err := io.ReadFull(file, data); err != nil {
p.Warn("read sample error", "error", err, "size", sample.Size)
continue
}
// 创建新的样本
newSample := box.Sample{
KeyFrame: sample.KeyFrame,
Data: data,
Timestamp: adjustedTimestamp,
Offset: sampleOffset,
Size: sample.Size,
Duration: sample.Duration,
}
sampleOffset += int64(newSample.Size)
filteredSamples = append(filteredSamples, newSample)
}
}
if len(filteredSamples) == 0 {
return fmt.Errorf("no valid video samples found")
}
// Re-timestamp the samples at a fixed 25 fps
targetFrameInterval := 40 // frame interval in milliseconds for 25 fps (1000/25 = 40ms)
for i := range filteredSamples {
filteredSamples[i].Timestamp = uint32(i * targetFrameInterval)
}
p.Info("extract gop and snap",
"targetTime", targetTime,
"frist", filteredSamples[0].Timestamp,
"sampleIdx", sampleIdx,
"frameCount", len(filteredSamples))
err := ProcessWithFFmpeg(filteredSamples, sampleIdx, videoTrack, writer)
if err != nil {
return err
}
p.Info("extract gop and snap saved",
"targetTime", targetTime,
"frameCount", len(filteredSamples))
return nil
}
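The helper above can also be driven outside the HTTP handler. A minimal sketch, not part of this diff, assuming an initialized *MP4Plugin value p with a database configured; stream path and target time are placeholder values:

	var buf bytes.Buffer
	// snapshot of stream "live/camera1" as it looked one minute ago (illustrative values)
	if err := p.snapToWriter("live/camera1", time.Now().Add(-time.Minute), &buf); err != nil {
		p.Warn("snapshot failed", "err", err)
	} else {
		_ = os.WriteFile("snapshot.jpg", buf.Bytes(), 0o644)
	}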

View File

@@ -76,7 +76,10 @@ var _ = m7s.InstallPlugin[MP4Plugin](m7s.PluginMeta{
func (p *MP4Plugin) RegisterHandler() map[string]http.HandlerFunc {
return map[string]http.HandlerFunc{
"/download/{streamPath...}": p.download,
"/download/{streamPath...}": p.download,
"/extract/compressed/{streamPath...}": p.extractCompressedVideoHandel,
"/extract/gop/{streamPath...}": p.extractGopVideoHandel,
"/snap/{streamPath...}": p.snapHandel,
}
}

View File

@@ -1,7 +1,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.36.5
// protoc v5.28.3
// protoc-gen-go v1.36.6
// protoc v5.29.3
// source: mp4.proto
package pb
@@ -587,123 +587,74 @@ func (x *ResponseStopRecord) GetData() uint64 {
var File_mp4_proto protoreflect.FileDescriptor
var file_mp4_proto_rawDesc = string([]byte{
0x0a, 0x09, 0x6d, 0x70, 0x34, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x03, 0x6d, 0x70, 0x34,
0x1a, 0x1c, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x61, 0x6e, 0x6e,
0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1b,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f,
0x65, 0x6d, 0x70, 0x74, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1e, 0x67, 0x6f, 0x6f,
0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x64, 0x75, 0x72,
0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x0c, 0x67, 0x6c, 0x6f,
0x62, 0x61, 0x6c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xd7, 0x01, 0x0a, 0x0d, 0x52, 0x65,
0x71, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x1e, 0x0a, 0x0a, 0x73,
0x74, 0x72, 0x65, 0x61, 0x6d, 0x50, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52,
0x0a, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x50, 0x61, 0x74, 0x68, 0x12, 0x14, 0x0a, 0x05, 0x72,
0x61, 0x6e, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x72, 0x61, 0x6e, 0x67,
0x65, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09,
0x52, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x65, 0x6e, 0x64, 0x18, 0x04,
0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x65, 0x6e, 0x64, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x61, 0x67,
0x65, 0x4e, 0x75, 0x6d, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x07, 0x70, 0x61, 0x67, 0x65,
0x4e, 0x75, 0x6d, 0x12, 0x1a, 0x0a, 0x08, 0x70, 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x18,
0x06, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x08, 0x70, 0x61, 0x67, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x12,
0x12, 0x0a, 0x04, 0x6d, 0x6f, 0x64, 0x65, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6d,
0x6f, 0x64, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x65, 0x76, 0x65,
0x6c, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x65,
0x76, 0x65, 0x6c, 0x22, 0x91, 0x01, 0x0a, 0x0f, 0x52, 0x65, 0x71, 0x52, 0x65, 0x63, 0x6f, 0x72,
0x64, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x73, 0x74, 0x72, 0x65, 0x61,
0x6d, 0x50, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x73, 0x74, 0x72,
0x65, 0x61, 0x6d, 0x50, 0x61, 0x74, 0x68, 0x12, 0x10, 0x0a, 0x03, 0x69, 0x64, 0x73, 0x18, 0x02,
0x20, 0x03, 0x28, 0x0d, 0x52, 0x03, 0x69, 0x64, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x73, 0x74, 0x61,
0x72, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x74,
0x61, 0x72, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x65, 0x6e, 0x64, 0x54, 0x69,
0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x65, 0x6e, 0x64, 0x54, 0x69, 0x6d,
0x65, 0x12, 0x14, 0x0a, 0x05, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09,
0x52, 0x05, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x22, 0x90, 0x02, 0x0a, 0x0e, 0x52, 0x65, 0x71, 0x45,
0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x12, 0x1e, 0x0a, 0x0a, 0x73, 0x74,
0x72, 0x65, 0x61, 0x6d, 0x50, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a,
0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x50, 0x61, 0x74, 0x68, 0x12, 0x18, 0x0a, 0x07, 0x65, 0x76,
0x65, 0x6e, 0x74, 0x49, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x65, 0x76, 0x65,
0x6e, 0x74, 0x49, 0x64, 0x12, 0x1c, 0x0a, 0x09, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x4e, 0x61, 0x6d,
0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x4e, 0x61,
0x6d, 0x65, 0x12, 0x26, 0x0a, 0x0e, 0x62, 0x65, 0x66, 0x6f, 0x72, 0x65, 0x44, 0x75, 0x72, 0x61,
0x74, 0x69, 0x6f, 0x6e, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0e, 0x62, 0x65, 0x66, 0x6f,
0x72, 0x65, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x24, 0x0a, 0x0d, 0x61, 0x66,
0x74, 0x65, 0x72, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x05, 0x20, 0x01, 0x28,
0x09, 0x52, 0x0d, 0x61, 0x66, 0x74, 0x65, 0x72, 0x44, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e,
0x12, 0x1c, 0x0a, 0x09, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x44, 0x65, 0x73, 0x63, 0x18, 0x06, 0x20,
0x01, 0x28, 0x09, 0x52, 0x09, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x44, 0x65, 0x73, 0x63, 0x12, 0x1e,
0x0a, 0x0a, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x07, 0x20, 0x01,
0x28, 0x09, 0x52, 0x0a, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x4c, 0x65, 0x76, 0x65, 0x6c, 0x12, 0x1a,
0x0a, 0x08, 0x66, 0x72, 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09,
0x52, 0x08, 0x66, 0x72, 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x22, 0x57, 0x0a, 0x13, 0x52, 0x65,
0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x63, 0x6f, 0x72,
0x64, 0x12, 0x12, 0x0a, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52,
0x04, 0x63, 0x6f, 0x64, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65,
0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12,
0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x04, 0x64,
0x61, 0x74, 0x61, 0x22, 0x83, 0x01, 0x0a, 0x0e, 0x52, 0x65, 0x71, 0x53, 0x74, 0x61, 0x72, 0x74,
0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x12, 0x1e, 0x0a, 0x0a, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d,
0x50, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x73, 0x74, 0x72, 0x65,
0x61, 0x6d, 0x50, 0x61, 0x74, 0x68, 0x12, 0x35, 0x0a, 0x08, 0x66, 0x72, 0x61, 0x67, 0x6d, 0x65,
0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x44, 0x75, 0x72, 0x61, 0x74,
0x69, 0x6f, 0x6e, 0x52, 0x08, 0x66, 0x72, 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x1a, 0x0a,
0x08, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52,
0x08, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x22, 0x57, 0x0a, 0x13, 0x52, 0x65, 0x73,
0x70, 0x6f, 0x6e, 0x73, 0x65, 0x53, 0x74, 0x61, 0x72, 0x74, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64,
0x12, 0x12, 0x0a, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x04,
0x63, 0x6f, 0x64, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18,
0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x12,
0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x04, 0x64, 0x61,
0x74, 0x61, 0x22, 0x2f, 0x0a, 0x0d, 0x52, 0x65, 0x71, 0x53, 0x74, 0x6f, 0x70, 0x52, 0x65, 0x63,
0x6f, 0x72, 0x64, 0x12, 0x1e, 0x0a, 0x0a, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x50, 0x61, 0x74,
0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x50,
0x61, 0x74, 0x68, 0x22, 0x56, 0x0a, 0x12, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x53,
0x74, 0x6f, 0x70, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x12, 0x12, 0x0a, 0x04, 0x63, 0x6f, 0x64,
0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x04, 0x63, 0x6f, 0x64, 0x65, 0x12, 0x18, 0x0a,
0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07,
0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18,
0x03, 0x20, 0x01, 0x28, 0x04, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x32, 0xc4, 0x04, 0x0a, 0x03,
0x61, 0x70, 0x69, 0x12, 0x57, 0x0a, 0x04, 0x4c, 0x69, 0x73, 0x74, 0x12, 0x12, 0x2e, 0x6d, 0x70,
0x34, 0x2e, 0x52, 0x65, 0x71, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x4c, 0x69, 0x73, 0x74, 0x1a,
0x14, 0x2e, 0x67, 0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
0x65, 0x4c, 0x69, 0x73, 0x74, 0x22, 0x25, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x1f, 0x12, 0x1d, 0x2f,
0x6d, 0x70, 0x34, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x6c, 0x69, 0x73, 0x74, 0x2f, 0x7b, 0x73, 0x74,
0x72, 0x65, 0x61, 0x6d, 0x50, 0x61, 0x74, 0x68, 0x3d, 0x2a, 0x2a, 0x7d, 0x12, 0x54, 0x0a, 0x07,
0x43, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65,
0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a,
0x17, 0x2e, 0x67, 0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
0x65, 0x43, 0x61, 0x74, 0x61, 0x6c, 0x6f, 0x67, 0x22, 0x18, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x12,
0x12, 0x10, 0x2f, 0x6d, 0x70, 0x34, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x63, 0x61, 0x74, 0x61, 0x6c,
0x6f, 0x67, 0x12, 0x62, 0x0a, 0x06, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x12, 0x14, 0x2e, 0x6d,
0x70, 0x34, 0x2e, 0x52, 0x65, 0x71, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x44, 0x65, 0x6c, 0x65,
0x74, 0x65, 0x1a, 0x16, 0x2e, 0x67, 0x6c, 0x6f, 0x62, 0x61, 0x6c, 0x2e, 0x52, 0x65, 0x73, 0x70,
0x6f, 0x6e, 0x73, 0x65, 0x44, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x22, 0x2a, 0x82, 0xd3, 0xe4, 0x93,
0x02, 0x24, 0x3a, 0x01, 0x2a, 0x22, 0x1f, 0x2f, 0x6d, 0x70, 0x34, 0x2f, 0x61, 0x70, 0x69, 0x2f,
0x64, 0x65, 0x6c, 0x65, 0x74, 0x65, 0x2f, 0x7b, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x50, 0x61,
0x74, 0x68, 0x3d, 0x2a, 0x2a, 0x7d, 0x12, 0x5c, 0x0a, 0x0a, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x53,
0x74, 0x61, 0x72, 0x74, 0x12, 0x13, 0x2e, 0x6d, 0x70, 0x34, 0x2e, 0x52, 0x65, 0x71, 0x45, 0x76,
0x65, 0x6e, 0x74, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x1a, 0x18, 0x2e, 0x6d, 0x70, 0x34, 0x2e,
0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x63,
0x6f, 0x72, 0x64, 0x22, 0x1f, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x19, 0x3a, 0x01, 0x2a, 0x22, 0x14,
0x2f, 0x6d, 0x70, 0x34, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x2f, 0x73,
0x74, 0x61, 0x72, 0x74, 0x12, 0x67, 0x0a, 0x0b, 0x53, 0x74, 0x61, 0x72, 0x74, 0x52, 0x65, 0x63,
0x6f, 0x72, 0x64, 0x12, 0x13, 0x2e, 0x6d, 0x70, 0x34, 0x2e, 0x52, 0x65, 0x71, 0x53, 0x74, 0x61,
0x72, 0x74, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x1a, 0x18, 0x2e, 0x6d, 0x70, 0x34, 0x2e, 0x52,
0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x53, 0x74, 0x61, 0x72, 0x74, 0x52, 0x65, 0x63, 0x6f,
0x72, 0x64, 0x22, 0x29, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x23, 0x3a, 0x01, 0x2a, 0x22, 0x1e, 0x2f,
0x6d, 0x70, 0x34, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x2f, 0x7b, 0x73,
0x74, 0x72, 0x65, 0x61, 0x6d, 0x50, 0x61, 0x74, 0x68, 0x3d, 0x2a, 0x2a, 0x7d, 0x12, 0x63, 0x0a,
0x0a, 0x53, 0x74, 0x6f, 0x70, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x12, 0x12, 0x2e, 0x6d, 0x70,
0x34, 0x2e, 0x52, 0x65, 0x71, 0x53, 0x74, 0x6f, 0x70, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x1a,
0x17, 0x2e, 0x6d, 0x70, 0x34, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x53, 0x74,
0x6f, 0x70, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x22, 0x28, 0x82, 0xd3, 0xe4, 0x93, 0x02, 0x22,
0x3a, 0x01, 0x2a, 0x22, 0x1d, 0x2f, 0x6d, 0x70, 0x34, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x73, 0x74,
0x6f, 0x70, 0x2f, 0x7b, 0x73, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x50, 0x61, 0x74, 0x68, 0x3d, 0x2a,
0x2a, 0x7d, 0x42, 0x1b, 0x5a, 0x19, 0x6d, 0x37, 0x73, 0x2e, 0x6c, 0x69, 0x76, 0x65, 0x2f, 0x76,
0x35, 0x2f, 0x70, 0x6c, 0x75, 0x67, 0x69, 0x6e, 0x2f, 0x6d, 0x70, 0x34, 0x2f, 0x70, 0x62, 0x62,
0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
})
const file_mp4_proto_rawDesc = "" +
"\n" +
"\tmp4.proto\x12\x03mp4\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\fglobal.proto\"\xd7\x01\n" +
"\rReqRecordList\x12\x1e\n" +
"\n" +
"streamPath\x18\x01 \x01(\tR\n" +
"streamPath\x12\x14\n" +
"\x05range\x18\x02 \x01(\tR\x05range\x12\x14\n" +
"\x05start\x18\x03 \x01(\tR\x05start\x12\x10\n" +
"\x03end\x18\x04 \x01(\tR\x03end\x12\x18\n" +
"\apageNum\x18\x05 \x01(\rR\apageNum\x12\x1a\n" +
"\bpageSize\x18\x06 \x01(\rR\bpageSize\x12\x12\n" +
"\x04mode\x18\a \x01(\tR\x04mode\x12\x1e\n" +
"\n" +
"eventLevel\x18\b \x01(\tR\n" +
"eventLevel\"\x91\x01\n" +
"\x0fReqRecordDelete\x12\x1e\n" +
"\n" +
"streamPath\x18\x01 \x01(\tR\n" +
"streamPath\x12\x10\n" +
"\x03ids\x18\x02 \x03(\rR\x03ids\x12\x1c\n" +
"\tstartTime\x18\x03 \x01(\tR\tstartTime\x12\x18\n" +
"\aendTime\x18\x04 \x01(\tR\aendTime\x12\x14\n" +
"\x05range\x18\x05 \x01(\tR\x05range\"\x90\x02\n" +
"\x0eReqEventRecord\x12\x1e\n" +
"\n" +
"streamPath\x18\x01 \x01(\tR\n" +
"streamPath\x12\x18\n" +
"\aeventId\x18\x02 \x01(\tR\aeventId\x12\x1c\n" +
"\teventName\x18\x03 \x01(\tR\teventName\x12&\n" +
"\x0ebeforeDuration\x18\x04 \x01(\tR\x0ebeforeDuration\x12$\n" +
"\rafterDuration\x18\x05 \x01(\tR\rafterDuration\x12\x1c\n" +
"\teventDesc\x18\x06 \x01(\tR\teventDesc\x12\x1e\n" +
"\n" +
"eventLevel\x18\a \x01(\tR\n" +
"eventLevel\x12\x1a\n" +
"\bfragment\x18\b \x01(\tR\bfragment\"W\n" +
"\x13ResponseEventRecord\x12\x12\n" +
"\x04code\x18\x01 \x01(\x05R\x04code\x12\x18\n" +
"\amessage\x18\x02 \x01(\tR\amessage\x12\x12\n" +
"\x04data\x18\x03 \x01(\rR\x04data\"\x83\x01\n" +
"\x0eReqStartRecord\x12\x1e\n" +
"\n" +
"streamPath\x18\x01 \x01(\tR\n" +
"streamPath\x125\n" +
"\bfragment\x18\x02 \x01(\v2\x19.google.protobuf.DurationR\bfragment\x12\x1a\n" +
"\bfilePath\x18\x03 \x01(\tR\bfilePath\"W\n" +
"\x13ResponseStartRecord\x12\x12\n" +
"\x04code\x18\x01 \x01(\x05R\x04code\x12\x18\n" +
"\amessage\x18\x02 \x01(\tR\amessage\x12\x12\n" +
"\x04data\x18\x03 \x01(\x04R\x04data\"/\n" +
"\rReqStopRecord\x12\x1e\n" +
"\n" +
"streamPath\x18\x01 \x01(\tR\n" +
"streamPath\"V\n" +
"\x12ResponseStopRecord\x12\x12\n" +
"\x04code\x18\x01 \x01(\x05R\x04code\x12\x18\n" +
"\amessage\x18\x02 \x01(\tR\amessage\x12\x12\n" +
"\x04data\x18\x03 \x01(\x04R\x04data2\xca\x04\n" +
"\x03api\x12]\n" +
"\x04List\x12\x12.mp4.ReqRecordList\x1a\x1a.global.RecordResponseList\"%\x82\xd3\xe4\x93\x02\x1f\x12\x1d/mp4/api/list/{streamPath=**}\x12T\n" +
"\aCatalog\x12\x16.google.protobuf.Empty\x1a\x17.global.ResponseCatalog\"\x18\x82\xd3\xe4\x93\x02\x12\x12\x10/mp4/api/catalog\x12b\n" +
"\x06Delete\x12\x14.mp4.ReqRecordDelete\x1a\x16.global.ResponseDelete\"*\x82\xd3\xe4\x93\x02$:\x01*\"\x1f/mp4/api/delete/{streamPath=**}\x12\\\n" +
"\n" +
"EventStart\x12\x13.mp4.ReqEventRecord\x1a\x18.mp4.ResponseEventRecord\"\x1f\x82\xd3\xe4\x93\x02\x19:\x01*\"\x14/mp4/api/event/start\x12g\n" +
"\vStartRecord\x12\x13.mp4.ReqStartRecord\x1a\x18.mp4.ResponseStartRecord\")\x82\xd3\xe4\x93\x02#:\x01*\"\x1e/mp4/api/start/{streamPath=**}\x12c\n" +
"\n" +
"StopRecord\x12\x12.mp4.ReqStopRecord\x1a\x17.mp4.ResponseStopRecord\"(\x82\xd3\xe4\x93\x02\":\x01*\"\x1d/mp4/api/stop/{streamPath=**}B\x1bZ\x19m7s.live/v5/plugin/mp4/pbb\x06proto3"
var (
file_mp4_proto_rawDescOnce sync.Once
@@ -719,19 +670,19 @@ func file_mp4_proto_rawDescGZIP() []byte {
var file_mp4_proto_msgTypes = make([]protoimpl.MessageInfo, 8)
var file_mp4_proto_goTypes = []any{
(*ReqRecordList)(nil), // 0: mp4.ReqRecordList
(*ReqRecordDelete)(nil), // 1: mp4.ReqRecordDelete
(*ReqEventRecord)(nil), // 2: mp4.ReqEventRecord
(*ResponseEventRecord)(nil), // 3: mp4.ResponseEventRecord
(*ReqStartRecord)(nil), // 4: mp4.ReqStartRecord
(*ResponseStartRecord)(nil), // 5: mp4.ResponseStartRecord
(*ReqStopRecord)(nil), // 6: mp4.ReqStopRecord
(*ResponseStopRecord)(nil), // 7: mp4.ResponseStopRecord
(*durationpb.Duration)(nil), // 8: google.protobuf.Duration
(*emptypb.Empty)(nil), // 9: google.protobuf.Empty
(*pb.ResponseList)(nil), // 10: global.ResponseList
(*pb.ResponseCatalog)(nil), // 11: global.ResponseCatalog
(*pb.ResponseDelete)(nil), // 12: global.ResponseDelete
(*ReqRecordList)(nil), // 0: mp4.ReqRecordList
(*ReqRecordDelete)(nil), // 1: mp4.ReqRecordDelete
(*ReqEventRecord)(nil), // 2: mp4.ReqEventRecord
(*ResponseEventRecord)(nil), // 3: mp4.ResponseEventRecord
(*ReqStartRecord)(nil), // 4: mp4.ReqStartRecord
(*ResponseStartRecord)(nil), // 5: mp4.ResponseStartRecord
(*ReqStopRecord)(nil), // 6: mp4.ReqStopRecord
(*ResponseStopRecord)(nil), // 7: mp4.ResponseStopRecord
(*durationpb.Duration)(nil), // 8: google.protobuf.Duration
(*emptypb.Empty)(nil), // 9: google.protobuf.Empty
(*pb.RecordResponseList)(nil), // 10: global.RecordResponseList
(*pb.ResponseCatalog)(nil), // 11: global.ResponseCatalog
(*pb.ResponseDelete)(nil), // 12: global.ResponseDelete
}
var file_mp4_proto_depIdxs = []int32{
8, // 0: mp4.ReqStartRecord.fragment:type_name -> google.protobuf.Duration
@@ -741,7 +692,7 @@ var file_mp4_proto_depIdxs = []int32{
2, // 4: mp4.api.EventStart:input_type -> mp4.ReqEventRecord
4, // 5: mp4.api.StartRecord:input_type -> mp4.ReqStartRecord
6, // 6: mp4.api.StopRecord:input_type -> mp4.ReqStopRecord
10, // 7: mp4.api.List:output_type -> global.ResponseList
10, // 7: mp4.api.List:output_type -> global.RecordResponseList
11, // 8: mp4.api.Catalog:output_type -> global.ResponseCatalog
12, // 9: mp4.api.Delete:output_type -> global.ResponseDelete
3, // 10: mp4.api.EventStart:output_type -> mp4.ResponseEventRecord

View File

@@ -330,7 +330,6 @@ func local_request_Api_StopRecord_0(ctx context.Context, marshaler runtime.Marsh
// UnaryRPC :call ApiServer directly.
// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906.
// Note that using this registration option will cause many gRPC library features to stop working. Consider using RegisterApiHandlerFromEndpoint instead.
// GRPC interceptors will not work for this type of registration. To use interceptors, you must use the "runtime.WithMiddlewares" option in the "runtime.NewServeMux" call.
func RegisterApiHandlerServer(ctx context.Context, mux *runtime.ServeMux, server ApiServer) error {
mux.Handle("GET", pattern_Api_List_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
@@ -489,21 +488,21 @@ func RegisterApiHandlerServer(ctx context.Context, mux *runtime.ServeMux, server
// RegisterApiHandlerFromEndpoint is same as RegisterApiHandler but
// automatically dials to "endpoint" and closes the connection when "ctx" gets done.
func RegisterApiHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) {
conn, err := grpc.NewClient(endpoint, opts...)
conn, err := grpc.DialContext(ctx, endpoint, opts...)
if err != nil {
return err
}
defer func() {
if err != nil {
if cerr := conn.Close(); cerr != nil {
grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr)
grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
}
return
}
go func() {
<-ctx.Done()
if cerr := conn.Close(); cerr != nil {
grpclog.Errorf("Failed to close conn to %s: %v", endpoint, cerr)
grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
}
}()
}()
@@ -521,7 +520,7 @@ func RegisterApiHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.C
// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "ApiClient".
// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "ApiClient"
// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in
// "ApiClient" to call the correct interceptors. This client ignores the HTTP middlewares.
// "ApiClient" to call the correct interceptors.
func RegisterApiHandlerClient(ctx context.Context, mux *runtime.ServeMux, client ApiClient) error {
mux.Handle("GET", pattern_Api_List_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {

View File

@@ -7,7 +7,7 @@ package mp4;
option go_package="m7s.live/v5/plugin/mp4/pb";
service api {
rpc List (ReqRecordList) returns (global.ResponseList) {
rpc List (ReqRecordList) returns (global.RecordResponseList) {
option (google.api.http) = {
get: "/mp4/api/list/{streamPath=**}"
};
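With the List response switched to global.RecordResponseList, a regenerated client is called roughly like this (sketch only; the import alias mp4pb, the gRPC conn, and the field values are assumptions, not part of the diff):

	// assumed import: mp4pb "m7s.live/v5/plugin/mp4/pb"
	client := mp4pb.NewApiClient(conn)
	resp, err := client.List(ctx, &mp4pb.ReqRecordList{
		StreamPath: "live/camera1", // placeholder
		PageNum:    1,
		PageSize:   10,
	})
	// resp now carries a global.RecordResponseList message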

View File

@@ -1,7 +1,7 @@
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
// versions:
// - protoc-gen-go-grpc v1.5.1
// - protoc v5.28.3
// - protoc v5.29.3
// source: mp4.proto
package pb
@@ -33,7 +33,7 @@ const (
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type ApiClient interface {
List(ctx context.Context, in *ReqRecordList, opts ...grpc.CallOption) (*pb.ResponseList, error)
List(ctx context.Context, in *ReqRecordList, opts ...grpc.CallOption) (*pb.RecordResponseList, error)
Catalog(ctx context.Context, in *emptypb.Empty, opts ...grpc.CallOption) (*pb.ResponseCatalog, error)
Delete(ctx context.Context, in *ReqRecordDelete, opts ...grpc.CallOption) (*pb.ResponseDelete, error)
EventStart(ctx context.Context, in *ReqEventRecord, opts ...grpc.CallOption) (*ResponseEventRecord, error)
@@ -49,9 +49,9 @@ func NewApiClient(cc grpc.ClientConnInterface) ApiClient {
return &apiClient{cc}
}
func (c *apiClient) List(ctx context.Context, in *ReqRecordList, opts ...grpc.CallOption) (*pb.ResponseList, error) {
func (c *apiClient) List(ctx context.Context, in *ReqRecordList, opts ...grpc.CallOption) (*pb.RecordResponseList, error) {
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(pb.ResponseList)
out := new(pb.RecordResponseList)
err := c.cc.Invoke(ctx, Api_List_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
@@ -113,7 +113,7 @@ func (c *apiClient) StopRecord(ctx context.Context, in *ReqStopRecord, opts ...g
// All implementations must embed UnimplementedApiServer
// for forward compatibility.
type ApiServer interface {
List(context.Context, *ReqRecordList) (*pb.ResponseList, error)
List(context.Context, *ReqRecordList) (*pb.RecordResponseList, error)
Catalog(context.Context, *emptypb.Empty) (*pb.ResponseCatalog, error)
Delete(context.Context, *ReqRecordDelete) (*pb.ResponseDelete, error)
EventStart(context.Context, *ReqEventRecord) (*ResponseEventRecord, error)
@@ -129,7 +129,7 @@ type ApiServer interface {
// pointer dereference when methods are called.
type UnimplementedApiServer struct{}
func (UnimplementedApiServer) List(context.Context, *ReqRecordList) (*pb.ResponseList, error) {
func (UnimplementedApiServer) List(context.Context, *ReqRecordList) (*pb.RecordResponseList, error) {
return nil, status.Errorf(codes.Unimplemented, "method List not implemented")
}
func (UnimplementedApiServer) Catalog(context.Context, *emptypb.Empty) (*pb.ResponseCatalog, error) {

139
plugin/mp4/pkg/audio.go Normal file
View File

@@ -0,0 +1,139 @@
package mp4
import (
"fmt"
"io"
"time"
"m7s.live/v5/pkg"
"m7s.live/v5/pkg/codec"
"m7s.live/v5/pkg/util"
"m7s.live/v5/plugin/mp4/pkg/box"
)
var _ pkg.IAVFrame = (*Audio)(nil)
type Audio struct {
box.Sample
allocator *util.ScalableMemoryAllocator
}
// GetAllocator implements pkg.IAVFrame.
func (a *Audio) GetAllocator() *util.ScalableMemoryAllocator {
return a.allocator
}
// SetAllocator implements pkg.IAVFrame.
func (a *Audio) SetAllocator(allocator *util.ScalableMemoryAllocator) {
a.allocator = allocator
}
// Parse implements pkg.IAVFrame.
func (a *Audio) Parse(t *pkg.AVTrack) error {
return nil
}
// ConvertCtx implements pkg.IAVFrame.
func (a *Audio) ConvertCtx(ctx codec.ICodecCtx) (codec.ICodecCtx, pkg.IAVFrame, error) {
// Return the base codec context without any conversion
return ctx.GetBase(), nil, nil
}
// Demux implements pkg.IAVFrame.
func (a *Audio) Demux(codecCtx codec.ICodecCtx) (any, error) {
if len(a.Data) == 0 {
return nil, fmt.Errorf("no audio data to demux")
}
// Wrap the payload in a Memory object
var result util.Memory
result.AppendOne(a.Data)
// Demux according to the codec type
switch codecCtx.(type) {
case *codec.AACCtx:
// For AAC, return the raw data as-is
return result, nil
case *codec.PCMACtx, *codec.PCMUCtx:
// For PCM formats, return the raw data as-is
return result, nil
default:
// For any other format, also return the raw data as-is
return result, nil
}
}
// Mux implements pkg.IAVFrame.
func (a *Audio) Mux(codecCtx codec.ICodecCtx, frame *pkg.AVFrame) {
// Copy data from the AVFrame into the MP4 Sample
a.KeyFrame = false // audio frames are generally not keyframes
a.Timestamp = uint32(frame.Timestamp.Milliseconds())
a.CTS = uint32(frame.CTS.Milliseconds())
// Handle the raw payload
if frame.Raw != nil {
switch rawData := frame.Raw.(type) {
case util.Memory: // includes pkg.AudioData (an alias of util.Memory)
a.Data = rawData.ToBytes()
a.Size = len(a.Data)
case []byte:
// Copy the byte slice directly
a.Data = rawData
a.Size = len(a.Data)
default:
// Other payload types are not handled; drop the data
a.Data = nil
a.Size = 0
}
} else {
a.Data = nil
a.Size = 0
}
}
// GetTimestamp implements pkg.IAVFrame.
func (a *Audio) GetTimestamp() time.Duration {
return time.Duration(a.Timestamp) * time.Millisecond
}
// GetCTS implements pkg.IAVFrame.
func (a *Audio) GetCTS() time.Duration {
return time.Duration(a.CTS) * time.Millisecond
}
// GetSize implements pkg.IAVFrame.
func (a *Audio) GetSize() int {
return a.Size
}
// Recycle implements pkg.IAVFrame.
func (a *Audio) Recycle() {
// Release resources
if a.allocator != nil && a.Data != nil {
// If the data had been allocated through the allocator it could be recycled here;
// since copied data is used for now, no special handling is required yet
}
a.Data = nil
a.Size = 0
a.KeyFrame = false
a.Timestamp = 0
a.CTS = 0
a.Offset = 0
a.Duration = 0
}
// String implements pkg.IAVFrame.
func (a *Audio) String() string {
return fmt.Sprintf("MP4Audio[ts:%d, cts:%d, size:%d]",
a.Timestamp, a.CTS, a.Size)
}
// Dump implements pkg.IAVFrame.
func (a *Audio) Dump(t byte, w io.Writer) {
// Write the payload to the writer
if a.Data != nil {
w.Write(a.Data)
}
}
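A quick sketch of how the new frame type feeds the existing publisher API (illustrative only; publisher, allocator and payload are assumed to exist at the call site):

	frame := &Audio{Sample: box.Sample{Data: payload, Size: len(payload), Timestamp: 40}}
	frame.SetAllocator(allocator)
	if err := publisher.WriteAudio(frame); err != nil {
		// handle the write error
	}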

View File

@@ -2,25 +2,31 @@ package mp4
import (
"context"
"log/slog"
"os"
"time"
"github.com/deepch/vdk/codec/aacparser"
"github.com/deepch/vdk/codec/h264parser"
"github.com/deepch/vdk/codec/h265parser"
"m7s.live/v5"
"m7s.live/v5/pkg"
"m7s.live/v5/pkg/codec"
"m7s.live/v5/pkg/util"
"m7s.live/v5/plugin/mp4/pkg/box"
)
type DemuxerRange struct {
StartTime, EndTime time.Time
Streams []m7s.RecordStream
OnAudioExtraData func(codec box.MP4_CODEC_TYPE, data []byte) error
OnVideoExtraData func(codec box.MP4_CODEC_TYPE, data []byte) error
OnAudioSample func(codec box.MP4_CODEC_TYPE, sample box.Sample) error
OnVideoSample func(codec box.MP4_CODEC_TYPE, sample box.Sample) error
*slog.Logger
StartTime, EndTime time.Time
Streams []m7s.RecordStream
AudioTrack, VideoTrack *pkg.AVTrack
}
func (d *DemuxerRange) Demux(ctx context.Context) error {
func (d *DemuxerRange) Demux(ctx context.Context, onAudio func(*Audio) error, onVideo func(*Video) error) error {
var ts, tsOffset int64
allocator := util.NewScalableMemoryAllocator(1 << 10)
defer allocator.Recycle()
for _, stream := range d.Streams {
// Check whether the stream's time range overlaps the requested range
if stream.EndTime.Before(d.StartTime) || stream.StartTime.After(d.EndTime) {
@@ -42,20 +48,84 @@ func (d *DemuxerRange) Demux(ctx context.Context) error {
// Handle each track's extra data (sequence headers)
for _, track := range demuxer.Tracks {
switch track.Cid {
case box.MP4_CODEC_H264, box.MP4_CODEC_H265:
if d.OnVideoExtraData != nil {
err := d.OnVideoExtraData(track.Cid, track.ExtraData)
if err != nil {
return err
case box.MP4_CODEC_H264:
var h264Ctx codec.H264Ctx
h264Ctx.CodecData, err = h264parser.NewCodecDataFromAVCDecoderConfRecord(track.ExtraData)
if err == nil {
if d.VideoTrack == nil {
d.VideoTrack = &pkg.AVTrack{
ICodecCtx: &h264Ctx,
RingWriter: &pkg.RingWriter{
Ring: util.NewRing[pkg.AVFrame](1),
}}
d.VideoTrack.Logger = d.With("track", "video")
} else {
// A video track already exists; reuse it
d.VideoTrack.ICodecCtx = &h264Ctx
}
}
case box.MP4_CODEC_AAC, box.MP4_CODEC_G711A, box.MP4_CODEC_G711U:
if d.OnAudioExtraData != nil {
err := d.OnAudioExtraData(track.Cid, track.ExtraData)
if err != nil {
return err
case box.MP4_CODEC_H265:
var h265Ctx codec.H265Ctx
h265Ctx.CodecData, err = h265parser.NewCodecDataFromAVCDecoderConfRecord(track.ExtraData)
if err == nil {
if d.VideoTrack == nil {
d.VideoTrack = &pkg.AVTrack{
ICodecCtx: &h265Ctx,
RingWriter: &pkg.RingWriter{
Ring: util.NewRing[pkg.AVFrame](1),
}}
d.VideoTrack.Logger = d.With("track", "video")
} else {
// A video track already exists; reuse it
d.VideoTrack.ICodecCtx = &h265Ctx
}
}
case box.MP4_CODEC_AAC:
var aacCtx codec.AACCtx
aacCtx.CodecData, err = aacparser.NewCodecDataFromMPEG4AudioConfigBytes(track.ExtraData)
if err == nil {
if d.AudioTrack == nil {
d.AudioTrack = &pkg.AVTrack{
ICodecCtx: &aacCtx,
RingWriter: &pkg.RingWriter{
Ring: util.NewRing[pkg.AVFrame](1),
}}
d.AudioTrack.Logger = d.With("track", "audio")
} else {
// An audio track already exists; reuse it
d.AudioTrack.ICodecCtx = &aacCtx
}
}
case box.MP4_CODEC_G711A:
if d.AudioTrack == nil {
d.AudioTrack = &pkg.AVTrack{
ICodecCtx: &codec.PCMACtx{
AudioCtx: codec.AudioCtx{
SampleRate: 8000,
Channels: 1,
SampleSize: 16,
},
},
RingWriter: &pkg.RingWriter{
Ring: util.NewRing[pkg.AVFrame](1),
}}
d.AudioTrack.Logger = d.With("track", "audio")
}
case box.MP4_CODEC_G711U:
if d.AudioTrack == nil {
d.AudioTrack = &pkg.AVTrack{
ICodecCtx: &codec.PCMUCtx{
AudioCtx: codec.AudioCtx{
SampleRate: 8000,
Channels: 1,
SampleSize: 16,
},
},
RingWriter: &pkg.RingWriter{
Ring: util.NewRing[pkg.AVFrame](1),
}}
d.AudioTrack.Logger = d.With("track", "audio")
}
}
}
@@ -101,21 +171,50 @@ func (d *DemuxerRange) Demux(ctx context.Context) error {
// Dispatch to the matching callback based on the track type
switch track.Cid {
case box.MP4_CODEC_H264, box.MP4_CODEC_H265:
if d.OnVideoSample != nil {
err := d.OnVideoSample(track.Cid, sample)
if err != nil {
return err
}
if err := onVideo(&Video{
Sample: sample,
allocator: allocator,
}); err != nil {
return err
}
case box.MP4_CODEC_AAC, box.MP4_CODEC_G711A, box.MP4_CODEC_G711U:
if d.OnAudioSample != nil {
err := d.OnAudioSample(track.Cid, sample)
if err != nil {
return err
}
if err := onAudio(&Audio{
Sample: sample,
allocator: allocator,
}); err != nil {
return err
}
}
}
}
return nil
}
type DemuxerConverterRange[TA pkg.IAVFrame, TV pkg.IAVFrame] struct {
DemuxerRange
audioConverter *pkg.AVFrameConvert[TA]
videoConverter *pkg.AVFrameConvert[TV]
}
func (d *DemuxerConverterRange[TA, TV]) Demux(ctx context.Context, onAudio func(TA) error, onVideo func(TV) error) error {
return d.DemuxerRange.Demux(ctx, func(audio *Audio) error {
if d.audioConverter == nil {
d.audioConverter = pkg.NewAVFrameConvert[TA](d.AudioTrack, nil)
}
target, err := d.audioConverter.Convert(audio)
if err == nil {
err = onAudio(target)
}
return err
}, func(video *Video) error {
if d.videoConverter == nil {
d.videoConverter = pkg.NewAVFrameConvert[TV](d.VideoTrack, nil)
}
target, err := d.videoConverter.Convert(video)
if err == nil {
err = onVideo(target)
}
return err
})
}
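A usage sketch of the refactored callback API (illustrative; ctx, logger, streams, the time bounds and publisher all come from the caller):

	dr := &DemuxerRange{
		Logger:    logger.With("demuxer", "mp4"),
		StartTime: start,
		EndTime:   end,
		Streams:   streams,
	}
	err := dr.Demux(ctx, func(a *Audio) error {
		return publisher.WriteAudio(a)
	}, func(v *Video) error {
		return publisher.WriteVideo(v)
	})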

View File

@@ -382,23 +382,20 @@ func (d *Demuxer) ReadSample(yield func(*Track, Sample) bool) {
maxdts := int64(-1)
minTsSample := Sample{Timestamp: uint32(maxdts)}
var whichTrack *Track
whichTracki := 0
for i, track := range d.Tracks {
idx := d.ReadSampleIdx[i]
for _, track := range d.Tracks {
idx := d.ReadSampleIdx[track.TrackId-1]
if int(idx) == len(track.Samplelist) {
continue
}
if whichTrack == nil {
minTsSample = track.Samplelist[idx]
whichTrack = track
whichTracki = i
} else {
dts1 := uint64(minTsSample.Timestamp) * uint64(d.moov.MVHD.Timescale) / uint64(whichTrack.Timescale)
dts2 := uint64(track.Samplelist[idx].Timestamp) * uint64(d.moov.MVHD.Timescale) / uint64(track.Timescale)
if dts1 > dts2 {
minTsSample = track.Samplelist[idx]
whichTrack = track
whichTracki = i
}
}
// subSample := d.readSubSample(idx, whichTrack)
@@ -407,7 +404,7 @@ func (d *Demuxer) ReadSample(yield func(*Track, Sample) bool) {
return
}
d.ReadSampleIdx[whichTracki]++
d.ReadSampleIdx[whichTrack.TrackId-1]++
if !yield(whichTrack, minTsSample) {
return
}
@@ -418,21 +415,18 @@ func (d *Demuxer) RangeSample(yield func(*Track, *Sample) bool) {
for {
var minTsSample *Sample
var whichTrack *Track
whichTracki := 0
for i, track := range d.Tracks {
idx := d.ReadSampleIdx[i]
for _, track := range d.Tracks {
idx := d.ReadSampleIdx[track.TrackId-1]
if int(idx) == len(track.Samplelist) {
continue
}
if whichTrack == nil {
minTsSample = &track.Samplelist[idx]
whichTrack = track
whichTracki = i
} else {
if minTsSample.Offset > track.Samplelist[idx].Offset {
minTsSample = &track.Samplelist[idx]
whichTrack = track
whichTracki = i
}
}
// subSample := d.readSubSample(idx, whichTrack)
@@ -440,7 +434,7 @@ func (d *Demuxer) RangeSample(yield func(*Track, *Sample) bool) {
if minTsSample == nil {
return
}
d.ReadSampleIdx[whichTracki]++
d.ReadSampleIdx[whichTrack.TrackId-1]++
if !yield(whichTrack, minTsSample) {
return
}
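Both iterators now index ReadSampleIdx by TrackId-1 instead of the slice position, so range-over-func callers keep working unchanged; a minimal fragment (assuming Go 1.23+ and the fmt package):

	for track, sample := range demuxer.RangeSample {
		fmt.Println(track.TrackId, sample.Timestamp, sample.Size)
	}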

View File

@@ -6,11 +6,13 @@ import (
"strings"
"time"
"github.com/deepch/vdk/codec/aacparser"
"github.com/deepch/vdk/codec/h264parser"
"github.com/deepch/vdk/codec/h265parser"
m7s "m7s.live/v5"
"m7s.live/v5/pkg/codec"
"m7s.live/v5/pkg/util"
"m7s.live/v5/plugin/mp4/pkg/box"
rtmp "m7s.live/v5/plugin/rtmp/pkg"
)
type HTTPReader struct {
@@ -51,20 +53,27 @@ func (p *HTTPReader) Run() (err error) {
for _, track := range demuxer.Tracks {
switch track.Cid {
case box.MP4_CODEC_H264:
var sequence rtmp.RTMPVideo
sequence.SetAllocator(allocator)
sequence.Append([]byte{0x17, 0x00, 0x00, 0x00, 0x00}, track.ExtraData)
err = publisher.WriteVideo(&sequence)
var h264Ctx codec.H264Ctx
h264Ctx.CodecData, err = h264parser.NewCodecDataFromAVCDecoderConfRecord(track.ExtraData)
if err == nil {
publisher.SetCodecCtx(&h264Ctx, &Video{})
}
case box.MP4_CODEC_H265:
var sequence rtmp.RTMPVideo
sequence.SetAllocator(allocator)
sequence.Append([]byte{0b1001_0000 | rtmp.PacketTypeSequenceStart}, codec.FourCC_H265[:], track.ExtraData)
err = publisher.WriteVideo(&sequence)
var h265Ctx codec.H265Ctx
h265Ctx.CodecData, err = h265parser.NewCodecDataFromAVCDecoderConfRecord(track.ExtraData)
if err == nil {
publisher.SetCodecCtx(&h265Ctx, &Video{
allocator: allocator,
})
}
case box.MP4_CODEC_AAC:
var sequence rtmp.RTMPAudio
sequence.SetAllocator(allocator)
sequence.Append([]byte{0xaf, 0x00}, track.ExtraData)
err = publisher.WriteAudio(&sequence)
var aacCtx codec.AACCtx
aacCtx.CodecData, err = aacparser.NewCodecDataFromMPEG4AudioConfigBytes(track.ExtraData)
if err == nil {
publisher.SetCodecCtx(&aacCtx, &Audio{
allocator: allocator,
})
}
}
}
@@ -92,56 +101,44 @@ func (p *HTTPReader) Run() (err error) {
allocator.Free(sample.Data)
return
}
fixTimestamp := uint32(uint64(sample.Timestamp)*1000/uint64(track.Timescale) + timestampOffset)
switch track.Cid {
case box.MP4_CODEC_H264:
var videoFrame rtmp.RTMPVideo
videoFrame.SetAllocator(allocator)
videoFrame.CTS = sample.CTS
videoFrame.Timestamp = uint32(uint64(sample.Timestamp)*1000/uint64(track.Timescale) + timestampOffset)
videoFrame.AppendOne([]byte{util.Conditional[byte](sample.KeyFrame, 0x17, 0x27), 0x01, byte(videoFrame.CTS >> 24), byte(videoFrame.CTS >> 8), byte(videoFrame.CTS)})
videoFrame.AddRecycleBytes(sample.Data)
var videoFrame = Video{
Sample: sample,
allocator: allocator,
}
videoFrame.Timestamp = fixTimestamp
err = publisher.WriteVideo(&videoFrame)
case box.MP4_CODEC_H265:
var videoFrame rtmp.RTMPVideo
videoFrame.SetAllocator(allocator)
videoFrame.CTS = uint32(sample.CTS)
videoFrame.Timestamp = uint32(uint64(sample.Timestamp)*1000/uint64(track.Timescale) + timestampOffset)
var head []byte
var b0 byte = 0b1010_0000
if sample.KeyFrame {
b0 = 0b1001_0000
var videoFrame = Video{
Sample: sample,
allocator: allocator,
}
if videoFrame.CTS == 0 {
head = videoFrame.NextN(5)
head[0] = b0 | rtmp.PacketTypeCodedFramesX
} else {
head = videoFrame.NextN(8)
head[0] = b0 | rtmp.PacketTypeCodedFrames
util.PutBE(head[5:8], videoFrame.CTS) // cts
}
copy(head[1:], codec.FourCC_H265[:])
videoFrame.AddRecycleBytes(sample.Data)
videoFrame.Timestamp = fixTimestamp
err = publisher.WriteVideo(&videoFrame)
case box.MP4_CODEC_AAC:
var audioFrame rtmp.RTMPAudio
audioFrame.SetAllocator(allocator)
audioFrame.Timestamp = uint32(uint64(sample.Timestamp)*1000/uint64(track.Timescale) + timestampOffset)
audioFrame.AppendOne([]byte{0xaf, 0x01})
audioFrame.AddRecycleBytes(sample.Data)
var audioFrame = Audio{
Sample: sample,
allocator: allocator,
}
audioFrame.Timestamp = fixTimestamp
err = publisher.WriteAudio(&audioFrame)
case box.MP4_CODEC_G711A:
var audioFrame rtmp.RTMPAudio
audioFrame.SetAllocator(allocator)
audioFrame.Timestamp = uint32(uint64(sample.Timestamp)*1000/uint64(track.Timescale) + timestampOffset)
audioFrame.AppendOne([]byte{0x72})
audioFrame.AddRecycleBytes(sample.Data)
var audioFrame = Audio{
Sample: sample,
allocator: allocator,
}
audioFrame.Timestamp = fixTimestamp
err = publisher.WriteAudio(&audioFrame)
case box.MP4_CODEC_G711U:
var audioFrame rtmp.RTMPAudio
var audioFrame = Audio{
Sample: sample,
allocator: allocator,
}
audioFrame.Sample = sample
audioFrame.SetAllocator(allocator)
audioFrame.Timestamp = uint32(uint64(sample.Timestamp)*1000/uint64(track.Timescale) + timestampOffset)
audioFrame.AppendOne([]byte{0x82})
audioFrame.AddRecycleBytes(sample.Data)
audioFrame.Timestamp = fixTimestamp
err = publisher.WriteAudio(&audioFrame)
}
}

View File

@@ -6,18 +6,14 @@ import (
m7s "m7s.live/v5"
"m7s.live/v5/pkg"
"m7s.live/v5/pkg/codec"
"m7s.live/v5/pkg/config"
"m7s.live/v5/pkg/task"
"m7s.live/v5/pkg/util"
"m7s.live/v5/plugin/mp4/pkg/box"
rtmp "m7s.live/v5/plugin/rtmp/pkg"
)
type (
RecordReader struct {
m7s.RecordFilePuller
demuxer *Demuxer
}
)
@@ -53,125 +49,8 @@ func (p *RecordReader) Run() (err error) {
var tsOffset int64 // timestamp offset
// Create a reusable DemuxerRange instance
demuxerRange := &DemuxerRange{}
// Set the audio/video extra-data callbacks (sequence headers)
demuxerRange.OnVideoExtraData = func(codecType box.MP4_CODEC_TYPE, data []byte) error {
switch codecType {
case box.MP4_CODEC_H264:
var sequence rtmp.RTMPVideo
sequence.Append([]byte{0x17, 0x00, 0x00, 0x00, 0x00}, data)
err = publisher.WriteVideo(&sequence)
case box.MP4_CODEC_H265:
var sequence rtmp.RTMPVideo
sequence.Append([]byte{0b1001_0000 | rtmp.PacketTypeSequenceStart}, codec.FourCC_H265[:], data)
err = publisher.WriteVideo(&sequence)
}
return err
}
demuxerRange.OnAudioExtraData = func(codecType box.MP4_CODEC_TYPE, data []byte) error {
if codecType == box.MP4_CODEC_AAC {
var sequence rtmp.RTMPAudio
sequence.Append([]byte{0xaf, 0x00}, data)
err = publisher.WriteAudio(&sequence)
}
return err
}
// Set the video sample callback
demuxerRange.OnVideoSample = func(codecType box.MP4_CODEC_TYPE, sample box.Sample) error {
if publisher.Paused != nil {
publisher.Paused.Await()
}
// Check whether a seek was requested
if needSeek, seekErr := p.CheckSeek(); seekErr != nil {
return seekErr
} else if needSeek {
return pkg.ErrSkip
}
// Simplified timestamp handling
if int64(sample.Timestamp)+tsOffset < 0 {
ts = 0
} else {
ts = int64(sample.Timestamp) + tsOffset
}
// Update the wall-clock time
realTime = time.Now() // could be replaced with a more precise calculation if needed
// Handle the video frame according to its codec
switch codecType {
case box.MP4_CODEC_H264:
var videoFrame rtmp.RTMPVideo
videoFrame.CTS = sample.CTS
videoFrame.Timestamp = uint32(ts)
videoFrame.Append([]byte{util.Conditional[byte](sample.KeyFrame, 0x17, 0x27), 0x01, byte(videoFrame.CTS >> 24), byte(videoFrame.CTS >> 8), byte(videoFrame.CTS)}, sample.Data)
err = publisher.WriteVideo(&videoFrame)
case box.MP4_CODEC_H265:
var videoFrame rtmp.RTMPVideo
videoFrame.CTS = sample.CTS
videoFrame.Timestamp = uint32(ts)
var head []byte
var b0 byte = 0b1010_0000
if sample.KeyFrame {
b0 = 0b1001_0000
}
if videoFrame.CTS == 0 {
head = videoFrame.NextN(5)
head[0] = b0 | rtmp.PacketTypeCodedFramesX
} else {
head = videoFrame.NextN(8)
head[0] = b0 | rtmp.PacketTypeCodedFrames
util.PutBE(head[5:8], videoFrame.CTS) // cts
}
copy(head[1:], codec.FourCC_H265[:])
videoFrame.AppendOne(sample.Data)
err = publisher.WriteVideo(&videoFrame)
}
return err
}
// Set the audio sample callback
demuxerRange.OnAudioSample = func(codecType box.MP4_CODEC_TYPE, sample box.Sample) error {
if publisher.Paused != nil {
publisher.Paused.Await()
}
// Check whether a seek was requested
if needSeek, seekErr := p.CheckSeek(); seekErr != nil {
return seekErr
} else if needSeek {
return pkg.ErrSkip
}
// Simplified timestamp handling
if int64(sample.Timestamp)+tsOffset < 0 {
ts = 0
} else {
ts = int64(sample.Timestamp) + tsOffset
}
// Handle the audio frame according to its codec
switch codecType {
case box.MP4_CODEC_AAC:
var audioFrame rtmp.RTMPAudio
audioFrame.Timestamp = uint32(ts)
audioFrame.Append([]byte{0xaf, 0x01}, sample.Data)
err = publisher.WriteAudio(&audioFrame)
case box.MP4_CODEC_G711A:
var audioFrame rtmp.RTMPAudio
audioFrame.Timestamp = uint32(ts)
audioFrame.Append([]byte{0x72}, sample.Data)
err = publisher.WriteAudio(&audioFrame)
case box.MP4_CODEC_G711U:
var audioFrame rtmp.RTMPAudio
audioFrame.Timestamp = uint32(ts)
audioFrame.Append([]byte{0x82}, sample.Data)
err = publisher.WriteAudio(&audioFrame)
}
return err
demuxerRange := &DemuxerRange{
Logger: p.Logger.With("demuxer", "mp4"),
}
for loop := 0; loop < p.Loop; loop++ {
@@ -186,7 +65,56 @@ func (p *RecordReader) Run() (err error) {
} else {
demuxerRange.EndTime = time.Now()
}
if err = demuxerRange.Demux(p.Context); err != nil {
if err = demuxerRange.Demux(p.Context, func(a *Audio) error {
if !publisher.HasAudioTrack() {
publisher.SetCodecCtx(demuxerRange.AudioTrack.ICodecCtx, a)
}
if publisher.Paused != nil {
publisher.Paused.Await()
}
// Check whether a seek was requested
if needSeek, seekErr := p.CheckSeek(); seekErr != nil {
return seekErr
} else if needSeek {
return pkg.ErrSkip
}
// Simplified timestamp handling
if int64(a.Timestamp)+tsOffset < 0 {
ts = 0
} else {
ts = int64(a.Timestamp) + tsOffset
}
a.Timestamp = uint32(ts)
return publisher.WriteAudio(a)
}, func(v *Video) error {
if !publisher.HasVideoTrack() {
publisher.SetCodecCtx(demuxerRange.VideoTrack.ICodecCtx, v)
}
if publisher.Paused != nil {
publisher.Paused.Await()
}
// Check whether a seek was requested
if needSeek, seekErr := p.CheckSeek(); seekErr != nil {
return seekErr
} else if needSeek {
return pkg.ErrSkip
}
// Simplified timestamp handling
if int64(v.Timestamp)+tsOffset < 0 {
ts = 0
} else {
ts = int64(v.Timestamp) + tsOffset
}
// Update the wall-clock time
realTime = time.Now() // could be replaced with a more precise calculation if needed
v.Timestamp = uint32(ts)
return publisher.WriteVideo(v)
}); err != nil {
if err == pkg.ErrSkip {
loop--
continue

View File

@@ -13,7 +13,6 @@ import (
"m7s.live/v5/pkg/config"
"m7s.live/v5/pkg/task"
"m7s.live/v5/plugin/mp4/pkg/box"
rtmp "m7s.live/v5/plugin/rtmp/pkg"
)
type WriteTrailerQueueTask struct {
@@ -136,7 +135,23 @@ var CustomFileName = func(job *m7s.RecordJob) string {
}
func (r *Recorder) createStream(start time.Time) (err error) {
return r.CreateStream(start, CustomFileName)
if r.RecordJob.RecConf.Type == "" {
r.RecordJob.RecConf.Type = "mp4"
}
err = r.CreateStream(start, CustomFileName)
if err != nil {
return
}
r.file, err = os.Create(r.Event.FilePath)
if err != nil {
return
}
if r.Event.Type == "fmp4" {
r.muxer = NewMuxerWithStreamPath(FLAG_FRAGMENT, r.Event.StreamPath)
} else {
r.muxer = NewMuxerWithStreamPath(0, r.Event.StreamPath)
}
return r.muxer.WriteInitSegment(r.file)
}
func (r *Recorder) Dispose() {
@@ -149,27 +164,7 @@ func (r *Recorder) Run() (err error) {
recordJob := &r.RecordJob
sub := recordJob.Subscriber
var audioTrack, videoTrack *Track
startTime := time.Now()
if recordJob.Event != nil {
startTime = startTime.Add(-time.Duration(recordJob.Event.BeforeDuration) * time.Millisecond)
}
err = r.createStream(startTime)
if err != nil {
return
}
r.file, err = os.Create(r.Event.FilePath)
if err != nil {
return
}
if recordJob.RecConf.Type == "fmp4" {
r.Event.Type = "fmp4"
r.muxer = NewMuxerWithStreamPath(FLAG_FRAGMENT, r.Event.StreamPath)
} else {
r.muxer = NewMuxerWithStreamPath(0, r.Event.StreamPath)
}
r.muxer.WriteInitSegment(r.file)
var at, vt *pkg.AVTrack
checkEventRecordStop := func(absTime uint32) (err error) {
if absTime >= recordJob.Event.AfterDuration+recordJob.Event.BeforeDuration {
r.RecordJob.Stop(task.ErrStopByUser)
@@ -177,19 +172,16 @@ func (r *Recorder) Run() (err error) {
return
}
checkFragment := func(absTime uint32) (err error) {
if duration := int64(absTime); time.Duration(duration)*time.Millisecond >= recordJob.RecConf.Fragment {
now := time.Now()
r.writeTailer(now)
err = r.createStream(now)
checkFragment := func(reader *pkg.AVRingReader) (err error) {
if duration := int64(reader.AbsTime); time.Duration(duration)*time.Millisecond >= recordJob.RecConf.Fragment {
r.writeTailer(reader.Value.WriteTime)
err = r.createStream(reader.Value.WriteTime)
if err != nil {
return
}
at, vt = nil, nil
if vr := sub.VideoReader; vr != nil {
vr.ResetAbsTime()
//seq := vt.SequenceFrame.(*rtmp.RTMPVideo)
//offset = int64(seq.Size + 15)
}
if ar := sub.AudioReader; ar != nil {
ar.ResetAbsTime()
@@ -198,7 +190,13 @@ func (r *Recorder) Run() (err error) {
return
}
return m7s.PlayBlock(sub, func(audio *pkg.RawAudio) error {
return m7s.PlayBlock(sub, func(audio *Audio) error {
if r.Event.StartTime.IsZero() {
err = r.createStream(sub.AudioReader.Value.WriteTime)
if err != nil {
return err
}
}
r.Event.Duration = sub.AudioReader.AbsTime
if sub.VideoReader == nil {
if recordJob.Event != nil {
@@ -208,7 +206,7 @@ func (r *Recorder) Run() (err error) {
}
}
if recordJob.RecConf.Fragment != 0 {
err := checkFragment(sub.AudioReader.AbsTime)
err := checkFragment(sub.AudioReader)
if err != nil {
return err
}
@@ -238,12 +236,16 @@ func (r *Recorder) Run() (err error) {
track.ChannelCount = uint8(ctx.Channels)
}
}
dts := sub.AudioReader.AbsTime
return r.muxer.WriteSample(r.file, audioTrack, box.Sample{
Data: audio.ToBytes(),
Timestamp: uint32(dts),
})
}, func(video *rtmp.RTMPVideo) error {
sample := audio.Sample
sample.Timestamp = uint32(sub.AudioReader.AbsTime)
return r.muxer.WriteSample(r.file, audioTrack, sample)
}, func(video *Video) error {
if r.Event.StartTime.IsZero() {
err = r.createStream(sub.VideoReader.Value.WriteTime)
if err != nil {
return err
}
}
r.Event.Duration = sub.VideoReader.AbsTime
if sub.VideoReader.Value.IDR {
if recordJob.Event != nil {
@@ -253,78 +255,53 @@ func (r *Recorder) Run() (err error) {
}
}
if recordJob.RecConf.Fragment != 0 {
err := checkFragment(sub.VideoReader.AbsTime)
err := checkFragment(sub.VideoReader)
if err != nil {
return err
}
}
}
offset := 5
bytes := video.ToBytes()
if vt == nil {
vt = sub.VideoReader.Track
ctx := vt.ICodecCtx.(pkg.IVideoCodecCtx)
width, height := uint32(ctx.Width()), uint32(ctx.Height())
switch ctx := vt.ICodecCtx.GetBase().(type) {
case *codec.H264Ctx:
track := r.muxer.AddTrack(box.MP4_CODEC_H264)
videoTrack = track
track.ExtraData = ctx.Record
track.Width = uint32(ctx.Width())
track.Height = uint32(ctx.Height())
track.Width = width
track.Height = height
case *codec.H265Ctx:
track := r.muxer.AddTrack(box.MP4_CODEC_H265)
videoTrack = track
track.ExtraData = ctx.Record
track.Width = uint32(ctx.Width())
track.Height = uint32(ctx.Height())
track.Width = width
track.Height = height
}
}
switch ctx := vt.ICodecCtx.(type) {
case *codec.H264Ctx:
if bytes[1] == 0 {
// Check if video resolution has changed
if uint32(ctx.Width()) != videoTrack.Width || uint32(ctx.Height()) != videoTrack.Height {
r.Info("Video resolution changed, restarting recording",
"old", fmt.Sprintf("%dx%d", videoTrack.Width, videoTrack.Height),
"new", fmt.Sprintf("%dx%d", ctx.Width(), ctx.Height()))
now := time.Now()
r.writeTailer(now)
err = r.createStream(now)
if err != nil {
return nil
}
at, vt = nil, nil
if vr := sub.VideoReader; vr != nil {
vr.ResetAbsTime()
//seq := vt.SequenceFrame.(*rtmp.RTMPVideo)
//offset = int64(seq.Size + 15)
}
if ar := sub.AudioReader; ar != nil {
ar.ResetAbsTime()
}
}
ctx := vt.ICodecCtx.(pkg.IVideoCodecCtx)
width, height := uint32(ctx.Width()), uint32(ctx.Height())
if width != videoTrack.Width || height != videoTrack.Height {
r.Info("Video resolution changed, restarting recording",
"old", fmt.Sprintf("%dx%d", videoTrack.Width, videoTrack.Height),
"new", fmt.Sprintf("%dx%d", width, height))
r.writeTailer(sub.VideoReader.Value.WriteTime)
err = r.createStream(sub.VideoReader.Value.WriteTime)
if err != nil {
return nil
}
case *rtmp.H265Ctx:
if ctx.Enhanced {
switch t := bytes[0] & 0b1111; t {
case rtmp.PacketTypeCodedFrames:
offset += 3
case rtmp.PacketTypeSequenceStart:
return nil
case rtmp.PacketTypeCodedFramesX:
default:
r.Warn("unknown h265 packet type", "type", t)
return nil
}
} else if bytes[1] == 0 {
return nil
at, vt = nil, nil
if vr := sub.VideoReader; vr != nil {
vr.ResetAbsTime()
}
if ar := sub.AudioReader; ar != nil {
ar.ResetAbsTime()
}
}
return r.muxer.WriteSample(r.file, videoTrack, box.Sample{
KeyFrame: sub.VideoReader.Value.IDR,
Data: bytes[offset:],
Timestamp: uint32(sub.VideoReader.AbsTime),
CTS: video.CTS,
})
sample := video.Sample
sample.Timestamp = uint32(sub.VideoReader.AbsTime)
return r.muxer.WriteSample(r.file, videoTrack, sample)
})
}

View File

@@ -87,19 +87,17 @@ func (track *Track) makeElstBox() *EditListBox {
}
func (track *Track) Seek(dts uint64) int {
func (track *Track) Seek(dts uint64) (idx int) {
idx = -1
for i, sample := range track.Samplelist {
if sample.Timestamp*1000/uint32(track.Timescale) < uint32(dts) {
continue
} else if track.Cid.IsVideo() {
if sample.KeyFrame {
return i
}
} else {
return i
if track.Cid.IsVideo() && sample.KeyFrame {
idx = i
}
if sample.Timestamp*1000/uint32(track.Timescale) > uint32(dts) {
break
}
}
return -1
return
}
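With the rewrite, Seek returns the index of the keyframe at or before the requested DTS in milliseconds, or -1 when no such keyframe exists; a minimal usage sketch (illustrative DTS value):

	if idx := track.Seek(15000); idx >= 0 {
		start := track.Samplelist[idx] // keyframe sample to begin reading from
		_ = start
	}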
func (track *Track) makeEdtsBox() *ContainerBox {

170
plugin/mp4/pkg/video.go Normal file
View File

@@ -0,0 +1,170 @@
package mp4
import (
"fmt"
"io"
"slices"
"time"
"m7s.live/v5/pkg"
"m7s.live/v5/pkg/codec"
"m7s.live/v5/pkg/util"
"m7s.live/v5/plugin/mp4/pkg/box"
)
var _ pkg.IAVFrame = (*Video)(nil)
type Video struct {
box.Sample
allocator *util.ScalableMemoryAllocator
}
// GetAllocator implements pkg.IAVFrame.
func (v *Video) GetAllocator() *util.ScalableMemoryAllocator {
return v.allocator
}
// SetAllocator implements pkg.IAVFrame.
func (v *Video) SetAllocator(allocator *util.ScalableMemoryAllocator) {
v.allocator = allocator
}
// Parse implements pkg.IAVFrame.
func (v *Video) Parse(t *pkg.AVTrack) error {
t.Value.IDR = v.KeyFrame
return nil
}
// ConvertCtx implements pkg.IAVFrame.
func (v *Video) ConvertCtx(ctx codec.ICodecCtx) (codec.ICodecCtx, pkg.IAVFrame, error) {
// Return the base codec context without any conversion
return ctx.GetBase(), nil, nil
}
// Demux implements pkg.IAVFrame.
func (v *Video) Demux(codecCtx codec.ICodecCtx) (any, error) {
if len(v.Data) == 0 {
return nil, fmt.Errorf("no video data to demux")
}
// Create a memory reader
var mem util.Memory
mem.AppendOne(v.Data)
reader := mem.NewReader()
var nalus pkg.Nalus
// Demux according to the codec type
switch ctx := codecCtx.(type) {
case *codec.H264Ctx:
// For H.264, parse the AVCC-formatted NAL units
if err := nalus.ParseAVCC(reader, int(ctx.RecordInfo.LengthSizeMinusOne)+1); err != nil {
return nil, fmt.Errorf("failed to parse H.264 AVCC: %w", err)
}
case *codec.H265Ctx:
// For H.265, parse the AVCC-formatted NAL units
if err := nalus.ParseAVCC(reader, int(ctx.RecordInfo.LengthSizeMinusOne)+1); err != nil {
return nil, fmt.Errorf("failed to parse H.265 AVCC: %w", err)
}
default:
// For other formats, fall back to default AVCC parsing with a 4-byte length prefix
if err := nalus.ParseAVCC(reader, 4); err != nil {
return nil, fmt.Errorf("failed to parse AVCC with default settings: %w", err)
}
}
return nalus, nil
}
// Mux implements pkg.IAVFrame.
func (v *Video) Mux(codecCtx codec.ICodecCtx, frame *pkg.AVFrame) {
// Copy data from the AVFrame into the MP4 Sample
v.KeyFrame = frame.IDR
v.Timestamp = uint32(frame.Timestamp.Milliseconds())
v.CTS = uint32(frame.CTS.Milliseconds())
// Handle the raw payload
if frame.Raw != nil {
switch rawData := frame.Raw.(type) {
case pkg.Nalus:
// Convert the Nalus into AVCC-formatted bytes
var buffer util.Buffer
// Determine the NALU length-field size from the codec
var naluSizeLen int = 4 // default to 4 bytes
switch ctx := codecCtx.(type) {
case *codec.H264Ctx:
naluSizeLen = int(ctx.RecordInfo.LengthSizeMinusOne) + 1
case *codec.H265Ctx:
naluSizeLen = int(ctx.RecordInfo.LengthSizeMinusOne) + 1
}
// Prefix every NALU with its length
for _, nalu := range rawData {
util.PutBE(buffer.Malloc(naluSizeLen), nalu.Size) // write the NALU length
var buffers = slices.Clone(nalu.Buffers) // clone the NALU buffers
buffers.WriteTo(&buffer) // write the NALU payload
}
v.Data = buffer
v.Size = len(v.Data)
case []byte:
// Copy the byte slice directly
v.Data = rawData
v.Size = len(v.Data)
default:
// Other payload types are not handled; drop the data
v.Data = nil
v.Size = 0
}
} else {
v.Data = nil
v.Size = 0
}
}
// GetTimestamp implements pkg.IAVFrame.
func (v *Video) GetTimestamp() time.Duration {
return time.Duration(v.Timestamp) * time.Millisecond
}
// GetCTS implements pkg.IAVFrame.
func (v *Video) GetCTS() time.Duration {
return time.Duration(v.CTS) * time.Millisecond
}
// GetSize implements pkg.IAVFrame.
func (v *Video) GetSize() int {
return v.Size
}
// Recycle implements pkg.IAVFrame.
func (v *Video) Recycle() {
// Release resources
if v.allocator != nil && v.Data != nil {
// If the data had been allocated through the allocator it could be recycled here;
// since copied data is used for now, no special handling is required yet
}
v.Data = nil
v.Size = 0
v.KeyFrame = false
v.Timestamp = 0
v.CTS = 0
v.Offset = 0
v.Duration = 0
}
// String implements pkg.IAVFrame.
func (v *Video) String() string {
return fmt.Sprintf("MP4Video[ts:%d, cts:%d, size:%d, keyframe:%t]",
v.Timestamp, v.CTS, v.Size, v.KeyFrame)
}
// Dump implements pkg.IAVFrame.
func (v *Video) Dump(t byte, w io.Writer) {
// Write the payload to the writer
if v.Data != nil {
w.Write(v.Data)
}
}
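A small sketch of recovering the NAL units from a Video frame (illustrative; h264Ctx stands for the track's *codec.H264Ctx and fmt is assumed to be imported):

	if raw, err := v.Demux(h264Ctx); err == nil {
		nalus := raw.(pkg.Nalus)
		fmt.Println("nalu count:", len(nalus))
	}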

99
plugin/mp4/util.go Normal file
View File

@@ -0,0 +1,99 @@
package plugin_mp4
import (
"fmt"
"io"
"log"
"os/exec"
"github.com/deepch/vdk/codec/h264parser"
"github.com/deepch/vdk/codec/h265parser"
"m7s.live/v5/pkg"
"m7s.live/v5/pkg/codec"
mp4 "m7s.live/v5/plugin/mp4/pkg"
"m7s.live/v5/plugin/mp4/pkg/box"
)
// ProcessWithFFmpeg pipes the video frames through FFmpeg and produces a snapshot
func ProcessWithFFmpeg(samples []box.Sample, index int, videoTrack *mp4.Track, output io.Writer) error {
// Build the ffmpeg command, outputting JPEG directly
cmd := exec.Command("ffmpeg",
"-hide_banner",
"-i", "pipe:0",
"-vf", fmt.Sprintf("select=eq(n\\,%d)", index),
"-vframes", "1",
"-f", "mjpeg",
"pipe:1")
stdin, err := cmd.StdinPipe()
if err != nil {
return err
}
stdout, err := cmd.StdoutPipe()
if err != nil {
return err
}
stderr, err := cmd.StderrPipe()
if err != nil {
return err
}
go func() {
errOutput, _ := io.ReadAll(stderr)
log.Printf("FFmpeg stderr: %s", errOutput)
}()
if err = cmd.Start(); err != nil {
log.Printf("cmd.Start失败: %v", err)
return err
}
go func() {
defer stdin.Close()
convert := pkg.NewAVFrameConvert[*pkg.AnnexB](nil, nil)
switch videoTrack.Cid {
case box.MP4_CODEC_H264:
var h264Ctx codec.H264Ctx
h264Ctx.CodecData, err = h264parser.NewCodecDataFromAVCDecoderConfRecord(videoTrack.ExtraData)
if err != nil {
log.Printf("解析H264失败: %v", err)
return
}
convert.FromTrack.ICodecCtx = &h264Ctx
case box.MP4_CODEC_H265:
var h265Ctx codec.H265Ctx
h265Ctx.CodecData, err = h265parser.NewCodecDataFromAVCDecoderConfRecord(videoTrack.ExtraData)
if err != nil {
log.Printf("解析H265失败: %v", err)
return
}
convert.FromTrack.ICodecCtx = &h265Ctx
default:
log.Printf("不支持的编解码器: %v", videoTrack.Cid)
return
}
for _, sample := range samples {
annexb, err := convert.Convert(&mp4.Video{
Sample: sample,
})
if err != nil {
log.Printf("转换失败: %v", err)
continue
}
annexb.WriteTo(stdin)
}
}()
// Read the JPEG data from ffmpeg's stdout and write it to the output
if _, err = io.Copy(output, stdout); err != nil {
log.Printf("read failed: %v", err)
return err
}
if err = cmd.Wait(); err != nil {
log.Printf("cmd.Wait失败: %v", err)
return err
}
log.Printf("ffmpeg JPEG输出成功")
return nil
}
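For reference, a usage sketch of the function above (ffmpeg must be on PATH; the samples, GOP index and track come from the extraction code shown earlier):

	var jpeg bytes.Buffer
	if err := ProcessWithFFmpeg(filteredSamples, sampleIdx, videoTrack, &jpeg); err != nil {
		log.Printf("snapshot failed: %v", err)
	}
	// jpeg.Bytes() now holds the encoded image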

View File

@@ -46,7 +46,7 @@ func parseRGBA(rgba string) (color.RGBA, error) {
func (p *SnapPlugin) snap(publisher *m7s.Publisher, watermarkConfig *snap_pkg.WatermarkConfig) (*bytes.Buffer, error) {
// Grab the video frames
annexb, _, err := snap_pkg.GetVideoFrame(publisher, p.Server)
annexb, err := snap_pkg.GetVideoFrame(publisher, p.Server)
if err != nil {
return nil, err
}

View File

@@ -183,7 +183,7 @@ func (t *TimeSnapTask) GetTickInterval() time.Duration {
// Tick performs the scheduled snapshot
func (t *TimeSnapTask) Tick(any) {
// Grab the video frames
annexb, _, err := GetVideoFrame(t.job.OriginPublisher, t.job.Plugin.Server)
annexb, err := GetVideoFrame(t.job.OriginPublisher, t.job.Plugin.Server)
if err != nil {
t.Error("get video frame failed", "error", err.Error())
return

View File

@@ -10,45 +10,34 @@ import (
)
// GetVideoFrame fetches the video frame data
func GetVideoFrame(publisher *m7s.Publisher, server *m7s.Server) ([]*pkg.AnnexB, *pkg.AVTrack, error) {
func GetVideoFrame(publisher *m7s.Publisher, server *m7s.Server) ([]*pkg.AnnexB, error) {
if publisher.VideoTrack.AVTrack == nil {
return nil, nil, pkg.ErrNotFound
return nil, pkg.ErrNotFound
}
// Wait until the video track is ready
if err := publisher.VideoTrack.WaitReady(); err != nil {
return nil, nil, err
return nil, err
}
// Create a reader and start from the latest IDR frame
reader := pkg.NewAVRingReader(publisher.VideoTrack.AVTrack, "snapshot")
if err := reader.StartRead(publisher.VideoTrack.GetIDR()); err != nil {
return nil, nil, err
return nil, err
}
defer reader.StopRead()
var track pkg.AVTrack
var annexb pkg.AnnexB
var err error
track.ICodecCtx, track.SequenceFrame, err = annexb.ConvertCtx(publisher.VideoTrack.ICodecCtx)
if err != nil {
return nil, nil, err
}
if track.ICodecCtx == nil {
return nil, nil, pkg.ErrUnsupportCodec
}
var converter = pkg.NewAVFrameConvert[*pkg.AnnexB](publisher.VideoTrack.AVTrack, nil)
var annexbList []*pkg.AnnexB
for lastFrameSequence := publisher.VideoTrack.AVTrack.LastValue.Sequence; reader.Value.Sequence <= lastFrameSequence; reader.ReadNext() {
if reader.Value.Raw == nil {
if err := reader.Value.Demux(publisher.VideoTrack.ICodecCtx); err != nil {
return nil, nil, err
}
annexb, err := converter.ConvertFromAVFrame(&reader.Value)
if err != nil {
return nil, err
}
var annexb pkg.AnnexB
annexb.Mux(track.ICodecCtx, &reader.Value)
annexbList = append(annexbList, &annexb)
annexbList = append(annexbList, annexb)
}
return annexbList, &track, nil
return annexbList, nil
}
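A call-site sketch of the new two-value signature (same package, not part of the patch): it simply concatenates the returned Annex-B frames into one buffer, assuming the publisher is already ready.

// Sketch only.
func collectAnnexB(publisher *m7s.Publisher, server *m7s.Server) (*bytes.Buffer, error) {
    frames, err := GetVideoFrame(publisher, server)
    if err != nil {
        return nil, err
    }
    var buf bytes.Buffer
    for _, frame := range frames {
        if _, err = frame.WriteTo(&buf); err != nil {
            return nil, err
        }
    }
    return &buf, nil
}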
// ProcessWithFFmpeg pipes the video frames through FFmpeg and produces a snapshot

View File

@@ -153,6 +153,8 @@ func (wsh *WebSocketHandler) Go() (err error) {
wsh.handleAnswer(signal)
case SignalTypeGetStreamList:
wsh.handleGetStreamList()
case SignalTypePing:
wsh.handlePing(signal)
default:
wsh.sendError("Unknown signal type: " + string(signal.Type))
}
@@ -161,7 +163,9 @@ func (wsh *WebSocketHandler) Go() (err error) {
// Dispose 清理资源
func (wsh *WebSocketHandler) Dispose() {
wsh.PeerConnection.Close()
if wsh.PeerConnection != nil {
wsh.PeerConnection.Close()
}
wsh.conn.Close()
}
@@ -190,6 +194,20 @@ func (wsh *WebSocketHandler) sendError(message string) error {
})
}
func (wsh *WebSocketHandler) handlePing(signal Signal) {
// Handle a ping signal by replying with a pong
if signal.Type == SignalTypePing {
wsh.Debug("Received ping, sending pong")
if err := wsh.sendJSON(Signal{
Type: SignalTypePong,
}); err != nil {
wsh.Error("Failed to send pong", "error", err)
}
} else {
wsh.sendError("Invalid signal type for ping: " + string(signal.Type))
}
}
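A hedged client-side sketch of the keep-alive exchange using gorilla/websocket; the endpoint URL and the assumption that Signal marshals its Type field as "type" are illustrative, not taken from this patch.

// Sketch only: send {"type":"ping"} and wait for the pong reply.
conn, _, err := websocket.DefaultDialer.Dial("ws://localhost:8080/webrtc/signal", nil) // URL is hypothetical
if err != nil {
    log.Fatal(err)
}
defer conn.Close()
if err = conn.WriteJSON(map[string]string{"type": "ping"}); err != nil {
    log.Fatal(err)
}
var reply struct {
    Type string `json:"type"`
}
if err = conn.ReadJSON(&reply); err != nil {
    log.Fatal(err)
}
log.Println("received:", reply.Type) // expected: "pong"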
// handlePublish handles a publish signal
func (wsh *WebSocketHandler) handlePublish(signal Signal) {
if publisher, err := wsh.config.Publish(wsh, signal.StreamPath); err == nil {

View File

@@ -37,13 +37,12 @@ var (
type WebRTCPlugin struct {
m7s.Plugin
ICEServers []ICEServer `desc:"ice服务器配置"`
Port string `default:"tcp:9000" desc:"监听端口"`
PLI time.Duration `default:"2s" desc:"发送PLI请求间隔"` // interval between PLI requests after video packet loss
EnableDC bool `default:"true" desc:"是否启用DataChannel"` // whether to fall back to DataChannel transport when the codec is unsupported
MimeType []string `desc:"MimeType过滤列表为空则不过滤"` // MimeType allow-list; supported values such as video/H264, audio/opus
s SettingEngine
portMapping map[int]int // mapping from internal ports to external ports
ICEServers []ICEServer `desc:"ice服务器配置"`
Port string `default:"tcp:9000" desc:"监听端口"`
PLI time.Duration `default:"2s" desc:"发送PLI请求间隔"` // interval between PLI requests after video packet loss
EnableDC bool `default:"true" desc:"是否启用DataChannel"` // whether to fall back to DataChannel transport when the codec is unsupported
MimeType []string `desc:"MimeType过滤列表为空则不过滤"` // MimeType allow-list; supported values such as video/H264, audio/opus
s SettingEngine
}
func (p *WebRTCPlugin) RegisterHandler() map[string]http.HandlerFunc {
@@ -307,90 +306,50 @@ func (p *WebRTCPlugin) initSettingEngine() error {
// configurePort applies the port configuration
func (p *WebRTCPlugin) configurePort() error {
// Use ParsePort instead of ParsePort2 to obtain port-mapping information
portInfo, err := ParsePort(p.Port)
ports, err := ParsePort2(p.Port)
if err != nil {
p.Error("webrtc port config error", "error", err, "port", p.Port)
return err
}
// Initialize the port mapping
p.portMapping = make(map[int]int)
// If a mapping is configured, store the internal-to-external relation
if portInfo.HasMapping() {
if portInfo.IsRange() {
// Port-range mapping
for i := 0; i <= portInfo.Ports[1]-portInfo.Ports[0]; i++ {
internalPort := portInfo.Ports[0] + i
var externalPort int
if portInfo.IsRangeMapping() {
// The mapped ports form a range as well
externalPort = portInfo.Map[0] + i
} else {
// The mapped port is a single port
externalPort = portInfo.Map[0]
}
p.portMapping[internalPort] = externalPort
}
} else {
// Single-port mapping
p.portMapping[portInfo.Ports[0]] = portInfo.Map[0]
switch v := ports.(type) {
case TCPPort:
tcpport := int(v)
tcpl, err := net.ListenTCP("tcp", &net.TCPAddr{
IP: net.IP{0, 0, 0, 0},
Port: tcpport,
})
p.OnDispose(func() {
_ = tcpl.Close()
})
if err != nil {
p.Error("webrtc listener tcp", "error", err)
}
p.Info("Port mapping configured", "mapping", p.portMapping)
}
// Configure according to the protocol type
if portInfo.IsTCP() {
if portInfo.IsRange() {
// TCP port ranges may need special handling here
p.Error("TCP port range not supported in current implementation")
return fmt.Errorf("TCP port range not supported")
} else {
// Single TCP port
tcpport := portInfo.Ports[0]
tcpl, err := net.ListenTCP("tcp", &net.TCPAddr{
IP: net.IP{0, 0, 0, 0},
Port: tcpport,
})
p.OnDispose(func() {
_ = tcpl.Close()
})
if err != nil {
p.Error("webrtc listener tcp", "error", err)
return err
}
p.SetDescription("tcp", fmt.Sprintf("%d", tcpport))
p.Info("webrtc start listen", "port", tcpport)
p.s.SetICETCPMux(NewICETCPMux(nil, tcpl, 4096))
p.s.SetNetworkTypes([]NetworkType{NetworkTypeTCP4, NetworkTypeTCP6})
p.s.DisableSRTPReplayProtection(true)
}
} else {
// UDP configuration
if portInfo.IsRange() {
// UDP port range
p.s.SetEphemeralUDPPortRange(uint16(portInfo.Ports[0]), uint16(portInfo.Ports[1]))
p.SetDescription("udp", fmt.Sprintf("%d-%d", portInfo.Ports[0], portInfo.Ports[1]))
} else {
// Single UDP port
udpport := portInfo.Ports[0]
udpListener, err := net.ListenUDP("udp", &net.UDPAddr{
IP: net.IP{0, 0, 0, 0},
Port: udpport,
})
p.OnDispose(func() {
_ = udpListener.Close()
})
if err != nil {
p.Error("webrtc listener udp", "error", err)
return err
}
p.SetDescription("udp", fmt.Sprintf("%d", udpport))
p.Info("webrtc start listen", "port", udpport)
p.s.SetICEUDPMux(NewICEUDPMux(nil, udpListener))
p.s.SetNetworkTypes([]NetworkType{NetworkTypeUDP4, NetworkTypeUDP6})
p.SetDescription("tcp", fmt.Sprintf("%d", tcpport))
p.Info("webrtc start listen", "port", tcpport)
p.s.SetICETCPMux(NewICETCPMux(nil, tcpl, 4096))
p.s.SetNetworkTypes([]NetworkType{NetworkTypeTCP4, NetworkTypeTCP6})
p.s.DisableSRTPReplayProtection(true)
case UDPRangePort:
p.s.SetEphemeralUDPPortRange(uint16(v[0]), uint16(v[1]))
p.SetDescription("udp", fmt.Sprintf("%d-%d", v[0], v[1]))
case UDPPort:
// Create the shared WebRTC UDP port (default 9000)
udpListener, err := net.ListenUDP("udp", &net.UDPAddr{
IP: net.IP{0, 0, 0, 0},
Port: int(v),
})
p.OnDispose(func() {
_ = udpListener.Close()
})
if err != nil {
p.Error("webrtc listener udp", "error", err)
return err
}
p.SetDescription("udp", fmt.Sprintf("%d", v))
p.Info("webrtc start listen", "port", v)
p.s.SetICEUDPMux(NewICEUDPMux(nil, udpListener))
p.s.SetNetworkTypes([]NetworkType{NetworkTypeUDP4, NetworkTypeUDP6})
}
return nil
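For orientation, a sketch of how the three accepted Port forms are expected to dispatch through the type switch above; the concrete strings, in particular the dash-separated UDP range, are assumptions rather than values taken from this patch.

// Illustration only.
for _, portStr := range []string{"tcp:9000", "udp:9000", "udp:20000-30000"} {
    ports, err := ParsePort2(portStr)
    if err != nil {
        continue
    }
    switch v := ports.(type) {
    case TCPPort:
        fmt.Println(portStr, "-> single TCP listener on", int(v))
    case UDPPort:
        fmt.Println(portStr, "-> shared UDP mux on", int(v))
    case UDPRangePort:
        fmt.Println(portStr, "-> ephemeral UDP range", v[0], "to", v[1])
    }
}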
@@ -409,33 +368,9 @@ func (p *WebRTCPlugin) CreatePC(sd SessionDescription, conf Configuration) (pc *
return
}
pc, err = api.NewPeerConnection(conf)
if err != nil {
return
if err == nil {
err = pc.SetRemoteDescription(sd)
}
// If port mapping is configured, log ICE candidate info for debugging
if len(p.portMapping) > 0 {
pc.OnICECandidate(func(candidate *ICECandidate) {
if candidate != nil {
// Record the port-mapping info (for debugging and monitoring)
if mappedPort, exists := p.portMapping[int(candidate.Port)]; exists {
p.Debug("ICE candidate with port mapping detected",
"original_port", candidate.Port,
"mapped_port", mappedPort,
"candidate_address", candidate.Address,
"candidate_type", candidate.Typ)
candidate.Port = uint16(mappedPort) // rewrite the candidate port to the mapped port
} else {
p.Debug("ICE candidate generated",
"port", candidate.Port,
"address", candidate.Address,
"type", candidate.Typ)
}
}
})
}
err = pc.SetRemoteDescription(sd)
return
}

View File

@@ -13,6 +13,9 @@ const (
SignalTypeUnpublish SignalType = "unpublish"
SignalTypeAnswer SignalType = "answer"
SignalTypeGetStreamList SignalType = "getStreamList"
SignalTypePing SignalType = "ping"
SignalTypePong SignalType = "pong"
SignalTypeError SignalType = "error"
)
type Signal struct {

View File

@@ -314,6 +314,30 @@ func (p *Publisher) trackAdded() error {
return nil
}
func (p *Publisher) SetCodecCtx(ctx codec.ICodecCtx, data IAVFrame) {
if _, ok := ctx.(IAudioCodecCtx); ok {
t := p.AudioTrack.AVTrack
if t == nil {
t = NewAVTrack(data, p.Logger.With("track", "audio"), &p.Publish, p.audioReady, ctx)
p.AudioTrack.Set(t)
p.Call(p.trackAdded)
} else {
t.ICodecCtx = ctx
}
return
} else if _, ok := ctx.(IVideoCodecCtx); ok {
t := p.VideoTrack.AVTrack
if t == nil {
t = NewAVTrack(data, p.Logger.With("track", "video"), &p.Publish, p.videoReady, ctx)
p.VideoTrack.Set(t)
p.Call(p.trackAdded)
} else {
t.ICodecCtx = ctx
}
return
}
}
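A hedged sketch of how a demuxer could use the new SetCodecCtx to pre-register the video codec before the first WriteVideo call. It assumes *codec.H264Ctx satisfies IVideoCodecCtx and that AnnexB satisfies IAVFrame; extraData stands in for the avcC record read from the container.

// Sketch only.
func registerVideoCodec(publisher *m7s.Publisher, extraData []byte) error {
    var h264Ctx codec.H264Ctx
    var err error
    h264Ctx.CodecData, err = h264parser.NewCodecDataFromAVCDecoderConfRecord(extraData)
    if err != nil {
        return err
    }
    publisher.SetCodecCtx(&h264Ctx, &pkg.AnnexB{})
    return nil
}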
func (p *Publisher) WriteVideo(data IAVFrame) (err error) {
defer func() {
if err != nil {

View File

@@ -23,7 +23,6 @@ type (
// EventRecordStream holds the common fields of a recording event
EventRecordStream struct {
CreatedAt time.Time
*config.RecordEvent
RecordStream
}
@@ -44,8 +43,8 @@ type (
}
RecordStream struct {
ID uint `gorm:"primarykey"`
StartTime time.Time `gorm:"type:datetime;default:NULL"`
EndTime time.Time `gorm:"type:datetime;default:NULL"`
StartTime time.Time `gorm:"default:NULL"`
EndTime time.Time `gorm:"default:NULL"`
Duration uint32 `gorm:"comment:录像时长;default:0"`
Filename string `json:"fileName" desc:"文件名" gorm:"type:varchar(255);comment:文件名"`
Type string `json:"type" desc:"录像文件类型" gorm:"type:varchar(255);comment:录像文件类型,flv,mp4,raw,fmp4,hls"`
@@ -53,6 +52,7 @@ type (
StreamPath string
AudioCodec string
VideoCodec string
CreatedAt time.Time
DeletedAt gorm.DeletedAt `gorm:"index" yaml:"-"`
}
)
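Dropping the MySQL-specific type:datetime tag lets GORM choose the column type per dialect (datetime on MySQL/SQLite, timestamp on PostgreSQL, which is what the pg-compatibility commits target). A minimal migration sketch, assuming db is the usual *gorm.DB:

// Sketch only.
if err := db.AutoMigrate(&RecordStream{}); err != nil {
    return err
}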
@@ -204,9 +204,9 @@ type eventRecordCheck struct {
func (t *eventRecordCheck) Run() (err error) {
var eventRecordStreams []EventRecordStream
t.DB.Find(&eventRecordStreams, "type=? AND level=high AND stream_path=?", t.Type, t.streamPath) // look up event recordings marked as important (they must not be auto-deleted)
t.DB.Find(&eventRecordStreams, "type=? AND event_level=high AND stream_path=?", t.Type, t.streamPath) // look up event recordings marked as important (they must not be auto-deleted)
for _, recordStream := range eventRecordStreams {
t.DB.Model(&EventRecordStream{}).Where(`level=low AND start_time <= ? and end_time >= ?`, recordStream.EndTime, recordStream.StartTime).Update("level", config.EventLevelHigh)
t.DB.Model(&EventRecordStream{}).Where(`event_level=low AND start_time <= ? and end_time >= ?`, recordStream.EndTime, recordStream.StartTime).Update("event_level", config.EventLevelHigh)
}
return
}

274
scripts/packet_replayer.py Normal file
View File

@@ -0,0 +1,274 @@
#!/usr/bin/env python3
import argparse
from scapy.all import rdpcap, IP, TCP, UDP, Raw, send, sr1, sr, PcapReader
import sys
import time
from collections import defaultdict
import random
import threading
import queue
import socket
import heapq
class PacketReplayer:
def __init__(self, pcap_file, target_ip, target_port):
self.pcap_file = pcap_file
self.target_ip = target_ip
self.target_port = target_port
self.connections = defaultdict(list) # packet sequences per connection
self.response_queue = queue.Queue()
self.stop_reading = threading.Event()
self.socket = None
self.next_seq = None # next expected sequence number
self.pending_packets = [] # priority queue (heap) of packets waiting to be sent
self.seen_packets = set() # used for de-duplication
self.initial_seq = None # initial sequence number
self.initial_ack = None # initial acknowledgement number
self.client_ip = None # client IP
self.client_port = None # client port
self.first_data_packet = True # whether the next data packet is the first one
self.total_packets_sent = 0 # number of packets sent
self.total_bytes_sent = 0 # total bytes sent
def establish_tcp_connection(self, src_port):
"""Establish the TCP connection"""
print(f"Establishing TCP connection to {self.target_ip}:{self.target_port}...")
try:
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# Do not bind a source port; let the OS pick one
self.socket.settimeout(5)
self.socket.connect((self.target_ip, self.target_port))
actual_port = self.socket.getsockname()[1]
print(f"Using local port: {actual_port}")
print("TCP connection established")
return True
except Exception as e:
print(f"Failed to establish connection: {e}")
if self.socket:
self.socket.close()
self.socket = None
return False
def process_packet(self, packet, src_ip=None, src_port=None, protocol=None):
"""Process a single packet"""
if IP not in packet:
return
if src_ip and packet[IP].src != src_ip:
return
if protocol == 'tcp' and TCP in packet:
if src_port and packet[TCP].sport != src_port:
return
conn_id = (packet[IP].src, packet[TCP].sport)
self.connections[conn_id].append(packet)
elif protocol == 'udp' and UDP in packet:
if src_port and packet[UDP].sport != src_port:
return
conn_id = (packet[IP].src, packet[UDP].sport)
self.connections[conn_id].append(packet)
elif not protocol:
if TCP in packet:
if src_port and packet[TCP].sport != src_port:
return
conn_id = (packet[IP].src, packet[TCP].sport)
self.connections[conn_id].append(packet)
elif UDP in packet:
if src_port and packet[UDP].sport != src_port:
return
conn_id = (packet[IP].src, packet[UDP].sport)
self.connections[conn_id].append(packet)
def send_packet(self, packet, packet_count):
"""Send a single packet, handling TCP sequence numbers"""
if TCP not in packet or IP not in packet:
return True
try:
# Check whether the packet is destined for the target port
if packet[TCP].dport == self.target_port:
# Record the client info
if self.client_ip is None:
self.client_ip = packet[IP].src
self.client_port = packet[TCP].sport
print(f"Identified client: {self.client_ip}:{self.client_port}")
# Read the TCP sequence and acknowledgement numbers
seq = packet[TCP].seq
ack = packet[TCP].ack
flags = packet[TCP].flags
# Log packet info
print(f"[#{packet_count}] Processing packet: src={packet[IP].src}:{packet[TCP].sport} -> dst={packet[IP].dst}:{packet[TCP].dport}, seq={seq}, ack={ack}, flags={flags}")
# Send the current packet
if Raw in packet:
# If this is the first data packet, record the initial sequence number
if self.first_data_packet:
self.initial_seq = seq
self.next_seq = seq
self.first_data_packet = False
print(f"First data packet, initial sequence number: {seq}")
# Skip retransmitted packets
if seq in self.seen_packets:
print(f"Skipping retransmitted packet, seq: {seq}")
return True
# If the sequence number is ahead of what we expect, queue the packet
if seq > self.next_seq:
print(f"Out-of-order packet queued, seq: {seq}, expected seq: {self.next_seq}")
heapq.heappush(self.pending_packets, (seq, packet))
return True
payload = packet[Raw].load
print(f"Sending packet, payload size: {len(payload)} bytes")
self.socket.send(payload)
self.seen_packets.add(seq)
old_seq = self.next_seq
self.next_seq = self.next_seq + len(payload)
print(f"Sequence number updated: {old_seq} -> {self.next_seq}")
# Update statistics
self.total_packets_sent += 1
self.total_bytes_sent += len(payload)
# Flush any queued packets that are now in order
while self.pending_packets and self.pending_packets[0][0] == self.next_seq:
_, next_packet = heapq.heappop(self.pending_packets)
if Raw in next_packet:
next_payload = next_packet[Raw].load
print(f"Sending queued packet, payload size: {len(next_payload)} bytes")
self.socket.send(next_payload)
self.seen_packets.add(self.next_seq)
old_seq = self.next_seq
self.next_seq += len(next_payload)
print(f"Sequence number updated: {old_seq} -> {self.next_seq}")
# Update statistics
self.total_packets_sent += 1
self.total_bytes_sent += len(next_payload)
packet_time = time.strftime("%H:%M:%S", time.localtime(float(packet.time)))
print(f"[{packet_time}] [#{packet_count}] Packet sent (seq: {seq}, payload size: {len(payload)} bytes)")
else:
# For control packets, just note the flag type
if flags & 0x02: # SYN
print(f"[#{packet_count}] Handling SYN packet")
elif flags & 0x10: # ACK
print(f"[#{packet_count}] Handling ACK packet")
else:
print(f"[#{packet_count}] Skipping packet with no payload")
else:
print(f"[#{packet_count}] Skipping packet not aimed at the target port: src={packet[IP].src}:{packet[TCP].sport} -> dst={packet[IP].dst}:{packet[TCP].dport}")
return True
except Exception as e:
print(f"Error sending packet {packet_count}: {e}")
return False
def response_reader(self, src_port):
"""Thread body that keeps reading responses from the server"""
while not self.stop_reading.is_set() and self.socket:
try:
data = self.socket.recv(4096)
if data:
self.response_queue.put(data)
print(f"Received response: {len(data)} bytes")
except socket.timeout:
continue
except Exception as e:
if not self.stop_reading.is_set():
print(f"Error reading response: {e}")
break
time.sleep(0.1)
def replay_packets(self, src_ip=None, src_port=None, protocol=None, delay=0):
"""Read the capture and replay packets as they are parsed"""
print(f"Starting to read and replay packets to {self.target_ip}:{self.target_port}")
try:
reader = PcapReader(self.pcap_file)
packet_count = 0
connection_established = False
for packet in reader:
packet_count += 1
if IP not in packet:
continue
if src_ip and packet[IP].src != src_ip:
continue
current_src_port = None
if protocol == 'tcp' and TCP in packet:
if src_port and packet[TCP].sport != src_port:
continue
current_src_port = packet[TCP].sport
elif protocol == 'udp' and UDP in packet:
if src_port and packet[UDP].sport != src_port:
continue
current_src_port = packet[UDP].sport
elif not protocol:
if TCP in packet:
if src_port and packet[TCP].sport != src_port:
continue
current_src_port = packet[TCP].sport
elif UDP in packet:
if src_port and packet[UDP].sport != src_port:
continue
current_src_port = packet[UDP].sport
else:
continue
else:
continue
if not connection_established:
if not self.establish_tcp_connection(current_src_port):
print("Failed to establish the connection, exiting")
return
self.stop_reading.clear()
reader_thread = threading.Thread(target=self.response_reader, args=(current_src_port,))
reader_thread.daemon = True
reader_thread.start()
connection_established = True
if not self.send_packet(packet, packet_count):
print("Failed to send packet, exiting")
return
if delay > 0:
time.sleep(delay)
print("\nStatistics:")
print(f"Processed {packet_count} packets in total")
print(f"Successfully sent {self.total_packets_sent} packets")
print(f"Sent {self.total_bytes_sent} bytes in total")
except Exception as e:
print(f"Error while processing packets: {e}")
sys.exit(1)
finally:
self.stop_reading.set()
if self.socket:
self.socket.close()
self.socket = None
reader.close()
def main():
parser = argparse.ArgumentParser(description='Wireshark packet replay tool')
parser.add_argument('pcap_file', help='path to the pcap file')
parser.add_argument('target_ip', help='target IP address')
parser.add_argument('target_port', type=int, help='target port')
parser.add_argument('--delay', type=float, default=0, help='delay between packets (seconds)')
parser.add_argument('--src-ip', help='filter by source IP address')
parser.add_argument('--src-port', type=int, help='filter by source port')
parser.add_argument('--protocol', choices=['tcp', 'udp'], help='filter by protocol type')
args = parser.parse_args()
replayer = PacketReplayer(args.pcap_file, args.target_ip, args.target_port)
replayer.replay_packets(args.src_ip, args.src_port, args.protocol, args.delay)
if __name__ == '__main__':
main()
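A hypothetical invocation matching the arguments defined above (file name, address, ports and delay are placeholders); the script depends on scapy (pip install scapy):

python3 scripts/packet_replayer.py capture.pcap 192.168.1.10 1935 --protocol tcp --src-port 52000 --delay 0.01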