From baf1c3391a583c38731fd0ebb634038d5e41a374 Mon Sep 17 00:00:00 2001
From: Ingo Oppermann
Date: Mon, 3 Apr 2023 21:21:02 +0200
Subject: [PATCH 1/2] Deprecate ENV names that do not correspond to JSON name

---
 config/config.go | 28 ++++++++++++++--------------
 1 file changed, 14 insertions(+), 14 deletions(-)

diff --git a/config/config.go b/config/config.go
index b6eab656..33d9492b 100644
--- a/config/config.go
+++ b/config/config.go
@@ -150,7 +150,7 @@ func (d *Config) init() {
     // Log
     d.vars.Register(value.NewString(&d.Log.Level, "info"), "log.level", "CORE_LOG_LEVEL", nil, "Loglevel: silent, error, warn, info, debug", false, false)
     d.vars.Register(value.NewStringList(&d.Log.Topics, []string{}, ","), "log.topics", "CORE_LOG_TOPICS", nil, "Show only selected log topics", false, false)
-    d.vars.Register(value.NewInt(&d.Log.MaxLines, 1000), "log.max_lines", "CORE_LOG_MAXLINES", nil, "Number of latest log lines to keep in memory", false, false)
+    d.vars.Register(value.NewInt(&d.Log.MaxLines, 1000), "log.max_lines", "CORE_LOG_MAX_LINES", []string{"CORE_LOG_MAXLINES"}, "Number of latest log lines to keep in memory", false, false)
 
     // DB
     d.vars.Register(value.NewMustDir(&d.DB.Dir, "./config", d.fs), "db.dir", "CORE_DB_DIR", nil, "Directory for holding the operational data", false, false)
@@ -182,19 +182,19 @@ func (d *Config) init() {
     d.vars.Register(value.NewBool(&d.TLS.Enable, false), "tls.enable", "CORE_TLS_ENABLE", nil, "Enable HTTPS", false, false)
     d.vars.Register(value.NewBool(&d.TLS.Auto, false), "tls.auto", "CORE_TLS_AUTO", nil, "Enable Let's Encrypt certificate", false, false)
     d.vars.Register(value.NewEmail(&d.TLS.Email, "cert@datarhei.com"), "tls.email", "CORE_TLS_EMAIL", nil, "Email for Let's Encrypt registration", false, false)
-    d.vars.Register(value.NewFile(&d.TLS.CertFile, "", d.fs), "tls.cert_file", "CORE_TLS_CERTFILE", nil, "Path to certificate file in PEM format", false, false)
-    d.vars.Register(value.NewFile(&d.TLS.KeyFile, "", d.fs), "tls.key_file", "CORE_TLS_KEYFILE", nil, "Path to key file in PEM format", false, false)
+    d.vars.Register(value.NewFile(&d.TLS.CertFile, "", d.fs), "tls.cert_file", "CORE_TLS_CERT_FILE", []string{"CORE_TLS_CERTFILE"}, "Path to certificate file in PEM format", false, false)
+    d.vars.Register(value.NewFile(&d.TLS.KeyFile, "", d.fs), "tls.key_file", "CORE_TLS_KEY_FILE", []string{"CORE_TLS_KEYFILE"}, "Path to key file in PEM format", false, false)
 
     // Storage
     d.vars.Register(value.NewFile(&d.Storage.MimeTypes, "./mime.types", d.fs), "storage.mimetypes_file", "CORE_STORAGE_MIMETYPES_FILE", []string{"CORE_MIMETYPES_FILE"}, "Path to file with mime-types", false, false)
 
     // Storage (Disk)
     d.vars.Register(value.NewMustDir(&d.Storage.Disk.Dir, "./data", d.fs), "storage.disk.dir", "CORE_STORAGE_DISK_DIR", nil, "Directory on disk, exposed on /", false, false)
-    d.vars.Register(value.NewInt64(&d.Storage.Disk.Size, 0), "storage.disk.max_size_mbytes", "CORE_STORAGE_DISK_MAXSIZEMBYTES", nil, "Max. allowed megabytes for storage.disk.dir, 0 for unlimited", false, false)
+    d.vars.Register(value.NewInt64(&d.Storage.Disk.Size, 0), "storage.disk.max_size_mbytes", "CORE_STORAGE_DISK_MAX_SIZE_MBYTES", []string{"CORE_STORAGE_DISK_MAXSIZEMBYTES"}, "Max. allowed megabytes for storage.disk.dir, 0 for unlimited", false, false)
     d.vars.Register(value.NewBool(&d.Storage.Disk.Cache.Enable, true), "storage.disk.cache.enable", "CORE_STORAGE_DISK_CACHE_ENABLE", nil, "Enable cache for /", false, false)
-    d.vars.Register(value.NewUint64(&d.Storage.Disk.Cache.Size, 0), "storage.disk.cache.max_size_mbytes", "CORE_STORAGE_DISK_CACHE_MAXSIZEMBYTES", nil, "Max. allowed cache size, 0 for unlimited", false, false)
-    d.vars.Register(value.NewInt64(&d.Storage.Disk.Cache.TTL, 300), "storage.disk.cache.ttl_seconds", "CORE_STORAGE_DISK_CACHE_TTLSECONDS", nil, "Seconds to keep files in cache", false, false)
-    d.vars.Register(value.NewUint64(&d.Storage.Disk.Cache.FileSize, 1), "storage.disk.cache.max_file_size_mbytes", "CORE_STORAGE_DISK_CACHE_MAXFILESIZEMBYTES", nil, "Max. file size to put in cache", false, false)
+    d.vars.Register(value.NewUint64(&d.Storage.Disk.Cache.Size, 0), "storage.disk.cache.max_size_mbytes", "CORE_STORAGE_DISK_CACHE_MAX_SIZE_MBYTES", []string{"CORE_STORAGE_DISK_CACHE_MAXSIZEMBYTES"}, "Max. allowed cache size, 0 for unlimited", false, false)
+    d.vars.Register(value.NewInt64(&d.Storage.Disk.Cache.TTL, 300), "storage.disk.cache.ttl_seconds", "CORE_STORAGE_DISK_CACHE_TTL_SECONDS", []string{"CORE_STORAGE_DISK_CACHE_TTLSECONDS"}, "Seconds to keep files in cache", false, false)
+    d.vars.Register(value.NewUint64(&d.Storage.Disk.Cache.FileSize, 1), "storage.disk.cache.max_file_size_mbytes", "CORE_STORAGE_DISK_CACHE_MAX_FILE_SIZE_MBYTES", []string{"CORE_STORAGE_DISK_CACHE_MAXFILESIZEMBYTES"}, "Max. file size to put in cache", false, false)
     d.vars.Register(value.NewStringList(&d.Storage.Disk.Cache.Types.Allow, []string{}, " "), "storage.disk.cache.type.allow", "CORE_STORAGE_DISK_CACHE_TYPES_ALLOW", []string{"CORE_STORAGE_DISK_CACHE_TYPES"}, "File extensions to cache, empty for all", false, false)
     d.vars.Register(value.NewStringList(&d.Storage.Disk.Cache.Types.Block, []string{".m3u8", ".mpd"}, " "), "storage.disk.cache.type.block", "CORE_STORAGE_DISK_CACHE_TYPES_BLOCK", nil, "File extensions not to cache, empty for none", false, false)
 
@@ -202,7 +202,7 @@ func (d *Config) init() {
     d.vars.Register(value.NewBool(&d.Storage.Memory.Auth.Enable, true), "storage.memory.auth.enable", "CORE_STORAGE_MEMORY_AUTH_ENABLE", nil, "Enable basic auth for PUT,POST, and DELETE on /memfs", false, false)
     d.vars.Register(value.NewString(&d.Storage.Memory.Auth.Username, "admin"), "storage.memory.auth.username", "CORE_STORAGE_MEMORY_AUTH_USERNAME", nil, "Username for Basic-Auth of /memfs", false, false)
     d.vars.Register(value.NewString(&d.Storage.Memory.Auth.Password, rand.StringAlphanumeric(18)), "storage.memory.auth.password", "CORE_STORAGE_MEMORY_AUTH_PASSWORD", nil, "Password for Basic-Auth of /memfs", false, true)
-    d.vars.Register(value.NewInt64(&d.Storage.Memory.Size, 0), "storage.memory.max_size_mbytes", "CORE_STORAGE_MEMORY_MAXSIZEMBYTES", nil, "Max. allowed megabytes for /memfs, 0 for unlimited", false, false)
+    d.vars.Register(value.NewInt64(&d.Storage.Memory.Size, 0), "storage.memory.max_size_mbytes", "CORE_STORAGE_MEMORY_MAX_SIZE_MBYTES", []string{"CORE_STORAGE_MEMORY_MAXSIZEMBYTES"}, "Max. allowed megabytes for /memfs, 0 for unlimited", false, false)
     d.vars.Register(value.NewBool(&d.Storage.Memory.Purge, false), "storage.memory.purge", "CORE_STORAGE_MEMORY_PURGE", nil, "Automatically remove the oldest files if /memfs is full", false, false)
 
     // Storage (S3)
@@ -234,17 +234,17 @@ func (d *Config) init() {
     d.vars.Register(value.NewStringList(&d.FFmpeg.Access.Input.Block, []string{}, " "), "ffmpeg.access.input.block", "CORE_FFMPEG_ACCESS_INPUT_BLOCK", nil, "List of blocked expression to match against the input addresses", false, false)
     d.vars.Register(value.NewStringList(&d.FFmpeg.Access.Output.Allow, []string{}, " "), "ffmpeg.access.output.allow", "CORE_FFMPEG_ACCESS_OUTPUT_ALLOW", nil, "List of allowed expression to match against the output addresses", false, false)
     d.vars.Register(value.NewStringList(&d.FFmpeg.Access.Output.Block, []string{}, " "), "ffmpeg.access.output.block", "CORE_FFMPEG_ACCESS_OUTPUT_BLOCK", nil, "List of blocked expression to match against the output addresses", false, false)
-    d.vars.Register(value.NewInt(&d.FFmpeg.Log.MaxLines, 50), "ffmpeg.log.max_lines", "CORE_FFMPEG_LOG_MAXLINES", nil, "Number of latest log lines to keep for each process", false, false)
-    d.vars.Register(value.NewInt(&d.FFmpeg.Log.MaxHistory, 3), "ffmpeg.log.max_history", "CORE_FFMPEG_LOG_MAXHISTORY", nil, "Number of latest logs to keep for each process", false, false)
+    d.vars.Register(value.NewInt(&d.FFmpeg.Log.MaxLines, 50), "ffmpeg.log.max_lines", "CORE_FFMPEG_LOG_MAX_LINES", []string{"CORE_FFMPEG_LOG_MAXLINES"}, "Number of latest log lines to keep for each process", false, false)
+    d.vars.Register(value.NewInt(&d.FFmpeg.Log.MaxHistory, 3), "ffmpeg.log.max_history", "CORE_FFMPEG_LOG_MAX_HISTORY", []string{"CORE_FFMPEG_LOG_MAXHISTORY"}, "Number of latest logs to keep for each process", false, false)
 
     // Playout
     d.vars.Register(value.NewBool(&d.Playout.Enable, false), "playout.enable", "CORE_PLAYOUT_ENABLE", nil, "Enable playout proxy where available", false, false)
-    d.vars.Register(value.NewPort(&d.Playout.MinPort, 0), "playout.min_port", "CORE_PLAYOUT_MINPORT", nil, "Min. playout server port", false, false)
-    d.vars.Register(value.NewPort(&d.Playout.MaxPort, 0), "playout.max_port", "CORE_PLAYOUT_MAXPORT", nil, "Max. playout server port", false, false)
+    d.vars.Register(value.NewPort(&d.Playout.MinPort, 0), "playout.min_port", "CORE_PLAYOUT_MIN_PORT", []string{"CORE_PLAYOUT_MINPORT"}, "Min. playout server port", false, false)
+    d.vars.Register(value.NewPort(&d.Playout.MaxPort, 0), "playout.max_port", "CORE_PLAYOUT_MAX_PORT", []string{"CORE_PLAYOUT_MAXPORT"}, "Max. playout server port", false, false)
 
     // Debug
     d.vars.Register(value.NewBool(&d.Debug.Profiling, false), "debug.profiling", "CORE_DEBUG_PROFILING", nil, "Enable profiling endpoint on /profiling", false, false)
-    d.vars.Register(value.NewInt(&d.Debug.ForceGC, 0), "debug.force_gc", "CORE_DEBUG_FORCEGC", nil, "Number of seconds between forcing GC to return memory to the OS", false, false)
+    d.vars.Register(value.NewInt(&d.Debug.ForceGC, 0), "debug.force_gc", "CORE_DEBUG_FORCE_GC", []string{"CORE_DEBUG_FORCEGC"}, "Number of seconds between forcing GC to return memory to the OS", false, false)
     d.vars.Register(value.NewInt64(&d.Debug.MemoryLimit, 0), "debug.memory_limit_mbytes", "CORE_DEBUG_MEMORY_LIMIT_MBYTES", nil, "Impose a soft memory limit for the core, in megabytes", false, false)
 
     // Metrics
@@ -260,7 +260,7 @@ func (d *Config) init() {
     d.vars.Register(value.NewBool(&d.Sessions.Persist, false), "sessions.persist", "CORE_SESSIONS_PERSIST", nil, "Whether to persist session history. Will be stored as sessions.json in db.dir", false, false)
     d.vars.Register(value.NewInt(&d.Sessions.PersistInterval, 300), "sessions.persist_interval_sec", "CORE_SESSIONS_PERSIST_INTERVAL_SEC", nil, "Interval in seconds in which to persist the current session history", false, false)
     d.vars.Register(value.NewUint64(&d.Sessions.MaxBitrate, 0), "sessions.max_bitrate_mbit", "CORE_SESSIONS_MAXBITRATE_MBIT", nil, "Max. allowed outgoing bitrate in mbit/s, 0 for unlimited", false, false)
-    d.vars.Register(value.NewUint64(&d.Sessions.MaxSessions, 0), "sessions.max_sessions", "CORE_SESSIONS_MAXSESSIONS", nil, "Max. allowed number of simultaneous sessions, 0 for unlimited", false, false)
+    d.vars.Register(value.NewUint64(&d.Sessions.MaxSessions, 0), "sessions.max_sessions", "CORE_SESSIONS_MAX_SESSIONS", []string{"CORE_SESSIONS_MAXSESSIONS"}, "Max. allowed number of simultaneous sessions, 0 for unlimited", false, false)
 
     // Service
     d.vars.Register(value.NewBool(&d.Service.Enable, false), "service.enable", "CORE_SERVICE_ENABLE", nil, "Enable connecting to the Restreamer Service", false, false)
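
Patch 1 renames each environment variable so that it matches the JSON key and registers the previous name as an alternative, so existing deployments keep working. The lookup code in config/vars is not part of this diff; the following is only a minimal sketch, in Go, of how a preferred name plus a list of deprecated alternatives can be resolved (resolveEnv and its output format are hypothetical, not the project's API):

    // resolveEnv is a hypothetical helper: it prefers the new variable name and
    // falls back to the deprecated alternatives, reporting which name matched.
    package main

    import (
        "fmt"
        "os"
    )

    func resolveEnv(name string, alts []string) (value string, matched string, ok bool) {
        if v, found := os.LookupEnv(name); found {
            return v, name, true
        }
        for _, alt := range alts {
            if v, found := os.LookupEnv(alt); found {
                return v, alt, true
            }
        }
        return "", "", false
    }

    func main() {
        // CORE_LOG_MAX_LINES is now preferred; CORE_LOG_MAXLINES keeps working
        // but can be flagged as deprecated when it is the name that matched.
        if v, matched, ok := resolveEnv("CORE_LOG_MAX_LINES", []string{"CORE_LOG_MAXLINES"}); ok {
            fmt.Printf("log.max_lines=%s (set via %s)\n", v, matched)
        }
    }

With such a fallback, deployments that still export the old names continue to start, while the documentation can point only to the names that mirror the JSON keys.
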
From 7e9e6fce8df5e0cceede230d840b3894b808397c Mon Sep 17 00:00:00 2001
From: Ingo Oppermann
Date: Tue, 4 Apr 2023 20:44:57 +0200
Subject: [PATCH 2/2] Add number of keyframes and extradata size to process progress data

---
 docs/docs.go             |  9 ++++++
 docs/swagger.json        |  9 ++++++
 docs/swagger.yaml        |  7 +++++
 ffmpeg/parse/parser.go   | 26 +++++++++++++++--
 ffmpeg/parse/stats.go    | 10 +++----
 ffmpeg/parse/types.go    | 62 +++++++++++++++++++++++++---------------
 http/api/progress.go     | 28 ++++++++++--------
 restream/app/avstream.go | 14 ++++-----
 restream/app/progress.go | 46 +++++++++++++++--------------
 9 files changed, 139 insertions(+), 72 deletions(-)

diff --git a/docs/docs.go b/docs/docs.go
index 484bd9ca..5f51006e 100644
--- a/docs/docs.go
+++ b/docs/docs.go
@@ -3342,6 +3342,11 @@ const docTemplate = `{
         "coder": {
             "type": "string"
         },
+        "extradata_size_bytes": {
+            "description": "bytes",
+            "type": "integer",
+            "format": "uint64"
+        },
         "format": {
             "type": "string"
         },
@@ -3364,6 +3369,10 @@ const docTemplate = `{
             "type": "integer",
             "format": "uint64"
         },
+        "keyframe": {
+            "type": "integer",
+            "format": "uint64"
+        },
         "layout": {
             "type": "string"
         },
diff --git a/docs/swagger.json b/docs/swagger.json
index a76838f6..426d1075 100644
--- a/docs/swagger.json
+++ b/docs/swagger.json
@@ -3335,6 +3335,11 @@
         "coder": {
             "type": "string"
         },
+        "extradata_size_bytes": {
+            "description": "bytes",
+            "type": "integer",
+            "format": "uint64"
+        },
         "format": {
             "type": "string"
         },
@@ -3357,6 +3362,10 @@
             "type": "integer",
             "format": "uint64"
         },
+        "keyframe": {
+            "type": "integer",
+            "format": "uint64"
+        },
         "layout": {
             "type": "string"
         },
diff --git a/docs/swagger.yaml b/docs/swagger.yaml
index 43e9479f..10ddcaaa 100644
--- a/docs/swagger.yaml
+++ b/docs/swagger.yaml
@@ -902,6 +902,10 @@ definitions:
         type: string
       coder:
         type: string
+      extradata_size_bytes:
+        description: bytes
+        format: uint64
+        type: integer
       format:
         type: string
       fps:
@@ -918,6 +922,9 @@ definitions:
         description: General
         format: uint64
         type: integer
+      keyframe:
+        format: uint64
+        type: integer
       layout:
         type: string
      packet:
diff --git a/ffmpeg/parse/parser.go b/ffmpeg/parse/parser.go
index 0adb53d2..edf0ca03 100644
--- a/ffmpeg/parse/parser.go
+++ b/ffmpeg/parse/parser.go
@@ -356,7 +356,7 @@ func (p *parser) Parse(line string) uint64 {
             if p.collector.IsCollectableIP(p.process.input[i].IP) {
                 p.collector.Activate("")
-                p.collector.Ingress("", int64(p.stats.input[i].diff.size)*1024)
+                p.collector.Ingress("", int64(p.stats.input[i].diff.size))
             }
         }
     }
 
@@ -373,7 +373,7 @@ func (p *parser) Parse(line string) uint64 {
             if p.collector.IsCollectableIP(p.process.output[i].IP) {
                 p.collector.Activate("")
-                p.collector.Egress("", int64(p.stats.output[i].diff.size)*1024)
+                p.collector.Egress("", int64(p.stats.output[i].diff.size))
             }
         }
     }
 
@@ -410,7 +410,7 @@ func (p *parser) parseDefaultProgress(line string) error {
     if matches = p.re.size.FindStringSubmatch(line); matches != nil {
         if x, err := strconv.ParseUint(matches[1], 10, 64); err == nil {
-            p.progress.ffmpeg.Size = x
+            p.progress.ffmpeg.Size = x * 1024
         }
     }
 
@@ -485,6 +485,26 @@ func (p *parser) parseFFmpegProgress(line string) error {
         return fmt.Errorf("output length mismatch (have: %d, want: %d)", len(progress.Output), len(p.process.output))
     }
 
+    if progress.Size == 0 {
+        progress.Size = progress.SizeKB * 1024
+    }
+
+    for i, io := range progress.Input {
+        if io.Size == 0 {
+            io.Size = io.SizeKB * 1024
+        }
+
+        progress.Input[i].Size = io.Size
+    }
+
+    for i, io := range progress.Output {
+        if io.Size == 0 {
+            io.Size = io.SizeKB * 1024
+        }
+
+        progress.Output[i].Size = io.Size
+    }
+
     p.progress.ffmpeg = progress
 
     return nil
diff --git a/ffmpeg/parse/stats.go b/ffmpeg/parse/stats.go
index df7e714b..d36ecb82 100644
--- a/ffmpeg/parse/stats.go
+++ b/ffmpeg/parse/stats.go
@@ -1,11 +1,11 @@
 package parse
 
 type statsData struct {
-    frame uint64
-    packet uint64
-    size uint64 // kbytes
-    dup uint64
-    drop uint64
+    frame uint64 // counter
+    packet uint64 // counter
+    size uint64 // bytes
+    dup uint64 // counter
+    drop uint64 // counter
 }
 
 type stats struct {
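
The parser changes above move all internal size accounting from kbytes to bytes: parseDefaultProgress multiplies ffmpeg's kbyte value by 1024 once, the collector Ingress/Egress calls drop their *1024 because statsData.size is now kept in bytes, and parseFFmpegProgress fills Size from SizeKB only when a byte-exact value is absent. A minimal sketch of that normalization rule, with illustrative standalone types rather than the parser's actual structs:

    package main

    import "fmt"

    type sizes struct {
        SizeKB uint64 // size_kb as reported in the ffmpeg progress line
        Size   uint64 // size_bytes, if the progress report carries it; 0 otherwise
    }

    // normalize returns the size in bytes, preferring the byte-exact field and
    // falling back to kbytes * 1024.
    func normalize(s sizes) uint64 {
        if s.Size == 0 {
            return s.SizeKB * 1024
        }
        return s.Size
    }

    func main() {
        fmt.Println(normalize(sizes{SizeKB: 2048}))                 // 2097152
        fmt.Println(normalize(sizes{SizeKB: 2048, Size: 2097300}))  // 2097300
    }
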
diff --git a/ffmpeg/parse/types.go b/ffmpeg/parse/types.go
index bf031fb0..aa0c33e3 100644
--- a/ffmpeg/parse/types.go
+++ b/ffmpeg/parse/types.go
@@ -44,9 +44,9 @@ func (d *Duration) UnmarshalJSON(b []byte) error {
 
 type ffmpegAVstreamIO struct {
     State string `json:"state"`
-    Packet uint64 `json:"packet"`
+    Packet uint64 `json:"packet"` // counter
     Time uint64 `json:"time"`
-    Size uint64 `json:"size_kb"`
+    Size uint64 `json:"size_kb"` // kbytes
 }
 
 func (avio *ffmpegAVstreamIO) export() app.AVstreamIO {
@@ -54,7 +54,7 @@ func (avio *ffmpegAVstreamIO) export() app.AVstreamIO {
         State: avio.State,
         Packet: avio.Packet,
         Time: avio.Time,
-        Size: avio.Size,
+        Size: avio.Size * 1024,
     }
 }
 
@@ -91,14 +91,17 @@ func (av *ffmpegAVstream) export() *app.AVstream {
 
 type ffmpegProgressIO struct {
     // common
-    Index uint64 `json:"index"`
-    Stream uint64 `json:"stream"`
-    Size uint64 `json:"size_kb"` // kbytes
-    Bitrate float64 `json:"-"` // kbit/s
-    Frame uint64 `json:"frame"`
-    Packet uint64 `json:"packet"`
-    FPS float64 `json:"-"`
-    PPS float64 `json:"-"`
+    Index uint64 `json:"index"`
+    Stream uint64 `json:"stream"`
+    SizeKB uint64 `json:"size_kb"` // kbytes
+    Size uint64 `json:"size_bytes"` // bytes
+    Bitrate float64 `json:"-"` // bit/s
+    Frame uint64 `json:"frame"` // counter
+    Keyframe uint64 `json:"keyframe"` // counter
+    Packet uint64 `json:"packet"` // counter
+    Extradata uint64 `json:"extradata_size_bytes"` // bytes
+    FPS float64 `json:"-"` // rate, frames per second
+    PPS float64 `json:"-"` // rate, packets per second
 
     // video
     Quantizer float64 `json:"q"`
@@ -108,28 +111,36 @@ func (io *ffmpegProgressIO) exportTo(progress *app.ProgressIO) {
     progress.Index = io.Index
     progress.Stream = io.Stream
     progress.Frame = io.Frame
+    progress.Keyframe = io.Keyframe
     progress.Packet = io.Packet
     progress.FPS = io.FPS
     progress.PPS = io.PPS
     progress.Quantizer = io.Quantizer
-    progress.Size = io.Size * 1024
-    progress.Bitrate = io.Bitrate * 1024
+    progress.Bitrate = io.Bitrate
+    progress.Extradata = io.Extradata
+
+    if io.Size == 0 {
+        progress.Size = io.SizeKB * 1024
+    } else {
+        progress.Size = io.Size
+    }
 }
 
 type ffmpegProgress struct {
     Input []ffmpegProgressIO `json:"inputs"`
     Output []ffmpegProgressIO `json:"outputs"`
-    Frame uint64 `json:"frame"`
-    Packet uint64 `json:"packet"`
-    FPS float64 `json:"-"`
-    PPS float64 `json:"-"`
+    Frame uint64 `json:"frame"` // counter
+    Packet uint64 `json:"packet"` // counter
+    FPS float64 `json:"-"` // rate, frames per second
+    PPS float64 `json:"-"` // rate, packets per second
     Quantizer float64 `json:"q"`
-    Size uint64 `json:"size_kb"` // kbytes
-    Bitrate float64 `json:"-"` // kbit/s
+    SizeKB uint64 `json:"size_kb"` // kbytes
+    Size uint64 `json:"size_bytes"` // bytes
+    Bitrate float64 `json:"-"` // bit/s
     Time Duration `json:"time"`
     Speed float64 `json:"speed"`
-    Drop uint64 `json:"drop"`
-    Dup uint64 `json:"dup"`
+    Drop uint64 `json:"drop"` // counter
+    Dup uint64 `json:"dup"` // counter
 }
 
 func (p *ffmpegProgress) exportTo(progress *app.Progress) {
@@ -138,13 +149,18 @@
     progress.FPS = p.FPS
     progress.PPS = p.PPS
     progress.Quantizer = p.Quantizer
-    progress.Size = p.Size * 1024
     progress.Time = p.Time.Seconds()
-    progress.Bitrate = p.Bitrate * 1024
+    progress.Bitrate = p.Bitrate
     progress.Speed = p.Speed
     progress.Drop = p.Drop
     progress.Dup = p.Dup
 
+    if p.Size == 0 {
+        progress.Size = p.SizeKB * 1024
+    } else {
+        progress.Size = p.Size
+    }
+
     for i := range p.Input {
         if len(progress.Input) <= i {
             break
diff --git a/http/api/progress.go b/http/api/progress.go
index a402d55a..1bf22c59 100644
--- a/http/api/progress.go
+++ b/http/api/progress.go
@@ -13,18 +13,20 @@ type ProgressIO struct {
     Address string `json:"address" jsonschema:"minLength=1"`
 
     // General
-    Index uint64 `json:"index" format:"uint64"`
-    Stream uint64 `json:"stream" format:"uint64"`
-    Format string `json:"format"`
-    Type string `json:"type"`
-    Codec string `json:"codec"`
-    Coder string `json:"coder"`
-    Frame uint64 `json:"frame" format:"uint64"`
-    FPS json.Number `json:"fps" swaggertype:"number" jsonschema:"type=number"`
-    Packet uint64 `json:"packet" format:"uint64"`
-    PPS json.Number `json:"pps" swaggertype:"number" jsonschema:"type=number"`
-    Size uint64 `json:"size_kb" format:"uint64"` // kbytes
-    Bitrate json.Number `json:"bitrate_kbit" swaggertype:"number" jsonschema:"type=number"` // kbit/s
+    Index uint64 `json:"index" format:"uint64"`
+    Stream uint64 `json:"stream" format:"uint64"`
+    Format string `json:"format"`
+    Type string `json:"type"`
+    Codec string `json:"codec"`
+    Coder string `json:"coder"`
+    Frame uint64 `json:"frame" format:"uint64"`
+    Keyframe uint64 `json:"keyframe" format:"uint64"`
+    FPS json.Number `json:"fps" swaggertype:"number" jsonschema:"type=number"`
+    Packet uint64 `json:"packet" format:"uint64"`
+    PPS json.Number `json:"pps" swaggertype:"number" jsonschema:"type=number"`
+    Size uint64 `json:"size_kb" format:"uint64"` // kbytes
+    Bitrate json.Number `json:"bitrate_kbit" swaggertype:"number" jsonschema:"type=number"` // kbit/s
+    Extradata uint64 `json:"extradata_size_bytes" format:"uint64"` // bytes
 
     // Video
     Pixfmt string `json:"pix_fmt,omitempty"`
@@ -56,11 +58,13 @@ func (i *ProgressIO) Unmarshal(io *app.ProgressIO) {
     i.Codec = io.Codec
     i.Coder = io.Coder
     i.Frame = io.Frame
+    i.Keyframe = io.Keyframe
     i.FPS = json.Number(fmt.Sprintf("%.3f", io.FPS))
     i.Packet = io.Packet
     i.PPS = json.Number(fmt.Sprintf("%.3f", io.PPS))
     i.Size = io.Size / 1024
     i.Bitrate = json.Number(fmt.Sprintf("%.3f", io.Bitrate/1024))
+    i.Extradata = io.Extradata
     i.Pixfmt = io.Pixfmt
     i.Quantizer = json.Number(fmt.Sprintf("%.3f", io.Quantizer))
     i.Width = io.Width
diff --git a/restream/app/avstream.go b/restream/app/avstream.go
index fcfb8ded..70cf9634 100644
--- a/restream/app/avstream.go
+++ b/restream/app/avstream.go
@@ -2,19 +2,19 @@ package app
 
 type AVstreamIO struct {
     State string
-    Packet uint64
+    Packet uint64 // counter
     Time uint64
-    Size uint64
+    Size uint64 // bytes
 }
 
 type AVstream struct {
     Input AVstreamIO
     Output AVstreamIO
-    Aqueue uint64
-    Queue uint64
-    Dup uint64
-    Drop uint64
-    Enc uint64
+    Aqueue uint64 // gauge
+    Queue uint64 // gauge
+    Dup uint64 // counter
+    Drop uint64 // counter
+    Enc uint64 // counter
     Looping bool
     Duplicating bool
     GOP string
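
On the HTTP API, ProgressIO gains keyframe and extradata_size_bytes, while size_kb and bitrate_kbit keep their old units so existing clients stay compatible. A sketch of a client-side decode of just these fields follows; the struct below models only the fields discussed here and is not the full api.ProgressIO:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    type progressIO struct {
        Frame     uint64  `json:"frame"`
        Keyframe  uint64  `json:"keyframe"`             // new: number of keyframes so far
        Extradata uint64  `json:"extradata_size_bytes"` // new: codec extradata size in bytes
        SizeKB    uint64  `json:"size_kb"`              // unchanged: kbytes
        Bitrate   float64 `json:"bitrate_kbit"`         // unchanged: kbit/s
    }

    func main() {
        raw := []byte(`{"frame":250,"keyframe":10,"extradata_size_bytes":43,"size_kb":1024,"bitrate_kbit":4500.000}`)

        var io progressIO
        if err := json.Unmarshal(raw, &io); err != nil {
            panic(err)
        }

        fmt.Printf("%d keyframes over %d frames, %d bytes of extradata\n", io.Keyframe, io.Frame, io.Extradata)
    }
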
diff --git a/restream/app/progress.go b/restream/app/progress.go
index 7d081d39..c9f1fcd5 100644
--- a/restream/app/progress.go
+++ b/restream/app/progress.go
@@ -5,18 +5,20 @@ type ProgressIO struct {
     Address string
 
     // General
-    Index uint64
-    Stream uint64
-    Format string
-    Type string
-    Codec string
-    Coder string
-    Frame uint64
-    FPS float64
-    Packet uint64
-    PPS float64
-    Size uint64 // bytes
-    Bitrate float64 // bit/s
+    Index uint64
+    Stream uint64
+    Format string
+    Type string
+    Codec string
+    Coder string
+    Frame uint64 // counter
+    Keyframe uint64 // counter
+    FPS float64 // rate, frames per second
+    Packet uint64 // counter
+    PPS float64 // rate, packets per second
+    Size uint64 // bytes
+    Bitrate float64 // bit/s
+    Extradata uint64 // bytes
 
     // Video
     Pixfmt string
@@ -36,15 +38,15 @@ type ProgressIO struct {
 type Progress struct {
     Input []ProgressIO
     Output []ProgressIO
-    Frame uint64
-    Packet uint64
-    FPS float64
-    PPS float64
-    Quantizer float64
-    Size uint64 // bytes
-    Time float64
+    Frame uint64 // counter
+    Packet uint64 // counter
+    FPS float64 // rate, frames per second
+    PPS float64 // rate, packets per second
+    Quantizer float64 // gauge
+    Size uint64 // bytes
+    Time float64 // seconds with fractions
     Bitrate float64 // bit/s
-    Speed float64
-    Drop uint64
-    Dup uint64
+    Speed float64 // gauge
+    Drop uint64 // counter
+    Dup uint64 // counter
 }
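
With the keyframe counter exposed end to end, downstream consumers can derive quantities such as the average keyframe interval without parsing ffmpeg output themselves. A small illustrative helper in Go; the plain struct below stands in for the exported progress data and is not part of the patch:

    package main

    import "fmt"

    type videoProgress struct {
        Frame    uint64  // counter
        Keyframe uint64  // counter
        FPS      float64 // rate, frames per second
    }

    // keyframeInterval reports the average distance between keyframes, in frames
    // and in seconds, guarding against a zero keyframe count.
    func keyframeInterval(p videoProgress) (frames, seconds float64) {
        if p.Keyframe == 0 {
            return 0, 0
        }
        frames = float64(p.Frame) / float64(p.Keyframe)
        if p.FPS > 0 {
            seconds = frames / p.FPS
        }
        return frames, seconds
    }

    func main() {
        f, s := keyframeInterval(videoProgress{Frame: 1500, Keyframe: 60, FPS: 25})
        fmt.Printf("average GOP: %.1f frames (%.2f s)\n", f, s) // average GOP: 25.0 frames (1.00 s)
    }
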