Merge branch 'dev' into vod
@@ -1,4 +1,4 @@
-ARG CORE_IMAGE=datarhei/base:alpine-core-latest
+ARG CORE_IMAGE=core:dev
 
 ARG FFMPEG_IMAGE=datarhei/base:alpine-ffmpeg-latest
 

@@ -11,6 +11,7 @@ import (
     gohttp "net/http"
     "net/url"
     "path/filepath"
+    "runtime"
     "runtime/debug"
     "sync"
     "time"

@@ -1274,6 +1275,7 @@ func (a *api) start() error {
     a.gcTickerStop = cancel
 
     if cfg.Debug.ForceGC > 0 {
+        /*
         go func(ctx context.Context) {
             ticker := time.NewTicker(time.Duration(cfg.Debug.ForceGC) * time.Second)
             defer ticker.Stop()
@@ -1286,6 +1288,21 @@ func (a *api) start() error {
                 }
             }
         }(ctx)
+        */
+        go func(ctx context.Context) {
+            ticker := time.NewTicker(time.Duration(cfg.Debug.ForceGC) * time.Second)
+            defer ticker.Stop()
+            var mem runtime.MemStats
+            for {
+                select {
+                case <-ctx.Done():
+                    return
+                case <-ticker.C:
+                    runtime.ReadMemStats(&mem)
+                    fmt.Printf("mem in use: %.02f MiB\n", float64(mem.HeapInuse)/(1<<20))
+                }
+            }
+        }(ctx)
     }
 
     if cfg.Debug.MemoryLimit > 0 {

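The hunk above comments out the old forced-GC goroutine and instead starts one that only reports heap usage. As a minimal, self-contained sketch of that pattern, assuming nothing beyond the standard library (the function name and main wrapper are illustrative, not part of the commit), the reporter is a context-cancellable ticker that samples runtime.MemStats and converts HeapInuse from bytes to MiB by dividing by 1<<20 (1,048,576):

package main

import (
	"context"
	"fmt"
	"runtime"
	"time"
)

// reportMemory mirrors the goroutine added above: on every tick it samples
// the Go runtime's memory statistics and prints the heap currently in use,
// converted from bytes to MiB.
func reportMemory(ctx context.Context, interval time.Duration) {
	ticker := time.NewTicker(interval)
	defer ticker.Stop()

	var mem runtime.MemStats

	for {
		select {
		case <-ctx.Done():
			return
		case <-ticker.C:
			runtime.ReadMemStats(&mem)
			fmt.Printf("mem in use: %.02f MiB\n", float64(mem.HeapInuse)/(1<<20))
		}
	}
}

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	// 2 seconds here stands in for cfg.Debug.ForceGC seconds.
	reportMemory(ctx, 2*time.Second)
}
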
@@ -150,7 +150,7 @@ func (d *Config) init() {
     // Log
     d.vars.Register(value.NewString(&d.Log.Level, "info"), "log.level", "CORE_LOG_LEVEL", nil, "Loglevel: silent, error, warn, info, debug", false, false)
     d.vars.Register(value.NewStringList(&d.Log.Topics, []string{}, ","), "log.topics", "CORE_LOG_TOPICS", nil, "Show only selected log topics", false, false)
-    d.vars.Register(value.NewInt(&d.Log.MaxLines, 1000), "log.max_lines", "CORE_LOG_MAXLINES", nil, "Number of latest log lines to keep in memory", false, false)
+    d.vars.Register(value.NewInt(&d.Log.MaxLines, 1000), "log.max_lines", "CORE_LOG_MAX_LINES", []string{"CORE_LOG_MAXLINES"}, "Number of latest log lines to keep in memory", false, false)
 
     // DB
     d.vars.Register(value.NewMustDir(&d.DB.Dir, "./config", d.fs), "db.dir", "CORE_DB_DIR", nil, "Directory for holding the operational data", false, false)
@@ -182,19 +182,19 @@ func (d *Config) init() {
     d.vars.Register(value.NewBool(&d.TLS.Enable, false), "tls.enable", "CORE_TLS_ENABLE", nil, "Enable HTTPS", false, false)
     d.vars.Register(value.NewBool(&d.TLS.Auto, false), "tls.auto", "CORE_TLS_AUTO", nil, "Enable Let's Encrypt certificate", false, false)
     d.vars.Register(value.NewEmail(&d.TLS.Email, "cert@datarhei.com"), "tls.email", "CORE_TLS_EMAIL", nil, "Email for Let's Encrypt registration", false, false)
-    d.vars.Register(value.NewFile(&d.TLS.CertFile, "", d.fs), "tls.cert_file", "CORE_TLS_CERTFILE", nil, "Path to certificate file in PEM format", false, false)
-    d.vars.Register(value.NewFile(&d.TLS.KeyFile, "", d.fs), "tls.key_file", "CORE_TLS_KEYFILE", nil, "Path to key file in PEM format", false, false)
+    d.vars.Register(value.NewFile(&d.TLS.CertFile, "", d.fs), "tls.cert_file", "CORE_TLS_CERT_FILE", []string{"CORE_TLS_CERTFILE"}, "Path to certificate file in PEM format", false, false)
+    d.vars.Register(value.NewFile(&d.TLS.KeyFile, "", d.fs), "tls.key_file", "CORE_TLS_KEY_FILE", []string{"CORE_TLS_KEYFILE"}, "Path to key file in PEM format", false, false)
 
     // Storage
     d.vars.Register(value.NewFile(&d.Storage.MimeTypes, "./mime.types", d.fs), "storage.mimetypes_file", "CORE_STORAGE_MIMETYPES_FILE", []string{"CORE_MIMETYPES_FILE"}, "Path to file with mime-types", false, false)
 
     // Storage (Disk)
     d.vars.Register(value.NewMustDir(&d.Storage.Disk.Dir, "./data", d.fs), "storage.disk.dir", "CORE_STORAGE_DISK_DIR", nil, "Directory on disk, exposed on /", false, false)
-    d.vars.Register(value.NewInt64(&d.Storage.Disk.Size, 0), "storage.disk.max_size_mbytes", "CORE_STORAGE_DISK_MAXSIZEMBYTES", nil, "Max. allowed megabytes for storage.disk.dir, 0 for unlimited", false, false)
+    d.vars.Register(value.NewInt64(&d.Storage.Disk.Size, 0), "storage.disk.max_size_mbytes", "CORE_STORAGE_DISK_MAX_SIZE_MBYTES", []string{"CORE_STORAGE_DISK_MAXSIZEMBYTES"}, "Max. allowed megabytes for storage.disk.dir, 0 for unlimited", false, false)
     d.vars.Register(value.NewBool(&d.Storage.Disk.Cache.Enable, true), "storage.disk.cache.enable", "CORE_STORAGE_DISK_CACHE_ENABLE", nil, "Enable cache for /", false, false)
-    d.vars.Register(value.NewUint64(&d.Storage.Disk.Cache.Size, 0), "storage.disk.cache.max_size_mbytes", "CORE_STORAGE_DISK_CACHE_MAXSIZEMBYTES", nil, "Max. allowed cache size, 0 for unlimited", false, false)
-    d.vars.Register(value.NewInt64(&d.Storage.Disk.Cache.TTL, 300), "storage.disk.cache.ttl_seconds", "CORE_STORAGE_DISK_CACHE_TTLSECONDS", nil, "Seconds to keep files in cache", false, false)
-    d.vars.Register(value.NewUint64(&d.Storage.Disk.Cache.FileSize, 1), "storage.disk.cache.max_file_size_mbytes", "CORE_STORAGE_DISK_CACHE_MAXFILESIZEMBYTES", nil, "Max. file size to put in cache", false, false)
+    d.vars.Register(value.NewUint64(&d.Storage.Disk.Cache.Size, 0), "storage.disk.cache.max_size_mbytes", "CORE_STORAGE_DISK_CACHE_MAX_SIZE_MBYTES", []string{"CORE_STORAGE_DISK_CACHE_MAXSIZEMBYTES"}, "Max. allowed cache size, 0 for unlimited", false, false)
+    d.vars.Register(value.NewInt64(&d.Storage.Disk.Cache.TTL, 300), "storage.disk.cache.ttl_seconds", "CORE_STORAGE_DISK_CACHE_TTL_SECONDS", []string{"CORE_STORAGE_DISK_CACHE_TTLSECONDS"}, "Seconds to keep files in cache", false, false)
+    d.vars.Register(value.NewUint64(&d.Storage.Disk.Cache.FileSize, 1), "storage.disk.cache.max_file_size_mbytes", "CORE_STORAGE_DISK_CACHE_MAX_FILE_SIZE_MBYTES", []string{"CORE_STORAGE_DISK_CACHE_MAXFILESIZEMBYTES"}, "Max. file size to put in cache", false, false)
     d.vars.Register(value.NewStringList(&d.Storage.Disk.Cache.Types.Allow, []string{}, " "), "storage.disk.cache.type.allow", "CORE_STORAGE_DISK_CACHE_TYPES_ALLOW", []string{"CORE_STORAGE_DISK_CACHE_TYPES"}, "File extensions to cache, empty for all", false, false)
     d.vars.Register(value.NewStringList(&d.Storage.Disk.Cache.Types.Block, []string{".m3u8", ".mpd"}, " "), "storage.disk.cache.type.block", "CORE_STORAGE_DISK_CACHE_TYPES_BLOCK", nil, "File extensions not to cache, empty for none", false, false)
 
@@ -202,7 +202,7 @@ func (d *Config) init() {
     d.vars.Register(value.NewBool(&d.Storage.Memory.Auth.Enable, true), "storage.memory.auth.enable", "CORE_STORAGE_MEMORY_AUTH_ENABLE", nil, "Enable basic auth for PUT,POST, and DELETE on /memfs", false, false)
     d.vars.Register(value.NewString(&d.Storage.Memory.Auth.Username, "admin"), "storage.memory.auth.username", "CORE_STORAGE_MEMORY_AUTH_USERNAME", nil, "Username for Basic-Auth of /memfs", false, false)
     d.vars.Register(value.NewString(&d.Storage.Memory.Auth.Password, rand.StringAlphanumeric(18)), "storage.memory.auth.password", "CORE_STORAGE_MEMORY_AUTH_PASSWORD", nil, "Password for Basic-Auth of /memfs", false, true)
-    d.vars.Register(value.NewInt64(&d.Storage.Memory.Size, 0), "storage.memory.max_size_mbytes", "CORE_STORAGE_MEMORY_MAXSIZEMBYTES", nil, "Max. allowed megabytes for /memfs, 0 for unlimited", false, false)
+    d.vars.Register(value.NewInt64(&d.Storage.Memory.Size, 0), "storage.memory.max_size_mbytes", "CORE_STORAGE_MEMORY_MAX_SIZE_MBYTES", []string{"CORE_STORAGE_MEMORY_MAXSIZEMBYTES"}, "Max. allowed megabytes for /memfs, 0 for unlimited", false, false)
     d.vars.Register(value.NewBool(&d.Storage.Memory.Purge, false), "storage.memory.purge", "CORE_STORAGE_MEMORY_PURGE", nil, "Automatically remove the oldest files if /memfs is full", false, false)
 
     // Storage (S3)
@@ -234,18 +234,18 @@ func (d *Config) init() {
     d.vars.Register(value.NewStringList(&d.FFmpeg.Access.Input.Block, []string{}, " "), "ffmpeg.access.input.block", "CORE_FFMPEG_ACCESS_INPUT_BLOCK", nil, "List of blocked expression to match against the input addresses", false, false)
     d.vars.Register(value.NewStringList(&d.FFmpeg.Access.Output.Allow, []string{}, " "), "ffmpeg.access.output.allow", "CORE_FFMPEG_ACCESS_OUTPUT_ALLOW", nil, "List of allowed expression to match against the output addresses", false, false)
     d.vars.Register(value.NewStringList(&d.FFmpeg.Access.Output.Block, []string{}, " "), "ffmpeg.access.output.block", "CORE_FFMPEG_ACCESS_OUTPUT_BLOCK", nil, "List of blocked expression to match against the output addresses", false, false)
-    d.vars.Register(value.NewInt(&d.FFmpeg.Log.MaxLines, 50), "ffmpeg.log.max_lines", "CORE_FFMPEG_LOG_MAXLINES", nil, "Number of latest log lines to keep for each process", false, false)
-    d.vars.Register(value.NewInt(&d.FFmpeg.Log.MaxHistory, 3), "ffmpeg.log.max_history", "CORE_FFMPEG_LOG_MAXHISTORY", nil, "Number of latest logs to keep for each process", false, false)
-    d.vars.Register(value.NewInt(&d.FFmpeg.Log.MaxMinimalHistory, 0), "ffmpeg.log.max_minimal_history", "CORE_FFMPEG_LOG_MAXMINIMALHISTORY", nil, "Number of minimal logs to keep for each process on top of max_history", false, false)
+    d.vars.Register(value.NewInt(&d.FFmpeg.Log.MaxLines, 50), "ffmpeg.log.max_lines", "CORE_FFMPEG_LOG_MAX_LINES", []string{"CORE_FFMPEG_LOG_MAXLINES"}, "Number of latest log lines to keep for each process", false, false)
+    d.vars.Register(value.NewInt(&d.FFmpeg.Log.MaxHistory, 3), "ffmpeg.log.max_history", "CORE_FFMPEG_LOG_MAX_HISTORY", []string{"CORE_FFMPEG_LOG_MAXHISTORY"}, "Number of latest logs to keep for each process", false, false)
+    d.vars.Register(value.NewInt(&d.FFmpeg.Log.MaxMinimalHistory, 0), "ffmpeg.log.max_minimal_history", "CORE_FFMPEG_LOG_MAX_MINIMAL_HISTORY", []string{"CORE_FFMPEG_LOG_MAXMINIMALHISTORY"}, "Number of minimal logs to keep for each process on top of max_history", false, false)
 
     // Playout
     d.vars.Register(value.NewBool(&d.Playout.Enable, false), "playout.enable", "CORE_PLAYOUT_ENABLE", nil, "Enable playout proxy where available", false, false)
-    d.vars.Register(value.NewPort(&d.Playout.MinPort, 0), "playout.min_port", "CORE_PLAYOUT_MINPORT", nil, "Min. playout server port", false, false)
-    d.vars.Register(value.NewPort(&d.Playout.MaxPort, 0), "playout.max_port", "CORE_PLAYOUT_MAXPORT", nil, "Max. playout server port", false, false)
+    d.vars.Register(value.NewPort(&d.Playout.MinPort, 0), "playout.min_port", "CORE_PLAYOUT_MIN_PORT", []string{"CORE_PLAYOUT_MINPORT"}, "Min. playout server port", false, false)
+    d.vars.Register(value.NewPort(&d.Playout.MaxPort, 0), "playout.max_port", "CORE_PLAYOUT_MAX_PORT", []string{"CORE_PLAYOUT_MAXPORT"}, "Max. playout server port", false, false)
 
     // Debug
     d.vars.Register(value.NewBool(&d.Debug.Profiling, false), "debug.profiling", "CORE_DEBUG_PROFILING", nil, "Enable profiling endpoint on /profiling", false, false)
-    d.vars.Register(value.NewInt(&d.Debug.ForceGC, 0), "debug.force_gc", "CORE_DEBUG_FORCEGC", nil, "Number of seconds between forcing GC to return memory to the OS", false, false)
+    d.vars.Register(value.NewInt(&d.Debug.ForceGC, 0), "debug.force_gc", "CORE_DEBUG_FORCE_GC", []string{"CORE_DEBUG_FORCEGC"}, "Number of seconds between forcing GC to return memory to the OS", false, false)
     d.vars.Register(value.NewInt64(&d.Debug.MemoryLimit, 0), "debug.memory_limit_mbytes", "CORE_DEBUG_MEMORY_LIMIT_MBYTES", nil, "Impose a soft memory limit for the core, in megabytes", false, false)
 
     // Metrics
@@ -261,7 +261,7 @@ func (d *Config) init() {
     d.vars.Register(value.NewBool(&d.Sessions.Persist, false), "sessions.persist", "CORE_SESSIONS_PERSIST", nil, "Whether to persist session history. Will be stored as sessions.json in db.dir", false, false)
     d.vars.Register(value.NewInt(&d.Sessions.PersistInterval, 300), "sessions.persist_interval_sec", "CORE_SESSIONS_PERSIST_INTERVAL_SEC", nil, "Interval in seconds in which to persist the current session history", false, false)
     d.vars.Register(value.NewUint64(&d.Sessions.MaxBitrate, 0), "sessions.max_bitrate_mbit", "CORE_SESSIONS_MAXBITRATE_MBIT", nil, "Max. allowed outgoing bitrate in mbit/s, 0 for unlimited", false, false)
-    d.vars.Register(value.NewUint64(&d.Sessions.MaxSessions, 0), "sessions.max_sessions", "CORE_SESSIONS_MAXSESSIONS", nil, "Max. allowed number of simultaneous sessions, 0 for unlimited", false, false)
+    d.vars.Register(value.NewUint64(&d.Sessions.MaxSessions, 0), "sessions.max_sessions", "CORE_SESSIONS_MAX_SESSIONS", []string{"CORE_SESSIONS_MAXSESSIONS"}, "Max. allowed number of simultaneous sessions, 0 for unlimited", false, false)
 
     // Service
     d.vars.Register(value.NewBool(&d.Service.Enable, false), "service.enable", "CORE_SERVICE_ENABLE", nil, "Enable connecting to the Restreamer Service", false, false)

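The config hunks above rename several environment variables to a consistent underscore style (e.g. CORE_LOG_MAXLINES becomes CORE_LOG_MAX_LINES) and pass the legacy spellings as a slice of alternative names where the argument was previously nil. The diff does not show how that slice is consumed, so the following is only a sketch of the lookup order such an alias list implies; resolveEnv is a hypothetical helper, not the project's value package:

package main

import (
	"fmt"
	"os"
)

// resolveEnv illustrates the implied precedence: the new variable name wins,
// and the legacy spellings are only consulted when the new one is unset.
func resolveEnv(name string, altNames []string) (string, bool) {
	if v, ok := os.LookupEnv(name); ok {
		return v, true
	}
	for _, alt := range altNames {
		if v, ok := os.LookupEnv(alt); ok {
			return v, true
		}
	}
	return "", false
}

func main() {
	// Only the legacy name is set, e.g. by an existing deployment.
	os.Setenv("CORE_LOG_MAXLINES", "500")

	v, _ := resolveEnv("CORE_LOG_MAX_LINES", []string{"CORE_LOG_MAXLINES"})
	fmt.Println(v) // 500
}
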
@@ -3746,6 +3746,11 @@ const docTemplate = `{
                 "coder": {
                     "type": "string"
                 },
+                "extradata_size_bytes": {
+                    "description": "bytes",
+                    "type": "integer",
+                    "format": "uint64"
+                },
                 "format": {
                     "type": "string"
                 },
@@ -3768,6 +3773,10 @@ const docTemplate = `{
                     "type": "integer",
                     "format": "uint64"
                 },
+                "keyframe": {
+                    "type": "integer",
+                    "format": "uint64"
+                },
                 "layout": {
                     "type": "string"
                 },

@@ -3739,6 +3739,11 @@
                 "coder": {
                     "type": "string"
                 },
+                "extradata_size_bytes": {
+                    "description": "bytes",
+                    "type": "integer",
+                    "format": "uint64"
+                },
                 "format": {
                     "type": "string"
                 },
@@ -3761,6 +3766,10 @@
                     "type": "integer",
                     "format": "uint64"
                 },
+                "keyframe": {
+                    "type": "integer",
+                    "format": "uint64"
+                },
                 "layout": {
                     "type": "string"
                 },

@@ -1000,6 +1000,10 @@ definitions:
        type: string
      coder:
        type: string
+     extradata_size_bytes:
+       description: bytes
+       format: uint64
+       type: integer
      format:
        type: string
      fps:
@@ -1016,6 +1020,9 @@ definitions:
        description: General
        format: uint64
        type: integer
+     keyframe:
+       format: uint64
+       type: integer
      layout:
        type: string
      packet:

@@ -400,7 +400,7 @@ func (p *parser) Parse(line string) uint64 {
 
         if p.collector.IsCollectableIP(p.process.input[i].IP) {
             p.collector.Activate("")
-            p.collector.Ingress("", int64(p.stats.input[i].diff.size)*1024)
+            p.collector.Ingress("", int64(p.stats.input[i].diff.size))
         }
     }
 }
@@ -417,7 +417,7 @@ func (p *parser) Parse(line string) uint64 {
 
         if p.collector.IsCollectableIP(p.process.output[i].IP) {
             p.collector.Activate("")
-            p.collector.Egress("", int64(p.stats.output[i].diff.size)*1024)
+            p.collector.Egress("", int64(p.stats.output[i].diff.size))
         }
     }
 }
@@ -454,7 +454,7 @@ func (p *parser) parseDefaultProgress(line string) error {
 
     if matches = p.re.size.FindStringSubmatch(line); matches != nil {
         if x, err := strconv.ParseUint(matches[1], 10, 64); err == nil {
-            p.progress.ffmpeg.Size = x
+            p.progress.ffmpeg.Size = x * 1024
         }
     }
 
@@ -529,6 +529,26 @@ func (p *parser) parseFFmpegProgress(line string) error {
         return fmt.Errorf("output length mismatch (have: %d, want: %d)", len(progress.Output), len(p.process.output))
     }
 
+    if progress.Size == 0 {
+        progress.Size = progress.SizeKB * 1024
+    }
+
+    for i, io := range progress.Input {
+        if io.Size == 0 {
+            io.Size = io.SizeKB * 1024
+        }
+
+        progress.Input[i].Size = io.Size
+    }
+
+    for i, io := range progress.Output {
+        if io.Size == 0 {
+            io.Size = io.SizeKB * 1024
+        }
+
+        progress.Output[i].Size = io.Size
+    }
+
     p.progress.ffmpeg = progress
 
     return nil

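The last hunk above makes the progress handling tolerant of both report formats: if the ffmpeg progress data carries a size_bytes value it is used as-is, otherwise the byte count is derived from size_kb (1 kbyte = 1024 bytes). A self-contained sketch of that rule, using a hypothetical progressIO type rather than the parser's actual structs:

package main

import "fmt"

// progressIO is a stand-in for the parser's per-stream progress record:
// SizeKB comes from ffmpeg's size_kb field, Size from size_bytes (zero when
// the running ffmpeg does not emit it).
type progressIO struct {
	SizeKB uint64 // kbytes
	Size   uint64 // bytes
}

// normalizeSize applies the fallback from the hunk above: prefer the exact
// byte count, otherwise derive it from the kbyte counter.
func normalizeSize(io *progressIO) {
	if io.Size == 0 {
		io.Size = io.SizeKB * 1024
	}
}

func main() {
	kbOnly := progressIO{SizeKB: 2048}                 // only size_kb reported
	exact := progressIO{SizeKB: 2048, Size: 2097450}   // size_bytes reported

	normalizeSize(&kbOnly)
	normalizeSize(&exact)

	fmt.Println(kbOnly.Size) // 2097152 (2048 * 1024)
	fmt.Println(exact.Size)  // 2097450 (kept as reported)
}
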
@@ -1,11 +1,11 @@
 package parse
 
 type statsData struct {
-    frame uint64
-    packet uint64
-    size uint64 // kbytes
-    dup uint64
-    drop uint64
+    frame uint64 // counter
+    packet uint64 // counter
+    size uint64 // bytes
+    dup uint64 // counter
+    drop uint64 // counter
 }
 
 type stats struct {
@@ -42,9 +42,9 @@ func (d *Duration) UnmarshalJSON(b []byte) error {
 
 type ffmpegAVstreamIO struct {
     State string `json:"state"`
-    Packet uint64 `json:"packet"`
+    Packet uint64 `json:"packet"` // counter
     Time uint64 `json:"time"`
-    Size uint64 `json:"size_kb"`
+    Size uint64 `json:"size_kb"` // kbytes
 }
 
 func (avio *ffmpegAVstreamIO) export() AVstreamIO {
@@ -52,7 +52,7 @@ func (avio *ffmpegAVstreamIO) export() AVstreamIO {
         State: avio.State,
         Packet: avio.Packet,
         Time: avio.Time,
-        Size: avio.Size,
+        Size: avio.Size * 1024,
     }
 }
 

||||||
@@ -91,12 +91,15 @@ type ffmpegProgressIO struct {
|
|||||||
// common
|
// common
|
||||||
Index uint64 `json:"index"`
|
Index uint64 `json:"index"`
|
||||||
Stream uint64 `json:"stream"`
|
Stream uint64 `json:"stream"`
|
||||||
Size uint64 `json:"size_kb"` // kbytes
|
SizeKB uint64 `json:"size_kb"` // kbytes
|
||||||
Bitrate float64 `json:"-"` // kbit/s
|
Size uint64 `json:"size_bytes"` // bytes
|
||||||
Frame uint64 `json:"frame"`
|
Bitrate float64 `json:"-"` // bit/s
|
||||||
Packet uint64 `json:"packet"`
|
Frame uint64 `json:"frame"` // counter
|
||||||
FPS float64 `json:"-"`
|
Keyframe uint64 `json:"keyframe"` // counter
|
||||||
PPS float64 `json:"-"`
|
Packet uint64 `json:"packet"` // counter
|
||||||
|
Extradata uint64 `json:"extradata_size_bytes"` // bytes
|
||||||
|
FPS float64 `json:"-"` // rate, frames per second
|
||||||
|
PPS float64 `json:"-"` // rate, packets per second
|
||||||
|
|
||||||
// video
|
// video
|
||||||
Quantizer float64 `json:"q"`
|
Quantizer float64 `json:"q"`
|
||||||
@@ -106,28 +109,36 @@ func (io *ffmpegProgressIO) exportTo(progress *ProgressIO) {
|
|||||||
progress.Index = io.Index
|
progress.Index = io.Index
|
||||||
progress.Stream = io.Stream
|
progress.Stream = io.Stream
|
||||||
progress.Frame = io.Frame
|
progress.Frame = io.Frame
|
||||||
|
progress.Keyframe = io.Keyframe
|
||||||
progress.Packet = io.Packet
|
progress.Packet = io.Packet
|
||||||
progress.FPS = io.FPS
|
progress.FPS = io.FPS
|
||||||
progress.PPS = io.PPS
|
progress.PPS = io.PPS
|
||||||
progress.Quantizer = io.Quantizer
|
progress.Quantizer = io.Quantizer
|
||||||
progress.Size = io.Size * 1024
|
progress.Bitrate = io.Bitrate
|
||||||
progress.Bitrate = io.Bitrate * 1024
|
progress.Extradata = io.Extradata
|
||||||
|
|
||||||
|
if io.Size == 0 {
|
||||||
|
progress.Size = io.SizeKB * 1024
|
||||||
|
} else {
|
||||||
|
progress.Size = io.Size
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
type ffmpegProgress struct {
|
type ffmpegProgress struct {
|
||||||
Input []ffmpegProgressIO `json:"inputs"`
|
Input []ffmpegProgressIO `json:"inputs"`
|
||||||
Output []ffmpegProgressIO `json:"outputs"`
|
Output []ffmpegProgressIO `json:"outputs"`
|
||||||
Frame uint64 `json:"frame"`
|
Frame uint64 `json:"frame"` // counter
|
||||||
Packet uint64 `json:"packet"`
|
Packet uint64 `json:"packet"` // counter
|
||||||
FPS float64 `json:"-"`
|
FPS float64 `json:"-"` // rate, frames per second
|
||||||
PPS float64 `json:"-"`
|
PPS float64 `json:"-"` // rate, packets per second
|
||||||
Quantizer float64 `json:"q"`
|
Quantizer float64 `json:"q"`
|
||||||
Size uint64 `json:"size_kb"` // kbytes
|
SizeKB uint64 `json:"size_kb"` // kbytes
|
||||||
Bitrate float64 `json:"-"` // kbit/s
|
Size uint64 `json:"size_bytes"` // bytes
|
||||||
|
Bitrate float64 `json:"-"` // bit/s
|
||||||
Time Duration `json:"time"`
|
Time Duration `json:"time"`
|
||||||
Speed float64 `json:"speed"`
|
Speed float64 `json:"speed"`
|
||||||
Drop uint64 `json:"drop"`
|
Drop uint64 `json:"drop"` // counter
|
||||||
Dup uint64 `json:"dup"`
|
Dup uint64 `json:"dup"` // counter
|
||||||
}
|
}
|
||||||
|
|
||||||
func (p *ffmpegProgress) exportTo(progress *Progress) {
|
func (p *ffmpegProgress) exportTo(progress *Progress) {
|
||||||
@@ -136,13 +147,18 @@ func (p *ffmpegProgress) exportTo(progress *Progress) {
|
|||||||
progress.FPS = p.FPS
|
progress.FPS = p.FPS
|
||||||
progress.PPS = p.PPS
|
progress.PPS = p.PPS
|
||||||
progress.Quantizer = p.Quantizer
|
progress.Quantizer = p.Quantizer
|
||||||
progress.Size = p.Size * 1024
|
|
||||||
progress.Time = p.Time.Seconds()
|
progress.Time = p.Time.Seconds()
|
||||||
progress.Bitrate = p.Bitrate * 1024
|
progress.Bitrate = p.Bitrate
|
||||||
progress.Speed = p.Speed
|
progress.Speed = p.Speed
|
||||||
progress.Drop = p.Drop
|
progress.Drop = p.Drop
|
||||||
progress.Dup = p.Dup
|
progress.Dup = p.Dup
|
||||||
|
|
||||||
|
if p.Size == 0 {
|
||||||
|
progress.Size = p.SizeKB * 1024
|
||||||
|
} else {
|
||||||
|
progress.Size = p.Size
|
||||||
|
}
|
||||||
|
|
||||||
for i := range p.Input {
|
for i := range p.Input {
|
||||||
if len(progress.Input) <= i {
|
if len(progress.Input) <= i {
|
||||||
break
|
break
|
||||||
|
@@ -20,11 +20,13 @@ type ProgressIO struct {
     Codec string `json:"codec"`
     Coder string `json:"coder"`
     Frame uint64 `json:"frame" format:"uint64"`
+    Keyframe uint64 `json:"keyframe" format:"uint64"`
     FPS json.Number `json:"fps" swaggertype:"number" jsonschema:"type=number"`
     Packet uint64 `json:"packet" format:"uint64"`
     PPS json.Number `json:"pps" swaggertype:"number" jsonschema:"type=number"`
     Size uint64 `json:"size_kb" format:"uint64"` // kbytes
     Bitrate json.Number `json:"bitrate_kbit" swaggertype:"number" jsonschema:"type=number"` // kbit/s
+    Extradata uint64 `json:"extradata_size_bytes" format:"uint64"` // bytes
 
     // Video
     Pixfmt string `json:"pix_fmt,omitempty"`
@@ -56,11 +58,13 @@ func (i *ProgressIO) Unmarshal(io *app.ProgressIO) {
     i.Codec = io.Codec
     i.Coder = io.Coder
     i.Frame = io.Frame
+    i.Keyframe = io.Keyframe
     i.FPS = json.Number(fmt.Sprintf("%.3f", io.FPS))
     i.Packet = io.Packet
     i.PPS = json.Number(fmt.Sprintf("%.3f", io.PPS))
     i.Size = io.Size / 1024
     i.Bitrate = json.Number(fmt.Sprintf("%.3f", io.Bitrate/1024))
+    i.Extradata = io.Extradata
     i.Pixfmt = io.Pixfmt
     i.Quantizer = json.Number(fmt.Sprintf("%.3f", io.Quantizer))
     i.Width = io.Width

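The API-facing ProgressIO above keeps its historical size_kb and bitrate_kbit fields, while the internal record (see the app.ProgressIO hunks below) carries bytes and bit/s, so the division by 1024 in Unmarshal is the single conversion point. A small sketch of that boundary, using hypothetical struct names rather than the real app/api types:

package main

import "fmt"

// appProgressIO stands in for the internal progress record, which stores
// sizes in bytes and bitrates in bit/s.
type appProgressIO struct {
	Size    uint64  // bytes
	Bitrate float64 // bit/s
}

// apiProgressIO stands in for the REST representation, which keeps the
// historical kbyte/kbit units.
type apiProgressIO struct {
	SizeKB      uint64  // size_kb
	BitrateKbit float64 // bitrate_kbit
}

// toAPI converts once at the API boundary, mirroring the Unmarshal hunk
// above: bytes -> kbytes and bit/s -> kbit/s, both by dividing by 1024.
func toAPI(in appProgressIO) apiProgressIO {
	return apiProgressIO{
		SizeKB:      in.Size / 1024,
		BitrateKbit: in.Bitrate / 1024,
	}
}

func main() {
	internal := appProgressIO{Size: 3 * 1024 * 1024, Bitrate: 2048000}
	fmt.Printf("%+v\n", toAPI(internal)) // {SizeKB:3072 BitrateKbit:2000}
}
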
@@ -2,19 +2,19 @@ package app
 
 type AVstreamIO struct {
     State string
-    Packet uint64
+    Packet uint64 // counter
     Time uint64
-    Size uint64
+    Size uint64 // bytes
 }
 
 type AVstream struct {
     Input AVstreamIO
     Output AVstreamIO
-    Aqueue uint64
-    Queue uint64
-    Dup uint64
-    Drop uint64
-    Enc uint64
+    Aqueue uint64 // gauge
+    Queue uint64 // gauge
+    Dup uint64 // counter
+    Drop uint64 // counter
+    Enc uint64 // counter
     Looping bool
     Duplicating bool
     GOP string

@@ -11,12 +11,14 @@ type ProgressIO struct {
     Type string
     Codec string
     Coder string
-    Frame uint64
-    FPS float64
-    Packet uint64
-    PPS float64
+    Frame uint64 // counter
+    Keyframe uint64 // counter
+    FPS float64 // rate, frames per second
+    Packet uint64 // counter
+    PPS float64 // rate, packets per second
     Size uint64 // bytes
     Bitrate float64 // bit/s
+    Extradata uint64 // bytes
 
     // Video
     Pixfmt string
@@ -36,15 +38,15 @@ type ProgressIO struct {
 type Progress struct {
     Input []ProgressIO
     Output []ProgressIO
-    Frame uint64
-    Packet uint64
-    FPS float64
-    PPS float64
-    Quantizer float64
+    Frame uint64 // counter
+    Packet uint64 // counter
+    FPS float64 // rate, frames per second
+    PPS float64 // rate, packets per second
+    Quantizer float64 // gauge
     Size uint64 // bytes
-    Time float64
+    Time float64 // seconds with fractions
     Bitrate float64 // bit/s
-    Speed float64
-    Drop uint64
-    Dup uint64
+    Speed float64 // gauge
+    Drop uint64 // counter
+    Dup uint64 // counter
 }