Allow importing the report history for a process

This commit is contained in:
Ingo Oppermann
2024-07-10 16:46:49 +02:00
parent 480dbb7f53
commit 7e90bb87ce
39 changed files with 2488 additions and 573 deletions

View File

@@ -1,8 +1,6 @@
package api
import (
"fmt"
"github.com/datarhei/core/v16/encoding/json"
"github.com/datarhei/core/v16/restream/app"
)
@@ -50,7 +48,7 @@ type ProgressIO struct {
AVstream *AVstream `json:"avstream" jsonschema:"anyof_type=null;object"`
}
// Unmarshal converts a restreamer ProgressIO to a ProgressIO in API representation
// Unmarshal converts a core ProgressIO to a ProgressIO in API representation
// Unmarshal converts a core ProgressIO to a ProgressIO in API representation.
//
// NOTE(review): this span is a commit-diff rendering, not clean source — the
// pre-change fmt.Sprintf lines and their json.ToNumber replacements both
// appear below, separated by "@@" hunk markers. Only the json.ToNumber
// variants exist after this commit.
func (i *ProgressIO) Unmarshal(io *app.ProgressIO) {
// A nil source leaves the receiver unchanged.
if io == nil {
return
}
@@ -66,17 +64,17 @@ func (i *ProgressIO) Unmarshal(io *app.ProgressIO) {
i.Coder = io.Coder
i.Frame = io.Frame
i.Keyframe = io.Keyframe
// Old formatting via fmt.Sprintf("%.3f", ...) — removed by this commit:
i.Framerate.Min = json.Number(fmt.Sprintf("%.3f", io.Framerate.Min))
i.Framerate.Max = json.Number(fmt.Sprintf("%.3f", io.Framerate.Max))
i.Framerate.Average = json.Number(fmt.Sprintf("%.3f", io.Framerate.Average))
i.FPS = json.Number(fmt.Sprintf("%.3f", io.FPS))
// New conversion via the project's json.ToNumber helper — added by this commit:
i.Framerate.Min = json.ToNumber(io.Framerate.Min)
i.Framerate.Max = json.ToNumber(io.Framerate.Max)
i.Framerate.Average = json.ToNumber(io.Framerate.Average)
i.FPS = json.ToNumber(io.FPS)
i.Packet = io.Packet
i.PPS = json.Number(fmt.Sprintf("%.3f", io.PPS))
i.PPS = json.ToNumber(io.PPS)
// Size is scaled down by 1024 here and scaled back up in Marshal
// (bytes -> kbytes, presumably — confirm against app.ProgressIO docs).
i.Size = io.Size / 1024
i.Bitrate = json.Number(fmt.Sprintf("%.3f", io.Bitrate/1024))
i.Bitrate = json.ToNumber(io.Bitrate / 1024)
i.Extradata = io.Extradata
i.Pixfmt = io.Pixfmt
i.Quantizer = json.Number(fmt.Sprintf("%.3f", io.Quantizer))
i.Quantizer = json.ToNumber(io.Quantizer)
i.Width = io.Width
i.Height = io.Height
i.Sampling = io.Sampling
@@ -89,6 +87,64 @@ func (i *ProgressIO) Unmarshal(io *app.ProgressIO) {
}
}
// Marshal converts a ProgressIO in API representation back into a core
// app.ProgressIO. It is the inverse of Unmarshal: sizes are scaled back up
// by 1024 and json.Number fields are parsed into float64.
func (i *ProgressIO) Marshal() app.ProgressIO {
	// A json.Number that fails to parse maps to 0, matching the
	// zero-initialized fields of the struct literal in the original code.
	num := func(n json.Number) float64 {
		x, err := n.Float64()
		if err != nil {
			return 0
		}
		return x
	}

	p := app.ProgressIO{
		ID:        i.ID,
		Address:   i.Address,
		Index:     i.Index,
		Stream:    i.Stream,
		Format:    i.Format,
		Type:      i.Type,
		Codec:     i.Codec,
		Coder:     i.Coder,
		Frame:     i.Frame,
		Keyframe:  i.Keyframe,
		Packet:    i.Packet,
		Size:      i.Size * 1024, // undo the /1024 applied in Unmarshal
		Extradata: i.Extradata,
		Pixfmt:    i.Pixfmt,
		Width:     i.Width,
		Height:    i.Height,
		Sampling:  i.Sampling,
		Layout:    i.Layout,
		Channels:  i.Channels,
		FPS:       num(i.FPS),
		PPS:       num(i.PPS),
		Bitrate:   num(i.Bitrate) * 1024, // undo the /1024 applied in Unmarshal
		Quantizer: num(i.Quantizer),
	}

	p.Framerate.Min = num(i.Framerate.Min)
	p.Framerate.Max = num(i.Framerate.Max)
	p.Framerate.Average = num(i.Framerate.Average)

	if i.AVstream != nil {
		p.AVstream = i.AVstream.Marshal()
	}

	return p
}
// Progress represents the progress of an ffmpeg process
type Progress struct {
Started bool `json:"started"`
@@ -107,38 +163,82 @@ type Progress struct {
Dup uint64 `json:"dup" format:"uint64"`
}
// Unmarshal converts a restreamer Progress to a Progress in API representation
// NOTE(review): diff artifact — the pre-change version of this function
// (receiver `progress`, argument `p`) and its post-change replacement
// (receiver `p`, argument `pp`) are interleaved line by line below. After
// this commit only the `p`/`pp` variant exists.
func (progress *Progress) Unmarshal(p *app.Progress) {
progress.Input = []ProgressIO{}
progress.Output = []ProgressIO{}
// Unmarshal converts a core Progress to a Progress in API representation
func (p *Progress) Unmarshal(pp *app.Progress) {
// Always reset the slices so a nil source yields empty (not null) JSON arrays.
p.Input = []ProgressIO{}
p.Output = []ProgressIO{}
if p == nil {
if pp == nil {
return
}
progress.Started = p.Started
progress.Input = make([]ProgressIO, len(p.Input))
progress.Output = make([]ProgressIO, len(p.Output))
progress.Frame = p.Frame
progress.Packet = p.Packet
progress.FPS = ToNumber(p.FPS)
progress.Quantizer = ToNumber(p.Quantizer)
progress.Size = p.Size / 1024
progress.Time = ToNumber(p.Time)
progress.Bitrate = ToNumber(p.Bitrate / 1024)
progress.Speed = ToNumber(p.Speed)
progress.Drop = p.Drop
progress.Dup = p.Dup
p.Started = pp.Started
p.Input = make([]ProgressIO, len(pp.Input))
p.Output = make([]ProgressIO, len(pp.Output))
p.Frame = pp.Frame
p.Packet = pp.Packet
p.FPS = json.ToNumber(pp.FPS)
p.Quantizer = json.ToNumber(pp.Quantizer)
// Size and Bitrate are scaled by 1024; Marshal reverses this.
p.Size = pp.Size / 1024
p.Time = json.ToNumber(pp.Time)
p.Bitrate = json.ToNumber(pp.Bitrate / 1024)
p.Speed = json.ToNumber(pp.Speed)
p.Drop = pp.Drop
p.Dup = pp.Dup
for i, io := range p.Input {
progress.Input[i].Unmarshal(&io)
for i, io := range pp.Input {
p.Input[i].Unmarshal(&io)
}
for i, io := range p.Output {
progress.Output[i].Unmarshal(&io)
for i, io := range pp.Output {
p.Output[i].Unmarshal(&io)
}
progress.Mapping.Unmarshal(&p.Mapping)
p.Mapping.Unmarshal(&pp.Mapping)
}
// Marshal converts a Progress in API representation back into a core
// app.Progress, parsing json.Number fields and undoing the /1024 scaling
// that Unmarshal applied to Size and Bitrate.
func (p *Progress) Marshal() app.Progress {
	// Unparseable numbers fall back to 0, exactly like the original's
	// "assign only when Float64 succeeds" pattern over zero-valued fields.
	num := func(n json.Number) float64 {
		x, err := n.Float64()
		if err != nil {
			return 0
		}
		return x
	}

	pp := app.Progress{
		Started:   p.Started,
		Input:     make([]app.ProgressIO, len(p.Input)),
		Output:    make([]app.ProgressIO, len(p.Output)),
		Mapping:   p.Mapping.Marshal(),
		Frame:     p.Frame,
		Packet:    p.Packet,
		Size:      p.Size * 1024,
		Drop:      p.Drop,
		Dup:       p.Dup,
		FPS:       num(p.FPS),
		Quantizer: num(p.Quantizer),
		Time:      num(p.Time),
		Bitrate:   num(p.Bitrate) * 1024,
		Speed:     num(p.Speed),
	}

	for idx := range p.Input {
		pp.Input[idx] = p.Input[idx].Marshal()
	}

	for idx := range p.Output {
		pp.Output[idx] = p.Output[idx].Marshal()
	}

	return pp
}
type GraphElement struct {
@@ -158,6 +258,44 @@ type GraphElement struct {
Height uint64 `json:"height"`
}
// Unmarshal converts a core GraphElement to a GraphElement in API
// representation by copying every field one-to-one.
func (g *GraphElement) Unmarshal(a *app.GraphElement) {
	// Graph topology.
	g.Index, g.Name, g.Filter = a.Index, a.Name, a.Filter
	g.DstName, g.DstFilter = a.DstName, a.DstFilter
	g.Inpad, g.Outpad = a.Inpad, a.Outpad

	// Media properties.
	g.Timebase, g.Type, g.Format = a.Timebase, a.Type, a.Format
	g.Sampling, g.Layout = a.Sampling, a.Layout
	g.Width, g.Height = a.Width, a.Height
}
// Marshal converts a GraphElement in API representation to a core
// app.GraphElement; it is a straight field-for-field copy.
func (g *GraphElement) Marshal() app.GraphElement {
	return app.GraphElement{
		Index:     g.Index,
		Name:      g.Name,
		Filter:    g.Filter,
		DstName:   g.DstName,
		DstFilter: g.DstFilter,
		Inpad:     g.Inpad,
		Outpad:    g.Outpad,
		Timebase:  g.Timebase,
		Type:      g.Type,
		Format:    g.Format,
		Sampling:  g.Sampling,
		Layout:    g.Layout,
		Width:     g.Width,
		Height:    g.Height,
	}
}
type GraphMapping struct {
Input int `json:"input"`
Output int `json:"output"`
@@ -166,45 +304,57 @@ type GraphMapping struct {
Copy bool `json:"copy"`
}
// Unmarshal converts a core GraphMapping to a GraphMapping in API
// representation by copying every field one-to-one.
func (g *GraphMapping) Unmarshal(a *app.GraphMapping) {
	g.Input, g.Output = a.Input, a.Output
	g.Index, g.Name, g.Copy = a.Index, a.Name, a.Copy
}
// Marshal converts a GraphMapping in API representation to a core
// app.GraphMapping; it is a straight field-for-field copy.
func (g *GraphMapping) Marshal() app.GraphMapping {
	return app.GraphMapping{
		Input:  g.Input,
		Output: g.Output,
		Index:  g.Index,
		Name:   g.Name,
		Copy:   g.Copy,
	}
}
type StreamMapping struct {
Graphs []GraphElement `json:"graphs"`
Mapping []GraphMapping `json:"mapping"`
}
// Unmarshal converts a restreamer StreamMapping to a StreamMapping in API representation
// Unmarshal converts a core StreamMapping to a StreamMapping in API representation
// NOTE(review): diff artifact — the pre-change version (inline struct
// literals appended to 0-capacity slices) and its post-change replacement
// (pre-sized slices delegating to GraphElement.Unmarshal /
// GraphMapping.Unmarshal) are interleaved below. Only the delegating
// variant exists after this commit.
func (s *StreamMapping) Unmarshal(m *app.StreamMapping) {
s.Graphs = make([]GraphElement, 0, len(m.Graphs))
for _, mge := range m.Graphs {
ge := GraphElement{
Index: mge.Index,
Name: mge.Name,
Filter: mge.Filter,
DstName: mge.DstName,
DstFilter: mge.DstFilter,
Inpad: mge.Inpad,
Outpad: mge.Outpad,
Timebase: mge.Timebase,
Type: mge.Type,
Format: mge.Format,
Sampling: mge.Sampling,
Layout: mge.Layout,
Width: mge.Width,
Height: mge.Height,
}
s.Graphs = append(s.Graphs, ge)
s.Graphs = make([]GraphElement, len(m.Graphs))
for i, graph := range m.Graphs {
s.Graphs[i].Unmarshal(&graph)
}
s.Mapping = make([]GraphMapping, 0, len(m.Mapping))
for _, mmapping := range m.Mapping {
mapping := GraphMapping{
Input: mmapping.Input,
Output: mmapping.Output,
Index: mmapping.Index,
Name: mmapping.Name,
Copy: mmapping.Copy,
}
s.Mapping = append(s.Mapping, mapping)
s.Mapping = make([]GraphMapping, len(m.Mapping))
for i, mapping := range m.Mapping {
s.Mapping[i].Unmarshal(&mapping)
}
}
// Marshal converts a StreamMapping in API representation to a core
// app.StreamMapping, delegating each element to its own Marshal method.
func (s *StreamMapping) Marshal() app.StreamMapping {
	m := app.StreamMapping{
		Graphs:  make([]app.GraphElement, len(s.Graphs)),
		Mapping: make([]app.GraphMapping, len(s.Mapping)),
	}

	for i := range s.Graphs {
		m.Graphs[i] = s.Graphs[i].Marshal()
	}

	for i := range s.Mapping {
		m.Mapping[i] = s.Mapping[i].Marshal()
	}

	return m
}