start GstVideo bindings, GstMapInfo improvements/fixes, clean up examples

This commit is contained in:
tinyzimmer
2020-10-07 11:49:05 +03:00
parent 114637db87
commit 878e74bc27
21 changed files with 1863 additions and 111 deletions

View File

@@ -22,24 +22,23 @@ jobs:
- name: Check out code into the Go module directory
uses: actions/checkout@v2
# - uses: actions/cache@v1
# with:
# path: ~/go/pkg/mod
# key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}
# restore-keys: |
# ${{ runner.os }}-go-
- uses: actions/cache@v1
with:
path: ~/go/pkg/mod
key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }}
restore-keys: |
${{ runner.os }}-go-
# - name: Get dependencies
# run: |
# go mod download
- name: Get dependencies
run: |
go mod download
# - name: Install gstreamer dependencies
# run: |
# sudo apt-get update && sudo DEBIAN_FRONTEND=noninteractive apt-get install -y \
# libgstreamer1.0-0 libgstreamer1.0-dev \
# libgstreamer-plugins-base1.0-0 libgstreamer-plugins-base1.0-dev \
# pkg-config build-essential
- name: Install gstreamer dependencies
run: |
sudo apt-get update && sudo DEBIAN_FRONTEND=noninteractive apt-get install -y \
libgstreamer1.0-0 libgstreamer1.0-dev libgstreamer-plugins-base1.0-0 libgstreamer-plugins-base1.0-dev \
pkg-config build-essential
- name: Lint all packages
run: |
make docker-lint
make lint

View File

@@ -2,17 +2,20 @@
package main
import (
"bytes"
"encoding/binary"
"fmt"
"math"
"image"
"image/color"
"time"
"github.com/tinyzimmer/go-gst/examples"
"github.com/tinyzimmer/go-gst/gst"
"github.com/tinyzimmer/go-gst/gst/app"
"github.com/tinyzimmer/go-gst/gst/video"
)
const width = 320
const height = 240
func createPipeline() (*gst.Pipeline, error) {
gst.Init(nil)
@@ -23,7 +26,7 @@ func createPipeline() (*gst.Pipeline, error) {
}
// Create the elements
elems, err := gst.NewElementMany("appsrc", "autoaudiosink")
elems, err := gst.NewElementMany("appsrc", "videoconvert", "autovideosink")
if err != nil {
return nil, err
}
@@ -40,30 +43,54 @@ func createPipeline() (*gst.Pipeline, error) {
"audio/x-raw, format=S16LE, layout=interleaved, channels=1, rate=44100",
))
// Add a callback for whene the sink requests a sample
i := 1
// Specify the format we want to provide as application into the pipeline
// by creating a video info with the given format and creating caps from it for the appsrc element.
videoInfo := video.NewInfo().
WithFormat(video.FormatRGBx, width, height).
WithFPS(gst.Fraction(2, 1))
src.SetCaps(videoInfo.ToCaps())
src.SetProperty("format", gst.FormatTime)
// Initialize a frame counter
var i int
// Since our appsrc element operates in pull mode (it asks us to provide data),
// we add a handler for the need-data callback and provide new data from there.
// In our case, we told gstreamer that we do 2 frames per second. While the
// buffers of all elements of the pipeline are still empty, this will be called
// a couple of times until all of them are filled. After this initial period,
// this handler will be called (on average) twice per second.
src.SetCallbacks(&app.SourceCallbacks{
NeedDataFunc: func(src *app.Source, _ uint) {
// Stop after 10 samples
if i == 10 {
NeedDataFunc: func(self *app.Source, _ uint) {
if i == 100 {
src.EndStream()
return
}
fmt.Println("Producing sample", i)
fmt.Println("Producing frame:", i)
sinWave := newSinWave(44100, 440.0, 1.0, time.Second)
// Produce an image frame for this iteration.
pixels := produceImageFrame(i)
// Allocate a new buffer with the sin wave
buffer := gst.NewBufferFromBytes(sinWave)
// Create a buffer that can hold exactly one video RGBx frame.
buf := gst.NewBufferWithSize(videoInfo.Size())
// Set the presentation timestamp on thee buffer
pts := time.Second * time.Duration(i)
buffer.SetPresentationTimestamp(pts)
buffer.SetDuration(time.Second)
// For each frame we produce, we set the timestamp when it should be displayed
// The autovideosink will use this information to display the frame at the right time.
buf.SetPresentationTimestamp(time.Duration(i) * 500 * time.Millisecond)
// Push tehe buffer onto the src
src.PushBuffer(buffer)
// At this point, buffer is only a reference to an existing memory region somewhere.
// When we want to access its content, we have to map it while requesting the required
// mode of access (read, read/write).
// See: https://gstreamer.freedesktop.org/documentation/plugin-development/advanced/allocation.html
//
// There are convenience wrappers for building buffers directly from byte sequences as
// well.
buf.Map(gst.MapWrite).WriteData(pixels)
// Push the buffer onto the pipeline.
self.PushBuffer(buf)
i++
},
@@ -72,14 +99,39 @@ func createPipeline() (*gst.Pipeline, error) {
return pipeline, nil
}
func newSinWave(sampleRate int64, freq, vol float64, duration time.Duration) []byte {
numSamples := duration.Milliseconds() * (sampleRate / 1000.0)
buf := new(bytes.Buffer)
for i := int64(0); i < numSamples; i++ {
data := vol * math.Sin(2.0*math.Pi*freq*(1/float64(sampleRate)))
binary.Write(buf, binary.LittleEndian, data)
func produceImageFrame(i int) []uint8 {
upLeft := image.Point{0, 0}
lowRight := image.Point{width, height}
img := image.NewRGBA(image.Rectangle{upLeft, lowRight})
c := getColor(i)
for x := 0; x < width; x++ {
for y := 0; y < height; y++ {
img.Set(x, y, c)
}
return buf.Bytes()
}
return img.Pix
}
func getColor(i int) color.Color {
color := color.RGBA{}
if i%2 == 0 {
color.R = 0
} else {
color.R = 255
}
if i%3 == 0 {
color.G = 0
} else {
color.G = 255
}
if i%5 == 0 {
color.B = 0
} else {
color.B = 255
}
return color
}
func handleMessage(msg *gst.Message) error {
@@ -89,7 +141,11 @@ func handleMessage(msg *gst.Message) error {
case gst.MessageEOS:
return app.ErrEOS
case gst.MessageError:
return msg.ParseError()
gerr := msg.ParseError()
if debug := gerr.DebugString(); debug != "" {
fmt.Println(debug)
}
return gerr
}
return nil

View File

@@ -60,7 +60,6 @@ func padProbes(mainLoop *gst.MainLoop) error {
// So mapping the buffer makes the underlying memory region accessible to us.
// See: https://gstreamer.freedesktop.org/documentation/plugin-development/advanced/allocation.html
mapInfo := buffer.Map(gst.MapRead)
defer mapInfo.Unmap()
// We know what format the data in the memory region has, since we requested
// it by setting the fakesink's caps. So what we do here is interpret the

View File

@@ -5,6 +5,22 @@ import "C"
import "unsafe"
// GFraction is a helper structure for building fractions for functions that require them.
type GFraction struct {
num, denom int
}
// Fraction returns a new GFraction with the given numerator and denominator.
func Fraction(numerator, denominator int) GFraction {
return GFraction{num: numerator, denom: denominator}
}
// Num returns the fraction's numerator.
func (g GFraction) Num() int { return g.num }
// Denom returns the fraction's denominator.
func (g GFraction) Denom() int { return g.denom }
// ClockTime is a go representation of a GstClockTime. Most of the time these are casted
// to time.Duration objects. It represents a time value in nanoseconds.
type ClockTime uint64

View File

@@ -14,6 +14,8 @@ gboolean cgoBufferMetaForEachCb (GstBuffer * buffer, GstMeta ** meta, gpointer u
{
return goBufferMetaForEachCb(buffer, meta, user_data);
}
gboolean isBuffer (GstBuffer * buffer) { return GST_IS_BUFFER(buffer); }
*/
import "C"
@@ -41,6 +43,12 @@ func NewEmptyBuffer() *Buffer {
return wrapBuffer(C.gst_buffer_new())
}
// NewBufferWithSize is a convenience wrapper for NewBufferAllocate with the default allocator
// and parameters.
func NewBufferWithSize(size int64) *Buffer {
return NewBufferAllocate(nil, nil, size)
}
// NewBufferAllocate tries to create a newly allocated buffer with data of the given size
// and extra parameters from allocator. If the requested amount of memory can't be allocated,
// nil will be returned. The allocated buffer memory is not cleared.
@@ -53,7 +61,11 @@ func NewBufferAllocate(alloc *Allocator, params *AllocationParams, size int64) *
if alloc != nil {
gstalloc = alloc.Instance()
}
buf := C.gst_buffer_new_allocate(gstalloc, C.gsize(size), params.Instance())
var gstparams *C.GstAllocationParams
if params != nil {
gstparams = params.Instance()
}
buf := C.gst_buffer_new_allocate(gstalloc, C.gsize(size), gstparams)
if buf == nil {
return nil
}
@@ -135,7 +147,6 @@ func (b *Buffer) Bytes() []byte {
if mapInfo.ptr == nil {
return nil
}
defer mapInfo.Unmap()
return mapInfo.Bytes()
}
@@ -604,7 +615,7 @@ func (b *Buffer) IterateMetaFiltered(meta *Meta, apiType glib.Type) *Meta {
return wrapMeta(C.gst_buffer_iterate_meta_filtered(b.Instance(), (*C.gpointer)(&ptr), C.GType(apiType)))
}
// Map will map the data inside this buffer. Unmap after usage.
// Map will map the data inside this buffer.
func (b *Buffer) Map(flags MapFlags) *MapInfo {
var mapInfo C.GstMapInfo
C.gst_buffer_map(
@@ -613,7 +624,10 @@ func (b *Buffer) Map(flags MapFlags) *MapInfo {
C.GstMapFlags(flags),
)
return wrapMapInfo(&mapInfo, func() {
C.gst_buffer_unmap(b.Instance(), (*C.GstMapInfo)(unsafe.Pointer(&mapInfo)))
// This may not be necessary
// if gobool(C.isBuffer(b.Instance())) {
// C.gst_buffer_unmap(b.Instance(), (*C.GstMapInfo)(unsafe.Pointer(&mapInfo)))
// }
})
}
@@ -726,7 +740,9 @@ func (b *Buffer) SetFlags(flags BufferFlags) bool {
}
// SetSize sets the total size of the memory blocks in buffer.
func (b *Buffer) SetSize(size int64) { C.gst_buffer_set_size(b.Instance(), C.gssize(size)) }
func (b *Buffer) SetSize(size int64) {
C.gst_buffer_set_size(b.Instance(), C.gssize(size))
}
// UnsetFlags removes one or more flags from the buffer.
func (b *Buffer) UnsetFlags(flags BufferFlags) bool {

View File

@@ -108,11 +108,11 @@ func (e *Event) ParseGap() (timestamp, duration time.Duration) {
}
// ParseGapFlags retrieves the gap flags that may have been set on a gap event with SetGapFlags.
func (e *Event) ParseGapFlags() GapFlags {
var out C.GstGapFlags
C.gst_event_parse_gap_flags(e.Instance(), &out)
return GapFlags(out)
}
// func (e *Event) ParseGapFlags() GapFlags {
// var out C.GstGapFlags
// C.gst_event_parse_gap_flags(e.Instance(), &out)
// return GapFlags(out)
// }
// ParseGroupID returns a group ID if set on the event.
func (e *Event) ParseGroupID() (ok bool, gid uint) {
@@ -293,9 +293,9 @@ func (e *Event) Ref() *Event {
}
// SetGapFlags sets flags on event to give additional information about the reason for the GST_EVENT_GAP.
func (e *Event) SetGapFlags(flags GapFlags) {
C.gst_event_set_gap_flags(e.Instance(), C.GstGapFlags(flags))
}
// func (e *Event) SetGapFlags(flags GapFlags) {
// C.gst_event_set_gap_flags(e.Instance(), C.GstGapFlags(flags))
// }
// NextGroupID returns a new group id that can be used for an event.
func NextGroupID() uint {

View File

@@ -1,18 +1,25 @@
package gst
// #include "gst.go.h"
/*
#include "gst.go.h"
void writeMapData (GstMapInfo * mapInfo, gint idx, guint8 data) { mapInfo->data[idx] = data; }
*/
import "C"
import (
"fmt"
"encoding/binary"
"runtime"
"unsafe"
"github.com/gotk3/gotk3/glib"
)
// Memory is a go representation of GstMemory. This object is implemented
// in a read-only fashion currently. You can create new memory blocks, but
// there are no methods implemented yet for modifying ones already in existence.
// Memory is a go representation of GstMemory. This object is implemented in a read-only fashion
// currently primarily for reference, and as such you should not really use it. You can create new
// memory blocks, but there are no methods implemented yet for modifying ones already in existence.
//
// Use the Buffer and its Map methods to interact with memory in both a read and writable way.
type Memory struct {
ptr *C.GstMemory
}
@@ -95,114 +102,132 @@ func (m *Memory) Bytes() []byte {
if mapInfo.ptr == nil {
return nil
}
defer mapInfo.Unmap()
return mapInfo.Bytes()
}
// MapInfo is a go representation of a GstMapInfo.
type MapInfo struct {
ptr *C.GstMapInfo
unmapFunc func()
Memory unsafe.Pointer // A pointer to the GstMemory object
Flags MapFlags
Data unsafe.Pointer // A pointer to the actual data
Size int64
MaxSize int64
}
// Unmap will unmap the MapInfo.
func (m *MapInfo) Unmap() {
if m.unmapFunc == nil {
fmt.Println("GO-GST-WARNING: Called Unmap() on unwrapped MapInfo")
}
m.unmapFunc()
// Memory returns the underlying memory object.
func (m *MapInfo) Memory() *Memory {
return wrapMemory(m.ptr.memory)
}
// Data returns a pointer to the raw data inside this map.
func (m *MapInfo) Data() unsafe.Pointer {
return unsafe.Pointer(m.ptr.data)
}
// Flags returns the flags set on this map.
func (m *MapInfo) Flags() MapFlags {
return MapFlags(m.ptr.flags)
}
// Size returns the size of this map.
func (m *MapInfo) Size() int64 {
return int64(m.ptr.size)
}
// MaxSize returns the maximum size of this map.
func (m *MapInfo) MaxSize() int64 {
return int64(m.ptr.maxsize)
}
// Bytes returns a byte slice of the data inside this map info.
func (m *MapInfo) Bytes() []byte {
return C.GoBytes(m.Data, (C.int)(m.Size))
return C.GoBytes(m.Data(), (C.int)(m.Size()))
}
// AsInt8Slice returns the contents of this map as a slice of signed 8-bit integers.
func (m *MapInfo) AsInt8Slice() []int8 {
out := make([]int8, m.Size)
for i, t := range (*[1 << 30]int8)(m.Data)[:m.Size:m.Size] {
out[i] = int8(t)
uint8sl := m.AsUint8Slice()
out := make([]int8, m.Size())
for i := range out {
out[i] = int8(uint8sl[i])
}
return out
}
// AsInt16Slice returns the contents of this map as a slice of signed 16-bit integers.
func (m *MapInfo) AsInt16Slice() []int16 {
out := make([]int16, m.Size)
for i, t := range (*[1 << 30]int16)(m.Data)[:m.Size:m.Size] {
out[i] = int16(t)
uint8sl := m.AsUint8Slice()
out := make([]int16, m.Size()/2)
for i := range out {
out[i] = int16(binary.LittleEndian.Uint16(uint8sl[i*2 : (i+1)*2]))
}
return out
}
// AsInt32Slice returns the contents of this map as a slice of signed 32-bit integers.
func (m *MapInfo) AsInt32Slice() []int32 {
out := make([]int32, m.Size)
for i, t := range (*[1 << 30]int32)(m.Data)[:m.Size:m.Size] {
out[i] = int32(t)
uint8sl := m.AsUint8Slice()
out := make([]int32, m.Size()/4)
for i := range out {
out[i] = int32(binary.LittleEndian.Uint32(uint8sl[i*4 : (i+1)*4]))
}
return out
}
// AsInt64Slice returns the contents of this map as a slice of signed 64-bit integers.
func (m *MapInfo) AsInt64Slice() []int64 {
out := make([]int64, m.Size)
for i, t := range (*[1 << 30]int64)(m.Data)[:m.Size:m.Size] {
out[i] = int64(t)
uint8sl := m.AsUint8Slice()
out := make([]int64, m.Size()/8)
for i := range out {
out[i] = int64(binary.LittleEndian.Uint64(uint8sl[i*8 : (i+1)*8]))
}
return out
}
// AsUint8Slice returns the contents of this map as a slice of unsigned 8-bit integers.
func (m *MapInfo) AsUint8Slice() []uint8 {
out := make([]uint8, m.Size)
for i, t := range (*[1 << 30]uint8)(m.Data)[:m.Size:m.Size] {
out[i] = uint8(t)
out := make([]uint8, m.Size())
for i, t := range (*[1 << 30]uint8)(m.Data())[:m.Size():m.Size()] {
out[i] = t
}
return out
}
// AsUint16Slice returns the contents of this map as a slice of unsigned 16-bit integers.
func (m *MapInfo) AsUint16Slice() []uint16 {
out := make([]uint16, m.Size)
for i, t := range (*[1 << 30]uint16)(m.Data)[:m.Size:m.Size] {
out[i] = uint16(t)
uint8sl := m.AsUint8Slice()
out := make([]uint16, m.Size()/2)
for i := range out {
out[i] = uint16(binary.LittleEndian.Uint16(uint8sl[i*2 : (i+1)*2]))
}
return out
}
// AsUint32Slice returns the contents of this map as a slice of unsigned 32-bit integers.
func (m *MapInfo) AsUint32Slice() []uint32 {
out := make([]uint32, m.Size)
for i, t := range (*[1 << 30]uint32)(m.Data)[:m.Size:m.Size] {
out[i] = uint32(t)
uint8sl := m.AsUint8Slice()
out := make([]uint32, m.Size()/4)
for i := range out {
out[i] = uint32(binary.LittleEndian.Uint32(uint8sl[i*4 : (i+1)*4]))
}
return out
}
// AsUint64Slice returns the contents of this map as a slice of unsigned 64-bit integers.
func (m *MapInfo) AsUint64Slice() []uint64 {
out := make([]uint64, m.Size)
for i, t := range (*[1 << 30]uint64)(m.Data)[:m.Size:m.Size] {
out[i] = uint64(t)
uint8sl := m.AsUint8Slice()
out := make([]uint64, m.Size()/8)
for i := range out {
out[i] = uint64(binary.LittleEndian.Uint64(uint8sl[i*8 : (i+1)*8]))
}
return out
}
func wrapMapInfo(mapInfo *C.GstMapInfo, unmapFunc func()) *MapInfo {
return &MapInfo{
ptr: mapInfo,
unmapFunc: unmapFunc,
Memory: unsafe.Pointer(mapInfo.memory),
Flags: MapFlags(mapInfo.flags),
Data: unsafe.Pointer(mapInfo.data),
Size: int64(mapInfo.size),
MaxSize: int64(mapInfo.maxsize),
// WriteData writes the given values directly to the map's memory.
func (m *MapInfo) WriteData(data []uint8) {
for i, x := range data {
C.writeMapData(m.ptr, C.gint(i), C.guint8(x))
}
}
func wrapMapInfo(mapInfo *C.GstMapInfo, unmapFunc func()) *MapInfo {
info := &MapInfo{ptr: mapInfo}
runtime.SetFinalizer(info, func(_ *MapInfo) { unmapFunc() })
return info
}

View File

@@ -35,6 +35,10 @@ type Meta struct {
ptr *C.GstMeta
}
// FromGstMetaUnsafe wraps the pointer to the given C GstMeta with the go type.
// This is meant for internal usage and is exported for visibility to other packages.
func FromGstMetaUnsafe(ptr unsafe.Pointer) *Meta { return wrapMeta(C.toGstMeta(ptr)) }
// Instance returns the underlying GstMeta instance.
func (m *Meta) Instance() *C.GstMeta { return C.toGstMeta(unsafe.Pointer(m.ptr)) }

56
gst/video/c_util.go Normal file
View File

@@ -0,0 +1,56 @@
package video
/*
#include <gst/gst.h>
*/
import "C"
import (
"errors"
"time"
"unsafe"
"github.com/tinyzimmer/go-gst/gst"
)
// wrapGerr converts a C GError into a Go error and frees the GError.
// Callers must not touch gerr after this returns — ownership is consumed here.
func wrapGerr(gerr *C.GError) error {
// Free the C error once its message has been copied into the Go error.
defer C.g_error_free(gerr)
return errors.New(C.GoString(gerr.message))
}
// durationToClockTime converts a Go time.Duration to a C GstClockTime.
// A negative duration maps to gst.ClockTimeNone (GStreamer's "undefined time"
// sentinel), which is how callers express "no timeout".
func durationToClockTime(d time.Duration) C.GstClockTime {
if d.Nanoseconds() < 0 {
return C.GstClockTime(gst.ClockTimeNone)
}
return C.GstClockTime(d.Nanoseconds())
}
func fromCoreCaps(caps *gst.Caps) *C.GstCaps {
return (*C.GstCaps)(unsafe.Pointer(caps.Instance()))
}
func fromCoreElement(elem *gst.Element) *C.GstElement {
return (*C.GstElement)(unsafe.Pointer(elem.Instance()))
}
func fromCoreEvent(event *gst.Event) *C.GstEvent {
return (*C.GstEvent)(unsafe.Pointer(event.Instance()))
}
func fromCoreMessage(msg *gst.Message) *C.GstMessage {
return (*C.GstMessage)(unsafe.Pointer(msg.Instance()))
}
func fromCoreQuery(query *gst.Query) *C.GstQuery {
return (*C.GstQuery)(unsafe.Pointer(query.Instance()))
}
func fromCoreSample(sample *gst.Sample) *C.GstSample {
return (*C.GstSample)(unsafe.Pointer(sample.Instance()))
}
func fromCoreStructure(structure *gst.Structure) *C.GstStructure {
return (*C.GstStructure)(unsafe.Pointer(structure.Instance()))
}
func gobool(b C.gboolean) bool { return int(b) > 0 }

40
gst/video/cgo_exports.go Normal file
View File

@@ -0,0 +1,40 @@
package video
/*
#include <gst/gst.h>
*/
import "C"
import (
"unsafe"
gopointer "github.com/mattn/go-pointer"
"github.com/tinyzimmer/go-gst/gst"
)
//export goVideoGDestroyNotifyFunc
func goVideoGDestroyNotifyFunc(ptr C.gpointer) {
gopointer.Unref(unsafe.Pointer(ptr))
}
//export goVideoConvertSampleCb
// goVideoConvertSampleCb is the cgo trampoline invoked when an async video
// conversion finishes. It wraps the C sample and error into Go types and
// dispatches them to the user's ConvertSampleCallback stored via go-pointer.
func goVideoConvertSampleCb(gsample *C.GstSample, gerr *C.GError, userData C.gpointer) {
	var sample *gst.Sample
	var err error
	if gerr != nil {
		err = wrapGerr(gerr)
	}
	// BUG FIX: the original checked `sample != nil` (always nil at this
	// point) and wrapped the nil Go variable, so the callback never received
	// a sample. Check and wrap the C pointer (gsample) instead.
	if gsample != nil {
		sample = gst.FromGstSampleUnsafe(unsafe.Pointer(gsample))
		defer sample.Unref()
	}
	iface := gopointer.Restore(unsafe.Pointer(userData))
	if iface == nil {
		return
	}
	cb, ok := iface.(ConvertSampleCallback)
	if !ok {
		return
	}
	cb(sample, err)
}

View File

@@ -0,0 +1,73 @@
package video
/*
#include <gst/video/video.h>
extern void goVideoGDestroyNotifyFunc (gpointer user_data);
extern void goVideoConvertSampleCb (GstSample * sample, GError * gerr, gpointer user_data);
void cgoVideoGDestroyNotifyFunc (gpointer user_data)
{
goVideoGDestroyNotifyFunc(user_data);
}
void cgoVideoConvertSampleCb (GstSample * sample, GError * gerr, gpointer user_data)
{
goVideoConvertSampleCb(sample, gerr, user_data);
}
*/
import "C"
import (
"time"
"unsafe"
gopointer "github.com/mattn/go-pointer"
"github.com/tinyzimmer/go-gst/gst"
)
// ConvertSampleCallback represents a callback from a video convert operation.
// It contains the converted sample or any error that occurred.
type ConvertSampleCallback func(*gst.Sample, error)
// ConvertSample converts a raw video buffer into the specified output caps.
//
// The output caps can be any raw video formats or any image formats (jpeg, png, ...).
//
// The width, height and pixel-aspect-ratio can also be specified in the output caps.
func ConvertSample(sample *gst.Sample, toCaps *gst.Caps, timeout time.Duration) (*gst.Sample, error) {
var gerr *C.GError
ret := C.gst_video_convert_sample(
fromCoreSample(sample),
fromCoreCaps(toCaps),
durationToClockTime(timeout),
&gerr,
)
// A populated GError takes precedence; wrapGerr also frees it.
if gerr != nil {
return nil, wrapGerr(gerr)
}
// NOTE(review): a nil result with no error yields (nil, nil) — callers
// must nil-check the returned sample.
if ret == nil {
return nil, nil
}
return gst.FromGstSampleUnsafe(unsafe.Pointer(ret)), nil
}
// ConvertSampleAsync converts a raw video buffer into the specified output caps.
//
// The output caps can be any raw video formats or any image formats (jpeg, png, ...).
//
// The width, height and pixel-aspect-ratio can also be specified in the output caps.
//
// The callback will be called after conversion, when an error occurred or if conversion
// didn't finish after timeout.
func ConvertSampleAsync(sample *gst.Sample, toCaps *gst.Caps, timeout time.Duration, cb ConvertSampleCallback) {
ptr := gopointer.Save(cb)
C.gst_video_convert_sample_async(
fromCoreSample(sample),
fromCoreCaps(toCaps),
durationToClockTime(timeout),
C.GstVideoConvertSampleCallback(C.cgoVideoConvertSampleCb),
(C.gpointer)(unsafe.Pointer(ptr)),
C.GDestroyNotify(C.cgoVideoGDestroyNotifyFunc),
)
}

View File

@@ -0,0 +1,128 @@
package video
/*
#include <gst/gst.h>
#include <gst/video/video.h>
GstColorBalance * toGstColorBalance(GstElement * element)
{
return GST_COLOR_BALANCE(element);
}
*/
import "C"
import (
"unsafe"
"github.com/gotk3/gotk3/glib"
"github.com/tinyzimmer/go-gst/gst"
)
// ColorBalanceType is an enumeration indicating whether an element implements color
// balancing operations in software or in dedicated hardware. In general, dedicated
// hardware implementations (such as those provided by xvimagesink) are preferred.
type ColorBalanceType int
// Type castings
const (
ColorBalanceHardware ColorBalanceType = C.GST_COLOR_BALANCE_HARDWARE // (0) Color balance is implemented with dedicated hardware.
ColorBalanceSoftware ColorBalanceType = C.GST_COLOR_BALANCE_SOFTWARE // (1) Color balance is implemented via software processing.
)
// ColorBalanceChannel represents parameters for modifying the color balance implemented by
// an element providing the GstColorBalance interface. For example, Hue or Saturation.
type ColorBalanceChannel struct {
// A string containing a descriptive name for this channel
Label string
// The minimum valid value for this channel.
MinValue int
// The maximum valid value for this channel.
MaxValue int
}
// ColorBalance is an interface implemented by elements which can perform some color balance
// operation on video frames they process. For example, modifying the brightness, contrast,
// hue or saturation.
//
// Example elements are 'xvimagesink' and 'colorbalance'
type ColorBalance interface {
// Get the ColorBalanceType of this implementation.
GetBalanceType() ColorBalanceType
// Retrieve the current value of the indicated channel, between MinValue and MaxValue.
GetValue(*ColorBalanceChannel) int
// Retrieve a list of the available channels.
ListChannels() []*ColorBalanceChannel
// Sets the current value of the channel to the passed value, which must be between MinValue
// and MaxValue.
SetValue(*ColorBalanceChannel, int)
}
// ColorBalanceFromElement checks if the given element implements the ColorBalance interface,
// and if so, returns a usable interface. This currently only supports elements created from the
// C runtime.
func ColorBalanceFromElement(element *gst.Element) ColorBalance {
if C.toGstColorBalance(fromCoreElement(element)) == nil {
return nil
}
return &gstColorBalance{fromCoreElement(element)}
}
// gstColorBalance implements a ColorBalance interface backed by an element
// from the C runtime.
type gstColorBalance struct{ elem *C.GstElement }
// Instance returns the C GstColorBalance interface.
func (c *gstColorBalance) Instance() *C.GstColorBalance {
return C.toGstColorBalance(c.elem)
}
// GetBalanceType gets the ColorBalanceType of this implementation.
func (c *gstColorBalance) GetBalanceType() ColorBalanceType {
return ColorBalanceType(C.gst_color_balance_get_balance_type(c.Instance()))
}
// GetValue retrieves the current value of the indicated channel, between MinValue and MaxValue.
func (c *gstColorBalance) GetValue(channel *ColorBalanceChannel) int {
	cLabel := C.CString(channel.Label)
	// Only the C string needs explicit freeing; gcbc below is Go-allocated
	// and managed by the Go garbage collector. Calling C.free on Go-managed
	// memory (as the original did) is undefined behavior under the cgo rules.
	defer C.free(unsafe.Pointer(cLabel))
	gcbc := &C.GstColorBalanceChannel{
		label:     (*C.gchar)(cLabel),
		min_value: C.gint(channel.MinValue),
		max_value: C.gint(channel.MaxValue),
	}
	return int(C.gst_color_balance_get_value(c.Instance(), gcbc))
}
// ListChannels retrieves a list of the available channels.
func (c *gstColorBalance) ListChannels() []*ColorBalanceChannel {
gList := C.gst_color_balance_list_channels(c.Instance())
if gList == nil {
return nil
}
wrapped := glib.WrapList(uintptr(unsafe.Pointer(gList)))
defer wrapped.Free()
out := make([]*ColorBalanceChannel, 0)
wrapped.Foreach(func(item interface{}) {
channel := (*C.GstColorBalanceChannel)(item.(unsafe.Pointer))
out = append(out, &ColorBalanceChannel{
Label: C.GoString(channel.label),
MinValue: int(channel.min_value),
MaxValue: int(channel.max_value),
})
})
return out
}
// SetValue sets the current value of the channel to the passed value, which must be between MinValue
// and MaxValue.
func (c *gstColorBalance) SetValue(channel *ColorBalanceChannel, value int) {
	cLabel := C.CString(channel.Label)
	// Only the C string needs explicit freeing; gcbc is Go-allocated and
	// garbage-collected. The original's C.free on it is undefined behavior
	// under the cgo pointer-passing rules.
	defer C.free(unsafe.Pointer(cLabel))
	gcbc := &C.GstColorBalanceChannel{
		label:     (*C.gchar)(cLabel),
		min_value: C.gint(channel.MinValue),
		max_value: C.gint(channel.MaxValue),
	}
	C.gst_color_balance_set_value(c.Instance(), gcbc, C.gint(value))
}

47
gst/video/gst_meta.go Normal file
View File

@@ -0,0 +1,47 @@
package video
/*
#include <gst/gst.h>
#include <gst/video/video.h>
*/
import "C"
import (
"unsafe"
"github.com/tinyzimmer/go-gst/gst"
)
// CropMetaInfo contains extra buffer metadata describing image cropping.
type CropMetaInfo struct {
ptr *C.GstVideoCropMeta
}
// GetCropMetaInfo returns the default CropMetaInfo.
func GetCropMetaInfo() *CropMetaInfo {
meta := C.gst_video_crop_meta_get_info()
return &CropMetaInfo{(*C.GstVideoCropMeta)(unsafe.Pointer(meta))}
}
// Instance returns the underlying C GstVideoCropMeta instance.
func (c *CropMetaInfo) Instance() *C.GstVideoCropMeta {
return c.ptr
}
// Meta returns the parent Meta instance.
func (c *CropMetaInfo) Meta() *gst.Meta {
	// Take the address of the embedded meta field directly. The original
	// copied the field into a local (`meta := c.Instance().meta`) and took
	// the address of that stack copy, so the returned wrapper pointed at a
	// temporary rather than the GstMeta inside the C struct.
	return gst.FromGstMetaUnsafe(unsafe.Pointer(&c.Instance().meta))
}
// X returns the horizontal offset.
func (c *CropMetaInfo) X() uint { return uint(c.Instance().x) }
// Y returns the vertical offset.
func (c *CropMetaInfo) Y() uint { return uint(c.Instance().y) }
// Width returns the cropped width.
func (c *CropMetaInfo) Width() uint { return uint(c.Instance().width) }
// Height returns the cropped height.
func (c *CropMetaInfo) Height() uint { return uint(c.Instance().height) }

279
gst/video/gst_navigation.go Normal file
View File

@@ -0,0 +1,279 @@
package video
/*
#include <gst/video/video.h>
GstNavigation * toGstNavigation (GstElement * element)
{
return GST_NAVIGATION(element);
}
*/
import "C"
import (
"unsafe"
"github.com/tinyzimmer/go-gst/gst"
)
// NavigationCommand is a set of commands that may be issued to an element providing the
// Navigation interface. The available commands can be queried via the QueryNewCommands
// query.
type NavigationCommand int
// Type castings
const (
NavigationCommandInvalid NavigationCommand = C.GST_NAVIGATION_COMMAND_INVALID // (0) An invalid command entry
NavigationCommandMenu1 NavigationCommand = C.GST_NAVIGATION_COMMAND_MENU1 // (1) Execute navigation menu command 1. For DVD, this enters the DVD root menu, or exits back to the title from the menu.
NavigationCommandMenu2 NavigationCommand = C.GST_NAVIGATION_COMMAND_MENU2 // (2) Execute navigation menu command 2. For DVD, this jumps to the DVD title menu.
NavigationCommandMenu3 NavigationCommand = C.GST_NAVIGATION_COMMAND_MENU3 // (3) Execute navigation menu command 3. For DVD, this jumps into the DVD root menu.
NavigationCommandMenu4 NavigationCommand = C.GST_NAVIGATION_COMMAND_MENU4 // (4) Execute navigation menu command 4. For DVD, this jumps to the Subpicture menu.
NavigationCommandMenu5 NavigationCommand = C.GST_NAVIGATION_COMMAND_MENU5 // (5) Execute navigation menu command 5. For DVD, this jumps to the audio menu.
NavigationCommandMenu6 NavigationCommand = C.GST_NAVIGATION_COMMAND_MENU6 // (6) Execute navigation menu command 6. For DVD, this jumps to the angles menu.
NavigationCommandMenu7 NavigationCommand = C.GST_NAVIGATION_COMMAND_MENU7 // (7) Execute navigation menu command 7. For DVD, this jumps to the chapter menu.
NavigationCommandLeft NavigationCommand = C.GST_NAVIGATION_COMMAND_LEFT // (20) Select the next button to the left in a menu, if such a button exists.
NavigationCommandRight NavigationCommand = C.GST_NAVIGATION_COMMAND_RIGHT // (21) Select the next button to the right in a menu, if such a button exists.
NavigationCommandUp NavigationCommand = C.GST_NAVIGATION_COMMAND_UP // (22) Select the button above the current one in a menu, if such a button exists.
NavigationCommandDown NavigationCommand = C.GST_NAVIGATION_COMMAND_DOWN // (23) Select the button below the current one in a menu, if such a button exists.
NavigationCommandActivate NavigationCommand = C.GST_NAVIGATION_COMMAND_ACTIVATE // (24) Activate (click) the currently selected button in a menu, if such a button exists.
NavigationCommandPrevAngle NavigationCommand = C.GST_NAVIGATION_COMMAND_PREV_ANGLE // (30) Switch to the previous angle in a multiangle feature.
NavigationCommandNextAngle NavigationCommand = C.GST_NAVIGATION_COMMAND_NEXT_ANGLE // (31) Switch to the next angle in a multiangle feature.
)
// Extra aliases for convenience in handling DVD navigation,
const (
NavigationCommandDVDMenu NavigationCommand = C.GST_NAVIGATION_COMMAND_DVD_MENU
NavigationCommandDVDTitleMenu NavigationCommand = C.GST_NAVIGATION_COMMAND_DVD_TITLE_MENU
NavigationCommandDVDRootMenu NavigationCommand = C.GST_NAVIGATION_COMMAND_DVD_ROOT_MENU
NavigationCommandDVDSubpictureMenu NavigationCommand = C.GST_NAVIGATION_COMMAND_DVD_SUBPICTURE_MENU
NavigationCommandDVDAudioMenu NavigationCommand = C.GST_NAVIGATION_COMMAND_DVD_AUDIO_MENU
NavigationCommandDVDAngleMenu NavigationCommand = C.GST_NAVIGATION_COMMAND_DVD_ANGLE_MENU
NavigationCommandDVDChapterMenu NavigationCommand = C.GST_NAVIGATION_COMMAND_DVD_CHAPTER_MENU
)
// NavigationEventType are enum values for the various events that an element implementing the
// Navigation interface might send up the pipeline. Use ToNavigationEvent to classify a received
// event before parsing it.
type NavigationEventType int

// Type castings
const (
	NavigationEventInvalid            NavigationEventType = C.GST_NAVIGATION_EVENT_INVALID              // (0) Returned from gst_navigation_event_get_type when the passed event is not a navigation event.
	NavigationEventKeyPress           NavigationEventType = C.GST_NAVIGATION_EVENT_KEY_PRESS            // (1) A key press event. Use gst_navigation_event_parse_key_event to extract the details from the event.
	NavigationEventKeyRelease         NavigationEventType = C.GST_NAVIGATION_EVENT_KEY_RELEASE          // (2) A key release event. Use gst_navigation_event_parse_key_event to extract the details from the event.
	NavigationEventMouseButtonPress   NavigationEventType = C.GST_NAVIGATION_EVENT_MOUSE_BUTTON_PRESS   // (3) A mouse button press event. Use gst_navigation_event_parse_mouse_button_event to extract the details from the event.
	NavigationEventMouseButtonRelease NavigationEventType = C.GST_NAVIGATION_EVENT_MOUSE_BUTTON_RELEASE // (4) A mouse button release event. Use gst_navigation_event_parse_mouse_button_event to extract the details from the event.
	NavigationEventMouseMove          NavigationEventType = C.GST_NAVIGATION_EVENT_MOUSE_MOVE           // (5) A mouse movement event. Use gst_navigation_event_parse_mouse_move_event to extract the details from the event.
	NavigationEventCommand            NavigationEventType = C.GST_NAVIGATION_EVENT_COMMAND              // (6) A navigation command event. Use gst_navigation_event_parse_command to extract the details from the event.
	NavigationEventMouseScroll        NavigationEventType = C.GST_NAVIGATION_EVENT_MOUSE_SCROLL         // (7) A mouse scroll event. Use gst_navigation_event_parse_mouse_scroll_event to extract the details from the event. (Since: 1.18)
)

// NavigationMessageType is a set of notifications that may be received on the bus when navigation
// related status changes. Use ToNavigationMessage to classify a received message.
type NavigationMessageType int

// Type castings
const (
	NavigationMessageInvalid         NavigationMessageType = C.GST_NAVIGATION_MESSAGE_INVALID          // (0) Returned from gst_navigation_message_get_type when the passed message is not a navigation message.
	NavigationMessageMouseOver       NavigationMessageType = C.GST_NAVIGATION_MESSAGE_MOUSE_OVER       // (1) Sent when the mouse moves over or leaves a clickable region of the output, such as a DVD menu button.
	NavigationMessageCommandsChanged NavigationMessageType = C.GST_NAVIGATION_MESSAGE_COMMANDS_CHANGED // (2) Sent when the set of available commands changes and should re-queried by interested applications.
	NavigationMessageAnglesChanged   NavigationMessageType = C.GST_NAVIGATION_MESSAGE_ANGLES_CHANGED   // (3) Sent when display angles in a multi-angle feature (such as a multiangle DVD) change - either angles have appeared or disappeared.
	NavigationMessageEvent           NavigationMessageType = C.GST_NAVIGATION_MESSAGE_EVENT            // (4) Sent when a navigation event was not handled by any element in the pipeline
)

// NavigationQueryType represents types of navigation interface queries.
type NavigationQueryType int

// Type castings
const (
	NavigationQueryInvalid  NavigationQueryType = C.GST_NAVIGATION_QUERY_INVALID  // (0) invalid query
	NavigationQueryCommands NavigationQueryType = C.GST_NAVIGATION_QUERY_COMMANDS // (1) command query
	NavigationQueryAngles   NavigationQueryType = C.GST_NAVIGATION_QUERY_ANGLES   // (2) viewing angle query
)

// KeyEvent represents types of key events. These are the string event names accepted by
// Navigation.SendKeyEvent.
type KeyEvent string

// Enums
const (
	KeyPress   KeyEvent = "key-press"
	KeyRelease KeyEvent = "key-release"
)

// MouseEvent represents types of mouse events. These are the string event names accepted by
// Navigation.SendMouseEvent.
type MouseEvent string

// Enums
const (
	MouseButtonPress   MouseEvent = "mouse-button-press"
	MouseButtonRelease MouseEvent = "mouse-button-release"
	MouseMove          MouseEvent = "mouse-move"
)
/*
Navigation interface is used for creating and injecting navigation related events such as
mouse button presses, cursor motion and key presses. The associated library also provides
methods for parsing received events, and for sending and receiving navigation related bus
events. One main use-case is DVD menu navigation.

The main parts of the API are:

  - The Navigation interface, implemented by elements which provide an application with
    the ability to create and inject navigation events into the pipeline.
  - Navigation event handling API. Navigation events are created in response to calls
    on a Navigation interface implementation, and sent in the pipeline. Upstream elements
    can use the navigation event API functions to parse the contents of received messages.
  - Navigation message handling API. Navigation messages may be sent on the message bus
    to inform applications of navigation related changes in the pipeline, such as the mouse
    moving over a clickable region, or the set of available angles changing.

The Navigation message functions provide functions for creating and parsing custom bus
messages for signaling GstNavigation changes.
*/
type Navigation interface {
	// SendCommand sends the indicated command to the navigation interface.
	SendCommand(NavigationCommand)
	// SendEvent sends an event with the given structure.
	SendEvent(*gst.Structure)
	// SendKeyEvent sends the given key event. Recognized values for the event are "key-press"
	// and "key-release". The key is the character representation of the key. This is typically
	// as produced by XKeysymToString.
	SendKeyEvent(event KeyEvent, key string)
	// SendMouseEvent sends a mouse event to the navigation interface. Mouse event coordinates are
	// sent relative to the display space of the related output area. This is usually the size in
	// pixels of the window associated with the element implementing the Navigation interface.
	// Use 0 for the button when doing mouse move events.
	SendMouseEvent(event MouseEvent, button int, x, y float64)
	// SendMouseScrollEvent sends a mouse scroll event to the navigation interface. Mouse event
	// coordinates are sent relative to the display space of the related output area. This is
	// usually the size in pixels of the window associated with the element implementing the
	// Navigation interface.
	SendMouseScrollEvent(x, y, dX, dY float64)
}
// NavigationFromElement checks if the given element implements the Navigation interface. If it does,
// a useable interface is returned. Otherwise, it returns nil.
func NavigationFromElement(element *gst.Element) Navigation {
	// Convert the core element once and reuse the result, instead of performing
	// the fromCoreElement/toGstNavigation conversion twice.
	elem := fromCoreElement(element)
	if C.toGstNavigation(elem) == nil {
		return nil
	}
	return &gstNavigation{elem}
}
// gstNavigation implements the Navigation interface on top of a GstElement that
// is known (via NavigationFromElement) to support GstNavigation.
type gstNavigation struct {
	elem *C.GstElement
}

// instance casts the stored element to its GstNavigation interface pointer.
func (n *gstNavigation) instance() *C.GstNavigation {
	return C.toGstNavigation(n.elem)
}

// SendCommand sends the indicated command to the navigation interface.
func (n *gstNavigation) SendCommand(cmd NavigationCommand) {
	C.gst_navigation_send_command(n.instance(), C.GstNavigationCommand(cmd))
}

// SendEvent sends an event with the given structure to the navigation interface.
func (n *gstNavigation) SendEvent(structure *gst.Structure) {
	C.gst_navigation_send_event(n.instance(), fromCoreStructure(structure))
}
// SendKeyEvent sends the given key event ("key-press" or "key-release") with the
// character representation of the key, typically as produced by XKeysymToString.
func (n *gstNavigation) SendKeyEvent(event KeyEvent, key string) {
	evPtr := C.CString(string(event))
	keyPtr := C.CString(key)
	// Both C strings are copied by GStreamer during the call, so they can be
	// released as soon as it returns.
	defer func() {
		C.free(unsafe.Pointer(evPtr))
		C.free(unsafe.Pointer(keyPtr))
	}()
	C.gst_navigation_send_key_event(
		n.instance(),
		(*C.gchar)(unsafe.Pointer(evPtr)),
		(*C.gchar)(unsafe.Pointer(keyPtr)),
	)
}
// SendMouseEvent sends a mouse event to the navigation interface. Coordinates are
// relative to the display space of the related output area (usually the pixel size
// of the window of the element implementing Navigation). Use button 0 for mouse
// move events.
func (n *gstNavigation) SendMouseEvent(event MouseEvent, button int, x, y float64) {
	evPtr := C.CString(string(event))
	defer C.free(unsafe.Pointer(evPtr))
	C.gst_navigation_send_mouse_event(
		n.instance(),
		(*C.gchar)(unsafe.Pointer(evPtr)),
		C.int(button),
		C.double(x),
		C.double(y),
	)
}
// SendMouseScrollEvent sends a mouse scroll event to the navigation interface. Mouse event
// coordinates are sent relative to the display space of the related output area. This is
// usually the size in pixels of the window associated with the element implementing the
// Navigation interface. dX and dY are the scroll deltas.
func (n *gstNavigation) SendMouseScrollEvent(x, y, dX, dY float64) {
	C.gst_navigation_send_mouse_scroll_event(
		n.instance(),
		C.double(x), C.double(y), C.double(dX), C.double(dY),
	)
}
// NavigationEvent extends the Event from the core library and is used by elements
// implementing the Navigation interface. You can wrap an event in this struct yourself,
// but it is safer to use the ToNavigationEvent method first to check validity.
type NavigationEvent struct{ *gst.Event }

// ToNavigationEvent checks if the given event is a NavigationEvent, and if so, returns
// a NavigationEvent instance wrapping the event. If the event is not a NavigationEvent
// this function returns nil.
func ToNavigationEvent(event *gst.Event) *NavigationEvent {
	evType := NavigationEventType(C.gst_navigation_event_get_type(fromCoreEvent(event)))
	if evType == NavigationEventInvalid {
		return nil
	}
	return &NavigationEvent{event}
}

// GetType returns the type of this event.
func (e *NavigationEvent) GetType() NavigationEventType {
	return NavigationEventType(C.gst_navigation_event_get_type(e.instance()))
}

// instance returns the underlying GstEvent instance.
func (e *NavigationEvent) instance() *C.GstEvent { return fromCoreEvent(e.Event) }
// NavigationMessage extends the Message from the core library and is used by elements
// implementing the Navigation interface. You can wrap a message in this struct yourself,
// but it is safer to use the ToNavigationMessage method first to check validity.
type NavigationMessage struct{ *gst.Message }

// ToNavigationMessage checks if the given message is a NavigationMessage, and if so,
// returns a NavigationMessage instance wrapping the message. If the message is not a
// NavigationMessage, this function returns nil.
func ToNavigationMessage(msg *gst.Message) *NavigationMessage {
	msgType := NavigationMessageType(C.gst_navigation_message_get_type(fromCoreMessage(msg)))
	if msgType == NavigationMessageInvalid {
		return nil
	}
	return &NavigationMessage{msg}
}

// instance returns the underlying GstMessage instance.
func (m *NavigationMessage) instance() *C.GstMessage { return fromCoreMessage(m.Message) }

// GetType returns the type of this message.
func (m *NavigationMessage) GetType() NavigationMessageType {
	return NavigationMessageType(C.gst_navigation_message_get_type(m.instance()))
}
// NavigationQuery extends the Query from the core library and is used by elements
// implementing the Navigation interface. You can wrap a query in this struct yourself,
// but it is safer to use the ToNavigationQuery method first to check validity.
type NavigationQuery struct{ *gst.Query }

// ToNavigationQuery checks if the given query is a NavigationQuery, and if so, returns
// a NavigationQuery instance wrapping the query. If the query is not a NavigationQuery,
// this function returns nil.
func ToNavigationQuery(query *gst.Query) *NavigationQuery {
	qType := NavigationQueryType(C.gst_navigation_query_get_type(fromCoreQuery(query)))
	if qType == NavigationQueryInvalid {
		return nil
	}
	return &NavigationQuery{query}
}

// instance returns the underlying GstQuery instance.
func (q *NavigationQuery) instance() *C.GstQuery { return fromCoreQuery(q.Query) }

// GetType returns the type of this query.
func (q *NavigationQuery) GetType() NavigationQueryType {
	return NavigationQueryType(C.gst_navigation_query_get_type(q.instance()))
}

89
gst/video/gst_video.go Normal file
View File

@@ -0,0 +1,89 @@
package video
// #include <gst/video/video.h>
import "C"
import (
"runtime"
"time"
"unsafe"
"github.com/tinyzimmer/go-gst/gst"
)
// OrientationMethod represents the different video orientation methods.
type OrientationMethod int

// Type castings
const (
	OrientationMethodIdentity OrientationMethod = C.GST_VIDEO_ORIENTATION_IDENTITY // (0) Identity (no rotation)
	OrientationMethod90R      OrientationMethod = C.GST_VIDEO_ORIENTATION_90R      // (1) Rotate clockwise 90 degrees
	OrientationMethod180      OrientationMethod = C.GST_VIDEO_ORIENTATION_180      // (2) Rotate 180 degrees
	OrientationMethod90L      OrientationMethod = C.GST_VIDEO_ORIENTATION_90L      // (3) Rotate counter-clockwise 90 degrees
	OrientationMethodHoriz    OrientationMethod = C.GST_VIDEO_ORIENTATION_HORIZ    // (4) Flip horizontally
	OrientationMethodVert     OrientationMethod = C.GST_VIDEO_ORIENTATION_VERT     // (5) Flip vertically
	OrientationMethodULLR     OrientationMethod = C.GST_VIDEO_ORIENTATION_UL_LR    // (6) Flip across upper left/lower right diagonal
	OrientationMethodURLL     OrientationMethod = C.GST_VIDEO_ORIENTATION_UR_LL    // (7) Flip across upper right/lower left diagonal
	OrientationMethodAuto     OrientationMethod = C.GST_VIDEO_ORIENTATION_AUTO     // (8) Select flip method based on image-orientation tag
	OrientationMethodCustom   OrientationMethod = C.GST_VIDEO_ORIENTATION_CUSTOM   // (9) Current status depends on plugin internal setup
)

// Additional video meta tags
const (
	// NOTE(review): "Colorspage" is a misspelling of "Colorspace". The constant is
	// exported, so renaming it would break downstream callers; left as-is.
	TagVideoColorspage  gst.Tag = C.GST_META_TAG_VIDEO_COLORSPACE_STR
	TagVideoOrientation gst.Tag = C.GST_META_TAG_VIDEO_ORIENTATION_STR
	TagVideoSize        gst.Tag = C.GST_META_TAG_VIDEO_SIZE_STR
	TagVideo            gst.Tag = C.GST_META_TAG_VIDEO_STR
)
// Alignment represents parameters for the memory of video buffers. This structure is
// usually used to configure the bufferpool if it supports the BufferPoolOptionVideoAlignment.
type Alignment struct {
	// PaddingTop is the number of extra pixels on the top.
	PaddingTop uint
	// PaddingBottom is the number of extra pixels on the bottom.
	PaddingBottom uint
	// PaddingLeft is the number of extra pixels on the left.
	PaddingLeft uint
	// PaddingRight is the number of extra pixels on the right.
	PaddingRight uint
}
func (a *Alignment) instance() *C.GstVideoAlignment {
g := &C.GstVideoAlignment{
padding_top: C.guint(a.PaddingTop),
padding_bottom: C.guint(a.PaddingBottom),
padding_left: C.guint(a.PaddingLeft),
padding_right: C.guint(a.PaddingRight),
}
runtime.SetFinalizer(a, func(_ *Alignment) { C.g_free((C.gpointer)(unsafe.Pointer(g))) })
return g
}
// CalculateDisplayRatio will, given the Pixel Aspect Ratio and size of an input video frame, and
// the pixel aspect ratio of the intended display device, calculate the actual display ratio the
// video will be rendered with.
//
// See https://gstreamer.freedesktop.org/documentation/video/gstvideo.html?gi-language=c#gst_video_calculate_display_ratio
func CalculateDisplayRatio(videoWidth, videoHeight, videoParNum, videoParDenom, displayParNum, displayParDenom uint) (darNum, darDenom uint, ok bool) {
	var num, denom C.guint
	res := C.gst_video_calculate_display_ratio(
		&num, &denom,
		C.guint(videoWidth), C.guint(videoHeight),
		C.guint(videoParNum), C.guint(videoParDenom),
		C.guint(displayParNum), C.guint(displayParDenom),
	)
	darNum, darDenom, ok = uint(num), uint(denom), gobool(res)
	return darNum, darDenom, ok
}
// GuessFramerate will, given the nominal duration of one video frame, check some standard
// framerates for a close match (within 0.1%) and return it with ok set to true if one is found.
//
// It will calculate an arbitrary framerate if no close match was found, and return false.
//
// It returns false if a duration of 0 is passed.
func GuessFramerate(dur time.Duration) (destNum, destDenom int, ok bool) {
	var num, denom C.gint
	gok := C.gst_video_guess_framerate(durationToClockTime(dur), &num, &denom)
	return int(num), int(denom), gobool(gok)
}

View File

@@ -0,0 +1,83 @@
package video
// #include <gst/video/video.h>
import "C"
import (
"runtime"
"unsafe"
)
// ChromaFlags are extra flags that influence the result from NewChromaResample.
type ChromaFlags int

// Type castings
const (
	ChromaFlagNone       ChromaFlags = C.GST_VIDEO_CHROMA_FLAG_NONE       // (0) no flags
	ChromaFlagInterlaced ChromaFlags = C.GST_VIDEO_CHROMA_FLAG_INTERLACED // (1) the input is interlaced
)

// ChromaMethod represents different subsampling and upsampling methods.
type ChromaMethod int

// Type castings
const (
	ChromaMethodNearest ChromaMethod = C.GST_VIDEO_CHROMA_METHOD_NEAREST // (0) Duplicates the chroma samples when upsampling and drops when subsampling
	ChromaMethodLinear  ChromaMethod = C.GST_VIDEO_CHROMA_METHOD_LINEAR  // (1) Uses linear interpolation to reconstruct missing chroma and averaging to subsample
)

// ChromaSite represents various Chroma sitings. Note that the values are bit flags and
// some named constants are combinations of the basic ones.
type ChromaSite int

// Type castings
const (
	ChromaSiteUnknown  ChromaSite = C.GST_VIDEO_CHROMA_SITE_UNKNOWN   // (0) unknown cositing
	ChromaSiteNone     ChromaSite = C.GST_VIDEO_CHROMA_SITE_NONE      // (1) no cositing
	ChromaSiteHCosited ChromaSite = C.GST_VIDEO_CHROMA_SITE_H_COSITED // (2) chroma is horizontally cosited
	ChromaSiteVCosited ChromaSite = C.GST_VIDEO_CHROMA_SITE_V_COSITED // (4) chroma is vertically cosited
	ChromaSiteAltLine  ChromaSite = C.GST_VIDEO_CHROMA_SITE_ALT_LINE  // (8) chroma samples are sited on alternate lines
	ChromaSiteCosited  ChromaSite = C.GST_VIDEO_CHROMA_SITE_COSITED   // (6) chroma samples cosited with luma samples
	ChromaSiteJpeg     ChromaSite = C.GST_VIDEO_CHROMA_SITE_JPEG      // (1) jpeg style cositing, also for mpeg1 and mjpeg
	ChromaSiteMpeg2    ChromaSite = C.GST_VIDEO_CHROMA_SITE_MPEG2     // (2) mpeg2 style cositing
	ChromaSiteDV       ChromaSite = C.GST_VIDEO_CHROMA_SITE_DV        // (14) DV style cositing
)
// String implements a stringer on ChromaSite.
func (c ChromaSite) String() string {
	// gst_video_chroma_to_string returns a const, GStreamer-owned string. It must
	// NOT be passed to g_free: freeing statically-owned memory corrupts the heap.
	// (The newer gst_video_chroma_site_to_string, which replaces this function in
	// GStreamer 1.20+, is the variant whose result is caller-owned.)
	out := C.gst_video_chroma_to_string(C.GstVideoChromaSite(c))
	return C.GoString((*C.char)(unsafe.Pointer(out)))
}
// ChromaResample is a utility object for resampling chroma planes and converting between
// different chroma sampling sitings.
type ChromaResample struct {
	ptr *C.GstVideoChromaResample
}

// NewChromaResample creates a new resampler object for the given parameters. When hFactor or
// vFactor is > 0, upsampling will be used, otherwise subsampling is performed. Returns nil if
// the parameters could not produce a resampler. The underlying C object is freed automatically
// when the returned wrapper is garbage collected.
func NewChromaResample(method ChromaMethod, site ChromaSite, flags ChromaFlags, format Format, hFactor, vFactor int) *ChromaResample {
	resample := C.gst_video_chroma_resample_new(
		C.GstVideoChromaMethod(method),
		C.GstVideoChromaSite(site),
		C.GstVideoChromaFlags(flags),
		C.GstVideoFormat(format),
		C.gint(hFactor), C.gint(vFactor),
	)
	if resample == nil {
		return nil
	}
	goResample := &ChromaResample{resample}
	// Release the C resampler when the Go wrapper becomes unreachable.
	runtime.SetFinalizer(goResample, func(c *ChromaResample) { C.gst_video_chroma_resample_free(c.instance()) })
	return goResample
}

// instance returns the underlying GstVideoChromaResample pointer.
func (c *ChromaResample) instance() *C.GstVideoChromaResample { return c.ptr }

// GetInfo returns the info about the Resample. The resampler must be fed nLines at a time.
// The first line should be at offset.
func (c *ChromaResample) GetInfo() (nLines uint, offset int) {
	var lines C.guint
	var off C.gint
	C.gst_video_chroma_resample_get_info(c.instance(), &lines, &off)
	return uint(lines), int(off)
}

View File

@@ -0,0 +1,145 @@
package video
// #include <gst/video/video.h>
import "C"
// ColorMatrix is used to convert between Y'PbPr and non-linear RGB (R'G'B')
type ColorMatrix int

// Type castings
const (
	ColorMatrixUnknown   ColorMatrix = C.GST_VIDEO_COLOR_MATRIX_UNKNOWN   // (0) unknown matrix
	ColorMatrixRGB       ColorMatrix = C.GST_VIDEO_COLOR_MATRIX_RGB       // (1) identity matrix. Order of coefficients is actually GBR, also IEC 61966-2-1 (sRGB)
	ColorMatrixFCC       ColorMatrix = C.GST_VIDEO_COLOR_MATRIX_FCC       // (2) FCC Title 47 Code of Federal Regulations 73.682 (a)(20)
	ColorMatrixBT709     ColorMatrix = C.GST_VIDEO_COLOR_MATRIX_BT709     // (3) ITU-R BT.709 color matrix, also ITU-R BT1361 / IEC 61966-2-4 xvYCC709 / SMPTE RP177 Annex B
	ColorMatrixBT601     ColorMatrix = C.GST_VIDEO_COLOR_MATRIX_BT601     // (4) ITU-R BT.601 color matrix, also SMPTE170M / ITU-R BT1358 525 / ITU-R BT1700 NTSC
	ColorMatrixSMPTE240M ColorMatrix = C.GST_VIDEO_COLOR_MATRIX_SMPTE240M // (5) SMPTE 240M color matrix
	ColorMatrixBT2020    ColorMatrix = C.GST_VIDEO_COLOR_MATRIX_BT2020    // (6) ITU-R BT.2020 color matrix. Since: 1.6
)

// ColorPrimaries define the how to transform linear RGB values to and from the
// CIE XYZ colorspace.
type ColorPrimaries int

// Type castings
const (
	ColorPrimariesUnknown    ColorPrimaries = C.GST_VIDEO_COLOR_PRIMARIES_UNKNOWN    // (0) unknown color primaries
	ColorPrimariesBT709      ColorPrimaries = C.GST_VIDEO_COLOR_PRIMARIES_BT709      // (1) BT709 primaries, also ITU-R BT1361 / IEC 61966-2-4 / SMPTE RP177 Annex B
	ColorPrimariesBT470M     ColorPrimaries = C.GST_VIDEO_COLOR_PRIMARIES_BT470M     // (2) BT470M primaries, also FCC Title 47 Code of Federal Regulations 73.682 (a)(20)
	ColorPrimariesBT470BG    ColorPrimaries = C.GST_VIDEO_COLOR_PRIMARIES_BT470BG    // (3) BT470BG primaries, also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM
	ColorPrimariesSMPTE170M  ColorPrimaries = C.GST_VIDEO_COLOR_PRIMARIES_SMPTE170M  // (4) SMPTE170M primaries, also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC
	ColorPrimariesSMPTE240M  ColorPrimaries = C.GST_VIDEO_COLOR_PRIMARIES_SMPTE240M  // (5) SMPTE240M primaries
	ColorPrimariesFilm       ColorPrimaries = C.GST_VIDEO_COLOR_PRIMARIES_FILM       // (6) Generic film (colour filters using Illuminant C)
	ColorPrimariesBT2020     ColorPrimaries = C.GST_VIDEO_COLOR_PRIMARIES_BT2020     // (7) ITU-R BT2020 primaries. Since: 1.6
	ColorPrimariesAdobeRGB   ColorPrimaries = C.GST_VIDEO_COLOR_PRIMARIES_ADOBERGB   // (8) Adobe RGB primaries. Since: 1.8
	ColorPrimariesSMPTEST428 ColorPrimaries = C.GST_VIDEO_COLOR_PRIMARIES_SMPTEST428 // (9) SMPTE ST 428 primaries (CIE 1931 XYZ). Since: 1.16
	ColorPrimariesSMPTERP431 ColorPrimaries = C.GST_VIDEO_COLOR_PRIMARIES_SMPTERP431 // (10) SMPTE RP 431 primaries (ST 431-2 (2011) / DCI P3). Since: 1.16
	ColorPrimariesSMPTEEG432 ColorPrimaries = C.GST_VIDEO_COLOR_PRIMARIES_SMPTEEG432 // (11) SMPTE EG 432 primaries (ST 432-1 (2010) / P3 D65). Since: 1.16
	ColorPrimariesEBU3213    ColorPrimaries = C.GST_VIDEO_COLOR_PRIMARIES_EBU3213    // (12) EBU 3213 primaries (JEDEC P22 phosphors). Since: 1.16
)

// ColorRange represents possible color range values. These constants are defined for
// 8 bit color values and can be scaled for other bit depths.
type ColorRange int

// Type castings
const (
	ColorRangeUnknown ColorRange = C.GST_VIDEO_COLOR_RANGE_UNKNOWN // (0) unknown range
	ColorRange0255    ColorRange = C.GST_VIDEO_COLOR_RANGE_0_255   // (1) [0..255] for 8 bit components
	ColorRange16235   ColorRange = C.GST_VIDEO_COLOR_RANGE_16_235  // (2) [16..235] for 8 bit components. Chroma has [16..240] range.
)
// TransferFunction defines the formula for converting between non-linear RGB (R'G'B')
// and linear RGB
type TransferFunction int

// Type castings
const (
	TransferUnknown    TransferFunction = C.GST_VIDEO_TRANSFER_UNKNOWN      // (0) unknown transfer function
	TransferGamma10    TransferFunction = C.GST_VIDEO_TRANSFER_GAMMA10      // (1) linear RGB, gamma 1.0 curve
	TransferGamma18    TransferFunction = C.GST_VIDEO_TRANSFER_GAMMA18      // (2) Gamma 1.8 curve
	TransferGamma20    TransferFunction = C.GST_VIDEO_TRANSFER_GAMMA20      // (3) Gamma 2.0 curve
	TransferGamma22    TransferFunction = C.GST_VIDEO_TRANSFER_GAMMA22      // (4) Gamma 2.2 curve
	TransferBT709      TransferFunction = C.GST_VIDEO_TRANSFER_BT709        // (5) Gamma 2.2 curve with a linear segment in the lower range, also ITU-R BT470M / ITU-R BT1700 625 PAL & SECAM / ITU-R BT1361
	TransferSMPTE240M  TransferFunction = C.GST_VIDEO_TRANSFER_SMPTE240M    // (6) Gamma 2.2 curve with a linear segment in the lower range
	TransferSRGB       TransferFunction = C.GST_VIDEO_TRANSFER_SRGB         // (7) Gamma 2.4 curve with a linear segment in the lower range. IEC 61966-2-1 (sRGB or sYCC)
	TransferGamma28    TransferFunction = C.GST_VIDEO_TRANSFER_GAMMA28      // (8) Gamma 2.8 curve, also ITU-R BT470BG
	TransferLog100     TransferFunction = C.GST_VIDEO_TRANSFER_LOG100       // (9) Logarithmic transfer characteristic 100:1 range
	TransferLog316     TransferFunction = C.GST_VIDEO_TRANSFER_LOG316       // (10) Logarithmic transfer characteristic 316.22777:1 range (100 * sqrt(10) : 1)
	TransferBT202012   TransferFunction = C.GST_VIDEO_TRANSFER_BT2020_12    // (11) Gamma 2.2 curve with a linear segment in the lower range. Used for BT.2020 with 12 bits per component. Since: 1.6
	TransferAdobeRGB   TransferFunction = C.GST_VIDEO_TRANSFER_ADOBERGB     // (12) Gamma 2.19921875. Since: 1.8
	TransferBT202010   TransferFunction = C.GST_VIDEO_TRANSFER_BT2020_10    // (13) Rec. ITU-R BT.2020-2 with 10 bits per component. (functionally the same as the values GST_VIDEO_TRANSFER_BT709 and GST_VIDEO_TRANSFER_BT601). Since: 1.18
	TransferSMPTE2084  TransferFunction = C.GST_VIDEO_TRANSFER_SMPTE2084    // (14) SMPTE ST 2084 for 10, 12, 14, and 16-bit systems. Known as perceptual quantization (PQ) Since: 1.18
	TransferARIBSTDB67 TransferFunction = C.GST_VIDEO_TRANSFER_ARIB_STD_B67 // (15) Association of Radio Industries and Businesses (ARIB) STD-B67 and Rec. ITU-R BT.2100-1 hybrid loggamma (HLG) system Since: 1.18
	TransferBT601      TransferFunction = C.GST_VIDEO_TRANSFER_BT601        // (16) also known as SMPTE170M / ITU-R BT1358 525 or 625 / ITU-R BT1700 NTSC
)

// Pre-defined colorimetries
const (
	ColorimetryBT2020    string = C.GST_VIDEO_COLORIMETRY_BT2020
	ColorimetryBT202010  string = C.GST_VIDEO_COLORIMETRY_BT2020_10
	ColorimetryBT2100HLG string = C.GST_VIDEO_COLORIMETRY_BT2100_HLG
	ColorimetryBT2100PQ  string = C.GST_VIDEO_COLORIMETRY_BT2100_PQ
	ColorimetryBT601     string = C.GST_VIDEO_COLORIMETRY_BT601
	ColorimetryBT709     string = C.GST_VIDEO_COLORIMETRY_BT709
	ColorimetrySMPTE240M string = C.GST_VIDEO_COLORIMETRY_SMPTE240M
	// NOTE(review): "SRRGB" is a misspelling of "SRGB". The constant is exported,
	// so renaming it would break downstream callers; left as-is.
	ColorimetrySRRGB string = C.GST_VIDEO_COLORIMETRY_SRGB
)
// ColorPrimariesInfo is a structure describing the chromaticity coordinates of an RGB system.
// These values can be used to construct a matrix to transform RGB to and from the XYZ colorspace.
type ColorPrimariesInfo struct {
	// The ColorPrimaries these coordinates describe.
	Primaries ColorPrimaries
	Wx, Wy float64 // Reference white coordinates
	Rx, Ry float64 // Red coordinates
	Gx, Gy float64 // Green coordinates
	Bx, By float64 // Blue coordinates
}

// NOTE(review): kept commented-out — the exported field names used here (Wx, Wy, ...)
// do not match the lowercase field names cgo generates for the C struct; confirm the
// field mapping before enabling this constructor.
// func (c *ColorPrimariesInfo) instance() *C.GstVideoColorPrimariesInfo {
// 	i := &C.GstVideoColorPrimariesInfo{
// 		primaries: C.GstVideoColorPrimaries(c.Primaries),
// 		Wx:        C.gdouble(c.Wx),
// 		Wy:        C.gdouble(c.Wy),
// 		Rx:        C.gdouble(c.Rx),
// 		Ry:        C.gdouble(c.Ry),
// 		Gx:        C.gdouble(c.Gx),
// 		Gy:        C.gdouble(c.Gy),
// 		Bx:        C.gdouble(c.Bx),
// 		By:        C.gdouble(c.By),
// 	}
// 	runtime.SetFinalizer(c, func(_ *ColorPrimariesInfo) { C.g_free((C.gpointer)(unsafe.Pointer(i))) })
// 	return i
// }
// Colorimetry is a structure describing the color info.
type Colorimetry struct {
	// The color range. This is the valid range for the samples. It is used to convert the samples to Y'PbPr values.
	Range ColorRange
	// The color matrix. Used to convert between Y'PbPr and non-linear RGB (R'G'B').
	Matrix ColorMatrix
	// The transfer function. Used to convert between R'G'B' and RGB.
	Transfer TransferFunction
	// Color primaries. Used to convert between R'G'B' and CIE XYZ.
	Primaries ColorPrimaries
}

// NOTE(review): kept commented-out — constructing the C struct (with its
// keyword-escaped `_range` field) and the g_free finalizer on a Go-allocated
// value both need to be verified before enabling.
// func (c *Colorimetry) instance() *C.GstVideoColorimetry {
// 	i := &C.GstVideoColorimetry{
// 		_range:    C.GstVideoColorRange(c.Range),
// 		matrix:    C.GstVideoColorMatrix(c.Matrix),
// 		transfer:  C.GstVideoTransferFunction(c.Transfer),
// 		primaries: C.GstVideoColorPrimaries(c.Primaries),
// 	}
// 	runtime.SetFinalizer(c, func(_ *Colorimetry) { C.g_free((C.gpointer)(unsafe.Pointer(i))) })
// 	return i
// }

// colorimetryFromInstance converts a C GstVideoColorimetry (by value) into its Go representation.
func colorimetryFromInstance(c C.GstVideoColorimetry) *Colorimetry {
	return &Colorimetry{
		// `range` is a Go keyword, so cgo exposes the C field as `_range`.
		Range:     ColorRange(c._range),
		Matrix:    ColorMatrix(c.matrix),
		Transfer:  TransferFunction(c.transfer),
		Primaries: ColorPrimaries(c.primaries),
	}
}

View File

@@ -0,0 +1,286 @@
package video
/*
#include <gst/video/video.h>
const gchar * formatInfoName (GstVideoFormatInfo * info) { return GST_VIDEO_FORMAT_INFO_NAME(info); }
guint formatInfoBits (GstVideoFormatInfo * info) { return GST_VIDEO_FORMAT_INFO_BITS(info); }
guint formatInfoDepth (GstVideoFormatInfo * info, guint c) { return GST_VIDEO_FORMAT_INFO_DEPTH(info, c); }
GstVideoFormatFlags formatInfoFlags (GstVideoFormatInfo * info) { return GST_VIDEO_FORMAT_INFO_FLAGS(info); }
GstVideoFormat formatInfoFormat (GstVideoFormatInfo * info) { return GST_VIDEO_FORMAT_INFO_FORMAT(info); }
gboolean formatInfoHasAlpha (GstVideoFormatInfo * info) { return GST_VIDEO_FORMAT_INFO_HAS_ALPHA(info); }
gboolean formatInfoHasPalette (GstVideoFormatInfo * info) { return GST_VIDEO_FORMAT_INFO_HAS_PALETTE(info); }
guint formatInfoHSub (GstVideoFormatInfo * info, guint c) { return GST_VIDEO_FORMAT_INFO_H_SUB(info, c); }
gboolean formatInfoIsComplex (GstVideoFormatInfo * info) { return GST_VIDEO_FORMAT_INFO_IS_COMPLEX(info); }
gboolean formatInfoIsGray (GstVideoFormatInfo * info) { return GST_VIDEO_FORMAT_INFO_IS_GRAY(info); }
gboolean formatInfoIsLE (GstVideoFormatInfo * info) { return GST_VIDEO_FORMAT_INFO_IS_LE(info); }
gboolean formatInfoIsRGB (GstVideoFormatInfo * info) { return GST_VIDEO_FORMAT_INFO_IS_RGB(info); }
gboolean formatInfoIsTiled (GstVideoFormatInfo * info) { return GST_VIDEO_FORMAT_INFO_IS_TILED(info); }
gboolean formatInfoIsYUV (GstVideoFormatInfo * info) { return GST_VIDEO_FORMAT_INFO_IS_YUV(info); }
guint formatInfoNComponent (GstVideoFormatInfo * info) { return GST_VIDEO_FORMAT_INFO_N_COMPONENTS(info); }
guint formatInfoNPlanes (GstVideoFormatInfo * info) { return GST_VIDEO_FORMAT_INFO_N_PLANES(info); }
guint formatInfoPlane (GstVideoFormatInfo * info, guint c) { return GST_VIDEO_FORMAT_INFO_PLANE(info, c); }
guint formatInfoPOffset (GstVideoFormatInfo * info, guint c) { return GST_VIDEO_FORMAT_INFO_POFFSET(info, c); }
guint formatInfoPStride (GstVideoFormatInfo * info, guint c) { return GST_VIDEO_FORMAT_INFO_PSTRIDE(info, c); }
guint formatInfoTileHS (GstVideoFormatInfo * info) { return GST_VIDEO_FORMAT_INFO_TILE_HS(info); }
GstVideoTileMode formatInfoTileMode (GstVideoFormatInfo * info) { return GST_VIDEO_FORMAT_INFO_TILE_MODE(info); }
guint formatInfoTileWS (GstVideoFormatInfo * info) { return GST_VIDEO_FORMAT_INFO_TILE_WS(info); }
guint formatInfoWSub (GstVideoFormatInfo * info, guint c) { return GST_VIDEO_FORMAT_INFO_W_SUB(info, c); }
*/
import "C"
import (
"runtime"
"unsafe"
)
// Format is an enum value describing the most common video formats. Values cast
// directly to the corresponding C GstVideoFormat values.
type Format int
// Type castings
const (
FormatUnknown Format = C.GST_VIDEO_FORMAT_UNKNOWN // (0) Unknown or unset video format id
FormatEncoded Format = C.GST_VIDEO_FORMAT_ENCODED // (1) Encoded video format. Only ever use that in caps for special video formats in combination with non-system memory GstCapsFeatures where it does not make sense to specify a real video format.
FormatI420 Format = C.GST_VIDEO_FORMAT_I420 // (2) planar 4:2:0 YUV
FormatYV12 Format = C.GST_VIDEO_FORMAT_YV12 // (3) planar 4:2:0 YVU (like I420 but UV planes swapped)
FormatYUY2 Format = C.GST_VIDEO_FORMAT_YUY2 // (4) packed 4:2:2 YUV (Y0-U0-Y1-V0 Y2-U2-Y3-V2 Y4 ...)
FormatUYVY Format = C.GST_VIDEO_FORMAT_UYVY // (5) packed 4:2:2 YUV (U0-Y0-V0-Y1 U2-Y2-V2-Y3 U4 ...)
FormatAYUV Format = C.GST_VIDEO_FORMAT_AYUV // (6) packed 4:4:4 YUV with alpha channel (A0-Y0-U0-V0 ...)
FormatRGBx Format = C.GST_VIDEO_FORMAT_RGBx // (7) sparse rgb packed into 32 bit, space last
FormatBGRx Format = C.GST_VIDEO_FORMAT_BGRx // (8) sparse reverse rgb packed into 32 bit, space last
FormatxRGB Format = C.GST_VIDEO_FORMAT_xRGB // (9) sparse rgb packed into 32 bit, space first
FormatxBGR Format = C.GST_VIDEO_FORMAT_xBGR // (10) sparse reverse rgb packed into 32 bit, space first
FormatRGBA Format = C.GST_VIDEO_FORMAT_RGBA // (11) rgb with alpha channel last
FormatBGRA Format = C.GST_VIDEO_FORMAT_BGRA // (12) reverse rgb with alpha channel last
FormatARGB Format = C.GST_VIDEO_FORMAT_ARGB // (13) rgb with alpha channel first
FormatABGR Format = C.GST_VIDEO_FORMAT_ABGR // (14) reverse rgb with alpha channel first
FormatRGB Format = C.GST_VIDEO_FORMAT_RGB // (15) RGB packed into 24 bits without padding (R-G-B-R-G-B)
FormatBGR Format = C.GST_VIDEO_FORMAT_BGR // (16) reverse RGB packed into 24 bits without padding (B-G-R-B-G-R)
FormatY41B Format = C.GST_VIDEO_FORMAT_Y41B // (17) planar 4:1:1 YUV
FormatY42B Format = C.GST_VIDEO_FORMAT_Y42B // (18) planar 4:2:2 YUV
FormatYVYU Format = C.GST_VIDEO_FORMAT_YVYU // (19) packed 4:2:2 YUV (Y0-V0-Y1-U0 Y2-V2-Y3-U2 Y4 ...)
FormatY444 Format = C.GST_VIDEO_FORMAT_Y444 // (20) planar 4:4:4 YUV
Formatv210 Format = C.GST_VIDEO_FORMAT_v210 // (21) packed 4:2:2 10-bit YUV, complex format
Formatv216 Format = C.GST_VIDEO_FORMAT_v216 // (22) packed 4:2:2 16-bit YUV, Y0-U0-Y1-V1 order
FormatNV12 Format = C.GST_VIDEO_FORMAT_NV12 // (23) planar 4:2:0 YUV with interleaved UV plane
FormatNV21 Format = C.GST_VIDEO_FORMAT_NV21 // (24) planar 4:2:0 YUV with interleaved VU plane
FormatGray8 Format = C.GST_VIDEO_FORMAT_GRAY8 // (25) 8-bit grayscale
FormatGray16BE Format = C.GST_VIDEO_FORMAT_GRAY16_BE // (26) 16-bit grayscale, most significant byte first
FormatGray16LE Format = C.GST_VIDEO_FORMAT_GRAY16_LE // (27) 16-bit grayscale, least significant byte first
Formatv308 Format = C.GST_VIDEO_FORMAT_v308 // (28) packed 4:4:4 YUV (Y-U-V ...)
FormatRGB16 Format = C.GST_VIDEO_FORMAT_RGB16 // (29) rgb 5-6-5 bits per component
FormatBGR16 Format = C.GST_VIDEO_FORMAT_BGR16 // (30) reverse rgb 5-6-5 bits per component
FormatRGB15 Format = C.GST_VIDEO_FORMAT_RGB15 // (31) rgb 5-5-5 bits per component
FormatBGR15 Format = C.GST_VIDEO_FORMAT_BGR15 // (32) reverse rgb 5-5-5 bits per component
FormatUYVP Format = C.GST_VIDEO_FORMAT_UYVP // (33) packed 10-bit 4:2:2 YUV (U0-Y0-V0-Y1 U2-Y2-V2-Y3 U4 ...)
FormatA420 Format = C.GST_VIDEO_FORMAT_A420 // (34) planar 4:4:2:0 AYUV
FormatRGB8P Format = C.GST_VIDEO_FORMAT_RGB8P // (35) 8-bit paletted RGB
FormatYUV9 Format = C.GST_VIDEO_FORMAT_YUV9 // (36) planar 4:1:0 YUV
FormatYVU9 Format = C.GST_VIDEO_FORMAT_YVU9 // (37) planar 4:1:0 YUV (like YUV9 but UV planes swapped)
FormatIYU1 Format = C.GST_VIDEO_FORMAT_IYU1 // (38) packed 4:1:1 YUV (Cb-Y0-Y1-Cr-Y2-Y3 ...)
FormatARGB64 Format = C.GST_VIDEO_FORMAT_ARGB64 // (39) rgb with alpha channel first, 16 bits per channel
FormatAYUV64 Format = C.GST_VIDEO_FORMAT_AYUV64 // (40) packed 4:4:4 YUV with alpha channel, 16 bits per channel (A0-Y0-U0-V0 ...)
Formatr210 Format = C.GST_VIDEO_FORMAT_r210 // (41) packed 4:4:4 RGB, 10 bits per channel
FormatI42010BE Format = C.GST_VIDEO_FORMAT_I420_10BE // (42) planar 4:2:0 YUV, 10 bits per channel
FormatI42010LE Format = C.GST_VIDEO_FORMAT_I420_10LE // (43) planar 4:2:0 YUV, 10 bits per channel
FormatI42210BE Format = C.GST_VIDEO_FORMAT_I422_10BE // (44) planar 4:2:2 YUV, 10 bits per channel
FormatI42210LE Format = C.GST_VIDEO_FORMAT_I422_10LE // (45) planar 4:2:2 YUV, 10 bits per channel
FormatY44410BE Format = C.GST_VIDEO_FORMAT_Y444_10BE // (46) planar 4:4:4 YUV, 10 bits per channel (Since: 1.2)
FormatY44410LE Format = C.GST_VIDEO_FORMAT_Y444_10LE // (47) planar 4:4:4 YUV, 10 bits per channel (Since: 1.2)
FormatGBR Format = C.GST_VIDEO_FORMAT_GBR // (48) planar 4:4:4 RGB, 8 bits per channel (Since: 1.2)
FormatGBR10BE Format = C.GST_VIDEO_FORMAT_GBR_10BE // (49) planar 4:4:4 RGB, 10 bits per channel (Since: 1.2)
FormatGBR10LE Format = C.GST_VIDEO_FORMAT_GBR_10LE // (50) planar 4:4:4 RGB, 10 bits per channel (Since: 1.2)
FormatNV16 Format = C.GST_VIDEO_FORMAT_NV16 // (51) planar 4:2:2 YUV with interleaved UV plane (Since: 1.2)
FormatNV24 Format = C.GST_VIDEO_FORMAT_NV24 // (52) planar 4:4:4 YUV with interleaved UV plane (Since: 1.2)
FormatNV1264Z32 Format = C.GST_VIDEO_FORMAT_NV12_64Z32 // (53) NV12 with 64x32 tiling in zigzag pattern (Since: 1.4)
FormatA42010BE Format = C.GST_VIDEO_FORMAT_A420_10BE // (54) planar 4:4:2:0 YUV, 10 bits per channel (Since: 1.6)
FormatA42010LE Format = C.GST_VIDEO_FORMAT_A420_10LE // (55) planar 4:4:2:0 YUV, 10 bits per channel (Since: 1.6)
FormatA42210BE Format = C.GST_VIDEO_FORMAT_A422_10BE // (56) planar 4:4:2:2 YUV, 10 bits per channel (Since: 1.6)
FormatA42210LE Format = C.GST_VIDEO_FORMAT_A422_10LE // (57) planar 4:4:2:2 YUV, 10 bits per channel (Since: 1.6)
FormatA44410BE Format = C.GST_VIDEO_FORMAT_A444_10BE // (58) planar 4:4:4:4 YUV, 10 bits per channel (Since: 1.6)
FormatA44410LE Format = C.GST_VIDEO_FORMAT_A444_10LE // (59) planar 4:4:4:4 YUV, 10 bits per channel (Since: 1.6)
FormatNV61 Format = C.GST_VIDEO_FORMAT_NV61 // (60) planar 4:2:2 YUV with interleaved VU plane (Since: 1.6)
FormatP01010BE Format = C.GST_VIDEO_FORMAT_P010_10BE // (61) planar 4:2:0 YUV with interleaved UV plane, 10 bits per channel (Since: 1.10)
FormatP01010LE Format = C.GST_VIDEO_FORMAT_P010_10LE // (62) planar 4:2:0 YUV with interleaved UV plane, 10 bits per channel (Since: 1.10)
FormatIYU2 Format = C.GST_VIDEO_FORMAT_IYU2 // (63) packed 4:4:4 YUV (U-Y-V ...) (Since: 1.10)
FormatVYUY Format = C.GST_VIDEO_FORMAT_VYUY // (64) packed 4:2:2 YUV (V0-Y0-U0-Y1 V2-Y2-U2-Y3 V4 ...)
FormatGBRA Format = C.GST_VIDEO_FORMAT_GBRA // (65) planar 4:4:4:4 ARGB, 8 bits per channel (Since: 1.12)
FormatGBRA10BE Format = C.GST_VIDEO_FORMAT_GBRA_10BE // (66) planar 4:4:4:4 ARGB, 10 bits per channel (Since: 1.12)
FormatGBRA10LE Format = C.GST_VIDEO_FORMAT_GBRA_10LE // (67) planar 4:4:4:4 ARGB, 10 bits per channel (Since: 1.12)
FormatGBR12BE Format = C.GST_VIDEO_FORMAT_GBR_12BE // (68) planar 4:4:4 RGB, 12 bits per channel (Since: 1.12)
FormatGBR12LE Format = C.GST_VIDEO_FORMAT_GBR_12LE // (69) planar 4:4:4 RGB, 12 bits per channel (Since: 1.12)
FormatGBRA12BE Format = C.GST_VIDEO_FORMAT_GBRA_12BE // (70) planar 4:4:4:4 ARGB, 12 bits per channel (Since: 1.12)
FormatGBRA12LE Format = C.GST_VIDEO_FORMAT_GBRA_12LE // (71) planar 4:4:4:4 ARGB, 12 bits per channel (Since: 1.12)
FormatI42012BE Format = C.GST_VIDEO_FORMAT_I420_12BE // (72) planar 4:2:0 YUV, 12 bits per channel (Since: 1.12)
FormatI42012LE Format = C.GST_VIDEO_FORMAT_I420_12LE // (73) planar 4:2:0 YUV, 12 bits per channel (Since: 1.12)
FormatI42212BE Format = C.GST_VIDEO_FORMAT_I422_12BE // (74) planar 4:2:2 YUV, 12 bits per channel (Since: 1.12)
FormatI42212LE Format = C.GST_VIDEO_FORMAT_I422_12LE // (75) planar 4:2:2 YUV, 12 bits per channel (Since: 1.12)
FormatY44412BE Format = C.GST_VIDEO_FORMAT_Y444_12BE // (76) planar 4:4:4 YUV, 12 bits per channel (Since: 1.12)
FormatY44412LE Format = C.GST_VIDEO_FORMAT_Y444_12LE // (77) planar 4:4:4 YUV, 12 bits per channel (Since: 1.12)
FormatGray10LE32 Format = C.GST_VIDEO_FORMAT_GRAY10_LE32 // (78) 10-bit grayscale, packed into 32bit words (2 bits padding) (Since: 1.14)
FormatNV1210LE32 Format = C.GST_VIDEO_FORMAT_NV12_10LE32 // (79) 10-bit variant of GST_VIDEO_FORMAT_NV12, packed into 32bit words (MSB 2 bits padding) (Since: 1.14)
FormatNV1610LE32 Format = C.GST_VIDEO_FORMAT_NV16_10LE32 // (80) 10-bit variant of GST_VIDEO_FORMAT_NV16, packed into 32bit words (MSB 2 bits padding) (Since: 1.14)
FormatNV1210LE40 Format = C.GST_VIDEO_FORMAT_NV12_10LE40 // (81) Fully packed variant of NV12_10LE32 (Since: 1.16)
FormatY210 Format = C.GST_VIDEO_FORMAT_Y210 // (82) packed 4:2:2 YUV, 10 bits per channel (Since: 1.16)
FormatY410 Format = C.GST_VIDEO_FORMAT_Y410 // (83) packed 4:4:4 YUV, 10 bits per channel(A-V-Y-U...) (Since: 1.16)
FormatVUYA Format = C.GST_VIDEO_FORMAT_VUYA // (84) packed 4:4:4 YUV with alpha channel (V0-U0-Y0-A0...) (Since: 1.16)
FormatBGR10A2LE Format = C.GST_VIDEO_FORMAT_BGR10A2_LE // (85) packed 4:4:4 RGB with alpha channel(B-G-R-A), 10 bits for R/G/B channel and MSB 2 bits for alpha channel (Since: 1.16)
FormatRGB10A2LE Format = C.GST_VIDEO_FORMAT_RGB10A2_LE // (86) packed 4:4:4 RGB with alpha channel(R-G-B-A), 10 bits for R/G/B channel and MSB 2 bits for alpha channel (Since: 1.18)
FormatY44416BE Format = C.GST_VIDEO_FORMAT_Y444_16BE // (87) planar 4:4:4 YUV, 16 bits per channel (Since: 1.18)
FormatY44416LE Format = C.GST_VIDEO_FORMAT_Y444_16LE // (88) planar 4:4:4 YUV, 16 bits per channel (Since: 1.18)
FormatP016BE Format = C.GST_VIDEO_FORMAT_P016_BE // (89) planar 4:2:0 YUV with interleaved UV plane, 16 bits per channel (Since: 1.18)
FormatP016LE Format = C.GST_VIDEO_FORMAT_P016_LE // (90) planar 4:2:0 YUV with interleaved UV plane, 16 bits per channel (Since: 1.18)
FormatP012BE Format = C.GST_VIDEO_FORMAT_P012_BE // (91) planar 4:2:0 YUV with interleaved UV plane, 12 bits per channel (Since: 1.18)
FormatP012LE Format = C.GST_VIDEO_FORMAT_P012_LE // (92) planar 4:2:0 YUV with interleaved UV plane, 12 bits per channel (Since: 1.18)
FormatY212BE Format = C.GST_VIDEO_FORMAT_Y212_BE // (93) packed 4:2:2 YUV, 12 bits per channel (Y-U-Y-V) (Since: 1.18)
FormatY212LE Format = C.GST_VIDEO_FORMAT_Y212_LE // (94) packed 4:2:2 YUV, 12 bits per channel (Y-U-Y-V) (Since: 1.18)
FormatY412BE Format = C.GST_VIDEO_FORMAT_Y412_BE // (95) packed 4:4:4:4 YUV, 12 bits per channel(U-Y-V-A...) (Since: 1.18)
FormatY412LE Format = C.GST_VIDEO_FORMAT_Y412_LE // (96) packed 4:4:4:4 YUV, 12 bits per channel(U-Y-V-A...) (Since: 1.18)
FormatNV124L4 Format = C.GST_VIDEO_FORMAT_NV12_4L4 // (97) NV12 with 4x4 tiles in linear order.
FormatNV1232L32Format Format = C.GST_VIDEO_FORMAT_NV12_32L32 // (98) NV12 with 32x32 tiles in linear order.
)
// RawFormats returns a slice of all the raw video formats supported by GStreamer.
//
// The C array returned by gst_video_formats_raw is owned by GStreamer; its
// contents are copied into a freshly allocated Go slice, so the result is
// safe to retain after the call.
func RawFormats() []Format {
	var size C.guint
	formats := C.gst_video_formats_raw(&size)
	// Guard before slicing: slicing a nil array pointer panics at runtime,
	// and a zero size makes the copy loop pointless.
	if formats == nil || size == 0 {
		return nil
	}
	out := make([]Format, uint(size))
	for i, f := range (*[1 << 30]C.GstVideoFormat)(unsafe.Pointer(formats))[:size:size] {
		out[i] = Format(f)
	}
	return out
}
// Info returns the FormatInfo for this video format.
//
// The GstVideoFormatInfo returned by gst_video_format_get_info points at
// static, GStreamer-owned data (transfer: none). It must therefore never be
// released with g_free — the previous finalizer did exactly that and would
// have handed static memory to the allocator. No finalizer is attached;
// the explicit SetFinalizer(info, nil) below documents that intent (and is
// a no-op on a fresh object).
func (f Format) Info() *FormatInfo {
	finfo := C.gst_video_format_get_info(C.GstVideoFormat(f))
	info := &FormatInfo{ptr: finfo}
	runtime.SetFinalizer(info, nil)
	return info
}
// String returns the canonical GStreamer name for this format.
func (f Format) String() string {
	name := C.gst_video_format_to_string(C.GstVideoFormat(f))
	return C.GoString(name)
}
// FOURCC converts this format value into the corresponding FOURCC code.
// Only a few YUV formats have FOURCC equivalents; for any format without
// one, 0 is returned.
func (f Format) FOURCC() uint32 {
	fourcc := C.gst_video_format_to_fourcc(C.GstVideoFormat(f))
	return uint32(fourcc)
}
// FormatFlags are different video flags that a format info can have.
// Values follow a power-of-two pattern and may be combined as a bitmask.
type FormatFlags int

// Type castings of the GstVideoFormatFlags values.
const (
	FormatFlagYUV     FormatFlags = C.GST_VIDEO_FORMAT_FLAG_YUV     // (1) The video format is YUV, components are numbered 0=Y, 1=U, 2=V.
	FormatFlagRGB     FormatFlags = C.GST_VIDEO_FORMAT_FLAG_RGB     // (2) The video format is RGB, components are numbered 0=R, 1=G, 2=B.
	FormatFlagGray    FormatFlags = C.GST_VIDEO_FORMAT_FLAG_GRAY    // (4) The video is gray, there is one gray component with index 0.
	FormatFlagAlpha   FormatFlags = C.GST_VIDEO_FORMAT_FLAG_ALPHA   // (8) The video format has an alpha components with the number 3.
	FormatFlagLE      FormatFlags = C.GST_VIDEO_FORMAT_FLAG_LE      // (16) The video format has data stored in little endianness.
	FormatFlagPalette FormatFlags = C.GST_VIDEO_FORMAT_FLAG_PALETTE // (32) The video format has a palette. The palette is stored in the second plane and indexes are stored in the first plane.
	FormatFlagComplex FormatFlags = C.GST_VIDEO_FORMAT_FLAG_COMPLEX // (64) The video format has a complex layout that can't be described with the usual information in the GstVideoFormatInfo.
	FormatFlagUnpack  FormatFlags = C.GST_VIDEO_FORMAT_FLAG_UNPACK  // (128) This format can be used in a GstVideoFormatUnpack and GstVideoFormatPack function.
	FormatFlagTiled   FormatFlags = C.GST_VIDEO_FORMAT_FLAG_TILED   // (256) The format is tiled, there is tiling information in the last plane.
)
// PackFlags are different flags that can be used when packing and unpacking
// frame data with the pack/unpack functions of a format.
type PackFlags int

// Type castings of the GstVideoPackFlags values.
const (
	PackFlagNone          PackFlags = C.GST_VIDEO_PACK_FLAG_NONE           // (0) No flag
	PackFlagTruncateRange PackFlags = C.GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE // (1) When the source has a smaller depth than the target format, set the least significant bits of the target to 0. This is likely slightly faster but less accurate. When this flag is not specified, the most significant bits of the source are duplicated in the least significant bits of the destination.
	PackFlagInterlaced    PackFlags = C.GST_VIDEO_PACK_FLAG_INTERLACED     // (2) The source is interlaced. The unpacked format will be interlaced as well with each line containing information from alternating fields. (Since: 1.2)
)
// FormatInfo contains information for a video format.
type FormatInfo struct {
	// ptr is the wrapped GstVideoFormatInfo.
	ptr *C.GstVideoFormatInfo
}

// instance returns the underlying GstVideoFormatInfo pointer for use with
// the C helper shims.
func (f *FormatInfo) instance() *C.GstVideoFormatInfo { return f.ptr }
// Bits returns the number of bits used to pack data items. Values smaller
// than 8 mean multiple pixels are stored in a single byte; for values above
// 8, multiple bytes should be read according to the endianness flag before
// applying the shift and mask.
func (f *FormatInfo) Bits() uint {
	bits := C.formatInfoBits(f.instance())
	return uint(bits)
}
// ComponentDepth returns the depth in bits for the given component.
func (f *FormatInfo) ComponentDepth(component uint) uint {
	depth := C.formatInfoDepth(f.instance(), C.guint(component))
	return uint(depth)
}
// ComponentHSub returns the subsampling factor of the height for the
// given component.
func (f *FormatInfo) ComponentHSub(component uint) uint {
	sub := C.formatInfoHSub(f.instance(), C.guint(component))
	return uint(sub)
}
// ComponentWSub returns the subsampling factor of the width for the given
// component. The parameter is named component (rather than n) for
// consistency with ComponentHSub and ComponentDepth; arguments are
// positional in Go so this is caller-compatible.
func (f *FormatInfo) ComponentWSub(component uint) uint {
	return uint(C.formatInfoWSub(f.instance(), C.guint(component)))
}
// Flags returns the flags on this info.
func (f *FormatInfo) Flags() FormatFlags { return FormatFlags(C.formatInfoFlags(f.instance())) }

// Format returns the format for this info.
func (f *FormatInfo) Format() Format { return Format(C.formatInfoFormat(f.instance())) }

// HasAlpha returns true if the alpha flag (FormatFlagAlpha) is set.
func (f *FormatInfo) HasAlpha() bool { return gobool(C.formatInfoHasAlpha(f.instance())) }

// HasPalette returns true if this info has a palette (FormatFlagPalette).
func (f *FormatInfo) HasPalette() bool { return gobool(C.formatInfoHasPalette(f.instance())) }

// IsComplex returns true if the complex flag (FormatFlagComplex) is set.
func (f *FormatInfo) IsComplex() bool { return gobool(C.formatInfoIsComplex(f.instance())) }

// IsGray returns true if the gray flag (FormatFlagGray) is set.
func (f *FormatInfo) IsGray() bool { return gobool(C.formatInfoIsGray(f.instance())) }

// IsLE returns true if the little-endian flag (FormatFlagLE) is set.
func (f *FormatInfo) IsLE() bool { return gobool(C.formatInfoIsLE(f.instance())) }

// IsRGB returns true if the RGB flag (FormatFlagRGB) is set.
func (f *FormatInfo) IsRGB() bool { return gobool(C.formatInfoIsRGB(f.instance())) }

// IsTiled returns true if the tiled flag (FormatFlagTiled) is set.
func (f *FormatInfo) IsTiled() bool { return gobool(C.formatInfoIsTiled(f.instance())) }

// IsYUV returns true if the YUV flag (FormatFlagYUV) is set.
func (f *FormatInfo) IsYUV() bool { return gobool(C.formatInfoIsYUV(f.instance())) }

// Name returns a human readable name for this info.
func (f *FormatInfo) Name() string { return C.GoString(C.formatInfoName(f.instance())) }
// NumComponents returns the number of components in this info.
func (f *FormatInfo) NumComponents() uint { return uint(C.formatInfoNComponent(f.instance())) }

// NumPlanes returns the number of planes in this info.
func (f *FormatInfo) NumPlanes() uint { return uint(C.formatInfoNPlanes(f.instance())) }

// Plane returns the plane index for the given component
// (wraps GST_VIDEO_FORMAT_INFO_PLANE).
func (f *FormatInfo) Plane(n uint) uint { return uint(C.formatInfoPlane(f.instance(), C.guint(n))) }

// PlaneOffset returns the offset for the given component
// (wraps GST_VIDEO_FORMAT_INFO_POFFSET).
func (f *FormatInfo) PlaneOffset(n uint) uint {
	return uint(C.formatInfoPOffset(f.instance(), C.guint(n)))
}

// PlaneStride returns the stride for the given component
// (wraps GST_VIDEO_FORMAT_INFO_PSTRIDE).
func (f *FormatInfo) PlaneStride(n uint) uint {
	return uint(C.formatInfoPStride(f.instance(), C.guint(n)))
}

// TileHS returns the height of a tile, in bytes, represented as a shift.
func (f *FormatInfo) TileHS() uint { return uint(C.formatInfoTileHS(f.instance())) }

// TileMode returns the tiling mode.
func (f *FormatInfo) TileMode() TileMode { return TileMode(C.formatInfoTileMode(f.instance())) }

// TileWS returns the width of a tile, in bytes, represented as a shift.
func (f *FormatInfo) TileWS() uint { return uint(C.formatInfoTileWS(f.instance())) }

386
gst/video/gst_video_info.go Normal file
View File

@@ -0,0 +1,386 @@
package video
/*
#include <gst/video/video.h>
GstVideoChromaSite infoChromaSite (GstVideoInfo * info) { return GST_VIDEO_INFO_CHROMA_SITE(info); }
GstVideoColorimetry infoColorimetry (GstVideoInfo * info) { return GST_VIDEO_INFO_COLORIMETRY(info); }
gint infoFieldHeight (GstVideoInfo * info) { return GST_VIDEO_INFO_FIELD_HEIGHT(info); }
GstVideoFieldOrder infoFieldOrder (GstVideoInfo * info) { return GST_VIDEO_INFO_FIELD_ORDER(info); }
gint infoFieldRateN (GstVideoInfo * info) { return GST_VIDEO_INFO_FIELD_RATE_N(info); }
GstVideoFlags infoFlags (GstVideoInfo * info) { return GST_VIDEO_INFO_FLAGS(info); }
gboolean infoFlagIsSet (GstVideoInfo * info, GstVideoFlags flag) { return GST_VIDEO_INFO_FLAG_IS_SET(info, flag); }
void infoFlagSet (GstVideoInfo * info, GstVideoFlags flag) { GST_VIDEO_INFO_FLAG_SET(info, flag); }
void infoFlagUnset (GstVideoInfo * info, GstVideoFlags flag) { GST_VIDEO_INFO_FLAG_UNSET(info, flag); }
GstVideoFormat infoFormat (GstVideoInfo * info) { return GST_VIDEO_INFO_FORMAT(info); }
gint infoFPSd (GstVideoInfo * info) { return GST_VIDEO_INFO_FPS_D(info); }
gint infoFPSn (GstVideoInfo * info) { return GST_VIDEO_INFO_FPS_N(info); }
gboolean infoHasAlpha (GstVideoInfo * info) { return GST_VIDEO_INFO_HAS_ALPHA(info); }
gint infoHeight (GstVideoInfo * info) { return GST_VIDEO_INFO_HEIGHT(info); }
GstVideoInterlaceMode infoInterlaceMode (GstVideoInfo * info) { return GST_VIDEO_INFO_INTERLACE_MODE(info); }
gboolean infoIsGray (GstVideoInfo * info) { return GST_VIDEO_INFO_IS_GRAY(info); }
gboolean infoIsInterlaced (GstVideoInfo * info) { return GST_VIDEO_INFO_IS_INTERLACED(info); }
gboolean infoIsRGB (GstVideoInfo * info) { return GST_VIDEO_INFO_IS_RGB(info); }
gboolean infoIsYUV (GstVideoInfo * info) { return GST_VIDEO_INFO_IS_YUV(info); }
GstVideoMultiviewFlags infoMultiviewFlags (GstVideoInfo * info) { return GST_VIDEO_INFO_MULTIVIEW_FLAGS(info); }
GstVideoMultiviewMode infoMultiviewMode (GstVideoInfo * info) { return GST_VIDEO_INFO_MULTIVIEW_MODE(info); }
const gchar * infoName (GstVideoInfo * info) { return GST_VIDEO_INFO_NAME(info); }
guint infoNComponents (GstVideoInfo * info) { return GST_VIDEO_INFO_N_COMPONENTS(info); }
guint infoNPlanes (GstVideoInfo * info) { return GST_VIDEO_INFO_N_PLANES(info); }
gint infoPARd (GstVideoInfo * info) { return GST_VIDEO_INFO_PAR_D(info); }
gint infoPARn (GstVideoInfo * info) { return GST_VIDEO_INFO_PAR_N(info); }
gsize infoSize (GstVideoInfo * info) { return GST_VIDEO_INFO_SIZE(info); }
gint infoViews (GstVideoInfo * info) { return GST_VIDEO_INFO_VIEWS(info); }
gint infoWidth (GstVideoInfo * info) { return GST_VIDEO_INFO_WIDTH(info); }
*/
import "C"
import (
"runtime"
"unsafe"
"github.com/tinyzimmer/go-gst/gst"
)
// CapsFeatureFormatInterlaced is the name of the caps feature indicating that the stream is interlaced.
//
// Currently it is only used for video with 'interlace-mode=alternate' to ensure backwards compatibility
// for this new mode. In this mode each buffer carries a single field of interlaced video. BufferFlagTopField
// and BufferFlagBottomField indicate whether the buffer carries a top or bottom field. The order of
// buffers/fields in the stream and the timestamps on the buffers indicate the temporal order of the fields.
// Top and bottom fields are expected to alternate in this mode. The frame rate in the caps still signals the
// frame rate, so the notional field rate will be twice the frame rate from the caps.
const CapsFeatureFormatInterlaced string = C.GST_CAPS_FEATURE_FORMAT_INTERLACED
// FieldOrder is the field order of interlaced content. This is only valid for
// interlace-mode=interleaved and not interlace-mode=mixed. In the case of
// mixed or FieldOrderUnknown, the field order is signalled via buffer flags.
type FieldOrder int

// Type castings of the GstVideoFieldOrder values.
const (
	FieldOrderUnknown          FieldOrder = C.GST_VIDEO_FIELD_ORDER_UNKNOWN            // (0) unknown field order for interlaced content. The actual field order is signalled via buffer flags.
	FieldOrderTopFieldFirst    FieldOrder = C.GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST    // (1) top field is first
	FieldOrderBottomFieldFirst FieldOrder = C.GST_VIDEO_FIELD_ORDER_BOTTOM_FIELD_FIRST // (2) bottom field is first
)
// String returns a human-readable description of this field order.
func (f FieldOrder) String() string {
	return C.GoString(C.gst_video_field_order_to_string(C.GstVideoFieldOrder(f)))
}
// Flags represents extra video flags set on a video Info.
type Flags int

// Type castings of the GstVideoFlags values.
const (
	FlagNone               Flags = C.GST_VIDEO_FLAG_NONE                // (0) no flags
	FlagVariableFPS        Flags = C.GST_VIDEO_FLAG_VARIABLE_FPS        // (1) a variable fps is selected, fps_n and fps_d denote the maximum fps of the video
	FlagPremultipliedAlpha Flags = C.GST_VIDEO_FLAG_PREMULTIPLIED_ALPHA // (2) Each color has been scaled by the alpha value.
)
// InterlaceMode is the possible values describing the interlace mode of the stream.
type InterlaceMode int
// Type castings
const (
InterlaceModeProgressive InterlaceMode = C.GST_VIDEO_INTERLACE_MODE_PROGRESSIVE // (0) all frames are progressive
InterlaceModeInterleaved InterlaceMode = C.GST_VIDEO_INTERLACE_MODE_INTERLEAVED // (1) 2 fields are interleaved in one video frame. Extra buffer flags describe the field order.
InterlaceModeMixed InterlaceMode = C.GST_VIDEO_INTERLACE_MODE_MIXED // (2) frames contains both interlaced and progressive video, the buffer flags describe the frame and fields.
InterlaceModeFields InterlaceMode = C.GST_VIDEO_INTERLACE_MODE_FIELDS // (3) 2 fields are stored in one buffer, use the frame ID to get access to the required field. For multiview (the 'views' property > 1) the fields of view N can be found at frame ID (N * 2) and (N * 2) + 1. Each field has only half the amount of lines as noted in the height property. This mode requires multiple GstVideoMeta metadata to describe the fields.
InterlaceModeAlternate InterlaceMode = C.GST_VIDEO_INTERLACE_MODE_ALTERNATE // (4) 1 field is stored in one buffer, GST_VIDEO_BUFFER_FLAG_TF or GST_VIDEO_BUFFER_FLAG_BF indicates if the buffer is carrying the top or bottom field, respectively. The top and bottom buffers are expected to alternate in the pipeline, with this mode (Since: 1.16).
)
// String returns a human-readable description of this interlace mode.
func (i InterlaceMode) String() string {
	cStr := C.gst_video_interlace_mode_to_string(C.GstVideoInterlaceMode(i))
	return C.GoString(cStr)
}
// MultiviewFlags are used to indicate extra properties of a stereo/multiview stream beyond the frame layout
// and buffer mapping that is conveyed in the MultiviewMode.
type MultiviewFlags int
// Type castings
const (
MultiviewFlagsNone MultiviewFlags = C.GST_VIDEO_MULTIVIEW_FLAGS_NONE // (0) No flags
MultiviewFlagsRightViewFirst MultiviewFlags = C.GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST // (1) For stereo streams, the normal arrangement of left and right views is reversed.
MultiviewFlagsLeftFlipped MultiviewFlags = C.GST_VIDEO_MULTIVIEW_FLAGS_LEFT_FLIPPED // (2) The left view is vertically mirrored.
MultiviewFlagsLeftFlopped MultiviewFlags = C.GST_VIDEO_MULTIVIEW_FLAGS_LEFT_FLOPPED // (4) The left view is horizontally mirrored.
MultiviewFlagsRightFlipped MultiviewFlags = C.GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_FLIPPED // (8) The right view is vertically mirrored.
MultiviewFlagsRightFlopped MultiviewFlags = C.GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_FLOPPED // (16) The right view is horizontally mirrored.
MultiviewFlagsHalfAspect MultiviewFlags = C.GST_VIDEO_MULTIVIEW_FLAGS_HALF_ASPECT // (16384) For frame-packed multiview modes, indicates that the individual views have been encoded with half the true width or height and should be scaled back up for display. This flag is used for overriding input layout interpretation by adjusting pixel-aspect-ratio. For side-by-side, column interleaved or checkerboard packings, the pixel width will be doubled. For row interleaved and top-bottom encodings, pixel height will be doubled.
MultiviewFlagsMixedMono MultiviewFlags = C.GST_VIDEO_MULTIVIEW_FLAGS_MIXED_MONO // (32768) The video stream contains both mono and multiview portions, signalled on each buffer by the absence or presence of the GST_VIDEO_BUFFER_FLAG_MULTIPLE_VIEW buffer flag.
)
// MultiviewFramePacking represents the subset of MultiviewMode values that can be applied to any video frame
// without needing extra metadata. It can be used by elements that provide a property to override the multiview
// interpretation of a video stream when the video doesn't contain any markers.
//
// This enum is used (for example) on playbin, to re-interpret a played video stream as a stereoscopic video.
// The individual enum values are equivalent to and have the same value as the matching MultiviewMode.
type MultiviewFramePacking int
// Type castings
const (
MultiviewFramePackingNone MultiviewFramePacking = C.GST_VIDEO_MULTIVIEW_FRAME_PACKING_NONE // (-1) A special value indicating no frame packing info.
MultiviewFramePackingMono MultiviewFramePacking = C.GST_VIDEO_MULTIVIEW_FRAME_PACKING_MONO // (0) All frames are monoscopic.
MultiviewFramePackingLeft MultiviewFramePacking = C.GST_VIDEO_MULTIVIEW_FRAME_PACKING_LEFT // (1) All frames represent a left-eye view.
MultiviewFramePackingRight MultiviewFramePacking = C.GST_VIDEO_MULTIVIEW_FRAME_PACKING_RIGHT // (2) All frames represent a right-eye view.
MultiviewFramePackingSideBySide MultiviewFramePacking = C.GST_VIDEO_MULTIVIEW_FRAME_PACKING_SIDE_BY_SIDE // (3) Left and right eye views are provided in the left and right half of the frame respectively.
MultiviewFramePackingSideBySideQuincunx MultiviewFramePacking = C.GST_VIDEO_MULTIVIEW_FRAME_PACKING_SIDE_BY_SIDE_QUINCUNX // (4) Left and right eye views are provided in the left and right half of the frame, but have been sampled using quincunx method, with half-pixel offset between the 2 views.
MultiviewFramePackingColumnInterleaved MultiviewFramePacking = C.GST_VIDEO_MULTIVIEW_FRAME_PACKING_COLUMN_INTERLEAVED // (5) Alternating vertical columns of pixels represent the left and right eye view respectively.
MultiviewFramePackingRowInterleaved MultiviewFramePacking = C.GST_VIDEO_MULTIVIEW_FRAME_PACKING_ROW_INTERLEAVED // (6) Alternating horizontal rows of pixels represent the left and right eye view respectively.
MultiviewFramePackingTopBottom MultiviewFramePacking = C.GST_VIDEO_MULTIVIEW_FRAME_PACKING_TOP_BOTTOM // (7) The top half of the frame contains the left eye, and the bottom half the right eye.
MultiviewFramePackingCheckerboard MultiviewFramePacking = C.GST_VIDEO_MULTIVIEW_FRAME_PACKING_CHECKERBOARD // (8) Pixels are arranged with alternating pixels representing left and right eye views in a checkerboard fashion.
)
// MultiviewMode represents all possible stereoscopic 3D and multiview representations. In conjunction with
// MultiviewFlags, describes how multiview content is being transported in the stream.
type MultiviewMode int
// Type castings
const (
MultiviewModeNone MultiviewMode = C.GST_VIDEO_MULTIVIEW_MODE_NONE // (-1) A special value indicating no multiview information. Used in GstVideoInfo and other places to indicate that no specific multiview handling has been requested or provided. This value is never carried on caps.
MultiviewModeMono MultiviewMode = C.GST_VIDEO_MULTIVIEW_MODE_MONO // (0) All frames are monoscopic.
MultiviewModeLeft MultiviewMode = C.GST_VIDEO_MULTIVIEW_MODE_LEFT // (1) All frames represent a left-eye view.
MultiviewModeRight MultiviewMode = C.GST_VIDEO_MULTIVIEW_MODE_RIGHT // (2) All frames represent a right-eye view.
MultiviewModeSideBySide MultiviewMode = C.GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE // (3) Left and right eye views are provided in the left and right half of the frame respectively.
MultiviewModeSideBySideQuincunx MultiviewMode = C.GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE_QUINCUNX // (4) Left and right eye views are provided in the left and right half of the frame, but have been sampled using quincunx method, with half-pixel offset between the 2 views.
MultiviewModeColumnInterleaved MultiviewMode = C.GST_VIDEO_MULTIVIEW_MODE_COLUMN_INTERLEAVED // (5) Alternating vertical columns of pixels represent the left and right eye view respectively.
MultiviewModeRowInterleaved MultiviewMode = C.GST_VIDEO_MULTIVIEW_MODE_ROW_INTERLEAVED // (6) Alternating horizontal rows of pixels represent the left and right eye view respectively.
MultiviewModeTopBottom MultiviewMode = C.GST_VIDEO_MULTIVIEW_MODE_TOP_BOTTOM // (7) The top half of the frame contains the left eye, and the bottom half the right eye.
MultiviewModeCheckerboard MultiviewMode = C.GST_VIDEO_MULTIVIEW_MODE_CHECKERBOARD // (8) Pixels are arranged with alternating pixels representing left and right eye views in a checkerboard fashion.
MultiviewModeFrameByFrame MultiviewMode = C.GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME // (32) Left and right eye views are provided in separate frames alternately.
MultiviewModeMultiviewFrameByFrame MultiviewMode = C.GST_VIDEO_MULTIVIEW_MODE_MULTIVIEW_FRAME_BY_FRAME // (33) Multiple independent views are provided in separate frames in sequence. This method only applies to raw video buffers at the moment. Specific view identification is via the GstVideoMultiviewMeta and GstVideoMeta(s) on raw video buffers.
MultiviewModeSeparated MultiviewMode = C.GST_VIDEO_MULTIVIEW_MODE_SEPARATED // (34) Multiple views are provided as separate GstMemory framebuffers attached to each GstBuffer, described by the GstVideoMultiviewMeta and GstVideoMeta(s)
)
// Info describes image properties. This information can be filled in from GstCaps with
// InfoFromCaps. The information is also used to store the specific video info when mapping
// a video frame with FrameMap.
type Info struct {
	// ptr is the wrapped GstVideoInfo; when created via wrapInfo it is
	// released with gst_video_info_free by a finalizer.
	ptr *C.GstVideoInfo
}
// wrapInfo wraps the given GstVideoInfo and attaches a finalizer that
// releases it with gst_video_info_free once the Go wrapper is collected.
func wrapInfo(vinfo *C.GstVideoInfo) *Info {
	wrapped := &Info{ptr: vinfo}
	runtime.SetFinalizer(wrapped, func(i *Info) { C.gst_video_info_free(i.ptr) })
	return wrapped
}
// instance returns the underlying GstVideoInfo instance.
func (i *Info) instance() *C.GstVideoInfo { return i.ptr }

// NewInfo returns a new Info instance. You can populate it by chaining builders
// to this constructor. The underlying struct is allocated with
// gst_video_info_new and freed automatically when the wrapper is collected.
func NewInfo() *Info {
	return wrapInfo(C.gst_video_info_new())
}
// FromCaps parses the caps and updates this info.
//
// NOTE(review): the gboolean returned by gst_video_info_from_caps is ignored
// here, so a failed parse is silent and the info may be left unchanged —
// consider surfacing the result to callers.
func (i *Info) FromCaps(caps *gst.Caps) *Info {
	C.gst_video_info_from_caps(i.instance(), fromCoreCaps(caps))
	return i
}
// Convert converts among various gst.Format types. This function handles gst.FormatBytes, gst.FormatTime,
// and gst.FormatDefault. For raw video, gst.FormatDefault corresponds to video frames. This function can
// be used to handle pad queries of the type gst.QueryTypeConvert.
func (i *Info) Convert(srcFormat, destFormat gst.Format, srcValue int64) (out int64, ok bool) {
	var converted C.gint64
	res := C.gst_video_info_convert(
		i.instance(),
		C.GstFormat(srcFormat), C.gint64(srcValue),
		C.GstFormat(destFormat), &converted,
	)
	return int64(converted), gobool(res)
}
// IsEqual compares this info against the given one and reports whether the
// two GstVideoInfo structures are equal.
func (i *Info) IsEqual(info *Info) bool {
	equal := C.gst_video_info_is_equal(i.instance(), info.instance())
	return gobool(equal)
}
// ChromaSite returns the ChromaSite for this info.
func (i *Info) ChromaSite() ChromaSite {
	return ChromaSite(C.infoChromaSite(i.instance()))
}
// Colorimetry returns the colorimetry settings for this info.
func (i *Info) Colorimetry() *Colorimetry {
	return colorimetryFromInstance(C.infoColorimetry(i.instance()))
}
// FieldHeight returns the field height for this info.
func (i *Info) FieldHeight() int {
	return int(C.infoFieldHeight(i.instance()))
}
// FieldOrder returns the field order for this info.
func (i *Info) FieldOrder() FieldOrder {
	return FieldOrder(C.infoFieldOrder(i.instance()))
}
// FieldRateN returns the rate numerator depending on the interlace mode.
func (i *Info) FieldRateN() int {
	return int(C.infoFieldRateN(i.instance()))
}
// Flags returns the flags set on this info.
func (i *Info) Flags() Flags {
	return Flags(C.infoFlags(i.instance()))
}
// FlagIsSet reports whether all of the given flag(s) are set on the info.
func (i *Info) FlagIsSet(f Flags) bool {
	set := C.infoFlagIsSet(i.instance(), C.GstVideoFlags(f))
	return gobool(set)
}
// FlagSet sets the given flag(s) on the info. The info itself is returned
// for chaining builders.
func (i *Info) FlagSet(f Flags) *Info {
	flags := C.GstVideoFlags(f)
	C.infoFlagSet(i.instance(), flags)
	return i
}
// FlagUnset clears the given flag(s) on the info. The info itself is returned
// for chaining builders.
func (i *Info) FlagUnset(f Flags) *Info {
	flags := C.GstVideoFlags(f)
	C.infoFlagUnset(i.instance(), flags)
	return i
}
// Format returns the format for the info. You can call Info() on the return value
// to inspect the properties further.
func (i *Info) Format() Format {
	return Format(C.infoFormat(i.instance()))
}
// FPS returns the frames-per-second value for the info as a fraction.
func (i *Info) FPS() gst.GFraction {
	num := int(C.infoFPSn(i.instance()))
	denom := int(C.infoFPSd(i.instance()))
	return gst.Fraction(num, denom)
}
// HasAlpha returns true if the alpha flag is set on the format info.
func (i *Info) HasAlpha() bool {
	return gobool(C.infoHasAlpha(i.instance()))
}
// Height returns the height of the video.
func (i *Info) Height() int { return int(C.infoHeight(i.instance())) }
// InterlaceMode returns the interlace mode of this Info.
func (i *Info) InterlaceMode() InterlaceMode {
	return InterlaceMode(C.infoInterlaceMode(i.instance()))
}
// IsInterlaced returns true if the interlace mode is not Progressive.
func (i *Info) IsInterlaced() bool {
	interlaced := C.infoIsInterlaced(i.instance())
	return gobool(interlaced)
}
// IsGray returns true if the format is grayscale.
func (i *Info) IsGray() bool { return gobool(C.infoIsGray(i.instance())) }
// IsRGB returns true if the format is RGB.
func (i *Info) IsRGB() bool { return gobool(C.infoIsRGB(i.instance())) }
// IsYUV returns true if the format is YUV.
func (i *Info) IsYUV() bool { return gobool(C.infoIsYUV(i.instance())) }
// MultiviewFlags returns the MultiviewFlags on the info.
func (i *Info) MultiviewFlags() MultiviewFlags {
	return MultiviewFlags(C.infoMultiviewFlags(i.instance()))
}
// MultiviewMode returns the MultiviewMode on the info.
func (i *Info) MultiviewMode() MultiviewMode {
	return MultiviewMode(C.infoMultiviewMode(i.instance()))
}
// Name returns a human readable name for the info.
func (i *Info) Name() string {
	return C.GoString(C.infoName(i.instance()))
}
// NumComponents returns the number of components in the info.
func (i *Info) NumComponents() uint {
	return uint(C.infoNComponents(i.instance()))
}
// NumPlanes returns the number of planes in the info.
func (i *Info) NumPlanes() uint {
	return uint(C.infoNPlanes(i.instance()))
}
// PAR returns the pixel-aspect-ratio value for the info.
func (i *Info) PAR() gst.GFraction {
	return gst.Fraction(
		int(C.infoPARn(i.instance())),
		int(C.infoPARd(i.instance())),
	)
}
// Size returns the size of the info.
func (i *Info) Size() int64 {
	return int64(C.infoSize(i.instance()))
}
// Views returns the number of views.
func (i *Info) Views() int {
	return int(C.infoViews(i.instance()))
}
// Width returns the width of the video.
func (i *Info) Width() int { return int(C.infoWidth(i.instance())) }
// WithAlign adjusts the offset and stride fields in info so that the padding and stride alignment in
// align is respected. The info is returned for chaining builders.
//
// Extra padding will be added to the right side when stride alignment padding is required and align
// will be updated with the new padding values.
func (i *Info) WithAlign(align *Alignment) *Info {
	C.gst_video_info_align(i.instance(), align.instance())
	return i
}
// WithFormat sets the format on this info.
//
// Note: This initializes info first, no values are preserved. This function does not set the offsets
// correctly for interlaced vertically subsampled formats. If the format is invalid (e.g. because the
// size of a frame can't be represented as a 32 bit integer), nothing will happen. This is for
// convenience in chaining, but may be changed in the future.
func (i *Info) WithFormat(format Format, width, height uint) *Info {
	C.gst_video_info_set_format(i.instance(), C.GstVideoFormat(format), C.guint(width), C.guint(height))
	return i
}
// WithInterlacedFormat is the same as WithFormat but also allows setting the
// interlace mode. The info is returned for chaining builders.
func (i *Info) WithInterlacedFormat(format Format, interlaceMode InterlaceMode, width, height uint) *Info {
	vinfo := i.instance()
	C.gst_video_info_set_interlaced_format(
		vinfo,
		C.GstVideoFormat(format), C.GstVideoInterlaceMode(interlaceMode),
		C.guint(width), C.guint(height),
	)
	return i
}
// WithFPS sets the frames-per-second fraction on this info. The info is
// returned for chaining builders.
func (i *Info) WithFPS(f gst.GFraction) *Info {
	vinfo := i.instance()
	vinfo.fps_n = C.gint(f.Num())
	vinfo.fps_d = C.gint(f.Denom())
	return i
}
// WithPAR sets the pixel-aspect-ratio on this info.
func (i *Info) WithPAR(f gst.GFraction) *Info {
	i.instance().par_d = C.gint(f.Denom())
	i.instance().par_n = C.gint(f.Num())
	return i
}
// ToCaps returns the caps representation of this video info.
func (i *Info) ToCaps() *gst.Caps {
	return gst.FromGstCapsUnsafe(unsafe.Pointer(C.gst_video_info_to_caps(i.instance())))
}

View File

@@ -0,0 +1,24 @@
package video
/*
#include <gst/video/video.h>
*/
import "C"
// TileMode is an enum value describing the available tiling modes.
type TileMode int
// Type castings of the C GstVideoTileMode values.
const (
	TileModeUnknown TileMode = C.GST_VIDEO_TILE_MODE_UNKNOWN // (0) Unknown or unset tile mode
	TileModeZFlipZ2X2 TileMode = C.GST_VIDEO_TILE_MODE_ZFLIPZ_2X2 // (65536) Every four adjacent blocks - two horizontally and two vertically are grouped together and are located in memory in Z or flipped Z order. In case of odd rows, the last row of blocks is arranged in linear order.
	TileModeLinear TileMode = C.GST_VIDEO_TILE_MODE_LINEAR // (131072) Tiles are in row order.
)
// TileType is an enum value describing the most common tiling types.
type TileType int
// Type castings of the C GstVideoTileType values.
const (
	TileTypeIndexed TileType = C.GST_VIDEO_TILE_TYPE_INDEXED // (0) Tiles are indexed. Use gst_video_tile_get_index () to retrieve the tile at the requested coordinates.
)

View File

@@ -1,7 +1,8 @@
package video
/*
#cgo pkg-config: gstreamer-plugins-base-1.0 lgstvideo-1.0
#cgo pkg-config: gstreamer-plugins-base-1.0
#cgo CFLAGS: -Wno-deprecated-declarations
#cgo LDFLAGS: -lgstvideo-1.0
*/
import "C"