Added hardware filtering

Quentin Renard
2024-12-03 12:20:17 +01:00
parent 7900de7e96
commit 09052ff745
11 changed files with 586 additions and 373 deletions

View File

@@ -1,6 +1,7 @@
# v0.25.0
- `CodecParameters`.`CodecType` and `CodecParameters`.`SetCodecType` have been removed, use `CodecParameters`.`MediaType` and `CodecParameters`.`SetMediaType` instead
+- `HardwareFrameContext`.`SetPixelFormat` has been replaced with `HardwareFrameContext`.`SetHardwarePixelFormat`
# v0.24.0
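For readers upgrading, a minimal migration sketch for the two renames above; the helper and its arguments are illustrative, not part of the library, and stand in for values produced by existing demuxing / hardware setup code:

package sketch

import "github.com/asticode/go-astiav"

// migrateToV0250 is an illustrative helper: it only shows the two v0.25.0
// renames listed in the changelog entry above.
func migrateToV0250(cp *astiav.CodecParameters, hfc *astiav.HardwareFrameContext, pf astiav.PixelFormat) {
	// Before v0.25.0: cp.CodecType() / cp.SetCodecType(mt)
	mt := cp.MediaType()
	cp.SetMediaType(mt)

	// Before v0.25.0: hfc.SetPixelFormat(pf)
	hfc.SetHardwarePixelFormat(pf)
}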

View File

@@ -29,7 +29,7 @@ Examples are located in the [examples](examples) directory and mirror as much as
|Demuxing/Decoding|[see](examples/demuxing_decoding/main.go)|[see](https://github.com/FFmpeg/FFmpeg/blob/n7.0/doc/examples/demuxing_decoding.c)
|Filtering|[see](examples/filtering/main.go)|[see](https://github.com/FFmpeg/FFmpeg/blob/n7.0/doc/examples/filtering_video.c)
|Frame data manipulation|[see](examples/frame_data_manipulation/main.go)|X
-|Hardware Decoding|[see](examples/hardware_decoding/main.go)|[see](https://github.com/FFmpeg/FFmpeg/blob/n7.0/doc/examples/hw_decode.c)
+|Hardware Decoding/Filtering|[see](examples/hardware_decoding_filtering/main.go)|[see](https://github.com/FFmpeg/FFmpeg/blob/n7.0/doc/examples/hw_decode.c)
|Hardware Encoding|[see](examples/hardware_encoding/main.go)|[see](https://github.com/FFmpeg/FFmpeg/blob/n7.0/doc/examples/vaapi_encode.c)
|Remuxing|[see](examples/remuxing/main.go)|[see](https://github.com/FFmpeg/FFmpeg/blob/n7.0/doc/examples/remuxing.c)
|Resampling audio|[see](examples/resampling_audio/main.go)|[see](https://github.com/FFmpeg/FFmpeg/blob/n7.0/doc/examples/resample_audio.c)

View File

@@ -0,0 +1,87 @@
package astiav
//#include <libavfilter/buffersink.h>
import "C"
type BuffersinkFilterContext struct {
fc *FilterContext
}
func newBuffersinkFilterContext(fc *FilterContext) *BuffersinkFilterContext {
return &BuffersinkFilterContext{fc: fc}
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersink__accessors.html#gaad918036937648701c09f9612f42706e
func (bfc *BuffersinkFilterContext) ChannelLayout() ChannelLayout {
var cl C.AVChannelLayout
C.av_buffersink_get_ch_layout(bfc.fc.c, &cl)
return newChannelLayoutFromC(&cl)
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersink__accessors.html#gab80976e506ab88d23d94bb6d7a4051bd
func (bfc *BuffersinkFilterContext) ColorRange() ColorRange {
return ColorRange(C.av_buffersink_get_color_range(bfc.fc.c))
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersink__accessors.html#gaad817cdcf5493c385126e8e17c5717f2
func (bfc *BuffersinkFilterContext) ColorSpace() ColorSpace {
return ColorSpace(C.av_buffersink_get_colorspace(bfc.fc.c))
}
func (bfc *BuffersinkFilterContext) FilterContext() *FilterContext {
return bfc.fc
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersink__accessors.html#ga55614fd28de2fa05b04f427390061d5b
func (bfc *BuffersinkFilterContext) FrameRate() Rational {
return newRationalFromC(C.av_buffersink_get_frame_rate(bfc.fc.c))
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersink.html#ga71ae9c529c8da51681e12faa37d1a395
func (bfc *BuffersinkFilterContext) GetFrame(f *Frame, fs BuffersinkFlags) error {
var cf *C.AVFrame
if f != nil {
cf = f.c
}
return newError(C.av_buffersink_get_frame_flags(bfc.fc.c, cf, C.int(fs)))
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersink__accessors.html#ga955ecf3680e71e10429d7500343be25c
func (bfc *BuffersinkFilterContext) Height() int {
return int(C.av_buffersink_get_h(bfc.fc.c))
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersink__accessors.html#ga1eb8bbf583ffb7cc29aaa1944b1e699c
func (bfc *BuffersinkFilterContext) MediaType() MediaType {
return MediaType(C.av_buffersink_get_type(bfc.fc.c))
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersink__accessors.html#ga402ddbef6f7347869725696846ac81eb
func (bfc *BuffersinkFilterContext) PixelFormat() PixelFormat {
return PixelFormat(C.av_buffersink_get_format(bfc.fc.c))
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersink__accessors.html#gaa38ee33e1c7f6f7cb190bd2330e5f848
func (bfc *BuffersinkFilterContext) SampleAspectRatio() Rational {
return newRationalFromC(C.av_buffersink_get_sample_aspect_ratio(bfc.fc.c))
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersink__accessors.html#ga402ddbef6f7347869725696846ac81eb
func (bfc *BuffersinkFilterContext) SampleFormat() SampleFormat {
return SampleFormat(C.av_buffersink_get_format(bfc.fc.c))
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersink__accessors.html#ga2af714e82f48759551acdbc4488ded4a
func (bfc *BuffersinkFilterContext) SampleRate() int {
return int(C.av_buffersink_get_sample_rate(bfc.fc.c))
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersink__accessors.html#gabc82f65ec7f4fa47c5216260639258a1
func (bfc *BuffersinkFilterContext) TimeBase() Rational {
return newRationalFromC(C.av_buffersink_get_time_base(bfc.fc.c))
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersink__accessors.html#gac8c86515d2ef56090395dfd74854c835
func (bfc *BuffersinkFilterContext) Width() int {
return int(C.av_buffersink_get_w(bfc.fc.c))
}
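A usage sketch for this accessor type, assuming the buffersink context comes from FilterGraph.NewBuffersinkFilterContext and the frame from astiav.AllocFrame (as in the hardware example later in this commit): frames are pulled from the sink until it reports EAGAIN or EOF.

package sketch

import (
	"errors"
	"fmt"
	"log"

	"github.com/asticode/go-astiav"
)

// drainBuffersink is an illustrative helper: it pops every frame currently
// available on the buffersink and unreferences it after use.
func drainBuffersink(bfc *astiav.BuffersinkFilterContext, f *astiav.Frame) error {
	for {
		if err := bfc.GetFrame(f, astiav.NewBuffersinkFlags()); err != nil {
			if errors.Is(err, astiav.ErrEagain) || errors.Is(err, astiav.ErrEof) {
				return nil
			}
			return fmt.Errorf("getting frame failed: %w", err)
		}
		// Do something with the filtered frame before unreferencing it
		log.Printf("filtered frame: pts %d - output size %dx%d", f.Pts(), bfc.Width(), bfc.Height())
		f.Unref()
	}
}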

View File

@@ -0,0 +1,30 @@
package astiav
//#include <libavfilter/buffersrc.h>
import "C"
type BuffersrcFilterContext struct {
fc *FilterContext
}
func newBuffersrcFilterContext(fc *FilterContext) *BuffersrcFilterContext {
return &BuffersrcFilterContext{fc: fc}
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersrc.html#ga73ed90c3c3407f36e54d65f91faaaed9
func (bfc *BuffersrcFilterContext) AddFrame(f *Frame, fs BuffersrcFlags) error {
var cf *C.AVFrame
if f != nil {
cf = f.c
}
return newError(C.av_buffersrc_add_frame_flags(bfc.fc.c, cf, C.int(fs)))
}
func (bfc *BuffersrcFilterContext) FilterContext() *FilterContext {
return bfc.fc
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersrc.html#ga398cd2a84f8b4a588197ab9d90135048
func (bfc *BuffersrcFilterContext) SetParameters(bfcp *BuffersrcFilterContextParameters) error {
return newError(C.av_buffersrc_parameters_set(bfc.fc.c, bfcp.c))
}
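A corresponding sketch for the source side, assuming the context comes from FilterGraph.NewBuffersrcFilterContext as in the example below: the frame is pushed into the graph with BuffersrcFlagKeepRef so the caller keeps its own reference and can still unreference the frame afterwards.

package sketch

import (
	"fmt"

	"github.com/asticode/go-astiav"
)

// feedBuffersrc is an illustrative helper: it pushes a decoded frame into the
// filter graph while keeping the caller's reference on the frame.
func feedBuffersrc(bsc *astiav.BuffersrcFilterContext, f *astiav.Frame) error {
	if err := bsc.AddFrame(f, astiav.NewBuffersrcFlags(astiav.BuffersrcFlagKeepRef)); err != nil {
		return fmt.Errorf("adding frame failed: %w", err)
	}
	return nil
}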

View File

@@ -0,0 +1,43 @@
package astiav
//#include <libavfilter/buffersrc.h>
import "C"
import "unsafe"
// https://ffmpeg.org/doxygen/7.0/structAVBufferSrcParameters.html
type BuffersrcFilterContextParameters struct {
c *C.AVBufferSrcParameters
}
func newBuffersrcFilterContextParametersFromC(c *C.AVBufferSrcParameters) *BuffersrcFilterContextParameters {
if c == nil {
return nil
}
return &BuffersrcFilterContextParameters{c: c}
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersrc.html#gaae82d4f8a69757ce01421dd3167861a5
func AllocBuffersrcFilterContextParameters() *BuffersrcFilterContextParameters {
return newBuffersrcFilterContextParametersFromC(C.av_buffersrc_parameters_alloc())
}
func (bfcp *BuffersrcFilterContextParameters) Free() {
if bfcp.c != nil {
if bfcp.c.hw_frames_ctx != nil {
C.av_buffer_unref(&bfcp.c.hw_frames_ctx)
}
C.av_freep(unsafe.Pointer(&bfcp.c))
}
}
// https://ffmpeg.org/doxygen/7.0/structAVBufferSrcParameters.html#a86c49b4202433037c9e2b0b6ae541534
func (bfcp *BuffersrcFilterContextParameters) SetHardwareFrameContext(hfc *HardwareFrameContext) {
if bfcp.c.hw_frames_ctx != nil {
C.av_buffer_unref(&bfcp.c.hw_frames_ctx)
}
if hfc != nil {
bfcp.c.hw_frames_ctx = C.av_buffer_ref(hfc.c)
} else {
bfcp.c.hw_frames_ctx = nil
}
}
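These parameters are what make hardware filtering possible: the buffersrc must be told about the decoder's hardware frame context before the graph is configured. A sketch mirroring initFilter in the new example below, assuming an opened hardware decoder context and a buffersrc context from the same graph:

package sketch

import (
	"fmt"

	"github.com/asticode/go-astiav"
)

// setBuffersrcHardwareFrames is an illustrative helper: it forwards the
// decoder's hardware frame context to the buffersrc before the graph is configured.
func setBuffersrcHardwareFrames(bsc *astiav.BuffersrcFilterContext, decCodecContext *astiav.CodecContext) error {
	// Allocate the parameters and free them once they've been applied
	bfcp := astiav.AllocBuffersrcFilterContextParameters()
	defer bfcp.Free()

	// Attach the decoder's hardware frame context
	bfcp.SetHardwareFrameContext(decCodecContext.HardwareFrameContext())

	// Apply the parameters to the buffersrc filter context
	if err := bsc.SetParameters(bfcp); err != nil {
		return fmt.Errorf("setting buffersrc parameters failed: %w", err)
	}
	return nil
}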

View File

@@ -10,9 +10,6 @@ import (
// https://ffmpeg.org/doxygen/7.0/structAVCodecContext.html
type CodecContext struct {
c *C.AVCodecContext
-// We need to store this to unref it properly
-hdc *HardwareDeviceContext
-hfc *HardwareFrameContext
}
func newCodecContextFromC(c *C.AVCodecContext) *CodecContext {
@@ -37,15 +34,13 @@ func AllocCodecContext(c *Codec) *CodecContext {
// https://ffmpeg.org/doxygen/7.0/group__lavc__core.html#gaf869d0829ed607cec3a4a02a1c7026b3
func (cc *CodecContext) Free() {
-if cc.hdc != nil {
-C.av_buffer_unref(&cc.hdc.c)
-cc.hdc = nil
-}
-if cc.hfc != nil {
-C.av_buffer_unref(&cc.hfc.c)
-cc.hfc = nil
-}
if cc.c != nil {
+if cc.c.hw_device_ctx != nil {
+C.av_buffer_unref(&cc.c.hw_device_ctx)
+}
+if cc.c.hw_frames_ctx != nil {
+C.av_buffer_unref(&cc.c.hw_frames_ctx)
+}
// Make sure to clone the classer before freeing the object since
// the C free method may reset the pointer
c := newClonedClasser(cc)
@@ -370,12 +365,13 @@ func (cc *CodecContext) FromCodecParameters(cp *CodecParameters) error {
// https://ffmpeg.org/doxygen/7.0/structAVCodecContext.html#acf8113e490f9e7b57465e65af9c0c75c
func (cc *CodecContext) SetHardwareDeviceContext(hdc *HardwareDeviceContext) {
-if cc.hdc != nil {
-C.av_buffer_unref(&cc.hdc.c)
-}
-cc.hdc = hdc
-if cc.hdc != nil {
-cc.c.hw_device_ctx = C.av_buffer_ref(cc.hdc.c)
-}
+if cc.c.hw_device_ctx != nil {
+C.av_buffer_unref(&cc.c.hw_device_ctx)
+}
+if hdc != nil {
+cc.c.hw_device_ctx = C.av_buffer_ref(hdc.c)
+} else {
+cc.c.hw_device_ctx = nil
+}
}
@@ -386,12 +382,13 @@ func (cc *CodecContext) HardwareFrameContext() *HardwareFrameContext {
// https://ffmpeg.org/doxygen/7.0/structAVCodecContext.html#a3bac44bb0b016ab838780cc19ac277d6
func (cc *CodecContext) SetHardwareFrameContext(hfc *HardwareFrameContext) {
-if cc.hfc != nil {
-C.av_buffer_unref(&cc.hfc.c)
-}
-cc.hfc = hfc
-if cc.hfc != nil {
-cc.c.hw_frames_ctx = C.av_buffer_ref(cc.hfc.c)
-}
+if cc.c.hw_frames_ctx != nil {
+C.av_buffer_unref(&cc.c.hw_frames_ctx)
+}
+if hfc != nil {
+cc.c.hw_frames_ctx = C.av_buffer_ref(hfc.c)
+} else {
+cc.c.hw_frames_ctx = nil
+}
}
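With this change the Go wrapper no longer keeps hdc/hfc fields: the setters take their own reference directly on the underlying C struct and Free unrefs it, so the passed-in contexts can be freed independently by the caller. A sketch of the resulting call pattern, using only helpers that appear in the examples of this commit:

package sketch

import (
	"errors"
	"fmt"

	"github.com/asticode/go-astiav"
)

// newHardwareDecoderContext is an illustrative helper showing the new setter
// semantics: the codec context holds its own reference on the device context.
func newHardwareDecoderContext(codec *astiav.Codec, hdc *astiav.HardwareDeviceContext) (*astiav.CodecContext, error) {
	cc := astiav.AllocCodecContext(codec)
	if cc == nil {
		return nil, errors.New("codec context is nil")
	}

	// The codec context refs hdc internally; CodecContext.Free will unref it,
	// and hdc.Free can be deferred by the caller independently.
	cc.SetHardwareDeviceContext(hdc)

	if err := cc.Open(codec, nil); err != nil {
		cc.Free()
		return nil, fmt.Errorf("opening codec context failed: %w", err)
	}
	return cc, nil
}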

View File

@@ -1,242 +0,0 @@
package main
import (
"errors"
"flag"
"fmt"
"log"
"strings"
"github.com/asticode/go-astiav"
)
var (
decoderCodecName = flag.String("c", "", "the decoder codec name (e.g. h264_cuvid)")
hardwareDeviceName = flag.String("n", "", "the hardware device name (e.g. 0)")
hardwareDeviceTypeName = flag.String("t", "", "the hardware device type (e.g. cuda)")
input = flag.String("i", "", "the input path")
)
type stream struct {
decCodec *astiav.Codec
decCodecContext *astiav.CodecContext
hardwareDeviceContext *astiav.HardwareDeviceContext
hardwarePixelFormat astiav.PixelFormat
inputStream *astiav.Stream
}
func main() {
// Handle ffmpeg logs
astiav.SetLogLevel(astiav.LogLevelDebug)
astiav.SetLogCallback(func(c astiav.Classer, l astiav.LogLevel, fmt, msg string) {
var cs string
if c != nil {
if cl := c.Class(); cl != nil {
cs = " - class: " + cl.String()
}
}
log.Printf("ffmpeg log: %s%s - level: %d\n", strings.TrimSpace(msg), cs, l)
})
// Parse flags
flag.Parse()
// Usage
if *input == "" || *hardwareDeviceTypeName == "" {
log.Println("Usage: <binary path> -t <hardware device type> -i <input path> [-n <hardware device name> -c <decoder codec>]")
return
}
// Get hardware device type
hardwareDeviceType := astiav.FindHardwareDeviceTypeByName(*hardwareDeviceTypeName)
if hardwareDeviceType == astiav.HardwareDeviceTypeNone {
log.Fatal(errors.New("main: hardware device not found"))
}
// Allocate packet
pkt := astiav.AllocPacket()
defer pkt.Free()
// Allocate hardware frame
hardwareFrame := astiav.AllocFrame()
defer hardwareFrame.Free()
// Allocate software frame
softwareFrame := astiav.AllocFrame()
defer softwareFrame.Free()
// Allocate input format context
inputFormatContext := astiav.AllocFormatContext()
if inputFormatContext == nil {
log.Fatal(errors.New("main: input format context is nil"))
}
defer inputFormatContext.Free()
// Open input
if err := inputFormatContext.OpenInput(*input, nil, nil); err != nil {
log.Fatal(fmt.Errorf("main: opening input failed: %w", err))
}
defer inputFormatContext.CloseInput()
// Find stream info
if err := inputFormatContext.FindStreamInfo(nil); err != nil {
log.Fatal(fmt.Errorf("main: finding stream info failed: %w", err))
}
// Loop through streams
streams := make(map[int]*stream) // Indexed by input stream index
for _, is := range inputFormatContext.Streams() {
// Only process video
if is.CodecParameters().MediaType() != astiav.MediaTypeVideo {
continue
}
// Create stream
s := &stream{
inputStream: is,
hardwarePixelFormat: astiav.PixelFormatNone,
}
// Find decoder
if *decoderCodecName != "" {
s.decCodec = astiav.FindDecoderByName(*decoderCodecName)
} else {
s.decCodec = astiav.FindDecoder(is.CodecParameters().CodecID())
}
// No codec
if s.decCodec == nil {
log.Fatal(errors.New("main: codec is nil"))
}
// Allocate codec context
if s.decCodecContext = astiav.AllocCodecContext(s.decCodec); s.decCodecContext == nil {
log.Fatal(errors.New("main: codec context is nil"))
}
defer s.decCodecContext.Free()
// Loop through codec hardware configs
for _, p := range s.decCodec.HardwareConfigs() {
// Valid hardware config
if p.MethodFlags().Has(astiav.CodecHardwareConfigMethodFlagHwDeviceCtx) && p.HardwareDeviceType() == hardwareDeviceType {
s.hardwarePixelFormat = p.PixelFormat()
break
}
}
// No valid hardware pixel format
if s.hardwarePixelFormat == astiav.PixelFormatNone {
log.Fatal(errors.New("main: hardware device type not supported by decoder"))
}
// Update codec context
if err := is.CodecParameters().ToCodecContext(s.decCodecContext); err != nil {
log.Fatal(fmt.Errorf("main: updating codec context failed: %w", err))
}
// Create hardware device context
var err error
if s.hardwareDeviceContext, err = astiav.CreateHardwareDeviceContext(hardwareDeviceType, *hardwareDeviceName, nil, 0); err != nil {
log.Fatal(fmt.Errorf("main: creating hardware device context failed: %w", err))
}
defer s.hardwareDeviceContext.Free()
// Update decoder context
s.decCodecContext.SetHardwareDeviceContext(s.hardwareDeviceContext)
s.decCodecContext.SetPixelFormatCallback(func(pfs []astiav.PixelFormat) astiav.PixelFormat {
for _, pf := range pfs {
if pf == s.hardwarePixelFormat {
return pf
}
}
log.Fatal(errors.New("main: using hardware pixel format failed"))
return astiav.PixelFormatNone
})
// Open codec context
if err := s.decCodecContext.Open(s.decCodec, nil); err != nil {
log.Fatal(fmt.Errorf("main: opening codec context failed: %w", err))
}
// Add stream
streams[is.Index()] = s
}
// Loop through packets
for {
// We use a closure to ease unreferencing the packet
if stop := func() bool {
// Read frame
if err := inputFormatContext.ReadFrame(pkt); err != nil {
if errors.Is(err, astiav.ErrEof) {
return true
}
log.Fatal(fmt.Errorf("main: reading frame failed: %w", err))
}
// Make sure to unreference the packet
defer pkt.Unref()
// Get stream
s, ok := streams[pkt.StreamIndex()]
if !ok {
return false
}
// Send packet
if err := s.decCodecContext.SendPacket(pkt); err != nil {
log.Fatal(fmt.Errorf("main: sending packet failed: %w", err))
}
// Loop
for {
// We use a closure to ease unreferencing frames
if stop := func() bool {
// Receive frame
if err := s.decCodecContext.ReceiveFrame(hardwareFrame); err != nil {
if errors.Is(err, astiav.ErrEof) || errors.Is(err, astiav.ErrEagain) {
return true
}
log.Fatal(fmt.Errorf("main: receiving frame failed: %w", err))
}
// Make sure to unreference hardware frame
defer hardwareFrame.Unref()
// Get final frame
var finalFrame *astiav.Frame
if hardwareFrame.PixelFormat() == s.hardwarePixelFormat {
// Transfer hardware data
if err := hardwareFrame.TransferHardwareData(softwareFrame); err != nil {
log.Fatal(fmt.Errorf("main: transferring hardware data failed: %w", err))
}
// Make sure to unreference software frame
defer softwareFrame.Unref()
// Update pts
softwareFrame.SetPts(hardwareFrame.Pts())
// Update final frame
finalFrame = softwareFrame
} else {
// Update final frame
finalFrame = hardwareFrame
}
// Do something with decoded frame
log.Printf("new frame: stream %d - pts: %d - transferred: %v", pkt.StreamIndex(), finalFrame.Pts(), hardwareFrame.PixelFormat() == s.hardwarePixelFormat)
return false
}(); stop {
break
}
}
return false
}(); stop {
break
}
}
// Success
log.Println("success")
}

View File

@@ -0,0 +1,404 @@
package main
import (
"errors"
"flag"
"fmt"
"log"
"strconv"
"strings"
"github.com/asticode/go-astiav"
"github.com/asticode/go-astikit"
)
var (
decoderCodecName = flag.String("c", "", "the decoder codec name (e.g. h264_cuvid)")
filter = flag.String("f", "", "the hardware filter")
hardwareDeviceName = flag.String("n", "", "the hardware device name (e.g. 0)")
hardwareDeviceTypeName = flag.String("t", "", "the hardware device type (e.g. cuda)")
input = flag.String("i", "", "the input path")
)
var (
buffersinkContext *astiav.BuffersinkFilterContext
buffersrcContext *astiav.BuffersrcFilterContext
c = astikit.NewCloser()
decCodec *astiav.Codec
decCodecContext *astiav.CodecContext
decodedHardwareFrame *astiav.Frame
filterGraph *astiav.FilterGraph
filteredHardwareFrame *astiav.Frame
inputStream *astiav.Stream
softwareFrame *astiav.Frame
)
func main() {
// Handle ffmpeg logs
astiav.SetLogLevel(astiav.LogLevelDebug)
astiav.SetLogCallback(func(c astiav.Classer, l astiav.LogLevel, fmt, msg string) {
var cs string
if c != nil {
if cl := c.Class(); cl != nil {
cs = " - class: " + cl.String()
}
}
log.Printf("ffmpeg log: %s%s - level: %d\n", strings.TrimSpace(msg), cs, l)
})
// Parse flags
flag.Parse()
// Usage
if *input == "" || *hardwareDeviceTypeName == "" {
log.Println("Usage: <binary path> -t <hardware device type> -i <input path> [-n <hardware device name> -c <decoder codec> -f <hardware filter>]")
return
}
// We use an astikit.Closer to free all resources properly
defer c.Close()
// Get hardware device type
hardwareDeviceType := astiav.FindHardwareDeviceTypeByName(*hardwareDeviceTypeName)
if hardwareDeviceType == astiav.HardwareDeviceTypeNone {
log.Fatal(errors.New("main: hardware device not found"))
}
// Allocate packet
pkt := astiav.AllocPacket()
c.Add(pkt.Free)
// Allocate decoded hardware frame
decodedHardwareFrame = astiav.AllocFrame()
c.Add(decodedHardwareFrame.Free)
// Allocate software frame
softwareFrame = astiav.AllocFrame()
c.Add(softwareFrame.Free)
// Allocate input format context
inputFormatContext := astiav.AllocFormatContext()
if inputFormatContext == nil {
log.Fatal(errors.New("main: input format context is nil"))
}
c.Add(inputFormatContext.Free)
// Open input
if err := inputFormatContext.OpenInput(*input, nil, nil); err != nil {
log.Fatal(fmt.Errorf("main: opening input failed: %w", err))
}
c.Add(inputFormatContext.CloseInput)
// Find stream info
if err := inputFormatContext.FindStreamInfo(nil); err != nil {
log.Fatal(fmt.Errorf("main: finding stream info failed: %w", err))
}
// Loop through streams
var hdc *astiav.HardwareDeviceContext
hardwarePixelFormat := astiav.PixelFormatNone
for _, is := range inputFormatContext.Streams() {
// Only process video
if is.CodecParameters().MediaType() != astiav.MediaTypeVideo {
continue
}
// Update input stream
inputStream = is
// Find decoder
decCodec = astiav.FindDecoder(is.CodecParameters().CodecID())
if *decoderCodecName != "" {
decCodec = astiav.FindDecoderByName(*decoderCodecName)
}
// No codec
if decCodec == nil {
log.Fatal(errors.New("main: codec is nil"))
}
// Allocate codec context
if decCodecContext = astiav.AllocCodecContext(decCodec); decCodecContext == nil {
log.Fatal(errors.New("main: codec context is nil"))
}
c.Add(decCodecContext.Free)
// Loop through codec hardware configs
for _, p := range decCodec.HardwareConfigs() {
// Valid hardware config
if p.MethodFlags().Has(astiav.CodecHardwareConfigMethodFlagHwDeviceCtx) && p.HardwareDeviceType() == hardwareDeviceType {
hardwarePixelFormat = p.PixelFormat()
break
}
}
// No valid hardware pixel format
if hardwarePixelFormat == astiav.PixelFormatNone {
log.Fatal(errors.New("main: hardware device type not supported by decoder"))
}
// Update codec context
if err := is.CodecParameters().ToCodecContext(decCodecContext); err != nil {
log.Fatal(fmt.Errorf("main: updating codec context failed: %w", err))
}
// Create hardware device context
var err error
if hdc, err = astiav.CreateHardwareDeviceContext(hardwareDeviceType, *hardwareDeviceName, nil, 0); err != nil {
log.Fatal(fmt.Errorf("main: creating hardware device context failed: %w", err))
}
c.Add(hdc.Free)
// Update decoder context
decCodecContext.SetHardwareDeviceContext(hdc)
decCodecContext.SetPixelFormatCallback(func(pfs []astiav.PixelFormat) astiav.PixelFormat {
for _, pf := range pfs {
if pf == hardwarePixelFormat {
return pf
}
}
log.Fatal(errors.New("main: using hardware pixel format failed"))
return astiav.PixelFormatNone
})
// Open codec context
if err := decCodecContext.Open(decCodec, nil); err != nil {
log.Fatal(fmt.Errorf("main: opening codec context failed: %w", err))
}
break
}
// No video stream
if inputStream == nil {
log.Fatal("main: no video stream found")
}
// Loop through packets
for {
// We use a closure to ease unreferencing the packet
if stop := func() bool {
// Read frame
if err := inputFormatContext.ReadFrame(pkt); err != nil {
if errors.Is(err, astiav.ErrEof) {
return true
}
log.Fatal(fmt.Errorf("main: reading frame failed: %w", err))
}
// Make sure to unreference the packet
defer pkt.Unref()
// Invalid stream
if pkt.StreamIndex() != inputStream.Index() {
return false
}
// Send packet
if err := decCodecContext.SendPacket(pkt); err != nil {
log.Fatal(fmt.Errorf("main: sending packet failed: %w", err))
}
// Loop
for {
// We use a closure to ease unreferencing frames
if stop := func() bool {
// Receive frame
if err := decCodecContext.ReceiveFrame(decodedHardwareFrame); err != nil {
if errors.Is(err, astiav.ErrEof) || errors.Is(err, astiav.ErrEagain) {
return true
}
log.Fatal(fmt.Errorf("main: receiving frame failed: %w", err))
}
// Make sure to unreference hardware frame
defer decodedHardwareFrame.Unref()
// Invalid pixel format
if decodedHardwareFrame.PixelFormat() != hardwarePixelFormat {
log.Fatalf("main: invalid decoded pixel format %s, expected %s", decodedHardwareFrame.PixelFormat(), hardwarePixelFormat)
}
// No filter requested
if *filter == "" {
// Do something with hardware frame
if err := doSomethingWithHardwareFrame(decodedHardwareFrame); err != nil {
log.Fatal(fmt.Errorf("main: doing something with hardware frame failed: %w", err))
}
return false
}
// Make sure the filter is initialized
// We need to wait for the first frame to be decoded before initializing the filter
// since the decoder codec context doesn't have a valid hardware frame context until then
if filterGraph == nil {
if err := initFilter(); err != nil {
log.Fatal(fmt.Errorf("main: initializing filter failed: %w", err))
}
}
// Filter frame
if err := filterFrame(); err != nil {
log.Fatal(fmt.Errorf("main: filtering frame failed: %w", err))
}
return false
}(); stop {
break
}
}
return false
}(); stop {
break
}
}
// Success
log.Println("success")
}
func initFilter() (err error) {
// Allocate graph
if filterGraph = astiav.AllocFilterGraph(); filterGraph == nil {
err = errors.New("main: graph is nil")
return
}
c.Add(filterGraph.Free)
// Allocate outputs
outputs := astiav.AllocFilterInOut()
if outputs == nil {
err = errors.New("main: outputs is nil")
return
}
c.Add(outputs.Free)
// Allocate inputs
inputs := astiav.AllocFilterInOut()
if inputs == nil {
err = errors.New("main: inputs is nil")
return
}
c.Add(inputs.Free)
// Create buffersrc
buffersrc := astiav.FindFilterByName("buffer")
if buffersrc == nil {
err = errors.New("main: buffersrc is nil")
return
}
// Create buffersink
buffersink := astiav.FindFilterByName("buffersink")
if buffersink == nil {
err = errors.New("main: buffersink is nil")
return
}
// Create filter contexts
if buffersrcContext, err = filterGraph.NewBuffersrcFilterContext(buffersrc, "in", astiav.FilterArgs{
"pix_fmt": strconv.Itoa(int(decCodecContext.PixelFormat())),
"pixel_aspect": decCodecContext.SampleAspectRatio().String(),
"time_base": inputStream.TimeBase().String(),
"video_size": strconv.Itoa(decCodecContext.Width()) + "x" + strconv.Itoa(decCodecContext.Height()),
}); err != nil {
err = fmt.Errorf("main: creating buffersrc context failed: %w", err)
return
}
if buffersinkContext, err = filterGraph.NewBuffersinkFilterContext(buffersink, "in", nil); err != nil {
err = fmt.Errorf("main: creating buffersink context failed: %w", err)
return
}
// Create buffersrc parameters
bfcp := astiav.AllocBuffersrcFilterContextParameters()
defer bfcp.Free()
bfcp.SetHardwareFrameContext(decCodecContext.HardwareFrameContext())
// Set buffersrc parameters
if err = buffersrcContext.SetParameters(bfcp); err != nil {
err = fmt.Errorf("main: setting buffersrc parameters failed: %w", err)
return
}
// Update outputs
outputs.SetName("in")
outputs.SetFilterContext(buffersrcContext.FilterContext())
outputs.SetPadIdx(0)
outputs.SetNext(nil)
// Update inputs
inputs.SetName("out")
inputs.SetFilterContext(buffersinkContext.FilterContext())
inputs.SetPadIdx(0)
inputs.SetNext(nil)
// Parse
if err = filterGraph.Parse(*filter, inputs, outputs); err != nil {
err = fmt.Errorf("main: parsing filter failed: %w", err)
return
}
// Configure
if err = filterGraph.Configure(); err != nil {
err = fmt.Errorf("main: configuring filter failed: %w", err)
return
}
// Allocate frame
filteredHardwareFrame = astiav.AllocFrame()
c.Add(filteredHardwareFrame.Free)
return
}
func filterFrame() (err error) {
// Add frame
if err = buffersrcContext.AddFrame(decodedHardwareFrame, astiav.NewBuffersrcFlags(astiav.BuffersrcFlagKeepRef)); err != nil {
err = fmt.Errorf("main: adding frame failed: %w", err)
return
}
// Loop
for {
// We use a closure to ease unreferencing the frame
if stop, err := func() (bool, error) {
// Get frame
if err := buffersinkContext.GetFrame(filteredHardwareFrame, astiav.NewBuffersinkFlags()); err != nil {
if errors.Is(err, astiav.ErrEof) || errors.Is(err, astiav.ErrEagain) {
return true, nil
}
return false, fmt.Errorf("main: getting frame failed: %w", err)
}
// Make sure to unreference the frame
defer filteredHardwareFrame.Unref()
// Do something with hardware frame
if err := doSomethingWithHardwareFrame(filteredHardwareFrame); err != nil {
return false, fmt.Errorf("main: doing something with hardware frame failed: %w", err)
}
return false, nil
}(); err != nil {
return err
} else if stop {
break
}
}
return
}
func doSomethingWithHardwareFrame(hardwareFrame *astiav.Frame) error {
// Transfer hardware data
if err := hardwareFrame.TransferHardwareData(softwareFrame); err != nil {
return fmt.Errorf("main: transferring hardware data failed: %w", err)
}
// Make sure to unreference software frame
defer softwareFrame.Unref()
// Update pts
softwareFrame.SetPts(hardwareFrame.Pts())
// Do something with software frame
log.Printf("new software frame: pts: %d", softwareFrame.Pts())
return nil
}

View File

@@ -95,7 +95,7 @@ func main() {
}
// Set hardware frame content
-hardwareFrameContext.SetPixelFormat(hardwarePixelFormat)
+hardwareFrameContext.SetHardwarePixelFormat(hardwarePixelFormat)
hardwareFrameContext.SetSoftwarePixelFormat(softwarePixelFormat)
hardwareFrameContext.SetWidth(*width)
hardwareFrameContext.SetHeight(*height)

View File

@@ -1,9 +1,6 @@
package astiav
//#include <libavfilter/avfilter.h>
-//#include <libavfilter/buffersink.h>
-//#include <libavfilter/buffersrc.h>
-//#include <libavutil/frame.h>
import "C"
import (
"unsafe"
@@ -42,107 +39,3 @@ func (fc *FilterContext) Free() {
func (fc *FilterContext) Class() *Class {
return newClassFromC(unsafe.Pointer(fc.c))
}
type BuffersinkFilterContext struct {
fc *FilterContext
}
func newBuffersinkFilterContext(fc *FilterContext) *BuffersinkFilterContext {
return &BuffersinkFilterContext{fc: fc}
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersink__accessors.html#gaad918036937648701c09f9612f42706e
func (bfc *BuffersinkFilterContext) ChannelLayout() ChannelLayout {
var cl C.AVChannelLayout
C.av_buffersink_get_ch_layout(bfc.fc.c, &cl)
return newChannelLayoutFromC(&cl)
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersink__accessors.html#gab80976e506ab88d23d94bb6d7a4051bd
func (bfc *BuffersinkFilterContext) ColorRange() ColorRange {
return ColorRange(C.av_buffersink_get_color_range(bfc.fc.c))
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersink__accessors.html#gaad817cdcf5493c385126e8e17c5717f2
func (bfc *BuffersinkFilterContext) ColorSpace() ColorSpace {
return ColorSpace(C.av_buffersink_get_colorspace(bfc.fc.c))
}
func (bfc *BuffersinkFilterContext) FilterContext() *FilterContext {
return bfc.fc
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersink__accessors.html#ga55614fd28de2fa05b04f427390061d5b
func (bfc *BuffersinkFilterContext) FrameRate() Rational {
return newRationalFromC(C.av_buffersink_get_frame_rate(bfc.fc.c))
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersink.html#ga71ae9c529c8da51681e12faa37d1a395
func (bfc *BuffersinkFilterContext) GetFrame(f *Frame, fs BuffersinkFlags) error {
var cf *C.AVFrame
if f != nil {
cf = f.c
}
return newError(C.av_buffersink_get_frame_flags(bfc.fc.c, cf, C.int(fs)))
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersink__accessors.html#ga955ecf3680e71e10429d7500343be25c
func (bfc *BuffersinkFilterContext) Height() int {
return int(C.av_buffersink_get_h(bfc.fc.c))
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersink__accessors.html#ga1eb8bbf583ffb7cc29aaa1944b1e699c
func (bfc *BuffersinkFilterContext) MediaType() MediaType {
return MediaType(C.av_buffersink_get_type(bfc.fc.c))
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersink__accessors.html#ga402ddbef6f7347869725696846ac81eb
func (bfc *BuffersinkFilterContext) PixelFormat() PixelFormat {
return PixelFormat(C.av_buffersink_get_format(bfc.fc.c))
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersink__accessors.html#gaa38ee33e1c7f6f7cb190bd2330e5f848
func (bfc *BuffersinkFilterContext) SampleAspectRatio() Rational {
return newRationalFromC(C.av_buffersink_get_sample_aspect_ratio(bfc.fc.c))
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersink__accessors.html#ga402ddbef6f7347869725696846ac81eb
func (bfc *BuffersinkFilterContext) SampleFormat() SampleFormat {
return SampleFormat(C.av_buffersink_get_format(bfc.fc.c))
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersink__accessors.html#ga2af714e82f48759551acdbc4488ded4a
func (bfc *BuffersinkFilterContext) SampleRate() int {
return int(C.av_buffersink_get_sample_rate(bfc.fc.c))
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersink__accessors.html#gabc82f65ec7f4fa47c5216260639258a1
func (bfc *BuffersinkFilterContext) TimeBase() Rational {
return newRationalFromC(C.av_buffersink_get_time_base(bfc.fc.c))
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersink__accessors.html#gac8c86515d2ef56090395dfd74854c835
func (bfc *BuffersinkFilterContext) Width() int {
return int(C.av_buffersink_get_w(bfc.fc.c))
}
type BuffersrcFilterContext struct {
fc *FilterContext
}
func newBuffersrcFilterContext(fc *FilterContext) *BuffersrcFilterContext {
return &BuffersrcFilterContext{fc: fc}
}
// https://ffmpeg.org/doxygen/7.0/group__lavfi__buffersrc.html#ga73ed90c3c3407f36e54d65f91faaaed9
func (bfc *BuffersrcFilterContext) AddFrame(f *Frame, fs BuffersrcFlags) error {
var cf *C.AVFrame
if f != nil {
cf = f.c
}
return newError(C.av_buffersrc_add_frame_flags(bfc.fc.c, cf, C.int(fs)))
}
func (bfc *BuffersrcFilterContext) FilterContext() *FilterContext {
return bfc.fc
}

View File

@@ -44,7 +44,7 @@ func (hfc *HardwareFrameContext) SetHeight(height int) {
}
// https://ffmpeg.org/doxygen/7.0/structAVHWFramesContext.html#a045bc1713932804f6ceef170a5578e0e
-func (hfc *HardwareFrameContext) SetPixelFormat(format PixelFormat) {
+func (hfc *HardwareFrameContext) SetHardwarePixelFormat(format PixelFormat) {
hfc.data().format = C.enum_AVPixelFormat(format)
}