mirror of
https://github.com/qrtc/ffmpeg-dev-go.git
synced 2025-10-12 19:10:03 +08:00
2023-10-29 08:11:29 CST W44D0
This commit is contained in:
@@ -2,6 +2,7 @@ package main
|
||||
|
||||
/*
|
||||
#include <stdlib.h>
|
||||
#include <stdint.h>
|
||||
|
||||
int readPacket(void* opaque, uint8_t *buf, int bufSize);
|
||||
|
||||
|
@@ -6,7 +6,7 @@ import (
|
||||
"syscall"
|
||||
"unsafe"
|
||||
|
||||
"github.com/qrtc/ffmpeg-dev-go"
|
||||
ffmpeg "github.com/qrtc/ffmpeg-dev-go"
|
||||
)
|
||||
|
||||
var (
|
||||
@@ -151,7 +151,7 @@ func initFilters(decCtx *ffmpeg.AVCodecContext, fmtCtx *ffmpeg.AVFormatContext,
|
||||
args = ffmpeg.AvGetChannelLayoutString(-1, outlink.GetChannelLayout())
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_INFO, "Output: srate:%dHz fmt:%s chlayout:%s\n",
|
||||
outlink.GetSampleRate(),
|
||||
ffmpeg.AvGetSampleFmtName(outlink.GetFormat()),
|
||||
ffmpeg.AvStringIfNull(ffmpeg.AvGetSampleFmtName(outlink.GetFormat()), "?"),
|
||||
args)
|
||||
|
||||
end:
|
||||
|
@@ -1,22 +1,5 @@
|
||||
package main
|
||||
|
||||
/*
|
||||
#include <stdlib.h>
|
||||
#include <stdio.h>
|
||||
|
||||
static void putcs(uint16_t *p, int n)
|
||||
{
|
||||
const uint16_t *p_end = p + n;
|
||||
while (p < p_end) {
|
||||
fputc(*p & 0xff, stdout);
|
||||
fputc(*p>>8 & 0xff, stdout);
|
||||
p++;
|
||||
}
|
||||
fflush(stdout);
|
||||
}
|
||||
*/
|
||||
import "C"
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
@@ -24,7 +7,7 @@ import (
|
||||
"time"
|
||||
"unsafe"
|
||||
|
||||
"github.com/qrtc/ffmpeg-dev-go"
|
||||
ffmpeg "github.com/qrtc/ffmpeg-dev-go"
|
||||
)
|
||||
|
||||
var (
|
||||
|
@@ -47,7 +47,7 @@ func processClient(client *ffmpeg.AVIOContext, inUri string) {
|
||||
ffmpeg.AvLog(client, ffmpeg.AV_LOG_TRACE, "resource: %s", resourceStr)
|
||||
replyCode = ffmpeg.AVERROR_HTTP_NOT_FOUND
|
||||
}
|
||||
if ret = ffmpeg.AvOptSetInt(client, "reply_code", int64(replyCode), ffmpeg.AV_OPT_SEARCH_CHILDREN); ret < 0 {
|
||||
if ret = ffmpeg.AvOptSetInt(client, "reply_code", replyCode, ffmpeg.AV_OPT_SEARCH_CHILDREN); ret < 0 {
|
||||
ffmpeg.AvLog(client, ffmpeg.AV_LOG_ERROR, "Failed to set reply_code: %s.\n", ffmpeg.AvErr2str(ret))
|
||||
goto end
|
||||
}
|
||||
|
@@ -40,7 +40,7 @@ import (
|
||||
"syscall"
|
||||
"unsafe"
|
||||
|
||||
"github.com/qrtc/ffmpeg-dev-go"
|
||||
ffmpeg "github.com/qrtc/ffmpeg-dev-go"
|
||||
)
|
||||
|
||||
var (
|
||||
|
@@ -1,5 +1,567 @@
|
||||
package main
|
||||
|
||||
func main() {
|
||||
import (
|
||||
"fmt"
|
||||
"math"
|
||||
"os"
|
||||
"syscall"
|
||||
"unsafe"
|
||||
|
||||
ffmpeg "github.com/qrtc/ffmpeg-dev-go"
|
||||
)
|
||||
|
||||
const (
|
||||
STREAM_DURATION = 10.0
|
||||
STREAM_FRAME_RATE = 25 // 25 images/s
|
||||
STREAM_PIX_FMT = ffmpeg.AV_PIX_FMT_YUV420P //default pix_fmt
|
||||
SCALE_FLAGS = ffmpeg.SWS_BICUBIC
|
||||
)
|
||||
|
||||
type outputStream struct {
|
||||
st *ffmpeg.AVStream
|
||||
enc *ffmpeg.AVCodecContext
|
||||
|
||||
// pts of the next frame that will be generated
|
||||
nextPts int64
|
||||
samplesCount int32
|
||||
frame *ffmpeg.AVFrame
|
||||
tmpFrame *ffmpeg.AVFrame
|
||||
|
||||
t, tincr, tincr2 float32
|
||||
|
||||
swsCtx *ffmpeg.SwsContext
|
||||
swrCtx *ffmpeg.SwrContext
|
||||
}
|
||||
|
||||
func logPacket(fmtCtx *ffmpeg.AVFormatContext, pkt *ffmpeg.AVPacket) {
|
||||
timeBase := fmtCtx.GetStreams()[pkt.GetStreamIndex()].GetTimeBaseAddr()
|
||||
fmt.Fprintf(os.Stdout, "pts:%s pts_time:%s dts:%s dts_time:%s duration:%s duration_time:%s stream_index:%d\n",
|
||||
ffmpeg.AvTs2str(pkt.GetPts()), ffmpeg.AvTs2timestr(pkt.GetPts(), timeBase),
|
||||
ffmpeg.AvTs2str(pkt.GetDts()), ffmpeg.AvTs2timestr(pkt.GetDts(), timeBase),
|
||||
ffmpeg.AvTs2str(pkt.GetDuration()), ffmpeg.AvTs2timestr(pkt.GetDuration(), timeBase),
|
||||
pkt.GetStreamIndex())
|
||||
}
|
||||
|
||||
func writeFrame(fmtCtx *ffmpeg.AVFormatContext, c *ffmpeg.AVCodecContext, st *ffmpeg.AVStream, frame *ffmpeg.AVFrame) bool {
|
||||
var (
|
||||
ret int32
|
||||
)
|
||||
// send the frame to the encoder
|
||||
if ret = ffmpeg.AvCodecSendFrame(c, frame); ret < 0 {
|
||||
fmt.Fprintf(os.Stderr, "Error sending a frame to the encoder: %s\n",
|
||||
ffmpeg.AvErr2str(ret))
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
for ret >= 0 {
|
||||
var pkt ffmpeg.AVPacket
|
||||
|
||||
ret = ffmpeg.AvCodecReceivePacket(c, &pkt)
|
||||
if ret == ffmpeg.AVERROR(syscall.EAGAIN) || ret == ffmpeg.AVERROR_EOF {
|
||||
break
|
||||
} else if ret < 0 {
|
||||
fmt.Fprintf(os.Stderr, "Error encoding a frame: %s\n", ffmpeg.AvErr2str(ret))
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// rescale output packet timestamp values from codec to stream timebase
|
||||
ffmpeg.AvPacketRescaleTs(&pkt, c.GetTimeBase(), st.GetTimeBase())
|
||||
pkt.SetStreamIndex(st.GetIndex())
|
||||
|
||||
// Write the compressed frame to the media file.
|
||||
logPacket(fmtCtx, &pkt)
|
||||
ret = ffmpeg.AvInterleavedWriteFrame(fmtCtx, &pkt)
|
||||
ffmpeg.AvPacketUnref(&pkt)
|
||||
if ret < 0 {
|
||||
fmt.Fprintf(os.Stderr, "Error while writing output packet: %s\n", ffmpeg.AvErr2str(ret))
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
return ret == ffmpeg.AVERROR_EOF
|
||||
}
|
||||
|
||||
// Add an output stream.
|
||||
func addStream(ost *outputStream, oc *ffmpeg.AVFormatContext, codecId ffmpeg.AVCodecID) (codec *ffmpeg.AVCodec) {
|
||||
var (
|
||||
c *ffmpeg.AVCodecContext
|
||||
)
|
||||
if codec = ffmpeg.AvCodecFindEncoder(codecId); codec == nil {
|
||||
fmt.Fprintf(os.Stderr, "Could not find encoder for '%s'\n", ffmpeg.AvCodecGetName(codecId))
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
if ost.st = ffmpeg.AvFormatNewStream(oc, nil); ost.st == nil {
|
||||
fmt.Fprintf(os.Stderr, "Could not allocate stream\n")
|
||||
os.Exit(1)
|
||||
}
|
||||
ost.st.SetId(int32(oc.GetNbStreams() - 1))
|
||||
if c = ffmpeg.AvCodecAllocContext3(codec); c == nil {
|
||||
fmt.Fprintf(os.Stderr, "Could not alloc an encoding context\n")
|
||||
os.Exit(1)
|
||||
}
|
||||
ost.enc = c
|
||||
|
||||
switch codec.GetType() {
|
||||
case ffmpeg.AVMEDIA_TYPE_AUDIO:
|
||||
c.SetSampleFmt(ffmpeg.CondExpr(len(codec.GetSampleFmts()) > 0,
|
||||
codec.GetSampleFmts()[0], ffmpeg.AV_SAMPLE_FMT_FLTP))
|
||||
c.SetBitRate(64_000)
|
||||
c.SetSampleRate(44_100)
|
||||
if len(codec.GetSupportedSamplerates()) > 0 {
|
||||
c.SetSampleRate(codec.GetSupportedSamplerates()[0])
|
||||
for _, sr := range codec.GetSupportedSamplerates() {
|
||||
if sr == 44_100 {
|
||||
c.SetSampleRate(44_100)
|
||||
}
|
||||
}
|
||||
}
|
||||
c.SetChannels(ffmpeg.AvGetChannelLayoutNbChannels(c.GetChannelLayout()))
|
||||
c.SetChannelLayout(ffmpeg.AV_CH_LAYOUT_STEREO)
|
||||
if len(codec.GetChannelLayouts()) > 0 {
|
||||
c.SetChannelLayout(codec.GetChannelLayouts()[0])
|
||||
for _, cl := range codec.GetChannelLayouts() {
|
||||
if cl == ffmpeg.AV_CH_LAYOUT_STEREO {
|
||||
c.SetChannelLayout(ffmpeg.AV_CH_LAYOUT_STEREO)
|
||||
}
|
||||
}
|
||||
}
|
||||
c.SetChannels(ffmpeg.AvGetChannelLayoutNbChannels(c.GetChannelLayout()))
|
||||
ost.st.SetTimeBase(ffmpeg.AvMakeQ(1, c.GetSampleRate()))
|
||||
|
||||
case ffmpeg.AVMEDIA_TYPE_VIDEO:
|
||||
c.SetCodecId(codecId)
|
||||
|
||||
c.SetBitRate(4_000_000)
|
||||
// Resolution must be a multiple of two.
|
||||
c.SetWidth(352)
|
||||
c.SetHeight(288)
|
||||
// timebase: This is the fundamental unit of time (in seconds) in terms
|
||||
// of which frame timestamps are represented. For fixed-fps content,
|
||||
// timebase should be 1/framerate and timestamp increments should be
|
||||
// identical to 1.
|
||||
ost.st.SetTimeBase(ffmpeg.AvMakeQ(1, STREAM_FRAME_RATE))
|
||||
c.SetTimeBase(ost.st.GetTimeBase())
|
||||
|
||||
c.SetGopSize(12) // emit one intra frame every twelve frames at most
|
||||
c.SetPixFmt(STREAM_PIX_FMT)
|
||||
if c.GetCodecId() == ffmpeg.AV_CODEC_ID_MPEG2VIDEO {
|
||||
// just for testing, we also add B-frames
|
||||
c.SetMaxBFrames(2)
|
||||
}
|
||||
if c.GetCodecId() == ffmpeg.AV_CODEC_ID_MPEG1VIDEO {
|
||||
// Needed to avoid using macroblocks in which some coeffs overflow.
|
||||
// This does not happen with normal video, it just happens here as
|
||||
// the motion of the chroma plane does not match the luma plane.
|
||||
c.SetMbDecision(2)
|
||||
}
|
||||
|
||||
default:
|
||||
break
|
||||
}
|
||||
|
||||
if (oc.GetOformat().GetFlags() & ffmpeg.AVFMT_GLOBALHEADER) != 0 {
|
||||
c.SetFlags(c.GetFlags() | ffmpeg.AV_CODEC_FLAG_GLOBAL_HEADER)
|
||||
}
|
||||
|
||||
return codec
|
||||
}
|
||||
|
||||
// **************************************************************
|
||||
// audio output
|
||||
|
||||
func allocAudioFrame(sampleFmt ffmpeg.AVSampleFormat, channelLayout uint64, sampleRate int32, nbSamples int32) (
|
||||
frame *ffmpeg.AVFrame) {
|
||||
if frame = ffmpeg.AvFrameAlloc(); frame == nil {
|
||||
panic("Error allocating an audio frame")
|
||||
}
|
||||
|
||||
frame.SetFormat(sampleFmt)
|
||||
frame.SetChannelLayout(channelLayout)
|
||||
frame.SetSampleRate(sampleRate)
|
||||
frame.SetNbSamples(nbSamples)
|
||||
|
||||
if nbSamples != 0 {
|
||||
if ret := ffmpeg.AvFrameGetBuffer(frame, 0); ret < 0 {
|
||||
panic("Error allocating an audio buffer")
|
||||
}
|
||||
}
|
||||
return frame
|
||||
}
|
||||
|
||||
func openAudio(oc *ffmpeg.AVFormatContext, codec *ffmpeg.AVCodec, ost *outputStream, optArg *ffmpeg.AVDictionary) {
|
||||
var (
|
||||
c *ffmpeg.AVCodecContext
|
||||
opt *ffmpeg.AVDictionary
|
||||
nbSamples int32
|
||||
)
|
||||
c = ost.enc
|
||||
|
||||
// open it
|
||||
ffmpeg.AvDictCopy(&opt, optArg, 0)
|
||||
ret := ffmpeg.AvCodecOpen2(c, codec, &opt)
|
||||
ffmpeg.AvDictFree(&opt)
|
||||
if ret < 0 {
|
||||
panic(fmt.Sprintf("Could not open audio codec: %s", ffmpeg.AvErr2str(ret)))
|
||||
}
|
||||
|
||||
// init signal generator
|
||||
ost.t = 0
|
||||
ost.tincr = float32(2 * math.Pi * 110.0 / float64(c.GetSampleRate()))
|
||||
// increment frequency by 110 Hz per second
|
||||
ost.tincr2 = float32(2 * math.Pi * 110.0 / float64(c.GetSampleRate()) / float64(c.GetSampleRate()))
|
||||
|
||||
if (c.GetCodec().GetCapabilities() & ffmpeg.AV_CODEC_CAP_VARIABLE_FRAME_SIZE) != 0 {
|
||||
nbSamples = 10_000
|
||||
} else {
|
||||
nbSamples = c.GetFrameSize()
|
||||
}
|
||||
ost.frame = allocAudioFrame(c.GetSampleFmt(), c.GetChannelLayout(),
|
||||
c.GetSampleRate(), nbSamples)
|
||||
ost.tmpFrame = allocAudioFrame(ffmpeg.AV_SAMPLE_FMT_S16, c.GetChannelLayout(),
|
||||
c.GetSampleRate(), nbSamples)
|
||||
|
||||
// copy the stream parameters to the muxer
|
||||
if ret = ffmpeg.AvCodecParametersFromContext(ost.st.GetCodecpar(), c); ret < 0 {
|
||||
panic("Could not copy the stream parameters")
|
||||
}
|
||||
|
||||
// create resampler context
|
||||
if ost.swrCtx = ffmpeg.SwrAlloc(); ost.swrCtx == nil {
|
||||
panic("Could not allocate resampler context")
|
||||
}
|
||||
|
||||
// set options
|
||||
ffmpeg.AvOptSetInt(ost.swrCtx, "in_channel_count", c.GetChannels(), 0)
|
||||
ffmpeg.AvOptSetInt(ost.swrCtx, "in_sample_rate", c.GetSampleRate(), 0)
|
||||
ffmpeg.AvOptSetSampleFmt(ost.swrCtx, "in_sample_fmt", ffmpeg.AV_SAMPLE_FMT_S16, 0)
|
||||
ffmpeg.AvOptSetInt(ost.swrCtx, "out_channel_count", c.GetChannels(), 0)
|
||||
ffmpeg.AvOptSetInt(ost.swrCtx, "out_sample_rate", c.GetSampleRate(), 0)
|
||||
ffmpeg.AvOptSetSampleFmt(ost.swrCtx, "out_sample_fmt", c.GetSampleFmt(), 0)
|
||||
|
||||
// initialize the resampling context
|
||||
if ret = ffmpeg.SwrInit(ost.swrCtx); ret < 0 {
|
||||
panic("Failed to initialize the resampling context")
|
||||
}
|
||||
}
|
||||
|
||||
// Prepare a 16 bit dummy audio frame of 'frame_size' samples and
|
||||
// 'nb_channels' channels.
|
||||
func getAudioFrame(ost *outputStream) (frame *ffmpeg.AVFrame) {
|
||||
frame = ost.tmpFrame
|
||||
data := unsafe.Slice((*int16)(unsafe.Pointer(frame.GetData()[0])),
|
||||
frame.GetNbSamples()*ost.enc.GetChannels())
|
||||
|
||||
if ffmpeg.AvCompareTs(ost.nextPts, ost.enc.GetTimeBase(), STREAM_DURATION, ffmpeg.AvMakeQ(1, 1)) > 0 {
|
||||
return nil
|
||||
}
|
||||
idx := 0
|
||||
for j := 0; j < int(frame.GetNbSamples()); j++ {
|
||||
v := (int16)(math.Sin(float64(ost.t)) * 10_000)
|
||||
for i := 0; i < int(ost.enc.GetChannels()); i++ {
|
||||
data[idx] = v
|
||||
idx++
|
||||
ost.t += ost.tincr
|
||||
ost.tincr += ost.tincr2
|
||||
}
|
||||
}
|
||||
frame.SetPts(ost.nextPts)
|
||||
ost.nextPts += int64(frame.GetNbSamples())
|
||||
|
||||
return frame
|
||||
}
|
||||
|
||||
// encode one audio frame and send it to the muxer
|
||||
// return 1 when encoding is finished, 0 otherwise
|
||||
func writeAudioFrame(oc *ffmpeg.AVFormatContext, ost *outputStream) bool {
|
||||
var (
|
||||
c *ffmpeg.AVCodecContext
|
||||
frame *ffmpeg.AVFrame
|
||||
ret int32
|
||||
dstNbSamples int32
|
||||
)
|
||||
c = ost.enc
|
||||
|
||||
frame = getAudioFrame(ost)
|
||||
|
||||
if frame != nil {
|
||||
// convert samples from native format to destination codec format, using the resampler */
|
||||
// compute destination number of samples
|
||||
dstNbSamples = ffmpeg.AvRescaleRnd(ffmpeg.SwrGetDelay(ost.swrCtx, c.GetSampleRate())+frame.GetNbSamples(),
|
||||
c.GetSampleRate(), c.GetSampleRate(), ffmpeg.AV_ROUND_UP)
|
||||
ffmpeg.AvAssert0(dstNbSamples == frame.GetNbSamples())
|
||||
|
||||
// when we pass a frame to the encoder, it may keep a reference to it
|
||||
// internally;
|
||||
// make sure we do not overwrite it here
|
||||
if ret = ffmpeg.AvFrameMakeWritable(ost.frame); ret < 0 {
|
||||
panic("Make frame writeable failed")
|
||||
}
|
||||
|
||||
if ret = ffmpeg.SwrConvert(ost.swrCtx,
|
||||
&ost.frame.GetData()[0], dstNbSamples,
|
||||
&frame.GetData()[0], frame.GetNbSamples()); ret < 0 {
|
||||
panic("Error while converting")
|
||||
}
|
||||
frame = ost.frame
|
||||
|
||||
frame.SetPts(ffmpeg.AvRescaleQ(int64(ost.samplesCount), ffmpeg.AvMakeQ(1, c.GetSampleRate()), c.GetTimeBase()))
|
||||
ost.samplesCount += dstNbSamples
|
||||
}
|
||||
|
||||
return writeFrame(oc, c, ost.st, frame)
|
||||
}
|
||||
|
||||
// **************************************************************
|
||||
// video output
|
||||
|
||||
func allocPicture(pixFmt ffmpeg.AVPixelFormat, width, height int32) (picture *ffmpeg.AVFrame) {
|
||||
if picture = ffmpeg.AvFrameAlloc(); picture == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
picture.SetFormat(pixFmt)
|
||||
picture.SetWidth(width)
|
||||
picture.SetHeight(height)
|
||||
|
||||
// allocate the buffers for the frame data
|
||||
if ret := ffmpeg.AvFrameGetBuffer(picture, 0); ret < 0 {
|
||||
panic("Could not allocate frame data.")
|
||||
}
|
||||
return picture
|
||||
}
|
||||
|
||||
func openVideo(oc *ffmpeg.AVFormatContext, codec *ffmpeg.AVCodec,
|
||||
ost *outputStream, optArg *ffmpeg.AVDictionary) {
|
||||
var (
|
||||
ret int32
|
||||
c = ost.enc
|
||||
opt *ffmpeg.AVDictionary
|
||||
)
|
||||
|
||||
ffmpeg.AvDictCopy(&opt, optArg, 0)
|
||||
|
||||
// open the codec
|
||||
ret = ffmpeg.AvCodecOpen2(c, codec, &opt)
|
||||
ffmpeg.AvDictFree(&opt)
|
||||
if ret < 0 {
|
||||
panic(fmt.Sprintf("Could not video audio codec: %s", ffmpeg.AvErr2str(ret)))
|
||||
}
|
||||
|
||||
// allocate and init a re-usable frame
|
||||
if ost.frame = allocPicture(c.GetPixFmt(), c.GetWidth(), c.GetHeight()); ost.frame == nil {
|
||||
panic("Could not allocate video frame")
|
||||
}
|
||||
|
||||
// If the output format is not YUV420P, then a temporary YUV420P
|
||||
// picture is needed too. It is then converted to the required
|
||||
// output format.
|
||||
ost.tmpFrame = nil
|
||||
if c.GetPixFmt() != ffmpeg.AV_PIX_FMT_YUV420P {
|
||||
ost.tmpFrame = allocPicture(ffmpeg.AV_PIX_FMT_YUV420P, c.GetWidth(), c.GetHeight())
|
||||
if ost.tmpFrame == nil {
|
||||
panic("Could not allocate temporary picture")
|
||||
}
|
||||
}
|
||||
|
||||
// copy the stream parameters to the muxer
|
||||
if ret = ffmpeg.AvCodecParametersFromContext(ost.st.GetCodecpar(), c); ret < 0 {
|
||||
panic("Could not copy the stream parameters")
|
||||
}
|
||||
}
|
||||
|
||||
// Prepare a dummy image.
|
||||
func fillYuvImage(pict *ffmpeg.AVFrame, frameIndex int32, width, height int32) {
|
||||
var (
|
||||
data = ffmpeg.SliceSlice(&pict.GetData()[0], 3, width*height)
|
||||
linesize = pict.GetLinesize()
|
||||
i = frameIndex
|
||||
)
|
||||
|
||||
// Y
|
||||
for y := int32(0); y < height; y++ {
|
||||
for x := int32(0); x < width; x++ {
|
||||
data[0][y*linesize[0]+x] = uint8(x + y + i*3)
|
||||
}
|
||||
}
|
||||
// Cb and Cr
|
||||
for y := int32(0); y < height/2; y++ {
|
||||
for x := int32(0); x < width/2; x++ {
|
||||
data[1][y*linesize[1]+x] = uint8(128 + y + i*2)
|
||||
data[2][y*linesize[2]+x] = uint8(64 + x + i*5)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func getVideoFrame(ost *outputStream) *ffmpeg.AVFrame {
|
||||
c := ost.enc
|
||||
|
||||
// check if we want to generate more frames
|
||||
if ffmpeg.AvCompareTs(ost.nextPts, c.GetTimeBase(), STREAM_DURATION, ffmpeg.AvMakeQ(1, 1)) > 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
// when we pass a frame to the encoder, it may keep a reference to it
|
||||
// internally; make sure we do not overwrite it here
|
||||
if ffmpeg.AvFrameMakeWritable(ost.frame) < 0 {
|
||||
panic("Make video frame writable failed")
|
||||
}
|
||||
|
||||
if c.GetPixFmt() != ffmpeg.AV_PIX_FMT_YUV420P {
|
||||
// as we only generate a YUV420P picture, we must convert it
|
||||
// to the codec pixel format if needed
|
||||
if ost.swsCtx == nil {
|
||||
ost.swsCtx = ffmpeg.SwsGetContext(c.GetWidth(), c.GetHeight(),
|
||||
ffmpeg.AV_PIX_FMT_YUV420P,
|
||||
c.GetWidth(), c.GetHeight(),
|
||||
c.GetPixFmt(),
|
||||
SCALE_FLAGS, nil, nil, nil)
|
||||
if ost.swsCtx == nil {
|
||||
panic("Could not initialize the conversion context")
|
||||
}
|
||||
}
|
||||
fillYuvImage(ost.tmpFrame, int32(ost.nextPts), c.GetWidth(), c.GetHeight())
|
||||
ffmpeg.SwsScale(ost.swsCtx, ost.tmpFrame.GetData(),
|
||||
ost.tmpFrame.GetLinesize(), 0, c.GetHeight(), ost.frame.GetData(),
|
||||
ost.frame.GetLinesize())
|
||||
} else {
|
||||
fillYuvImage(ost.frame, int32(ost.nextPts), c.GetWidth(), c.GetHeight())
|
||||
}
|
||||
|
||||
ost.frame.SetPts(ffmpeg.PlusPlus(&ost.nextPts))
|
||||
|
||||
return ost.frame
|
||||
}
|
||||
|
||||
// encode one video frame and send it to the muxer
|
||||
// return 1 when encoding is finished, 0 otherwise
|
||||
func writeVideoFrame(oc *ffmpeg.AVFormatContext, ost *outputStream) bool {
|
||||
return writeFrame(oc, ost.enc, ost.st, getVideoFrame(ost))
|
||||
}
|
||||
|
||||
func closeStream(oc *ffmpeg.AVFormatContext, ost *outputStream) {
|
||||
ffmpeg.AvCodecFreeContext(&ost.enc)
|
||||
ffmpeg.AvFrameFree(&ost.frame)
|
||||
ffmpeg.AvFrameFree(&ost.tmpFrame)
|
||||
ffmpeg.SwsFreeContext(ost.swsCtx)
|
||||
ffmpeg.SwrFree(&ost.swrCtx)
|
||||
}
|
||||
|
||||
// **************************************************************
|
||||
// media file output
|
||||
|
||||
func main() {
|
||||
var (
|
||||
videoSt, audioSt outputStream
|
||||
videoCodec, audioCodec *ffmpeg.AVCodec
|
||||
haveVideo, haveAudio bool
|
||||
encodeVideo, encodeAudio bool
|
||||
opt *ffmpeg.AVDictionary
|
||||
ret int32
|
||||
_fmt *ffmpeg.AVOutputFormat
|
||||
oc *ffmpeg.AVFormatContext
|
||||
)
|
||||
|
||||
if len(os.Args) < 2 {
|
||||
fmt.Fprintf(os.Stdout, "usage: %s output_file\n"+
|
||||
"API example program to output a media file with libavformat.\n"+
|
||||
"This program generates a synthetic audio and video stream, encodes and\n"+
|
||||
"muxes them into a file named output_file.\n"+
|
||||
"The output format is automatically guessed according to the file extension.\n"+
|
||||
"Raw images can also be output by using '%%d' in the filename.\n"+
|
||||
"\n", os.Args[0])
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
filename := os.Args[1]
|
||||
for i := 2; i+1 < len(os.Args); i += 2 {
|
||||
if os.Args[i] == "-flags" || os.Args[i] == "-fflags" {
|
||||
ffmpeg.AvDictSet(&opt, os.Args[i][1:], os.Args[i+1], 0)
|
||||
}
|
||||
}
|
||||
|
||||
// allocate the output media context
|
||||
ffmpeg.AvFormatAllocOutputContext2(&oc, nil, ffmpeg.NIL, filename)
|
||||
if oc == nil {
|
||||
fmt.Fprintf(os.Stderr, "Could not deduce output format from file extension: using MPEG.\n")
|
||||
ffmpeg.AvFormatAllocOutputContext2(&oc, nil, "mpeg", filename)
|
||||
}
|
||||
if oc == nil {
|
||||
panic("Allocate the output media context failed")
|
||||
}
|
||||
|
||||
_fmt = oc.GetOformat()
|
||||
|
||||
// Add the audio and video streams using the default format codecs
|
||||
// and initialize the codecs.
|
||||
if _fmt.GetVideoCodec() != ffmpeg.AV_CODEC_ID_NONE {
|
||||
videoCodec = addStream(&videoSt, oc, _fmt.GetVideoCodec())
|
||||
haveVideo = true
|
||||
encodeVideo = true
|
||||
}
|
||||
if _fmt.GetAudioCodec() != ffmpeg.AV_CODEC_ID_NONE {
|
||||
audioCodec = addStream(&audioSt, oc, _fmt.GetAudioCodec())
|
||||
haveAudio = true
|
||||
encodeAudio = true
|
||||
}
|
||||
|
||||
// Now that all the parameters are set, we can open the audio and
|
||||
// video codecs and allocate the necessary encode buffers.
|
||||
if haveVideo {
|
||||
openVideo(oc, videoCodec, &videoSt, opt)
|
||||
}
|
||||
if haveAudio {
|
||||
openAudio(oc, audioCodec, &audioSt, opt)
|
||||
}
|
||||
|
||||
ffmpeg.AvDumpFormat(oc, 0, filename, 1)
|
||||
|
||||
// open the output file, if needed
|
||||
if (_fmt.GetFlags() & ffmpeg.AVFMT_NOFILE) == 0 {
|
||||
if ret = ffmpeg.AvIOOpen(oc.GetPbAddr(), filename, ffmpeg.AVIO_FLAG_WRITE); ret < 0 {
|
||||
fmt.Fprintf(os.Stderr, "Could not open '%s': %s\n", filename, ffmpeg.AvErr2str(ret))
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
// Write the stream header, if any.
|
||||
if ret = ffmpeg.AvFormatWriteHeader(oc, &opt); ret < 0 {
|
||||
fmt.Fprintf(os.Stderr, "Error occurred when opening output file: %s\n", ffmpeg.AvErr2str(ret))
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
for encodeVideo || encodeAudio {
|
||||
// select the stream to encode
|
||||
if encodeVideo &&
|
||||
(!encodeAudio || ffmpeg.AvCompareTs(videoSt.nextPts, videoSt.enc.GetTimeBase(),
|
||||
audioSt.nextPts, audioSt.enc.GetTimeBase()) <= 0) {
|
||||
encodeVideo = !writeVideoFrame(oc, &videoSt)
|
||||
} else {
|
||||
encodeAudio = !writeAudioFrame(oc, &audioSt)
|
||||
}
|
||||
}
|
||||
|
||||
// Write the trailer, if any. The trailer must be written before you
|
||||
// close the CodecContexts open when you wrote the header; otherwise
|
||||
// AvWriteTrailer() may try to use memory that was freed on
|
||||
// AvCodecClose().
|
||||
ffmpeg.AvWriteTrailer(oc)
|
||||
|
||||
// Close each codec.
|
||||
if haveVideo {
|
||||
closeStream(oc, &videoSt)
|
||||
}
|
||||
if haveAudio {
|
||||
closeStream(oc, &audioSt)
|
||||
}
|
||||
|
||||
if (_fmt.GetFlags() & ffmpeg.AVFMT_NOFILE) == 0 {
|
||||
// Close the output file.
|
||||
ffmpeg.AvIOClosep(oc.GetPbAddr())
|
||||
}
|
||||
|
||||
// free the stream
|
||||
ffmpeg.AvFormatFreeContext(oc)
|
||||
|
||||
os.Exit(0)
|
||||
}
|
||||
|
@@ -1,5 +1,150 @@
|
||||
package main
|
||||
|
||||
func main() {
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
|
||||
ffmpeg "github.com/qrtc/ffmpeg-dev-go"
|
||||
)
|
||||
|
||||
func logPacket(fmtCtx *ffmpeg.AVFormatContext, pkt *ffmpeg.AVPacket, tag string) {
|
||||
timeBase := fmtCtx.GetStreams()[pkt.GetStreamIndex()].GetTimeBaseAddr()
|
||||
fmt.Fprintf(os.Stdout, "%s: pts:%s pts_time:%s dts:%s dts_time:%s duration:%s duration_time:%s stream_index:%d\n",
|
||||
tag,
|
||||
ffmpeg.AvTs2str(pkt.GetPts()), ffmpeg.AvTs2timestr(pkt.GetPts(), timeBase),
|
||||
ffmpeg.AvTs2str(pkt.GetDts()), ffmpeg.AvTs2timestr(pkt.GetDts(), timeBase),
|
||||
ffmpeg.AvTs2str(pkt.GetDuration()), ffmpeg.AvTs2timestr(pkt.GetDuration(), timeBase),
|
||||
pkt.GetStreamIndex())
|
||||
}
|
||||
|
||||
func main() {
|
||||
var (
|
||||
ofmt *ffmpeg.AVOutputFormat
|
||||
ifmtCtx, ofmtCtx *ffmpeg.AVFormatContext
|
||||
pkt ffmpeg.AVPacket
|
||||
ret int32
|
||||
streamMapping []int32
|
||||
streamIndex int32
|
||||
)
|
||||
|
||||
if len(os.Args) < 3 {
|
||||
fmt.Fprintf(os.Stderr, "usage: %s input output\n"+
|
||||
"API example program to remux a media file with libavformat and libavcodec.\n"+
|
||||
"The output format is guessed according to the file extension.\n"+
|
||||
"\n", os.Args[0])
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
inFilename := os.Args[1]
|
||||
outFilename := os.Args[2]
|
||||
|
||||
if ret = ffmpeg.AvFormatOpenInput(&ifmtCtx, inFilename, nil, nil); ret < 0 {
|
||||
fmt.Fprintf(os.Stderr, "Could not open input file '%s'", inFilename)
|
||||
goto end
|
||||
}
|
||||
if ret = ffmpeg.AvFormatFindStreamInfo(ifmtCtx, nil); ret < 0 {
|
||||
fmt.Fprintf(os.Stderr, "Failed to retrieve input stream information")
|
||||
goto end
|
||||
}
|
||||
|
||||
ffmpeg.AvDumpFormat(ifmtCtx, 0, inFilename, 0)
|
||||
|
||||
if ffmpeg.AvFormatAllocOutputContext2(&ofmtCtx, nil, ffmpeg.NIL, outFilename); ofmtCtx == nil {
|
||||
fmt.Fprintf(os.Stderr, "Could not create output context\n")
|
||||
ret = ffmpeg.AVERROR_UNKNOWN
|
||||
goto end
|
||||
}
|
||||
|
||||
streamMapping = make([]int32, ifmtCtx.GetNbStreams())
|
||||
|
||||
ofmt = ofmtCtx.GetOformat()
|
||||
|
||||
for i := 0; i < len(streamMapping); i++ {
|
||||
inStream := ifmtCtx.GetStreams()[i]
|
||||
inCodecPar := inStream.GetCodecpar()
|
||||
|
||||
if inCodecPar.GetCodecType() != ffmpeg.AVMEDIA_TYPE_AUDIO &&
|
||||
inCodecPar.GetCodecType() != ffmpeg.AVMEDIA_TYPE_VIDEO &&
|
||||
inCodecPar.GetCodecType() != ffmpeg.AVMEDIA_TYPE_SUBTITLE {
|
||||
streamMapping[i] = -1
|
||||
continue
|
||||
}
|
||||
|
||||
streamMapping[i] = streamIndex
|
||||
streamIndex++
|
||||
|
||||
outStream := ffmpeg.AvFormatNewStream(ofmtCtx, nil)
|
||||
if outStream == nil {
|
||||
fmt.Fprintf(os.Stderr, "Failed allocating output stream\n")
|
||||
ret = ffmpeg.AVERROR_UNKNOWN
|
||||
goto end
|
||||
}
|
||||
|
||||
if ret = ffmpeg.AvCodecParametersCopy(outStream.GetCodecpar(), inCodecPar); ret < 0 {
|
||||
fmt.Fprintf(os.Stderr, "Failed to copy codec parameters\n")
|
||||
goto end
|
||||
}
|
||||
outStream.GetCodecpar().SetCodecTag(0)
|
||||
}
|
||||
ffmpeg.AvDumpFormat(ofmtCtx, 0, outFilename, 1)
|
||||
|
||||
if (ofmt.GetFlags() & ffmpeg.AVFMT_NOFILE) == 0 {
|
||||
if ret = ffmpeg.AvIOOpen(ofmtCtx.GetPbAddr(), outFilename, ffmpeg.AVIO_FLAG_WRITE); ret < 0 {
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Could not open output file '%s'", outFilename)
|
||||
goto end
|
||||
}
|
||||
}
|
||||
|
||||
if ret = ffmpeg.AvFormatWriteHeader(ofmtCtx, nil); ret < 0 {
|
||||
fmt.Fprintf(os.Stderr, "Error occurred when opening output file\n")
|
||||
goto end
|
||||
}
|
||||
|
||||
for {
|
||||
var inStream, outStream *ffmpeg.AVStream
|
||||
|
||||
if ret = ffmpeg.AvReadFrame(ifmtCtx, &pkt); ret < 0 {
|
||||
break
|
||||
}
|
||||
|
||||
inStream = ifmtCtx.GetStreams()[pkt.GetStreamIndex()]
|
||||
if int(pkt.GetStreamIndex()) >= len(streamMapping) ||
|
||||
streamMapping[pkt.GetStreamIndex()] < 0 {
|
||||
ffmpeg.AvPacketUnref(&pkt)
|
||||
continue
|
||||
}
|
||||
|
||||
pkt.SetStreamIndex(streamMapping[pkt.GetStreamIndex()])
|
||||
outStream = ofmtCtx.GetStreams()[pkt.GetStreamIndex()]
|
||||
logPacket(ifmtCtx, &pkt, "in")
|
||||
|
||||
// copy packet
|
||||
pkt.SetPts(ffmpeg.AvRescaleQRnd(pkt.GetPts(), inStream.GetTimeBase(), outStream.GetTimeBase(),
|
||||
ffmpeg.AV_ROUND_NEAR_INF|ffmpeg.AV_ROUND_PASS_MINMAX))
|
||||
pkt.SetDts(ffmpeg.AvRescaleQRnd(pkt.GetDts(), inStream.GetTimeBase(), outStream.GetTimeBase(),
|
||||
ffmpeg.AV_ROUND_NEAR_INF|ffmpeg.AV_ROUND_PASS_MINMAX))
|
||||
pkt.SetDuration(ffmpeg.AvRescaleQ(pkt.GetDuration(), inStream.GetTimeBase(), outStream.GetTimeBase()))
|
||||
pkt.SetPos(-1)
|
||||
logPacket(ofmtCtx, &pkt, "out")
|
||||
|
||||
if ret = ffmpeg.AvInterleavedWriteFrame(ofmtCtx, &pkt); ret < 0 {
|
||||
fmt.Fprintf(os.Stderr, "Error muxing packet\n")
|
||||
break
|
||||
}
|
||||
ffmpeg.AvPacketUnref(&pkt)
|
||||
}
|
||||
ffmpeg.AvWriteTrailer(ofmtCtx)
|
||||
end:
|
||||
// close output
|
||||
ffmpeg.AvFormatCloseInput(&ifmtCtx)
|
||||
if ofmtCtx != nil && (ofmt.GetFlags()&ffmpeg.AVFMT_NOFILE) == 0 {
|
||||
ffmpeg.AvIOClosep(ofmtCtx.GetPbAddr())
|
||||
}
|
||||
ffmpeg.AvFormatFreeContext(ofmtCtx)
|
||||
|
||||
if ret < 0 && ret != ffmpeg.AVERROR_EOF {
|
||||
fmt.Fprintf(os.Stderr, "Error occurred: %s\n", ffmpeg.AvErr2str(ret))
|
||||
os.Exit(1)
|
||||
}
|
||||
os.Exit(0)
|
||||
}
|
||||
|
@@ -1,5 +1,186 @@
|
||||
package main
|
||||
|
||||
func main() {
|
||||
import (
|
||||
"fmt"
|
||||
"math"
|
||||
"os"
|
||||
"syscall"
|
||||
"unsafe"
|
||||
|
||||
ffmpeg "github.com/qrtc/ffmpeg-dev-go"
|
||||
)
|
||||
|
||||
func getFormatFromSampleFmt(sampleFmt ffmpeg.AVSampleFormat) (string, int32) {
|
||||
sampleFmtEntry := []struct {
|
||||
sampleFmt ffmpeg.AVSampleFormat
|
||||
fmtBe string
|
||||
fmtLe string
|
||||
}{
|
||||
{ffmpeg.AV_SAMPLE_FMT_U8, "u8", "u8"},
|
||||
{ffmpeg.AV_SAMPLE_FMT_S16, "s16be", "s16le"},
|
||||
{ffmpeg.AV_SAMPLE_FMT_S32, "s32be", "s32le"},
|
||||
{ffmpeg.AV_SAMPLE_FMT_FLT, "f32be", "f32le"},
|
||||
{ffmpeg.AV_SAMPLE_FMT_DBL, "f64be", "f64le"},
|
||||
}
|
||||
|
||||
for _, entry := range sampleFmtEntry {
|
||||
if sampleFmt == entry.sampleFmt {
|
||||
return ffmpeg.AV_NE(entry.fmtBe, entry.fmtLe), 0
|
||||
}
|
||||
}
|
||||
|
||||
fmt.Fprintf(os.Stderr, "sample format %s is not supported as output format\n",
|
||||
ffmpeg.AvGetSampleFmtName(sampleFmt))
|
||||
return ffmpeg.NIL, -1
|
||||
}
|
||||
|
||||
// Fill dst buffer with nb_samples, generated starting from t.
|
||||
func fileSamples(dst []float64, nbSamples, nbChannels, sampleRate int32, t *float64) {
|
||||
var (
|
||||
tincr = 1.0 / float64(sampleRate)
|
||||
c = 2 * math.Pi * 440.0
|
||||
)
|
||||
|
||||
// generate sin tone with 440Hz frequency and duplicated channels
|
||||
for i := int32(0); i < nbSamples; i++ {
|
||||
dst[i*nbChannels] = math.Sin(c * (*t))
|
||||
for j := int32(1); j < nbChannels; j++ {
|
||||
dst[i*nbChannels+j] = dst[i*nbChannels]
|
||||
}
|
||||
*t += tincr
|
||||
}
|
||||
}
|
||||
|
||||
func main() {
|
||||
var (
|
||||
srcRate, dstRate int32 = 48000, 44100
|
||||
srcData, dstData **uint8
|
||||
srcNbChannels, dstNbChannels int32
|
||||
srcLinesize, dstLinesize int32
|
||||
srcNbSamples, dstNbSamples int32 = 1024, 0
|
||||
maxDstNbSamples int32
|
||||
srcChLayout, dstChLayout = ffmpeg.AV_CH_LAYOUT_STEREO, ffmpeg.AV_CH_LAYOUT_SURROUND
|
||||
srcSampleFmt, dstSampleFmt = ffmpeg.AV_SAMPLE_FMT_DBL, ffmpeg.AV_SAMPLE_FMT_S16
|
||||
swrCtx *ffmpeg.SwrContext
|
||||
ret int32
|
||||
t float64
|
||||
dstBufsize int32
|
||||
_fmt string
|
||||
)
|
||||
|
||||
if len(os.Args) != 2 {
|
||||
fmt.Fprintf(os.Stdout, "Usage: %s output_file\n"+
|
||||
"API example program to show how to resample an audio stream with libswresample.\n"+
|
||||
"This program generates a series of audio frames, resamples them to a specified "+
|
||||
"output format and rate and saves them to an output file named output_file.\n",
|
||||
os.Args[0])
|
||||
os.Exit(1)
|
||||
}
|
||||
dstFilename := os.Args[1]
|
||||
|
||||
dstFile, err := os.OpenFile(dstFilename, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0755)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Could not open destination file %s\n", dstFilename)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// create resampler context
|
||||
if swrCtx = ffmpeg.SwrAlloc(); swrCtx == nil {
|
||||
fmt.Fprintf(os.Stderr, "Could not allocate resampler context\n")
|
||||
ret = ffmpeg.AVERROR(syscall.ENOMEM)
|
||||
goto end
|
||||
}
|
||||
|
||||
// set options
|
||||
ffmpeg.AvOptSetInt(swrCtx, "in_channel_layout", srcChLayout, 0)
|
||||
ffmpeg.AvOptSetInt(swrCtx, "in_sample_rate", srcRate, 0)
|
||||
ffmpeg.AvOptSetSampleFmt(swrCtx, "in_sample_fmt", srcSampleFmt, 0)
|
||||
|
||||
ffmpeg.AvOptSetInt(swrCtx, "out_channel_layout", dstChLayout, 0)
|
||||
ffmpeg.AvOptSetInt(swrCtx, "out_sample_rate", dstRate, 0)
|
||||
ffmpeg.AvOptSetSampleFmt(swrCtx, "out_sample_fmt", dstSampleFmt, 0)
|
||||
|
||||
// initialize the resampling context
|
||||
if ret = ffmpeg.SwrInit(swrCtx); ret < 0 {
|
||||
fmt.Fprintf(os.Stderr, "Failed to initialize the resampling context\n")
|
||||
goto end
|
||||
}
|
||||
|
||||
// allocate source and destination samples buffers
|
||||
srcNbChannels = ffmpeg.AvGetChannelLayoutNbChannels(srcChLayout)
|
||||
if ret = ffmpeg.AvSamplesAllocArrayAndSamples(&srcData, &srcLinesize, srcNbChannels,
|
||||
srcNbSamples, srcSampleFmt, 0); ret < 0 {
|
||||
fmt.Fprintf(os.Stderr, "Could not allocate source samples\n")
|
||||
goto end
|
||||
}
|
||||
|
||||
// compute the number of converted samples: buffering is avoided
|
||||
// ensuring that the output buffer will contain at least all the
|
||||
// converted input samples
|
||||
dstNbSamples = ffmpeg.AvRescaleRnd(srcNbSamples, dstRate, srcRate, ffmpeg.AV_ROUND_UP)
|
||||
maxDstNbSamples = dstNbSamples
|
||||
|
||||
// buffer is going to be directly written to a rawaudio file, no alignment
|
||||
dstNbChannels = ffmpeg.AvGetChannelLayoutNbChannels(dstChLayout)
|
||||
if ret = ffmpeg.AvSamplesAllocArrayAndSamples(&dstData, &dstLinesize, dstNbChannels,
|
||||
dstNbSamples, dstSampleFmt, 0); ret < 0 {
|
||||
fmt.Fprintf(os.Stderr, "Could not allocate destination samples\n")
|
||||
goto end
|
||||
}
|
||||
|
||||
for ok := true; ok; ok = (t < 10) {
|
||||
// generate synthetic audio
|
||||
fileSamples(unsafe.Slice((*float64)(unsafe.Pointer(*srcData)), srcNbSamples*srcNbChannels),
|
||||
srcNbSamples, srcNbChannels, srcRate, &t)
|
||||
|
||||
//compute destination number of samples
|
||||
dstNbSamples = ffmpeg.AvRescaleRnd(ffmpeg.SwrGetDelay(swrCtx, srcRate)+srcNbSamples,
|
||||
dstRate, srcRate, ffmpeg.AV_ROUND_UP)
|
||||
if dstNbSamples > maxDstNbSamples {
|
||||
ffmpeg.AvFreep(dstData)
|
||||
if ret = ffmpeg.AvSamplesAlloc(dstData, &dstLinesize, dstNbChannels,
|
||||
dstNbSamples, dstSampleFmt, 1); ret < 0 {
|
||||
break
|
||||
}
|
||||
maxDstNbSamples = dstNbSamples
|
||||
}
|
||||
|
||||
// convert to destination format
|
||||
if ret = ffmpeg.SwrConvert(swrCtx, dstData, dstNbSamples, srcData, srcNbSamples); ret < 0 {
|
||||
fmt.Fprintf(os.Stderr, "Error while converting\n")
|
||||
goto end
|
||||
}
|
||||
if dstBufsize = ffmpeg.AvSamplesGetBufferSize(&dstLinesize, dstNbChannels,
|
||||
ret, dstSampleFmt, 1); dstBufsize < 0 {
|
||||
fmt.Fprintf(os.Stderr, "Could not get sample buffer size\n")
|
||||
goto end
|
||||
}
|
||||
fmt.Fprintf(os.Stdout, "t:%f in:%d out:%d\n", t, srcNbSamples, ret)
|
||||
dstFile.Write(ffmpeg.SliceSlice(dstData, 1, dstBufsize)[0])
|
||||
}
|
||||
|
||||
if _fmt, ret = getFormatFromSampleFmt(dstSampleFmt); ret < 0 {
|
||||
goto end
|
||||
}
|
||||
fmt.Fprintf(os.Stderr, "Resampling succeeded. Play the output file with the command:\n"+
|
||||
"ffplay -f %s -channel_layout %d -channels %d -ar %d %s\n",
|
||||
_fmt, dstChLayout, dstNbChannels, dstRate, dstFilename)
|
||||
|
||||
end:
|
||||
dstFile.Close()
|
||||
|
||||
if srcData != nil {
|
||||
ffmpeg.AvFreep(srcData)
|
||||
}
|
||||
ffmpeg.AvFreep(&srcData)
|
||||
|
||||
if dstData != nil {
|
||||
ffmpeg.AvFreep(dstData)
|
||||
}
|
||||
ffmpeg.AvFreep(&dstData)
|
||||
|
||||
ffmpeg.SwrFree(&swrCtx)
|
||||
if ret < 0 {
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
|
@@ -1,5 +1,559 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"syscall"
|
||||
"unsafe"
|
||||
|
||||
ffmpeg "github.com/qrtc/ffmpeg-dev-go"
|
||||
)
|
||||
|
||||
// filteringContext bundles the per-stream filter graph (source and sink
// endpoints included) with the packet and frame objects that are reused for
// every encode/mux iteration of that stream.
type filteringContext struct {
	buffersinkCtx *ffmpeg.AVFilterContext // graph output ("out" endpoint)
	buffersrcCtx  *ffmpeg.AVFilterContext // graph input ("in" endpoint)
	filterGraph   *ffmpeg.AVFilterGraph   // nil for streams that are remuxed only
	encPkt        *ffmpeg.AVPacket        // scratch packet reused for encoded output
	filteredFrame *ffmpeg.AVFrame         // scratch frame reused for sink output
}
|
||||
|
||||
// streamContext holds the decoder/encoder pair for one input stream plus the
// frame reused for decoded output.
type streamContext struct {
	decCtx   *ffmpeg.AVCodecContext
	encCtx   *ffmpeg.AVCodecContext // set only for reencoded (audio/video) streams
	decFrame *ffmpeg.AVFrame        // scratch frame reused for decoder output
}
|
||||
|
||||
// openInputFile opens fileName for demuxing, probes stream information, and
// allocates a decoder context plus a reusable decoded frame for every stream.
// Audio and video decoders are actually opened; other stream types only get
// their parameters copied so they can be remuxed later.
//
// Returns the demuxer context, one streamContext per input stream, and 0 on
// success, or nil values and a negative AVERROR code on failure. On failure,
// already-allocated resources are left for the caller's cleanup path.
func openInputFile(fileName string) (ifmtCtx *ffmpeg.AVFormatContext, streamCtx []streamContext, ret int32) {
	if ret = ffmpeg.AvFormatOpenInput(&ifmtCtx, fileName, nil, nil); ret < 0 {
		ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Cannot open input file\n")
		return nil, nil, ret
	}

	if ret = ffmpeg.AvFormatFindStreamInfo(ifmtCtx, nil); ret < 0 {
		ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Cannot find stream information\n")
		return nil, nil, ret
	}

	streamCtx = make([]streamContext, ifmtCtx.GetNbStreams())

	for i := 0; i < int(ifmtCtx.GetNbStreams()); i++ {
		stream := ifmtCtx.GetStreams()[i]
		dec := ffmpeg.AvCodecFindDecoder(stream.GetCodecpar().GetCodecId())
		if dec == nil {
			ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Failed to find decoder for stream #%d\n", i)
			return nil, nil, ffmpeg.AVERROR_DECODER_NOT_FOUND
		}
		codecCtx := ffmpeg.AvCodecAllocContext3(dec)
		if codecCtx == nil {
			ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Failed to allocate the decoder context for stream #%d\n", i)
			return nil, nil, ffmpeg.AVERROR(syscall.ENOMEM)
		}
		if ret = ffmpeg.AvCodecParametersToContext(codecCtx, stream.GetCodecpar()); ret < 0 {
			ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Failed to copy decoder parameters to input decoder context "+
				"for stream #%d\n", i)
			return nil, nil, ret
		}
		// Reencode video & audio and remux subtitles etc.
		if codecCtx.GetCodecType() == ffmpeg.AVMEDIA_TYPE_VIDEO ||
			codecCtx.GetCodecType() == ffmpeg.AVMEDIA_TYPE_AUDIO {
			if codecCtx.GetCodecType() == ffmpeg.AVMEDIA_TYPE_VIDEO {
				// Video encoders need a frame rate; guess it from the input.
				codecCtx.SetFramerate(ffmpeg.AvGuessFrameRate(ifmtCtx, stream, nil))
			}
			// Open decoder
			if ret = ffmpeg.AvCodecOpen2(codecCtx, dec, nil); ret < 0 {
				ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Failed to open decoder for stream #%d\n", i)
				return nil, nil, ret
			}
		}
		streamCtx[i].decCtx = codecCtx

		if streamCtx[i].decFrame = ffmpeg.AvFrameAlloc(); streamCtx[i].decFrame == nil {
			return nil, nil, ffmpeg.AVERROR(syscall.ENOMEM)
		}
	}

	ffmpeg.AvDumpFormat(ifmtCtx, 0, fileName, 0)
	return ifmtCtx, streamCtx, 0
}
|
||||
|
||||
func openOutputFile(ifmtCtx *ffmpeg.AVFormatContext, streamCtx []streamContext,
|
||||
filename string) (ofmtCtx *ffmpeg.AVFormatContext, ret int32) {
|
||||
if ffmpeg.AvFormatAllocOutputContext2(&ofmtCtx, nil, ffmpeg.NIL, filename); ofmtCtx == nil {
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Could not create output context\n")
|
||||
return nil, ret
|
||||
}
|
||||
|
||||
for i := 0; i < len(streamCtx); i++ {
|
||||
outStream := ffmpeg.AvFormatNewStream(ofmtCtx, nil)
|
||||
if outStream == nil {
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Failed allocating output stream\n")
|
||||
return nil, ret
|
||||
}
|
||||
|
||||
inStream := ifmtCtx.GetStreams()[i]
|
||||
decCtx := streamCtx[i].decCtx
|
||||
|
||||
if decCtx.GetCodecType() == ffmpeg.AVMEDIA_TYPE_VIDEO ||
|
||||
decCtx.GetCodecType() == ffmpeg.AVMEDIA_TYPE_AUDIO {
|
||||
encoder := ffmpeg.AvCodecFindEncoder(decCtx.GetCodecId())
|
||||
if encoder == nil {
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Necessary encoder not found\n")
|
||||
return nil, ffmpeg.AVERROR_INVALIDDATA
|
||||
}
|
||||
encCtx := ffmpeg.AvCodecAllocContext3(encoder)
|
||||
if encCtx == nil {
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Failed to allocate the encoder context\n")
|
||||
return nil, ffmpeg.AVERROR(syscall.ENOMEM)
|
||||
}
|
||||
|
||||
// In this example, we transcode to same properties (picture size,
|
||||
// sample rate etc.). These properties can be changed for output
|
||||
// streams easily using filters
|
||||
if decCtx.GetCodecType() == ffmpeg.AVMEDIA_TYPE_VIDEO {
|
||||
encCtx.SetHeight(decCtx.GetHeight())
|
||||
encCtx.SetWidth(decCtx.GetWidth())
|
||||
encCtx.SetSampleAspectRatio(decCtx.GetSampleAspectRatio())
|
||||
// take first format from list of supported formats
|
||||
if len(encoder.GetPixFmts()) != 0 {
|
||||
encCtx.SetPixFmt(encoder.GetPixFmts()[0])
|
||||
} else {
|
||||
encCtx.SetPixFmt(decCtx.GetPixFmt())
|
||||
}
|
||||
// video time_base can be set to whatever is handy and supported by encoder
|
||||
encCtx.SetTimeBase(ffmpeg.AvInvQ(decCtx.GetFramerate()))
|
||||
} else {
|
||||
encCtx.SetSampleRate(decCtx.GetSampleRate())
|
||||
encCtx.SetChannelLayout(decCtx.GetChannelLayout())
|
||||
encCtx.SetChannels(ffmpeg.AvGetChannelLayoutNbChannels(encCtx.GetChannelLayout()))
|
||||
// take first format from list of supported formats
|
||||
encCtx.SetSampleFmt(encoder.GetSampleFmts()[0])
|
||||
encCtx.SetTimeBase(ffmpeg.AvMakeQ(1, encCtx.GetSampleRate()))
|
||||
}
|
||||
|
||||
if (ofmtCtx.GetOformat().GetFlags() & ffmpeg.AVFMT_GLOBALHEADER) != 0 {
|
||||
encCtx.SetFlags(encCtx.GetFlags() | ffmpeg.AV_CODEC_FLAG_GLOBAL_HEADER)
|
||||
}
|
||||
|
||||
// Third parameter can be used to pass settings to encoder
|
||||
if ret = ffmpeg.AvCodecOpen2(encCtx, encoder, nil); ret < 0 {
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Cannot open video encoder for stream #%d\n", i)
|
||||
return nil, ret
|
||||
}
|
||||
if ret = ffmpeg.AvCodecParametersFromContext(outStream.GetCodecpar(), encCtx); ret < 0 {
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Failed to copy encoder parameters to output stream #%d\n", i)
|
||||
return nil, ret
|
||||
}
|
||||
|
||||
outStream.SetTimeBase(encCtx.GetTimeBase())
|
||||
streamCtx[i].encCtx = encCtx
|
||||
} else if decCtx.GetCodecType() == ffmpeg.AVMEDIA_TYPE_UNKNOWN {
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Elementary stream #%d is of unknown type, cannot proceed\n", i)
|
||||
return nil, ffmpeg.AVERROR_INVALIDDATA
|
||||
} else {
|
||||
// if this stream must be remuxed
|
||||
if ret = ffmpeg.AvCodecParametersCopy(outStream.GetCodecpar(), inStream.GetCodecpar()); ret < 0 {
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Copying parameters for stream #%d failed\n", i)
|
||||
return nil, ret
|
||||
}
|
||||
outStream.SetTimeBase(inStream.GetTimeBase())
|
||||
}
|
||||
}
|
||||
ffmpeg.AvDumpFormat(ofmtCtx, 0, filename, 1)
|
||||
|
||||
if (ofmtCtx.GetOformat().GetFlags() & ffmpeg.AVFMT_NOFILE) == 0 {
|
||||
if ret = ffmpeg.AvIOOpen(ofmtCtx.GetPbAddr(), filename, ffmpeg.AVIO_FLAG_WRITE); ret < 0 {
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Could not open output file '%s'", filename)
|
||||
return nil, ret
|
||||
}
|
||||
}
|
||||
|
||||
// init muxer, write output file header
|
||||
if ret = ffmpeg.AvFormatWriteHeader(ofmtCtx, nil); ret < 0 {
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Error occurred when opening output file\n")
|
||||
return nil, ret
|
||||
}
|
||||
|
||||
return ofmtCtx, 0
|
||||
}
|
||||
|
||||
func initFilter(fctx *filteringContext, decCtx, encCtx *ffmpeg.AVCodecContext, filterSpec string) (ret int32) {
|
||||
var (
|
||||
buffersrc *ffmpeg.AVFilter
|
||||
buffersink *ffmpeg.AVFilter
|
||||
buffersrcCtx *ffmpeg.AVFilterContext
|
||||
buffersinkCtx *ffmpeg.AVFilterContext
|
||||
outputs = ffmpeg.AvFilterInoutAlloc()
|
||||
inputs = ffmpeg.AvFilterInoutAlloc()
|
||||
filterGraph = ffmpeg.AvFilterGraphAlloc()
|
||||
args string
|
||||
)
|
||||
|
||||
if outputs == nil || inputs == nil || filterGraph == nil {
|
||||
ret = ffmpeg.AVERROR(syscall.ENOMEM)
|
||||
goto end
|
||||
}
|
||||
|
||||
if decCtx.GetCodecType() == ffmpeg.AVMEDIA_TYPE_VIDEO {
|
||||
buffersrc = ffmpeg.AvFilterGetByName("buffer")
|
||||
buffersink = ffmpeg.AvFilterGetByName("buffersink")
|
||||
if buffersrc == nil || buffersink == nil {
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "filtering source or sink element not found\n")
|
||||
ret = ffmpeg.AVERROR_UNKNOWN
|
||||
goto end
|
||||
}
|
||||
|
||||
args = fmt.Sprintf("video_size=%dx%d:pix_fmt=%d:time_base=%d/%d:pixel_aspect=%d/%d",
|
||||
decCtx.GetWidth(), decCtx.GetHeight(), decCtx.GetPixFmt(),
|
||||
decCtx.GetTimeBaseAddr().GetNum(), decCtx.GetTimeBaseAddr().GetDen(),
|
||||
decCtx.GetSampleAspectRatioAddr().GetNum(), decCtx.GetSampleAspectRatioAddr().GetDen())
|
||||
|
||||
if ret = ffmpeg.AvFilterGraphCreateFilter(&buffersrcCtx, buffersrc, "in",
|
||||
args, nil, filterGraph); ret < 0 {
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Cannot create buffer source\n")
|
||||
goto end
|
||||
}
|
||||
|
||||
if ret = ffmpeg.AvFilterGraphCreateFilter(&buffersinkCtx, buffersink, "out",
|
||||
ffmpeg.NIL, nil, filterGraph); ret < 0 {
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Cannot create buffer sink\n")
|
||||
goto end
|
||||
}
|
||||
|
||||
if ret = ffmpeg.AvOptSetBin(buffersinkCtx, "pix_fmts",
|
||||
encCtx.GetPixFmtAddr(), unsafe.Sizeof(encCtx.GetPixFmt()), ffmpeg.AV_OPT_SEARCH_CHILDREN); ret < 0 {
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Cannot set output pixel format\n")
|
||||
goto end
|
||||
}
|
||||
} else if decCtx.GetCodecType() == ffmpeg.AVMEDIA_TYPE_AUDIO {
|
||||
buffersrc = ffmpeg.AvFilterGetByName("abuffer")
|
||||
buffersink = ffmpeg.AvFilterGetByName("abuffersink")
|
||||
if buffersrc == nil || buffersink == nil {
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "filtering source or sink element not found\n")
|
||||
ret = ffmpeg.AVERROR_UNKNOWN
|
||||
goto end
|
||||
}
|
||||
|
||||
if decCtx.GetChannelLayout() == 0 {
|
||||
decCtx.SetChannelLayout(uint64(ffmpeg.AvGetDefaultChannelLayout(decCtx.GetChannels())))
|
||||
}
|
||||
|
||||
args = fmt.Sprintf("time_base=%d/%d:sample_rate=%d:sample_fmt=%s:channel_layout=0x%d",
|
||||
decCtx.GetTimeBaseAddr().GetNum(), decCtx.GetTimeBaseAddr().GetDen(), decCtx.GetSampleRate(),
|
||||
ffmpeg.AvGetSampleFmtName(decCtx.GetSampleFmt()), decCtx.GetChannelLayout())
|
||||
|
||||
if ret = ffmpeg.AvFilterGraphCreateFilter(&buffersrcCtx, buffersrc, "in",
|
||||
args, nil, filterGraph); ret < 0 {
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Cannot create audio buffer source\n")
|
||||
goto end
|
||||
}
|
||||
|
||||
if ret = ffmpeg.AvFilterGraphCreateFilter(&buffersinkCtx, buffersink, "out",
|
||||
ffmpeg.NIL, nil, filterGraph); ret < 0 {
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Cannot create audio buffer sink\n")
|
||||
goto end
|
||||
}
|
||||
|
||||
if ret = ffmpeg.AvOptSetBin(buffersinkCtx, "sample_fmts",
|
||||
encCtx.GetSampleFmtAddr(), unsafe.Sizeof(encCtx.GetSampleFmt()), ffmpeg.AV_OPT_SEARCH_CHILDREN); ret < 0 {
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Cannot set output sample format\n")
|
||||
goto end
|
||||
}
|
||||
|
||||
if ret = ffmpeg.AvOptSetBin(buffersinkCtx, "channel_layouts",
|
||||
encCtx.GetChannelLayoutAddr(), unsafe.Sizeof(encCtx.GetChannelLayout()), ffmpeg.AV_OPT_SEARCH_CHILDREN); ret < 0 {
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Cannot set output channel layout\n")
|
||||
goto end
|
||||
}
|
||||
|
||||
if ret = ffmpeg.AvOptSetBin(buffersinkCtx, "sample_rates",
|
||||
encCtx.GetSampleRateAddr(), unsafe.Sizeof(encCtx.GetSampleRate()), ffmpeg.AV_OPT_SEARCH_CHILDREN); ret < 0 {
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Cannot set output sample rate\n")
|
||||
goto end
|
||||
}
|
||||
} else {
|
||||
ret = ffmpeg.AVERROR_UNKNOWN
|
||||
goto end
|
||||
}
|
||||
|
||||
// Endpoints for the filter graph.
|
||||
outputs.SetName("in")
|
||||
outputs.SetFilterCtx(buffersrcCtx)
|
||||
outputs.SetPadIdx(0)
|
||||
outputs.SetNext(nil)
|
||||
|
||||
inputs.SetName("out")
|
||||
inputs.SetFilterCtx(buffersinkCtx)
|
||||
inputs.SetPadIdx(0)
|
||||
inputs.SetNext(nil)
|
||||
|
||||
if len(outputs.GetName()) == 0 || len(inputs.GetName()) == 0 {
|
||||
ret = ffmpeg.AVERROR(syscall.ENOMEM)
|
||||
goto end
|
||||
}
|
||||
|
||||
if ret = ffmpeg.AvFilterGraphParsePtr(filterGraph, filterSpec, &inputs, &outputs, nil); ret < 0 {
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Cannot parse filter graph\n")
|
||||
goto end
|
||||
}
|
||||
|
||||
if ret = ffmpeg.AvFilterGraphConfig(filterGraph, nil); ret < 0 {
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Cannot config filter graph\n")
|
||||
goto end
|
||||
}
|
||||
|
||||
// Fill FilteringContext
|
||||
fctx.buffersinkCtx = buffersinkCtx
|
||||
fctx.buffersrcCtx = buffersrcCtx
|
||||
fctx.filterGraph = filterGraph
|
||||
|
||||
end:
|
||||
ffmpeg.AvFilterInoutFree(&inputs)
|
||||
ffmpeg.AvFilterInoutFree(&outputs)
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
func initFilters(ifmtCtx *ffmpeg.AVFormatContext, streamCtx []streamContext) (filterCtx []filteringContext, ret int32) {
|
||||
var (
|
||||
filterSpec string
|
||||
)
|
||||
|
||||
filterCtx = make([]filteringContext, ifmtCtx.GetNbStreams())
|
||||
|
||||
for i := 0; i < int(ifmtCtx.GetNbStreams()); i++ {
|
||||
if !(ifmtCtx.GetStreams()[i].GetCodecpar().GetCodecType() == ffmpeg.AVMEDIA_TYPE_AUDIO ||
|
||||
ifmtCtx.GetStreams()[i].GetCodecpar().GetCodecType() == ffmpeg.AVMEDIA_TYPE_VIDEO) {
|
||||
continue
|
||||
}
|
||||
|
||||
if ifmtCtx.GetStreams()[i].GetCodecpar().GetCodecType() == ffmpeg.AVMEDIA_TYPE_VIDEO {
|
||||
// passthrough (dummy) filter for video
|
||||
filterSpec = "null"
|
||||
} else {
|
||||
// passthrough (dummy) filter for audio
|
||||
filterSpec = "anull"
|
||||
}
|
||||
if ret = initFilter(&filterCtx[i], streamCtx[i].decCtx, streamCtx[i].encCtx, filterSpec); ret > 0 {
|
||||
return nil, ret
|
||||
}
|
||||
|
||||
if filterCtx[i].encPkt = ffmpeg.AvPacketAlloc(); filterCtx[i].encPkt == nil {
|
||||
return nil, ffmpeg.AVERROR(syscall.ENOMEM)
|
||||
}
|
||||
|
||||
if filterCtx[i].filteredFrame = ffmpeg.AvFrameAlloc(); filterCtx[i].filteredFrame == nil {
|
||||
return nil, ffmpeg.AVERROR(syscall.ENOMEM)
|
||||
}
|
||||
}
|
||||
return filterCtx, 0
|
||||
}
|
||||
|
||||
func encodeWriteFrame(ofmtCtx *ffmpeg.AVFormatContext, streamCtx []streamContext, filterCtx []filteringContext,
|
||||
streamIndex int32, flush int32) (ret int32) {
|
||||
var (
|
||||
stream = &streamCtx[streamIndex]
|
||||
filter = &filterCtx[streamIndex]
|
||||
filtFrame *ffmpeg.AVFrame
|
||||
encPkt = filter.encPkt
|
||||
)
|
||||
if flush == 0 {
|
||||
filtFrame = filter.filteredFrame
|
||||
}
|
||||
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_INFO, "Encoding frame\n")
|
||||
// encode filtered frame
|
||||
ffmpeg.AvPacketUnref(encPkt)
|
||||
|
||||
if ret = ffmpeg.AvCodecSendFrame(stream.encCtx, filtFrame); ret < 0 {
|
||||
return ret
|
||||
}
|
||||
|
||||
for ret >= 0 {
|
||||
ret = ffmpeg.AvCodecReceivePacket(stream.encCtx, encPkt)
|
||||
|
||||
if ret == ffmpeg.AVERROR(syscall.EAGAIN) || ret == ffmpeg.AVERROR_EOF {
|
||||
return 0
|
||||
}
|
||||
|
||||
// prepare packet for muxing
|
||||
encPkt.SetStreamIndex(streamIndex)
|
||||
ffmpeg.AvPacketRescaleTs(encPkt, stream.encCtx.GetTimeBase(),
|
||||
ofmtCtx.GetStreams()[streamIndex].GetTimeBase())
|
||||
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_DEBUG, "Muxing frame\n")
|
||||
// mux encoded frame
|
||||
ret = ffmpeg.AvInterleavedWriteFrame(ofmtCtx, encPkt)
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
func filterEncodeWriteFrame(ofmtCtx *ffmpeg.AVFormatContext, streamCtx []streamContext, filterCtx []filteringContext,
|
||||
frame *ffmpeg.AVFrame, streamIndex int32) (ret int32) {
|
||||
var (
|
||||
filter = &filterCtx[streamIndex]
|
||||
)
|
||||
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_INFO, "Pushing decoded frame to filters\n")
|
||||
// push the decoded frame into the filtergraph
|
||||
if ret = ffmpeg.AvBuffersrcAddFrameFlags(filter.buffersrcCtx, frame, 0); ret < 0 {
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Error while feeding the filtergraph\n")
|
||||
return ret
|
||||
}
|
||||
|
||||
// pull filtered frames from the filtergraph
|
||||
for {
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_INFO, "Pulling filtered frame from filters\n")
|
||||
if ret = ffmpeg.AvBuffersinkGetFrame(filter.buffersinkCtx, filter.filteredFrame); ret < 0 {
|
||||
// if no more frames for output - returns AVERROR(EAGAIN)
|
||||
// if flushed and no more frames for output - returns AVERROR_EOF
|
||||
// rewrite retcode to 0 to show it as normal procedure completion
|
||||
if ret == ffmpeg.AVERROR(syscall.EAGAIN) || ret == ffmpeg.AVERROR_EOF {
|
||||
ret = 0
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
filter.filteredFrame.SetPictType(ffmpeg.AV_PICTURE_TYPE_NONE)
|
||||
ret = encodeWriteFrame(ofmtCtx, streamCtx, filterCtx, streamIndex, 0)
|
||||
ffmpeg.AvFrameUnref(filter.filteredFrame)
|
||||
if ret < 0 {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
||||
|
||||
func flushEncoder(ofmtCtx *ffmpeg.AVFormatContext, streamCtx []streamContext, filterCtx []filteringContext,
|
||||
streamIndex int32) (ret int32) {
|
||||
if (streamCtx[streamIndex].encCtx.GetCodec().GetCapabilities() & ffmpeg.AV_CODEC_CAP_DELAY) == 0 {
|
||||
return 0
|
||||
}
|
||||
ffmpeg.AvLog(nil, ffmpeg.AV_LOG_INFO, "Flushing stream #%d encoder\n", streamIndex)
|
||||
return encodeWriteFrame(ofmtCtx, streamCtx, filterCtx, streamIndex, 1)
|
||||
}
|
||||
|
||||
// main transcodes os.Args[1] into os.Args[2]: audio/video streams are
// decoded, pushed through a pass-through filter graph and reencoded, while
// every other stream type is remuxed as-is.
func main() {
	var (
		ret         int32
		packet      *ffmpeg.AVPacket
		streamIndex int32
		ifmtCtx     *ffmpeg.AVFormatContext
		ofmtCtx     *ffmpeg.AVFormatContext
		streamCtx   []streamContext
		filterCtx   []filteringContext
	)

	if len(os.Args) != 3 {
		ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Usage: %s <input file> <output file>\n", os.Args[0])
		os.Exit(1)
	}

	if ifmtCtx, streamCtx, ret = openInputFile(os.Args[1]); ret < 0 {
		goto end
	}
	if ofmtCtx, ret = openOutputFile(ifmtCtx, streamCtx, os.Args[2]); ret < 0 {
		goto end
	}
	if filterCtx, ret = initFilters(ifmtCtx, streamCtx); ret < 0 {
		goto end
	}
	if packet = ffmpeg.AvPacketAlloc(); packet == nil {
		goto end
	}

	// read all packets
	for {
		if ret = ffmpeg.AvReadFrame(ifmtCtx, packet); ret < 0 {
			break
		}
		streamIndex = packet.GetStreamIndex()
		ffmpeg.AvLog(nil, ffmpeg.AV_LOG_DEBUG, "Demuxer gave frame of stream_index %d\n", streamIndex)

		// A non-nil filterGraph marks a stream selected for reencoding.
		if filterCtx[streamIndex].filterGraph != nil {
			stream := &streamCtx[streamIndex]

			ffmpeg.AvLog(nil, ffmpeg.AV_LOG_DEBUG, "Going to reencode&filter the frame\n")

			// Rescale from the demuxer's time base to the decoder's before
			// feeding the decoder.
			ffmpeg.AvPacketRescaleTs(packet,
				ifmtCtx.GetStreams()[streamIndex].GetTimeBase(),
				stream.decCtx.GetTimeBase())
			if ret = ffmpeg.AvCodecSendPacket(stream.decCtx, packet); ret < 0 {
				ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Decoding failed\n")
				break
			}

			// Drain every frame the decoder has ready for this packet.
			for ret >= 0 {
				ret = ffmpeg.AvCodecReceiveFrame(stream.decCtx, stream.decFrame)
				if ret == ffmpeg.AVERROR_EOF || ret == ffmpeg.AVERROR(syscall.EAGAIN) {
					break
				} else if ret < 0 {
					goto end
				}

				stream.decFrame.SetPts(stream.decFrame.GetBestEffortTimestamp())
				if ret = filterEncodeWriteFrame(ofmtCtx, streamCtx, filterCtx,
					stream.decFrame, streamIndex); ret < 0 {
					goto end
				}
			}
		} else {
			// remux this frame without reencoding
			ffmpeg.AvPacketRescaleTs(packet,
				ifmtCtx.GetStreams()[streamIndex].GetTimeBase(),
				ofmtCtx.GetStreams()[streamIndex].GetTimeBase())
			if ret = ffmpeg.AvInterleavedWriteFrame(ofmtCtx, packet); ret < 0 {
				goto end
			}
		}
		ffmpeg.AvPacketUnref(packet)
	}

	// flush filters and encoders
	for i := int32(0); i < int32(ifmtCtx.GetNbStreams()); i++ {
		// flush filter
		if filterCtx[i].filterGraph == nil {
			// stream was remuxed only; nothing to flush
			continue
		}
		if ret = filterEncodeWriteFrame(ofmtCtx, streamCtx, filterCtx, nil, i); ret < 0 {
			ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Flushing filter failed\n")
			goto end
		}

		// flush encoder
		if ret = flushEncoder(ofmtCtx, streamCtx, filterCtx, i); ret < 0 {
			ffmpeg.AvLog(nil, ffmpeg.AV_LOG_ERROR, "Flushing encoder failed\n")
			goto end
		}
	}

	ffmpeg.AvWriteTrailer(ofmtCtx)
end:
	// Cleanup path shared by success and every failure above; the free
	// helpers used here tolerate nil arguments.
	ffmpeg.AvPacketFree(&packet)
	for i := 0; i < len(streamCtx); i++ {
		ffmpeg.AvCodecFreeContext(&streamCtx[i].decCtx)
		if ofmtCtx != nil && ofmtCtx.GetNbStreams() > uint32(i) &&
			ofmtCtx.GetStreams()[i] != nil && streamCtx[i].encCtx != nil {
			ffmpeg.AvCodecFreeContext(&streamCtx[i].encCtx)
		}
		if filterCtx != nil && filterCtx[i].filterGraph != nil {
			ffmpeg.AvFilterGraphFree(&filterCtx[i].filterGraph)
			ffmpeg.AvPacketFree(&filterCtx[i].encPkt)
			ffmpeg.AvFrameFree(&filterCtx[i].filteredFrame)
		}

		ffmpeg.AvFrameFree(&streamCtx[i].decFrame)
	}
	ffmpeg.AvFormatCloseInput(&ifmtCtx)
	if ofmtCtx != nil && (ofmtCtx.GetOformat().GetFlags()&ffmpeg.AVFMT_NOFILE) == 0 {
		ffmpeg.AvIOClosep(ofmtCtx.GetPbAddr())
	}
	ffmpeg.AvFormatFreeContext(ofmtCtx)

	// EOF from av_read_frame is the normal end of input, not an error.
	if ret < 0 && ret != ffmpeg.AVERROR_EOF {
		fmt.Fprintf(os.Stderr, "Error occurred: %s\n", ffmpeg.AvErr2str(ret))
		os.Exit(1)
	}
	os.Exit(0)
}
|
||||
|
@@ -1,5 +1,205 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"strconv"
|
||||
"syscall"
|
||||
"unsafe"
|
||||
|
||||
ffmpeg "github.com/qrtc/ffmpeg-dev-go"
|
||||
)
|
||||
|
||||
// setHwFrameCtx attaches a VAAPI hardware frame pool to the encoder context:
// AV_PIX_FMT_VAAPI surfaces with NV12 as the software format, at the given
// dimensions, with an initial pool of 20 surfaces.
//
// Returns 0 on success or a negative value on failure. The local frames
// reference is always unreferenced before returning; the encoder keeps its
// own reference installed via SetHwFramesCtx.
func setHwFrameCtx(ctx *ffmpeg.AVCodecContext, hwDeviceCtx *ffmpeg.AVBufferRef, width, height int32) (ret int32) {
	var (
		hwFramesRef *ffmpeg.AVBufferRef
		framesCtx   *ffmpeg.AVHWFramesContext
	)

	if hwFramesRef = ffmpeg.AvHWFrameCtxAlloc(hwDeviceCtx); hwFramesRef == nil {
		fmt.Fprintf(os.Stderr, "Failed to create VAAPI frame context.\n")
		return -1
	}
	// The AVBufferRef's data pointer is the AVHWFramesContext itself.
	framesCtx = (*ffmpeg.AVHWFramesContext)(unsafe.Pointer(hwFramesRef.GetData()))
	framesCtx.SetFormat(ffmpeg.AV_PIX_FMT_VAAPI)
	framesCtx.SetSwFormat(ffmpeg.AV_PIX_FMT_NV12)
	framesCtx.SetWidth(width)
	framesCtx.SetHeight(height)
	framesCtx.SetInitialPoolSize(20)
	if ret = ffmpeg.AvHWFrameCtxInit(hwFramesRef); ret < 0 {
		fmt.Fprintf(os.Stderr, "Failed to initialize VAAPI frame context."+
			"Error code: %s\n", ffmpeg.AvErr2str(ret))
		ffmpeg.AvBufferUnref(&hwFramesRef)
		return ret
	}
	// Give the encoder context its own reference to the frame pool.
	ctx.SetHwFramesCtx(ffmpeg.AvBufferRef(hwFramesRef))
	if ctx.GetHwFramesCtx() == nil {
		ret = ffmpeg.AVERROR(syscall.ENOMEM)
	}

	ffmpeg.AvBufferUnref(&hwFramesRef)
	return ret
}
|
||||
|
||||
func encodeWrite(avctx *ffmpeg.AVCodecContext, frame *ffmpeg.AVFrame, fout *os.File) (ret int32) {
|
||||
var (
|
||||
encPkt *ffmpeg.AVPacket
|
||||
)
|
||||
|
||||
if encPkt = ffmpeg.AvPacketAlloc(); encPkt == nil {
|
||||
return ffmpeg.AVERROR(syscall.ENOMEM)
|
||||
}
|
||||
|
||||
if ret = ffmpeg.AvCodecSendFrame(avctx, frame); ret < 0 {
|
||||
fmt.Fprintf(os.Stderr, "Error code: %s\n", ffmpeg.AvErr2str(ret))
|
||||
goto end
|
||||
}
|
||||
for {
|
||||
if ret = ffmpeg.AvCodecReceivePacket(avctx, encPkt); ret != 0 {
|
||||
break
|
||||
}
|
||||
|
||||
encPkt.SetStreamIndex(0)
|
||||
fout.Write(unsafe.Slice(encPkt.GetData(), encPkt.GetSize()))
|
||||
}
|
||||
end:
|
||||
ffmpeg.AvPacketFree(&encPkt)
|
||||
ret = ffmpeg.CondExpr(ret == ffmpeg.AVERROR(syscall.EAGAIN), 0, int32(-1))
|
||||
return ret
|
||||
}
|
||||
|
||||
func main() {
|
||||
var (
|
||||
hwDeviceCtx *ffmpeg.AVBufferRef
|
||||
fin, fout *os.File
|
||||
swFrame, hwFrame *ffmpeg.AVFrame
|
||||
avctx *ffmpeg.AVCodecContext
|
||||
codec *ffmpeg.AVCodec
|
||||
width, height, size int32
|
||||
encName = "h264_vaapi"
|
||||
ret int32
|
||||
err error
|
||||
)
|
||||
|
||||
if len(os.Args) < 5 {
|
||||
fmt.Fprintf(os.Stderr, "Usage: %s <width> <height> <input file> <output file>\n", os.Args[0])
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
if n, err := strconv.ParseInt(os.Args[1], 10, 32); err != nil {
|
||||
width = int32(n)
|
||||
}
|
||||
if n, err := strconv.ParseInt(os.Args[2], 10, 32); err != nil {
|
||||
height = int32(n)
|
||||
}
|
||||
size = width * height
|
||||
|
||||
fin, err = os.OpenFile(os.Args[3], os.O_RDONLY, 0666)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Could not open %s\n", os.Args[3])
|
||||
os.Exit(1)
|
||||
}
|
||||
fout, err = os.OpenFile(os.Args[4], os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0755)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Could not open %s\n", os.Args[4])
|
||||
goto close
|
||||
}
|
||||
|
||||
if ret = ffmpeg.AvHWDeviceCtxCreate(&hwDeviceCtx, ffmpeg.AV_HWDEVICE_TYPE_VAAPI,
|
||||
ffmpeg.NIL, nil, 0); ret < 0 {
|
||||
fmt.Fprintf(os.Stderr, "Failed to create a VAAPI device. Error code: %s\n", ffmpeg.AvErr2str(ret))
|
||||
goto close
|
||||
}
|
||||
|
||||
if codec = ffmpeg.AvCodecFindEncoderByName(encName); codec == nil {
|
||||
fmt.Fprintf(os.Stderr, "Could not find encoder.\n")
|
||||
ret = -1
|
||||
goto close
|
||||
}
|
||||
|
||||
if avctx = ffmpeg.AvCodecAllocContext3(codec); avctx == nil {
|
||||
ret = ffmpeg.AVERROR(syscall.ENOMEM)
|
||||
goto close
|
||||
}
|
||||
avctx.SetWidth(width)
|
||||
avctx.SetHeight(height)
|
||||
avctx.SetTimeBase(ffmpeg.AvMakeQ(1, 25))
|
||||
avctx.SetFramerate(ffmpeg.AvMakeQ(25, 1))
|
||||
avctx.SetPixFmt(ffmpeg.AV_PIX_FMT_VAAPI)
|
||||
|
||||
// set hw_frames_ctx for encoder's AVCodecContext
|
||||
if ret = setHwFrameCtx(avctx, hwDeviceCtx, width, height); ret < 0 {
|
||||
fmt.Fprintf(os.Stderr, "Failed to set hwframe context.\n")
|
||||
goto close
|
||||
}
|
||||
|
||||
if ret = ffmpeg.AvCodecOpen2(avctx, codec, nil); ret < 0 {
|
||||
fmt.Fprintf(os.Stderr, "Cannot open video encoder codec. Error code: %s\n", ffmpeg.AvErr2str(ret))
|
||||
goto close
|
||||
}
|
||||
|
||||
for {
|
||||
if swFrame = ffmpeg.AvFrameAlloc(); swFrame == nil {
|
||||
ret = ffmpeg.AVERROR(syscall.ENOMEM)
|
||||
goto close
|
||||
}
|
||||
// read data into software frame, and transfer them into hw frame
|
||||
swFrame.SetWidth(width)
|
||||
swFrame.SetHeight(height)
|
||||
swFrame.SetFormat(ffmpeg.AV_PIX_FMT_NV12)
|
||||
if ret = ffmpeg.AvFrameGetBuffer(swFrame, 0); ret < 0 {
|
||||
goto close
|
||||
}
|
||||
frameData := ffmpeg.SliceSlice(&swFrame.GetData()[0], 2, size)
|
||||
if _, err = fin.Read(frameData[0]); err != nil {
|
||||
break
|
||||
}
|
||||
if _, err = fin.Read(frameData[1][0 : size/2]); err != nil {
|
||||
break
|
||||
}
|
||||
|
||||
if hwFrame = ffmpeg.AvFrameAlloc(); hwFrame == nil {
|
||||
ret = ffmpeg.AVERROR(syscall.ENOMEM)
|
||||
goto close
|
||||
}
|
||||
if ret = ffmpeg.AvHWFrameGetBuffer(avctx.GetHwDeviceCtx(), hwFrame, 0); ret < 0 {
|
||||
fmt.Fprintf(os.Stderr, "Error code: %s\n", ffmpeg.AvErr2str(ret))
|
||||
goto close
|
||||
}
|
||||
if hwFrame.GetHwFramesCtx() == nil {
|
||||
ret = ffmpeg.AVERROR(syscall.ENOMEM)
|
||||
goto close
|
||||
}
|
||||
if ret = ffmpeg.AvHWFrameTransferData(hwFrame, swFrame, 0); ret < 0 {
|
||||
fmt.Fprintf(os.Stderr, "Error while transferring frame data to surface."+
|
||||
"Error code: %s\n", ffmpeg.AvErr2str(ret))
|
||||
goto close
|
||||
}
|
||||
|
||||
if ret = encodeWrite(avctx, hwFrame, fout); ret < 0 {
|
||||
fmt.Fprintf(os.Stderr, "Failed to encode.\n")
|
||||
goto close
|
||||
}
|
||||
ffmpeg.AvFrameFree(&hwFrame)
|
||||
ffmpeg.AvFrameFree(&swFrame)
|
||||
}
|
||||
|
||||
// flush encoder
|
||||
if ret = encodeWrite(avctx, nil, fout); ret == ffmpeg.AVERROR_EOF {
|
||||
ret = 0
|
||||
}
|
||||
|
||||
close:
|
||||
if fin != nil {
|
||||
fin.Close()
|
||||
}
|
||||
if fout != nil {
|
||||
fout.Close()
|
||||
}
|
||||
ffmpeg.AvFrameFree(&swFrame)
|
||||
ffmpeg.AvFrameFree(&hwFrame)
|
||||
ffmpeg.AvCodecFreeContext(&avctx)
|
||||
ffmpeg.AvBufferUnref(&hwDeviceCtx)
|
||||
|
||||
os.Exit(int(ret))
|
||||
}
|
||||
|
Reference in New Issue
Block a user