improve examples (#703)

* add client-play-format-av1-to-jpeg
* improve client-play-format-av1 to decode frames
* improve speed of sample decoders by using pointers instead of copies
* improve client-record-format-h264 and client-record-format-h265 to encode frames
* add client-record-format-av1
This commit is contained in:
Alessandro Ros
2025-02-19 22:00:49 +01:00
committed by GitHub
parent 55556f1ecf
commit a17e1f776e
26 changed files with 1978 additions and 647 deletions
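The "pointers instead of copies" improvement mentioned in the commit message consists of handing FFmpeg a pointer into the Go image buffer instead of copying pixel data into the frame, which is the pattern encode() uses in the files below. A minimal sketch of the idea, assuming libavutil is available via pkg-config; attachRGBA is a hypothetical helper name, not part of the example:

package main
// #cgo pkg-config: libavutil
// #include <libavutil/frame.h>
import "C"
import (
"image"
"unsafe"
)
// attachRGBA points the frame's first plane at the Go pixel buffer instead of
// copying it; the image must stay alive until the frame has been consumed
// (for instance by sws_scale).
func attachRGBA(frame *C.AVFrame, img *image.RGBA) {
frame.data[0] = (*C.uint8_t)(unsafe.Pointer(&img.Pix[0])) // reuse Go memory, no copy
frame.linesize[0] = (C.int)(img.Stride) // bytes per row of the RGBA image
}
func main() {
frame := C.av_frame_alloc()
defer C.av_frame_free(&frame)
img := image.NewRGBA(image.Rect(0, 0, 640, 480))
attachRGBA(frame, img)
}

This trades one full-frame copy per image for the requirement that the Go image outlives any C call that reads the frame.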


@@ -0,0 +1,179 @@
package main
import (
"fmt"
"image"
"unsafe"
"github.com/bluenviron/mediacommon/v2/pkg/codecs/h264"
)
// #cgo pkg-config: libavcodec libavutil libswscale
// #include <libavcodec/avcodec.h>
// #include <libswscale/swscale.h>
// #include <libavutil/opt.h>
import "C"
// frameData converts the frame's data array into a pointer usable by swscale functions.
func frameData(frame *C.AVFrame) **C.uint8_t {
return (**C.uint8_t)(unsafe.Pointer(&frame.data[0]))
}
// frameLineSize converts the frame's linesize array into a pointer usable by swscale functions.
func frameLineSize(frame *C.AVFrame) *C.int {
return (*C.int)(unsafe.Pointer(&frame.linesize[0]))
}
// h265Encoder is a wrapper around FFmpeg's H265 encoder.
type h265Encoder struct {
Width int
Height int
FPS int
codecCtx *C.AVCodecContext
rgbaFrame *C.AVFrame
yuv420Frame *C.AVFrame
swsCtx *C.struct_SwsContext
pkt *C.AVPacket
}
// initialize initializes a h265Encoder.
func (d *h265Encoder) initialize() error {
codec := C.avcodec_find_encoder(C.AV_CODEC_ID_H265)
if codec == nil {
return fmt.Errorf("avcodec_find_encoder() failed")
}
d.codecCtx = C.avcodec_alloc_context3(codec)
if d.codecCtx == nil {
return fmt.Errorf("avcodec_alloc_context3() failed")
}
key := C.CString("tune")
defer C.free(unsafe.Pointer(key))
val := C.CString("zerolatency")
defer C.free(unsafe.Pointer(val))
C.av_opt_set(d.codecCtx.priv_data, key, val, 0)
key = C.CString("preset")
defer C.free(unsafe.Pointer(key))
val = C.CString("ultrafast")
defer C.free(unsafe.Pointer(val))
C.av_opt_set(d.codecCtx.priv_data, key, val, 0)
d.codecCtx.pix_fmt = C.AV_PIX_FMT_YUV420P
d.codecCtx.width = (C.int)(d.Width)
d.codecCtx.height = (C.int)(d.Height)
d.codecCtx.time_base.num = 1
d.codecCtx.time_base.den = (C.int)(d.FPS)
d.codecCtx.gop_size = 10
d.codecCtx.max_b_frames = 0
d.codecCtx.bit_rate = 600000
res := C.avcodec_open2(d.codecCtx, codec, nil)
if res < 0 {
C.avcodec_close(d.codecCtx)
return fmt.Errorf("avcodec_open2() failed")
}
d.rgbaFrame = C.av_frame_alloc()
if d.rgbaFrame == nil {
C.avcodec_close(d.codecCtx)
return fmt.Errorf("av_frame_alloc() failed")
}
d.rgbaFrame.format = C.AV_PIX_FMT_RGBA
d.rgbaFrame.width = d.codecCtx.width
d.rgbaFrame.height = d.codecCtx.height
res = C.av_frame_get_buffer(d.rgbaFrame, 0)
if res < 0 {
C.av_frame_free(&d.rgbaFrame)
C.avcodec_close(d.codecCtx)
return fmt.Errorf("av_frame_get_buffer() failed")
}
d.yuv420Frame = C.av_frame_alloc()
if d.yuv420Frame == nil {
C.av_frame_free(&d.rgbaFrame)
C.avcodec_close(d.codecCtx)
return fmt.Errorf("av_frame_alloc() failed")
}
d.yuv420Frame.format = C.AV_PIX_FMT_YUV420P
d.yuv420Frame.width = d.codecCtx.width
d.yuv420Frame.height = d.codecCtx.height
res = C.av_frame_get_buffer(d.yuv420Frame, 0)
if res < 0 {
C.av_frame_free(&d.yuv420Frame)
C.av_frame_free(&d.rgbaFrame)
C.avcodec_close(d.codecCtx)
return fmt.Errorf("av_frame_get_buffer() failed")
}
d.swsCtx = C.sws_getContext(d.rgbaFrame.width, d.rgbaFrame.height, (int32)(d.rgbaFrame.format),
d.yuv420Frame.width, d.yuv420Frame.height, (int32)(d.yuv420Frame.format), C.SWS_BILINEAR, nil, nil, nil)
if d.swsCtx == nil {
C.av_frame_free(&d.yuv420Frame)
C.av_frame_free(&d.rgbaFrame)
C.avcodec_close(d.codecCtx)
return fmt.Errorf("sws_getContext() failed")
}
d.pkt = C.av_packet_alloc()
if d.pkt == nil {
C.sws_freeContext(d.swsCtx)
C.av_frame_free(&d.yuv420Frame)
C.av_frame_free(&d.rgbaFrame)
C.avcodec_close(d.codecCtx)
return fmt.Errorf("av_packet_alloc() failed")
}
return nil
}
// close closes the encoder.
func (d *h265Encoder) close() {
C.av_packet_free(&d.pkt)
C.sws_freeContext(d.swsCtx)
C.av_frame_free(&d.yuv420Frame)
C.av_frame_free(&d.rgbaFrame)
C.avcodec_close(d.codecCtx)
}
// encode encodes an RGBA image into H265.
func (d *h265Encoder) encode(img *image.RGBA, pts int64) ([][]byte, int64, error) {
// pass image pointer to frame
d.rgbaFrame.data[0] = (*C.uint8_t)(&img.Pix[0])
// convert color space from RGBA to YUV420
res := C.sws_scale(d.swsCtx, frameData(d.rgbaFrame), frameLineSize(d.rgbaFrame),
0, d.rgbaFrame.height, frameData(d.yuv420Frame), frameLineSize(d.yuv420Frame))
if res < 0 {
return nil, 0, fmt.Errorf("sws_scale() failed")
}
// send frame to the encoder
d.yuv420Frame.pts = (C.int64_t)(pts)
res = C.avcodec_send_frame(d.codecCtx, d.yuv420Frame)
if res < 0 {
return nil, 0, fmt.Errorf("avcodec_send_frame() failed")
}
// wait for result
res = C.avcodec_receive_packet(d.codecCtx, d.pkt)
if res == -C.EAGAIN {
return nil, 0, nil
}
if res < 0 {
return nil, 0, fmt.Errorf("avcodec_receive_packet() failed")
}
// perform a deep copy of the data before unreferencing the packet
data := C.GoBytes(unsafe.Pointer(d.pkt.data), d.pkt.size)
pts = (int64)(d.pkt.pts)
C.av_packet_unref(d.pkt)
// parse the Annex-B stream into an access unit (a group of NAL units);
// the Annex-B byte-stream format is shared by H264 and H265, so the
// h264 helper can be reused here
var au h264.AnnexB
err := au.Unmarshal(data)
if err != nil {
return nil, 0, err
}
return au, pts, nil
}


@@ -1,77 +1,152 @@
//go:build cgo
package main
import (
"crypto/rand"
"image"
"image/color"
"log"
"net"
"time"
"github.com/bluenviron/gortsplib/v4"
"github.com/bluenviron/gortsplib/v4/pkg/description"
"github.com/bluenviron/gortsplib/v4/pkg/format"
"github.com/pion/rtp"
)
// This example shows how to
// 1. connect to a RTSP server, announce an H265 format
// 2. generate dummy RGBA images
// 3. encode images with H265
// 4. generate RTP packets from H265
// 5. write RTP packets to the server
// This example requires the FFmpeg libraries, which can be installed with this command:
// apt install -y libavformat-dev libswscale-dev gcc pkg-config
// multiplyAndDivide computes (v * m / d) while avoiding the overflow of a
// direct multiplication; it is used to convert a duration into units of the
// RTP clock rate.
func multiplyAndDivide(v, m, d int64) int64 {
secs := v / d
dec := v % d
return (secs*m + dec*m/d)
}
// randUint32 returns a cryptographically-random uint32, used as the starting
// RTP timestamp.
func randUint32() (uint32, error) {
var b [4]byte
_, err := rand.Read(b[:])
if err != nil {
return 0, err
}
return uint32(b[0])<<24 | uint32(b[1])<<16 | uint32(b[2])<<8 | uint32(b[3]), nil
}
// createDummyImage generates a solid-color RGBA test image, cycling between
// red, green and blue depending on i.
func createDummyImage(i int) *image.RGBA {
img := image.NewRGBA(image.Rect(0, 0, 640, 480))
var cl color.RGBA
switch i {
case 0:
cl = color.RGBA{255, 0, 0, 0}
case 1:
cl = color.RGBA{0, 255, 0, 0}
case 2:
cl = color.RGBA{0, 0, 255, 0}
}
for y := 0; y < img.Rect.Dy(); y++ {
for x := 0; x < img.Rect.Dx(); x++ {
img.SetRGBA(x, y, cl)
}
}
return img
}
func main() {
// create a stream description that contains a H265 format
forma := &format.H265{
PayloadTyp: 96,
}
desc := &description.Session{
Medias: []*description.Media{{
Type: description.MediaTypeVideo,
Formats: []format.Format{forma},
}},
}
// connect to the server, announce the format and start recording
c := gortsplib.Client{}
err := c.StartRecording("rtsp://myuser:mypass@localhost:8554/mystream", desc)
if err != nil {
panic(err)
}
defer c.Close()
// setup RGBA -> H265 encoder
h265enc := &h265Encoder{
Width: 640,
Height: 480,
FPS: 5,
}
err = h265enc.initialize()
if err != nil {
panic(err)
}
defer h265enc.close()
// setup H265 -> RTP encoder
rtpEnc, err := forma.CreateEncoder()
if err != nil {
panic(err)
}
start := time.Now()
randomStart, err := randUint32()
if err != nil {
panic(err)
}
// setup a ticker to sleep between frames
ticker := time.NewTicker(200 * time.Millisecond)
defer ticker.Stop()
i := 0
for range ticker.C {
// create a dummy image
img := createDummyImage(i)
i = (i + 1) % 3
// get current timestamp
pts := multiplyAndDivide(int64(time.Since(start)), int64(forma.ClockRate()), int64(time.Second))
// encode the image with H265
au, pts, err := h265enc.encode(img, pts)
if err != nil {
panic(err)
}
// wait for a H265 access unit
if au == nil {
continue
}
// generate RTP packets from the H265 access unit
pkts, err := rtpEnc.Encode(au)
if err != nil {
panic(err)
}
log.Printf("writing RTP packets with PTS=%d, au=%d, pkts=%d", pts, len(au), len(pkts))
// write RTP packets to the server
for _, pkt := range pkts {
pkt.Timestamp = uint32(int64(randomStart) + pts)
err = c.WritePacketRTP(desc.Medias[0], pkt)
if err != nil {
panic(err)
}
}
}
}