Mirror of https://github.com/aler9/gortsplib (synced 2025-09-27 03:25:52 +08:00)

improve examples (#708)
.github/workflows/test.yml (vendored, 2 changed lines)
@@ -20,7 +20,7 @@ jobs:
      with:
        go-version: ${{ matrix.go }}

      - run: sudo apt update && sudo apt install -y libavformat-dev libswscale-dev
      - run: sudo apt update && sudo apt install -y libavcodec-dev libswscale-dev libswresample-dev

      - run: make test-nodocker
@@ -64,7 +64,6 @@ Features:
* [client-play-format-av1](examples/client-play-format-av1/main.go)
* [client-play-format-av1-to-jpeg](examples/client-play-format-av1-to-jpeg/main.go)
* [client-play-format-g711](examples/client-play-format-g711/main.go)
* [client-play-format-g722](examples/client-play-format-g722/main.go)
* [client-play-format-h264](examples/client-play-format-h264/main.go)
* [client-play-format-h264-to-jpeg](examples/client-play-format-h264-to-jpeg/main.go)
* [client-play-format-h264-to-disk](examples/client-play-format-h264-to-disk/main.go)

@@ -84,7 +83,6 @@ Features:
* [client-record-pause](examples/client-record-pause/main.go)
* [client-record-format-av1](examples/client-record-format-av1/main.go)
* [client-record-format-g711](examples/client-record-format-g711/main.go)
* [client-record-format-g722](examples/client-record-format-g722/main.go)
* [client-record-format-h264](examples/client-record-format-h264/main.go)
* [client-record-format-h264-from-disk](examples/client-record-format-h264-from-disk/main.go)
* [client-record-format-h265](examples/client-record-format-h265/main.go)
examples/client-play-backchannel/dummy_audio.go (new file, 24 lines)
@@ -0,0 +1,24 @@
package main

import "math"

const (
    sampleRate = 8000
    frequency  = 400
    amplitude  = (1 << 14) - 1
)

func createDummyAudio(pts int64, prevPTS int64) []byte {
    sampleCount := (pts - prevPTS)
    n := 0
    ret := make([]byte, sampleCount*2)

    for i := int64(0); i < sampleCount; i++ {
        v := int16(amplitude * math.Sin((float64(prevPTS+i)*frequency*math.Pi*2)/sampleRate))
        ret[n] = byte(v >> 8)
        ret[n+1] = byte(v)
        n += 2
    }

    return ret
}
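
createDummyAudio returns 16-bit big-endian LPCM, two bytes per sample, and the number of samples equals the PTS delta expressed in the 8000 Hz clock. A quick size check for the 100 ms write interval used by the examples (a standalone sketch, not part of the commit):

package main

import "fmt"

func main() {
    const clockRate = 8000 // LPCM/G711 clock used by the examples
    const intervalMs = 100 // ticker period used by the examples

    samples := clockRate * intervalMs / 1000 // pts - prevPTS after one tick = 800 samples
    fmt.Println(samples * 2)                 // 1600 bytes of LPCM per write
}
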
@@ -1,63 +1,51 @@
package main

import (
    "crypto/rand"
    "log"
    "net"
    "time"

    "github.com/bluenviron/gortsplib/v4"
    "github.com/bluenviron/gortsplib/v4/pkg/base"
    "github.com/bluenviron/gortsplib/v4/pkg/description"
    "github.com/bluenviron/gortsplib/v4/pkg/format"
    "github.com/pion/rtp"
    "github.com/bluenviron/mediacommon/v2/pkg/codecs/g711"
)

// This example shows how to
// 1. generate a G711 stream and RTP packets with GStreamer
// 1. generate a dummy G711 audio stream
// 2. connect to a RTSP server, find a back channel that supports G711
// 3. route the packets from GStreamer to the channel

func findPCMUBackChannel(desc *description.Session) *description.Media {
func multiplyAndDivide(v, m, d int64) int64 {
    secs := v / d
    dec := v % d
    return (secs*m + dec*m/d)
}
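
multiplyAndDivide rescales a value from one unit to another (here nanoseconds into clock units) without ever forming v*m, which would overflow int64 for long-running streams. A standalone check with illustrative numbers:

package main

import "fmt"

// same formula as above: v*m/d, split so that v*m is never computed directly
func multiplyAndDivide(v, m, d int64) int64 {
    secs := v / d
    dec := v % d
    return (secs*m + dec*m/d)
}

func main() {
    // 2.5 seconds in nanoseconds, rescaled to an 8000 Hz clock
    fmt.Println(multiplyAndDivide(2_500_000_000, 8000, 1_000_000_000)) // 20000
}
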

func randUint32() (uint32, error) {
    var b [4]byte
    _, err := rand.Read(b[:])
    if err != nil {
        return 0, err
    }
    return uint32(b[0])<<24 | uint32(b[1])<<16 | uint32(b[2])<<8 | uint32(b[3]), nil
}

func findG711BackChannel(desc *description.Session) (*description.Media, *format.G711) {
    for _, media := range desc.Medias {
        if media.IsBackChannel {
            for _, forma := range media.Formats {
                if g711, ok := forma.(*format.G711); ok {
                    if g711.MULaw {
                        return media
                    }
                    return media, g711
                }
            }
        }
    }
    return nil
    return nil, nil
}
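
findG711BackChannel now returns the matched media together with its *format.G711, so the caller can build an encoder for the negotiated law. A minimal sketch of the call on a hand-built session (the session contents are invented for illustration, and the snippet assumes it lives in the same package as the function above; field names follow the code shown here):

package main

import (
    "fmt"

    "github.com/bluenviron/gortsplib/v4/pkg/description"
    "github.com/bluenviron/gortsplib/v4/pkg/format"
)

func main() {
    // a fabricated description containing a single A-law back channel
    desc := &description.Session{
        Medias: []*description.Media{{
            Type:          description.MediaTypeAudio,
            IsBackChannel: true,
            Formats: []format.Format{&format.G711{
                PayloadTyp:   8,
                MULaw:        false,
                SampleRate:   8000,
                ChannelCount: 1,
            }},
        }},
    }

    medi, forma := findG711BackChannel(desc)
    fmt.Println(medi != nil, forma.MULaw) // true false
}
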

func main() {
    // open a listener to receive RTP/G711 packets
    pc, err := net.ListenPacket("udp", "localhost:9000")
    if err != nil {
        panic(err)
    }
    defer pc.Close()

    log.Println("Waiting for a RTP/G711 stream on UDP port 9000 - you can generate one with GStreamer:\n\n" +
        "* audio from a test sine:\n\n" +
        "gst-launch-1.0 audiotestsrc freq=300 ! audioconvert ! audioresample ! audio/x-raw,rate=8000" +
        " ! mulawenc ! rtppcmupay ! udpsink host=127.0.0.1 port=9000\n\n" +
        "* audio from a file:\n\n" +
        "gst-launch-1.0 filesrc location=my_file.mp4 ! decodebin ! audioconvert ! audioresample ! audio/x-raw,rate=8000" +
        " ! mulawenc ! rtppcmupay ! udpsink host=127.0.0.1 port=9000\n\n" +
        "* audio from a microphone:\n\n" +
        "gst-launch-1.0 pulsesrc ! audioconvert ! audioresample ! audio/x-raw,rate=8000" +
        " ! mulawenc ! rtppcmupay ! udpsink host=127.0.0.1 port=9000\n")

    // wait for first packet
    buf := make([]byte, 2048)
    n, _, err := pc.ReadFrom(buf)
    if err != nil {
        panic(err)
    }
    log.Println("stream connected")

    c := gortsplib.Client{
        RequestBackChannels: true,
    }
@@ -82,7 +70,7 @@ func main() {
    }

    // find the back channel
    medi := findPCMUBackChannel(desc)
    medi, forma := findG711BackChannel(desc)
    if medi == nil {
        panic("media not found")
    }
@@ -99,24 +87,62 @@ func main() {
        panic(err)
    }

    var pkt rtp.Packet
    for {
        // parse RTP packet
        err = pkt.Unmarshal(buf[:n])
    // setup G711 -> RTP encoder
    rtpEnc, err := forma.CreateEncoder()
    if err != nil {
        panic(err)
    }

    start := time.Now()
    prevPTS := int64(0)

    randomStart, err := randUint32()
    if err != nil {
        panic(err)
    }

    // setup a ticker to sleep between writings
    ticker := time.NewTicker(100 * time.Millisecond)
    defer ticker.Stop()

    for range ticker.C {
        // get current timestamp
        pts := multiplyAndDivide(int64(time.Since(start)), int64(forma.ClockRate()), int64(time.Second))

        // generate dummy LPCM audio samples
        samples := createDummyAudio(pts, prevPTS)

        // encode samples with G711
        if forma.MULaw {
            samples, err = g711.Mulaw(samples).Marshal()
            if err != nil {
                panic(err)
            }
        } else {
            samples, err = g711.Alaw(samples).Marshal()
            if err != nil {
                panic(err)
            }
        }

        // generate RTP packets from G711 samples
        pkts, err := rtpEnc.Encode(samples)
        if err != nil {
            panic(err)
        }

        // route RTP packet to the server
        err = c.WritePacketRTP(medi, &pkt)
        if err != nil {
            panic(err)
        log.Printf("writing RTP packets with PTS=%d, sample size=%d, pkt count=%d", prevPTS, len(samples), len(pkts))

        // write RTP packets to the server
        for _, pkt := range pkts {
            pkt.Timestamp += uint32(int64(randomStart) + prevPTS)

            err = c.WritePacketRTP(desc.Medias[0], pkt)
            if err != nil {
                panic(err)
            }
        }

        // read another RTP packet from source
        n, _, err = pc.ReadFrom(buf)
        if err != nil {
            panic(err)
        }
        prevPTS = pts
    }
}
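
Instead of forwarding timestamps from an external source, the rewritten example offsets every encoder-produced timestamp by a random starting value plus the current PTS, so RTP timestamps start at an unpredictable value while still advancing in clock units. A small sketch of how that offset grows across 100 ms ticks (randomStart is an arbitrary illustration value):

package main

import "fmt"

func main() {
    var randomStart uint32 = 123456 // in the example this comes from randUint32()

    // PTS advances by 800 clock units per 100 ms tick at 8000 Hz
    for _, pts := range []int64{0, 800, 1600} {
        offset := randomStart + uint32(pts) // added on top of each packet's own timestamp
        fmt.Println(offset)                 // 123456, 124256, 125056
    }
}
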
@@ -73,12 +73,47 @@ func (d *av1Decoder) close() {
    C.avcodec_close(d.codecCtx)
}

func (d *av1Decoder) reinitDynamicStuff() error {
    if d.swsCtx != nil {
        C.sws_freeContext(d.swsCtx)
    }

    if d.rgbaFrame != nil {
        C.av_frame_free(&d.rgbaFrame)
    }

    d.rgbaFrame = C.av_frame_alloc()
    if d.rgbaFrame == nil {
        return fmt.Errorf("av_frame_alloc() failed")
    }

    d.rgbaFrame.format = C.AV_PIX_FMT_RGBA
    d.rgbaFrame.width = d.yuv420Frame.width
    d.rgbaFrame.height = d.yuv420Frame.height
    d.rgbaFrame.color_range = C.AVCOL_RANGE_JPEG

    res := C.av_frame_get_buffer(d.rgbaFrame, 1)
    if res < 0 {
        return fmt.Errorf("av_frame_get_buffer() failed")
    }

    d.swsCtx = C.sws_getContext(d.yuv420Frame.width, d.yuv420Frame.height, int32(d.yuv420Frame.format),
        d.rgbaFrame.width, d.rgbaFrame.height, (int32)(d.rgbaFrame.format), C.SWS_BILINEAR, nil, nil, nil)
    if d.swsCtx == nil {
        return fmt.Errorf("sws_getContext() failed")
    }

    rgbaFrameSize := C.av_image_get_buffer_size((int32)(d.rgbaFrame.format), d.rgbaFrame.width, d.rgbaFrame.height, 1)
    d.rgbaFramePtr = (*[1 << 30]uint8)(unsafe.Pointer(d.rgbaFrame.data[0]))[:rgbaFrameSize:rgbaFrameSize]
    return nil
}

// decode decodes a RGBA image from AV1.
func (d *av1Decoder) decode(tu [][]byte) (*image.RGBA, error) {
    // encode temporal unit into bytestream
    bs, err := av1.Bitstream(tu).Marshal()
    if err != nil {
        panic(err)
        return nil, err
    }

    // send access unit to decoder
@@ -102,37 +137,10 @@ func (d *av1Decoder) decode(tu [][]byte) (*image.RGBA, error) {

    // if frame size has changed, allocate needed objects
    if d.rgbaFrame == nil || d.rgbaFrame.width != d.yuv420Frame.width || d.rgbaFrame.height != d.yuv420Frame.height {
        if d.swsCtx != nil {
            C.sws_freeContext(d.swsCtx)
        err := d.reinitDynamicStuff()
        if err != nil {
            return nil, err
        }

        if d.rgbaFrame != nil {
            C.av_frame_free(&d.rgbaFrame)
        }

        d.rgbaFrame = C.av_frame_alloc()
        if d.rgbaFrame == nil {
            return nil, fmt.Errorf("av_frame_alloc() failed")
        }

        d.rgbaFrame.format = C.AV_PIX_FMT_RGBA
        d.rgbaFrame.width = d.yuv420Frame.width
        d.rgbaFrame.height = d.yuv420Frame.height
        d.rgbaFrame.color_range = C.AVCOL_RANGE_JPEG

        res = C.av_frame_get_buffer(d.rgbaFrame, 1)
        if res < 0 {
            return nil, fmt.Errorf("av_frame_get_buffer() failed")
        }

        d.swsCtx = C.sws_getContext(d.yuv420Frame.width, d.yuv420Frame.height, int32(d.yuv420Frame.format),
            d.rgbaFrame.width, d.rgbaFrame.height, (int32)(d.rgbaFrame.format), C.SWS_BILINEAR, nil, nil, nil)
        if d.swsCtx == nil {
            return nil, fmt.Errorf("sws_getContext() failed")
        }

        rgbaFrameSize := C.av_image_get_buffer_size((int32)(d.rgbaFrame.format), d.rgbaFrame.width, d.rgbaFrame.height, 1)
        d.rgbaFramePtr = (*[1 << 30]uint8)(unsafe.Pointer(d.rgbaFrame.data[0]))[:rgbaFrameSize:rgbaFrameSize]
    }

    // convert color space from YUV420 to RGBA
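
The refactor concentrates all size-dependent allocations (RGBA frame, swscale context, mapped pixel buffer) in reinitDynamicStuff(), which frees whatever the previous resolution allocated before building the new objects; decode() then only calls it when the incoming frame size differs from the current one. A cgo-free sketch of the same reinit-on-resize pattern (types and sizes invented for illustration):

package main

import "fmt"

// scaler stands in for the decoder's size-dependent state (rgbaFrame, swsCtx, rgbaFramePtr).
type scaler struct {
    width, height int
    buf           []byte
}

type decoder struct {
    s *scaler
}

// reinitDynamicStuff drops the previous scaler and allocates one for the new size.
func (d *decoder) reinitDynamicStuff(w, h int) error {
    d.s = &scaler{width: w, height: h, buf: make([]byte, w*h*4)} // RGBA: 4 bytes per pixel
    return nil
}

func (d *decoder) decode(w, h int) error {
    // reallocate only when the frame size has changed
    if d.s == nil || d.s.width != w || d.s.height != h {
        if err := d.reinitDynamicStuff(w, h); err != nil {
            return err
        }
    }
    fmt.Printf("converting %dx%d frame into %d-byte RGBA buffer\n", w, h, len(d.s.buf))
    return nil
}

func main() {
    d := &decoder{}
    d.decode(640, 480)  // allocates
    d.decode(640, 480)  // reuses
    d.decode(1280, 720) // reallocates
}
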
@@ -25,7 +25,7 @@ import (
// 4. convert RGBA frames to JPEG images and save them on disk

// This example requires the FFmpeg libraries, that can be installed with this command:
// apt install -y libavformat-dev libswscale-dev gcc pkg-config
// apt install -y libavcodec-dev libswscale-dev gcc pkg-config

func saveToFile(img image.Image) error {
    // create file
@@ -73,12 +73,47 @@ func (d *av1Decoder) close() {
|
||||
C.avcodec_close(d.codecCtx)
|
||||
}
|
||||
|
||||
func (d *av1Decoder) reinitDynamicStuff() error {
|
||||
if d.swsCtx != nil {
|
||||
C.sws_freeContext(d.swsCtx)
|
||||
}
|
||||
|
||||
if d.rgbaFrame != nil {
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
}
|
||||
|
||||
d.rgbaFrame = C.av_frame_alloc()
|
||||
if d.rgbaFrame == nil {
|
||||
return fmt.Errorf("av_frame_alloc() failed")
|
||||
}
|
||||
|
||||
d.rgbaFrame.format = C.AV_PIX_FMT_RGBA
|
||||
d.rgbaFrame.width = d.yuv420Frame.width
|
||||
d.rgbaFrame.height = d.yuv420Frame.height
|
||||
d.rgbaFrame.color_range = C.AVCOL_RANGE_JPEG
|
||||
|
||||
res := C.av_frame_get_buffer(d.rgbaFrame, 1)
|
||||
if res < 0 {
|
||||
return fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
d.swsCtx = C.sws_getContext(d.yuv420Frame.width, d.yuv420Frame.height, int32(d.yuv420Frame.format),
|
||||
d.rgbaFrame.width, d.rgbaFrame.height, (int32)(d.rgbaFrame.format), C.SWS_BILINEAR, nil, nil, nil)
|
||||
if d.swsCtx == nil {
|
||||
return fmt.Errorf("sws_getContext() failed")
|
||||
}
|
||||
|
||||
rgbaFrameSize := C.av_image_get_buffer_size((int32)(d.rgbaFrame.format), d.rgbaFrame.width, d.rgbaFrame.height, 1)
|
||||
d.rgbaFramePtr = (*[1 << 30]uint8)(unsafe.Pointer(d.rgbaFrame.data[0]))[:rgbaFrameSize:rgbaFrameSize]
|
||||
return nil
|
||||
}
|
||||
|
||||
// decode decodes a RGBA image from AV1.
|
||||
func (d *av1Decoder) decode(tu [][]byte) (*image.RGBA, error) {
|
||||
// encode temporal unit into bytestream
|
||||
bs, err := av1.Bitstream(tu).Marshal()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// send temporal unit to decoder
|
||||
@@ -102,37 +137,10 @@ func (d *av1Decoder) decode(tu [][]byte) (*image.RGBA, error) {
|
||||
|
||||
// if frame size has changed, allocate needed objects
|
||||
if d.rgbaFrame == nil || d.rgbaFrame.width != d.yuv420Frame.width || d.rgbaFrame.height != d.yuv420Frame.height {
|
||||
if d.swsCtx != nil {
|
||||
C.sws_freeContext(d.swsCtx)
|
||||
err := d.reinitDynamicStuff()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if d.rgbaFrame != nil {
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
}
|
||||
|
||||
d.rgbaFrame = C.av_frame_alloc()
|
||||
if d.rgbaFrame == nil {
|
||||
return nil, fmt.Errorf("av_frame_alloc() failed")
|
||||
}
|
||||
|
||||
d.rgbaFrame.format = C.AV_PIX_FMT_RGBA
|
||||
d.rgbaFrame.width = d.yuv420Frame.width
|
||||
d.rgbaFrame.height = d.yuv420Frame.height
|
||||
d.rgbaFrame.color_range = C.AVCOL_RANGE_JPEG
|
||||
|
||||
res = C.av_frame_get_buffer(d.rgbaFrame, 1)
|
||||
if res < 0 {
|
||||
return nil, fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
d.swsCtx = C.sws_getContext(d.yuv420Frame.width, d.yuv420Frame.height, int32(d.yuv420Frame.format),
|
||||
d.rgbaFrame.width, d.rgbaFrame.height, (int32)(d.rgbaFrame.format), C.SWS_BILINEAR, nil, nil, nil)
|
||||
if d.swsCtx == nil {
|
||||
return nil, fmt.Errorf("sws_getContext() failed")
|
||||
}
|
||||
|
||||
rgbaFrameSize := C.av_image_get_buffer_size((int32)(d.rgbaFrame.format), d.rgbaFrame.width, d.rgbaFrame.height, 1)
|
||||
d.rgbaFramePtr = (*[1 << 30]uint8)(unsafe.Pointer(d.rgbaFrame.data[0]))[:rgbaFrameSize:rgbaFrameSize]
|
||||
}
|
||||
|
||||
// convert color space from YUV420 to RGBA
|
||||
|
@@ -19,7 +19,7 @@ import (
// 3. decode the AV1 stream into RGBA frames

// This example requires the FFmpeg libraries, that can be installed with this command:
// apt install -y libavformat-dev libswscale-dev gcc pkg-config
// apt install -y libavcodec-dev libswscale-dev gcc pkg-config

func main() {
    c := gortsplib.Client{}
@@ -6,13 +6,14 @@ import (
    "github.com/bluenviron/gortsplib/v4"
    "github.com/bluenviron/gortsplib/v4/pkg/base"
    "github.com/bluenviron/gortsplib/v4/pkg/format"
    "github.com/bluenviron/mediacommon/v2/pkg/codecs/g711"
    "github.com/pion/rtp"
)

// This example shows how to
// 1. connect to a RTSP server
// 2. check if there's a G711 format
// 3. get G711 frames of that format
// 3. decode the G711 stream into audio samples

func main() {
    c := gortsplib.Client{}
@@ -64,15 +65,26 @@ func main() {
            return
        }

        // extract G711 frames from RTP packets
        op, err := rtpDec.Decode(pkt)
        // extract G711 samples from RTP packets
        samples, err := rtpDec.Decode(pkt)
        if err != nil {
            log.Printf("ERR: %v", err)
            return
        }

        // decode samples (these are 16-bit, big endian LPCM samples)
        if forma.MULaw {
            var raw g711.Mulaw
            raw.Unmarshal(samples)
            samples = raw
        } else {
            var raw g711.Alaw
            raw.Unmarshal(samples)
            samples = raw
        }

        // print
        log.Printf("received G711 frame with PTS %v and size %d\n", pts, len(op))
        log.Printf("decoded audio samples with PTS %v and size %d\n", pts, len(samples))
    })

    // start playing
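
G711 carries one byte per sample at 8000 Hz, and the decoded output above is 16-bit LPCM, so the decoded buffer is exactly twice the size of the G711 payload. A standalone check of that relationship (the 20 ms packet duration is only an illustration):

package main

import "fmt"

func main() {
    const sampleRate = 8000 // G711 clock
    const packetMs = 20     // illustrative RTP packet duration

    g711Bytes := sampleRate * packetMs / 1000 // 160 bytes, one per sample
    lpcmBytes := g711Bytes * 2                // 320 bytes of 16-bit LPCM
    fmt.Println(g711Bytes, lpcmBytes)
}
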
@@ -1,86 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"log"
|
||||
|
||||
"github.com/bluenviron/gortsplib/v4"
|
||||
"github.com/bluenviron/gortsplib/v4/pkg/base"
|
||||
"github.com/bluenviron/gortsplib/v4/pkg/format"
|
||||
"github.com/pion/rtp"
|
||||
)
|
||||
|
||||
// This example shows how to
|
||||
// 1. connect to a RTSP server
|
||||
// 2. check if there's a G722 format
|
||||
// 3. get G722 frames of that format
|
||||
|
||||
func main() {
|
||||
c := gortsplib.Client{}
|
||||
|
||||
// parse URL
|
||||
u, err := base.ParseURL("rtsp://myuser:mypass@localhost:8554/mystream")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// connect to the server
|
||||
err = c.Start(u.Scheme, u.Host)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
defer c.Close()
|
||||
|
||||
// find available medias
|
||||
desc, _, err := c.Describe(u)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// find the G722 media and format
|
||||
var forma *format.G722
|
||||
medi := desc.FindFormat(&forma)
|
||||
if medi == nil {
|
||||
panic("media not found")
|
||||
}
|
||||
|
||||
// create decoder
|
||||
rtpDec, err := forma.CreateDecoder()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// setup a single media
|
||||
_, err = c.Setup(desc.BaseURL, medi, 0, 0)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// called when a RTP packet arrives
|
||||
c.OnPacketRTP(medi, forma, func(pkt *rtp.Packet) {
|
||||
// decode timestamp
|
||||
pts, ok := c.PacketPTS2(medi, pkt)
|
||||
if !ok {
|
||||
log.Printf("waiting for timestamp")
|
||||
return
|
||||
}
|
||||
|
||||
// extract G722 frames from RTP packets
|
||||
op, err := rtpDec.Decode(pkt)
|
||||
if err != nil {
|
||||
log.Printf("ERR: %v", err)
|
||||
return
|
||||
}
|
||||
|
||||
// print
|
||||
log.Printf("received G722 frame with PTS %v size %d\n", pts, len(op))
|
||||
})
|
||||
|
||||
// start playing
|
||||
_, err = c.Play(nil)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// wait until a fatal error
|
||||
panic(c.Wait())
|
||||
}
|
@@ -73,12 +73,47 @@ func (d *h264Decoder) close() {
|
||||
C.avcodec_close(d.codecCtx)
|
||||
}
|
||||
|
||||
func (d *h264Decoder) reinitDynamicStuff() error {
|
||||
if d.swsCtx != nil {
|
||||
C.sws_freeContext(d.swsCtx)
|
||||
}
|
||||
|
||||
if d.rgbaFrame != nil {
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
}
|
||||
|
||||
d.rgbaFrame = C.av_frame_alloc()
|
||||
if d.rgbaFrame == nil {
|
||||
return fmt.Errorf("av_frame_alloc() failed")
|
||||
}
|
||||
|
||||
d.rgbaFrame.format = C.AV_PIX_FMT_RGBA
|
||||
d.rgbaFrame.width = d.yuv420Frame.width
|
||||
d.rgbaFrame.height = d.yuv420Frame.height
|
||||
d.rgbaFrame.color_range = C.AVCOL_RANGE_JPEG
|
||||
|
||||
res := C.av_frame_get_buffer(d.rgbaFrame, 1)
|
||||
if res < 0 {
|
||||
return fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
d.swsCtx = C.sws_getContext(d.yuv420Frame.width, d.yuv420Frame.height, int32(d.yuv420Frame.format),
|
||||
d.rgbaFrame.width, d.rgbaFrame.height, (int32)(d.rgbaFrame.format), C.SWS_BILINEAR, nil, nil, nil)
|
||||
if d.swsCtx == nil {
|
||||
return fmt.Errorf("sws_getContext() failed")
|
||||
}
|
||||
|
||||
rgbaFrameSize := C.av_image_get_buffer_size((int32)(d.rgbaFrame.format), d.rgbaFrame.width, d.rgbaFrame.height, 1)
|
||||
d.rgbaFramePtr = (*[1 << 30]uint8)(unsafe.Pointer(d.rgbaFrame.data[0]))[:rgbaFrameSize:rgbaFrameSize]
|
||||
return nil
|
||||
}
|
||||
|
||||
// decode decodes a RGBA image from H264.
|
||||
func (d *h264Decoder) decode(au [][]byte) (*image.RGBA, error) {
|
||||
// encode access unit into Annex-B
|
||||
annexb, err := h264.AnnexB(au).Marshal()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// send access unit to decoder
|
||||
@@ -102,37 +137,10 @@ func (d *h264Decoder) decode(au [][]byte) (*image.RGBA, error) {
|
||||
|
||||
// if frame size has changed, allocate needed objects
|
||||
if d.rgbaFrame == nil || d.rgbaFrame.width != d.yuv420Frame.width || d.rgbaFrame.height != d.yuv420Frame.height {
|
||||
if d.swsCtx != nil {
|
||||
C.sws_freeContext(d.swsCtx)
|
||||
err := d.reinitDynamicStuff()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if d.rgbaFrame != nil {
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
}
|
||||
|
||||
d.rgbaFrame = C.av_frame_alloc()
|
||||
if d.rgbaFrame == nil {
|
||||
return nil, fmt.Errorf("av_frame_alloc() failed")
|
||||
}
|
||||
|
||||
d.rgbaFrame.format = C.AV_PIX_FMT_RGBA
|
||||
d.rgbaFrame.width = d.yuv420Frame.width
|
||||
d.rgbaFrame.height = d.yuv420Frame.height
|
||||
d.rgbaFrame.color_range = C.AVCOL_RANGE_JPEG
|
||||
|
||||
res = C.av_frame_get_buffer(d.rgbaFrame, 1)
|
||||
if res < 0 {
|
||||
return nil, fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
d.swsCtx = C.sws_getContext(d.yuv420Frame.width, d.yuv420Frame.height, int32(d.yuv420Frame.format),
|
||||
d.rgbaFrame.width, d.rgbaFrame.height, (int32)(d.rgbaFrame.format), C.SWS_BILINEAR, nil, nil, nil)
|
||||
if d.swsCtx == nil {
|
||||
return nil, fmt.Errorf("sws_getContext() failed")
|
||||
}
|
||||
|
||||
rgbaFrameSize := C.av_image_get_buffer_size((int32)(d.rgbaFrame.format), d.rgbaFrame.width, d.rgbaFrame.height, 1)
|
||||
d.rgbaFramePtr = (*[1 << 30]uint8)(unsafe.Pointer(d.rgbaFrame.data[0]))[:rgbaFrameSize:rgbaFrameSize]
|
||||
}
|
||||
|
||||
// convert color space from YUV420 to RGBA
|
||||
|
@@ -25,7 +25,7 @@ import (
// 4. convert RGBA frames to JPEG images and save them on disk

// This example requires the FFmpeg libraries, that can be installed with this command:
// apt install -y libavformat-dev libswscale-dev gcc pkg-config
// apt install -y libavcodec-dev libswscale-dev gcc pkg-config

func saveToFile(img image.Image) error {
    // create file
@@ -73,12 +73,47 @@ func (d *h264Decoder) close() {
|
||||
C.avcodec_close(d.codecCtx)
|
||||
}
|
||||
|
||||
func (d *h264Decoder) reinitDynamicStuff() error {
|
||||
if d.swsCtx != nil {
|
||||
C.sws_freeContext(d.swsCtx)
|
||||
}
|
||||
|
||||
if d.rgbaFrame != nil {
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
}
|
||||
|
||||
d.rgbaFrame = C.av_frame_alloc()
|
||||
if d.rgbaFrame == nil {
|
||||
return fmt.Errorf("av_frame_alloc() failed")
|
||||
}
|
||||
|
||||
d.rgbaFrame.format = C.AV_PIX_FMT_RGBA
|
||||
d.rgbaFrame.width = d.yuv420Frame.width
|
||||
d.rgbaFrame.height = d.yuv420Frame.height
|
||||
d.rgbaFrame.color_range = C.AVCOL_RANGE_JPEG
|
||||
|
||||
res := C.av_frame_get_buffer(d.rgbaFrame, 1)
|
||||
if res < 0 {
|
||||
return fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
d.swsCtx = C.sws_getContext(d.yuv420Frame.width, d.yuv420Frame.height, int32(d.yuv420Frame.format),
|
||||
d.rgbaFrame.width, d.rgbaFrame.height, (int32)(d.rgbaFrame.format), C.SWS_BILINEAR, nil, nil, nil)
|
||||
if d.swsCtx == nil {
|
||||
return fmt.Errorf("sws_getContext() failed")
|
||||
}
|
||||
|
||||
rgbaFrameSize := C.av_image_get_buffer_size((int32)(d.rgbaFrame.format), d.rgbaFrame.width, d.rgbaFrame.height, 1)
|
||||
d.rgbaFramePtr = (*[1 << 30]uint8)(unsafe.Pointer(d.rgbaFrame.data[0]))[:rgbaFrameSize:rgbaFrameSize]
|
||||
return nil
|
||||
}
|
||||
|
||||
// decode decodes a RGBA image from H264.
|
||||
func (d *h264Decoder) decode(au [][]byte) (*image.RGBA, error) {
|
||||
// encode access unit into Annex-B
|
||||
annexb, err := h264.AnnexB(au).Marshal()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// send access unit to decoder
|
||||
@@ -102,37 +137,10 @@ func (d *h264Decoder) decode(au [][]byte) (*image.RGBA, error) {
|
||||
|
||||
// if frame size has changed, allocate needed objects
|
||||
if d.rgbaFrame == nil || d.rgbaFrame.width != d.yuv420Frame.width || d.rgbaFrame.height != d.yuv420Frame.height {
|
||||
if d.swsCtx != nil {
|
||||
C.sws_freeContext(d.swsCtx)
|
||||
err := d.reinitDynamicStuff()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if d.rgbaFrame != nil {
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
}
|
||||
|
||||
d.rgbaFrame = C.av_frame_alloc()
|
||||
if d.rgbaFrame == nil {
|
||||
return nil, fmt.Errorf("av_frame_alloc() failed")
|
||||
}
|
||||
|
||||
d.rgbaFrame.format = C.AV_PIX_FMT_RGBA
|
||||
d.rgbaFrame.width = d.yuv420Frame.width
|
||||
d.rgbaFrame.height = d.yuv420Frame.height
|
||||
d.rgbaFrame.color_range = C.AVCOL_RANGE_JPEG
|
||||
|
||||
res = C.av_frame_get_buffer(d.rgbaFrame, 1)
|
||||
if res < 0 {
|
||||
return nil, fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
d.swsCtx = C.sws_getContext(d.yuv420Frame.width, d.yuv420Frame.height, int32(d.yuv420Frame.format),
|
||||
d.rgbaFrame.width, d.rgbaFrame.height, (int32)(d.rgbaFrame.format), C.SWS_BILINEAR, nil, nil, nil)
|
||||
if d.swsCtx == nil {
|
||||
return nil, fmt.Errorf("sws_getContext() failed")
|
||||
}
|
||||
|
||||
rgbaFrameSize := C.av_image_get_buffer_size((int32)(d.rgbaFrame.format), d.rgbaFrame.width, d.rgbaFrame.height, 1)
|
||||
d.rgbaFramePtr = (*[1 << 30]uint8)(unsafe.Pointer(d.rgbaFrame.data[0]))[:rgbaFrameSize:rgbaFrameSize]
|
||||
}
|
||||
|
||||
// convert color space from YUV420 to RGBA
|
||||
|
@@ -19,7 +19,7 @@ import (
// 3. decode the H264 stream into RGBA frames

// This example requires the FFmpeg libraries, that can be installed with this command:
// apt install -y libavformat-dev libswscale-dev gcc pkg-config
// apt install -y libavcodec-dev libswscale-dev gcc pkg-config

func main() {
    c := gortsplib.Client{}
@@ -73,12 +73,47 @@ func (d *h265Decoder) close() {
|
||||
C.avcodec_close(d.codecCtx)
|
||||
}
|
||||
|
||||
func (d *h265Decoder) reinitDynamicStuff() error {
|
||||
if d.swsCtx != nil {
|
||||
C.sws_freeContext(d.swsCtx)
|
||||
}
|
||||
|
||||
if d.rgbaFrame != nil {
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
}
|
||||
|
||||
d.rgbaFrame = C.av_frame_alloc()
|
||||
if d.rgbaFrame == nil {
|
||||
return fmt.Errorf("av_frame_alloc() failed")
|
||||
}
|
||||
|
||||
d.rgbaFrame.format = C.AV_PIX_FMT_RGBA
|
||||
d.rgbaFrame.width = d.yuv420Frame.width
|
||||
d.rgbaFrame.height = d.yuv420Frame.height
|
||||
d.rgbaFrame.color_range = C.AVCOL_RANGE_JPEG
|
||||
|
||||
res := C.av_frame_get_buffer(d.rgbaFrame, 1)
|
||||
if res < 0 {
|
||||
return fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
d.swsCtx = C.sws_getContext(d.yuv420Frame.width, d.yuv420Frame.height, int32(d.yuv420Frame.format),
|
||||
d.rgbaFrame.width, d.rgbaFrame.height, (int32)(d.rgbaFrame.format), C.SWS_BILINEAR, nil, nil, nil)
|
||||
if d.swsCtx == nil {
|
||||
return fmt.Errorf("sws_getContext() failed")
|
||||
}
|
||||
|
||||
rgbaFrameSize := C.av_image_get_buffer_size((int32)(d.rgbaFrame.format), d.rgbaFrame.width, d.rgbaFrame.height, 1)
|
||||
d.rgbaFramePtr = (*[1 << 30]uint8)(unsafe.Pointer(d.rgbaFrame.data[0]))[:rgbaFrameSize:rgbaFrameSize]
|
||||
return nil
|
||||
}
|
||||
|
||||
// decode decodes a RGBA image from H265.
|
||||
func (d *h265Decoder) decode(au [][]byte) (*image.RGBA, error) {
|
||||
// encode access unit into Annex-B
|
||||
annexb, err := h264.AnnexB(au).Marshal()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// send access unit to decoder
|
||||
@@ -102,37 +137,10 @@ func (d *h265Decoder) decode(au [][]byte) (*image.RGBA, error) {
|
||||
|
||||
// if frame size has changed, allocate needed objects
|
||||
if d.rgbaFrame == nil || d.rgbaFrame.width != d.yuv420Frame.width || d.rgbaFrame.height != d.yuv420Frame.height {
|
||||
if d.swsCtx != nil {
|
||||
C.sws_freeContext(d.swsCtx)
|
||||
err := d.reinitDynamicStuff()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if d.rgbaFrame != nil {
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
}
|
||||
|
||||
d.rgbaFrame = C.av_frame_alloc()
|
||||
if d.rgbaFrame == nil {
|
||||
return nil, fmt.Errorf("av_frame_alloc() failed")
|
||||
}
|
||||
|
||||
d.rgbaFrame.format = C.AV_PIX_FMT_RGBA
|
||||
d.rgbaFrame.width = d.yuv420Frame.width
|
||||
d.rgbaFrame.height = d.yuv420Frame.height
|
||||
d.rgbaFrame.color_range = C.AVCOL_RANGE_JPEG
|
||||
|
||||
res = C.av_frame_get_buffer(d.rgbaFrame, 1)
|
||||
if res < 0 {
|
||||
return nil, fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
d.swsCtx = C.sws_getContext(d.yuv420Frame.width, d.yuv420Frame.height, int32(d.yuv420Frame.format),
|
||||
d.rgbaFrame.width, d.rgbaFrame.height, (int32)(d.rgbaFrame.format), C.SWS_BILINEAR, nil, nil, nil)
|
||||
if d.swsCtx == nil {
|
||||
return nil, fmt.Errorf("sws_getContext() failed")
|
||||
}
|
||||
|
||||
rgbaFrameSize := C.av_image_get_buffer_size((int32)(d.rgbaFrame.format), d.rgbaFrame.width, d.rgbaFrame.height, 1)
|
||||
d.rgbaFramePtr = (*[1 << 30]uint8)(unsafe.Pointer(d.rgbaFrame.data[0]))[:rgbaFrameSize:rgbaFrameSize]
|
||||
}
|
||||
|
||||
// convert color space from YUV420 to RGBA
|
||||
|
@@ -25,7 +25,7 @@ import (
// 4. convert RGBA frames to JPEG images and save them on disk

// This example requires the FFmpeg libraries, that can be installed with this command:
// apt install -y libavformat-dev libswscale-dev gcc pkg-config
// apt install -y libavcodec-dev libswscale-dev gcc pkg-config

func saveToFile(img image.Image) error {
    // create file
@@ -73,12 +73,47 @@ func (d *h265Decoder) close() {
|
||||
C.avcodec_close(d.codecCtx)
|
||||
}
|
||||
|
||||
func (d *h265Decoder) reinitDynamicStuff() error {
|
||||
if d.swsCtx != nil {
|
||||
C.sws_freeContext(d.swsCtx)
|
||||
}
|
||||
|
||||
if d.rgbaFrame != nil {
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
}
|
||||
|
||||
d.rgbaFrame = C.av_frame_alloc()
|
||||
if d.rgbaFrame == nil {
|
||||
return fmt.Errorf("av_frame_alloc() failed")
|
||||
}
|
||||
|
||||
d.rgbaFrame.format = C.AV_PIX_FMT_RGBA
|
||||
d.rgbaFrame.width = d.yuv420Frame.width
|
||||
d.rgbaFrame.height = d.yuv420Frame.height
|
||||
d.rgbaFrame.color_range = C.AVCOL_RANGE_JPEG
|
||||
|
||||
res := C.av_frame_get_buffer(d.rgbaFrame, 1)
|
||||
if res < 0 {
|
||||
return fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
d.swsCtx = C.sws_getContext(d.yuv420Frame.width, d.yuv420Frame.height, int32(d.yuv420Frame.format),
|
||||
d.rgbaFrame.width, d.rgbaFrame.height, (int32)(d.rgbaFrame.format), C.SWS_BILINEAR, nil, nil, nil)
|
||||
if d.swsCtx == nil {
|
||||
return fmt.Errorf("sws_getContext() failed")
|
||||
}
|
||||
|
||||
rgbaFrameSize := C.av_image_get_buffer_size((int32)(d.rgbaFrame.format), d.rgbaFrame.width, d.rgbaFrame.height, 1)
|
||||
d.rgbaFramePtr = (*[1 << 30]uint8)(unsafe.Pointer(d.rgbaFrame.data[0]))[:rgbaFrameSize:rgbaFrameSize]
|
||||
return nil
|
||||
}
|
||||
|
||||
// decode decodes a RGBA image from H265.
|
||||
func (d *h265Decoder) decode(au [][]byte) (*image.RGBA, error) {
|
||||
// encode access unit into Annex-B
|
||||
annexb, err := h264.AnnexB(au).Marshal()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// send access unit to decoder
|
||||
@@ -100,39 +135,12 @@ func (d *h265Decoder) decode(au [][]byte) (*image.RGBA, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// if frame size has changed, allocate needed objects
|
||||
// if frame size has changed, reallocate needed objects
|
||||
if d.rgbaFrame == nil || d.rgbaFrame.width != d.yuv420Frame.width || d.rgbaFrame.height != d.yuv420Frame.height {
|
||||
if d.swsCtx != nil {
|
||||
C.sws_freeContext(d.swsCtx)
|
||||
err := d.reinitDynamicStuff()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if d.rgbaFrame != nil {
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
}
|
||||
|
||||
d.rgbaFrame = C.av_frame_alloc()
|
||||
if d.rgbaFrame == nil {
|
||||
return nil, fmt.Errorf("av_frame_alloc() failed")
|
||||
}
|
||||
|
||||
d.rgbaFrame.format = C.AV_PIX_FMT_RGBA
|
||||
d.rgbaFrame.width = d.yuv420Frame.width
|
||||
d.rgbaFrame.height = d.yuv420Frame.height
|
||||
d.rgbaFrame.color_range = C.AVCOL_RANGE_JPEG
|
||||
|
||||
res = C.av_frame_get_buffer(d.rgbaFrame, 1)
|
||||
if res < 0 {
|
||||
return nil, fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
d.swsCtx = C.sws_getContext(d.yuv420Frame.width, d.yuv420Frame.height, int32(d.yuv420Frame.format),
|
||||
d.rgbaFrame.width, d.rgbaFrame.height, (int32)(d.rgbaFrame.format), C.SWS_BILINEAR, nil, nil, nil)
|
||||
if d.swsCtx == nil {
|
||||
return nil, fmt.Errorf("sws_getContext() failed")
|
||||
}
|
||||
|
||||
rgbaFrameSize := C.av_image_get_buffer_size((int32)(d.rgbaFrame.format), d.rgbaFrame.width, d.rgbaFrame.height, 1)
|
||||
d.rgbaFramePtr = (*[1 << 30]uint8)(unsafe.Pointer(d.rgbaFrame.data[0]))[:rgbaFrameSize:rgbaFrameSize]
|
||||
}
|
||||
|
||||
// convert color space from YUV420 to RGBA
|
||||
|
@@ -19,7 +19,7 @@ import (
// 3. decode the H265 stream into RGBA frames

// This example requires the FFmpeg libraries, that can be installed with this command:
// apt install -y libavformat-dev libswscale-dev gcc pkg-config
// apt install -y libavcodec-dev libswscale-dev gcc pkg-config

func main() {
    c := gortsplib.Client{}
@@ -18,7 +18,7 @@ import (
// 3. decode the VP8 stream into RGBA frames

// This example requires the FFmpeg libraries, that can be installed with this command:
// apt install -y libavformat-dev libswscale-dev gcc pkg-config
// apt install -y libavcodec-dev libswscale-dev gcc pkg-config

func main() {
    c := gortsplib.Client{}
@@ -71,6 +71,41 @@ func (d *vp8Decoder) close() {
|
||||
C.avcodec_close(d.codecCtx)
|
||||
}
|
||||
|
||||
func (d *vp8Decoder) reinitDynamicStuff() error {
|
||||
if d.swsCtx != nil {
|
||||
C.sws_freeContext(d.swsCtx)
|
||||
}
|
||||
|
||||
if d.rgbaFrame != nil {
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
}
|
||||
|
||||
d.rgbaFrame = C.av_frame_alloc()
|
||||
if d.rgbaFrame == nil {
|
||||
return fmt.Errorf("av_frame_alloc() failed")
|
||||
}
|
||||
|
||||
d.rgbaFrame.format = C.AV_PIX_FMT_RGBA
|
||||
d.rgbaFrame.width = d.yuv420Frame.width
|
||||
d.rgbaFrame.height = d.yuv420Frame.height
|
||||
d.rgbaFrame.color_range = C.AVCOL_RANGE_JPEG
|
||||
|
||||
res := C.av_frame_get_buffer(d.rgbaFrame, 1)
|
||||
if res < 0 {
|
||||
return fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
d.swsCtx = C.sws_getContext(d.yuv420Frame.width, d.yuv420Frame.height, int32(d.yuv420Frame.format),
|
||||
d.rgbaFrame.width, d.rgbaFrame.height, (int32)(d.rgbaFrame.format), C.SWS_BILINEAR, nil, nil, nil)
|
||||
if d.swsCtx == nil {
|
||||
return fmt.Errorf("sws_getContext() failed")
|
||||
}
|
||||
|
||||
rgbaFrameSize := C.av_image_get_buffer_size((int32)(d.rgbaFrame.format), d.rgbaFrame.width, d.rgbaFrame.height, 1)
|
||||
d.rgbaFramePtr = (*[1 << 30]uint8)(unsafe.Pointer(d.rgbaFrame.data[0]))[:rgbaFrameSize:rgbaFrameSize]
|
||||
return nil
|
||||
}
|
||||
|
||||
// decode decodes a RGBA image from VP8.
|
||||
func (d *vp8Decoder) decode(au []byte) (*image.RGBA, error) {
|
||||
// send access unit to decoder
|
||||
@@ -94,37 +129,10 @@ func (d *vp8Decoder) decode(au []byte) (*image.RGBA, error) {
|
||||
|
||||
// if frame size has changed, allocate needed objects
|
||||
if d.rgbaFrame == nil || d.rgbaFrame.width != d.yuv420Frame.width || d.rgbaFrame.height != d.yuv420Frame.height {
|
||||
if d.swsCtx != nil {
|
||||
C.sws_freeContext(d.swsCtx)
|
||||
err := d.reinitDynamicStuff()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if d.rgbaFrame != nil {
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
}
|
||||
|
||||
d.rgbaFrame = C.av_frame_alloc()
|
||||
if d.rgbaFrame == nil {
|
||||
return nil, fmt.Errorf("av_frame_alloc() failed")
|
||||
}
|
||||
|
||||
d.rgbaFrame.format = C.AV_PIX_FMT_RGBA
|
||||
d.rgbaFrame.width = d.yuv420Frame.width
|
||||
d.rgbaFrame.height = d.yuv420Frame.height
|
||||
d.rgbaFrame.color_range = C.AVCOL_RANGE_JPEG
|
||||
|
||||
res = C.av_frame_get_buffer(d.rgbaFrame, 1)
|
||||
if res < 0 {
|
||||
return nil, fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
d.swsCtx = C.sws_getContext(d.yuv420Frame.width, d.yuv420Frame.height, int32(d.yuv420Frame.format),
|
||||
d.rgbaFrame.width, d.rgbaFrame.height, (int32)(d.rgbaFrame.format), C.SWS_BILINEAR, nil, nil, nil)
|
||||
if d.swsCtx == nil {
|
||||
return nil, fmt.Errorf("sws_getContext() failed")
|
||||
}
|
||||
|
||||
rgbaFrameSize := C.av_image_get_buffer_size((int32)(d.rgbaFrame.format), d.rgbaFrame.width, d.rgbaFrame.height, 1)
|
||||
d.rgbaFramePtr = (*[1 << 30]uint8)(unsafe.Pointer(d.rgbaFrame.data[0]))[:rgbaFrameSize:rgbaFrameSize]
|
||||
}
|
||||
|
||||
// convert color space from YUV420 to RGBA
|
||||
|
@@ -18,7 +18,7 @@ import (
// 3. decode the VP9 stream into RGBA frames

// This example requires the FFmpeg libraries, that can be installed with this command:
// apt install -y libavformat-dev libswscale-dev gcc pkg-config
// apt install -y libavcodec-dev libswscale-dev gcc pkg-config

func main() {
    c := gortsplib.Client{}
@@ -71,6 +71,41 @@ func (d *vp9Decoder) close() {
|
||||
C.avcodec_close(d.codecCtx)
|
||||
}
|
||||
|
||||
func (d *vp9Decoder) reinitDynamicStuff() error {
|
||||
if d.swsCtx != nil {
|
||||
C.sws_freeContext(d.swsCtx)
|
||||
}
|
||||
|
||||
if d.rgbaFrame != nil {
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
}
|
||||
|
||||
d.rgbaFrame = C.av_frame_alloc()
|
||||
if d.rgbaFrame == nil {
|
||||
return fmt.Errorf("av_frame_alloc() failed")
|
||||
}
|
||||
|
||||
d.rgbaFrame.format = C.AV_PIX_FMT_RGBA
|
||||
d.rgbaFrame.width = d.yuv420Frame.width
|
||||
d.rgbaFrame.height = d.yuv420Frame.height
|
||||
d.rgbaFrame.color_range = C.AVCOL_RANGE_JPEG
|
||||
|
||||
res := C.av_frame_get_buffer(d.rgbaFrame, 1)
|
||||
if res < 0 {
|
||||
return fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
d.swsCtx = C.sws_getContext(d.yuv420Frame.width, d.yuv420Frame.height, int32(d.yuv420Frame.format),
|
||||
d.rgbaFrame.width, d.rgbaFrame.height, (int32)(d.rgbaFrame.format), C.SWS_BILINEAR, nil, nil, nil)
|
||||
if d.swsCtx == nil {
|
||||
return fmt.Errorf("sws_getContext() failed")
|
||||
}
|
||||
|
||||
rgbaFrameSize := C.av_image_get_buffer_size((int32)(d.rgbaFrame.format), d.rgbaFrame.width, d.rgbaFrame.height, 1)
|
||||
d.rgbaFramePtr = (*[1 << 30]uint8)(unsafe.Pointer(d.rgbaFrame.data[0]))[:rgbaFrameSize:rgbaFrameSize]
|
||||
return nil
|
||||
}
|
||||
|
||||
// decode decodes a RGBA image from VP9.
|
||||
func (d *vp9Decoder) decode(au []byte) (*image.RGBA, error) {
|
||||
// send access unit to decoder
|
||||
@@ -94,37 +129,10 @@ func (d *vp9Decoder) decode(au []byte) (*image.RGBA, error) {
|
||||
|
||||
// if frame size has changed, allocate needed objects
|
||||
if d.rgbaFrame == nil || d.rgbaFrame.width != d.yuv420Frame.width || d.rgbaFrame.height != d.yuv420Frame.height {
|
||||
if d.swsCtx != nil {
|
||||
C.sws_freeContext(d.swsCtx)
|
||||
err := d.reinitDynamicStuff()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if d.rgbaFrame != nil {
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
}
|
||||
|
||||
d.rgbaFrame = C.av_frame_alloc()
|
||||
if d.rgbaFrame == nil {
|
||||
return nil, fmt.Errorf("av_frame_alloc() failed")
|
||||
}
|
||||
|
||||
d.rgbaFrame.format = C.AV_PIX_FMT_RGBA
|
||||
d.rgbaFrame.width = d.yuv420Frame.width
|
||||
d.rgbaFrame.height = d.yuv420Frame.height
|
||||
d.rgbaFrame.color_range = C.AVCOL_RANGE_JPEG
|
||||
|
||||
res = C.av_frame_get_buffer(d.rgbaFrame, 1)
|
||||
if res < 0 {
|
||||
return nil, fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
d.swsCtx = C.sws_getContext(d.yuv420Frame.width, d.yuv420Frame.height, int32(d.yuv420Frame.format),
|
||||
d.rgbaFrame.width, d.rgbaFrame.height, (int32)(d.rgbaFrame.format), C.SWS_BILINEAR, nil, nil, nil)
|
||||
if d.swsCtx == nil {
|
||||
return nil, fmt.Errorf("sws_getContext() failed")
|
||||
}
|
||||
|
||||
rgbaFrameSize := C.av_image_get_buffer_size((int32)(d.rgbaFrame.format), d.rgbaFrame.width, d.rgbaFrame.height, 1)
|
||||
d.rgbaFramePtr = (*[1 << 30]uint8)(unsafe.Pointer(d.rgbaFrame.data[0]))[:rgbaFrameSize:rgbaFrameSize]
|
||||
}
|
||||
|
||||
// convert color space from YUV420 to RGBA
|
||||
|
@@ -93,6 +93,8 @@ func (d *av1Encoder) initialize() error {
|
||||
|
||||
res = C.av_frame_get_buffer(d.rgbaFrame, 0)
|
||||
if res < 0 {
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
C.avcodec_close(d.codecCtx)
|
||||
return fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
@@ -109,6 +111,9 @@ func (d *av1Encoder) initialize() error {
|
||||
|
||||
res = C.av_frame_get_buffer(d.yuv420Frame, 0)
|
||||
if res < 0 {
|
||||
C.av_frame_free(&d.yuv420Frame)
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
C.avcodec_close(d.codecCtx)
|
||||
return fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
@@ -123,7 +128,6 @@ func (d *av1Encoder) initialize() error {
|
||||
|
||||
d.pkt = C.av_packet_alloc()
|
||||
if d.pkt == nil {
|
||||
C.av_packet_free(&d.pkt)
|
||||
C.av_frame_free(&d.yuv420Frame)
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
C.avcodec_close(d.codecCtx)
|
||||
|
examples/client-record-format-av1/dummy_image.go (new file, 31 lines)
@@ -0,0 +1,31 @@
package main

import (
    "image"
    "image/color"
)

var dummyImageCount = 0

func createDummyImage() *image.RGBA {
    img := image.NewRGBA(image.Rect(0, 0, 640, 480))

    var cl color.RGBA
    switch dummyImageCount {
    case 0:
        cl = color.RGBA{255, 0, 0, 0}
    case 1:
        cl = color.RGBA{0, 255, 0, 0}
    case 2:
        cl = color.RGBA{0, 0, 255, 0}
    }
    dummyImageCount = (dummyImageCount + 1) % 3

    for y := 0; y < img.Rect.Dy(); y++ {
        for x := 0; x < img.Rect.Dx(); x++ {
            img.SetRGBA(x, y, cl)
        }
    }

    return img
}
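
createDummyImage keeps its frame counter in a package-level variable, so successive calls cycle through solid red, green and blue frames without the caller passing an index (which is what lets main() drop the old createDummyImage(i) parameter). A tiny usage sketch (it assumes it is compiled next to the helper above and replaces the example's own main, purely for illustration):

package main

import "fmt"

func main() {
    for i := 0; i < 4; i++ {
        img := createDummyImage()
        fmt.Println(img.RGBAAt(0, 0)) // {255 0 0 0}, {0 255 0 0}, {0 0 255 0}, {255 0 0 0}
    }
}
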
@@ -4,8 +4,6 @@ package main
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"image"
|
||||
"image/color"
|
||||
"log"
|
||||
"time"
|
||||
|
||||
@@ -22,7 +20,7 @@ import (
|
||||
// 5. write RTP packets to the server
|
||||
|
||||
// This example requires the FFmpeg libraries, that can be installed with this command:
|
||||
// apt install -y libavformat-dev libswscale-dev gcc pkg-config
|
||||
// apt install -y libavcodec-dev libswscale-dev gcc pkg-config
|
||||
|
||||
func multiplyAndDivide(v, m, d int64) int64 {
|
||||
secs := v / d
|
||||
@@ -39,28 +37,6 @@ func randUint32() (uint32, error) {
|
||||
return uint32(b[0])<<24 | uint32(b[1])<<16 | uint32(b[2])<<8 | uint32(b[3]), nil
|
||||
}
|
||||
|
||||
func createDummyImage(i int) *image.RGBA {
|
||||
img := image.NewRGBA(image.Rect(0, 0, 640, 480))
|
||||
|
||||
var cl color.RGBA
|
||||
switch i {
|
||||
case 0:
|
||||
cl = color.RGBA{255, 0, 0, 0}
|
||||
case 1:
|
||||
cl = color.RGBA{0, 255, 0, 0}
|
||||
case 2:
|
||||
cl = color.RGBA{0, 0, 255, 0}
|
||||
}
|
||||
|
||||
for y := 0; y < img.Rect.Dy(); y++ {
|
||||
for x := 0; x < img.Rect.Dx(); x++ {
|
||||
img.SetRGBA(x, y, cl)
|
||||
}
|
||||
}
|
||||
|
||||
return img
|
||||
}
|
||||
|
||||
func main() {
|
||||
// create a stream description that contains a AV1 format
|
||||
forma := &format.AV1{
|
||||
@@ -110,16 +86,13 @@ func main() {
|
||||
ticker := time.NewTicker(200 * time.Millisecond)
|
||||
defer ticker.Stop()
|
||||
|
||||
i := 0
|
||||
|
||||
for range ticker.C {
|
||||
// create a dummy image
|
||||
img := createDummyImage(i)
|
||||
i = (i + 1) % 3
|
||||
|
||||
// get current timestamp
|
||||
pts := multiplyAndDivide(int64(time.Since(start)), int64(forma.ClockRate()), int64(time.Second))
|
||||
|
||||
// create a dummy image
|
||||
img := createDummyImage()
|
||||
|
||||
// encode the image with AV1
|
||||
au, pts, err := av1enc.encode(img, pts)
|
||||
if err != nil {
|
||||
@@ -141,7 +114,7 @@ func main() {
|
||||
|
||||
// write RTP packets to the server
|
||||
for _, pkt := range pkts {
|
||||
pkt.Timestamp = uint32(int64(randomStart) + pts)
|
||||
pkt.Timestamp += uint32(int64(randomStart) + pts)
|
||||
|
||||
err = c.WritePacketRTP(desc.Medias[0], pkt)
|
||||
if err != nil {
|
||||
|
examples/client-record-format-g711/dummy_audio.go (new file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
package main
|
||||
|
||||
import "math"
|
||||
|
||||
const (
|
||||
sampleRate = 8000
|
||||
frequency = 400
|
||||
amplitude = (1 << 14) - 1
|
||||
)
|
||||
|
||||
func createDummyAudio(pts int64, prevPTS int64) []byte {
|
||||
sampleCount := (pts - prevPTS)
|
||||
n := 0
|
||||
ret := make([]byte, sampleCount*2)
|
||||
|
||||
for i := int64(0); i < sampleCount; i++ {
|
||||
v := int16(amplitude * math.Sin((float64(prevPTS+i)*frequency*math.Pi*2)/sampleRate))
|
||||
ret[n] = byte(v >> 8)
|
||||
ret[n+1] = byte(v)
|
||||
n += 2
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
@@ -1,80 +1,110 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"log"
|
||||
"net"
|
||||
"time"
|
||||
|
||||
"github.com/bluenviron/gortsplib/v4"
|
||||
"github.com/bluenviron/gortsplib/v4/pkg/description"
|
||||
"github.com/bluenviron/gortsplib/v4/pkg/format"
|
||||
"github.com/pion/rtp"
|
||||
"github.com/bluenviron/mediacommon/v2/pkg/codecs/g711"
|
||||
)
|
||||
|
||||
// This example shows how to
|
||||
// 1. generate a G711 and RTP packets with GStreamer
|
||||
// 2. connect to a RTSP server, announce a G711 format
|
||||
// 3. route the packets from GStreamer to the server
|
||||
// 1. connect to a RTSP server, announce a G711 format
|
||||
// 2. generate dummy LPCM audio samples
|
||||
// 3. encode audio samples with G711
|
||||
// 3. generate RTP packets from G711 samples
|
||||
// 4. write RTP packets to the server
|
||||
|
||||
func multiplyAndDivide(v, m, d int64) int64 {
|
||||
secs := v / d
|
||||
dec := v % d
|
||||
return (secs*m + dec*m/d)
|
||||
}
|
||||
|
||||
func randUint32() (uint32, error) {
|
||||
var b [4]byte
|
||||
_, err := rand.Read(b[:])
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return uint32(b[0])<<24 | uint32(b[1])<<16 | uint32(b[2])<<8 | uint32(b[3]), nil
|
||||
}
|
||||
|
||||
func main() {
|
||||
// open a listener to receive RTP/G711 packets
|
||||
pc, err := net.ListenPacket("udp", "localhost:9000")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
defer pc.Close()
|
||||
|
||||
log.Println("Waiting for a RTP/G711 stream on UDP port 9000 - you can send one with GStreamer:\n" +
|
||||
"gst-launch-1.0 audiotestsrc freq=300 ! audioconvert ! audioresample ! audio/x-raw,rate=8000" +
|
||||
" ! alawenc ! rtppcmapay ! udpsink host=127.0.0.1 port=9000")
|
||||
|
||||
// wait for first packet
|
||||
buf := make([]byte, 2048)
|
||||
n, _, err := pc.ReadFrom(buf)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
log.Println("stream connected")
|
||||
|
||||
// create a description that contains a G711 format
|
||||
forma := &format.G711{
|
||||
PayloadTyp: 0,
|
||||
MULaw: true,
|
||||
SampleRate: 8000,
|
||||
ChannelCount: 1,
|
||||
}
|
||||
desc := &description.Session{
|
||||
Medias: []*description.Media{{
|
||||
Type: description.MediaTypeVideo,
|
||||
Formats: []format.Format{&format.G711{
|
||||
PayloadTyp: 8,
|
||||
MULaw: false,
|
||||
SampleRate: 8000,
|
||||
ChannelCount: 1,
|
||||
}},
|
||||
Type: description.MediaTypeAudio,
|
||||
Formats: []format.Format{forma},
|
||||
}},
|
||||
}
|
||||
|
||||
c := gortsplib.Client{}
|
||||
|
||||
// connect to the server and start recording
|
||||
err = c.StartRecording("rtsp://myuser:mypass@localhost:8554/mystream", desc)
|
||||
c := gortsplib.Client{}
|
||||
err := c.StartRecording("rtsp://myuser:mypass@localhost:8554/mystream", desc)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
defer c.Close()
|
||||
|
||||
var pkt rtp.Packet
|
||||
for {
|
||||
// parse RTP packet
|
||||
err = pkt.Unmarshal(buf[:n])
|
||||
// setup G711 -> RTP encoder
|
||||
rtpEnc, err := forma.CreateEncoder()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
start := time.Now()
|
||||
prevPTS := int64(0)
|
||||
|
||||
randomStart, err := randUint32()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// setup a ticker to sleep between writings
|
||||
ticker := time.NewTicker(100 * time.Millisecond)
|
||||
defer ticker.Stop()
|
||||
|
||||
for range ticker.C {
|
||||
// get current timestamp
|
||||
pts := multiplyAndDivide(int64(time.Since(start)), int64(forma.ClockRate()), int64(time.Second))
|
||||
|
||||
// generate dummy LPCM audio samples
|
||||
samples := createDummyAudio(pts, prevPTS)
|
||||
|
||||
// encode samples with G711
|
||||
samples, err = g711.Mulaw(samples).Marshal()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// route RTP packet to the server
|
||||
err = c.WritePacketRTP(desc.Medias[0], &pkt)
|
||||
// generate RTP packets from G711 samples
|
||||
pkts, err := rtpEnc.Encode(samples)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// read another RTP packet from source
|
||||
n, _, err = pc.ReadFrom(buf)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
log.Printf("writing RTP packets with PTS=%d, sample size=%d, pkt count=%d", prevPTS, len(samples), len(pkts))
|
||||
|
||||
// write RTP packets to the server
|
||||
for _, pkt := range pkts {
|
||||
pkt.Timestamp += uint32(int64(randomStart) + prevPTS)
|
||||
|
||||
err = c.WritePacketRTP(desc.Medias[0], pkt)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
prevPTS = pts
|
||||
}
|
||||
}
|
||||
|
@@ -1,75 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"log"
|
||||
"net"
|
||||
|
||||
"github.com/bluenviron/gortsplib/v4"
|
||||
"github.com/bluenviron/gortsplib/v4/pkg/description"
|
||||
"github.com/bluenviron/gortsplib/v4/pkg/format"
|
||||
"github.com/pion/rtp"
|
||||
)
|
||||
|
||||
// This example shows how to
|
||||
// 1. generate a G722 stream and RTP packets with GStreamer
|
||||
// 2. connect to a RTSP server, announce a G722 format
|
||||
// 3. route the packets from GStreamer to the server
|
||||
|
||||
func main() {
|
||||
// open a listener to receive RTP/G722 packets
|
||||
pc, err := net.ListenPacket("udp", "localhost:9000")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
defer pc.Close()
|
||||
|
||||
log.Println("Waiting for a RTP/G722 stream on UDP port 9000 - you can send one with GStreamer:\n" +
|
||||
"gst-launch-1.0 audiotestsrc freq=300 ! audioconvert ! audioresample ! audio/x-raw,rate=16000" +
|
||||
" ! avenc_g722 ! rtpg722pay ! udpsink host=127.0.0.1 port=9000")
|
||||
|
||||
// wait for first packet
|
||||
buf := make([]byte, 2048)
|
||||
n, _, err := pc.ReadFrom(buf)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
log.Println("stream connected")
|
||||
|
||||
// create a description that contains a G722 format
|
||||
desc := &description.Session{
|
||||
Medias: []*description.Media{{
|
||||
Type: description.MediaTypeVideo,
|
||||
Formats: []format.Format{&format.G722{}},
|
||||
}},
|
||||
}
|
||||
|
||||
c := gortsplib.Client{}
|
||||
|
||||
// connect to the server and start recording
|
||||
err = c.StartRecording("rtsp://myuser:mypass@localhost:8554/mystream", desc)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
defer c.Close()
|
||||
|
||||
var pkt rtp.Packet
|
||||
for {
|
||||
// parse RTP packet
|
||||
err = pkt.Unmarshal(buf[:n])
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// route RTP packet to the server
|
||||
err = c.WritePacketRTP(desc.Medias[0], &pkt)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// read another RTP packet from source
|
||||
n, _, err = pc.ReadFrom(buf)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
}
|
examples/client-record-format-h264/dummy_image.go (new file, 31 lines)
@@ -0,0 +1,31 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"image"
|
||||
"image/color"
|
||||
)
|
||||
|
||||
var dummyImageCount = 0
|
||||
|
||||
func createDummyImage() *image.RGBA {
|
||||
img := image.NewRGBA(image.Rect(0, 0, 640, 480))
|
||||
|
||||
var cl color.RGBA
|
||||
switch dummyImageCount {
|
||||
case 0:
|
||||
cl = color.RGBA{255, 0, 0, 0}
|
||||
case 1:
|
||||
cl = color.RGBA{0, 255, 0, 0}
|
||||
case 2:
|
||||
cl = color.RGBA{0, 0, 255, 0}
|
||||
}
|
||||
dummyImageCount = (dummyImageCount + 1) % 3
|
||||
|
||||
for y := 0; y < img.Rect.Dy(); y++ {
|
||||
for x := 0; x < img.Rect.Dx(); x++ {
|
||||
img.SetRGBA(x, y, cl)
|
||||
}
|
||||
}
|
||||
|
||||
return img
|
||||
}
|
@@ -86,6 +86,8 @@ func (d *h264Encoder) initialize() error {
|
||||
|
||||
res = C.av_frame_get_buffer(d.rgbaFrame, 0)
|
||||
if res < 0 {
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
C.avcodec_close(d.codecCtx)
|
||||
return fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
@@ -102,6 +104,9 @@ func (d *h264Encoder) initialize() error {
|
||||
|
||||
res = C.av_frame_get_buffer(d.yuv420Frame, 0)
|
||||
if res < 0 {
|
||||
C.av_frame_free(&d.yuv420Frame)
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
C.avcodec_close(d.codecCtx)
|
||||
return fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
@@ -116,7 +121,6 @@ func (d *h264Encoder) initialize() error {
|
||||
|
||||
d.pkt = C.av_packet_alloc()
|
||||
if d.pkt == nil {
|
||||
C.av_packet_free(&d.pkt)
|
||||
C.av_frame_free(&d.yuv420Frame)
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
C.avcodec_close(d.codecCtx)
|
||||
|
@@ -4,8 +4,6 @@ package main
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"image"
|
||||
"image/color"
|
||||
"log"
|
||||
"time"
|
||||
|
||||
@@ -22,7 +20,7 @@ import (
|
||||
// 5. write RTP packets to the server
|
||||
|
||||
// This example requires the FFmpeg libraries, that can be installed with this command:
|
||||
// apt install -y libavformat-dev libswscale-dev gcc pkg-config
|
||||
// apt install -y libavcodec-dev libswscale-dev gcc pkg-config
|
||||
|
||||
func multiplyAndDivide(v, m, d int64) int64 {
|
||||
secs := v / d
|
||||
@@ -39,28 +37,6 @@ func randUint32() (uint32, error) {
|
||||
return uint32(b[0])<<24 | uint32(b[1])<<16 | uint32(b[2])<<8 | uint32(b[3]), nil
|
||||
}
|
||||
|
||||
func createDummyImage(i int) *image.RGBA {
|
||||
img := image.NewRGBA(image.Rect(0, 0, 640, 480))
|
||||
|
||||
var cl color.RGBA
|
||||
switch i {
|
||||
case 0:
|
||||
cl = color.RGBA{255, 0, 0, 0}
|
||||
case 1:
|
||||
cl = color.RGBA{0, 255, 0, 0}
|
||||
case 2:
|
||||
cl = color.RGBA{0, 0, 255, 0}
|
||||
}
|
||||
|
||||
for y := 0; y < img.Rect.Dy(); y++ {
|
||||
for x := 0; x < img.Rect.Dx(); x++ {
|
||||
img.SetRGBA(x, y, cl)
|
||||
}
|
||||
}
|
||||
|
||||
return img
|
||||
}
|
||||
|
||||
func main() {
|
||||
// create a stream description that contains a H264 format
|
||||
forma := &format.H264{
|
||||
@@ -111,16 +87,13 @@ func main() {
|
||||
ticker := time.NewTicker(200 * time.Millisecond)
|
||||
defer ticker.Stop()
|
||||
|
||||
i := 0
|
||||
|
||||
for range ticker.C {
|
||||
// create a dummy image
|
||||
img := createDummyImage(i)
|
||||
i = (i + 1) % 3
|
||||
|
||||
// get current timestamp
|
||||
pts := multiplyAndDivide(int64(time.Since(start)), int64(forma.ClockRate()), int64(time.Second))
|
||||
|
||||
// create a dummy image
|
||||
img := createDummyImage()
|
||||
|
||||
// encode the image with H264
|
||||
au, pts, err := h264enc.encode(img, pts)
|
||||
if err != nil {
|
||||
@@ -138,11 +111,11 @@ func main() {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
log.Printf("writing RTP packets with PTS=%d, au=%d, pkts=%d", pts, len(au), len(pkts))
|
||||
log.Printf("writing RTP packets with PTS=%d, au size=%d, pkt count=%d", pts, len(au), len(pkts))
|
||||
|
||||
// write RTP packets to the server
|
||||
for _, pkt := range pkts {
|
||||
pkt.Timestamp = uint32(int64(randomStart) + pts)
|
||||
pkt.Timestamp += uint32(int64(randomStart) + pts)
|
||||
|
||||
err = c.WritePacketRTP(desc.Medias[0], pkt)
|
||||
if err != nil {
|
||||
|
examples/client-record-format-h265/dummy_image.go (new file, 31 lines)
@@ -0,0 +1,31 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"image"
|
||||
"image/color"
|
||||
)
|
||||
|
||||
var dummyImageCount = 0
|
||||
|
||||
func createDummyImage() *image.RGBA {
|
||||
img := image.NewRGBA(image.Rect(0, 0, 640, 480))
|
||||
|
||||
var cl color.RGBA
|
||||
switch dummyImageCount {
|
||||
case 0:
|
||||
cl = color.RGBA{255, 0, 0, 0}
|
||||
case 1:
|
||||
cl = color.RGBA{0, 255, 0, 0}
|
||||
case 2:
|
||||
cl = color.RGBA{0, 0, 255, 0}
|
||||
}
|
||||
dummyImageCount = (dummyImageCount + 1) % 3
|
||||
|
||||
for y := 0; y < img.Rect.Dy(); y++ {
|
||||
for x := 0; x < img.Rect.Dx(); x++ {
|
||||
img.SetRGBA(x, y, cl)
|
||||
}
|
||||
}
|
||||
|
||||
return img
|
||||
}
|
@@ -86,6 +86,8 @@ func (d *h265Encoder) initialize() error {
|
||||
|
||||
res = C.av_frame_get_buffer(d.rgbaFrame, 0)
|
||||
if res < 0 {
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
C.avcodec_close(d.codecCtx)
|
||||
return fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
@@ -102,6 +104,9 @@ func (d *h265Encoder) initialize() error {
|
||||
|
||||
res = C.av_frame_get_buffer(d.yuv420Frame, 0)
|
||||
if res < 0 {
|
||||
C.av_frame_free(&d.yuv420Frame)
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
C.avcodec_close(d.codecCtx)
|
||||
return fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
@@ -116,7 +121,6 @@ func (d *h265Encoder) initialize() error {
|
||||
|
||||
d.pkt = C.av_packet_alloc()
|
||||
if d.pkt == nil {
|
||||
C.av_packet_free(&d.pkt)
|
||||
C.av_frame_free(&d.yuv420Frame)
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
C.avcodec_close(d.codecCtx)
|
||||
|
@@ -4,8 +4,6 @@ package main
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"image"
|
||||
"image/color"
|
||||
"log"
|
||||
"time"
|
||||
|
||||
@@ -22,7 +20,7 @@ import (
|
||||
// 5. write RTP packets to the server
|
||||
|
||||
// This example requires the FFmpeg libraries, that can be installed with this command:
|
||||
// apt install -y libavformat-dev libswscale-dev gcc pkg-config
|
||||
// apt install -y libavcodec-dev libswscale-dev gcc pkg-config
|
||||
|
||||
func multiplyAndDivide(v, m, d int64) int64 {
|
||||
secs := v / d
|
||||
@@ -39,28 +37,6 @@ func randUint32() (uint32, error) {
|
||||
return uint32(b[0])<<24 | uint32(b[1])<<16 | uint32(b[2])<<8 | uint32(b[3]), nil
|
||||
}
|
||||
|
||||
func createDummyImage(i int) *image.RGBA {
|
||||
img := image.NewRGBA(image.Rect(0, 0, 640, 480))
|
||||
|
||||
var cl color.RGBA
|
||||
switch i {
|
||||
case 0:
|
||||
cl = color.RGBA{255, 0, 0, 0}
|
||||
case 1:
|
||||
cl = color.RGBA{0, 255, 0, 0}
|
||||
case 2:
|
||||
cl = color.RGBA{0, 0, 255, 0}
|
||||
}
|
||||
|
||||
for y := 0; y < img.Rect.Dy(); y++ {
|
||||
for x := 0; x < img.Rect.Dx(); x++ {
|
||||
img.SetRGBA(x, y, cl)
|
||||
}
|
||||
}
|
||||
|
||||
return img
|
||||
}
|
||||
|
||||
func main() {
|
||||
// create a stream description that contains a H265 format
|
||||
forma := &format.H265{
|
||||
@@ -110,16 +86,13 @@ func main() {
|
||||
ticker := time.NewTicker(200 * time.Millisecond)
|
||||
defer ticker.Stop()
|
||||
|
||||
i := 0
|
||||
|
||||
for range ticker.C {
|
||||
// create a dummy image
|
||||
img := createDummyImage(i)
|
||||
i = (i + 1) % 3
|
||||
|
||||
// get current timestamp
|
||||
pts := multiplyAndDivide(int64(time.Since(start)), int64(forma.ClockRate()), int64(time.Second))
|
||||
|
||||
// create a dummy image
|
||||
img := createDummyImage()
|
||||
|
||||
// encode the image with H265
|
||||
au, pts, err := h265enc.encode(img, pts)
|
||||
if err != nil {
|
||||
@@ -137,11 +110,11 @@ func main() {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
log.Printf("writing RTP packets with PTS=%d, au=%d, pkts=%d", pts, len(au), len(pkts))
|
||||
log.Printf("writing RTP packets with PTS=%d, au size=%d, pkt count=%d", pts, len(au), len(pkts))
|
||||
|
||||
// write RTP packets to the server
|
||||
for _, pkt := range pkts {
|
||||
pkt.Timestamp = uint32(int64(randomStart) + pts)
|
||||
pkt.Timestamp += uint32(int64(randomStart) + pts)
|
||||
|
||||
err = c.WritePacketRTP(desc.Medias[0], pkt)
|
||||
if err != nil {
|
||||
|
24
examples/client-record-format-lpcm/dummy_audio.go
Normal file
@@ -0,0 +1,24 @@
|
||||
package main
|
||||
|
||||
import "math"
|
||||
|
||||
const (
|
||||
sampleRate = 48000
|
||||
frequency = 400
|
||||
amplitude = (1 << 14) - 1
|
||||
)
|
||||
|
||||
func createDummyAudio(pts int64, prevPTS int64) []byte {
|
||||
sampleCount := (pts - prevPTS)
|
||||
n := 0
|
||||
ret := make([]byte, sampleCount*2)
|
||||
|
||||
for i := int64(0); i < sampleCount; i++ {
|
||||
v := int16(amplitude * math.Sin((float64(prevPTS+i)*frequency*math.Pi*2)/sampleRate))
|
||||
ret[n] = byte(v >> 8)
|
||||
ret[n+1] = byte(v)
|
||||
n += 2
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
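createDummyAudio above emits one 16-bit big-endian sample per clock tick between prevPTS and pts, so the buffer size follows directly from the PTS delta. A short sketch of that relationship, assuming the 100 ms ticker period used by the example below and the 48 kHz clock declared above:

package main

import "fmt"

func main() {
	const sampleRate = 48000 // clock rate of the LPCM format

	// one 100 ms ticker period advances the PTS by 4800 ticks, so
	// createDummyAudio(pts, prevPTS) returns 4800 samples, i.e. 9600 bytes
	// of 16-bit big-endian mono audio
	prevPTS := int64(0)
	pts := prevPTS + sampleRate/10

	sampleCount := pts - prevPTS
	fmt.Println(sampleCount, sampleCount*2) // 4800 9600
}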
@@ -1,80 +1,102 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"log"
|
||||
"net"
|
||||
"time"
|
||||
|
||||
"github.com/bluenviron/gortsplib/v4"
|
||||
"github.com/bluenviron/gortsplib/v4/pkg/description"
|
||||
"github.com/bluenviron/gortsplib/v4/pkg/format"
|
||||
"github.com/pion/rtp"
|
||||
)
|
||||
|
||||
// This example shows how to
|
||||
// 1. generate a LPCM stream and RTP packets with GStreamer
|
||||
// 2. connect to a RTSP server, announce an LPCM format
|
||||
// 3. route the packets from GStreamer to the server
|
||||
// 1. connect to a RTSP server, announce an LPCM format
|
||||
// 2. generate dummy LPCM audio samples
|
||||
// 3. generate RTP packets from LPCM audio samples
|
||||
// 4. write RTP packets to the server
|
||||
|
||||
func multiplyAndDivide(v, m, d int64) int64 {
|
||||
secs := v / d
|
||||
dec := v % d
|
||||
return (secs*m + dec*m/d)
|
||||
}
|
||||
|
||||
func randUint32() (uint32, error) {
|
||||
var b [4]byte
|
||||
_, err := rand.Read(b[:])
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return uint32(b[0])<<24 | uint32(b[1])<<16 | uint32(b[2])<<8 | uint32(b[3]), nil
|
||||
}
|
||||
|
||||
func main() {
|
||||
// open a listener to receive RTP/LPCM packets
|
||||
pc, err := net.ListenPacket("udp", "localhost:9000")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
defer pc.Close()
|
||||
|
||||
log.Println("Waiting for a RTP/LPCM stream on UDP port 9000 - you can send one with GStreamer:\n" +
|
||||
"gst-launch-1.0 audiotestsrc freq=300 ! audioconvert ! audioresample ! audio/x-raw,format=S16BE,rate=44100" +
|
||||
" ! rtpL16pay ! udpsink host=127.0.0.1 port=9000")
|
||||
|
||||
// wait for first packet
|
||||
buf := make([]byte, 2048)
|
||||
n, _, err := pc.ReadFrom(buf)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
log.Println("stream connected")
|
||||
|
||||
// create a description that contains a LPCM format
|
||||
forma := &format.LPCM{
|
||||
PayloadTyp: 96,
|
||||
BitDepth: 16,
|
||||
SampleRate: 48000,
|
||||
ChannelCount: 1,
|
||||
}
|
||||
desc := &description.Session{
|
||||
Medias: []*description.Media{{
|
||||
Type: description.MediaTypeVideo,
|
||||
Formats: []format.Format{&format.LPCM{
|
||||
PayloadTyp: 96,
|
||||
BitDepth: 16,
|
||||
SampleRate: 44100,
|
||||
ChannelCount: 1,
|
||||
}},
|
||||
Type: description.MediaTypeAudio,
|
||||
Formats: []format.Format{forma},
|
||||
}},
|
||||
}
|
||||
|
||||
c := gortsplib.Client{}
|
||||
|
||||
// connect to the server and start recording
|
||||
err = c.StartRecording("rtsp://myuser:mypass@localhost:8554/mystream", desc)
|
||||
c := gortsplib.Client{}
|
||||
err := c.StartRecording("rtsp://myuser:mypass@localhost:8554/mystream", desc)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
defer c.Close()
|
||||
|
||||
var pkt rtp.Packet
|
||||
for {
|
||||
// parse RTP packet
|
||||
err = pkt.Unmarshal(buf[:n])
|
||||
// setup LPCM -> RTP encoder
|
||||
rtpEnc, err := forma.CreateEncoder()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
start := time.Now()
|
||||
prevPTS := int64(0)
|
||||
|
||||
randomStart, err := randUint32()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// setup a ticker to sleep between writings
|
||||
ticker := time.NewTicker(100 * time.Millisecond)
|
||||
defer ticker.Stop()
|
||||
|
||||
for range ticker.C {
|
||||
// get current timestamp
|
||||
pts := multiplyAndDivide(int64(time.Since(start)), int64(forma.ClockRate()), int64(time.Second))
|
||||
|
||||
// generate dummy LPCM audio samples
|
||||
samples := createDummyAudio(pts, prevPTS)
|
||||
|
||||
// generate RTP packets from LPCM samples
|
||||
pkts, err := rtpEnc.Encode(samples)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// route RTP packet to the server
|
||||
err = c.WritePacketRTP(desc.Medias[0], &pkt)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
log.Printf("writing RTP packets with PTS=%d, sample size=%d, pkt count=%d", prevPTS, len(samples), len(pkts))
|
||||
|
||||
// write RTP packets to the server
|
||||
for _, pkt := range pkts {
|
||||
pkt.Timestamp += uint32(int64(randomStart) + prevPTS)
|
||||
|
||||
err = c.WritePacketRTP(desc.Medias[0], pkt)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
// read another RTP packet from source
|
||||
n, _, err = pc.ReadFrom(buf)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
prevPTS = pts
|
||||
}
|
||||
}
|
||||
|
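The writing loop above shifts every packet timestamp by a random offset plus the current PTS; using += rather than = presumably preserves whatever relative timestamp the RTP encoder already wrote into the packet. A standalone sketch of the offset arithmetic (the PTS and the packet's initial timestamp are made-up values; randUint32 is repeated only to keep the sketch self-contained):

package main

import (
	"crypto/rand"
	"fmt"
)

// randUint32 mirrors the helper above: a random initial RTP timestamp,
// as recommended by RFC 3550.
func randUint32() (uint32, error) {
	var b [4]byte
	_, err := rand.Read(b[:])
	if err != nil {
		return 0, err
	}
	return uint32(b[0])<<24 | uint32(b[1])<<16 | uint32(b[2])<<8 | uint32(b[3]), nil
}

func main() {
	randomStart, err := randUint32()
	if err != nil {
		panic(err)
	}

	// hypothetical values: the encoder left a relative timestamp of 0 in
	// the packet and the current batch of samples starts at PTS 4800
	var pktTimestamp uint32
	pts := int64(4800)

	// same operation as the writing loop above; uint32 arithmetic wraps
	// around, which is what RTP timestamps expect
	pktTimestamp += uint32(int64(randomStart) + pts)

	fmt.Println(pktTimestamp)
}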
31
examples/client-record-format-mjpeg/dummy_image.go
Normal file
@@ -0,0 +1,31 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"image"
|
||||
"image/color"
|
||||
)
|
||||
|
||||
var dummyImageCount = 0
|
||||
|
||||
func createDummyImage() *image.RGBA {
|
||||
img := image.NewRGBA(image.Rect(0, 0, 640, 480))
|
||||
|
||||
var cl color.RGBA
|
||||
switch dummyImageCount {
|
||||
case 0:
|
||||
cl = color.RGBA{255, 0, 0, 0}
|
||||
case 1:
|
||||
cl = color.RGBA{0, 255, 0, 0}
|
||||
case 2:
|
||||
cl = color.RGBA{0, 0, 255, 0}
|
||||
}
|
||||
dummyImageCount = (dummyImageCount + 1) % 3
|
||||
|
||||
for y := 0; y < img.Rect.Dy(); y++ {
|
||||
for x := 0; x < img.Rect.Dx(); x++ {
|
||||
img.SetRGBA(x, y, cl)
|
||||
}
|
||||
}
|
||||
|
||||
return img
|
||||
}
|
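The M-JPEG example below feeds a frame like the one produced by dummy_image.go to the standard library JPEG encoder before packetizing it. A self-contained sketch of that step (the solid red frame stands in for createDummyImage(), and the quality value is an arbitrary choice for the sketch, not necessarily what the example uses):

package main

import (
	"bytes"
	"fmt"
	"image"
	"image/color"
	"image/jpeg"
)

func main() {
	// stand-in for createDummyImage(): a solid red 640x480 frame
	img := image.NewRGBA(image.Rect(0, 0, 640, 480))
	for y := 0; y < img.Rect.Dy(); y++ {
		for x := 0; x < img.Rect.Dx(); x++ {
			img.SetRGBA(x, y, color.RGBA{255, 0, 0, 0})
		}
	}

	// encode the frame with image/jpeg, as the M-JPEG example does;
	// quality 80 is an assumption made only for this sketch
	var buf bytes.Buffer
	if err := jpeg.Encode(&buf, img, &jpeg.Options{Quality: 80}); err != nil {
		panic(err)
	}
	fmt.Println("encoded JPEG size:", buf.Len())
}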
@@ -3,9 +3,8 @@ package main
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/rand"
|
||||
"image"
|
||||
"image/color"
|
||||
"image/jpeg"
|
||||
"log"
|
||||
"time"
|
||||
|
||||
"github.com/bluenviron/gortsplib/v4"
|
||||
@@ -35,28 +34,6 @@ func randUint32() (uint32, error) {
|
||||
return uint32(b[0])<<24 | uint32(b[1])<<16 | uint32(b[2])<<8 | uint32(b[3]), nil
|
||||
}
|
||||
|
||||
func createDummyImage(i int) *image.RGBA {
|
||||
img := image.NewRGBA(image.Rect(0, 0, 640, 480))
|
||||
|
||||
var cl color.RGBA
|
||||
switch i {
|
||||
case 0:
|
||||
cl = color.RGBA{255, 0, 0, 0}
|
||||
case 1:
|
||||
cl = color.RGBA{0, 255, 0, 0}
|
||||
case 2:
|
||||
cl = color.RGBA{0, 0, 255, 0}
|
||||
}
|
||||
|
||||
for y := 0; y < img.Rect.Dy(); y++ {
|
||||
for x := 0; x < img.Rect.Dx(); x++ {
|
||||
img.SetRGBA(x, y, cl)
|
||||
}
|
||||
}
|
||||
|
||||
return img
|
||||
}
|
||||
|
||||
func main() {
|
||||
// create a description that contains a M-JPEG format
|
||||
forma := &format.MJPEG{}
|
||||
@@ -92,12 +69,12 @@ func main() {
|
||||
ticker := time.NewTicker(200 * time.Millisecond)
|
||||
defer ticker.Stop()
|
||||
|
||||
i := 0
|
||||
|
||||
for range ticker.C {
|
||||
// get current timestamp
|
||||
pts := multiplyAndDivide(int64(time.Since(start)), int64(forma.ClockRate()), int64(time.Second))
|
||||
|
||||
// create a dummy image
|
||||
img := createDummyImage(i)
|
||||
i = (i + 1) % 3
|
||||
img := createDummyImage()
|
||||
|
||||
// encode the image with JPEG
|
||||
var buf bytes.Buffer
|
||||
@@ -112,12 +89,11 @@ func main() {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// get current timestamp
|
||||
pts := multiplyAndDivide(int64(time.Since(start)), int64(forma.ClockRate()), int64(time.Second))
|
||||
log.Printf("writing RTP packets with PTS=%d, jpeg size=%d, pkt count=%d", pts, buf.Len(), len(pkts))
|
||||
|
||||
// write RTP packets to the server
|
||||
for _, pkt := range pkts {
|
||||
pkt.Timestamp = uint32(int64(randomStart) + pts)
|
||||
pkt.Timestamp += uint32(int64(randomStart) + pts)
|
||||
|
||||
err = c.WritePacketRTP(desc.Medias[0], pkt)
|
||||
if err != nil {
|
||||
|
24
examples/client-record-format-mpeg4audio/dummy_audio.go
Normal file
@@ -0,0 +1,24 @@
|
||||
package main
|
||||
|
||||
import "math"
|
||||
|
||||
const (
|
||||
sampleRate = 48000
|
||||
frequency = 400
|
||||
amplitude = (1 << 14) - 1
|
||||
)
|
||||
|
||||
func createDummyAudio(pts int64, prevPTS int64) []byte {
|
||||
sampleCount := (pts - prevPTS)
|
||||
n := 0
|
||||
ret := make([]byte, sampleCount*2)
|
||||
|
||||
for i := int64(0); i < sampleCount; i++ {
|
||||
v := int16(amplitude * math.Sin((float64(prevPTS+i)*frequency*math.Pi*2)/sampleRate))
|
||||
ret[n] = byte(v >> 8)
|
||||
ret[n+1] = byte(v)
|
||||
n += 2
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
@@ -1,85 +1,129 @@
|
||||
//go:build cgo
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"log"
|
||||
"net"
|
||||
"time"
|
||||
|
||||
"github.com/bluenviron/gortsplib/v4"
|
||||
"github.com/bluenviron/gortsplib/v4/pkg/description"
|
||||
"github.com/bluenviron/gortsplib/v4/pkg/format"
|
||||
"github.com/bluenviron/mediacommon/v2/pkg/codecs/mpeg4audio"
|
||||
"github.com/pion/rtp"
|
||||
)
|
||||
|
||||
// This example shows how to
|
||||
// 1. generate a MPEG-4 audio stream and RTP packets with GStreamer
|
||||
// 2. connect to a RTSP server, announce an MPEG-4 audio format
|
||||
// 3. route the packets from GStreamer to the server
|
||||
// 1. connect to a RTSP server, announce an MPEG-4 Audio (AAC) format
|
||||
// 2. generate dummy LPCM audio samples
|
||||
// 3. encode audio samples with MPEG-4 Audio (AAC)
|
||||
// 4. generate RTP packets from MPEG-4 Audio access units
|
||||
// 5. write RTP packets to the server
|
||||
|
||||
// This example requires the FFmpeg libraries, that can be installed with this command:
|
||||
// apt install -y libavcodec-dev libswresample-dev gcc pkg-config
|
||||
|
||||
func multiplyAndDivide(v, m, d int64) int64 {
|
||||
secs := v / d
|
||||
dec := v % d
|
||||
return (secs*m + dec*m/d)
|
||||
}
|
||||
|
||||
func randUint32() (uint32, error) {
|
||||
var b [4]byte
|
||||
_, err := rand.Read(b[:])
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return uint32(b[0])<<24 | uint32(b[1])<<16 | uint32(b[2])<<8 | uint32(b[3]), nil
|
||||
}
|
||||
|
||||
func main() {
|
||||
// open a listener to receive RTP/MPEG-4 audio packets
|
||||
pc, err := net.ListenPacket("udp", "localhost:9000")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
// create a description that contains a MPEG-4 Audio format
|
||||
forma := &format.MPEG4Audio{
|
||||
PayloadTyp: 96,
|
||||
Config: &mpeg4audio.Config{
|
||||
Type: mpeg4audio.ObjectTypeAACLC,
|
||||
SampleRate: 48000,
|
||||
ChannelCount: 2,
|
||||
},
|
||||
SizeLength: 13,
|
||||
IndexLength: 3,
|
||||
IndexDeltaLength: 3,
|
||||
}
|
||||
defer pc.Close()
|
||||
|
||||
log.Println("Waiting for a RTP/MPEG-4 audio stream on UDP port 9000 - you can send one with GStreamer:\n" +
|
||||
"gst-launch-1.0 audiotestsrc freq=300 ! audioconvert ! audioresample ! audio/x-raw,rate=48000" +
|
||||
" ! avenc_aac bitrate=128000 ! rtpmp4gpay ! udpsink host=127.0.0.1 port=9000")
|
||||
|
||||
// wait for first packet
|
||||
buf := make([]byte, 2048)
|
||||
n, _, err := pc.ReadFrom(buf)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
log.Println("stream connected")
|
||||
|
||||
// create a description that contains a MPEG-4 audio format
|
||||
desc := &description.Session{
|
||||
Medias: []*description.Media{{
|
||||
Type: description.MediaTypeVideo,
|
||||
Formats: []format.Format{&format.MPEG4Audio{
|
||||
PayloadTyp: 96,
|
||||
Config: &mpeg4audio.Config{
|
||||
Type: mpeg4audio.ObjectTypeAACLC,
|
||||
SampleRate: 48000,
|
||||
ChannelCount: 2,
|
||||
},
|
||||
SizeLength: 13,
|
||||
IndexLength: 3,
|
||||
IndexDeltaLength: 3,
|
||||
}},
|
||||
Type: description.MediaTypeAudio,
|
||||
Formats: []format.Format{forma},
|
||||
}},
|
||||
}
|
||||
|
||||
// connect to the server and start recording
|
||||
c := gortsplib.Client{}
|
||||
err = c.StartRecording("rtsp://myuser:mypass@localhost:8554/mystream", desc)
|
||||
err := c.StartRecording("rtsp://myuser:mypass@localhost:8554/mystream", desc)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
defer c.Close()
|
||||
|
||||
var pkt rtp.Packet
|
||||
for {
|
||||
// parse RTP packet
|
||||
err = pkt.Unmarshal(buf[:n])
|
||||
// setup LPCM -> MPEG-4 Audio encoder
|
||||
mp4aEnc := &mp4aEncoder{}
|
||||
err = mp4aEnc.initialize()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// setup MPEG-4 Audio -> RTP encoder
|
||||
rtpEnc, err := forma.CreateEncoder()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
start := time.Now()
|
||||
prevPTS := int64(0)
|
||||
|
||||
randomStart, err := randUint32()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// setup a ticker to sleep between writings
|
||||
ticker := time.NewTicker(100 * time.Millisecond)
|
||||
defer ticker.Stop()
|
||||
|
||||
for range ticker.C {
|
||||
// get current timestamp
|
||||
pts := multiplyAndDivide(int64(time.Since(start)), int64(forma.ClockRate()), int64(time.Second))
|
||||
|
||||
// generate dummy LPCM audio samples
|
||||
samples := createDummyAudio(pts, prevPTS)
|
||||
|
||||
// encode samples with MPEG-4 Audio
|
||||
aus, outPTS, err := mp4aEnc.encode(samples)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
if aus == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
// generate RTP packets from MPEG-4 audio access units
|
||||
pkts, err := rtpEnc.Encode(aus)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// route RTP packet to the server
|
||||
err = c.WritePacketRTP(desc.Medias[0], &pkt)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
log.Printf("writing RTP packets with PTS=%d, packet count=%d", outPTS, len(pkts))
|
||||
|
||||
for _, pkt := range pkts {
|
||||
pkt.Timestamp += uint32(int64(randomStart) + outPTS)
|
||||
|
||||
err = c.WritePacketRTP(desc.Medias[0], pkt)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
// read another RTP packet from source
|
||||
n, _, err = pc.ReadFrom(buf)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
prevPTS = pts
|
||||
}
|
||||
}
|
||||
|
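SizeLength=13, IndexLength=3 and IndexDeltaLength=3 in the format description above correspond to the AU-header layout of the AAC-hbr mode of RFC 3640, which the RTP payloader obtained from forma.CreateEncoder() writes in front of each access unit. A sketch of how one 13-bit AU size and 3-bit AU index pack into a 16-bit AU header (illustration only; the real packing is done inside gortsplib, and the 420-byte access unit is a hypothetical value):

package main

import "fmt"

// packAUHeader packs an access-unit size (13 bits) and AU-Index (3 bits)
// into the 16-bit AU header of the AAC-hbr mode of RFC 3640, matching
// SizeLength=13 and IndexLength=3 from the format description above.
func packAUHeader(auSize uint16, auIndex uint8) [2]byte {
	v := auSize<<3 | uint16(auIndex&0x07)
	return [2]byte{byte(v >> 8), byte(v)}
}

func main() {
	// a hypothetical 420-byte access unit, first AU in the packet (index 0)
	hdr := packAUHeader(420, 0)
	fmt.Printf("%08b %08b\n", hdr[0], hdr[1]) // 00001101 00100000
}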
230
examples/client-record-format-mpeg4audio/mp4a_encoder.go
Normal file
@@ -0,0 +1,230 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"runtime"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
// #cgo pkg-config: libavcodec libavutil libswresample
|
||||
// #include <libavcodec/avcodec.h>
|
||||
// #include <libswresample/swresample.h>
|
||||
// #include <libavutil/opt.h>
|
||||
// #include <libavutil/channel_layout.h>
|
||||
import "C"
|
||||
|
||||
func frameData(frame *C.AVFrame) **C.uint8_t {
|
||||
return (**C.uint8_t)(unsafe.Pointer(&frame.data[0]))
|
||||
}
|
||||
|
||||
func frameLineSize(frame *C.AVFrame) *C.int {
|
||||
return (*C.int)(unsafe.Pointer(&frame.linesize[0]))
|
||||
}
|
||||
|
||||
func switchEndianness16(samples []byte) []byte {
|
||||
ls := len(samples)
|
||||
for i := 0; i < ls; i += 2 {
|
||||
samples[i], samples[i+1] = samples[i+1], samples[i]
|
||||
}
|
||||
return samples
|
||||
}
|
||||
|
||||
func littleEndianToFloat(swrCtx *C.struct_SwrContext, samples []byte) ([]byte, error) {
|
||||
sampleCount := len(samples) / 2
|
||||
outSize := len(samples) * 2
|
||||
outSamples := make([]byte, outSize)
|
||||
|
||||
var p runtime.Pinner
|
||||
p.Pin(&outSamples[0])
|
||||
p.Pin(&samples[0])
|
||||
defer p.Unpin()
|
||||
|
||||
outBufs := (*C.uint8_t)(&outSamples[0])
|
||||
inBufs := (*C.uint8_t)(&samples[0])
|
||||
|
||||
res := C.swr_convert(swrCtx, &outBufs, (C.int)(sampleCount), &inBufs, (C.int)(sampleCount))
|
||||
if res < 0 {
|
||||
return nil, fmt.Errorf("swr_convert() failed")
|
||||
}
|
||||
|
||||
return outSamples, nil
|
||||
}
|
||||
|
||||
// mp4aEncoder is a wrapper around FFmpeg's MPEG-4 Audio encoder.
|
||||
type mp4aEncoder struct {
|
||||
Width int
|
||||
Height int
|
||||
FPS int
|
||||
|
||||
codecCtx *C.AVCodecContext
|
||||
frame *C.AVFrame
|
||||
swrCtx *C.struct_SwrContext
|
||||
pkt *C.AVPacket
|
||||
samplesBuffer []byte
|
||||
samplesBufferPTS int64
|
||||
}
|
||||
|
||||
// initialize initializes a mp4aEncoder.
|
||||
func (d *mp4aEncoder) initialize() error {
|
||||
codec := C.avcodec_find_encoder(C.AV_CODEC_ID_AAC)
|
||||
if codec == nil {
|
||||
return fmt.Errorf("avcodec_find_encoder() failed")
|
||||
}
|
||||
|
||||
d.codecCtx = C.avcodec_alloc_context3(codec)
|
||||
if d.codecCtx == nil {
|
||||
return fmt.Errorf("avcodec_alloc_context3() failed")
|
||||
}
|
||||
|
||||
d.codecCtx.bit_rate = 64000
|
||||
d.codecCtx.sample_fmt = C.AV_SAMPLE_FMT_FLT
|
||||
d.codecCtx.sample_rate = 48000
|
||||
d.codecCtx.channel_layout = C.AV_CH_LAYOUT_MONO
|
||||
d.codecCtx.channels = C.av_get_channel_layout_nb_channels(d.codecCtx.channel_layout)
|
||||
|
||||
res := C.avcodec_open2(d.codecCtx, codec, nil)
|
||||
if res < 0 {
|
||||
C.avcodec_close(d.codecCtx)
|
||||
return fmt.Errorf("avcodec_open2() failed")
|
||||
}
|
||||
|
||||
d.frame = C.av_frame_alloc()
|
||||
if d.frame == nil {
|
||||
C.avcodec_close(d.codecCtx)
|
||||
return fmt.Errorf("av_frame_alloc() failed")
|
||||
}
|
||||
|
||||
d.frame.nb_samples = d.codecCtx.frame_size
|
||||
d.frame.format = (C.int)(d.codecCtx.sample_fmt)
|
||||
d.frame.channel_layout = d.codecCtx.channel_layout
|
||||
|
||||
res = C.av_frame_get_buffer(d.frame, 0)
|
||||
if res < 0 {
|
||||
C.av_frame_free(&d.frame)
|
||||
C.avcodec_close(d.codecCtx)
|
||||
return fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
d.swrCtx = C.swr_alloc()
|
||||
if d.swrCtx == nil {
|
||||
C.av_frame_free(&d.frame)
|
||||
C.avcodec_close(d.codecCtx)
|
||||
return fmt.Errorf("swr_alloc() failed")
|
||||
}
|
||||
|
||||
cstr := C.CString("out_channel_layout")
|
||||
defer C.free(unsafe.Pointer(cstr))
|
||||
C.av_opt_set_channel_layout(unsafe.Pointer(d.swrCtx), cstr, (C.int64_t)(d.codecCtx.channel_layout), 0)
|
||||
|
||||
cstr = C.CString("out_sample_fmt")
|
||||
defer C.free(unsafe.Pointer(cstr))
|
||||
C.av_opt_set_int(unsafe.Pointer(d.swrCtx), cstr, C.AV_SAMPLE_FMT_FLTP, 0)
|
||||
|
||||
cstr = C.CString("out_sample_rate")
|
||||
defer C.free(unsafe.Pointer(cstr))
|
||||
C.av_opt_set_int(unsafe.Pointer(d.swrCtx), cstr, 48000, 0)
|
||||
|
||||
cstr = C.CString("in_channel_layout")
|
||||
defer C.free(unsafe.Pointer(cstr))
|
||||
C.av_opt_set_channel_layout(unsafe.Pointer(d.swrCtx), cstr, (C.int64_t)(d.codecCtx.channel_layout), 0)
|
||||
|
||||
cstr = C.CString("in_sample_fmt")
|
||||
defer C.free(unsafe.Pointer(cstr))
|
||||
C.av_opt_set_int(unsafe.Pointer(d.swrCtx), cstr, C.AV_SAMPLE_FMT_S16, 0)
|
||||
|
||||
cstr = C.CString("in_sample_rate")
|
||||
defer C.free(unsafe.Pointer(cstr))
|
||||
C.av_opt_set_int(unsafe.Pointer(d.swrCtx), cstr, 48000, 0)
|
||||
|
||||
res = C.swr_init(d.swrCtx)
|
||||
if res < 0 {
|
||||
C.swr_free(&d.swrCtx)
|
||||
C.av_frame_free(&d.frame)
|
||||
C.avcodec_close(d.codecCtx)
|
||||
return fmt.Errorf("swr_init() failed")
|
||||
}
|
||||
|
||||
d.pkt = C.av_packet_alloc()
|
||||
if d.pkt == nil {
|
||||
C.swr_free(&d.swrCtx)
|
||||
C.av_frame_free(&d.frame)
|
||||
C.avcodec_close(d.codecCtx)
|
||||
return fmt.Errorf("av_packet_alloc() failed")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// close closes the encoder.
|
||||
func (d *mp4aEncoder) close() {
|
||||
C.av_packet_free(&d.pkt)
|
||||
C.swr_free(&d.swrCtx)
|
||||
C.av_frame_free(&d.frame)
|
||||
C.avcodec_close(d.codecCtx)
|
||||
}
|
||||
|
||||
// encode encodes LPCM samples into MPEG-4 Audio access units.
|
||||
func (d *mp4aEncoder) encode(samples []byte) ([][]byte, int64, error) {
|
||||
// convert from big-endian to little-endian
|
||||
samples = switchEndianness16(samples)
|
||||
|
||||
// convert from little-endian to float
|
||||
samples, err := littleEndianToFloat(d.swrCtx, samples)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
|
||||
// put samples into an internal buffer
|
||||
d.samplesBuffer = append(d.samplesBuffer, samples...)
|
||||
|
||||
// split buffer into AVFrames
|
||||
requiredSampleSize := (int)(d.codecCtx.frame_size) * 4
|
||||
frameCount := len(d.samplesBuffer) / requiredSampleSize
|
||||
if frameCount == 0 {
|
||||
return nil, 0, fmt.Errorf("sample buffer is not filled enough")
|
||||
}
|
||||
|
||||
ret := make([][]byte, frameCount)
|
||||
var pts int64
|
||||
|
||||
for i := 0; i < frameCount; i++ {
|
||||
samples = d.samplesBuffer[:requiredSampleSize]
|
||||
d.samplesBuffer = d.samplesBuffer[requiredSampleSize:]
|
||||
|
||||
samplePTS := d.samplesBufferPTS
|
||||
d.samplesBufferPTS += int64(len(samples) / 4)
|
||||
|
||||
// pass samples pointer to frame
|
||||
d.frame.data[0] = (*C.uint8_t)(&samples[0])
|
||||
|
||||
// send frame to the encoder
|
||||
d.frame.pts = (C.int64_t)(samplePTS)
|
||||
res := C.avcodec_send_frame(d.codecCtx, d.frame)
|
||||
if res < 0 {
|
||||
return nil, 0, fmt.Errorf("avcodec_send_frame() failed")
|
||||
}
|
||||
|
||||
// wait for result
|
||||
res = C.avcodec_receive_packet(d.codecCtx, d.pkt)
|
||||
if res == -C.EAGAIN {
|
||||
return nil, 0, nil
|
||||
}
|
||||
if res < 0 {
|
||||
fmt.Println(res)
|
||||
return nil, 0, fmt.Errorf("avcodec_receive_packet() failed")
|
||||
}
|
||||
|
||||
// perform a deep copy of the data before unreferencing the packet
|
||||
data := C.GoBytes(unsafe.Pointer(d.pkt.data), d.pkt.size)
|
||||
|
||||
if i == 0 {
|
||||
pts = (int64)(d.pkt.pts)
|
||||
}
|
||||
|
||||
C.av_packet_unref(d.pkt)
|
||||
|
||||
ret[i] = data
|
||||
}
|
||||
|
||||
return ret, pts, nil
|
||||
}
|
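encode() above does not feed the codec sample by sample: it appends incoming samples to an internal buffer and only consumes whole codec frames, advancing the PTS by the samples actually handed over. A simplified pure-Go analogue of that buffering (the 1024-sample frame size is the usual AAC-LC value and is an assumption here; the real code reads it from codecCtx.frame_size):

package main

import "fmt"

// frameSplitter mimics the buffering in mp4aEncoder.encode(): samples
// accumulate in an internal buffer and are consumed in fixed-size frames,
// with the PTS advancing by the number of samples handed to the codec.
type frameSplitter struct {
	frameSize int // samples per codec frame (1024 assumed for AAC-LC)
	buf       []float32
	pts       int64
}

func (s *frameSplitter) push(samples []float32) (frames [][]float32, firstPTS int64, ok bool) {
	s.buf = append(s.buf, samples...)

	n := len(s.buf) / s.frameSize
	if n == 0 {
		return nil, 0, false // "sample buffer is not filled enough"
	}

	firstPTS = s.pts
	for i := 0; i < n; i++ {
		frames = append(frames, s.buf[:s.frameSize])
		s.buf = s.buf[s.frameSize:]
		s.pts += int64(s.frameSize)
	}
	return frames, firstPTS, true
}

func main() {
	sp := &frameSplitter{frameSize: 1024}

	// 4800 samples (100 ms at 48 kHz) yield four full frames; the
	// remaining 704 samples stay buffered for the next tick
	frames, pts, ok := sp.push(make([]float32, 4800))
	fmt.Println(len(frames), pts, ok, len(sp.buf)) // 4 0 true 704
}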
24
examples/client-record-format-opus/dummy_audio.go
Normal file
@@ -0,0 +1,24 @@
|
||||
package main
|
||||
|
||||
import "math"
|
||||
|
||||
const (
|
||||
sampleRate = 48000
|
||||
frequency = 400
|
||||
amplitude = (1 << 14) - 1
|
||||
)
|
||||
|
||||
func createDummyAudio(pts int64, prevPTS int64) []byte {
|
||||
sampleCount := (pts - prevPTS)
|
||||
n := 0
|
||||
ret := make([]byte, sampleCount*2)
|
||||
|
||||
for i := int64(0); i < sampleCount; i++ {
|
||||
v := int16(amplitude * math.Sin((float64(prevPTS+i)*frequency*math.Pi*2)/sampleRate))
|
||||
ret[n] = byte(v >> 8)
|
||||
ret[n+1] = byte(v)
|
||||
n += 2
|
||||
}
|
||||
|
||||
return ret
|
||||
}
|
@@ -1,77 +1,139 @@
|
||||
//go:build cgo
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"log"
|
||||
"net"
|
||||
"time"
|
||||
|
||||
"github.com/bluenviron/gortsplib/v4"
|
||||
"github.com/bluenviron/gortsplib/v4/pkg/description"
|
||||
"github.com/bluenviron/gortsplib/v4/pkg/format"
|
||||
"github.com/bluenviron/gortsplib/v4/pkg/format/rtpsimpleaudio"
|
||||
"github.com/bluenviron/mediacommon/v2/pkg/codecs/opus"
|
||||
"github.com/pion/rtp"
|
||||
)
|
||||
|
||||
// This example shows how to
|
||||
// 1. generate a Opus stream and RTP packets with GStreamer
|
||||
// 2. connect to a RTSP server, announce an Opus format
|
||||
// 3. route the packets from GStreamer to the server
|
||||
// 1. connect to a RTSP server, announce an Opus format
|
||||
// 2. generate dummy LPCM audio samples
|
||||
// 3. encode audio samples with Opus
|
||||
// 4. generate RTP packets from Opus packets
|
||||
// 5. write RTP packets to the server
|
||||
|
||||
// This example requires the FFmpeg libraries, that can be installed with this command:
|
||||
// apt install -y libavcodec-dev gcc pkg-config
|
||||
|
||||
func multiplyAndDivide(v, m, d int64) int64 {
|
||||
secs := v / d
|
||||
dec := v % d
|
||||
return (secs*m + dec*m/d)
|
||||
}
|
||||
|
||||
func randUint32() (uint32, error) {
|
||||
var b [4]byte
|
||||
_, err := rand.Read(b[:])
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return uint32(b[0])<<24 | uint32(b[1])<<16 | uint32(b[2])<<8 | uint32(b[3]), nil
|
||||
}
|
||||
|
||||
func encodeMultiple(rtpEnc *rtpsimpleaudio.Encoder, opusPkts [][]byte) ([]*rtp.Packet, error) {
|
||||
ret := make([]*rtp.Packet, len(opusPkts))
|
||||
pts := uint32(0)
|
||||
|
||||
for i, opusPkt := range opusPkts {
|
||||
var err error
|
||||
ret[i], err = rtpEnc.Encode(opusPkt)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
ret[i].Timestamp += pts
|
||||
|
||||
pts += uint32(opus.PacketDuration2(opusPkt))
|
||||
}
|
||||
return ret, nil
|
||||
}
|
||||
|
||||
func main() {
|
||||
// open a listener to receive RTP/Opus packets
|
||||
pc, err := net.ListenPacket("udp", "localhost:9000")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
defer pc.Close()
|
||||
|
||||
log.Println("Waiting for a RTP/Opus stream on UDP port 9000 - you can send one with GStreamer:\n" +
|
||||
"gst-launch-1.0 audiotestsrc freq=300 ! audioconvert ! audioresample ! audio/x-raw,rate=48000" +
|
||||
" ! opusenc ! rtpopuspay ! udpsink host=127.0.0.1 port=9000")
|
||||
|
||||
// wait for first packet
|
||||
buf := make([]byte, 2048)
|
||||
n, _, err := pc.ReadFrom(buf)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
log.Println("stream connected")
|
||||
|
||||
// create a description that contains a Opus format
|
||||
forma := &format.Opus{
|
||||
PayloadTyp: 96,
|
||||
ChannelCount: 1,
|
||||
}
|
||||
desc := &description.Session{
|
||||
Medias: []*description.Media{{
|
||||
Type: description.MediaTypeVideo,
|
||||
Formats: []format.Format{&format.Opus{
|
||||
PayloadTyp: 96,
|
||||
IsStereo: false,
|
||||
}},
|
||||
Type: description.MediaTypeAudio,
|
||||
Formats: []format.Format{forma},
|
||||
}},
|
||||
}
|
||||
|
||||
// connect to the server and start recording
|
||||
c := gortsplib.Client{}
|
||||
err = c.StartRecording("rtsp://myuser:mypass@localhost:8554/mystream", desc)
|
||||
err := c.StartRecording("rtsp://myuser:mypass@localhost:8554/mystream", desc)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
defer c.Close()
|
||||
|
||||
var pkt rtp.Packet
|
||||
for {
|
||||
// parse RTP packet
|
||||
err = pkt.Unmarshal(buf[:n])
|
||||
// setup LPCM -> Opus encoder
|
||||
opusEnc := &opusEncoder{}
|
||||
err = opusEnc.initialize()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// setup Opus -> RTP encoder
|
||||
rtpEnc, err := forma.CreateEncoder()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
start := time.Now()
|
||||
prevPTS := int64(0)
|
||||
|
||||
randomStart, err := randUint32()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// setup a ticker to sleep between writings
|
||||
ticker := time.NewTicker(100 * time.Millisecond)
|
||||
defer ticker.Stop()
|
||||
|
||||
for range ticker.C {
|
||||
// get current timestamp
|
||||
pts := multiplyAndDivide(int64(time.Since(start)), int64(forma.ClockRate()), int64(time.Second))
|
||||
|
||||
// generate dummy LPCM audio samples
|
||||
samples := createDummyAudio(pts, prevPTS)
|
||||
|
||||
// encode samples with Opus
|
||||
opusPkts, outPTS, err := opusEnc.encode(samples)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// route RTP packet to the server
|
||||
err = c.WritePacketRTP(desc.Medias[0], &pkt)
|
||||
// generate RTP packets from Opus packets
|
||||
pkts, err := encodeMultiple(rtpEnc, opusPkts)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// read another RTP packet from source
|
||||
n, _, err = pc.ReadFrom(buf)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
log.Printf("writing RTP packets with PTS=%d, packet count=%d", outPTS, len(pkts))
|
||||
|
||||
for _, pkt := range pkts {
|
||||
pkt.Timestamp += uint32(int64(randomStart) + outPTS)
|
||||
|
||||
err = c.WritePacketRTP(desc.Medias[0], pkt)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
prevPTS = pts
|
||||
}
|
||||
}
|
||||
|
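encodeMultiple() above gives each RTP packet in a batch a timestamp advanced by the duration, in 48 kHz ticks, of the Opus packets that precede it (obtained from opus.PacketDuration2). A sketch of that cumulative advance, with a fixed 20 ms (960-tick) packet duration assumed instead of being read from the packets themselves:

package main

import "fmt"

// timestampOffsets returns the per-packet timestamp advance applied by a
// loop like encodeMultiple(): each packet starts where the previous one's
// duration ended.
func timestampOffsets(packetCount int, ticksPerPacket uint32) []uint32 {
	offsets := make([]uint32, packetCount)
	var pts uint32
	for i := range offsets {
		offsets[i] = pts
		pts += ticksPerPacket
	}
	return offsets
}

func main() {
	// four hypothetical 20 ms Opus packets at a 48 kHz clock
	fmt.Println(timestampOffsets(4, 960)) // [0 960 1920 2880]
}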
153
examples/client-record-format-opus/opus_encoder.go
Normal file
@@ -0,0 +1,153 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"unsafe"
|
||||
)
|
||||
|
||||
// #cgo pkg-config: libavcodec libavutil
|
||||
// #include <libavcodec/avcodec.h>
|
||||
import "C"
|
||||
|
||||
func frameData(frame *C.AVFrame) **C.uint8_t {
|
||||
return (**C.uint8_t)(unsafe.Pointer(&frame.data[0]))
|
||||
}
|
||||
|
||||
func frameLineSize(frame *C.AVFrame) *C.int {
|
||||
return (*C.int)(unsafe.Pointer(&frame.linesize[0]))
|
||||
}
|
||||
|
||||
func switchEndianness16(samples []byte) []byte {
|
||||
ls := len(samples)
|
||||
for i := 0; i < ls; i += 2 {
|
||||
samples[i], samples[i+1] = samples[i+1], samples[i]
|
||||
}
|
||||
return samples
|
||||
}
|
||||
|
||||
// opusEncoder is a wrapper around FFmpeg's Opus encoder.
|
||||
type opusEncoder struct {
|
||||
Width int
|
||||
Height int
|
||||
FPS int
|
||||
|
||||
codecCtx *C.AVCodecContext
|
||||
frame *C.AVFrame
|
||||
pkt *C.AVPacket
|
||||
samplesBuffer []byte
|
||||
samplesBufferPTS int64
|
||||
}
|
||||
|
||||
// initialize initializes a opusEncoder.
|
||||
func (d *opusEncoder) initialize() error {
|
||||
codec := C.avcodec_find_encoder(C.AV_CODEC_ID_OPUS)
|
||||
if codec == nil {
|
||||
return fmt.Errorf("avcodec_find_encoder() failed")
|
||||
}
|
||||
|
||||
d.codecCtx = C.avcodec_alloc_context3(codec)
|
||||
if d.codecCtx == nil {
|
||||
return fmt.Errorf("avcodec_alloc_context3() failed")
|
||||
}
|
||||
|
||||
d.codecCtx.bit_rate = 64000
|
||||
d.codecCtx.sample_fmt = C.AV_SAMPLE_FMT_S16
|
||||
d.codecCtx.sample_rate = 48000
|
||||
d.codecCtx.channel_layout = C.AV_CH_LAYOUT_MONO
|
||||
d.codecCtx.channels = C.av_get_channel_layout_nb_channels(d.codecCtx.channel_layout)
|
||||
|
||||
res := C.avcodec_open2(d.codecCtx, codec, nil)
|
||||
if res < 0 {
|
||||
C.avcodec_close(d.codecCtx)
|
||||
return fmt.Errorf("avcodec_open2() failed")
|
||||
}
|
||||
|
||||
d.frame = C.av_frame_alloc()
|
||||
if d.frame == nil {
|
||||
C.avcodec_close(d.codecCtx)
|
||||
return fmt.Errorf("av_frame_alloc() failed")
|
||||
}
|
||||
|
||||
d.frame.nb_samples = d.codecCtx.frame_size
|
||||
d.frame.format = (C.int)(d.codecCtx.sample_fmt)
|
||||
d.frame.channel_layout = d.codecCtx.channel_layout
|
||||
|
||||
res = C.av_frame_get_buffer(d.frame, 0)
|
||||
if res < 0 {
|
||||
C.av_frame_free(&d.frame)
|
||||
C.avcodec_close(d.codecCtx)
|
||||
return fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
d.pkt = C.av_packet_alloc()
|
||||
if d.pkt == nil {
|
||||
C.av_frame_free(&d.frame)
|
||||
C.avcodec_close(d.codecCtx)
|
||||
return fmt.Errorf("av_packet_alloc() failed")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// close closes the encoder.
|
||||
func (d *opusEncoder) close() {
|
||||
C.av_packet_free(&d.pkt)
|
||||
C.av_frame_free(&d.frame)
|
||||
C.avcodec_close(d.codecCtx)
|
||||
}
|
||||
|
||||
// encode encodes LPCM samples into Opus packets.
|
||||
func (d *opusEncoder) encode(samples []byte) ([][]byte, int64, error) {
|
||||
// convert from big-endian to little-endian
|
||||
samples = switchEndianness16(samples)
|
||||
|
||||
// put samples into an internal buffer
|
||||
d.samplesBuffer = append(d.samplesBuffer, samples...)
|
||||
|
||||
// split buffer into AVFrames
|
||||
requiredSampleSize := (int)(d.codecCtx.frame_size) * 2
|
||||
frameCount := len(d.samplesBuffer) / requiredSampleSize
|
||||
if frameCount == 0 {
|
||||
return nil, 0, fmt.Errorf("sample buffer is not filled enough")
|
||||
}
|
||||
|
||||
ret := make([][]byte, frameCount)
|
||||
var pts int64
|
||||
|
||||
for i := 0; i < frameCount; i++ {
|
||||
samples = d.samplesBuffer[:requiredSampleSize]
|
||||
d.samplesBuffer = d.samplesBuffer[requiredSampleSize:]
|
||||
|
||||
samplePTS := d.samplesBufferPTS
|
||||
d.samplesBufferPTS += int64(len(samples) / 2)
|
||||
|
||||
// pass samples pointer to frame
|
||||
d.frame.data[0] = (*C.uint8_t)(&samples[0])
|
||||
|
||||
// send frame to the encoder
|
||||
d.frame.pts = (C.int64_t)(samplePTS)
|
||||
res := C.avcodec_send_frame(d.codecCtx, d.frame)
|
||||
if res < 0 {
|
||||
return nil, 0, fmt.Errorf("avcodec_send_frame() failed")
|
||||
}
|
||||
|
||||
// wait for result
|
||||
res = C.avcodec_receive_packet(d.codecCtx, d.pkt)
|
||||
if res < 0 {
|
||||
return nil, 0, fmt.Errorf("avcodec_receive_packet() failed")
|
||||
}
|
||||
|
||||
// perform a deep copy of the data before unreferencing the packet
|
||||
data := C.GoBytes(unsafe.Pointer(d.pkt.data), d.pkt.size)
|
||||
|
||||
if i == 0 {
|
||||
pts = (int64)(d.pkt.pts)
|
||||
}
|
||||
|
||||
C.av_packet_unref(d.pkt)
|
||||
|
||||
ret[i] = data
|
||||
}
|
||||
|
||||
return ret, pts, nil
|
||||
}
|
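switchEndianness16() above converts the dummy generator's big-endian samples into the little-endian layout the FFmpeg encoder expects by swapping byte pairs in place. A small standalone check of that equivalence against encoding/binary (the two 16-bit sample values are arbitrary):

package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
)

// switchEndianness16 mirrors the helper above: it swaps the two bytes of
// every 16-bit sample in place, turning S16BE data into S16LE (or back).
func switchEndianness16(samples []byte) []byte {
	for i := 0; i < len(samples); i += 2 {
		samples[i], samples[i+1] = samples[i+1], samples[i]
	}
	return samples
}

func main() {
	// two hypothetical samples written big-endian, as the dummy generator does
	src := []byte{0x12, 0x34, 0xAB, 0xCD}

	// the same samples written little-endian with encoding/binary
	var le bytes.Buffer
	for _, v := range []uint16{0x1234, 0xABCD} {
		binary.Write(&le, binary.LittleEndian, v)
	}

	fmt.Println(bytes.Equal(switchEndianness16(src), le.Bytes())) // true
}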
31
examples/client-record-format-vp8/dummy_image.go
Normal file
@@ -0,0 +1,31 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"image"
|
||||
"image/color"
|
||||
)
|
||||
|
||||
var dummyImageCount = 0
|
||||
|
||||
func createDummyImage() *image.RGBA {
|
||||
img := image.NewRGBA(image.Rect(0, 0, 640, 480))
|
||||
|
||||
var cl color.RGBA
|
||||
switch dummyImageCount {
|
||||
case 0:
|
||||
cl = color.RGBA{255, 0, 0, 0}
|
||||
case 1:
|
||||
cl = color.RGBA{0, 255, 0, 0}
|
||||
case 2:
|
||||
cl = color.RGBA{0, 0, 255, 0}
|
||||
}
|
||||
dummyImageCount = (dummyImageCount + 1) % 3
|
||||
|
||||
for y := 0; y < img.Rect.Dy(); y++ {
|
||||
for x := 0; x < img.Rect.Dx(); x++ {
|
||||
img.SetRGBA(x, y, cl)
|
||||
}
|
||||
}
|
||||
|
||||
return img
|
||||
}
|
@@ -4,8 +4,6 @@ package main
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"image"
|
||||
"image/color"
|
||||
"log"
|
||||
"time"
|
||||
|
||||
@@ -22,7 +20,7 @@ import (
|
||||
// 5. write RTP packets to the server
|
||||
|
||||
// This example requires the FFmpeg libraries, that can be installed with this command:
|
||||
// apt install -y libavformat-dev libswscale-dev gcc pkg-config
|
||||
// apt install -y libavcodec-dev libswscale-dev gcc pkg-config
|
||||
|
||||
func multiplyAndDivide(v, m, d int64) int64 {
|
||||
secs := v / d
|
||||
@@ -39,28 +37,6 @@ func randUint32() (uint32, error) {
|
||||
return uint32(b[0])<<24 | uint32(b[1])<<16 | uint32(b[2])<<8 | uint32(b[3]), nil
|
||||
}
|
||||
|
||||
func createDummyImage(i int) *image.RGBA {
|
||||
img := image.NewRGBA(image.Rect(0, 0, 640, 480))
|
||||
|
||||
var cl color.RGBA
|
||||
switch i {
|
||||
case 0:
|
||||
cl = color.RGBA{255, 0, 0, 0}
|
||||
case 1:
|
||||
cl = color.RGBA{0, 255, 0, 0}
|
||||
case 2:
|
||||
cl = color.RGBA{0, 0, 255, 0}
|
||||
}
|
||||
|
||||
for y := 0; y < img.Rect.Dy(); y++ {
|
||||
for x := 0; x < img.Rect.Dx(); x++ {
|
||||
img.SetRGBA(x, y, cl)
|
||||
}
|
||||
}
|
||||
|
||||
return img
|
||||
}
|
||||
|
||||
func main() {
|
||||
// create a stream description that contains a VP8 format
|
||||
forma := &format.VP8{
|
||||
@@ -110,38 +86,35 @@ func main() {
|
||||
ticker := time.NewTicker(200 * time.Millisecond)
|
||||
defer ticker.Stop()
|
||||
|
||||
i := 0
|
||||
|
||||
for range ticker.C {
|
||||
// create a dummy image
|
||||
img := createDummyImage(i)
|
||||
i = (i + 1) % 3
|
||||
|
||||
// get current timestamp
|
||||
pts := multiplyAndDivide(int64(time.Since(start)), int64(forma.ClockRate()), int64(time.Second))
|
||||
|
||||
// create a dummy image
|
||||
img := createDummyImage()
|
||||
|
||||
// encode the image with VP8
|
||||
au, pts, err := vp8enc.encode(img, pts)
|
||||
frame, pts, err := vp8enc.encode(img, pts)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// wait for a VP8 access unit
|
||||
if au == nil {
|
||||
// wait for a VP8 frame
|
||||
if frame == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
// generate RTP packets from the VP8 access unit
|
||||
pkts, err := rtpEnc.Encode(au)
|
||||
// generate RTP packets from the VP8 frame
|
||||
pkts, err := rtpEnc.Encode(frame)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
log.Printf("writing RTP packets with PTS=%d, au=%d, pkts=%d", pts, len(au), len(pkts))
|
||||
log.Printf("writing RTP packets with PTS=%d, frame size=%d, pkt count=%d", pts, len(frame), len(pkts))
|
||||
|
||||
// write RTP packets to the server
|
||||
for _, pkt := range pkts {
|
||||
pkt.Timestamp = uint32(int64(randomStart) + pts)
|
||||
pkt.Timestamp += uint32(int64(randomStart) + pts)
|
||||
|
||||
err = c.WritePacketRTP(desc.Medias[0], pkt)
|
||||
if err != nil {
|
||||
|
@@ -72,6 +72,8 @@ func (d *vp8Encoder) initialize() error {
|
||||
|
||||
res = C.av_frame_get_buffer(d.rgbaFrame, 0)
|
||||
if res < 0 {
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
C.avcodec_close(d.codecCtx)
|
||||
return fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
@@ -88,6 +90,9 @@ func (d *vp8Encoder) initialize() error {
|
||||
|
||||
res = C.av_frame_get_buffer(d.yuv420Frame, 0)
|
||||
if res < 0 {
|
||||
C.av_frame_free(&d.yuv420Frame)
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
C.avcodec_close(d.codecCtx)
|
||||
return fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
@@ -102,7 +107,6 @@ func (d *vp8Encoder) initialize() error {
|
||||
|
||||
d.pkt = C.av_packet_alloc()
|
||||
if d.pkt == nil {
|
||||
C.av_packet_free(&d.pkt)
|
||||
C.av_frame_free(&d.yuv420Frame)
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
C.avcodec_close(d.codecCtx)
|
||||
|
31
examples/client-record-format-vp9/dummy_image.go
Normal file
@@ -0,0 +1,31 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"image"
|
||||
"image/color"
|
||||
)
|
||||
|
||||
var dummyImageCount = 0
|
||||
|
||||
func createDummyImage() *image.RGBA {
|
||||
img := image.NewRGBA(image.Rect(0, 0, 640, 480))
|
||||
|
||||
var cl color.RGBA
|
||||
switch dummyImageCount {
|
||||
case 0:
|
||||
cl = color.RGBA{255, 0, 0, 0}
|
||||
case 1:
|
||||
cl = color.RGBA{0, 255, 0, 0}
|
||||
case 2:
|
||||
cl = color.RGBA{0, 0, 255, 0}
|
||||
}
|
||||
dummyImageCount = (dummyImageCount + 1) % 3
|
||||
|
||||
for y := 0; y < img.Rect.Dy(); y++ {
|
||||
for x := 0; x < img.Rect.Dx(); x++ {
|
||||
img.SetRGBA(x, y, cl)
|
||||
}
|
||||
}
|
||||
|
||||
return img
|
||||
}
|
@@ -4,8 +4,6 @@ package main
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"image"
|
||||
"image/color"
|
||||
"log"
|
||||
"time"
|
||||
|
||||
@@ -22,7 +20,7 @@ import (
|
||||
// 5. write RTP packets to the server
|
||||
|
||||
// This example requires the FFmpeg libraries, that can be installed with this command:
|
||||
// apt install -y libavformat-dev libswscale-dev gcc pkg-config
|
||||
// apt install -y libavcodec-dev libswscale-dev gcc pkg-config
|
||||
|
||||
func multiplyAndDivide(v, m, d int64) int64 {
|
||||
secs := v / d
|
||||
@@ -39,28 +37,6 @@ func randUint32() (uint32, error) {
|
||||
return uint32(b[0])<<24 | uint32(b[1])<<16 | uint32(b[2])<<8 | uint32(b[3]), nil
|
||||
}
|
||||
|
||||
func createDummyImage(i int) *image.RGBA {
|
||||
img := image.NewRGBA(image.Rect(0, 0, 640, 480))
|
||||
|
||||
var cl color.RGBA
|
||||
switch i {
|
||||
case 0:
|
||||
cl = color.RGBA{255, 0, 0, 0}
|
||||
case 1:
|
||||
cl = color.RGBA{0, 255, 0, 0}
|
||||
case 2:
|
||||
cl = color.RGBA{0, 0, 255, 0}
|
||||
}
|
||||
|
||||
for y := 0; y < img.Rect.Dy(); y++ {
|
||||
for x := 0; x < img.Rect.Dx(); x++ {
|
||||
img.SetRGBA(x, y, cl)
|
||||
}
|
||||
}
|
||||
|
||||
return img
|
||||
}
|
||||
|
||||
func main() {
|
||||
// create a stream description that contains a VP9 format
|
||||
forma := &format.VP9{
|
||||
@@ -110,38 +86,35 @@ func main() {
|
||||
ticker := time.NewTicker(200 * time.Millisecond)
|
||||
defer ticker.Stop()
|
||||
|
||||
i := 0
|
||||
|
||||
for range ticker.C {
|
||||
// create a dummy image
|
||||
img := createDummyImage(i)
|
||||
i = (i + 1) % 3
|
||||
|
||||
// get current timestamp
|
||||
pts := multiplyAndDivide(int64(time.Since(start)), int64(forma.ClockRate()), int64(time.Second))
|
||||
|
||||
// create a dummy image
|
||||
img := createDummyImage()
|
||||
|
||||
// encode the image with VP9
|
||||
au, pts, err := vp9enc.encode(img, pts)
|
||||
frame, pts, err := vp9enc.encode(img, pts)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// wait for a VP9 access unit
|
||||
if au == nil {
|
||||
// wait for a VP9 frame
|
||||
if frame == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
// generate RTP packets from the VP9 access unit
|
||||
pkts, err := rtpEnc.Encode(au)
|
||||
// generate RTP packets from the VP9 frame
|
||||
pkts, err := rtpEnc.Encode(frame)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
log.Printf("writing RTP packets with PTS=%d, au=%d, pkts=%d", pts, len(au), len(pkts))
|
||||
log.Printf("writing RTP packets with PTS=%d, frame size=%d, pkt count=%d", pts, len(frame), len(pkts))
|
||||
|
||||
// write RTP packets to the server
|
||||
for _, pkt := range pkts {
|
||||
pkt.Timestamp = uint32(int64(randomStart) + pts)
|
||||
pkt.Timestamp += uint32(int64(randomStart) + pts)
|
||||
|
||||
err = c.WritePacketRTP(desc.Medias[0], pkt)
|
||||
if err != nil {
|
||||
|
@@ -72,6 +72,8 @@ func (d *vp9Encoder) initialize() error {
|
||||
|
||||
res = C.av_frame_get_buffer(d.rgbaFrame, 0)
|
||||
if res < 0 {
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
C.avcodec_close(d.codecCtx)
|
||||
return fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
@@ -88,6 +90,9 @@ func (d *vp9Encoder) initialize() error {
|
||||
|
||||
res = C.av_frame_get_buffer(d.yuv420Frame, 0)
|
||||
if res < 0 {
|
||||
C.av_frame_free(&d.yuv420Frame)
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
C.avcodec_close(d.codecCtx)
|
||||
return fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
@@ -102,7 +107,6 @@ func (d *vp9Encoder) initialize() error {
|
||||
|
||||
d.pkt = C.av_packet_alloc()
|
||||
if d.pkt == nil {
|
||||
C.av_packet_free(&d.pkt)
|
||||
C.av_frame_free(&d.yuv420Frame)
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
C.avcodec_close(d.codecCtx)
|
||||
|
31
examples/client-record-options/dummy_image.go
Normal file
@@ -0,0 +1,31 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"image"
|
||||
"image/color"
|
||||
)
|
||||
|
||||
var dummyImageCount = 0
|
||||
|
||||
func createDummyImage() *image.RGBA {
|
||||
img := image.NewRGBA(image.Rect(0, 0, 640, 480))
|
||||
|
||||
var cl color.RGBA
|
||||
switch dummyImageCount {
|
||||
case 0:
|
||||
cl = color.RGBA{255, 0, 0, 0}
|
||||
case 1:
|
||||
cl = color.RGBA{0, 255, 0, 0}
|
||||
case 2:
|
||||
cl = color.RGBA{0, 0, 255, 0}
|
||||
}
|
||||
dummyImageCount = (dummyImageCount + 1) % 3
|
||||
|
||||
for y := 0; y < img.Rect.Dy(); y++ {
|
||||
for x := 0; x < img.Rect.Dx(); x++ {
|
||||
img.SetRGBA(x, y, cl)
|
||||
}
|
||||
}
|
||||
|
||||
return img
|
||||
}
|
@@ -86,6 +86,8 @@ func (d *h264Encoder) initialize() error {
|
||||
|
||||
res = C.av_frame_get_buffer(d.rgbaFrame, 0)
|
||||
if res < 0 {
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
C.avcodec_close(d.codecCtx)
|
||||
return fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
@@ -102,6 +104,9 @@ func (d *h264Encoder) initialize() error {
|
||||
|
||||
res = C.av_frame_get_buffer(d.yuv420Frame, 0)
|
||||
if res < 0 {
|
||||
C.av_frame_free(&d.yuv420Frame)
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
C.avcodec_close(d.codecCtx)
|
||||
return fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
@@ -116,7 +121,6 @@ func (d *h264Encoder) initialize() error {
|
||||
|
||||
d.pkt = C.av_packet_alloc()
|
||||
if d.pkt == nil {
|
||||
C.av_packet_free(&d.pkt)
|
||||
C.av_frame_free(&d.yuv420Frame)
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
C.avcodec_close(d.codecCtx)
|
||||
|
@@ -4,8 +4,6 @@ package main
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"image"
|
||||
"image/color"
|
||||
"log"
|
||||
"time"
|
||||
|
||||
@@ -23,7 +21,7 @@ import (
|
||||
// 6. write RTP packets to the server
|
||||
|
||||
// This example requires the FFmpeg libraries, that can be installed with this command:
|
||||
// apt install -y libavformat-dev libswscale-dev gcc pkg-config
|
||||
// apt install -y libavcodec-dev libswscale-dev gcc pkg-config
|
||||
|
||||
func multiplyAndDivide(v, m, d int64) int64 {
|
||||
secs := v / d
|
||||
@@ -40,27 +38,6 @@ func randUint32() (uint32, error) {
|
||||
return uint32(b[0])<<24 | uint32(b[1])<<16 | uint32(b[2])<<8 | uint32(b[3]), nil
|
||||
}
|
||||
|
||||
func createDummyImage(i int) *image.RGBA {
|
||||
img := image.NewRGBA(image.Rect(0, 0, 640, 480))
|
||||
|
||||
var cl color.RGBA
|
||||
switch i {
|
||||
case 0:
|
||||
cl = color.RGBA{255, 0, 0, 0}
|
||||
case 1:
|
||||
cl = color.RGBA{0, 255, 0, 0}
|
||||
case 2:
|
||||
cl = color.RGBA{0, 0, 255, 0}
|
||||
}
|
||||
|
||||
for y := 0; y < img.Rect.Dy(); y++ {
|
||||
for x := 0; x < img.Rect.Dx(); x++ {
|
||||
img.SetRGBA(x, y, cl)
|
||||
}
|
||||
}
|
||||
|
||||
return img
|
||||
}
|
||||
func main() {
|
||||
// create a stream description that contains a H264 format
|
||||
forma := &format.H264{
|
||||
@@ -120,16 +97,13 @@ func main() {
|
||||
ticker := time.NewTicker(200 * time.Millisecond)
|
||||
defer ticker.Stop()
|
||||
|
||||
i := 0
|
||||
|
||||
for range ticker.C {
|
||||
// create a dummy image
|
||||
img := createDummyImage(i)
|
||||
i = (i + 1) % 3
|
||||
|
||||
// get current timestamp
|
||||
pts := multiplyAndDivide(int64(time.Since(start)), int64(forma.ClockRate()), int64(time.Second))
|
||||
|
||||
// create a dummy image
|
||||
img := createDummyImage()
|
||||
|
||||
// encode the image with H264
|
||||
au, pts, err := h264enc.encode(img, pts)
|
||||
if err != nil {
|
||||
@@ -151,7 +125,7 @@ func main() {
|
||||
|
||||
// write RTP packets to the server
|
||||
for _, pkt := range pkts {
|
||||
pkt.Timestamp = uint32(int64(randomStart) + pts)
|
||||
pkt.Timestamp += uint32(int64(randomStart) + pts)
|
||||
|
||||
err = c.WritePacketRTP(desc.Medias[0], pkt)
|
||||
if err != nil {
|
||||
|
31
examples/client-record-pause/dummy_image.go
Normal file
@@ -0,0 +1,31 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"image"
|
||||
"image/color"
|
||||
)
|
||||
|
||||
var dummyImageCount = 0
|
||||
|
||||
func createDummyImage() *image.RGBA {
|
||||
img := image.NewRGBA(image.Rect(0, 0, 640, 480))
|
||||
|
||||
var cl color.RGBA
|
||||
switch dummyImageCount {
|
||||
case 0:
|
||||
cl = color.RGBA{255, 0, 0, 0}
|
||||
case 1:
|
||||
cl = color.RGBA{0, 255, 0, 0}
|
||||
case 2:
|
||||
cl = color.RGBA{0, 0, 255, 0}
|
||||
}
|
||||
dummyImageCount = (dummyImageCount + 1) % 3
|
||||
|
||||
for y := 0; y < img.Rect.Dy(); y++ {
|
||||
for x := 0; x < img.Rect.Dx(); x++ {
|
||||
img.SetRGBA(x, y, cl)
|
||||
}
|
||||
}
|
||||
|
||||
return img
|
||||
}
|
@@ -86,6 +86,8 @@ func (d *h264Encoder) initialize() error {
|
||||
|
||||
res = C.av_frame_get_buffer(d.rgbaFrame, 0)
|
||||
if res < 0 {
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
C.avcodec_close(d.codecCtx)
|
||||
return fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
@@ -102,6 +104,9 @@ func (d *h264Encoder) initialize() error {
|
||||
|
||||
res = C.av_frame_get_buffer(d.yuv420Frame, 0)
|
||||
if res < 0 {
|
||||
C.av_frame_free(&d.yuv420Frame)
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
C.avcodec_close(d.codecCtx)
|
||||
return fmt.Errorf("av_frame_get_buffer() failed")
|
||||
}
|
||||
|
||||
@@ -116,7 +121,6 @@ func (d *h264Encoder) initialize() error {
|
||||
|
||||
d.pkt = C.av_packet_alloc()
|
||||
if d.pkt == nil {
|
||||
C.av_packet_free(&d.pkt)
|
||||
C.av_frame_free(&d.yuv420Frame)
|
||||
C.av_frame_free(&d.rgbaFrame)
|
||||
C.avcodec_close(d.codecCtx)
|
||||
|
@@ -4,8 +4,6 @@ package main
|
||||
|
||||
import (
|
||||
"crypto/rand"
|
||||
"image"
|
||||
"image/color"
|
||||
"log"
|
||||
"time"
|
||||
|
||||
@@ -24,7 +22,7 @@ import (
|
||||
// 7. repeat
|
||||
|
||||
// This example requires the FFmpeg libraries, that can be installed with this command:
|
||||
// apt install -y libavformat-dev libswscale-dev gcc pkg-config
|
||||
// apt install -y libavcodec-dev libswscale-dev gcc pkg-config
|
||||
|
||||
func multiplyAndDivide(v, m, d int64) int64 {
|
||||
secs := v / d
|
||||
@@ -41,28 +39,6 @@ func randUint32() (uint32, error) {
|
||||
return uint32(b[0])<<24 | uint32(b[1])<<16 | uint32(b[2])<<8 | uint32(b[3]), nil
|
||||
}
|
||||
|
||||
func createDummyImage(i int) *image.RGBA {
|
||||
img := image.NewRGBA(image.Rect(0, 0, 640, 480))
|
||||
|
||||
var cl color.RGBA
|
||||
switch i {
|
||||
case 0:
|
||||
cl = color.RGBA{255, 0, 0, 0}
|
||||
case 1:
|
||||
cl = color.RGBA{0, 255, 0, 0}
|
||||
case 2:
|
||||
cl = color.RGBA{0, 0, 255, 0}
|
||||
}
|
||||
|
||||
for y := 0; y < img.Rect.Dy(); y++ {
|
||||
for x := 0; x < img.Rect.Dx(); x++ {
|
||||
img.SetRGBA(x, y, cl)
|
||||
}
|
||||
}
|
||||
|
||||
return img
|
||||
}
|
||||
|
||||
func main() {
|
||||
// create a stream description that contains a H264 format
|
||||
forma := &format.H264{
|
||||
@@ -114,16 +90,13 @@ func main() {
|
||||
ticker := time.NewTicker(200 * time.Millisecond)
|
||||
defer ticker.Stop()
|
||||
|
||||
i := 0
|
||||
|
||||
for range ticker.C {
|
||||
// create a dummy image
|
||||
img := createDummyImage(i)
|
||||
i = (i + 1) % 3
|
||||
|
||||
// get current timestamp
|
||||
pts := multiplyAndDivide(int64(time.Since(start)), int64(forma.ClockRate()), int64(time.Second))
|
||||
|
||||
// create a dummy image
|
||||
img := createDummyImage()
|
||||
|
||||
// encode the image with H264
|
||||
au, pts, err := h264enc.encode(img, pts)
|
||||
if err != nil {
|
||||
@@ -145,7 +118,7 @@ func main() {
|
||||
|
||||
// write RTP packets to the server
|
||||
for _, pkt := range pkts {
|
||||
pkt.Timestamp = uint32(int64(randomStart) + pts)
|
||||
pkt.Timestamp += uint32(int64(randomStart) + pts)
|
||||
|
||||
err = c.WritePacketRTP(desc.Medias[0], pkt)
|
||||
if err != nil {
|
||||
|
2
go.mod
2
go.mod
@@ -3,7 +3,7 @@ module github.com/bluenviron/gortsplib/v4
|
||||
go 1.21.0
|
||||
|
||||
require (
|
||||
github.com/bluenviron/mediacommon/v2 v2.0.1-0.20250219181023-5dae4feddd9c
|
||||
github.com/bluenviron/mediacommon/v2 v2.0.1-0.20250222132106-205c4f7f3850
|
||||
github.com/google/uuid v1.6.0
|
||||
github.com/pion/rtcp v1.2.15
|
||||
github.com/pion/rtp v1.8.11
|
||||
|
4
go.sum
@@ -2,8 +2,8 @@ github.com/asticode/go-astikit v0.30.0 h1:DkBkRQRIxYcknlaU7W7ksNfn4gMFsB0tqMJflx
|
||||
github.com/asticode/go-astikit v0.30.0/go.mod h1:h4ly7idim1tNhaVkdVBeXQZEE3L0xblP7fCWbgwipF0=
|
||||
github.com/asticode/go-astits v1.13.0 h1:XOgkaadfZODnyZRR5Y0/DWkA9vrkLLPLeeOvDwfKZ1c=
|
||||
github.com/asticode/go-astits v1.13.0/go.mod h1:QSHmknZ51pf6KJdHKZHJTLlMegIrhega3LPWz3ND/iI=
|
||||
github.com/bluenviron/mediacommon/v2 v2.0.1-0.20250219181023-5dae4feddd9c h1:Piva4HXk7CRxCqsGpb+SfkZX0M45UeMsNHlikgjn2Ug=
|
||||
github.com/bluenviron/mediacommon/v2 v2.0.1-0.20250219181023-5dae4feddd9c/go.mod h1:iHEz1SFIet6zBwAQoh1a92vTQ3dV3LpVFbom6/SLz3k=
|
||||
github.com/bluenviron/mediacommon/v2 v2.0.1-0.20250222132106-205c4f7f3850 h1:HiwdV9G5MOale+ot3odl0oAt6f3SR0eczjefNCcQRa4=
|
||||
github.com/bluenviron/mediacommon/v2 v2.0.1-0.20250222132106-205c4f7f3850/go.mod h1:iHEz1SFIet6zBwAQoh1a92vTQ3dV3LpVFbom6/SLz3k=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
|