Compare commits

...

27 Commits

Author SHA1 Message Date
Lukas Herman
f4a4edcabd Update codec.Reader interface to return byte slice 2020-10-29 04:48:47 +00:00
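The new shape is visible in the pkg/codec ReadCloser hunk further down: Read no longer fills a caller-supplied buffer and returns a count, it hands back one encoded frame as a byte slice. A minimal sketch of an implementation of that shape, using a hypothetical in-memory frameSource rather than any real encoder:

```go
package main

import (
	"fmt"
	"io"
)

// frameSource is a hypothetical stand-in for an encoder that yields one
// encoded frame per Read call, matching the new byte-slice interface.
type frameSource struct{ frames [][]byte }

// Old shape: Read(p []byte) (int, error) forced the caller to size p and
// the encoder to stash leftovers. New shape: return one whole frame.
func (s *frameSource) Read() ([]byte, error) {
	if len(s.frames) == 0 {
		return nil, io.EOF
	}
	frame := s.frames[0]
	s.frames = s.frames[1:]
	return frame, nil
}

func main() {
	src := &frameSource{frames: [][]byte{{0x01, 0x02}, {0x03}}}
	for {
		frame, err := src.Read()
		if err == io.EOF {
			break
		}
		fmt.Printf("got a %d-byte frame\n", len(frame))
	}
}
```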
Lukas Herman
c8547c4597 Rename simple -> webrtc 2020-10-27 21:13:07 -07:00
Lukas Herman
21bb12dd6b Reduce examples to increase maintainability
Changes:
  * Remove facedetection, rtp-send, and screenshare examples
  * Rename simple to webrtc
2020-10-27 21:10:50 -07:00
Lukas Herman
fd43659fed Remove code owners 2020-10-27 21:09:19 -07:00
Lukas Herman
82f33cb572 Skip mmal package in CI 2020-10-27 21:08:12 -07:00
Lukas Herman
4f9822349a Add mmal hardware video encoder support
Using mmal significantly boosts video fps by offloading the encoding
work from CPU to GPU. On a Raspberry Pi 3, libx264 only gives 480p18,
whereas mmal can give 720p30.
2020-10-27 21:08:12 -07:00
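A minimal sketch of opting into the new hardware encoder, using only the mmal.NewParams constructor and BaseParams.BitRate field added in pkg/codec/mmal/params.go below; wiring the builder into GetUserMedia follows the same pattern as the vpx-based examples removed in this compare, and building this requires a Raspberry Pi with the mmal headers installed:

```go
package main

import (
	"fmt"

	"github.com/pion/mediadevices/pkg/codec"
	"github.com/pion/mediadevices/pkg/codec/mmal"
)

func main() {
	mmalParams, err := mmal.NewParams()
	if err != nil {
		panic(err)
	}
	mmalParams.BitRate = 300000 // bits per second

	// Any type exposing RTPCodec and BuildVideoEncoder satisfies
	// codec.VideoEncoderBuilder, so *mmal.Params plugs in directly.
	builders := []codec.VideoEncoderBuilder{&mmalParams}
	fmt.Println("configured", builders[0].RTPCodec().Name, "encoder")
}
```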
Lukas Herman
16bcd0b7dd Fix step-wise resolutions in Linux camera
Some cameras report their supported resolutions as a step-wise range. Instead of capturing only the highest resolution, the fix uses the step-wise range to determine whether the hardcoded standard resolutions can be supported:
https://commons.wikimedia.org/wiki/File:Vector_Video_Standards2.svg.
In the future, we should use a custom data structure to capture resolutions outside of the listed standard ones.
2020-10-26 21:26:03 -07:00
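The idea can be illustrated with a small self-contained sketch; stepwiseRange and supports below are hypothetical stand-ins for the driver's actual V4L2 handling, not code from this commit:

```go
package main

import "fmt"

// stepwiseRange mirrors a V4L2 step-wise frame-size description:
// min/max bounds plus a step for width and height.
type stepwiseRange struct {
	minW, maxW, stepW int
	minH, maxH, stepH int
}

// supports reports whether a candidate resolution lies inside the range
// and on the step grid.
func (r stepwiseRange) supports(w, h int) bool {
	if w < r.minW || w > r.maxW || h < r.minH || h > r.maxH {
		return false
	}
	return (w-r.minW)%r.stepW == 0 && (h-r.minH)%r.stepH == 0
}

func main() {
	// A few of the hardcoded "standard" resolutions the commit refers to.
	standards := [][2]int{{640, 480}, {1280, 720}, {1920, 1080}}
	r := stepwiseRange{minW: 320, maxW: 1920, stepW: 8, minH: 240, maxH: 1080, stepH: 8}
	for _, s := range standards {
		fmt.Printf("%dx%d supported: %v\n", s[0], s[1], r.supports(s[0], s[1]))
	}
}
```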
Lukas Herman
2022a4b7f7 Fix discovery of some camera devices
/dev/v4l/by-path doesn't list all available devices. To make sure every available device is included, the device list is also complemented with /dev/video*.
2020-10-26 20:22:40 -07:00
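A simplified, self-contained sketch of the discovery idea described here; listVideoDevices is hypothetical, and the real camera driver's symlink resolution and deduplication details differ:

```go
package main

import (
	"fmt"
	"path/filepath"
)

// listVideoDevices starts from /dev/v4l/by-path, then complements the
// result with /dev/video* so devices missing from by-path still show up.
func listVideoDevices() ([]string, error) {
	seen := map[string]struct{}{}
	var devices []string
	for _, pattern := range []string{"/dev/v4l/by-path/*", "/dev/video*"} {
		matches, err := filepath.Glob(pattern)
		if err != nil {
			return nil, err
		}
		for _, m := range matches {
			// Resolve symlinks so the same device isn't listed twice.
			real, err := filepath.EvalSymlinks(m)
			if err != nil {
				continue
			}
			if _, ok := seen[real]; ok {
				continue
			}
			seen[real] = struct{}{}
			devices = append(devices, m)
		}
	}
	return devices, nil
}

func main() {
	devices, err := listVideoDevices()
	if err != nil {
		panic(err)
	}
	fmt.Println(devices)
}
```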
Renovate Bot
0b6549eb8f Update actions/setup-go action to v2
Generated by Renovate Bot
2020-10-27 10:26:29 +09:00
Renovate Bot
1b0a237438 Update github.com/jfreymuth/pulse commit hash to 1e525c4
Generated by Renovate Bot
2020-10-20 12:40:12 +09:00
Lukas Herman
36edbd9485 Add meta reader helper
Since the broadcaster has a ring buffer, we can take advantage of this
property to read the video/audio metadata without losing any data.
2020-10-15 22:28:37 -04:00
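A short usage sketch of the helper; detectCurrentVideoProp is the unexported function added in meta.go further down, so this would have to live inside the mediadevices package, and the trivial frame source here is only a stand-in:

```go
package mediadevices

import (
	"image"

	"github.com/pion/mediadevices/pkg/io/video"
	"github.com/pion/mediadevices/pkg/prop"
)

// exampleDetectProp shows how the helper is meant to be used: because the
// broadcaster keeps a ring buffer, this metadata read does not consume a
// frame that later readers would otherwise miss.
func exampleDetectProp() (prop.Media, error) {
	source := video.ReaderFunc(func() (image.Image, error) {
		return image.NewRGBA(image.Rect(0, 0, 640, 480)), nil
	})
	broadcaster := video.NewBroadcaster(source, nil)
	return detectCurrentVideoProp(broadcaster)
}
```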
Lukas Herman
eb689a3c79 Remove buffer in linux camera 2020-10-15 00:12:55 -04:00
Lukas Herman
e4b1b1aaba Fix inclusion of unsupported formats 2020-10-14 12:18:17 -04:00
Lukas Herman
0f5df05c16 Update lherman-cs/opus to remove libopusfile 2020-10-12 01:32:22 -04:00
Lukas Herman
9dcfaf1c1e Switch from type alias to embedded struct
An embedded struct provides more room for future compatibility
2020-10-11 23:32:58 -04:00
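The new form is visible in the pkg/codec hunk below, where RTPCodec becomes a struct embedding *webrtc.RTPCodec. A short sketch of why embedding leaves more room than an alias; the commented-out extra field is purely hypothetical and not part of this change:

```go
package codec

import "github.com/pion/webrtc/v2"

// With a type alias (`type RTPCodec = webrtc.RTPCodec`) this package could
// never attach its own methods or fields. The embedded struct, the form
// adopted in this commit, promotes all webrtc.RTPCodec fields while keeping
// an extension point open.
type RTPCodec struct {
	*webrtc.RTPCodec
	// PreferredMTU int // hypothetical future field, for illustration only
}

// Callers still reach the promoted webrtc fields directly, exactly as they
// would with an alias.
func describe(c *RTPCodec) (string, uint32) {
	return c.Name, c.ClockRate
}
```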
Lukas Herman
238f190e71 Use MediaDeviceInfo instead of webrtc.RTPCodecType
Changes:
  * Add unit tests for mediastream
  * Remove webrtc.RTPCodecType dependency in mediastream
  * Add Kind to Tracker interface
2020-10-11 01:29:59 -04:00
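A self-contained sketch of the new filtering model; the real MediaDeviceType constants and queryTracks live in the mediadevices package (see the mediadevices.go and mediastream.go hunks below), where the constants start at iota + 1 so the zero value can serve as the match-everything default:

```go
package main

import "fmt"

type MediaDeviceType int

const (
	trackTypeDefault MediaDeviceType = iota // zero value: no filter
	VideoInput
	AudioInput
	AudioOutput
)

// tracker is a stand-in for the Tracker interface, which now reports its
// own Kind instead of relying on webrtc.RTPCodecType.
type tracker struct{ kind MediaDeviceType }

func query(trackers []tracker, t MediaDeviceType) []tracker {
	var out []tracker
	for _, tr := range trackers {
		if tr.kind == t || t == trackTypeDefault {
			out = append(out, tr)
		}
	}
	return out
}

func main() {
	all := []tracker{{VideoInput}, {AudioInput}, {AudioInput}}
	fmt.Println(len(query(all, AudioInput)), "audio,", len(query(all, trackTypeDefault)), "total")
}
```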
Lukas Herman
0210ec6ca6 Add audio change detection 2020-10-10 23:47:35 -04:00
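A small sketch of consuming the change-detection reader; the audio.DetectChanges call shape is taken from its later use in meta.go, while wrapWithChangeLog itself is hypothetical:

```go
package example

import (
	"log"

	"github.com/pion/mediadevices/pkg/io/audio"
	"github.com/pion/mediadevices/pkg/prop"
)

// wrapWithChangeLog decorates an audio.Reader so a callback fires whenever
// the detected stream properties change (for example the channel count).
func wrapWithChangeLog(src audio.Reader) audio.Reader {
	return audio.DetectChanges(0, func(p prop.Media) {
		log.Printf("audio properties changed: %d channel(s)", p.ChannelCount)
	})(src)
}
```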
Lukas Herman
abdd96e6b2 Replace Name with RTPCodec in codec builder
Allowing users to implement RTPCodec gives them the freedom to provide
a custom encoder with a custom RTP payload.
2020-10-08 11:33:38 -04:00
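A sketch of what a third-party builder gains from the new method; Params here is hypothetical, but the RTPCodec construction mirrors the mockParams pattern in the test hunk below (start from a helper such as codec.NewRTPH264Codec and override fields like Name):

```go
package custom

import (
	"github.com/pion/mediadevices/pkg/codec"
	"github.com/pion/mediadevices/pkg/io/video"
	"github.com/pion/mediadevices/pkg/prop"
)

// Params is a hypothetical encoder builder. Under the old interface it
// could only report Name() string; RTPCodec() lets it describe the RTP
// payload (payload type, clock rate, payloader) it actually produces.
type Params struct{}

func (p *Params) RTPCodec() *codec.RTPCodec {
	rtpCodec := codec.NewRTPH264Codec(90000)
	rtpCodec.Name = "my-custom-h264" // custom name carried into SDP
	return rtpCodec
}

func (p *Params) BuildVideoEncoder(r video.Reader, m prop.Media) (codec.ReadCloser, error) {
	// A real implementation returns its encoder; omitted in this sketch.
	return nil, nil
}
```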
Lukas Herman
c9779e7f73 Add audio pull-based broadcast 2020-10-05 22:30:03 -04:00
Lukas Herman
5703fd7e4b Remove webrtc dependency in codec and its sub packages 2020-10-05 22:23:52 -04:00
Lukas Herman
db5d8f23bd Fix unstored audio
* Add unit test to check internal buffer integrity
* Fix unstored audio in internal buffer
2020-10-05 22:20:12 -04:00
Lukas Herman
d6ba28af8c Update README.md 2020-10-02 15:56:21 -04:00
Lukas Herman
09c2998408 Add code coverage report 2020-10-02 01:42:01 -04:00
Lukas Herman
d129e982c7 Add generic wave's Buffer 2020-10-02 01:35:41 -04:00
Lukas Herman
74986c010b Move current broadcast test to io
* Move core broadcast tests to io
* Add targeted tests for video.Broadcast
2020-10-02 01:33:21 -04:00
Renovate Bot
b8be865ff3 Update golang.org/x/sys commit hash to fdedc70
Generated by Renovate Bot
2020-10-02 01:13:49 -04:00
Lukas Herman
7aad89ef37 Fix example go module versioning 2020-10-01 21:06:02 -07:00
50 changed files with 1584 additions and 1049 deletions

View File

@@ -18,7 +18,7 @@ jobs:
- name: Checkout
uses: actions/checkout@v2
- name: Setup Go
uses: actions/setup-go@v1
uses: actions/setup-go@v2
with:
go-version: ${{ matrix.go }}
- name: Install dependencies
@@ -30,15 +30,17 @@ jobs:
libvpx-dev \
libx264-dev
- name: go vet
run: go vet -tags nolibopusfile ./...
run: go vet $(go list ./... | grep -v mmal)
- name: go build
run: go build -tags nolibopusfile ./...
run: go build $(go list ./... | grep -v mmal)
- name: go build without CGO
run: go build . pkg/...
env:
CGO_ENABLED: 0
- name: go test
run: go test -tags nolibopusfile ./... -v -race
run: go test -v -race -coverprofile=coverage.txt -covermode=atomic $(go list ./... | grep -v mmal)
- uses: codecov/codecov-action@v1
if: matrix.go == '1.15'
- name: go test without CGO
run: go test . pkg/... -v
env:
@@ -53,7 +55,7 @@ jobs:
- name: Checkout
uses: actions/checkout@v2
- name: Setup Go
uses: actions/setup-go@v1
uses: actions/setup-go@v2
with:
go-version: ${{ matrix.go }}
- name: Install dependencies
@@ -64,15 +66,15 @@ jobs:
libvpx \
x264
- name: go vet
run: go vet ./...
run: go vet $(go list ./... | grep -v mmal)
- name: go build
run: go build ./...
run: go build $(go list ./... | grep -v mmal)
- name: go build without CGO
run: go build . pkg/...
env:
CGO_ENABLED: 0
- name: go test
run: go test ./... -v -race
run: go test -v -race $(go list ./... | grep -v mmal)
- name: go test without CGO
run: go test . pkg/... -v
env:

View File

@@ -1 +0,0 @@
* @lherman-cs @at-wat

View File

@@ -1,6 +1,17 @@
# mediadevices
Go implementation of the [MediaDevices](https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices) API.
<h1 align="center">
<br>
Pion MediaDevices
<br>
</h1>
<h4 align="center">Go implementation of the <a href="https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices">MediaDevices</a> API</h4>
<p align="center">
<a href="https://pion.ly/slack"><img src="https://img.shields.io/badge/join-us%20on%20slack-gray.svg?longCache=true&logo=slack&colorB=brightgreen" alt="Slack Widget"></a>
<a href="https://github.com/pion/mediadevices/actions"><img src="https://github.com/pion/mediadevices/workflows/CI/badge.svg?branch=master" alt="Build status"></a>
<a href="https://pkg.go.dev/github.com/pion/mediadevices"><img src="https://godoc.org/github.com/pion/mediadevices?status.svg" alt="GoDoc"></a>
<a href="https://codecov.io/gh/pion/mediadevices"><img src="https://codecov.io/gh/pion/mediadevices/branch/master/graph/badge.svg" alt="Coverage Status"></a>
<a href="LICENSE"><img src="https://img.shields.io/badge/License-MIT-yellow.svg" alt="License: MIT"></a>
</p>
<br>
![](img/demo.gif)

codecov.yml (new file, 10 lines added)
View File

@@ -0,0 +1,10 @@
coverage:
status:
project:
default:
# Allow decreasing 2% of total coverage to avoid noise.
threshold: 2%
patch: off
ignore:
- "examples/*"

View File

@@ -1,29 +0,0 @@
## Instructions
### Download facedetection
```
go get github.com/pion/mediadevices/examples/facedetection
```
### Open example page
[jsfiddle.net](https://jsfiddle.net/gh/get/library/pure/pion/mediadevices/tree/master/examples/internal/jsfiddle/video) you should see two text-areas and a 'Start Session' button
### Run facedetection with your browsers SessionDescription as stdin
In the jsfiddle the top textarea is your browser, copy that and:
#### Linux
Run `echo $BROWSER_SDP | facedetection`
### Input facedetection's SessionDescription into your browser
Copy the text that `facedetection` just emitted and copy into second text area
### Hit 'Start Session' in jsfiddle, enjoy your video!
A video should start playing in your browser above the input boxes, and will continue playing until you close the application.
Congrats, you have used pion-WebRTC! Now start building something cool

View File

@@ -1,118 +0,0 @@
package main
import (
"image"
"image/color"
"image/draw"
"io/ioutil"
"log"
"github.com/disintegration/imaging"
pigo "github.com/esimov/pigo/core"
)
var (
cascade []byte
err error
classifier *pigo.Pigo
)
func imgToGrayscale(img image.Image) []uint8 {
bounds := img.Bounds()
flatten := bounds.Dy() * bounds.Dx()
grayImg := make([]uint8, flatten)
i := 0
for y := bounds.Min.Y; y < bounds.Max.Y; y++ {
for x := bounds.Min.X; x < bounds.Max.X; x++ {
pix := img.At(x, y)
grayPix := color.GrayModel.Convert(pix).(color.Gray)
grayImg[i] = grayPix.Y
i++
}
}
return grayImg
}
// clusterDetection runs Pigo face detector core methods
// and returns a cluster with the detected faces coordinates.
func clusterDetection(img image.Image) []pigo.Detection {
grayscale := imgToGrayscale(img)
bounds := img.Bounds()
cParams := pigo.CascadeParams{
MinSize: 100,
MaxSize: 600,
ShiftFactor: 0.15,
ScaleFactor: 1.1,
ImageParams: pigo.ImageParams{
Pixels: grayscale,
Rows: bounds.Dy(),
Cols: bounds.Dx(),
Dim: bounds.Dx(),
},
}
if len(cascade) == 0 {
cascade, err = ioutil.ReadFile("facefinder")
if err != nil {
log.Fatalf("Error reading the cascade file: %s", err)
}
p := pigo.NewPigo()
// Unpack the binary file. This will return the number of cascade trees,
// the tree depth, the threshold and the prediction from tree's leaf nodes.
classifier, err = p.Unpack(cascade)
if err != nil {
log.Fatalf("Error unpacking the cascade file: %s", err)
}
}
// Run the classifier over the obtained leaf nodes and return the detection results.
// The result contains quadruplets representing the row, column, scale and detection score.
dets := classifier.RunCascade(cParams, 0.0)
// Calculate the intersection over union (IoU) of two clusters.
dets = classifier.ClusterDetections(dets, 0)
return dets
}
func drawCircle(img draw.Image, x0, y0, r int, c color.Color) {
x, y, dx, dy := r-1, 0, 1, 1
err := dx - (r * 2)
for x > y {
img.Set(x0+x, y0+y, c)
img.Set(x0+y, y0+x, c)
img.Set(x0-y, y0+x, c)
img.Set(x0-x, y0+y, c)
img.Set(x0-x, y0-y, c)
img.Set(x0-y, y0-x, c)
img.Set(x0+y, y0-x, c)
img.Set(x0+x, y0-y, c)
if err <= 0 {
y++
err += dy
dy += 2
}
if err > 0 {
x--
dx += 2
err += dx - (r * 2)
}
}
}
func markFaces(img image.Image) image.Image {
nrgba := imaging.Clone(img)
dets := clusterDetection(img)
for _, det := range dets {
if det.Q < 5.0 {
continue
}
drawCircle(nrgba, det.Col, det.Row, det.Scale/2, color.Black)
}
return nrgba
}

Binary file not shown.

View File

@@ -1,119 +0,0 @@
package main
import (
"fmt"
"image"
"github.com/pion/mediadevices"
"github.com/pion/mediadevices/examples/internal/signal"
"github.com/pion/mediadevices/pkg/codec"
"github.com/pion/mediadevices/pkg/codec/vpx" // This is required to use VP8/VP9 video encoder
_ "github.com/pion/mediadevices/pkg/driver/camera" // This is required to register camera adapter
"github.com/pion/mediadevices/pkg/frame"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
"github.com/pion/webrtc/v2"
)
func markFacesTransformer(r video.Reader) video.Reader {
return video.ReaderFunc(func() (img image.Image, err error) {
img, err = r.Read()
if err != nil {
return
}
img = markFaces(img)
return
})
}
func main() {
config := webrtc.Configuration{
ICEServers: []webrtc.ICEServer{
{
URLs: []string{"stun:stun.l.google.com:19302"},
},
},
}
// Wait for the offer to be pasted
offer := webrtc.SessionDescription{}
signal.Decode(signal.MustReadStdin(), &offer)
// Create a new RTCPeerConnection
mediaEngine := webrtc.MediaEngine{}
if err := mediaEngine.PopulateFromSDP(offer); err != nil {
panic(err)
}
api := webrtc.NewAPI(webrtc.WithMediaEngine(mediaEngine))
peerConnection, err := api.NewPeerConnection(config)
if err != nil {
panic(err)
}
// Set the handler for ICE connection state
// This will notify you when the peer has connected/disconnected
peerConnection.OnICEConnectionStateChange(func(connectionState webrtc.ICEConnectionState) {
fmt.Printf("Connection State has changed %s \n", connectionState.String())
})
md := mediadevices.NewMediaDevices(peerConnection)
vp8Params, err := vpx.NewVP8Params()
if err != nil {
panic(err)
}
vp8Params.BitRate = 100000 // 100kbps
s, err := md.GetUserMedia(mediadevices.MediaStreamConstraints{
Video: func(c *mediadevices.MediaTrackConstraints) {
c.FrameFormat = prop.FrameFormatExact(frame.FormatI420) // most of the encoder accepts I420
c.Enabled = true
c.Width = prop.Int(640)
c.Height = prop.Int(480)
c.VideoTransform = markFacesTransformer
c.VideoEncoderBuilders = []codec.VideoEncoderBuilder{&vp8Params}
},
})
if err != nil {
panic(err)
}
for _, tracker := range s.GetTracks() {
t := tracker.Track()
tracker.OnEnded(func(err error) {
fmt.Printf("Track (ID: %s, Label: %s) ended with error: %v\n",
t.ID(), t.Label(), err)
})
_, err = peerConnection.AddTransceiverFromTrack(t,
webrtc.RtpTransceiverInit{
Direction: webrtc.RTPTransceiverDirectionSendonly,
},
)
if err != nil {
panic(err)
}
}
// Set the remote SessionDescription
err = peerConnection.SetRemoteDescription(offer)
if err != nil {
panic(err)
}
// Create an answer
answer, err := peerConnection.CreateAnswer(nil)
if err != nil {
panic(err)
}
// Sets the LocalDescription, and starts our UDP listeners
err = peerConnection.SetLocalDescription(answer)
if err != nil {
panic(err)
}
// Output the answer in base64 so we can paste it in browser
fmt.Println(signal.Encode(answer))
select {}
}

View File

@@ -2,8 +2,8 @@ module github.com/pion/mediadevices/examples
go 1.14
replace github.com/pion/mediadevices => ../
// Please don't commit require entries of examples.
// `git checkout master examples/go.mod` to revert this file.
require github.com/pion/mediadevices v0.0.0-00010101000000-000000000000
require github.com/pion/mediadevices v0.0.0
replace github.com/pion/mediadevices v0.0.0 => ../

View File

@@ -1,29 +0,0 @@
## Instructions
### Download rtp-send example
```
go get github.com/pion/mediadevices/examples/rtp-send
```
### Listen RTP
Install GStreamer and run:
```
gst-launch-1.0 udpsrc port=5000 caps=application/x-rtp,encode-name=VP8 \
! rtpvp8depay ! vp8dec ! videoconvert ! autovideosink
```
Or run VLC media plyer:
```
vlc ./vp8.sdp
```
### Run rtp-send
Run `rtp-send localhost:5000`
A video should start playing in your GStreamer or VLC window.
It's not WebRTC, but pure RTP.
Congrats, you have used pion-MediaDevices! Now start building something cool

View File

@@ -1,120 +0,0 @@
package main
import (
"fmt"
"net"
"os"
"github.com/pion/mediadevices"
"github.com/pion/mediadevices/pkg/codec"
"github.com/pion/mediadevices/pkg/codec/vpx" // This is required to use VP8/VP9 video encoder
_ "github.com/pion/mediadevices/pkg/driver/camera" // This is required to register camera adapter
"github.com/pion/mediadevices/pkg/frame"
"github.com/pion/mediadevices/pkg/prop"
"github.com/pion/rtp"
"github.com/pion/webrtc/v2"
"github.com/pion/webrtc/v2/pkg/media"
)
const (
mtu = 1000
)
func main() {
if len(os.Args) != 2 {
fmt.Printf("usage: %s host:port\n", os.Args[0])
return
}
md := mediadevices.NewMediaDevicesFromCodecs(
map[webrtc.RTPCodecType][]*webrtc.RTPCodec{
webrtc.RTPCodecTypeVideo: []*webrtc.RTPCodec{
webrtc.NewRTPVP8Codec(100, 90000),
},
},
mediadevices.WithTrackGenerator(
func(_ uint8, _ uint32, id, _ string, codec *webrtc.RTPCodec) (
mediadevices.LocalTrack, error,
) {
return newTrack(codec, id, os.Args[1]), nil
},
),
)
vp8Params, err := vpx.NewVP8Params()
if err != nil {
panic(err)
}
vp8Params.BitRate = 100000 // 100kbps
_, err = md.GetUserMedia(mediadevices.MediaStreamConstraints{
Video: func(c *mediadevices.MediaTrackConstraints) {
c.FrameFormat = prop.FrameFormat(frame.FormatYUY2)
c.Enabled = true
c.Width = prop.Int(640)
c.Height = prop.Int(480)
c.VideoEncoderBuilders = []codec.VideoEncoderBuilder{&vp8Params}
},
})
if err != nil {
panic(err)
}
select {}
}
type track struct {
codec *webrtc.RTPCodec
packetizer rtp.Packetizer
id string
conn net.Conn
}
func newTrack(codec *webrtc.RTPCodec, id, dest string) *track {
addr, err := net.ResolveUDPAddr("udp", dest)
if err != nil {
panic(err)
}
conn, err := net.DialUDP("udp", nil, addr)
if err != nil {
panic(err)
}
return &track{
codec: codec,
packetizer: rtp.NewPacketizer(
mtu,
codec.PayloadType,
1,
codec.Payloader,
rtp.NewRandomSequencer(),
codec.ClockRate,
),
id: id,
conn: conn,
}
}
func (t *track) WriteSample(s media.Sample) error {
buf := make([]byte, mtu)
pkts := t.packetizer.Packetize(s.Data, s.Samples)
for _, p := range pkts {
n, err := p.MarshalTo(buf)
if err != nil {
panic(err)
}
_, _ = t.conn.Write(buf[:n])
}
return nil
}
func (t *track) Codec() *webrtc.RTPCodec {
return t.codec
}
func (t *track) ID() string {
return t.id
}
func (t *track) Kind() webrtc.RTPCodecType {
return t.codec.Type
}

View File

@@ -1,9 +0,0 @@
v=0
o=- 1234567890 1234567890 IN IP4 0.0.0.0
s=RTP-Send Example
i=Example
c=IN IP4 0.0.0.0
t=0 0
a=recvonly
m=video 5000 RTP/AVP 100
a=rtpmap:100 VP8/90000

View File

@@ -1,29 +0,0 @@
## Instructions
### Download screenshare
```
go get github.com/pion/mediadevices/examples/screenshare
```
### Open example page
[jsfiddle.net](https://jsfiddle.net/gh/get/library/pure/pion/mediadevices/tree/master/examples/internal/jsfiddle/audio-and-video) you should see two text-areas and a 'Start Session' button
### Run screenshare with your browsers SessionDescription as stdin
In the jsfiddle the top textarea is your browser, copy that and:
#### Linux
Run `echo $BROWSER_SDP | screenshare`
### Input screenshare's SessionDescription into your browser
Copy the text that `screenshare` just emitted and copy into second text area
### Hit 'Start Session' in jsfiddle, enjoy your video!
A video should start playing in your browser above the input boxes, and will continue playing until you close the application.
Congrats, you have used pion-WebRTC! Now start building something cool

View File

@@ -1,101 +0,0 @@
package main
import (
"fmt"
"github.com/pion/mediadevices"
"github.com/pion/mediadevices/examples/internal/signal"
"github.com/pion/mediadevices/pkg/codec"
"github.com/pion/mediadevices/pkg/codec/vpx" // This is required to use VP8/VP9 video encoder
_ "github.com/pion/mediadevices/pkg/driver/screen" // This is required to register screen capture adapter
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/webrtc/v2"
)
func main() {
config := webrtc.Configuration{
ICEServers: []webrtc.ICEServer{
{
URLs: []string{"stun:stun.l.google.com:19302"},
},
},
}
// Wait for the offer to be pasted
offer := webrtc.SessionDescription{}
signal.Decode(signal.MustReadStdin(), &offer)
// Create a new RTCPeerConnection
mediaEngine := webrtc.MediaEngine{}
if err := mediaEngine.PopulateFromSDP(offer); err != nil {
panic(err)
}
api := webrtc.NewAPI(webrtc.WithMediaEngine(mediaEngine))
peerConnection, err := api.NewPeerConnection(config)
if err != nil {
panic(err)
}
// Set the handler for ICE connection state
// This will notify you when the peer has connected/disconnected
peerConnection.OnICEConnectionStateChange(func(connectionState webrtc.ICEConnectionState) {
fmt.Printf("Connection State has changed %s \n", connectionState.String())
})
md := mediadevices.NewMediaDevices(peerConnection)
vp8Params, err := vpx.NewVP8Params()
if err != nil {
panic(err)
}
vp8Params.BitRate = 100000 // 100kbps
s, err := md.GetDisplayMedia(mediadevices.MediaStreamConstraints{
Video: func(c *mediadevices.MediaTrackConstraints) {
c.Enabled = true
c.VideoTransform = video.Scale(-1, 360, nil) // Resize to 360p
c.VideoEncoderBuilders = []codec.VideoEncoderBuilder{&vp8Params}
},
})
if err != nil {
panic(err)
}
for _, tracker := range s.GetTracks() {
t := tracker.Track()
tracker.OnEnded(func(err error) {
fmt.Printf("Track (ID: %s, Label: %s) ended with error: %v\n",
t.ID(), t.Label(), err)
})
_, err = peerConnection.AddTransceiverFromTrack(t,
webrtc.RtpTransceiverInit{
Direction: webrtc.RTPTransceiverDirectionSendonly,
},
)
if err != nil {
panic(err)
}
}
// Set the remote SessionDescription
err = peerConnection.SetRemoteDescription(offer)
if err != nil {
panic(err)
}
// Create an answer
answer, err := peerConnection.CreateAnswer(nil)
if err != nil {
panic(err)
}
// Sets the LocalDescription, and starts our UDP listeners
err = peerConnection.SetLocalDescription(answer)
if err != nil {
panic(err)
}
// Output the answer in base64 so we can paste it in browser
fmt.Println(signal.Encode(answer))
select {}
}

View File

@@ -3,24 +3,24 @@
### Download gstreamer-send
```
go get github.com/pion/mediadevices/examples/simple
go get github.com/pion/mediadevices/examples/webrtc
```
### Open example page
[jsfiddle.net](https://jsfiddle.net/gh/get/library/pure/pion/mediadevices/tree/master/examples/internal/jsfiddle/audio-and-video) you should see two text-areas and a 'Start Session' button
### Run simple with your browsers SessionDescription as stdin
### Run webrtc with your browsers SessionDescription as stdin
In the jsfiddle the top textarea is your browser, copy that and:
#### Linux
Run `echo $BROWSER_SDP | simple`
Run `echo $BROWSER_SDP | webrtc`
### Input simple's SessionDescription into your browser
### Input webrtc's SessionDescription into your browser
Copy the text that `simple` just emitted and copy into second text area
Copy the text that `webrtc` just emitted and copy into second text area
### Hit 'Start Session' in jsfiddle, enjoy your video!

go.mod (6 changed lines)
View File

@@ -4,10 +4,10 @@ go 1.13
require (
github.com/blackjack/webcam v0.0.0-20200313125108-10ed912a8539
github.com/jfreymuth/pulse v0.0.0-20200817093420-a82ccdb5e8aa
github.com/lherman-cs/opus v0.0.0-20200925065115-26ea9d322d39
github.com/jfreymuth/pulse v0.0.0-20201014123913-1e525c426c93
github.com/lherman-cs/opus v0.0.2
github.com/pion/webrtc/v2 v2.2.26
github.com/satori/go.uuid v1.2.0
golang.org/x/image v0.0.0-20200927104501-e162460cd6b5
golang.org/x/sys v0.0.0-20200831180312-196b9ba8737a
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f
)

go.sum (22 changed lines)
View File

@@ -15,15 +15,15 @@ github.com/google/uuid v1.1.1 h1:Gkbcsh/GbpXz7lPftLA3P6TYMwjCLYm83jiFQZF/3gY=
github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/jfreymuth/pulse v0.0.0-20200817093420-a82ccdb5e8aa h1:qUZIj5+D3UDgfshNe8Cz/9maOxe8ddt43qwQH9vEEC8=
github.com/jfreymuth/pulse v0.0.0-20200817093420-a82ccdb5e8aa/go.mod h1:cpYspI6YljhkUf1WLXLLDmeaaPFc3CnGLjDZf9dZ4no=
github.com/jfreymuth/pulse v0.0.0-20201014123913-1e525c426c93 h1:gDcaH96SZ7q1JU6hj0tSv8FiuqadFERU17lLxhphLa8=
github.com/jfreymuth/pulse v0.0.0-20201014123913-1e525c426c93/go.mod h1:cpYspI6YljhkUf1WLXLLDmeaaPFc3CnGLjDZf9dZ4no=
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/lherman-cs/opus v0.0.0-20200925065115-26ea9d322d39 h1:WEYmSwg/uoPVmfmpXWPYplb1UUx/Jr4TXGNrPaI8Cj4=
github.com/lherman-cs/opus v0.0.0-20200925065115-26ea9d322d39/go.mod h1:v9KQvlDYMuvlwniumBVMlrB0VHQvyTgxNvaXjPmTmps=
github.com/lherman-cs/opus v0.0.2 h1:fE9Du3NKXDBztqvoTd6P2y9eJ9vgIHahGK8yQostnhA=
github.com/lherman-cs/opus v0.0.2/go.mod h1:v9KQvlDYMuvlwniumBVMlrB0VHQvyTgxNvaXjPmTmps=
github.com/lucas-clemente/quic-go v0.7.1-0.20190401152353-907071221cf9 h1:tbuodUh2vuhOVZAdW3NEUvosFHUMJwUNl7jk/VSEiwc=
github.com/lucas-clemente/quic-go v0.7.1-0.20190401152353-907071221cf9/go.mod h1:PpMmPfPKO9nKJ/psF49ESTAGQSdfXxlg1otPbEB2nOw=
github.com/marten-seemann/qtls v0.2.3 h1:0yWJ43C62LsZt08vuQJDK1uC1czUc3FJeCLPoNAI4vA=
@@ -35,7 +35,6 @@ github.com/onsi/gomega v1.4.3 h1:RE1xgDvH7imwFD45h+u2SgIfERHlS2yNG4DObb5BSKU=
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/pion/datachannel v1.4.21 h1:3ZvhNyfmxsAqltQrApLPQMhSFNA+aT87RqyCq4OXmf0=
github.com/pion/datachannel v1.4.21/go.mod h1:oiNyP4gHx2DIwRzX/MFyH0Rz/Gz05OgBlayAI2hAWjg=
github.com/pion/dtls/v2 v2.0.1 h1:ddE7+V0faYRbyh4uPsRZ2vLdRrjVZn+wmCfI7jlBfaA=
github.com/pion/dtls/v2 v2.0.1/go.mod h1:uMQkz2W0cSqY00xav7WByQ4Hb+18xeQh2oH2fRezr5U=
github.com/pion/dtls/v2 v2.0.2 h1:FHCHTiM182Y8e15aFTiORroiATUI16ryHiQh8AIOJ1E=
github.com/pion/dtls/v2 v2.0.2/go.mod h1:27PEO3MDdaCfo21heT59/vsdmZc0zMt9wQPcSlLu/1I=
@@ -63,9 +62,7 @@ github.com/pion/srtp v1.5.1/go.mod h1:B+QgX5xPeQTNc1CJStJPHzOlHK66ViMDWTT0HZTCkc
github.com/pion/stun v0.3.5 h1:uLUCBCkQby4S1cf6CGuR9QrVOKcvUwFeemaC865QHDg=
github.com/pion/stun v0.3.5/go.mod h1:gDMim+47EeEtfWogA37n6qXZS88L5V6LqFcf+DZA2UA=
github.com/pion/transport v0.6.0/go.mod h1:iWZ07doqOosSLMhZ+FXUTq+TamDoXSllxpbGcfkCmbE=
github.com/pion/transport v0.8.10 h1:lTiobMEw2PG6BH/mgIVqTV2mBp/mPT+IJLaN8ZxgdHk=
github.com/pion/transport v0.8.10/go.mod h1:tBmha/UCjpum5hqTWhfAEs3CO4/tHSg0MYRhSzR+CZ8=
github.com/pion/transport v0.10.0 h1:9M12BSneJm6ggGhJyWpDveFOstJsTiQjkLf4M44rm80=
github.com/pion/transport v0.10.0/go.mod h1:BnHnUipd0rZQyTVB2SBGojFHT9CBt5C5TcsJSQGkvSE=
github.com/pion/transport v0.10.1 h1:2W+yJT+0mOQ160ThZYUx5Zp2skzshiNgxrNE9GUfhJM=
github.com/pion/transport v0.10.1/go.mod h1:PBis1stIILMiis0PewDw91WJeLJkyIMcEk+DwKOzf4A=
@@ -84,17 +81,13 @@ github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdh
github.com/sclevine/agouti v3.0.0+incompatible/go.mod h1:b4WX9W9L1sfQKXeJf1mUTLZKJ48R1S7H23Ji7oFO5Bw=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
golang.org/x/crypto v0.0.0-20190228161510-8dd112bcdc25/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20200602180216-279210d13fed h1:g4KENRiCMEx58Q7/ecwfT0N2o8z35Fnbsjig/Alf2T4=
golang.org/x/crypto v0.0.0-20200602180216-279210d13fed/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9 h1:psW17arqaxU48Z5kZ0CQnkZWQJsqcURM6tKiBApRjXI=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200709230013-948cd5f35899 h1:DZhuSZLsGlFL4CmhA8BcRA0mnthyA/nZ00AqCUo7vHg=
golang.org/x/crypto v0.0.0-20200709230013-948cd5f35899/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
@@ -104,9 +97,7 @@ golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73r
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20191126235420-ef20fe5d7933/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200602114024-627f9648deb9 h1:pNX+40auqi2JqRfOP1akLGtYcn15TUbkhwuCO3foqqM=
golang.org/x/net v0.0.0-20200602114024-627f9648deb9/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200625001655-4c5254603344 h1:vGXIOMxbNfDTk/aXCmfdLgkrSV+Z2tcbze+pEc3v5W4=
golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20200707034311-ab3426394381 h1:VXak5I6aEWmAXeQjA+QSZzlgNrpq9mjcfDemuexIKsU=
golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
@@ -117,10 +108,9 @@ golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5h
golang.org/x/sys v0.0.0-20190228124157-a34e9553db1e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200724161237-0e2f3a69832c h1:UIcGWL6/wpCfyGuJnRFJRurA+yj8RrW7Q6x2YMCXt6c=
golang.org/x/sys v0.0.0-20200724161237-0e2f3a69832c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200831180312-196b9ba8737a h1:i47hUS795cOydZI4AwJQCKXOr4BvxzvikwDoDtHhP2Y=
golang.org/x/sys v0.0.0-20200831180312-196b9ba8737a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f h1:+Nyd8tzPX9R7BWHguqsrbFdRx3WQ/1ib8I44HXV5yTA=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=

View File

@@ -7,7 +7,7 @@ type MediaDeviceType int
// MediaDeviceType definitions.
const (
VideoInput MediaDeviceType = iota
VideoInput MediaDeviceType = iota + 1
AudioInput
AudioOutput
)

View File

@@ -161,8 +161,10 @@ type mockParams struct {
name string
}
func (params *mockParams) Name() string {
return params.name
func (params *mockParams) RTPCodec() *codec.RTPCodec {
rtpCodec := codec.NewRTPH264Codec(90000)
rtpCodec.Name = params.name
return rtpCodec
}
func (params *mockParams) BuildVideoEncoder(r video.Reader, p prop.Media) (codec.ReadCloser, error) {
@@ -199,11 +201,11 @@ type mockVideoCodec struct {
closed chan struct{}
}
func (m *mockVideoCodec) Read(b []byte) (int, error) {
func (m *mockVideoCodec) Read() ([]byte, error) {
if _, err := m.r.Read(); err != nil {
return 0, err
return nil, err
}
return len(b), nil
return make([]byte, 20), nil
}
func (m *mockVideoCodec) Close() error { return nil }
@@ -214,11 +216,11 @@ type mockAudioCodec struct {
closed chan struct{}
}
func (m *mockAudioCodec) Read(b []byte) (int, error) {
func (m *mockAudioCodec) Read() ([]byte, error) {
if _, err := m.r.Read(); err != nil {
return 0, err
return nil, err
}
return len(b), nil
return make([]byte, 20), nil
}
func (m *mockAudioCodec) Close() error { return nil }

View File

@@ -2,8 +2,6 @@ package mediadevices
import (
"sync"
"github.com/pion/webrtc/v2"
)
// MediaStream is an interface that represents a collection of existing tracks.
@@ -21,21 +19,20 @@ type MediaStream interface {
}
type mediaStream struct {
trackers map[string]Tracker
trackers map[Tracker]struct{}
l sync.RWMutex
}
const rtpCodecTypeDefault webrtc.RTPCodecType = 0
const trackTypeDefault MediaDeviceType = 0
// NewMediaStream creates a MediaStream interface that's defined in
// https://w3c.github.io/mediacapture-main/#dom-mediastream
func NewMediaStream(trackers ...Tracker) (MediaStream, error) {
m := mediaStream{trackers: make(map[string]Tracker)}
m := mediaStream{trackers: make(map[Tracker]struct{})}
for _, tracker := range trackers {
id := tracker.LocalTrack().ID()
if _, ok := m.trackers[id]; !ok {
m.trackers[id] = tracker
if _, ok := m.trackers[tracker]; !ok {
m.trackers[tracker] = struct{}{}
}
}
@@ -43,26 +40,26 @@ func NewMediaStream(trackers ...Tracker) (MediaStream, error) {
}
func (m *mediaStream) GetAudioTracks() []Tracker {
return m.queryTracks(webrtc.RTPCodecTypeAudio)
return m.queryTracks(AudioInput)
}
func (m *mediaStream) GetVideoTracks() []Tracker {
return m.queryTracks(webrtc.RTPCodecTypeVideo)
return m.queryTracks(VideoInput)
}
func (m *mediaStream) GetTracks() []Tracker {
return m.queryTracks(rtpCodecTypeDefault)
return m.queryTracks(trackTypeDefault)
}
// queryTracks returns all tracks that are the same kind as t.
// If t is 0, which is the default, queryTracks will return all the tracks.
func (m *mediaStream) queryTracks(t webrtc.RTPCodecType) []Tracker {
func (m *mediaStream) queryTracks(t MediaDeviceType) []Tracker {
m.l.RLock()
defer m.l.RUnlock()
result := make([]Tracker, 0)
for _, tracker := range m.trackers {
if tracker.LocalTrack().Kind() == t || t == rtpCodecTypeDefault {
for tracker := range m.trackers {
if tracker.Kind() == t || t == trackTypeDefault {
result = append(result, tracker)
}
}
@@ -74,17 +71,16 @@ func (m *mediaStream) AddTrack(t Tracker) {
m.l.Lock()
defer m.l.Unlock()
id := t.LocalTrack().ID()
if _, ok := m.trackers[id]; ok {
if _, ok := m.trackers[t]; ok {
return
}
m.trackers[id] = t
m.trackers[t] = struct{}{}
}
func (m *mediaStream) RemoveTrack(t Tracker) {
m.l.Lock()
defer m.l.Unlock()
delete(m.trackers, t.LocalTrack().ID())
delete(m.trackers, t)
}

mediastream_test.go (new file, 83 lines added)
View File

@@ -0,0 +1,83 @@
package mediadevices
import (
"testing"
"github.com/pion/webrtc/v2"
)
type mockMediaStreamTrack struct {
kind MediaDeviceType
}
func (track *mockMediaStreamTrack) Track() *webrtc.Track {
return nil
}
func (track *mockMediaStreamTrack) LocalTrack() LocalTrack {
return nil
}
func (track *mockMediaStreamTrack) Stop() {
}
func (track *mockMediaStreamTrack) Kind() MediaDeviceType {
return track.kind
}
func (track *mockMediaStreamTrack) OnEnded(handler func(error)) {
}
func TestMediaStreamFilters(t *testing.T) {
audioTracks := []Tracker{
&mockMediaStreamTrack{AudioInput},
&mockMediaStreamTrack{AudioInput},
&mockMediaStreamTrack{AudioInput},
&mockMediaStreamTrack{AudioInput},
&mockMediaStreamTrack{AudioInput},
}
videoTracks := []Tracker{
&mockMediaStreamTrack{VideoInput},
&mockMediaStreamTrack{VideoInput},
&mockMediaStreamTrack{VideoInput},
}
tracks := append(audioTracks, videoTracks...)
stream, err := NewMediaStream(tracks...)
if err != nil {
t.Fatal(err)
}
expect := func(t *testing.T, actual, expected []Tracker) {
if len(actual) != len(expected) {
t.Fatalf("%s: Expected to get %d trackers, but got %d trackers", t.Name(), len(expected), len(actual))
}
for _, a := range actual {
found := false
for _, e := range expected {
if e == a {
found = true
break
}
}
if !found {
t.Fatalf("%s: Expected to find %p in the query results", t.Name(), a)
}
}
}
t.Run("GetAudioTracks", func(t *testing.T) {
expect(t, stream.GetAudioTracks(), audioTracks)
})
t.Run("GetVideoTracks", func(t *testing.T) {
expect(t, stream.GetVideoTracks(), videoTracks)
})
t.Run("GetTracks", func(t *testing.T) {
expect(t, stream.GetTracks(), tracks)
})
}

meta.go (new file, 35 lines added)
View File

@@ -0,0 +1,35 @@
package mediadevices
import (
"github.com/pion/mediadevices/pkg/io/audio"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
)
// detectCurrentVideoProp is a small helper to get current video property
func detectCurrentVideoProp(broadcaster *video.Broadcaster) (prop.Media, error) {
var currentProp prop.Media
// Since broadcaster has a ring buffer internally, a new reader will either read the last
// buffered frame or a new frame from the source. This also implies that no frame will be lost
// in any case.
metaReader := broadcaster.NewReader(false)
metaReader = video.DetectChanges(0, func(p prop.Media) { currentProp = p })(metaReader)
_, err := metaReader.Read()
return currentProp, err
}
// detectCurrentAudioProp is a small helper to get current audio property
func detectCurrentAudioProp(broadcaster *audio.Broadcaster) (prop.Media, error) {
var currentProp prop.Media
// Since broadcaster has a ring buffer internally, a new reader will either read the last
// buffered frame or a new frame from the source. This also implies that no frame will be lost
// in any case.
metaReader := broadcaster.NewReader(false)
metaReader = audio.DetectChanges(0, func(p prop.Media) { currentProp = p })(metaReader)
_, err := metaReader.Read()
return currentProp, err
}

meta_test.go (new file, 98 lines added)
View File

@@ -0,0 +1,98 @@
package mediadevices
import (
"image"
"testing"
"github.com/pion/mediadevices/pkg/io/audio"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/wave"
)
func TestDetectCurrentVideoProp(t *testing.T) {
resolution := image.Rect(0, 0, 4, 4)
first := image.NewRGBA(resolution)
first.Pix[0] = 1
second := image.NewRGBA(resolution)
second.Pix[0] = 2
isFirst := true
source := video.ReaderFunc(func() (image.Image, error) {
if isFirst {
isFirst = true
return first, nil
} else {
return second, nil
}
})
broadcaster := video.NewBroadcaster(source, nil)
currentProp, err := detectCurrentVideoProp(broadcaster)
if err != nil {
t.Fatal(err)
}
if currentProp.Width != resolution.Dx() {
t.Fatalf("Expect the actual width to be %d, but got %d", currentProp.Width, resolution.Dx())
}
if currentProp.Height != resolution.Dy() {
t.Fatalf("Expect the actual height to be %d, but got %d", currentProp.Height, resolution.Dy())
}
reader := broadcaster.NewReader(false)
img, err := reader.Read()
if err != nil {
t.Fatal(err)
}
rgba := img.(*image.RGBA)
if rgba.Pix[0] != 1 {
t.Fatal("Expect the frame after reading the current prop is not the first frame")
}
}
func TestDetectCurrentAudioProp(t *testing.T) {
info := wave.ChunkInfo{
Len: 4,
Channels: 2,
SamplingRate: 48000,
}
first := wave.NewInt16Interleaved(info)
first.Data[0] = 1
second := wave.NewInt16Interleaved(info)
second.Data[0] = 2
isFirst := true
source := audio.ReaderFunc(func() (wave.Audio, error) {
if isFirst {
isFirst = true
return first, nil
} else {
return second, nil
}
})
broadcaster := audio.NewBroadcaster(source, nil)
currentProp, err := detectCurrentAudioProp(broadcaster)
if err != nil {
t.Fatal(err)
}
if currentProp.ChannelCount != info.Channels {
t.Fatalf("Expect the actual channel count to be %d, but got %d", currentProp.ChannelCount, info.Channels)
}
reader := broadcaster.NewReader(false)
chunk, err := reader.Read()
if err != nil {
t.Fatal(err)
}
realChunk := chunk.(*wave.Int16Interleaved)
if realChunk.Data[0] != 1 {
t.Fatal("Expect the chunk after reading the current prop is not the first chunk")
}
}

View File

@@ -1,21 +1,45 @@
package codec
import (
"io"
"github.com/pion/mediadevices/pkg/io/audio"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
"github.com/pion/webrtc/v2"
)
// RTPCodec wraps webrtc.RTPCodec. RTPCodec might extend webrtc.RTPCodec in the future.
type RTPCodec struct {
*webrtc.RTPCodec
}
// NewRTPH264Codec is a helper to create an H264 codec
func NewRTPH264Codec(clockrate uint32) *RTPCodec {
return &RTPCodec{webrtc.NewRTPH264Codec(webrtc.DefaultPayloadTypeH264, clockrate)}
}
// NewRTPVP8Codec is a helper to create an VP8 codec
func NewRTPVP8Codec(clockrate uint32) *RTPCodec {
return &RTPCodec{webrtc.NewRTPVP8Codec(webrtc.DefaultPayloadTypeVP8, clockrate)}
}
// NewRTPVP9Codec is a helper to create an VP9 codec
func NewRTPVP9Codec(clockrate uint32) *RTPCodec {
return &RTPCodec{webrtc.NewRTPVP9Codec(webrtc.DefaultPayloadTypeVP9, clockrate)}
}
// NewRTPOpusCodec is a helper to create an Opus codec
func NewRTPOpusCodec(clockrate uint32) *RTPCodec {
return &RTPCodec{webrtc.NewRTPOpusCodec(webrtc.DefaultPayloadTypeOpus, clockrate)}
}
// AudioEncoderBuilder is the interface that wraps basic operations that are
// necessary to build the audio encoder.
//
// This interface is for codec implementors to provide codec specific params,
// but still giving generality for the users.
type AudioEncoderBuilder interface {
// Name represents the codec name
Name() string
// RTPCodec represents the codec metadata
RTPCodec() *RTPCodec
// BuildAudioEncoder builds audio encoder by given media params and audio input
BuildAudioEncoder(r audio.Reader, p prop.Media) (ReadCloser, error)
}
@@ -26,15 +50,16 @@ type AudioEncoderBuilder interface {
// This interface is for codec implementors to provide codec specific params,
// but still giving generality for the users.
type VideoEncoderBuilder interface {
// Name represents the codec name
Name() string
// RTPCodec represents the codec metadata
RTPCodec() *RTPCodec
// BuildVideoEncoder builds video encoder by given media params and video input
BuildVideoEncoder(r video.Reader, p prop.Media) (ReadCloser, error)
}
// ReadCloser is an io.ReadCloser with methods for rate limiting: SetBitRate and ForceKeyFrame
type ReadCloser interface {
io.ReadCloser
Read() ([]byte, error)
Close() error
// SetBitRate sets current target bitrate, lower bitrate means smaller data will be transmitted
// but this also means that the quality will also be lower.
SetBitRate(int) error

pkg/codec/mmal/bridge.h (new file, 196 lines added)
View File

@@ -0,0 +1,196 @@
#include <interface/mmal/mmal.h>
#include <interface/mmal/util/mmal_default_components.h>
#include <interface/mmal/util/mmal_util_params.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdlib.h>
#define CHK(__status, __msg) \
do { \
status.code = __status; \
if (status.code != MMAL_SUCCESS) { \
status.msg = __msg; \
goto CleanUp; \
} \
} while (0)
typedef struct Status {
MMAL_STATUS_T code;
const char *msg;
} Status;
typedef struct Slice {
uint8_t *data;
int len;
} Slice;
typedef struct Params {
int width, height;
uint32_t bitrate;
uint32_t key_frame_interval;
} Params;
typedef struct Encoder {
MMAL_COMPONENT_T *component;
MMAL_PORT_T *port_in, *port_out;
MMAL_QUEUE_T *queue_out;
MMAL_POOL_T *pool_in, *pool_out;
} Encoder;
Status enc_new(Params, Encoder *);
Status enc_encode(Encoder *, Slice y, Slice cb, Slice cr, MMAL_BUFFER_HEADER_T **);
Status enc_close(Encoder *);
static void encoder_in_cb(MMAL_PORT_T *port, MMAL_BUFFER_HEADER_T *buffer) { mmal_buffer_header_release(buffer); }
static void encoder_out_cb(MMAL_PORT_T *port, MMAL_BUFFER_HEADER_T *buffer) {
MMAL_QUEUE_T *queue = (MMAL_QUEUE_T *)port->userdata;
mmal_queue_put(queue, buffer);
}
Status enc_new(Params params, Encoder *encoder) {
Status status = {0};
bool created = false;
memset(encoder, 0, sizeof(Encoder));
CHK(mmal_component_create(MMAL_COMPONENT_DEFAULT_VIDEO_ENCODER, &encoder->component),
"Failed to create video encoder component");
created = true;
encoder->port_in = encoder->component->input[0];
encoder->port_in->format->type = MMAL_ES_TYPE_VIDEO;
encoder->port_in->format->encoding = MMAL_ENCODING_I420;
encoder->port_in->format->es->video.width = params.width;
encoder->port_in->format->es->video.height = params.height;
encoder->port_in->format->es->video.par.num = 1;
encoder->port_in->format->es->video.par.den = 1;
encoder->port_in->format->es->video.crop.x = 0;
encoder->port_in->format->es->video.crop.y = 0;
encoder->port_in->format->es->video.crop.width = params.width;
encoder->port_in->format->es->video.crop.height = params.height;
CHK(mmal_port_format_commit(encoder->port_in), "Failed to commit input port format");
encoder->port_out = encoder->component->output[0];
encoder->port_out->format->type = MMAL_ES_TYPE_VIDEO;
encoder->port_out->format->encoding = MMAL_ENCODING_H264;
encoder->port_out->format->bitrate = params.bitrate;
CHK(mmal_port_format_commit(encoder->port_out), "Failed to commit output port format");
MMAL_PARAMETER_VIDEO_PROFILE_T encoder_param_profile = {0};
encoder_param_profile.hdr.id = MMAL_PARAMETER_PROFILE;
encoder_param_profile.hdr.size = sizeof(encoder_param_profile);
encoder_param_profile.profile[0].profile = MMAL_VIDEO_PROFILE_H264_BASELINE;
encoder_param_profile.profile[0].level = MMAL_VIDEO_LEVEL_H264_42;
CHK(mmal_port_parameter_set(encoder->port_out, &encoder_param_profile.hdr), "Failed to set encoder profile param");
CHK(mmal_port_parameter_set_uint32(encoder->port_out, MMAL_PARAMETER_INTRAPERIOD, params.key_frame_interval),
"Failed to set intra period param");
MMAL_PARAMETER_VIDEO_RATECONTROL_T encoder_param_rate_control = {0};
encoder_param_rate_control.hdr.id = MMAL_PARAMETER_RATECONTROL;
encoder_param_rate_control.hdr.size = sizeof(encoder_param_rate_control);
encoder_param_rate_control.control = MMAL_VIDEO_RATECONTROL_VARIABLE;
CHK(mmal_port_parameter_set(encoder->port_out, &encoder_param_rate_control.hdr), "Failed to set rate control param");
// Some decoders expect SPS/PPS headers to be added to every frame
CHK(mmal_port_parameter_set_boolean(encoder->port_out, MMAL_PARAMETER_VIDEO_ENCODE_INLINE_HEADER, MMAL_TRUE),
"Failed to set inline header param");
CHK(mmal_port_parameter_set_boolean(encoder->port_out, MMAL_PARAMETER_VIDEO_ENCODE_HEADERS_WITH_FRAME, MMAL_TRUE),
"Failed to set headers with frame param");
/* FIXME: Somehow this flag is broken? When this flag is on, the encoder will get stuck.
// Since our use case is mainly for real time streaming, the encoder should optimized for low latency
CHK(mmal_port_parameter_set_boolean(encoder->port_out, MMAL_PARAMETER_VIDEO_ENCODE_H264_LOW_LATENCY, MMAL_TRUE),
"Failed to set low latency param");
*/
// Now we know the format of both ports and the requirements of the encoder, we can create
// our buffer headers and their associated memory buffers. We use the buffer pool API for this.
encoder->port_in->buffer_num = encoder->port_in->buffer_num_min;
// mmal calculates recommended size that's big enough to store all of the pixels
encoder->port_in->buffer_size = encoder->port_in->buffer_size_recommended;
encoder->pool_in = mmal_pool_create(encoder->port_in->buffer_num, encoder->port_in->buffer_size);
encoder->port_out->buffer_num = encoder->port_out->buffer_num_min;
encoder->port_out->buffer_size = encoder->port_out->buffer_size_recommended;
encoder->pool_out = mmal_pool_create(encoder->port_out->buffer_num, encoder->port_out->buffer_size);
// Create a queue to store our encoded video frames. The callback we will get when
// a frame has been encoded will put the frame into this queue.
encoder->queue_out = mmal_queue_create();
encoder->port_out->userdata = (void *)encoder->queue_out;
// Enable all the input port and the output port.
// The callback specified here is the function which will be called when the buffer header
// we sent to the component has been processed.
CHK(mmal_port_enable(encoder->port_in, encoder_in_cb), "Failed to enable input port");
CHK(mmal_port_enable(encoder->port_out, encoder_out_cb), "Failed to enable output port");
// Enable the component. Components will only process data when they are enabled.
CHK(mmal_component_enable(encoder->component), "Failed to enable component");
CleanUp:
if (status.code != MMAL_SUCCESS) {
if (created) {
enc_close(encoder);
}
}
return status;
}
// enc_encode encodes y, cb, cr. The encoded frame is going to be stored in encoded_buffer.
// IMPORTANT: the caller is responsible to release the ownership of encoded_buffer
Status enc_encode(Encoder *encoder, Slice y, Slice cb, Slice cr, MMAL_BUFFER_HEADER_T **encoded_buffer) {
Status status = {0};
MMAL_BUFFER_HEADER_T *buffer;
uint32_t required_size;
// buffer should always be available since the encoding process is blocking
buffer = mmal_queue_get(encoder->pool_in->queue);
assert(buffer != NULL);
// buffer->data should've been allocated with enough memory to contain a frame by pool_in
required_size = y.len + cb.len + cr.len;
assert(buffer->alloc_size >= required_size);
memcpy(buffer->data, y.data, y.len);
memcpy(buffer->data + y.len, cb.data, cb.len);
memcpy(buffer->data + y.len + cb.len, cr.data, cr.len);
buffer->length = required_size;
CHK(mmal_port_send_buffer(encoder->port_in, buffer), "Failed to send filled buffer to input port");
while (1) {
// Send empty buffers to the output port to allow the encoder to start
// producing frames as soon as it gets input data
while ((buffer = mmal_queue_get(encoder->pool_out->queue)) != NULL) {
CHK(mmal_port_send_buffer(encoder->port_out, buffer), "Failed to send empty buffers to output port");
}
while ((buffer = mmal_queue_wait(encoder->queue_out)) != NULL) {
if ((buffer->flags & MMAL_BUFFER_HEADER_FLAG_FRAME_END) != 0) {
*encoded_buffer = buffer;
goto CleanUp;
}
mmal_buffer_header_release(buffer);
}
}
CleanUp:
return status;
}
Status enc_close(Encoder *encoder) {
Status status = {0};
mmal_pool_destroy(encoder->pool_out);
mmal_pool_destroy(encoder->pool_in);
mmal_queue_destroy(encoder->queue_out);
mmal_component_destroy(encoder->component);
CleanUp:
return status;
}

pkg/codec/mmal/mmal.go (new file, 112 lines added)
View File

@@ -0,0 +1,112 @@
// Package mmal implements a hardware accelerated H264 encoder for raspberry pi.
// This package requires libmmal headers and libraries to be built.
// Reference: https://github.com/raspberrypi/userland/tree/master/interface/mmal
package mmal
// #cgo pkg-config: mmal
// #include "bridge.h"
import "C"
import (
"fmt"
"image"
"io"
"sync"
"unsafe"
"github.com/pion/mediadevices/pkg/codec"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
)
type encoder struct {
engine C.Encoder
r video.Reader
mu sync.Mutex
closed bool
cntr int
}
func statusToErr(status *C.Status) error {
return fmt.Errorf("(status = %d) %s", int(status.code), C.GoString(status.msg))
}
func newEncoder(r video.Reader, p prop.Media, params Params) (codec.ReadCloser, error) {
if params.KeyFrameInterval == 0 {
params.KeyFrameInterval = 60
}
if params.BitRate == 0 {
params.BitRate = 300000
}
e := encoder{
r: video.ToI420(r),
}
status := C.enc_new(C.Params{
width: C.int(p.Width),
height: C.int(p.Height),
bitrate: C.uint(params.BitRate),
key_frame_interval: C.uint(params.KeyFrameInterval),
}, &e.engine)
if status.code != 0 {
return nil, statusToErr(&status)
}
return &e, nil
}
func (e *encoder) Read() ([]byte, error) {
e.mu.Lock()
defer e.mu.Unlock()
if e.closed {
return nil, io.EOF
}
img, err := e.r.Read()
if err != nil {
return nil, err
}
imgReal := img.(*image.YCbCr)
var y, cb, cr C.Slice
y.data = (*C.uchar)(&imgReal.Y[0])
y.len = C.int(len(imgReal.Y))
cb.data = (*C.uchar)(&imgReal.Cb[0])
cb.len = C.int(len(imgReal.Cb))
cr.data = (*C.uchar)(&imgReal.Cr[0])
cr.len = C.int(len(imgReal.Cr))
var encodedBuffer *C.MMAL_BUFFER_HEADER_T
status := C.enc_encode(&e.engine, y, cb, cr, &encodedBuffer)
if status.code != 0 {
return nil, statusToErr(&status)
}
// GoBytes copies the C array to Go slice. After this, it's safe to release the C array
encoded := C.GoBytes(unsafe.Pointer(encodedBuffer.data), C.int(encodedBuffer.length))
// Release the buffer so that mmal can reuse this memory
C.mmal_buffer_header_release(encodedBuffer)
return encoded, err
}
func (e *encoder) SetBitRate(b int) error {
panic("SetBitRate is not implemented")
}
func (e *encoder) ForceKeyFrame() error {
panic("ForceKeyFrame is not implemented")
}
func (e *encoder) Close() error {
e.mu.Lock()
defer e.mu.Unlock()
if e.closed {
return nil
}
e.closed = true
C.enc_close(&e.engine)
return nil
}

pkg/codec/mmal/params.go (new file, 31 lines added)
View File

@@ -0,0 +1,31 @@
package mmal
import (
"github.com/pion/mediadevices/pkg/codec"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
)
// Params stores libmmal specific encoding parameters.
type Params struct {
codec.BaseParams
}
// NewParams returns default mmal codec specific parameters.
func NewParams() (Params, error) {
return Params{
BaseParams: codec.BaseParams{
KeyFrameInterval: 60,
},
}, nil
}
// RTPCodec represents the codec metadata
func (p *Params) RTPCodec() *codec.RTPCodec {
return codec.NewRTPH264Codec(90000)
}
// BuildVideoEncoder builds mmal encoder with given params
func (p *Params) BuildVideoEncoder(r video.Reader, property prop.Media) (codec.ReadCloser, error) {
return newEncoder(r, property, *p)
}

View File

@@ -16,7 +16,6 @@ import (
"unsafe"
"github.com/pion/mediadevices/pkg/codec"
mio "github.com/pion/mediadevices/pkg/io"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
)
@@ -24,7 +23,6 @@ import (
type encoder struct {
engine *C.Encoder
r video.Reader
buff []byte
mu sync.Mutex
closed bool
@@ -52,26 +50,17 @@ func newEncoder(r video.Reader, p prop.Media, params Params) (codec.ReadCloser,
}, nil
}
func (e *encoder) Read(p []byte) (n int, err error) {
func (e *encoder) Read() ([]byte, error) {
e.mu.Lock()
defer e.mu.Unlock()
if e.closed {
return 0, io.EOF
}
if e.buff != nil {
n, err = mio.Copy(p, e.buff)
if err == nil {
e.buff = nil
}
return n, err
return nil, io.EOF
}
img, err := e.r.Read()
if err != nil {
return 0, err
return nil, err
}
yuvImg := img.(*image.YCbCr)
@@ -85,16 +74,11 @@ func (e *encoder) Read(p []byte) (n int, err error) {
width: C.int(bounds.Max.X - bounds.Min.X),
}, &rv)
if err := errResult(rv); err != nil {
return 0, fmt.Errorf("failed in encoding: %v", err)
return nil, fmt.Errorf("failed in encoding: %v", err)
}
encoded := C.GoBytes(unsafe.Pointer(s.data), s.data_len)
n, err = mio.Copy(p, encoded)
if err != nil {
e.buff = encoded
}
return n, err
return encoded, nil
}
func (e *encoder) SetBitRate(b int) error {

View File

@@ -4,7 +4,6 @@ import (
"github.com/pion/mediadevices/pkg/codec"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
"github.com/pion/webrtc/v2"
)
// Params stores libopenh264 specific encoding parameters.
@@ -21,9 +20,9 @@ func NewParams() (Params, error) {
}, nil
}
// Name represents the codec name
func (p *Params) Name() string {
return webrtc.H264
// RTPCodec represents the codec metadata
func (p *Params) RTPCodec() *codec.RTPCodec {
return codec.NewRTPH264Codec(90000)
}
// BuildVideoEncoder builds openh264 encoder with given params

View File

@@ -72,27 +72,22 @@ func newEncoder(r audio.Reader, p prop.Media, params Params) (codec.ReadCloser,
return &e, nil
}
func (e *encoder) Read(p []byte) (int, error) {
func (e *encoder) Read() ([]byte, error) {
buff, err := e.reader.Read()
if err != nil {
return 0, err
return nil, err
}
encoded := make([]byte, 1024)
switch b := buff.(type) {
case *wave.Int16Interleaved:
n, err := e.engine.Encode(b.Data, p)
if err != nil {
return n, err
}
return n, nil
n, err := e.engine.Encode(b.Data, encoded)
return encoded[:n:n], err
case *wave.Float32Interleaved:
n, err := e.engine.EncodeFloat32(b.Data, p)
if err != nil {
return n, err
}
return n, nil
n, err := e.engine.EncodeFloat32(b.Data, encoded)
return encoded[:n:n], err
default:
return 0, errors.New("unknown type of audio buffer")
return nil, errors.New("unknown type of audio buffer")
}
}

View File

@@ -5,7 +5,6 @@ import (
"github.com/pion/mediadevices/pkg/io/audio"
"github.com/pion/mediadevices/pkg/prop"
"github.com/pion/mediadevices/pkg/wave/mixer"
"github.com/pion/webrtc/v2"
)
// Params stores opus specific encoding parameters.
@@ -20,9 +19,9 @@ func NewParams() (Params, error) {
return Params{}, nil
}
// Name represents the codec name
func (p *Params) Name() string {
return webrtc.Opus
// RTPCodec represents the codec metadata
func (p *Params) RTPCodec() *codec.RTPCodec {
return codec.NewRTPOpusCodec(48000)
}
// BuildAudioEncoder builds opus encoder with given params

View File

@@ -4,7 +4,6 @@ import (
"github.com/pion/mediadevices/pkg/codec"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
"github.com/pion/webrtc/v2"
)
// ParamsVP8 stores VP8 encoding parameters.
@@ -44,9 +43,9 @@ func NewVP8Params() (ParamsVP8, error) {
}, nil
}
// Name represents the codec name
func (p *ParamsVP8) Name() string {
return webrtc.VP8
// RTPCodec represents the codec metadata
func (p *ParamsVP8) RTPCodec() *codec.RTPCodec {
return codec.NewRTPVP8Codec(90000)
}
// BuildVideoEncoder builds VP8 encoder with given params
@@ -113,9 +112,9 @@ func NewVP9Params() (ParamsVP9, error) {
}, nil
}
// Name represents the codec name
func (p *ParamsVP9) Name() string {
return webrtc.VP9
// RTPCodec represents the codec metadata
func (p *ParamsVP9) RTPCodec() *codec.RTPCodec {
return codec.NewRTPVP9Codec(90000)
}
// BuildVideoEncoder builds VP9 encoder with given params

View File

@@ -80,7 +80,6 @@ const (
type encoderVP8 struct {
r video.Reader
buf []byte
frame []byte
fdDRI C.int
@@ -297,25 +296,17 @@ func newVP8Encoder(r video.Reader, p prop.Media, params ParamsVP8) (codec.ReadCl
return e, nil
}
func (e *encoderVP8) Read(p []byte) (int, error) {
func (e *encoderVP8) Read() ([]byte, error) {
e.mu.Lock()
defer e.mu.Unlock()
if e.closed {
return 0, io.EOF
}
if e.buf != nil {
n, err := mio.Copy(p, e.buf)
if err == nil {
e.buf = nil
}
return n, err
return nil, io.EOF
}
img, err := e.r.Read()
if err != nil {
return 0, err
return nil, err
}
yuvImg := img.(*image.YCbCr)
@@ -357,7 +348,7 @@ func (e *encoderVP8) Read(p []byte) (int, error) {
}
}
if e.picParam.reconstructed_frame == C.VA_INVALID_SURFACE {
return 0, errors.New("no available surface")
return nil, errors.New("no available surface")
}
C.setForceKFFlagVP8(&e.picParam, 0)
@@ -425,7 +416,7 @@ func (e *encoderVP8) Read(p []byte) (int, error) {
C.size_t(uintptr(p.src)),
&id,
); s != C.VA_STATUS_SUCCESS {
return 0, fmt.Errorf("failed to create buffer: %s", C.GoString(C.vaErrorStr(s)))
return nil, fmt.Errorf("failed to create buffer: %s", C.GoString(C.vaErrorStr(s)))
}
buffs = append(buffs, id)
}
@@ -435,17 +426,17 @@ func (e *encoderVP8) Read(p []byte) (int, error) {
e.display, e.ctxID,
e.surfs[surfaceVP8Input],
); s != C.VA_STATUS_SUCCESS {
return 0, fmt.Errorf("failed to begin picture: %s", C.GoString(C.vaErrorStr(s)))
return nil, fmt.Errorf("failed to begin picture: %s", C.GoString(C.vaErrorStr(s)))
}
// Upload image
var vaImg C.VAImage
var rawBuf unsafe.Pointer
if s := C.vaDeriveImage(e.display, e.surfs[surfaceVP8Input], &vaImg); s != C.VA_STATUS_SUCCESS {
return 0, fmt.Errorf("failed to derive image: %s", C.GoString(C.vaErrorStr(s)))
return nil, fmt.Errorf("failed to derive image: %s", C.GoString(C.vaErrorStr(s)))
}
if s := C.vaMapBuffer(e.display, vaImg.buf, &rawBuf); s != C.VA_STATUS_SUCCESS {
return 0, fmt.Errorf("failed to map buffer: %s", C.GoString(C.vaErrorStr(s)))
return nil, fmt.Errorf("failed to map buffer: %s", C.GoString(C.vaErrorStr(s)))
}
// TODO: use vaImg.pitches to support padding
C.memcpy(
@@ -461,10 +452,10 @@ func (e *encoderVP8) Read(p []byte) (int, error) {
unsafe.Pointer(&yuvImg.Cr[0]), C.size_t(len(yuvImg.Cr)),
)
if s := C.vaUnmapBuffer(e.display, vaImg.buf); s != C.VA_STATUS_SUCCESS {
return 0, fmt.Errorf("failed to unmap buffer: %s", C.GoString(C.vaErrorStr(s)))
return nil, fmt.Errorf("failed to unmap buffer: %s", C.GoString(C.vaErrorStr(s)))
}
if s := C.vaDestroyImage(e.display, vaImg.image_id); s != C.VA_STATUS_SUCCESS {
return 0, fmt.Errorf("failed to destroy image: %s", C.GoString(C.vaErrorStr(s)))
return nil, fmt.Errorf("failed to destroy image: %s", C.GoString(C.vaErrorStr(s)))
}
if s := C.vaRenderPicture(
@@ -472,38 +463,38 @@ func (e *encoderVP8) Read(p []byte) (int, error) {
&buffs[1], // 0 is for output
C.int(len(buffs)-1),
); s != C.VA_STATUS_SUCCESS {
return 0, fmt.Errorf("failed to render picture: %s", C.GoString(C.vaErrorStr(s)))
return nil, fmt.Errorf("failed to render picture: %s", C.GoString(C.vaErrorStr(s)))
}
if s := C.vaEndPicture(
e.display, e.ctxID,
); s != C.VA_STATUS_SUCCESS {
return 0, fmt.Errorf("failed to end picture: %s", C.GoString(C.vaErrorStr(s)))
return nil, fmt.Errorf("failed to end picture: %s", C.GoString(C.vaErrorStr(s)))
}
// Load encoded data
for retry := 3; retry >= 0; retry-- {
if s := C.vaSyncSurface(e.display, e.picParam.reconstructed_frame); s != C.VA_STATUS_SUCCESS {
return 0, fmt.Errorf("failed to sync surface: %s", C.GoString(C.vaErrorStr(s)))
return nil, fmt.Errorf("failed to sync surface: %s", C.GoString(C.vaErrorStr(s)))
}
var surfStat C.VASurfaceStatus
if s := C.vaQuerySurfaceStatus(
e.display, e.picParam.reconstructed_frame, &surfStat,
); s != C.VA_STATUS_SUCCESS {
return 0, fmt.Errorf("failed to query surface status: %s", C.GoString(C.vaErrorStr(s)))
return nil, fmt.Errorf("failed to query surface status: %s", C.GoString(C.vaErrorStr(s)))
}
if surfStat == C.VASurfaceReady {
break
}
if retry == 0 {
return 0, fmt.Errorf("failed to sync surface: %d", surfStat)
return nil, fmt.Errorf("failed to sync surface: %d", surfStat)
}
}
var seg *C.VACodedBufferSegment
if s := C.vaMapBufferSeg(e.display, buffs[0], &seg); s != C.VA_STATUS_SUCCESS {
return 0, fmt.Errorf("failed to map buffer: %s", C.GoString(C.vaErrorStr(s)))
return nil, fmt.Errorf("failed to map buffer: %s", C.GoString(C.vaErrorStr(s)))
}
if seg.status&C.VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK != 0 {
return 0, errors.New("buffer size too small")
return nil, errors.New("buffer size too small")
}
if cap(e.frame) < int(seg.size) {
@@ -516,13 +507,13 @@ func (e *encoderVP8) Read(p []byte) (int, error) {
)
if s := C.vaUnmapBuffer(e.display, buffs[0]); s != C.VA_STATUS_SUCCESS {
return 0, fmt.Errorf("failed to unmap buffer: %s", C.GoString(C.vaErrorStr(s)))
return nil, fmt.Errorf("failed to unmap buffer: %s", C.GoString(C.vaErrorStr(s)))
}
// Destroy buffers
for _, b := range buffs {
if s := C.vaDestroyBuffer(e.display, b); s != C.VA_STATUS_SUCCESS {
return 0, fmt.Errorf("failed to destroy buffer: %s", C.GoString(C.vaErrorStr(s)))
return nil, fmt.Errorf("failed to destroy buffer: %s", C.GoString(C.vaErrorStr(s)))
}
}
@@ -545,11 +536,9 @@ func (e *encoderVP8) Read(p []byte) (int, error) {
e.picParam.ref_last_frame = e.picParam.reconstructed_frame
C.setRefreshLastFlagVP8(&e.picParam, 1)
n, err := mio.Copy(p, e.frame)
if err != nil {
e.buf = e.frame
}
return n, err
encoded := make([]byte, len(e.frame))
copy(encoded, e.frame)
return encoded, err
}
func (e *encoderVP8) SetBitRate(b int) error {

View File

@@ -67,7 +67,6 @@ const (
type encoderVP9 struct {
r video.Reader
buf []byte
frame []byte
fdDRI C.int
@@ -286,25 +285,17 @@ func newVP9Encoder(r video.Reader, p prop.Media, params ParamsVP9) (codec.ReadCl
return e, nil
}
func (e *encoderVP9) Read(p []byte) (int, error) {
func (e *encoderVP9) Read() ([]byte, error) {
e.mu.Lock()
defer e.mu.Unlock()
if e.closed {
return 0, io.EOF
}
if e.buf != nil {
n, err := mio.Copy(p, e.buf)
if err == nil {
e.buf = nil
}
return n, err
return nil, io.EOF
}
img, err := e.r.Read()
if err != nil {
return 0, err
return nil, err
}
yuvImg := img.(*image.YCbCr)
@@ -398,17 +389,17 @@ func (e *encoderVP9) Read(p []byte) (int, error) {
e.display, e.ctxID,
e.surfs[surfaceVP9Input],
); s != C.VA_STATUS_SUCCESS {
return 0, fmt.Errorf("failed to begin picture: %s", C.GoString(C.vaErrorStr(s)))
return nil, fmt.Errorf("failed to begin picture: %s", C.GoString(C.vaErrorStr(s)))
}
// Upload image
var vaImg C.VAImage
var rawBuf unsafe.Pointer
if s := C.vaDeriveImage(e.display, e.surfs[surfaceVP9Input], &vaImg); s != C.VA_STATUS_SUCCESS {
return 0, fmt.Errorf("failed to derive image: %s", C.GoString(C.vaErrorStr(s)))
return nil, fmt.Errorf("failed to derive image: %s", C.GoString(C.vaErrorStr(s)))
}
if s := C.vaMapBuffer(e.display, vaImg.buf, &rawBuf); s != C.VA_STATUS_SUCCESS {
return 0, fmt.Errorf("failed to map buffer: %s", C.GoString(C.vaErrorStr(s)))
return nil, fmt.Errorf("failed to map buffer: %s", C.GoString(C.vaErrorStr(s)))
}
// TODO: use vaImg.pitches to support padding
C.copyI420toNV12(
@@ -419,10 +410,10 @@ func (e *encoderVP9) Read(p []byte) (int, error) {
C.uint(len(yuvImg.Y)),
)
if s := C.vaUnmapBuffer(e.display, vaImg.buf); s != C.VA_STATUS_SUCCESS {
return 0, fmt.Errorf("failed to unmap buffer: %s", C.GoString(C.vaErrorStr(s)))
return nil, fmt.Errorf("failed to unmap buffer: %s", C.GoString(C.vaErrorStr(s)))
}
if s := C.vaDestroyImage(e.display, vaImg.image_id); s != C.VA_STATUS_SUCCESS {
return 0, fmt.Errorf("failed to destroy image: %s", C.GoString(C.vaErrorStr(s)))
return nil, fmt.Errorf("failed to destroy image: %s", C.GoString(C.vaErrorStr(s)))
}
if s := C.vaRenderPicture(
@@ -430,27 +421,27 @@ func (e *encoderVP9) Read(p []byte) (int, error) {
&buffs[1], // 0 is for output
C.int(len(buffs)-1),
); s != C.VA_STATUS_SUCCESS {
return 0, fmt.Errorf("failed to render picture: %s", C.GoString(C.vaErrorStr(s)))
return nil, fmt.Errorf("failed to render picture: %s", C.GoString(C.vaErrorStr(s)))
}
if s := C.vaEndPicture(
e.display, e.ctxID,
); s != C.VA_STATUS_SUCCESS {
return 0, fmt.Errorf("failed to end picture: %s", C.GoString(C.vaErrorStr(s)))
return nil, fmt.Errorf("failed to end picture: %s", C.GoString(C.vaErrorStr(s)))
}
// Load encoded data
if s := C.vaSyncSurface(e.display, e.picParam.reconstructed_frame); s != C.VA_STATUS_SUCCESS {
return 0, fmt.Errorf("failed to sync surface: %s", C.GoString(C.vaErrorStr(s)))
return nil, fmt.Errorf("failed to sync surface: %s", C.GoString(C.vaErrorStr(s)))
}
var surfStat C.VASurfaceStatus
if s := C.vaQuerySurfaceStatus(
e.display, e.picParam.reconstructed_frame, &surfStat,
); s != C.VA_STATUS_SUCCESS {
return 0, fmt.Errorf("failed to query surface status: %s", C.GoString(C.vaErrorStr(s)))
return nil, fmt.Errorf("failed to query surface status: %s", C.GoString(C.vaErrorStr(s)))
}
var seg *C.VACodedBufferSegment
if s := C.vaMapBufferSeg(e.display, buffs[0], &seg); s != C.VA_STATUS_SUCCESS {
return 0, fmt.Errorf("failed to map buffer: %s", C.GoString(C.vaErrorStr(s)))
return nil, fmt.Errorf("failed to map buffer: %s", C.GoString(C.vaErrorStr(s)))
}
if cap(e.frame) < int(seg.size) {
e.frame = make([]byte, int(seg.size))
@@ -480,11 +471,9 @@ func (e *encoderVP9) Read(p []byte) (int, error) {
e.slotCurr = 0
}
n, err := mio.Copy(p, e.frame)
if err != nil {
e.buf = e.frame
}
return n, err
encoded := make([]byte, len(e.frame))
copy(encoded, e.frame)
return encoded, err
}
func (e *encoderVP9) SetBitRate(b int) error {

View File

@@ -56,10 +56,8 @@ import (
"unsafe"
"github.com/pion/mediadevices/pkg/codec"
mio "github.com/pion/mediadevices/pkg/io"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
"github.com/pion/webrtc/v2"
)
type encoder struct {
@@ -68,7 +66,6 @@ type encoder struct {
cfg *C.vpx_codec_enc_cfg_t
r video.Reader
frameIndex int
buff []byte
tStart int
tLastFrame int
frame []byte
@@ -95,9 +92,9 @@ func NewVP8Params() (VP8Params, error) {
}, nil
}
// Name represents the codec name
func (p *VP8Params) Name() string {
return webrtc.VP8
// RTPCodec represents the codec metadata
func (p *VP8Params) RTPCodec() *codec.RTPCodec {
return codec.NewRTPVP8Codec(90000)
}
// BuildVideoEncoder builds VP8 encoder with given params
@@ -122,9 +119,9 @@ func NewVP9Params() (VP9Params, error) {
}, nil
}
// Name represents the codec name
func (p *VP9Params) Name() string {
return webrtc.VP9
// RTPCodec represents the codec metadata
func (p *VP9Params) RTPCodec() *codec.RTPCodec {
return codec.NewRTPVP9Codec(90000)
}
// BuildVideoEncoder builds VP9 encoder with given params
@@ -207,25 +204,17 @@ func newEncoder(r video.Reader, p prop.Media, params Params, codecIface *C.vpx_c
}, nil
}
func (e *encoder) Read(p []byte) (int, error) {
func (e *encoder) Read() ([]byte, error) {
e.mu.Lock()
defer e.mu.Unlock()
if e.closed {
return 0, io.EOF
}
if e.buff != nil {
n, err := mio.Copy(p, e.buff)
if err == nil {
e.buff = nil
}
return n, err
return nil, io.EOF
}
img, err := e.r.Read()
if err != nil {
return 0, err
return nil, err
}
yuvImg := img.(*image.YCbCr)
bounds := yuvImg.Bounds()
@@ -241,7 +230,7 @@ func (e *encoder) Read(p []byte) (int, error) {
if e.cfg.g_w != C.uint(width) || e.cfg.g_h != C.uint(height) {
e.cfg.g_w, e.cfg.g_h = C.uint(width), C.uint(height)
if ec := C.vpx_codec_enc_config_set(e.codec, e.cfg); ec != C.VPX_CODEC_OK {
return 0, fmt.Errorf("vpx_codec_enc_config_set failed (%d)", ec)
return nil, fmt.Errorf("vpx_codec_enc_config_set failed (%d)", ec)
}
e.raw.w, e.raw.h = C.uint(width), C.uint(height)
e.raw.r_w, e.raw.r_h = C.uint(width), C.uint(height)
@@ -254,7 +243,7 @@ func (e *encoder) Read(p []byte) (int, error) {
C.long(t-e.tStart), C.ulong(t-e.tLastFrame), C.long(flags), C.ulong(e.deadline),
(*C.uchar)(&yuvImg.Y[0]), (*C.uchar)(&yuvImg.Cb[0]), (*C.uchar)(&yuvImg.Cr[0]),
); ec != C.VPX_CODEC_OK {
return 0, fmt.Errorf("vpx_codec_encode failed (%d)", ec)
return nil, fmt.Errorf("vpx_codec_encode failed (%d)", ec)
}
e.frameIndex++
@@ -272,11 +261,10 @@ func (e *encoder) Read(p []byte) (int, error) {
e.frame = append(e.frame, encoded...)
}
}
n, err := mio.Copy(p, e.frame)
if err != nil {
e.buff = e.frame
}
return n, err
encoded := make([]byte, len(e.frame))
copy(encoded, e.frame)
return encoded, err
}
func (e *encoder) SetBitRate(b int) error {

View File

@@ -4,7 +4,6 @@ import (
"github.com/pion/mediadevices/pkg/codec"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
"github.com/pion/webrtc/v2"
)
// Params stores libx264 specific encoding parameters.
@@ -40,9 +39,9 @@ func NewParams() (Params, error) {
}, nil
}
// Name represents the codec name
func (p *Params) Name() string {
return webrtc.H264
// RTPCodec represents the codec metadata
func (p *Params) RTPCodec() *codec.RTPCodec {
return codec.NewRTPH264Codec(90000)
}
// BuildVideoEncoder builds x264 encoder with given params

View File

@@ -14,14 +14,12 @@ import (
"unsafe"
"github.com/pion/mediadevices/pkg/codec"
mio "github.com/pion/mediadevices/pkg/io"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
)
type encoder struct {
engine *C.Encoder
buff []byte
r video.Reader
mu sync.Mutex
closed bool
@@ -96,25 +94,17 @@ func newEncoder(r video.Reader, p prop.Media, params Params) (codec.ReadCloser,
return &e, nil
}
func (e *encoder) Read(p []byte) (int, error) {
func (e *encoder) Read() ([]byte, error) {
e.mu.Lock()
defer e.mu.Unlock()
if e.closed {
return 0, io.EOF
}
if e.buff != nil {
n, err := mio.Copy(p, e.buff)
if err == nil {
e.buff = nil
}
return n, err
return nil, io.EOF
}
img, err := e.r.Read()
if err != nil {
return 0, err
return nil, err
}
yuvImg := img.(*image.YCbCr)
@@ -127,15 +117,11 @@ func (e *encoder) Read(p []byte) (int, error) {
&rc,
)
if err := errFromC(rc); err != nil {
return 0, err
return nil, err
}
encoded := C.GoBytes(unsafe.Pointer(s.data), s.data_len)
n, err := mio.Copy(p, encoded)
if err != nil {
e.buff = encoded
}
return n, err
return encoded, err
}
func (e *encoder) SetBitRate(b int) error {

View File

@@ -8,7 +8,8 @@ import (
"errors"
"image"
"io"
"io/ioutil"
"os"
"path/filepath"
"sync"
"github.com/blackjack/webcam"
@@ -25,6 +26,36 @@ const (
var (
errReadTimeout = errors.New("read timeout")
errEmptyFrame = errors.New("empty frame")
// Reference: https://commons.wikimedia.org/wiki/File:Vector_Video_Standards2.svg
supportedResolutions = [][2]int{
{320, 240},
{640, 480},
{768, 576},
{800, 600},
{1024, 768},
{1280, 854},
{1280, 960},
{1280, 1024},
{1400, 1050},
{1600, 1200},
{2048, 1536},
{320, 200},
{800, 480},
{854, 480},
{1024, 600},
{1152, 768},
{1280, 720},
{1280, 768},
{1366, 768},
{1280, 800},
{1440, 900},
{1440, 960},
{1680, 1050},
{1920, 1080},
{2048, 1080},
{1920, 1200},
{2560, 1600},
}
)
// Camera implementation using v4l2
@@ -40,19 +71,38 @@ type camera struct {
}
func init() {
searchPath := "/dev/v4l/by-path/"
devices, err := ioutil.ReadDir(searchPath)
if err != nil {
// No v4l device.
return
}
for _, device := range devices {
cam := newCamera(searchPath + device.Name())
driver.GetManager().Register(cam, driver.Info{
Label: device.Name(),
DeviceType: driver.Camera,
})
discovered := make(map[string]struct{})
discover := func(pattern string) {
devices, err := filepath.Glob(pattern)
if err != nil {
// No v4l device.
return
}
for _, device := range devices {
label := filepath.Base(device)
reallink, err := os.Readlink(device)
if err != nil {
reallink = label
} else {
reallink = filepath.Base(reallink)
}
if _, ok := discovered[reallink]; ok {
continue
}
discovered[reallink] = struct{}{}
cam := newCamera(device)
driver.GetManager().Register(cam, driver.Info{
Label: label,
DeviceType: driver.Camera,
})
}
}
discover("/dev/v4l/by-path/*")
discover("/dev/video*")
}
func newCamera(path string) *camera {
@@ -83,6 +133,8 @@ func (c *camera) Open() error {
return err
}
// Late frames should be discarded. Buffering should be handled at a higher level.
cam.SetBufferCount(1)
c.cam = cam
return nil
}
@@ -185,13 +237,46 @@ func (c *camera) Properties() []prop.Media {
properties := make([]prop.Media, 0)
for format := range c.cam.GetSupportedFormats() {
for _, frameSize := range c.cam.GetSupportedFrameSizes(format) {
properties = append(properties, prop.Media{
Video: prop.Video{
Width: int(frameSize.MaxWidth),
Height: int(frameSize.MaxHeight),
FrameFormat: c.formats[format],
},
})
supportedFormat, ok := c.formats[format]
if !ok {
continue
}
if frameSize.StepWidth == 0 || frameSize.StepHeight == 0 {
properties = append(properties, prop.Media{
Video: prop.Video{
Width: int(frameSize.MaxWidth),
Height: int(frameSize.MaxHeight),
FrameFormat: supportedFormat,
},
})
} else {
// FIXME: we should probably use a custom data structure to capture all of the supported resolutions
for _, supportedResolution := range supportedResolutions {
minWidth, minHeight := int(frameSize.MinWidth), int(frameSize.MinHeight)
maxWidth, maxHeight := int(frameSize.MaxWidth), int(frameSize.MaxHeight)
stepWidth, stepHeight := int(frameSize.StepWidth), int(frameSize.StepHeight)
width, height := supportedResolution[0], supportedResolution[1]
if width < minWidth || width > maxWidth ||
height < minHeight || height > maxHeight {
continue
}
if (width-minWidth)%stepWidth != 0 ||
(height-minHeight)%stepHeight != 0 {
continue
}
properties = append(properties, prop.Media{
Video: prop.Video{
Width: width,
Height: height,
FrameFormat: supportedFormat,
},
})
}
}
}
}
return properties
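A step-wise frame-size range only yields a standard resolution when it sits inside the advertised [min, max] bounds and is reachable from the minimum in whole steps, which is what the loop above checks. The same test, pulled out into a hypothetical standalone helper for clarity:

```go
package main

import "fmt"

// fitsStepwise reports whether a target width/height is one of the sizes a
// step-wise V4L2 frame-size range can actually produce.
func fitsStepwise(w, h, minW, maxW, stepW, minH, maxH, stepH int) bool {
	if w < minW || w > maxW || h < minH || h > maxH {
		return false
	}
	return (w-minW)%stepW == 0 && (h-minH)%stepH == 0
}

func main() {
	// Example: a camera advertising 160x120 .. 1920x1080 in 8x8 steps.
	fmt.Println(fitsStepwise(1280, 720, 160, 1920, 8, 120, 1080, 8)) // true
	fmt.Println(fitsStepwise(1366, 768, 160, 1920, 8, 120, 1080, 8)) // false: 1366-160 isn't a multiple of 8
}
```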

pkg/io/audio/broadcast.go Normal file
View File

@@ -0,0 +1,76 @@
package audio
import (
"errors"
"github.com/pion/mediadevices/pkg/io"
"github.com/pion/mediadevices/pkg/wave"
)
var errEmptySource = errors.New("Source can't be nil")
// Broadcaster is a specialized audio broadcaster.
type Broadcaster struct {
ioBroadcaster *io.Broadcaster
}
type BroadcasterConfig struct {
Core *io.BroadcasterConfig
}
// NewBroadcaster creates a new broadcaster. Source is expected to drop chunks
// when any of the readers is slower than the source.
func NewBroadcaster(source Reader, config *BroadcasterConfig) *Broadcaster {
var coreConfig *io.BroadcasterConfig
if config != nil {
coreConfig = config.Core
}
broadcaster := io.NewBroadcaster(io.ReaderFunc(func() (interface{}, error) {
return source.Read()
}), coreConfig)
return &Broadcaster{broadcaster}
}
// NewReader creates a new reader. Each reader will retrieve the same data from the source.
// copyFn is used to copy the data from the source to individual readers. Broadcaster uses a small ring
// buffer; this means that slow readers might miss some data if they're really late and the data is no longer
// in the ring buffer.
func (broadcaster *Broadcaster) NewReader(copyChunk bool) Reader {
copyFn := func(src interface{}) interface{} { return src }
if copyChunk {
buffer := wave.NewBuffer()
copyFn = func(src interface{}) interface{} {
realSrc, _ := src.(wave.Audio)
buffer.StoreCopy(realSrc)
return buffer.Load()
}
}
reader := broadcaster.ioBroadcaster.NewReader(copyFn)
return ReaderFunc(func() (wave.Audio, error) {
data, err := reader.Read()
chunk, _ := data.(wave.Audio)
return chunk, err
})
}
// ReplaceSource replaces the underlying source. This operation is thread safe.
func (broadcaster *Broadcaster) ReplaceSource(source Reader) error {
return broadcaster.ioBroadcaster.ReplaceSource(io.ReaderFunc(func() (interface{}, error) {
return source.Read()
}))
}
// Source retrieves the underlying source. This operation is thread safe.
func (broadcaster *Broadcaster) Source() Reader {
source := broadcaster.ioBroadcaster.Source()
return ReaderFunc(func() (wave.Audio, error) {
data, err := source.Read()
img, _ := data.(wave.Audio)
return img, err
})
}
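A minimal fan-out sketch using only the APIs introduced here (one ReaderFunc source, a zero-copy reader, and a copying reader; the test that follows exercises the same paths):

```go
package main

import (
	"fmt"

	"github.com/pion/mediadevices/pkg/io/audio"
	"github.com/pion/mediadevices/pkg/wave"
)

func main() {
	chunk := wave.NewFloat32Interleaved(wave.ChunkInfo{Len: 8, Channels: 2, SamplingRate: 48000})
	source := audio.ReaderFunc(func() (wave.Audio, error) { return chunk, nil })

	broadcaster := audio.NewBroadcaster(source, nil)
	fast := broadcaster.NewReader(false) // shares the broadcaster's ring buffer
	safe := broadcaster.NewReader(true)  // copies each chunk into its own wave.Buffer

	a, _ := fast.Read()
	b, _ := safe.Read()
	fmt.Println(a.ChunkInfo(), b.ChunkInfo())
}
```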

View File

@@ -0,0 +1,54 @@
package audio
import (
"reflect"
"testing"
"github.com/pion/mediadevices/pkg/wave"
)
func TestBroadcast(t *testing.T) {
chunk := wave.NewFloat32Interleaved(wave.ChunkInfo{
Len: 8,
Channels: 2,
SamplingRate: 48000,
})
source := ReaderFunc(func() (wave.Audio, error) {
return chunk, nil
})
broadcaster := NewBroadcaster(source, nil)
readerWithoutCopy1 := broadcaster.NewReader(false)
readerWithoutCopy2 := broadcaster.NewReader(false)
actualWithoutCopy1, err := readerWithoutCopy1.Read()
if err != nil {
t.Fatal(err)
}
actualWithoutCopy2, err := readerWithoutCopy2.Read()
if err != nil {
t.Fatal(err)
}
if &actualWithoutCopy1.(*wave.Float32Interleaved).Data[0] != &actualWithoutCopy2.(*wave.Float32Interleaved).Data[0] {
t.Fatal("Expected underlying buffer for frame with copy to be the same from broadcaster's buffer")
}
if !reflect.DeepEqual(chunk, actualWithoutCopy1) {
t.Fatal("Expected actual frame without copy to be the same with the original")
}
readerWithCopy := broadcaster.NewReader(true)
actualWithCopy, err := readerWithCopy.Read()
if err != nil {
t.Fatal(err)
}
if &actualWithCopy.(*wave.Float32Interleaved).Data[0] == &actualWithoutCopy1.(*wave.Float32Interleaved).Data[0] {
t.Fatal("Expected underlying buffer for frame with copy to be different from broadcaster's buffer")
}
if !reflect.DeepEqual(chunk, actualWithCopy) {
t.Fatal("Expected actual frame without copy to be the same with the original")
}
}

pkg/io/audio/detect.go Normal file
View File

@@ -0,0 +1,46 @@
package audio
import (
"time"
"github.com/pion/mediadevices/pkg/prop"
"github.com/pion/mediadevices/pkg/wave"
)
// DetectChanges will detect chunk and audio property changes. For audio property detection,
// since it's time-related, the given interval is used to determine the sample rate.
func DetectChanges(interval time.Duration, onChange func(prop.Media)) TransformFunc {
return func(r Reader) Reader {
var currentProp prop.Media
var chunkCount uint
return ReaderFunc(func() (wave.Audio, error) {
var dirty bool
chunk, err := r.Read()
if err != nil {
return nil, err
}
info := chunk.ChunkInfo()
if currentProp.ChannelCount != info.Channels {
currentProp.ChannelCount = info.Channels
dirty = true
}
if currentProp.SampleRate != info.SamplingRate {
currentProp.SampleRate = info.SamplingRate
dirty = true
}
// TODO: Also detect sample format changes?
// TODO: Add audio detect changes. As of now, there's no useful property to track.
if dirty {
onChange(currentProp)
}
chunkCount++
return chunk, nil
})
}
}
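Wired into a pipeline, DetectChanges wraps an existing Reader and fires the callback whenever the chunk layout changes; a sketch with a stand-in source (a real pipeline would read from a microphone driver):

```go
package main

import (
	"fmt"
	"time"

	"github.com/pion/mediadevices/pkg/io/audio"
	"github.com/pion/mediadevices/pkg/prop"
	"github.com/pion/mediadevices/pkg/wave"
)

func main() {
	var src audio.Reader = audio.ReaderFunc(func() (wave.Audio, error) {
		return wave.NewInt16Interleaved(wave.ChunkInfo{Len: 480, Channels: 1, SamplingRate: 48000}), nil
	})

	src = audio.DetectChanges(time.Second, func(p prop.Media) {
		fmt.Printf("audio props changed: %d ch @ %d Hz\n", p.ChannelCount, p.SampleRate)
	})(src)

	// The first Read reports the initial properties through the callback.
	if _, err := src.Read(); err != nil {
		panic(err)
	}
}
```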

View File

@@ -0,0 +1,77 @@
package audio
import (
"reflect"
"testing"
"time"
"github.com/pion/mediadevices/pkg/prop"
"github.com/pion/mediadevices/pkg/wave"
)
func TestDetectChanges(t *testing.T) {
buildSource := func(p prop.Media) (Reader, func(prop.Media)) {
return ReaderFunc(func() (wave.Audio, error) {
return wave.NewFloat32Interleaved(wave.ChunkInfo{
Len: 0,
Channels: p.ChannelCount,
SamplingRate: p.SampleRate,
}), nil
}), func(newProp prop.Media) {
p = newProp
}
}
t.Run("OnChangeCalledBeforeFirstFrame", func(t *testing.T) {
var detectBeforeFirstChunk bool
var expected prop.Media
var actual prop.Media
expected.ChannelCount = 2
expected.SampleRate = 48000
src, _ := buildSource(expected)
src = DetectChanges(time.Second, func(p prop.Media) {
actual = p
detectBeforeFirstChunk = true
})(src)
_, err := src.Read()
if err != nil {
t.Fatal(err)
}
if !detectBeforeFirstChunk {
t.Fatal("on change callback should have called before first chunk")
}
if !reflect.DeepEqual(actual, expected) {
t.Fatalf("Received an unexpected prop\nExpected:\n%v\nActual:\n%v\n", expected, actual)
}
})
t.Run("DetectChangesOnEveryUpdate", func(t *testing.T) {
var expected prop.Media
var actual prop.Media
expected.ChannelCount = 2
expected.SampleRate = 48000
src, update := buildSource(expected)
src = DetectChanges(time.Second, func(p prop.Media) {
actual = p
})(src)
for channelCount := 1; channelCount < 8; channelCount++ {
for sampleRate := 12000; sampleRate <= 48000; sampleRate += 4000 {
expected.ChannelCount = channelCount
expected.SampleRate = sampleRate
update(expected)
_, err := src.Read()
if err != nil {
t.Fatal(err)
}
if !reflect.DeepEqual(actual, expected) {
t.Fatalf("Received an unexpected prop\nExpected:\n%v\nActual:\n%v\n", expected, actual)
}
}
}
})
}

pkg/io/broadcast_test.go Normal file
View File

@@ -0,0 +1,148 @@
package io
import (
"fmt"
"runtime"
"sync"
"sync/atomic"
"testing"
"time"
)
func TestBroadcast(t *testing.T) {
// https://github.com/pion/mediadevices/issues/198
if runtime.GOOS == "darwin" {
t.Skip("Skipping because Darwin CI is not reliable for timing related tests.")
}
frames := make([]int, 5*30) // 5 seconds worth of frames
for i := range frames {
frames[i] = i
}
routinePauseConds := []struct {
src bool
dst bool
expectedFPS float64
expectedDrop float64
}{
{
src: false,
dst: false,
expectedFPS: 30,
},
{
src: true,
dst: false,
expectedFPS: 20,
expectedDrop: 10,
},
{
src: false,
dst: true,
expectedFPS: 20,
expectedDrop: 10,
},
}
for _, pauseCond := range routinePauseConds {
pauseCond := pauseCond
t.Run(fmt.Sprintf("SrcPause-%v/DstPause-%v", pauseCond.src, pauseCond.dst), func(t *testing.T) {
for n := 1; n <= 256; n *= 16 {
n := n
t.Run(fmt.Sprintf("Readers-%d", n), func(t *testing.T) {
var src Reader
interval := time.NewTicker(time.Millisecond * 33) // 30 fps
defer interval.Stop()
frameCount := 0
frameSent := 0
lastSend := time.Now()
src = ReaderFunc(func() (interface{}, error) {
if pauseCond.src && frameSent == 30 {
time.Sleep(time.Second)
}
<-interval.C
now := time.Now()
if interval := now.Sub(lastSend); interval > time.Millisecond*33*3/2 {
// Source reader should drop frames to catch up to the latest frame.
drop := int(interval/(time.Millisecond*33)) - 1
frameCount += drop
t.Logf("Skipped %d frames", drop)
}
lastSend = now
frame := frames[frameCount]
frameCount++
frameSent++
return frame, nil
})
broadcaster := NewBroadcaster(src, nil)
var done uint32
duration := time.Second * 3
fpsChan := make(chan []float64)
var wg sync.WaitGroup
wg.Add(n)
for i := 0; i < n; i++ {
go func() {
reader := broadcaster.NewReader(func(src interface{}) interface{} { return src })
count := 0
lastFrameCount := -1
droppedFrames := 0
wg.Done()
wg.Wait()
for atomic.LoadUint32(&done) == 0 {
if pauseCond.dst && count == 30 {
time.Sleep(time.Second)
}
frame, err := reader.Read()
if err != nil {
t.Error(err)
}
frameCount := frame.(int)
droppedFrames += (frameCount - lastFrameCount - 1)
lastFrameCount = frameCount
count++
}
fps := float64(count) / duration.Seconds()
if fps < pauseCond.expectedFPS-2 || fps > pauseCond.expectedFPS+2 {
t.Fatal("Unexpected average FPS")
}
droppedFramesPerSecond := float64(droppedFrames) / duration.Seconds()
if droppedFramesPerSecond < pauseCond.expectedDrop-2 || droppedFramesPerSecond > pauseCond.expectedDrop+2 {
t.Fatal("Unexpected drop count")
}
fpsChan <- []float64{fps, droppedFramesPerSecond, float64(lastFrameCount)}
}()
}
time.Sleep(duration)
atomic.StoreUint32(&done, 1)
var fpsAvg float64
var droppedFramesPerSecondAvg float64
var lastFrameCountAvg float64
var count int
for metric := range fpsChan {
fps, droppedFramesPerSecond, lastFrameCount := metric[0], metric[1], metric[2]
fpsAvg += fps
droppedFramesPerSecondAvg += droppedFramesPerSecond
lastFrameCountAvg += lastFrameCount
count++
if count == n {
break
}
}
t.Log("Average FPS :", fpsAvg/float64(n))
t.Log("Average dropped frames per second:", droppedFramesPerSecondAvg/float64(n))
t.Log("Last frame count (src) :", frameCount)
t.Log("Average last frame count (dst) :", lastFrameCountAvg/float64(n))
})
}
})
}
}

View File

@@ -1,11 +0,0 @@
package io
// Copy copies data from src to dst. If dst is not big enough, return an
// InsufficientBufferError.
func Copy(dst, src []byte) (n int, err error) {
if len(dst) < len(src) {
return 0, &InsufficientBufferError{len(src)}
}
return copy(dst, src), nil
}

View File

@@ -1,45 +0,0 @@
package io
import (
"log"
"testing"
)
func TestCopy(t *testing.T) {
var dst []byte
src := make([]byte, 4)
n, err := Copy(dst, src)
if err == nil {
t.Fatal("expected err to be non-nill")
}
if n != 0 {
t.Fatalf("expected n to be 0, but got %d", n)
}
e, ok := err.(*InsufficientBufferError)
if !ok {
t.Fatalf("expected error to be InsufficientBufferError")
}
if e.RequiredSize != len(src) {
t.Fatalf("expected required size to be %d, but got %d", len(src), e.RequiredSize)
}
dst = make([]byte, 2*e.RequiredSize)
n, err = Copy(dst, src)
if err != nil {
t.Fatalf("expected to not get an error after expanding the buffer")
}
if n != len(src) {
t.Fatalf("expected n to be %d, but got %d", len(src), n)
}
for i := 0; i < len(src); i++ {
if src[i] != dst[i] {
log.Fatalf("expected value at %d to be %d, but got %d", i, src[i], dst[i])
}
}
}

View File

@@ -1,187 +1,49 @@
package video
import (
"fmt"
"image"
"runtime"
"sync"
"sync/atomic"
"reflect"
"testing"
"time"
)
func BenchmarkBroadcast(b *testing.B) {
var src Reader
img := image.NewRGBA(image.Rect(0, 0, 1920, 1080))
interval := time.NewTicker(time.Millisecond * 33) // 30 fps
defer interval.Stop()
src = ReaderFunc(func() (image.Image, error) {
<-interval.C
func TestBroadcast(t *testing.T) {
resolution := image.Rect(0, 0, 1920, 1080)
img := image.NewGray(resolution)
source := ReaderFunc(func() (image.Image, error) {
return img, nil
})
for n := 1; n <= 4096; n *= 16 {
n := n
broadcaster := NewBroadcaster(source, nil)
readerWithoutCopy1 := broadcaster.NewReader(false)
readerWithoutCopy2 := broadcaster.NewReader(false)
actualWithoutCopy1, err := readerWithoutCopy1.Read()
if err != nil {
t.Fatal(err)
}
actualWithoutCopy2, err := readerWithoutCopy2.Read()
if err != nil {
t.Fatal(err)
}
b.Run(fmt.Sprintf("Readers-%d", n), func(b *testing.B) {
b.SetParallelism(n)
broadcaster := NewBroadcaster(src, nil)
b.RunParallel(func(pb *testing.PB) {
reader := broadcaster.NewReader(false)
for pb.Next() {
reader.Read()
}
})
})
}
}
func TestBroadcast(t *testing.T) {
// https://github.com/pion/mediadevices/issues/198
if runtime.GOOS == "darwin" {
t.Skip("Skipping because Darwin CI is not reliable for timing related tests.")
}
frames := make([]image.Image, 5*30) // 5 seconds worth of frames
resolution := image.Rect(0, 0, 1920, 1080)
for i := range frames {
rgba := image.NewRGBA(resolution)
rgba.Pix[0] = uint8(i >> 24)
rgba.Pix[1] = uint8(i >> 16)
rgba.Pix[2] = uint8(i >> 8)
rgba.Pix[3] = uint8(i)
frames[i] = rgba
}
routinePauseConds := []struct {
src bool
dst bool
expectedFPS float64
expectedDrop float64
}{
{
src: false,
dst: false,
expectedFPS: 30,
},
{
src: true,
dst: false,
expectedFPS: 20,
expectedDrop: 10,
},
{
src: false,
dst: true,
expectedFPS: 20,
expectedDrop: 10,
},
}
for _, pauseCond := range routinePauseConds {
pauseCond := pauseCond
t.Run(fmt.Sprintf("SrcPause-%v/DstPause-%v", pauseCond.src, pauseCond.dst), func(t *testing.T) {
for n := 1; n <= 256; n *= 16 {
n := n
t.Run(fmt.Sprintf("Readers-%d", n), func(t *testing.T) {
var src Reader
interval := time.NewTicker(time.Millisecond * 33) // 30 fps
defer interval.Stop()
frameCount := 0
frameSent := 0
lastSend := time.Now()
src = ReaderFunc(func() (image.Image, error) {
if pauseCond.src && frameSent == 30 {
time.Sleep(time.Second)
}
<-interval.C
now := time.Now()
if interval := now.Sub(lastSend); interval > time.Millisecond*33*3/2 {
// Source reader should drop frames to catch up to the latest frame.
drop := int(interval/(time.Millisecond*33)) - 1
frameCount += drop
t.Logf("Skipped %d frames", drop)
}
lastSend = now
frame := frames[frameCount]
frameCount++
frameSent++
return frame, nil
})
broadcaster := NewBroadcaster(src, nil)
var done uint32
duration := time.Second * 3
fpsChan := make(chan []float64)
var wg sync.WaitGroup
wg.Add(n)
for i := 0; i < n; i++ {
go func() {
reader := broadcaster.NewReader(false)
count := 0
lastFrameCount := -1
droppedFrames := 0
wg.Done()
wg.Wait()
for atomic.LoadUint32(&done) == 0 {
if pauseCond.dst && count == 30 {
time.Sleep(time.Second)
}
frame, err := reader.Read()
if err != nil {
t.Error(err)
}
rgba := frame.(*image.RGBA)
var frameCount int
frameCount |= int(rgba.Pix[0]) << 24
frameCount |= int(rgba.Pix[1]) << 16
frameCount |= int(rgba.Pix[2]) << 8
frameCount |= int(rgba.Pix[3])
droppedFrames += (frameCount - lastFrameCount - 1)
lastFrameCount = frameCount
count++
}
fps := float64(count) / duration.Seconds()
if fps < pauseCond.expectedFPS-2 || fps > pauseCond.expectedFPS+2 {
t.Fatal("Unexpected average FPS")
}
droppedFramesPerSecond := float64(droppedFrames) / duration.Seconds()
if droppedFramesPerSecond < pauseCond.expectedDrop-2 || droppedFramesPerSecond > pauseCond.expectedDrop+2 {
t.Fatal("Unexpected drop count")
}
fpsChan <- []float64{fps, droppedFramesPerSecond, float64(lastFrameCount)}
}()
}
time.Sleep(duration)
atomic.StoreUint32(&done, 1)
var fpsAvg float64
var droppedFramesPerSecondAvg float64
var lastFrameCountAvg float64
var count int
for metric := range fpsChan {
fps, droppedFramesPerSecond, lastFrameCount := metric[0], metric[1], metric[2]
fpsAvg += fps
droppedFramesPerSecondAvg += droppedFramesPerSecond
lastFrameCountAvg += lastFrameCount
count++
if count == n {
break
}
}
t.Log("Average FPS :", fpsAvg/float64(n))
t.Log("Average dropped frames per second:", droppedFramesPerSecondAvg/float64(n))
t.Log("Last frame count (src) :", frameCount)
t.Log("Average last frame count (dst) :", lastFrameCountAvg/float64(n))
})
}
})
if &actualWithoutCopy1.(*image.Gray).Pix[0] != &actualWithoutCopy2.(*image.Gray).Pix[0] {
t.Fatal("Expected underlying buffer for frame with copy to be the same from broadcaster's buffer")
}
if !reflect.DeepEqual(img, actualWithoutCopy1) {
t.Fatal("Expected actual frame without copy to be the same with the original")
}
readerWithCopy := broadcaster.NewReader(true)
actualWithCopy, err := readerWithCopy.Read()
if err != nil {
t.Fatal(err)
}
if &actualWithCopy.(*image.Gray).Pix[0] == &actualWithoutCopy1.(*image.Gray).Pix[0] {
t.Fatal("Expected underlying buffer for frame with copy to be different from broadcaster's buffer")
}
if !reflect.DeepEqual(img, actualWithCopy) {
t.Fatal("Expected actual frame without copy to be the same with the original")
}
}

pkg/wave/buffer.go Normal file
View File

@@ -0,0 +1,149 @@
package wave
import "fmt"
var (
errUnsupportedFormat = fmt.Errorf("Unsupported format")
)
// Buffer is a buffer that can store any audio format.
type Buffer struct {
// TODO: Probably standardize the audio formats so that we don't need to have the following different types
// and duplicated codes for each type
bufferFloat32Interleaved []float32
bufferFloat32NonInterleaved [][]float32
bufferInt16Interleaved []int16
bufferInt16NonInterleaved [][]int16
tmp Audio
}
// NewBuffer creates a new Buffer instance
func NewBuffer() *Buffer {
return &Buffer{}
}
// Load returns the currently stored Audio
func (buff *Buffer) Load() Audio {
return buff.tmp
}
// StoreCopy makes a copy of src and stores it. StoreCopy reuses as much memory as it can from previous
// copies. For example, if StoreCopy is given an audio chunk with the same format as the previous call,
// it will not allocate extra memory and will only copy the content from src into the existing buffer.
func (buff *Buffer) StoreCopy(src Audio) {
switch src := src.(type) {
case *Float32Interleaved:
clone, ok := buff.tmp.(*Float32Interleaved)
if ok {
*clone = *src
} else {
copied := *src
clone = &copied
}
neededSize := len(src.Data)
if len(buff.bufferFloat32Interleaved) < neededSize {
if cap(buff.bufferFloat32Interleaved) >= neededSize {
buff.bufferFloat32Interleaved = buff.bufferFloat32Interleaved[:neededSize]
} else {
buff.bufferFloat32Interleaved = make([]float32, neededSize)
}
}
copy(buff.bufferFloat32Interleaved, src.Data)
clone.Data = buff.bufferFloat32Interleaved
buff.tmp = clone
case *Float32NonInterleaved:
clone, ok := buff.tmp.(*Float32NonInterleaved)
if ok {
*clone = *src
} else {
copied := *src
clone = &copied
}
neededSize := len(src.Data)
if len(buff.bufferFloat32NonInterleaved) < neededSize {
if cap(buff.bufferFloat32NonInterleaved) >= neededSize {
buff.bufferFloat32NonInterleaved = buff.bufferFloat32NonInterleaved[:neededSize]
} else {
buff.bufferFloat32NonInterleaved = make([][]float32, neededSize)
}
}
for i := range src.Data {
neededSize := len(src.Data[i])
if len(buff.bufferFloat32NonInterleaved[i]) < neededSize {
if cap(buff.bufferFloat32NonInterleaved[i]) >= neededSize {
buff.bufferFloat32NonInterleaved[i] = buff.bufferFloat32NonInterleaved[i][:neededSize]
} else {
buff.bufferFloat32NonInterleaved[i] = make([]float32, neededSize)
}
}
copy(buff.bufferFloat32NonInterleaved[i], src.Data[i])
}
clone.Data = buff.bufferFloat32NonInterleaved
buff.tmp = clone
case *Int16Interleaved:
clone, ok := buff.tmp.(*Int16Interleaved)
if ok {
*clone = *src
} else {
copied := *src
clone = &copied
}
neededSize := len(src.Data)
if len(buff.bufferInt16Interleaved) < neededSize {
if cap(buff.bufferInt16Interleaved) >= neededSize {
buff.bufferInt16Interleaved = buff.bufferInt16Interleaved[:neededSize]
} else {
buff.bufferInt16Interleaved = make([]int16, neededSize)
}
}
copy(buff.bufferInt16Interleaved, src.Data)
clone.Data = buff.bufferInt16Interleaved
buff.tmp = clone
case *Int16NonInterleaved:
clone, ok := buff.tmp.(*Int16NonInterleaved)
if ok {
*clone = *src
} else {
copied := *src
clone = &copied
}
neededSize := len(src.Data)
if len(buff.bufferInt16NonInterleaved) < neededSize {
if cap(buff.bufferInt16NonInterleaved) >= neededSize {
buff.bufferInt16NonInterleaved = buff.bufferInt16NonInterleaved[:neededSize]
} else {
buff.bufferInt16NonInterleaved = make([][]int16, neededSize)
}
}
for i := range src.Data {
neededSize := len(src.Data[i])
if len(buff.bufferInt16NonInterleaved[i]) < neededSize {
if cap(buff.bufferInt16NonInterleaved[i]) >= neededSize {
buff.bufferInt16NonInterleaved[i] = buff.bufferInt16NonInterleaved[i][:neededSize]
} else {
buff.bufferInt16NonInterleaved[i] = make([]int16, neededSize)
}
}
copy(buff.bufferInt16NonInterleaved[i], src.Data[i])
}
clone.Data = buff.bufferInt16NonInterleaved
buff.tmp = clone
default:
// TODO: Should have a routine to convert any format to one of the supported formats above
panic(errUnsupportedFormat)
}
}
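A Buffer is meant to live as long as its reader so that repeated StoreCopy calls can keep reusing one backing allocation per format; a short usage sketch (same APIs as the test that follows):

```go
package main

import (
	"fmt"

	"github.com/pion/mediadevices/pkg/wave"
)

func main() {
	buffer := wave.NewBuffer()
	chunk := wave.NewInt16Interleaved(wave.ChunkInfo{Len: 480, Channels: 2, SamplingRate: 48000})

	buffer.StoreCopy(chunk)   // deep-copies chunk into the buffer's own memory
	snapshot := buffer.Load() // safe to hold on to even if chunk is reused afterwards

	fmt.Println(snapshot.ChunkInfo())
}
```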

pkg/wave/buffer_test.go Normal file
View File

@@ -0,0 +1,129 @@
package wave
import (
"errors"
"fmt"
"reflect"
"testing"
)
var (
errIdenticalAddress = errors.New("Cloned audio has the same memory address with the original audio")
)
func TestBufferStoreCopyAndLoad(t *testing.T) {
chunkInfo := ChunkInfo{
Len: 4,
Channels: 2,
SamplingRate: 48000,
}
testCases := map[string]struct {
New func() EditableAudio
Update func(EditableAudio)
Validate func(*testing.T, Audio, Audio)
}{
"Float32Interleaved": {
New: func() EditableAudio {
return NewFloat32Interleaved(chunkInfo)
},
Update: func(src EditableAudio) {
src.Set(0, 0, Float32Sample(1))
},
Validate: func(t *testing.T, original Audio, clone Audio) {
ok := reflect.ValueOf(original.(*Float32Interleaved).Data).Pointer() != reflect.ValueOf(clone.(*Float32Interleaved).Data).Pointer()
if !ok {
t.Error(errIdenticalAddress)
}
},
},
"Float32NonInterleaved": {
New: func() EditableAudio {
return NewFloat32NonInterleaved(chunkInfo)
},
Update: func(src EditableAudio) {
src.Set(0, 0, Float32Sample(1))
},
Validate: func(t *testing.T, original Audio, clone Audio) {
originalReal := original.(*Float32NonInterleaved)
cloneReal := clone.(*Float32NonInterleaved)
if reflect.ValueOf(originalReal.Data).Pointer() == reflect.ValueOf(cloneReal.Data).Pointer() {
t.Error(errIdenticalAddress)
}
for i := range cloneReal.Data {
if reflect.ValueOf(originalReal.Data[i]).Pointer() == reflect.ValueOf(cloneReal.Data[i]).Pointer() {
err := fmt.Errorf("Channel %d memory address should be different", i)
t.Errorf("%v: %w", errIdenticalAddress, err)
}
}
},
},
"Int16Interleaved": {
New: func() EditableAudio {
return NewInt16Interleaved(chunkInfo)
},
Update: func(src EditableAudio) {
src.Set(1, 1, Int16Sample(2))
},
Validate: func(t *testing.T, original Audio, clone Audio) {
ok := reflect.ValueOf(original.(*Int16Interleaved).Data).Pointer() != reflect.ValueOf(clone.(*Int16Interleaved).Data).Pointer()
if !ok {
t.Error(errIdenticalAddress)
}
},
},
"Int16NonInterleaved": {
New: func() EditableAudio {
return NewInt16NonInterleaved(chunkInfo)
},
Update: func(src EditableAudio) {
src.Set(1, 1, Int16Sample(2))
},
Validate: func(t *testing.T, original Audio, clone Audio) {
originalReal := original.(*Int16NonInterleaved)
cloneReal := clone.(*Int16NonInterleaved)
if reflect.ValueOf(originalReal.Data).Pointer() == reflect.ValueOf(cloneReal.Data).Pointer() {
t.Error(errIdenticalAddress)
}
for i := range cloneReal.Data {
if reflect.ValueOf(originalReal.Data[i]).Pointer() == reflect.ValueOf(cloneReal.Data[i]).Pointer() {
err := fmt.Errorf("Channel %d memory address should be different", i)
t.Errorf("%v: %w", errIdenticalAddress, err)
}
}
},
},
}
buffer := NewBuffer()
for name, testCase := range testCases {
// Since the test also wants to make sure that Buffer can convert from one type to another,
// t.Run is not ideal since it'll run the cases separately
t.Log("Testing", name)
src := testCase.New()
src.Set(0, 0, Int16Sample(1))
buffer.StoreCopy(src)
testCase.Validate(t, src, buffer.Load())
if !reflect.DeepEqual(buffer.Load(), src) {
t.Fatalf(`Expected the copied audio chunk to be identical with the source
Expected:
%v
Actual:
%v
`, src, buffer.Load())
}
testCase.Update(src)
buffer.StoreCopy(src)
if !reflect.DeepEqual(buffer.Load(), src) {
t.Fatal("Expected the copied audio chunk to be identical with the source after an update in source")
}
}
}

View File

@@ -7,7 +7,6 @@ import (
"github.com/pion/mediadevices/pkg/codec"
"github.com/pion/mediadevices/pkg/driver"
mio "github.com/pion/mediadevices/pkg/io"
"github.com/pion/webrtc/v2"
"github.com/pion/webrtc/v2/pkg/media"
)
@@ -18,6 +17,7 @@ type Tracker interface {
Track() *webrtc.Track
LocalTrack() LocalTrack
Stop()
Kind() MediaDeviceType
// OnEnded registers a handler to receive an error from the media stream track.
// If the error is already occured before registering, the handler will be
// immediately called.
@@ -41,12 +41,14 @@ type track struct {
err error
mu sync.Mutex
endOnce sync.Once
kind MediaDeviceType
}
func newTrack(opts *MediaDevicesOptions, d driver.Driver, constraints MediaTrackConstraints) (*track, error) {
var encoderBuilders []encoderBuilder
var rtpCodecs []*webrtc.RTPCodec
var buildSampler func(t LocalTrack) samplerFunc
var kind MediaDeviceType
var err error
err = d.Open()
@@ -56,10 +58,12 @@ func newTrack(opts *MediaDevicesOptions, d driver.Driver, constraints MediaTrack
switch r := d.(type) {
case driver.VideoRecorder:
kind = VideoInput
rtpCodecs = opts.codecs[webrtc.RTPCodecTypeVideo]
buildSampler = newVideoSampler
encoderBuilders, err = newVideoEncoderBuilders(r, constraints)
case driver.AudioRecorder:
kind = AudioInput
rtpCodecs = opts.codecs[webrtc.RTPCodecTypeAudio]
buildSampler = func(t LocalTrack) samplerFunc {
return newAudioSampler(t, constraints.selectedMedia.Latency)
@@ -108,6 +112,7 @@ func newTrack(opts *MediaDevicesOptions, d driver.Driver, constraints MediaTrack
sample: buildSampler(localTrack),
d: d,
encoder: encoder,
kind: kind,
}
go t.start()
return &t, nil
@@ -117,6 +122,11 @@ func newTrack(opts *MediaDevicesOptions, d driver.Driver, constraints MediaTrack
return nil, errors.New("newTrack: failed to find a matching codec")
}
// Kind returns track's kind
func (t *track) Kind() MediaDeviceType {
return t.kind
}
// OnEnded sets an error handler. When a track has been created and started, if an
// error occurs, handler will get called with the error given to the parameter.
func (t *track) OnEnded(handler func(error)) {
@@ -149,22 +159,14 @@ func (t *track) onError(err error) {
// start starts the data flow from the driver all the way to the localTrack
func (t *track) start() {
var n int
var err error
buff := make([]byte, 1024)
for {
n, err = t.encoder.Read(buff)
buff, err := t.encoder.Read()
if err != nil {
if e, ok := err.(*mio.InsufficientBufferError); ok {
buff = make([]byte, 2*e.RequiredSize)
continue
}
t.onError(err)
return
}
if err := t.sample(buff[:n]); err != nil {
if err := t.sample(buff); err != nil {
t.onError(err)
return
}
@@ -207,7 +209,7 @@ func newVideoEncoderBuilders(vr driver.VideoRecorder, constraints MediaTrackCons
encoderBuilders := make([]encoderBuilder, len(constraints.VideoEncoderBuilders))
for i, b := range constraints.VideoEncoderBuilders {
encoderBuilders[i].name = b.Name()
encoderBuilders[i].name = b.RTPCodec().Name
encoderBuilders[i].build = func() (codec.ReadCloser, error) {
return b.BuildVideoEncoder(r, constraints.selectedMedia)
}
@@ -229,7 +231,7 @@ func newAudioEncoderBuilders(ar driver.AudioRecorder, constraints MediaTrackCons
encoderBuilders := make([]encoderBuilder, len(constraints.AudioEncoderBuilders))
for i, b := range constraints.AudioEncoderBuilders {
encoderBuilders[i].name = b.Name()
encoderBuilders[i].name = b.RTPCodec().Name
encoderBuilders[i].build = func() (codec.ReadCloser, error) {
return b.BuildAudioEncoder(r, constraints.selectedMedia)
}