Compare commits

..

1 Commit

Author SHA1 Message Date
Atsushi Watanabe
bb7611783d codec/vpx: support temporal layer 2020-05-31 03:14:12 +09:00
123 changed files with 1708 additions and 6114 deletions


@@ -8,17 +8,17 @@ on:
- master
jobs:
build-linux:
build:
runs-on: ubuntu-latest
strategy:
matrix:
go: [ '1.15', '1.14' ]
name: Linux Go ${{ matrix.go }}
go: [ '1.14', '1.13' ]
name: Go ${{ matrix.go }}
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Setup Go
uses: actions/setup-go@v2
uses: actions/setup-go@v1
with:
go-version: ${{ matrix.go }}
- name: Install dependencies
@@ -30,52 +30,18 @@ jobs:
libvpx-dev \
libx264-dev
- name: go vet
run: go vet $(go list ./... | grep -v mmal)
run: go vet ./...
- name: go build
run: go build $(go list ./... | grep -v mmal)
run: go build ./...
- name: go build without CGO
run: go build . pkg/...
env:
CGO_ENABLED: 0
- name: go test
run: go test -v -race -coverprofile=coverage.txt -covermode=atomic $(go list ./... | grep -v mmal)
- uses: codecov/codecov-action@v1
if: matrix.go == '1.15'
- name: go test without CGO
run: go test . pkg/... -v
env:
CGO_ENABLED: 0
build-darwin:
runs-on: macos-latest
strategy:
matrix:
go: [ '1.15', '1.14' ]
name: Darwin Go ${{ matrix.go }}
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Setup Go
uses: actions/setup-go@v2
with:
go-version: ${{ matrix.go }}
- name: Install dependencies
run: |
brew install \
pkg-config \
opus \
libvpx \
x264
- name: go vet
run: go vet $(go list ./... | grep -v mmal)
- name: go build
run: go build $(go list ./... | grep -v mmal)
- name: go build without CGO
run: go build . pkg/...
env:
CGO_ENABLED: 0
- name: go test
run: go test -v -race $(go list ./... | grep -v mmal)
run: go test ./... -v -race
- name: go test without CGO
run: go test . pkg/... -v
env:
CGO_ENABLED: 0
#- name: golint
# run: go lint ./...

CODEOWNERS

@@ -0,0 +1 @@
* @lherman-cs @at-wat


@@ -1,6 +1,6 @@
MIT License
Copyright (c) 2019-2020 Pion
Copyright (c) 2019 Pion
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal


@@ -1,17 +1,6 @@
<h1 align="center">
<br>
Pion MediaDevices
<br>
</h1>
<h4 align="center">Go implementation of the <a href="https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices">MediaDevices</a> API</h4>
<p align="center">
<a href="https://pion.ly/slack"><img src="https://img.shields.io/badge/join-us%20on%20slack-gray.svg?longCache=true&logo=slack&colorB=brightgreen" alt="Slack Widget"></a>
<a href="https://github.com/pion/mediadevices/actions"><img src="https://github.com/pion/mediadevices/workflows/CI/badge.svg?branch=master" alt="Build status"></a>
<a href="https://pkg.go.dev/github.com/pion/mediadevices"><img src="https://godoc.org/github.com/pion/mediadevices?status.svg" alt="GoDoc"></a>
<a href="https://codecov.io/gh/pion/mediadevices"><img src="https://codecov.io/gh/pion/mediadevices/branch/master/graph/badge.svg" alt="Coverage Status"></a>
<a href="LICENSE"><img src="https://img.shields.io/badge/License-MIT-yellow.svg" alt="License: MIT"></a>
</p>
<br>
# mediadevices
Go implementation of the [MediaDevices](https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices) API.
![](img/demo.gif)
@@ -19,7 +8,7 @@
| Interface | Linux | Mac | Windows |
| :--------: | :---: | :-: | :-----: |
| Camera | ✔️ | | ✔️ |
| Camera | ✔️ | | ✔️ |
| Microphone | ✔️ | ✖️ | ✔️ |
| Screen | ✔️ | ✖️ | ✖️ |
@@ -28,15 +17,15 @@
| OS | Library/Interface |
| :-----: | :---------------------------------------------------------------------: |
| Linux | [Video4Linux](https://en.wikipedia.org/wiki/Video4Linux) |
| Mac | [AVFoundation](https://developer.apple.com/av-foundation/) |
| Mac | N/A |
| Windows | [DirectShow](https://docs.microsoft.com/en-us/windows/win32/directshow) |
| Pixel Format | Linux | Mac | Windows |
| :---------------------------------------------------: | :---: | :-: | :-----: |
| [YUY2](https://www.fourcc.org/pixel-format/yuv-yuy2/) | ✔️ | ✖️ | ✔️ |
| [UYVY](https://www.fourcc.org/pixel-format/yuv-uyvy/) | ✔️ | | ✖️ |
| [UYVY](https://www.fourcc.org/pixel-format/yuv-uyvy/) | ✔️ | | ✖️ |
| [I420](https://www.fourcc.org/pixel-format/yuv-i420/) | ✔️ | ✖️ | ✖️ |
| [NV21](https://www.fourcc.org/pixel-format/yuv-nv21/) | ✔️ | | ✖️ |
| [NV21](https://www.fourcc.org/pixel-format/yuv-nv21/) | ✔️ | | ✖️ |
| [MJPEG](https://www.fourcc.org/mjpg/) | ✔️ | ✖️ | ✖️ |
### Microphone

codec.go

@@ -1,117 +0,0 @@
package mediadevices
import (
"errors"
"fmt"
"strings"
"github.com/pion/mediadevices/pkg/codec"
"github.com/pion/mediadevices/pkg/io/audio"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
"github.com/pion/webrtc/v2"
)
// CodecSelector is a container of video and audio encoder builders, which later will be used
// for codec matching.
type CodecSelector struct {
videoEncoders []codec.VideoEncoderBuilder
audioEncoders []codec.AudioEncoderBuilder
}
// CodecSelectorOption is a type for specifying CodecSelector options
type CodecSelectorOption func(*CodecSelector)
// WithVideoEncoders replaces the current video codecs with the listed encoders
func WithVideoEncoders(encoders ...codec.VideoEncoderBuilder) CodecSelectorOption {
return func(t *CodecSelector) {
t.videoEncoders = encoders
}
}
// WithAudioEncoders replaces the current audio codecs with the listed encoders
func WithAudioEncoders(encoders ...codec.AudioEncoderBuilder) CodecSelectorOption {
return func(t *CodecSelector) {
t.audioEncoders = encoders
}
}
// NewCodecSelector constructs CodecSelector with given variadic options
func NewCodecSelector(opts ...CodecSelectorOption) *CodecSelector {
var track CodecSelector
for _, opt := range opts {
opt(&track)
}
return &track
}
// Populate lets the webrtc engine be aware of supported codecs that are contained in CodecSelector
func (selector *CodecSelector) Populate(setting *webrtc.MediaEngine) {
for _, encoder := range selector.videoEncoders {
setting.RegisterCodec(encoder.RTPCodec().RTPCodec)
}
for _, encoder := range selector.audioEncoders {
setting.RegisterCodec(encoder.RTPCodec().RTPCodec)
}
}
func (selector *CodecSelector) selectVideoCodec(wantCodecs []*webrtc.RTPCodec, reader video.Reader, inputProp prop.Media) (codec.ReadCloser, *codec.RTPCodec, error) {
var selectedEncoder codec.VideoEncoderBuilder
var encodedReader codec.ReadCloser
var errReasons []string
var err error
outer:
for _, wantCodec := range wantCodecs {
name := wantCodec.Name
for _, encoder := range selector.videoEncoders {
if encoder.RTPCodec().Name == name {
encodedReader, err = encoder.BuildVideoEncoder(reader, inputProp)
if err == nil {
selectedEncoder = encoder
break outer
}
}
errReasons = append(errReasons, fmt.Sprintf("%s: %s", encoder.RTPCodec().Name, err))
}
}
if selectedEncoder == nil {
return nil, nil, errors.New(strings.Join(errReasons, "\n\n"))
}
return encodedReader, selectedEncoder.RTPCodec(), nil
}
func (selector *CodecSelector) selectAudioCodec(wantCodecs []*webrtc.RTPCodec, reader audio.Reader, inputProp prop.Media) (codec.ReadCloser, *codec.RTPCodec, error) {
var selectedEncoder codec.AudioEncoderBuilder
var encodedReader codec.ReadCloser
var errReasons []string
var err error
outer:
for _, wantCodec := range wantCodecs {
name := wantCodec.Name
for _, encoder := range selector.audioEncoders {
if encoder.RTPCodec().Name == name {
encodedReader, err = encoder.BuildAudioEncoder(reader, inputProp)
if err == nil {
selectedEncoder = encoder
break outer
}
}
errReasons = append(errReasons, fmt.Sprintf("%s: %s", encoder.RTPCodec().Name, err))
}
}
if selectedEncoder == nil {
return nil, nil, errors.New(strings.Join(errReasons, "\n\n"))
}
return encodedReader, selectedEncoder.RTPCodec(), nil
}
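As a quick orientation for the selector API removed in the hunk above, here is a minimal usage sketch assembled only from names that appear elsewhere in this diff (NewCodecSelector, WithVideoEncoders, WithAudioEncoders, Populate, vpx.NewVP8Params, opus.NewParams); it is an editorial sketch, not code from the repository.

```go
// Sketch: build a CodecSelector from encoder builders and register its codecs
// with a webrtc.MediaEngine.
package main

import (
	"github.com/pion/mediadevices"
	"github.com/pion/mediadevices/pkg/codec/opus"
	"github.com/pion/mediadevices/pkg/codec/vpx"
	"github.com/pion/webrtc/v2"
)

func main() {
	vp8Params, err := vpx.NewVP8Params()
	if err != nil {
		panic(err)
	}
	opusParams, err := opus.NewParams()
	if err != nil {
		panic(err)
	}

	// WithVideoEncoders/WithAudioEncoders replace the selector's builder lists.
	selector := mediadevices.NewCodecSelector(
		mediadevices.WithVideoEncoders(&vp8Params),
		mediadevices.WithAudioEncoders(&opusParams),
	)

	// Populate registers each builder's RTPCodec with the media engine,
	// so tracks built from these encoders can be negotiated.
	mediaEngine := webrtc.MediaEngine{}
	selector.Populate(&mediaEngine)
	_ = webrtc.NewAPI(webrtc.WithMediaEngine(mediaEngine))
}
```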


@@ -1,10 +0,0 @@
coverage:
status:
project:
default:
# Allow decreasing 2% of total coverage to avoid noise.
threshold: 2%
patch: off
ignore:
- "examples/*"


@@ -0,0 +1,29 @@
## Instructions
### Download facedetection
```
go get github.com/pion/mediadevices/examples/facedetection
```
### Open example page
Open [jsfiddle.net](https://jsfiddle.net/gh/get/library/pure/pion/mediadevices/tree/master/examples/internal/jsfiddle/video); you should see two text areas and a 'Start Session' button
### Run facedetection with your browser's SessionDescription as stdin
In the jsfiddle, the top textarea is your browser's SessionDescription; copy that and:
#### Linux
Run `echo $BROWSER_SDP | facedetection`
### Input facedetection's SessionDescription into your browser
Copy the text that `facedetection` just emitted and paste it into the second text area
### Hit 'Start Session' in jsfiddle, enjoy your video!
A video should start playing in your browser above the input boxes, and will continue playing until you close the application.
Congrats, you have used pion-WebRTC! Now start building something cool


@@ -0,0 +1,118 @@
package main
import (
"image"
"image/color"
"image/draw"
"io/ioutil"
"log"
"github.com/disintegration/imaging"
pigo "github.com/esimov/pigo/core"
)
var (
cascade []byte
err error
classifier *pigo.Pigo
)
func imgToGrayscale(img image.Image) []uint8 {
bounds := img.Bounds()
flatten := bounds.Dy() * bounds.Dx()
grayImg := make([]uint8, flatten)
i := 0
for y := bounds.Min.Y; y < bounds.Max.Y; y++ {
for x := bounds.Min.X; x < bounds.Max.X; x++ {
pix := img.At(x, y)
grayPix := color.GrayModel.Convert(pix).(color.Gray)
grayImg[i] = grayPix.Y
i++
}
}
return grayImg
}
// clusterDetection runs Pigo face detector core methods
// and returns a cluster with the detected faces coordinates.
func clusterDetection(img image.Image) []pigo.Detection {
grayscale := imgToGrayscale(img)
bounds := img.Bounds()
cParams := pigo.CascadeParams{
MinSize: 100,
MaxSize: 600,
ShiftFactor: 0.15,
ScaleFactor: 1.1,
ImageParams: pigo.ImageParams{
Pixels: grayscale,
Rows: bounds.Dy(),
Cols: bounds.Dx(),
Dim: bounds.Dx(),
},
}
if len(cascade) == 0 {
cascade, err = ioutil.ReadFile("facefinder")
if err != nil {
log.Fatalf("Error reading the cascade file: %s", err)
}
p := pigo.NewPigo()
// Unpack the binary file. This will return the number of cascade trees,
// the tree depth, the threshold and the prediction from tree's leaf nodes.
classifier, err = p.Unpack(cascade)
if err != nil {
log.Fatalf("Error unpacking the cascade file: %s", err)
}
}
// Run the classifier over the obtained leaf nodes and return the detection results.
// The result contains quadruplets representing the row, column, scale and detection score.
dets := classifier.RunCascade(cParams, 0.0)
// Calculate the intersection over union (IoU) of two clusters.
dets = classifier.ClusterDetections(dets, 0)
return dets
}
func drawCircle(img draw.Image, x0, y0, r int, c color.Color) {
x, y, dx, dy := r-1, 0, 1, 1
err := dx - (r * 2)
for x > y {
img.Set(x0+x, y0+y, c)
img.Set(x0+y, y0+x, c)
img.Set(x0-y, y0+x, c)
img.Set(x0-x, y0+y, c)
img.Set(x0-x, y0-y, c)
img.Set(x0-y, y0-x, c)
img.Set(x0+y, y0-x, c)
img.Set(x0+x, y0-y, c)
if err <= 0 {
y++
err += dy
dy += 2
}
if err > 0 {
x--
dx += 2
err += dx - (r * 2)
}
}
}
func markFaces(img image.Image) image.Image {
nrgba := imaging.Clone(img)
dets := clusterDetection(img)
for _, det := range dets {
if det.Q < 5.0 {
continue
}
drawCircle(nrgba, det.Col, det.Row, det.Scale/2, color.Black)
}
return nrgba
}

Binary file not shown.


@@ -0,0 +1,119 @@
package main
import (
"fmt"
"image"
"github.com/pion/mediadevices"
"github.com/pion/mediadevices/examples/internal/signal"
"github.com/pion/mediadevices/pkg/codec"
"github.com/pion/mediadevices/pkg/codec/vpx" // This is required to use VP8/VP9 video encoder
_ "github.com/pion/mediadevices/pkg/driver/camera" // This is required to register camera adapter
"github.com/pion/mediadevices/pkg/frame"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
"github.com/pion/webrtc/v2"
)
func markFacesTransformer(r video.Reader) video.Reader {
return video.ReaderFunc(func() (img image.Image, err error) {
img, err = r.Read()
if err != nil {
return
}
img = markFaces(img)
return
})
}
func main() {
config := webrtc.Configuration{
ICEServers: []webrtc.ICEServer{
{
URLs: []string{"stun:stun.l.google.com:19302"},
},
},
}
// Wait for the offer to be pasted
offer := webrtc.SessionDescription{}
signal.Decode(signal.MustReadStdin(), &offer)
// Create a new RTCPeerConnection
mediaEngine := webrtc.MediaEngine{}
if err := mediaEngine.PopulateFromSDP(offer); err != nil {
panic(err)
}
api := webrtc.NewAPI(webrtc.WithMediaEngine(mediaEngine))
peerConnection, err := api.NewPeerConnection(config)
if err != nil {
panic(err)
}
// Set the handler for ICE connection state
// This will notify you when the peer has connected/disconnected
peerConnection.OnICEConnectionStateChange(func(connectionState webrtc.ICEConnectionState) {
fmt.Printf("Connection State has changed %s \n", connectionState.String())
})
md := mediadevices.NewMediaDevices(peerConnection)
vp8Params, err := vpx.NewVP8Params()
if err != nil {
panic(err)
}
vp8Params.BitRate = 100000 // 100kbps
s, err := md.GetUserMedia(mediadevices.MediaStreamConstraints{
Video: func(c *mediadevices.MediaTrackConstraints) {
c.FrameFormat = prop.FrameFormatExact(frame.FormatI420) // most encoders accept I420
c.Enabled = true
c.Width = prop.Int(640)
c.Height = prop.Int(480)
c.VideoTransform = markFacesTransformer
c.VideoEncoderBuilders = []codec.VideoEncoderBuilder{&vp8Params}
},
})
if err != nil {
panic(err)
}
for _, tracker := range s.GetTracks() {
t := tracker.Track()
tracker.OnEnded(func(err error) {
fmt.Printf("Track (ID: %s, Label: %s) ended with error: %v\n",
t.ID(), t.Label(), err)
})
_, err = peerConnection.AddTransceiverFromTrack(t,
webrtc.RtpTransceiverInit{
Direction: webrtc.RTPTransceiverDirectionSendonly,
},
)
if err != nil {
panic(err)
}
}
// Set the remote SessionDescription
err = peerConnection.SetRemoteDescription(offer)
if err != nil {
panic(err)
}
// Create an answer
answer, err := peerConnection.CreateAnswer(nil)
if err != nil {
panic(err)
}
// Sets the LocalDescription, and starts our UDP listeners
err = peerConnection.SetLocalDescription(answer)
if err != nil {
panic(err)
}
// Output the answer in base64 so we can paste it in browser
fmt.Println(signal.Encode(answer))
select {}
}


@@ -2,8 +2,8 @@ module github.com/pion/mediadevices/examples
go 1.14
replace github.com/pion/mediadevices => ../
// Please don't commit require entries of examples.
// `git checkout master examples/go.mod` to revert this file.
require github.com/pion/mediadevices v0.0.0
replace github.com/pion/mediadevices v0.0.0 => ../
require github.com/pion/mediadevices v0.0.0-00010101000000-000000000000


@@ -1,77 +0,0 @@
// This is an example of using mediadevices to broadcast your camera through http.
// The example doesn't aim to be performant, but rather it strives to be simple.
package main
import (
"bytes"
"fmt"
"image/jpeg"
"io"
"log"
"mime/multipart"
"net/http"
"net/textproto"
"github.com/pion/mediadevices"
"github.com/pion/mediadevices/pkg/prop"
// Note: If you don't have a camera or microphone or your adapters are not supported,
// you can always swap your adapters with our dummy adapters below.
// _ "github.com/pion/mediadevices/pkg/driver/videotest"
_ "github.com/pion/mediadevices/pkg/driver/camera" // This is required to register camera adapter
)
func must(err error) {
if err != nil {
panic(err)
}
}
func main() {
s, err := mediadevices.GetUserMedia(mediadevices.MediaStreamConstraints{
Video: func(constraint *mediadevices.MediaTrackConstraints) {
constraint.Width = prop.Int(600)
constraint.Height = prop.Int(400)
},
})
must(err)
t := s.GetVideoTracks()[0]
videoTrack := t.(*mediadevices.VideoTrack)
http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
var buf bytes.Buffer
videoReader := videoTrack.NewReader(false)
mimeWriter := multipart.NewWriter(w)
contentType := fmt.Sprintf("multipart/x-mixed-replace;boundary=%s", mimeWriter.Boundary())
w.Header().Add("Content-Type", contentType)
partHeader := make(textproto.MIMEHeader)
partHeader.Add("Content-Type", "image/jpeg")
for {
frame, release, err := videoReader.Read()
if err == io.EOF {
return
}
must(err)
err = jpeg.Encode(&buf, frame, nil)
// Since we're done with the frame, we need to release it so that the original owner can reuse
// this memory.
release()
must(err)
partWriter, err := mimeWriter.CreatePart(partHeader)
must(err)
_, err = partWriter.Write(buf.Bytes())
buf.Reset()
must(err)
}
})
fmt.Println("listening on http://localhost:1313")
log.Println(http.ListenAndServe("localhost:1313", nil))
}


@@ -0,0 +1,29 @@
## Instructions
### Download rtp-send example
```
go get github.com/pion/mediadevices/examples/rtp-send
```
### Listen for RTP
Install GStreamer and run:
```
gst-launch-1.0 udpsrc port=5000 caps=application/x-rtp,encoding-name=VP8 \
! rtpvp8depay ! vp8dec ! videoconvert ! autovideosink
```
Or run VLC media player:
```
vlc ./vp8.sdp
```
### Run rtp-send
Run `rtp-send localhost:5000`
A video should start playing in your GStreamer or VLC window.
It's not WebRTC, but pure RTP.
Congrats, you have used pion-MediaDevices! Now start building something cool
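If neither GStreamer nor VLC is at hand, a tiny Go listener can confirm that packets are arriving on port 5000. This is an editorial sketch that only assumes the pion/rtp package already used by the example below; it is not part of the example itself.

```go
// Sketch: print header fields of RTP packets received on UDP port 5000.
package main

import (
	"fmt"
	"net"

	"github.com/pion/rtp"
)

func main() {
	conn, err := net.ListenPacket("udp", ":5000")
	if err != nil {
		panic(err)
	}
	defer conn.Close()

	buf := make([]byte, 1500)
	for {
		n, _, err := conn.ReadFrom(buf)
		if err != nil {
			panic(err)
		}
		var pkt rtp.Packet
		if err := pkt.Unmarshal(buf[:n]); err != nil {
			continue // not a valid RTP packet; skip it
		}
		fmt.Printf("pt=%d seq=%d ts=%d payload=%d bytes\n",
			pkt.PayloadType, pkt.SequenceNumber, pkt.Timestamp, len(pkt.Payload))
	}
}
```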

examples/rtp-send/main.go

@@ -0,0 +1,120 @@
package main
import (
"fmt"
"net"
"os"
"github.com/pion/mediadevices"
"github.com/pion/mediadevices/pkg/codec"
"github.com/pion/mediadevices/pkg/codec/vpx" // This is required to use VP8/VP9 video encoder
_ "github.com/pion/mediadevices/pkg/driver/camera" // This is required to register camera adapter
"github.com/pion/mediadevices/pkg/frame"
"github.com/pion/mediadevices/pkg/prop"
"github.com/pion/rtp"
"github.com/pion/webrtc/v2"
"github.com/pion/webrtc/v2/pkg/media"
)
const (
mtu = 1000
)
func main() {
if len(os.Args) != 2 {
fmt.Printf("usage: %s host:port\n", os.Args[0])
return
}
md := mediadevices.NewMediaDevicesFromCodecs(
map[webrtc.RTPCodecType][]*webrtc.RTPCodec{
webrtc.RTPCodecTypeVideo: []*webrtc.RTPCodec{
webrtc.NewRTPVP8Codec(100, 90000),
},
},
mediadevices.WithTrackGenerator(
func(_ uint8, _ uint32, id, _ string, codec *webrtc.RTPCodec) (
mediadevices.LocalTrack, error,
) {
return newTrack(codec, id, os.Args[1]), nil
},
),
)
vp8Params, err := vpx.NewVP8Params()
if err != nil {
panic(err)
}
vp8Params.BitRate = 100000 // 100kbps
_, err = md.GetUserMedia(mediadevices.MediaStreamConstraints{
Video: func(c *mediadevices.MediaTrackConstraints) {
c.FrameFormat = prop.FrameFormat(frame.FormatYUY2)
c.Enabled = true
c.Width = prop.Int(640)
c.Height = prop.Int(480)
c.VideoEncoderBuilders = []codec.VideoEncoderBuilder{&vp8Params}
},
})
if err != nil {
panic(err)
}
select {}
}
type track struct {
codec *webrtc.RTPCodec
packetizer rtp.Packetizer
id string
conn net.Conn
}
func newTrack(codec *webrtc.RTPCodec, id, dest string) *track {
addr, err := net.ResolveUDPAddr("udp", dest)
if err != nil {
panic(err)
}
conn, err := net.DialUDP("udp", nil, addr)
if err != nil {
panic(err)
}
return &track{
codec: codec,
packetizer: rtp.NewPacketizer(
mtu,
codec.PayloadType,
1,
codec.Payloader,
rtp.NewRandomSequencer(),
codec.ClockRate,
),
id: id,
conn: conn,
}
}
func (t *track) WriteSample(s media.Sample) error {
buf := make([]byte, mtu)
pkts := t.packetizer.Packetize(s.Data, s.Samples)
for _, p := range pkts {
n, err := p.MarshalTo(buf)
if err != nil {
panic(err)
}
_, _ = t.conn.Write(buf[:n])
}
return nil
}
func (t *track) Codec() *webrtc.RTPCodec {
return t.codec
}
func (t *track) ID() string {
return t.id
}
func (t *track) Kind() webrtc.RTPCodecType {
return t.codec.Type
}


@@ -0,0 +1,9 @@
v=0
o=- 1234567890 1234567890 IN IP4 0.0.0.0
s=RTP-Send Example
i=Example
c=IN IP4 0.0.0.0
t=0 0
a=recvonly
m=video 5000 RTP/AVP 100
a=rtpmap:100 VP8/90000
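Note that the `m=` and `a=rtpmap` lines above have to agree with the codec that rtp-send registers; a small illustrative snippet of that pairing, using only the pion/webrtc v2 constructor already shown in examples/rtp-send/main.go:

```go
// Sketch: payload type 100 at a 90 kHz clock, matching "a=rtpmap:100 VP8/90000".
package main

import "github.com/pion/webrtc/v2"

func main() {
	vp8 := webrtc.NewRTPVP8Codec(100, 90000) // same values as the SDP above
	_ = vp8
}
```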


@@ -0,0 +1,29 @@
## Instructions
### Download screenshare
```
go get github.com/pion/mediadevices/examples/screenshare
```
### Open example page
Open [jsfiddle.net](https://jsfiddle.net/gh/get/library/pure/pion/mediadevices/tree/master/examples/internal/jsfiddle/audio-and-video); you should see two text areas and a 'Start Session' button
### Run screenshare with your browser's SessionDescription as stdin
In the jsfiddle, the top textarea is your browser's SessionDescription; copy that and:
#### Linux
Run `echo $BROWSER_SDP | screenshare`
### Input screenshare's SessionDescription into your browser
Copy the text that `screenshare` just emitted and paste it into the second text area
### Hit 'Start Session' in jsfiddle, enjoy your video!
A video should start playing in your browser above the input boxes, and will continue playing until you close the application.
Congrats, you have used pion-WebRTC! Now start building something cool


@@ -0,0 +1,101 @@
package main
import (
"fmt"
"github.com/pion/mediadevices"
"github.com/pion/mediadevices/examples/internal/signal"
"github.com/pion/mediadevices/pkg/codec"
"github.com/pion/mediadevices/pkg/codec/vpx" // This is required to use VP8/VP9 video encoder
_ "github.com/pion/mediadevices/pkg/driver/screen" // This is required to register screen capture adapter
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/webrtc/v2"
)
func main() {
config := webrtc.Configuration{
ICEServers: []webrtc.ICEServer{
{
URLs: []string{"stun:stun.l.google.com:19302"},
},
},
}
// Wait for the offer to be pasted
offer := webrtc.SessionDescription{}
signal.Decode(signal.MustReadStdin(), &offer)
// Create a new RTCPeerConnection
mediaEngine := webrtc.MediaEngine{}
if err := mediaEngine.PopulateFromSDP(offer); err != nil {
panic(err)
}
api := webrtc.NewAPI(webrtc.WithMediaEngine(mediaEngine))
peerConnection, err := api.NewPeerConnection(config)
if err != nil {
panic(err)
}
// Set the handler for ICE connection state
// This will notify you when the peer has connected/disconnected
peerConnection.OnICEConnectionStateChange(func(connectionState webrtc.ICEConnectionState) {
fmt.Printf("Connection State has changed %s \n", connectionState.String())
})
md := mediadevices.NewMediaDevices(peerConnection)
vp8Params, err := vpx.NewVP8Params()
if err != nil {
panic(err)
}
vp8Params.BitRate = 100000 // 100kbps
s, err := md.GetDisplayMedia(mediadevices.MediaStreamConstraints{
Video: func(c *mediadevices.MediaTrackConstraints) {
c.Enabled = true
c.VideoTransform = video.Scale(-1, 360, nil) // Resize to 360p
c.VideoEncoderBuilders = []codec.VideoEncoderBuilder{&vp8Params}
},
})
if err != nil {
panic(err)
}
for _, tracker := range s.GetTracks() {
t := tracker.Track()
tracker.OnEnded(func(err error) {
fmt.Printf("Track (ID: %s, Label: %s) ended with error: %v\n",
t.ID(), t.Label(), err)
})
_, err = peerConnection.AddTransceiverFromTrack(t,
webrtc.RtpTransceiverInit{
Direction: webrtc.RTPTransceiverDirectionSendonly,
},
)
if err != nil {
panic(err)
}
}
// Set the remote SessionDescription
err = peerConnection.SetRemoteDescription(offer)
if err != nil {
panic(err)
}
// Create an answer
answer, err := peerConnection.CreateAnswer(nil)
if err != nil {
panic(err)
}
// Sets the LocalDescription, and starts our UDP listeners
err = peerConnection.SetLocalDescription(answer)
if err != nil {
panic(err)
}
// Output the answer in base64 so we can paste it in browser
fmt.Println(signal.Encode(answer))
select {}
}


@@ -3,24 +3,24 @@
### Download gstreamer-send
```
go get github.com/pion/mediadevices/examples/webrtc
go get github.com/pion/mediadevices/examples/simple
```
### Open example page
Open [jsfiddle.net](https://jsfiddle.net/gh/get/library/pure/pion/mediadevices/tree/master/examples/internal/jsfiddle/audio-and-video); you should see two text areas and a 'Start Session' button
### Run webrtc with your browser's SessionDescription as stdin
### Run simple with your browser's SessionDescription as stdin
In the jsfiddle, the top textarea is your browser's SessionDescription; copy that and:
#### Linux
Run `echo $BROWSER_SDP | webrtc`
Run `echo $BROWSER_SDP | simple`
### Input webrtc's SessionDescription into your browser
### Input simple's SessionDescription into your browser
Copy the text that `webrtc` just emitted and paste it into the second text area
Copy the text that `simple` just emitted and paste it into the second text area
### Hit 'Start Session' in jsfiddle, enjoy your video!


@@ -5,23 +5,26 @@ import (
"github.com/pion/mediadevices"
"github.com/pion/mediadevices/examples/internal/signal"
"github.com/pion/mediadevices/pkg/codec"
"github.com/pion/mediadevices/pkg/frame"
"github.com/pion/mediadevices/pkg/prop"
"github.com/pion/webrtc/v2"
// This is required to use opus audio encoder
"github.com/pion/mediadevices/pkg/codec/opus"
// If you don't like vpx, you can also use x264 by importing as below
// "github.com/pion/mediadevices/pkg/codec/x264" // This is required to use h264 video encoder
// or you can also use openh264 for alternative h264 implementation
// "github.com/pion/mediadevices/pkg/codec/openh264"
"github.com/pion/mediadevices/pkg/codec/openh264" // This is required to use VP8/VP9 video encoder
"github.com/pion/mediadevices/pkg/codec/opus" // This is required to use VP8/VP9 video encoder
"github.com/pion/mediadevices/pkg/codec/vpx" // This is required to use VP8/VP9 video encoder
// Note: If you don't have a camera or microphone or your adapters are not supported,
// you can always swap your adapters with our dummy adapters below.
// _ "github.com/pion/mediadevices/pkg/driver/videotest"
// _ "github.com/pion/mediadevices/pkg/driver/audiotest"
_ "github.com/pion/mediadevices/pkg/driver/audiotest"
_ "github.com/pion/mediadevices/pkg/driver/camera" // This is required to register camera adapter
_ "github.com/pion/mediadevices/pkg/driver/camera" // This is required to register camera adapter
_ "github.com/pion/mediadevices/pkg/driver/microphone" // This is required to register microphone adapter
)
const (
@@ -58,48 +61,44 @@ func main() {
fmt.Printf("Connection State has changed %s \n", connectionState.String())
})
vp8Params, err := openh264.NewParams()
if err != nil {
panic(err)
}
vp8Params.BitRate = 300_000 // 300kbps
md := mediadevices.NewMediaDevices(peerConnection)
opusParams, err := opus.NewParams()
if err != nil {
panic(err)
}
codecSelector := mediadevices.NewCodecSelector(
mediadevices.WithVideoEncoders(&vp8Params),
mediadevices.WithAudioEncoders(&opusParams),
)
opusParams.BitRate = 32000 // 32kbps
s, err := mediadevices.GetUserMedia(mediadevices.MediaStreamConstraints{
vp8Params, err := vpx.NewVP8Params()
if err != nil {
panic(err)
}
vp8Params.BitRate = 100000 // 100kbps
s, err := md.GetUserMedia(mediadevices.MediaStreamConstraints{
Audio: func(c *mediadevices.MediaTrackConstraints) {
c.Enabled = true
c.AudioEncoderBuilders = []codec.AudioEncoderBuilder{&opusParams}
},
Video: func(c *mediadevices.MediaTrackConstraints) {
c.FrameFormat = prop.FrameFormat(frame.FormatYUY2)
c.Enabled = true
c.Width = prop.Int(640)
c.Height = prop.Int(480)
c.VideoEncoderBuilders = []codec.VideoEncoderBuilder{&vp8Params}
},
Audio: func(c *mediadevices.MediaTrackConstraints) {
},
Codec: codecSelector,
})
if err != nil {
panic(err)
}
for _, tracker := range s.GetTracks() {
t := tracker.Track()
tracker.OnEnded(func(err error) {
fmt.Printf("Track (ID: %s) ended with error: %v\n",
tracker.ID(), err)
fmt.Printf("Track (ID: %s, Label: %s) ended with error: %v\n",
t.ID(), t.Label(), err)
})
// In Pion/webrtc v3, bind will be called automatically after SDP negotiation
webrtcTrack, err := tracker.Bind(peerConnection)
if err != nil {
panic(err)
}
_, err = peerConnection.AddTransceiverFromTrack(webrtcTrack,
_, err = peerConnection.AddTransceiverFromTrack(t,
webrtc.RtpTransceiverInit{
Direction: webrtc.RTPTransceiverDirectionSendonly,
},

go.mod

@@ -4,10 +4,11 @@ go 1.13
require (
github.com/blackjack/webcam v0.0.0-20200313125108-10ed912a8539
github.com/jfreymuth/pulse v0.0.0-20201014123913-1e525c426c93
github.com/lherman-cs/opus v0.0.2
github.com/pion/webrtc/v2 v2.2.26
github.com/faiface/beep v1.0.2
github.com/jfreymuth/pulse v0.0.0-20200506145638-1534c4af9659
github.com/lherman-cs/opus v0.0.0-20200223204610-6a4b98199ea4
github.com/pion/webrtc/v2 v2.2.14
github.com/satori/go.uuid v1.2.0
golang.org/x/image v0.0.0-20200927104501-e162460cd6b5
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f
golang.org/x/image v0.0.0-20200430140353-33d19683fad8
golang.org/x/sys v0.0.0-20200501145240-bc7a7d42d5c3
)

go.sum

@@ -5,73 +5,86 @@ github.com/cheekybits/genny v1.0.0/go.mod h1:+tQajlRqAUrPI7DOSpB0XAqZYtQakVtB7wX
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/faiface/beep v1.0.2 h1:UB5DiRNmA4erfUYnHbgU4UB6DlBOrsdEFRtcc8sCkdQ=
github.com/faiface/beep v1.0.2/go.mod h1:1yLb5yRdHMsovYYWVqYLioXkVuziCSITW1oarTeduQM=
github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/gdamore/encoding v1.0.0/go.mod h1:alR0ol34c49FCSBLjhosxzcPHQbf2trDkoo5dl+VrEg=
github.com/gdamore/tcell v1.1.1/go.mod h1:K1udHkiR3cOtlpKG5tZPD5XxrF7v2y7lDq7Whcj+xkQ=
github.com/golang/mock v1.2.0 h1:28o5sBqPkBsMGnC6b4MvE2TzSr5/AT4c/1fLqVGIwlk=
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/protobuf v1.2.0 h1:P3YflyNX/ehuJFLhxviNdFxQPkGK5cDcApsge1SqnvM=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/google/uuid v1.1.1 h1:Gkbcsh/GbpXz7lPftLA3P6TYMwjCLYm83jiFQZF/3gY=
github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gopherjs/gopherjs v0.0.0-20180628210949-0892b62f0d9f/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gopherjs/gopherjs v0.0.0-20180825215210-0210a2f0f73c h1:16eHWuMGvCjSfgRJKqIzapE78onvvTbdi1rMkU00lZw=
github.com/gopherjs/gopherjs v0.0.0-20180825215210-0210a2f0f73c/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gopherjs/gopherwasm v0.1.1/go.mod h1:kx4n9a+MzHH0BJJhvlsQ65hqLFXDO/m256AsaDPQ+/4=
github.com/gopherjs/gopherwasm v1.0.0 h1:32nge/RlujS1Im4HNCJPp0NbBOAeBXFuT1KonUuLl+Y=
github.com/gopherjs/gopherwasm v1.0.0/go.mod h1:SkZ8z7CWBz5VXbhJel8TxCmAcsQqzgWGR/8nMhyhZSI=
github.com/hajimehoshi/go-mp3 v0.1.1/go.mod h1:4i+c5pDNKDrxl1iu9iG90/+fhP37lio6gNhjCx9WBJw=
github.com/hajimehoshi/oto v0.1.1/go.mod h1:hUiLWeBQnbDu4pZsAhOnGqMI1ZGibS6e2qhQdfpwz04=
github.com/hajimehoshi/oto v0.3.1 h1:cpf/uIv4Q0oc5uf9loQn7PIehv+mZerh+0KKma6gzMk=
github.com/hajimehoshi/oto v0.3.1/go.mod h1:e9eTLBB9iZto045HLbzfHJIc+jP3xaKrjZTghvb6fdM=
github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/jfreymuth/pulse v0.0.0-20201014123913-1e525c426c93 h1:gDcaH96SZ7q1JU6hj0tSv8FiuqadFERU17lLxhphLa8=
github.com/jfreymuth/pulse v0.0.0-20201014123913-1e525c426c93/go.mod h1:cpYspI6YljhkUf1WLXLLDmeaaPFc3CnGLjDZf9dZ4no=
github.com/jfreymuth/oggvorbis v1.0.0/go.mod h1:abe6F9QRjuU9l+2jek3gj46lu40N4qlYxh2grqkLEDM=
github.com/jfreymuth/pulse v0.0.0-20200506145638-1534c4af9659 h1:DRA4BuRlhEILiud720WFWqqdADPzp1jTjQvyCr/PP80=
github.com/jfreymuth/pulse v0.0.0-20200506145638-1534c4af9659/go.mod h1:cpYspI6YljhkUf1WLXLLDmeaaPFc3CnGLjDZf9dZ4no=
github.com/jfreymuth/vorbis v1.0.0/go.mod h1:8zy3lUAm9K/rJJk223RKy6vjCZTWC61NA2QD06bfOE0=
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/lherman-cs/opus v0.0.2 h1:fE9Du3NKXDBztqvoTd6P2y9eJ9vgIHahGK8yQostnhA=
github.com/lherman-cs/opus v0.0.2/go.mod h1:v9KQvlDYMuvlwniumBVMlrB0VHQvyTgxNvaXjPmTmps=
github.com/lherman-cs/opus v0.0.0-20200223204610-6a4b98199ea4 h1:2ydMA2KbxRkYmIw3R8Me8dn90bejxBR4MKYXJ5THK3I=
github.com/lherman-cs/opus v0.0.0-20200223204610-6a4b98199ea4/go.mod h1:v9KQvlDYMuvlwniumBVMlrB0VHQvyTgxNvaXjPmTmps=
github.com/lucas-clemente/quic-go v0.7.1-0.20190401152353-907071221cf9 h1:tbuodUh2vuhOVZAdW3NEUvosFHUMJwUNl7jk/VSEiwc=
github.com/lucas-clemente/quic-go v0.7.1-0.20190401152353-907071221cf9/go.mod h1:PpMmPfPKO9nKJ/psF49ESTAGQSdfXxlg1otPbEB2nOw=
github.com/lucasb-eyer/go-colorful v0.0.0-20181028223441-12d3b2882a08/go.mod h1:NXg0ArsFk0Y01623LgUqoqcouGDB+PwCCQlrwrG6xJ4=
github.com/marten-seemann/qtls v0.2.3 h1:0yWJ43C62LsZt08vuQJDK1uC1czUc3FJeCLPoNAI4vA=
github.com/marten-seemann/qtls v0.2.3/go.mod h1:xzjG7avBwGGbdZ8dTGxlBnLArsVKLvwmjgmPuiQEcYk=
github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
github.com/mewkiz/flac v1.0.5/go.mod h1:EHZNU32dMF6alpurYyKHDLYpW1lYpBZ5WrXi/VuNIGs=
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.7.0 h1:WSHQ+IS43OoUrWtD1/bbclrwK8TTH5hzp+umCiuxHgs=
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/gomega v1.4.3 h1:RE1xgDvH7imwFD45h+u2SgIfERHlS2yNG4DObb5BSKU=
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/pion/datachannel v1.4.21 h1:3ZvhNyfmxsAqltQrApLPQMhSFNA+aT87RqyCq4OXmf0=
github.com/pion/datachannel v1.4.21/go.mod h1:oiNyP4gHx2DIwRzX/MFyH0Rz/Gz05OgBlayAI2hAWjg=
github.com/pion/dtls/v2 v2.0.1/go.mod h1:uMQkz2W0cSqY00xav7WByQ4Hb+18xeQh2oH2fRezr5U=
github.com/pion/dtls/v2 v2.0.2 h1:FHCHTiM182Y8e15aFTiORroiATUI16ryHiQh8AIOJ1E=
github.com/pion/dtls/v2 v2.0.2/go.mod h1:27PEO3MDdaCfo21heT59/vsdmZc0zMt9wQPcSlLu/1I=
github.com/pion/ice v0.7.18 h1:KbAWlzWRUdX9SmehBh3gYpIFsirjhSQsCw6K2MjYMK0=
github.com/pion/ice v0.7.18/go.mod h1:+Bvnm3nYC6Nnp7VV6glUkuOfToB/AtMRZpOU8ihuf4c=
github.com/pion/datachannel v1.4.17 h1:8CChK5VrJoGrwKCysoTscoWvshCAFpUkgY11Tqgz5hE=
github.com/pion/datachannel v1.4.17/go.mod h1:+vPQfypU9vSsyPXogYj1hBThWQ6MNXEQoQAzxoPvjYM=
github.com/pion/dtls/v2 v2.0.0 h1:Fk+MBhLZ/U1bImzAhmzwbO/pP2rKhtTw8iA934H3ybE=
github.com/pion/dtls/v2 v2.0.0/go.mod h1:VkY5VL2wtsQQOG60xQ4lkV5pdn0wwBBTzCfRJqXhp3A=
github.com/pion/ice v0.7.15 h1:s1In+gnuyVq7WKWGVQL+1p+OcrMsbfL+VfSe2isH8Ag=
github.com/pion/ice v0.7.15/go.mod h1:Z6zybEQgky5mZkKcLfmvc266JukK2srz3VZBBD1iXBw=
github.com/pion/logging v0.2.2 h1:M9+AIj/+pxNsDfAT64+MAVgJO0rsyLnoJKCqf//DoeY=
github.com/pion/logging v0.2.2/go.mod h1:k0/tDVsRCX2Mb2ZEmTqNa7CWsQPc+YYCB7Q+5pahoms=
github.com/pion/mdns v0.0.4 h1:O4vvVqr4DGX63vzmO6Fw9vpy3lfztVWHGCQfyw0ZLSY=
github.com/pion/mdns v0.0.4/go.mod h1:R1sL0p50l42S5lJs91oNdUL58nm0QHrhxnSegr++qC0=
github.com/pion/quic v0.1.1 h1:D951FV+TOqI9A0rTF7tHx0Loooqz+nyzjEyj8o3PuMA=
github.com/pion/quic v0.1.1/go.mod h1:zEU51v7ru8Mp4AUBJvj6psrSth5eEFNnVQK5K48oV3k=
github.com/pion/randutil v0.0.0/go.mod h1:XcJrSMMbbMRhASFVOlj/5hQial/Y8oH/HVo7TBZq+j8=
github.com/pion/randutil v0.1.0 h1:CFG1UdESneORglEsnimhUjf33Rwjubwj6xfiOXBa3mA=
github.com/pion/randutil v0.1.0/go.mod h1:XcJrSMMbbMRhASFVOlj/5hQial/Y8oH/HVo7TBZq+j8=
github.com/pion/rtcp v1.2.3 h1:2wrhKnqgSz91Q5nzYTO07mQXztYPtxL8a0XOss4rJqA=
github.com/pion/rtcp v1.2.3/go.mod h1:zGhIv0RPRF0Z1Wiij22pUt5W/c9fevqSzT4jje/oK7I=
github.com/pion/rtp v1.6.0 h1:4Ssnl/T5W2LzxHj9ssYpGVEQh3YYhQFNVmSWO88MMwk=
github.com/pion/rtp v1.6.0/go.mod h1:QgfogHsMBVE/RFNno467U/KBqfUywEH+HK+0rtnwsdI=
github.com/pion/sctp v1.7.10 h1:o3p3/hZB5Cx12RMGyWmItevJtZ6o2cpuxaw6GOS4x+8=
github.com/pion/sctp v1.7.10/go.mod h1:EhpTUQu1/lcK3xI+eriS6/96fWetHGCvBi9MSsnaBN0=
github.com/pion/sdp/v2 v2.4.0 h1:luUtaETR5x2KNNpvEMv/r4Y+/kzImzbz4Lm1z8eQNQI=
github.com/pion/sdp/v2 v2.4.0/go.mod h1:L2LxrOpSTJbAns244vfPChbciR/ReU1KWfG04OpkR7E=
github.com/pion/srtp v1.5.1 h1:9Q3jAfslYZBt+C69SI/ZcONJh9049JUHZWYRRf5KEKw=
github.com/pion/srtp v1.5.1/go.mod h1:B+QgX5xPeQTNc1CJStJPHzOlHK66ViMDWTT0HZTCkcA=
github.com/pion/stun v0.3.5 h1:uLUCBCkQby4S1cf6CGuR9QrVOKcvUwFeemaC865QHDg=
github.com/pion/stun v0.3.5/go.mod h1:gDMim+47EeEtfWogA37n6qXZS88L5V6LqFcf+DZA2UA=
github.com/pion/rtcp v1.2.1 h1:S3yG4KpYAiSmBVqKAfgRa5JdwBNj4zK3RLUa8JYdhak=
github.com/pion/rtcp v1.2.1/go.mod h1:a5dj2d6BKIKHl43EnAOIrCczcjESrtPuMgfmL6/K6QM=
github.com/pion/rtp v1.5.4 h1:PuNg6xqV3brIUihatcKZj1YDUs+M45L0ZbrZWYtkDxY=
github.com/pion/rtp v1.5.4/go.mod h1:bg60AL5GotNOlYZsqycbhDtEV3TkfbpXG0KBiUq29Mg=
github.com/pion/sctp v1.7.6 h1:8qZTdJtbKfAns/Hv5L0PAj8FyXcsKhMH1pKUCGisQg4=
github.com/pion/sctp v1.7.6/go.mod h1:ichkYQ5tlgCQwEwvgfdcAolqx1nHbYCxo4D7zK/K0X8=
github.com/pion/sdp/v2 v2.3.7 h1:WUZHI3pfiYCaE8UGUYcabk863LCK+Bq3AklV5O0oInQ=
github.com/pion/sdp/v2 v2.3.7/go.mod h1:+ZZf35r1+zbaWYiZLfPutWfx58DAWcGb2QsS3D/s9M8=
github.com/pion/srtp v1.3.3 h1:8bjs9YaSNvSrbH0OfKxzPX+PTrCyAC2LoT9Qesugi+U=
github.com/pion/srtp v1.3.3/go.mod h1:jNe0jmIOqksuurR9S/7yoKDalfPeluUFrNPCBqI4FOI=
github.com/pion/stun v0.3.3 h1:brYuPl9bN9w/VM7OdNzRSLoqsnwlyNvD9MVeJrHjDQw=
github.com/pion/stun v0.3.3/go.mod h1:xrCld6XM+6GWDZdvjPlLMsTU21rNxnO6UO8XsAvHr/M=
github.com/pion/transport v0.6.0/go.mod h1:iWZ07doqOosSLMhZ+FXUTq+TamDoXSllxpbGcfkCmbE=
github.com/pion/transport v0.8.10 h1:lTiobMEw2PG6BH/mgIVqTV2mBp/mPT+IJLaN8ZxgdHk=
github.com/pion/transport v0.8.10/go.mod h1:tBmha/UCjpum5hqTWhfAEs3CO4/tHSg0MYRhSzR+CZ8=
github.com/pion/transport v0.10.0 h1:9M12BSneJm6ggGhJyWpDveFOstJsTiQjkLf4M44rm80=
github.com/pion/transport v0.10.0/go.mod h1:BnHnUipd0rZQyTVB2SBGojFHT9CBt5C5TcsJSQGkvSE=
github.com/pion/transport v0.10.1 h1:2W+yJT+0mOQ160ThZYUx5Zp2skzshiNgxrNE9GUfhJM=
github.com/pion/transport v0.10.1/go.mod h1:PBis1stIILMiis0PewDw91WJeLJkyIMcEk+DwKOzf4A=
github.com/pion/turn/v2 v2.0.4 h1:oDguhEv2L/4rxwbL9clGLgtzQPjtuZwCdoM7Te8vQVk=
github.com/pion/turn/v2 v2.0.4/go.mod h1:1812p4DcGVbYVBTiraUmP50XoKye++AMkbfp+N27mog=
github.com/pion/udp v0.1.0 h1:uGxQsNyrqG3GLINv36Ff60covYmfrLoxzwnCsIYspXI=
github.com/pion/udp v0.1.0/go.mod h1:BPELIjbwE9PRbd/zxI/KYBnbo7B6+oA6YuEaNE8lths=
github.com/pion/webrtc/v2 v2.2.26 h1:01hWE26pL3LgqfxvQ1fr6O4ZtyRFFJmQEZK39pHWfFc=
github.com/pion/webrtc/v2 v2.2.26/go.mod h1:XMZbZRNHyPDe1gzTIHFcQu02283YO45CbiwFgKvXnmc=
github.com/pion/turn/v2 v2.0.3 h1:SJUUIbcPoehlyZgMyIUbBBDhI03sBx32x3JuSIBKBWA=
github.com/pion/turn/v2 v2.0.3/go.mod h1:kl1hmT3NxcLynpXVnwJgObL8C9NaCyPTeqI2DcCpSZs=
github.com/pion/webrtc/v2 v2.2.14 h1:bRjnXTqMDJ3VERPF45z439Sv6QfDfjdYvdQk1QcIx8M=
github.com/pion/webrtc/v2 v2.2.14/go.mod h1:G+8lShCMbHhjpMF1ZJBkyuvrxXrvW4bxs3nOt+mJ2UI=
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
@@ -81,40 +94,45 @@ github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdh
github.com/sclevine/agouti v3.0.0+incompatible/go.mod h1:b4WX9W9L1sfQKXeJf1mUTLZKJ48R1S7H23Ji7oFO5Bw=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
golang.org/x/crypto v0.0.0-20190228161510-8dd112bcdc25/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20200602180216-279210d13fed/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200709230013-948cd5f35899 h1:DZhuSZLsGlFL4CmhA8BcRA0mnthyA/nZ00AqCUo7vHg=
golang.org/x/crypto v0.0.0-20200709230013-948cd5f35899/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/image v0.0.0-20200927104501-e162460cd6b5 h1:QelT11PB4FXiDEXucrfNckHoFxwt8USGY1ajP1ZF5lM=
golang.org/x/image v0.0.0-20200927104501-e162460cd6b5/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59 h1:3zb4D3T4G8jdExgVU/95+vQXfpEPiMdCaZgmGVxjNHM=
golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/exp v0.0.0-20180710024300-14dda7b62fcd h1:nLIcFw7GiqKXUS7HiChg6OAYWgASB2H97dZKd1GhDSs=
golang.org/x/exp v0.0.0-20180710024300-14dda7b62fcd/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81 h1:00VmoueYNlNz/aHIilyyQz/MHSqGoWJzpFv/HW8xpzI=
golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs=
golang.org/x/image v0.0.0-20200430140353-33d19683fad8 h1:6WW6V3x1P/jokJBpRQYUJnMHRP6isStQwCozxnU7XQw=
golang.org/x/image v0.0.0-20200430140353-33d19683fad8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/mobile v0.0.0-20180806140643-507816974b79 h1:t2JRgCWkY7Qaa1J2jal+wqC9OjbyHCHwIA9rVlRUSMo=
golang.org/x/mobile v0.0.0-20180806140643-507816974b79/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20191126235420-ef20fe5d7933/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200602114024-627f9648deb9/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20200707034311-ab3426394381 h1:VXak5I6aEWmAXeQjA+QSZzlgNrpq9mjcfDemuexIKsU=
golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e h1:3G+cUijn7XD+S4eJFddp53Pv7+slrESplyjG25HgL+k=
golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5 h1:WQ8q63x+f/zpC8Ac1s9wLElVoHhm32p6tudrU72n1QA=
golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f h1:wMNYb4v58l5UBM7MYRLPG6ZhfOqbKu7X5eyFl8ZhKvA=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181228144115-9a3f9b0469bb/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190228124157-a34e9553db1e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200724161237-0e2f3a69832c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f h1:+Nyd8tzPX9R7BWHguqsrbFdRx3WQ/1ib8I44HXV5yTA=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200501145240-bc7a7d42d5c3 h1:5B6i6EAiSYyejWfvc5Rc9BbI3rzIsrrXfAQBWnYfn+w=
golang.org/x/sys v0.0.0-20200501145240-bc7a7d42d5c3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gopkg.in/DATA-DOG/go-sqlmock.v1 v1.3.0/go.mod h1:OdE7CF6DbADk7lN8LIKRzRJTTZXIjtWgA5THM5lhBAw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
@@ -125,5 +143,3 @@ gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWD
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=


@@ -7,7 +7,7 @@ type MediaDeviceType int
// MediaDeviceType definitions.
const (
VideoInput MediaDeviceType = iota + 1
VideoInput MediaDeviceType = iota
AudioInput
AudioOutput
)
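The only change in this hunk is whether the enum starts at zero or one. As an editorial aside (the rationale is not stated in the diff), starting at `iota + 1` keeps the zero value of MediaDeviceType distinct from every real device type, which the short sketch below makes concrete:

```go
// Sketch: with iota + 1, an uninitialized MediaDeviceType never equals a real type.
package main

import "fmt"

type MediaDeviceType int

const (
	VideoInput MediaDeviceType = iota + 1 // 1
	AudioInput                            // 2
	AudioOutput                           // 3
)

func main() {
	var unset MediaDeviceType        // zero value
	fmt.Println(unset == VideoInput) // false: the zero value is detectable as "unset"
}
```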


@@ -3,30 +3,98 @@ package mediadevices
import (
"fmt"
"math"
"strings"
"github.com/pion/mediadevices/pkg/driver"
"github.com/pion/mediadevices/pkg/prop"
"github.com/pion/webrtc/v2"
)
var errNotFound = fmt.Errorf("failed to find the best driver that fits the constraints")
// MediaDevices is an interface that's defined on https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices
type MediaDevices interface {
GetDisplayMedia(constraints MediaStreamConstraints) (MediaStream, error)
GetUserMedia(constraints MediaStreamConstraints) (MediaStream, error)
EnumerateDevices() []MediaDeviceInfo
}
// NewMediaDevices creates MediaDevices interface that provides access to connected media input devices
// like cameras and microphones, as well as screen sharing.
// In essence, it lets you obtain access to any hardware source of media data.
func NewMediaDevices(pc *webrtc.PeerConnection, opts ...MediaDevicesOption) MediaDevices {
codecs := make(map[webrtc.RTPCodecType][]*webrtc.RTPCodec)
for _, kind := range []webrtc.RTPCodecType{
webrtc.RTPCodecTypeAudio,
webrtc.RTPCodecTypeVideo,
} {
codecs[kind] = pc.GetRegisteredRTPCodecs(kind)
}
return NewMediaDevicesFromCodecs(codecs, opts...)
}
// NewMediaDevicesFromCodecs creates MediaDevices interface from lists of the available codecs
// that provides access to connected media input devices like cameras and microphones,
// as well as screen sharing.
// In essence, it lets you obtain access to any hardware source of media data.
func NewMediaDevicesFromCodecs(codecs map[webrtc.RTPCodecType][]*webrtc.RTPCodec, opts ...MediaDevicesOption) MediaDevices {
mdo := MediaDevicesOptions{
codecs: codecs,
trackGenerator: defaultTrackGenerator,
}
for _, o := range opts {
o(&mdo)
}
return &mediaDevices{
MediaDevicesOptions: mdo,
}
}
// TrackGenerator is a function to create new track.
type TrackGenerator func(payloadType uint8, ssrc uint32, id, label string, codec *webrtc.RTPCodec) (LocalTrack, error)
var defaultTrackGenerator = TrackGenerator(func(pt uint8, ssrc uint32, id, label string, codec *webrtc.RTPCodec) (LocalTrack, error) {
return webrtc.NewTrack(pt, ssrc, id, label, codec)
})
type mediaDevices struct {
MediaDevicesOptions
}
// MediaDevicesOptions stores parameters used by MediaDevices.
type MediaDevicesOptions struct {
codecs map[webrtc.RTPCodecType][]*webrtc.RTPCodec
trackGenerator TrackGenerator
}
// MediaDevicesOption is a type of MediaDevices functional option.
type MediaDevicesOption func(*MediaDevicesOptions)
// WithTrackGenerator specifies a TrackGenerator to use customized track.
func WithTrackGenerator(gen TrackGenerator) MediaDevicesOption {
return func(o *MediaDevicesOptions) {
o.trackGenerator = gen
}
}
// GetDisplayMedia prompts the user to select and grant permission to capture the contents
// of a display or portion thereof (such as a window) as a MediaStream.
// Reference: https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getDisplayMedia
func GetDisplayMedia(constraints MediaStreamConstraints) (MediaStream, error) {
trackers := make([]Track, 0)
func (m *mediaDevices) GetDisplayMedia(constraints MediaStreamConstraints) (MediaStream, error) {
trackers := make([]Tracker, 0)
cleanTrackers := func() {
for _, t := range trackers {
t.Close()
t.Stop()
}
}
var videoConstraints MediaTrackConstraints
if constraints.Video != nil {
constraints.Video(&videoConstraints)
tracker, err := selectScreen(videoConstraints, constraints.Codec)
}
if videoConstraints.Enabled {
tracker, err := m.selectScreen(videoConstraints)
if err != nil {
cleanTrackers()
return nil, err
@@ -47,20 +115,27 @@ func GetDisplayMedia(constraints MediaStreamConstraints) (MediaStream, error) {
// GetUserMedia prompts the user for permission to use a media input which produces a MediaStream
// with tracks containing the requested types of media.
// Reference: https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia
func GetUserMedia(constraints MediaStreamConstraints) (MediaStream, error) {
func (m *mediaDevices) GetUserMedia(constraints MediaStreamConstraints) (MediaStream, error) {
// TODO: It should return media stream based on constraints
trackers := make([]Track, 0)
trackers := make([]Tracker, 0)
cleanTrackers := func() {
for _, t := range trackers {
t.Close()
t.Stop()
}
}
var videoConstraints, audioConstraints MediaTrackConstraints
if constraints.Video != nil {
constraints.Video(&videoConstraints)
tracker, err := selectVideo(videoConstraints, constraints.Codec)
}
if constraints.Audio != nil {
constraints.Audio(&audioConstraints)
}
if videoConstraints.Enabled {
tracker, err := m.selectVideo(videoConstraints)
if err != nil {
cleanTrackers()
return nil, err
@@ -69,9 +144,8 @@ func GetUserMedia(constraints MediaStreamConstraints) (MediaStream, error) {
trackers = append(trackers, tracker)
}
if constraints.Audio != nil {
constraints.Audio(&audioConstraints)
tracker, err := selectAudio(audioConstraints, constraints.Codec)
if audioConstraints.Enabled {
tracker, err := m.selectAudio(audioConstraints)
if err != nil {
cleanTrackers()
return nil, err
@@ -140,32 +214,15 @@ func selectBestDriver(filter driver.FilterFn, constraints MediaTrackConstraints)
}
if bestDriver == nil {
var foundProperties []string
for _, props := range driverProperties {
for _, p := range props {
foundProperties = append(foundProperties, fmt.Sprint(&p))
}
}
err := fmt.Errorf(`%w:
============ Found Properties ============
%s
=============== Constraints ==============
%s
`, errNotFound, strings.Join(foundProperties, "\n\n"), &constraints)
return nil, MediaTrackConstraints{}, err
return nil, MediaTrackConstraints{}, errNotFound
}
constraints.selectedMedia = prop.Media{}
constraints.selectedMedia.MergeConstraints(constraints.MediaConstraints)
constraints.selectedMedia.Merge(bestProp)
constraints.selectedMedia = bestProp
constraints.selectedMedia.Merge(constraints.MediaConstraints)
return bestDriver, constraints, nil
}
func selectAudio(constraints MediaTrackConstraints, selector *CodecSelector) (Track, error) {
func (m *mediaDevices) selectAudio(constraints MediaTrackConstraints) (Tracker, error) {
typeFilter := driver.FilterAudioRecorder()
d, c, err := selectBestDriver(typeFilter, constraints)
@@ -173,9 +230,9 @@ func selectAudio(constraints MediaTrackConstraints, selector *CodecSelector) (Tr
return nil, err
}
return newTrackFromDriver(d, c, selector)
return newTrack(&m.MediaDevicesOptions, d, c)
}
func selectVideo(constraints MediaTrackConstraints, selector *CodecSelector) (Track, error) {
func (m *mediaDevices) selectVideo(constraints MediaTrackConstraints) (Tracker, error) {
typeFilter := driver.FilterVideoRecorder()
notScreenFilter := driver.FilterNot(driver.FilterDeviceType(driver.Screen))
filter := driver.FilterAnd(typeFilter, notScreenFilter)
@@ -185,10 +242,10 @@ func selectVideo(constraints MediaTrackConstraints, selector *CodecSelector) (Tr
return nil, err
}
return newTrackFromDriver(d, c, selector)
return newTrack(&m.MediaDevicesOptions, d, c)
}
func selectScreen(constraints MediaTrackConstraints, selector *CodecSelector) (Track, error) {
func (m *mediaDevices) selectScreen(constraints MediaTrackConstraints) (Tracker, error) {
typeFilter := driver.FilterVideoRecorder()
screenFilter := driver.FilterDeviceType(driver.Screen)
filter := driver.FilterAnd(typeFilter, screenFilter)
@@ -198,10 +255,10 @@ func selectScreen(constraints MediaTrackConstraints, selector *CodecSelector) (T
return nil, err
}
return newTrackFromDriver(d, c, selector)
return newTrack(&m.MediaDevicesOptions, d, c)
}
func EnumerateDevices() []MediaDeviceInfo {
func (m *mediaDevices) EnumerateDevices() []MediaDeviceInfo {
drivers := driver.GetManager().Query(
driver.FilterFn(func(driver.Driver) bool { return true }))
info := make([]MediaDeviceInfo, 0, len(drivers))


@@ -1,42 +1,90 @@
package mediadevices
import (
"errors"
"io"
"testing"
"time"
"github.com/pion/mediadevices/pkg/driver"
"github.com/pion/webrtc/v2"
"github.com/pion/webrtc/v2/pkg/media"
"github.com/pion/mediadevices/pkg/codec"
_ "github.com/pion/mediadevices/pkg/driver/audiotest"
_ "github.com/pion/mediadevices/pkg/driver/videotest"
"github.com/pion/mediadevices/pkg/io/audio"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
)
func TestGetUserMedia(t *testing.T) {
videoParams := mockParams{
BaseParams: codec.BaseParams{
BitRate: 100000,
},
name: "MockVideo",
}
audioParams := mockParams{
BaseParams: codec.BaseParams{
BitRate: 32000,
},
name: "MockAudio",
}
md := NewMediaDevicesFromCodecs(
map[webrtc.RTPCodecType][]*webrtc.RTPCodec{
webrtc.RTPCodecTypeVideo: []*webrtc.RTPCodec{
&webrtc.RTPCodec{Type: webrtc.RTPCodecTypeVideo, Name: "MockVideo", PayloadType: 1},
},
webrtc.RTPCodecTypeAudio: []*webrtc.RTPCodec{
&webrtc.RTPCodec{Type: webrtc.RTPCodecTypeAudio, Name: "MockAudio", PayloadType: 2},
},
},
WithTrackGenerator(
func(_ uint8, _ uint32, id, _ string, codec *webrtc.RTPCodec) (
LocalTrack, error,
) {
return newMockTrack(codec, id), nil
},
),
)
constraints := MediaStreamConstraints{
Video: func(c *MediaTrackConstraints) {
c.Enabled = true
c.Width = prop.Int(640)
c.Height = prop.Int(480)
params := videoParams
c.VideoEncoderBuilders = []codec.VideoEncoderBuilder{&params}
},
Audio: func(c *MediaTrackConstraints) {
c.Enabled = true
params := audioParams
c.AudioEncoderBuilders = []codec.AudioEncoderBuilder{&params}
},
}
constraintsWrong := MediaStreamConstraints{
Video: func(c *MediaTrackConstraints) {
c.Width = prop.IntExact(10000)
c.Enabled = true
c.Width = prop.Int(640)
c.Height = prop.Int(480)
params := videoParams
params.BitRate = 0
c.VideoEncoderBuilders = []codec.VideoEncoderBuilder{&params}
},
Audio: func(c *MediaTrackConstraints) {
c.Enabled = true
params := audioParams
c.AudioEncoderBuilders = []codec.AudioEncoderBuilder{&params}
},
}
// GetUserMedia with broken parameters
ms, err := GetUserMedia(constraintsWrong)
ms, err := md.GetUserMedia(constraintsWrong)
if err == nil {
t.Fatal("Expected error, but got nil")
}
// GetUserMedia with correct parameters
ms, err = GetUserMedia(constraints)
ms, err = md.GetUserMedia(constraints)
if err != nil {
t.Fatalf("Unexpected error: %v", err)
}
@@ -54,11 +102,11 @@ func TestGetUserMedia(t *testing.T) {
time.Sleep(50 * time.Millisecond)
for _, track := range tracks {
track.Close()
track.Stop()
}
// Stop and retry GetUserMedia
ms, err = GetUserMedia(constraints)
ms, err = md.GetUserMedia(constraints)
if err != nil {
t.Fatalf("Failed to GetUserMedia after the previsous tracks stopped: %v", err)
}
@@ -74,60 +122,98 @@ func TestGetUserMedia(t *testing.T) {
})
}
time.Sleep(50 * time.Millisecond)
for _, track := range tracks {
track.Close()
}
type mockTrack struct {
codec *webrtc.RTPCodec
id string
}
func newMockTrack(codec *webrtc.RTPCodec, id string) *mockTrack {
return &mockTrack{
codec: codec,
id: id,
}
}
func TestSelectBestDriverConstraintsResultIsSetProperly(t *testing.T) {
filterFn := driver.FilterVideoRecorder()
drivers := driver.GetManager().Query(filterFn)
if len(drivers) == 0 {
t.Fatal("expect to get at least 1 driver")
}
driver := drivers[0]
err := driver.Open()
if err != nil {
t.Fatal("expect to open driver successfully")
}
defer driver.Close()
if len(driver.Properties()) == 0 {
t.Fatal("expect to get at least 1 property")
}
expectedProp := driver.Properties()[0]
// Since this is a continuous value, bestConstraints should be set with the value that the user specified
expectedProp.FrameRate = 30.0
wantConstraints := MediaTrackConstraints{
MediaConstraints: prop.MediaConstraints{
VideoConstraints: prop.VideoConstraints{
// By reducing the width from the driver by a tiny amount, this property should be chosen.
// At the same time, we'll be able to find out if the return constraints will be properly set
// to the best constraints.
Width: prop.Int(expectedProp.Width - 1),
Height: prop.Int(expectedProp.Height),
FrameFormat: prop.FrameFormat(expectedProp.FrameFormat),
FrameRate: prop.Float(30.0),
},
},
}
bestDriver, bestConstraints, err := selectBestDriver(filterFn, wantConstraints)
if err != nil {
t.Fatal(err)
}
if driver != bestDriver {
t.Fatal("best driver is not expected")
}
s := bestConstraints.selectedMedia
if s.Width != expectedProp.Width ||
s.Height != expectedProp.Height ||
s.FrameFormat != expectedProp.FrameFormat ||
s.FrameRate != expectedProp.FrameRate {
t.Fatalf("failed to return best constraints\nexpected:\n%v\n\ngot:\n%v", expectedProp, bestConstraints.selectedMedia)
}
func (t *mockTrack) WriteSample(s media.Sample) error {
return nil
}
func (t *mockTrack) Codec() *webrtc.RTPCodec {
return t.codec
}
func (t *mockTrack) ID() string {
return t.id
}
func (t *mockTrack) Kind() webrtc.RTPCodecType {
return t.codec.Type
}
type mockParams struct {
codec.BaseParams
name string
}
func (params *mockParams) Name() string {
return params.name
}
func (params *mockParams) BuildVideoEncoder(r video.Reader, p prop.Media) (codec.ReadCloser, error) {
if params.BitRate == 0 {
// This is a dummy error to test the failure condition.
return nil, errors.New("wrong codec parameter")
}
return &mockVideoCodec{
r: r,
closed: make(chan struct{}),
}, nil
}
func (params *mockParams) BuildAudioEncoder(r audio.Reader, p prop.Media) (codec.ReadCloser, error) {
return &mockAudioCodec{
r: r,
closed: make(chan struct{}),
}, nil
}
type mockCodec struct{}
func (e *mockCodec) SetBitRate(b int) error {
return nil
}
func (e *mockCodec) ForceKeyFrame() error {
return nil
}
type mockVideoCodec struct {
mockCodec
r video.Reader
closed chan struct{}
}
func (m *mockVideoCodec) Read(b []byte) (int, error) {
if _, err := m.r.Read(); err != nil {
return 0, err
}
return len(b), nil
}
func (m *mockVideoCodec) Close() error { return nil }
type mockAudioCodec struct {
mockCodec
r audio.Reader
closed chan struct{}
}
func (m *mockAudioCodec) Read(b []byte) (int, error) {
if _, err := m.r.Read(); err != nil {
return 0, err
}
return len(b), nil
}
func (m *mockAudioCodec) Close() error { return nil }

View File

@@ -2,85 +2,89 @@ package mediadevices
import (
"sync"
"github.com/pion/webrtc/v2"
)
// MediaStream is an interface that represents a collection of existing tracks.
type MediaStream interface {
// GetAudioTracks implements https://w3c.github.io/mediacapture-main/#dom-mediastream-getaudiotracks
GetAudioTracks() []Track
GetAudioTracks() []Tracker
// GetVideoTracks implements https://w3c.github.io/mediacapture-main/#dom-mediastream-getvideotracks
GetVideoTracks() []Track
GetVideoTracks() []Tracker
// GetTracks implements https://w3c.github.io/mediacapture-main/#dom-mediastream-gettracks
GetTracks() []Track
GetTracks() []Tracker
// AddTrack implements https://w3c.github.io/mediacapture-main/#dom-mediastream-addtrack
AddTrack(t Track)
AddTrack(t Tracker)
// RemoveTrack implements https://w3c.github.io/mediacapture-main/#dom-mediastream-removetrack
RemoveTrack(t Track)
RemoveTrack(t Tracker)
}
type mediaStream struct {
tracks map[Track]struct{}
l sync.RWMutex
trackers map[string]Tracker
l sync.RWMutex
}
const trackTypeDefault MediaDeviceType = 0
const rtpCodecTypeDefault webrtc.RTPCodecType = 0
// NewMediaStream creates a MediaStream interface that's defined in
// https://w3c.github.io/mediacapture-main/#dom-mediastream
func NewMediaStream(tracks ...Track) (MediaStream, error) {
m := mediaStream{tracks: make(map[Track]struct{})}
func NewMediaStream(trackers ...Tracker) (MediaStream, error) {
m := mediaStream{trackers: make(map[string]Tracker)}
for _, track := range tracks {
if _, ok := m.tracks[track]; !ok {
m.tracks[track] = struct{}{}
for _, tracker := range trackers {
id := tracker.LocalTrack().ID()
if _, ok := m.trackers[id]; !ok {
m.trackers[id] = tracker
}
}
return &m, nil
}
func (m *mediaStream) GetAudioTracks() []Track {
return m.queryTracks(AudioInput)
func (m *mediaStream) GetAudioTracks() []Tracker {
return m.queryTracks(webrtc.RTPCodecTypeAudio)
}
func (m *mediaStream) GetVideoTracks() []Track {
return m.queryTracks(VideoInput)
func (m *mediaStream) GetVideoTracks() []Tracker {
return m.queryTracks(webrtc.RTPCodecTypeVideo)
}
func (m *mediaStream) GetTracks() []Track {
return m.queryTracks(trackTypeDefault)
func (m *mediaStream) GetTracks() []Tracker {
return m.queryTracks(rtpCodecTypeDefault)
}
// queryTracks returns all tracks that are the same kind as t.
// If t is 0, which is the default, queryTracks will return all the tracks.
func (m *mediaStream) queryTracks(t MediaDeviceType) []Track {
func (m *mediaStream) queryTracks(t webrtc.RTPCodecType) []Tracker {
m.l.RLock()
defer m.l.RUnlock()
result := make([]Track, 0)
for track := range m.tracks {
if track.Kind() == t || t == trackTypeDefault {
result = append(result, track)
result := make([]Tracker, 0)
for _, tracker := range m.trackers {
if tracker.LocalTrack().Kind() == t || t == rtpCodecTypeDefault {
result = append(result, tracker)
}
}
return result
}
func (m *mediaStream) AddTrack(t Track) {
func (m *mediaStream) AddTrack(t Tracker) {
m.l.Lock()
defer m.l.Unlock()
if _, ok := m.tracks[t]; ok {
id := t.LocalTrack().ID()
if _, ok := m.trackers[id]; ok {
return
}
m.tracks[t] = struct{}{}
m.trackers[id] = t
}
func (m *mediaStream) RemoveTrack(t Track) {
func (m *mediaStream) RemoveTrack(t Tracker) {
m.l.Lock()
defer m.l.Unlock()
delete(m.tracks, t)
delete(m.trackers, t.LocalTrack().ID())
}
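A minimal usage sketch (not part of this change set) of the Tracker-based MediaStream above; the trackers are assumed to come from GetUserMedia or a similar source, and the helper name is illustrative only:
// describeStream is a sketch: it builds a MediaStream from existing trackers
// and returns the IDs of its video trackers.
func describeStream(trackers ...Tracker) ([]string, error) {
    stream, err := NewMediaStream(trackers...)
    if err != nil {
        return nil, err
    }
    // GetVideoTracks filters by kind; GetTracks would return every tracker.
    ids := make([]string, 0)
    for _, t := range stream.GetVideoTracks() {
        ids = append(ids, t.LocalTrack().ID())
    }
    return ids, nil
}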

View File

@@ -1,88 +0,0 @@
package mediadevices
import (
"testing"
"github.com/pion/webrtc/v2"
)
type mockMediaStreamTrack struct {
kind MediaDeviceType
}
func (track *mockMediaStreamTrack) ID() string {
return ""
}
func (track *mockMediaStreamTrack) Close() error {
return nil
}
func (track *mockMediaStreamTrack) Kind() MediaDeviceType {
return track.kind
}
func (track *mockMediaStreamTrack) OnEnded(handler func(error)) {
}
func (track *mockMediaStreamTrack) Bind(pc *webrtc.PeerConnection) (*webrtc.Track, error) {
return nil, nil
}
func (track *mockMediaStreamTrack) Unbind(pc *webrtc.PeerConnection) error {
return nil
}
func TestMediaStreamFilters(t *testing.T) {
audioTracks := []Track{
&mockMediaStreamTrack{AudioInput},
&mockMediaStreamTrack{AudioInput},
&mockMediaStreamTrack{AudioInput},
&mockMediaStreamTrack{AudioInput},
&mockMediaStreamTrack{AudioInput},
}
videoTracks := []Track{
&mockMediaStreamTrack{VideoInput},
&mockMediaStreamTrack{VideoInput},
&mockMediaStreamTrack{VideoInput},
}
tracks := append(audioTracks, videoTracks...)
stream, err := NewMediaStream(tracks...)
if err != nil {
t.Fatal(err)
}
expect := func(t *testing.T, actual, expected []Track) {
if len(actual) != len(expected) {
t.Fatalf("%s: Expected to get %d trackers, but got %d trackers", t.Name(), len(expected), len(actual))
}
for _, a := range actual {
found := false
for _, e := range expected {
if e == a {
found = true
break
}
}
if !found {
t.Fatalf("%s: Expected to find %p in the query results", t.Name(), a)
}
}
}
t.Run("GetAudioTracks", func(t *testing.T) {
expect(t, stream.GetAudioTracks(), audioTracks)
})
t.Run("GetVideoTracks", func(t *testing.T) {
expect(t, stream.GetVideoTracks(), videoTracks)
})
t.Run("GetTracks", func(t *testing.T) {
expect(t, stream.GetTracks(), tracks)
})
}

View File

@@ -1,18 +1,40 @@
package mediadevices
import (
"github.com/pion/mediadevices/pkg/codec"
"github.com/pion/mediadevices/pkg/io/audio"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
)
type MediaStreamConstraints struct {
Audio MediaOption
Video MediaOption
Codec *CodecSelector
}
// MediaTrackConstraints represents https://w3c.github.io/mediacapture-main/#dom-mediatrackconstraints
type MediaTrackConstraints struct {
prop.MediaConstraints
Enabled bool
// VideoEncoderBuilders are codec builders that are used for encoding the video
// and, later, for sending the appropriate RTP payload type.
//
// If one encoder builder fails to build the codec, the next builder will be used,
// repeating until a codec builds. If no builders build successfully, an error is returned.
VideoEncoderBuilders []codec.VideoEncoderBuilder
// AudioEncoderBuilders are codec builders that are used for encoding the audio
// and, later, for sending the appropriate RTP payload type.
//
// If one encoder builder fails to build the codec, the next builder will be used,
// repeating until a codec builds. If no builders build successfully, an error is returned.
AudioEncoderBuilders []codec.AudioEncoderBuilder
// VideoTransform will be used to transform the video that's coming from the driver.
// So the pipeline looks like the following: driver -> VideoTransform -> codec
VideoTransform video.TransformFunc
// AudioTransform will be used to transform the audio that's coming from the driver.
// So the pipeline looks like the following: driver -> AudioTransform -> codec
AudioTransform audio.TransformFunc
selectedMedia prop.Media
}
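As a rough illustration (not part of this diff), the fields above can be wired up as below, mirroring the usage exercised by the test shown earlier in this change set. The encoder builder is passed in and would normally come from a codec package such as vpx or openh264; the identity function stands in for a real video.TransformFunc:
// exampleConstraints is a sketch showing how MediaTrackConstraints is typically
// populated. It relies only on the codec, prop, and video packages imported above.
func exampleConstraints(videoBuilder codec.VideoEncoderBuilder) MediaStreamConstraints {
    return MediaStreamConstraints{
        Video: func(c *MediaTrackConstraints) {
            c.Enabled = true
            c.Width = prop.Int(640)
            c.Height = prop.Int(480)
            // Builders are tried in order until one of them builds successfully.
            c.VideoEncoderBuilders = []codec.VideoEncoderBuilder{videoBuilder}
            // driver -> VideoTransform -> codec; identity shown for illustration.
            c.VideoTransform = func(r video.Reader) video.Reader { return r }
        },
    }
}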

35
meta.go
View File

@@ -1,35 +0,0 @@
package mediadevices
import (
"github.com/pion/mediadevices/pkg/io/audio"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
)
// detectCurrentVideoProp is a small helper to get current video property
func detectCurrentVideoProp(broadcaster *video.Broadcaster) (prop.Media, error) {
var currentProp prop.Media
// Since broadcaster has a ring buffer internally, a new reader will either read the last
// buffered frame or a new frame from the source. This also implies that no frame will be lost
// in any case.
metaReader := broadcaster.NewReader(false)
metaReader = video.DetectChanges(0, func(p prop.Media) { currentProp = p })(metaReader)
_, _, err := metaReader.Read()
return currentProp, err
}
// detectCurrentAudioProp is a small helper to get current audio property
func detectCurrentAudioProp(broadcaster *audio.Broadcaster) (prop.Media, error) {
var currentProp prop.Media
// Since broadcaster has a ring buffer internally, a new reader will either read the last
// buffered frame or a new frame from the source. This also implies that no frame will be lost
// in any case.
metaReader := broadcaster.NewReader(false)
metaReader = audio.DetectChanges(0, func(p prop.Media) { currentProp = p })(metaReader)
_, _, err := metaReader.Read()
return currentProp, err
}
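A short sketch (not part of this diff) of how these helpers are typically called; it has to live in package mediadevices since they are unexported, and the broadcaster is assumed to already wrap a driver's video.Reader:
// currentVideoResolution is a sketch: it reads one frame through a metadata
// reader to discover the current width and height, without that frame being
// lost for later readers.
func currentVideoResolution(broadcaster *video.Broadcaster) (width, height int, err error) {
    currentProp, err := detectCurrentVideoProp(broadcaster)
    if err != nil {
        return 0, 0, err
    }
    return currentProp.Width, currentProp.Height, nil
}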

View File

@@ -1,98 +0,0 @@
package mediadevices
import (
"image"
"testing"
"github.com/pion/mediadevices/pkg/io/audio"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/wave"
)
func TestDetectCurrentVideoProp(t *testing.T) {
resolution := image.Rect(0, 0, 4, 4)
first := image.NewRGBA(resolution)
first.Pix[0] = 1
second := image.NewRGBA(resolution)
second.Pix[0] = 2
isFirst := true
source := video.ReaderFunc(func() (image.Image, func(), error) {
if isFirst {
isFirst = false
return first, func() {}, nil
} else {
return second, func() {}, nil
}
})
broadcaster := video.NewBroadcaster(source, nil)
currentProp, err := detectCurrentVideoProp(broadcaster)
if err != nil {
t.Fatal(err)
}
if currentProp.Width != resolution.Dx() {
t.Fatalf("Expect the actual width to be %d, but got %d", currentProp.Width, resolution.Dx())
}
if currentProp.Height != resolution.Dy() {
t.Fatalf("Expect the actual height to be %d, but got %d", currentProp.Height, resolution.Dy())
}
reader := broadcaster.NewReader(false)
img, _, err := reader.Read()
if err != nil {
t.Fatal(err)
}
rgba := img.(*image.RGBA)
if rgba.Pix[0] != 1 {
t.Fatal("Expect the frame after reading the current prop is not the first frame")
}
}
func TestDetectCurrentAudioProp(t *testing.T) {
info := wave.ChunkInfo{
Len: 4,
Channels: 2,
SamplingRate: 48000,
}
first := wave.NewInt16Interleaved(info)
first.Data[0] = 1
second := wave.NewInt16Interleaved(info)
second.Data[0] = 2
isFirst := true
source := audio.ReaderFunc(func() (wave.Audio, func(), error) {
if isFirst {
isFirst = false
return first, func() {}, nil
} else {
return second, func() {}, nil
}
})
broadcaster := audio.NewBroadcaster(source, nil)
currentProp, err := detectCurrentAudioProp(broadcaster)
if err != nil {
t.Fatal(err)
}
if currentProp.ChannelCount != info.Channels {
t.Fatalf("Expect the actual channel count to be %d, but got %d", currentProp.ChannelCount, info.Channels)
}
reader := broadcaster.NewReader(false)
chunk, _, err := reader.Read()
if err != nil {
t.Fatal(err)
}
realChunk := chunk.(*wave.Int16Interleaved)
if realChunk.Data[0] != 1 {
t.Fatal("Expect the chunk after reading the current prop is not the first chunk")
}
}

View File

@@ -1,25 +0,0 @@
#User settings
xcuserdata/
## compatibility with Xcode 8 and earlier (ignoring not required starting Xcode 9)
*.xcscmblueprint
*.xccheckout
## compatibility with Xcode 3 and earlier (ignoring not required starting Xcode 4)
build/
DerivedData/
*.moved-aside
*.pbxuser
!default.pbxuser
*.mode1v3
!default.mode1v3
*.mode2v3
!default.mode2v3
*.perspectivev3
!default.perspectivev3
## Gcc Patch
/*.gcno
.DS_STORE
Build/

View File

@@ -1,294 +0,0 @@
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 50;
objects = {
/* Begin PBXBuildFile section */
F0143CC12479F78E00EC29C9 /* AVFoundationBind.h in Headers */ = {isa = PBXBuildFile; fileRef = F0143CC02479F78E00EC29C9 /* AVFoundationBind.h */; };
F0143CC32479F78E00EC29C9 /* AVFoundationBind.m in Sources */ = {isa = PBXBuildFile; fileRef = F0143CC22479F78E00EC29C9 /* AVFoundationBind.m */; settings = {COMPILER_FLAGS = "-fno-objc-arc"; }; };
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
F0143CBD2479F78E00EC29C9 /* libAVFoundationBind.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libAVFoundationBind.a; sourceTree = BUILT_PRODUCTS_DIR; };
F0143CC02479F78E00EC29C9 /* AVFoundationBind.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AVFoundationBind.h; sourceTree = "<group>"; };
F0143CC22479F78E00EC29C9 /* AVFoundationBind.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AVFoundationBind.m; sourceTree = "<group>"; };
F0FDDA0B247E15D900A3429D /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
F0143CBB2479F78E00EC29C9 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
F0143CB42479F78E00EC29C9 = {
isa = PBXGroup;
children = (
F0143CBF2479F78E00EC29C9 /* AVFoundationBind */,
F0143CBE2479F78E00EC29C9 /* Products */,
F0FDDA0A247E15D900A3429D /* Frameworks */,
);
sourceTree = "<group>";
};
F0143CBE2479F78E00EC29C9 /* Products */ = {
isa = PBXGroup;
children = (
F0143CBD2479F78E00EC29C9 /* libAVFoundationBind.a */,
);
name = Products;
sourceTree = "<group>";
};
F0143CBF2479F78E00EC29C9 /* AVFoundationBind */ = {
isa = PBXGroup;
children = (
F0143CC02479F78E00EC29C9 /* AVFoundationBind.h */,
F0143CC22479F78E00EC29C9 /* AVFoundationBind.m */,
);
path = AVFoundationBind;
sourceTree = "<group>";
};
F0FDDA0A247E15D900A3429D /* Frameworks */ = {
isa = PBXGroup;
children = (
F0FDDA0B247E15D900A3429D /* AVFoundation.framework */,
);
name = Frameworks;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXHeadersBuildPhase section */
F0143CB92479F78E00EC29C9 /* Headers */ = {
isa = PBXHeadersBuildPhase;
buildActionMask = 2147483647;
files = (
F0143CC12479F78E00EC29C9 /* AVFoundationBind.h in Headers */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXHeadersBuildPhase section */
/* Begin PBXNativeTarget section */
F0143CBC2479F78E00EC29C9 /* AVFoundationBind */ = {
isa = PBXNativeTarget;
buildConfigurationList = F0143CC62479F78E00EC29C9 /* Build configuration list for PBXNativeTarget "AVFoundationBind" */;
buildPhases = (
F0143CB92479F78E00EC29C9 /* Headers */,
F0143CBA2479F78E00EC29C9 /* Sources */,
F0143CBB2479F78E00EC29C9 /* Frameworks */,
);
buildRules = (
);
dependencies = (
);
name = AVFoundationBind;
productName = AVFoundationBind;
productReference = F0143CBD2479F78E00EC29C9 /* libAVFoundationBind.a */;
productType = "com.apple.product-type.library.static";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
F0143CB52479F78E00EC29C9 /* Project object */ = {
isa = PBXProject;
attributes = {
LastUpgradeCheck = 1150;
ORGANIZATIONNAME = "Herman, Lukas";
TargetAttributes = {
F0143CBC2479F78E00EC29C9 = {
CreatedOnToolsVersion = 11.5;
};
};
};
buildConfigurationList = F0143CB82479F78E00EC29C9 /* Build configuration list for PBXProject "AVFoundationBind" */;
compatibilityVersion = "Xcode 9.3";
developmentRegion = en;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = F0143CB42479F78E00EC29C9;
productRefGroup = F0143CBE2479F78E00EC29C9 /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
F0143CBC2479F78E00EC29C9 /* AVFoundationBind */,
);
};
/* End PBXProject section */
/* Begin PBXSourcesBuildPhase section */
F0143CBA2479F78E00EC29C9 /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
F0143CC32479F78E00EC29C9 /* AVFoundationBind.m in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin XCBuildConfiguration section */
F0143CC42479F78E00EC29C9 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
GCC_C_LANGUAGE_STANDARD = gnu11;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
MACOSX_DEPLOYMENT_TARGET = 10.15;
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
MTL_FAST_MATH = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = macosx;
};
name = Debug;
};
F0143CC52479F78E00EC29C9 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
CLANG_CXX_LIBRARY = "libc++";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
GCC_C_LANGUAGE_STANDARD = gnu11;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
MACOSX_DEPLOYMENT_TARGET = 10.15;
MTL_ENABLE_DEBUG_INFO = NO;
MTL_FAST_MATH = YES;
SDKROOT = macosx;
};
name = Release;
};
F0143CC72479F78E00EC29C9 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
CODE_SIGN_STYLE = Automatic;
EXECUTABLE_PREFIX = lib;
PRODUCT_NAME = "$(TARGET_NAME)";
SKIP_INSTALL = YES;
};
name = Debug;
};
F0143CC82479F78E00EC29C9 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
CODE_SIGN_STYLE = Automatic;
EXECUTABLE_PREFIX = lib;
PRODUCT_NAME = "$(TARGET_NAME)";
SKIP_INSTALL = YES;
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
F0143CB82479F78E00EC29C9 /* Build configuration list for PBXProject "AVFoundationBind" */ = {
isa = XCConfigurationList;
buildConfigurations = (
F0143CC42479F78E00EC29C9 /* Debug */,
F0143CC52479F78E00EC29C9 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
F0143CC62479F78E00EC29C9 /* Build configuration list for PBXNativeTarget "AVFoundationBind" */ = {
isa = XCConfigurationList;
buildConfigurations = (
F0143CC72479F78E00EC29C9 /* Debug */,
F0143CC82479F78E00EC29C9 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
};
rootObject = F0143CB52479F78E00EC29C9 /* Project object */;
}

View File

@@ -1,7 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "self:AVFoundationBind.xcodeproj">
</FileRef>
</Workspace>

View File

@@ -1,8 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>IDEDidComputeMac32BitWarning</key>
<true/>
</dict>
</plist>

View File

@@ -1,8 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>PreviewsEnabled</key>
<false/>
</dict>
</plist>

View File

@@ -1,67 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<Scheme
LastUpgradeVersion = "1150"
version = "1.3">
<BuildAction
parallelizeBuildables = "YES"
buildImplicitDependencies = "YES">
<BuildActionEntries>
<BuildActionEntry
buildForTesting = "YES"
buildForRunning = "YES"
buildForProfiling = "YES"
buildForArchiving = "YES"
buildForAnalyzing = "YES">
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "F0143CBC2479F78E00EC29C9"
BuildableName = "libAVFoundationBind.a"
BlueprintName = "AVFoundationBind"
ReferencedContainer = "container:AVFoundationBind.xcodeproj">
</BuildableReference>
</BuildActionEntry>
</BuildActionEntries>
</BuildAction>
<TestAction
buildConfiguration = "Debug"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
shouldUseLaunchSchemeArgsEnv = "YES">
<Testables>
</Testables>
</TestAction>
<LaunchAction
buildConfiguration = "Release"
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
launchStyle = "0"
useCustomWorkingDirectory = "NO"
ignoresPersistentStateOnLaunch = "NO"
debugDocumentVersioning = "YES"
debugServiceExtension = "internal"
allowLocationSimulation = "YES">
</LaunchAction>
<ProfileAction
buildConfiguration = "Release"
shouldUseLaunchSchemeArgsEnv = "YES"
savedToolIdentifier = ""
useCustomWorkingDirectory = "NO"
debugDocumentVersioning = "YES">
<MacroExpansion>
<BuildableReference
BuildableIdentifier = "primary"
BlueprintIdentifier = "F0143CBC2479F78E00EC29C9"
BuildableName = "libAVFoundationBind.a"
BlueprintName = "AVFoundationBind"
ReferencedContainer = "container:AVFoundationBind.xcodeproj">
</BuildableReference>
</MacroExpansion>
</ProfileAction>
<AnalyzeAction
buildConfiguration = "Debug">
</AnalyzeAction>
<ArchiveAction
buildConfiguration = "Release"
revealArchiveInOrganizer = "YES">
</ArchiveAction>
</Scheme>

View File

@@ -1,78 +0,0 @@
// MIT License
//
// Copyright (c) 2019-2020 Pion
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
#pragma once
#include <stddef.h>
#define MAX_DEVICES 8
#define MAX_PROPERTIES 64
#define MAX_DEVICE_UID_CHARS 64
typedef const char* STATUS;
static STATUS STATUS_OK = (STATUS) NULL;
static STATUS STATUS_NULL_ARG = (STATUS) "One of the arguments was null";
static STATUS STATUS_DEVICE_INIT_FAILED = (STATUS) "Failed to init device";
static STATUS STATUS_UNSUPPORTED_FRAME_FORMAT = (STATUS) "Unsupported frame format";
static STATUS STATUS_UNSUPPORTED_MEDIA_TYPE = (STATUS) "Unsupported media type";
static STATUS STATUS_FAILED_TO_ACQUIRE_LOCK = (STATUS) "Failed to acquire a lock";
static STATUS STATUS_UNSUPPORTED_FORMAT = (STATUS) "Unsupported device format";
typedef enum AVBindMediaType {
AVBindMediaTypeVideo,
AVBindMediaTypeAudio,
} AVBindMediaType;
typedef enum AVBindFrameFormat {
AVBindFrameFormatI420,
AVBindFrameFormatNV21,
AVBindFrameFormatYUY2,
AVBindFrameFormatUYVY,
} AVBindFrameFormat;
typedef void (*AVBindDataCallback)(void *userData, void *buf, int len);
typedef struct AVBindMediaProperty {
// video property
int width, height;
AVBindFrameFormat frameFormat;
// audio property
} AVBindMediaProperty, *PAVBindMediaProperty;
typedef struct AVBindSession AVBindSession, *PAVBindSession;
typedef struct {
char uid[MAX_DEVICE_UID_CHARS + 1];
} AVBindDevice, *PAVBindDevice;
// AVBindDevices returns a list of AVBindDevices. The result array points to static
// memory. The caller is expected not to hold on to the address for long and should make a copy.
// Every time this function gets called, the array is overwritten and the memory is reused.
STATUS AVBindDevices(AVBindMediaType, PAVBindDevice*, int*);
STATUS AVBindSessionInit(AVBindDevice, PAVBindSession*);
STATUS AVBindSessionFree(PAVBindSession*);
STATUS AVBindSessionOpen(PAVBindSession, AVBindMediaProperty, AVBindDataCallback, void*);
STATUS AVBindSessionClose(PAVBindSession);
STATUS AVBindSessionProperties(PAVBindSession, PAVBindMediaProperty*, int*);

View File

@@ -1,350 +0,0 @@
// MIT License
//
// Copyright (c) 2019-2020 Pion
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
// Naming Convention (let "name" as an actual variable name):
// - mName: "name" is a member of an Objective C object
// - pName: "name" is a C pointer
// - refName: "name" is an Objective C object reference
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import "AVFoundationBind.h"
#include <string.h>
#define CHK(condition, status) \
do { \
if(!(condition)) { \
retStatus = status; \
goto cleanup; \
} \
} while(0)
#define CHK_STATUS(status) \
do { \
if(status != STATUS_OK) { \
retStatus = status; \
goto cleanup; \
} \
} while(0)
@interface VideoDataDelegate : NSObject<AVCaptureVideoDataOutputSampleBufferDelegate>
@property (readonly) AVBindDataCallback mCallback;
@property (readonly) void *mPUserData;
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection;
@end
@implementation VideoDataDelegate
- (id) init: (AVBindDataCallback) callback
withUserData: (void*) pUserData {
self = [super init];
_mCallback = callback;
_mPUserData = pUserData;
return self;
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection {
if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 ||
!CMSampleBufferIsValid(sampleBuffer) ||
!CMSampleBufferDataIsReady(sampleBuffer)) {
return;
}
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
if (imageBuffer == NULL) {
return;
}
imageBuffer = CVBufferRetain(imageBuffer);
CVReturn ret =
CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
if (ret != kCVReturnSuccess) {
return;
}
size_t heightY = CVPixelBufferGetHeightOfPlane(imageBuffer, 0);
size_t bytesPerRowY = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);
size_t heightUV = CVPixelBufferGetHeightOfPlane(imageBuffer, 1);
size_t bytesPerRowUV = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 1);
int len = (int)((heightY * bytesPerRowY) + (2 * heightUV * bytesPerRowUV));
void *buf = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
_mCallback(_mPUserData, buf, len);
CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
CVBufferRelease(imageBuffer);
}
@end
@interface AudioDataDelegate : NSObject<AVCaptureAudioDataOutputSampleBufferDelegate>
@property (readonly) AVBindDataCallback mCallback;
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection;
@end
@implementation AudioDataDelegate
- (id) init: (AVBindDataCallback) callback {
self = [super init];
_mCallback = callback;
return self;
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection {
// TODO
}
@end
STATUS frameFormatToFourCC(AVBindFrameFormat format, FourCharCode *pFourCC) {
STATUS retStatus = STATUS_OK;
switch (format) {
case AVBindFrameFormatNV21:
*pFourCC = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
break;
case AVBindFrameFormatUYVY:
*pFourCC = kCVPixelFormatType_422YpCbCr8;
break;
// TODO: Add the rest of frame formats
default:
retStatus = STATUS_UNSUPPORTED_FRAME_FORMAT;
}
return retStatus;
}
STATUS frameFormatFromFourCC(FourCharCode fourCC, AVBindFrameFormat *pFormat) {
STATUS retStatus = STATUS_OK;
switch (fourCC) {
case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
*pFormat = AVBindFrameFormatNV21;
break;
case kCVPixelFormatType_422YpCbCr8:
*pFormat = AVBindFrameFormatUYVY;
break;
// TODO: Add the rest of frame formats
default:
retStatus = STATUS_UNSUPPORTED_FRAME_FORMAT;
}
return retStatus;
}
STATUS AVBindDevices(AVBindMediaType mediaType, PAVBindDevice *ppDevices, int *pLen) {
static AVBindDevice devices[MAX_DEVICES];
STATUS retStatus = STATUS_OK;
NSAutoreleasePool *refPool = [[NSAutoreleasePool alloc] init];
CHK(mediaType == AVBindMediaTypeVideo || mediaType == AVBindMediaTypeAudio, STATUS_UNSUPPORTED_MEDIA_TYPE);
CHK(ppDevices != NULL && pLen != NULL, STATUS_NULL_ARG);
PAVBindDevice pDevice;
AVMediaType _mediaType = mediaType == AVBindMediaTypeVideo ? AVMediaTypeVideo : AVMediaTypeAudio;
NSArray *refAllTypes = @[
AVCaptureDeviceTypeBuiltInWideAngleCamera,
AVCaptureDeviceTypeBuiltInMicrophone,
AVCaptureDeviceTypeExternalUnknown
];
AVCaptureDeviceDiscoverySession *refSession = [AVCaptureDeviceDiscoverySession
discoverySessionWithDeviceTypes: refAllTypes
mediaType: _mediaType
position: AVCaptureDevicePositionUnspecified];
int i = 0;
for (AVCaptureDevice *refDevice in refSession.devices) {
if (i >= MAX_DEVICES) {
break;
}
pDevice = devices + i;
strncpy(pDevice->uid, refDevice.uniqueID.UTF8String, MAX_DEVICE_UID_CHARS);
pDevice->uid[MAX_DEVICE_UID_CHARS] = '\0';
i++;
}
*ppDevices = devices;
*pLen = i;
cleanup:
[refPool drain];
return retStatus;
}
struct AVBindSession {
AVBindDevice device;
AVCaptureSession *refCaptureSession;
AVBindMediaProperty properties[MAX_PROPERTIES];
};
STATUS AVBindSessionInit(AVBindDevice device, PAVBindSession *ppSessionResult) {
STATUS retStatus = STATUS_OK;
CHK(ppSessionResult != NULL, STATUS_NULL_ARG);
PAVBindSession pSession = malloc(sizeof(AVBindSession));
pSession->device = device;
pSession->refCaptureSession = NULL;
*ppSessionResult = pSession;
cleanup:
return retStatus;
}
STATUS AVBindSessionFree(PAVBindSession *ppSession) {
STATUS retStatus = STATUS_OK;
CHK(ppSession != NULL, STATUS_NULL_ARG);
PAVBindSession pSession = *ppSession;
if (pSession->refCaptureSession != NULL) {
[pSession->refCaptureSession release];
pSession->refCaptureSession = NULL;
}
free(pSession);
*ppSession = NULL;
cleanup:
return retStatus;
}
STATUS AVBindSessionOpen(PAVBindSession pSession,
AVBindMediaProperty property,
AVBindDataCallback dataCallback,
void *pUserData) {
STATUS retStatus = STATUS_OK;
NSAutoreleasePool *refPool = [[NSAutoreleasePool alloc] init];
CHK(pSession != NULL && dataCallback != NULL, STATUS_NULL_ARG);
AVCaptureDeviceInput *refInput;
NSError *refErr = NULL;
NSString *refUID = [NSString stringWithUTF8String: pSession->device.uid];
AVCaptureDevice *refDevice = [AVCaptureDevice deviceWithUniqueID: refUID];
refInput = [[AVCaptureDeviceInput alloc] initWithDevice: refDevice error: &refErr];
CHK(refErr == NULL, STATUS_DEVICE_INIT_FAILED);
AVCaptureSession *refCaptureSession = [[AVCaptureSession alloc] init];
refCaptureSession.sessionPreset = AVCaptureSessionPresetMedium;
[refCaptureSession addInput: refInput];
if ([refDevice hasMediaType: AVMediaTypeVideo]) {
VideoDataDelegate *pDelegate = [[VideoDataDelegate alloc]
init: dataCallback
withUserData: pUserData];
AVCaptureVideoDataOutput *pOutput = [[AVCaptureVideoDataOutput alloc] init];
FourCharCode fourCC;
CHK_STATUS(frameFormatToFourCC(property.frameFormat, &fourCC));
pOutput.videoSettings = @{
(id)kCVPixelBufferWidthKey: @(property.width),
(id)kCVPixelBufferHeightKey: @(property.height),
(id)kCVPixelBufferPixelFormatTypeKey: @(fourCC),
};
pOutput.alwaysDiscardsLateVideoFrames = YES;
dispatch_queue_t queue =
dispatch_queue_create("captureQueue", DISPATCH_QUEUE_SERIAL);
[pOutput setSampleBufferDelegate:pDelegate queue:queue];
[refCaptureSession addOutput: pOutput];
} else {
// TODO: implement audio pipeline
}
pSession->refCaptureSession = [refCaptureSession retain];
[refCaptureSession startRunning];
cleanup:
[refPool drain];
return retStatus;
}
STATUS AVBindSessionClose(PAVBindSession pSession) {
STATUS retStatus = STATUS_OK;
CHK(pSession != NULL, STATUS_NULL_ARG);
CHK(pSession->refCaptureSession != NULL, STATUS_OK);
[pSession->refCaptureSession stopRunning];
[pSession->refCaptureSession release];
pSession->refCaptureSession = NULL;
cleanup:
return retStatus;
}
STATUS AVBindSessionProperties(PAVBindSession pSession, PAVBindMediaProperty *ppProperties, int *pLen) {
STATUS retStatus = STATUS_OK;
NSAutoreleasePool *refPool = [[NSAutoreleasePool alloc] init];
CHK(pSession != NULL && ppProperties != NULL && pLen != NULL, STATUS_NULL_ARG);
NSString *refDeviceUID = [NSString stringWithUTF8String: pSession->device.uid];
AVCaptureDevice *refDevice = [AVCaptureDevice deviceWithUniqueID: refDeviceUID];
FourCharCode fourCC;
CMVideoFormatDescriptionRef videoFormat;
CMVideoDimensions videoDimensions;
memset(pSession->properties, 0, sizeof(pSession->properties));
PAVBindMediaProperty pProperty = pSession->properties;
int len = 0;
for (AVCaptureDeviceFormat *refFormat in refDevice.formats) {
// TODO: Probably gives a warn to the user
if (len >= MAX_PROPERTIES) {
break;
}
if ([refFormat.mediaType isEqual:AVMediaTypeVideo]) {
fourCC = CMFormatDescriptionGetMediaSubType(refFormat.formatDescription);
if (frameFormatFromFourCC(fourCC, &pProperty->frameFormat) != STATUS_OK) {
continue;
}
videoFormat = (CMVideoFormatDescriptionRef) refFormat.formatDescription;
videoDimensions = CMVideoFormatDescriptionGetDimensions(videoFormat);
pProperty->height = videoDimensions.height;
pProperty->width = videoDimensions.width;
} else {
// TODO: Get audio properties
}
pProperty++;
len++;
}
*ppProperties = pSession->properties;
*pLen = len;
cleanup:
[refPool drain];
return retStatus;
}

View File

@@ -1,56 +0,0 @@
package avfoundation
// extern void onData(void*, void*, int);
import "C"
import (
"sync"
"unsafe"
)
var mu sync.Mutex
var nextID handleID
type dataCb func(data []byte)
var handles = make(map[handleID]dataCb)
type handleID int
//export onData
func onData(userData unsafe.Pointer, buf unsafe.Pointer, length C.int) {
data := C.GoBytes(buf, length)
handleNum := (*C.int)(userData)
cb, ok := lookup(handleID(*handleNum))
if ok {
cb(data)
}
}
func register(fn dataCb) handleID {
mu.Lock()
defer mu.Unlock()
nextID++
for handles[nextID] != nil {
nextID++
}
handles[nextID] = fn
return nextID
}
func lookup(i handleID) (cb dataCb, ok bool) {
mu.Lock()
defer mu.Unlock()
cb, ok = handles[i]
return
}
func unregister(i handleID) {
mu.Lock()
defer mu.Unlock()
delete(handles, i)
}

View File

@@ -1,217 +0,0 @@
// Package avfoundation provides AVFoundation binding for Go
package avfoundation
// #cgo CFLAGS: -x objective-c
// #cgo LDFLAGS: -framework AVFoundation -framework Foundation -framework CoreMedia -framework CoreVideo
// #include "AVFoundationBind/AVFoundationBind.h"
// #include "AVFoundationBind/AVFoundationBind.m"
// extern void onData(void*, void*, int);
// void onDataBridge(void *userData, void *buf, int len) {
// onData(userData, buf, len);
// }
import "C"
import (
"fmt"
"io"
"unsafe"
"github.com/pion/mediadevices/pkg/frame"
"github.com/pion/mediadevices/pkg/prop"
)
type MediaType C.AVBindMediaType
const (
Video = MediaType(C.AVBindMediaTypeVideo)
Audio = MediaType(C.AVBindMediaTypeAudio)
)
// Device represents metadata that can later be used to retrieve the
// underlying device from AVFoundation
type Device struct {
// UID is a unique identifier for a device
UID string
cDevice C.AVBindDevice
}
func frameFormatToAVBind(f frame.Format) (C.AVBindFrameFormat, bool) {
switch f {
case frame.FormatI420:
return C.AVBindFrameFormatI420, true
case frame.FormatNV21:
return C.AVBindFrameFormatNV21, true
case frame.FormatYUY2:
return C.AVBindFrameFormatYUY2, true
case frame.FormatUYVY:
return C.AVBindFrameFormatUYVY, true
default:
return 0, false
}
}
func frameFormatFromAVBind(f C.AVBindFrameFormat) (frame.Format, bool) {
switch f {
case C.AVBindFrameFormatI420:
return frame.FormatI420, true
case C.AVBindFrameFormatNV21:
return frame.FormatNV21, true
case C.AVBindFrameFormatYUY2:
return frame.FormatYUY2, true
case C.AVBindFrameFormatUYVY:
return frame.FormatUYVY, true
default:
return "", false
}
}
// Devices uses AVFoundation to query a list of devices based on the media type
func Devices(mediaType MediaType) ([]Device, error) {
var cDevicesPtr C.PAVBindDevice
var cDevicesLen C.int
status := C.AVBindDevices(C.AVBindMediaType(mediaType), &cDevicesPtr, &cDevicesLen)
if status != nil {
return nil, fmt.Errorf("%s", C.GoString(status))
}
// https://github.com/golang/go/wiki/cgo#turning-c-arrays-into-go-slices
cDevices := (*[1 << 28]C.AVBindDevice)(unsafe.Pointer(cDevicesPtr))[:cDevicesLen:cDevicesLen]
devices := make([]Device, cDevicesLen)
for i := range devices {
devices[i].UID = C.GoString(&cDevices[i].uid[0])
devices[i].cDevice = cDevices[i]
}
return devices, nil
}
// ReadCloser is a wrapper around the data callback from AVFoundation. The data received from
// the underlying callback can be retrieved by calling Read.
type ReadCloser struct {
dataChan chan []byte
id handleID
onClose func()
}
func newReadCloser(onClose func()) *ReadCloser {
var rc ReadCloser
rc.dataChan = make(chan []byte, 1)
rc.onClose = onClose
rc.id = register(rc.dataCb)
return &rc
}
func (rc *ReadCloser) dataCb(data []byte) {
// TODO: add a policy for slow reader
rc.dataChan <- data
}
// Read reads raw data; the format is determined by the media type and property:
// - For video, each call returns a frame.
// - For audio, each call returns a chunk whose size is configured by Latency.
func (rc *ReadCloser) Read() ([]byte, func(), error) {
data, ok := <-rc.dataChan
if !ok {
return nil, func() {}, io.EOF
}
return data, func() {}, nil
}
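A consumer loop for this ReadCloser might look like the sketch below (not part of this diff); Read returns io.EOF once Close has been called:
// drain is a sketch: it keeps reading frames (or audio chunks) until the
// capture session is closed, releasing each buffer after use.
func drain(rc *ReadCloser) error {
    for {
        data, release, err := rc.Read()
        if err == io.EOF {
            return nil // Close was called, no more data will arrive
        }
        if err != nil {
            return err
        }
        _ = data // process the raw frame/chunk here
        release()
    }
}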
// Close closes the capturing session, and no data will flow anymore
func (rc *ReadCloser) Close() {
if rc.onClose != nil {
rc.onClose()
}
close(rc.dataChan)
unregister(rc.id)
}
// Session represents a capturing session.
type Session struct {
device Device
cSession C.PAVBindSession
}
// NewSession creates a new capturing session
func NewSession(device Device) (*Session, error) {
var session Session
status := C.AVBindSessionInit(device.cDevice, &session.cSession)
if status != nil {
return nil, fmt.Errorf("%s", C.GoString(status))
}
session.device = device
return &session, nil
}
// Close stops the capturing session and frees up resources
func (session *Session) Close() error {
if session.cSession == nil {
return nil
}
status := C.AVBindSessionFree(&session.cSession)
if status != nil {
return fmt.Errorf("%s", C.GoString(status))
}
return nil
}
// Open starts the capturing session. As soon as it returns successfully, the data will start
// flowing. The raw data can be retrieved by using ReadCloser's Read method.
func (session *Session) Open(property prop.Media) (*ReadCloser, error) {
frameFormat, ok := frameFormatToAVBind(property.FrameFormat)
if !ok {
return nil, fmt.Errorf("Unsupported frame format")
}
cProperty := C.AVBindMediaProperty{
width: C.int(property.Width),
height: C.int(property.Height),
frameFormat: frameFormat,
}
rc := newReadCloser(func() {
C.AVBindSessionClose(session.cSession)
})
status := C.AVBindSessionOpen(
session.cSession,
cProperty,
C.AVBindDataCallback(unsafe.Pointer(C.onDataBridge)),
unsafe.Pointer(&rc.id),
)
if status != nil {
return nil, fmt.Errorf("%s", C.GoString(status))
}
return rc, nil
}
// Properties queries a list of properties that the device supports
func (session *Session) Properties() []prop.Media {
var cPropertiesPtr C.PAVBindMediaProperty
var cPropertiesLen C.int
status := C.AVBindSessionProperties(session.cSession, &cPropertiesPtr, &cPropertiesLen)
if status != nil {
return nil
}
// https://github.com/golang/go/wiki/cgo#turning-c-arrays-into-go-slices
cProperties := (*[1 << 28]C.AVBindMediaProperty)(unsafe.Pointer(cPropertiesPtr))[:cPropertiesLen:cPropertiesLen]
var properties []prop.Media
for _, cProperty := range cProperties {
frameFormat, ok := frameFormatFromAVBind(cProperty.frameFormat)
if ok {
properties = append(properties, prop.Media{
Video: prop.Video{
Width: int(cProperty.width),
Height: int(cProperty.height),
FrameFormat: frameFormat,
},
})
}
}
return properties
}
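Putting the pieces together, a minimal end-to-end sketch (not part of this diff) that enumerates video devices and opens a capture session with the first property the device reports; the helper name is illustrative only:
// captureFirstVideoDevice is a sketch: it returns an open ReadCloser and the
// owning Session for the first video device found. The caller is expected to
// call rc.Close and session.Close when done.
func captureFirstVideoDevice() (*ReadCloser, *Session, error) {
    devices, err := Devices(Video)
    if err != nil {
        return nil, nil, err
    }
    if len(devices) == 0 {
        return nil, nil, fmt.Errorf("no video devices found")
    }
    session, err := NewSession(devices[0])
    if err != nil {
        return nil, nil, err
    }
    props := session.Properties()
    if len(props) == 0 {
        session.Close()
        return nil, nil, fmt.Errorf("device reported no supported properties")
    }
    rc, err := session.Open(props[0])
    if err != nil {
        session.Close()
        return nil, nil, err
    }
    return rc, session, nil
}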

View File

@@ -1,45 +1,21 @@
package codec
import (
"io"
"github.com/pion/mediadevices/pkg/io/audio"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
"github.com/pion/webrtc/v2"
)
// RTPCodec wraps webrtc.RTPCodec. RTPCodec might extend webrtc.RTPCodec in the future.
type RTPCodec struct {
*webrtc.RTPCodec
}
// NewRTPH264Codec is a helper to create an H264 codec
func NewRTPH264Codec(clockrate uint32) *RTPCodec {
return &RTPCodec{webrtc.NewRTPH264Codec(webrtc.DefaultPayloadTypeH264, clockrate)}
}
// NewRTPVP8Codec is a helper to create a VP8 codec
func NewRTPVP8Codec(clockrate uint32) *RTPCodec {
return &RTPCodec{webrtc.NewRTPVP8Codec(webrtc.DefaultPayloadTypeVP8, clockrate)}
}
// NewRTPVP9Codec is a helper to create a VP9 codec
func NewRTPVP9Codec(clockrate uint32) *RTPCodec {
return &RTPCodec{webrtc.NewRTPVP9Codec(webrtc.DefaultPayloadTypeVP9, clockrate)}
}
// NewRTPOpusCodec is a helper to create an Opus codec
func NewRTPOpusCodec(clockrate uint32) *RTPCodec {
return &RTPCodec{webrtc.NewRTPOpusCodec(webrtc.DefaultPayloadTypeOpus, clockrate)}
}
// AudioEncoderBuilder is the interface that wraps basic operations that are
// necessary to build the audio encoder.
//
// This interface is for codec implementors to provide codec specific params,
// but still giving generality for the users.
type AudioEncoderBuilder interface {
// RTPCodec represents the codec metadata
RTPCodec() *RTPCodec
// Name represents the codec name
Name() string
// BuildAudioEncoder builds audio encoder by given media params and audio input
BuildAudioEncoder(r audio.Reader, p prop.Media) (ReadCloser, error)
}
@@ -50,16 +26,15 @@ type AudioEncoderBuilder interface {
// This interface is for codec implementors to provide codec specific params,
// but still giving generality for the users.
type VideoEncoderBuilder interface {
// RTPCodec represents the codec metadata
RTPCodec() *RTPCodec
// Name represents the codec name
Name() string
// BuildVideoEncoder builds video encoder by given media params and video input
BuildVideoEncoder(r video.Reader, p prop.Media) (ReadCloser, error)
}
// ReadCloser is an io.ReadCloser with methods for rate limiting: SetBitRate and ForceKeyFrame
type ReadCloser interface {
Read() (b []byte, release func(), err error)
Close() error
io.ReadCloser
// SetBitRate sets the current target bitrate. A lower bitrate means less data is transmitted,
// but it also means lower quality.
SetBitRate(int) error

View File

@@ -1,196 +0,0 @@
#include <interface/mmal/mmal.h>
#include <interface/mmal/util/mmal_default_components.h>
#include <interface/mmal/util/mmal_util_params.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdlib.h>
#define CHK(__status, __msg) \
do { \
status.code = __status; \
if (status.code != MMAL_SUCCESS) { \
status.msg = __msg; \
goto CleanUp; \
} \
} while (0)
typedef struct Status {
MMAL_STATUS_T code;
const char *msg;
} Status;
typedef struct Slice {
uint8_t *data;
int len;
} Slice;
typedef struct Params {
int width, height;
uint32_t bitrate;
uint32_t key_frame_interval;
} Params;
typedef struct Encoder {
MMAL_COMPONENT_T *component;
MMAL_PORT_T *port_in, *port_out;
MMAL_QUEUE_T *queue_out;
MMAL_POOL_T *pool_in, *pool_out;
} Encoder;
Status enc_new(Params, Encoder *);
Status enc_encode(Encoder *, Slice y, Slice cb, Slice cr, MMAL_BUFFER_HEADER_T **);
Status enc_close(Encoder *);
static void encoder_in_cb(MMAL_PORT_T *port, MMAL_BUFFER_HEADER_T *buffer) { mmal_buffer_header_release(buffer); }
static void encoder_out_cb(MMAL_PORT_T *port, MMAL_BUFFER_HEADER_T *buffer) {
MMAL_QUEUE_T *queue = (MMAL_QUEUE_T *)port->userdata;
mmal_queue_put(queue, buffer);
}
Status enc_new(Params params, Encoder *encoder) {
Status status = {0};
bool created = false;
memset(encoder, 0, sizeof(Encoder));
CHK(mmal_component_create(MMAL_COMPONENT_DEFAULT_VIDEO_ENCODER, &encoder->component),
"Failed to create video encoder component");
created = true;
encoder->port_in = encoder->component->input[0];
encoder->port_in->format->type = MMAL_ES_TYPE_VIDEO;
encoder->port_in->format->encoding = MMAL_ENCODING_I420;
encoder->port_in->format->es->video.width = params.width;
encoder->port_in->format->es->video.height = params.height;
encoder->port_in->format->es->video.par.num = 1;
encoder->port_in->format->es->video.par.den = 1;
encoder->port_in->format->es->video.crop.x = 0;
encoder->port_in->format->es->video.crop.y = 0;
encoder->port_in->format->es->video.crop.width = params.width;
encoder->port_in->format->es->video.crop.height = params.height;
CHK(mmal_port_format_commit(encoder->port_in), "Failed to commit input port format");
encoder->port_out = encoder->component->output[0];
encoder->port_out->format->type = MMAL_ES_TYPE_VIDEO;
encoder->port_out->format->encoding = MMAL_ENCODING_H264;
encoder->port_out->format->bitrate = params.bitrate;
CHK(mmal_port_format_commit(encoder->port_out), "Failed to commit output port format");
MMAL_PARAMETER_VIDEO_PROFILE_T encoder_param_profile = {0};
encoder_param_profile.hdr.id = MMAL_PARAMETER_PROFILE;
encoder_param_profile.hdr.size = sizeof(encoder_param_profile);
encoder_param_profile.profile[0].profile = MMAL_VIDEO_PROFILE_H264_BASELINE;
encoder_param_profile.profile[0].level = MMAL_VIDEO_LEVEL_H264_42;
CHK(mmal_port_parameter_set(encoder->port_out, &encoder_param_profile.hdr), "Failed to set encoder profile param");
CHK(mmal_port_parameter_set_uint32(encoder->port_out, MMAL_PARAMETER_INTRAPERIOD, params.key_frame_interval),
"Failed to set intra period param");
MMAL_PARAMETER_VIDEO_RATECONTROL_T encoder_param_rate_control = {0};
encoder_param_rate_control.hdr.id = MMAL_PARAMETER_RATECONTROL;
encoder_param_rate_control.hdr.size = sizeof(encoder_param_rate_control);
encoder_param_rate_control.control = MMAL_VIDEO_RATECONTROL_VARIABLE;
CHK(mmal_port_parameter_set(encoder->port_out, &encoder_param_rate_control.hdr), "Failed to set rate control param");
// Some decoders expect SPS/PPS headers to be added to every frame
CHK(mmal_port_parameter_set_boolean(encoder->port_out, MMAL_PARAMETER_VIDEO_ENCODE_INLINE_HEADER, MMAL_TRUE),
"Failed to set inline header param");
CHK(mmal_port_parameter_set_boolean(encoder->port_out, MMAL_PARAMETER_VIDEO_ENCODE_HEADERS_WITH_FRAME, MMAL_TRUE),
"Failed to set headers with frame param");
/* FIXME: Somehow this flag is broken? When this flag is on, the encoder will get stuck.
// Since our use case is mainly real-time streaming, the encoder should be optimized for low latency
CHK(mmal_port_parameter_set_boolean(encoder->port_out, MMAL_PARAMETER_VIDEO_ENCODE_H264_LOW_LATENCY, MMAL_TRUE),
"Failed to set low latency param");
*/
// Now that we know the format of both ports and the requirements of the encoder, we can create
// our buffer headers and their associated memory buffers. We use the buffer pool API for this.
encoder->port_in->buffer_num = encoder->port_in->buffer_num_min;
// mmal calculates recommended size that's big enough to store all of the pixels
encoder->port_in->buffer_size = encoder->port_in->buffer_size_recommended;
encoder->pool_in = mmal_pool_create(encoder->port_in->buffer_num, encoder->port_in->buffer_size);
encoder->port_out->buffer_num = encoder->port_out->buffer_num_min;
encoder->port_out->buffer_size = encoder->port_out->buffer_size_recommended;
encoder->pool_out = mmal_pool_create(encoder->port_out->buffer_num, encoder->port_out->buffer_size);
// Create a queue to store our encoded video frames. The callback we will get when
// a frame has been encoded will put the frame into this queue.
encoder->queue_out = mmal_queue_create();
encoder->port_out->userdata = (void *)encoder->queue_out;
// Enable all the input port and the output port.
// The callback specified here is the function which will be called when the buffer header
// we sent to the component has been processed.
CHK(mmal_port_enable(encoder->port_in, encoder_in_cb), "Failed to enable input port");
CHK(mmal_port_enable(encoder->port_out, encoder_out_cb), "Failed to enable output port");
// Enable the component. Components will only process data when they are enabled.
CHK(mmal_component_enable(encoder->component), "Failed to enable component");
CleanUp:
if (status.code != MMAL_SUCCESS) {
if (created) {
enc_close(encoder);
}
}
return status;
}
// enc_encode encodes y, cb, cr. The encoded frame is going to be stored in encoded_buffer.
// IMPORTANT: the caller is responsible for releasing the ownership of encoded_buffer
Status enc_encode(Encoder *encoder, Slice y, Slice cb, Slice cr, MMAL_BUFFER_HEADER_T **encoded_buffer) {
Status status = {0};
MMAL_BUFFER_HEADER_T *buffer;
uint32_t required_size;
// buffer should always be available since the encoding process is blocking
buffer = mmal_queue_get(encoder->pool_in->queue);
assert(buffer != NULL);
// buffer->data should have been allocated by pool_in with enough memory to contain a frame
required_size = y.len + cb.len + cr.len;
assert(buffer->alloc_size >= required_size);
memcpy(buffer->data, y.data, y.len);
memcpy(buffer->data + y.len, cb.data, cb.len);
memcpy(buffer->data + y.len + cb.len, cr.data, cr.len);
buffer->length = required_size;
CHK(mmal_port_send_buffer(encoder->port_in, buffer), "Failed to send filled buffer to input port");
while (1) {
// Send empty buffers to the output port to allow the encoder to start
// producing frames as soon as it gets input data
while ((buffer = mmal_queue_get(encoder->pool_out->queue)) != NULL) {
CHK(mmal_port_send_buffer(encoder->port_out, buffer), "Failed to send empty buffers to output port");
}
while ((buffer = mmal_queue_wait(encoder->queue_out)) != NULL) {
if ((buffer->flags & MMAL_BUFFER_HEADER_FLAG_FRAME_END) != 0) {
*encoded_buffer = buffer;
goto CleanUp;
}
mmal_buffer_header_release(buffer);
}
}
CleanUp:
return status;
}
Status enc_close(Encoder *encoder) {
Status status = {0};
mmal_pool_destroy(encoder->pool_out);
mmal_pool_destroy(encoder->pool_in);
mmal_queue_destroy(encoder->queue_out);
mmal_component_destroy(encoder->component);
CleanUp:
return status;
}

View File

@@ -1,112 +0,0 @@
// Package mmal implements a hardware-accelerated H264 encoder for the Raspberry Pi.
// This package requires the libmmal headers and libraries in order to build.
// Reference: https://github.com/raspberrypi/userland/tree/master/interface/mmal
package mmal
// #cgo pkg-config: mmal
// #include "bridge.h"
import "C"
import (
"fmt"
"image"
"io"
"sync"
"unsafe"
"github.com/pion/mediadevices/pkg/codec"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
)
type encoder struct {
engine C.Encoder
r video.Reader
mu sync.Mutex
closed bool
cntr int
}
func statusToErr(status *C.Status) error {
return fmt.Errorf("(status = %d) %s", int(status.code), C.GoString(status.msg))
}
func newEncoder(r video.Reader, p prop.Media, params Params) (codec.ReadCloser, error) {
if params.KeyFrameInterval == 0 {
params.KeyFrameInterval = 60
}
if params.BitRate == 0 {
params.BitRate = 300000
}
e := encoder{
r: video.ToI420(r),
}
status := C.enc_new(C.Params{
width: C.int(p.Width),
height: C.int(p.Height),
bitrate: C.uint(params.BitRate),
key_frame_interval: C.uint(params.KeyFrameInterval),
}, &e.engine)
if status.code != 0 {
return nil, statusToErr(&status)
}
return &e, nil
}
func (e *encoder) Read() ([]byte, func(), error) {
e.mu.Lock()
defer e.mu.Unlock()
if e.closed {
return nil, func() {}, io.EOF
}
img, _, err := e.r.Read()
if err != nil {
return nil, func() {}, err
}
imgReal := img.(*image.YCbCr)
var y, cb, cr C.Slice
y.data = (*C.uchar)(&imgReal.Y[0])
y.len = C.int(len(imgReal.Y))
cb.data = (*C.uchar)(&imgReal.Cb[0])
cb.len = C.int(len(imgReal.Cb))
cr.data = (*C.uchar)(&imgReal.Cr[0])
cr.len = C.int(len(imgReal.Cr))
var encodedBuffer *C.MMAL_BUFFER_HEADER_T
status := C.enc_encode(&e.engine, y, cb, cr, &encodedBuffer)
if status.code != 0 {
return nil, func() {}, statusToErr(&status)
}
// GoBytes copies the C array to Go slice. After this, it's safe to release the C array
encoded := C.GoBytes(unsafe.Pointer(encodedBuffer.data), C.int(encodedBuffer.length))
// Release the buffer so that mmal can reuse this memory
C.mmal_buffer_header_release(encodedBuffer)
return encoded, func() {}, err
}
func (e *encoder) SetBitRate(b int) error {
panic("SetBitRate is not implemented")
}
func (e *encoder) ForceKeyFrame() error {
panic("ForceKeyFrame is not implemented")
}
func (e *encoder) Close() error {
e.mu.Lock()
defer e.mu.Unlock()
if e.closed {
return nil
}
e.closed = true
C.enc_close(&e.engine)
return nil
}


@@ -1,31 +0,0 @@
package mmal
import (
"github.com/pion/mediadevices/pkg/codec"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
)
// Params stores libmmal specific encoding parameters.
type Params struct {
codec.BaseParams
}
// NewParams returns default mmal codec specific parameters.
func NewParams() (Params, error) {
return Params{
BaseParams: codec.BaseParams{
KeyFrameInterval: 60,
},
}, nil
}
// RTPCodec represents the codec metadata
func (p *Params) RTPCodec() *codec.RTPCodec {
return codec.NewRTPH264Codec(90000)
}
// BuildVideoEncoder builds mmal encoder with given params
func (p *Params) BuildVideoEncoder(r video.Reader, property prop.Media) (codec.ReadCloser, error) {
return newEncoder(r, property, *p)
}
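A minimal usage sketch of the Params above (not part of this diff): the import paths, the videoReader source, and the resolution are placeholders; only NewParams, BuildVideoEncoder, and the encoder's Read/Close signatures come from the code shown here. It only builds where libmmal is available (Raspberry Pi).

package main

import (
    "log"

    "github.com/pion/mediadevices/pkg/codec/mmal"
    "github.com/pion/mediadevices/pkg/io/video"
    "github.com/pion/mediadevices/pkg/prop"
)

func encodeH264(videoReader video.Reader) {
    params, err := mmal.NewParams()
    if err != nil {
        log.Fatal(err)
    }
    params.BitRate = 500000 // bps; leaving it 0 falls back to the 300000 default above

    var media prop.Media
    media.Width = 640
    media.Height = 480

    enc, err := params.BuildVideoEncoder(videoReader, media)
    if err != nil {
        log.Fatal(err)
    }
    defer enc.Close()

    for {
        frame, release, err := enc.Read() // one encoded H264 frame per call
        if err != nil {
            return
        }
        // Hand frame to a packetizer or writer here, then release the buffer.
        _ = frame
        release()
    }
}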


@@ -16,6 +16,7 @@ import (
"unsafe"
"github.com/pion/mediadevices/pkg/codec"
mio "github.com/pion/mediadevices/pkg/io"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
)
@@ -23,6 +24,7 @@ import (
type encoder struct {
engine *C.Encoder
r video.Reader
buff []byte
mu sync.Mutex
closed bool
@@ -50,17 +52,26 @@ func newEncoder(r video.Reader, p prop.Media, params Params) (codec.ReadCloser,
}, nil
}
func (e *encoder) Read() ([]byte, func(), error) {
func (e *encoder) Read(p []byte) (n int, err error) {
e.mu.Lock()
defer e.mu.Unlock()
if e.closed {
return nil, func() {}, io.EOF
return 0, io.EOF
}
img, _, err := e.r.Read()
if e.buff != nil {
n, err = mio.Copy(p, e.buff)
if err == nil {
e.buff = nil
}
return n, err
}
img, err := e.r.Read()
if err != nil {
return nil, func() {}, err
return 0, err
}
yuvImg := img.(*image.YCbCr)
@@ -74,11 +85,16 @@ func (e *encoder) Read() ([]byte, func(), error) {
width: C.int(bounds.Max.X - bounds.Min.X),
}, &rv)
if err := errResult(rv); err != nil {
return nil, func() {}, fmt.Errorf("failed in encoding: %v", err)
return 0, fmt.Errorf("failed in encoding: %v", err)
}
encoded := C.GoBytes(unsafe.Pointer(s.data), s.data_len)
return encoded, func() {}, nil
n, err = mio.Copy(p, encoded)
if err != nil {
e.buff = encoded
}
return n, err
}
func (e *encoder) SetBitRate(b int) error {


@@ -4,6 +4,7 @@ import (
"github.com/pion/mediadevices/pkg/codec"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
"github.com/pion/webrtc/v2"
)
// Params stores libopenh264 specific encoding parameters.
@@ -20,9 +21,9 @@ func NewParams() (Params, error) {
}, nil
}
// RTPCodec represents the codec metadata
func (p *Params) RTPCodec() *codec.RTPCodec {
return codec.NewRTPH264Codec(90000)
// Name represents the codec name
func (p *Params) Name() string {
return webrtc.H264
}
// BuildVideoEncoder builds openh264 encoder with given params


@@ -1,7 +1,6 @@
package opus
import (
"errors"
"fmt"
"math"
@@ -10,12 +9,11 @@ import (
"github.com/pion/mediadevices/pkg/io/audio"
"github.com/pion/mediadevices/pkg/prop"
"github.com/pion/mediadevices/pkg/wave"
"github.com/pion/mediadevices/pkg/wave/mixer"
)
type encoder struct {
engine *opus.Encoder
inBuff wave.Audio
inBuff *wave.Float32Interleaved
reader audio.Reader
}
@@ -34,10 +32,6 @@ func newEncoder(r audio.Reader, p prop.Media, params Params) (codec.ReadCloser,
params.BitRate = 32000
}
if params.ChannelMixer == nil {
params.ChannelMixer = &mixer.MonoMixer{}
}
// Select the nearest supported latency
var targetLatency float64
// TODO: use p.Latency.Milliseconds() after Go 1.12 EOL
@@ -53,7 +47,8 @@ func newEncoder(r audio.Reader, p prop.Media, params Params) (codec.ReadCloser,
targetLatency = latency
}
channels := p.ChannelCount
// Since audio.Reader only supports stereo mode, channels is always 2
channels := 2
engine, err := opus.NewEncoder(p.SampleRate, channels, opus.AppVoIP)
if err != nil {
@@ -63,32 +58,47 @@ func newEncoder(r audio.Reader, p prop.Media, params Params) (codec.ReadCloser,
return nil, err
}
rMix := audio.NewChannelMixer(channels, params.ChannelMixer)
rBuf := audio.NewBuffer(int(targetLatency * float64(p.SampleRate) / 1000))
e := encoder{
engine: engine,
reader: rMix(rBuf(r)),
}
inBuffSize := int(targetLatency * float64(p.SampleRate) / 1000)
inBuff := wave.NewFloat32Interleaved(
wave.ChunkInfo{Channels: channels, Len: inBuffSize},
)
inBuff.Data = inBuff.Data[:0]
e := encoder{engine, inBuff, r}
return &e, nil
}
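To make the buffer sizing above concrete, a small worked example (the 20 ms / 48 kHz figures are illustrative, not required by this code):

package main

import "fmt"

func main() {
    const targetLatency = 20.0 // ms, one of the latencies the selection loop above can pick
    const sampleRate = 48000
    const channels = 2 // audio.Reader is assumed stereo, as noted above

    inBuffSize := int(targetLatency * float64(sampleRate) / 1000) // 960 samples per channel
    nLatency := inBuffSize * channels                             // 1920 float32 values per EncodeFloat32 call
    fmt.Println(inBuffSize, nLatency)
}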
func (e *encoder) Read() ([]byte, func(), error) {
buff, _, err := e.reader.Read()
if err != nil {
return nil, func() {}, err
func (e *encoder) Read(p []byte) (n int, err error) {
// While the buffer is not full, keep reading so that we meet the latency requirement
nLatency := e.inBuff.ChunkInfo().Len * e.inBuff.ChunkInfo().Channels
for len(e.inBuff.Data) < nLatency {
buff, err := e.reader.Read()
if err != nil {
return 0, err
}
// TODO: convert audio format
b, ok := buff.(*wave.Float32Interleaved)
if !ok {
panic("unsupported audio format")
}
switch {
case b.Size.Channels == 1 && e.inBuff.ChunkInfo().Channels != 1:
for _, d := range b.Data {
for ch := 0; ch < e.inBuff.ChunkInfo().Channels; ch++ {
e.inBuff.Data = append(e.inBuff.Data, d)
}
}
case b.Size.Channels == e.inBuff.ChunkInfo().Channels:
e.inBuff.Data = append(e.inBuff.Data, b.Data...)
}
}
encoded := make([]byte, 1024)
switch b := buff.(type) {
case *wave.Int16Interleaved:
n, err := e.engine.Encode(b.Data, encoded)
return encoded[:n:n], func() {}, err
case *wave.Float32Interleaved:
n, err := e.engine.EncodeFloat32(b.Data, encoded)
return encoded[:n:n], func() {}, err
default:
return nil, func() {}, errors.New("unknown type of audio buffer")
n, err = e.engine.EncodeFloat32(e.inBuff.Data[:nLatency], p)
if err != nil {
return n, err
}
e.inBuff.Data = e.inBuff.Data[nLatency:]
return n, nil
}
func (e *encoder) SetBitRate(b int) error {


@@ -4,14 +4,12 @@ import (
"github.com/pion/mediadevices/pkg/codec"
"github.com/pion/mediadevices/pkg/io/audio"
"github.com/pion/mediadevices/pkg/prop"
"github.com/pion/mediadevices/pkg/wave/mixer"
"github.com/pion/webrtc/v2"
)
// Params stores opus specific encoding parameters.
type Params struct {
codec.BaseParams
// ChannelMixer is a mixer to be used if number of given and expected channels differ.
ChannelMixer mixer.ChannelMixer
}
// NewParams returns default opus codec specific parameters.
@@ -19,9 +17,9 @@ func NewParams() (Params, error) {
return Params{}, nil
}
// RTPCodec represents the codec metadata
func (p *Params) RTPCodec() *codec.RTPCodec {
return codec.NewRTPOpusCodec(48000)
// Name represents the codec name
func (p *Params) Name() string {
return webrtc.Opus
}
// BuildAudioEncoder builds opus encoder with given params


@@ -4,6 +4,7 @@ import (
"github.com/pion/mediadevices/pkg/codec"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
"github.com/pion/webrtc/v2"
)
// ParamsVP8 stores VP8 encoding parameters.
@@ -43,9 +44,9 @@ func NewVP8Params() (ParamsVP8, error) {
}, nil
}
// RTPCodec represents the codec metadata
func (p *ParamsVP8) RTPCodec() *codec.RTPCodec {
return codec.NewRTPVP8Codec(90000)
// Name represents the codec name
func (p *ParamsVP8) Name() string {
return webrtc.VP8
}
// BuildVideoEncoder builds VP8 encoder with given params
@@ -112,9 +113,9 @@ func NewVP9Params() (ParamsVP9, error) {
}, nil
}
// RTPCodec represents the codec metadata
func (p *ParamsVP9) RTPCodec() *codec.RTPCodec {
return codec.NewRTPVP9Codec(90000)
// Name represents the codec name
func (p *ParamsVP9) Name() string {
return webrtc.VP9
}
// BuildVideoEncoder builds VP9 encoder with given params


@@ -64,6 +64,7 @@ import (
"unsafe"
"github.com/pion/mediadevices/pkg/codec"
mio "github.com/pion/mediadevices/pkg/io"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
)
@@ -79,6 +80,7 @@ const (
type encoderVP8 struct {
r video.Reader
buf []byte
frame []byte
fdDRI C.int
@@ -295,17 +297,25 @@ func newVP8Encoder(r video.Reader, p prop.Media, params ParamsVP8) (codec.ReadCl
return e, nil
}
func (e *encoderVP8) Read() ([]byte, func(), error) {
func (e *encoderVP8) Read(p []byte) (int, error) {
e.mu.Lock()
defer e.mu.Unlock()
if e.closed {
return nil, func() {}, io.EOF
return 0, io.EOF
}
img, _, err := e.r.Read()
if e.buf != nil {
n, err := mio.Copy(p, e.buf)
if err == nil {
e.buf = nil
}
return n, err
}
img, err := e.r.Read()
if err != nil {
return nil, func() {}, err
return 0, err
}
yuvImg := img.(*image.YCbCr)
@@ -347,7 +357,7 @@ func (e *encoderVP8) Read() ([]byte, func(), error) {
}
}
if e.picParam.reconstructed_frame == C.VA_INVALID_SURFACE {
return nil, func() {}, errors.New("no available surface")
return 0, errors.New("no available surface")
}
C.setForceKFFlagVP8(&e.picParam, 0)
@@ -415,7 +425,7 @@ func (e *encoderVP8) Read() ([]byte, func(), error) {
C.size_t(uintptr(p.src)),
&id,
); s != C.VA_STATUS_SUCCESS {
return nil, func() {}, fmt.Errorf("failed to create buffer: %s", C.GoString(C.vaErrorStr(s)))
return 0, fmt.Errorf("failed to create buffer: %s", C.GoString(C.vaErrorStr(s)))
}
buffs = append(buffs, id)
}
@@ -425,17 +435,17 @@ func (e *encoderVP8) Read() ([]byte, func(), error) {
e.display, e.ctxID,
e.surfs[surfaceVP8Input],
); s != C.VA_STATUS_SUCCESS {
return nil, func() {}, fmt.Errorf("failed to begin picture: %s", C.GoString(C.vaErrorStr(s)))
return 0, fmt.Errorf("failed to begin picture: %s", C.GoString(C.vaErrorStr(s)))
}
// Upload image
var vaImg C.VAImage
var rawBuf unsafe.Pointer
if s := C.vaDeriveImage(e.display, e.surfs[surfaceVP8Input], &vaImg); s != C.VA_STATUS_SUCCESS {
return nil, func() {}, fmt.Errorf("failed to derive image: %s", C.GoString(C.vaErrorStr(s)))
return 0, fmt.Errorf("failed to derive image: %s", C.GoString(C.vaErrorStr(s)))
}
if s := C.vaMapBuffer(e.display, vaImg.buf, &rawBuf); s != C.VA_STATUS_SUCCESS {
return nil, func() {}, fmt.Errorf("failed to map buffer: %s", C.GoString(C.vaErrorStr(s)))
return 0, fmt.Errorf("failed to map buffer: %s", C.GoString(C.vaErrorStr(s)))
}
// TODO: use vaImg.pitches to support padding
C.memcpy(
@@ -451,10 +461,10 @@ func (e *encoderVP8) Read() ([]byte, func(), error) {
unsafe.Pointer(&yuvImg.Cr[0]), C.size_t(len(yuvImg.Cr)),
)
if s := C.vaUnmapBuffer(e.display, vaImg.buf); s != C.VA_STATUS_SUCCESS {
return nil, func() {}, fmt.Errorf("failed to unmap buffer: %s", C.GoString(C.vaErrorStr(s)))
return 0, fmt.Errorf("failed to unmap buffer: %s", C.GoString(C.vaErrorStr(s)))
}
if s := C.vaDestroyImage(e.display, vaImg.image_id); s != C.VA_STATUS_SUCCESS {
return nil, func() {}, fmt.Errorf("failed to destroy image: %s", C.GoString(C.vaErrorStr(s)))
return 0, fmt.Errorf("failed to destroy image: %s", C.GoString(C.vaErrorStr(s)))
}
if s := C.vaRenderPicture(
@@ -462,38 +472,38 @@ func (e *encoderVP8) Read() ([]byte, func(), error) {
 &buffs[1], // 0 is for output
C.int(len(buffs)-1),
); s != C.VA_STATUS_SUCCESS {
return nil, func() {}, fmt.Errorf("failed to render picture: %s", C.GoString(C.vaErrorStr(s)))
return 0, fmt.Errorf("failed to render picture: %s", C.GoString(C.vaErrorStr(s)))
}
if s := C.vaEndPicture(
e.display, e.ctxID,
); s != C.VA_STATUS_SUCCESS {
return nil, func() {}, fmt.Errorf("failed to end picture: %s", C.GoString(C.vaErrorStr(s)))
return 0, fmt.Errorf("failed to end picture: %s", C.GoString(C.vaErrorStr(s)))
}
// Load encoded data
for retry := 3; retry >= 0; retry-- {
if s := C.vaSyncSurface(e.display, e.picParam.reconstructed_frame); s != C.VA_STATUS_SUCCESS {
return nil, func() {}, fmt.Errorf("failed to sync surface: %s", C.GoString(C.vaErrorStr(s)))
return 0, fmt.Errorf("failed to sync surface: %s", C.GoString(C.vaErrorStr(s)))
}
var surfStat C.VASurfaceStatus
if s := C.vaQuerySurfaceStatus(
e.display, e.picParam.reconstructed_frame, &surfStat,
); s != C.VA_STATUS_SUCCESS {
return nil, func() {}, fmt.Errorf("failed to query surface status: %s", C.GoString(C.vaErrorStr(s)))
return 0, fmt.Errorf("failed to query surface status: %s", C.GoString(C.vaErrorStr(s)))
}
if surfStat == C.VASurfaceReady {
break
}
if retry == 0 {
return nil, func() {}, fmt.Errorf("failed to sync surface: %d", surfStat)
return 0, fmt.Errorf("failed to sync surface: %d", surfStat)
}
}
var seg *C.VACodedBufferSegment
if s := C.vaMapBufferSeg(e.display, buffs[0], &seg); s != C.VA_STATUS_SUCCESS {
return nil, func() {}, fmt.Errorf("failed to map buffer: %s", C.GoString(C.vaErrorStr(s)))
return 0, fmt.Errorf("failed to map buffer: %s", C.GoString(C.vaErrorStr(s)))
}
if seg.status&C.VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK != 0 {
return nil, func() {}, errors.New("buffer size too small")
return 0, errors.New("buffer size too small")
}
if cap(e.frame) < int(seg.size) {
@@ -506,13 +516,13 @@ func (e *encoderVP8) Read() ([]byte, func(), error) {
)
if s := C.vaUnmapBuffer(e.display, buffs[0]); s != C.VA_STATUS_SUCCESS {
return nil, func() {}, fmt.Errorf("failed to unmap buffer: %s", C.GoString(C.vaErrorStr(s)))
return 0, fmt.Errorf("failed to unmap buffer: %s", C.GoString(C.vaErrorStr(s)))
}
// Destroy buffers
for _, b := range buffs {
if s := C.vaDestroyBuffer(e.display, b); s != C.VA_STATUS_SUCCESS {
return nil, func() {}, fmt.Errorf("failed to destroy buffer: %s", C.GoString(C.vaErrorStr(s)))
return 0, fmt.Errorf("failed to destroy buffer: %s", C.GoString(C.vaErrorStr(s)))
}
}
@@ -535,9 +545,11 @@ func (e *encoderVP8) Read() ([]byte, func(), error) {
e.picParam.ref_last_frame = e.picParam.reconstructed_frame
C.setRefreshLastFlagVP8(&e.picParam, 1)
encoded := make([]byte, len(e.frame))
copy(encoded, e.frame)
return encoded, func() {}, err
n, err := mio.Copy(p, e.frame)
if err != nil {
e.buf = e.frame
}
return n, err
}
func (e *encoderVP8) SetBitRate(b int) error {


@@ -47,6 +47,7 @@ import (
"unsafe"
"github.com/pion/mediadevices/pkg/codec"
mio "github.com/pion/mediadevices/pkg/io"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
)
@@ -66,6 +67,7 @@ const (
type encoderVP9 struct {
r video.Reader
buf []byte
frame []byte
fdDRI C.int
@@ -284,17 +286,25 @@ func newVP9Encoder(r video.Reader, p prop.Media, params ParamsVP9) (codec.ReadCl
return e, nil
}
func (e *encoderVP9) Read() ([]byte, func(), error) {
func (e *encoderVP9) Read(p []byte) (int, error) {
e.mu.Lock()
defer e.mu.Unlock()
if e.closed {
return nil, func() {}, io.EOF
return 0, io.EOF
}
img, _, err := e.r.Read()
if e.buf != nil {
n, err := mio.Copy(p, e.buf)
if err == nil {
e.buf = nil
}
return n, err
}
img, err := e.r.Read()
if err != nil {
return nil, func() {}, err
return 0, err
}
yuvImg := img.(*image.YCbCr)
@@ -378,7 +388,7 @@ func (e *encoderVP9) Read() ([]byte, func(), error) {
C.size_t(uintptr(p.src)),
&id,
); s != C.VA_STATUS_SUCCESS {
return nil, func() {}, fmt.Errorf("failed to create buffer: %s", C.GoString(C.vaErrorStr(s)))
return 0, fmt.Errorf("failed to create buffer: %s", C.GoString(C.vaErrorStr(s)))
}
buffs = append(buffs, id)
}
@@ -388,17 +398,17 @@ func (e *encoderVP9) Read() ([]byte, func(), error) {
e.display, e.ctxID,
e.surfs[surfaceVP9Input],
); s != C.VA_STATUS_SUCCESS {
return nil, func() {}, fmt.Errorf("failed to begin picture: %s", C.GoString(C.vaErrorStr(s)))
return 0, fmt.Errorf("failed to begin picture: %s", C.GoString(C.vaErrorStr(s)))
}
// Upload image
var vaImg C.VAImage
var rawBuf unsafe.Pointer
if s := C.vaDeriveImage(e.display, e.surfs[surfaceVP9Input], &vaImg); s != C.VA_STATUS_SUCCESS {
return nil, func() {}, fmt.Errorf("failed to derive image: %s", C.GoString(C.vaErrorStr(s)))
return 0, fmt.Errorf("failed to derive image: %s", C.GoString(C.vaErrorStr(s)))
}
if s := C.vaMapBuffer(e.display, vaImg.buf, &rawBuf); s != C.VA_STATUS_SUCCESS {
return nil, func() {}, fmt.Errorf("failed to map buffer: %s", C.GoString(C.vaErrorStr(s)))
return 0, fmt.Errorf("failed to map buffer: %s", C.GoString(C.vaErrorStr(s)))
}
// TODO: use vaImg.pitches to support padding
C.copyI420toNV12(
@@ -409,10 +419,10 @@ func (e *encoderVP9) Read() ([]byte, func(), error) {
C.uint(len(yuvImg.Y)),
)
if s := C.vaUnmapBuffer(e.display, vaImg.buf); s != C.VA_STATUS_SUCCESS {
return nil, func() {}, fmt.Errorf("failed to unmap buffer: %s", C.GoString(C.vaErrorStr(s)))
return 0, fmt.Errorf("failed to unmap buffer: %s", C.GoString(C.vaErrorStr(s)))
}
if s := C.vaDestroyImage(e.display, vaImg.image_id); s != C.VA_STATUS_SUCCESS {
return nil, func() {}, fmt.Errorf("failed to destroy image: %s", C.GoString(C.vaErrorStr(s)))
return 0, fmt.Errorf("failed to destroy image: %s", C.GoString(C.vaErrorStr(s)))
}
if s := C.vaRenderPicture(
@@ -420,27 +430,27 @@ func (e *encoderVP9) Read() ([]byte, func(), error) {
 &buffs[1], // 0 is for output
C.int(len(buffs)-1),
); s != C.VA_STATUS_SUCCESS {
return nil, func() {}, fmt.Errorf("failed to render picture: %s", C.GoString(C.vaErrorStr(s)))
return 0, fmt.Errorf("failed to render picture: %s", C.GoString(C.vaErrorStr(s)))
}
if s := C.vaEndPicture(
e.display, e.ctxID,
); s != C.VA_STATUS_SUCCESS {
return nil, func() {}, fmt.Errorf("failed to end picture: %s", C.GoString(C.vaErrorStr(s)))
return 0, fmt.Errorf("failed to end picture: %s", C.GoString(C.vaErrorStr(s)))
}
// Load encoded data
if s := C.vaSyncSurface(e.display, e.picParam.reconstructed_frame); s != C.VA_STATUS_SUCCESS {
return nil, func() {}, fmt.Errorf("failed to sync surface: %s", C.GoString(C.vaErrorStr(s)))
return 0, fmt.Errorf("failed to sync surface: %s", C.GoString(C.vaErrorStr(s)))
}
var surfStat C.VASurfaceStatus
if s := C.vaQuerySurfaceStatus(
e.display, e.picParam.reconstructed_frame, &surfStat,
); s != C.VA_STATUS_SUCCESS {
return nil, func() {}, fmt.Errorf("failed to query surface status: %s", C.GoString(C.vaErrorStr(s)))
return 0, fmt.Errorf("failed to query surface status: %s", C.GoString(C.vaErrorStr(s)))
}
var seg *C.VACodedBufferSegment
if s := C.vaMapBufferSeg(e.display, buffs[0], &seg); s != C.VA_STATUS_SUCCESS {
return nil, func() {}, fmt.Errorf("failed to map buffer: %s", C.GoString(C.vaErrorStr(s)))
return 0, fmt.Errorf("failed to map buffer: %s", C.GoString(C.vaErrorStr(s)))
}
if cap(e.frame) < int(seg.size) {
e.frame = make([]byte, int(seg.size))
@@ -452,13 +462,13 @@ func (e *encoderVP9) Read() ([]byte, func(), error) {
)
if s := C.vaUnmapBuffer(e.display, buffs[0]); s != C.VA_STATUS_SUCCESS {
return nil, func() {}, fmt.Errorf("failed to unmap buffer: %s", C.GoString(C.vaErrorStr(s)))
return 0, fmt.Errorf("failed to unmap buffer: %s", C.GoString(C.vaErrorStr(s)))
}
// Destroy buffers
for _, b := range buffs {
if s := C.vaDestroyBuffer(e.display, b); s != C.VA_STATUS_SUCCESS {
return nil, func() {}, fmt.Errorf("failed to destroy buffer: %s", C.GoString(C.vaErrorStr(s)))
return 0, fmt.Errorf("failed to destroy buffer: %s", C.GoString(C.vaErrorStr(s)))
}
}
@@ -470,9 +480,11 @@ func (e *encoderVP9) Read() ([]byte, func(), error) {
e.slotCurr = 0
}
encoded := make([]byte, len(e.frame))
copy(encoded, e.frame)
return encoded, func() {}, err
n, err := mio.Copy(p, e.frame)
if err != nil {
e.buf = e.frame
}
return n, err
}
func (e *encoderVP9) SetBitRate(b int) error {


@@ -17,6 +17,13 @@ type Params struct {
RateControlMinQuantizer uint
RateControlMaxQuantizer uint
ErrorResilient ErrorResilientMode
TemporalLayers []TemporalLayer
}
// TemporalLayer represents temporal layer config.
type TemporalLayer struct {
TargetBitrate uint // in kbps
RateDecimator uint
}
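A minimal sketch of filling the new TemporalLayers field (not part of this diff; the two-layer split and values are illustrative). Following libvpx's documented ts_target_bitrate/ts_rate_decimator semantics, which the newEncoder hunk later in this diff copies into the encoder config, TargetBitrate is the cumulative bitrate for layers 0..i in kbps and RateDecimator divides the input frame rate for that layer.

package main

import (
    "log"

    "github.com/pion/mediadevices/pkg/codec/vpx"
)

func vp8WithTemporalLayers() vpx.VP8Params {
    params, err := vpx.NewVP8Params()
    if err != nil {
        log.Fatal(err)
    }
    params.BitRate = 300000 // bps, overall target
    params.TemporalLayers = []vpx.TemporalLayer{
        {TargetBitrate: 150, RateDecimator: 2}, // base layer: half frame rate, up to 150 kbps
        {TargetBitrate: 300, RateDecimator: 1}, // base + enhancement: full frame rate, up to 300 kbps
    }
    return params
}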
// RateControlMode represents rate control mode.


@@ -56,8 +56,10 @@ import (
"unsafe"
"github.com/pion/mediadevices/pkg/codec"
mio "github.com/pion/mediadevices/pkg/io"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
"github.com/pion/webrtc/v2"
)
type encoder struct {
@@ -66,6 +68,7 @@ type encoder struct {
cfg *C.vpx_codec_enc_cfg_t
r video.Reader
frameIndex int
buff []byte
tStart int
tLastFrame int
frame []byte
@@ -92,9 +95,9 @@ func NewVP8Params() (VP8Params, error) {
}, nil
}
// RTPCodec represents the codec metadata
func (p *VP8Params) RTPCodec() *codec.RTPCodec {
return codec.NewRTPVP8Codec(90000)
// Name represents the codec name
func (p *VP8Params) Name() string {
return webrtc.VP8
}
// BuildVideoEncoder builds VP8 encoder with given params
@@ -119,9 +122,9 @@ func NewVP9Params() (VP9Params, error) {
}, nil
}
// RTPCodec represents the codec metadata
func (p *VP9Params) RTPCodec() *codec.RTPCodec {
return codec.NewRTPVP9Codec(90000)
// Name represents the codec name
func (p *VP9Params) Name() string {
return webrtc.VP9
}
// BuildVideoEncoder builds VP9 encoder with given params
@@ -177,6 +180,12 @@ func newEncoder(r video.Reader, p prop.Media, params Params, codecIface *C.vpx_c
cfg.rc_resize_allowed = 0
cfg.g_pass = C.VPX_RC_ONE_PASS
cfg.ts_number_layers = C.uint(len(params.TemporalLayers))
for i := range params.TemporalLayers {
cfg.ts_target_bitrate[i] = C.uint(params.TemporalLayers[i].TargetBitrate)
cfg.ts_rate_decimator[i] = C.uint(params.TemporalLayers[i].RateDecimator)
}
raw := &C.vpx_image_t{}
if C.vpx_img_alloc(raw, C.VPX_IMG_FMT_I420, cfg.g_w, cfg.g_h, 1) == nil {
return nil, errors.New("vpx_img_alloc failed")
@@ -204,17 +213,25 @@ func newEncoder(r video.Reader, p prop.Media, params Params, codecIface *C.vpx_c
}, nil
}
func (e *encoder) Read() ([]byte, func(), error) {
func (e *encoder) Read(p []byte) (int, error) {
e.mu.Lock()
defer e.mu.Unlock()
if e.closed {
return nil, func() {}, io.EOF
return 0, io.EOF
}
img, _, err := e.r.Read()
if e.buff != nil {
n, err := mio.Copy(p, e.buff)
if err == nil {
e.buff = nil
}
return n, err
}
img, err := e.r.Read()
if err != nil {
return nil, func() {}, err
return 0, err
}
yuvImg := img.(*image.YCbCr)
bounds := yuvImg.Bounds()
@@ -230,7 +247,7 @@ func (e *encoder) Read() ([]byte, func(), error) {
if e.cfg.g_w != C.uint(width) || e.cfg.g_h != C.uint(height) {
e.cfg.g_w, e.cfg.g_h = C.uint(width), C.uint(height)
if ec := C.vpx_codec_enc_config_set(e.codec, e.cfg); ec != C.VPX_CODEC_OK {
return nil, func() {}, fmt.Errorf("vpx_codec_enc_config_set failed (%d)", ec)
return 0, fmt.Errorf("vpx_codec_enc_config_set failed (%d)", ec)
}
e.raw.w, e.raw.h = C.uint(width), C.uint(height)
e.raw.r_w, e.raw.r_h = C.uint(width), C.uint(height)
@@ -243,7 +260,7 @@ func (e *encoder) Read() ([]byte, func(), error) {
C.long(t-e.tStart), C.ulong(t-e.tLastFrame), C.long(flags), C.ulong(e.deadline),
(*C.uchar)(&yuvImg.Y[0]), (*C.uchar)(&yuvImg.Cb[0]), (*C.uchar)(&yuvImg.Cr[0]),
); ec != C.VPX_CODEC_OK {
return nil, func() {}, fmt.Errorf("vpx_codec_encode failed (%d)", ec)
return 0, fmt.Errorf("vpx_codec_encode failed (%d)", ec)
}
e.frameIndex++
@@ -261,10 +278,11 @@ func (e *encoder) Read() ([]byte, func(), error) {
e.frame = append(e.frame, encoded...)
}
}
encoded := make([]byte, len(e.frame))
copy(encoded, e.frame)
return encoded, func() {}, err
n, err := mio.Copy(p, e.frame)
if err != nil {
e.buff = e.frame
}
return n, err
}
func (e *encoder) SetBitRate(b int) error {


@@ -4,6 +4,7 @@ import (
"github.com/pion/mediadevices/pkg/codec"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
"github.com/pion/webrtc/v2"
)
// Params stores libx264 specific encoding parameters.
@@ -39,9 +40,9 @@ func NewParams() (Params, error) {
}, nil
}
// RTPCodec represents the codec metadata
func (p *Params) RTPCodec() *codec.RTPCodec {
return codec.NewRTPH264Codec(90000)
// Name represents the codec name
func (p *Params) Name() string {
return webrtc.H264
}
// BuildVideoEncoder builds x264 encoder with given params


@@ -14,12 +14,14 @@ import (
"unsafe"
"github.com/pion/mediadevices/pkg/codec"
mio "github.com/pion/mediadevices/pkg/io"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
)
type encoder struct {
engine *C.Encoder
buff []byte
r video.Reader
mu sync.Mutex
closed bool
@@ -94,17 +96,25 @@ func newEncoder(r video.Reader, p prop.Media, params Params) (codec.ReadCloser,
return &e, nil
}
func (e *encoder) Read() ([]byte, func(), error) {
func (e *encoder) Read(p []byte) (int, error) {
e.mu.Lock()
defer e.mu.Unlock()
if e.closed {
return nil, func() {}, io.EOF
return 0, io.EOF
}
img, _, err := e.r.Read()
if e.buff != nil {
n, err := mio.Copy(p, e.buff)
if err == nil {
e.buff = nil
}
return n, err
}
img, err := e.r.Read()
if err != nil {
return nil, func() {}, err
return 0, err
}
yuvImg := img.(*image.YCbCr)
@@ -117,11 +127,15 @@ func (e *encoder) Read() ([]byte, func(), error) {
&rc,
)
if err := errFromC(rc); err != nil {
return nil, func() {}, err
return 0, err
}
encoded := C.GoBytes(unsafe.Pointer(s.data), s.data_len)
return encoded, func() {}, err
n, err := mio.Copy(p, encoded)
if err != nil {
e.buff = encoded
}
return n, err
}
func (e *encoder) SetBitRate(b int) error {


@@ -52,10 +52,10 @@ func (d *dummy) AudioRecord(p prop.Media) (audio.Reader, error) {
closed := d.closed
reader := audio.ReaderFunc(func() (wave.Audio, func(), error) {
reader := audio.ReaderFunc(func() (wave.Audio, error) {
select {
case <-closed:
return nil, func() {}, io.EOF
return nil, io.EOF
default:
}
@@ -78,7 +78,7 @@ func (d *dummy) AudioRecord(p prop.Media) (audio.Reader, error) {
a.SetFloat32(i, ch, wave.Float32Sample(sin[phase]))
}
}
return a, func() {}, nil
return a, nil
})
return reader, nil
}


@@ -1,71 +0,0 @@
package camera
import (
"image"
"github.com/pion/mediadevices/pkg/avfoundation"
"github.com/pion/mediadevices/pkg/driver"
"github.com/pion/mediadevices/pkg/frame"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
)
type camera struct {
device avfoundation.Device
session *avfoundation.Session
}
func init() {
devices, err := avfoundation.Devices(avfoundation.Video)
if err != nil {
panic(err)
}
for _, device := range devices {
cam := newCamera(device)
driver.GetManager().Register(cam, driver.Info{
Label: device.UID,
DeviceType: driver.Camera,
})
}
}
func newCamera(device avfoundation.Device) *camera {
return &camera{
device: device,
}
}
func (cam *camera) Open() error {
var err error
cam.session, err = avfoundation.NewSession(cam.device)
return err
}
func (cam *camera) Close() error {
return cam.session.Close()
}
func (cam *camera) VideoRecord(property prop.Media) (video.Reader, error) {
decoder, err := frame.NewDecoder(property.FrameFormat)
if err != nil {
return nil, err
}
rc, err := cam.session.Open(property)
if err != nil {
return nil, err
}
r := video.ReaderFunc(func() (image.Image, func(), error) {
frame, _, err := rc.Read()
if err != nil {
return nil, func() {}, err
}
return decoder.Decode(frame, property.Width, property.Height)
})
return r, nil
}
func (cam *camera) Properties() []prop.Media {
return cam.session.Properties()
}


@@ -8,8 +8,7 @@ import (
"errors"
"image"
"io"
"os"
"path/filepath"
"io/ioutil"
"sync"
"github.com/blackjack/webcam"
@@ -26,36 +25,6 @@ const (
var (
errReadTimeout = errors.New("read timeout")
errEmptyFrame = errors.New("empty frame")
// Reference: https://commons.wikimedia.org/wiki/File:Vector_Video_Standards2.svg
supportedResolutions = [][2]int{
{320, 240},
{640, 480},
{768, 576},
{800, 600},
{1024, 768},
{1280, 854},
{1280, 960},
{1280, 1024},
{1400, 1050},
{1600, 1200},
{2048, 1536},
{320, 200},
{800, 480},
{854, 480},
{1024, 600},
{1152, 768},
{1280, 720},
{1280, 768},
{1366, 768},
{1280, 800},
{1440, 900},
{1440, 960},
{1680, 1050},
{1920, 1080},
{2048, 1080},
{1920, 1200},
{2560, 1600},
}
)
// Camera implementation using v4l2
@@ -71,47 +40,27 @@ type camera struct {
}
func init() {
discovered := make(map[string]struct{})
discover := func(pattern string) {
devices, err := filepath.Glob(pattern)
if err != nil {
// No v4l device.
return
}
for _, device := range devices {
label := filepath.Base(device)
reallink, err := os.Readlink(device)
if err != nil {
reallink = label
} else {
reallink = filepath.Base(reallink)
}
if _, ok := discovered[reallink]; ok {
continue
}
discovered[reallink] = struct{}{}
cam := newCamera(device)
driver.GetManager().Register(cam, driver.Info{
Label: label,
DeviceType: driver.Camera,
})
}
searchPath := "/dev/v4l/by-path/"
devices, err := ioutil.ReadDir(searchPath)
if err != nil {
// No v4l device.
return
}
for _, device := range devices {
cam := newCamera(searchPath + device.Name())
driver.GetManager().Register(cam, driver.Info{
Label: device.Name(),
DeviceType: driver.Camera,
})
}
discover("/dev/v4l/by-path/*")
discover("/dev/video*")
}
func newCamera(path string) *camera {
formats := map[webcam.PixelFormat]frame.Format{
webcam.PixelFormat(C.V4L2_PIX_FMT_YUV420): frame.FormatI420,
webcam.PixelFormat(C.V4L2_PIX_FMT_YUYV): frame.FormatYUYV,
webcam.PixelFormat(C.V4L2_PIX_FMT_UYVY): frame.FormatUYVY,
webcam.PixelFormat(C.V4L2_PIX_FMT_NV12): frame.FormatNV21,
webcam.PixelFormat(C.V4L2_PIX_FMT_MJPEG): frame.FormatMJPEG,
webcam.PixelFormat(C.V4L2_PIX_FMT_YUYV): frame.FormatYUYV,
webcam.PixelFormat(C.V4L2_PIX_FMT_UYVY): frame.FormatUYVY,
webcam.PixelFormat(C.V4L2_PIX_FMT_NV12): frame.FormatNV21,
webcam.PixelFormat(C.V4L2_PIX_FMT_MJPEG): frame.FormatMJPEG,
}
reversedFormats := make(map[frame.Format]webcam.PixelFormat)
@@ -133,8 +82,6 @@ func (c *camera) Open() error {
return err
}
 // Late frames should be discarded. Buffering should be handled at a higher level.
cam.SetBufferCount(1)
c.cam = cam
return nil
}
@@ -182,7 +129,7 @@ func (c *camera) VideoRecord(p prop.Media) (video.Reader, error) {
ctx, cancel := context.WithCancel(context.Background())
c.cancel = cancel
var buf []byte
r := video.ReaderFunc(func() (img image.Image, release func(), err error) {
r := video.ReaderFunc(func() (img image.Image, err error) {
// Lock to avoid accessing the buffer after StopStreaming()
c.mutex.Lock()
defer c.mutex.Unlock()
@@ -191,23 +138,23 @@ func (c *camera) VideoRecord(p prop.Media) (video.Reader, error) {
for i := 0; i < maxEmptyFrameCount; i++ {
if ctx.Err() != nil {
// Return EOF if the camera is already closed.
return nil, func() {}, io.EOF
return nil, io.EOF
}
err := cam.WaitForFrame(5) // 5 seconds
switch err.(type) {
case nil:
case *webcam.Timeout:
return nil, func() {}, errReadTimeout
return nil, errReadTimeout
default:
// Camera has been stopped.
return nil, func() {}, err
return nil, err
}
b, err := cam.ReadFrame()
if err != nil {
// Camera has been stopped.
return nil, func() {}, err
return nil, err
}
// Frame is empty.
@@ -227,7 +174,7 @@ func (c *camera) VideoRecord(p prop.Media) (video.Reader, error) {
n := copy(buf, b)
return decoder.Decode(buf[:n], p.Width, p.Height)
}
return nil, func() {}, errEmptyFrame
return nil, errEmptyFrame
})
return r, nil
@@ -237,46 +184,13 @@ func (c *camera) Properties() []prop.Media {
properties := make([]prop.Media, 0)
for format := range c.cam.GetSupportedFormats() {
for _, frameSize := range c.cam.GetSupportedFrameSizes(format) {
supportedFormat, ok := c.formats[format]
if !ok {
continue
}
if frameSize.StepWidth == 0 || frameSize.StepHeight == 0 {
properties = append(properties, prop.Media{
Video: prop.Video{
Width: int(frameSize.MaxWidth),
Height: int(frameSize.MaxHeight),
FrameFormat: supportedFormat,
},
})
} else {
// FIXME: we should probably use a custom data structure to capture all of the supported resolutions
for _, supportedResolution := range supportedResolutions {
minWidth, minHeight := int(frameSize.MinWidth), int(frameSize.MinHeight)
maxWidth, maxHeight := int(frameSize.MaxWidth), int(frameSize.MaxHeight)
stepWidth, stepHeight := int(frameSize.StepWidth), int(frameSize.StepHeight)
width, height := supportedResolution[0], supportedResolution[1]
if width < minWidth || width > maxWidth ||
height < minHeight || height > maxHeight {
continue
}
if (width-minWidth)%stepWidth != 0 ||
(height-minHeight)%stepHeight != 0 {
continue
}
properties = append(properties, prop.Media{
Video: prop.Video{
Width: width,
Height: height,
FrameFormat: supportedFormat,
},
})
}
}
properties = append(properties, prop.Media{
Video: prop.Video{
Width: int(frameSize.MaxWidth),
Height: int(frameSize.MaxHeight),
FrameFormat: c.formats[format],
},
})
}
}
return properties


@@ -116,10 +116,10 @@ func (c *camera) VideoRecord(p prop.Media) (video.Reader, error) {
img := &image.YCbCr{}
r := video.ReaderFunc(func() (image.Image, func(), error) {
r := video.ReaderFunc(func() (image.Image, error) {
b, ok := <-c.ch
if !ok {
return nil, func() {}, io.EOF
return nil, io.EOF
}
img.Y = b[:nPix]
img.Cb = b[nPix : nPix+nPix/2]
@@ -128,7 +128,7 @@ func (c *camera) VideoRecord(p prop.Media) (video.Reader, error) {
img.CStride = p.Width / 2
img.SubsampleRatio = image.YCbCrSubsampleRatio422
img.Rect = image.Rect(0, 0, p.Width, p.Height)
return img, func() {}, nil
return img, nil
})
return r, nil
}


@@ -14,7 +14,7 @@ import (
type microphone struct {
c *pulse.Client
id string
samplesChan chan<- []int16
samplesChan chan<- []float32
}
func init() {
@@ -85,35 +85,34 @@ func (m *microphone) AudioRecord(p prop.Media) (audio.Reader, error) {
pulse.RecordSource(src),
)
samplesChan := make(chan []int16, 1)
samplesChan := make(chan []float32, 1)
handler := func(b []int16) (int, error) {
handler := func(b []float32) (int, error) {
samplesChan <- b
return len(b), nil
}
stream, err := m.c.NewRecord(pulse.Int16Writer(handler), options...)
stream, err := m.c.NewRecord(pulse.Float32Writer(handler), options...)
if err != nil {
return nil, err
}
reader := audio.ReaderFunc(func() (wave.Audio, func(), error) {
reader := audio.ReaderFunc(func() (wave.Audio, error) {
buff, ok := <-samplesChan
if !ok {
stream.Close()
return nil, func() {}, io.EOF
return nil, io.EOF
}
a := wave.NewInt16Interleaved(
a := wave.NewFloat32Interleaved(
wave.ChunkInfo{
Channels: p.ChannelCount,
Len: len(buff) / p.ChannelCount,
SamplingRate: p.SampleRate,
Channels: p.ChannelCount,
Len: len(buff) / p.ChannelCount,
},
)
copy(a.Data, buff)
return a, func() {}, nil
return a, nil
})
stream.Start()


@@ -194,10 +194,10 @@ func (m *microphone) AudioRecord(p prop.Media) (audio.Reader, error) {
// TODO: detect microphone device disconnection and return EOF
reader := audio.ReaderFunc(func() (wave.Audio, func(), error) {
reader := audio.ReaderFunc(func() (wave.Audio, error) {
b, ok := <-m.chBuf
if !ok {
return nil, func() {}, io.EOF
return nil, io.EOF
}
select {
@@ -210,27 +210,26 @@ func (m *microphone) AudioRecord(p prop.Media) (audio.Reader, error) {
uintptr(unsafe.Sizeof(b.waveHdr)),
)
if err := errWinmm[ret]; err != nil {
return nil, func() {}, err
return nil, err
}
}
a := wave.NewInt16Interleaved(
a := wave.NewFloat32Interleaved(
wave.ChunkInfo{
Channels: p.ChannelCount,
Len: (int(b.waveHdr.dwBytesRecorded) / 2) / p.ChannelCount,
SamplingRate: p.SampleRate,
Channels: p.ChannelCount,
Len: (int(b.waveHdr.dwBytesRecorded) / 2) / p.ChannelCount,
},
)
j := 0
for i := 0; i < a.Size.Len; i++ {
for ch := 0; ch < a.Size.Channels; ch++ {
a.SetInt16(i, ch, wave.Int16Sample(b.data[j]))
a.SetFloat32(i, ch, wave.Float32Sample(float32(b.data[j])/0x8000))
j++
}
}
return a, func() {}, nil
return a, nil
})
return reader, nil
}


@@ -68,9 +68,9 @@ func (s *screen) VideoRecord(p prop.Media) (video.Reader, error) {
var dst image.RGBA
reader := s.reader
r := video.ReaderFunc(func() (image.Image, func(), error) {
r := video.ReaderFunc(func() (image.Image, error) {
<-s.tick.C
return reader.Read().ToRGBA(&dst), func() {}, nil
return reader.Read().ToRGBA(&dst), nil
})
return r, nil
}


@@ -103,10 +103,10 @@ func (d *dummy) VideoRecord(p prop.Media) (video.Reader, error) {
d.tick = tick
closed := d.closed
r := video.ReaderFunc(func() (image.Image, func(), error) {
r := video.ReaderFunc(func() (image.Image, error) {
select {
case <-closed:
return nil, func() {}, io.EOF
return nil, io.EOF
default:
}
@@ -130,7 +130,7 @@ func (d *dummy) VideoRecord(p prop.Media) (video.Reader, error) {
CStride: p.Width / 2,
SubsampleRatio: image.YCbCrSubsampleRatio422,
Rect: image.Rect(0, 0, p.Width, p.Height),
}, func() {}, nil
}, nil
})
return r, nil


@@ -6,7 +6,6 @@ import (
"image/jpeg"
)
func decodeMJPEG(frame []byte, width, height int) (image.Image, func(), error) {
img, err := jpeg.Decode(bytes.NewReader(frame))
return img, func() {}, err
func decodeMJPEG(frame []byte, width, height int) (image.Image, error) {
return jpeg.Decode(bytes.NewReader(frame))
}


@@ -3,6 +3,8 @@ package frame
type Format string
const (
// YUV Formats
// FormatI420 https://www.fourcc.org/pixel-format/yuv-i420/
FormatI420 Format = "I420"
// FormatI444 is a YUV format without sub-sampling
@@ -14,11 +16,18 @@ const (
// FormatUYVY https://www.fourcc.org/pixel-format/yuv-uyvy/
FormatUYVY = "UYVY"
// RGB Formats
// FormatRGBA https://www.fourcc.org/pixel-format/rgb-rgba/
FormatRGBA Format = "RGBA"
// Compressed Formats
// FormatMJPEG https://www.fourcc.org/mjpg/
FormatMJPEG = "MJPEG"
)
// YUV aliases
// FormatYUYV is an alias of FormatYUY2
const FormatYUYV = FormatYUY2


@@ -5,7 +5,7 @@ import (
)
func NewDecoder(f Format) (Decoder, error) {
var decoder decoderFunc
var decoder DecoderFunc
switch f {
case FormatI420:


@@ -3,12 +3,12 @@ package frame
import "image"
type Decoder interface {
Decode(frame []byte, width, height int) (image.Image, func(), error)
Decode(frame []byte, width, height int) (image.Image, error)
}
// DecoderFunc is a proxy type for Decoder
type decoderFunc func(frame []byte, width, height int) (image.Image, func(), error)
type DecoderFunc func(frame []byte, width, height int) (image.Image, error)
func (f decoderFunc) Decode(frame []byte, width, height int) (image.Image, func(), error) {
func (f DecoderFunc) Decode(frame []byte, width, height int) (image.Image, error) {
return f(frame, width, height)
}


@@ -5,13 +5,13 @@ import (
"image"
)
func decodeI420(frame []byte, width, height int) (image.Image, func(), error) {
func decodeI420(frame []byte, width, height int) (image.Image, error) {
yi := width * height
cbi := yi + width*height/4
cri := cbi + width*height/4
if cri > len(frame) {
return nil, func() {}, fmt.Errorf("frame length (%d) less than expected (%d)", len(frame), cri)
return nil, fmt.Errorf("frame length (%d) less than expected (%d)", len(frame), cri)
}
return &image.YCbCr{
@@ -22,15 +22,15 @@ func decodeI420(frame []byte, width, height int) (image.Image, func(), error) {
CStride: width / 2,
SubsampleRatio: image.YCbCrSubsampleRatio420,
Rect: image.Rect(0, 0, width, height),
}, func() {}, nil
}, nil
}
func decodeNV21(frame []byte, width, height int) (image.Image, func(), error) {
func decodeNV21(frame []byte, width, height int) (image.Image, error) {
yi := width * height
ci := yi + width*height/2
if ci > len(frame) {
return nil, func() {}, fmt.Errorf("frame length (%d) less than expected (%d)", len(frame), ci)
return nil, fmt.Errorf("frame length (%d) less than expected (%d)", len(frame), ci)
}
var cb, cr []byte
@@ -47,5 +47,5 @@ func decodeNV21(frame []byte, width, height int) (image.Image, func(), error) {
CStride: width / 2,
SubsampleRatio: image.YCbCrSubsampleRatio420,
Rect: image.Rect(0, 0, width, height),
}, func() {}, nil
}, nil
}


@@ -12,13 +12,13 @@ import (
// void decodeUYVYCGO(uint8_t* y, uint8_t* cb, uint8_t* cr, uint8_t* uyvy, int width, int height);
import "C"
func decodeYUY2(frame []byte, width, height int) (image.Image, func(), error) {
func decodeYUY2(frame []byte, width, height int) (image.Image, error) {
yi := width * height
ci := yi / 2
fi := yi + 2*ci
if len(frame) != fi {
return nil, func() {}, fmt.Errorf("frame length (%d) less than expected (%d)", len(frame), fi)
return nil, fmt.Errorf("frame length (%d) less than expected (%d)", len(frame), fi)
}
y := make([]byte, yi)
@@ -41,16 +41,16 @@ func decodeYUY2(frame []byte, width, height int) (image.Image, func(), error) {
CStride: width / 2,
SubsampleRatio: image.YCbCrSubsampleRatio422,
Rect: image.Rect(0, 0, width, height),
}, func() {}, nil
}, nil
}
func decodeUYVY(frame []byte, width, height int) (image.Image, func(), error) {
func decodeUYVY(frame []byte, width, height int) (image.Image, error) {
yi := width * height
ci := yi / 2
fi := yi + 2*ci
if len(frame) != fi {
return nil, func() {}, fmt.Errorf("frame length (%d) less than expected (%d)", len(frame), fi)
return nil, fmt.Errorf("frame length (%d) less than expected (%d)", len(frame), fi)
}
y := make([]byte, yi)
@@ -73,5 +73,5 @@ func decodeUYVY(frame []byte, width, height int) (image.Image, func(), error) {
CStride: width / 2,
SubsampleRatio: image.YCbCrSubsampleRatio422,
Rect: image.Rect(0, 0, width, height),
}, func() {}, nil
}, nil
}


@@ -7,13 +7,13 @@ import (
"image"
)
func decodeYUY2(frame []byte, width, height int) (image.Image, func(), error) {
func decodeYUY2(frame []byte, width, height int) (image.Image, error) {
yi := width * height
ci := yi / 2
fi := yi + 2*ci
if len(frame) != fi {
return nil, func() {}, fmt.Errorf("frame length (%d) less than expected (%d)", len(frame), fi)
return nil, fmt.Errorf("frame length (%d) less than expected (%d)", len(frame), fi)
}
y := make([]byte, yi)
@@ -39,16 +39,16 @@ func decodeYUY2(frame []byte, width, height int) (image.Image, func(), error) {
CStride: width / 2,
SubsampleRatio: image.YCbCrSubsampleRatio422,
Rect: image.Rect(0, 0, width, height),
}, func() {}, nil
}, nil
}
func decodeUYVY(frame []byte, width, height int) (image.Image, func(), error) {
func decodeUYVY(frame []byte, width, height int) (image.Image, error) {
yi := width * height
ci := yi / 2
fi := yi + 2*ci
if len(frame) != fi {
return nil, func() {}, fmt.Errorf("frame length (%d) less than expected (%d)", len(frame), fi)
return nil, fmt.Errorf("frame length (%d) less than expected (%d)", len(frame), fi)
}
y := make([]byte, yi)
@@ -74,5 +74,5 @@ func decodeUYVY(frame []byte, width, height int) (image.Image, func(), error) {
CStride: width / 2,
SubsampleRatio: image.YCbCrSubsampleRatio422,
Rect: image.Rect(0, 0, width, height),
}, func() {}, nil
}, nil
}


@@ -27,7 +27,7 @@ func TestDecodeYUY2(t *testing.T) {
Rect: image.Rect(0, 0, width, height),
}
img, _, err := decodeYUY2(input, width, height)
img, err := decodeYUY2(input, width, height)
if err != nil {
t.Fatal(err)
}
@@ -56,7 +56,7 @@ func TestDecodeUYVY(t *testing.T) {
Rect: image.Rect(0, 0, width, height),
}
img, _, err := decodeUYVY(input, width, height)
img, err := decodeUYVY(input, width, height)
if err != nil {
t.Fatal(err)
}
@@ -77,7 +77,7 @@ func BenchmarkDecodeYUY2(b *testing.B) {
b.Run(fmt.Sprintf("%dx%d", sz.width, sz.height), func(b *testing.B) {
input := make([]byte, sz.width*sz.height*2)
for i := 0; i < b.N; i++ {
_, _, err := decodeYUY2(input, sz.width, sz.height)
_, err := decodeYUY2(input, sz.width, sz.height)
if err != nil {
b.Fatal(err)
}


@@ -5,14 +5,13 @@ import (
)
type Reader interface {
Read() (chunk wave.Audio, release func(), err error)
Read() (wave.Audio, error)
}
type ReaderFunc func() (chunk wave.Audio, release func(), err error)
type ReaderFunc func() (wave.Audio, error)
func (rf ReaderFunc) Read() (chunk wave.Audio, release func(), err error) {
chunk, release, err = rf()
return
func (rf ReaderFunc) Read() (wave.Audio, error) {
return rf()
}
// TransformFunc produces a new Reader that will produces a transformed audio

pkg/io/audio/beep.go (new file, 89 lines)

@@ -0,0 +1,89 @@
package audio
import (
"io"
"github.com/faiface/beep"
"github.com/pion/mediadevices/pkg/wave"
)
type beepStreamer struct {
err error
r Reader
}
func ToBeep(r Reader) beep.Streamer {
if r == nil {
 panic("ToBeep requires a non-nil Reader")
}
return &beepStreamer{r: r}
}
func (b *beepStreamer) Stream(samples [][2]float64) (int, bool) {
// Since there was an error, the stream has to be drained
if b.err != nil {
return 0, false
}
d, err := b.r.Read()
if err != nil {
b.err = err
 if err != io.EOF || d == nil { // guard against dereferencing a nil final chunk on EOF
return 0, false
}
}
n := d.ChunkInfo().Len
for i := 0; i < n; i++ {
samples[i][0] = float64(wave.Float32SampleFormat.Convert(d.At(i, 0)).(wave.Float32Sample))
samples[i][1] = float64(wave.Float32SampleFormat.Convert(d.At(i, 1)).(wave.Float32Sample))
}
return n, true
}
func (b *beepStreamer) Err() error {
return b.err
}
type beepReader struct {
s beep.Streamer
buff [][2]float64
size int
}
func FromBeep(s beep.Streamer) Reader {
if s == nil {
 panic("FromBeep requires a non-nil beep.Streamer")
}
return &beepReader{
s: s,
buff: make([][2]float64, 1024), // TODO: configure chunk size
}
}
func (r *beepReader) Read() (wave.Audio, error) {
out := wave.NewFloat32Interleaved(
wave.ChunkInfo{Len: len(r.buff), Channels: 2, SamplingRate: 48000},
)
n, ok := r.s.Stream(r.buff)
if !ok {
err := r.s.Err()
if err == nil {
err = io.EOF
}
return nil, err
}
for i := 0; i < n; i++ {
out.SetFloat32(i, 0, wave.Float32Sample(r.buff[i][0]))
out.SetFloat32(i, 1, wave.Float32Sample(r.buff[i][1]))
}
return out, nil
}
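A minimal sketch of using the two bridges from outside the package (not part of this file); the speaker setup via faiface/beep's speaker package, the 48 kHz rate, and the buffer size are assumptions for illustration.

package main

import (
    "github.com/faiface/beep"
    "github.com/faiface/beep/speaker"

    "github.com/pion/mediadevices/pkg/io/audio"
)

// playThrough plays a mediadevices audio.Reader on the default output device.
func playThrough(r audio.Reader) error {
    // The speaker rate must match the reader's sample rate; 48 kHz is assumed here.
    if err := speaker.Init(beep.SampleRate(48000), 4800); err != nil { // ~100 ms of buffer
        return err
    }
    speaker.Play(audio.ToBeep(r))
    return nil
}

// wrapStreamer exposes any beep.Streamer (e.g. a decoded file) as an audio.Reader.
// Note that FromBeep above hardcodes 48 kHz stereo chunks.
func wrapStreamer(s beep.Streamer) audio.Reader {
    return audio.FromBeep(s)
}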


@@ -1,76 +0,0 @@
package audio
import (
"errors"
"github.com/pion/mediadevices/pkg/io"
"github.com/pion/mediadevices/pkg/wave"
)
var errEmptySource = errors.New("Source can't be nil")
// Broadcaster is a specialized video broadcaster.
type Broadcaster struct {
ioBroadcaster *io.Broadcaster
}
type BroadcasterConfig struct {
Core *io.BroadcasterConfig
}
// NewBroadcaster creates a new broadcaster. Source is expected to drop chunks
// when any of the readers is slower than the source.
func NewBroadcaster(source Reader, config *BroadcasterConfig) *Broadcaster {
var coreConfig *io.BroadcasterConfig
if config != nil {
coreConfig = config.Core
}
broadcaster := io.NewBroadcaster(io.ReaderFunc(func() (interface{}, func(), error) {
return source.Read()
}), coreConfig)
return &Broadcaster{broadcaster}
}
// NewReader creates a new reader. Each reader will retrieve the same data from the source.
// copyFn is used to copy the data from the source to individual readers. Broadcaster uses a small ring
// buffer, which means that slow readers might miss some data if they are really late and the data is no longer
// in the ring buffer.
func (broadcaster *Broadcaster) NewReader(copyChunk bool) Reader {
copyFn := func(src interface{}) interface{} { return src }
if copyChunk {
buffer := wave.NewBuffer()
copyFn = func(src interface{}) interface{} {
realSrc, _ := src.(wave.Audio)
buffer.StoreCopy(realSrc)
return buffer.Load()
}
}
reader := broadcaster.ioBroadcaster.NewReader(copyFn)
return ReaderFunc(func() (wave.Audio, func(), error) {
data, _, err := reader.Read()
chunk, _ := data.(wave.Audio)
return chunk, func() {}, err
})
}
// ReplaceSource replaces the underlying source. This operation is thread safe.
func (broadcaster *Broadcaster) ReplaceSource(source Reader) error {
return broadcaster.ioBroadcaster.ReplaceSource(io.ReaderFunc(func() (interface{}, func(), error) {
return source.Read()
}))
}
// Source retrieves the underlying source. This operation is thread safe.
func (broadcaster *Broadcaster) Source() Reader {
source := broadcaster.ioBroadcaster.Source()
return ReaderFunc(func() (wave.Audio, func(), error) {
data, _, err := source.Read()
img, _ := data.(wave.Audio)
return img, func() {}, err
})
}
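A minimal usage sketch of the broadcaster defined above (not part of this diff); source is assumed to be any audio.Reader, e.g. from a microphone driver.

package main

import "github.com/pion/mediadevices/pkg/io/audio"

// fanOut lets two consumers read the same audio source independently.
func fanOut(source audio.Reader) (audio.Reader, audio.Reader) {
    b := audio.NewBroadcaster(source, nil) // nil config uses the defaults
    fast := b.NewReader(false)             // shares the broadcaster's chunk buffer
    safe := b.NewReader(true)              // copies each chunk so it can be kept across reads
    return fast, safe
}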


@@ -1,54 +0,0 @@
package audio
import (
"reflect"
"testing"
"github.com/pion/mediadevices/pkg/wave"
)
func TestBroadcast(t *testing.T) {
chunk := wave.NewFloat32Interleaved(wave.ChunkInfo{
Len: 8,
Channels: 2,
SamplingRate: 48000,
})
source := ReaderFunc(func() (wave.Audio, func(), error) {
return chunk, func() {}, nil
})
broadcaster := NewBroadcaster(source, nil)
readerWithoutCopy1 := broadcaster.NewReader(false)
readerWithoutCopy2 := broadcaster.NewReader(false)
actualWithoutCopy1, _, err := readerWithoutCopy1.Read()
if err != nil {
t.Fatal(err)
}
actualWithoutCopy2, _, err := readerWithoutCopy2.Read()
if err != nil {
t.Fatal(err)
}
if &actualWithoutCopy1.(*wave.Float32Interleaved).Data[0] != &actualWithoutCopy2.(*wave.Float32Interleaved).Data[0] {
t.Fatal("Expected underlying buffer for frame with copy to be the same from broadcaster's buffer")
}
if !reflect.DeepEqual(chunk, actualWithoutCopy1) {
t.Fatal("Expected actual frame without copy to be the same with the original")
}
readerWithCopy := broadcaster.NewReader(true)
actualWithCopy, _, err := readerWithCopy.Read()
if err != nil {
t.Fatal(err)
}
if &actualWithCopy.(*wave.Float32Interleaved).Data[0] == &actualWithoutCopy1.(*wave.Float32Interleaved).Data[0] {
t.Fatal("Expected underlying buffer for frame with copy to be different from broadcaster's buffer")
}
if !reflect.DeepEqual(chunk, actualWithCopy) {
t.Fatal("Expected actual frame without copy to be the same with the original")
}
}


@@ -1,89 +0,0 @@
package audio
import (
"errors"
"github.com/pion/mediadevices/pkg/wave"
)
var errUnsupported = errors.New("unsupported audio format")
// NewBuffer creates audio transform to buffer signal to have exact nSample samples.
func NewBuffer(nSamples int) TransformFunc {
var inBuff wave.Audio
return func(r Reader) Reader {
return ReaderFunc(func() (wave.Audio, func(), error) {
for {
if inBuff != nil && inBuff.ChunkInfo().Len >= nSamples {
break
}
buff, _, err := r.Read()
if err != nil {
return nil, func() {}, err
}
switch b := buff.(type) {
case *wave.Float32Interleaved:
ib, ok := inBuff.(*wave.Float32Interleaved)
if !ok || ib.Size.Channels != b.Size.Channels {
ib = wave.NewFloat32Interleaved(
wave.ChunkInfo{
SamplingRate: b.Size.SamplingRate,
Channels: b.Size.Channels,
Len: nSamples,
},
)
ib.Data = ib.Data[:0]
ib.Size.Len = 0
inBuff = ib
}
ib.Data = append(ib.Data, b.Data...)
ib.Size.Len += b.Size.Len
case *wave.Int16Interleaved:
ib, ok := inBuff.(*wave.Int16Interleaved)
if !ok || ib.Size.Channels != b.Size.Channels {
ib = wave.NewInt16Interleaved(
wave.ChunkInfo{
SamplingRate: b.Size.SamplingRate,
Channels: b.Size.Channels,
Len: nSamples,
},
)
ib.Data = ib.Data[:0]
ib.Size.Len = 0
inBuff = ib
}
ib.Data = append(ib.Data, b.Data...)
ib.Size.Len += b.Size.Len
default:
return nil, func() {}, errUnsupported
}
}
switch ib := inBuff.(type) {
case *wave.Int16Interleaved:
ibCopy := *ib
ibCopy.Size.Len = nSamples
n := nSamples * ib.Size.Channels
ibCopy.Data = make([]int16, n)
copy(ibCopy.Data, ib.Data)
ib.Data = ib.Data[n:]
ib.Size.Len -= nSamples
return &ibCopy, func() {}, nil
case *wave.Float32Interleaved:
ibCopy := *ib
ibCopy.Size.Len = nSamples
n := nSamples * ib.Size.Channels
ibCopy.Data = make([]float32, n)
copy(ibCopy.Data, ib.Data)
ib.Data = ib.Data[n:]
ib.Size.Len -= nSamples
return &ibCopy, func() {}, nil
}
return nil, func() {}, errUnsupported
})
}
}
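A short sketch of chaining this transform in front of an encoder (not part of this diff), matching how the opus encoder earlier in this diff uses it; the 960-sample size corresponds to 20 ms at 48 kHz and is illustrative.

package main

import "github.com/pion/mediadevices/pkg/io/audio"

// rechunk returns a reader that always yields chunks of exactly 960 samples.
func rechunk(r audio.Reader) audio.Reader {
    buffer := audio.NewBuffer(960)
    return buffer(r)
}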


@@ -1,72 +0,0 @@
package audio
import (
"io"
"reflect"
"testing"
"github.com/pion/mediadevices/pkg/wave"
)
func TestBuffer(t *testing.T) {
input := []wave.Audio{
&wave.Int16Interleaved{
Size: wave.ChunkInfo{Len: 1, Channels: 2, SamplingRate: 1234},
Data: []int16{1, 2},
},
&wave.Int16Interleaved{
Size: wave.ChunkInfo{Len: 3, Channels: 2, SamplingRate: 1234},
Data: []int16{3, 4, 5, 6, 7, 8},
},
&wave.Int16Interleaved{
Size: wave.ChunkInfo{Len: 2, Channels: 2, SamplingRate: 1234},
Data: []int16{9, 10, 11, 12},
},
&wave.Int16Interleaved{
Size: wave.ChunkInfo{Len: 7, Channels: 2, SamplingRate: 1234},
Data: []int16{13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26},
},
}
expected := []wave.Audio{
&wave.Int16Interleaved{
Size: wave.ChunkInfo{Len: 3, Channels: 2, SamplingRate: 1234},
Data: []int16{1, 2, 3, 4, 5, 6},
},
&wave.Int16Interleaved{
Size: wave.ChunkInfo{Len: 3, Channels: 2, SamplingRate: 1234},
Data: []int16{7, 8, 9, 10, 11, 12},
},
&wave.Int16Interleaved{
Size: wave.ChunkInfo{Len: 3, Channels: 2, SamplingRate: 1234},
Data: []int16{13, 14, 15, 16, 17, 18},
},
&wave.Int16Interleaved{
Size: wave.ChunkInfo{Len: 3, Channels: 2, SamplingRate: 1234},
Data: []int16{19, 20, 21, 22, 23, 24},
},
}
trans := NewBuffer(3)
var iSent int
r := trans(ReaderFunc(func() (wave.Audio, func(), error) {
if iSent < len(input) {
iSent++
return input[iSent-1], func() {}, nil
}
return nil, func() {}, io.EOF
}))
for i := 0; ; i++ {
a, _, err := r.Read()
if err != nil {
if err == io.EOF && i >= len(expected) {
break
}
t.Fatal(err)
}
if !reflect.DeepEqual(expected[i], a) {
t.Errorf("Expected wave[%d]: %v, got: %v", i, expected[i], a)
}
}
}


@@ -1,52 +0,0 @@
package audio
import (
"time"
"github.com/pion/mediadevices/pkg/prop"
"github.com/pion/mediadevices/pkg/wave"
)
// DetectChanges will detect chunk and audio property changes. For audio property detection,
// since it's time related, interval will be used to determine the sample rate.
func DetectChanges(interval time.Duration, onChange func(prop.Media)) TransformFunc {
return func(r Reader) Reader {
var currentProp prop.Media
var chunkCount uint
return ReaderFunc(func() (wave.Audio, func(), error) {
var dirty bool
chunk, _, err := r.Read()
if err != nil {
return nil, func() {}, err
}
info := chunk.ChunkInfo()
if currentProp.ChannelCount != info.Channels {
currentProp.ChannelCount = info.Channels
dirty = true
}
if currentProp.SampleRate != info.SamplingRate {
currentProp.SampleRate = info.SamplingRate
dirty = true
}
latency := time.Duration(chunk.ChunkInfo().Len) * time.Second / time.Nanosecond / time.Duration(currentProp.SampleRate)
if currentProp.Latency != latency {
currentProp.Latency = latency
dirty = true
}
// TODO: Also detect sample format changes?
// TODO: Add audio detect changes. As of now, there's no useful property to track.
if dirty {
onChange(currentProp)
}
chunkCount++
return chunk, func() {}, nil
})
}
}
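A minimal sketch of attaching the change detector to a reader (not part of this diff); the logging callback is illustrative.

package main

import (
    "log"
    "time"

    "github.com/pion/mediadevices/pkg/io/audio"
    "github.com/pion/mediadevices/pkg/prop"
)

// withPropLogging wraps r so that channel count, sample rate, and latency
// changes are reported as chunks flow through.
func withPropLogging(r audio.Reader) audio.Reader {
    detect := audio.DetectChanges(time.Second, func(p prop.Media) {
        log.Printf("audio properties changed: %+v", p)
    })
    return detect(r)
}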


@@ -1,76 +0,0 @@
package audio
import (
"reflect"
"testing"
"time"
"github.com/pion/mediadevices/pkg/prop"
"github.com/pion/mediadevices/pkg/wave"
)
func TestDetectChanges(t *testing.T) {
buildSource := func(p prop.Media) (Reader, func(prop.Media)) {
return ReaderFunc(func() (wave.Audio, func(), error) {
return wave.NewFloat32Interleaved(wave.ChunkInfo{
Len: 960,
Channels: p.ChannelCount,
SamplingRate: p.SampleRate,
}), func() {}, nil
}), func(newProp prop.Media) {
p = newProp
}
}
t.Run("OnChangeCalledBeforeFirstFrame", func(t *testing.T) {
var detectBeforeFirstChunk bool
var expected prop.Media
var actual prop.Media
expected.ChannelCount = 2
expected.SampleRate = 48000
expected.Latency = time.Millisecond * 20
src, _ := buildSource(expected)
src = DetectChanges(time.Second, func(p prop.Media) {
actual = p
detectBeforeFirstChunk = true
})(src)
_, _, err := src.Read()
if err != nil {
t.Fatal(err)
}
if !detectBeforeFirstChunk {
t.Fatal("on change callback should have called before first chunk")
}
if !reflect.DeepEqual(actual, expected) {
t.Fatalf("Received an unexpected prop\nExpected:\n%v\nActual:\n%v\n", expected, actual)
}
})
t.Run("DetectChangesOnEveryUpdate", func(t *testing.T) {
var expected prop.Media
var actual prop.Media
expected.ChannelCount = 2
expected.SampleRate = 48000
expected.Latency = 20 * time.Millisecond
src, update := buildSource(expected)
src = DetectChanges(time.Second, func(p prop.Media) {
actual = p
})(src)
for channelCount := 1; channelCount < 8; channelCount++ {
expected.ChannelCount = channelCount
update(expected)
_, _, err := src.Read()
if err != nil {
t.Fatal(err)
}
if !reflect.DeepEqual(actual, expected) {
t.Fatalf("Received an unexpected prop\nExpected:\n%v\nActual:\n%v\n", expected, actual)
}
}
})
}


@@ -1,40 +0,0 @@
package audio
import (
"github.com/pion/mediadevices/pkg/wave"
"github.com/pion/mediadevices/pkg/wave/mixer"
)
// NewChannelMixer creates audio transform to mix audio channels.
func NewChannelMixer(channels int, mixer mixer.ChannelMixer) TransformFunc {
return func(r Reader) Reader {
return ReaderFunc(func() (wave.Audio, func(), error) {
buff, _, err := r.Read()
if err != nil {
return nil, func() {}, err
}
ci := buff.ChunkInfo()
if ci.Channels == channels {
return buff, func() {}, nil
}
ci.Channels = channels
var mixed wave.Audio
switch buff.(type) {
case *wave.Int16Interleaved:
mixed = wave.NewInt16Interleaved(ci)
case *wave.Int16NonInterleaved:
mixed = wave.NewInt16NonInterleaved(ci)
case *wave.Float32Interleaved:
mixed = wave.NewFloat32Interleaved(ci)
case *wave.Float32NonInterleaved:
mixed = wave.NewFloat32NonInterleaved(ci)
}
if err := mixer.Mix(mixed, buff); err != nil {
return nil, func() {}, err
}
return mixed, func() {}, nil
})
}
}
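A short sketch of downmixing to mono with this transform (not part of this diff), mirroring how the opus encoder earlier in this diff applies it before buffering.

package main

import (
    "github.com/pion/mediadevices/pkg/io/audio"
    "github.com/pion/mediadevices/pkg/wave/mixer"
)

// toMono averages all input channels into a single channel, as the MonoMixer test below shows.
func toMono(r audio.Reader) audio.Reader {
    mix := audio.NewChannelMixer(1, &mixer.MonoMixer{})
    return mix(r)
}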


@@ -1,57 +0,0 @@
package audio
import (
"io"
"reflect"
"testing"
"github.com/pion/mediadevices/pkg/wave"
"github.com/pion/mediadevices/pkg/wave/mixer"
)
func TestMixer(t *testing.T) {
input := []wave.Audio{
&wave.Int16Interleaved{
Size: wave.ChunkInfo{Len: 1, Channels: 2, SamplingRate: 1234},
Data: []int16{1, 3},
},
&wave.Int16Interleaved{
Size: wave.ChunkInfo{Len: 3, Channels: 2, SamplingRate: 1234},
Data: []int16{2, 4, 3, 5, 4, 6},
},
}
expected := []wave.Audio{
&wave.Int16Interleaved{
Size: wave.ChunkInfo{Len: 1, Channels: 1, SamplingRate: 1234},
Data: []int16{2},
},
&wave.Int16Interleaved{
Size: wave.ChunkInfo{Len: 3, Channels: 1, SamplingRate: 1234},
Data: []int16{3, 4, 5},
},
}
trans := NewChannelMixer(1, &mixer.MonoMixer{})
var iSent int
r := trans(ReaderFunc(func() (wave.Audio, func(), error) {
if iSent < len(input) {
iSent++
return input[iSent-1], func() {}, nil
}
return nil, func() {}, io.EOF
}))
for i := 0; ; i++ {
a, _, err := r.Read()
if err != nil {
if err == io.EOF && i >= len(expected) {
break
}
t.Fatal(err)
}
if !reflect.DeepEqual(expected[i], a) {
t.Errorf("Expected wave[%d]: %v, got: %v", i, expected[i], a)
}
}
}

View File

@@ -1,162 +0,0 @@
package io
import (
"fmt"
"sync/atomic"
"time"
)
const (
maskReading = 1 << 63
defaultBroadcasterRingSize = 32
// TODO: If the data source has an fps greater than 30, readers will see some
// fps fluctuation. But 30 fps should be enough for general cases.
defaultBroadcasterRingPollDuration = time.Millisecond * 33
)
var errEmptySource = fmt.Errorf("Source can't be nil")
type broadcasterData struct {
data interface{}
count uint32
err error
}
type broadcasterRing struct {
// reading (1 bit) + reserved (31 bits) + data count (32 bits)
// IMPORTANT: state has to be the first field in the struct, otherwise LoadUint64 will panic on 32-bit systems
// due to misalignment
state uint64
buffer []atomic.Value
pollDuration time.Duration
}
func newBroadcasterRing(size uint, pollDuration time.Duration) *broadcasterRing {
return &broadcasterRing{buffer: make([]atomic.Value, size), pollDuration: pollDuration}
}
func (ring *broadcasterRing) index(count uint32) int {
return int(count) % len(ring.buffer)
}
func (ring *broadcasterRing) acquire(count uint32) func(*broadcasterData) {
// Reader has reached the latest data, so it should read from the source.
// Only allow one reader to read from the source at a time. When there is more than one reader,
// the other readers will share the same data that the first reader gets from
// the source.
state := uint64(count)
if atomic.CompareAndSwapUint64(&ring.state, state, state|maskReading) {
return func(data *broadcasterData) {
i := ring.index(count)
ring.buffer[i].Store(data)
atomic.StoreUint64(&ring.state, uint64(count+1))
}
}
return nil
}
func (ring *broadcasterRing) get(count uint32) *broadcasterData {
for {
reading := uint64(count) | maskReading
// TODO: since it's lockless, it spends a lot of resources on scheduling.
for atomic.LoadUint64(&ring.state) == reading {
// Yield current goroutine to let other goroutines to run instead
time.Sleep(ring.pollDuration)
}
i := ring.index(count)
data := ring.buffer[i].Load().(*broadcasterData)
if data.count == count {
return data
}
count++
}
}
func (ring *broadcasterRing) lastCount() uint32 {
// ring.state always keeps track of the next count, so we need to subtract 1 to get the
// last count
return uint32(atomic.LoadUint64(&ring.state)) - 1
}
// Broadcaster is a generic pull-based broadcaster. Broadcaster is unique in the sense that
// readers can come and go at any time, and readers don't need to close or notify the broadcaster.
type Broadcaster struct {
source atomic.Value
buffer *broadcasterRing
}
// BroadcasterConfig is a config to control broadcaster behaviour
type BroadcasterConfig struct {
// BufferSize configures the underlying ring buffer size that's being used
// to avoid data loss for late readers. The default value is 32.
BufferSize uint
// PollDuration configures the sleep duration in waiting for new data to come.
// The default value is 33 ms.
PollDuration time.Duration
}
// NewBroadcaster creates a new broadcaster. Source is expected to drop frames
// when any of the readers is slower than the source.
func NewBroadcaster(source Reader, config *BroadcasterConfig) *Broadcaster {
pollDuration := defaultBroadcasterRingPollDuration
var bufferSize uint = defaultBroadcasterRingSize
if config != nil {
if config.PollDuration != 0 {
pollDuration = config.PollDuration
}
if config.BufferSize != 0 {
bufferSize = config.BufferSize
}
}
var broadcaster Broadcaster
broadcaster.buffer = newBroadcasterRing(bufferSize, pollDuration)
broadcaster.ReplaceSource(source)
return &broadcaster
}
// NewReader creates a new reader. Each reader will retrieve the same data from the source.
// copyFn is used to copy the data from the source to individual readers. Broadcaster uses a small ring
// buffer; this means that slow readers might miss some data if they're really late and the data is no longer
// in the ring buffer.
func (broadcaster *Broadcaster) NewReader(copyFn func(interface{}) interface{}) Reader {
currentCount := broadcaster.buffer.lastCount()
return ReaderFunc(func() (data interface{}, release func(), err error) {
currentCount++
if push := broadcaster.buffer.acquire(currentCount); push != nil {
data, _, err = broadcaster.source.Load().(Reader).Read()
push(&broadcasterData{
data: data,
err: err,
count: currentCount,
})
} else {
ringData := broadcaster.buffer.get(currentCount)
data, err, currentCount = ringData.data, ringData.err, ringData.count
}
data = copyFn(data)
return
})
}
// ReplaceSource replaces the underlying source. This operation is thread safe.
func (broadcaster *Broadcaster) ReplaceSource(source Reader) error {
if source == nil {
return errEmptySource
}
broadcaster.source.Store(source)
return nil
}
// Source retrieves the underlying source. This operation is thread safe.
func (broadcaster *Broadcaster) Source() Reader {
return broadcaster.source.Load().(Reader)
}
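As a reading aid, a minimal sketch of how the generic Broadcaster above is meant to be used: one source, several readers that can join at any time (the integer source and identity copy function are illustrative):

package main

import (
	"fmt"

	mio "github.com/pion/mediadevices/pkg/io"
)

func main() {
	counter := 0
	// A trivial source; real sources produce video frames or audio chunks.
	src := mio.ReaderFunc(func() (interface{}, func(), error) {
		counter++
		return counter, func() {}, nil
	})

	broadcaster := mio.NewBroadcaster(src, nil) // default ring size and poll duration

	// copyFn is the identity here because ints need no defensive copy.
	r1 := broadcaster.NewReader(func(data interface{}) interface{} { return data })
	r2 := broadcaster.NewReader(func(data interface{}) interface{} { return data })

	v1, _, _ := r1.Read()
	v2, _, _ := r2.Read() // reads the value r1 already pulled from the source
	fmt.Println(v1, v2)
}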

View File

@@ -1,148 +0,0 @@
package io
import (
"fmt"
"runtime"
"sync"
"sync/atomic"
"testing"
"time"
)
func TestBroadcast(t *testing.T) {
// https://github.com/pion/mediadevices/issues/198
if runtime.GOOS == "darwin" {
t.Skip("Skipping because Darwin CI is not reliable for timing related tests.")
}
frames := make([]int, 5*30) // 5 seconds worth of frames
for i := range frames {
frames[i] = i
}
routinePauseConds := []struct {
src bool
dst bool
expectedFPS float64
expectedDrop float64
}{
{
src: false,
dst: false,
expectedFPS: 30,
},
{
src: true,
dst: false,
expectedFPS: 20,
expectedDrop: 10,
},
{
src: false,
dst: true,
expectedFPS: 20,
expectedDrop: 10,
},
}
for _, pauseCond := range routinePauseConds {
pauseCond := pauseCond
t.Run(fmt.Sprintf("SrcPause-%v/DstPause-%v", pauseCond.src, pauseCond.dst), func(t *testing.T) {
for n := 1; n <= 256; n *= 16 {
n := n
t.Run(fmt.Sprintf("Readers-%d", n), func(t *testing.T) {
var src Reader
interval := time.NewTicker(time.Millisecond * 33) // 30 fps
defer interval.Stop()
frameCount := 0
frameSent := 0
lastSend := time.Now()
src = ReaderFunc(func() (interface{}, func(), error) {
if pauseCond.src && frameSent == 30 {
time.Sleep(time.Second)
}
<-interval.C
now := time.Now()
if interval := now.Sub(lastSend); interval > time.Millisecond*33*3/2 {
// Source reader should drop frames to catch up to the latest frame.
drop := int(interval/(time.Millisecond*33)) - 1
frameCount += drop
t.Logf("Skipped %d frames", drop)
}
lastSend = now
frame := frames[frameCount]
frameCount++
frameSent++
return frame, func() {}, nil
})
broadcaster := NewBroadcaster(src, nil)
var done uint32
duration := time.Second * 3
fpsChan := make(chan []float64)
var wg sync.WaitGroup
wg.Add(n)
for i := 0; i < n; i++ {
go func() {
reader := broadcaster.NewReader(func(src interface{}) interface{} { return src })
count := 0
lastFrameCount := -1
droppedFrames := 0
wg.Done()
wg.Wait()
for atomic.LoadUint32(&done) == 0 {
if pauseCond.dst && count == 30 {
time.Sleep(time.Second)
}
frame, _, err := reader.Read()
if err != nil {
t.Error(err)
}
frameCount := frame.(int)
droppedFrames += (frameCount - lastFrameCount - 1)
lastFrameCount = frameCount
count++
}
fps := float64(count) / duration.Seconds()
if fps < pauseCond.expectedFPS-2 || fps > pauseCond.expectedFPS+2 {
t.Fatal("Unexpected average FPS")
}
droppedFramesPerSecond := float64(droppedFrames) / duration.Seconds()
if droppedFramesPerSecond < pauseCond.expectedDrop-2 || droppedFramesPerSecond > pauseCond.expectedDrop+2 {
t.Fatal("Unexpected drop count")
}
fpsChan <- []float64{fps, droppedFramesPerSecond, float64(lastFrameCount)}
}()
}
time.Sleep(duration)
atomic.StoreUint32(&done, 1)
var fpsAvg float64
var droppedFramesPerSecondAvg float64
var lastFrameCountAvg float64
var count int
for metric := range fpsChan {
fps, droppedFramesPerSecond, lastFrameCount := metric[0], metric[1], metric[2]
fpsAvg += fps
droppedFramesPerSecondAvg += droppedFramesPerSecond
lastFrameCountAvg += lastFrameCount
count++
if count == n {
break
}
}
t.Log("Average FPS :", fpsAvg/float64(n))
t.Log("Average dropped frames per second:", droppedFramesPerSecondAvg/float64(n))
t.Log("Last frame count (src) :", frameCount)
t.Log("Average last frame count (dst) :", lastFrameCountAvg/float64(n))
})
}
})
}
}

11
pkg/io/io.go Normal file
View File

@@ -0,0 +1,11 @@
package io
// Copy copies data from src to dst. If dst is not big enough, it returns an
// InsufficientBufferError.
func Copy(dst, src []byte) (n int, err error) {
if len(dst) < len(src) {
return 0, &InsufficientBufferError{len(src)}
}
return copy(dst, src), nil
}
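A small sketch of the intended Copy workflow: grow the destination to the size reported by InsufficientBufferError and retry (the buffer contents are illustrative):

package main

import (
	"fmt"

	mio "github.com/pion/mediadevices/pkg/io"
)

func main() {
	src := []byte{1, 2, 3, 4}
	dst := make([]byte, 2) // deliberately too small

	n, err := mio.Copy(dst, src)
	if e, ok := err.(*mio.InsufficientBufferError); ok {
		// Grow dst to the size the error reports, then retry.
		dst = make([]byte, e.RequiredSize)
		n, err = mio.Copy(dst, src)
	}
	fmt.Println(n, err) // 4 <nil>
}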

45
pkg/io/io_test.go Normal file
View File

@@ -0,0 +1,45 @@
package io
import (
"testing"
)
func TestCopy(t *testing.T) {
var dst []byte
src := make([]byte, 4)
n, err := Copy(dst, src)
if err == nil {
t.Fatal("expected err to be non-nill")
}
if n != 0 {
t.Fatalf("expected n to be 0, but got %d", n)
}
e, ok := err.(*InsufficientBufferError)
if !ok {
t.Fatalf("expected error to be InsufficientBufferError")
}
if e.RequiredSize != len(src) {
t.Fatalf("expected required size to be %d, but got %d", len(src), e.RequiredSize)
}
dst = make([]byte, 2*e.RequiredSize)
n, err = Copy(dst, src)
if err != nil {
t.Fatalf("expected to not get an error after expanding the buffer")
}
if n != len(src) {
t.Fatalf("expected n to be %d, but got %d", len(src), n)
}
for i := 0; i < len(src); i++ {
if src[i] != dst[i] {
t.Fatalf("expected value at %d to be %d, but got %d", i, src[i], dst[i])
}
}
}

View File

@@ -1,15 +0,0 @@
package io
// Reader is a generic data reader. In the future, interface{} should be replaced by a generic type
// to provide strong typing.
type Reader interface {
Read() (data interface{}, release func(), err error)
}
// ReaderFunc is a proxy type for Reader
type ReaderFunc func() (data interface{}, release func(), err error)
func (f ReaderFunc) Read() (data interface{}, release func(), err error) {
data, release, err = f()
return
}

View File

@@ -1,76 +0,0 @@
package video
import (
"fmt"
"image"
"github.com/pion/mediadevices/pkg/io"
)
var errEmptySource = fmt.Errorf("Source can't be nil")
// Broadcaster is a specialized video broadcaster.
type Broadcaster struct {
ioBroadcaster *io.Broadcaster
}
type BroadcasterConfig struct {
Core *io.BroadcasterConfig
}
// NewBroadcaster creates a new broadcaster. Source is expected to drop frames
// when any of the readers is slower than the source.
func NewBroadcaster(source Reader, config *BroadcasterConfig) *Broadcaster {
var coreConfig *io.BroadcasterConfig
if config != nil {
coreConfig = config.Core
}
broadcaster := io.NewBroadcaster(io.ReaderFunc(func() (interface{}, func(), error) {
return source.Read()
}), coreConfig)
return &Broadcaster{broadcaster}
}
// NewReader creates a new reader. Each reader will retrieve the same data from the source.
// copyFrame indicates whether each frame should be copied into an internal buffer before it is handed to the reader. Broadcaster uses a small ring
// buffer; this means that slow readers might miss some data if they're really late and the data is no longer
// in the ring buffer.
func (broadcaster *Broadcaster) NewReader(copyFrame bool) Reader {
copyFn := func(src interface{}) interface{} { return src }
if copyFrame {
buffer := NewFrameBuffer(0)
copyFn = func(src interface{}) interface{} {
realSrc, _ := src.(image.Image)
buffer.StoreCopy(realSrc)
return buffer.Load()
}
}
reader := broadcaster.ioBroadcaster.NewReader(copyFn)
return ReaderFunc(func() (image.Image, func(), error) {
data, _, err := reader.Read()
img, _ := data.(image.Image)
return img, func() {}, err
})
}
// ReplaceSource replaces the underlying source. This operation is thread safe.
func (broadcaster *Broadcaster) ReplaceSource(source Reader) error {
return broadcaster.ioBroadcaster.ReplaceSource(io.ReaderFunc(func() (interface{}, func(), error) {
return source.Read()
}))
}
// Source retrieves the underlying source. This operation is thread safe.
func (broadcaster *Broadcaster) Source() Reader {
source := broadcaster.ioBroadcaster.Source()
return ReaderFunc(func() (image.Image, func(), error) {
data, _, err := source.Read()
img, _ := data.(image.Image)
return img, func() {}, err
})
}
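A minimal sketch of the video Broadcaster above: transient consumers can read without copying, while readers that hold on to frames should request a copy (the helper name and the pkg/io/video import path are assumptions):

package example

import "github.com/pion/mediadevices/pkg/io/video"

// fanOut attaches two readers to one video source: a zero-copy reader for
// transient use, and a copying reader whose frames do not alias the source's
// internal buffers.
func fanOut(src video.Reader) (zeroCopy, copied video.Reader) {
	broadcaster := video.NewBroadcaster(src, nil)
	zeroCopy = broadcaster.NewReader(false)
	copied = broadcaster.NewReader(true)
	return zeroCopy, copied
}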

View File

@@ -1,49 +0,0 @@
package video
import (
"image"
"reflect"
"testing"
)
func TestBroadcast(t *testing.T) {
resolution := image.Rect(0, 0, 1920, 1080)
img := image.NewGray(resolution)
source := ReaderFunc(func() (image.Image, func(), error) {
return img, func() {}, nil
})
broadcaster := NewBroadcaster(source, nil)
readerWithoutCopy1 := broadcaster.NewReader(false)
readerWithoutCopy2 := broadcaster.NewReader(false)
actualWithoutCopy1, _, err := readerWithoutCopy1.Read()
if err != nil {
t.Fatal(err)
}
actualWithoutCopy2, _, err := readerWithoutCopy2.Read()
if err != nil {
t.Fatal(err)
}
if &actualWithoutCopy1.(*image.Gray).Pix[0] != &actualWithoutCopy2.(*image.Gray).Pix[0] {
t.Fatal("Expected underlying buffer for frame with copy to be the same from broadcaster's buffer")
}
if !reflect.DeepEqual(img, actualWithoutCopy1) {
t.Fatal("Expected actual frame without copy to be the same with the original")
}
readerWithCopy := broadcaster.NewReader(true)
actualWithCopy, _, err := readerWithCopy.Read()
if err != nil {
t.Fatal(err)
}
if &actualWithCopy.(*image.Gray).Pix[0] == &actualWithoutCopy1.(*image.Gray).Pix[0] {
t.Fatal("Expected underlying buffer for frame with copy to be different from broadcaster's buffer")
}
if !reflect.DeepEqual(img, actualWithCopy) {
t.Fatal("Expected actual frame without copy to be the same with the original")
}
}

View File

@@ -63,10 +63,10 @@ func imageToYCbCr(dst *image.YCbCr, src image.Image) {
// ToI420 converts r to a new reader that will output images in I420 format
func ToI420(r Reader) Reader {
var yuvImg image.YCbCr
return ReaderFunc(func() (image.Image, func(), error) {
img, _, err := r.Read()
return ReaderFunc(func() (image.Image, error) {
img, err := r.Read()
if err != nil {
return nil, func() {}, err
return nil, err
}
imageToYCbCr(&yuvImg, img)
@@ -79,11 +79,11 @@ func ToI420(r Reader) Reader {
i422ToI420(&yuvImg)
case image.YCbCrSubsampleRatio420:
default:
return nil, func() {}, fmt.Errorf("unsupported pixel format: %s", yuvImg.SubsampleRatio)
return nil, fmt.Errorf("unsupported pixel format: %s", yuvImg.SubsampleRatio)
}
yuvImg.SubsampleRatio = image.YCbCrSubsampleRatio420
return &yuvImg, func() {}, nil
return &yuvImg, nil
})
}
@@ -130,13 +130,13 @@ func imageToRGBA(dst *image.RGBA, src image.Image) {
// ToRGBA converts r to a new reader that will output images in RGBA format
func ToRGBA(r Reader) Reader {
var dst image.RGBA
return ReaderFunc(func() (image.Image, func(), error) {
img, _, err := r.Read()
return ReaderFunc(func() (image.Image, error) {
img, err := r.Read()
if err != nil {
return nil, func() {}, err
return nil, err
}
imageToRGBA(&dst, img)
return &dst, func() {}, nil
return &dst, nil
})
}

View File

@@ -144,10 +144,10 @@ func TestToI420(t *testing.T) {
for name, c := range cases {
c := c
t.Run(name, func(t *testing.T) {
r := ToI420(ReaderFunc(func() (image.Image, func(), error) {
return c.src, func() {}, nil
r := ToI420(ReaderFunc(func() (image.Image, error) {
return c.src, nil
}))
out, _, err := r.Read()
out, err := r.Read()
if err != nil {
t.Fatalf("Unexpected error: %v", err)
}
@@ -199,10 +199,10 @@ func TestToRGBA(t *testing.T) {
for name, c := range cases {
c := c
t.Run(name, func(t *testing.T) {
r := ToRGBA(ReaderFunc(func() (image.Image, func(), error) {
return c.src, func() {}, nil
r := ToRGBA(ReaderFunc(func() (image.Image, error) {
return c.src, nil
}))
out, _, err := r.Read()
out, err := r.Read()
if err != nil {
t.Fatalf("Unexpected error: %v", err)
}
@@ -225,12 +225,12 @@ func BenchmarkToI420(b *testing.B) {
for name, img := range cases {
img := img
b.Run(name, func(b *testing.B) {
r := ToI420(ReaderFunc(func() (image.Image, func(), error) {
return img, func() {}, nil
r := ToI420(ReaderFunc(func() (image.Image, error) {
return img, nil
}))
for i := 0; i < b.N; i++ {
_, _, err := r.Read()
_, err := r.Read()
if err != nil {
b.Fatalf("Unexpected error: %v", err)
}
@@ -253,12 +253,12 @@ func BenchmarkToRGBA(b *testing.B) {
for name, img := range cases {
img := img
b.Run(name, func(b *testing.B) {
r := ToRGBA(ReaderFunc(func() (image.Image, func(), error) {
return img, func() {}, nil
r := ToRGBA(ReaderFunc(func() (image.Image, error) {
return img, nil
}))
for i := 0; i < b.N; i++ {
_, _, err := r.Read()
_, err := r.Read()
if err != nil {
b.Fatalf("Unexpected error: %v", err)
}

View File

@@ -1,58 +0,0 @@
package video
import (
"image"
"time"
"github.com/pion/mediadevices/pkg/prop"
)
// DetectChanges will detect frame and video property changes. For time-related properties such as
// the frame rate, interval determines how often they are re-measured.
func DetectChanges(interval time.Duration, onChange func(prop.Media)) TransformFunc {
return func(r Reader) Reader {
var currentProp prop.Media
var lastTaken time.Time
var frames uint
return ReaderFunc(func() (image.Image, func(), error) {
var dirty bool
img, _, err := r.Read()
if err != nil {
return nil, func() {}, err
}
bounds := img.Bounds()
if currentProp.Width != bounds.Dx() {
currentProp.Width = bounds.Dx()
dirty = true
}
if currentProp.Height != bounds.Dy() {
currentProp.Height = bounds.Dy()
dirty = true
}
// TODO: maybe detect frame format? It probably doesn't make sense since some
// formats differ only in memory layout, e.g. YUV2 vs NV12.
now := time.Now()
elapsed := now.Sub(lastTaken)
if elapsed >= interval {
fps := float32(float64(frames) / elapsed.Seconds())
// TODO: maybe add some epsilon so that small changes will not be marked as dirty
currentProp.FrameRate = fps
frames = 0
lastTaken = now
dirty = true
}
if dirty {
onChange(currentProp)
}
frames++
return img, func() {}, nil
})
}
}
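A brief sketch combining the video DetectChanges transform above with the Throttle transform that appears further down in this diff (the helper name, the 30 fps figure, and the pkg/io/video import path are assumptions):

package example

import (
	"log"
	"time"

	"github.com/pion/mediadevices/pkg/io/video"
	"github.com/pion/mediadevices/pkg/prop"
)

// instrument throttles src to roughly 30 fps and logs resolution changes plus a
// frame-rate estimate that is refreshed once per interval.
func instrument(src video.Reader) video.Reader {
	src = video.Throttle(30)(src)
	return video.DetectChanges(time.Second, func(p prop.Media) {
		log.Printf("video props: %dx%d @ %.1f fps", p.Width, p.Height, p.FrameRate)
	})(src)
}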

View File

@@ -1,158 +0,0 @@
package video
import (
"fmt"
"image"
"runtime"
"testing"
"time"
"github.com/pion/mediadevices/pkg/prop"
)
func BenchmarkDetectChanges(b *testing.B) {
var src Reader
src = ReaderFunc(func() (image.Image, func(), error) {
return image.NewRGBA(image.Rect(0, 0, 1920, 1080)), func() {}, nil
})
b.Run("WithoutDetectChanges", func(b *testing.B) {
for i := 0; i < b.N; i++ {
src.Read()
}
})
ns := []int{1, 8, 64, 256}
for _, n := range ns {
n := n
src := src
b.Run(fmt.Sprintf("WithDetectChanges%d", n), func(b *testing.B) {
for i := 0; i < n; i++ {
src = DetectChanges(time.Microsecond, func(p prop.Media) {})(src)
}
for i := 0; i < b.N; i++ {
src.Read()
}
})
}
}
func TestDetectChanges(t *testing.T) {
buildSource := func(p prop.Media) (Reader, func(prop.Media)) {
return ReaderFunc(func() (image.Image, func(), error) {
return image.NewRGBA(image.Rect(0, 0, p.Width, p.Height)), func() {}, nil
}), func(newProp prop.Media) {
p = newProp
}
}
assertEq := func(t *testing.T, actual prop.Media, expected prop.Media, output image.Image, assertFrameRate bool) {
if actual.Height != expected.Height {
t.Fatalf("expected height from to be %d but got %d", expected.Height, actual.Height)
}
if actual.Width != expected.Width {
t.Fatalf("expected width from to be %d but got %d", expected.Width, actual.Width)
}
if assertFrameRate {
diff := actual.FrameRate - expected.FrameRate
// TODO: reduce this eps. Darwin CI keeps failing if we use a lower value
var eps float32 = 1.5
if diff < -eps || diff > eps {
t.Fatalf("expected frame rate to be %f (+-%f) but got %f", expected.FrameRate, eps, actual.FrameRate)
}
}
if output.Bounds().Dy() != expected.Height {
t.Fatalf("expected output height from to be %d but got %d", expected.Height, output.Bounds().Dy())
}
if output.Bounds().Dx() != expected.Width {
t.Fatalf("expected output width from to be %d but got %d", expected.Width, output.Bounds().Dx())
}
}
t.Run("OnChangeCalledBeforeFirstFrame", func(t *testing.T) {
var detectBeforeFirstFrame bool
var expected prop.Media
var actual prop.Media
expected.Width = 1920
expected.Height = 1080
src, _ := buildSource(expected)
src = DetectChanges(time.Second, func(p prop.Media) {
actual = p
detectBeforeFirstFrame = true
})(src)
frame, _, err := src.Read()
if err != nil {
t.Fatal(err)
}
if !detectBeforeFirstFrame {
t.Fatal("on change callback should have called before first frame")
}
assertEq(t, actual, expected, frame, false)
})
t.Run("DetectChangesOnEveryUpdate", func(t *testing.T) {
var expected prop.Media
var actual prop.Media
expected.Width = 1920
expected.Height = 1080
src, update := buildSource(expected)
src = DetectChanges(time.Second, func(p prop.Media) {
actual = p
})(src)
for width := 1920; width < 4000; width += 100 {
for height := 1080; height < 2000; height += 100 {
expected.Width = width
expected.Height = height
update(expected)
frame, _, err := src.Read()
if err != nil {
t.Fatal(err)
}
assertEq(t, actual, expected, frame, false)
}
}
})
t.Run("FrameRateAccuracy", func(t *testing.T) {
// https://github.com/pion/mediadevices/issues/198
if runtime.GOOS == "darwin" {
t.Skip("Skipping because Darwin CI is not reliable for timing related tests.")
}
var expected prop.Media
var actual prop.Media
var count int
expected.Width = 1920
expected.Height = 1080
expected.FrameRate = 30
src, _ := buildSource(expected)
src = Throttle(expected.FrameRate)(src)
src = DetectChanges(time.Second*5, func(p prop.Media) {
actual = p
count++
})(src)
for count < 3 {
frame, _, err := src.Read()
if err != nil {
t.Fatal(err)
}
checkFrameRate := false
if actual.FrameRate != 0.0 {
checkFrameRate = true
}
assertEq(t, actual, expected, frame, checkFrameRate)
}
})
}

View File

@@ -1,214 +0,0 @@
package video
import (
"image"
)
// FrameBuffer is a buffer that can store any image format.
type FrameBuffer struct {
buffer []uint8
tmp image.Image
}
// NewFrameBuffer creates a new FrameBuffer instance and initializes the internal buffer
// with initialSize
func NewFrameBuffer(initialSize int) *FrameBuffer {
return &FrameBuffer{
buffer: make([]uint8, initialSize),
}
}
func (buff *FrameBuffer) storeInOrder(srcs ...[]uint8) {
var neededSize int
for _, src := range srcs {
neededSize += len(src)
}
if len(buff.buffer) < neededSize {
if cap(buff.buffer) >= neededSize {
buff.buffer = buff.buffer[:neededSize]
} else {
buff.buffer = make([]uint8, neededSize)
}
}
var currentLen int
for _, src := range srcs {
copy(buff.buffer[currentLen:], src)
currentLen += len(src)
}
}
// Load returns the currently owned image
func (buff *FrameBuffer) Load() image.Image {
return buff.tmp
}
// StoreCopy makes a copy of src and stores it. StoreCopy will reuse as much memory as it can
// from the previous copies. For example, if StoreCopy is given an image that has the same resolution
// and format as the previous call, StoreCopy will not allocate extra memory and will only copy the content
// from src into the previous buffer.
func (buff *FrameBuffer) StoreCopy(src image.Image) {
switch src := src.(type) {
case *image.Alpha:
clone, ok := buff.tmp.(*image.Alpha)
if ok {
*clone = *src
} else {
copied := *src
clone = &copied
}
buff.storeInOrder(src.Pix)
clone.Pix = buff.buffer[:len(src.Pix)]
buff.tmp = clone
case *image.Alpha16:
clone, ok := buff.tmp.(*image.Alpha16)
if ok {
*clone = *src
} else {
copied := *src
clone = &copied
}
buff.storeInOrder(src.Pix)
clone.Pix = buff.buffer[:len(src.Pix)]
buff.tmp = clone
case *image.CMYK:
clone, ok := buff.tmp.(*image.CMYK)
if ok {
*clone = *src
} else {
copied := *src
clone = &copied
}
buff.storeInOrder(src.Pix)
clone.Pix = buff.buffer[:len(src.Pix)]
buff.tmp = clone
case *image.Gray:
clone, ok := buff.tmp.(*image.Gray)
if ok {
*clone = *src
} else {
copied := *src
clone = &copied
}
buff.storeInOrder(src.Pix)
clone.Pix = buff.buffer[:len(src.Pix)]
buff.tmp = clone
case *image.Gray16:
clone, ok := buff.tmp.(*image.Gray16)
if ok {
*clone = *src
} else {
copied := *src
clone = &copied
}
buff.storeInOrder(src.Pix)
clone.Pix = buff.buffer[:len(src.Pix)]
buff.tmp = clone
case *image.NRGBA:
clone, ok := buff.tmp.(*image.NRGBA)
if ok {
*clone = *src
} else {
copied := *src
clone = &copied
}
buff.storeInOrder(src.Pix)
clone.Pix = buff.buffer[:len(src.Pix)]
buff.tmp = clone
case *image.NRGBA64:
clone, ok := buff.tmp.(*image.NRGBA64)
if ok {
*clone = *src
} else {
copied := *src
clone = &copied
}
buff.storeInOrder(src.Pix)
clone.Pix = buff.buffer[:len(src.Pix)]
buff.tmp = clone
case *image.RGBA:
clone, ok := buff.tmp.(*image.RGBA)
if ok {
*clone = *src
} else {
copied := *src
clone = &copied
}
buff.storeInOrder(src.Pix)
clone.Pix = buff.buffer[:len(src.Pix)]
buff.tmp = clone
case *image.RGBA64:
clone, ok := buff.tmp.(*image.RGBA64)
if ok {
*clone = *src
} else {
copied := *src
clone = &copied
}
buff.storeInOrder(src.Pix)
clone.Pix = buff.buffer[:len(src.Pix)]
buff.tmp = clone
case *image.NYCbCrA:
clone, ok := buff.tmp.(*image.NYCbCrA)
if ok {
*clone = *src
} else {
copied := *src
clone = &copied
}
var currentLen int
buff.storeInOrder(src.Y, src.Cb, src.Cr, src.A)
clone.Y = buff.buffer[currentLen : currentLen+len(src.Y) : currentLen+len(src.Y)]
currentLen += len(src.Y)
clone.Cb = buff.buffer[currentLen : currentLen+len(src.Cb) : currentLen+len(src.Cb)]
currentLen += len(src.Cb)
clone.Cr = buff.buffer[currentLen : currentLen+len(src.Cr) : currentLen+len(src.Cr)]
currentLen += len(src.Cr)
clone.A = buff.buffer[currentLen : currentLen+len(src.A) : currentLen+len(src.A)]
buff.tmp = clone
case *image.YCbCr:
clone, ok := buff.tmp.(*image.YCbCr)
if ok {
*clone = *src
} else {
copied := *src
clone = &copied
}
var currentLen int
buff.storeInOrder(src.Y, src.Cb, src.Cr)
clone.Y = buff.buffer[currentLen : currentLen+len(src.Y) : currentLen+len(src.Y)]
currentLen += len(src.Y)
clone.Cb = buff.buffer[currentLen : currentLen+len(src.Cb) : currentLen+len(src.Cb)]
currentLen += len(src.Cb)
clone.Cr = buff.buffer[currentLen : currentLen+len(src.Cr) : currentLen+len(src.Cr)]
buff.tmp = clone
default:
var converted image.RGBA
imageToRGBA(&converted, src)
buff.StoreCopy(&converted)
}
}
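A small sketch of the reuse behaviour StoreCopy describes above: back-to-back copies of frames with the same resolution and format share one allocation (the pkg/io/video import path and frame sizes are assumptions):

package main

import (
	"fmt"
	"image"

	"github.com/pion/mediadevices/pkg/io/video"
)

func main() {
	fb := video.NewFrameBuffer(0)

	// The first copy allocates a backing buffer large enough for the frame.
	fb.StoreCopy(image.NewYCbCr(image.Rect(0, 0, 640, 480), image.YCbCrSubsampleRatio420))

	// A second copy with the same resolution and format reuses that buffer
	// instead of allocating again.
	fb.StoreCopy(image.NewYCbCr(image.Rect(0, 0, 640, 480), image.YCbCrSubsampleRatio420))

	fmt.Println(fb.Load().Bounds()) // the latest copied frame, owned by fb
}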

View File

@@ -1,195 +0,0 @@
package video
import (
"image"
"math/rand"
"reflect"
"testing"
)
func randomize(arr []uint8) {
for i := range arr {
arr[i] = uint8(rand.Uint32())
}
}
func BenchmarkFrameBufferCopyOptimized(b *testing.B) {
frameBuffer := NewFrameBuffer(0)
resolution := image.Rect(0, 0, 1920, 1080)
src := image.NewYCbCr(resolution, image.YCbCrSubsampleRatio420)
for i := 0; i < b.N; i++ {
frameBuffer.StoreCopy(src)
}
}
func BenchmarkFrameBufferCopyNaive(b *testing.B) {
resolution := image.Rect(0, 0, 1920, 1080)
src := image.NewYCbCr(resolution, image.YCbCrSubsampleRatio420)
var dst image.Image
for i := 0; i < b.N; i++ {
clone := *src
clone.Cb = make([]uint8, len(src.Cb))
clone.Cr = make([]uint8, len(src.Cr))
clone.Y = make([]uint8, len(src.Y))
copy(clone.Cb, src.Cb)
copy(clone.Cr, src.Cr)
copy(clone.Y, src.Y)
dst = &clone
_ = dst
}
}
func TestFrameBufferStoreCopyAndLoad(t *testing.T) {
resolution := image.Rect(0, 0, 16, 8)
rgbaLike := image.NewRGBA64(resolution)
randomize(rgbaLike.Pix)
testCases := map[string]struct {
New func() image.Image
Update func(image.Image)
}{
"Alpha": {
New: func() image.Image {
return (*image.Alpha)(rgbaLike)
},
Update: func(src image.Image) {
img := src.(*image.Alpha)
randomize(img.Pix)
},
},
"Alpha16": {
New: func() image.Image {
return (*image.Alpha16)(rgbaLike)
},
Update: func(src image.Image) {
img := src.(*image.Alpha16)
randomize(img.Pix)
},
},
"CMYK": {
New: func() image.Image {
return (*image.CMYK)(rgbaLike)
},
Update: func(src image.Image) {
img := src.(*image.CMYK)
randomize(img.Pix)
},
},
"Gray": {
New: func() image.Image {
return (*image.Gray)(rgbaLike)
},
Update: func(src image.Image) {
img := src.(*image.Gray)
randomize(img.Pix)
},
},
"Gray16": {
New: func() image.Image {
return (*image.Gray16)(rgbaLike)
},
Update: func(src image.Image) {
img := src.(*image.Gray16)
randomize(img.Pix)
},
},
"NRGBA": {
New: func() image.Image {
return (*image.NRGBA)(rgbaLike)
},
Update: func(src image.Image) {
img := src.(*image.NRGBA)
randomize(img.Pix)
},
},
"NRGBA64": {
New: func() image.Image {
return (*image.NRGBA64)(rgbaLike)
},
Update: func(src image.Image) {
img := src.(*image.NRGBA64)
randomize(img.Pix)
},
},
"RGBA": {
New: func() image.Image {
return (*image.RGBA)(rgbaLike)
},
Update: func(src image.Image) {
img := src.(*image.RGBA)
randomize(img.Pix)
},
},
"RGBA64": {
New: func() image.Image {
return (*image.RGBA64)(rgbaLike)
},
Update: func(src image.Image) {
img := src.(*image.RGBA64)
randomize(img.Pix)
},
},
"NYCbCrA": {
New: func() image.Image {
img := image.NewNYCbCrA(resolution, image.YCbCrSubsampleRatio420)
randomize(img.Y)
randomize(img.Cb)
randomize(img.Cr)
randomize(img.A)
img.CStride = 10
img.YStride = 5
return img
},
Update: func(src image.Image) {
img := src.(*image.NYCbCrA)
randomize(img.Y)
randomize(img.Cb)
randomize(img.Cr)
randomize(img.A)
img.CStride = 3
img.YStride = 2
},
},
"YCbCr": {
New: func() image.Image {
img := image.NewYCbCr(resolution, image.YCbCrSubsampleRatio420)
randomize(img.Y)
randomize(img.Cb)
randomize(img.Cr)
img.CStride = 10
img.YStride = 5
return img
},
Update: func(src image.Image) {
img := src.(*image.YCbCr)
randomize(img.Y)
randomize(img.Cb)
randomize(img.Cr)
img.CStride = 3
img.YStride = 2
},
},
}
frameBuffer := NewFrameBuffer(0)
for name, testCase := range testCases {
// Since the test also wants to make sure that FrameBuffer can convert from one type to another,
// t.Run is not ideal since it'll run the tests separately
t.Log("Testing", name)
src := testCase.New()
frameBuffer.StoreCopy(src)
if !reflect.DeepEqual(frameBuffer.Load(), src) {
t.Fatal("Expected the copied image to be identical with the source")
}
testCase.Update(src)
frameBuffer.StoreCopy(src)
if !reflect.DeepEqual(frameBuffer.Load(), src) {
t.Fatal("Expected the copied image to be identical with the source after an update in source")
}
}
}

View File

@@ -156,10 +156,10 @@ func Scale(width, height int, scaler Scaler) TransformFunc {
}
}
return ReaderFunc(func() (image.Image, func(), error) {
img, _, err := r.Read()
return ReaderFunc(func() (image.Image, error) {
img, err := r.Read()
if err != nil {
return nil, func() {}, err
return nil, err
}
switch v := img.(type) {
@@ -169,7 +169,7 @@ func Scale(width, height int, scaler Scaler) TransformFunc {
scalerCached.Scale(dst, rect, v, v.Rect, draw.Src, nil)
cloned := *dst // clone metadata
return &cloned, func() {}, nil
return &cloned, nil
case *image.YCbCr:
ycbcrRealloc(v)
@@ -184,10 +184,10 @@ func Scale(width, height int, scaler Scaler) TransformFunc {
scalerCached.Scale(dst, dst.Bounds(), src, src.Bounds(), draw.Src, nil)
cloned := *(imgScaled.(*image.YCbCr)) // clone metadata
return &cloned, func() {}, nil
return &cloned, nil
default:
return nil, func() {}, errUnsupportedImageType
return nil, errUnsupportedImageType
}
})
}

View File

@@ -215,11 +215,11 @@ func TestScale(t *testing.T) {
c := c
t.Run(name, func(t *testing.T) {
trans := Scale(c.width, c.height, algo)
r := trans(ReaderFunc(func() (image.Image, func(), error) {
return c.src, func() {}, nil
r := trans(ReaderFunc(func() (image.Image, error) {
return c.src, nil
}))
for i := 0; i < 4; i++ {
out, _, err := r.Read()
out, err := r.Read()
if err != nil {
t.Fatalf("Unexpected error: %v", err)
}
@@ -261,12 +261,12 @@ func BenchmarkScale(b *testing.B) {
img := img
b.Run(name, func(b *testing.B) {
trans := Scale(640, 360, algo)
r := trans(ReaderFunc(func() (image.Image, func(), error) {
return img, func() {}, nil
r := trans(ReaderFunc(func() (image.Image, error) {
return img, nil
}))
for i := 0; i < b.N; i++ {
_, _, err := r.Read()
_, err := r.Read()
if err != nil {
b.Fatalf("Unexpected error: %v", err)
}

View File

@@ -10,16 +10,16 @@ import (
func Throttle(rate float32) TransformFunc {
return func(r Reader) Reader {
ticker := time.NewTicker(time.Duration(int64(float64(time.Second) / float64(rate))))
return ReaderFunc(func() (image.Image, func(), error) {
return ReaderFunc(func() (image.Image, error) {
for {
img, _, err := r.Read()
img, err := r.Read()
if err != nil {
ticker.Stop()
return nil, func() {}, err
return nil, err
}
select {
case <-ticker.C:
return img, func() {}, nil
return img, nil
default:
}
}

View File

@@ -2,37 +2,32 @@ package video
import (
"image"
"runtime"
"testing"
"time"
)
func TestThrottle(t *testing.T) {
// https://github.com/pion/mediadevices/issues/198
if runtime.GOOS == "darwin" {
t.Skip("Skipping because Darwin CI is not reliable for timing related tests.")
}
img := image.NewRGBA(image.Rect(0, 0, 640, 480))
ticker := time.NewTicker(20 * time.Millisecond)
ticker := time.NewTicker(time.Millisecond)
defer ticker.Stop()
var cntPush int
trans := Throttle(50)
r := trans(ReaderFunc(func() (image.Image, func(), error) {
trans := Throttle(100)
r := trans(ReaderFunc(func() (image.Image, error) {
<-ticker.C
cntPush++
return img, func() {}, nil
return img, nil
}))
for i := 0; i < 20; i++ {
_, _, err := r.Read()
for i := 0; i < 50; i++ {
_, err := r.Read()
if err != nil {
t.Fatalf("Unexpected error: %v", err)
}
}
cntExpected := 20
if cntPush < cntExpected*8/10 || cntExpected*12/10 < cntPush {
cntExpected := 500
if cntPush < cntExpected*9/10 || cntExpected*11/10 < cntPush {
t.Fatalf("Number of pushed images is expected to be %d, but pushed %d", cntExpected, cntPush)
}
t.Log(cntPush)

View File

@@ -5,14 +5,13 @@ import (
)
type Reader interface {
Read() (img image.Image, release func(), err error)
Read() (img image.Image, err error)
}
type ReaderFunc func() (img image.Image, release func(), err error)
type ReaderFunc func() (img image.Image, err error)
func (rf ReaderFunc) Read() (img image.Image, release func(), err error) {
img, release, err = rf()
return
func (rf ReaderFunc) Read() (img image.Image, err error) {
return rf()
}
// TransformFunc produces a new Reader that will produce a transformed video

View File

@@ -1,37 +0,0 @@
package prop
import "fmt"
// BoolConstraint is an interface to represent a bool value constraint.
type BoolConstraint interface {
Compare(bool) (float64, bool)
Value() bool
}
// BoolExact specifies exact bool value.
type BoolExact bool
// Compare implements BoolConstraint.
func (b BoolExact) Compare(o bool) (float64, bool) {
if bool(b) == o {
return 0.0, true
}
return 1.0, false
}
// Value implements BoolConstraint.
func (b BoolExact) Value() bool { return bool(b) }
// String implements Stringify
func (b BoolExact) String() string {
return fmt.Sprintf("%t (exact)", b)
}
// Bool specifies ideal bool value.
type Bool BoolExact
// Compare implements BoolConstraint.
func (b Bool) Compare(o bool) (float64, bool) {
dist, _ := BoolExact(b).Compare(o)
return dist, true
}
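A quick illustration of the Compare semantics above: an exact constraint fails on a mismatch, while an ideal one still matches at a distance (the values are illustrative):

package main

import (
	"fmt"

	"github.com/pion/mediadevices/pkg/prop"
)

func main() {
	exact := prop.BoolExact(true)
	ideal := prop.Bool(true)

	fmt.Println(exact.Compare(false)) // 1 false: exact constraints reject mismatches
	fmt.Println(ideal.Compare(false)) // 1 true : ideal constraints keep matching, at distance 1
}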

View File

@@ -1,9 +1,7 @@
package prop
import (
"fmt"
"math"
"strings"
"time"
)
@@ -25,11 +23,6 @@ func (d Duration) Compare(a time.Duration) (float64, bool) {
// Value implements DurationConstraint.
func (d Duration) Value() (time.Duration, bool) { return time.Duration(d), true }
// String implements Stringify
func (d Duration) String() string {
return fmt.Sprintf("%v (ideal)", time.Duration(d))
}
// DurationExact specifies exact duration value.
type DurationExact time.Duration
@@ -44,11 +37,6 @@ func (d DurationExact) Compare(a time.Duration) (float64, bool) {
// Value implements DurationConstraint.
func (d DurationExact) Value() (time.Duration, bool) { return time.Duration(d), true }
// String implements Stringify
func (d DurationExact) String() string {
return fmt.Sprintf("%v (exact)", time.Duration(d))
}
// DurationOneOf specifies list of expected duration values.
type DurationOneOf []time.Duration
@@ -65,16 +53,6 @@ func (d DurationOneOf) Compare(a time.Duration) (float64, bool) {
// Value implements DurationConstraint.
func (DurationOneOf) Value() (time.Duration, bool) { return 0, false }
// String implements Stringify
func (d DurationOneOf) String() string {
var opts []string
for _, v := range d {
opts = append(opts, fmt.Sprint(v))
}
return fmt.Sprintf("%s (one of values)", strings.Join(opts, ","))
}
// DurationRanged specifies range of expected duration value.
// If Ideal is non-zero, closest value to Ideal takes priority.
type DurationRanged struct {
@@ -118,8 +96,3 @@ func (d DurationRanged) Compare(a time.Duration) (float64, bool) {
// Value implements DurationConstraint.
func (DurationRanged) Value() (time.Duration, bool) { return 0, false }
// String implements Stringify
func (d DurationRanged) String() string {
return fmt.Sprintf("%s - %s (range), %s (ideal)", d.Min, d.Max, d.Ideal)
}

Some files were not shown because too many files have changed in this diff.