Mirror of https://github.com/pion/mediadevices.git (synced 2025-09-27 04:46:10 +08:00)

Compare commits: vpx-suppor ... v0.1.5 (124 commits)
Commit SHA1s:

716da16e4a 1550a68003 d65170dfe3 4057524bf0 8dd84b269c a73b1922ed 11aea3eb85 cd49cd9910
6900da9a5e 0a1944dc77 c3100355e5 b35246730d 0c61817369 2fe26ea1f7 9d98eb8aaf 3ea35bebab
83c08e6c5f 2f17017450 7cbda134b0 115be126ec 79dcb4f1af 5db4007e73 77ebcecac6 a0d0949954
f396092609 ee6cf08c44 6a211aa19f b089610c27 1d34ec9c5d 7bd3efc8b7 8396fd7aac 3787158dba
640eeb0cc0 16ceb45c25 c98b3b0909 e6c98a844f 2a70c031b8 047013be95 765318feb6 af6d31fde5
2f5e4ee914 1720eee38c 00877c74a0 559c6a13a1 f4a4edcabd c8547c4597 21bb12dd6b fd43659fed
82f33cb572 4f9822349a 16bcd0b7dd 2022a4b7f7 0b6549eb8f 1b0a237438 36edbd9485 eb689a3c79
e4b1b1aaba 0f5df05c16 9dcfaf1c1e 238f190e71 0210ec6ca6 abdd96e6b2 c9779e7f73 5703fd7e4b
db5d8f23bd d6ba28af8c 09c2998408 d129e982c7 74986c010b b8be865ff3 7aad89ef37 943906e125
f3e3dc9589 a3d374f528 cba0042f5d 1732e2751d 5b1527d455 00f0a44ab1 a44240be5f 70f7360b92
30d49e1fd3 0cd870fd4b 13e6dcc437 366885e01c 86e3a3f14c b4c11d5a0c 18da7ff1c6 f7068296d3
6d07cc2a58 d857d04dc9 cfdb2221a4 297b4adb4b 6269ed6508 aacb05c421 4692cd76e9 2f437a5cc6
fa82237095 74f1fa4910 714d0fa839 6d3f9dbc3e 45056e6922 a4faa89c6c 122aec0536 c3c1177455
74723dd9f1 4fbce4769b 09ff95645e 1ebba951fb cce22b117a e87f899777 0d1e856f7d d2d9259f15
0c3bf8af3b 438ee8a3d0 8c49553179 6735d5541e 94b57d40e3 8d7947b594 fad6c3ec4b 73812503a3
96c19f3635 ea879e1172 f641417d1e 8bfce0c818
1  .gitattributes  vendored  Normal file

@@ -0,0 +1 @@
*.gif filter=lfs diff=lfs merge=lfs -text
52  .github/workflows/ci.yaml  vendored

@@ -8,17 +8,17 @@ on:
- master

jobs:
build:
build-linux:
runs-on: ubuntu-latest
strategy:
matrix:
go: [ '1.14', '1.13' ]
name: Go ${{ matrix.go }}
go: [ '1.15', '1.14' ]
name: Linux Go ${{ matrix.go }}
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Setup Go
uses: actions/setup-go@v1
uses: actions/setup-go@v2
with:
go-version: ${{ matrix.go }}
- name: Install dependencies

@@ -30,18 +30,52 @@ jobs:
libvpx-dev \
libx264-dev
- name: go vet
run: go vet ./...
run: go vet $(go list ./... | grep -v mmal)
- name: go build
run: go build ./...
run: go build $(go list ./... | grep -v mmal)
- name: go build without CGO
run: go build . pkg/...
env:
CGO_ENABLED: 0
- name: go test
run: go test ./... -v -race
run: go test -v -race -coverprofile=coverage.txt -covermode=atomic $(go list ./... | grep -v mmal)
- uses: codecov/codecov-action@v1
if: matrix.go == '1.15'
- name: go test without CGO
run: go test . pkg/... -v
env:
CGO_ENABLED: 0
build-darwin:
runs-on: macos-latest
strategy:
matrix:
go: [ '1.15', '1.14' ]
name: Darwin Go ${{ matrix.go }}
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Setup Go
uses: actions/setup-go@v2
with:
go-version: ${{ matrix.go }}
- name: Install dependencies
run: |
brew install \
pkg-config \
opus \
libvpx \
x264
- name: go vet
run: go vet $(go list ./... | grep -v mmal)
- name: go build
run: go build $(go list ./... | grep -v mmal)
- name: go build without CGO
run: go build . pkg/...
env:
CGO_ENABLED: 0
- name: go test
run: go test -v -race $(go list ./... | grep -v mmal)
- name: go test without CGO
run: go test . pkg/... -v
env:
CGO_ENABLED: 0
#- name: golint
# run: go lint ./...
@@ -1 +0,0 @@
* @lherman-cs @at-wat
2  LICENSE

@@ -1,6 +1,6 @@
MIT License

Copyright (c) 2019 Pion
Copyright (c) 2019-2020 Pion

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
289  README.md

@@ -1,75 +1,240 @@
# mediadevices
<h1 align="center">
<br>
Pion MediaDevices
<br>
</h1>
<h4 align="center">Go implementation of the <a href="https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices">MediaDevices</a> API</h4>
<p align="center">
<a href="https://pion.ly/slack"><img src="https://img.shields.io/badge/join-us%20on%20slack-gray.svg?longCache=true&logo=slack&colorB=brightgreen" alt="Slack Widget"></a>
<a href="https://github.com/pion/mediadevices/actions"><img src="https://github.com/pion/mediadevices/workflows/CI/badge.svg?branch=master" alt="Build status"></a>
<a href="https://pkg.go.dev/github.com/pion/mediadevices"><img src="https://godoc.org/github.com/pion/mediadevices?status.svg" alt="GoDoc"></a>
<a href="https://codecov.io/gh/pion/mediadevices"><img src="https://codecov.io/gh/pion/mediadevices/branch/master/graph/badge.svg" alt="Coverage Status"></a>
<a href="LICENSE"><img src="https://img.shields.io/badge/License-MIT-yellow.svg" alt="License: MIT"></a>
</p>
<br>

Go implementation of the [MediaDevices](https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices) API.
`mediadevices` provides access to media input devices like cameras, microphones, and screen capture. It can also be used to encode your video/audio stream to various codec selections. `mediadevices` abstracts away the complexities of interacting with things like hardware and codecs, allowing you to focus on building applications, interacting only with an amazingly simple, easy, and elegant API!

## Interfaces
## Install

| Interface | Linux | Mac | Windows |
| :--------: | :---: | :-: | :-----: |
| Camera | ✔️ | ✖️ | ✔️ |
| Microphone | ✔️ | ✖️ | ✔️ |
| Screen | ✔️ | ✖️ | ✖️ |

### Camera

| OS | Library/Interface |
| :-----: | :-----: |
| Linux | [Video4Linux](https://en.wikipedia.org/wiki/Video4Linux) |
| Mac | N/A |
| Windows | [DirectShow](https://docs.microsoft.com/en-us/windows/win32/directshow) |

| Pixel Format | Linux | Mac | Windows |
| :-----: | :---: | :-: | :-----: |
| [YUY2](https://www.fourcc.org/pixel-format/yuv-yuy2/) | ✔️ | ✖️ | ✔️ |
| [UYVY](https://www.fourcc.org/pixel-format/yuv-uyvy/) | ✔️ | ✖️ | ✖️ |
| [I420](https://www.fourcc.org/pixel-format/yuv-i420/) | ✔️ | ✖️ | ✖️ |
| [NV21](https://www.fourcc.org/pixel-format/yuv-nv21/) | ✔️ | ✖️ | ✖️ |
| [MJPEG](https://www.fourcc.org/mjpg/) | ✔️ | ✖️ | ✖️ |

### Microphone

| OS | Library/Interface |
| :-----: | :-----: |
| Linux | [PulseAudio](https://en.wikipedia.org/wiki/PulseAudio) |
| Mac | N/A |
| Windows | [waveIn](https://docs.microsoft.com/en-us/windows/win32/api/mmeapi/) |

### Screen casting

| OS | Library/Interface |
| :-----: | :-----: |
| Linux | [X11](https://en.wikipedia.org/wiki/X_Window_System) |
| Mac | N/A |
| Windows | N/A |

## Codecs

| Audio Codec | Library/Interface |
| :---------: | :-----: |
| OPUS | [libopus](http://opus-codec.org/) |

| Video Codec | Library/Interface |
| :---------: | :-----: |
| H.264 | [OpenH264](https://www.openh264.org/) |
| VP8 | [libvpx](https://www.webmproject.org/code/) |
| VP9 | [libvpx](https://www.webmproject.org/code/) |
`go get -u github.com/pion/mediadevices`

## Usage

[Wiki](https://github.com/pion/mediadevices/wiki)
The following snippet shows how to capture a camera stream and store a frame as a jpeg image:

```go
package main

import (
    "image/jpeg"
    "os"

    "github.com/pion/mediadevices"
    "github.com/pion/mediadevices/pkg/prop"

    // This is required to register camera adapter
    _ "github.com/pion/mediadevices/pkg/driver/camera"
    // Note: If you don't have a camera or your adapters are not supported,
    // you can always swap your adapters with our dummy adapters below.
    // _ "github.com/pion/mediadevices/pkg/driver/videotest"
)

func main() {
    stream, _ := mediadevices.GetUserMedia(mediadevices.MediaStreamConstraints{
        Video: func(constraint *mediadevices.MediaTrackConstraints) {
            // Query for ideal resolutions
            constraint.Width = prop.Int(600)
            constraint.Height = prop.Int(400)
        },
    })

    // Since track can represent audio as well, we need to cast it to
    // *mediadevices.VideoTrack to get video specific functionalities
    track := stream.GetVideoTracks()[0]
    videoTrack := track.(*mediadevices.VideoTrack)
    defer videoTrack.Close()

    // Create a new video reader to get the decoded frames. Release is used
    // to return the buffer that holds the frame back to the source so that
    // the buffer can be reused for the next frames.
    videoReader := videoTrack.NewReader(false)
    frame, release, _ := videoReader.Read()
    defer release()

    // Since frame is the standard image.Image, it's compatible with the Go standard
    // library. For example, capture the first frame and store it as a jpeg image.
    output, _ := os.Create("frame.jpg")
    jpeg.Encode(output, frame, nil)
}
```
## More Examples

* [Webrtc](/examples/webrtc) - Use WebRTC to create a realtime peer-to-peer video call
* [Face Detection](/examples/facedetection) - Use a machine learning algorithm to detect faces in a camera stream
* [RTP Stream](examples/rtp) - Capture camera stream, encode it in H264/VP8/VP9, and send it to an RTP server
* [HTTP Broadcast](/examples/http) - Broadcast camera stream through HTTP with MJPEG
* [Archive](/examples/archive) - Archive H264 encoded video stream from a camera

## Available Media Inputs

| Input | Linux | Mac | Windows |
| :--------: | :---: | :-: | :-----: |
| Camera | ✔️ | ✔️ | ✔️ |
| Microphone | ✔️ | ✔️ | ✔️ |
| Screen | ✔️ | ✖️ | ✖️ |

By default, there's no media input registered. This decision was made to let you pay only for what you need. Therefore, you need to import the associated packages for the media inputs. For example, if you want to use a camera, you need to import the camera package as a side effect:

```go
import (
    ...
    _ "github.com/pion/mediadevices/pkg/driver/camera"
)
```

## Available Codecs

In order to encode your video/audio, `mediadevices` needs to know which codecs you want to use and their parameters. To do this, you need to import the associated packages for the codecs and add them to the codec selector that you'll pass to `GetUserMedia`:

```go
package main

import (
    "github.com/pion/mediadevices"
    "github.com/pion/mediadevices/pkg/codec/x264" // This is required to use H264 video encoder

    _ "github.com/pion/mediadevices/pkg/driver/camera" // This is required to register camera adapter
)

func main() {
    // configure codec specific parameters
    x264Params, _ := x264.NewParams()
    x264Params.Preset = x264.PresetMedium
    x264Params.BitRate = 1_000_000 // 1mbps

    codecSelector := mediadevices.NewCodecSelector(
        mediadevices.WithVideoEncoders(&x264Params),
    )

    mediaStream, _ := mediadevices.GetUserMedia(mediadevices.MediaStreamConstraints{
        Video: func(c *mediadevices.MediaTrackConstraints) {},
        Codec: codecSelector, // let GetUserMedia know available codecs
    })
}
```

Since `mediadevices` doesn't implement the video/audio codecs, it needs to call the codec libraries from the system through cgo. Therefore, you're required to install the codec libraries before you can use them in `mediadevices`. The next section lists the available codecs, where the packages are defined (documentation linked), and installation instructions.

Note: we do not provide recommendations on choosing one codec or another as it is very complex and can be subjective.

### Video Codecs

#### x264
A free software library and application for encoding video streams into the H.264/MPEG-4 AVC compression format.

* Package: [github.com/pion/mediadevices/pkg/codec/x264](https://pkg.go.dev/github.com/pion/mediadevices/pkg/codec/x264)
* Installation:
  * Mac: `brew install x264`
  * Ubuntu: `apt install libx264-dev`

#### mmal
A framework to enable H264 hardware encoding for Raspberry Pi or boards that use VideoCore GPUs.

* Package: [github.com/pion/mediadevices/pkg/codec/mmal](https://pkg.go.dev/github.com/pion/mediadevices/pkg/codec/mmal)
* Installation:
  * Raspbian: `export PKG_CONFIG_PATH=/opt/vc/lib/pkgconfig`

#### openh264
A codec library which supports H.264 encoding and decoding. It is suitable for use in real time applications.

* Package: [github.com/pion/mediadevices/pkg/codec/openh264](https://pkg.go.dev/github.com/pion/mediadevices/pkg/codec/openh264)
* Installation: no installation needed, included as a static binary

#### vpx
A free software video codec library from Google and the Alliance for Open Media that implements the VP8/VP9 video coding formats.

* Package: [github.com/pion/mediadevices/pkg/codec/vpx](https://pkg.go.dev/github.com/pion/mediadevices/pkg/codec/vpx)
* Installation:
  * Mac: `brew install libvpx`
  * Ubuntu: `apt install libvpx-dev`

#### vaapi
An open source API that allows applications such as VLC media player or GStreamer to use hardware video acceleration capabilities (currently supports VP8/VP9).

* Package: [github.com/pion/mediadevices/pkg/codec/vaapi](https://pkg.go.dev/github.com/pion/mediadevices/pkg/codec/vaapi)
* Installation:
  * Ubuntu: `apt install libva-dev`

### Audio Codecs

#### opus
A totally open, royalty-free, highly versatile audio codec.

* Package: [github.com/pion/mediadevices/pkg/codec/opus](https://pkg.go.dev/github.com/pion/mediadevices/pkg/codec/opus)
* Installation:
  * Mac: `brew install opus`
  * Ubuntu: `apt install libopus-dev`
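The audio path mirrors the video path shown under Available Codecs: build encoder parameters, wrap them with `WithAudioEncoders`, and hand the selector to `GetUserMedia`. A minimal sketch follows; it assumes the opus package exposes a `NewParams` constructor analogous to `x264.NewParams`, that `MediaStreamConstraints` accepts an `Audio` constraint function alongside `Video`, and that a microphone driver package exists for side-effect registration. None of those names are confirmed by this changeset.

```go
package main

import (
    "github.com/pion/mediadevices"
    "github.com/pion/mediadevices/pkg/codec/opus" // imported the same way in examples/webrtc
    "github.com/pion/mediadevices/pkg/codec/x264"

    _ "github.com/pion/mediadevices/pkg/driver/camera" // register camera adapter
    // _ "github.com/pion/mediadevices/pkg/driver/microphone" // assumed audio driver package
)

func main() {
    // Video encoder parameters, as in the snippet above.
    x264Params, _ := x264.NewParams()
    x264Params.BitRate = 1_000_000 // 1mbps

    // Audio encoder parameters; opus.NewParams is assumed to mirror x264.NewParams.
    opusParams, _ := opus.NewParams()

    // Register both encoders with a single selector.
    codecSelector := mediadevices.NewCodecSelector(
        mediadevices.WithVideoEncoders(&x264Params),
        mediadevices.WithAudioEncoders(&opusParams),
    )

    // Audio is assumed to be requested the same way as Video.
    mediadevices.GetUserMedia(mediadevices.MediaStreamConstraints{
        Video: func(c *mediadevices.MediaTrackConstraints) {},
        Audio: func(c *mediadevices.MediaTrackConstraints) {},
        Codec: codecSelector,
    })
}
```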
## Benchmark

As of Nov 4, 2020, with Go 1.14 on a Raspberry Pi 3, `mediadevices` can produce, encode, send across the network, and decode video at **720p, 30 fps with < 500 ms latency**.

The test was performed by capturing a camera stream, decoding the raw frames, encoding the video stream with mmal, and sending the stream through WebRTC.

## FAQ

### Failed to find the best driver that fits the constraints

`mediadevices` provides automated driver discovery through `GetUserMedia` and `GetDisplayMedia`. The driver discovery algorithm works roughly as follows:

1. Open all registered drivers
2. Get all properties (a property describes what a driver is capable of, e.g. resolution, frame rate, etc.) from the opened drivers
3. Find the best property that meets the criteria

So, when `mediadevices` returns a `failed to find the best driver that fits the constraints` error, one of the following conditions has likely occurred:
* The driver was not imported as a side effect in your program, e.g. `import _ github.com/pion/mediadevices/pkg/driver/camera` (see the sketch below)
* Your constraints are so strict that no driver can fulfill them. In this case, you can turn up the debug level with `export PION_LOG_DEBUG=all` to see which constraint was too strict and tune it.
* Your driver is not supported/implemented. In this case, you can either let us know (file an issue) and wait for the maintainers to implement it, or implement it yourself and register it through `RegisterDriverAdapter`.
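A quick way to rule out the first two causes is to swap the real camera driver for the dummy `videotest` driver mentioned in the comments of the earlier snippets. The sketch below is an assumption-level diagnostic, not part of the library: if `GetUserMedia` succeeds with the dummy driver, the constraints are satisfiable and the problem is the real device or its adapter.

```go
package main

import (
    "fmt"

    "github.com/pion/mediadevices"

    // Swap the real adapter for the dummy one from the comments above.
    // _ "github.com/pion/mediadevices/pkg/driver/camera"
    _ "github.com/pion/mediadevices/pkg/driver/videotest"
)

func main() {
    _, err := mediadevices.GetUserMedia(mediadevices.MediaStreamConstraints{
        Video: func(c *mediadevices.MediaTrackConstraints) {},
    })
    if err != nil {
        // With the dummy driver registered, a remaining error points at the
        // constraints rather than at a missing driver import.
        fmt.Println(err)
        return
    }
    fmt.Println("dummy video driver satisfied the constraints")
}
```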
### Failed to find vpx/x264/mmal/opus codecs

Since `mediadevices` uses cgo to access video/audio codecs, it needs to find these libraries on the system. To accomplish this, [pkg-config](https://www.freedesktop.org/wiki/Software/pkg-config/) is used for library discovery.

If you see the following error message at compile time:
```
# pkg-config --cflags -- vpx
Package vpx was not found in the pkg-config search path.
Perhaps you should add the directory containing `vpx.pc'
to the PKG_CONFIG_PATH environment variable
No package 'vpx' found
pkg-config: exit status 1
```

There are 2 common problems:

* The required codec library is not installed (vpx in this example). In this case, please refer to the [available codecs](#available-codecs).
* pkg-config fails to find the `.pc` file for this codec ([reference](https://people.freedesktop.org/~dbn/pkg-config-guide.html#using)). In this case, you need to find where the codec library's `.pc` is stored and let pkg-config know with: `export PKG_CONFIG_PATH=/path/to/directory`.

## Community
Pion has an active community on [Slack](https://pion.ly/slack).

Follow the [Pion Twitter](https://twitter.com/_pion) for project updates and important WebRTC news.

We are always looking to support **your projects**. Please reach out if you have something to build!
If you need commercial support or don't want to use public methods you can contact us at [team@pion.ly](mailto:team@pion.ly)

## Contributing
Check out the **[contributing wiki](https://github.com/pion/webrtc/wiki/Contributing)** to join the group of amazing people making this project possible:

- [Lukas Herman](https://github.com/lherman-cs) - _Original Author_
* [Lukas Herman](https://github.com/lherman-cs) - _Original Author_
* [Atsushi Watanabe](https://github.com/at-wat) - _VP8, Screencast, etc._

## Project Status

[](https://starchart.cc/pion/mediadevices)

## References

- https://developer.mozilla.org/en-US/docs/Web/Media/Formats/WebRTC_codecs
- https://tools.ietf.org/html/rfc7742

## License
MIT License - see [LICENSE](LICENSE) for full text
135  codec.go  Normal file

@@ -0,0 +1,135 @@
package mediadevices

import (
"errors"
"fmt"
"strings"

"github.com/pion/mediadevices/pkg/codec"
"github.com/pion/mediadevices/pkg/io/audio"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
"github.com/pion/webrtc/v2"
)

// CodecSelector is a container of video and audio encoder builders, which later will be used
// for codec matching.
type CodecSelector struct {
videoEncoders []codec.VideoEncoderBuilder
audioEncoders []codec.AudioEncoderBuilder
}

// CodecSelectorOption is a type for specifying CodecSelector options
type CodecSelectorOption func(*CodecSelector)

// WithVideoEncoders replaces the current video codecs with the listed encoders
func WithVideoEncoders(encoders ...codec.VideoEncoderBuilder) CodecSelectorOption {
return func(t *CodecSelector) {
t.videoEncoders = encoders
}
}

// WithAudioEncoders replaces the current audio codecs with the listed encoders
func WithAudioEncoders(encoders ...codec.AudioEncoderBuilder) CodecSelectorOption {
return func(t *CodecSelector) {
t.audioEncoders = encoders
}
}

// NewCodecSelector constructs CodecSelector with given variadic options
func NewCodecSelector(opts ...CodecSelectorOption) *CodecSelector {
var track CodecSelector

for _, opt := range opts {
opt(&track)
}

return &track
}

// Populate lets the webrtc engine be aware of supported codecs that are contained in CodecSelector
func (selector *CodecSelector) Populate(setting *webrtc.MediaEngine) {
for _, encoder := range selector.videoEncoders {
setting.RegisterCodec(encoder.RTPCodec().RTPCodec)
}

for _, encoder := range selector.audioEncoders {
setting.RegisterCodec(encoder.RTPCodec().RTPCodec)
}
}

func (selector *CodecSelector) selectVideoCodecByNames(reader video.Reader, inputProp prop.Media, codecNames ...string) (codec.ReadCloser, *codec.RTPCodec, error) {
var selectedEncoder codec.VideoEncoderBuilder
var encodedReader codec.ReadCloser
var errReasons []string
var err error

outer:
for _, wantCodec := range codecNames {
for _, encoder := range selector.videoEncoders {
if encoder.RTPCodec().Name == wantCodec {
encodedReader, err = encoder.BuildVideoEncoder(reader, inputProp)
if err == nil {
selectedEncoder = encoder
break outer
}
}

errReasons = append(errReasons, fmt.Sprintf("%s: %s", encoder.RTPCodec().Name, err))
}
}

if selectedEncoder == nil {
return nil, nil, errors.New(strings.Join(errReasons, "\n\n"))
}

return encodedReader, selectedEncoder.RTPCodec(), nil
}

func (selector *CodecSelector) selectVideoCodec(reader video.Reader, inputProp prop.Media, codecs ...*webrtc.RTPCodec) (codec.ReadCloser, *codec.RTPCodec, error) {
var codecNames []string

for _, codec := range codecs {
codecNames = append(codecNames, codec.Name)
}

return selector.selectVideoCodecByNames(reader, inputProp, codecNames...)
}

func (selector *CodecSelector) selectAudioCodecByNames(reader audio.Reader, inputProp prop.Media, codecNames ...string) (codec.ReadCloser, *codec.RTPCodec, error) {
var selectedEncoder codec.AudioEncoderBuilder
var encodedReader codec.ReadCloser
var errReasons []string
var err error

outer:
for _, wantCodec := range codecNames {
for _, encoder := range selector.audioEncoders {
if encoder.RTPCodec().Name == wantCodec {
encodedReader, err = encoder.BuildAudioEncoder(reader, inputProp)
if err == nil {
selectedEncoder = encoder
break outer
}
}

errReasons = append(errReasons, fmt.Sprintf("%s: %s", encoder.RTPCodec().Name, err))
}
}

if selectedEncoder == nil {
return nil, nil, errors.New(strings.Join(errReasons, "\n\n"))
}

return encodedReader, selectedEncoder.RTPCodec(), nil
}

func (selector *CodecSelector) selectAudioCodec(reader audio.Reader, inputProp prop.Media, codecs ...*webrtc.RTPCodec) (codec.ReadCloser, *codec.RTPCodec, error) {
var codecNames []string

for _, codec := range codecs {
codecNames = append(codecNames, codec.Name)
}

return selector.selectAudioCodecByNames(reader, inputProp, codecNames...)
}
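`Populate` is what ties a `CodecSelector` to pion/webrtc: it registers each encoder's RTP codec on a `webrtc.MediaEngine` so SDP negotiation can offer them. The sketch below shows one plausible way to wire it up, reusing the `MediaEngine`/`NewAPI`/`WithMediaEngine` calls that appear in the webrtc/v2 examples elsewhere in this changeset; the exact wiring used by the project may differ.

```go
package main

import (
    "github.com/pion/mediadevices"
    "github.com/pion/mediadevices/pkg/codec/x264"
    "github.com/pion/webrtc/v2"

    _ "github.com/pion/mediadevices/pkg/driver/camera" // register camera adapter
)

func main() {
    x264Params, err := x264.NewParams()
    if err != nil {
        panic(err)
    }

    codecSelector := mediadevices.NewCodecSelector(
        mediadevices.WithVideoEncoders(&x264Params),
    )

    // Let the webrtc engine know about the selector's codecs, then build the API
    // from that engine, as the webrtc/v2 examples in this diff do.
    mediaEngine := webrtc.MediaEngine{}
    codecSelector.Populate(&mediaEngine)
    api := webrtc.NewAPI(webrtc.WithMediaEngine(mediaEngine))

    peerConnection, err := api.NewPeerConnection(webrtc.Configuration{})
    if err != nil {
        panic(err)
    }
    _ = peerConnection // tracks from GetUserMedia would be added here
}
```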
10  codecov.yml  Normal file

@@ -0,0 +1,10 @@
coverage:
status:
project:
default:
# Allow decreasing 2% of total coverage to avoid noise.
threshold: 2%
patch: off

ignore:
- "examples/*"
36  examples/archive/README.md  Normal file

@@ -0,0 +1,36 @@
## Instructions

### Install required codecs

In this example, we'll be using x264 as our video codec. Therefore, we need to make sure that this codec is installed on our system.

Installation steps:

* [x264](https://github.com/pion/mediadevices#x264)

### Download archive example

```
git clone https://github.com/pion/mediadevices.git
```

### Run archive example

Run `cd mediadevices/examples/archive && go build && ./archive recorded.h264`

### Playback recorded video

Install GStreamer and run:
```
gst-launch-1.0 playbin uri=file://${PWD}/recorded.h264
```

Or run VLC media player:
```
vlc recorded.h264
```

A video should start playing in your GStreamer or VLC window.

Congrats, you have used pion-MediaDevices! Now start building something cool
BIN  examples/archive/archive  Executable file

Binary file not shown.
82  examples/archive/main.go  Normal file

@@ -0,0 +1,82 @@
package main

import (
"fmt"
"image"
"io"
"os"
"os/signal"
"syscall"

"github.com/pion/mediadevices"
"github.com/pion/mediadevices/pkg/codec/x264" // This is required to use H264 video encoder
_ "github.com/pion/mediadevices/pkg/driver/camera" // This is required to register camera adapter
"github.com/pion/mediadevices/pkg/frame"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
)

func must(err error) {
if err != nil {
panic(err)
}
}

func main() {
if len(os.Args) != 2 {
fmt.Printf("usage: %s <path/to/file.h264>\n", os.Args[0])
return
}
dest := os.Args[1]

sigs := make(chan os.Signal, 1)
signal.Notify(sigs, syscall.SIGINT)

x264Params, err := x264.NewParams()
must(err)
x264Params.Preset = x264.PresetMedium
x264Params.BitRate = 1_000_000 // 1mbps

codecSelector := mediadevices.NewCodecSelector(
mediadevices.WithVideoEncoders(&x264Params),
)

mediaStream, err := mediadevices.GetUserMedia(mediadevices.MediaStreamConstraints{
Video: func(c *mediadevices.MediaTrackConstraints) {
c.FrameFormat = prop.FrameFormat(frame.FormatYUY2)
c.Width = prop.Int(640)
c.Height = prop.Int(480)
},
Codec: codecSelector,
})
must(err)

videoTrack := mediaStream.GetVideoTracks()[0].(*mediadevices.VideoTrack)
defer videoTrack.Close()

videoTrack.Transform(video.TransformFunc(func(r video.Reader) video.Reader {
return video.ReaderFunc(func() (img image.Image, release func(), err error) {
// we send io.EOF signal to the encoder reader to stop reading. Therefore, io.Copy
// will finish its execution and the program will finish
select {
case <-sigs:
return nil, func() {}, io.EOF
default:
}

return r.Read()
})
}))

reader, err := videoTrack.NewEncodedReader(x264Params.RTPCodec().Name)
must(err)
defer reader.Close()

out, err := os.Create(dest)
must(err)

fmt.Println("Recording... Press Ctrl+c to stop")
_, err = io.Copy(out, reader)
must(err)
fmt.Println("Your video has been recorded to", dest)
}
@@ -1,29 +1,15 @@
## Instructions

### Download facedetection
### Download facedetection example

```
go get github.com/pion/mediadevices/examples/facedetection
git clone https://github.com/pion/mediadevices.git
```

### Open example page
### Compile and Run facedetection

[jsfiddle.net](https://jsfiddle.net/gh/get/library/pure/pion/mediadevices/tree/master/examples/internal/jsfiddle/video) you should see two text-areas and a 'Start Session' button
Run `cd mediadevices/examples/facedetection && go build && ./facedetection`

### Run facedetection with your browsers SessionDescription as stdin
You should see log output whenever a face is detected.

In the jsfiddle the top textarea is your browser, copy that and:

#### Linux

Run `echo $BROWSER_SDP | facedetection`

### Input facedetection's SessionDescription into your browser

Copy the text that `facedetection` just emitted and copy into second text area

### Hit 'Start Session' in jsfiddle, enjoy your video!

A video should start playing in your browser above the input boxes, and will continue playing until you close the application.

Congrats, you have used pion-WebRTC! Now start building something cool
Congrats, you have used pion-MediaDevices! Now start building something cool
@@ -1,118 +0,0 @@
package main

import (
"image"
"image/color"
"image/draw"
"io/ioutil"
"log"

"github.com/disintegration/imaging"
pigo "github.com/esimov/pigo/core"
)

var (
cascade []byte
err error
classifier *pigo.Pigo
)

func imgToGrayscale(img image.Image) []uint8 {
bounds := img.Bounds()
flatten := bounds.Dy() * bounds.Dx()
grayImg := make([]uint8, flatten)

i := 0
for y := bounds.Min.Y; y < bounds.Max.Y; y++ {
for x := bounds.Min.X; x < bounds.Max.X; x++ {
pix := img.At(x, y)
grayPix := color.GrayModel.Convert(pix).(color.Gray)
grayImg[i] = grayPix.Y
i++
}
}
return grayImg
}

// clusterDetection runs Pigo face detector core methods
// and returns a cluster with the detected faces coordinates.
func clusterDetection(img image.Image) []pigo.Detection {
grayscale := imgToGrayscale(img)
bounds := img.Bounds()
cParams := pigo.CascadeParams{
MinSize: 100,
MaxSize: 600,
ShiftFactor: 0.15,
ScaleFactor: 1.1,
ImageParams: pigo.ImageParams{
Pixels: grayscale,
Rows: bounds.Dy(),
Cols: bounds.Dx(),
Dim: bounds.Dx(),
},
}

if len(cascade) == 0 {
cascade, err = ioutil.ReadFile("facefinder")
if err != nil {
log.Fatalf("Error reading the cascade file: %s", err)
}
p := pigo.NewPigo()

// Unpack the binary file. This will return the number of cascade trees,
// the tree depth, the threshold and the prediction from tree's leaf nodes.
classifier, err = p.Unpack(cascade)
if err != nil {
log.Fatalf("Error unpacking the cascade file: %s", err)
}
}

// Run the classifier over the obtained leaf nodes and return the detection results.
// The result contains quadruplets representing the row, column, scale and detection score.
dets := classifier.RunCascade(cParams, 0.0)

// Calculate the intersection over union (IoU) of two clusters.
dets = classifier.ClusterDetections(dets, 0)

return dets
}

func drawCircle(img draw.Image, x0, y0, r int, c color.Color) {
x, y, dx, dy := r-1, 0, 1, 1
err := dx - (r * 2)

for x > y {
img.Set(x0+x, y0+y, c)
img.Set(x0+y, y0+x, c)
img.Set(x0-y, y0+x, c)
img.Set(x0-x, y0+y, c)
img.Set(x0-x, y0-y, c)
img.Set(x0-y, y0-x, c)
img.Set(x0+y, y0-x, c)
img.Set(x0+x, y0-y, c)

if err <= 0 {
y++
err += dy
dy += 2
}
if err > 0 {
x--
dx += 2
err += dx - (r * 2)
}
}
}

func markFaces(img image.Image) image.Image {
nrgba := imaging.Clone(img)
dets := clusterDetection(img)
for _, det := range dets {
if det.Q < 5.0 {
continue
}

drawCircle(nrgba, det.Col, det.Row, det.Scale/2, color.Black)
}
return nrgba
}
@@ -1,119 +1,107 @@
package main

import (
"fmt"
"image"
"io/ioutil"
"log"
"time"

pigo "github.com/esimov/pigo/core"
"github.com/pion/mediadevices"
"github.com/pion/mediadevices/examples/internal/signal"
"github.com/pion/mediadevices/pkg/codec"
"github.com/pion/mediadevices/pkg/codec/vpx" // This is required to use VP8/VP9 video encoder
_ "github.com/pion/mediadevices/pkg/driver/camera" // This is required to register camera adapter
"github.com/pion/mediadevices/pkg/frame"
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/mediadevices/pkg/prop"
"github.com/pion/webrtc/v2"
)

func markFacesTransformer(r video.Reader) video.Reader {
return video.ReaderFunc(func() (img image.Image, err error) {
img, err = r.Read()
if err != nil {
return
}
const (
confidenceLevel = 5.0
)

img = markFaces(img)
return
})
var (
cascade []byte
classifier *pigo.Pigo
)

func must(err error) {
if err != nil {
panic(err)
}
}

func detectFace(frame *image.YCbCr) bool {
bounds := frame.Bounds()
cascadeParams := pigo.CascadeParams{
MinSize: 100,
MaxSize: 600,
ShiftFactor: 0.15,
ScaleFactor: 1.1,
ImageParams: pigo.ImageParams{
Pixels: frame.Y, // Y in YCbCr should be enough to detect faces
Rows: bounds.Dy(),
Cols: bounds.Dx(),
Dim: bounds.Dx(),
},
}

// Run the classifier over the obtained leaf nodes and return the detection results.
// The result contains quadruplets representing the row, column, scale and detection score.
dets := classifier.RunCascade(cascadeParams, 0.0)

// Calculate the intersection over union (IoU) of two clusters.
dets = classifier.ClusterDetections(dets, 0)

for _, det := range dets {
if det.Q >= confidenceLevel {
return true
}
}

return false
}

func main() {
config := webrtc.Configuration{
ICEServers: []webrtc.ICEServer{
{
URLs: []string{"stun:stun.l.google.com:19302"},
},
},
}

// Wait for the offer to be pasted
offer := webrtc.SessionDescription{}
signal.Decode(signal.MustReadStdin(), &offer)

// Create a new RTCPeerConnection
mediaEngine := webrtc.MediaEngine{}
if err := mediaEngine.PopulateFromSDP(offer); err != nil {
panic(err)
}
api := webrtc.NewAPI(webrtc.WithMediaEngine(mediaEngine))
peerConnection, err := api.NewPeerConnection(config)
// prepare face detector
var err error
cascade, err = ioutil.ReadFile("facefinder")
if err != nil {
panic(err)
log.Fatalf("Error reading the cascade file: %s", err)
}
p := pigo.NewPigo()

// Set the handler for ICE connection state
// This will notify you when the peer has connected/disconnected
peerConnection.OnICEConnectionStateChange(func(connectionState webrtc.ICEConnectionState) {
fmt.Printf("Connection State has changed %s \n", connectionState.String())
})

md := mediadevices.NewMediaDevices(peerConnection)

vp8Params, err := vpx.NewVP8Params()
// Unpack the binary file. This will return the number of cascade trees,
// the tree depth, the threshold and the prediction from tree's leaf nodes.
classifier, err = p.Unpack(cascade)
if err != nil {
panic(err)
log.Fatalf("Error unpacking the cascade file: %s", err)
}
vp8Params.BitRate = 100000 // 100kbps

s, err := md.GetUserMedia(mediadevices.MediaStreamConstraints{
mediaStream, err := mediadevices.GetUserMedia(mediadevices.MediaStreamConstraints{
Video: func(c *mediadevices.MediaTrackConstraints) {
c.FrameFormat = prop.FrameFormatExact(frame.FormatI420) // most of the encoder accepts I420
c.Enabled = true
c.FrameFormat = prop.FrameFormatExact(frame.FormatUYVY)
c.Width = prop.Int(640)
c.Height = prop.Int(480)
c.VideoTransform = markFacesTransformer
c.VideoEncoderBuilders = []codec.VideoEncoderBuilder{&vp8Params}
},
})
if err != nil {
panic(err)
}
must(err)

for _, tracker := range s.GetTracks() {
t := tracker.Track()
tracker.OnEnded(func(err error) {
fmt.Printf("Track (ID: %s, Label: %s) ended with error: %v\n",
t.ID(), t.Label(), err)
})
_, err = peerConnection.AddTransceiverFromTrack(t,
webrtc.RtpTransceiverInit{
Direction: webrtc.RTPTransceiverDirectionSendonly,
},
)
if err != nil {
panic(err)
// since we're trying to access the raw data, we need to cast Track to its real type, *mediadevices.VideoTrack
videoTrack := mediaStream.GetVideoTracks()[0].(*mediadevices.VideoTrack)
defer videoTrack.Close()

videoReader := videoTrack.NewReader(false)
// To save resources, we can simply use 4 fps to detect faces.
ticker := time.NewTicker(time.Millisecond * 250)
defer ticker.Stop()

for range ticker.C {
frame, release, err := videoReader.Read()
must(err)

// Since we asked the frame format to be exactly YUY2 in GetUserMedia, we can guarantee that it must be YCbCr
if detectFace(frame.(*image.YCbCr)) {
log.Println("Detect a face")
}
}

// Set the remote SessionDescription
err = peerConnection.SetRemoteDescription(offer)
if err != nil {
panic(err)
release()
}

// Create an answer
answer, err := peerConnection.CreateAnswer(nil)
if err != nil {
panic(err)
}

// Sets the LocalDescription, and starts our UDP listeners
err = peerConnection.SetLocalDescription(answer)
if err != nil {
panic(err)
}

// Output the answer in base64 so we can paste it in browser
fmt.Println(signal.Encode(answer))
select {}
}
@@ -2,8 +2,8 @@ module github.com/pion/mediadevices/examples

go 1.14

replace github.com/pion/mediadevices => ../

// Please don't commit require entries of examples.
// `git checkout master examples/go.mod` to revert this file.
require github.com/pion/mediadevices v0.0.0-00010101000000-000000000000
require github.com/pion/mediadevices v0.0.0

replace github.com/pion/mediadevices v0.0.0 => ../
19  examples/http/README.md  Normal file

@@ -0,0 +1,19 @@
## Instructions

### Download http example

```
git clone https://github.com/pion/mediadevices.git
```

### Compile and Run HTTP server

Run `cd mediadevices/examples/http && go build && ./http :1313`

### Access the camera stream from the browser

Go to "http://localhost:1313"

Congrats, you have used pion-MediaDevices! Now start building something cool
85  examples/http/main.go  Normal file

@@ -0,0 +1,85 @@
// This is an example of using mediadevices to broadcast your camera through http.
// The example doesn't aim to be performant, but rather it strives to be simple.
package main

import (
"bytes"
"fmt"
"image/jpeg"
"io"
"log"
"mime/multipart"
"net/http"
"net/textproto"
"os"

"github.com/pion/mediadevices"
"github.com/pion/mediadevices/pkg/prop"

// Note: If you don't have a camera or microphone or your adapters are not supported,
// you can always swap your adapters with our dummy adapters below.
// _ "github.com/pion/mediadevices/pkg/driver/videotest"
_ "github.com/pion/mediadevices/pkg/driver/camera" // This is required to register camera adapter
)

func must(err error) {
if err != nil {
panic(err)
}
}

func main() {
if len(os.Args) != 2 {
fmt.Printf("usage: %s host:port\n", os.Args[0])
return
}
dest := os.Args[1]

mediaStream, err := mediadevices.GetUserMedia(mediadevices.MediaStreamConstraints{
Video: func(constraint *mediadevices.MediaTrackConstraints) {
constraint.Width = prop.Int(600)
constraint.Height = prop.Int(400)
},
})
must(err)

track := mediaStream.GetVideoTracks()[0]
videoTrack := track.(*mediadevices.VideoTrack)
defer videoTrack.Close()

http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
var buf bytes.Buffer
videoReader := videoTrack.NewReader(false)
mimeWriter := multipart.NewWriter(w)

contentType := fmt.Sprintf("multipart/x-mixed-replace;boundary=%s", mimeWriter.Boundary())
w.Header().Add("Content-Type", contentType)

partHeader := make(textproto.MIMEHeader)
partHeader.Add("Content-Type", "image/jpeg")

for {
frame, release, err := videoReader.Read()
if err == io.EOF {
return
}
must(err)

err = jpeg.Encode(&buf, frame, nil)
// Since we're done with img, we need to release img so that the original owner can reuse
// this memory.
release()
must(err)

partWriter, err := mimeWriter.CreatePart(partHeader)
must(err)

_, err = partWriter.Write(buf.Bytes())
buf.Reset()
must(err)
}
})

fmt.Printf("listening on %s\n", dest)
log.Println(http.ListenAndServe(dest, nil))
}
@@ -1,29 +0,0 @@
## Instructions

### Download rtp-send example

```
go get github.com/pion/mediadevices/examples/rtp-send
```

### Listen RTP

Install GStreamer and run:
```
gst-launch-1.0 udpsrc port=5000 caps=application/x-rtp,encode-name=VP8 \
! rtpvp8depay ! vp8dec ! videoconvert ! autovideosink
```

Or run VLC media plyer:
```
vlc ./vp8.sdp
```

### Run rtp-send

Run `rtp-send localhost:5000`

A video should start playing in your GStreamer or VLC window.
It's not WebRTC, but pure RTP.

Congrats, you have used pion-MediaDevices! Now start building something cool
@@ -1,120 +0,0 @@
package main

import (
"fmt"
"net"
"os"

"github.com/pion/mediadevices"
"github.com/pion/mediadevices/pkg/codec"
"github.com/pion/mediadevices/pkg/codec/vpx" // This is required to use VP8/VP9 video encoder
_ "github.com/pion/mediadevices/pkg/driver/camera" // This is required to register camera adapter
"github.com/pion/mediadevices/pkg/frame"
"github.com/pion/mediadevices/pkg/prop"
"github.com/pion/rtp"
"github.com/pion/webrtc/v2"
"github.com/pion/webrtc/v2/pkg/media"
)

const (
mtu = 1000
)

func main() {
if len(os.Args) != 2 {
fmt.Printf("usage: %s host:port\n", os.Args[0])
return
}

md := mediadevices.NewMediaDevicesFromCodecs(
map[webrtc.RTPCodecType][]*webrtc.RTPCodec{
webrtc.RTPCodecTypeVideo: []*webrtc.RTPCodec{
webrtc.NewRTPVP8Codec(100, 90000),
},
},
mediadevices.WithTrackGenerator(
func(_ uint8, _ uint32, id, _ string, codec *webrtc.RTPCodec) (
mediadevices.LocalTrack, error,
) {
return newTrack(codec, id, os.Args[1]), nil
},
),
)

vp8Params, err := vpx.NewVP8Params()
if err != nil {
panic(err)
}
vp8Params.BitRate = 100000 // 100kbps

_, err = md.GetUserMedia(mediadevices.MediaStreamConstraints{
Video: func(c *mediadevices.MediaTrackConstraints) {
c.FrameFormat = prop.FrameFormat(frame.FormatYUY2)
c.Enabled = true
c.Width = prop.Int(640)
c.Height = prop.Int(480)
c.VideoEncoderBuilders = []codec.VideoEncoderBuilder{&vp8Params}
},
})
if err != nil {
panic(err)
}

select {}
}

type track struct {
codec *webrtc.RTPCodec
packetizer rtp.Packetizer
id string
conn net.Conn
}

func newTrack(codec *webrtc.RTPCodec, id, dest string) *track {
addr, err := net.ResolveUDPAddr("udp", dest)
if err != nil {
panic(err)
}
conn, err := net.DialUDP("udp", nil, addr)
if err != nil {
panic(err)
}
return &track{
codec: codec,
packetizer: rtp.NewPacketizer(
mtu,
codec.PayloadType,
1,
codec.Payloader,
rtp.NewRandomSequencer(),
codec.ClockRate,
),
id: id,
conn: conn,
}
}

func (t *track) WriteSample(s media.Sample) error {
buf := make([]byte, mtu)
pkts := t.packetizer.Packetize(s.Data, s.Samples)
for _, p := range pkts {
n, err := p.MarshalTo(buf)
if err != nil {
panic(err)
}
_, _ = t.conn.Write(buf[:n])
}
return nil
}

func (t *track) Codec() *webrtc.RTPCodec {
return t.codec
}

func (t *track) ID() string {
return t.id
}

func (t *track) Kind() webrtc.RTPCodecType {
return t.codec.Type
}
39  examples/rtp/README.md  Normal file

@@ -0,0 +1,39 @@
## Instructions

### Install required codecs

In this example, we'll be using x264 and opus as our video and audio codecs. Therefore, we need to make sure that these codecs are installed on our system.

Installation steps:

* [x264](https://github.com/pion/mediadevices#x264)
* [opus](https://github.com/pion/mediadevices#opus)

### Download rtp example

```
go get github.com/pion/mediadevices/examples/rtp
```

### Listen RTP

Install GStreamer and run:
```
gst-launch-1.0 udpsrc port=5000 caps=application/x-rtp,encode-name=H264 \
! rtph264depay ! avdec_h264 ! videoconvert ! autovideosink
```

Or run VLC media player:
```
vlc ./h264.sdp
```

### Run rtp

Run `rtp localhost:5000`

A video should start playing in your GStreamer or VLC window.
It's not WebRTC, but pure RTP.

Congrats, you have used pion-MediaDevices! Now start building something cool
@@ -6,4 +6,4 @@ c=IN IP4 0.0.0.0
t=0 0
a=recvonly
m=video 5000 RTP/AVP 100
a=rtpmap:100 VP8/90000
a=rtpmap:100 H264/90000
77  examples/rtp/main.go  Normal file

@@ -0,0 +1,77 @@
package main

import (
"fmt"
"net"
"os"

"github.com/pion/mediadevices"
"github.com/pion/mediadevices/pkg/codec/x264" // This is required to use H264 video encoder
_ "github.com/pion/mediadevices/pkg/driver/camera" // This is required to register camera adapter
"github.com/pion/mediadevices/pkg/frame"
"github.com/pion/mediadevices/pkg/prop"
)

const (
mtu = 1000
)

func must(err error) {
if err != nil {
panic(err)
}
}

func main() {
if len(os.Args) != 2 {
fmt.Printf("usage: %s host:port\n", os.Args[0])
return
}
dest := os.Args[1]

x264Params, err := x264.NewParams()
must(err)
x264Params.Preset = x264.PresetMedium
x264Params.BitRate = 1_000_000 // 1mbps

codecSelector := mediadevices.NewCodecSelector(
mediadevices.WithVideoEncoders(&x264Params),
)

mediaStream, err := mediadevices.GetUserMedia(mediadevices.MediaStreamConstraints{
Video: func(c *mediadevices.MediaTrackConstraints) {
c.FrameFormat = prop.FrameFormat(frame.FormatYUY2)
c.Width = prop.Int(640)
c.Height = prop.Int(480)
},
Codec: codecSelector,
})
must(err)

videoTrack := mediaStream.GetVideoTracks()[0]
defer videoTrack.Close()

rtpReader, err := videoTrack.NewRTPReader(x264Params.RTPCodec().Name, mtu)
must(err)

addr, err := net.ResolveUDPAddr("udp", dest)
must(err)
conn, err := net.DialUDP("udp", nil, addr)
must(err)

buff := make([]byte, mtu)
for {
pkts, release, err := rtpReader.Read()
must(err)

for _, pkt := range pkts {
n, err := pkt.MarshalTo(buff)
must(err)

_, err = conn.Write(buff[:n])
must(err)
}

release()
}
}
@@ -1,29 +0,0 @@
## Instructions

### Download screenshare

```
go get github.com/pion/mediadevices/examples/screenshare
```

### Open example page

[jsfiddle.net](https://jsfiddle.net/gh/get/library/pure/pion/mediadevices/tree/master/examples/internal/jsfiddle/audio-and-video) you should see two text-areas and a 'Start Session' button

### Run screenshare with your browsers SessionDescription as stdin

In the jsfiddle the top textarea is your browser, copy that and:

#### Linux

Run `echo $BROWSER_SDP | screenshare`

### Input screenshare's SessionDescription into your browser

Copy the text that `screenshare` just emitted and copy into second text area

### Hit 'Start Session' in jsfiddle, enjoy your video!

A video should start playing in your browser above the input boxes, and will continue playing until you close the application.

Congrats, you have used pion-WebRTC! Now start building something cool
@@ -1,101 +0,0 @@
package main

import (
"fmt"

"github.com/pion/mediadevices"
"github.com/pion/mediadevices/examples/internal/signal"
"github.com/pion/mediadevices/pkg/codec"
"github.com/pion/mediadevices/pkg/codec/vpx" // This is required to use VP8/VP9 video encoder
_ "github.com/pion/mediadevices/pkg/driver/screen" // This is required to register screen capture adapter
"github.com/pion/mediadevices/pkg/io/video"
"github.com/pion/webrtc/v2"
)

func main() {
config := webrtc.Configuration{
ICEServers: []webrtc.ICEServer{
{
URLs: []string{"stun:stun.l.google.com:19302"},
},
},
}

// Wait for the offer to be pasted
offer := webrtc.SessionDescription{}
signal.Decode(signal.MustReadStdin(), &offer)

// Create a new RTCPeerConnection
mediaEngine := webrtc.MediaEngine{}
if err := mediaEngine.PopulateFromSDP(offer); err != nil {
panic(err)
}
api := webrtc.NewAPI(webrtc.WithMediaEngine(mediaEngine))
peerConnection, err := api.NewPeerConnection(config)
if err != nil {
panic(err)
}

// Set the handler for ICE connection state
// This will notify you when the peer has connected/disconnected
peerConnection.OnICEConnectionStateChange(func(connectionState webrtc.ICEConnectionState) {
fmt.Printf("Connection State has changed %s \n", connectionState.String())
})

md := mediadevices.NewMediaDevices(peerConnection)

vp8Params, err := vpx.NewVP8Params()
if err != nil {
panic(err)
}
vp8Params.BitRate = 100000 // 100kbps

s, err := md.GetDisplayMedia(mediadevices.MediaStreamConstraints{
Video: func(c *mediadevices.MediaTrackConstraints) {
c.Enabled = true
c.VideoTransform = video.Scale(-1, 360, nil) // Resize to 360p
c.VideoEncoderBuilders = []codec.VideoEncoderBuilder{&vp8Params}
},
})
if err != nil {
panic(err)
}

for _, tracker := range s.GetTracks() {
t := tracker.Track()
tracker.OnEnded(func(err error) {
fmt.Printf("Track (ID: %s, Label: %s) ended with error: %v\n",
t.ID(), t.Label(), err)
})
_, err = peerConnection.AddTransceiverFromTrack(t,
webrtc.RtpTransceiverInit{
Direction: webrtc.RTPTransceiverDirectionSendonly,
},
)
if err != nil {
panic(err)
}
}

// Set the remote SessionDescription
err = peerConnection.SetRemoteDescription(offer)
if err != nil {
panic(err)
}

// Create an answer
answer, err := peerConnection.CreateAnswer(nil)
if err != nil {
panic(err)
}

// Sets the LocalDescription, and starts our UDP listeners
err = peerConnection.SetLocalDescription(answer)
if err != nil {
panic(err)
}

// Output the answer in base64 so we can paste it in browser
fmt.Println(signal.Encode(answer))
select {}
}
@@ -1,29 +0,0 @@
## Instructions

### Download gstreamer-send

```
go get github.com/pion/mediadevices/examples/simple
```

### Open example page

[jsfiddle.net](https://jsfiddle.net/gh/get/library/pure/pion/mediadevices/tree/master/examples/internal/jsfiddle/audio-and-video) you should see two text-areas and a 'Start Session' button

### Run simple with your browsers SessionDescription as stdin

In the jsfiddle the top textarea is your browser, copy that and:

#### Linux

Run `echo $BROWSER_SDP | simple`

### Input simple's SessionDescription into your browser

Copy the text that `simple` just emitted and copy into second text area

### Hit 'Start Session' in jsfiddle, enjoy your video!

A video should start playing in your browser above the input boxes, and will continue playing until you close the application.

Congrats, you have used pion-WebRTC! Now start building something cool
42 examples/webrtc/README.md Normal file
@@ -0,0 +1,42 @@
## Instructions

### Install required codecs

This example uses x264 and opus as the video and audio codecs, so make sure both are installed on your system.

Installation steps:

* [x264](https://github.com/pion/mediadevices#x264)
* [opus](https://github.com/pion/mediadevices#opus)

### Download webrtc example

```
git clone https://github.com/pion/mediadevices.git
```

#### Compile webrtc example

```
cd mediadevices/examples/webrtc && go build
```

### Open example page

Open [jsfiddle.net](https://jsfiddle.net/gh/get/library/pure/pion/mediadevices/tree/master/examples/internal/jsfiddle/audio-and-video); you should see two text-areas and a 'Start Session' button

### Run the webrtc example with your browser's SessionDescription as stdin

In the jsfiddle the top textarea is your browser's SessionDescription. Copy that and store it in an environment variable, `export SDP=<put_the_sdp_here>`

Run `echo $SDP | ./webrtc`

### Input webrtc's SessionDescription into your browser

Copy the text that `./webrtc` just emitted and paste it into the second text area

### Hit 'Start Session' in jsfiddle, enjoy your video!

A video should start playing in your browser above the input boxes, and will continue playing until you close the application.

Congrats, you have used pion-MediaDevices! Now start building something cool
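The main.go diff that follows moves this example from the per-track encoder builders to the new `CodecSelector` API. Condensed from those changes (signaling and error handling trimmed), the flow is roughly:

```
x264Params, err := x264.NewParams()
if err != nil {
	panic(err)
}
x264Params.BitRate = 500_000 // 500kbps

opusParams, err := opus.NewParams()
if err != nil {
	panic(err)
}

// Bundle the encoders once and share them between GetUserMedia and the MediaEngine.
codecSelector := mediadevices.NewCodecSelector(
	mediadevices.WithVideoEncoders(&x264Params),
	mediadevices.WithAudioEncoders(&opusParams),
)

mediaEngine := webrtc.MediaEngine{}
codecSelector.Populate(&mediaEngine)

s, err := mediadevices.GetUserMedia(mediadevices.MediaStreamConstraints{
	Video: func(c *mediadevices.MediaTrackConstraints) {
		c.FrameFormat = prop.FrameFormat(frame.FormatYUY2)
		c.Width = prop.Int(640)
		c.Height = prop.Int(480)
	},
	Audio: func(c *mediadevices.MediaTrackConstraints) {},
	Codec: codecSelector,
})
```

Each track returned from `s.GetTracks()` is then bound to the peer connection with `tracker.Bind(peerConnection)` before `AddTransceiverFromTrack`, as the remaining hunks show.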
@@ -5,19 +5,18 @@ import (
|
||||
|
||||
"github.com/pion/mediadevices"
|
||||
"github.com/pion/mediadevices/examples/internal/signal"
|
||||
"github.com/pion/mediadevices/pkg/codec"
|
||||
"github.com/pion/mediadevices/pkg/frame"
|
||||
"github.com/pion/mediadevices/pkg/prop"
|
||||
"github.com/pion/webrtc/v2"
|
||||
|
||||
// This is required to use opus audio encoder
|
||||
"github.com/pion/mediadevices/pkg/codec/opus"
|
||||
|
||||
// If you don't like vpx, you can also use x264 by importing as below
|
||||
// "github.com/pion/mediadevices/pkg/codec/x264" // This is required to use h264 video encoder
|
||||
// If you don't like x264, you can also use vpx by importing as below
|
||||
// "github.com/pion/mediadevices/pkg/codec/vpx" // This is required to use VP8/VP9 video encoder
|
||||
// or you can also use openh264 for alternative h264 implementation
|
||||
// "github.com/pion/mediadevices/pkg/codec/openh264"
|
||||
"github.com/pion/mediadevices/pkg/codec/vpx" // This is required to use VP8/VP9 video encoder
|
||||
// or if you use a raspberry pi like, you can use mmal for using its hardware encoder
|
||||
// "github.com/pion/mediadevices/pkg/codec/mmal"
|
||||
"github.com/pion/mediadevices/pkg/codec/opus" // This is required to use opus audio encoder
|
||||
"github.com/pion/mediadevices/pkg/codec/x264" // This is required to use h264 video encoder
|
||||
|
||||
// Note: If you don't have a camera or microphone or your adapters are not supported,
|
||||
// you can always swap your adapters with our dummy adapters below.
|
||||
@@ -27,10 +26,6 @@ import (
|
||||
_ "github.com/pion/mediadevices/pkg/driver/microphone" // This is required to register microphone adapter
|
||||
)
|
||||
|
||||
const (
|
||||
videoCodecName = webrtc.VP8
|
||||
)
|
||||
|
||||
func main() {
|
||||
config := webrtc.Configuration{
|
||||
ICEServers: []webrtc.ICEServer{
|
||||
@@ -45,7 +40,23 @@ func main() {
|
||||
signal.Decode(signal.MustReadStdin(), &offer)
|
||||
|
||||
// Create a new RTCPeerConnection
|
||||
x264Params, err := x264.NewParams()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
x264Params.BitRate = 500_000 // 500kbps
|
||||
|
||||
opusParams, err := opus.NewParams()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
codecSelector := mediadevices.NewCodecSelector(
|
||||
mediadevices.WithVideoEncoders(&x264Params),
|
||||
mediadevices.WithAudioEncoders(&opusParams),
|
||||
)
|
||||
|
||||
mediaEngine := webrtc.MediaEngine{}
|
||||
codecSelector.Populate(&mediaEngine)
|
||||
if err := mediaEngine.PopulateFromSDP(offer); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
@@ -61,44 +72,33 @@ func main() {
|
||||
fmt.Printf("Connection State has changed %s \n", connectionState.String())
|
||||
})
|
||||
|
||||
md := mediadevices.NewMediaDevices(peerConnection)
|
||||
|
||||
opusParams, err := opus.NewParams()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
opusParams.BitRate = 32000 // 32kbps
|
||||
|
||||
vp8Params, err := vpx.NewVP8Params()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
vp8Params.BitRate = 100000 // 100kbps
|
||||
|
||||
s, err := md.GetUserMedia(mediadevices.MediaStreamConstraints{
|
||||
Audio: func(c *mediadevices.MediaTrackConstraints) {
|
||||
c.Enabled = true
|
||||
c.AudioEncoderBuilders = []codec.AudioEncoderBuilder{&opusParams}
|
||||
},
|
||||
s, err := mediadevices.GetUserMedia(mediadevices.MediaStreamConstraints{
|
||||
Video: func(c *mediadevices.MediaTrackConstraints) {
|
||||
c.FrameFormat = prop.FrameFormat(frame.FormatYUY2)
|
||||
c.Enabled = true
|
||||
c.Width = prop.Int(640)
|
||||
c.Height = prop.Int(480)
|
||||
c.VideoEncoderBuilders = []codec.VideoEncoderBuilder{&vp8Params}
|
||||
},
|
||||
Audio: func(c *mediadevices.MediaTrackConstraints) {
|
||||
},
|
||||
Codec: codecSelector,
|
||||
})
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
for _, tracker := range s.GetTracks() {
|
||||
t := tracker.Track()
|
||||
tracker.OnEnded(func(err error) {
|
||||
fmt.Printf("Track (ID: %s, Label: %s) ended with error: %v\n",
|
||||
t.ID(), t.Label(), err)
|
||||
fmt.Printf("Track (ID: %s) ended with error: %v\n",
|
||||
tracker.ID(), err)
|
||||
})
|
||||
_, err = peerConnection.AddTransceiverFromTrack(t,
|
||||
|
||||
// In Pion/webrtc v3, bind will be called automatically after SDP negotiation
|
||||
webrtcTrack, err := tracker.Bind(peerConnection)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
_, err = peerConnection.AddTransceiverFromTrack(webrtcTrack,
|
||||
webrtc.RtpTransceiverInit{
|
||||
Direction: webrtc.RTPTransceiverDirectionSendonly,
|
||||
},
|
13 go.mod
@@ -4,11 +4,12 @@ go 1.13

require (
	github.com/blackjack/webcam v0.0.0-20200313125108-10ed912a8539
	github.com/faiface/beep v1.0.2
	github.com/jfreymuth/pulse v0.0.0-20200506145638-1534c4af9659
	github.com/lherman-cs/opus v0.0.0-20200223204610-6a4b98199ea4
	github.com/pion/webrtc/v2 v2.2.14
	github.com/gen2brain/malgo v0.10.19
	github.com/lherman-cs/opus v0.0.2
	github.com/pion/logging v0.2.2
	github.com/pion/rtp v1.6.0
	github.com/pion/webrtc/v2 v2.2.26
	github.com/satori/go.uuid v1.2.0
	golang.org/x/image v0.0.0-20200430140353-33d19683fad8
	golang.org/x/sys v0.0.0-20200501145240-bc7a7d42d5c3
	golang.org/x/image v0.0.0-20200927104501-e162460cd6b5
	golang.org/x/sys v0.0.0-20201029080932-201ba4db2418 // indirect
)
122 go.sum
@@ -5,86 +5,73 @@ github.com/cheekybits/genny v1.0.0/go.mod h1:+tQajlRqAUrPI7DOSpB0XAqZYtQakVtB7wX
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/faiface/beep v1.0.2 h1:UB5DiRNmA4erfUYnHbgU4UB6DlBOrsdEFRtcc8sCkdQ=
|
||||
github.com/faiface/beep v1.0.2/go.mod h1:1yLb5yRdHMsovYYWVqYLioXkVuziCSITW1oarTeduQM=
|
||||
github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
|
||||
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
|
||||
github.com/gdamore/encoding v1.0.0/go.mod h1:alR0ol34c49FCSBLjhosxzcPHQbf2trDkoo5dl+VrEg=
|
||||
github.com/gdamore/tcell v1.1.1/go.mod h1:K1udHkiR3cOtlpKG5tZPD5XxrF7v2y7lDq7Whcj+xkQ=
|
||||
github.com/gen2brain/malgo v0.10.19 h1:IUVF6WdVV7Txt47Kx2ajz0rWQ0MU0zO+tbcKmhva7l8=
|
||||
github.com/gen2brain/malgo v0.10.19/go.mod h1:zHSUNZAXfCeNsZou0RtQ6Zk7gDYLIcKOrUWtAdksnEs=
|
||||
github.com/golang/mock v1.2.0 h1:28o5sBqPkBsMGnC6b4MvE2TzSr5/AT4c/1fLqVGIwlk=
|
||||
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
|
||||
github.com/golang/protobuf v1.2.0 h1:P3YflyNX/ehuJFLhxviNdFxQPkGK5cDcApsge1SqnvM=
|
||||
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||
github.com/gopherjs/gopherjs v0.0.0-20180628210949-0892b62f0d9f/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
|
||||
github.com/gopherjs/gopherjs v0.0.0-20180825215210-0210a2f0f73c h1:16eHWuMGvCjSfgRJKqIzapE78onvvTbdi1rMkU00lZw=
|
||||
github.com/gopherjs/gopherjs v0.0.0-20180825215210-0210a2f0f73c/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
|
||||
github.com/gopherjs/gopherwasm v0.1.1/go.mod h1:kx4n9a+MzHH0BJJhvlsQ65hqLFXDO/m256AsaDPQ+/4=
|
||||
github.com/gopherjs/gopherwasm v1.0.0 h1:32nge/RlujS1Im4HNCJPp0NbBOAeBXFuT1KonUuLl+Y=
|
||||
github.com/gopherjs/gopherwasm v1.0.0/go.mod h1:SkZ8z7CWBz5VXbhJel8TxCmAcsQqzgWGR/8nMhyhZSI=
|
||||
github.com/hajimehoshi/go-mp3 v0.1.1/go.mod h1:4i+c5pDNKDrxl1iu9iG90/+fhP37lio6gNhjCx9WBJw=
|
||||
github.com/hajimehoshi/oto v0.1.1/go.mod h1:hUiLWeBQnbDu4pZsAhOnGqMI1ZGibS6e2qhQdfpwz04=
|
||||
github.com/hajimehoshi/oto v0.3.1 h1:cpf/uIv4Q0oc5uf9loQn7PIehv+mZerh+0KKma6gzMk=
|
||||
github.com/hajimehoshi/oto v0.3.1/go.mod h1:e9eTLBB9iZto045HLbzfHJIc+jP3xaKrjZTghvb6fdM=
|
||||
github.com/google/uuid v1.1.1 h1:Gkbcsh/GbpXz7lPftLA3P6TYMwjCLYm83jiFQZF/3gY=
|
||||
github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=
|
||||
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
|
||||
github.com/jfreymuth/oggvorbis v1.0.0/go.mod h1:abe6F9QRjuU9l+2jek3gj46lu40N4qlYxh2grqkLEDM=
|
||||
github.com/jfreymuth/pulse v0.0.0-20200506145638-1534c4af9659 h1:DRA4BuRlhEILiud720WFWqqdADPzp1jTjQvyCr/PP80=
|
||||
github.com/jfreymuth/pulse v0.0.0-20200506145638-1534c4af9659/go.mod h1:cpYspI6YljhkUf1WLXLLDmeaaPFc3CnGLjDZf9dZ4no=
|
||||
github.com/jfreymuth/vorbis v1.0.0/go.mod h1:8zy3lUAm9K/rJJk223RKy6vjCZTWC61NA2QD06bfOE0=
|
||||
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
|
||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
|
||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/lherman-cs/opus v0.0.0-20200223204610-6a4b98199ea4 h1:2ydMA2KbxRkYmIw3R8Me8dn90bejxBR4MKYXJ5THK3I=
|
||||
github.com/lherman-cs/opus v0.0.0-20200223204610-6a4b98199ea4/go.mod h1:v9KQvlDYMuvlwniumBVMlrB0VHQvyTgxNvaXjPmTmps=
|
||||
github.com/lherman-cs/opus v0.0.2 h1:fE9Du3NKXDBztqvoTd6P2y9eJ9vgIHahGK8yQostnhA=
|
||||
github.com/lherman-cs/opus v0.0.2/go.mod h1:v9KQvlDYMuvlwniumBVMlrB0VHQvyTgxNvaXjPmTmps=
|
||||
github.com/lucas-clemente/quic-go v0.7.1-0.20190401152353-907071221cf9 h1:tbuodUh2vuhOVZAdW3NEUvosFHUMJwUNl7jk/VSEiwc=
|
||||
github.com/lucas-clemente/quic-go v0.7.1-0.20190401152353-907071221cf9/go.mod h1:PpMmPfPKO9nKJ/psF49ESTAGQSdfXxlg1otPbEB2nOw=
|
||||
github.com/lucasb-eyer/go-colorful v0.0.0-20181028223441-12d3b2882a08/go.mod h1:NXg0ArsFk0Y01623LgUqoqcouGDB+PwCCQlrwrG6xJ4=
|
||||
github.com/marten-seemann/qtls v0.2.3 h1:0yWJ43C62LsZt08vuQJDK1uC1czUc3FJeCLPoNAI4vA=
|
||||
github.com/marten-seemann/qtls v0.2.3/go.mod h1:xzjG7avBwGGbdZ8dTGxlBnLArsVKLvwmjgmPuiQEcYk=
|
||||
github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
|
||||
github.com/mewkiz/flac v1.0.5/go.mod h1:EHZNU32dMF6alpurYyKHDLYpW1lYpBZ5WrXi/VuNIGs=
|
||||
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
||||
github.com/onsi/ginkgo v1.7.0 h1:WSHQ+IS43OoUrWtD1/bbclrwK8TTH5hzp+umCiuxHgs=
|
||||
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
||||
github.com/onsi/gomega v1.4.3 h1:RE1xgDvH7imwFD45h+u2SgIfERHlS2yNG4DObb5BSKU=
|
||||
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
|
||||
github.com/pion/datachannel v1.4.17 h1:8CChK5VrJoGrwKCysoTscoWvshCAFpUkgY11Tqgz5hE=
|
||||
github.com/pion/datachannel v1.4.17/go.mod h1:+vPQfypU9vSsyPXogYj1hBThWQ6MNXEQoQAzxoPvjYM=
|
||||
github.com/pion/dtls/v2 v2.0.0 h1:Fk+MBhLZ/U1bImzAhmzwbO/pP2rKhtTw8iA934H3ybE=
|
||||
github.com/pion/dtls/v2 v2.0.0/go.mod h1:VkY5VL2wtsQQOG60xQ4lkV5pdn0wwBBTzCfRJqXhp3A=
|
||||
github.com/pion/ice v0.7.15 h1:s1In+gnuyVq7WKWGVQL+1p+OcrMsbfL+VfSe2isH8Ag=
|
||||
github.com/pion/ice v0.7.15/go.mod h1:Z6zybEQgky5mZkKcLfmvc266JukK2srz3VZBBD1iXBw=
|
||||
github.com/pion/datachannel v1.4.21 h1:3ZvhNyfmxsAqltQrApLPQMhSFNA+aT87RqyCq4OXmf0=
|
||||
github.com/pion/datachannel v1.4.21/go.mod h1:oiNyP4gHx2DIwRzX/MFyH0Rz/Gz05OgBlayAI2hAWjg=
|
||||
github.com/pion/dtls/v2 v2.0.1/go.mod h1:uMQkz2W0cSqY00xav7WByQ4Hb+18xeQh2oH2fRezr5U=
|
||||
github.com/pion/dtls/v2 v2.0.2 h1:FHCHTiM182Y8e15aFTiORroiATUI16ryHiQh8AIOJ1E=
|
||||
github.com/pion/dtls/v2 v2.0.2/go.mod h1:27PEO3MDdaCfo21heT59/vsdmZc0zMt9wQPcSlLu/1I=
|
||||
github.com/pion/ice v0.7.18 h1:KbAWlzWRUdX9SmehBh3gYpIFsirjhSQsCw6K2MjYMK0=
|
||||
github.com/pion/ice v0.7.18/go.mod h1:+Bvnm3nYC6Nnp7VV6glUkuOfToB/AtMRZpOU8ihuf4c=
|
||||
github.com/pion/logging v0.2.2 h1:M9+AIj/+pxNsDfAT64+MAVgJO0rsyLnoJKCqf//DoeY=
|
||||
github.com/pion/logging v0.2.2/go.mod h1:k0/tDVsRCX2Mb2ZEmTqNa7CWsQPc+YYCB7Q+5pahoms=
|
||||
github.com/pion/mdns v0.0.4 h1:O4vvVqr4DGX63vzmO6Fw9vpy3lfztVWHGCQfyw0ZLSY=
|
||||
github.com/pion/mdns v0.0.4/go.mod h1:R1sL0p50l42S5lJs91oNdUL58nm0QHrhxnSegr++qC0=
|
||||
github.com/pion/quic v0.1.1 h1:D951FV+TOqI9A0rTF7tHx0Loooqz+nyzjEyj8o3PuMA=
|
||||
github.com/pion/quic v0.1.1/go.mod h1:zEU51v7ru8Mp4AUBJvj6psrSth5eEFNnVQK5K48oV3k=
|
||||
github.com/pion/rtcp v1.2.1 h1:S3yG4KpYAiSmBVqKAfgRa5JdwBNj4zK3RLUa8JYdhak=
|
||||
github.com/pion/rtcp v1.2.1/go.mod h1:a5dj2d6BKIKHl43EnAOIrCczcjESrtPuMgfmL6/K6QM=
|
||||
github.com/pion/rtp v1.5.4 h1:PuNg6xqV3brIUihatcKZj1YDUs+M45L0ZbrZWYtkDxY=
|
||||
github.com/pion/rtp v1.5.4/go.mod h1:bg60AL5GotNOlYZsqycbhDtEV3TkfbpXG0KBiUq29Mg=
|
||||
github.com/pion/sctp v1.7.6 h1:8qZTdJtbKfAns/Hv5L0PAj8FyXcsKhMH1pKUCGisQg4=
|
||||
github.com/pion/sctp v1.7.6/go.mod h1:ichkYQ5tlgCQwEwvgfdcAolqx1nHbYCxo4D7zK/K0X8=
|
||||
github.com/pion/sdp/v2 v2.3.7 h1:WUZHI3pfiYCaE8UGUYcabk863LCK+Bq3AklV5O0oInQ=
|
||||
github.com/pion/sdp/v2 v2.3.7/go.mod h1:+ZZf35r1+zbaWYiZLfPutWfx58DAWcGb2QsS3D/s9M8=
|
||||
github.com/pion/srtp v1.3.3 h1:8bjs9YaSNvSrbH0OfKxzPX+PTrCyAC2LoT9Qesugi+U=
|
||||
github.com/pion/srtp v1.3.3/go.mod h1:jNe0jmIOqksuurR9S/7yoKDalfPeluUFrNPCBqI4FOI=
|
||||
github.com/pion/stun v0.3.3 h1:brYuPl9bN9w/VM7OdNzRSLoqsnwlyNvD9MVeJrHjDQw=
|
||||
github.com/pion/stun v0.3.3/go.mod h1:xrCld6XM+6GWDZdvjPlLMsTU21rNxnO6UO8XsAvHr/M=
|
||||
github.com/pion/randutil v0.0.0/go.mod h1:XcJrSMMbbMRhASFVOlj/5hQial/Y8oH/HVo7TBZq+j8=
|
||||
github.com/pion/randutil v0.1.0 h1:CFG1UdESneORglEsnimhUjf33Rwjubwj6xfiOXBa3mA=
|
||||
github.com/pion/randutil v0.1.0/go.mod h1:XcJrSMMbbMRhASFVOlj/5hQial/Y8oH/HVo7TBZq+j8=
|
||||
github.com/pion/rtcp v1.2.3 h1:2wrhKnqgSz91Q5nzYTO07mQXztYPtxL8a0XOss4rJqA=
|
||||
github.com/pion/rtcp v1.2.3/go.mod h1:zGhIv0RPRF0Z1Wiij22pUt5W/c9fevqSzT4jje/oK7I=
|
||||
github.com/pion/rtp v1.6.0 h1:4Ssnl/T5W2LzxHj9ssYpGVEQh3YYhQFNVmSWO88MMwk=
|
||||
github.com/pion/rtp v1.6.0/go.mod h1:QgfogHsMBVE/RFNno467U/KBqfUywEH+HK+0rtnwsdI=
|
||||
github.com/pion/sctp v1.7.10 h1:o3p3/hZB5Cx12RMGyWmItevJtZ6o2cpuxaw6GOS4x+8=
|
||||
github.com/pion/sctp v1.7.10/go.mod h1:EhpTUQu1/lcK3xI+eriS6/96fWetHGCvBi9MSsnaBN0=
|
||||
github.com/pion/sdp/v2 v2.4.0 h1:luUtaETR5x2KNNpvEMv/r4Y+/kzImzbz4Lm1z8eQNQI=
|
||||
github.com/pion/sdp/v2 v2.4.0/go.mod h1:L2LxrOpSTJbAns244vfPChbciR/ReU1KWfG04OpkR7E=
|
||||
github.com/pion/srtp v1.5.1 h1:9Q3jAfslYZBt+C69SI/ZcONJh9049JUHZWYRRf5KEKw=
|
||||
github.com/pion/srtp v1.5.1/go.mod h1:B+QgX5xPeQTNc1CJStJPHzOlHK66ViMDWTT0HZTCkcA=
|
||||
github.com/pion/stun v0.3.5 h1:uLUCBCkQby4S1cf6CGuR9QrVOKcvUwFeemaC865QHDg=
|
||||
github.com/pion/stun v0.3.5/go.mod h1:gDMim+47EeEtfWogA37n6qXZS88L5V6LqFcf+DZA2UA=
|
||||
github.com/pion/transport v0.6.0/go.mod h1:iWZ07doqOosSLMhZ+FXUTq+TamDoXSllxpbGcfkCmbE=
|
||||
github.com/pion/transport v0.8.10 h1:lTiobMEw2PG6BH/mgIVqTV2mBp/mPT+IJLaN8ZxgdHk=
|
||||
github.com/pion/transport v0.8.10/go.mod h1:tBmha/UCjpum5hqTWhfAEs3CO4/tHSg0MYRhSzR+CZ8=
|
||||
github.com/pion/transport v0.10.0 h1:9M12BSneJm6ggGhJyWpDveFOstJsTiQjkLf4M44rm80=
|
||||
github.com/pion/transport v0.10.0/go.mod h1:BnHnUipd0rZQyTVB2SBGojFHT9CBt5C5TcsJSQGkvSE=
|
||||
github.com/pion/turn/v2 v2.0.3 h1:SJUUIbcPoehlyZgMyIUbBBDhI03sBx32x3JuSIBKBWA=
|
||||
github.com/pion/turn/v2 v2.0.3/go.mod h1:kl1hmT3NxcLynpXVnwJgObL8C9NaCyPTeqI2DcCpSZs=
|
||||
github.com/pion/webrtc/v2 v2.2.14 h1:bRjnXTqMDJ3VERPF45z439Sv6QfDfjdYvdQk1QcIx8M=
|
||||
github.com/pion/webrtc/v2 v2.2.14/go.mod h1:G+8lShCMbHhjpMF1ZJBkyuvrxXrvW4bxs3nOt+mJ2UI=
|
||||
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
|
||||
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pion/transport v0.10.1 h1:2W+yJT+0mOQ160ThZYUx5Zp2skzshiNgxrNE9GUfhJM=
|
||||
github.com/pion/transport v0.10.1/go.mod h1:PBis1stIILMiis0PewDw91WJeLJkyIMcEk+DwKOzf4A=
|
||||
github.com/pion/turn/v2 v2.0.4 h1:oDguhEv2L/4rxwbL9clGLgtzQPjtuZwCdoM7Te8vQVk=
|
||||
github.com/pion/turn/v2 v2.0.4/go.mod h1:1812p4DcGVbYVBTiraUmP50XoKye++AMkbfp+N27mog=
|
||||
github.com/pion/udp v0.1.0 h1:uGxQsNyrqG3GLINv36Ff60covYmfrLoxzwnCsIYspXI=
|
||||
github.com/pion/udp v0.1.0/go.mod h1:BPELIjbwE9PRbd/zxI/KYBnbo7B6+oA6YuEaNE8lths=
|
||||
github.com/pion/webrtc/v2 v2.2.26 h1:01hWE26pL3LgqfxvQ1fr6O4ZtyRFFJmQEZK39pHWfFc=
|
||||
github.com/pion/webrtc/v2 v2.2.26/go.mod h1:XMZbZRNHyPDe1gzTIHFcQu02283YO45CbiwFgKvXnmc=
|
||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
@@ -94,45 +81,40 @@ github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdh
|
||||
github.com/sclevine/agouti v3.0.0+incompatible/go.mod h1:b4WX9W9L1sfQKXeJf1mUTLZKJ48R1S7H23Ji7oFO5Bw=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
|
||||
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
||||
github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4=
|
||||
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
|
||||
github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0=
|
||||
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
golang.org/x/crypto v0.0.0-20190228161510-8dd112bcdc25/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59 h1:3zb4D3T4G8jdExgVU/95+vQXfpEPiMdCaZgmGVxjNHM=
|
||||
golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/exp v0.0.0-20180710024300-14dda7b62fcd h1:nLIcFw7GiqKXUS7HiChg6OAYWgASB2H97dZKd1GhDSs=
|
||||
golang.org/x/exp v0.0.0-20180710024300-14dda7b62fcd/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81 h1:00VmoueYNlNz/aHIilyyQz/MHSqGoWJzpFv/HW8xpzI=
|
||||
golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs=
|
||||
golang.org/x/image v0.0.0-20200430140353-33d19683fad8 h1:6WW6V3x1P/jokJBpRQYUJnMHRP6isStQwCozxnU7XQw=
|
||||
golang.org/x/image v0.0.0-20200430140353-33d19683fad8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
|
||||
golang.org/x/mobile v0.0.0-20180806140643-507816974b79 h1:t2JRgCWkY7Qaa1J2jal+wqC9OjbyHCHwIA9rVlRUSMo=
|
||||
golang.org/x/mobile v0.0.0-20180806140643-507816974b79/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=
|
||||
golang.org/x/crypto v0.0.0-20200602180216-279210d13fed/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20200709230013-948cd5f35899 h1:DZhuSZLsGlFL4CmhA8BcRA0mnthyA/nZ00AqCUo7vHg=
|
||||
golang.org/x/crypto v0.0.0-20200709230013-948cd5f35899/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/image v0.0.0-20200927104501-e162460cd6b5 h1:QelT11PB4FXiDEXucrfNckHoFxwt8USGY1ajP1ZF5lM=
|
||||
golang.org/x/image v0.0.0-20200927104501-e162460cd6b5/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
|
||||
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20191126235420-ef20fe5d7933/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e h1:3G+cUijn7XD+S4eJFddp53Pv7+slrESplyjG25HgL+k=
|
||||
golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
||||
golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5 h1:WQ8q63x+f/zpC8Ac1s9wLElVoHhm32p6tudrU72n1QA=
|
||||
golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
||||
golang.org/x/net v0.0.0-20200602114024-627f9648deb9/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
||||
golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
|
||||
golang.org/x/net v0.0.0-20200707034311-ab3426394381 h1:VXak5I6aEWmAXeQjA+QSZzlgNrpq9mjcfDemuexIKsU=
|
||||
golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
|
||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f h1:wMNYb4v58l5UBM7MYRLPG6ZhfOqbKu7X5eyFl8ZhKvA=
|
||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20181228144115-9a3f9b0469bb/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190228124157-a34e9553db1e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200501145240-bc7a7d42d5c3 h1:5B6i6EAiSYyejWfvc5Rc9BbI3rzIsrrXfAQBWnYfn+w=
|
||||
golang.org/x/sys v0.0.0-20200501145240-bc7a7d42d5c3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200724161237-0e2f3a69832c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201029080932-201ba4db2418 h1:HlFl4V6pEMziuLXyRkm5BIYq1y1GAbb02pRlWvI54OM=
|
||||
golang.org/x/sys v0.0.0-20201029080932-201ba4db2418/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
gopkg.in/DATA-DOG/go-sqlmock.v1 v1.3.0/go.mod h1:OdE7CF6DbADk7lN8LIKRzRJTTZXIjtWgA5THM5lhBAw=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
|
||||
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
@@ -143,3 +125,5 @@ gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWD
|
||||
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
|
||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
|
BIN img/demo.gif
Binary file not shown. Before: 9.6 MiB | After: 133 B
11 internal/logging/logging.go Normal file
@@ -0,0 +1,11 @@
package logging

import (
	"github.com/pion/logging"
)

var loggerFactory = logging.NewDefaultLoggerFactory()

func NewLogger(scope string) logging.LeveledLogger {
	return loggerFactory.NewLogger(scope)
}
14 ioreader.go Normal file
@@ -0,0 +1,14 @@
package mediadevices

type encodedReadCloserImpl struct {
	readFn  func([]byte) (int, error)
	closeFn func() error
}

func (r *encodedReadCloserImpl) Read(b []byte) (int, error) {
	return r.readFn(b)
}

func (r *encodedReadCloserImpl) Close() error {
	return r.closeFn()
}
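`encodedReadCloserImpl` is just a function-backed `io.ReadCloser` adapter. A minimal usage sketch, with an invented in-memory byte source for illustration (it is not how the package itself wires it up):

```
// Illustration only: the data slice is a stand-in for real encoded output.
data := []byte("encoded bytes")
offset := 0
rc := &encodedReadCloserImpl{
	readFn: func(b []byte) (int, error) {
		if offset >= len(data) {
			return 0, io.EOF // requires importing "io"
		}
		n := copy(b, data[offset:])
		offset += n
		return n, nil
	},
	closeFn: func() error { return nil },
}
defer rc.Close()
```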
7 logging.go Normal file
@@ -0,0 +1,7 @@
package mediadevices

import (
	"github.com/pion/mediadevices/internal/logging"
)

var logger = logging.NewLogger("mediadevices")
@@ -7,7 +7,7 @@ type MediaDeviceType int

// MediaDeviceType definitions.
const (
	VideoInput MediaDeviceType = iota
	VideoInput MediaDeviceType = iota + 1
	AudioInput
	AudioOutput
)
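Starting the enum at `iota + 1` keeps the zero value out of the valid device types, so it can act as a catch-all default; the mediastream.go changes later in this diff rely on that with `trackTypeDefault MediaDeviceType = 0`. A small illustrative sketch of the resulting behaviour (the helper function is not part of the package):

```
const (
	VideoInput MediaDeviceType = iota + 1 // 1
	AudioInput                            // 2
	AudioOutput                           // 3
)

// Zero value means "any kind", mirroring queryTracks in mediastream.go.
const trackTypeDefault MediaDeviceType = 0

// kindMatches is an illustrative helper only.
func kindMatches(kind, filter MediaDeviceType) bool {
	return kind == filter || filter == trackTypeDefault
}
```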
128 mediadevices.go
@@ -3,98 +3,30 @@ package mediadevices
|
||||
import (
|
||||
"fmt"
|
||||
"math"
|
||||
"strings"
|
||||
|
||||
"github.com/pion/mediadevices/pkg/driver"
|
||||
"github.com/pion/mediadevices/pkg/prop"
|
||||
"github.com/pion/webrtc/v2"
|
||||
)
|
||||
|
||||
var errNotFound = fmt.Errorf("failed to find the best driver that fits the constraints")
|
||||
|
||||
// MediaDevices is an interface that's defined on https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices
|
||||
type MediaDevices interface {
|
||||
GetDisplayMedia(constraints MediaStreamConstraints) (MediaStream, error)
|
||||
GetUserMedia(constraints MediaStreamConstraints) (MediaStream, error)
|
||||
EnumerateDevices() []MediaDeviceInfo
|
||||
}
|
||||
|
||||
// NewMediaDevices creates MediaDevices interface that provides access to connected media input devices
|
||||
// like cameras and microphones, as well as screen sharing.
|
||||
// In essence, it lets you obtain access to any hardware source of media data.
|
||||
func NewMediaDevices(pc *webrtc.PeerConnection, opts ...MediaDevicesOption) MediaDevices {
|
||||
codecs := make(map[webrtc.RTPCodecType][]*webrtc.RTPCodec)
|
||||
for _, kind := range []webrtc.RTPCodecType{
|
||||
webrtc.RTPCodecTypeAudio,
|
||||
webrtc.RTPCodecTypeVideo,
|
||||
} {
|
||||
codecs[kind] = pc.GetRegisteredRTPCodecs(kind)
|
||||
}
|
||||
return NewMediaDevicesFromCodecs(codecs, opts...)
|
||||
}
|
||||
|
||||
// NewMediaDevicesFromCodecs creates MediaDevices interface from lists of the available codecs
|
||||
// that provides access to connected media input devices like cameras and microphones,
|
||||
// as well as screen sharing.
|
||||
// In essence, it lets you obtain access to any hardware source of media data.
|
||||
func NewMediaDevicesFromCodecs(codecs map[webrtc.RTPCodecType][]*webrtc.RTPCodec, opts ...MediaDevicesOption) MediaDevices {
|
||||
mdo := MediaDevicesOptions{
|
||||
codecs: codecs,
|
||||
trackGenerator: defaultTrackGenerator,
|
||||
}
|
||||
for _, o := range opts {
|
||||
o(&mdo)
|
||||
}
|
||||
return &mediaDevices{
|
||||
MediaDevicesOptions: mdo,
|
||||
}
|
||||
}
|
||||
|
||||
// TrackGenerator is a function to create new track.
|
||||
type TrackGenerator func(payloadType uint8, ssrc uint32, id, label string, codec *webrtc.RTPCodec) (LocalTrack, error)
|
||||
|
||||
var defaultTrackGenerator = TrackGenerator(func(pt uint8, ssrc uint32, id, label string, codec *webrtc.RTPCodec) (LocalTrack, error) {
|
||||
return webrtc.NewTrack(pt, ssrc, id, label, codec)
|
||||
})
|
||||
|
||||
type mediaDevices struct {
|
||||
MediaDevicesOptions
|
||||
}
|
||||
|
||||
// MediaDevicesOptions stores parameters used by MediaDevices.
|
||||
type MediaDevicesOptions struct {
|
||||
codecs map[webrtc.RTPCodecType][]*webrtc.RTPCodec
|
||||
trackGenerator TrackGenerator
|
||||
}
|
||||
|
||||
// MediaDevicesOption is a type of MediaDevices functional option.
|
||||
type MediaDevicesOption func(*MediaDevicesOptions)
|
||||
|
||||
// WithTrackGenerator specifies a TrackGenerator to use customized track.
|
||||
func WithTrackGenerator(gen TrackGenerator) MediaDevicesOption {
|
||||
return func(o *MediaDevicesOptions) {
|
||||
o.trackGenerator = gen
|
||||
}
|
||||
}
|
||||
|
||||
// GetDisplayMedia prompts the user to select and grant permission to capture the contents
|
||||
// of a display or portion thereof (such as a window) as a MediaStream.
|
||||
// Reference: https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getDisplayMedia
|
||||
func (m *mediaDevices) GetDisplayMedia(constraints MediaStreamConstraints) (MediaStream, error) {
|
||||
trackers := make([]Tracker, 0)
|
||||
func GetDisplayMedia(constraints MediaStreamConstraints) (MediaStream, error) {
|
||||
trackers := make([]Track, 0)
|
||||
|
||||
cleanTrackers := func() {
|
||||
for _, t := range trackers {
|
||||
t.Stop()
|
||||
t.Close()
|
||||
}
|
||||
}
|
||||
|
||||
var videoConstraints MediaTrackConstraints
|
||||
if constraints.Video != nil {
|
||||
constraints.Video(&videoConstraints)
|
||||
}
|
||||
|
||||
if videoConstraints.Enabled {
|
||||
tracker, err := m.selectScreen(videoConstraints)
|
||||
tracker, err := selectScreen(videoConstraints, constraints.Codec)
|
||||
if err != nil {
|
||||
cleanTrackers()
|
||||
return nil, err
|
||||
@@ -115,27 +47,20 @@ func (m *mediaDevices) GetDisplayMedia(constraints MediaStreamConstraints) (Medi
|
||||
// GetUserMedia prompts the user for permission to use a media input which produces a MediaStream
|
||||
// with tracks containing the requested types of media.
|
||||
// Reference: https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia
|
||||
func (m *mediaDevices) GetUserMedia(constraints MediaStreamConstraints) (MediaStream, error) {
|
||||
func GetUserMedia(constraints MediaStreamConstraints) (MediaStream, error) {
|
||||
// TODO: It should return media stream based on constraints
|
||||
trackers := make([]Tracker, 0)
|
||||
trackers := make([]Track, 0)
|
||||
|
||||
cleanTrackers := func() {
|
||||
for _, t := range trackers {
|
||||
t.Stop()
|
||||
t.Close()
|
||||
}
|
||||
}
|
||||
|
||||
var videoConstraints, audioConstraints MediaTrackConstraints
|
||||
if constraints.Video != nil {
|
||||
constraints.Video(&videoConstraints)
|
||||
}
|
||||
|
||||
if constraints.Audio != nil {
|
||||
constraints.Audio(&audioConstraints)
|
||||
}
|
||||
|
||||
if videoConstraints.Enabled {
|
||||
tracker, err := m.selectVideo(videoConstraints)
|
||||
tracker, err := selectVideo(videoConstraints, constraints.Codec)
|
||||
if err != nil {
|
||||
cleanTrackers()
|
||||
return nil, err
|
||||
@@ -144,8 +69,9 @@ func (m *mediaDevices) GetUserMedia(constraints MediaStreamConstraints) (MediaSt
|
||||
trackers = append(trackers, tracker)
|
||||
}
|
||||
|
||||
if audioConstraints.Enabled {
|
||||
tracker, err := m.selectAudio(audioConstraints)
|
||||
if constraints.Audio != nil {
|
||||
constraints.Audio(&audioConstraints)
|
||||
tracker, err := selectAudio(audioConstraints, constraints.Codec)
|
||||
if err != nil {
|
||||
cleanTrackers()
|
||||
return nil, err
|
||||
@@ -194,12 +120,15 @@ func queryDriverProperties(filter driver.FilterFn) map[driver.Driver][]prop.Medi
|
||||
func selectBestDriver(filter driver.FilterFn, constraints MediaTrackConstraints) (driver.Driver, MediaTrackConstraints, error) {
|
||||
var bestDriver driver.Driver
|
||||
var bestProp prop.Media
|
||||
var foundPropertiesLog []string
|
||||
minFitnessDist := math.Inf(1)
|
||||
|
||||
foundPropertiesLog = append(foundPropertiesLog, "\n============ Found Properties ============")
|
||||
driverProperties := queryDriverProperties(filter)
|
||||
for d, props := range driverProperties {
|
||||
priority := float64(d.Info().Priority)
|
||||
for _, p := range props {
|
||||
foundPropertiesLog = append(foundPropertiesLog, p.String())
|
||||
fitnessDist, ok := constraints.MediaConstraints.FitnessDistance(p)
|
||||
if !ok {
|
||||
continue
|
||||
@@ -213,16 +142,25 @@ func selectBestDriver(filter driver.FilterFn, constraints MediaTrackConstraints)
|
||||
}
|
||||
}
|
||||
|
||||
foundPropertiesLog = append(foundPropertiesLog, "=============== Constraints ==============")
|
||||
foundPropertiesLog = append(foundPropertiesLog, constraints.String())
|
||||
foundPropertiesLog = append(foundPropertiesLog, "================ Best Fit ================")
|
||||
|
||||
if bestDriver == nil {
|
||||
foundPropertiesLog = append(foundPropertiesLog, "Not found")
|
||||
logger.Debug(strings.Join(foundPropertiesLog, "\n\n"))
|
||||
return nil, MediaTrackConstraints{}, errNotFound
|
||||
}
|
||||
|
||||
constraints.selectedMedia = bestProp
|
||||
constraints.selectedMedia.Merge(constraints.MediaConstraints)
|
||||
foundPropertiesLog = append(foundPropertiesLog, bestProp.String())
|
||||
logger.Debug(strings.Join(foundPropertiesLog, "\n\n"))
|
||||
constraints.selectedMedia = prop.Media{}
|
||||
constraints.selectedMedia.MergeConstraints(constraints.MediaConstraints)
|
||||
constraints.selectedMedia.Merge(bestProp)
|
||||
return bestDriver, constraints, nil
|
||||
}
|
||||
|
||||
func (m *mediaDevices) selectAudio(constraints MediaTrackConstraints) (Tracker, error) {
|
||||
func selectAudio(constraints MediaTrackConstraints, selector *CodecSelector) (Track, error) {
|
||||
typeFilter := driver.FilterAudioRecorder()
|
||||
|
||||
d, c, err := selectBestDriver(typeFilter, constraints)
|
||||
@@ -230,9 +168,9 @@ func (m *mediaDevices) selectAudio(constraints MediaTrackConstraints) (Tracker,
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return newTrack(&m.MediaDevicesOptions, d, c)
|
||||
return newTrackFromDriver(d, c, selector)
|
||||
}
|
||||
func (m *mediaDevices) selectVideo(constraints MediaTrackConstraints) (Tracker, error) {
|
||||
func selectVideo(constraints MediaTrackConstraints, selector *CodecSelector) (Track, error) {
|
||||
typeFilter := driver.FilterVideoRecorder()
|
||||
notScreenFilter := driver.FilterNot(driver.FilterDeviceType(driver.Screen))
|
||||
filter := driver.FilterAnd(typeFilter, notScreenFilter)
|
||||
@@ -242,10 +180,10 @@ func (m *mediaDevices) selectVideo(constraints MediaTrackConstraints) (Tracker,
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return newTrack(&m.MediaDevicesOptions, d, c)
|
||||
return newTrackFromDriver(d, c, selector)
|
||||
}
|
||||
|
||||
func (m *mediaDevices) selectScreen(constraints MediaTrackConstraints) (Tracker, error) {
|
||||
func selectScreen(constraints MediaTrackConstraints, selector *CodecSelector) (Track, error) {
|
||||
typeFilter := driver.FilterVideoRecorder()
|
||||
screenFilter := driver.FilterDeviceType(driver.Screen)
|
||||
filter := driver.FilterAnd(typeFilter, screenFilter)
|
||||
@@ -255,10 +193,10 @@ func (m *mediaDevices) selectScreen(constraints MediaTrackConstraints) (Tracker,
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return newTrack(&m.MediaDevicesOptions, d, c)
|
||||
return newTrackFromDriver(d, c, selector)
|
||||
}
|
||||
|
||||
func (m *mediaDevices) EnumerateDevices() []MediaDeviceInfo {
|
||||
func EnumerateDevices() []MediaDeviceInfo {
|
||||
drivers := driver.GetManager().Query(
|
||||
driver.FilterFn(func(driver.Driver) bool { return true }))
|
||||
info := make([]MediaDeviceInfo, 0, len(drivers))
|
||||
|
82 mediadevices_bench_test.go Normal file
@@ -0,0 +1,82 @@
|
||||
// +build e2e
|
||||
|
||||
package mediadevices
|
||||
|
||||
import (
|
||||
"image"
|
||||
"sync"
|
||||
"testing"
|
||||
|
||||
"github.com/pion/mediadevices/pkg/codec/x264"
|
||||
"github.com/pion/mediadevices/pkg/frame"
|
||||
)
|
||||
|
||||
type mockVideoSource struct {
|
||||
width, height int
|
||||
pool sync.Pool
|
||||
decoder frame.Decoder
|
||||
}
|
||||
|
||||
func newMockVideoSource(width, height int) *mockVideoSource {
|
||||
decoder, err := frame.NewDecoder(frame.FormatYUY2)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
return &mockVideoSource{
|
||||
width: width,
|
||||
height: height,
|
||||
pool: sync.Pool{
|
||||
New: func() interface{} {
|
||||
resolution := width * height
|
||||
return make([]byte, resolution*2)
|
||||
},
|
||||
},
|
||||
decoder: decoder,
|
||||
}
|
||||
}
|
||||
|
||||
func (source *mockVideoSource) ID() string { return "" }
|
||||
func (source *mockVideoSource) Close() error { return nil }
|
||||
func (source *mockVideoSource) Read() (image.Image, func(), error) {
|
||||
raw := source.pool.Get().([]byte)
|
||||
decoded, release, err := source.decoder.Decode(raw, source.width, source.height)
|
||||
source.pool.Put(raw)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
return decoded, release, nil
|
||||
}
|
||||
|
||||
func BenchmarkEndToEnd(b *testing.B) {
|
||||
params, err := x264.NewParams()
|
||||
if err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
params.BitRate = 300_000
|
||||
|
||||
videoSource := newMockVideoSource(1920, 1080)
|
||||
track := NewVideoTrack(videoSource, nil).(*VideoTrack)
|
||||
defer track.Close()
|
||||
|
||||
reader := track.NewReader(false)
|
||||
inputProp, err := detectCurrentVideoProp(track.Broadcaster)
|
||||
if err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
|
||||
encodedReader, err := params.BuildVideoEncoder(reader, inputProp)
|
||||
if err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
defer encodedReader.Close()
|
||||
|
||||
for i := 0; i < b.N; i++ {
|
||||
_, release, err := encodedReader.Read()
|
||||
if err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
release()
|
||||
}
|
||||
}
|
@@ -1,90 +1,42 @@
|
||||
package mediadevices
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"io"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/pion/webrtc/v2"
|
||||
"github.com/pion/webrtc/v2/pkg/media"
|
||||
|
||||
"github.com/pion/mediadevices/pkg/codec"
|
||||
"github.com/pion/mediadevices/pkg/driver"
|
||||
_ "github.com/pion/mediadevices/pkg/driver/audiotest"
|
||||
_ "github.com/pion/mediadevices/pkg/driver/videotest"
|
||||
"github.com/pion/mediadevices/pkg/io/audio"
|
||||
"github.com/pion/mediadevices/pkg/io/video"
|
||||
"github.com/pion/mediadevices/pkg/prop"
|
||||
)
|
||||
|
||||
func TestGetUserMedia(t *testing.T) {
|
||||
videoParams := mockParams{
|
||||
BaseParams: codec.BaseParams{
|
||||
BitRate: 100000,
|
||||
},
|
||||
name: "MockVideo",
|
||||
}
|
||||
audioParams := mockParams{
|
||||
BaseParams: codec.BaseParams{
|
||||
BitRate: 32000,
|
||||
},
|
||||
name: "MockAudio",
|
||||
}
|
||||
md := NewMediaDevicesFromCodecs(
|
||||
map[webrtc.RTPCodecType][]*webrtc.RTPCodec{
|
||||
webrtc.RTPCodecTypeVideo: []*webrtc.RTPCodec{
|
||||
&webrtc.RTPCodec{Type: webrtc.RTPCodecTypeVideo, Name: "MockVideo", PayloadType: 1},
|
||||
},
|
||||
webrtc.RTPCodecTypeAudio: []*webrtc.RTPCodec{
|
||||
&webrtc.RTPCodec{Type: webrtc.RTPCodecTypeAudio, Name: "MockAudio", PayloadType: 2},
|
||||
},
|
||||
},
|
||||
WithTrackGenerator(
|
||||
func(_ uint8, _ uint32, id, _ string, codec *webrtc.RTPCodec) (
|
||||
LocalTrack, error,
|
||||
) {
|
||||
return newMockTrack(codec, id), nil
|
||||
},
|
||||
),
|
||||
)
|
||||
constraints := MediaStreamConstraints{
|
||||
Video: func(c *MediaTrackConstraints) {
|
||||
c.Enabled = true
|
||||
c.Width = prop.Int(640)
|
||||
c.Height = prop.Int(480)
|
||||
params := videoParams
|
||||
c.VideoEncoderBuilders = []codec.VideoEncoderBuilder{¶ms}
|
||||
},
|
||||
Audio: func(c *MediaTrackConstraints) {
|
||||
c.Enabled = true
|
||||
params := audioParams
|
||||
c.AudioEncoderBuilders = []codec.AudioEncoderBuilder{¶ms}
|
||||
},
|
||||
}
|
||||
constraintsWrong := MediaStreamConstraints{
|
||||
Video: func(c *MediaTrackConstraints) {
|
||||
c.Enabled = true
|
||||
c.Width = prop.Int(640)
|
||||
c.Width = prop.IntExact(10000)
|
||||
c.Height = prop.Int(480)
|
||||
params := videoParams
|
||||
params.BitRate = 0
|
||||
c.VideoEncoderBuilders = []codec.VideoEncoderBuilder{¶ms}
|
||||
},
|
||||
Audio: func(c *MediaTrackConstraints) {
|
||||
c.Enabled = true
|
||||
params := audioParams
|
||||
c.AudioEncoderBuilders = []codec.AudioEncoderBuilder{¶ms}
|
||||
},
|
||||
}
|
||||
|
||||
// GetUserMedia with broken parameters
|
||||
ms, err := md.GetUserMedia(constraintsWrong)
|
||||
ms, err := GetUserMedia(constraintsWrong)
|
||||
if err == nil {
|
||||
t.Fatal("Expected error, but got nil")
|
||||
}
|
||||
|
||||
// GetUserMedia with correct parameters
|
||||
ms, err = md.GetUserMedia(constraints)
|
||||
ms, err = GetUserMedia(constraints)
|
||||
if err != nil {
|
||||
t.Fatalf("Unexpected error: %v", err)
|
||||
}
|
||||
@@ -102,11 +54,11 @@ func TestGetUserMedia(t *testing.T) {
|
||||
time.Sleep(50 * time.Millisecond)
|
||||
|
||||
for _, track := range tracks {
|
||||
track.Stop()
|
||||
track.Close()
|
||||
}
|
||||
|
||||
// Stop and retry GetUserMedia
|
||||
ms, err = md.GetUserMedia(constraints)
|
||||
ms, err = GetUserMedia(constraints)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to GetUserMedia after the previsous tracks stopped: %v", err)
|
||||
}
|
||||
@@ -122,98 +74,60 @@ func TestGetUserMedia(t *testing.T) {
|
||||
})
|
||||
}
|
||||
time.Sleep(50 * time.Millisecond)
|
||||
}
|
||||
|
||||
type mockTrack struct {
|
||||
codec *webrtc.RTPCodec
|
||||
id string
|
||||
}
|
||||
|
||||
func newMockTrack(codec *webrtc.RTPCodec, id string) *mockTrack {
|
||||
return &mockTrack{
|
||||
codec: codec,
|
||||
id: id,
|
||||
for _, track := range tracks {
|
||||
track.Close()
|
||||
}
|
||||
}
|
||||
|
||||
func (t *mockTrack) WriteSample(s media.Sample) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (t *mockTrack) Codec() *webrtc.RTPCodec {
|
||||
return t.codec
|
||||
}
|
||||
|
||||
func (t *mockTrack) ID() string {
|
||||
return t.id
|
||||
}
|
||||
|
||||
func (t *mockTrack) Kind() webrtc.RTPCodecType {
|
||||
return t.codec.Type
|
||||
}
|
||||
|
||||
type mockParams struct {
|
||||
codec.BaseParams
|
||||
name string
|
||||
}
|
||||
|
||||
func (params *mockParams) Name() string {
|
||||
return params.name
|
||||
}
|
||||
|
||||
func (params *mockParams) BuildVideoEncoder(r video.Reader, p prop.Media) (codec.ReadCloser, error) {
|
||||
if params.BitRate == 0 {
|
||||
// This is a dummy error to test the failure condition.
|
||||
return nil, errors.New("wrong codec parameter")
|
||||
func TestSelectBestDriverConstraintsResultIsSetProperly(t *testing.T) {
|
||||
filterFn := driver.FilterVideoRecorder()
|
||||
drivers := driver.GetManager().Query(filterFn)
|
||||
if len(drivers) == 0 {
|
||||
t.Fatal("expect to get at least 1 driver")
|
||||
}
|
||||
return &mockVideoCodec{
|
||||
r: r,
|
||||
closed: make(chan struct{}),
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (params *mockParams) BuildAudioEncoder(r audio.Reader, p prop.Media) (codec.ReadCloser, error) {
|
||||
return &mockAudioCodec{
|
||||
r: r,
|
||||
closed: make(chan struct{}),
|
||||
}, nil
|
||||
}
|
||||
|
||||
type mockCodec struct{}
|
||||
|
||||
func (e *mockCodec) SetBitRate(b int) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e *mockCodec) ForceKeyFrame() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
type mockVideoCodec struct {
|
||||
mockCodec
|
||||
r video.Reader
|
||||
closed chan struct{}
|
||||
}
|
||||
|
||||
func (m *mockVideoCodec) Read(b []byte) (int, error) {
|
||||
if _, err := m.r.Read(); err != nil {
|
||||
return 0, err
|
||||
driver := drivers[0]
|
||||
err := driver.Open()
|
||||
if err != nil {
|
||||
t.Fatal("expect to open driver successfully")
|
||||
}
|
||||
return len(b), nil
|
||||
}
|
||||
defer driver.Close()
|
||||
|
||||
func (m *mockVideoCodec) Close() error { return nil }
|
||||
|
||||
type mockAudioCodec struct {
|
||||
mockCodec
|
||||
r audio.Reader
|
||||
closed chan struct{}
|
||||
}
|
||||
|
||||
func (m *mockAudioCodec) Read(b []byte) (int, error) {
|
||||
if _, err := m.r.Read(); err != nil {
|
||||
return 0, err
|
||||
if len(driver.Properties()) == 0 {
|
||||
t.Fatal("expect to get at least 1 property")
|
||||
}
|
||||
expectedProp := driver.Properties()[0]
|
||||
// Since this is a continuous value, bestConstraints should be set with the value that user specified
|
||||
expectedProp.FrameRate = 30.0
|
||||
|
||||
wantConstraints := MediaTrackConstraints{
|
||||
MediaConstraints: prop.MediaConstraints{
|
||||
VideoConstraints: prop.VideoConstraints{
|
||||
// By reducing the width from the driver by a tiny amount, this property should be chosen.
|
||||
// At the same time, we'll be able to find out if the return constraints will be properly set
|
||||
// to the best constraints.
|
||||
Width: prop.Int(expectedProp.Width - 1),
|
||||
Height: prop.Int(expectedProp.Width),
|
||||
FrameFormat: prop.FrameFormat(expectedProp.FrameFormat),
|
||||
FrameRate: prop.Float(30.0),
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
bestDriver, bestConstraints, err := selectBestDriver(filterFn, wantConstraints)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if driver != bestDriver {
|
||||
t.Fatal("best driver is not expected")
|
||||
}
|
||||
|
||||
s := bestConstraints.selectedMedia
|
||||
if s.Width != expectedProp.Width ||
|
||||
s.Height != expectedProp.Height ||
|
||||
s.FrameFormat != expectedProp.FrameFormat ||
|
||||
s.FrameRate != expectedProp.FrameRate {
|
||||
t.Fatalf("failed to return best constraints\nexpected:\n%v\n\ngot:\n%v", expectedProp, bestConstraints.selectedMedia)
|
||||
}
|
||||
return len(b), nil
|
||||
}
|
||||
func (m *mockAudioCodec) Close() error { return nil }
|
||||
|
@@ -2,89 +2,85 @@ package mediadevices
|
||||
|
||||
import (
|
||||
"sync"
|
||||
|
||||
"github.com/pion/webrtc/v2"
|
||||
)
|
||||
|
||||
// MediaStream is an interface that represents a collection of existing tracks.
|
||||
type MediaStream interface {
|
||||
// GetAudioTracks implements https://w3c.github.io/mediacapture-main/#dom-mediastream-getaudiotracks
|
||||
GetAudioTracks() []Tracker
|
||||
GetAudioTracks() []Track
|
||||
// GetVideoTracks implements https://w3c.github.io/mediacapture-main/#dom-mediastream-getvideotracks
|
||||
GetVideoTracks() []Tracker
|
||||
GetVideoTracks() []Track
|
||||
// GetTracks implements https://w3c.github.io/mediacapture-main/#dom-mediastream-gettracks
|
||||
GetTracks() []Tracker
|
||||
GetTracks() []Track
|
||||
// AddTrack implements https://w3c.github.io/mediacapture-main/#dom-mediastream-addtrack
|
||||
AddTrack(t Tracker)
|
||||
AddTrack(t Track)
|
||||
// RemoveTrack implements https://w3c.github.io/mediacapture-main/#dom-mediastream-removetrack
|
||||
RemoveTrack(t Tracker)
|
||||
RemoveTrack(t Track)
|
||||
}
|
||||
|
||||
type mediaStream struct {
|
||||
trackers map[string]Tracker
|
||||
l sync.RWMutex
|
||||
tracks map[Track]struct{}
|
||||
l sync.RWMutex
|
||||
}
|
||||
|
||||
const rtpCodecTypeDefault webrtc.RTPCodecType = 0
|
||||
const trackTypeDefault MediaDeviceType = 0
|
||||
|
||||
// NewMediaStream creates a MediaStream interface that's defined in
|
||||
// https://w3c.github.io/mediacapture-main/#dom-mediastream
|
||||
func NewMediaStream(trackers ...Tracker) (MediaStream, error) {
|
||||
m := mediaStream{trackers: make(map[string]Tracker)}
|
||||
func NewMediaStream(tracks ...Track) (MediaStream, error) {
|
||||
m := mediaStream{tracks: make(map[Track]struct{})}
|
||||
|
||||
for _, tracker := range trackers {
|
||||
id := tracker.LocalTrack().ID()
|
||||
if _, ok := m.trackers[id]; !ok {
|
||||
m.trackers[id] = tracker
|
||||
for _, track := range tracks {
|
||||
if _, ok := m.tracks[track]; !ok {
|
||||
m.tracks[track] = struct{}{}
|
||||
}
|
||||
}
|
||||
|
||||
return &m, nil
|
||||
}
|
||||
|
||||
func (m *mediaStream) GetAudioTracks() []Tracker {
|
||||
return m.queryTracks(webrtc.RTPCodecTypeAudio)
|
||||
func (m *mediaStream) GetAudioTracks() []Track {
|
||||
return m.queryTracks(AudioInput)
|
||||
}
|
||||
|
||||
func (m *mediaStream) GetVideoTracks() []Tracker {
|
||||
return m.queryTracks(webrtc.RTPCodecTypeVideo)
|
||||
func (m *mediaStream) GetVideoTracks() []Track {
|
||||
return m.queryTracks(VideoInput)
|
||||
}
|
||||
|
||||
func (m *mediaStream) GetTracks() []Tracker {
|
||||
return m.queryTracks(rtpCodecTypeDefault)
|
||||
func (m *mediaStream) GetTracks() []Track {
|
||||
return m.queryTracks(trackTypeDefault)
|
||||
}
|
||||
|
||||
// queryTracks returns all tracks that are the same kind as t.
|
||||
// If t is 0, which is the default, queryTracks will return all the tracks.
|
||||
func (m *mediaStream) queryTracks(t webrtc.RTPCodecType) []Tracker {
|
||||
func (m *mediaStream) queryTracks(t MediaDeviceType) []Track {
|
||||
m.l.RLock()
|
||||
defer m.l.RUnlock()
|
||||
|
||||
result := make([]Tracker, 0)
|
||||
for _, tracker := range m.trackers {
|
||||
if tracker.LocalTrack().Kind() == t || t == rtpCodecTypeDefault {
|
||||
result = append(result, tracker)
|
||||
result := make([]Track, 0)
|
||||
for track := range m.tracks {
|
||||
if track.Kind() == t || t == trackTypeDefault {
|
||||
result = append(result, track)
|
||||
}
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
func (m *mediaStream) AddTrack(t Tracker) {
|
||||
func (m *mediaStream) AddTrack(t Track) {
|
||||
m.l.Lock()
|
||||
defer m.l.Unlock()
|
||||
|
||||
id := t.LocalTrack().ID()
|
||||
if _, ok := m.trackers[id]; ok {
|
||||
if _, ok := m.tracks[t]; ok {
|
||||
return
|
||||
}
|
||||
|
||||
m.trackers[id] = t
|
||||
m.tracks[t] = struct{}{}
|
||||
}
|
||||
|
||||
func (m *mediaStream) RemoveTrack(t Tracker) {
|
||||
func (m *mediaStream) RemoveTrack(t Track) {
|
||||
m.l.Lock()
|
||||
defer m.l.Unlock()
|
||||
|
||||
delete(m.trackers, t.LocalTrack().ID())
|
||||
delete(m.tracks, t)
|
||||
}
|
||||
|
97 mediastream_test.go Normal file
@@ -0,0 +1,97 @@
|
||||
package mediadevices
|
||||
|
||||
import (
|
||||
"io"
|
||||
"testing"
|
||||
|
||||
"github.com/pion/webrtc/v2"
|
||||
)
|
||||
|
||||
type mockMediaStreamTrack struct {
|
||||
kind MediaDeviceType
|
||||
}
|
||||
|
||||
func (track *mockMediaStreamTrack) ID() string {
|
||||
return ""
|
||||
}
|
||||
|
||||
func (track *mockMediaStreamTrack) Close() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (track *mockMediaStreamTrack) Kind() MediaDeviceType {
|
||||
return track.kind
|
||||
}
|
||||
|
||||
func (track *mockMediaStreamTrack) OnEnded(handler func(error)) {
|
||||
}
|
||||
|
||||
func (track *mockMediaStreamTrack) Bind(pc *webrtc.PeerConnection) (*webrtc.Track, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (track *mockMediaStreamTrack) Unbind(pc *webrtc.PeerConnection) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (track *mockMediaStreamTrack) NewRTPReader(codecName string, mtu int) (RTPReadCloser, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func (track *mockMediaStreamTrack) NewEncodedReader(codecName string) (io.ReadCloser, error) {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
func TestMediaStreamFilters(t *testing.T) {
|
||||
audioTracks := []Track{
|
||||
&mockMediaStreamTrack{AudioInput},
|
||||
&mockMediaStreamTrack{AudioInput},
|
||||
&mockMediaStreamTrack{AudioInput},
|
||||
&mockMediaStreamTrack{AudioInput},
|
||||
&mockMediaStreamTrack{AudioInput},
|
||||
}
|
||||
|
||||
videoTracks := []Track{
|
||||
&mockMediaStreamTrack{VideoInput},
|
||||
&mockMediaStreamTrack{VideoInput},
|
||||
&mockMediaStreamTrack{VideoInput},
|
||||
}
|
||||
|
||||
tracks := append(audioTracks, videoTracks...)
|
||||
stream, err := NewMediaStream(tracks...)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
expect := func(t *testing.T, actual, expected []Track) {
|
||||
if len(actual) != len(expected) {
|
||||
t.Fatalf("%s: Expected to get %d trackers, but got %d trackers", t.Name(), len(expected), len(actual))
|
||||
}
|
||||
|
||||
for _, a := range actual {
|
||||
found := false
|
||||
for _, e := range expected {
|
||||
if e == a {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if !found {
|
||||
t.Fatalf("%s: Expected to find %p in the query results", t.Name(), a)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
t.Run("GetAudioTracks", func(t *testing.T) {
|
||||
expect(t, stream.GetAudioTracks(), audioTracks)
|
||||
})
|
||||
|
||||
t.Run("GetVideoTracks", func(t *testing.T) {
|
||||
expect(t, stream.GetVideoTracks(), videoTracks)
|
||||
})
|
||||
|
||||
t.Run("GetTracks", func(t *testing.T) {
|
||||
expect(t, stream.GetTracks(), tracks)
|
||||
})
|
||||
}
|
@@ -1,40 +1,18 @@
package mediadevices

import (
    "github.com/pion/mediadevices/pkg/codec"
    "github.com/pion/mediadevices/pkg/io/audio"
    "github.com/pion/mediadevices/pkg/io/video"
    "github.com/pion/mediadevices/pkg/prop"
)

type MediaStreamConstraints struct {
    Audio MediaOption
    Video MediaOption
    Codec *CodecSelector
}

// MediaTrackConstraints represents https://w3c.github.io/mediacapture-main/#dom-mediatrackconstraints
type MediaTrackConstraints struct {
    prop.MediaConstraints
    Enabled bool
    // VideoEncoderBuilders are codec builders that are used for encoding the video
    // and later being used for sending the appropriate RTP payload type.
    //
    // If one encoder builder fails to build the codec, the next builder will be used,
    // repeating until a codec builds. If no builders build successfully, an error is returned.
    VideoEncoderBuilders []codec.VideoEncoderBuilder
    // AudioEncoderBuilders are codec builders that are used for encoding the audio
    // and later being used for sending the appropriate RTP payload type.
    //
    // If one encoder builder fails to build the codec, the next builder will be used,
    // repeating until a codec builds. If no builders build successfully, an error is returned.
    AudioEncoderBuilders []codec.AudioEncoderBuilder
    // VideoTransform will be used to transform the video that's coming from the driver.
    // The pipeline will look like the following: driver -> VideoTransform -> codec
    VideoTransform video.TransformFunc
    // AudioTransform will be used to transform the audio that's coming from the driver.
    // The pipeline will look like the following: driver -> AudioTransform -> codec
    AudioTransform audio.TransformFunc

    selectedMedia prop.Media
}
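For orientation, the fallback rule documented in the encoder builder comments above can be spelled out in a few lines. The helper below is a hypothetical sketch, not part of this diff; it sits in package mediadevices and assumes an extra "errors" import:

// buildFirstVideoEncoder is a hypothetical helper illustrating the documented
// fallback: builders are tried in order and the first successful build wins.
func buildFirstVideoEncoder(builders []codec.VideoEncoderBuilder, r video.Reader, p prop.Media) (codec.ReadCloser, error) {
    if len(builders) == 0 {
        return nil, errors.New("no video encoder builders were provided")
    }
    var lastErr error
    for _, builder := range builders {
        rc, err := builder.BuildVideoEncoder(r, p)
        if err == nil {
            return rc, nil
        }
        lastErr = err // remember the failure and fall through to the next builder
    }
    return nil, lastErr
}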
35
meta.go
Normal file
@@ -0,0 +1,35 @@
package mediadevices

import (
    "github.com/pion/mediadevices/pkg/io/audio"
    "github.com/pion/mediadevices/pkg/io/video"
    "github.com/pion/mediadevices/pkg/prop"
)

// detectCurrentVideoProp is a small helper to get the current video property
func detectCurrentVideoProp(broadcaster *video.Broadcaster) (prop.Media, error) {
    var currentProp prop.Media

    // Since broadcaster has a ring buffer internally, a new reader will either read the last
    // buffered frame or a new frame from the source. This also implies that no frame will be lost
    // in any case.
    metaReader := broadcaster.NewReader(false)
    metaReader = video.DetectChanges(0, func(p prop.Media) { currentProp = p })(metaReader)
    _, _, err := metaReader.Read()

    return currentProp, err
}

// detectCurrentAudioProp is a small helper to get the current audio property
func detectCurrentAudioProp(broadcaster *audio.Broadcaster) (prop.Media, error) {
    var currentProp prop.Media

    // Since broadcaster has a ring buffer internally, a new reader will either read the last
    // buffered frame or a new frame from the source. This also implies that no frame will be lost
    // in any case.
    metaReader := broadcaster.NewReader(false)
    metaReader = audio.DetectChanges(0, func(p prop.Media) { currentProp = p })(metaReader)
    _, _, err := metaReader.Read()

    return currentProp, err
}
98
meta_test.go
Normal file
@@ -0,0 +1,98 @@
package mediadevices

import (
    "image"
    "testing"

    "github.com/pion/mediadevices/pkg/io/audio"
    "github.com/pion/mediadevices/pkg/io/video"
    "github.com/pion/mediadevices/pkg/wave"
)

func TestDetectCurrentVideoProp(t *testing.T) {
    resolution := image.Rect(0, 0, 4, 4)
    first := image.NewRGBA(resolution)
    first.Pix[0] = 1
    second := image.NewRGBA(resolution)
    second.Pix[0] = 2

    isFirst := true
    source := video.ReaderFunc(func() (image.Image, func(), error) {
        if isFirst {
            isFirst = false
            return first, func() {}, nil
        } else {
            return second, func() {}, nil
        }
    })

    broadcaster := video.NewBroadcaster(source, nil)

    currentProp, err := detectCurrentVideoProp(broadcaster)
    if err != nil {
        t.Fatal(err)
    }

    if currentProp.Width != resolution.Dx() {
        t.Fatalf("Expected the width to be %d, but got %d", resolution.Dx(), currentProp.Width)
    }

    if currentProp.Height != resolution.Dy() {
        t.Fatalf("Expected the height to be %d, but got %d", resolution.Dy(), currentProp.Height)
    }

    reader := broadcaster.NewReader(false)
    img, _, err := reader.Read()
    if err != nil {
        t.Fatal(err)
    }

    rgba := img.(*image.RGBA)
    if rgba.Pix[0] != 1 {
        t.Fatal("Expected the frame read after detecting the current prop to still be the first frame")
    }
}

func TestDetectCurrentAudioProp(t *testing.T) {
    info := wave.ChunkInfo{
        Len:          4,
        Channels:     2,
        SamplingRate: 48000,
    }
    first := wave.NewInt16Interleaved(info)
    first.Data[0] = 1
    second := wave.NewInt16Interleaved(info)
    second.Data[0] = 2

    isFirst := true
    source := audio.ReaderFunc(func() (wave.Audio, func(), error) {
        if isFirst {
            isFirst = false
            return first, func() {}, nil
        } else {
            return second, func() {}, nil
        }
    })

    broadcaster := audio.NewBroadcaster(source, nil)

    currentProp, err := detectCurrentAudioProp(broadcaster)
    if err != nil {
        t.Fatal(err)
    }

    if currentProp.ChannelCount != info.Channels {
        t.Fatalf("Expected the channel count to be %d, but got %d", info.Channels, currentProp.ChannelCount)
    }

    reader := broadcaster.NewReader(false)
    chunk, _, err := reader.Read()
    if err != nil {
        t.Fatal(err)
    }

    realChunk := chunk.(*wave.Int16Interleaved)
    if realChunk.Data[0] != 1 {
        t.Fatal("Expected the chunk read after detecting the current prop to still be the first chunk")
    }
}
25
pkg/avfoundation/.gitignore
vendored
Normal file
@@ -0,0 +1,25 @@
# User settings
xcuserdata/

## compatibility with Xcode 8 and earlier (ignoring not required starting Xcode 9)
*.xcscmblueprint
*.xccheckout

## compatibility with Xcode 3 and earlier (ignoring not required starting Xcode 4)
build/
DerivedData/
*.moved-aside
*.pbxuser
!default.pbxuser
*.mode1v3
!default.mode1v3
*.mode2v3
!default.mode2v3
*.perspectivev3
!default.perspectivev3

## Gcc Patch
/*.gcno
.DS_STORE

Build/
294
pkg/avfoundation/AVFoundationBind.xcodeproj/project.pbxproj
Normal file
@@ -0,0 +1,294 @@
|
||||
// !$*UTF8*$!
|
||||
{
|
||||
archiveVersion = 1;
|
||||
classes = {
|
||||
};
|
||||
objectVersion = 50;
|
||||
objects = {
|
||||
|
||||
/* Begin PBXBuildFile section */
|
||||
F0143CC12479F78E00EC29C9 /* AVFoundationBind.h in Headers */ = {isa = PBXBuildFile; fileRef = F0143CC02479F78E00EC29C9 /* AVFoundationBind.h */; };
|
||||
F0143CC32479F78E00EC29C9 /* AVFoundationBind.m in Sources */ = {isa = PBXBuildFile; fileRef = F0143CC22479F78E00EC29C9 /* AVFoundationBind.m */; settings = {COMPILER_FLAGS = "-fno-objc-arc"; }; };
|
||||
/* End PBXBuildFile section */
|
||||
|
||||
/* Begin PBXFileReference section */
|
||||
F0143CBD2479F78E00EC29C9 /* libAVFoundationBind.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libAVFoundationBind.a; sourceTree = BUILT_PRODUCTS_DIR; };
|
||||
F0143CC02479F78E00EC29C9 /* AVFoundationBind.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AVFoundationBind.h; sourceTree = "<group>"; };
|
||||
F0143CC22479F78E00EC29C9 /* AVFoundationBind.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AVFoundationBind.m; sourceTree = "<group>"; };
|
||||
F0FDDA0B247E15D900A3429D /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; };
|
||||
/* End PBXFileReference section */
|
||||
|
||||
/* Begin PBXFrameworksBuildPhase section */
|
||||
F0143CBB2479F78E00EC29C9 /* Frameworks */ = {
|
||||
isa = PBXFrameworksBuildPhase;
|
||||
buildActionMask = 2147483647;
|
||||
files = (
|
||||
);
|
||||
runOnlyForDeploymentPostprocessing = 0;
|
||||
};
|
||||
/* End PBXFrameworksBuildPhase section */
|
||||
|
||||
/* Begin PBXGroup section */
|
||||
F0143CB42479F78E00EC29C9 = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
F0143CBF2479F78E00EC29C9 /* AVFoundationBind */,
|
||||
F0143CBE2479F78E00EC29C9 /* Products */,
|
||||
F0FDDA0A247E15D900A3429D /* Frameworks */,
|
||||
);
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
F0143CBE2479F78E00EC29C9 /* Products */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
F0143CBD2479F78E00EC29C9 /* libAVFoundationBind.a */,
|
||||
);
|
||||
name = Products;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
F0143CBF2479F78E00EC29C9 /* AVFoundationBind */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
F0143CC02479F78E00EC29C9 /* AVFoundationBind.h */,
|
||||
F0143CC22479F78E00EC29C9 /* AVFoundationBind.m */,
|
||||
);
|
||||
path = AVFoundationBind;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
F0FDDA0A247E15D900A3429D /* Frameworks */ = {
|
||||
isa = PBXGroup;
|
||||
children = (
|
||||
F0FDDA0B247E15D900A3429D /* AVFoundation.framework */,
|
||||
);
|
||||
name = Frameworks;
|
||||
sourceTree = "<group>";
|
||||
};
|
||||
/* End PBXGroup section */
|
||||
|
||||
/* Begin PBXHeadersBuildPhase section */
|
||||
F0143CB92479F78E00EC29C9 /* Headers */ = {
|
||||
isa = PBXHeadersBuildPhase;
|
||||
buildActionMask = 2147483647;
|
||||
files = (
|
||||
F0143CC12479F78E00EC29C9 /* AVFoundationBind.h in Headers */,
|
||||
);
|
||||
runOnlyForDeploymentPostprocessing = 0;
|
||||
};
|
||||
/* End PBXHeadersBuildPhase section */
|
||||
|
||||
/* Begin PBXNativeTarget section */
|
||||
F0143CBC2479F78E00EC29C9 /* AVFoundationBind */ = {
|
||||
isa = PBXNativeTarget;
|
||||
buildConfigurationList = F0143CC62479F78E00EC29C9 /* Build configuration list for PBXNativeTarget "AVFoundationBind" */;
|
||||
buildPhases = (
|
||||
F0143CB92479F78E00EC29C9 /* Headers */,
|
||||
F0143CBA2479F78E00EC29C9 /* Sources */,
|
||||
F0143CBB2479F78E00EC29C9 /* Frameworks */,
|
||||
);
|
||||
buildRules = (
|
||||
);
|
||||
dependencies = (
|
||||
);
|
||||
name = AVFoundationBind;
|
||||
productName = AVFoundationBind;
|
||||
productReference = F0143CBD2479F78E00EC29C9 /* libAVFoundationBind.a */;
|
||||
productType = "com.apple.product-type.library.static";
|
||||
};
|
||||
/* End PBXNativeTarget section */
|
||||
|
||||
/* Begin PBXProject section */
|
||||
F0143CB52479F78E00EC29C9 /* Project object */ = {
|
||||
isa = PBXProject;
|
||||
attributes = {
|
||||
LastUpgradeCheck = 1150;
|
||||
ORGANIZATIONNAME = "Herman, Lukas";
|
||||
TargetAttributes = {
|
||||
F0143CBC2479F78E00EC29C9 = {
|
||||
CreatedOnToolsVersion = 11.5;
|
||||
};
|
||||
};
|
||||
};
|
||||
buildConfigurationList = F0143CB82479F78E00EC29C9 /* Build configuration list for PBXProject "AVFoundationBind" */;
|
||||
compatibilityVersion = "Xcode 9.3";
|
||||
developmentRegion = en;
|
||||
hasScannedForEncodings = 0;
|
||||
knownRegions = (
|
||||
en,
|
||||
Base,
|
||||
);
|
||||
mainGroup = F0143CB42479F78E00EC29C9;
|
||||
productRefGroup = F0143CBE2479F78E00EC29C9 /* Products */;
|
||||
projectDirPath = "";
|
||||
projectRoot = "";
|
||||
targets = (
|
||||
F0143CBC2479F78E00EC29C9 /* AVFoundationBind */,
|
||||
);
|
||||
};
|
||||
/* End PBXProject section */
|
||||
|
||||
/* Begin PBXSourcesBuildPhase section */
|
||||
F0143CBA2479F78E00EC29C9 /* Sources */ = {
|
||||
isa = PBXSourcesBuildPhase;
|
||||
buildActionMask = 2147483647;
|
||||
files = (
|
||||
F0143CC32479F78E00EC29C9 /* AVFoundationBind.m in Sources */,
|
||||
);
|
||||
runOnlyForDeploymentPostprocessing = 0;
|
||||
};
|
||||
/* End PBXSourcesBuildPhase section */
|
||||
|
||||
/* Begin XCBuildConfiguration section */
|
||||
F0143CC42479F78E00EC29C9 /* Debug */ = {
|
||||
isa = XCBuildConfiguration;
|
||||
buildSettings = {
|
||||
ALWAYS_SEARCH_USER_PATHS = NO;
|
||||
CLANG_ANALYZER_NONNULL = YES;
|
||||
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
|
||||
CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
|
||||
CLANG_CXX_LIBRARY = "libc++";
|
||||
CLANG_ENABLE_MODULES = YES;
|
||||
CLANG_ENABLE_OBJC_ARC = YES;
|
||||
CLANG_ENABLE_OBJC_WEAK = YES;
|
||||
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
|
||||
CLANG_WARN_BOOL_CONVERSION = YES;
|
||||
CLANG_WARN_COMMA = YES;
|
||||
CLANG_WARN_CONSTANT_CONVERSION = YES;
|
||||
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
|
||||
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
|
||||
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
|
||||
CLANG_WARN_EMPTY_BODY = YES;
|
||||
CLANG_WARN_ENUM_CONVERSION = YES;
|
||||
CLANG_WARN_INFINITE_RECURSION = YES;
|
||||
CLANG_WARN_INT_CONVERSION = YES;
|
||||
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
|
||||
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
|
||||
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
|
||||
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
|
||||
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
|
||||
CLANG_WARN_STRICT_PROTOTYPES = YES;
|
||||
CLANG_WARN_SUSPICIOUS_MOVE = YES;
|
||||
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
|
||||
CLANG_WARN_UNREACHABLE_CODE = YES;
|
||||
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
|
||||
COPY_PHASE_STRIP = NO;
|
||||
DEBUG_INFORMATION_FORMAT = dwarf;
|
||||
ENABLE_STRICT_OBJC_MSGSEND = YES;
|
||||
ENABLE_TESTABILITY = YES;
|
||||
GCC_C_LANGUAGE_STANDARD = gnu11;
|
||||
GCC_DYNAMIC_NO_PIC = NO;
|
||||
GCC_NO_COMMON_BLOCKS = YES;
|
||||
GCC_OPTIMIZATION_LEVEL = 0;
|
||||
GCC_PREPROCESSOR_DEFINITIONS = (
|
||||
"DEBUG=1",
|
||||
"$(inherited)",
|
||||
);
|
||||
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
|
||||
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
|
||||
GCC_WARN_UNDECLARED_SELECTOR = YES;
|
||||
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
|
||||
GCC_WARN_UNUSED_FUNCTION = YES;
|
||||
GCC_WARN_UNUSED_VARIABLE = YES;
|
||||
MACOSX_DEPLOYMENT_TARGET = 10.15;
|
||||
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
|
||||
MTL_FAST_MATH = YES;
|
||||
ONLY_ACTIVE_ARCH = YES;
|
||||
SDKROOT = macosx;
|
||||
};
|
||||
name = Debug;
|
||||
};
|
||||
F0143CC52479F78E00EC29C9 /* Release */ = {
|
||||
isa = XCBuildConfiguration;
|
||||
buildSettings = {
|
||||
ALWAYS_SEARCH_USER_PATHS = NO;
|
||||
CLANG_ANALYZER_NONNULL = YES;
|
||||
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
|
||||
CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
|
||||
CLANG_CXX_LIBRARY = "libc++";
|
||||
CLANG_ENABLE_MODULES = YES;
|
||||
CLANG_ENABLE_OBJC_ARC = YES;
|
||||
CLANG_ENABLE_OBJC_WEAK = YES;
|
||||
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
|
||||
CLANG_WARN_BOOL_CONVERSION = YES;
|
||||
CLANG_WARN_COMMA = YES;
|
||||
CLANG_WARN_CONSTANT_CONVERSION = YES;
|
||||
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
|
||||
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
|
||||
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
|
||||
CLANG_WARN_EMPTY_BODY = YES;
|
||||
CLANG_WARN_ENUM_CONVERSION = YES;
|
||||
CLANG_WARN_INFINITE_RECURSION = YES;
|
||||
CLANG_WARN_INT_CONVERSION = YES;
|
||||
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
|
||||
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
|
||||
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
|
||||
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
|
||||
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
|
||||
CLANG_WARN_STRICT_PROTOTYPES = YES;
|
||||
CLANG_WARN_SUSPICIOUS_MOVE = YES;
|
||||
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
|
||||
CLANG_WARN_UNREACHABLE_CODE = YES;
|
||||
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
|
||||
COPY_PHASE_STRIP = NO;
|
||||
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
|
||||
ENABLE_NS_ASSERTIONS = NO;
|
||||
ENABLE_STRICT_OBJC_MSGSEND = YES;
|
||||
GCC_C_LANGUAGE_STANDARD = gnu11;
|
||||
GCC_NO_COMMON_BLOCKS = YES;
|
||||
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
|
||||
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
|
||||
GCC_WARN_UNDECLARED_SELECTOR = YES;
|
||||
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
|
||||
GCC_WARN_UNUSED_FUNCTION = YES;
|
||||
GCC_WARN_UNUSED_VARIABLE = YES;
|
||||
MACOSX_DEPLOYMENT_TARGET = 10.15;
|
||||
MTL_ENABLE_DEBUG_INFO = NO;
|
||||
MTL_FAST_MATH = YES;
|
||||
SDKROOT = macosx;
|
||||
};
|
||||
name = Release;
|
||||
};
|
||||
F0143CC72479F78E00EC29C9 /* Debug */ = {
|
||||
isa = XCBuildConfiguration;
|
||||
buildSettings = {
|
||||
CODE_SIGN_STYLE = Automatic;
|
||||
EXECUTABLE_PREFIX = lib;
|
||||
PRODUCT_NAME = "$(TARGET_NAME)";
|
||||
SKIP_INSTALL = YES;
|
||||
};
|
||||
name = Debug;
|
||||
};
|
||||
F0143CC82479F78E00EC29C9 /* Release */ = {
|
||||
isa = XCBuildConfiguration;
|
||||
buildSettings = {
|
||||
CODE_SIGN_STYLE = Automatic;
|
||||
EXECUTABLE_PREFIX = lib;
|
||||
PRODUCT_NAME = "$(TARGET_NAME)";
|
||||
SKIP_INSTALL = YES;
|
||||
};
|
||||
name = Release;
|
||||
};
|
||||
/* End XCBuildConfiguration section */
|
||||
|
||||
/* Begin XCConfigurationList section */
|
||||
F0143CB82479F78E00EC29C9 /* Build configuration list for PBXProject "AVFoundationBind" */ = {
|
||||
isa = XCConfigurationList;
|
||||
buildConfigurations = (
|
||||
F0143CC42479F78E00EC29C9 /* Debug */,
|
||||
F0143CC52479F78E00EC29C9 /* Release */,
|
||||
);
|
||||
defaultConfigurationIsVisible = 0;
|
||||
defaultConfigurationName = Release;
|
||||
};
|
||||
F0143CC62479F78E00EC29C9 /* Build configuration list for PBXNativeTarget "AVFoundationBind" */ = {
|
||||
isa = XCConfigurationList;
|
||||
buildConfigurations = (
|
||||
F0143CC72479F78E00EC29C9 /* Debug */,
|
||||
F0143CC82479F78E00EC29C9 /* Release */,
|
||||
);
|
||||
defaultConfigurationIsVisible = 0;
|
||||
defaultConfigurationName = Release;
|
||||
};
|
||||
/* End XCConfigurationList section */
|
||||
};
|
||||
rootObject = F0143CB52479F78E00EC29C9 /* Project object */;
|
||||
}
|
7
pkg/avfoundation/AVFoundationBind.xcodeproj/project.xcworkspace/contents.xcworkspacedata
generated
Normal file
@@ -0,0 +1,7 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<Workspace
|
||||
version = "1.0">
|
||||
<FileRef
|
||||
location = "self:AVFoundationBind.xcodeproj">
|
||||
</FileRef>
|
||||
</Workspace>
|
@@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>IDEDidComputeMac32BitWarning</key>
|
||||
<true/>
|
||||
</dict>
|
||||
</plist>
|
@@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>PreviewsEnabled</key>
|
||||
<false/>
|
||||
</dict>
|
||||
</plist>
|
@@ -0,0 +1,67 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<Scheme
|
||||
LastUpgradeVersion = "1150"
|
||||
version = "1.3">
|
||||
<BuildAction
|
||||
parallelizeBuildables = "YES"
|
||||
buildImplicitDependencies = "YES">
|
||||
<BuildActionEntries>
|
||||
<BuildActionEntry
|
||||
buildForTesting = "YES"
|
||||
buildForRunning = "YES"
|
||||
buildForProfiling = "YES"
|
||||
buildForArchiving = "YES"
|
||||
buildForAnalyzing = "YES">
|
||||
<BuildableReference
|
||||
BuildableIdentifier = "primary"
|
||||
BlueprintIdentifier = "F0143CBC2479F78E00EC29C9"
|
||||
BuildableName = "libAVFoundationBind.a"
|
||||
BlueprintName = "AVFoundationBind"
|
||||
ReferencedContainer = "container:AVFoundationBind.xcodeproj">
|
||||
</BuildableReference>
|
||||
</BuildActionEntry>
|
||||
</BuildActionEntries>
|
||||
</BuildAction>
|
||||
<TestAction
|
||||
buildConfiguration = "Debug"
|
||||
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
|
||||
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
|
||||
shouldUseLaunchSchemeArgsEnv = "YES">
|
||||
<Testables>
|
||||
</Testables>
|
||||
</TestAction>
|
||||
<LaunchAction
|
||||
buildConfiguration = "Release"
|
||||
selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB"
|
||||
selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB"
|
||||
launchStyle = "0"
|
||||
useCustomWorkingDirectory = "NO"
|
||||
ignoresPersistentStateOnLaunch = "NO"
|
||||
debugDocumentVersioning = "YES"
|
||||
debugServiceExtension = "internal"
|
||||
allowLocationSimulation = "YES">
|
||||
</LaunchAction>
|
||||
<ProfileAction
|
||||
buildConfiguration = "Release"
|
||||
shouldUseLaunchSchemeArgsEnv = "YES"
|
||||
savedToolIdentifier = ""
|
||||
useCustomWorkingDirectory = "NO"
|
||||
debugDocumentVersioning = "YES">
|
||||
<MacroExpansion>
|
||||
<BuildableReference
|
||||
BuildableIdentifier = "primary"
|
||||
BlueprintIdentifier = "F0143CBC2479F78E00EC29C9"
|
||||
BuildableName = "libAVFoundationBind.a"
|
||||
BlueprintName = "AVFoundationBind"
|
||||
ReferencedContainer = "container:AVFoundationBind.xcodeproj">
|
||||
</BuildableReference>
|
||||
</MacroExpansion>
|
||||
</ProfileAction>
|
||||
<AnalyzeAction
|
||||
buildConfiguration = "Debug">
|
||||
</AnalyzeAction>
|
||||
<ArchiveAction
|
||||
buildConfiguration = "Release"
|
||||
revealArchiveInOrganizer = "YES">
|
||||
</ArchiveAction>
|
||||
</Scheme>
|
78
pkg/avfoundation/AVFoundationBind/AVFoundationBind.h
Normal file
@@ -0,0 +1,78 @@
// MIT License
//
// Copyright (c) 2019-2020 Pion
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

#pragma once

#include <stddef.h>

#define MAX_DEVICES 8
#define MAX_PROPERTIES 64
#define MAX_DEVICE_UID_CHARS 64

typedef const char* STATUS;
static STATUS STATUS_OK = (STATUS) NULL;
static STATUS STATUS_NULL_ARG = (STATUS) "One of the arguments was null";
static STATUS STATUS_DEVICE_INIT_FAILED = (STATUS) "Failed to init device";
static STATUS STATUS_UNSUPPORTED_FRAME_FORMAT = (STATUS) "Unsupported frame format";
static STATUS STATUS_UNSUPPORTED_MEDIA_TYPE = (STATUS) "Unsupported media type";
static STATUS STATUS_FAILED_TO_ACQUIRE_LOCK = (STATUS) "Failed to acquire a lock";
static STATUS STATUS_UNSUPPORTED_FORMAT = (STATUS) "Unsupported device format";

typedef enum AVBindMediaType {
    AVBindMediaTypeVideo,
    AVBindMediaTypeAudio,
} AVBindMediaType;

typedef enum AVBindFrameFormat {
    AVBindFrameFormatI420,
    AVBindFrameFormatNV21,
    AVBindFrameFormatYUY2,
    AVBindFrameFormatUYVY,
} AVBindFrameFormat;

typedef void (*AVBindDataCallback)(void *userData, void *buf, int len);

typedef struct AVBindMediaProperty {
    // video properties
    int width, height;
    AVBindFrameFormat frameFormat;

    // audio properties

} AVBindMediaProperty, *PAVBindMediaProperty;

typedef struct AVBindSession AVBindSession, *PAVBindSession;

typedef struct {
    char uid[MAX_DEVICE_UID_CHARS + 1];
} AVBindDevice, *PAVBindDevice;

// AVBindDevices returns a list of AVBindDevices. The result array points to static
// memory. The caller is expected to not hold on to the address for long and to make a copy.
// Every time this function gets called, the array will be overwritten and the memory reused.
STATUS AVBindDevices(AVBindMediaType, PAVBindDevice*, int*);

STATUS AVBindSessionInit(AVBindDevice, PAVBindSession*);
STATUS AVBindSessionFree(PAVBindSession*);
STATUS AVBindSessionOpen(PAVBindSession, AVBindMediaProperty, AVBindDataCallback, void*);
STATUS AVBindSessionClose(PAVBindSession);
STATUS AVBindSessionProperties(PAVBindSession, PAVBindMediaProperty*, int*);
350
pkg/avfoundation/AVFoundationBind/AVFoundationBind.m
Normal file
@@ -0,0 +1,350 @@
|
||||
// MIT License
|
||||
//
|
||||
// Copyright (c) 2019-2020 Pion
|
||||
//
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
// of this software and associated documentation files (the "Software"), to deal
|
||||
// in the Software without restriction, including without limitation the rights
|
||||
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
// copies of the Software, and to permit persons to whom the Software is
|
||||
// furnished to do so, subject to the following conditions:
|
||||
//
|
||||
// The above copyright notice and this permission notice shall be included in all
|
||||
// copies or substantial portions of the Software.
|
||||
//
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
// SOFTWARE.
|
||||
|
||||
// Naming Convention (let "name" as an actual variable name):
|
||||
// - mName: "name" is a member of an Objective C object
|
||||
// - pName: "name" is a C pointer
|
||||
// - refName: "name" is an Objective C object reference
|
||||
|
||||
#import <Foundation/Foundation.h>
|
||||
#import <AVFoundation/AVFoundation.h>
|
||||
#import "AVFoundationBind.h"
|
||||
#include <string.h>
|
||||
|
||||
#define CHK(condition, status) \
|
||||
do { \
|
||||
if(!(condition)) { \
|
||||
retStatus = status; \
|
||||
goto cleanup; \
|
||||
} \
|
||||
} while(0)
|
||||
|
||||
#define CHK_STATUS(status) \
|
||||
do { \
|
||||
if(status != STATUS_OK) { \
|
||||
retStatus = status; \
|
||||
goto cleanup; \
|
||||
} \
|
||||
} while(0)
|
||||
|
||||
@interface VideoDataDelegate : NSObject<AVCaptureVideoDataOutputSampleBufferDelegate>
|
||||
|
||||
@property (readonly) AVBindDataCallback mCallback;
|
||||
@property (readonly) void *mPUserData;
|
||||
|
||||
- (void)captureOutput:(AVCaptureOutput *)captureOutput
|
||||
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
|
||||
fromConnection:(AVCaptureConnection *)connection;
|
||||
|
||||
@end
|
||||
|
||||
@implementation VideoDataDelegate
|
||||
|
||||
- (id) init: (AVBindDataCallback) callback
|
||||
withUserData: (void*) pUserData {
|
||||
self = [super init];
|
||||
_mCallback = callback;
|
||||
_mPUserData = pUserData;
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
- (void)captureOutput:(AVCaptureOutput *)captureOutput
|
||||
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
|
||||
fromConnection:(AVCaptureConnection *)connection {
|
||||
if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 ||
|
||||
!CMSampleBufferIsValid(sampleBuffer) ||
|
||||
!CMSampleBufferDataIsReady(sampleBuffer)) {
|
||||
return;
|
||||
}
|
||||
|
||||
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
|
||||
if (imageBuffer == NULL) {
|
||||
return;
|
||||
}
|
||||
|
||||
imageBuffer = CVBufferRetain(imageBuffer);
|
||||
CVReturn ret =
|
||||
CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
|
||||
if (ret != kCVReturnSuccess) {
|
||||
return;
|
||||
}
|
||||
|
||||
size_t heightY = CVPixelBufferGetHeightOfPlane(imageBuffer, 0);
|
||||
size_t bytesPerRowY = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 0);
|
||||
|
||||
size_t heightUV = CVPixelBufferGetHeightOfPlane(imageBuffer, 1);
|
||||
size_t bytesPerRowUV = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, 1);
|
||||
|
||||
int len = (int)((heightY * bytesPerRowY) + (2 * heightUV * bytesPerRowUV));
|
||||
void *buf = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
|
||||
_mCallback(_mPUserData, buf, len);
|
||||
|
||||
CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
|
||||
CVBufferRelease(imageBuffer);
|
||||
}
|
||||
|
||||
@end
|
||||
|
||||
@interface AudioDataDelegate : NSObject<AVCaptureAudioDataOutputSampleBufferDelegate>
|
||||
|
||||
@property (readonly) AVBindDataCallback mCallback;
|
||||
|
||||
- (void)captureOutput:(AVCaptureOutput *)captureOutput
|
||||
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
|
||||
fromConnection:(AVCaptureConnection *)connection;
|
||||
|
||||
@end
|
||||
|
||||
@implementation AudioDataDelegate
|
||||
|
||||
- (id) init: (AVBindDataCallback) callback {
|
||||
self = [super init];
|
||||
_mCallback = callback;
|
||||
return self;
|
||||
}
|
||||
|
||||
- (void)captureOutput:(AVCaptureOutput *)captureOutput
|
||||
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
|
||||
fromConnection:(AVCaptureConnection *)connection {
|
||||
// TODO
|
||||
}
|
||||
|
||||
@end
|
||||
|
||||
STATUS frameFormatToFourCC(AVBindFrameFormat format, FourCharCode *pFourCC) {
|
||||
STATUS retStatus = STATUS_OK;
|
||||
switch (format) {
|
||||
case AVBindFrameFormatNV21:
|
||||
*pFourCC = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
|
||||
break;
|
||||
case AVBindFrameFormatUYVY:
|
||||
*pFourCC = kCVPixelFormatType_422YpCbCr8;
|
||||
break;
|
||||
// TODO: Add the rest of frame formats
|
||||
default:
|
||||
retStatus = STATUS_UNSUPPORTED_FRAME_FORMAT;
|
||||
}
|
||||
return retStatus;
|
||||
}
|
||||
|
||||
STATUS frameFormatFromFourCC(FourCharCode fourCC, AVBindFrameFormat *pFormat) {
|
||||
STATUS retStatus = STATUS_OK;
|
||||
switch (fourCC) {
|
||||
case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
|
||||
*pFormat = AVBindFrameFormatNV21;
|
||||
break;
|
||||
case kCVPixelFormatType_422YpCbCr8:
|
||||
*pFormat = AVBindFrameFormatUYVY;
|
||||
break;
|
||||
// TODO: Add the rest of frame formats
|
||||
default:
|
||||
retStatus = STATUS_UNSUPPORTED_FRAME_FORMAT;
|
||||
}
|
||||
return retStatus;
|
||||
}
|
||||
|
||||
|
||||
STATUS AVBindDevices(AVBindMediaType mediaType, PAVBindDevice *ppDevices, int *pLen) {
|
||||
static AVBindDevice devices[MAX_DEVICES];
|
||||
STATUS retStatus = STATUS_OK;
|
||||
NSAutoreleasePool *refPool = [[NSAutoreleasePool alloc] init];
|
||||
CHK(mediaType == AVBindMediaTypeVideo || mediaType == AVBindMediaTypeAudio, STATUS_UNSUPPORTED_MEDIA_TYPE);
|
||||
CHK(ppDevices != NULL && pLen != NULL, STATUS_NULL_ARG);
|
||||
|
||||
PAVBindDevice pDevice;
|
||||
AVMediaType _mediaType = mediaType == AVBindMediaTypeVideo ? AVMediaTypeVideo : AVMediaTypeAudio;
|
||||
NSArray *refAllTypes = @[
|
||||
AVCaptureDeviceTypeBuiltInWideAngleCamera,
|
||||
AVCaptureDeviceTypeBuiltInMicrophone,
|
||||
AVCaptureDeviceTypeExternalUnknown
|
||||
];
|
||||
AVCaptureDeviceDiscoverySession *refSession = [AVCaptureDeviceDiscoverySession
|
||||
discoverySessionWithDeviceTypes: refAllTypes
|
||||
mediaType: _mediaType
|
||||
position: AVCaptureDevicePositionUnspecified];
|
||||
|
||||
int i = 0;
|
||||
for (AVCaptureDevice *refDevice in refSession.devices) {
|
||||
if (i >= MAX_DEVICES) {
|
||||
break;
|
||||
}
|
||||
|
||||
pDevice = devices + i;
|
||||
strncpy(pDevice->uid, refDevice.uniqueID.UTF8String, MAX_DEVICE_UID_CHARS);
|
||||
pDevice->uid[MAX_DEVICE_UID_CHARS] = '\0';
|
||||
i++;
|
||||
}
|
||||
|
||||
*ppDevices = devices;
|
||||
*pLen = i;
|
||||
|
||||
cleanup:
|
||||
[refPool drain];
|
||||
return retStatus;
|
||||
}
|
||||
|
||||
struct AVBindSession {
|
||||
AVBindDevice device;
|
||||
AVCaptureSession *refCaptureSession;
|
||||
AVBindMediaProperty properties[MAX_PROPERTIES];
|
||||
};
|
||||
|
||||
|
||||
STATUS AVBindSessionInit(AVBindDevice device, PAVBindSession *ppSessionResult) {
|
||||
STATUS retStatus = STATUS_OK;
|
||||
CHK(ppSessionResult != NULL, STATUS_NULL_ARG);
|
||||
PAVBindSession pSession = malloc(sizeof(AVBindSession));
|
||||
pSession->device = device;
|
||||
pSession->refCaptureSession = NULL;
|
||||
*ppSessionResult = pSession;
|
||||
|
||||
cleanup:
|
||||
return retStatus;
|
||||
}
|
||||
|
||||
STATUS AVBindSessionFree(PAVBindSession *ppSession) {
|
||||
STATUS retStatus = STATUS_OK;
|
||||
CHK(ppSession != NULL, STATUS_NULL_ARG);
|
||||
PAVBindSession pSession = *ppSession;
|
||||
if (pSession->refCaptureSession != NULL) {
|
||||
[pSession->refCaptureSession release];
|
||||
pSession->refCaptureSession = NULL;
|
||||
}
|
||||
free(pSession);
|
||||
*ppSession = NULL;
|
||||
|
||||
cleanup:
|
||||
return retStatus;
|
||||
}
|
||||
|
||||
STATUS AVBindSessionOpen(PAVBindSession pSession,
|
||||
AVBindMediaProperty property,
|
||||
AVBindDataCallback dataCallback,
|
||||
void *pUserData) {
|
||||
STATUS retStatus = STATUS_OK;
|
||||
NSAutoreleasePool *refPool = [[NSAutoreleasePool alloc] init];
|
||||
CHK(pSession != NULL && dataCallback != NULL, STATUS_NULL_ARG);
|
||||
|
||||
AVCaptureDeviceInput *refInput;
|
||||
NSError *refErr = NULL;
|
||||
NSString *refUID = [NSString stringWithUTF8String: pSession->device.uid];
|
||||
AVCaptureDevice *refDevice = [AVCaptureDevice deviceWithUniqueID: refUID];
|
||||
|
||||
refInput = [[AVCaptureDeviceInput alloc] initWithDevice: refDevice error: &refErr];
|
||||
CHK(refErr == NULL, STATUS_DEVICE_INIT_FAILED);
|
||||
|
||||
AVCaptureSession *refCaptureSession = [[AVCaptureSession alloc] init];
|
||||
refCaptureSession.sessionPreset = AVCaptureSessionPresetMedium;
|
||||
[refCaptureSession addInput: refInput];
|
||||
|
||||
if ([refDevice hasMediaType: AVMediaTypeVideo]) {
|
||||
VideoDataDelegate *pDelegate = [[VideoDataDelegate alloc]
|
||||
init: dataCallback
|
||||
withUserData: pUserData];
|
||||
|
||||
AVCaptureVideoDataOutput *pOutput = [[AVCaptureVideoDataOutput alloc] init];
|
||||
FourCharCode fourCC;
|
||||
CHK_STATUS(frameFormatToFourCC(property.frameFormat, &fourCC));
|
||||
|
||||
pOutput.videoSettings = @{
|
||||
(id)kCVPixelBufferWidthKey: @(property.width),
|
||||
(id)kCVPixelBufferHeightKey: @(property.height),
|
||||
(id)kCVPixelBufferPixelFormatTypeKey: @(fourCC),
|
||||
};
|
||||
pOutput.alwaysDiscardsLateVideoFrames = YES;
|
||||
dispatch_queue_t queue =
|
||||
dispatch_queue_create("captureQueue", DISPATCH_QUEUE_SERIAL);
|
||||
[pOutput setSampleBufferDelegate:pDelegate queue:queue];
|
||||
[refCaptureSession addOutput: pOutput];
|
||||
} else {
|
||||
// TODO: implement audio pipeline
|
||||
}
|
||||
|
||||
pSession->refCaptureSession = [refCaptureSession retain];
|
||||
[refCaptureSession startRunning];
|
||||
|
||||
cleanup:
|
||||
[refPool drain];
|
||||
return retStatus;
|
||||
}
|
||||
|
||||
|
||||
STATUS AVBindSessionClose(PAVBindSession pSession) {
|
||||
STATUS retStatus = STATUS_OK;
|
||||
CHK(pSession != NULL, STATUS_NULL_ARG);
|
||||
CHK(pSession->refCaptureSession != NULL, STATUS_OK);
|
||||
|
||||
[pSession->refCaptureSession stopRunning];
|
||||
[pSession->refCaptureSession release];
|
||||
pSession->refCaptureSession = NULL;
|
||||
|
||||
cleanup:
|
||||
return retStatus;
|
||||
}
|
||||
|
||||
STATUS AVBindSessionProperties(PAVBindSession pSession, PAVBindMediaProperty *ppProperties, int *pLen) {
|
||||
STATUS retStatus = STATUS_OK;
|
||||
NSAutoreleasePool *refPool = [[NSAutoreleasePool alloc] init];
|
||||
CHK(pSession != NULL && ppProperties != NULL && pLen != NULL, STATUS_NULL_ARG);
|
||||
|
||||
NSString *refDeviceUID = [NSString stringWithUTF8String: pSession->device.uid];
|
||||
AVCaptureDevice *refDevice = [AVCaptureDevice deviceWithUniqueID: refDeviceUID];
|
||||
FourCharCode fourCC;
|
||||
CMVideoFormatDescriptionRef videoFormat;
|
||||
CMVideoDimensions videoDimensions;
|
||||
|
||||
memset(pSession->properties, 0, sizeof(pSession->properties));
|
||||
PAVBindMediaProperty pProperty = pSession->properties;
|
||||
int len = 0;
|
||||
for (AVCaptureDeviceFormat *refFormat in refDevice.formats) {
|
||||
// TODO: Probably gives a warn to the user
|
||||
if (len >= MAX_PROPERTIES) {
|
||||
break;
|
||||
}
|
||||
|
||||
if ([refFormat.mediaType isEqual:AVMediaTypeVideo]) {
|
||||
fourCC = CMFormatDescriptionGetMediaSubType(refFormat.formatDescription);
|
||||
if (frameFormatFromFourCC(fourCC, &pProperty->frameFormat) != STATUS_OK) {
|
||||
continue;
|
||||
}
|
||||
|
||||
videoFormat = (CMVideoFormatDescriptionRef) refFormat.formatDescription;
|
||||
videoDimensions = CMVideoFormatDescriptionGetDimensions(videoFormat);
|
||||
pProperty->height = videoDimensions.height;
|
||||
pProperty->width = videoDimensions.width;
|
||||
} else {
|
||||
// TODO: Get audio properties
|
||||
}
|
||||
|
||||
pProperty++;
|
||||
len++;
|
||||
}
|
||||
|
||||
*ppProperties = pSession->properties;
|
||||
*pLen = len;
|
||||
|
||||
cleanup:
|
||||
|
||||
[refPool drain];
|
||||
return retStatus;
|
||||
}
|
56
pkg/avfoundation/avfoundation_callback_darwin.go
Normal file
@@ -0,0 +1,56 @@
package avfoundation

// extern void onData(void*, void*, int);
import "C"
import (
    "sync"
    "unsafe"
)

var mu sync.Mutex
var nextID handleID

type dataCb func(data []byte)

var handles = make(map[handleID]dataCb)

type handleID int

//export onData
func onData(userData unsafe.Pointer, buf unsafe.Pointer, length C.int) {
    data := C.GoBytes(buf, length)

    handleNum := (*C.int)(userData)
    cb, ok := lookup(handleID(*handleNum))
    if ok {
        cb(data)
    }
}

func register(fn dataCb) handleID {
    mu.Lock()
    defer mu.Unlock()

    nextID++
    for handles[nextID] != nil {
        nextID++
    }
    handles[nextID] = fn

    return nextID
}

func lookup(i handleID) (cb dataCb, ok bool) {
    mu.Lock()
    defer mu.Unlock()

    cb, ok = handles[i]
    return
}

func unregister(i handleID) {
    mu.Lock()
    defer mu.Unlock()

    delete(handles, i)
}
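The register/lookup/unregister trio exists because C code must not retain a pointer to Go-managed memory (such as a func value) after a cgo call returns; instead a plain integer handle is registered here and its address is handed to C, and onData resolves it back to the Go callback. The function below is a hypothetical sketch of that lifecycle, mirroring what avfoundation_darwin.go does, and is not part of this commit:

// exampleLifecycle is a hypothetical sketch showing the intended order of
// operations for the handle registry above.
func exampleLifecycle(consume func([]byte)) {
    id := register(consume) // C code only ever receives the address of this handle
    defer unregister(id)

    // The C side is given unsafe.Pointer(&id) as its userData; when data arrives
    // it calls onData(userData, buf, len), which resolves the handle via lookup
    // and invokes consume. See avfoundation_darwin.go for the real wiring.
}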
217
pkg/avfoundation/avfoundation_darwin.go
Normal file
@@ -0,0 +1,217 @@
// Package avfoundation provides AVFoundation binding for Go
package avfoundation

// #cgo CFLAGS: -x objective-c
// #cgo LDFLAGS: -framework AVFoundation -framework Foundation -framework CoreMedia -framework CoreVideo
// #include "AVFoundationBind/AVFoundationBind.h"
// #include "AVFoundationBind/AVFoundationBind.m"
// extern void onData(void*, void*, int);
// void onDataBridge(void *userData, void *buf, int len) {
//   onData(userData, buf, len);
// }
import "C"
import (
    "fmt"
    "io"
    "unsafe"

    "github.com/pion/mediadevices/pkg/frame"
    "github.com/pion/mediadevices/pkg/prop"
)

type MediaType C.AVBindMediaType

const (
    Video = MediaType(C.AVBindMediaTypeVideo)
    Audio = MediaType(C.AVBindMediaTypeAudio)
)

// Device represents metadata that can later be used to retrieve the
// underlying device given by AVFoundation
type Device struct {
    // UID is a unique identifier for a device
    UID     string
    cDevice C.AVBindDevice
}

func frameFormatToAVBind(f frame.Format) (C.AVBindFrameFormat, bool) {
    switch f {
    case frame.FormatI420:
        return C.AVBindFrameFormatI420, true
    case frame.FormatNV21:
        return C.AVBindFrameFormatNV21, true
    case frame.FormatYUY2:
        return C.AVBindFrameFormatYUY2, true
    case frame.FormatUYVY:
        return C.AVBindFrameFormatUYVY, true
    default:
        return 0, false
    }
}

func frameFormatFromAVBind(f C.AVBindFrameFormat) (frame.Format, bool) {
    switch f {
    case C.AVBindFrameFormatI420:
        return frame.FormatI420, true
    case C.AVBindFrameFormatNV21:
        return frame.FormatNV21, true
    case C.AVBindFrameFormatYUY2:
        return frame.FormatYUY2, true
    case C.AVBindFrameFormatUYVY:
        return frame.FormatUYVY, true
    default:
        return "", false
    }
}

// Devices uses AVFoundation to query a list of devices based on the media type
func Devices(mediaType MediaType) ([]Device, error) {
    var cDevicesPtr C.PAVBindDevice
    var cDevicesLen C.int

    status := C.AVBindDevices(C.AVBindMediaType(mediaType), &cDevicesPtr, &cDevicesLen)
    if status != nil {
        return nil, fmt.Errorf("%s", C.GoString(status))
    }

    // https://github.com/golang/go/wiki/cgo#turning-c-arrays-into-go-slices
    cDevices := (*[1 << 28]C.AVBindDevice)(unsafe.Pointer(cDevicesPtr))[:cDevicesLen:cDevicesLen]
    devices := make([]Device, cDevicesLen)

    for i := range devices {
        devices[i].UID = C.GoString(&cDevices[i].uid[0])
        devices[i].cDevice = cDevices[i]
    }

    return devices, nil
}

// ReadCloser is a wrapper around the data callback from AVFoundation. The data received from
// the underlying callback can be retrieved by calling Read.
type ReadCloser struct {
    dataChan chan []byte
    id       handleID
    onClose  func()
}

func newReadCloser(onClose func()) *ReadCloser {
    var rc ReadCloser
    rc.dataChan = make(chan []byte, 1)
    rc.onClose = onClose
    rc.id = register(rc.dataCb)
    return &rc
}

func (rc *ReadCloser) dataCb(data []byte) {
    // TODO: add a policy for slow readers
    rc.dataChan <- data
}

// Read reads raw data; the format is determined by the media type and property:
//   - For video, each call returns a single frame.
//   - For audio, each call returns a chunk whose size is configured by Latency.
func (rc *ReadCloser) Read() ([]byte, func(), error) {
    data, ok := <-rc.dataChan
    if !ok {
        return nil, func() {}, io.EOF
    }
    return data, func() {}, nil
}

// Close closes the capturing session, and no data will flow anymore
func (rc *ReadCloser) Close() {
    if rc.onClose != nil {
        rc.onClose()
    }
    close(rc.dataChan)
    unregister(rc.id)
}

// Session represents a capturing session.
type Session struct {
    device   Device
    cSession C.PAVBindSession
}

// NewSession creates a new capturing session
func NewSession(device Device) (*Session, error) {
    var session Session

    status := C.AVBindSessionInit(device.cDevice, &session.cSession)
    if status != nil {
        return nil, fmt.Errorf("%s", C.GoString(status))
    }

    session.device = device
    return &session, nil
}

// Close stops the capturing session and frees up resources
func (session *Session) Close() error {
    if session.cSession == nil {
        return nil
    }

    status := C.AVBindSessionFree(&session.cSession)
    if status != nil {
        return fmt.Errorf("%s", C.GoString(status))
    }
    return nil
}

// Open starts the capturing session. As soon as it returns successfully, the data will start
// flowing. The raw data can be retrieved by using ReadCloser's Read method.
func (session *Session) Open(property prop.Media) (*ReadCloser, error) {
    frameFormat, ok := frameFormatToAVBind(property.FrameFormat)
    if !ok {
        return nil, fmt.Errorf("Unsupported frame format")
    }

    cProperty := C.AVBindMediaProperty{
        width:       C.int(property.Width),
        height:      C.int(property.Height),
        frameFormat: frameFormat,
    }

    rc := newReadCloser(func() {
        C.AVBindSessionClose(session.cSession)
    })
    status := C.AVBindSessionOpen(
        session.cSession,
        cProperty,
        C.AVBindDataCallback(unsafe.Pointer(C.onDataBridge)),
        unsafe.Pointer(&rc.id),
    )
    if status != nil {
        return nil, fmt.Errorf("%s", C.GoString(status))
    }
    return rc, nil
}

// Properties queries the list of properties that the device supports
func (session *Session) Properties() []prop.Media {
    var cPropertiesPtr C.PAVBindMediaProperty
    var cPropertiesLen C.int

    status := C.AVBindSessionProperties(session.cSession, &cPropertiesPtr, &cPropertiesLen)
    if status != nil {
        return nil
    }

    // https://github.com/golang/go/wiki/cgo#turning-c-arrays-into-go-slices
    cProperties := (*[1 << 28]C.AVBindMediaProperty)(unsafe.Pointer(cPropertiesPtr))[:cPropertiesLen:cPropertiesLen]
    var properties []prop.Media
    for _, cProperty := range cProperties {
        frameFormat, ok := frameFormatFromAVBind(cProperty.frameFormat)
        if ok {
            properties = append(properties, prop.Media{
                Video: prop.Video{
                    Width:       int(cProperty.width),
                    Height:      int(cProperty.height),
                    FrameFormat: frameFormat,
                },
            })
        }
    }
    return properties
}
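Putting the pieces together, the exported API is intended to be used roughly as below. This is a hedged usage sketch, not an example shipped with this commit; the 640x480 UYVY property is an assumption for illustration and would normally be picked from Session.Properties():

package main

import (
    "fmt"

    "github.com/pion/mediadevices/pkg/avfoundation"
    "github.com/pion/mediadevices/pkg/frame"
    "github.com/pion/mediadevices/pkg/prop"
)

// Hypothetical flow: enumerate video devices, open a capture session on the
// first one, read a single raw frame, then tear everything down.
func main() {
    devices, err := avfoundation.Devices(avfoundation.Video)
    if err != nil {
        panic(err)
    }
    if len(devices) == 0 {
        panic("no AVFoundation video device available")
    }

    session, err := avfoundation.NewSession(devices[0])
    if err != nil {
        panic(err)
    }
    defer session.Close()

    rc, err := session.Open(prop.Media{
        Video: prop.Video{Width: 640, Height: 480, FrameFormat: frame.FormatUYVY},
    })
    if err != nil {
        panic(err)
    }
    defer rc.Close()

    buf, release, err := rc.Read()
    if err != nil {
        panic(err)
    }
    defer release()
    fmt.Println("got raw frame of", len(buf), "bytes")
}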
@@ -1,21 +1,45 @@
package codec

import (
    "io"

    "github.com/pion/mediadevices/pkg/io/audio"
    "github.com/pion/mediadevices/pkg/io/video"
    "github.com/pion/mediadevices/pkg/prop"
    "github.com/pion/webrtc/v2"
)

// RTPCodec wraps webrtc.RTPCodec. RTPCodec might extend webrtc.RTPCodec in the future.
type RTPCodec struct {
    *webrtc.RTPCodec
}

// NewRTPH264Codec is a helper to create an H264 codec
func NewRTPH264Codec(clockrate uint32) *RTPCodec {
    return &RTPCodec{webrtc.NewRTPH264Codec(webrtc.DefaultPayloadTypeH264, clockrate)}
}

// NewRTPVP8Codec is a helper to create a VP8 codec
func NewRTPVP8Codec(clockrate uint32) *RTPCodec {
    return &RTPCodec{webrtc.NewRTPVP8Codec(webrtc.DefaultPayloadTypeVP8, clockrate)}
}

// NewRTPVP9Codec is a helper to create a VP9 codec
func NewRTPVP9Codec(clockrate uint32) *RTPCodec {
    return &RTPCodec{webrtc.NewRTPVP9Codec(webrtc.DefaultPayloadTypeVP9, clockrate)}
}

// NewRTPOpusCodec is a helper to create an Opus codec
func NewRTPOpusCodec(clockrate uint32) *RTPCodec {
    return &RTPCodec{webrtc.NewRTPOpusCodec(webrtc.DefaultPayloadTypeOpus, clockrate)}
}

// AudioEncoderBuilder is the interface that wraps basic operations that are
// necessary to build the audio encoder.
//
// This interface is for codec implementors to provide codec specific params,
// but still giving generality for the users.
type AudioEncoderBuilder interface {
    // Name represents the codec name
    Name() string
    // RTPCodec represents the codec metadata
    RTPCodec() *RTPCodec
    // BuildAudioEncoder builds the audio encoder from the given media params and audio input
    BuildAudioEncoder(r audio.Reader, p prop.Media) (ReadCloser, error)
}
@@ -26,15 +50,16 @@ type AudioEncoderBuilder interface {
// This interface is for codec implementors to provide codec specific params,
// but still giving generality for the users.
type VideoEncoderBuilder interface {
    // Name represents the codec name
    Name() string
    // RTPCodec represents the codec metadata
    RTPCodec() *RTPCodec
    // BuildVideoEncoder builds the video encoder from the given media params and video input
    BuildVideoEncoder(r video.Reader, p prop.Media) (ReadCloser, error)
}

// ReadCloser is an io.ReadCloser with methods for rate limiting: SetBitRate and ForceKeyFrame
type ReadCloser interface {
    io.ReadCloser
    Read() (b []byte, release func(), err error)
    Close() error
    // SetBitRate sets the current target bitrate; a lower bitrate means less data will be
    // transmitted, but it also means that the quality will be lower.
    SetBitRate(int) error
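As a hedged illustration of why RTPCodec() is part of both builder interfaces: the wrapped *webrtc.RTPCodec is what a pion/webrtc v2 MediaEngine needs during SDP negotiation so the advertised payload type matches the encoder. The helper below is hypothetical and not part of this diff:

// registerWithEngine is a hypothetical helper that registers each builder's
// codec metadata with a pion/webrtc v2 MediaEngine.
func registerWithEngine(engine *webrtc.MediaEngine, builders ...VideoEncoderBuilder) {
    for _, builder := range builders {
        // RTPCodec() exposes the wrapped *webrtc.RTPCodec used for negotiation.
        engine.RegisterCodec(builder.RTPCodec().RTPCodec)
    }
}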
196
pkg/codec/mmal/bridge.h
Normal file
@@ -0,0 +1,196 @@
|
||||
#include <interface/mmal/mmal.h>
|
||||
#include <interface/mmal/util/mmal_default_components.h>
|
||||
#include <interface/mmal/util/mmal_util_params.h>
|
||||
#include <stdbool.h>
|
||||
#include <stdint.h>
|
||||
#include <stdlib.h>
|
||||
|
||||
#define CHK(__status, __msg) \
|
||||
do { \
|
||||
status.code = __status; \
|
||||
if (status.code != MMAL_SUCCESS) { \
|
||||
status.msg = __msg; \
|
||||
goto CleanUp; \
|
||||
} \
|
||||
} while (0)
|
||||
|
||||
typedef struct Status {
|
||||
MMAL_STATUS_T code;
|
||||
const char *msg;
|
||||
} Status;
|
||||
|
||||
typedef struct Slice {
|
||||
uint8_t *data;
|
||||
int len;
|
||||
} Slice;
|
||||
|
||||
typedef struct Params {
|
||||
int width, height;
|
||||
uint32_t bitrate;
|
||||
uint32_t key_frame_interval;
|
||||
} Params;
|
||||
|
||||
typedef struct Encoder {
|
||||
MMAL_COMPONENT_T *component;
|
||||
MMAL_PORT_T *port_in, *port_out;
|
||||
MMAL_QUEUE_T *queue_out;
|
||||
MMAL_POOL_T *pool_in, *pool_out;
|
||||
} Encoder;
|
||||
|
||||
Status enc_new(Params, Encoder *);
|
||||
Status enc_encode(Encoder *, Slice y, Slice cb, Slice cr, MMAL_BUFFER_HEADER_T **);
|
||||
Status enc_close(Encoder *);
|
||||
|
||||
static void encoder_in_cb(MMAL_PORT_T *port, MMAL_BUFFER_HEADER_T *buffer) { mmal_buffer_header_release(buffer); }
|
||||
|
||||
static void encoder_out_cb(MMAL_PORT_T *port, MMAL_BUFFER_HEADER_T *buffer) {
|
||||
MMAL_QUEUE_T *queue = (MMAL_QUEUE_T *)port->userdata;
|
||||
mmal_queue_put(queue, buffer);
|
||||
}
|
||||
|
||||
Status enc_new(Params params, Encoder *encoder) {
|
||||
Status status = {0};
|
||||
bool created = false;
|
||||
|
||||
memset(encoder, 0, sizeof(Encoder));
|
||||
|
||||
CHK(mmal_component_create(MMAL_COMPONENT_DEFAULT_VIDEO_ENCODER, &encoder->component),
|
||||
"Failed to create video encoder component");
|
||||
created = true;
|
||||
|
||||
encoder->port_in = encoder->component->input[0];
|
||||
encoder->port_in->format->type = MMAL_ES_TYPE_VIDEO;
|
||||
encoder->port_in->format->encoding = MMAL_ENCODING_I420;
|
||||
encoder->port_in->format->es->video.width = params.width;
|
||||
encoder->port_in->format->es->video.height = params.height;
|
||||
encoder->port_in->format->es->video.par.num = 1;
|
||||
encoder->port_in->format->es->video.par.den = 1;
|
||||
encoder->port_in->format->es->video.crop.x = 0;
|
||||
encoder->port_in->format->es->video.crop.y = 0;
|
||||
encoder->port_in->format->es->video.crop.width = params.width;
|
||||
encoder->port_in->format->es->video.crop.height = params.height;
|
||||
CHK(mmal_port_format_commit(encoder->port_in), "Failed to commit input port format");
|
||||
|
||||
encoder->port_out = encoder->component->output[0];
|
||||
encoder->port_out->format->type = MMAL_ES_TYPE_VIDEO;
|
||||
encoder->port_out->format->encoding = MMAL_ENCODING_H264;
|
||||
encoder->port_out->format->bitrate = params.bitrate;
|
||||
CHK(mmal_port_format_commit(encoder->port_out), "Failed to commit output port format");
|
||||
|
||||
MMAL_PARAMETER_VIDEO_PROFILE_T encoder_param_profile = {0};
|
||||
encoder_param_profile.hdr.id = MMAL_PARAMETER_PROFILE;
|
||||
encoder_param_profile.hdr.size = sizeof(encoder_param_profile);
|
||||
encoder_param_profile.profile[0].profile = MMAL_VIDEO_PROFILE_H264_BASELINE;
|
||||
encoder_param_profile.profile[0].level = MMAL_VIDEO_LEVEL_H264_42;
|
||||
CHK(mmal_port_parameter_set(encoder->port_out, &encoder_param_profile.hdr), "Failed to set encoder profile param");
|
||||
|
||||
CHK(mmal_port_parameter_set_uint32(encoder->port_out, MMAL_PARAMETER_INTRAPERIOD, params.key_frame_interval),
|
||||
"Failed to set intra period param");
|
||||
|
||||
MMAL_PARAMETER_VIDEO_RATECONTROL_T encoder_param_rate_control = {0};
|
||||
encoder_param_rate_control.hdr.id = MMAL_PARAMETER_RATECONTROL;
|
||||
encoder_param_rate_control.hdr.size = sizeof(encoder_param_rate_control);
|
||||
encoder_param_rate_control.control = MMAL_VIDEO_RATECONTROL_VARIABLE;
|
||||
    CHK(mmal_port_parameter_set(encoder->port_out, &encoder_param_rate_control.hdr), "Failed to set rate control param");

    // Some decoders expect SPS/PPS headers to be added to every frame
    CHK(mmal_port_parameter_set_boolean(encoder->port_out, MMAL_PARAMETER_VIDEO_ENCODE_INLINE_HEADER, MMAL_TRUE),
        "Failed to set inline header param");

    CHK(mmal_port_parameter_set_boolean(encoder->port_out, MMAL_PARAMETER_VIDEO_ENCODE_HEADERS_WITH_FRAME, MMAL_TRUE),
        "Failed to set headers with frame param");

    /* FIXME: Somehow this flag is broken? When this flag is on, the encoder will get stuck.
    // Since our use case is mainly for real time streaming, the encoder should optimized for low latency
    CHK(mmal_port_parameter_set_boolean(encoder->port_out, MMAL_PARAMETER_VIDEO_ENCODE_H264_LOW_LATENCY, MMAL_TRUE),
        "Failed to set low latency param");
    */

    // Now we know the format of both ports and the requirements of the encoder, we can create
    // our buffer headers and their associated memory buffers. We use the buffer pool API for this.
    encoder->port_in->buffer_num = encoder->port_in->buffer_num_min;
    // mmal calculates recommended size that's big enough to store all of the pixels
    encoder->port_in->buffer_size = encoder->port_in->buffer_size_recommended;
    encoder->pool_in = mmal_pool_create(encoder->port_in->buffer_num, encoder->port_in->buffer_size);
    encoder->port_out->buffer_num = encoder->port_out->buffer_num_min;
    encoder->port_out->buffer_size = encoder->port_out->buffer_size_recommended;
    encoder->pool_out = mmal_pool_create(encoder->port_out->buffer_num, encoder->port_out->buffer_size);

    // Create a queue to store our encoded video frames. The callback we will get when
    // a frame has been encoded will put the frame into this queue.
    encoder->queue_out = mmal_queue_create();
    encoder->port_out->userdata = (void *)encoder->queue_out;

    // Enable all the input port and the output port.
    // The callback specified here is the function which will be called when the buffer header
    // we sent to the component has been processed.
    CHK(mmal_port_enable(encoder->port_in, encoder_in_cb), "Failed to enable input port");
    CHK(mmal_port_enable(encoder->port_out, encoder_out_cb), "Failed to enable output port");

    // Enable the component. Components will only process data when they are enabled.
    CHK(mmal_component_enable(encoder->component), "Failed to enable component");

CleanUp:

    if (status.code != MMAL_SUCCESS) {
        if (created) {
            enc_close(encoder);
        }
    }

    return status;
}

// enc_encode encodes y, cb, cr. The encoded frame is going to be stored in encoded_buffer.
// IMPORTANT: the caller is responsible to release the ownership of encoded_buffer
Status enc_encode(Encoder *encoder, Slice y, Slice cb, Slice cr, MMAL_BUFFER_HEADER_T **encoded_buffer) {
    Status status = {0};
    MMAL_BUFFER_HEADER_T *buffer;
    uint32_t required_size;

    // buffer should always be available since the encoding process is blocking
    buffer = mmal_queue_get(encoder->pool_in->queue);
    assert(buffer != NULL);
    // buffer->data should've been allocated with enough memory to contain a frame by pool_in
    required_size = y.len + cb.len + cr.len;
    assert(buffer->alloc_size >= required_size);
    memcpy(buffer->data, y.data, y.len);
    memcpy(buffer->data + y.len, cb.data, cb.len);
    memcpy(buffer->data + y.len + cb.len, cr.data, cr.len);
    buffer->length = required_size;
    CHK(mmal_port_send_buffer(encoder->port_in, buffer), "Failed to send filled buffer to input port");

    while (1) {
        // Send empty buffers to the output port to allow the encoder to start
        // producing frames as soon as it gets input data
        while ((buffer = mmal_queue_get(encoder->pool_out->queue)) != NULL) {
            CHK(mmal_port_send_buffer(encoder->port_out, buffer), "Failed to send empty buffers to output port");
        }

        while ((buffer = mmal_queue_wait(encoder->queue_out)) != NULL) {
            if ((buffer->flags & MMAL_BUFFER_HEADER_FLAG_FRAME_END) != 0) {
                *encoded_buffer = buffer;
                goto CleanUp;
            }

            mmal_buffer_header_release(buffer);
        }
    }

CleanUp:

    return status;
}

Status enc_close(Encoder *encoder) {
    Status status = {0};

    mmal_pool_destroy(encoder->pool_out);
    mmal_pool_destroy(encoder->pool_in);
    mmal_queue_destroy(encoder->queue_out);
    mmal_component_destroy(encoder->component);

CleanUp:

    return status;
}
112
pkg/codec/mmal/mmal.go
Normal file
@@ -0,0 +1,112 @@
// Package mmal implements a hardware accelerated H264 encoder for raspberry pi.
// This package requires libmmal headers and libraries to be built.
// Reference: https://github.com/raspberrypi/userland/tree/master/interface/mmal
package mmal

// #cgo pkg-config: mmal
// #include "bridge.h"
import "C"
import (
    "fmt"
    "image"
    "io"
    "sync"
    "unsafe"

    "github.com/pion/mediadevices/pkg/codec"
    "github.com/pion/mediadevices/pkg/io/video"
    "github.com/pion/mediadevices/pkg/prop"
)

type encoder struct {
    engine C.Encoder
    r      video.Reader
    mu     sync.Mutex
    closed bool
    cntr   int
}

func statusToErr(status *C.Status) error {
    return fmt.Errorf("(status = %d) %s", int(status.code), C.GoString(status.msg))
}

func newEncoder(r video.Reader, p prop.Media, params Params) (codec.ReadCloser, error) {
    if params.KeyFrameInterval == 0 {
        params.KeyFrameInterval = 60
    }

    if params.BitRate == 0 {
        params.BitRate = 300000
    }

    e := encoder{
        r: video.ToI420(r),
    }
    status := C.enc_new(C.Params{
        width:              C.int(p.Width),
        height:             C.int(p.Height),
        bitrate:            C.uint(params.BitRate),
        key_frame_interval: C.uint(params.KeyFrameInterval),
    }, &e.engine)
    if status.code != 0 {
        return nil, statusToErr(&status)
    }

    return &e, nil
}

func (e *encoder) Read() ([]byte, func(), error) {
    e.mu.Lock()
    defer e.mu.Unlock()

    if e.closed {
        return nil, func() {}, io.EOF
    }

    img, _, err := e.r.Read()
    if err != nil {
        return nil, func() {}, err
    }
    imgReal := img.(*image.YCbCr)
    var y, cb, cr C.Slice
    y.data = (*C.uchar)(&imgReal.Y[0])
    y.len = C.int(len(imgReal.Y))
    cb.data = (*C.uchar)(&imgReal.Cb[0])
    cb.len = C.int(len(imgReal.Cb))
    cr.data = (*C.uchar)(&imgReal.Cr[0])
    cr.len = C.int(len(imgReal.Cr))

    var encodedBuffer *C.MMAL_BUFFER_HEADER_T
    status := C.enc_encode(&e.engine, y, cb, cr, &encodedBuffer)
    if status.code != 0 {
        return nil, func() {}, statusToErr(&status)
    }

    // GoBytes copies the C array to Go slice. After this, it's safe to release the C array
    encoded := C.GoBytes(unsafe.Pointer(encodedBuffer.data), C.int(encodedBuffer.length))
    // Release the buffer so that mmal can reuse this memory
    C.mmal_buffer_header_release(encodedBuffer)

    return encoded, func() {}, err
}

func (e *encoder) SetBitRate(b int) error {
    panic("SetBitRate is not implemented")
}

func (e *encoder) ForceKeyFrame() error {
    panic("ForceKeyFrame is not implemented")
}

func (e *encoder) Close() error {
    e.mu.Lock()
    defer e.mu.Unlock()

    if e.closed {
        return nil
    }

    e.closed = true
    C.enc_close(&e.engine)
    return nil
}
31
pkg/codec/mmal/params.go
Normal file
@@ -0,0 +1,31 @@
package mmal

import (
    "github.com/pion/mediadevices/pkg/codec"
    "github.com/pion/mediadevices/pkg/io/video"
    "github.com/pion/mediadevices/pkg/prop"
)

// Params stores libmmal specific encoding parameters.
type Params struct {
    codec.BaseParams
}

// NewParams returns default mmal codec specific parameters.
func NewParams() (Params, error) {
    return Params{
        BaseParams: codec.BaseParams{
            KeyFrameInterval: 60,
        },
    }, nil
}

// RTPCodec represents the codec metadata
func (p *Params) RTPCodec() *codec.RTPCodec {
    return codec.NewRTPH264Codec(90000)
}

// BuildVideoEncoder builds mmal encoder with given params
func (p *Params) BuildVideoEncoder(r video.Reader, property prop.Media) (codec.ReadCloser, error) {
    return newEncoder(r, property, *p)
}
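For orientation, the sketch below is not part of the diff; it shows how the new mmal package is intended to be wired together: NewParams, BuildVideoEncoder, then the three-value Read. It assumes a Raspberry Pi with libmmal and its pkg-config file installed, and the video.ReaderFunc source is a synthetic stand-in for a real camera driver.

package main

import (
    "fmt"
    "image"

    "github.com/pion/mediadevices/pkg/codec/mmal"
    "github.com/pion/mediadevices/pkg/io/video"
    "github.com/pion/mediadevices/pkg/prop"
)

func main() {
    // Synthetic 640x480 4:2:0 source; a real source would come from pkg/driver/camera.
    src := video.ReaderFunc(func() (image.Image, func(), error) {
        return image.NewYCbCr(image.Rect(0, 0, 640, 480), image.YCbCrSubsampleRatio420), func() {}, nil
    })

    params, err := mmal.NewParams()
    if err != nil {
        panic(err)
    }
    params.BitRate = 300000

    enc, err := params.BuildVideoEncoder(src, prop.Media{Video: prop.Video{Width: 640, Height: 480}})
    if err != nil {
        panic(err)
    }
    defer enc.Close()

    frame, release, err := enc.Read() // one encoded H264 frame
    if err != nil {
        panic(err)
    }
    defer release()
    fmt.Println("encoded bytes:", len(frame))
}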
@@ -16,7 +16,6 @@ import (
    "unsafe"

    "github.com/pion/mediadevices/pkg/codec"
    mio "github.com/pion/mediadevices/pkg/io"
    "github.com/pion/mediadevices/pkg/io/video"
    "github.com/pion/mediadevices/pkg/prop"
)
@@ -24,7 +23,6 @@ import (
type encoder struct {
    engine *C.Encoder
    r      video.Reader
    buff   []byte

    mu     sync.Mutex
    closed bool
@@ -52,26 +50,17 @@ func newEncoder(r video.Reader, p prop.Media, params Params) (codec.ReadCloser,
    }, nil
}

func (e *encoder) Read(p []byte) (n int, err error) {
func (e *encoder) Read() ([]byte, func(), error) {
    e.mu.Lock()
    defer e.mu.Unlock()

    if e.closed {
        return 0, io.EOF
        return nil, func() {}, io.EOF
    }

    if e.buff != nil {
        n, err = mio.Copy(p, e.buff)
        if err == nil {
            e.buff = nil
        }

        return n, err
    }

    img, err := e.r.Read()
    img, _, err := e.r.Read()
    if err != nil {
        return 0, err
        return nil, func() {}, err
    }

    yuvImg := img.(*image.YCbCr)
@@ -85,16 +74,11 @@ func (e *encoder) Read(p []byte) (n int, err error) {
        width: C.int(bounds.Max.X - bounds.Min.X),
    }, &rv)
    if err := errResult(rv); err != nil {
        return 0, fmt.Errorf("failed in encoding: %v", err)
        return nil, func() {}, fmt.Errorf("failed in encoding: %v", err)
    }

    encoded := C.GoBytes(unsafe.Pointer(s.data), s.data_len)
    n, err = mio.Copy(p, encoded)
    if err != nil {
        e.buff = encoded
    }

    return n, err
    return encoded, func() {}, nil
}

func (e *encoder) SetBitRate(b int) error {
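The hunks above are one instance of the interface change that runs through every codec in this release: Read no longer copies into a caller-supplied buffer via mio.Copy, it returns the encoded frame together with a release callback. Purely as an illustration (not part of the diff; io and pkg/codec imports omitted), the two helpers below contrast the old and new calling conventions for a codec.ReadCloser:

// Old convention: the encoder behaved like io.Reader, the caller owned the buffer,
// and short writes were handled by mio.Copy inside the encoder.
func readFrameOld(enc io.Reader) ([]byte, error) {
    buf := make([]byte, 1024*1024)
    n, err := enc.Read(buf)
    if err != nil {
        return nil, err
    }
    return buf[:n], nil
}

// New convention: the encoder owns the buffer and hands out a release callback.
func readFrameNew(enc codec.ReadCloser) ([]byte, error) {
    frame, release, err := enc.Read()
    if err != nil {
        return nil, err
    }
    defer release()
    out := make([]byte, len(frame))
    copy(out, frame) // copy before releasing, in case the encoder reuses its buffer
    return out, nil
}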
@@ -4,7 +4,6 @@ import (
    "github.com/pion/mediadevices/pkg/codec"
    "github.com/pion/mediadevices/pkg/io/video"
    "github.com/pion/mediadevices/pkg/prop"
    "github.com/pion/webrtc/v2"
)

// Params stores libopenh264 specific encoding parameters.
@@ -21,9 +20,9 @@ func NewParams() (Params, error) {
    }, nil
}

// Name represents the codec name
func (p *Params) Name() string {
    return webrtc.H264
// RTPCodec represents the codec metadata
func (p *Params) RTPCodec() *codec.RTPCodec {
    return codec.NewRTPH264Codec(90000)
}

// BuildVideoEncoder builds openh264 encoder with given params
@@ -1,6 +1,7 @@
package opus

import (
    "errors"
    "fmt"
    "math"

@@ -9,11 +10,12 @@ import (
    "github.com/pion/mediadevices/pkg/io/audio"
    "github.com/pion/mediadevices/pkg/prop"
    "github.com/pion/mediadevices/pkg/wave"
    "github.com/pion/mediadevices/pkg/wave/mixer"
)

type encoder struct {
    engine *opus.Encoder
    inBuff *wave.Float32Interleaved
    inBuff wave.Audio
    reader audio.Reader
}

@@ -32,6 +34,10 @@ func newEncoder(r audio.Reader, p prop.Media, params Params) (codec.ReadCloser,
        params.BitRate = 32000
    }

    if params.ChannelMixer == nil {
        params.ChannelMixer = &mixer.MonoMixer{}
    }

    // Select the nearest supported latency
    var targetLatency float64
    // TODO: use p.Latency.Milliseconds() after Go 1.12 EOL
@@ -47,8 +53,7 @@ func newEncoder(r audio.Reader, p prop.Media, params Params) (codec.ReadCloser,
        targetLatency = latency
    }

    // Since audio.Reader only supports stereo mode, channels is always 2
    channels := 2
    channels := p.ChannelCount

    engine, err := opus.NewEncoder(p.SampleRate, channels, opus.AppVoIP)
    if err != nil {
@@ -58,47 +63,32 @@ func newEncoder(r audio.Reader, p prop.Media, params Params) (codec.ReadCloser,
        return nil, err
    }

    inBuffSize := int(targetLatency * float64(p.SampleRate) / 1000)
    inBuff := wave.NewFloat32Interleaved(
        wave.ChunkInfo{Channels: channels, Len: inBuffSize},
    )
    inBuff.Data = inBuff.Data[:0]
    e := encoder{engine, inBuff, r}
    rMix := audio.NewChannelMixer(channels, params.ChannelMixer)
    rBuf := audio.NewBuffer(int(targetLatency * float64(p.SampleRate) / 1000))
    e := encoder{
        engine: engine,
        reader: rMix(rBuf(r)),
    }
    return &e, nil
}

func (e *encoder) Read(p []byte) (n int, err error) {
    // While the buffer is not full, keep reading so that we meet the latency requirement
    nLatency := e.inBuff.ChunkInfo().Len * e.inBuff.ChunkInfo().Channels
    for len(e.inBuff.Data) < nLatency {
        buff, err := e.reader.Read()
        if err != nil {
            return 0, err
        }
        // TODO: convert audio format
        b, ok := buff.(*wave.Float32Interleaved)
        if !ok {
            panic("unsupported audio format")
        }
        switch {
        case b.Size.Channels == 1 && e.inBuff.ChunkInfo().Channels != 1:
            for _, d := range b.Data {
                for ch := 0; ch < e.inBuff.ChunkInfo().Channels; ch++ {
                    e.inBuff.Data = append(e.inBuff.Data, d)
                }
            }
        case b.Size.Channels == e.inBuff.ChunkInfo().Channels:
            e.inBuff.Data = append(e.inBuff.Data, b.Data...)
        }
    }

    n, err = e.engine.EncodeFloat32(e.inBuff.Data[:nLatency], p)
func (e *encoder) Read() ([]byte, func(), error) {
    buff, _, err := e.reader.Read()
    if err != nil {
        return n, err
        return nil, func() {}, err
    }
    e.inBuff.Data = e.inBuff.Data[nLatency:]

    return n, nil
    encoded := make([]byte, 1024)
    switch b := buff.(type) {
    case *wave.Int16Interleaved:
        n, err := e.engine.Encode(b.Data, encoded)
        return encoded[:n:n], func() {}, err
    case *wave.Float32Interleaved:
        n, err := e.engine.EncodeFloat32(b.Data, encoded)
        return encoded[:n:n], func() {}, err
    default:
        return nil, func() {}, errors.New("unknown type of audio buffer")
    }
}

func (e *encoder) SetBitRate(b int) error {
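With the mixer and buffer readers in place, the opus encoder no longer assumes stereo input. A rough configuration sketch follows; it is not part of the diff, and BuildAudioEncoder's signature is assumed to mirror BuildVideoEncoder, taking an audio.Reader and a prop.Media.

import (
    "time"

    "github.com/pion/mediadevices/pkg/codec"
    "github.com/pion/mediadevices/pkg/codec/opus"
    "github.com/pion/mediadevices/pkg/io/audio"
    "github.com/pion/mediadevices/pkg/prop"
    "github.com/pion/mediadevices/pkg/wave/mixer"
)

func buildOpusEncoder(src audio.Reader) (codec.ReadCloser, error) {
    params, err := opus.NewParams()
    if err != nil {
        return nil, err
    }
    params.BitRate = 32000
    params.ChannelMixer = &mixer.MonoMixer{} // explicit here; MonoMixer is also the default when nil

    // 48 kHz stereo capture with 20 ms latency; the encoder re-chunks the stream to the
    // requested latency via audio.NewBuffer and converts the channel count via
    // audio.NewChannelMixer when the source differs.
    return params.BuildAudioEncoder(src, prop.Media{
        Audio: prop.Audio{
            SampleRate:   48000,
            ChannelCount: 2,
            Latency:      20 * time.Millisecond,
        },
    })
}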
@@ -4,12 +4,14 @@ import (
    "github.com/pion/mediadevices/pkg/codec"
    "github.com/pion/mediadevices/pkg/io/audio"
    "github.com/pion/mediadevices/pkg/prop"
    "github.com/pion/webrtc/v2"
    "github.com/pion/mediadevices/pkg/wave/mixer"
)

// Params stores opus specific encoding parameters.
type Params struct {
    codec.BaseParams
    // ChannelMixer is a mixer to be used if number of given and expected channels differ.
    ChannelMixer mixer.ChannelMixer
}

// NewParams returns default opus codec specific parameters.
@@ -17,9 +19,9 @@ func NewParams() (Params, error) {
    return Params{}, nil
}

// Name represents the codec name
func (p *Params) Name() string {
    return webrtc.Opus
// RTPCodec represents the codec metadata
func (p *Params) RTPCodec() *codec.RTPCodec {
    return codec.NewRTPOpusCodec(48000)
}

// BuildAudioEncoder builds opus encoder with given params
@@ -4,7 +4,6 @@ import (
    "github.com/pion/mediadevices/pkg/codec"
    "github.com/pion/mediadevices/pkg/io/video"
    "github.com/pion/mediadevices/pkg/prop"
    "github.com/pion/webrtc/v2"
)

// ParamsVP8 stores VP8 encoding parameters.
@@ -44,9 +43,9 @@ func NewVP8Params() (ParamsVP8, error) {
    }, nil
}

// Name represents the codec name
func (p *ParamsVP8) Name() string {
    return webrtc.VP8
// RTPCodec represents the codec metadata
func (p *ParamsVP8) RTPCodec() *codec.RTPCodec {
    return codec.NewRTPVP8Codec(90000)
}

// BuildVideoEncoder builds VP8 encoder with given params
@@ -113,9 +112,9 @@ func NewVP9Params() (ParamsVP9, error) {
    }, nil
}

// Name represents the codec name
func (p *ParamsVP9) Name() string {
    return webrtc.VP9
// RTPCodec represents the codec metadata
func (p *ParamsVP9) RTPCodec() *codec.RTPCodec {
    return codec.NewRTPVP9Codec(90000)
}

// BuildVideoEncoder builds VP9 encoder with given params
@@ -64,7 +64,6 @@ import (
|
||||
"unsafe"
|
||||
|
||||
"github.com/pion/mediadevices/pkg/codec"
|
||||
mio "github.com/pion/mediadevices/pkg/io"
|
||||
"github.com/pion/mediadevices/pkg/io/video"
|
||||
"github.com/pion/mediadevices/pkg/prop"
|
||||
)
|
||||
@@ -80,7 +79,6 @@ const (
|
||||
|
||||
type encoderVP8 struct {
|
||||
r video.Reader
|
||||
buf []byte
|
||||
frame []byte
|
||||
|
||||
fdDRI C.int
|
||||
@@ -297,25 +295,17 @@ func newVP8Encoder(r video.Reader, p prop.Media, params ParamsVP8) (codec.ReadCl
|
||||
return e, nil
|
||||
}
|
||||
|
||||
func (e *encoderVP8) Read(p []byte) (int, error) {
|
||||
func (e *encoderVP8) Read() ([]byte, func(), error) {
|
||||
e.mu.Lock()
|
||||
defer e.mu.Unlock()
|
||||
|
||||
if e.closed {
|
||||
return 0, io.EOF
|
||||
return nil, func() {}, io.EOF
|
||||
}
|
||||
|
||||
if e.buf != nil {
|
||||
n, err := mio.Copy(p, e.buf)
|
||||
if err == nil {
|
||||
e.buf = nil
|
||||
}
|
||||
return n, err
|
||||
}
|
||||
|
||||
img, err := e.r.Read()
|
||||
img, _, err := e.r.Read()
|
||||
if err != nil {
|
||||
return 0, err
|
||||
return nil, func() {}, err
|
||||
}
|
||||
yuvImg := img.(*image.YCbCr)
|
||||
|
||||
@@ -357,7 +347,7 @@ func (e *encoderVP8) Read(p []byte) (int, error) {
|
||||
}
|
||||
}
|
||||
if e.picParam.reconstructed_frame == C.VA_INVALID_SURFACE {
|
||||
return 0, errors.New("no available surface")
|
||||
return nil, func() {}, errors.New("no available surface")
|
||||
}
|
||||
|
||||
C.setForceKFFlagVP8(&e.picParam, 0)
|
||||
@@ -425,7 +415,7 @@ func (e *encoderVP8) Read(p []byte) (int, error) {
|
||||
C.size_t(uintptr(p.src)),
|
||||
&id,
|
||||
); s != C.VA_STATUS_SUCCESS {
|
||||
return 0, fmt.Errorf("failed to create buffer: %s", C.GoString(C.vaErrorStr(s)))
|
||||
return nil, func() {}, fmt.Errorf("failed to create buffer: %s", C.GoString(C.vaErrorStr(s)))
|
||||
}
|
||||
buffs = append(buffs, id)
|
||||
}
|
||||
@@ -435,17 +425,17 @@ func (e *encoderVP8) Read(p []byte) (int, error) {
|
||||
e.display, e.ctxID,
|
||||
e.surfs[surfaceVP8Input],
|
||||
); s != C.VA_STATUS_SUCCESS {
|
||||
return 0, fmt.Errorf("failed to begin picture: %s", C.GoString(C.vaErrorStr(s)))
|
||||
return nil, func() {}, fmt.Errorf("failed to begin picture: %s", C.GoString(C.vaErrorStr(s)))
|
||||
}
|
||||
|
||||
// Upload image
|
||||
var vaImg C.VAImage
|
||||
var rawBuf unsafe.Pointer
|
||||
if s := C.vaDeriveImage(e.display, e.surfs[surfaceVP8Input], &vaImg); s != C.VA_STATUS_SUCCESS {
|
||||
return 0, fmt.Errorf("failed to derive image: %s", C.GoString(C.vaErrorStr(s)))
|
||||
return nil, func() {}, fmt.Errorf("failed to derive image: %s", C.GoString(C.vaErrorStr(s)))
|
||||
}
|
||||
if s := C.vaMapBuffer(e.display, vaImg.buf, &rawBuf); s != C.VA_STATUS_SUCCESS {
|
||||
return 0, fmt.Errorf("failed to map buffer: %s", C.GoString(C.vaErrorStr(s)))
|
||||
return nil, func() {}, fmt.Errorf("failed to map buffer: %s", C.GoString(C.vaErrorStr(s)))
|
||||
}
|
||||
// TODO: use vaImg.pitches to support padding
|
||||
C.memcpy(
|
||||
@@ -461,10 +451,10 @@ func (e *encoderVP8) Read(p []byte) (int, error) {
|
||||
unsafe.Pointer(&yuvImg.Cr[0]), C.size_t(len(yuvImg.Cr)),
|
||||
)
|
||||
if s := C.vaUnmapBuffer(e.display, vaImg.buf); s != C.VA_STATUS_SUCCESS {
|
||||
return 0, fmt.Errorf("failed to unmap buffer: %s", C.GoString(C.vaErrorStr(s)))
|
||||
return nil, func() {}, fmt.Errorf("failed to unmap buffer: %s", C.GoString(C.vaErrorStr(s)))
|
||||
}
|
||||
if s := C.vaDestroyImage(e.display, vaImg.image_id); s != C.VA_STATUS_SUCCESS {
|
||||
return 0, fmt.Errorf("failed to destroy image: %s", C.GoString(C.vaErrorStr(s)))
|
||||
return nil, func() {}, fmt.Errorf("failed to destroy image: %s", C.GoString(C.vaErrorStr(s)))
|
||||
}
|
||||
|
||||
if s := C.vaRenderPicture(
|
||||
@@ -472,38 +462,38 @@ func (e *encoderVP8) Read(p []byte) (int, error) {
|
||||
&buffs[1], // 0 is for ouput
|
||||
C.int(len(buffs)-1),
|
||||
); s != C.VA_STATUS_SUCCESS {
|
||||
return 0, fmt.Errorf("failed to render picture: %s", C.GoString(C.vaErrorStr(s)))
|
||||
return nil, func() {}, fmt.Errorf("failed to render picture: %s", C.GoString(C.vaErrorStr(s)))
|
||||
}
|
||||
if s := C.vaEndPicture(
|
||||
e.display, e.ctxID,
|
||||
); s != C.VA_STATUS_SUCCESS {
|
||||
return 0, fmt.Errorf("failed to end picture: %s", C.GoString(C.vaErrorStr(s)))
|
||||
return nil, func() {}, fmt.Errorf("failed to end picture: %s", C.GoString(C.vaErrorStr(s)))
|
||||
}
|
||||
|
||||
// Load encoded data
|
||||
for retry := 3; retry >= 0; retry-- {
|
||||
if s := C.vaSyncSurface(e.display, e.picParam.reconstructed_frame); s != C.VA_STATUS_SUCCESS {
|
||||
return 0, fmt.Errorf("failed to sync surface: %s", C.GoString(C.vaErrorStr(s)))
|
||||
return nil, func() {}, fmt.Errorf("failed to sync surface: %s", C.GoString(C.vaErrorStr(s)))
|
||||
}
|
||||
var surfStat C.VASurfaceStatus
|
||||
if s := C.vaQuerySurfaceStatus(
|
||||
e.display, e.picParam.reconstructed_frame, &surfStat,
|
||||
); s != C.VA_STATUS_SUCCESS {
|
||||
return 0, fmt.Errorf("failed to query surface status: %s", C.GoString(C.vaErrorStr(s)))
|
||||
return nil, func() {}, fmt.Errorf("failed to query surface status: %s", C.GoString(C.vaErrorStr(s)))
|
||||
}
|
||||
if surfStat == C.VASurfaceReady {
|
||||
break
|
||||
}
|
||||
if retry == 0 {
|
||||
return 0, fmt.Errorf("failed to sync surface: %d", surfStat)
|
||||
return nil, func() {}, fmt.Errorf("failed to sync surface: %d", surfStat)
|
||||
}
|
||||
}
|
||||
var seg *C.VACodedBufferSegment
|
||||
if s := C.vaMapBufferSeg(e.display, buffs[0], &seg); s != C.VA_STATUS_SUCCESS {
|
||||
return 0, fmt.Errorf("failed to map buffer: %s", C.GoString(C.vaErrorStr(s)))
|
||||
return nil, func() {}, fmt.Errorf("failed to map buffer: %s", C.GoString(C.vaErrorStr(s)))
|
||||
}
|
||||
if seg.status&C.VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK != 0 {
|
||||
return 0, errors.New("buffer size too small")
|
||||
return nil, func() {}, errors.New("buffer size too small")
|
||||
}
|
||||
|
||||
if cap(e.frame) < int(seg.size) {
|
||||
@@ -516,13 +506,13 @@ func (e *encoderVP8) Read(p []byte) (int, error) {
|
||||
)
|
||||
|
||||
if s := C.vaUnmapBuffer(e.display, buffs[0]); s != C.VA_STATUS_SUCCESS {
|
||||
return 0, fmt.Errorf("failed to unmap buffer: %s", C.GoString(C.vaErrorStr(s)))
|
||||
return nil, func() {}, fmt.Errorf("failed to unmap buffer: %s", C.GoString(C.vaErrorStr(s)))
|
||||
}
|
||||
|
||||
// Destroy buffers
|
||||
for _, b := range buffs {
|
||||
if s := C.vaDestroyBuffer(e.display, b); s != C.VA_STATUS_SUCCESS {
|
||||
return 0, fmt.Errorf("failed to destroy buffer: %s", C.GoString(C.vaErrorStr(s)))
|
||||
return nil, func() {}, fmt.Errorf("failed to destroy buffer: %s", C.GoString(C.vaErrorStr(s)))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -545,11 +535,9 @@ func (e *encoderVP8) Read(p []byte) (int, error) {
|
||||
e.picParam.ref_last_frame = e.picParam.reconstructed_frame
|
||||
C.setRefreshLastFlagVP8(&e.picParam, 1)
|
||||
|
||||
n, err := mio.Copy(p, e.frame)
|
||||
if err != nil {
|
||||
e.buf = e.frame
|
||||
}
|
||||
return n, err
|
||||
encoded := make([]byte, len(e.frame))
|
||||
copy(encoded, e.frame)
|
||||
return encoded, func() {}, err
|
||||
}
|
||||
|
||||
func (e *encoderVP8) SetBitRate(b int) error {
|
||||
|
@@ -47,7 +47,6 @@ import (
|
||||
"unsafe"
|
||||
|
||||
"github.com/pion/mediadevices/pkg/codec"
|
||||
mio "github.com/pion/mediadevices/pkg/io"
|
||||
"github.com/pion/mediadevices/pkg/io/video"
|
||||
"github.com/pion/mediadevices/pkg/prop"
|
||||
)
|
||||
@@ -67,7 +66,6 @@ const (
|
||||
|
||||
type encoderVP9 struct {
|
||||
r video.Reader
|
||||
buf []byte
|
||||
frame []byte
|
||||
|
||||
fdDRI C.int
|
||||
@@ -286,25 +284,17 @@ func newVP9Encoder(r video.Reader, p prop.Media, params ParamsVP9) (codec.ReadCl
|
||||
return e, nil
|
||||
}
|
||||
|
||||
func (e *encoderVP9) Read(p []byte) (int, error) {
|
||||
func (e *encoderVP9) Read() ([]byte, func(), error) {
|
||||
e.mu.Lock()
|
||||
defer e.mu.Unlock()
|
||||
|
||||
if e.closed {
|
||||
return 0, io.EOF
|
||||
return nil, func() {}, io.EOF
|
||||
}
|
||||
|
||||
if e.buf != nil {
|
||||
n, err := mio.Copy(p, e.buf)
|
||||
if err == nil {
|
||||
e.buf = nil
|
||||
}
|
||||
return n, err
|
||||
}
|
||||
|
||||
img, err := e.r.Read()
|
||||
img, _, err := e.r.Read()
|
||||
if err != nil {
|
||||
return 0, err
|
||||
return nil, func() {}, err
|
||||
}
|
||||
yuvImg := img.(*image.YCbCr)
|
||||
|
||||
@@ -388,7 +378,7 @@ func (e *encoderVP9) Read(p []byte) (int, error) {
|
||||
C.size_t(uintptr(p.src)),
|
||||
&id,
|
||||
); s != C.VA_STATUS_SUCCESS {
|
||||
return 0, fmt.Errorf("failed to create buffer: %s", C.GoString(C.vaErrorStr(s)))
|
||||
return nil, func() {}, fmt.Errorf("failed to create buffer: %s", C.GoString(C.vaErrorStr(s)))
|
||||
}
|
||||
buffs = append(buffs, id)
|
||||
}
|
||||
@@ -398,17 +388,17 @@ func (e *encoderVP9) Read(p []byte) (int, error) {
|
||||
e.display, e.ctxID,
|
||||
e.surfs[surfaceVP9Input],
|
||||
); s != C.VA_STATUS_SUCCESS {
|
||||
return 0, fmt.Errorf("failed to begin picture: %s", C.GoString(C.vaErrorStr(s)))
|
||||
return nil, func() {}, fmt.Errorf("failed to begin picture: %s", C.GoString(C.vaErrorStr(s)))
|
||||
}
|
||||
|
||||
// Upload image
|
||||
var vaImg C.VAImage
|
||||
var rawBuf unsafe.Pointer
|
||||
if s := C.vaDeriveImage(e.display, e.surfs[surfaceVP9Input], &vaImg); s != C.VA_STATUS_SUCCESS {
|
||||
return 0, fmt.Errorf("failed to derive image: %s", C.GoString(C.vaErrorStr(s)))
|
||||
return nil, func() {}, fmt.Errorf("failed to derive image: %s", C.GoString(C.vaErrorStr(s)))
|
||||
}
|
||||
if s := C.vaMapBuffer(e.display, vaImg.buf, &rawBuf); s != C.VA_STATUS_SUCCESS {
|
||||
return 0, fmt.Errorf("failed to map buffer: %s", C.GoString(C.vaErrorStr(s)))
|
||||
return nil, func() {}, fmt.Errorf("failed to map buffer: %s", C.GoString(C.vaErrorStr(s)))
|
||||
}
|
||||
// TODO: use vaImg.pitches to support padding
|
||||
C.copyI420toNV12(
|
||||
@@ -419,10 +409,10 @@ func (e *encoderVP9) Read(p []byte) (int, error) {
|
||||
C.uint(len(yuvImg.Y)),
|
||||
)
|
||||
if s := C.vaUnmapBuffer(e.display, vaImg.buf); s != C.VA_STATUS_SUCCESS {
|
||||
return 0, fmt.Errorf("failed to unmap buffer: %s", C.GoString(C.vaErrorStr(s)))
|
||||
return nil, func() {}, fmt.Errorf("failed to unmap buffer: %s", C.GoString(C.vaErrorStr(s)))
|
||||
}
|
||||
if s := C.vaDestroyImage(e.display, vaImg.image_id); s != C.VA_STATUS_SUCCESS {
|
||||
return 0, fmt.Errorf("failed to destroy image: %s", C.GoString(C.vaErrorStr(s)))
|
||||
return nil, func() {}, fmt.Errorf("failed to destroy image: %s", C.GoString(C.vaErrorStr(s)))
|
||||
}
|
||||
|
||||
if s := C.vaRenderPicture(
|
||||
@@ -430,27 +420,27 @@ func (e *encoderVP9) Read(p []byte) (int, error) {
|
||||
&buffs[1], // 0 is for ouput
|
||||
C.int(len(buffs)-1),
|
||||
); s != C.VA_STATUS_SUCCESS {
|
||||
return 0, fmt.Errorf("failed to render picture: %s", C.GoString(C.vaErrorStr(s)))
|
||||
return nil, func() {}, fmt.Errorf("failed to render picture: %s", C.GoString(C.vaErrorStr(s)))
|
||||
}
|
||||
if s := C.vaEndPicture(
|
||||
e.display, e.ctxID,
|
||||
); s != C.VA_STATUS_SUCCESS {
|
||||
return 0, fmt.Errorf("failed to end picture: %s", C.GoString(C.vaErrorStr(s)))
|
||||
return nil, func() {}, fmt.Errorf("failed to end picture: %s", C.GoString(C.vaErrorStr(s)))
|
||||
}
|
||||
|
||||
// Load encoded data
|
||||
if s := C.vaSyncSurface(e.display, e.picParam.reconstructed_frame); s != C.VA_STATUS_SUCCESS {
|
||||
return 0, fmt.Errorf("failed to sync surface: %s", C.GoString(C.vaErrorStr(s)))
|
||||
return nil, func() {}, fmt.Errorf("failed to sync surface: %s", C.GoString(C.vaErrorStr(s)))
|
||||
}
|
||||
var surfStat C.VASurfaceStatus
|
||||
if s := C.vaQuerySurfaceStatus(
|
||||
e.display, e.picParam.reconstructed_frame, &surfStat,
|
||||
); s != C.VA_STATUS_SUCCESS {
|
||||
return 0, fmt.Errorf("failed to query surface status: %s", C.GoString(C.vaErrorStr(s)))
|
||||
return nil, func() {}, fmt.Errorf("failed to query surface status: %s", C.GoString(C.vaErrorStr(s)))
|
||||
}
|
||||
var seg *C.VACodedBufferSegment
|
||||
if s := C.vaMapBufferSeg(e.display, buffs[0], &seg); s != C.VA_STATUS_SUCCESS {
|
||||
return 0, fmt.Errorf("failed to map buffer: %s", C.GoString(C.vaErrorStr(s)))
|
||||
return nil, func() {}, fmt.Errorf("failed to map buffer: %s", C.GoString(C.vaErrorStr(s)))
|
||||
}
|
||||
if cap(e.frame) < int(seg.size) {
|
||||
e.frame = make([]byte, int(seg.size))
|
||||
@@ -462,13 +452,13 @@ func (e *encoderVP9) Read(p []byte) (int, error) {
|
||||
)
|
||||
|
||||
if s := C.vaUnmapBuffer(e.display, buffs[0]); s != C.VA_STATUS_SUCCESS {
|
||||
return 0, fmt.Errorf("failed to unmap buffer: %s", C.GoString(C.vaErrorStr(s)))
|
||||
return nil, func() {}, fmt.Errorf("failed to unmap buffer: %s", C.GoString(C.vaErrorStr(s)))
|
||||
}
|
||||
|
||||
// Destroy buffers
|
||||
for _, b := range buffs {
|
||||
if s := C.vaDestroyBuffer(e.display, b); s != C.VA_STATUS_SUCCESS {
|
||||
return 0, fmt.Errorf("failed to destroy buffer: %s", C.GoString(C.vaErrorStr(s)))
|
||||
return nil, func() {}, fmt.Errorf("failed to destroy buffer: %s", C.GoString(C.vaErrorStr(s)))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -480,11 +470,9 @@ func (e *encoderVP9) Read(p []byte) (int, error) {
|
||||
e.slotCurr = 0
|
||||
}
|
||||
|
||||
n, err := mio.Copy(p, e.frame)
|
||||
if err != nil {
|
||||
e.buf = e.frame
|
||||
}
|
||||
return n, err
|
||||
encoded := make([]byte, len(e.frame))
|
||||
copy(encoded, e.frame)
|
||||
return encoded, func() {}, err
|
||||
}
|
||||
|
||||
func (e *encoderVP9) SetBitRate(b int) error {
|
||||
|
@@ -56,10 +56,8 @@ import (
|
||||
"unsafe"
|
||||
|
||||
"github.com/pion/mediadevices/pkg/codec"
|
||||
mio "github.com/pion/mediadevices/pkg/io"
|
||||
"github.com/pion/mediadevices/pkg/io/video"
|
||||
"github.com/pion/mediadevices/pkg/prop"
|
||||
"github.com/pion/webrtc/v2"
|
||||
)
|
||||
|
||||
type encoder struct {
|
||||
@@ -68,7 +66,6 @@ type encoder struct {
|
||||
cfg *C.vpx_codec_enc_cfg_t
|
||||
r video.Reader
|
||||
frameIndex int
|
||||
buff []byte
|
||||
tStart int
|
||||
tLastFrame int
|
||||
frame []byte
|
||||
@@ -95,9 +92,9 @@ func NewVP8Params() (VP8Params, error) {
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Name represents the codec name
|
||||
func (p *VP8Params) Name() string {
|
||||
return webrtc.VP8
|
||||
// RTPCodec represents the codec metadata
|
||||
func (p *VP8Params) RTPCodec() *codec.RTPCodec {
|
||||
return codec.NewRTPVP8Codec(90000)
|
||||
}
|
||||
|
||||
// BuildVideoEncoder builds VP8 encoder with given params
|
||||
@@ -122,9 +119,9 @@ func NewVP9Params() (VP9Params, error) {
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Name represents the codec name
|
||||
func (p *VP9Params) Name() string {
|
||||
return webrtc.VP9
|
||||
// RTPCodec represents the codec metadata
|
||||
func (p *VP9Params) RTPCodec() *codec.RTPCodec {
|
||||
return codec.NewRTPVP9Codec(90000)
|
||||
}
|
||||
|
||||
// BuildVideoEncoder builds VP9 encoder with given params
|
||||
@@ -207,25 +204,17 @@ func newEncoder(r video.Reader, p prop.Media, params Params, codecIface *C.vpx_c
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (e *encoder) Read(p []byte) (int, error) {
|
||||
func (e *encoder) Read() ([]byte, func(), error) {
|
||||
e.mu.Lock()
|
||||
defer e.mu.Unlock()
|
||||
|
||||
if e.closed {
|
||||
return 0, io.EOF
|
||||
return nil, func() {}, io.EOF
|
||||
}
|
||||
|
||||
if e.buff != nil {
|
||||
n, err := mio.Copy(p, e.buff)
|
||||
if err == nil {
|
||||
e.buff = nil
|
||||
}
|
||||
return n, err
|
||||
}
|
||||
|
||||
img, err := e.r.Read()
|
||||
img, _, err := e.r.Read()
|
||||
if err != nil {
|
||||
return 0, err
|
||||
return nil, func() {}, err
|
||||
}
|
||||
yuvImg := img.(*image.YCbCr)
|
||||
bounds := yuvImg.Bounds()
|
||||
@@ -241,20 +230,29 @@ func (e *encoder) Read(p []byte) (int, error) {
|
||||
if e.cfg.g_w != C.uint(width) || e.cfg.g_h != C.uint(height) {
|
||||
e.cfg.g_w, e.cfg.g_h = C.uint(width), C.uint(height)
|
||||
if ec := C.vpx_codec_enc_config_set(e.codec, e.cfg); ec != C.VPX_CODEC_OK {
|
||||
return 0, fmt.Errorf("vpx_codec_enc_config_set failed (%d)", ec)
|
||||
return nil, func() {}, fmt.Errorf("vpx_codec_enc_config_set failed (%d)", ec)
|
||||
}
|
||||
e.raw.w, e.raw.h = C.uint(width), C.uint(height)
|
||||
e.raw.r_w, e.raw.r_h = C.uint(width), C.uint(height)
|
||||
e.raw.d_w, e.raw.d_h = C.uint(width), C.uint(height)
|
||||
}
|
||||
|
||||
duration := t - e.tLastFrame
|
||||
// VPX doesn't allow 0 duration. If 0 is given, vpx_codec_encode will fail with VPX_CODEC_INVALID_PARAM.
|
||||
// 0 duration is possible because mediadevices first gets the frame meta data by reading from the source,
|
||||
// and consequently the codec will read the first frame from the buffer. This means the first frame won't
|
||||
// have a pause to the second frame, which means if the delay is <1 ms (vpx duration resolution), duration
|
||||
// is going to be 0.
|
||||
if duration == 0 {
|
||||
duration = 1
|
||||
}
|
||||
var flags int
|
||||
if ec := C.encode_wrapper(
|
||||
e.codec, e.raw,
|
||||
C.long(t-e.tStart), C.ulong(t-e.tLastFrame), C.long(flags), C.ulong(e.deadline),
|
||||
C.long(t-e.tStart), C.ulong(duration), C.long(flags), C.ulong(e.deadline),
|
||||
(*C.uchar)(&yuvImg.Y[0]), (*C.uchar)(&yuvImg.Cb[0]), (*C.uchar)(&yuvImg.Cr[0]),
|
||||
); ec != C.VPX_CODEC_OK {
|
||||
return 0, fmt.Errorf("vpx_codec_encode failed (%d)", ec)
|
||||
return nil, func() {}, fmt.Errorf("vpx_codec_encode failed (%d)", ec)
|
||||
}
|
||||
|
||||
e.frameIndex++
|
||||
@@ -272,11 +270,10 @@ func (e *encoder) Read(p []byte) (int, error) {
|
||||
e.frame = append(e.frame, encoded...)
|
||||
}
|
||||
}
|
||||
n, err := mio.Copy(p, e.frame)
|
||||
if err != nil {
|
||||
e.buff = e.frame
|
||||
}
|
||||
return n, err
|
||||
|
||||
encoded := make([]byte, len(e.frame))
|
||||
copy(encoded, e.frame)
|
||||
return encoded, func() {}, err
|
||||
}
|
||||
|
||||
func (e *encoder) SetBitRate(b int) error {
|
||||
|
@@ -47,7 +47,7 @@ Encoder *enc_new(x264_param_t param, char *preset, int *rc) {
    e->param.b_repeat_headers = 1;
    e->param.b_annexb = 1;

    if (x264_param_apply_profile(&e->param, "baseline") < 0) {
    if (x264_param_apply_profile(&e->param, "high") < 0) {
        *rc = ERR_APPLY_PROFILE;
        goto fail;
    }
@@ -95,4 +95,4 @@ void enc_close(Encoder *e, int *rc) {
        x264_encoder_close(e->h);
        x264_picture_clean(&e->pic_in);
        free(e);
    }
}
@@ -4,7 +4,6 @@ import (
    "github.com/pion/mediadevices/pkg/codec"
    "github.com/pion/mediadevices/pkg/io/video"
    "github.com/pion/mediadevices/pkg/prop"
    "github.com/pion/webrtc/v2"
)

// Params stores libx264 specific encoding parameters.
@@ -40,9 +39,9 @@ func NewParams() (Params, error) {
    }, nil
}

// Name represents the codec name
func (p *Params) Name() string {
    return webrtc.H264
// RTPCodec represents the codec metadata
func (p *Params) RTPCodec() *codec.RTPCodec {
    return codec.NewRTPH264Codec(90000)
}

// BuildVideoEncoder builds x264 encoder with given params
@@ -14,14 +14,12 @@ import (
|
||||
"unsafe"
|
||||
|
||||
"github.com/pion/mediadevices/pkg/codec"
|
||||
mio "github.com/pion/mediadevices/pkg/io"
|
||||
"github.com/pion/mediadevices/pkg/io/video"
|
||||
"github.com/pion/mediadevices/pkg/prop"
|
||||
)
|
||||
|
||||
type encoder struct {
|
||||
engine *C.Encoder
|
||||
buff []byte
|
||||
r video.Reader
|
||||
mu sync.Mutex
|
||||
closed bool
|
||||
@@ -96,25 +94,17 @@ func newEncoder(r video.Reader, p prop.Media, params Params) (codec.ReadCloser,
|
||||
return &e, nil
|
||||
}
|
||||
|
||||
func (e *encoder) Read(p []byte) (int, error) {
|
||||
func (e *encoder) Read() ([]byte, func(), error) {
|
||||
e.mu.Lock()
|
||||
defer e.mu.Unlock()
|
||||
|
||||
if e.closed {
|
||||
return 0, io.EOF
|
||||
return nil, func() {}, io.EOF
|
||||
}
|
||||
|
||||
if e.buff != nil {
|
||||
n, err := mio.Copy(p, e.buff)
|
||||
if err == nil {
|
||||
e.buff = nil
|
||||
}
|
||||
return n, err
|
||||
}
|
||||
|
||||
img, err := e.r.Read()
|
||||
img, _, err := e.r.Read()
|
||||
if err != nil {
|
||||
return 0, err
|
||||
return nil, func() {}, err
|
||||
}
|
||||
yuvImg := img.(*image.YCbCr)
|
||||
|
||||
@@ -127,15 +117,11 @@ func (e *encoder) Read(p []byte) (int, error) {
|
||||
&rc,
|
||||
)
|
||||
if err := errFromC(rc); err != nil {
|
||||
return 0, err
|
||||
return nil, func() {}, err
|
||||
}
|
||||
|
||||
encoded := C.GoBytes(unsafe.Pointer(s.data), s.data_len)
|
||||
n, err := mio.Copy(p, encoded)
|
||||
if err != nil {
|
||||
e.buff = encoded
|
||||
}
|
||||
return n, err
|
||||
return encoded, func() {}, err
|
||||
}
|
||||
|
||||
func (e *encoder) SetBitRate(b int) error {
|
||||
|
@@ -52,10 +52,10 @@ func (d *dummy) AudioRecord(p prop.Media) (audio.Reader, error) {
|
||||
|
||||
closed := d.closed
|
||||
|
||||
reader := audio.ReaderFunc(func() (wave.Audio, error) {
|
||||
reader := audio.ReaderFunc(func() (wave.Audio, func(), error) {
|
||||
select {
|
||||
case <-closed:
|
||||
return nil, io.EOF
|
||||
return nil, func() {}, io.EOF
|
||||
default:
|
||||
}
|
||||
|
||||
@@ -78,7 +78,7 @@ func (d *dummy) AudioRecord(p prop.Media) (audio.Reader, error) {
|
||||
a.SetFloat32(i, ch, wave.Float32Sample(sin[phase]))
|
||||
}
|
||||
}
|
||||
return a, nil
|
||||
return a, func() {}, nil
|
||||
})
|
||||
return reader, nil
|
||||
}
|
||||
|
71
pkg/driver/camera/camera_darwin.go
Normal file
@@ -0,0 +1,71 @@
package camera

import (
    "image"

    "github.com/pion/mediadevices/pkg/avfoundation"
    "github.com/pion/mediadevices/pkg/driver"
    "github.com/pion/mediadevices/pkg/frame"
    "github.com/pion/mediadevices/pkg/io/video"
    "github.com/pion/mediadevices/pkg/prop"
)

type camera struct {
    device  avfoundation.Device
    session *avfoundation.Session
}

func init() {
    devices, err := avfoundation.Devices(avfoundation.Video)
    if err != nil {
        panic(err)
    }

    for _, device := range devices {
        cam := newCamera(device)
        driver.GetManager().Register(cam, driver.Info{
            Label:      device.UID,
            DeviceType: driver.Camera,
        })
    }
}

func newCamera(device avfoundation.Device) *camera {
    return &camera{
        device: device,
    }
}

func (cam *camera) Open() error {
    var err error
    cam.session, err = avfoundation.NewSession(cam.device)
    return err
}

func (cam *camera) Close() error {
    return cam.session.Close()
}

func (cam *camera) VideoRecord(property prop.Media) (video.Reader, error) {
    decoder, err := frame.NewDecoder(property.FrameFormat)
    if err != nil {
        return nil, err
    }

    rc, err := cam.session.Open(property)
    if err != nil {
        return nil, err
    }
    r := video.ReaderFunc(func() (image.Image, func(), error) {
        frame, _, err := rc.Read()
        if err != nil {
            return nil, func() {}, err
        }
        return decoder.Decode(frame, property.Width, property.Height)
    })
    return r, nil
}

func (cam *camera) Properties() []prop.Media {
    return cam.session.Properties()
}
@@ -8,7 +8,8 @@ import (
|
||||
"errors"
|
||||
"image"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sync"
|
||||
|
||||
"github.com/blackjack/webcam"
|
||||
@@ -25,6 +26,36 @@ const (
|
||||
var (
|
||||
errReadTimeout = errors.New("read timeout")
|
||||
errEmptyFrame = errors.New("empty frame")
|
||||
// Reference: https://commons.wikimedia.org/wiki/File:Vector_Video_Standards2.svg
|
||||
supportedResolutions = [][2]int{
|
||||
{320, 240},
|
||||
{640, 480},
|
||||
{768, 576},
|
||||
{800, 600},
|
||||
{1024, 768},
|
||||
{1280, 854},
|
||||
{1280, 960},
|
||||
{1280, 1024},
|
||||
{1400, 1050},
|
||||
{1600, 1200},
|
||||
{2048, 1536},
|
||||
{320, 200},
|
||||
{800, 480},
|
||||
{854, 480},
|
||||
{1024, 600},
|
||||
{1152, 768},
|
||||
{1280, 720},
|
||||
{1280, 768},
|
||||
{1366, 768},
|
||||
{1280, 800},
|
||||
{1440, 900},
|
||||
{1440, 960},
|
||||
{1680, 1050},
|
||||
{1920, 1080},
|
||||
{2048, 1080},
|
||||
{1920, 1200},
|
||||
{2560, 1600},
|
||||
}
|
||||
)
|
||||
|
||||
// Camera implementation using v4l2
|
||||
@@ -40,27 +71,47 @@ type camera struct {
|
||||
}
|
||||
|
||||
func init() {
|
||||
searchPath := "/dev/v4l/by-path/"
|
||||
devices, err := ioutil.ReadDir(searchPath)
|
||||
if err != nil {
|
||||
// No v4l device.
|
||||
return
|
||||
}
|
||||
for _, device := range devices {
|
||||
cam := newCamera(searchPath + device.Name())
|
||||
driver.GetManager().Register(cam, driver.Info{
|
||||
Label: device.Name(),
|
||||
DeviceType: driver.Camera,
|
||||
})
|
||||
discovered := make(map[string]struct{})
|
||||
|
||||
discover := func(pattern string) {
|
||||
devices, err := filepath.Glob(pattern)
|
||||
if err != nil {
|
||||
// No v4l device.
|
||||
return
|
||||
}
|
||||
for _, device := range devices {
|
||||
label := filepath.Base(device)
|
||||
reallink, err := os.Readlink(device)
|
||||
if err != nil {
|
||||
reallink = label
|
||||
} else {
|
||||
reallink = filepath.Base(reallink)
|
||||
}
|
||||
|
||||
if _, ok := discovered[reallink]; ok {
|
||||
continue
|
||||
}
|
||||
|
||||
discovered[reallink] = struct{}{}
|
||||
cam := newCamera(device)
|
||||
driver.GetManager().Register(cam, driver.Info{
|
||||
Label: label,
|
||||
DeviceType: driver.Camera,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
discover("/dev/v4l/by-path/*")
|
||||
discover("/dev/video*")
|
||||
}
|
||||
|
||||
func newCamera(path string) *camera {
|
||||
formats := map[webcam.PixelFormat]frame.Format{
|
||||
webcam.PixelFormat(C.V4L2_PIX_FMT_YUYV): frame.FormatYUYV,
|
||||
webcam.PixelFormat(C.V4L2_PIX_FMT_UYVY): frame.FormatUYVY,
|
||||
webcam.PixelFormat(C.V4L2_PIX_FMT_NV12): frame.FormatNV21,
|
||||
webcam.PixelFormat(C.V4L2_PIX_FMT_MJPEG): frame.FormatMJPEG,
|
||||
webcam.PixelFormat(C.V4L2_PIX_FMT_YUV420): frame.FormatI420,
|
||||
webcam.PixelFormat(C.V4L2_PIX_FMT_YUYV): frame.FormatYUYV,
|
||||
webcam.PixelFormat(C.V4L2_PIX_FMT_UYVY): frame.FormatUYVY,
|
||||
webcam.PixelFormat(C.V4L2_PIX_FMT_NV12): frame.FormatNV21,
|
||||
webcam.PixelFormat(C.V4L2_PIX_FMT_MJPEG): frame.FormatMJPEG,
|
||||
}
|
||||
|
||||
reversedFormats := make(map[frame.Format]webcam.PixelFormat)
|
||||
@@ -82,6 +133,8 @@ func (c *camera) Open() error {
|
||||
return err
|
||||
}
|
||||
|
||||
// Late frames should be discarded. Buffering should be handled in higher level.
|
||||
cam.SetBufferCount(1)
|
||||
c.cam = cam
|
||||
return nil
|
||||
}
|
||||
@@ -129,7 +182,7 @@ func (c *camera) VideoRecord(p prop.Media) (video.Reader, error) {
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
c.cancel = cancel
|
||||
var buf []byte
|
||||
r := video.ReaderFunc(func() (img image.Image, err error) {
|
||||
r := video.ReaderFunc(func() (img image.Image, release func(), err error) {
|
||||
// Lock to avoid accessing the buffer after StopStreaming()
|
||||
c.mutex.Lock()
|
||||
defer c.mutex.Unlock()
|
||||
@@ -138,23 +191,23 @@ func (c *camera) VideoRecord(p prop.Media) (video.Reader, error) {
|
||||
for i := 0; i < maxEmptyFrameCount; i++ {
|
||||
if ctx.Err() != nil {
|
||||
// Return EOF if the camera is already closed.
|
||||
return nil, io.EOF
|
||||
return nil, func() {}, io.EOF
|
||||
}
|
||||
|
||||
err := cam.WaitForFrame(5) // 5 seconds
|
||||
switch err.(type) {
|
||||
case nil:
|
||||
case *webcam.Timeout:
|
||||
return nil, errReadTimeout
|
||||
return nil, func() {}, errReadTimeout
|
||||
default:
|
||||
// Camera has been stopped.
|
||||
return nil, err
|
||||
return nil, func() {}, err
|
||||
}
|
||||
|
||||
b, err := cam.ReadFrame()
|
||||
if err != nil {
|
||||
// Camera has been stopped.
|
||||
return nil, err
|
||||
return nil, func() {}, err
|
||||
}
|
||||
|
||||
// Frame is empty.
|
||||
@@ -174,7 +227,7 @@ func (c *camera) VideoRecord(p prop.Media) (video.Reader, error) {
|
||||
n := copy(buf, b)
|
||||
return decoder.Decode(buf[:n], p.Width, p.Height)
|
||||
}
|
||||
return nil, errEmptyFrame
|
||||
return nil, func() {}, errEmptyFrame
|
||||
})
|
||||
|
||||
return r, nil
|
||||
@@ -184,13 +237,46 @@ func (c *camera) Properties() []prop.Media {
|
||||
properties := make([]prop.Media, 0)
|
||||
for format := range c.cam.GetSupportedFormats() {
|
||||
for _, frameSize := range c.cam.GetSupportedFrameSizes(format) {
|
||||
properties = append(properties, prop.Media{
|
||||
Video: prop.Video{
|
||||
Width: int(frameSize.MaxWidth),
|
||||
Height: int(frameSize.MaxHeight),
|
||||
FrameFormat: c.formats[format],
|
||||
},
|
||||
})
|
||||
supportedFormat, ok := c.formats[format]
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
if frameSize.StepWidth == 0 || frameSize.StepHeight == 0 {
|
||||
properties = append(properties, prop.Media{
|
||||
Video: prop.Video{
|
||||
Width: int(frameSize.MaxWidth),
|
||||
Height: int(frameSize.MaxHeight),
|
||||
FrameFormat: supportedFormat,
|
||||
},
|
||||
})
|
||||
} else {
|
||||
// FIXME: we should probably use a custom data structure to capture all of the supported resolutions
|
||||
for _, supportedResolution := range supportedResolutions {
|
||||
minWidth, minHeight := int(frameSize.MinWidth), int(frameSize.MinHeight)
|
||||
maxWidth, maxHeight := int(frameSize.MaxWidth), int(frameSize.MaxHeight)
|
||||
stepWidth, stepHeight := int(frameSize.StepWidth), int(frameSize.StepHeight)
|
||||
width, height := supportedResolution[0], supportedResolution[1]
|
||||
|
||||
if width < minWidth || width > maxWidth ||
|
||||
height < minHeight || height > maxHeight {
|
||||
continue
|
||||
}
|
||||
|
||||
if (width-minWidth)%stepWidth != 0 ||
|
||||
(height-minHeight)%stepHeight != 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
properties = append(properties, prop.Media{
|
||||
Video: prop.Video{
|
||||
Width: width,
|
||||
Height: height,
|
||||
FrameFormat: supportedFormat,
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return properties
|
||||
|
@@ -116,10 +116,10 @@ func (c *camera) VideoRecord(p prop.Media) (video.Reader, error) {
|
||||
|
||||
img := &image.YCbCr{}
|
||||
|
||||
r := video.ReaderFunc(func() (image.Image, error) {
|
||||
r := video.ReaderFunc(func() (image.Image, func(), error) {
|
||||
b, ok := <-c.ch
|
||||
if !ok {
|
||||
return nil, io.EOF
|
||||
return nil, func() {}, io.EOF
|
||||
}
|
||||
img.Y = b[:nPix]
|
||||
img.Cb = b[nPix : nPix+nPix/2]
|
||||
@@ -128,7 +128,7 @@ func (c *camera) VideoRecord(p prop.Media) (video.Reader, error) {
|
||||
img.CStride = p.Width / 2
|
||||
img.SubsampleRatio = image.YCbCrSubsampleRatio422
|
||||
img.Rect = image.Rect(0, 0, p.Width, p.Height)
|
||||
return img, nil
|
||||
return img, func() {}, nil
|
||||
})
|
||||
return r, nil
|
||||
}
|
||||
|
@@ -1 +1,204 @@
|
||||
package microphone
|
||||
|
||||
import (
|
||||
"encoding/binary"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"time"
|
||||
"unsafe"
|
||||
|
||||
"github.com/gen2brain/malgo"
|
||||
"github.com/pion/mediadevices/internal/logging"
|
||||
"github.com/pion/mediadevices/pkg/driver"
|
||||
"github.com/pion/mediadevices/pkg/io/audio"
|
||||
"github.com/pion/mediadevices/pkg/prop"
|
||||
"github.com/pion/mediadevices/pkg/wave"
|
||||
)
|
||||
|
||||
const (
|
||||
maxDeviceIDLength = 20
|
||||
// TODO: should replace this with a more flexible approach
|
||||
sampleRateStep = 1000
|
||||
initialBufferSize = 1024
|
||||
)
|
||||
|
||||
var logger = logging.NewLogger("mediadevices/driver/microphone")
|
||||
var ctx *malgo.AllocatedContext
|
||||
var hostEndian binary.ByteOrder
|
||||
var (
|
||||
errUnsupportedFormat = errors.New("the provided audio format is not supported")
|
||||
)
|
||||
|
||||
type microphone struct {
|
||||
malgo.DeviceInfo
|
||||
chunkChan chan []byte
|
||||
}
|
||||
|
||||
func init() {
|
||||
var err error
|
||||
/*
|
||||
backends := []malgo.Backend{
|
||||
malgo.BackendPulseaudio,
|
||||
malgo.BackendAlsa,
|
||||
}
|
||||
*/
|
||||
ctx, err = malgo.InitContext(nil, malgo.ContextConfig{}, func(message string) {
|
||||
logger.Debugf("%v\n", message)
|
||||
})
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
devices, err := ctx.Devices(malgo.Capture)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
for _, device := range devices {
|
||||
// TODO: Detect default device and prioritize it
|
||||
driver.GetManager().Register(newMicrophone(device), driver.Info{
|
||||
Label: device.ID.String(),
|
||||
DeviceType: driver.Microphone,
|
||||
})
|
||||
}
|
||||
|
||||
// Decide which endian
|
||||
switch v := *(*uint16)(unsafe.Pointer(&([]byte{0x12, 0x34}[0]))); v {
|
||||
case 0x1234:
|
||||
hostEndian = binary.BigEndian
|
||||
case 0x3412:
|
||||
hostEndian = binary.LittleEndian
|
||||
default:
|
||||
panic(fmt.Sprintf("failed to determine host endianness: %x", v))
|
||||
}
|
||||
}
|
||||
|
||||
func newMicrophone(info malgo.DeviceInfo) *microphone {
|
||||
return µphone{
|
||||
DeviceInfo: info,
|
||||
}
|
||||
}
|
||||
|
||||
func (m *microphone) Open() error {
|
||||
m.chunkChan = make(chan []byte, 1)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *microphone) Close() error {
|
||||
if m.chunkChan != nil {
|
||||
close(m.chunkChan)
|
||||
m.chunkChan = nil
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *microphone) AudioRecord(inputProp prop.Media) (audio.Reader, error) {
|
||||
var config malgo.DeviceConfig
|
||||
var callbacks malgo.DeviceCallbacks
|
||||
|
||||
decoder, err := wave.NewDecoder(&wave.RawFormat{
|
||||
SampleSize: inputProp.SampleSize,
|
||||
IsFloat: inputProp.IsFloat,
|
||||
Interleaved: inputProp.IsInterleaved,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
config.DeviceType = malgo.Capture
|
||||
config.PerformanceProfile = malgo.LowLatency
|
||||
config.Capture.Channels = uint32(inputProp.ChannelCount)
|
||||
config.SampleRate = uint32(inputProp.SampleRate)
|
||||
if inputProp.SampleSize == 4 && inputProp.IsFloat {
|
||||
config.Capture.Format = malgo.FormatF32
|
||||
} else if inputProp.SampleSize == 2 && !inputProp.IsFloat {
|
||||
config.Capture.Format = malgo.FormatS16
|
||||
} else {
|
||||
return nil, errUnsupportedFormat
|
||||
}
|
||||
|
||||
onRecvChunk := func(_, chunk []byte, framecount uint32) {
|
||||
m.chunkChan <- chunk
|
||||
}
|
||||
callbacks.Data = onRecvChunk
|
||||
|
||||
device, err := malgo.InitDevice(ctx.Context, config, callbacks)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
err = device.Start()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return audio.ReaderFunc(func() (wave.Audio, func(), error) {
|
||||
chunk, ok := <-m.chunkChan
|
||||
if !ok {
|
||||
device.Stop()
|
||||
device.Uninit()
|
||||
return nil, func() {}, io.EOF
|
||||
}
|
||||
|
||||
decodedChunk, err := decoder.Decode(hostEndian, chunk, inputProp.ChannelCount)
|
||||
// FIXME: the decoder should also fill this information
|
||||
decodedChunk.(*wave.Float32Interleaved).Size.SamplingRate = inputProp.SampleRate
|
||||
return decodedChunk, func() {}, err
|
||||
}), nil
|
||||
}
|
||||
|
||||
func (m *microphone) Properties() []prop.Media {
|
||||
var supportedProps []prop.Media
|
||||
logger.Debug("Querying properties")
|
||||
|
||||
var isBigEndian bool
|
||||
// miniaudio only uses the host endian
|
||||
if hostEndian == binary.BigEndian {
|
||||
isBigEndian = true
|
||||
}
|
||||
|
||||
for ch := m.MinChannels; ch <= m.MaxChannels; ch++ {
|
||||
for sampleRate := m.MinSampleRate; sampleRate <= m.MaxSampleRate; sampleRate += sampleRateStep {
|
||||
for i := 0; i < int(m.FormatCount); i++ {
|
||||
format := m.Formats[i]
|
||||
|
||||
supportedProp := prop.Media{
|
||||
Audio: prop.Audio{
|
||||
ChannelCount: int(ch),
|
||||
SampleRate: int(sampleRate),
|
||||
IsBigEndian: isBigEndian,
|
||||
// miniaudio only supports interleaved at the moment
|
||||
IsInterleaved: true,
|
||||
},
|
||||
}
|
||||
|
||||
switch malgo.FormatType(format) {
|
||||
case malgo.FormatF32:
|
||||
supportedProp.SampleSize = 4
|
||||
supportedProp.IsFloat = true
|
||||
case malgo.FormatS16:
|
||||
supportedProp.SampleSize = 2
|
||||
supportedProp.IsFloat = false
|
||||
}
|
||||
|
||||
supportedProps = append(supportedProps, supportedProp)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME: remove this hardcoded value. Malgo doesn't support "ma_context_get_device_info" API yet. The above iterations
|
||||
// will always return nothing as of now
|
||||
supportedProps = append(supportedProps, prop.Media{
|
||||
Audio: prop.Audio{
|
||||
Latency: time.Millisecond * 20,
|
||||
ChannelCount: 1,
|
||||
SampleRate: 48000,
|
||||
SampleSize: 4,
|
||||
IsFloat: true,
|
||||
IsBigEndian: isBigEndian,
|
||||
IsInterleaved: true,
|
||||
},
|
||||
})
|
||||
return supportedProps
|
||||
}
|
||||
|
@@ -1,137 +0,0 @@
|
||||
package microphone
|
||||
|
||||
import (
|
||||
"io"
|
||||
"time"
|
||||
|
||||
"github.com/jfreymuth/pulse"
|
||||
"github.com/pion/mediadevices/pkg/driver"
|
||||
"github.com/pion/mediadevices/pkg/io/audio"
|
||||
"github.com/pion/mediadevices/pkg/prop"
|
||||
"github.com/pion/mediadevices/pkg/wave"
|
||||
)
|
||||
|
||||
type microphone struct {
|
||||
c *pulse.Client
|
||||
id string
|
||||
samplesChan chan<- []float32
|
||||
}
|
||||
|
||||
func init() {
|
||||
pa, err := pulse.NewClient()
|
||||
if err != nil {
|
||||
// No pulseaudio
|
||||
return
|
||||
}
|
||||
defer pa.Close()
|
||||
sources, err := pa.ListSources()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
defaultSource, err := pa.DefaultSource()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
for _, source := range sources {
|
||||
priority := driver.PriorityNormal
|
||||
if defaultSource.ID() == source.ID() {
|
||||
priority = driver.PriorityHigh
|
||||
}
|
||||
driver.GetManager().Register(µphone{id: source.ID()}, driver.Info{
|
||||
Label: source.ID(),
|
||||
DeviceType: driver.Microphone,
|
||||
Priority: priority,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func (m *microphone) Open() error {
|
||||
var err error
|
||||
m.c, err = pulse.NewClient()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *microphone) Close() error {
|
||||
if m.samplesChan != nil {
|
||||
close(m.samplesChan)
|
||||
m.samplesChan = nil
|
||||
}
|
||||
|
||||
m.c.Close()
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *microphone) AudioRecord(p prop.Media) (audio.Reader, error) {
|
||||
var options []pulse.RecordOption
|
||||
if p.ChannelCount == 1 {
|
||||
options = append(options, pulse.RecordMono)
|
||||
} else {
|
||||
options = append(options, pulse.RecordStereo)
|
||||
}
|
||||
latency := p.Latency.Seconds()
|
||||
|
||||
src, err := m.c.SourceByID(m.id)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
options = append(options,
|
||||
pulse.RecordSampleRate(p.SampleRate),
|
||||
pulse.RecordLatency(latency),
|
||||
pulse.RecordSource(src),
|
||||
)
|
||||
|
||||
samplesChan := make(chan []float32, 1)
|
||||
|
||||
handler := func(b []float32) (int, error) {
|
||||
samplesChan <- b
|
||||
return len(b), nil
|
||||
}
|
||||
|
||||
stream, err := m.c.NewRecord(pulse.Float32Writer(handler), options...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
reader := audio.ReaderFunc(func() (wave.Audio, error) {
|
||||
buff, ok := <-samplesChan
|
||||
if !ok {
|
||||
stream.Close()
|
||||
return nil, io.EOF
|
||||
}
|
||||
|
||||
a := wave.NewFloat32Interleaved(
|
||||
wave.ChunkInfo{
|
||||
Channels: p.ChannelCount,
|
||||
Len: len(buff) / p.ChannelCount,
|
||||
},
|
||||
)
|
||||
copy(a.Data, buff)
|
||||
|
||||
return a, nil
|
||||
})
|
||||
|
||||
stream.Start()
|
||||
m.samplesChan = samplesChan
|
||||
return reader, nil
|
||||
}
|
||||
|
||||
func (m *microphone) Properties() []prop.Media {
|
||||
// TODO: Get actual properties
|
||||
monoProp := prop.Media{
|
||||
Audio: prop.Audio{
|
||||
SampleRate: 48000,
|
||||
Latency: time.Millisecond * 20,
|
||||
ChannelCount: 1,
|
||||
},
|
||||
}
|
||||
|
||||
stereoProp := monoProp
|
||||
stereoProp.ChannelCount = 2
|
||||
|
||||
return []prop.Media{monoProp, stereoProp}
|
||||
}
|
@@ -1,347 +0,0 @@
|
||||
package microphone
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"golang.org/x/sys/windows"
|
||||
"io"
|
||||
"time"
|
||||
"unsafe"
|
||||
|
||||
"github.com/pion/mediadevices/pkg/driver"
|
||||
"github.com/pion/mediadevices/pkg/io/audio"
|
||||
"github.com/pion/mediadevices/pkg/prop"
|
||||
"github.com/pion/mediadevices/pkg/wave"
|
||||
)
|
||||
|
||||
const (
|
||||
// bufferNumber * prop.Audio.Latency is the maximum blockable duration
|
||||
// to get data without dropping chunks.
|
||||
bufferNumber = 5
|
||||
)
|
||||
|
||||
// Windows APIs
|
||||
var (
|
||||
winmm = windows.NewLazySystemDLL("Winmm.dll")
|
||||
waveInOpen = winmm.NewProc("waveInOpen")
|
||||
waveInStart = winmm.NewProc("waveInStart")
|
||||
waveInStop = winmm.NewProc("waveInStop")
|
||||
waveInReset = winmm.NewProc("waveInReset")
|
||||
waveInClose = winmm.NewProc("waveInClose")
|
||||
waveInPrepareHeader = winmm.NewProc("waveInPrepareHeader")
|
||||
waveInAddBuffer = winmm.NewProc("waveInAddBuffer")
|
||||
waveInUnprepareHeader = winmm.NewProc("waveInUnprepareHeader")
|
||||
)
|
||||
|
||||
type buffer struct {
|
||||
waveHdr
|
||||
data []int16
|
||||
}
|
||||
|
||||
func newBuffer(samples int) *buffer {
|
||||
b := make([]int16, samples)
|
||||
return &buffer{
|
||||
waveHdr: waveHdr{
|
||||
// Sharing Go memory with Windows C API without reference.
|
||||
// Make sure that the lifetime of the buffer struct is longer
|
||||
// than the final access from cbWaveIn.
|
||||
lpData: uintptr(unsafe.Pointer(&b[0])),
|
||||
dwBufferLength: uint32(samples * 2),
|
||||
},
|
||||
data: b,
|
||||
}
|
||||
}
|
||||
|
||||
type microphone struct {
|
||||
hWaveIn windows.Pointer
|
||||
buf map[uintptr]*buffer
|
||||
chBuf chan *buffer
|
||||
closed chan struct{}
|
||||
}
|
||||
|
||||
func init() {
|
||||
// TODO: enum devices
|
||||
driver.GetManager().Register(µphone{}, driver.Info{
|
||||
Label: "default",
|
||||
DeviceType: driver.Microphone,
|
||||
})
|
||||
}
|
||||
|
||||
func (m *microphone) Open() error {
|
||||
m.chBuf = make(chan *buffer)
|
||||
m.buf = make(map[uintptr]*buffer)
|
||||
m.closed = make(chan struct{})
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *microphone) cbWaveIn(hWaveIn windows.Pointer, uMsg uint, dwInstance, dwParam1, dwParam2 *int32) uintptr {
|
||||
switch uMsg {
|
||||
case MM_WIM_DATA:
|
||||
b := m.buf[uintptr(unsafe.Pointer(dwParam1))]
|
||||
m.chBuf <- b
|
||||
|
||||
case MM_WIM_OPEN:
|
||||
case MM_WIM_CLOSE:
|
||||
close(m.chBuf)
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func (m *microphone) Close() error {
|
||||
if m.hWaveIn == nil {
|
||||
return nil
|
||||
}
|
||||
close(m.closed)
|
||||
|
||||
ret, _, _ := waveInStop.Call(
|
||||
uintptr(unsafe.Pointer(m.hWaveIn)),
|
||||
)
|
||||
if err := errWinmm[ret]; err != nil {
|
||||
return err
|
||||
}
|
||||
// All enqueued buffers are marked done by waveInReset.
|
||||
ret, _, _ = waveInReset.Call(
|
||||
uintptr(unsafe.Pointer(m.hWaveIn)),
|
||||
)
|
||||
if err := errWinmm[ret]; err != nil {
|
||||
return err
|
||||
}
|
||||
for _, buf := range m.buf {
|
||||
// Detach buffers from waveIn API.
|
||||
ret, _, _ := waveInUnprepareHeader.Call(
|
||||
uintptr(unsafe.Pointer(m.hWaveIn)),
|
||||
uintptr(unsafe.Pointer(&buf.waveHdr)),
|
||||
uintptr(unsafe.Sizeof(buf.waveHdr)),
|
||||
)
|
||||
if err := errWinmm[ret]; err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
// Now, it's ready to free the buffers.
|
||||
// As microphone struct still has reference to the buffers,
|
||||
// they will be GC-ed once microphone is reopened or unreferenced.
|
||||
|
||||
ret, _, _ = waveInClose.Call(
|
||||
uintptr(unsafe.Pointer(m.hWaveIn)),
|
||||
)
|
||||
if err := errWinmm[ret]; err != nil {
|
||||
return err
|
||||
}
|
||||
<-m.chBuf
|
||||
m.hWaveIn = nil
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *microphone) AudioRecord(p prop.Media) (audio.Reader, error) {
|
||||
for i := 0; i < bufferNumber; i++ {
|
||||
b := newBuffer(
|
||||
int(uint64(p.Latency) * uint64(p.SampleRate) / uint64(time.Second)),
|
||||
)
|
||||
// Map the buffer by its data head address to restore access to the Go struct
|
||||
// in callback function. Don't resize the buffer after it.
|
||||
m.buf[uintptr(unsafe.Pointer(&b.waveHdr))] = b
|
||||
}
|
||||
|
||||
waveFmt := &waveFormatEx{
|
||||
wFormatTag: WAVE_FORMAT_PCM,
|
||||
nChannels: uint16(p.ChannelCount),
|
||||
nSamplesPerSec: uint32(p.SampleRate),
|
||||
nAvgBytesPerSec: uint32(p.SampleRate * p.ChannelCount * 2),
|
||||
nBlockAlign: uint16(p.ChannelCount * 2),
|
||||
wBitsPerSample: 16,
|
||||
}
|
||||
ret, _, _ := waveInOpen.Call(
|
||||
uintptr(unsafe.Pointer(&m.hWaveIn)),
|
||||
WAVE_MAPPER,
|
||||
uintptr(unsafe.Pointer(waveFmt)),
|
||||
windows.NewCallback(m.cbWaveIn),
|
||||
0,
|
||||
CALLBACK_FUNCTION,
|
||||
)
|
||||
if err := errWinmm[ret]; err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for _, buf := range m.buf {
|
||||
// Attach buffers to waveIn API.
|
||||
ret, _, _ := waveInPrepareHeader.Call(
|
||||
uintptr(unsafe.Pointer(m.hWaveIn)),
|
||||
uintptr(unsafe.Pointer(&buf.waveHdr)),
|
||||
uintptr(unsafe.Sizeof(buf.waveHdr)),
|
||||
)
|
||||
if err := errWinmm[ret]; err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
for _, buf := range m.buf {
|
||||
// Enqueue buffers.
|
||||
ret, _, _ := waveInAddBuffer.Call(
|
||||
uintptr(unsafe.Pointer(m.hWaveIn)),
|
||||
uintptr(unsafe.Pointer(&buf.waveHdr)),
|
||||
uintptr(unsafe.Sizeof(buf.waveHdr)),
|
||||
)
|
||||
if err := errWinmm[ret]; err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
ret, _, _ = waveInStart.Call(
|
||||
uintptr(unsafe.Pointer(m.hWaveIn)),
|
||||
)
|
||||
if err := errWinmm[ret]; err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// TODO: detect microphone device disconnection and return EOF
|
||||
|
||||
reader := audio.ReaderFunc(func() (wave.Audio, error) {
|
||||
b, ok := <-m.chBuf
|
||||
if !ok {
|
||||
return nil, io.EOF
|
||||
}
|
||||
|
||||
select {
|
||||
case <-m.closed:
|
||||
default:
|
||||
// Re-enqueue used buffer.
|
||||
ret, _, _ := waveInAddBuffer.Call(
|
||||
uintptr(unsafe.Pointer(m.hWaveIn)),
|
||||
uintptr(unsafe.Pointer(&b.waveHdr)),
|
||||
uintptr(unsafe.Sizeof(b.waveHdr)),
|
||||
)
|
||||
if err := errWinmm[ret]; err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
a := wave.NewFloat32Interleaved(
|
||||
wave.ChunkInfo{
|
||||
Channels: p.ChannelCount,
|
||||
Len: (int(b.waveHdr.dwBytesRecorded) / 2) / p.ChannelCount,
|
||||
},
|
||||
)
|
||||
|
||||
j := 0
|
||||
for i := 0; i < a.Size.Len; i++ {
|
||||
for ch := 0; ch < a.Size.Channels; ch++ {
|
||||
a.SetFloat32(i, ch, wave.Float32Sample(float32(b.data[j])/0x8000))
|
||||
j++
|
||||
}
|
||||
}
|
||||
|
||||
return a, nil
|
||||
})
|
||||
return reader, nil
|
||||
}
|
||||
|
||||
func (m *microphone) Properties() []prop.Media {
|
||||
// TODO: Get actual properties
|
||||
monoProp := prop.Media{
|
||||
Audio: prop.Audio{
|
||||
SampleRate: 48000,
|
||||
Latency: time.Millisecond * 20,
|
||||
ChannelCount: 1,
|
||||
},
|
||||
}
|
||||
|
||||
stereoProp := monoProp
|
||||
stereoProp.ChannelCount = 2
|
||||
|
||||
return []prop.Media{monoProp, stereoProp}
|
||||
}
|
||||
|
||||
// Windows API structures
|
||||
|
||||
type waveFormatEx struct {
|
||||
wFormatTag uint16
|
||||
nChannels uint16
|
||||
nSamplesPerSec uint32
|
||||
nAvgBytesPerSec uint32
|
||||
nBlockAlign uint16
|
||||
wBitsPerSample uint16
|
||||
cbSize uint16
|
||||
}
|
||||
|
||||
type waveHdr struct {
|
||||
lpData uintptr
|
||||
dwBufferLength uint32
|
||||
dwBytesRecorded uint32
|
||||
dwUser *uint32
|
||||
dwFlags uint32
|
||||
dwLoops uint32
|
||||
lpNext *waveHdr
|
||||
reserved *uint32
|
||||
}
|
||||
|
||||
// Windows consts
|
||||
|
||||
const (
|
||||
MMSYSERR_NOERROR = 0
|
||||
MMSYSERR_ERROR = 1
|
||||
MMSYSERR_BADDEVICEID = 2
|
||||
MMSYSERR_NOTENABLED = 3
|
||||
MMSYSERR_ALLOCATED = 4
|
||||
MMSYSERR_INVALHANDLE = 5
|
||||
MMSYSERR_NODRIVER = 6
|
||||
MMSYSERR_NOMEM = 7
|
||||
MMSYSERR_NOTSUPPORTED = 8
|
||||
MMSYSERR_BADERRNUM = 9
|
||||
MMSYSERR_INVALFLAG = 10
|
||||
MMSYSERR_INVALPARAM = 11
|
||||
MMSYSERR_HANDLEBUSY = 12
|
||||
MMSYSERR_INVALIDALIAS = 13
|
||||
MMSYSERR_BADDB = 14
|
||||
MMSYSERR_KEYNOTFOUND = 15
|
||||
MMSYSERR_READERROR = 16
|
||||
MMSYSERR_WRITEERROR = 17
|
||||
MMSYSERR_DELETEERROR = 18
|
||||
MMSYSERR_VALNOTFOUND = 19
|
||||
MMSYSERR_NODRIVERCB = 20
|
||||
|
||||
WAVERR_BADFORMAT = 32
|
||||
WAVERR_STILLPLAYING = 33
|
||||
WAVERR_UNPREPARED = 34
|
||||
WAVERR_SYNC = 35
|
||||
|
||||
WAVE_MAPPER = 0xFFFF
|
||||
WAVE_FORMAT_PCM = 1
|
||||
|
||||
CALLBACK_NULL = 0
|
||||
CALLBACK_WINDOW = 0x10000
|
||||
CALLBACK_TASK = 0x20000
|
||||
CALLBACK_FUNCTION = 0x30000
|
||||
CALLBACK_THREAD = CALLBACK_TASK
|
||||
CALLBACK_EVENT = 0x50000
|
||||
|
||||
MM_WIM_OPEN = 0x3BE
|
||||
MM_WIM_CLOSE = 0x3BF
|
||||
MM_WIM_DATA = 0x3C0
|
||||
)
|
||||
|
||||
var errWinmm = map[uintptr]error{
|
||||
MMSYSERR_NOERROR: nil,
|
||||
MMSYSERR_ERROR: errors.New("error"),
|
||||
MMSYSERR_BADDEVICEID: errors.New("bad device id"),
|
||||
MMSYSERR_NOTENABLED: errors.New("not enabled"),
|
||||
MMSYSERR_ALLOCATED: errors.New("already allocated"),
|
||||
MMSYSERR_INVALHANDLE: errors.New("invalid handler"),
|
||||
MMSYSERR_NODRIVER: errors.New("no driver"),
|
||||
MMSYSERR_NOMEM: errors.New("no memory"),
|
||||
MMSYSERR_NOTSUPPORTED: errors.New("not supported"),
|
||||
MMSYSERR_BADERRNUM: errors.New("band error number"),
|
||||
MMSYSERR_INVALFLAG: errors.New("invalid flag"),
|
||||
MMSYSERR_INVALPARAM: errors.New("invalid param"),
|
||||
MMSYSERR_HANDLEBUSY: errors.New("handle busy"),
|
||||
MMSYSERR_INVALIDALIAS: errors.New("invalid alias"),
|
||||
MMSYSERR_BADDB: errors.New("bad db"),
|
||||
MMSYSERR_KEYNOTFOUND: errors.New("key not found"),
|
||||
MMSYSERR_READERROR: errors.New("read error"),
|
||||
MMSYSERR_WRITEERROR: errors.New("write error"),
|
||||
MMSYSERR_DELETEERROR: errors.New("delete error"),
|
||||
MMSYSERR_VALNOTFOUND: errors.New("value not found"),
|
||||
MMSYSERR_NODRIVERCB: errors.New("no driver cb"),
|
||||
WAVERR_BADFORMAT: errors.New("bad format"),
|
||||
WAVERR_STILLPLAYING: errors.New("still playing"),
|
||||
WAVERR_UNPREPARED: errors.New("unprepared"),
|
||||
WAVERR_SYNC: errors.New("sync"),
|
||||
}
|
@@ -68,9 +68,9 @@ func (s *screen) VideoRecord(p prop.Media) (video.Reader, error) {
|
||||
var dst image.RGBA
|
||||
reader := s.reader
|
||||
|
||||
r := video.ReaderFunc(func() (image.Image, error) {
|
||||
r := video.ReaderFunc(func() (image.Image, func(), error) {
|
||||
<-s.tick.C
|
||||
return reader.Read().ToRGBA(&dst), nil
|
||||
return reader.Read().ToRGBA(&dst), func() {}, nil
|
||||
})
|
||||
return r, nil
|
||||
}
|
||||
|
@@ -103,10 +103,10 @@ func (d *dummy) VideoRecord(p prop.Media) (video.Reader, error) {
|
||||
d.tick = tick
|
||||
closed := d.closed
|
||||
|
||||
r := video.ReaderFunc(func() (image.Image, error) {
|
||||
r := video.ReaderFunc(func() (image.Image, func(), error) {
|
||||
select {
|
||||
case <-closed:
|
||||
return nil, io.EOF
|
||||
return nil, func() {}, io.EOF
|
||||
default:
|
||||
}
|
||||
|
||||
@@ -130,7 +130,7 @@ func (d *dummy) VideoRecord(p prop.Media) (video.Reader, error) {
|
||||
CStride: p.Width / 2,
|
||||
SubsampleRatio: image.YCbCrSubsampleRatio422,
|
||||
Rect: image.Rect(0, 0, p.Width, p.Height),
|
||||
}, nil
|
||||
}, func() {}, nil
|
||||
})
|
||||
|
||||
return r, nil
|
||||
|
@@ -6,6 +6,7 @@ import (
|
||||
"image/jpeg"
|
||||
)
|
||||
|
||||
func decodeMJPEG(frame []byte, width, height int) (image.Image, error) {
|
||||
return jpeg.Decode(bytes.NewReader(frame))
|
||||
func decodeMJPEG(frame []byte, width, height int) (image.Image, func(), error) {
|
||||
img, err := jpeg.Decode(bytes.NewReader(frame))
|
||||
return img, func() {}, err
|
||||
}
|
||||
|
@@ -3,8 +3,6 @@ package frame
|
||||
type Format string
|
||||
|
||||
const (
|
||||
// YUV Formats
|
||||
|
||||
// FormatI420 https://www.fourcc.org/pixel-format/yuv-i420/
|
||||
FormatI420 Format = "I420"
|
||||
// FormatI444 is a YUV format without sub-sampling
|
||||
@@ -16,18 +14,11 @@ const (
|
||||
// FormatUYVY https://www.fourcc.org/pixel-format/yuv-uyvy/
|
||||
FormatUYVY = "UYVY"
|
||||
|
||||
// RGB Formats
|
||||
|
||||
// FormatRGBA https://www.fourcc.org/pixel-format/rgb-rgba/
|
||||
FormatRGBA Format = "RGBA"
|
||||
|
||||
// Compressed Formats
|
||||
|
||||
// FormatMJPEG https://www.fourcc.org/mjpg/
|
||||
FormatMJPEG = "MJPEG"
|
||||
)
|
||||
|
||||
// YUV aliases
|
||||
|
||||
// FormatYUYV is an alias of FormatYUY2
|
||||
const FormatYUYV = FormatYUY2
|
||||
|
@@ -5,7 +5,7 @@ import (
|
||||
)
|
||||
|
||||
func NewDecoder(f Format) (Decoder, error) {
|
||||
var decoder DecoderFunc
|
||||
var decoder decoderFunc
|
||||
|
||||
switch f {
|
||||
case FormatI420:
|
||||
|
@@ -3,12 +3,12 @@ package frame
|
||||
import "image"
|
||||
|
||||
type Decoder interface {
|
||||
Decode(frame []byte, width, height int) (image.Image, error)
|
||||
Decode(frame []byte, width, height int) (image.Image, func(), error)
|
||||
}
|
||||
|
||||
// DecoderFunc is a proxy type for Decoder
|
||||
type DecoderFunc func(frame []byte, width, height int) (image.Image, error)
|
||||
type decoderFunc func(frame []byte, width, height int) (image.Image, func(), error)
|
||||
|
||||
func (f DecoderFunc) Decode(frame []byte, width, height int) (image.Image, error) {
|
||||
func (f decoderFunc) Decode(frame []byte, width, height int) (image.Image, func(), error) {
|
||||
return f(frame, width, height)
|
||||
}
|
||||
|
@@ -5,13 +5,13 @@ import (
|
||||
"image"
|
||||
)
|
||||
|
||||
func decodeI420(frame []byte, width, height int) (image.Image, error) {
|
||||
func decodeI420(frame []byte, width, height int) (image.Image, func(), error) {
|
||||
yi := width * height
|
||||
cbi := yi + width*height/4
|
||||
cri := cbi + width*height/4
|
||||
|
||||
if cri > len(frame) {
|
||||
return nil, fmt.Errorf("frame length (%d) less than expected (%d)", len(frame), cri)
|
||||
return nil, func() {}, fmt.Errorf("frame length (%d) less than expected (%d)", len(frame), cri)
|
||||
}
|
||||
|
||||
return &image.YCbCr{
|
||||
@@ -22,15 +22,15 @@ func decodeI420(frame []byte, width, height int) (image.Image, error) {
|
||||
CStride: width / 2,
|
||||
SubsampleRatio: image.YCbCrSubsampleRatio420,
|
||||
Rect: image.Rect(0, 0, width, height),
|
||||
}, nil
|
||||
}, func() {}, nil
|
||||
}
|
||||
|
||||
func decodeNV21(frame []byte, width, height int) (image.Image, error) {
|
||||
func decodeNV21(frame []byte, width, height int) (image.Image, func(), error) {
|
||||
yi := width * height
|
||||
ci := yi + width*height/2
|
||||
|
||||
if ci > len(frame) {
|
||||
return nil, fmt.Errorf("frame length (%d) less than expected (%d)", len(frame), ci)
|
||||
return nil, func() {}, fmt.Errorf("frame length (%d) less than expected (%d)", len(frame), ci)
|
||||
}
|
||||
|
||||
var cb, cr []byte
|
||||
@@ -47,5 +47,5 @@ func decodeNV21(frame []byte, width, height int) (image.Image, error) {
|
||||
CStride: width / 2,
|
||||
SubsampleRatio: image.YCbCrSubsampleRatio420,
|
||||
Rect: image.Rect(0, 0, width, height),
|
||||
}, nil
|
||||
}, func() {}, nil
|
||||
}
|
||||
|
@@ -12,13 +12,13 @@ import (
|
||||
// void decodeUYVYCGO(uint8_t* y, uint8_t* cb, uint8_t* cr, uint8_t* uyvy, int width, int height);
|
||||
import "C"
|
||||
|
||||
func decodeYUY2(frame []byte, width, height int) (image.Image, error) {
|
||||
func decodeYUY2(frame []byte, width, height int) (image.Image, func(), error) {
|
||||
yi := width * height
|
||||
ci := yi / 2
|
||||
fi := yi + 2*ci
|
||||
|
||||
if len(frame) != fi {
|
||||
return nil, fmt.Errorf("frame length (%d) less than expected (%d)", len(frame), fi)
|
||||
return nil, func() {}, fmt.Errorf("frame length (%d) less than expected (%d)", len(frame), fi)
|
||||
}
|
||||
|
||||
y := make([]byte, yi)
|
||||
@@ -41,16 +41,16 @@ func decodeYUY2(frame []byte, width, height int) (image.Image, error) {
|
||||
CStride: width / 2,
|
||||
SubsampleRatio: image.YCbCrSubsampleRatio422,
|
||||
Rect: image.Rect(0, 0, width, height),
|
||||
}, nil
|
||||
}, func() {}, nil
|
||||
}
|
||||
|
||||
func decodeUYVY(frame []byte, width, height int) (image.Image, error) {
|
||||
func decodeUYVY(frame []byte, width, height int) (image.Image, func(), error) {
|
||||
yi := width * height
|
||||
ci := yi / 2
|
||||
fi := yi + 2*ci
|
||||
|
||||
if len(frame) != fi {
|
||||
return nil, fmt.Errorf("frame length (%d) less than expected (%d)", len(frame), fi)
|
||||
return nil, func() {}, fmt.Errorf("frame length (%d) less than expected (%d)", len(frame), fi)
|
||||
}
|
||||
|
||||
y := make([]byte, yi)
|
||||
@@ -73,5 +73,5 @@ func decodeUYVY(frame []byte, width, height int) (image.Image, error) {
|
||||
CStride: width / 2,
|
||||
SubsampleRatio: image.YCbCrSubsampleRatio422,
|
||||
Rect: image.Rect(0, 0, width, height),
|
||||
}, nil
|
||||
}, func() {}, nil
|
||||
}
|
||||
|
@@ -7,13 +7,13 @@ import (
|
||||
"image"
|
||||
)
|
||||
|
||||
func decodeYUY2(frame []byte, width, height int) (image.Image, error) {
|
||||
func decodeYUY2(frame []byte, width, height int) (image.Image, func(), error) {
|
||||
yi := width * height
|
||||
ci := yi / 2
|
||||
fi := yi + 2*ci
|
||||
|
||||
if len(frame) != fi {
|
||||
return nil, fmt.Errorf("frame length (%d) less than expected (%d)", len(frame), fi)
|
||||
return nil, func() {}, fmt.Errorf("frame length (%d) less than expected (%d)", len(frame), fi)
|
||||
}
|
||||
|
||||
y := make([]byte, yi)
|
||||
@@ -39,16 +39,16 @@ func decodeYUY2(frame []byte, width, height int) (image.Image, error) {
|
||||
CStride: width / 2,
|
||||
SubsampleRatio: image.YCbCrSubsampleRatio422,
|
||||
Rect: image.Rect(0, 0, width, height),
|
||||
}, nil
|
||||
}, func() {}, nil
|
||||
}
|
||||
|
||||
func decodeUYVY(frame []byte, width, height int) (image.Image, error) {
|
||||
func decodeUYVY(frame []byte, width, height int) (image.Image, func(), error) {
|
||||
yi := width * height
|
||||
ci := yi / 2
|
||||
fi := yi + 2*ci
|
||||
|
||||
if len(frame) != fi {
|
||||
return nil, fmt.Errorf("frame length (%d) less than expected (%d)", len(frame), fi)
|
||||
return nil, func() {}, fmt.Errorf("frame length (%d) less than expected (%d)", len(frame), fi)
|
||||
}
|
||||
|
||||
y := make([]byte, yi)
|
||||
@@ -74,5 +74,5 @@ func decodeUYVY(frame []byte, width, height int) (image.Image, error) {
|
||||
CStride: width / 2,
|
||||
SubsampleRatio: image.YCbCrSubsampleRatio422,
|
||||
Rect: image.Rect(0, 0, width, height),
|
||||
}, nil
|
||||
}, func() {}, nil
|
||||
}
|
||||
|
@@ -27,7 +27,7 @@ func TestDecodeYUY2(t *testing.T) {
|
||||
Rect: image.Rect(0, 0, width, height),
|
||||
}
|
||||
|
||||
img, err := decodeYUY2(input, width, height)
|
||||
img, _, err := decodeYUY2(input, width, height)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
@@ -56,7 +56,7 @@ func TestDecodeUYVY(t *testing.T) {
|
||||
Rect: image.Rect(0, 0, width, height),
|
||||
}
|
||||
|
||||
img, err := decodeUYVY(input, width, height)
|
||||
img, _, err := decodeUYVY(input, width, height)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
@@ -77,7 +77,7 @@ func BenchmarkDecodeYUY2(b *testing.B) {
|
||||
b.Run(fmt.Sprintf("%dx%d", sz.width, sz.height), func(b *testing.B) {
|
||||
input := make([]byte, sz.width*sz.height*2)
|
||||
for i := 0; i < b.N; i++ {
|
||||
_, err := decodeYUY2(input, sz.width, sz.height)
|
||||
_, _, err := decodeYUY2(input, sz.width, sz.height)
|
||||
if err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
|
@@ -5,13 +5,22 @@ import (
|
||||
)
|
||||
|
||||
type Reader interface {
|
||||
Read() (wave.Audio, error)
|
||||
// Read reads data from the source. The caller is responsible to release the memory that's associated
|
||||
// with data by calling the given release function. When err is not nil, the caller MUST NOT call release
|
||||
// as data is going to be nil (no memory was given). Otherwise, the caller SHOULD call release after
|
||||
// using the data. The caller is NOT REQUIRED to call release, as this is only a part of memory management
|
||||
// optimization. If release is not called, the source is forced to allocate a new memory, which also means
|
||||
// there will be new allocations during streaming, and old unused memory will become garbage. As a consequence,
|
||||
// these garbage will put a lot of pressure to the garbage collector and makes it to run more often and finish
|
||||
// slower as the heap memory usage increases and more garbage to collect.
|
||||
Read() (chunk wave.Audio, release func(), err error)
|
||||
}
|
||||
|
||||
type ReaderFunc func() (wave.Audio, error)
|
||||
type ReaderFunc func() (chunk wave.Audio, release func(), err error)
|
||||
|
||||
func (rf ReaderFunc) Read() (wave.Audio, error) {
|
||||
return rf()
|
||||
func (rf ReaderFunc) Read() (chunk wave.Audio, release func(), err error) {
|
||||
chunk, release, err = rf()
|
||||
return
|
||||
}
|
||||
|
||||
// TransformFunc produces a new Reader that will produces a transformed audio
|
||||
|
@@ -1,89 +0,0 @@
|
||||
package audio
|
||||
|
||||
import (
|
||||
"io"
|
||||
|
||||
"github.com/faiface/beep"
|
||||
|
||||
"github.com/pion/mediadevices/pkg/wave"
|
||||
)
|
||||
|
||||
type beepStreamer struct {
|
||||
err error
|
||||
r Reader
|
||||
}
|
||||
|
||||
func ToBeep(r Reader) beep.Streamer {
|
||||
if r == nil {
|
||||
panic("FromReader requires a non-nil Reader")
|
||||
}
|
||||
|
||||
return &beepStreamer{r: r}
|
||||
}
|
||||
|
||||
func (b *beepStreamer) Stream(samples [][2]float64) (int, bool) {
|
||||
// Since there was an error, the stream has to be drained
|
||||
if b.err != nil {
|
||||
return 0, false
|
||||
}
|
||||
|
||||
d, err := b.r.Read()
|
||||
if err != nil {
|
||||
b.err = err
|
||||
if err != io.EOF {
|
||||
return 0, false
|
||||
}
|
||||
}
|
||||
|
||||
n := d.ChunkInfo().Len
|
||||
for i := 0; i < n; i++ {
|
||||
samples[i][0] = float64(wave.Float32SampleFormat.Convert(d.At(i, 0)).(wave.Float32Sample))
|
||||
samples[i][1] = float64(wave.Float32SampleFormat.Convert(d.At(i, 1)).(wave.Float32Sample))
|
||||
}
|
||||
|
||||
return n, true
|
||||
}
|
||||
|
||||
func (b *beepStreamer) Err() error {
|
||||
return b.err
|
||||
}
|
||||
|
||||
type beepReader struct {
|
||||
s beep.Streamer
|
||||
buff [][2]float64
|
||||
size int
|
||||
}
|
||||
|
||||
func FromBeep(s beep.Streamer) Reader {
|
||||
if s == nil {
|
||||
panic("FromStreamer requires a non-nil beep.Streamer")
|
||||
}
|
||||
|
||||
return &beepReader{
|
||||
s: s,
|
||||
buff: make([][2]float64, 1024), // TODO: configure chunk size
|
||||
}
|
||||
}
|
||||
|
||||
func (r *beepReader) Read() (wave.Audio, error) {
|
||||
out := wave.NewFloat32Interleaved(
|
||||
wave.ChunkInfo{Len: len(r.buff), Channels: 2, SamplingRate: 48000},
|
||||
)
|
||||
|
||||
n, ok := r.s.Stream(r.buff)
|
||||
if !ok {
|
||||
err := r.s.Err()
|
||||
if err == nil {
|
||||
err = io.EOF
|
||||
}
|
||||
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for i := 0; i < n; i++ {
|
||||
out.SetFloat32(i, 0, wave.Float32Sample(r.buff[i][0]))
|
||||
out.SetFloat32(i, 1, wave.Float32Sample(r.buff[i][1]))
|
||||
}
|
||||
|
||||
return out, nil
|
||||
}
|
pkg/io/audio/broadcast.go (new file, 76 lines)
@@ -0,0 +1,76 @@
|
||||
package audio
|
||||
|
||||
import (
|
||||
"errors"
|
||||
|
||||
"github.com/pion/mediadevices/pkg/io"
|
||||
"github.com/pion/mediadevices/pkg/wave"
|
||||
)
|
||||
|
||||
var errEmptySource = errors.New("Source can't be nil")
|
||||
|
||||
// Broadcaster is a specialized video broadcaster.
|
||||
type Broadcaster struct {
|
||||
ioBroadcaster *io.Broadcaster
|
||||
}
|
||||
|
||||
type BroadcasterConfig struct {
|
||||
Core *io.BroadcasterConfig
|
||||
}
|
||||
|
||||
// NewBroadcaster creates a new broadcaster. Source is expected to drop chunks
|
||||
// when any of the readers is slower than the source.
|
||||
func NewBroadcaster(source Reader, config *BroadcasterConfig) *Broadcaster {
|
||||
var coreConfig *io.BroadcasterConfig
|
||||
|
||||
if config != nil {
|
||||
coreConfig = config.Core
|
||||
}
|
||||
|
||||
broadcaster := io.NewBroadcaster(io.ReaderFunc(func() (interface{}, func(), error) {
|
||||
return source.Read()
|
||||
}), coreConfig)
|
||||
|
||||
return &Broadcaster{broadcaster}
|
||||
}
|
||||
|
||||
// NewReader creates a new reader. Each reader will retrieve the same data from the source.
|
||||
// copyFn is used to copy the data from the source to individual readers. Broadcaster uses a small ring
|
||||
// buffer, this means that slow readers might miss some data if they're really late and the data is no longer
|
||||
// in the ring buffer.
|
||||
func (broadcaster *Broadcaster) NewReader(copyChunk bool) Reader {
|
||||
copyFn := func(src interface{}) interface{} { return src }
|
||||
|
||||
if copyChunk {
|
||||
buffer := wave.NewBuffer()
|
||||
copyFn = func(src interface{}) interface{} {
|
||||
realSrc, _ := src.(wave.Audio)
|
||||
buffer.StoreCopy(realSrc)
|
||||
return buffer.Load()
|
||||
}
|
||||
}
|
||||
|
||||
reader := broadcaster.ioBroadcaster.NewReader(copyFn)
|
||||
return ReaderFunc(func() (wave.Audio, func(), error) {
|
||||
data, _, err := reader.Read()
|
||||
chunk, _ := data.(wave.Audio)
|
||||
return chunk, func() {}, err
|
||||
})
|
||||
}
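A usage sketch, assuming source is any existing audio.Reader such as a microphone driver; fanOut is a hypothetical helper, not part of this file:
// fanOut splits one audio source into a cheap shared reader and a copying reader.
func fanOut(source Reader) (Reader, Reader) {
	broadcaster := NewBroadcaster(source, nil)
	live := broadcaster.NewReader(false)    // shares the broadcaster's chunk buffer
	recorder := broadcaster.NewReader(true) // receives a private copy of every chunk
	return live, recorder
}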
|
||||
|
||||
// ReplaceSource replaces the underlying source. This operation is thread safe.
|
||||
func (broadcaster *Broadcaster) ReplaceSource(source Reader) error {
|
||||
return broadcaster.ioBroadcaster.ReplaceSource(io.ReaderFunc(func() (interface{}, func(), error) {
|
||||
return source.Read()
|
||||
}))
|
||||
}
|
||||
|
||||
// Source retrieves the underlying source. This operation is thread safe.
|
||||
func (broadcaster *Broadcaster) Source() Reader {
|
||||
source := broadcaster.ioBroadcaster.Source()
|
||||
return ReaderFunc(func() (wave.Audio, func(), error) {
|
||||
data, _, err := source.Read()
|
||||
img, _ := data.(wave.Audio)
|
||||
return img, func() {}, err
|
||||
})
|
||||
}
|
pkg/io/audio/broadcast_test.go (new file, 54 lines)
@@ -0,0 +1,54 @@
|
||||
package audio
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/pion/mediadevices/pkg/wave"
|
||||
)
|
||||
|
||||
func TestBroadcast(t *testing.T) {
|
||||
chunk := wave.NewFloat32Interleaved(wave.ChunkInfo{
|
||||
Len: 8,
|
||||
Channels: 2,
|
||||
SamplingRate: 48000,
|
||||
})
|
||||
|
||||
source := ReaderFunc(func() (wave.Audio, func(), error) {
|
||||
return chunk, func() {}, nil
|
||||
})
|
||||
|
||||
broadcaster := NewBroadcaster(source, nil)
|
||||
readerWithoutCopy1 := broadcaster.NewReader(false)
|
||||
readerWithoutCopy2 := broadcaster.NewReader(false)
|
||||
actualWithoutCopy1, _, err := readerWithoutCopy1.Read()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
actualWithoutCopy2, _, err := readerWithoutCopy2.Read()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if &actualWithoutCopy1.(*wave.Float32Interleaved).Data[0] != &actualWithoutCopy2.(*wave.Float32Interleaved).Data[0] {
|
||||
t.Fatal("Expected underlying buffer for frame with copy to be the same from broadcaster's buffer")
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(chunk, actualWithoutCopy1) {
|
||||
t.Fatal("Expected actual frame without copy to be the same with the original")
|
||||
}
|
||||
|
||||
readerWithCopy := broadcaster.NewReader(true)
|
||||
actualWithCopy, _, err := readerWithCopy.Read()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if &actualWithCopy.(*wave.Float32Interleaved).Data[0] == &actualWithoutCopy1.(*wave.Float32Interleaved).Data[0] {
|
||||
t.Fatal("Expected underlying buffer for frame with copy to be different from broadcaster's buffer")
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(chunk, actualWithCopy) {
|
||||
t.Fatal("Expected actual frame without copy to be the same with the original")
|
||||
}
|
||||
}
|
pkg/io/audio/buffer.go (new file, 89 lines)
@@ -0,0 +1,89 @@
|
||||
package audio
|
||||
|
||||
import (
|
||||
"errors"
|
||||
|
||||
"github.com/pion/mediadevices/pkg/wave"
|
||||
)
|
||||
|
||||
var errUnsupported = errors.New("unsupported audio format")
|
||||
|
||||
// NewBuffer creates an audio transform that buffers the signal into chunks of exactly nSamples samples.
|
||||
func NewBuffer(nSamples int) TransformFunc {
|
||||
var inBuff wave.Audio
|
||||
|
||||
return func(r Reader) Reader {
|
||||
return ReaderFunc(func() (wave.Audio, func(), error) {
|
||||
for {
|
||||
if inBuff != nil && inBuff.ChunkInfo().Len >= nSamples {
|
||||
break
|
||||
}
|
||||
|
||||
buff, _, err := r.Read()
|
||||
if err != nil {
|
||||
return nil, func() {}, err
|
||||
}
|
||||
switch b := buff.(type) {
|
||||
case *wave.Float32Interleaved:
|
||||
ib, ok := inBuff.(*wave.Float32Interleaved)
|
||||
if !ok || ib.Size.Channels != b.Size.Channels {
|
||||
ib = wave.NewFloat32Interleaved(
|
||||
wave.ChunkInfo{
|
||||
SamplingRate: b.Size.SamplingRate,
|
||||
Channels: b.Size.Channels,
|
||||
Len: nSamples,
|
||||
},
|
||||
)
|
||||
ib.Data = ib.Data[:0]
|
||||
ib.Size.Len = 0
|
||||
inBuff = ib
|
||||
}
|
||||
ib.Data = append(ib.Data, b.Data...)
|
||||
ib.Size.Len += b.Size.Len
|
||||
|
||||
case *wave.Int16Interleaved:
|
||||
ib, ok := inBuff.(*wave.Int16Interleaved)
|
||||
if !ok || ib.Size.Channels != b.Size.Channels {
|
||||
ib = wave.NewInt16Interleaved(
|
||||
wave.ChunkInfo{
|
||||
SamplingRate: b.Size.SamplingRate,
|
||||
Channels: b.Size.Channels,
|
||||
Len: nSamples,
|
||||
},
|
||||
)
|
||||
ib.Data = ib.Data[:0]
|
||||
ib.Size.Len = 0
|
||||
inBuff = ib
|
||||
}
|
||||
ib.Data = append(ib.Data, b.Data...)
|
||||
ib.Size.Len += b.Size.Len
|
||||
|
||||
default:
|
||||
return nil, func() {}, errUnsupported
|
||||
}
|
||||
}
|
||||
switch ib := inBuff.(type) {
|
||||
case *wave.Int16Interleaved:
|
||||
ibCopy := *ib
|
||||
ibCopy.Size.Len = nSamples
|
||||
n := nSamples * ib.Size.Channels
|
||||
ibCopy.Data = make([]int16, n)
|
||||
copy(ibCopy.Data, ib.Data)
|
||||
ib.Data = ib.Data[n:]
|
||||
ib.Size.Len -= nSamples
|
||||
return &ibCopy, func() {}, nil
|
||||
|
||||
case *wave.Float32Interleaved:
|
||||
ibCopy := *ib
|
||||
ibCopy.Size.Len = nSamples
|
||||
n := nSamples * ib.Size.Channels
|
||||
ibCopy.Data = make([]float32, n)
|
||||
copy(ibCopy.Data, ib.Data)
|
||||
ib.Data = ib.Data[n:]
|
||||
ib.Size.Len -= nSamples
|
||||
return &ibCopy, func() {}, nil
|
||||
}
|
||||
return nil, func() {}, errUnsupported
|
||||
})
|
||||
}
|
||||
}
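For illustration, a hypothetical wrapper showing how this transform is applied; the 960-sample figure is an assumption (20 ms at 48 kHz, the chunk size an Opus encoder typically wants):
// fixedChunks re-frames an arbitrary source into fixed 960-sample chunks.
func fixedChunks(source Reader) Reader {
	rechunk := NewBuffer(960) // 960 samples = 20 ms at 48 kHz
	return rechunk(source)    // every Read now returns exactly 960 samples per channel
}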
|
pkg/io/audio/buffer_test.go (new file, 72 lines)
@@ -0,0 +1,72 @@
|
||||
package audio
|
||||
|
||||
import (
|
||||
"io"
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/pion/mediadevices/pkg/wave"
|
||||
)
|
||||
|
||||
func TestBuffer(t *testing.T) {
|
||||
input := []wave.Audio{
|
||||
&wave.Int16Interleaved{
|
||||
Size: wave.ChunkInfo{Len: 1, Channels: 2, SamplingRate: 1234},
|
||||
Data: []int16{1, 2},
|
||||
},
|
||||
&wave.Int16Interleaved{
|
||||
Size: wave.ChunkInfo{Len: 3, Channels: 2, SamplingRate: 1234},
|
||||
Data: []int16{3, 4, 5, 6, 7, 8},
|
||||
},
|
||||
&wave.Int16Interleaved{
|
||||
Size: wave.ChunkInfo{Len: 2, Channels: 2, SamplingRate: 1234},
|
||||
Data: []int16{9, 10, 11, 12},
|
||||
},
|
||||
&wave.Int16Interleaved{
|
||||
Size: wave.ChunkInfo{Len: 7, Channels: 2, SamplingRate: 1234},
|
||||
Data: []int16{13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26},
|
||||
},
|
||||
}
|
||||
expected := []wave.Audio{
|
||||
&wave.Int16Interleaved{
|
||||
Size: wave.ChunkInfo{Len: 3, Channels: 2, SamplingRate: 1234},
|
||||
Data: []int16{1, 2, 3, 4, 5, 6},
|
||||
},
|
||||
&wave.Int16Interleaved{
|
||||
Size: wave.ChunkInfo{Len: 3, Channels: 2, SamplingRate: 1234},
|
||||
Data: []int16{7, 8, 9, 10, 11, 12},
|
||||
},
|
||||
&wave.Int16Interleaved{
|
||||
Size: wave.ChunkInfo{Len: 3, Channels: 2, SamplingRate: 1234},
|
||||
Data: []int16{13, 14, 15, 16, 17, 18},
|
||||
},
|
||||
&wave.Int16Interleaved{
|
||||
Size: wave.ChunkInfo{Len: 3, Channels: 2, SamplingRate: 1234},
|
||||
Data: []int16{19, 20, 21, 22, 23, 24},
|
||||
},
|
||||
}
|
||||
|
||||
trans := NewBuffer(3)
|
||||
|
||||
var iSent int
|
||||
r := trans(ReaderFunc(func() (wave.Audio, func(), error) {
|
||||
if iSent < len(input) {
|
||||
iSent++
|
||||
return input[iSent-1], func() {}, nil
|
||||
}
|
||||
return nil, func() {}, io.EOF
|
||||
}))
|
||||
|
||||
for i := 0; ; i++ {
|
||||
a, _, err := r.Read()
|
||||
if err != nil {
|
||||
if err == io.EOF && i >= len(expected) {
|
||||
break
|
||||
}
|
||||
t.Fatal(err)
|
||||
}
|
||||
if !reflect.DeepEqual(expected[i], a) {
|
||||
t.Errorf("Expected wave[%d]: %v, got: %v", i, expected[i], a)
|
||||
}
|
||||
}
|
||||
}
|
pkg/io/audio/detect.go (new file, 55 lines)
@@ -0,0 +1,55 @@
|
||||
package audio
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/pion/mediadevices/pkg/prop"
|
||||
"github.com/pion/mediadevices/pkg/wave"
|
||||
)
|
||||
|
||||
// DetectChanges will detect chunk and audio property changes. For audio property detection,
|
||||
// since it's time related, interval will be used to determine the sample rate.
|
||||
func DetectChanges(interval time.Duration, onChange func(prop.Media)) TransformFunc {
|
||||
return func(r Reader) Reader {
|
||||
var currentProp prop.Media
|
||||
var chunkCount uint
|
||||
return ReaderFunc(func() (wave.Audio, func(), error) {
|
||||
var dirty bool
|
||||
|
||||
chunk, _, err := r.Read()
|
||||
if err != nil {
|
||||
return nil, func() {}, err
|
||||
}
|
||||
|
||||
info := chunk.ChunkInfo()
|
||||
if currentProp.ChannelCount != info.Channels {
|
||||
currentProp.ChannelCount = info.Channels
|
||||
dirty = true
|
||||
}
|
||||
|
||||
if currentProp.SampleRate != info.SamplingRate {
|
||||
currentProp.SampleRate = info.SamplingRate
|
||||
dirty = true
|
||||
}
|
||||
|
||||
var latency time.Duration
|
||||
if currentProp.SampleRate != 0 {
|
||||
latency = time.Duration(chunk.ChunkInfo().Len) * time.Second / time.Nanosecond / time.Duration(currentProp.SampleRate)
|
||||
}
|
||||
if currentProp.Latency != latency {
|
||||
currentProp.Latency = latency
|
||||
dirty = true
|
||||
}
|
||||
|
||||
// TODO: Also detect sample format changes?
|
||||
// TODO: Add audio detect changes. As of now, there's no useful property to track.
|
||||
|
||||
if dirty {
|
||||
onChange(currentProp)
|
||||
}
|
||||
|
||||
chunkCount++
|
||||
return chunk, func() {}, nil
|
||||
})
|
||||
}
|
||||
}
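The latency expression above reduces to Len * time.Second / SampleRate (time.Nanosecond is 1, so dividing by it is a no-op); for example, a 960-sample chunk at 48000 Hz yields 20 ms. A hypothetical wrapper wiring the callback, assuming the standard log package is imported:
// watched logs whenever the source's audio properties change.
func watched(source Reader) Reader {
	detect := DetectChanges(time.Second, func(p prop.Media) {
		log.Printf("audio props changed: %d ch, %d Hz, latency %v",
			p.ChannelCount, p.SampleRate, p.Latency)
	})
	return detect(source)
}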
|
pkg/io/audio/detect_test.go (new file, 76 lines)
@@ -0,0 +1,76 @@
|
||||
package audio
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/pion/mediadevices/pkg/prop"
|
||||
"github.com/pion/mediadevices/pkg/wave"
|
||||
)
|
||||
|
||||
func TestDetectChanges(t *testing.T) {
|
||||
buildSource := func(p prop.Media) (Reader, func(prop.Media)) {
|
||||
return ReaderFunc(func() (wave.Audio, func(), error) {
|
||||
return wave.NewFloat32Interleaved(wave.ChunkInfo{
|
||||
Len: 960,
|
||||
Channels: p.ChannelCount,
|
||||
SamplingRate: p.SampleRate,
|
||||
}), func() {}, nil
|
||||
}), func(newProp prop.Media) {
|
||||
p = newProp
|
||||
}
|
||||
}
|
||||
|
||||
t.Run("OnChangeCalledBeforeFirstFrame", func(t *testing.T) {
|
||||
var detectBeforeFirstChunk bool
|
||||
var expected prop.Media
|
||||
var actual prop.Media
|
||||
expected.ChannelCount = 2
|
||||
expected.SampleRate = 48000
|
||||
expected.Latency = time.Millisecond * 20
|
||||
src, _ := buildSource(expected)
|
||||
src = DetectChanges(time.Second, func(p prop.Media) {
|
||||
actual = p
|
||||
detectBeforeFirstChunk = true
|
||||
})(src)
|
||||
|
||||
_, _, err := src.Read()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if !detectBeforeFirstChunk {
|
||||
t.Fatal("on change callback should have called before first chunk")
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(actual, expected) {
|
||||
t.Fatalf("Received an unexpected prop\nExpected:\n%v\nActual:\n%v\n", expected, actual)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("DetectChangesOnEveryUpdate", func(t *testing.T) {
|
||||
var expected prop.Media
|
||||
var actual prop.Media
|
||||
expected.ChannelCount = 2
|
||||
expected.SampleRate = 48000
|
||||
expected.Latency = 20 * time.Millisecond
|
||||
src, update := buildSource(expected)
|
||||
src = DetectChanges(time.Second, func(p prop.Media) {
|
||||
actual = p
|
||||
})(src)
|
||||
|
||||
for channelCount := 1; channelCount < 8; channelCount++ {
|
||||
expected.ChannelCount = channelCount
|
||||
update(expected)
|
||||
_, _, err := src.Read()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(actual, expected) {
|
||||
t.Fatalf("Received an unexpected prop\nExpected:\n%v\nActual:\n%v\n", expected, actual)
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
pkg/io/audio/mixer.go (new file, 40 lines)
@@ -0,0 +1,40 @@
|
||||
package audio
|
||||
|
||||
import (
|
||||
"github.com/pion/mediadevices/pkg/wave"
|
||||
"github.com/pion/mediadevices/pkg/wave/mixer"
|
||||
)
|
||||
|
||||
// NewChannelMixer creates an audio transform that mixes the audio channels.
|
||||
func NewChannelMixer(channels int, mixer mixer.ChannelMixer) TransformFunc {
|
||||
return func(r Reader) Reader {
|
||||
return ReaderFunc(func() (wave.Audio, func(), error) {
|
||||
buff, _, err := r.Read()
|
||||
if err != nil {
|
||||
return nil, func() {}, err
|
||||
}
|
||||
ci := buff.ChunkInfo()
|
||||
if ci.Channels == channels {
|
||||
return buff, func() {}, nil
|
||||
}
|
||||
|
||||
ci.Channels = channels
|
||||
|
||||
var mixed wave.Audio
|
||||
switch buff.(type) {
|
||||
case *wave.Int16Interleaved:
|
||||
mixed = wave.NewInt16Interleaved(ci)
|
||||
case *wave.Int16NonInterleaved:
|
||||
mixed = wave.NewInt16NonInterleaved(ci)
|
||||
case *wave.Float32Interleaved:
|
||||
mixed = wave.NewFloat32Interleaved(ci)
|
||||
case *wave.Float32NonInterleaved:
|
||||
mixed = wave.NewFloat32NonInterleaved(ci)
|
||||
}
|
||||
if err := mixer.Mix(mixed, buff); err != nil {
|
||||
return nil, func() {}, err
|
||||
}
|
||||
return mixed, func() {}, nil
|
||||
})
|
||||
}
|
||||
}
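Because each helper returns a TransformFunc, transforms compose by nesting; a hypothetical pipeline combining this mixer with NewBuffer:
// monoPipeline downmixes an N-channel source to mono and re-frames it into 960-sample chunks.
func monoPipeline(source Reader) Reader {
	downmix := NewChannelMixer(1, &mixer.MonoMixer{})
	rechunk := NewBuffer(960)
	return rechunk(downmix(source))
}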
|
pkg/io/audio/mixer_test.go (new file, 57 lines)
@@ -0,0 +1,57 @@
|
||||
package audio
|
||||
|
||||
import (
|
||||
"io"
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/pion/mediadevices/pkg/wave"
|
||||
"github.com/pion/mediadevices/pkg/wave/mixer"
|
||||
)
|
||||
|
||||
func TestMixer(t *testing.T) {
|
||||
input := []wave.Audio{
|
||||
&wave.Int16Interleaved{
|
||||
Size: wave.ChunkInfo{Len: 1, Channels: 2, SamplingRate: 1234},
|
||||
Data: []int16{1, 3},
|
||||
},
|
||||
&wave.Int16Interleaved{
|
||||
Size: wave.ChunkInfo{Len: 3, Channels: 2, SamplingRate: 1234},
|
||||
Data: []int16{2, 4, 3, 5, 4, 6},
|
||||
},
|
||||
}
|
||||
expected := []wave.Audio{
|
||||
&wave.Int16Interleaved{
|
||||
Size: wave.ChunkInfo{Len: 1, Channels: 1, SamplingRate: 1234},
|
||||
Data: []int16{2},
|
||||
},
|
||||
&wave.Int16Interleaved{
|
||||
Size: wave.ChunkInfo{Len: 3, Channels: 1, SamplingRate: 1234},
|
||||
Data: []int16{3, 4, 5},
|
||||
},
|
||||
}
|
||||
|
||||
trans := NewChannelMixer(1, &mixer.MonoMixer{})
|
||||
|
||||
var iSent int
|
||||
r := trans(ReaderFunc(func() (wave.Audio, func(), error) {
|
||||
if iSent < len(input) {
|
||||
iSent++
|
||||
return input[iSent-1], func() {}, nil
|
||||
}
|
||||
return nil, func() {}, io.EOF
|
||||
}))
|
||||
|
||||
for i := 0; ; i++ {
|
||||
a, _, err := r.Read()
|
||||
if err != nil {
|
||||
if err == io.EOF && i >= len(expected) {
|
||||
break
|
||||
}
|
||||
t.Fatal(err)
|
||||
}
|
||||
if !reflect.DeepEqual(expected[i], a) {
|
||||
t.Errorf("Expected wave[%d]: %v, got: %v", i, expected[i], a)
|
||||
}
|
||||
}
|
||||
}
|
pkg/io/broadcast.go (new file, 162 lines)
@@ -0,0 +1,162 @@
|
||||
package io
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
)
|
||||
|
||||
const (
|
||||
maskReading = 1 << 63
|
||||
defaultBroadcasterRingSize = 32
|
||||
// TODO: If the data source has fps greater than 30, they'll see some
|
||||
// fps fluctuation. But, 30 fps should be enough for general cases.
|
||||
defaultBroadcasterRingPollDuration = time.Millisecond * 33
|
||||
)
|
||||
|
||||
var errEmptySource = fmt.Errorf("Source can't be nil")
|
||||
|
||||
type broadcasterData struct {
|
||||
data interface{}
|
||||
count uint32
|
||||
err error
|
||||
}
|
||||
|
||||
type broadcasterRing struct {
|
||||
// reading (1 bit) + reserved (31 bits) + data count (32 bits)
|
||||
// IMPORTANT: state has to be the first element in the struct, otherwise LoadUint64 will panic on 32-bit systems
// due to misalignment
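// Illustration (not in the original source): with count == 5 and a read in
// progress, state == maskReading | 5, i.e. 0x8000000000000005; once the
// reader publishes the data, state becomes just count+1 == 6.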
|
||||
state uint64
|
||||
buffer []atomic.Value
|
||||
pollDuration time.Duration
|
||||
}
|
||||
|
||||
func newBroadcasterRing(size uint, pollDuration time.Duration) *broadcasterRing {
|
||||
return &broadcasterRing{buffer: make([]atomic.Value, size), pollDuration: pollDuration}
|
||||
}
|
||||
|
||||
func (ring *broadcasterRing) index(count uint32) int {
|
||||
return int(count) % len(ring.buffer)
|
||||
}
|
||||
|
||||
func (ring *broadcasterRing) acquire(count uint32) func(*broadcasterData) {
|
||||
// Reader has reached the latest data, should read from the source.
|
||||
// Only allow 1 reader to read from the source. When there are more than 1 readers,
|
||||
// the other readers will need to share the same data that the first reader gets from
|
||||
// the source.
|
||||
state := uint64(count)
|
||||
if atomic.CompareAndSwapUint64(&ring.state, state, state|maskReading) {
|
||||
return func(data *broadcasterData) {
|
||||
i := ring.index(count)
|
||||
ring.buffer[i].Store(data)
|
||||
atomic.StoreUint64(&ring.state, uint64(count+1))
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (ring *broadcasterRing) get(count uint32) *broadcasterData {
|
||||
for {
|
||||
reading := uint64(count) | maskReading
|
||||
// TODO: since it's lockless, it spends a lot of resources in the scheduling.
|
||||
for atomic.LoadUint64(&ring.state) == reading {
|
||||
// Yield current goroutine to let other goroutines to run instead
|
||||
time.Sleep(ring.pollDuration)
|
||||
}
|
||||
|
||||
i := ring.index(count)
|
||||
data := ring.buffer[i].Load().(*broadcasterData)
|
||||
if data.count == count {
|
||||
return data
|
||||
}
|
||||
|
||||
count++
|
||||
}
|
||||
}
|
||||
|
||||
func (ring *broadcasterRing) lastCount() uint32 {
|
||||
// ring.state always keeps track the next count, so we need to subtract it by 1 to get the
|
||||
// last count
|
||||
return uint32(atomic.LoadUint64(&ring.state)) - 1
|
||||
}
|
||||
|
||||
// Broadcaster is a generic pull-based broadcaster. Broadcaster is unique in a sense that
|
||||
// readers can come and go at anytime, and readers don't need to close or notify broadcaster.
|
||||
type Broadcaster struct {
|
||||
source atomic.Value
|
||||
buffer *broadcasterRing
|
||||
}
|
||||
|
||||
// BroadcasterConfig is a config to control broadcaster behaviour
|
||||
type BroadcasterConfig struct {
|
||||
// BufferSize configures the underlying ring buffer size that's being used
|
||||
// to avoid data loss for late readers. The default value is 32.
|
||||
BufferSize uint
|
||||
// PollDuration configures the sleep duration in waiting for new data to come.
|
||||
// The default value is 33 ms.
|
||||
PollDuration time.Duration
|
||||
}
|
||||
|
||||
// NewBroadcaster creates a new broadcaster. Source is expected to drop frames
|
||||
// when any of the readers is slower than the source.
|
||||
func NewBroadcaster(source Reader, config *BroadcasterConfig) *Broadcaster {
|
||||
pollDuration := defaultBroadcasterRingPollDuration
|
||||
var bufferSize uint = defaultBroadcasterRingSize
|
||||
if config != nil {
|
||||
if config.PollDuration != 0 {
|
||||
pollDuration = config.PollDuration
|
||||
}
|
||||
|
||||
if config.BufferSize != 0 {
|
||||
bufferSize = config.BufferSize
|
||||
}
|
||||
}
|
||||
|
||||
var broadcaster Broadcaster
|
||||
broadcaster.buffer = newBroadcasterRing(bufferSize, pollDuration)
|
||||
broadcaster.ReplaceSource(source)
|
||||
|
||||
return &broadcaster
|
||||
}
|
||||
|
||||
// NewReader creates a new reader. Each reader will retrieve the same data from the source.
|
||||
// copyFn is used to copy the data from the source to individual readers. Broadcaster uses a small ring
|
||||
// buffer, this means that slow readers might miss some data if they're really late and the data is no longer
|
||||
// in the ring buffer.
|
||||
func (broadcaster *Broadcaster) NewReader(copyFn func(interface{}) interface{}) Reader {
|
||||
currentCount := broadcaster.buffer.lastCount()
|
||||
|
||||
return ReaderFunc(func() (data interface{}, release func(), err error) {
|
||||
currentCount++
|
||||
if push := broadcaster.buffer.acquire(currentCount); push != nil {
|
||||
data, _, err = broadcaster.source.Load().(Reader).Read()
|
||||
push(&broadcasterData{
|
||||
data: data,
|
||||
err: err,
|
||||
count: currentCount,
|
||||
})
|
||||
} else {
|
||||
ringData := broadcaster.buffer.get(currentCount)
|
||||
data, err, currentCount = ringData.data, ringData.err, ringData.count
|
||||
}
|
||||
|
||||
data = copyFn(data)
|
||||
return
|
||||
})
|
||||
}
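A self-contained sketch of the generic API; the int payload and identity copyFn are assumptions for illustration, and the audio and video packages wrap this in exactly this way:
// example creates a trivial counting source, broadcasts it, and reads from one fan-out reader.
func example() {
	var n int
	source := ReaderFunc(func() (interface{}, func(), error) {
		n++
		return n, func() {}, nil
	})

	broadcaster := NewBroadcaster(source, nil)
	reader := broadcaster.NewReader(func(data interface{}) interface{} { return data })

	data, _, _ := reader.Read() // every reader created this way observes the same sequence
	_ = data
}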
|
||||
|
||||
// ReplaceSource replaces the underlying source. This operation is thread safe.
|
||||
func (broadcaster *Broadcaster) ReplaceSource(source Reader) error {
|
||||
if source == nil {
|
||||
return errEmptySource
|
||||
}
|
||||
|
||||
broadcaster.source.Store(source)
|
||||
return nil
|
||||
}
|
||||
|
||||
// Source retrieves the underlying source. This operation is thread safe.
|
||||
func (broadcaster *Broadcaster) Source() Reader {
|
||||
return broadcaster.source.Load().(Reader)
|
||||
}
|
pkg/io/broadcast_test.go (new file, 148 lines)
@@ -0,0 +1,148 @@
|
||||
package io
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"runtime"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func TestBroadcast(t *testing.T) {
|
||||
// https://github.com/pion/mediadevices/issues/198
|
||||
if runtime.GOOS == "darwin" {
|
||||
t.Skip("Skipping because Darwin CI is not reliable for timing related tests.")
|
||||
}
|
||||
frames := make([]int, 5*30) // 5 seconds worth of frames
|
||||
for i := range frames {
|
||||
frames[i] = i
|
||||
}
|
||||
|
||||
routinePauseConds := []struct {
|
||||
src bool
|
||||
dst bool
|
||||
expectedFPS float64
|
||||
expectedDrop float64
|
||||
}{
|
||||
{
|
||||
src: false,
|
||||
dst: false,
|
||||
expectedFPS: 30,
|
||||
},
|
||||
{
|
||||
src: true,
|
||||
dst: false,
|
||||
expectedFPS: 20,
|
||||
expectedDrop: 10,
|
||||
},
|
||||
{
|
||||
src: false,
|
||||
dst: true,
|
||||
expectedFPS: 20,
|
||||
expectedDrop: 10,
|
||||
},
|
||||
}
|
||||
|
||||
for _, pauseCond := range routinePauseConds {
|
||||
pauseCond := pauseCond
|
||||
t.Run(fmt.Sprintf("SrcPause-%v/DstPause-%v", pauseCond.src, pauseCond.dst), func(t *testing.T) {
|
||||
for n := 1; n <= 256; n *= 16 {
|
||||
n := n
|
||||
|
||||
t.Run(fmt.Sprintf("Readers-%d", n), func(t *testing.T) {
|
||||
var src Reader
|
||||
interval := time.NewTicker(time.Millisecond * 33) // 30 fps
|
||||
defer interval.Stop()
|
||||
frameCount := 0
|
||||
frameSent := 0
|
||||
lastSend := time.Now()
|
||||
src = ReaderFunc(func() (interface{}, func(), error) {
|
||||
if pauseCond.src && frameSent == 30 {
|
||||
time.Sleep(time.Second)
|
||||
}
|
||||
<-interval.C
|
||||
|
||||
now := time.Now()
|
||||
if interval := now.Sub(lastSend); interval > time.Millisecond*33*3/2 {
|
||||
// Source reader should drop frames to catch up the latest frame.
|
||||
drop := int(interval/(time.Millisecond*33)) - 1
|
||||
frameCount += drop
|
||||
t.Logf("Skipped %d frames", drop)
|
||||
}
|
||||
lastSend = now
|
||||
frame := frames[frameCount]
|
||||
frameCount++
|
||||
frameSent++
|
||||
return frame, func() {}, nil
|
||||
})
|
||||
broadcaster := NewBroadcaster(src, nil)
|
||||
var done uint32
|
||||
duration := time.Second * 3
|
||||
fpsChan := make(chan []float64)
|
||||
|
||||
var wg sync.WaitGroup
|
||||
wg.Add(n)
|
||||
for i := 0; i < n; i++ {
|
||||
go func() {
|
||||
reader := broadcaster.NewReader(func(src interface{}) interface{} { return src })
|
||||
count := 0
|
||||
lastFrameCount := -1
|
||||
droppedFrames := 0
|
||||
wg.Done()
|
||||
wg.Wait()
|
||||
for atomic.LoadUint32(&done) == 0 {
|
||||
if pauseCond.dst && count == 30 {
|
||||
time.Sleep(time.Second)
|
||||
}
|
||||
frame, _, err := reader.Read()
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
frameCount := frame.(int)
|
||||
droppedFrames += (frameCount - lastFrameCount - 1)
|
||||
lastFrameCount = frameCount
|
||||
count++
|
||||
}
|
||||
|
||||
fps := float64(count) / duration.Seconds()
|
||||
if fps < pauseCond.expectedFPS-2 || fps > pauseCond.expectedFPS+2 {
|
||||
t.Fatal("Unexpected average FPS")
|
||||
}
|
||||
|
||||
droppedFramesPerSecond := float64(droppedFrames) / duration.Seconds()
|
||||
if droppedFramesPerSecond < pauseCond.expectedDrop-2 || droppedFramesPerSecond > pauseCond.expectedDrop+2 {
|
||||
t.Fatal("Unexpected drop count")
|
||||
}
|
||||
|
||||
fpsChan <- []float64{fps, droppedFramesPerSecond, float64(lastFrameCount)}
|
||||
}()
|
||||
}
|
||||
|
||||
time.Sleep(duration)
|
||||
atomic.StoreUint32(&done, 1)
|
||||
|
||||
var fpsAvg float64
|
||||
var droppedFramesPerSecondAvg float64
|
||||
var lastFrameCountAvg float64
|
||||
var count int
|
||||
for metric := range fpsChan {
|
||||
fps, droppedFramesPerSecond, lastFrameCount := metric[0], metric[1], metric[2]
|
||||
fpsAvg += fps
|
||||
droppedFramesPerSecondAvg += droppedFramesPerSecond
|
||||
lastFrameCountAvg += lastFrameCount
|
||||
count++
|
||||
if count == n {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
t.Log("Average FPS :", fpsAvg/float64(n))
|
||||
t.Log("Average dropped frames per second:", droppedFramesPerSecondAvg/float64(n))
|
||||
t.Log("Last frame count (src) :", frameCount)
|
||||
t.Log("Average last frame count (dst) :", lastFrameCountAvg/float64(n))
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
pkg/io/io.go (11 lines deleted)
@@ -1,11 +0,0 @@
|
||||
package io
|
||||
|
||||
// Copy copies data from src to dst. If dst is not big enough, return an
|
||||
// InsufficientBufferError.
|
||||
func Copy(dst, src []byte) (n int, err error) {
|
||||
if len(dst) < len(src) {
|
||||
return 0, &InsufficientBufferError{len(src)}
|
||||
}
|
||||
|
||||
return copy(dst, src), nil
|
||||
}
|
@@ -1,45 +0,0 @@
|
||||
package io
|
||||
|
||||
import (
|
||||
"log"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestCopy(t *testing.T) {
|
||||
var dst []byte
|
||||
src := make([]byte, 4)
|
||||
|
||||
n, err := Copy(dst, src)
|
||||
if err == nil {
|
||||
t.Fatal("expected err to be non-nill")
|
||||
}
|
||||
|
||||
if n != 0 {
|
||||
t.Fatalf("expected n to be 0, but got %d", n)
|
||||
}
|
||||
|
||||
e, ok := err.(*InsufficientBufferError)
|
||||
if !ok {
|
||||
t.Fatalf("expected error to be InsufficientBufferError")
|
||||
}
|
||||
|
||||
if e.RequiredSize != len(src) {
|
||||
t.Fatalf("expected required size to be %d, but got %d", len(src), e.RequiredSize)
|
||||
}
|
||||
|
||||
dst = make([]byte, 2*e.RequiredSize)
|
||||
n, err = Copy(dst, src)
|
||||
if err != nil {
|
||||
t.Fatalf("expected to not get an error after expanding the buffer")
|
||||
}
|
||||
|
||||
if n != len(src) {
|
||||
t.Fatalf("expected n to be %d, but got %d", len(src), n)
|
||||
}
|
||||
|
||||
for i := 0; i < len(src); i++ {
|
||||
if src[i] != dst[i] {
|
||||
log.Fatalf("expected value at %d to be %d, but got %d", i, src[i], dst[i])
|
||||
}
|
||||
}
|
||||
}
|
pkg/io/reader.go (new file, 23 lines)
@@ -0,0 +1,23 @@
|
||||
package io
|
||||
|
||||
// Reader is a generic data reader. In the future, interface{} should be replaced by a generic type
|
||||
// to provide strong typing.
|
||||
type Reader interface {
|
||||
// Read reads data from the source. The caller is responsible for releasing the memory that's associated
|
||||
// with data by calling the given release function. When err is not nil, the caller MUST NOT call release
|
||||
// as data is going to be nil (no memory was given). Otherwise, the caller SHOULD call release after
|
||||
// using the data. The caller is NOT REQUIRED to call release, as this is only a part of memory management
|
||||
// optimization. If release is not called, the source is forced to allocate a new memory, which also means
|
||||
// there will be new allocations during streaming, and old unused memory will become garbage. As a consequence,
|
||||
// this garbage will put a lot of pressure on the garbage collector, making it run more often and finish
// slower as the heap memory usage and the amount of garbage to collect increase.
|
||||
Read() (data interface{}, release func(), err error)
|
||||
}
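Complementing the consumer rules above, a hypothetical source that benefits when callers do call release; the single-consumer pooling shown here is illustrative only and not part of this package:
// pooledSource reuses one buffer whenever the previous consumer released it,
// and allocates a fresh buffer otherwise.
func pooledSource() Reader {
	var spare []byte
	return ReaderFunc(func() (interface{}, func(), error) {
		buf := spare
		if buf == nil {
			buf = make([]byte, 4096) // released buffers are reused instead of allocating here
		}
		spare = nil
		release := func() { spare = buf }
		return buf, release, nil
	})
}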
|
||||
|
||||
// ReaderFunc is a proxy type for Reader
|
||||
type ReaderFunc func() (data interface{}, release func(), err error)
|
||||
|
||||
func (f ReaderFunc) Read() (data interface{}, release func(), err error) {
|
||||
data, release, err = f()
|
||||
return
|
||||
}
|
pkg/io/video/broadcast.go (new file, 76 lines)
@@ -0,0 +1,76 @@
|
||||
package video
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"image"
|
||||
|
||||
"github.com/pion/mediadevices/pkg/io"
|
||||
)
|
||||
|
||||
var errEmptySource = fmt.Errorf("Source can't be nil")
|
||||
|
||||
// Broadcaster is a specialized video broadcaster.
|
||||
type Broadcaster struct {
|
||||
ioBroadcaster *io.Broadcaster
|
||||
}
|
||||
|
||||
type BroadcasterConfig struct {
|
||||
Core *io.BroadcasterConfig
|
||||
}
|
||||
|
||||
// NewBroadcaster creates a new broadcaster. Source is expected to drop frames
|
||||
// when any of the readers is slower than the source.
|
||||
func NewBroadcaster(source Reader, config *BroadcasterConfig) *Broadcaster {
|
||||
var coreConfig *io.BroadcasterConfig
|
||||
|
||||
if config != nil {
|
||||
coreConfig = config.Core
|
||||
}
|
||||
|
||||
broadcaster := io.NewBroadcaster(io.ReaderFunc(func() (interface{}, func(), error) {
|
||||
return source.Read()
|
||||
}), coreConfig)
|
||||
|
||||
return &Broadcaster{broadcaster}
|
||||
}
|
||||
|
||||
// NewReader creates a new reader. Each reader will retrieve the same data from the source.
|
||||
// copyFn is used to copy the data from the source to individual readers. Broadcaster uses a small ring
|
||||
// buffer, this means that slow readers might miss some data if they're really late and the data is no longer
|
||||
// in the ring buffer.
|
||||
func (broadcaster *Broadcaster) NewReader(copyFrame bool) Reader {
|
||||
copyFn := func(src interface{}) interface{} { return src }
|
||||
|
||||
if copyFrame {
|
||||
buffer := NewFrameBuffer(0)
|
||||
copyFn = func(src interface{}) interface{} {
|
||||
realSrc, _ := src.(image.Image)
|
||||
buffer.StoreCopy(realSrc)
|
||||
return buffer.Load()
|
||||
}
|
||||
}
|
||||
|
||||
reader := broadcaster.ioBroadcaster.NewReader(copyFn)
|
||||
return ReaderFunc(func() (image.Image, func(), error) {
|
||||
data, _, err := reader.Read()
|
||||
img, _ := data.(image.Image)
|
||||
return img, func() {}, err
|
||||
})
|
||||
}
|
||||
|
||||
// ReplaceSource replaces the underlying source. This operation is thread safe.
|
||||
func (broadcaster *Broadcaster) ReplaceSource(source Reader) error {
|
||||
return broadcaster.ioBroadcaster.ReplaceSource(io.ReaderFunc(func() (interface{}, func(), error) {
|
||||
return source.Read()
|
||||
}))
|
||||
}
|
||||
|
||||
// Source retrieves the underlying source. This operation is thread safe.
|
||||
func (broadcaster *Broadcaster) Source() Reader {
|
||||
source := broadcaster.ioBroadcaster.Source()
|
||||
return ReaderFunc(func() (image.Image, func(), error) {
|
||||
data, _, err := source.Read()
|
||||
img, _ := data.(image.Image)
|
||||
return img, func() {}, err
|
||||
})
|
||||
}
|
pkg/io/video/broadcast_test.go (new file, 49 lines)
@@ -0,0 +1,49 @@
|
||||
package video
|
||||
|
||||
import (
|
||||
"image"
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestBroadcast(t *testing.T) {
|
||||
resolution := image.Rect(0, 0, 1920, 1080)
|
||||
img := image.NewGray(resolution)
|
||||
source := ReaderFunc(func() (image.Image, func(), error) {
|
||||
return img, func() {}, nil
|
||||
})
|
||||
|
||||
broadcaster := NewBroadcaster(source, nil)
|
||||
readerWithoutCopy1 := broadcaster.NewReader(false)
|
||||
readerWithoutCopy2 := broadcaster.NewReader(false)
|
||||
actualWithoutCopy1, _, err := readerWithoutCopy1.Read()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
actualWithoutCopy2, _, err := readerWithoutCopy2.Read()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if &actualWithoutCopy1.(*image.Gray).Pix[0] != &actualWithoutCopy2.(*image.Gray).Pix[0] {
|
||||
t.Fatal("Expected underlying buffer for frame with copy to be the same from broadcaster's buffer")
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(img, actualWithoutCopy1) {
|
||||
t.Fatal("Expected actual frame without copy to be the same with the original")
|
||||
}
|
||||
|
||||
readerWithCopy := broadcaster.NewReader(true)
|
||||
actualWithCopy, _, err := readerWithCopy.Read()
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if &actualWithCopy.(*image.Gray).Pix[0] == &actualWithoutCopy1.(*image.Gray).Pix[0] {
|
||||
t.Fatal("Expected underlying buffer for frame with copy to be different from broadcaster's buffer")
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(img, actualWithCopy) {
|
||||
t.Fatal("Expected actual frame without copy to be the same with the original")
|
||||
}
|
||||
}
|
Some files were not shown because too many files have changed in this diff.