Mirror of https://github.com/pion/mediadevices.git (synced 2025-09-27 04:46:10 +08:00)

Compare commits

7 Commits
f396092609
ee6cf08c44
6a211aa19f
b089610c27
1d34ec9c5d
7bd3efc8b7
8396fd7aac
```diff
@@ -9,14 +9,14 @@ import (
 	"github.com/pion/mediadevices/pkg/prop"
 	"github.com/pion/webrtc/v2"
 
-	// If you don't like vpx, you can also use x264 by importing as below
-	// "github.com/pion/mediadevices/pkg/codec/x264" // This is required to use h264 video encoder
+	// If you don't like x264, you can also use vpx by importing as below
+	// "github.com/pion/mediadevices/pkg/codec/vpx" // This is required to use VP8/VP9 video encoder
 	// or you can also use openh264 for alternative h264 implementation
 	// "github.com/pion/mediadevices/pkg/codec/openh264"
 	// or if you use a raspberry pi like, you can use mmal for using its hardware encoder
 	// "github.com/pion/mediadevices/pkg/codec/mmal"
-	"github.com/pion/mediadevices/pkg/codec/opus" // This is required to use VP8/VP9 video encoder
-	"github.com/pion/mediadevices/pkg/codec/vpx"  // This is required to use VP8/VP9 video encoder
+	"github.com/pion/mediadevices/pkg/codec/opus" // This is required to use opus audio encoder
+	"github.com/pion/mediadevices/pkg/codec/x264" // This is required to use h264 video encoder
 
 	// Note: If you don't have a camera or microphone or your adapters are not supported,
 	// you can always swap your adapters with our dummy adapters below.
```
```diff
@@ -44,18 +44,18 @@ func main() {
 	signal.Decode(signal.MustReadStdin(), &offer)
 
 	// Create a new RTCPeerConnection
-	vp8Params, err := vpx.NewVP8Params()
+	x264Params, err := x264.NewParams()
 	if err != nil {
 		panic(err)
 	}
-	vp8Params.BitRate = 300_000 // 300kbps
+	x264Params.BitRate = 500_000 // 500kbps
 
 	opusParams, err := opus.NewParams()
 	if err != nil {
 		panic(err)
 	}
 	codecSelector := mediadevices.NewCodecSelector(
-		mediadevices.WithVideoEncoders(&vp8Params),
+		mediadevices.WithVideoEncoders(&x264Params),
 		mediadevices.WithAudioEncoders(&opusParams),
 	)
```
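For context, a minimal sketch of how a selector built this way is consumed by the examples in this repository. The `GetUserMedia`/`MediaStreamConstraints` shape below (in particular the `Codec` field and the constraint callbacks) is an assumption based on the surrounding example code, not something introduced by this diff:

```go
package main

import (
	"github.com/pion/mediadevices"
	"github.com/pion/mediadevices/pkg/codec/opus"
	"github.com/pion/mediadevices/pkg/codec/x264"
)

func main() {
	// Same codec setup as the diff above: x264 at 500kbps plus opus.
	x264Params, err := x264.NewParams()
	if err != nil {
		panic(err)
	}
	x264Params.BitRate = 500_000 // 500kbps

	opusParams, err := opus.NewParams()
	if err != nil {
		panic(err)
	}

	codecSelector := mediadevices.NewCodecSelector(
		mediadevices.WithVideoEncoders(&x264Params),
		mediadevices.WithAudioEncoders(&opusParams),
	)

	// Assumed API shape: the selector is handed to GetUserMedia through the
	// constraints so that captured tracks are encoded with x264/opus.
	stream, err := mediadevices.GetUserMedia(mediadevices.MediaStreamConstraints{
		Video: func(c *mediadevices.MediaTrackConstraints) {},
		Audio: func(c *mediadevices.MediaTrackConstraints) {},
		Codec: codecSelector,
	})
	if err != nil {
		panic(err)
	}
	_ = stream
}
```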
@@ -1,3 +1,5 @@
|
||||
// +build e2e
|
||||
|
||||
package mediadevices
|
||||
|
||||
import (
|
||||
|
```diff
@@ -47,7 +47,7 @@ Encoder *enc_new(x264_param_t param, char *preset, int *rc) {
 	e->param.b_repeat_headers = 1;
 	e->param.b_annexb = 1;
 
-	if (x264_param_apply_profile(&e->param, "baseline") < 0) {
+	if (x264_param_apply_profile(&e->param, "high") < 0) {
 		*rc = ERR_APPLY_PROFILE;
 		goto fail;
 	}
@@ -95,4 +95,4 @@ void enc_close(Encoder *e, int *rc) {
 	x264_encoder_close(e->h);
 	x264_picture_clean(&e->pic_in);
 	free(e);
-}
+}
```
@@ -1 +1,204 @@
```go
package microphone

import (
	"encoding/binary"
	"errors"
	"fmt"
	"io"
	"time"
	"unsafe"

	"github.com/gen2brain/malgo"
	"github.com/pion/mediadevices/internal/logging"
	"github.com/pion/mediadevices/pkg/driver"
	"github.com/pion/mediadevices/pkg/io/audio"
	"github.com/pion/mediadevices/pkg/prop"
	"github.com/pion/mediadevices/pkg/wave"
)

const (
	maxDeviceIDLength = 20
	// TODO: should replace this with a more flexible approach
	sampleRateStep    = 1000
	initialBufferSize = 1024
)

var logger = logging.NewLogger("mediadevices/driver/microphone")
var ctx *malgo.AllocatedContext
var hostEndian binary.ByteOrder
var (
	errUnsupportedFormat = errors.New("the provided audio format is not supported")
)

type microphone struct {
	malgo.DeviceInfo
	chunkChan chan []byte
}

func init() {
	var err error
	/*
		backends := []malgo.Backend{
			malgo.BackendPulseaudio,
			malgo.BackendAlsa,
		}
	*/
	ctx, err = malgo.InitContext(nil, malgo.ContextConfig{}, func(message string) {
		logger.Debugf("%v\n", message)
	})
	if err != nil {
		panic(err)
	}

	devices, err := ctx.Devices(malgo.Capture)
	if err != nil {
		panic(err)
	}

	for _, device := range devices {
		// TODO: Detect default device and prioritize it
		driver.GetManager().Register(newMicrophone(device), driver.Info{
			Label:      device.ID.String(),
			DeviceType: driver.Microphone,
		})
	}

	// Decide which endian
	switch v := *(*uint16)(unsafe.Pointer(&([]byte{0x12, 0x34}[0]))); v {
	case 0x1234:
		hostEndian = binary.BigEndian
	case 0x3412:
		hostEndian = binary.LittleEndian
	default:
		panic(fmt.Sprintf("failed to determine host endianness: %x", v))
	}
}

func newMicrophone(info malgo.DeviceInfo) *microphone {
	return &microphone{
		DeviceInfo: info,
	}
}

func (m *microphone) Open() error {
	m.chunkChan = make(chan []byte, 1)
	return nil
}

func (m *microphone) Close() error {
	if m.chunkChan != nil {
		close(m.chunkChan)
		m.chunkChan = nil
	}
	return nil
}

func (m *microphone) AudioRecord(inputProp prop.Media) (audio.Reader, error) {
	var config malgo.DeviceConfig
	var callbacks malgo.DeviceCallbacks

	decoder, err := wave.NewDecoder(&wave.RawFormat{
		SampleSize:  inputProp.SampleSize,
		IsFloat:     inputProp.IsFloat,
		Interleaved: inputProp.IsInterleaved,
	})
	if err != nil {
		return nil, err
	}

	config.DeviceType = malgo.Capture
	config.PerformanceProfile = malgo.LowLatency
	config.Capture.Channels = uint32(inputProp.ChannelCount)
	config.SampleRate = uint32(inputProp.SampleRate)
	if inputProp.SampleSize == 4 && inputProp.IsFloat {
		config.Capture.Format = malgo.FormatF32
	} else if inputProp.SampleSize == 2 && !inputProp.IsFloat {
		config.Capture.Format = malgo.FormatS16
	} else {
		return nil, errUnsupportedFormat
	}

	onRecvChunk := func(_, chunk []byte, framecount uint32) {
		m.chunkChan <- chunk
	}
	callbacks.Data = onRecvChunk

	device, err := malgo.InitDevice(ctx.Context, config, callbacks)
	if err != nil {
		return nil, err
	}

	err = device.Start()
	if err != nil {
		return nil, err
	}

	return audio.ReaderFunc(func() (wave.Audio, func(), error) {
		chunk, ok := <-m.chunkChan
		if !ok {
			device.Stop()
			device.Uninit()
			return nil, func() {}, io.EOF
		}

		decodedChunk, err := decoder.Decode(hostEndian, chunk, inputProp.ChannelCount)
		// FIXME: the decoder should also fill this information
		decodedChunk.(*wave.Float32Interleaved).Size.SamplingRate = inputProp.SampleRate
		return decodedChunk, func() {}, err
	}), nil
}

func (m *microphone) Properties() []prop.Media {
	var supportedProps []prop.Media
	logger.Debug("Querying properties")

	var isBigEndian bool
	// miniaudio only uses the host endian
	if hostEndian == binary.BigEndian {
		isBigEndian = true
	}

	for ch := m.MinChannels; ch <= m.MaxChannels; ch++ {
		for sampleRate := m.MinSampleRate; sampleRate <= m.MaxSampleRate; sampleRate += sampleRateStep {
			for i := 0; i < int(m.FormatCount); i++ {
				format := m.Formats[i]

				supportedProp := prop.Media{
					Audio: prop.Audio{
						ChannelCount: int(ch),
						SampleRate:   int(sampleRate),
						IsBigEndian:  isBigEndian,
						// miniaudio only supports interleaved at the moment
						IsInterleaved: true,
					},
				}

				switch malgo.FormatType(format) {
				case malgo.FormatF32:
					supportedProp.SampleSize = 4
					supportedProp.IsFloat = true
				case malgo.FormatS16:
					supportedProp.SampleSize = 2
					supportedProp.IsFloat = false
				}

				supportedProps = append(supportedProps, supportedProp)
			}
		}
	}

	// FIXME: remove this hardcoded value. Malgo doesn't support "ma_context_get_device_info" API yet. The above iterations
	// will always return nothing as of now
	supportedProps = append(supportedProps, prop.Media{
		Audio: prop.Audio{
			Latency:       time.Millisecond * 20,
			ChannelCount:  1,
			SampleRate:    48000,
			SampleSize:    4,
			IsFloat:       true,
			IsBigEndian:   isBigEndian,
			IsInterleaved: true,
		},
	})
	return supportedProps
}
```
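The `init` function above probes host endianness by reinterpreting two known bytes as a `uint16`. A standalone sketch of the same technique, for reference (the package and function names here are illustrative, not part of the driver):

```go
package main

import (
	"encoding/binary"
	"fmt"
	"unsafe"
)

// hostByteOrder reads the bytes {0x12, 0x34} back as a uint16 using the CPU's
// native layout; the resulting value tells us which byte came first.
func hostByteOrder() binary.ByteOrder {
	b := []byte{0x12, 0x34}
	switch v := *(*uint16)(unsafe.Pointer(&b[0])); v {
	case 0x1234:
		return binary.BigEndian
	case 0x3412:
		return binary.LittleEndian
	default:
		panic(fmt.Sprintf("failed to determine host endianness: %x", v))
	}
}

func main() {
	// Prints "LittleEndian" on amd64/arm64 hosts.
	fmt.Println(hostByteOrder())
}
```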
@@ -1,138 +0,0 @@
```go
package microphone

import (
	"io"
	"time"

	"github.com/jfreymuth/pulse"
	"github.com/pion/mediadevices/pkg/driver"
	"github.com/pion/mediadevices/pkg/io/audio"
	"github.com/pion/mediadevices/pkg/prop"
	"github.com/pion/mediadevices/pkg/wave"
)

type microphone struct {
	c           *pulse.Client
	id          string
	samplesChan chan<- []int16
}

func init() {
	pa, err := pulse.NewClient()
	if err != nil {
		// No pulseaudio
		return
	}
	defer pa.Close()
	sources, err := pa.ListSources()
	if err != nil {
		panic(err)
	}
	defaultSource, err := pa.DefaultSource()
	if err != nil {
		panic(err)
	}
	for _, source := range sources {
		priority := driver.PriorityNormal
		if defaultSource.ID() == source.ID() {
			priority = driver.PriorityHigh
		}
		driver.GetManager().Register(&microphone{id: source.ID()}, driver.Info{
			Label:      source.ID(),
			DeviceType: driver.Microphone,
			Priority:   priority,
		})
	}
}

func (m *microphone) Open() error {
	var err error
	m.c, err = pulse.NewClient()
	if err != nil {
		return err
	}

	return nil
}

func (m *microphone) Close() error {
	if m.samplesChan != nil {
		close(m.samplesChan)
		m.samplesChan = nil
	}

	m.c.Close()
	return nil
}

func (m *microphone) AudioRecord(p prop.Media) (audio.Reader, error) {
	var options []pulse.RecordOption
	if p.ChannelCount == 1 {
		options = append(options, pulse.RecordMono)
	} else {
		options = append(options, pulse.RecordStereo)
	}
	latency := p.Latency.Seconds()

	src, err := m.c.SourceByID(m.id)
	if err != nil {
		return nil, err
	}

	options = append(options,
		pulse.RecordSampleRate(p.SampleRate),
		pulse.RecordLatency(latency),
		pulse.RecordSource(src),
	)

	samplesChan := make(chan []int16, 1)

	handler := func(b []int16) (int, error) {
		samplesChan <- b
		return len(b), nil
	}

	stream, err := m.c.NewRecord(pulse.Int16Writer(handler), options...)
	if err != nil {
		return nil, err
	}

	reader := audio.ReaderFunc(func() (wave.Audio, func(), error) {
		buff, ok := <-samplesChan
		if !ok {
			stream.Close()
			return nil, func() {}, io.EOF
		}

		a := wave.NewInt16Interleaved(
			wave.ChunkInfo{
				Channels:     p.ChannelCount,
				Len:          len(buff) / p.ChannelCount,
				SamplingRate: p.SampleRate,
			},
		)
		copy(a.Data, buff)

		return a, func() {}, nil
	})

	stream.Start()
	m.samplesChan = samplesChan
	return reader, nil
}

func (m *microphone) Properties() []prop.Media {
	// TODO: Get actual properties
	monoProp := prop.Media{
		Audio: prop.Audio{
			SampleRate:   48000,
			Latency:      time.Millisecond * 20,
			ChannelCount: 1,
		},
	}

	stereoProp := monoProp
	stereoProp.ChannelCount = 2

	return []prop.Media{monoProp, stereoProp}
}
```
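Both removed drivers (this pulse one and the winmm one below) hand samples to the pipeline by copying raw `int16` buffers into a `wave.Int16Interleaved` chunk. A minimal sketch of that conversion, with made-up sample values for illustration:

```go
package main

import (
	"fmt"

	"github.com/pion/mediadevices/pkg/wave"
)

func main() {
	channels := 2
	buff := []int16{0, 1, 2, 3, 4, 5} // three interleaved stereo frames

	// ChunkInfo describes the layout; Len is the number of frames per channel.
	a := wave.NewInt16Interleaved(wave.ChunkInfo{
		Channels:     channels,
		Len:          len(buff) / channels,
		SamplingRate: 48000,
	})
	copy(a.Data, buff)

	fmt.Println(a.Size.Len, a.Size.Channels) // 3 2
}
```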
@@ -1,348 +0,0 @@
```go
package microphone

import (
	"errors"
	"golang.org/x/sys/windows"
	"io"
	"time"
	"unsafe"

	"github.com/pion/mediadevices/pkg/driver"
	"github.com/pion/mediadevices/pkg/io/audio"
	"github.com/pion/mediadevices/pkg/prop"
	"github.com/pion/mediadevices/pkg/wave"
)

const (
	// bufferNumber * prop.Audio.Latency is the maximum blockable duration
	// to get data without dropping chunks.
	bufferNumber = 5
)

// Windows APIs
var (
	winmm                 = windows.NewLazySystemDLL("Winmm.dll")
	waveInOpen            = winmm.NewProc("waveInOpen")
	waveInStart           = winmm.NewProc("waveInStart")
	waveInStop            = winmm.NewProc("waveInStop")
	waveInReset           = winmm.NewProc("waveInReset")
	waveInClose           = winmm.NewProc("waveInClose")
	waveInPrepareHeader   = winmm.NewProc("waveInPrepareHeader")
	waveInAddBuffer       = winmm.NewProc("waveInAddBuffer")
	waveInUnprepareHeader = winmm.NewProc("waveInUnprepareHeader")
)

type buffer struct {
	waveHdr
	data []int16
}

func newBuffer(samples int) *buffer {
	b := make([]int16, samples)
	return &buffer{
		waveHdr: waveHdr{
			// Sharing Go memory with Windows C API without reference.
			// Make sure that the lifetime of the buffer struct is longer
			// than the final access from cbWaveIn.
			lpData:         uintptr(unsafe.Pointer(&b[0])),
			dwBufferLength: uint32(samples * 2),
		},
		data: b,
	}
}

type microphone struct {
	hWaveIn windows.Pointer
	buf     map[uintptr]*buffer
	chBuf   chan *buffer
	closed  chan struct{}
}

func init() {
	// TODO: enum devices
	driver.GetManager().Register(&microphone{}, driver.Info{
		Label:      "default",
		DeviceType: driver.Microphone,
	})
}

func (m *microphone) Open() error {
	m.chBuf = make(chan *buffer)
	m.buf = make(map[uintptr]*buffer)
	m.closed = make(chan struct{})
	return nil
}

func (m *microphone) cbWaveIn(hWaveIn windows.Pointer, uMsg uint, dwInstance, dwParam1, dwParam2 *int32) uintptr {
	switch uMsg {
	case MM_WIM_DATA:
		b := m.buf[uintptr(unsafe.Pointer(dwParam1))]
		m.chBuf <- b

	case MM_WIM_OPEN:
	case MM_WIM_CLOSE:
		close(m.chBuf)
	}
	return 0
}

func (m *microphone) Close() error {
	if m.hWaveIn == nil {
		return nil
	}
	close(m.closed)

	ret, _, _ := waveInStop.Call(
		uintptr(unsafe.Pointer(m.hWaveIn)),
	)
	if err := errWinmm[ret]; err != nil {
		return err
	}
	// All enqueued buffers are marked done by waveInReset.
	ret, _, _ = waveInReset.Call(
		uintptr(unsafe.Pointer(m.hWaveIn)),
	)
	if err := errWinmm[ret]; err != nil {
		return err
	}
	for _, buf := range m.buf {
		// Detach buffers from waveIn API.
		ret, _, _ := waveInUnprepareHeader.Call(
			uintptr(unsafe.Pointer(m.hWaveIn)),
			uintptr(unsafe.Pointer(&buf.waveHdr)),
			uintptr(unsafe.Sizeof(buf.waveHdr)),
		)
		if err := errWinmm[ret]; err != nil {
			return err
		}
	}
	// Now, it's ready to free the buffers.
	// As microphone struct still has reference to the buffers,
	// they will be GC-ed once microphone is reopened or unreferenced.

	ret, _, _ = waveInClose.Call(
		uintptr(unsafe.Pointer(m.hWaveIn)),
	)
	if err := errWinmm[ret]; err != nil {
		return err
	}
	<-m.chBuf
	m.hWaveIn = nil

	return nil
}

func (m *microphone) AudioRecord(p prop.Media) (audio.Reader, error) {
	for i := 0; i < bufferNumber; i++ {
		b := newBuffer(
			int(uint64(p.Latency) * uint64(p.SampleRate) / uint64(time.Second)),
		)
		// Map the buffer by its data head address to restore access to the Go struct
		// in callback function. Don't resize the buffer after it.
		m.buf[uintptr(unsafe.Pointer(&b.waveHdr))] = b
	}

	waveFmt := &waveFormatEx{
		wFormatTag:      WAVE_FORMAT_PCM,
		nChannels:       uint16(p.ChannelCount),
		nSamplesPerSec:  uint32(p.SampleRate),
		nAvgBytesPerSec: uint32(p.SampleRate * p.ChannelCount * 2),
		nBlockAlign:     uint16(p.ChannelCount * 2),
		wBitsPerSample:  16,
	}
	ret, _, _ := waveInOpen.Call(
		uintptr(unsafe.Pointer(&m.hWaveIn)),
		WAVE_MAPPER,
		uintptr(unsafe.Pointer(waveFmt)),
		windows.NewCallback(m.cbWaveIn),
		0,
		CALLBACK_FUNCTION,
	)
	if err := errWinmm[ret]; err != nil {
		return nil, err
	}

	for _, buf := range m.buf {
		// Attach buffers to waveIn API.
		ret, _, _ := waveInPrepareHeader.Call(
			uintptr(unsafe.Pointer(m.hWaveIn)),
			uintptr(unsafe.Pointer(&buf.waveHdr)),
			uintptr(unsafe.Sizeof(buf.waveHdr)),
		)
		if err := errWinmm[ret]; err != nil {
			return nil, err
		}
	}
	for _, buf := range m.buf {
		// Enqueue buffers.
		ret, _, _ := waveInAddBuffer.Call(
			uintptr(unsafe.Pointer(m.hWaveIn)),
			uintptr(unsafe.Pointer(&buf.waveHdr)),
			uintptr(unsafe.Sizeof(buf.waveHdr)),
		)
		if err := errWinmm[ret]; err != nil {
			return nil, err
		}
	}

	ret, _, _ = waveInStart.Call(
		uintptr(unsafe.Pointer(m.hWaveIn)),
	)
	if err := errWinmm[ret]; err != nil {
		return nil, err
	}

	// TODO: detect microphone device disconnection and return EOF

	reader := audio.ReaderFunc(func() (wave.Audio, func(), error) {
		b, ok := <-m.chBuf
		if !ok {
			return nil, func() {}, io.EOF
		}

		select {
		case <-m.closed:
		default:
			// Re-enqueue used buffer.
			ret, _, _ := waveInAddBuffer.Call(
				uintptr(unsafe.Pointer(m.hWaveIn)),
				uintptr(unsafe.Pointer(&b.waveHdr)),
				uintptr(unsafe.Sizeof(b.waveHdr)),
			)
			if err := errWinmm[ret]; err != nil {
				return nil, func() {}, err
			}
		}

		a := wave.NewInt16Interleaved(
			wave.ChunkInfo{
				Channels:     p.ChannelCount,
				Len:          (int(b.waveHdr.dwBytesRecorded) / 2) / p.ChannelCount,
				SamplingRate: p.SampleRate,
			},
		)

		j := 0
		for i := 0; i < a.Size.Len; i++ {
			for ch := 0; ch < a.Size.Channels; ch++ {
				a.SetInt16(i, ch, wave.Int16Sample(b.data[j]))
				j++
			}
		}

		return a, func() {}, nil
	})
	return reader, nil
}

func (m *microphone) Properties() []prop.Media {
	// TODO: Get actual properties
	monoProp := prop.Media{
		Audio: prop.Audio{
			SampleRate:   48000,
			Latency:      time.Millisecond * 20,
			ChannelCount: 1,
		},
	}

	stereoProp := monoProp
	stereoProp.ChannelCount = 2

	return []prop.Media{monoProp, stereoProp}
}

// Windows API structures

type waveFormatEx struct {
	wFormatTag      uint16
	nChannels       uint16
	nSamplesPerSec  uint32
	nAvgBytesPerSec uint32
	nBlockAlign     uint16
	wBitsPerSample  uint16
	cbSize          uint16
}

type waveHdr struct {
	lpData          uintptr
	dwBufferLength  uint32
	dwBytesRecorded uint32
	dwUser          *uint32
	dwFlags         uint32
	dwLoops         uint32
	lpNext          *waveHdr
	reserved        *uint32
}

// Windows consts

const (
	MMSYSERR_NOERROR      = 0
	MMSYSERR_ERROR        = 1
	MMSYSERR_BADDEVICEID  = 2
	MMSYSERR_NOTENABLED   = 3
	MMSYSERR_ALLOCATED    = 4
	MMSYSERR_INVALHANDLE  = 5
	MMSYSERR_NODRIVER     = 6
	MMSYSERR_NOMEM        = 7
	MMSYSERR_NOTSUPPORTED = 8
	MMSYSERR_BADERRNUM    = 9
	MMSYSERR_INVALFLAG    = 10
	MMSYSERR_INVALPARAM   = 11
	MMSYSERR_HANDLEBUSY   = 12
	MMSYSERR_INVALIDALIAS = 13
	MMSYSERR_BADDB        = 14
	MMSYSERR_KEYNOTFOUND  = 15
	MMSYSERR_READERROR    = 16
	MMSYSERR_WRITEERROR   = 17
	MMSYSERR_DELETEERROR  = 18
	MMSYSERR_VALNOTFOUND  = 19
	MMSYSERR_NODRIVERCB   = 20

	WAVERR_BADFORMAT    = 32
	WAVERR_STILLPLAYING = 33
	WAVERR_UNPREPARED   = 34
	WAVERR_SYNC         = 35

	WAVE_MAPPER     = 0xFFFF
	WAVE_FORMAT_PCM = 1

	CALLBACK_NULL     = 0
	CALLBACK_WINDOW   = 0x10000
	CALLBACK_TASK     = 0x20000
	CALLBACK_FUNCTION = 0x30000
	CALLBACK_THREAD   = CALLBACK_TASK
	CALLBACK_EVENT    = 0x50000

	MM_WIM_OPEN  = 0x3BE
	MM_WIM_CLOSE = 0x3BF
	MM_WIM_DATA  = 0x3C0
)

var errWinmm = map[uintptr]error{
	MMSYSERR_NOERROR:      nil,
	MMSYSERR_ERROR:        errors.New("error"),
	MMSYSERR_BADDEVICEID:  errors.New("bad device id"),
	MMSYSERR_NOTENABLED:   errors.New("not enabled"),
	MMSYSERR_ALLOCATED:    errors.New("already allocated"),
	MMSYSERR_INVALHANDLE:  errors.New("invalid handler"),
	MMSYSERR_NODRIVER:     errors.New("no driver"),
	MMSYSERR_NOMEM:        errors.New("no memory"),
	MMSYSERR_NOTSUPPORTED: errors.New("not supported"),
	MMSYSERR_BADERRNUM:    errors.New("band error number"),
	MMSYSERR_INVALFLAG:    errors.New("invalid flag"),
	MMSYSERR_INVALPARAM:   errors.New("invalid param"),
	MMSYSERR_HANDLEBUSY:   errors.New("handle busy"),
	MMSYSERR_INVALIDALIAS: errors.New("invalid alias"),
	MMSYSERR_BADDB:        errors.New("bad db"),
	MMSYSERR_KEYNOTFOUND:  errors.New("key not found"),
	MMSYSERR_READERROR:    errors.New("read error"),
	MMSYSERR_WRITEERROR:   errors.New("write error"),
	MMSYSERR_DELETEERROR:  errors.New("delete error"),
	MMSYSERR_VALNOTFOUND:  errors.New("value not found"),
	MMSYSERR_NODRIVERCB:   errors.New("no driver cb"),
	WAVERR_BADFORMAT:      errors.New("bad format"),
	WAVERR_STILLPLAYING:   errors.New("still playing"),
	WAVERR_UNPREPARED:     errors.New("unprepared"),
	WAVERR_SYNC:           errors.New("sync"),
}
```
track.go

```diff
@@ -292,6 +292,9 @@ func newAudioTrackFromDriver(d driver.Driver, recorder driver.AudioRecorder, con
 		return nil, err
 	}
 
+	// FIXME: The current audio detection and audio encoder can only work with a static latency. Since the latency from the driver
+	// can fluctuate, we need to stabilize it. Maybe there's a better way for doing this?
+	reader = audio.NewBuffer(int(constraints.selectedMedia.Latency.Seconds() * float64(constraints.selectedMedia.SampleRate)))(reader)
 	return newAudioTrackFromReader(d, reader, selector), nil
 }
```
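As a quick sanity check on the buffer size computed above (an illustration, not part of the diff): with the hardcoded 20ms latency and 48kHz sample rate advertised by the malgo driver, the stabilized chunk works out to 960 samples per channel:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	latency := 20 * time.Millisecond
	sampleRate := 48000

	// Mirrors audio.NewBuffer(int(Latency.Seconds() * float64(SampleRate))) in track.go.
	chunkSize := int(latency.Seconds() * float64(sampleRate))
	fmt.Println(chunkSize) // 960
}
```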