mirror of
https://github.com/aler9/gortsplib
synced 2025-10-30 18:16:29 +08:00
rtp*: improve coverage
@@ -57,6 +57,19 @@ func (d *Decoder) Decode(byts []byte) ([][]byte, time.Duration, error) {
 
 // DecodeRTP decodes AUs from a rtp.Packet.
 func (d *Decoder) DecodeRTP(pkt *rtp.Packet) ([][]byte, time.Duration, error) {
+	if len(pkt.Payload) < 2 {
+		d.state = decoderStateInitial
+		return nil, 0, fmt.Errorf("payload is too short")
+	}
+
+	// AU-headers-length
+	headersLen := binary.BigEndian.Uint16(pkt.Payload)
+	if (headersLen % 16) != 0 {
+		d.state = decoderStateInitial
+		return nil, 0, fmt.Errorf("invalid AU-headers-length (%d)", headersLen)
+	}
+	pkt.Payload = pkt.Payload[2:]
+
 	switch d.state {
 	case decoderStateInitial:
 		if !d.initialTsSet {
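Note on the hunk above: AU-headers-length is the 16-bit big-endian field that opens every payload in this format, and the patch now validates it once, before the state switch. A minimal standalone sketch of the same check, with a hypothetical helper name that is not part of the library:

package main

import (
	"encoding/binary"
	"fmt"
)

// parseAUHeadersLength mirrors the validation added above: the payload must
// start with a 16-bit big-endian AU-headers-length whose value is a multiple
// of 16, since each AU-header in this profile is 16 bits long.
func parseAUHeadersLength(payload []byte) (uint16, []byte, error) {
	if len(payload) < 2 {
		return 0, nil, fmt.Errorf("payload is too short")
	}
	headersLen := binary.BigEndian.Uint16(payload)
	if (headersLen % 16) != 0 {
		return 0, nil, fmt.Errorf("invalid AU-headers-length (%d)", headersLen)
	}
	return headersLen, payload[2:], nil
}

func main() {
	// a payload whose AU-headers-length is 16 bits (one AU-header follows)
	_, rest, err := parseAUHeadersLength([]byte{0x00, 0x10, 0x0a, 0xd8, 0xaa})
	fmt.Println(rest, err) // [10 216 170] <nil>
}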
@@ -65,17 +78,6 @@ func (d *Decoder) DecodeRTP(pkt *rtp.Packet) ([][]byte, time.Duration, error) {
 		}
 
 		if pkt.Header.Marker {
-			if len(pkt.Payload) < 2 {
-				return nil, 0, fmt.Errorf("payload is too short")
-			}
-
-			// AU-headers-length
-			headersLen := binary.BigEndian.Uint16(pkt.Payload)
-			if (headersLen % 16) != 0 {
-				return nil, 0, fmt.Errorf("invalid AU-headers-length (%d)", headersLen)
-			}
-			pkt.Payload = pkt.Payload[2:]
-
 			// AU-headers
 			// AAC headers are 16 bits, where
 			// * 13 bits are data size
@@ -112,49 +114,47 @@ func (d *Decoder) DecodeRTP(pkt *rtp.Packet) ([][]byte, time.Duration, error) {
 			return aus, d.decodeTimestamp(pkt.Timestamp), nil
 		}
 
-		// AU-headers-length
-		headersLen := binary.BigEndian.Uint16(pkt.Payload)
 		if headersLen != 16 {
-			return nil, 0, fmt.Errorf("invalid AU-headers-length (%d)", headersLen)
+			return nil, 0, fmt.Errorf("a fragmented packet can only contain one AU")
 		}
 
 		// AU-header
-		header := binary.BigEndian.Uint16(pkt.Payload[2:])
+		header := binary.BigEndian.Uint16(pkt.Payload)
 		dataLen := header >> 3
 		auIndex := header & 0x03
 		if auIndex != 0 {
 			return nil, 0, fmt.Errorf("AU-index field is not zero")
 		}
+		pkt.Payload = pkt.Payload[2:]
 
 		if len(pkt.Payload) < int(dataLen) {
 			return nil, 0, fmt.Errorf("payload is too short")
 		}
 
-		d.fragmentedBuf = append(d.fragmentedBuf, pkt.Payload[4:]...)
+		d.fragmentedBuf = append(d.fragmentedBuf, pkt.Payload...)
 
 		d.state = decoderStateReadingFragmented
 		return nil, 0, ErrMorePacketsNeeded
 
 	default: // decoderStateReadingFragmented
-		// AU-headers-length
-		headersLen := binary.BigEndian.Uint16(pkt.Payload)
 		if headersLen != 16 {
-			return nil, 0, fmt.Errorf("invalid AU-headers-length (%d)", headersLen)
+			return nil, 0, fmt.Errorf("a fragmented packet can only contain one AU")
 		}
 
 		// AU-header
-		header := binary.BigEndian.Uint16(pkt.Payload[2:])
+		header := binary.BigEndian.Uint16(pkt.Payload)
 		dataLen := header >> 3
 		auIndex := header & 0x03
 		if auIndex != 0 {
 			return nil, 0, fmt.Errorf("AU-index field is not zero")
 		}
+		pkt.Payload = pkt.Payload[2:]
 
 		if len(pkt.Payload) < int(dataLen) {
 			return nil, 0, fmt.Errorf("payload is too short")
 		}
 
-		d.fragmentedBuf = append(d.fragmentedBuf, pkt.Payload[4:]...)
+		d.fragmentedBuf = append(d.fragmentedBuf, pkt.Payload...)
 
 		if !pkt.Header.Marker {
 			return nil, 0, ErrMorePacketsNeeded
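Both fragmented branches above read the single AU-header the same way: the upper 13 bits carry the data size and the low bits carry the AU-index, which must be zero (the code masks the two lowest bits with 0x03). A minimal sketch of that split, using a hypothetical helper name:

package main

import (
	"encoding/binary"
	"fmt"
)

// splitAUHeader extracts the fields exactly as the decoder does above:
// the size from the upper 13 bits, the index from the bits masked with 0x03.
func splitAUHeader(header uint16) (dataLen uint16, auIndex uint16) {
	return header >> 3, header & 0x03
}

func main() {
	header := binary.BigEndian.Uint16([]byte{0x0a, 0xd8})
	dataLen, auIndex := splitAUHeader(header)
	fmt.Println(dataLen, auIndex) // 347 0
}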
@@ -152,7 +152,7 @@ var cases = []struct {
 	{
 		"fragmented",
 		[][]byte{
-			bytes.Repeat([]byte{0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07}, 256),
+			bytes.Repeat([]byte{0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07}, 512),
 		},
 		0,
 		[][]byte{
@@ -165,10 +165,17 @@ var cases = []struct {
 			),
 			mergeBytes(
 				[]byte{
-					0x80, 0xe0, 0x44, 0xee, 0x88, 0x77, 0x66, 0x55,
-					0x9d, 0xbb, 0x78, 0x12, 0x00, 0x10, 0x02, 0x50,
+					0x80, 0x60, 0x44, 0xee, 0x88, 0x77, 0x66, 0x55,
+					0x9d, 0xbb, 0x78, 0x12, 0x00, 0x10, 0x05, 0xb0,
 				},
-				bytes.Repeat([]byte{0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07}, 74),
+				bytes.Repeat([]byte{0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07}, 182),
+			),
+			mergeBytes(
+				[]byte{
+					0x80, 0xe0, 0x44, 0xef, 0x88, 0x77, 0x66, 0x55,
+					0x9d, 0xbb, 0x78, 0x12, 0x00, 0x10, 0x04, 0xa0,
+				},
+				bytes.Repeat([]byte{0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07}, 148),
 			),
 		},
 	},
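The test vectors above are assembled with mergeBytes, which, judging from its use here, concatenates its byte-slice arguments into one RTP packet. A rough sketch of such a helper, under that assumption and with a hypothetical name:

package main

import (
	"bytes"
	"fmt"
)

// concatBytes sketches what mergeBytes is assumed to do: append its
// byte-slice arguments into a single payload, in order.
func concatBytes(vals ...[]byte) []byte {
	var out []byte
	for _, v := range vals {
		out = append(out, v...)
	}
	return out
}

func main() {
	pkt := concatBytes(
		[]byte{0x80, 0x60, 0x44, 0xee},
		bytes.Repeat([]byte{0x00, 0x01}, 2),
	)
	fmt.Printf("%x\n", pkt) // 806044ee00010001
}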
@@ -257,46 +264,153 @@ func TestDecode(t *testing.T) {
 func TestDecodeErrors(t *testing.T) {
 	for _, ca := range []struct {
 		name string
-		byts []byte
+		pkts [][]byte
 		err  string
 	}{
 		{
 			"missing payload",
-			[]byte{
-				0x80, 0xe0, 0x44, 0xed, 0x88, 0x77, 0x6a, 0x15,
-				0x9d, 0xbb, 0x78, 0x12,
+			[][]byte{
+				{
+					0x80, 0xe0, 0x44, 0xed, 0x88, 0x77, 0x6a, 0x15,
+					0x9d, 0xbb, 0x78, 0x12,
+				},
 			},
 			"payload is too short",
 		},
 		{
 			"missing au header",
-			[]byte{
-				0x80, 0xe0, 0x44, 0xed, 0x88, 0x77, 0x6a, 0x15,
-				0x9d, 0xbb, 0x78, 0x12, 0x00, 0x10,
+			[][]byte{
+				{
+					0x80, 0xe0, 0x44, 0xed, 0x88, 0x77, 0x6a, 0x15,
+					0x9d, 0xbb, 0x78, 0x12, 0x00, 0x10,
+				},
 			},
 			"payload is too short",
 		},
 		{
 			"missing au",
-			[]byte{
-				0x80, 0xe0, 0x44, 0xed, 0x88, 0x77, 0x6a, 0x15,
-				0x9d, 0xbb, 0x78, 0x12, 0x00, 0x10, 0x0a, 0xd8,
+			[][]byte{
+				{
+					0x80, 0xe0, 0x44, 0xed, 0x88, 0x77, 0x6a, 0x15,
+					0x9d, 0xbb, 0x78, 0x12, 0x00, 0x10, 0x0a, 0xd8,
+				},
 			},
 			"payload is too short",
 		},
+		{
+			"invalid au headers length",
+			[][]byte{
+				{
+					0x80, 0xe0, 0x44, 0xed, 0x88, 0x77, 0x6a, 0x15,
+					0x9d, 0xbb, 0x78, 0x12, 0x00, 0x09,
+				},
+			},
+			"invalid AU-headers-length (9)",
+		},
 		{
 			"au index not zero",
-			[]byte{
-				0x80, 0xe0, 0x44, 0xed, 0x88, 0x77, 0x6a, 0x15,
-				0x9d, 0xbb, 0x78, 0x12, 0x00, 0x10, 0x0a, 0xd8 | 0x01,
+			[][]byte{
+				{
+					0x80, 0xe0, 0x44, 0xed, 0x88, 0x77, 0x6a, 0x15,
+					0x9d, 0xbb, 0x78, 0x12, 0x00, 0x10, 0x0a, 0xd8 | 0x01,
+				},
 			},
 			"AU-index field is not zero",
 		},
+		{
+			"fragmented with multiple AUs",
+			[][]byte{
+				{
+					0x80, 0x60, 0x0e, 0xa2, 0x0e, 0x01, 0x9b, 0xb7,
+					0x35, 0x6e, 0xcb, 0x3b, 0x00, 0x20,
+				},
+			},
+			"a fragmented packet can only contain one AU",
+		},
+		{
+			"fragmented with AU index not zero",
+			[][]byte{
+				{
+					0x80, 0x60, 0x0e, 0xa2, 0x0e, 0x01, 0x9b, 0xb7,
+					0x35, 0x6e, 0xcb, 0x3b, 0x00, 0x10, 0x0a, 0xd8 | 0x01,
+				},
+			},
+			"AU-index field is not zero",
+		},
+		{
+			"fragmented with missing au",
+			[][]byte{
+				{
+					0x80, 0x60, 0x0e, 0xa2, 0x0e, 0x01, 0x9b, 0xb7,
+					0x35, 0x6e, 0xcb, 0x3b, 0x00, 0x10, 0x0a, 0xd8,
+				},
+			},
+			"payload is too short",
+		},
+		{
+			"fragmented with multiple AUs in 2nd packet",
+			[][]byte{
+				mergeBytes(
+					[]byte{
+						0x80, 0x60, 0x44, 0xed, 0x88, 0x77, 0x66, 0x55,
+						0x9d, 0xbb, 0x78, 0x12, 0x0, 0x10, 0x5, 0xb0,
+					},
+					bytes.Repeat([]byte{0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07}, 182),
+				),
+				mergeBytes(
+					[]byte{
+						0x80, 0xe0, 0x44, 0xee, 0x88, 0x77, 0x66, 0x55,
+						0x9d, 0xbb, 0x78, 0x12, 0x00, 0x20,
+					},
+				),
+			},
+			"a fragmented packet can only contain one AU",
+		},
+		{
+			"fragmented with au index not zero in 2nd packet",
+			[][]byte{
+				mergeBytes(
+					[]byte{
+						0x80, 0x60, 0x44, 0xed, 0x88, 0x77, 0x66, 0x55,
+						0x9d, 0xbb, 0x78, 0x12, 0x0, 0x10, 0x5, 0xb0,
+					},
+					bytes.Repeat([]byte{0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07}, 182),
+				),
+				mergeBytes(
+					[]byte{
+						0x80, 0xe0, 0x44, 0xee, 0x88, 0x77, 0x66, 0x55,
+						0x9d, 0xbb, 0x78, 0x12, 0x00, 0x10, 0x0a, 0xd8 | 0x01,
+					},
+				),
+			},
+			"AU-index field is not zero",
+		},
+		{
+			"fragmented without payload in 2nd packet",
+			[][]byte{
+				mergeBytes(
+					[]byte{
+						0x80, 0x60, 0x44, 0xed, 0x88, 0x77, 0x66, 0x55,
+						0x9d, 0xbb, 0x78, 0x12, 0x0, 0x10, 0x5, 0xb0,
+					},
+					bytes.Repeat([]byte{0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07}, 182),
+				),
+				mergeBytes(
+					[]byte{
+						0x80, 0xe0, 0x44, 0xee, 0x88, 0x77, 0x66, 0x55,
+						0x9d, 0xbb, 0x78, 0x12, 0x00, 0x10, 0x0a, 0xd8,
+					},
+				),
+			},
+			"payload is too short",
+		},
 	} {
 		t.Run(ca.name, func(t *testing.T) {
 			d := NewDecoder(48000)
-			_, _, err := d.Decode(ca.byts)
-			require.NotEqual(t, ErrMorePacketsNeeded, err)
+			var err error
+			for _, pkt := range ca.pkts {
+				_, _, err = d.Decode(pkt)
+			}
 			require.Equal(t, ca.err, err.Error())
 		})
 	}
@@ -390,7 +390,6 @@ func TestDecodeErrors(t *testing.T) {
 			for _, pkt := range ca.pkts {
 				_, _, err = d.Decode(pkt)
 			}
-			require.NotEqual(t, ErrMorePacketsNeeded, err)
 			require.Equal(t, ca.err, err.Error())
 		})
 	}
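For context, the rewritten test loops above feed every packet of a case into one decoder and assert only the final error, since intermediate calls on a fragmented AU return ErrMorePacketsNeeded. A caller-side sketch of driving this API the same way (hypothetical helper, assumed to sit in the decoder's own package):

// decodeAUs feeds raw RTP payloads to the decoder in order, skipping the
// ErrMorePacketsNeeded results that fragmented AUs produce, and collects
// the AUs returned once each unit is complete.
func decodeAUs(d *Decoder, pkts [][]byte) ([][]byte, error) {
	var all [][]byte
	for _, byts := range pkts {
		aus, _, err := d.Decode(byts)
		if err == ErrMorePacketsNeeded {
			continue // keep feeding packets of the same AU
		}
		if err != nil {
			return nil, err
		}
		all = append(all, aus...)
	}
	return all, nil
}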