#include "libavutil/time.h"
#include "libavcodec/get_bits.h"
#include "avformat.h"
-#include "mpegts.h"
#include "network.h"
+#include "srtp.h"
#include "url.h"
#include "rtpdec.h"
#include "rtpdec_formats.h"
.codec_id = AV_CODEC_ID_OPUS,
};
-/* statistics functions */
+/* Passthrough handler for RFC 4103 real-time text (T.140): maps the "t140"
+ * encoding name to AV_CODEC_ID_TEXT data with no depacketization callbacks,
+ * so the raw payload is forwarded as-is. */
+static RTPDynamicProtocolHandler t140_dynamic_handler = { /* RFC 4103 */
+ .enc_name = "t140",
+ .codec_type = AVMEDIA_TYPE_DATA,
+ .codec_id = AV_CODEC_ID_TEXT,
+};
+
static RTPDynamicProtocolHandler *rtp_first_dynamic_payload_handler = NULL;
void ff_register_dynamic_payload_handler(RTPDynamicProtocolHandler *handler)
rtp_first_dynamic_payload_handler = handler;
}
-void av_register_rtp_dynamic_payload_handlers(void)
+void ff_register_rtp_dynamic_payload_handlers(void)
{
- ff_register_dynamic_payload_handler(&ff_mp4v_es_dynamic_handler);
- ff_register_dynamic_payload_handler(&ff_mpeg4_generic_dynamic_handler);
+ ff_register_dynamic_payload_handler(&ff_ac3_dynamic_handler);
ff_register_dynamic_payload_handler(&ff_amr_nb_dynamic_handler);
ff_register_dynamic_payload_handler(&ff_amr_wb_dynamic_handler);
+ ff_register_dynamic_payload_handler(&ff_dv_dynamic_handler);
+ ff_register_dynamic_payload_handler(&ff_g726_16_dynamic_handler);
+ ff_register_dynamic_payload_handler(&ff_g726_24_dynamic_handler);
+ ff_register_dynamic_payload_handler(&ff_g726_32_dynamic_handler);
+ ff_register_dynamic_payload_handler(&ff_g726_40_dynamic_handler);
+ ff_register_dynamic_payload_handler(&ff_h261_dynamic_handler);
ff_register_dynamic_payload_handler(&ff_h263_1998_dynamic_handler);
ff_register_dynamic_payload_handler(&ff_h263_2000_dynamic_handler);
ff_register_dynamic_payload_handler(&ff_h263_rfc2190_dynamic_handler);
ff_register_dynamic_payload_handler(&ff_h264_dynamic_handler);
+ ff_register_dynamic_payload_handler(&ff_hevc_dynamic_handler);
ff_register_dynamic_payload_handler(&ff_ilbc_dynamic_handler);
ff_register_dynamic_payload_handler(&ff_jpeg_dynamic_handler);
- ff_register_dynamic_payload_handler(&ff_vorbis_dynamic_handler);
- ff_register_dynamic_payload_handler(&ff_theora_dynamic_handler);
- ff_register_dynamic_payload_handler(&ff_qdm2_dynamic_handler);
- ff_register_dynamic_payload_handler(&ff_svq3_dynamic_handler);
ff_register_dynamic_payload_handler(&ff_mp4a_latm_dynamic_handler);
- ff_register_dynamic_payload_handler(&ff_vp8_dynamic_handler);
- ff_register_dynamic_payload_handler(&ff_qcelp_dynamic_handler);
- ff_register_dynamic_payload_handler(&realmedia_mp3_dynamic_handler);
- ff_register_dynamic_payload_handler(&speex_dynamic_handler);
- ff_register_dynamic_payload_handler(&opus_dynamic_handler);
-
- ff_register_dynamic_payload_handler(&ff_ms_rtp_asf_pfv_handler);
+ ff_register_dynamic_payload_handler(&ff_mp4v_es_dynamic_handler);
+ ff_register_dynamic_payload_handler(&ff_mpeg_audio_dynamic_handler);
+ ff_register_dynamic_payload_handler(&ff_mpeg_audio_robust_dynamic_handler);
+ ff_register_dynamic_payload_handler(&ff_mpeg_video_dynamic_handler);
+ ff_register_dynamic_payload_handler(&ff_mpeg4_generic_dynamic_handler);
+ ff_register_dynamic_payload_handler(&ff_mpegts_dynamic_handler);
ff_register_dynamic_payload_handler(&ff_ms_rtp_asf_pfa_handler);
-
+ ff_register_dynamic_payload_handler(&ff_ms_rtp_asf_pfv_handler);
+ ff_register_dynamic_payload_handler(&ff_qcelp_dynamic_handler);
+ ff_register_dynamic_payload_handler(&ff_qdm2_dynamic_handler);
ff_register_dynamic_payload_handler(&ff_qt_rtp_aud_handler);
ff_register_dynamic_payload_handler(&ff_qt_rtp_vid_handler);
ff_register_dynamic_payload_handler(&ff_quicktime_rtp_aud_handler);
ff_register_dynamic_payload_handler(&ff_quicktime_rtp_vid_handler);
-
- ff_register_dynamic_payload_handler(&ff_g726_16_dynamic_handler);
- ff_register_dynamic_payload_handler(&ff_g726_24_dynamic_handler);
- ff_register_dynamic_payload_handler(&ff_g726_32_dynamic_handler);
- ff_register_dynamic_payload_handler(&ff_g726_40_dynamic_handler);
+ ff_register_dynamic_payload_handler(&ff_svq3_dynamic_handler);
+ ff_register_dynamic_payload_handler(&ff_theora_dynamic_handler);
+ ff_register_dynamic_payload_handler(&ff_vorbis_dynamic_handler);
+ ff_register_dynamic_payload_handler(&ff_vp8_dynamic_handler);
+ ff_register_dynamic_payload_handler(&opus_dynamic_handler);
+ ff_register_dynamic_payload_handler(&realmedia_mp3_dynamic_handler);
+ ff_register_dynamic_payload_handler(&speex_dynamic_handler);
+ ff_register_dynamic_payload_handler(&t140_dynamic_handler);
}
RTPDynamicProtocolHandler *ff_rtp_handler_find_by_name(const char *name,
RTPDynamicProtocolHandler *handler;
for (handler = rtp_first_dynamic_payload_handler;
handler; handler = handler->next)
- if (!av_strcasecmp(name, handler->enc_name) &&
+ if (handler->enc_name &&
+ !av_strcasecmp(name, handler->enc_name) &&
codec_type == handler->codec_type)
return handler;
return NULL;
return AVERROR_INVALIDDATA;
}
+ s->last_rtcp_reception_time = av_gettime_relative();
s->last_rtcp_ntp_time = AV_RB64(buf + 8);
s->last_rtcp_timestamp = AV_RB32(buf + 16);
if (s->first_rtcp_ntp_time == AV_NOPTS_VALUE) {
s->first_rtcp_ntp_time = s->last_rtcp_ntp_time;
if (!s->base_timestamp)
s->base_timestamp = s->last_rtcp_timestamp;
- s->rtcp_ts_offset = s->last_rtcp_timestamp - s->base_timestamp;
+ s->rtcp_ts_offset = (int32_t)(s->last_rtcp_timestamp - s->base_timestamp);
}
break;
return 1;
}
+/**
+ * Update the interarrival jitter estimate for one received RTP packet,
+ * per RFC 3550 appendix A.8.
+ *
+ * @param s                 per-stream statistics; reads and updates
+ *                          s->transit and s->jitter (jitter is kept in the
+ *                          RFC's fixed-point form, scaled by 16)
+ * @param sent_timestamp    RTP timestamp from the packet header
+ * @param arrival_timestamp local arrival time converted to the same
+ *                          timestamp units as sent_timestamp
+ */
+static void rtcp_update_jitter(RTPStatistics *s, uint32_t sent_timestamp,
+ uint32_t arrival_timestamp)
+{
+ // Most of this is pretty straight from RFC 3550 appendix A.8
+ uint32_t transit = arrival_timestamp - sent_timestamp;
+ uint32_t prev_transit = s->transit;
+ int32_t d = transit - prev_transit;
+ // Doing the FFABS() call directly on the "transit - prev_transit"
+ // expression doesn't work, since it's an unsigned expression. Doing the
+ // transit calculation in unsigned is desired though, since it most
+ // probably will need to wrap around.
+ d = FFABS(d);
+ s->transit = transit;
+ // prev_transit == 0 is used as the "no previous packet" sentinel:
+ // skip the jitter update until we have two transit samples to compare.
+ if (!prev_transit)
+ return;
+ // J += |D| - (J + 8) / 16: fixed-point smoothing from RFC 3550 A.8
+ // (the +8 rounds the J/16 term to nearest).
+ s->jitter += d - (int32_t) ((s->jitter + 8) >> 4);
+}
+
int ff_rtp_check_and_send_back_rr(RTPDemuxContext *s, URLContext *fd,
AVIOContext *avio, int count)
{
uint32_t extended_max;
uint32_t expected_interval;
uint32_t received_interval;
- uint32_t lost_interval;
+ int32_t lost_interval;
uint32_t expected;
uint32_t fraction;
- uint64_t ntp_time = s->last_rtcp_ntp_time; // TODO: Get local ntp time?
if ((!fd && !avio) || (count < 1))
return -1;
avio_wb32(pb, 0); /* delay since last SR */
} else {
uint32_t middle_32_bits = s->last_rtcp_ntp_time >> 16; // this is valid, right? do we need to handle 64 bit values special?
- uint32_t delay_since_last = ntp_time - s->last_rtcp_ntp_time;
+ uint32_t delay_since_last = av_rescale(av_gettime_relative() - s->last_rtcp_reception_time,
+ 65536, AV_TIME_BASE);
avio_wb32(pb, middle_32_bits); /* last SR timestamp */
avio_wb32(pb, delay_since_last); /* delay since last SR */
AVIOContext *pb;
uint8_t *buf;
int64_t now;
- uint16_t first_missing, missing_mask;
+ uint16_t first_missing = 0, missing_mask = 0;
if (!fd && !avio)
return -1;
/* Send new feedback if enough time has elapsed since the last
* feedback packet. */
- now = av_gettime();
+ now = av_gettime_relative();
if (s->last_feedback_time &&
(now - s->last_feedback_time) < MIN_FEEDBACK_INTERVAL)
return 0;
/**
* open a new RTP parse context for stream 'st'. 'st' can be NULL for
- * MPEG2-TS streams to indicate that they should be demuxed inside the
- * rtp demux (otherwise AV_CODEC_ID_MPEG2TS packets are returned)
+ * MPEG2-TS streams.
*/
RTPDemuxContext *ff_rtp_parse_open(AVFormatContext *s1, AVStream *st,
int payload_type, int queue_size)
s->st = st;
s->queue_size = queue_size;
rtp_init_statistics(&s->statistics, 0);
- if (!strcmp(ff_rtp_enc_name(payload_type), "MP2T")) {
- s->ts = ff_mpegts_parse_open(s->ic);
- if (s->ts == NULL) {
- av_free(s);
- return NULL;
- }
- } else if (st) {
+ if (st) {
switch (st->codec->codec_id) {
- case AV_CODEC_ID_MPEG1VIDEO:
- case AV_CODEC_ID_MPEG2VIDEO:
- case AV_CODEC_ID_MP2:
- case AV_CODEC_ID_MP3:
- case AV_CODEC_ID_MPEG4:
- case AV_CODEC_ID_H263:
- case AV_CODEC_ID_H264:
- st->need_parsing = AVSTREAM_PARSE_FULL;
- break;
- case AV_CODEC_ID_VORBIS:
- st->need_parsing = AVSTREAM_PARSE_HEADERS;
- break;
case AV_CODEC_ID_ADPCM_G722:
/* According to RFC 3551, the stream clock rate is 8000
* even if the sample rate is 16000. */
s->handler = handler;
}
+/**
+ * Configure SRTP decryption for this RTP session.
+ *
+ * @param suite  SRTP crypto suite name (format defined by ff_srtp_set_crypto();
+ *               presumably the SDP "crypto" attribute suite — confirm in srtp.h)
+ * @param params key parameters for the suite, in the encoding expected by
+ *               ff_srtp_set_crypto()
+ *
+ * srtp_enabled is set only when ff_srtp_set_crypto() accepts the
+ * suite/params pair (returns 0); on failure the session stays unencrypted.
+ */
+void ff_rtp_parse_set_crypto(RTPDemuxContext *s, const char *suite,
+ const char *params)
+{
+ if (!ff_srtp_set_crypto(&s->srtp, suite, params))
+ s->srtp_enabled = 1;
+}
+
/**
* This was the second switch in rtp_parse packet.
* Normalizes time, if required, sets stream_index, etc.
static int rtp_parse_packet_internal(RTPDemuxContext *s, AVPacket *pkt,
const uint8_t *buf, int len)
{
- unsigned int ssrc, h;
- int payload_type, seq, ret, flags = 0;
- int ext;
+ unsigned int ssrc;
+ int payload_type, seq, flags = 0;
+ int ext, csrc;
AVStream *st;
uint32_t timestamp;
int rv = 0;
+ csrc = buf[0] & 0x0f;
ext = buf[0] & 0x10;
payload_type = buf[1] & 0x7f;
if (buf[1] & 0x80)
len -= 12;
buf += 12;
+ len -= 4 * csrc;
+ buf += 4 * csrc;
+ if (len < 0)
+ return AVERROR_INVALIDDATA;
+
/* RFC 3550 Section 5.3.1 RTP Header Extension handling */
if (ext) {
if (len < 4)
buf += ext;
}
- if (!st) {
- /* specific MPEG2-TS demux support */
- ret = ff_mpegts_parse_packet(s->ts, pkt, buf, len);
- /* The only error that can be returned from ff_mpegts_parse_packet
- * is "no more data to return from the provided buffer", so return
- * AVERROR(EAGAIN) for all errors */
- if (ret < 0)
- return AVERROR(EAGAIN);
- if (ret < len) {
- s->read_buf_size = FFMIN(len - ret, sizeof(s->buf));
- memcpy(s->buf, buf + ret, s->read_buf_size);
- s->read_buf_index = 0;
- return 1;
- }
- return 0;
- } else if (s->handler && s->handler->parse_packet) {
+ if (s->handler && s->handler->parse_packet) {
rv = s->handler->parse_packet(s->ic, s->dynamic_protocol_context,
s->st, pkt, ×tamp, buf, len, seq,
flags);
- } else {
- /* At this point, the RTP header has been stripped;
- * This is ASSUMING that there is only 1 CSRC, which isn't wise. */
- switch (st->codec->codec_id) {
- case AV_CODEC_ID_MP2:
- case AV_CODEC_ID_MP3:
- /* better than nothing: skip MPEG audio RTP header */
- if (len <= 4)
- return -1;
- h = AV_RB32(buf);
- len -= 4;
- buf += 4;
- av_new_packet(pkt, len);
- memcpy(pkt->data, buf, len);
- break;
- case AV_CODEC_ID_MPEG1VIDEO:
- case AV_CODEC_ID_MPEG2VIDEO:
- /* better than nothing: skip MPEG video RTP header */
- if (len <= 4)
- return -1;
- h = AV_RB32(buf);
- buf += 4;
- len -= 4;
- if (h & (1 << 26)) {
- /* MPEG-2 */
- if (len <= 4)
- return -1;
- buf += 4;
- len -= 4;
- }
- av_new_packet(pkt, len);
- memcpy(pkt->data, buf, len);
- break;
- default:
- av_new_packet(pkt, len);
- memcpy(pkt->data, buf, len);
- break;
- }
-
+ } else if (st) {
+ if ((rv = av_new_packet(pkt, len)) < 0)
+ return rv;
+ memcpy(pkt->data, buf, len);
pkt->stream_index = st->index;
+ } else {
+ return AVERROR(EINVAL);
}
// now perform timestamp things....
static void enqueue_packet(RTPDemuxContext *s, uint8_t *buf, int len)
{
uint16_t seq = AV_RB16(buf + 2);
- RTPPacket *cur = s->queue, *prev = NULL, *packet;
+ RTPPacket **cur = &s->queue, *packet;
/* Find the correct place in the queue to insert the packet */
- while (cur) {
- int16_t diff = seq - cur->seq;
+ while (*cur) {
+ int16_t diff = seq - (*cur)->seq;
if (diff < 0)
break;
- prev = cur;
- cur = cur->next;
+ cur = &(*cur)->next;
}
packet = av_mallocz(sizeof(*packet));
if (!packet)
return;
- packet->recvtime = av_gettime();
+ packet->recvtime = av_gettime_relative();
packet->seq = seq;
packet->len = len;
packet->buf = buf;
- packet->next = cur;
- if (prev)
- prev->next = packet;
- else
- s->queue = packet;
+ packet->next = *cur;
+ *cur = packet;
s->queue_len++;
}
uint8_t **bufptr, int len)
{
uint8_t *buf = bufptr ? *bufptr : NULL;
- int ret, flags = 0;
+ int flags = 0;
uint32_t timestamp;
int rv = 0;
if (s->prev_ret <= 0)
return rtp_parse_queued_packet(s, pkt);
/* return the next packets, if any */
- if (s->st && s->handler && s->handler->parse_packet) {
+ if (s->handler && s->handler->parse_packet) {
/* timestamp should be overwritten by parse_packet, if not,
* the packet is left with pts == AV_NOPTS_VALUE */
timestamp = RTP_NOTS_VALUE;
flags);
finalize_packet(s, pkt, timestamp);
return rv;
- } else {
- // TODO: Move to a dynamic packet handler (like above)
- if (s->read_buf_index >= s->read_buf_size)
- return AVERROR(EAGAIN);
- ret = ff_mpegts_parse_packet(s->ts, pkt, s->buf + s->read_buf_index,
- s->read_buf_size - s->read_buf_index);
- if (ret < 0)
- return AVERROR(EAGAIN);
- s->read_buf_index += ret;
- if (s->read_buf_index < s->read_buf_size)
- return 1;
- else
- return 0;
}
}
return rtcp_parse_packet(s, buf, len);
}
+ if (s->st) {
+ int64_t received = av_gettime_relative();
+ uint32_t arrival_ts = av_rescale_q(received, AV_TIME_BASE_Q,
+ s->st->time_base);
+ timestamp = AV_RB32(buf + 4);
+ // Calculate the jitter immediately, before queueing the packet
+ // into the reordering queue.
+ rtcp_update_jitter(&s->statistics, timestamp, arrival_ts);
+ }
+
if ((s->seq == 0 && !s->queue) || s->queue_size <= 1) {
/* First packet, or no reordering */
return rtp_parse_packet_internal(s, pkt, buf, len);
int ff_rtp_parse_packet(RTPDemuxContext *s, AVPacket *pkt,
uint8_t **bufptr, int len)
{
- int rv = rtp_parse_one_packet(s, pkt, bufptr, len);
+ int rv;
+ if (s->srtp_enabled && bufptr && ff_srtp_decrypt(&s->srtp, *bufptr, &len) < 0)
+ return -1;
+ rv = rtp_parse_one_packet(s, pkt, bufptr, len);
s->prev_ret = rv;
while (rv == AVERROR(EAGAIN) && has_next_packet(s))
rv = rtp_parse_queued_packet(s, pkt);
void ff_rtp_parse_close(RTPDemuxContext *s)
{
ff_rtp_reset_packet_queue(s);
- if (!strcmp(ff_rtp_enc_name(s->payload_type), "MP2T")) {
- ff_mpegts_parse_close(s->ts);
- }
+ ff_srtp_free(&s->srtp);
av_free(s);
}
-int ff_parse_fmtp(AVStream *stream, PayloadContext *data, const char *p,
- int (*parse_fmtp)(AVStream *stream,
+int ff_parse_fmtp(AVFormatContext *s,
+ AVStream *stream, PayloadContext *data, const char *p,
+ int (*parse_fmtp)(AVFormatContext *s,
+ AVStream *stream,
PayloadContext *data,
- char *attr, char *value))
+ const char *attr, const char *value))
{
char attr[256];
char *value;
while (ff_rtsp_next_attr_and_value(&p,
attr, sizeof(attr),
value, value_size)) {
- res = parse_fmtp(stream, data, attr, value);
+ res = parse_fmtp(s, stream, data, attr, value);
if (res < 0 && res != AVERROR_PATCHWELCOME) {
av_free(value);
return res;
int ff_rtp_finalize_packet(AVPacket *pkt, AVIOContext **dyn_buf, int stream_idx)
{
+ int ret;
av_init_packet(pkt);
pkt->size = avio_close_dyn_buf(*dyn_buf, &pkt->data);
pkt->stream_index = stream_idx;
- pkt->destruct = av_destruct_packet;
- *dyn_buf = NULL;
+ *dyn_buf = NULL;
+ if ((ret = av_packet_from_data(pkt, pkt->data, pkt->size)) < 0) {
+ av_freep(&pkt->data);
+ return ret;
+ }
return pkt->size;
}