Commit 5d6a40bc authored by Michael Niedermayer

Merge remote-tracking branch 'qatar/master'

* qatar/master:
  rtsp: Don't use av_malloc(0) if there are no streams
  rtsp: Don't use uninitialized data if there are no streams
  vaapi: mpeg2: fix slice_vertical_position calculation.
  hwaccel: mpeg2: decode first field, if requested.
  cosmetics: Fix indentation
  rtsp: Don't expose the MS-RTSP RTX data stream to the caller
Merged-by: Michael Niedermayer <michaelni@gmx.at>
parents 4f043c8a 9294f538
...@@ -1635,6 +1635,12 @@ static int mpeg_field_start(MpegEncContext *s, const uint8_t *buf, int buf_size) ...@@ -1635,6 +1635,12 @@ static int mpeg_field_start(MpegEncContext *s, const uint8_t *buf, int buf_size)
return -1; return -1;
} }
if (s->avctx->hwaccel &&
(s->avctx->slice_flags & SLICE_FLAG_ALLOW_FIELD)) {
if (s->avctx->hwaccel->end_frame(s->avctx) < 0)
av_log(avctx, AV_LOG_ERROR, "hardware accelerator failed to decode first field\n");
}
for (i = 0; i < 4; i++) { for (i = 0; i < 4; i++) {
s->current_picture.f.data[i] = s->current_picture_ptr->f.data[i]; s->current_picture.f.data[i] = s->current_picture_ptr->f.data[i];
if (s->picture_structure == PICT_BOTTOM_FIELD) { if (s->picture_structure == PICT_BOTTOM_FIELD) {
......
...@@ -132,7 +132,7 @@ static int vaapi_mpeg2_decode_slice(AVCodecContext *avctx, const uint8_t *buffer ...@@ -132,7 +132,7 @@ static int vaapi_mpeg2_decode_slice(AVCodecContext *avctx, const uint8_t *buffer
return -1; return -1;
slice_param->macroblock_offset = macroblock_offset; slice_param->macroblock_offset = macroblock_offset;
slice_param->slice_horizontal_position = s->mb_x; slice_param->slice_horizontal_position = s->mb_x;
slice_param->slice_vertical_position = s->mb_y; slice_param->slice_vertical_position = s->mb_y >> (s->picture_structure != PICT_FRAME);
slice_param->quantiser_scale_code = quantiser_scale_code; slice_param->quantiser_scale_code = quantiser_scale_code;
slice_param->intra_slice_flag = intra_slice_flag; slice_param->intra_slice_flag = intra_slice_flag;
return 0; return 0;
......
...@@ -385,7 +385,7 @@ RTPDemuxContext *ff_rtp_parse_open(AVFormatContext *s1, AVStream *st, URLContext ...@@ -385,7 +385,7 @@ RTPDemuxContext *ff_rtp_parse_open(AVFormatContext *s1, AVStream *st, URLContext
av_free(s); av_free(s);
return NULL; return NULL;
} }
} else { } else if (st) {
switch(st->codec->codec_id) { switch(st->codec->codec_id) {
case CODEC_ID_MPEG1VIDEO: case CODEC_ID_MPEG1VIDEO:
case CODEC_ID_MPEG2VIDEO: case CODEC_ID_MPEG2VIDEO:
......
...@@ -169,6 +169,9 @@ static int amr_parse_sdp_line(AVFormatContext *s, int st_index, ...@@ -169,6 +169,9 @@ static int amr_parse_sdp_line(AVFormatContext *s, int st_index,
const char *p; const char *p;
int ret; int ret;
if (st_index < 0)
return 0;
/* Parse an fmtp line this one: /* Parse an fmtp line this one:
* a=fmtp:97 octet-align=1; interleaving=0 * a=fmtp:97 octet-align=1; interleaving=0
* That is, a normal fmtp: line followed by semicolon & space * That is, a normal fmtp: line followed by semicolon & space
......
...@@ -130,6 +130,8 @@ int ff_wms_parse_sdp_a_line(AVFormatContext *s, const char *p) ...@@ -130,6 +130,8 @@ int ff_wms_parse_sdp_a_line(AVFormatContext *s, const char *p)
static int asfrtp_parse_sdp_line(AVFormatContext *s, int stream_index, static int asfrtp_parse_sdp_line(AVFormatContext *s, int stream_index,
PayloadContext *asf, const char *line) PayloadContext *asf, const char *line)
{ {
if (stream_index < 0)
return 0;
if (av_strstart(line, "stream:", &line)) { if (av_strstart(line, "stream:", &line)) {
RTSPState *rt = s->priv_data; RTSPState *rt = s->priv_data;
......
...@@ -357,10 +357,15 @@ static void h264_free_context(PayloadContext *data) ...@@ -357,10 +357,15 @@ static void h264_free_context(PayloadContext *data)
static int parse_h264_sdp_line(AVFormatContext *s, int st_index, static int parse_h264_sdp_line(AVFormatContext *s, int st_index,
PayloadContext *h264_data, const char *line) PayloadContext *h264_data, const char *line)
{ {
AVStream *stream = s->streams[st_index]; AVStream *stream;
AVCodecContext *codec = stream->codec; AVCodecContext *codec;
const char *p = line; const char *p = line;
if (st_index < 0)
return 0;
stream = s->streams[st_index];
codec = stream->codec;
assert(h264_data->cookie == MAGIC_COOKIE); assert(h264_data->cookie == MAGIC_COOKIE);
if (av_strstart(p, "framesize:", &p)) { if (av_strstart(p, "framesize:", &p)) {
......
...@@ -168,6 +168,9 @@ static int latm_parse_sdp_line(AVFormatContext *s, int st_index, ...@@ -168,6 +168,9 @@ static int latm_parse_sdp_line(AVFormatContext *s, int st_index,
{ {
const char *p; const char *p;
if (st_index < 0)
return 0;
if (av_strstart(line, "fmtp:", &p)) if (av_strstart(line, "fmtp:", &p))
return ff_parse_fmtp(s->streams[st_index], data, p, parse_fmtp); return ff_parse_fmtp(s->streams[st_index], data, p, parse_fmtp);
......
...@@ -223,6 +223,9 @@ static int parse_sdp_line(AVFormatContext *s, int st_index, ...@@ -223,6 +223,9 @@ static int parse_sdp_line(AVFormatContext *s, int st_index,
{ {
const char *p; const char *p;
if (st_index < 0)
return 0;
if (av_strstart(line, "fmtp:", &p)) if (av_strstart(line, "fmtp:", &p))
return ff_parse_fmtp(s->streams[st_index], data, p, parse_fmtp); return ff_parse_fmtp(s->streams[st_index], data, p, parse_fmtp);
......
...@@ -376,6 +376,9 @@ static int xiph_parse_sdp_line(AVFormatContext *s, int st_index, ...@@ -376,6 +376,9 @@ static int xiph_parse_sdp_line(AVFormatContext *s, int st_index,
{ {
const char *p; const char *p;
if (st_index < 0)
return 0;
if (av_strstart(line, "fmtp:", &p)) { if (av_strstart(line, "fmtp:", &p)) {
return ff_parse_fmtp(s->streams[st_index], data, p, return ff_parse_fmtp(s->streams[st_index], data, p,
xiph_parse_fmtp_pair); xiph_parse_fmtp_pair);
......
...@@ -374,6 +374,10 @@ static void sdp_parse_line(AVFormatContext *s, SDPParseState *s1, ...@@ -374,6 +374,10 @@ static void sdp_parse_line(AVFormatContext *s, SDPParseState *s1,
if (!strcmp(ff_rtp_enc_name(rtsp_st->sdp_payload_type), "MP2T")) { if (!strcmp(ff_rtp_enc_name(rtsp_st->sdp_payload_type), "MP2T")) {
/* no corresponding stream */ /* no corresponding stream */
} else if (rt->server_type == RTSP_SERVER_WMS &&
codec_type == AVMEDIA_TYPE_DATA) {
/* RTX stream, a stream that carries all the other actual
* audio/video streams. Don't expose this to the callers. */
} else { } else {
st = avformat_new_stream(s, NULL); st = avformat_new_stream(s, NULL);
if (!st) if (!st)
...@@ -430,9 +434,11 @@ static void sdp_parse_line(AVFormatContext *s, SDPParseState *s1, ...@@ -430,9 +434,11 @@ static void sdp_parse_line(AVFormatContext *s, SDPParseState *s1,
/* NOTE: rtpmap is only supported AFTER the 'm=' tag */ /* NOTE: rtpmap is only supported AFTER the 'm=' tag */
get_word(buf1, sizeof(buf1), &p); get_word(buf1, sizeof(buf1), &p);
payload_type = atoi(buf1); payload_type = atoi(buf1);
st = s->streams[s->nb_streams - 1];
rtsp_st = rt->rtsp_streams[rt->nb_rtsp_streams - 1]; rtsp_st = rt->rtsp_streams[rt->nb_rtsp_streams - 1];
if (rtsp_st->stream_index >= 0) {
st = s->streams[rtsp_st->stream_index];
sdp_parse_rtpmap(s, st, rtsp_st, payload_type, p); sdp_parse_rtpmap(s, st, rtsp_st, payload_type, p);
}
} else if (av_strstart(p, "fmtp:", &p) || } else if (av_strstart(p, "fmtp:", &p) ||
av_strstart(p, "framesize:", &p)) { av_strstart(p, "framesize:", &p)) {
/* NOTE: fmtp is only supported AFTER the 'a=rtpmap:xxx' tag */ /* NOTE: fmtp is only supported AFTER the 'a=rtpmap:xxx' tag */
...@@ -467,14 +473,15 @@ static void sdp_parse_line(AVFormatContext *s, SDPParseState *s1, ...@@ -467,14 +473,15 @@ static void sdp_parse_line(AVFormatContext *s, SDPParseState *s1,
if (rt->server_type == RTSP_SERVER_WMS) if (rt->server_type == RTSP_SERVER_WMS)
ff_wms_parse_sdp_a_line(s, p); ff_wms_parse_sdp_a_line(s, p);
if (s->nb_streams > 0) { if (s->nb_streams > 0) {
rtsp_st = rt->rtsp_streams[rt->nb_rtsp_streams - 1];
if (rt->server_type == RTSP_SERVER_REAL) if (rt->server_type == RTSP_SERVER_REAL)
ff_real_parse_sdp_a_line(s, s->nb_streams - 1, p); ff_real_parse_sdp_a_line(s, rtsp_st->stream_index, p);
rtsp_st = rt->rtsp_streams[rt->nb_rtsp_streams - 1];
if (rtsp_st->dynamic_handler && if (rtsp_st->dynamic_handler &&
rtsp_st->dynamic_handler->parse_sdp_a_line) rtsp_st->dynamic_handler->parse_sdp_a_line)
rtsp_st->dynamic_handler->parse_sdp_a_line(s, rtsp_st->dynamic_handler->parse_sdp_a_line(s,
s->nb_streams - 1, rtsp_st->stream_index,
rtsp_st->dynamic_protocol_context, buf); rtsp_st->dynamic_protocol_context, buf);
} }
} }
...@@ -1245,8 +1252,9 @@ int ff_rtsp_make_setup_request(AVFormatContext *s, const char *host, int port, ...@@ -1245,8 +1252,9 @@ int ff_rtsp_make_setup_request(AVFormatContext *s, const char *host, int port,
* UDP. When trying to set it up for TCP streams, the server * UDP. When trying to set it up for TCP streams, the server
* will return an error. Therefore, we skip those streams. */ * will return an error. Therefore, we skip those streams. */
if (rt->server_type == RTSP_SERVER_WMS && if (rt->server_type == RTSP_SERVER_WMS &&
(rtsp_st->stream_index < 0 ||
s->streams[rtsp_st->stream_index]->codec->codec_type == s->streams[rtsp_st->stream_index]->codec->codec_type ==
AVMEDIA_TYPE_DATA) AVMEDIA_TYPE_DATA))
continue; continue;
snprintf(transport, sizeof(transport) - 1, snprintf(transport, sizeof(transport) - 1,
"%s/TCP;", trans_pref); "%s/TCP;", trans_pref);
...@@ -1378,7 +1386,7 @@ int ff_rtsp_make_setup_request(AVFormatContext *s, const char *host, int port, ...@@ -1378,7 +1386,7 @@ int ff_rtsp_make_setup_request(AVFormatContext *s, const char *host, int port,
goto fail; goto fail;
} }
if (reply->timeout > 0) if (rt->nb_rtsp_streams && reply->timeout > 0)
rt->timeout = reply->timeout; rt->timeout = reply->timeout;
if (rt->server_type == RTSP_SERVER_REAL) if (rt->server_type == RTSP_SERVER_REAL)
......
...@@ -159,8 +159,9 @@ static int rtsp_read_header(AVFormatContext *s) ...@@ -159,8 +159,9 @@ static int rtsp_read_header(AVFormatContext *s)
if (ret) if (ret)
return ret; return ret;
rt->real_setup_cache = av_mallocz(2 * s->nb_streams * sizeof(*rt->real_setup_cache)); rt->real_setup_cache = !s->nb_streams ? NULL :
if (!rt->real_setup_cache) av_mallocz(2 * s->nb_streams * sizeof(*rt->real_setup_cache));
if (!rt->real_setup_cache && s->nb_streams)
return AVERROR(ENOMEM); return AVERROR(ENOMEM);
rt->real_setup = rt->real_setup_cache + s->nb_streams; rt->real_setup = rt->real_setup_cache + s->nb_streams;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment