Commit 9891004b authored by Michael Niedermayer

Merge remote branch 'qatar/master'

* qatar/master:
  Partially merged: flvdec: Allow parsing keyframes metadata without seeking in most cases
  Error out if vaapi is not found
  avio: undeprecate av_url_read_fseek/fpause under nicer names
  libvo-*: Don't use deprecated sample format names and enum names
DUPLICATE  flvdec: Fix support for flvtool2 "keyframes based" generated index
DUPLICATE  libavcodec: Use "const enum AVSampleFormat[]" in AVCodec initialization
  Fix the conversion of AV_SAMPLE_FMT_FLT and _DBL to AV_SAMPLE_FMT_S32.
  Convert some undefined 1<<31 shifts into 1U<<31.

Conflicts:
	configure
	libavcodec/libvo-aacenc.c
	libavcodec/libvo-amrwbenc.c
	libavformat/flvdec.c
Merged-by: Michael Niedermayer <michaelni@gmx.at>
parents a0f17473 578d6861
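
The recurring 1<<31 -> 1U<<31 changes in the hunks below address a C undefined-behaviour issue: shifting a 1 into the sign bit of a 32-bit signed int is undefined, while 1U<<31 is a well-defined unsigned constant. A minimal standalone sketch (not part of this commit) of the unsigned form used as a sign-bit mask, as it is used below to flip the IEEE-754 sign bit of floats reinterpreted as 32-bit integers or to test bit 31 of a header word:

#include <inttypes.h>
#include <stdio.h>

int main(void)
{
    /* 1 << 31 shifts into the sign bit of a signed int (undefined
     * behaviour); 1U << 31 is a well-defined unsigned value. */
    uint32_t sign_bit = 1U << 31;

    uint32_t x = 0x3f800000;            /* bit pattern of 1.0f */
    uint32_t negated = x ^ sign_bit;    /* bit pattern of -1.0f */

    printf("sign bit: 0x%08" PRIx32 ", negated 1.0f: 0x%08" PRIx32 "\n",
           sign_bit, negated);
    return 0;
}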
@@ -2872,6 +2872,7 @@ done
 check_lib math.h sin -lm
 disabled crystalhd || check_lib libcrystalhd/libcrystalhd_if.h DtsCrystalHDVersion -lcrystalhd || disable crystalhd
+enabled vaapi && require vaapi va/va.h vaInitialize -lva
 check_mathfunc exp2
 check_mathfunc exp2f
@@ -969,19 +969,19 @@ static inline float *VMUL4S(float *dst, const float *v, unsigned idx,
 union float754 s = { .f = *scale };
 union float754 t;
-t.i = s.i ^ (sign & 1<<31);
+t.i = s.i ^ (sign & 1U<<31);
 *dst++ = v[idx & 3] * t.f;
 sign <<= nz & 1; nz >>= 1;
-t.i = s.i ^ (sign & 1<<31);
+t.i = s.i ^ (sign & 1U<<31);
 *dst++ = v[idx>>2 & 3] * t.f;
 sign <<= nz & 1; nz >>= 1;
-t.i = s.i ^ (sign & 1<<31);
+t.i = s.i ^ (sign & 1U<<31);
 *dst++ = v[idx>>4 & 3] * t.f;
 sign <<= nz & 1; nz >>= 1;
-t.i = s.i ^ (sign & 1<<31);
+t.i = s.i ^ (sign & 1U<<31);
 *dst++ = v[idx>>6 & 3] * t.f;
 return dst;
@@ -1174,11 +1174,11 @@ static int decode_spectrum_and_dequant(AACContext *ac, float coef[1024],
 b += 4;
 n = (1 << b) + SHOW_UBITS(re, gb, b);
 LAST_SKIP_BITS(re, gb, b);
-*icf++ = cbrt_tab[n] | (bits & 1<<31);
+*icf++ = cbrt_tab[n] | (bits & 1U<<31);
 bits <<= 1;
 } else {
 unsigned v = ((const uint32_t*)vq)[cb_idx & 15];
-*icf++ = (bits & 1<<31) | v;
+*icf++ = (bits & 1U<<31) | v;
 bits <<= !!v;
 }
 cb_idx >>= 4;
@@ -145,8 +145,8 @@ if(ctx->fmt_pair == ofmt + AV_SAMPLE_FMT_NB*ifmt){\
 else CONV(AV_SAMPLE_FMT_U8 , uint8_t, AV_SAMPLE_FMT_S32, (*(const int32_t*)pi>>24) + 0x80)
 else CONV(AV_SAMPLE_FMT_S16, int16_t, AV_SAMPLE_FMT_S32, *(const int32_t*)pi>>16)
 else CONV(AV_SAMPLE_FMT_S32, int32_t, AV_SAMPLE_FMT_S32, *(const int32_t*)pi)
-else CONV(AV_SAMPLE_FMT_FLT, float , AV_SAMPLE_FMT_S32, *(const int32_t*)pi*(1.0 / (1<<31)))
-else CONV(AV_SAMPLE_FMT_DBL, double , AV_SAMPLE_FMT_S32, *(const int32_t*)pi*(1.0 / (1<<31)))
+else CONV(AV_SAMPLE_FMT_FLT, float , AV_SAMPLE_FMT_S32, *(const int32_t*)pi*(1.0 / (1U<<31)))
+else CONV(AV_SAMPLE_FMT_DBL, double , AV_SAMPLE_FMT_S32, *(const int32_t*)pi*(1.0 / (1U<<31)))
 else CONV(AV_SAMPLE_FMT_U8 , uint8_t, AV_SAMPLE_FMT_FLT, av_clip_uint8( lrintf(*(const float*)pi * (1<<7)) + 0x80))
 else CONV(AV_SAMPLE_FMT_S16, int16_t, AV_SAMPLE_FMT_FLT, av_clip_int16( lrintf(*(const float*)pi * (1<<15))))
 else CONV(AV_SAMPLE_FMT_S32, int32_t, AV_SAMPLE_FMT_FLT, av_clipl_int32(llrintf(*(const float*)pi * (1U<<31))))
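
For the two changed CONV lines above: with a 32-bit int, (1<<31) typically wraps to INT_MIN, so the old scale factor 1.0/(1<<31) was a tiny negative number that inverted the sign of every converted sample. A small standalone check (not from the tree; the sample value is arbitrary) of the corrected factor:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    int32_t pi = INT32_MIN / 2;   /* arbitrary S32 sample, nominally -0.5 */

    /* What 1.0 / (1 << 31) typically evaluated to (INT_MIN denominator)
     * versus the intended +1/2^31 from 1.0 / (1U << 31). */
    double old_factor = 1.0 / (double)INT32_MIN;
    double new_factor = 1.0 / (1U << 31);

    printf("old: %f (sign flipped)\n", pi * old_factor);   /*  0.500000 */
    printf("new: %f (correct)\n",      pi * new_factor);   /* -0.500000 */
    return 0;
}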
@@ -2610,7 +2610,7 @@ static inline uint32_t clipf_c_one(uint32_t a, uint32_t mini,
 {
 if(a > mini) return mini;
-else if((a^(1<<31)) > maxisign) return maxi;
+else if((a^(1U<<31)) > maxisign) return maxi;
 else return a;
 }
@@ -2618,7 +2618,7 @@ static void vector_clipf_c_opposite_sign(float *dst, const float *src, float *mi
 int i;
 uint32_t mini = *(uint32_t*)min;
 uint32_t maxi = *(uint32_t*)max;
-uint32_t maxisign = maxi ^ (1<<31);
+uint32_t maxisign = maxi ^ (1U<<31);
 uint32_t *dsti = (uint32_t*)dst;
 const uint32_t *srci = (const uint32_t*)src;
 for(i=0; i<len; i+=8) {
@@ -180,7 +180,7 @@ static int decode_frame(AVCodecContext *avctx,
 return -1;
 }
 /* bit 31 means same as previous pic */
-f->pict_type = (header & (1<<31))? FF_P_TYPE : FF_I_TYPE;
+f->pict_type = (header & (1U<<31))? FF_P_TYPE : FF_I_TYPE;
 f->key_frame = f->pict_type == FF_I_TYPE;
 if (f->pict_type == FF_I_TYPE) {
@@ -223,7 +223,7 @@ static int decode_frame(AVCodecContext *avctx,
 return -1;
 }
 /* bit 31 means same as previous pic */
-f->pict_type = (header & (1<<31))? FF_P_TYPE : FF_I_TYPE;
+f->pict_type = (header & (1U<<31))? FF_P_TYPE : FF_I_TYPE;
 f->key_frame = f->pict_type == FF_I_TYPE;
 if (f->pict_type == FF_I_TYPE) {
@@ -122,7 +122,7 @@ AVCodec ff_libvo_aacenc_encoder = {
 aac_encode_frame,
 aac_encode_close,
 NULL,
-.sample_fmts = (const enum SampleFormat[]){SAMPLE_FMT_S16,SAMPLE_FMT_NONE},
-.long_name = NULL_IF_CONFIG_SMALL("VisualOn libvo-aacenc AAC"),
+.sample_fmts = (const enum AVSampleFormat[]){AV_SAMPLE_FMT_S16,AV_SAMPLE_FMT_NONE},
+.long_name = NULL_IF_CONFIG_SMALL("libvo-aacenc AAC"),
 };
@@ -120,8 +120,8 @@ AVCodec ff_libvo_amrwbenc_encoder = {
 amr_wb_encode_frame,
 amr_wb_encode_close,
 NULL,
-.sample_fmts = (const enum SampleFormat[]){SAMPLE_FMT_S16,SAMPLE_FMT_NONE},
-.long_name = NULL_IF_CONFIG_SMALL("VisualOn libvo-amrwbenc Adaptive Multi-Rate "
+.sample_fmts = (const enum AVSampleFormat[]){AV_SAMPLE_FMT_S16,AV_SAMPLE_FMT_NONE},
+.long_name = NULL_IF_CONFIG_SMALL("libvo-amrwbenc Adaptive Multi-Rate "
 "(AMR) Wide-Band"),
 };
@@ -122,7 +122,7 @@ static void ff_imdct_calc_altivec(FFTContext *s, FFTSample *output, const FFTSam
 int n = 1 << s->mdct_bits;
 int n4 = n >> 2;
 int n16 = n >> 4;
-vec_u32 sign = {1<<31,1<<31,1<<31,1<<31};
+vec_u32 sign = {1U<<31,1U<<31,1U<<31,1U<<31};
 vec_u32 *p0 = (vec_u32*)(output+n4);
 vec_u32 *p1 = (vec_u32*)(output+n4*3);
@@ -394,7 +394,7 @@ static void put_float(PutBitContext *pb, float f)
 mant = (int)ldexp(frexp(f, &exp), 20);
 exp += 788 - 20;
 if (mant < 0) {
-res |= (1 << 31);
+res |= (1U << 31);
 mant = -mant;
 }
 res |= mant | (exp << 21);
@@ -1234,7 +1234,7 @@ static int asf_read_seek(AVFormatContext *s, int stream_index, int64_t pts, int
 /* Try using the protocol's read_seek if available */
 if(s->pb) {
-int ret = ffio_read_seek(s->pb, stream_index, pts, flags);
+int ret = avio_seek_time(s->pb, stream_index, pts, flags);
 if(ret >= 0)
 asf_reset_header(s);
 if (ret != AVERROR(ENOSYS))
@@ -618,4 +618,31 @@ int udp_get_file_handle(URLContext *h);
 */
 const char *avio_enum_protocols(void **opaque, int output);
+/**
+* Pause and resume playing - only meaningful if using a network streaming
+* protocol (e.g. MMS).
+* @param pause 1 for pause, 0 for resume
+*/
+int avio_pause(AVIOContext *h, int pause);
+/**
+* Seek to a given timestamp relative to some component stream.
+* Only meaningful if using a network streaming protocol (e.g. MMS.).
+* @param stream_index The stream index that the timestamp is relative to.
+* If stream_index is (-1) the timestamp should be in AV_TIME_BASE
+* units from the beginning of the presentation.
+* If a stream_index >= 0 is used and the protocol does not support
+* seeking based on component streams, the call will fail with ENOTSUP.
+* @param timestamp timestamp in AVStream.time_base units
+* or if there is no stream specified then in AV_TIME_BASE units.
+* @param flags Optional combination of AVSEEK_FLAG_BACKWARD, AVSEEK_FLAG_BYTE
+* and AVSEEK_FLAG_ANY. The protocol may silently ignore
+* AVSEEK_FLAG_BACKWARD and AVSEEK_FLAG_ANY, but AVSEEK_FLAG_BYTE will
+* fail with ENOTSUP if used and not supported.
+* @return >= 0 on success
+* @see AVInputFormat::read_seek
+*/
+int64_t avio_seek_time(AVIOContext *h, int stream_index,
+int64_t timestamp, int flags);
 #endif /* AVFORMAT_AVIO_H */
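
The doc comments above describe the newly public pause/seek entry points. A minimal usage sketch, assuming s is an AVFormatContext already opened on a network streaming protocol so that s->pb is its AVIOContext; the helper name pause_then_seek is illustrative and not part of this commit:

#include <libavformat/avformat.h>

/* Illustrative helper (not in the tree): pause a network stream, seek to
 * an absolute presentation timestamp, then resume playback. */
static int pause_then_seek(AVFormatContext *s, int64_t ts /* AV_TIME_BASE units */)
{
    int64_t pos;
    int ret;

    if (!s->pb)
        return AVERROR(EINVAL);

    ret = avio_pause(s->pb, 1);                 /* 1 = pause */
    if (ret < 0 && ret != AVERROR(ENOSYS))
        return ret;

    /* stream_index -1: ts is in AV_TIME_BASE units from the start of the
     * presentation, as documented above. */
    pos = avio_seek_time(s->pb, -1, ts, 0);
    if (pos < 0)
        return (int)pos;

    return avio_pause(s->pb, 0);                /* 0 = resume */
}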
@@ -67,32 +67,6 @@ uint64_t ffio_read_varlen(AVIOContext *bc);
 /** @warning must be called before any I/O */
 int ffio_set_buf_size(AVIOContext *s, int buf_size);
-/**
-* Pause and resume playing - only meaningful if using a network streaming
-* protocol (e.g. MMS).
-* @param pause 1 for pause, 0 for resume
-*/
-int ffio_read_pause(AVIOContext *h, int pause);
-/**
-* Seek to a given timestamp relative to some component stream.
-* Only meaningful if using a network streaming protocol (e.g. MMS.).
-* @param stream_index The stream index that the timestamp is relative to.
-* If stream_index is (-1) the timestamp should be in AV_TIME_BASE
-* units from the beginning of the presentation.
-* If a stream_index >= 0 is used and the protocol does not support
-* seeking based on component streams, the call will fail with ENOTSUP.
-* @param timestamp timestamp in AVStream.time_base units
-* or if there is no stream specified then in AV_TIME_BASE units.
-* @param flags Optional combination of AVSEEK_FLAG_BACKWARD, AVSEEK_FLAG_BYTE
-* and AVSEEK_FLAG_ANY. The protocol may silently ignore
-* AVSEEK_FLAG_BACKWARD and AVSEEK_FLAG_ANY, but AVSEEK_FLAG_BYTE will
-* fail with ENOTSUP if used and not supported.
-* @return >= 0 on success
-* @see AVInputFormat::read_seek
-*/
-int64_t ffio_read_seek (AVIOContext *h, int stream_index,
-int64_t timestamp, int flags);
 void ffio_init_checksum(AVIOContext *s,
 unsigned long (*update_checksum)(unsigned long c, const uint8_t *p, unsigned int len),
 unsigned long checksum);
@@ -412,12 +412,12 @@ void put_flush_packet(AVIOContext *s)
 }
 int av_url_read_fpause(AVIOContext *s, int pause)
 {
-return ffio_read_pause(s, pause);
+return avio_pause(s, pause);
 }
 int64_t av_url_read_fseek(AVIOContext *s, int stream_index,
 int64_t timestamp, int flags)
 {
-return ffio_read_seek(s, stream_index, timestamp, flags);
+return avio_seek_time(s, stream_index, timestamp, flags);
 }
 void init_checksum(AVIOContext *s,
 unsigned long (*update_checksum)(unsigned long c, const uint8_t *p, unsigned int len),
@@ -1022,14 +1022,14 @@ int url_fget_max_packet_size(AVIOContext *s)
 }
 #endif
-int ffio_read_pause(AVIOContext *s, int pause)
+int avio_pause(AVIOContext *s, int pause)
 {
 if (!s->read_pause)
 return AVERROR(ENOSYS);
 return s->read_pause(s->opaque, pause);
 }
-int64_t ffio_read_seek(AVIOContext *s, int stream_index,
+int64_t avio_seek_time(AVIOContext *s, int stream_index,
 int64_t timestamp, int flags)
 {
 URLContext *h = s->opaque;
@@ -130,8 +130,8 @@ static int parse_keyframes_index(AVFormatContext *s, AVIOContext *ioc, AVStream
 char str_val[256];
 int64_t *times = NULL;
 int64_t *filepositions = NULL;
-int ret = 0;
-int64_t initial_pos = url_ftell(ioc);
+int ret = AVERROR(ENOSYS);
+int64_t initial_pos = avio_tell(ioc);
 while (avio_tell(ioc) < max_pos - 2 && amf_get_string(ioc, str_val, sizeof(str_val)) > 0) {
 int64_t** current_array;
@@ -164,6 +164,12 @@ static int parse_keyframes_index(AVFormatContext *s, AVIOContext *ioc, AVStream
 goto finish;
 current_array[0][i] = av_int2dbl(avio_rb64(ioc));
 }
+if (times && filepositions) {
+// All done, exiting at a position allowing amf_parse_object
+// to finish parsing the object
+ret = 0;
+break;
+}
 }
 if (timeslen == fileposlen) {
@@ -520,7 +526,7 @@ leave:
 static int flv_read_seek(AVFormatContext *s, int stream_index,
 int64_t ts, int flags)
 {
-return ffio_read_seek(s->pb, stream_index, ts, flags);
+return avio_seek_time(s->pb, stream_index, ts, flags);
 }
 #if 0 /* don't know enough to implement this */
@@ -541,7 +547,7 @@ static int flv_read_seek2(AVFormatContext *s, int stream_index,
 ts = av_rescale_rnd(ts, 1000, AV_TIME_BASE,
 flags & AVSEEK_FLAG_BACKWARD ? AV_ROUND_DOWN : AV_ROUND_UP);
 }
-ret = ffio_read_seek(s->pb, stream_index, ts, flags);
+ret = avio_seek_time(s->pb, stream_index, ts, flags);
 }
 if (ret == AVERROR(ENOSYS))
@@ -2600,7 +2600,7 @@ int av_read_play(AVFormatContext *s)
 if (s->iformat->read_play)
 return s->iformat->read_play(s);
 if (s->pb)
-return ffio_read_pause(s->pb, 0);
+return avio_pause(s->pb, 0);
 return AVERROR(ENOSYS);
 }
@@ -2609,7 +2609,7 @@ int av_read_pause(AVFormatContext *s)
 if (s->iformat->read_pause)
 return s->iformat->read_pause(s);
 if (s->pb)
-return ffio_read_pause(s->pb, 1);
+return avio_pause(s->pb, 1);
 return AVERROR(ENOSYS);
 }