Commit 3e9245a9 authored by Michael Niedermayer's avatar Michael Niedermayer

store searched distance in index, so we don't waste time searching for...

store searched distance in index, so we don't waste time searching for keyframes where we already searched
asf seek fixes & use keyframe pos/timestamp cache

Originally committed as revision 2709 to svn://svn.ffmpeg.org/ffmpeg/trunk
parent fb204d4b
...@@ -1355,11 +1355,17 @@ static int64_t asf_read_pts(AVFormatContext *s, int64_t *ppos, int stream_index) ...@@ -1355,11 +1355,17 @@ static int64_t asf_read_pts(AVFormatContext *s, int64_t *ppos, int stream_index)
ASFStream *asf_st; ASFStream *asf_st;
int64_t pts; int64_t pts;
int64_t pos= *ppos; int64_t pos= *ppos;
int i;
int64_t start_pos[s->nb_streams];
for(i=0; i<s->nb_streams; i++){
start_pos[i]= pos;
}
//printf("asf_read_pts\n"); //printf("asf_read_pts\n");
url_fseek(&s->pb, pos*asf->packet_size + s->data_offset, SEEK_SET); url_fseek(&s->pb, pos*asf->packet_size + s->data_offset, SEEK_SET);
asf_reset_header(s); asf_reset_header(s);
do{ for(;;){
if (av_read_frame(s, pkt) < 0){ if (av_read_frame(s, pkt) < 0){
printf("seek failed\n"); printf("seek failed\n");
return AV_NOPTS_VALUE; return AV_NOPTS_VALUE;
...@@ -1367,11 +1373,23 @@ static int64_t asf_read_pts(AVFormatContext *s, int64_t *ppos, int stream_index) ...@@ -1367,11 +1373,23 @@ static int64_t asf_read_pts(AVFormatContext *s, int64_t *ppos, int stream_index)
pts= pkt->pts; pts= pkt->pts;
av_free_packet(pkt); av_free_packet(pkt);
}while(pkt->stream_index != stream_index || !(pkt->flags&PKT_FLAG_KEY)); if(pkt->flags&PKT_FLAG_KEY){
asf_st= s->streams[stream_index]->priv_data; i= pkt->stream_index;
asf_st= s->streams[i]->priv_data;
assert((asf_st->packet_pos - s->data_offset) % asf->packet_size == 0); assert((asf_st->packet_pos - s->data_offset) % asf->packet_size == 0);
*ppos= (asf_st->packet_pos - s->data_offset) / asf->packet_size; pos= (asf_st->packet_pos - s->data_offset) / asf->packet_size;
av_add_index_entry(s->streams[i], pos, pts, pos - start_pos[i] + 1, AVINDEX_KEYFRAME);
start_pos[i]= pos + 1;
if(pkt->stream_index == stream_index)
break;
}
}
*ppos= pos;
//printf("found keyframe at %Ld stream %d stamp:%Ld\n", *ppos, stream_index, pts); //printf("found keyframe at %Ld stream %d stamp:%Ld\n", *ppos, stream_index, pts);
return pts; return pts;
...@@ -1380,6 +1398,7 @@ static int64_t asf_read_pts(AVFormatContext *s, int64_t *ppos, int stream_index) ...@@ -1380,6 +1398,7 @@ static int64_t asf_read_pts(AVFormatContext *s, int64_t *ppos, int stream_index)
static int asf_read_seek(AVFormatContext *s, int stream_index, int64_t pts) static int asf_read_seek(AVFormatContext *s, int stream_index, int64_t pts)
{ {
ASFContext *asf = s->priv_data; ASFContext *asf = s->priv_data;
AVStream *st;
int64_t pos; int64_t pos;
int64_t pos_min, pos_max, pts_min, pts_max, cur_pts, pos_limit; int64_t pos_min, pos_max, pts_min, pts_max, cur_pts, pos_limit;
...@@ -1389,24 +1408,60 @@ static int asf_read_seek(AVFormatContext *s, int stream_index, int64_t pts) ...@@ -1389,24 +1408,60 @@ static int asf_read_seek(AVFormatContext *s, int stream_index, int64_t pts)
if (asf->packet_size <= 0) if (asf->packet_size <= 0)
return -1; return -1;
pts_max=
pts_min= AV_NOPTS_VALUE;
pos_max= pos_limit= -1; // gcc thinks its uninitalized
st= s->streams[stream_index];
if(st->index_entries){
AVIndexEntry *e;
int index;
index= av_index_search_timestamp(st, pts);
e= &st->index_entries[index];
if(e->timestamp <= pts){
pos_min= e->pos;
pts_min= e->timestamp;
#ifdef DEBUG_SEEK
printf("unsing cached pos_min=0x%llx dts_min=%0.3f\n",
pos_min,pts_min / 90000.0);
#endif
}else{
assert(index==0);
}
index++;
if(index < st->nb_index_entries){
e= &st->index_entries[index];
assert(e->timestamp >= pts);
pos_max= e->pos;
pts_max= e->timestamp;
pos_limit= pos_max - e->min_distance;
#ifdef DEBUG_SEEK
printf("unsing cached pos_max=0x%llx dts_max=%0.3f\n",
pos_max,pts_max / 90000.0);
#endif
}
}
if(pts_min == AV_NOPTS_VALUE){
pos_min = 0; pos_min = 0;
pts_min = asf_read_pts(s, &pos_min, stream_index); pts_min = asf_read_pts(s, &pos_min, stream_index);
if (pts_min == AV_NOPTS_VALUE) return -1; if (pts_min == AV_NOPTS_VALUE) return -1;
}
if(pts_max == AV_NOPTS_VALUE){
pos_max = (url_filesize(url_fileno(&s->pb)) - 1 - s->data_offset) / asf->packet_size; //FIXME wrong pos_max = (url_filesize(url_fileno(&s->pb)) - 1 - s->data_offset) / asf->packet_size; //FIXME wrong
pts_max = pts_min + s->duration; pts_max = s->duration; //FIXME wrong
pos_limit= pos_max; pos_limit= pos_max;
}
while (pos_min < pos_limit) { while (pos_min < pos_limit) {
int64_t start_pos; int64_t start_pos;
assert(pos_limit <= pos_max); assert(pos_limit <= pos_max);
// interpolate position (better than dichotomy) // interpolate position (better than dichotomy)
pos = (int64_t)((double)(pos_limit - pos_min) * pos = (int64_t)((double)(pos_limit - pos_min) *
(double)(pts - pts_min) / (double)(pts - pts_min) /
(double)(pts_max - pts_min)) + pos_min; (double)(pts_max - pts_min)) + pos_min;
pos/= asf->packet_size;
if(pos <= pos_min) if(pos <= pos_min)
pos= pos_min + 1; pos= pos_min + 1;
else if(pos > pos_limit) else if(pos > pos_limit)
...@@ -1415,8 +1470,11 @@ static int asf_read_seek(AVFormatContext *s, int stream_index, int64_t pts) ...@@ -1415,8 +1470,11 @@ static int asf_read_seek(AVFormatContext *s, int stream_index, int64_t pts)
// read the next timestamp // read the next timestamp
cur_pts = asf_read_pts(s, &pos, stream_index); cur_pts = asf_read_pts(s, &pos, stream_index);
#ifdef DEBUG_SEEK
printf("%Ld %Ld %Ld / %Ld %Ld %Ld target:%Ld limit:%Ld start:%Ld\n", pos_min, pos, pos_max, pts_min, cur_pts, pts_max, pts, pos_limit, start_pos);
#endif
if (cur_pts == AV_NOPTS_VALUE) { if (cur_pts == AV_NOPTS_VALUE) {
printf("NOPTS\n");
return -1; return -1;
} else if (pts < cur_pts) { } else if (pts < cur_pts) {
pos_limit = start_pos - 1; pos_limit = start_pos - 1;
......
...@@ -196,9 +196,8 @@ typedef struct AVIndexEntry { ...@@ -196,9 +196,8 @@ typedef struct AVIndexEntry {
int64_t timestamp; int64_t timestamp;
#define AVINDEX_KEYFRAME 0x0001 #define AVINDEX_KEYFRAME 0x0001
/* the following 2 flags indicate that the next/prev keyframe is known, and scaning for it isnt needed */ /* the following 2 flags indicate that the next/prev keyframe is known, and scaning for it isnt needed */
#define AVINDEX_NEXT_KNOWN 0x0002
#define AVINDEX_PREV_KNOWN 0x0004
int flags; int flags;
int min_distance; /* min distance between this and the previous keyframe, used to avoid unneeded searching */
} AVIndexEntry; } AVIndexEntry;
typedef struct AVStream { typedef struct AVStream {
...@@ -549,8 +548,8 @@ void av_set_pts_info(AVFormatContext *s, int pts_wrap_bits, ...@@ -549,8 +548,8 @@ void av_set_pts_info(AVFormatContext *s, int pts_wrap_bits,
int av_find_default_stream_index(AVFormatContext *s); int av_find_default_stream_index(AVFormatContext *s);
int av_index_search_timestamp(AVStream *st, int timestamp); int av_index_search_timestamp(AVStream *st, int timestamp);
void av_add_index_entry(AVStream *st, int av_add_index_entry(AVStream *st,
int64_t pos, int64_t timestamp, int flags); int64_t pos, int64_t timestamp, int distance, int flags);
/* media file output */ /* media file output */
int av_set_parameters(AVFormatContext *s, AVFormatParameters *ap); int av_set_parameters(AVFormatContext *s, AVFormatParameters *ap);
......
...@@ -909,7 +909,7 @@ static int mpegps_read_pes_header(AVFormatContext *s, ...@@ -909,7 +909,7 @@ static int mpegps_read_pes_header(AVFormatContext *s,
int i; int i;
for(i=0; i<s->nb_streams; i++){ for(i=0; i<s->nb_streams; i++){
if(startcode == s->streams[i]->id) { if(startcode == s->streams[i]->id) {
av_add_index_entry(s->streams[i], *ppos, dts, 0 /* FIXME keyframe? */); av_add_index_entry(s->streams[i], *ppos, dts, 0, 0 /* FIXME keyframe? */);
} }
} }
} }
......
...@@ -861,8 +861,8 @@ static void av_read_frame_flush(AVFormatContext *s) ...@@ -861,8 +861,8 @@ static void av_read_frame_flush(AVFormatContext *s)
} }
/* add a index entry into a sorted list updateing if it is already there */ /* add a index entry into a sorted list updateing if it is already there */
void av_add_index_entry(AVStream *st, int av_add_index_entry(AVStream *st,
int64_t pos, int64_t timestamp, int flags) int64_t pos, int64_t timestamp, int distance, int flags)
{ {
AVIndexEntry *entries, *ie; AVIndexEntry *entries, *ie;
int index; int index;
...@@ -890,12 +890,17 @@ void av_add_index_entry(AVStream *st, ...@@ -890,12 +890,17 @@ void av_add_index_entry(AVStream *st,
} }
st->nb_index_entries++; st->nb_index_entries++;
} }
}else }else{
ie= &entries[st->nb_index_entries++]; index= st->nb_index_entries++;
ie= &entries[index];
}
ie->pos = pos; ie->pos = pos;
ie->timestamp = timestamp; ie->timestamp = timestamp;
ie->min_distance= distance;
ie->flags = flags; ie->flags = flags;
return index;
} }
/* build an index for raw streams using a parser */ /* build an index for raw streams using a parser */
...@@ -916,7 +921,7 @@ static void av_build_index_raw(AVFormatContext *s) ...@@ -916,7 +921,7 @@ static void av_build_index_raw(AVFormatContext *s)
if (pkt->stream_index == 0 && st->parser && if (pkt->stream_index == 0 && st->parser &&
(pkt->flags & PKT_FLAG_KEY)) { (pkt->flags & PKT_FLAG_KEY)) {
av_add_index_entry(st, st->parser->frame_offset, pkt->dts, av_add_index_entry(st, st->parser->frame_offset, pkt->dts,
AVINDEX_KEYFRAME); 0, AVINDEX_KEYFRAME);
} }
av_free_packet(pkt); av_free_packet(pkt);
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment