Commit 54974c62 authored by Anton Khirnov

error_resilience: decouple ER from MpegEncContext

parent d9ebb00d
/*
*
* This file is part of Libav.
*
* Libav is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* Libav is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with Libav; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_ERROR_RESILIENCE_H
#define AVCODEC_ERROR_RESILIENCE_H
///< current MB is the first after a resync marker
#define VP_START 1
#define ER_AC_ERROR 2
#define ER_DC_ERROR 4
#define ER_MV_ERROR 8
#define ER_AC_END 16
#define ER_DC_END 32
#define ER_MV_END 64
#define ER_MB_ERROR (ER_AC_ERROR|ER_DC_ERROR|ER_MV_ERROR)
#define ER_MB_END (ER_AC_END|ER_DC_END|ER_MV_END)
typedef struct ERContext {
AVCodecContext *avctx;
DSPContext *dsp;
int *mb_index2xy;
int mb_num;
int mb_width, mb_height;
int mb_stride;
int b8_stride;
int error_count, error_occurred;
uint8_t *error_status_table;
uint8_t *er_temp_buffer;
int16_t *dc_val[3];
uint8_t *mbskip_table;
uint8_t *mbintra_table;
int mv[2][4][2];
struct Picture *cur_pic;
struct Picture *last_pic;
struct Picture *next_pic;
uint16_t pp_time;
uint16_t pb_time;
int quarter_sample;
int partitioned_frame;
int ref_count;
void (*decode_mb)(void *opaque, int ref, int mv_dir, int mv_type,
int (*mv)[2][4][2],
int mb_x, int mb_y, int mb_intra, int mb_skipped);
void *opaque;
} ERContext;
void ff_er_frame_start(ERContext *s);
void ff_er_frame_end(ERContext *s);
void ff_er_add_slice(ERContext *s, int startx, int starty, int endx, int endy,
int status);
#endif /* AVCODEC_ERROR_RESILIENCE_H */
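
The header above is the entire interface of the decoupled ER code: a decoder fills in an ERContext, points decode_mb/opaque at its own macroblock reconstruction routine, and brackets each frame with ff_er_frame_start(), one ff_er_add_slice() call per decoded (or failed) slice, and ff_er_frame_end(). A minimal sketch of that flow follows; MyDecContext, my_er_decode_mb() and the placeholder assignments are hypothetical and not part of this commit, and the real wiring for the MPEG-family decoders is what init_er() and ff_mpeg_er_frame_start() do further down in this diff.

#include "avcodec.h"
#include "dsputil.h"
#include "error_resilience.h"

typedef struct MyDecContext {            /* hypothetical decoder context */
    AVCodecContext *avctx;
    DSPContext dsp;
    ERContext er;
} MyDecContext;

/* Called back by the ER code for every macroblock it wants re-rendered. */
static void my_er_decode_mb(void *opaque, int ref, int mv_dir, int mv_type,
                            int (*mv)[2][4][2],
                            int mb_x, int mb_y, int mb_intra, int mb_skipped)
{
    MyDecContext *d = opaque;
    /* reconstruct one macroblock from the parameters chosen by the ER code */
}

static int my_decode_frame(MyDecContext *d)
{
    ERContext *er = &d->er;

    /* static wiring, normally done once at init time; mb_num, mb_width,
     * mb_height, mb_stride, b8_stride, mb_index2xy, error_status_table and
     * er_temp_buffer must be filled in as well (see init_er() below) */
    er->avctx     = d->avctx;
    er->dsp       = &d->dsp;
    er->decode_mb = my_er_decode_mb;
    er->opaque    = d;

    /* per-frame wiring: cur_pic/last_pic/next_pic must point at the decoder's
     * Picture structs, and pp_time/pb_time/quarter_sample/partitioned_frame at
     * its frame state (this is what ff_mpeg_er_frame_start() does below) */

    ff_er_frame_start(er);

    /* ... decode the frame, reporting each slice as it completes or fails ... */
    ff_er_add_slice(er, 0, 0, er->mb_width - 1, er->mb_height - 1, ER_MB_END);

    /* conceal whatever was marked damaged or missing */
    ff_er_frame_end(er);
    return 0;
}

The point of the commit is that none of this requires an MpegEncContext any more; MpegEncContext simply embeds an ERContext (the new "er" field below) and forwards its own state into it.
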
@@ -609,7 +609,7 @@ retry:
if(ff_MPV_frame_start(s, avctx) < 0)
return -1;
ff_er_frame_start(s);
ff_mpeg_er_frame_start(s);
/* decode each macroblock */
s->mb_x=0;
@@ -188,7 +188,7 @@ static int decode_slice(MpegEncContext *s){
/* per-row end of slice checks */
if(s->msmpeg4_version){
if(s->resync_mb_y + s->slice_height == s->mb_y){
ff_er_add_slice(s, s->resync_mb_x, s->resync_mb_y, s->mb_x-1, s->mb_y, ER_MB_END);
ff_er_add_slice(&s->er, s->resync_mb_x, s->resync_mb_y, s->mb_x-1, s->mb_y, ER_MB_END);
return 0;
}
@@ -229,7 +229,7 @@ static int decode_slice(MpegEncContext *s){
if(s->loop_filter)
ff_h263_loop_filter(s);
ff_er_add_slice(s, s->resync_mb_x, s->resync_mb_y, s->mb_x, s->mb_y, ER_MB_END&part_mask);
ff_er_add_slice(&s->er, s->resync_mb_x, s->resync_mb_y, s->mb_x, s->mb_y, ER_MB_END&part_mask);
s->padding_bug_score--;
@@ -242,11 +242,11 @@ static int decode_slice(MpegEncContext *s){
return 0;
}else if(ret==SLICE_NOEND){
av_log(s->avctx, AV_LOG_ERROR, "Slice mismatch at MB: %d\n", xy);
ff_er_add_slice(s, s->resync_mb_x, s->resync_mb_y, s->mb_x+1, s->mb_y, ER_MB_END&part_mask);
ff_er_add_slice(&s->er, s->resync_mb_x, s->resync_mb_y, s->mb_x+1, s->mb_y, ER_MB_END&part_mask);
return -1;
}
av_log(s->avctx, AV_LOG_ERROR, "Error at MB: %d\n", xy);
ff_er_add_slice(s, s->resync_mb_x, s->resync_mb_y, s->mb_x, s->mb_y, ER_MB_ERROR&part_mask);
ff_er_add_slice(&s->er, s->resync_mb_x, s->resync_mb_y, s->mb_x, s->mb_y, ER_MB_ERROR&part_mask);
return -1;
}
@@ -325,7 +325,7 @@ static int decode_slice(MpegEncContext *s){
else if(left<0){
av_log(s->avctx, AV_LOG_ERROR, "overreading %d bits\n", -left);
}else
ff_er_add_slice(s, s->resync_mb_x, s->resync_mb_y, s->mb_x-1, s->mb_y, ER_MB_END);
ff_er_add_slice(&s->er, s->resync_mb_x, s->resync_mb_y, s->mb_x-1, s->mb_y, ER_MB_END);
return 0;
}
@@ -334,7 +334,7 @@ static int decode_slice(MpegEncContext *s){
get_bits_left(&s->gb),
show_bits(&s->gb, 24), s->padding_bug_score);
ff_er_add_slice(s, s->resync_mb_x, s->resync_mb_y, s->mb_x, s->mb_y, ER_MB_END&part_mask);
ff_er_add_slice(&s->er, s->resync_mb_x, s->resync_mb_y, s->mb_x, s->mb_y, ER_MB_END&part_mask);
return -1;
}
@@ -638,7 +638,7 @@ retry:
return -1;
}
ff_er_frame_start(s);
ff_mpeg_er_frame_start(s);
//the second part of the wmv2 header contains the MB skip bits which are stored in current_picture->mb_type
//which is not available before ff_MPV_frame_start()
@@ -662,7 +662,7 @@ retry:
if(ff_h263_resync(s)<0)
break;
if (prev_y * s->mb_width + prev_x < s->mb_y * s->mb_width + s->mb_x)
s->error_occurred = 1;
s->er.error_occurred = 1;
}
if(s->msmpeg4_version<4 && s->h263_pred)
@@ -673,7 +673,7 @@ retry:
if (s->msmpeg4_version && s->msmpeg4_version<4 && s->pict_type==AV_PICTURE_TYPE_I)
if(!CONFIG_MSMPEG4_DECODER || ff_msmpeg4_decode_ext_header(s, buf_size) < 0){
s->error_status_table[s->mb_num-1]= ER_MB_ERROR;
s->er.error_status_table[s->mb_num - 1] = ER_MB_ERROR;
}
assert(s->bitstream_buffer_size==0);
@@ -710,7 +710,7 @@ frame_end:
}
intrax8_decoded:
ff_er_frame_end(s);
ff_er_frame_end(&s->er);
if (avctx->hwaccel) {
if (avctx->hwaccel->end_frame(avctx) < 0)
@@ -78,6 +78,33 @@ static const enum AVPixelFormat hwaccel_pixfmt_list_h264_jpeg_420[] = {
AV_PIX_FMT_NONE
};
static void h264_er_decode_mb(void *opaque, int ref, int mv_dir, int mv_type,
int (*mv)[2][4][2],
int mb_x, int mb_y, int mb_intra, int mb_skipped)
{
H264Context *h = opaque;
MpegEncContext *s = &h->s;
s->mb_x = mb_x;
s->mb_y = mb_y;
h->mb_xy = s->mb_x + s->mb_y * s->mb_stride;
memset(h->non_zero_count_cache, 0, sizeof(h->non_zero_count_cache));
assert(ref >= 0);
/* FIXME: It is possible albeit uncommon that slice references
* differ between slices. We take the easy approach and ignore
* it for now. If this turns out to have any relevance in
* practice then correct remapping should be added. */
if (ref >= h->ref_count[0])
ref = 0;
fill_rectangle(&s->current_picture.f.ref_index[0][4 * h->mb_xy],
2, 2, 2, ref, 1);
fill_rectangle(&h->ref_cache[0][scan8[0]], 4, 4, 8, ref, 1);
fill_rectangle(h->mv_cache[0][scan8[0]], 4, 4, 8,
pack16to32(s->mv[0][0][0], s->mv[0][0][1]), 4);
assert(!FRAME_MBAFF);
ff_h264_hl_decode_mb(h);
}
/**
* Check if the top & left blocks are available if needed and
* change the dc mode so it only uses the available blocks.
@@ -960,6 +987,9 @@ static int context_init(H264Context *h)
h->ref_cache[1][scan8[7] + 1] =
h->ref_cache[1][scan8[13] + 1] = PART_NOT_AVAILABLE;
h->s.er.decode_mb = h264_er_decode_mb;
h->s.er.opaque = h;
return 0;
fail:
@@ -1299,7 +1329,7 @@ int ff_h264_frame_start(H264Context *h)
if (ff_MPV_frame_start(s, s->avctx) < 0)
return -1;
ff_er_frame_start(s);
ff_mpeg_er_frame_start(s);
/*
* ff_MPV_frame_start uses pict_type to derive key_frame.
* This is incorrect for H.264; IDR markings must be used.
@@ -2340,7 +2370,7 @@ static int field_end(H264Context *h, int in_setup)
* causes problems for the first MB line, too.
*/
if (!FIELD_PICTURE)
ff_er_frame_end(s);
ff_er_frame_end(&s->er);
ff_MPV_frame_end(s);
@@ -3068,11 +3098,13 @@ static int decode_slice_header(H264Context *h, H264Context *h0)
if (h->slice_type_nos != AV_PICTURE_TYPE_I) {
s->last_picture_ptr = &h->ref_list[0][0];
s->last_picture_ptr->owner2 = s;
s->er.last_pic = s->last_picture_ptr;
ff_copy_picture(&s->last_picture, s->last_picture_ptr);
}
if (h->slice_type_nos == AV_PICTURE_TYPE_B) {
s->next_picture_ptr = &h->ref_list[1][0];
s->next_picture_ptr->owner2 = s;
s->er.next_pic = s->next_picture_ptr;
ff_copy_picture(&s->next_picture, s->next_picture_ptr);
}
@@ -3639,6 +3671,15 @@ static void decode_finish_row(H264Context *h)
s->picture_structure == PICT_BOTTOM_FIELD);
}
static void er_add_slice(H264Context *h, int startx, int starty,
int endx, int endy, int status)
{
ERContext *er = &h->s.er;
er->ref_count = h->ref_count[0];
ff_er_add_slice(er, startx, starty, endx, endy, status);
}
static int decode_slice(struct AVCodecContext *avctx, void *arg)
{
H264Context *h = *(void **)arg;
@@ -3686,7 +3727,7 @@ static int decode_slice(struct AVCodecContext *avctx, void *arg)
if ((s->workaround_bugs & FF_BUG_TRUNCATED) &&
h->cabac.bytestream > h->cabac.bytestream_end + 2) {
ff_er_add_slice(s, s->resync_mb_x, s->resync_mb_y, s->mb_x - 1,
er_add_slice(h, s->resync_mb_x, s->resync_mb_y, s->mb_x - 1,
s->mb_y, ER_MB_END);
if (s->mb_x >= lf_x_start)
loop_filter(h, lf_x_start, s->mb_x + 1);
@@ -3697,7 +3738,7 @@ static int decode_slice(struct AVCodecContext *avctx, void *arg)
"error while decoding MB %d %d, bytestream (%td)\n",
s->mb_x, s->mb_y,
h->cabac.bytestream_end - h->cabac.bytestream);
ff_er_add_slice(s, s->resync_mb_x, s->resync_mb_y, s->mb_x,
er_add_slice(h, s->resync_mb_x, s->resync_mb_y, s->mb_x,
s->mb_y, ER_MB_ERROR);
return -1;
}
@@ -3717,7 +3758,7 @@ static int decode_slice(struct AVCodecContext *avctx, void *arg)
if (eos || s->mb_y >= s->mb_height) {
tprintf(s->avctx, "slice end %d %d\n",
get_bits_count(&s->gb), s->gb.size_in_bits);
ff_er_add_slice(s, s->resync_mb_x, s->resync_mb_y, s->mb_x - 1,
er_add_slice(h, s->resync_mb_x, s->resync_mb_y, s->mb_x - 1,
s->mb_y, ER_MB_END);
if (s->mb_x > lf_x_start)
loop_filter(h, lf_x_start, s->mb_x);
@@ -3744,7 +3785,7 @@ static int decode_slice(struct AVCodecContext *avctx, void *arg)
if (ret < 0) {
av_log(h->s.avctx, AV_LOG_ERROR,
"error while decoding MB %d %d\n", s->mb_x, s->mb_y);
ff_er_add_slice(s, s->resync_mb_x, s->resync_mb_y, s->mb_x,
er_add_slice(h, s->resync_mb_x, s->resync_mb_y, s->mb_x,
s->mb_y, ER_MB_ERROR);
return -1;
}
@@ -3764,13 +3805,13 @@ static int decode_slice(struct AVCodecContext *avctx, void *arg)
get_bits_count(&s->gb), s->gb.size_in_bits);
if (get_bits_left(&s->gb) == 0) {
ff_er_add_slice(s, s->resync_mb_x, s->resync_mb_y,
er_add_slice(h, s->resync_mb_x, s->resync_mb_y,
s->mb_x - 1, s->mb_y,
ER_MB_END);
return 0;
} else {
ff_er_add_slice(s, s->resync_mb_x, s->resync_mb_y,
er_add_slice(h, s->resync_mb_x, s->resync_mb_y,
s->mb_x - 1, s->mb_y,
ER_MB_END);
@@ -3783,7 +3824,7 @@ static int decode_slice(struct AVCodecContext *avctx, void *arg)
tprintf(s->avctx, "slice end %d %d\n",
get_bits_count(&s->gb), s->gb.size_in_bits);
if (get_bits_left(&s->gb) == 0) {
ff_er_add_slice(s, s->resync_mb_x, s->resync_mb_y,
er_add_slice(h, s->resync_mb_x, s->resync_mb_y,
s->mb_x - 1, s->mb_y,
ER_MB_END);
if (s->mb_x > lf_x_start)
@@ -3791,7 +3832,7 @@ static int decode_slice(struct AVCodecContext *avctx, void *arg)
return 0;
} else {
ff_er_add_slice(s, s->resync_mb_x, s->resync_mb_y, s->mb_x,
er_add_slice(h, s->resync_mb_x, s->resync_mb_y, s->mb_x,
s->mb_y, ER_MB_ERROR);
return -1;
@@ -3823,7 +3864,7 @@ static int execute_decode_slices(H264Context *h, int context_count)
for (i = 1; i < context_count; i++) {
hx = h->thread_context[i];
hx->s.err_recognition = avctx->err_recognition;
hx->s.error_count = 0;
hx->s.er.error_count = 0;
}
avctx->execute(avctx, decode_slice, h->thread_context,
@@ -3836,7 +3877,7 @@ static int execute_decode_slices(H264Context *h, int context_count)
s->droppable = hx->s.droppable;
s->picture_structure = hx->s.picture_structure;
for (i = 1; i < context_count; i++)
h->s.error_count += h->thread_context[i]->s.error_count;
h->s.er.error_count += h->thread_context[i]->s.er.error_count;
}
return 0;
@@ -784,7 +784,7 @@ int ff_intrax8_decode_picture(IntraX8Context * const w, int dquant, int quant_of
}
error:
ff_er_add_slice(s, s->resync_mb_x, s->resync_mb_y,
ff_er_add_slice(&s->er, s->resync_mb_x, s->resync_mb_y,
(s->mb_x>>1)-1, (s->mb_y>>1)-1,
ER_MB_END );
return 0;
@@ -1565,7 +1565,7 @@ static int mpeg_field_start(MpegEncContext *s, const uint8_t *buf, int buf_size)
if (ff_MPV_frame_start(s, avctx) < 0)
return -1;
ff_er_frame_start(s);
ff_mpeg_er_frame_start(s);
/* first check if we must repeat the frame */
s->current_picture_ptr->f.repeat_pict = 0;
@@ -1856,7 +1856,7 @@ static int slice_decode_thread(AVCodecContext *c, void *arg)
int mb_y = s->start_mb_y;
const int field_pic = s->picture_structure != PICT_FRAME;
s->error_count = (3 * (s->end_mb_y - s->start_mb_y) * s->mb_width) >> field_pic;
s->er.error_count = (3 * (s->end_mb_y - s->start_mb_y) * s->mb_width) >> field_pic;
for (;;) {
uint32_t start_code;
@@ -1866,14 +1866,14 @@ static int slice_decode_thread(AVCodecContext *c, void *arg)
emms_c();
av_dlog(c, "ret:%d resync:%d/%d mb:%d/%d ts:%d/%d ec:%d\n",
ret, s->resync_mb_x, s->resync_mb_y, s->mb_x, s->mb_y,
s->start_mb_y, s->end_mb_y, s->error_count);
s->start_mb_y, s->end_mb_y, s->er.error_count);
if (ret < 0) {
if (c->err_recognition & AV_EF_EXPLODE)
return ret;
if (s->resync_mb_x >= 0 && s->resync_mb_y >= 0)
ff_er_add_slice(s, s->resync_mb_x, s->resync_mb_y, s->mb_x, s->mb_y, ER_AC_ERROR | ER_DC_ERROR | ER_MV_ERROR);
ff_er_add_slice(&s->er, s->resync_mb_x, s->resync_mb_y, s->mb_x, s->mb_y, ER_AC_ERROR | ER_DC_ERROR | ER_MV_ERROR);
} else {
ff_er_add_slice(s, s->resync_mb_x, s->resync_mb_y, s->mb_x-1, s->mb_y, ER_AC_END | ER_DC_END | ER_MV_END);
ff_er_add_slice(&s->er, s->resync_mb_x, s->resync_mb_y, s->mb_x-1, s->mb_y, ER_AC_END | ER_DC_END | ER_MV_END);
}
if (s->mb_y == s->end_mb_y)
@@ -1915,7 +1915,7 @@ static int slice_end(AVCodecContext *avctx, AVFrame *pict)
s->current_picture_ptr->f.qscale_type = FF_QSCALE_TYPE_MPEG2;
ff_er_frame_end(s);
ff_er_frame_end(&s->er);
ff_MPV_frame_end(s);
@@ -2210,7 +2210,7 @@ static int decode_chunks(AVCodecContext *avctx,
avctx->execute(avctx, slice_decode_thread, &s2->thread_context[0], NULL, s->slice_count, sizeof(void*));
for (i = 0; i < s->slice_count; i++)
s2->error_count += s2->thread_context[i]->error_count;
s2->er.error_count += s2->thread_context[i]->er.error_count;
}
if (CONFIG_MPEG_VDPAU_DECODER && avctx->codec->capabilities & CODEC_CAP_HWACCEL_VDPAU)
@@ -2252,7 +2252,7 @@ static int decode_chunks(AVCodecContext *avctx,
s2->thread_context, NULL,
s->slice_count, sizeof(void*));
for (i = 0; i < s->slice_count; i++)
s2->error_count += s2->thread_context[i]->error_count;
s2->er.error_count += s2->thread_context[i]->er.error_count;
s->slice_count = 0;
}
if (last_code == 0 || last_code == SLICE_MIN_START_CODE) {
@@ -2411,9 +2411,9 @@ static int decode_chunks(AVCodecContext *avctx,
if (avctx->err_recognition & AV_EF_EXPLODE)
return ret;
if (s2->resync_mb_x >= 0 && s2->resync_mb_y >= 0)
ff_er_add_slice(s2, s2->resync_mb_x, s2->resync_mb_y, s2->mb_x, s2->mb_y, ER_AC_ERROR | ER_DC_ERROR | ER_MV_ERROR);
ff_er_add_slice(&s2->er, s2->resync_mb_x, s2->resync_mb_y, s2->mb_x, s2->mb_y, ER_AC_ERROR | ER_DC_ERROR | ER_MV_ERROR);
} else {
ff_er_add_slice(s2, s2->resync_mb_x, s2->resync_mb_y, s2->mb_x-1, s2->mb_y, ER_AC_END | ER_DC_END | ER_MV_END);
ff_er_add_slice(&s2->er, s2->resync_mb_x, s2->resync_mb_y, s2->mb_x-1, s2->mb_y, ER_AC_END | ER_DC_END | ER_MV_END);
}
}
}
@@ -796,13 +796,13 @@ int ff_mpeg4_decode_partitions(MpegEncContext *s)
mb_num= mpeg4_decode_partition_a(s);
if(mb_num<0){
ff_er_add_slice(s, s->resync_mb_x, s->resync_mb_y, s->mb_x, s->mb_y, part_a_error);
ff_er_add_slice(&s->er, s->resync_mb_x, s->resync_mb_y, s->mb_x, s->mb_y, part_a_error);
return -1;
}
if(s->resync_mb_x + s->resync_mb_y*s->mb_width + mb_num > s->mb_num){
av_log(s->avctx, AV_LOG_ERROR, "slice below monitor ...\n");
ff_er_add_slice(s, s->resync_mb_x, s->resync_mb_y, s->mb_x, s->mb_y, part_a_error);
ff_er_add_slice(&s->er, s->resync_mb_x, s->resync_mb_y, s->mb_x, s->mb_y, part_a_error);
return -1;
}
@@ -823,15 +823,15 @@ int ff_mpeg4_decode_partitions(MpegEncContext *s)
return -1;
}
}
ff_er_add_slice(s, s->resync_mb_x, s->resync_mb_y, s->mb_x-1, s->mb_y, part_a_end);
ff_er_add_slice(&s->er, s->resync_mb_x, s->resync_mb_y, s->mb_x-1, s->mb_y, part_a_end);
if( mpeg4_decode_partition_b(s, mb_num) < 0){
if(s->pict_type==AV_PICTURE_TYPE_P)
ff_er_add_slice(s, s->resync_mb_x, s->resync_mb_y, s->mb_x, s->mb_y, ER_DC_ERROR);
ff_er_add_slice(&s->er, s->resync_mb_x, s->resync_mb_y, s->mb_x, s->mb_y, ER_DC_ERROR);
return -1;
}else{
if(s->pict_type==AV_PICTURE_TYPE_P)
ff_er_add_slice(s, s->resync_mb_x, s->resync_mb_y, s->mb_x-1, s->mb_y, ER_DC_END);
ff_er_add_slice(&s->er, s->resync_mb_x, s->resync_mb_y, s->mb_x-1, s->mb_y, ER_DC_END);
}
return 0;
@@ -147,6 +147,33 @@ const enum AVPixelFormat ff_hwaccel_pixfmt_list_420[] = {
AV_PIX_FMT_NONE
};
static void mpeg_er_decode_mb(void *opaque, int ref, int mv_dir, int mv_type,
int (*mv)[2][4][2],
int mb_x, int mb_y, int mb_intra, int mb_skipped)
{
MpegEncContext *s = opaque;
s->mv_dir = mv_dir;
s->mv_type = mv_type;
s->mb_intra = mb_intra;
s->mb_skipped = mb_skipped;
s->mb_x = mb_x;
s->mb_y = mb_y;
memcpy(s->mv, mv, sizeof(*mv));
ff_init_block_index(s);
ff_update_block_index(s);
s->dsp.clear_blocks(s->block[0]);
s->dest[0] = s->current_picture.f.data[0] + (s->mb_y * 16 * s->linesize) + s->mb_x * 16;
s->dest[1] = s->current_picture.f.data[1] + (s->mb_y * (16 >> s->chroma_y_shift) * s->uvlinesize) + s->mb_x * (16 >> s->chroma_x_shift);
s->dest[2] = s->current_picture.f.data[2] + (s->mb_y * (16 >> s->chroma_y_shift) * s->uvlinesize) + s->mb_x * (16 >> s->chroma_x_shift);
assert(ref == 0);
ff_MPV_decode_mb(s, s->block);
}
const uint8_t *avpriv_mpv_find_start_code(const uint8_t *restrict p,
const uint8_t *end,
uint32_t * restrict state)
@@ -723,6 +750,43 @@ void ff_MPV_decode_defaults(MpegEncContext *s)
ff_MPV_common_defaults(s);
}
static int init_er(MpegEncContext *s)
{
ERContext *er = &s->er;
int mb_array_size = s->mb_height * s->mb_stride;
int i;
er->avctx = s->avctx;
er->dsp = &s->dsp;
er->mb_index2xy = s->mb_index2xy;
er->mb_num = s->mb_num;
er->mb_width = s->mb_width;
er->mb_height = s->mb_height;
er->mb_stride = s->mb_stride;
er->b8_stride = s->b8_stride;
er->er_temp_buffer = av_malloc(s->mb_height * s->mb_stride);
er->error_status_table = av_mallocz(mb_array_size);
if (!er->er_temp_buffer || !er->error_status_table)
goto fail;
er->mbskip_table = s->mbskip_table;
er->mbintra_table = s->mbintra_table;
for (i = 0; i < FF_ARRAY_ELEMS(s->dc_val); i++)
er->dc_val[i] = s->dc_val[i];
er->decode_mb = mpeg_er_decode_mb;
er->opaque = s;
return 0;
fail:
av_freep(&er->er_temp_buffer);
av_freep(&er->error_status_table);
return AVERROR(ENOMEM);
}
/**
* Initialize and allocates MpegEncContext fields dependent on the resolution.
*/
@@ -801,11 +865,6 @@ static int init_context_frame(MpegEncContext *s)
}
FF_ALLOC_OR_GOTO(s->avctx, s->er_temp_buffer,
mb_array_size * sizeof(uint8_t), fail);
FF_ALLOCZ_OR_GOTO(s->avctx, s->error_status_table,
mb_array_size * sizeof(uint8_t), fail);
if (s->codec_id == AV_CODEC_ID_MPEG4 ||
(s->flags & CODEC_FLAG_INTERLACED_ME)) {
/* interlaced direct mode decoding tables */
@@ -873,7 +932,7 @@ static int init_context_frame(MpegEncContext *s)
2 * EDGE_WIDTH) * s->mb_height * 16 + 2 * EDGE_WIDTH);
}
return 0;
return init_er(s);
fail:
return AVERROR(ENOMEM);
}
@@ -1054,8 +1113,8 @@ static int free_context_frame(MpegEncContext *s)
av_freep(&s->mbskip_table);
av_freep(&s->error_status_table);
av_freep(&s->er_temp_buffer);
av_freep(&s->er.error_status_table);
av_freep(&s->er.er_temp_buffer);
av_freep(&s->mb_index2xy);
av_freep(&s->lambda_table);
av_freep(&s->cplx_tab);
@@ -1589,7 +1648,7 @@ void ff_MPV_frame_end(MpegEncContext *s)
// just to make sure that all data is rendered.
if (CONFIG_MPEG_XVMC_DECODER && s->avctx->xvmc_acceleration) {
ff_xvmc_field_end(s);
} else if ((s->error_count || s->encoding) &&
} else if ((s->er.error_count || s->encoding) &&
!s->avctx->hwaccel &&
!(s->avctx->codec->capabilities & CODEC_CAP_HWACCEL_VDPAU) &&
s->unrestricted_mv &&
@@ -2792,6 +2851,22 @@ void ff_set_qscale(MpegEncContext * s, int qscale)
void ff_MPV_report_decode_progress(MpegEncContext *s)
{
if (s->pict_type != AV_PICTURE_TYPE_B && !s->partitioned_frame && !s->error_occurred)
if (s->pict_type != AV_PICTURE_TYPE_B && !s->partitioned_frame && !s->er.error_occurred)
ff_thread_report_progress(&s->current_picture_ptr->f, s->mb_y, 0);
}
void ff_mpeg_er_frame_start(MpegEncContext *s)
{
ERContext *er = &s->er;
er->cur_pic = s->current_picture_ptr;
er->last_pic = s->last_picture_ptr;
er->next_pic = s->next_picture_ptr;
er->pp_time = s->pp_time;
er->pb_time = s->pb_time;
er->quarter_sample = s->quarter_sample;
er->partitioned_frame = s->partitioned_frame;
ff_er_frame_start(er);
}
@@ -30,6 +30,7 @@
#include "avcodec.h"
#include "dsputil.h"
#include "error_resilience.h"
#include "get_bits.h"
#include "put_bits.h"
#include "ratecontrol.h"
@@ -484,19 +485,6 @@ typedef struct MpegEncContext {
int last_bits; ///< temp var used for calculating the above vars
/* error concealment / resync */
int error_count, error_occurred;
uint8_t *error_status_table; ///< table of the error status of each MB
#define VP_START 1 ///< current MB is the first after a resync marker
#define ER_AC_ERROR 2
#define ER_DC_ERROR 4
#define ER_MV_ERROR 8
#define ER_AC_END 16
#define ER_DC_END 32
#define ER_MV_END 64
#define ER_MB_ERROR (ER_AC_ERROR|ER_DC_ERROR|ER_MV_ERROR)
#define ER_MB_END (ER_AC_END|ER_DC_END|ER_MV_END)
int resync_mb_x; ///< x position of last resync marker
int resync_mb_y; ///< y position of last resync marker
GetBitContext last_resync_gb; ///< used to search for the next resync marker
@@ -701,15 +689,14 @@ typedef struct MpegEncContext {
int mpv_flags; ///< flags set by private options
int quantizer_noise_shaping;
/* error resilience stuff */
uint8_t *er_temp_buffer;
/* temp buffers for rate control */
float *cplx_tab, *bits_tab;
/* flag to indicate a reinitialization is required, e.g. after
* a frame size change */
int context_reinit;
ERContext er;
} MpegEncContext;
#define REBASE_PICTURE(pic, new_ctx, old_ctx) \
@@ -787,9 +774,7 @@ int ff_mpeg_update_thread_context(AVCodecContext *dst, const AVCodecContext *src
const uint8_t *avpriv_mpv_find_start_code(const uint8_t *p, const uint8_t *end, uint32_t *state);
void ff_set_qscale(MpegEncContext * s, int qscale);
void ff_er_frame_start(MpegEncContext *s);
void ff_er_frame_end(MpegEncContext *s);
void ff_er_add_slice(MpegEncContext *s, int startx, int starty, int endx, int endy, int status);
void ff_mpeg_er_frame_start(MpegEncContext *s);
int ff_dct_common_init(MpegEncContext *s);
void ff_convert_matrix(DSPContext *dsp, int (*qmat)[64], uint16_t (*qmat16)[2][64],
@@ -3037,7 +3037,7 @@ static void merge_context_after_encode(MpegEncContext *dst, MpegEncContext *src)
MERGE(b_count);
MERGE(skip_count);
MERGE(misc_bits);
MERGE(error_count);
MERGE(er.error_count);
MERGE(padding_bug_score);
MERGE(current_picture.f.error[0]);
MERGE(current_picture.f.error[1]);
@@ -406,7 +406,7 @@ static int decode_wmv9(AVCodecContext *avctx, const uint8_t *buf, int buf_size,
return ret;
}
ff_er_frame_start(s);
ff_mpeg_er_frame_start(s);
v->bits = buf_size * 8;
@@ -419,7 +419,7 @@ static int decode_wmv9(AVCodecContext *avctx, const uint8_t *buf, int buf_size,
ff_vc1_decode_blocks(v);
ff_er_frame_end(s);
ff_er_frame_end(&s->er);
ff_MPV_frame_end(s);
@@ -528,13 +528,13 @@ static int rv10_decode_packet(AVCodecContext *avctx,
if ((s->mb_x == 0 && s->mb_y == 0) || s->current_picture_ptr==NULL) {
if(s->current_picture_ptr){ //FIXME write parser so we always have complete frames?
ff_er_frame_end(s);
ff_er_frame_end(&s->er);
ff_MPV_frame_end(s);
s->mb_x= s->mb_y = s->resync_mb_x = s->resync_mb_y= 0;
}
if(ff_MPV_frame_start(s, avctx) < 0)
return -1;
ff_er_frame_start(s);
ff_mpeg_er_frame_start(s);
} else {
if (s->current_picture_ptr->f.pict_type != s->pict_type) {
av_log(s->avctx, AV_LOG_ERROR, "Slice type mismatch\n");
@@ -626,7 +626,7 @@ static int rv10_decode_packet(AVCodecContext *avctx,
if(ret == SLICE_END) break;
}
ff_er_add_slice(s, start_mb_x, s->resync_mb_y, s->mb_x-1, s->mb_y, ER_MB_END);
ff_er_add_slice(&s->er, start_mb_x, s->resync_mb_y, s->mb_x-1, s->mb_y, ER_MB_END);
return active_bits_size;
}
@@ -697,7 +697,7 @@ static int rv10_decode_frame(AVCodecContext *avctx,
}
if(s->current_picture_ptr != NULL && s->mb_y>=s->mb_height){
ff_er_frame_end(s);
ff_er_frame_end(&s->er);
ff_MPV_frame_end(s);
if (s->pict_type == AV_PICTURE_TYPE_B || s->low_delay) {
@@ -1428,7 +1428,7 @@ static int rv34_decode_slice(RV34DecContext *r, int end, const uint8_t* buf, int
else
res = rv34_decode_intra_macroblock(r, r->intra_types + s->mb_x * 4 + 4);
if(res < 0){
ff_er_add_slice(s, s->resync_mb_x, s->resync_mb_y, s->mb_x-1, s->mb_y, ER_MB_ERROR);
ff_er_add_slice(&s->er, s->resync_mb_x, s->resync_mb_y, s->mb_x-1, s->mb_y, ER_MB_ERROR);
return -1;
}
if (++s->mb_x == s->mb_width) {
@@ -1451,7 +1451,7 @@ static int rv34_decode_slice(RV34DecContext *r, int end, const uint8_t* buf, int
s->first_slice_line=0;
s->mb_num_left--;
}
ff_er_add_slice(s, s->resync_mb_x, s->resync_mb_y, s->mb_x-1, s->mb_y, ER_MB_END);
ff_er_add_slice(&s->er, s->resync_mb_x, s->resync_mb_y, s->mb_x-1, s->mb_y, ER_MB_END);
return s->mb_y == s->mb_height;
}
......@@ -1564,7 +1564,7 @@ static int finish_frame(AVCodecContext *avctx, AVFrame *pict)
MpegEncContext *s = &r->s;
int got_picture = 0;
ff_er_frame_end(s);
ff_er_frame_end(&s->er);
ff_MPV_frame_end(s);
s->mb_num_left = 0;
@@ -1646,7 +1646,7 @@ int ff_rv34_decode_frame(AVCodecContext *avctx,
if (s->mb_num_left > 0) {
av_log(avctx, AV_LOG_ERROR, "New frame but still %d MB left.",
s->mb_num_left);
ff_er_frame_end(s);
ff_er_frame_end(&s->er);
ff_MPV_frame_end(s);
}
@@ -1667,7 +1667,7 @@ int ff_rv34_decode_frame(AVCodecContext *avctx,
s->pict_type = si.type ? si.type : AV_PICTURE_TYPE_I;
if (ff_MPV_frame_start(s, s->avctx) < 0)
return -1;
ff_er_frame_start(s);
ff_mpeg_er_frame_start(s);
if (!r->tmp_b_block_base) {
int i;
@@ -1766,7 +1766,7 @@ int ff_rv34_decode_frame(AVCodecContext *avctx,
av_log(avctx, AV_LOG_INFO, "marking unfished frame as finished\n");
/* always mark the current frame as finished, frame-mt supports
* only complete frames */
ff_er_frame_end(s);
ff_er_frame_end(&s->er);
ff_MPV_frame_end(s);
s->mb_num_left = 0;
ff_thread_report_progress(&s->current_picture_ptr->f, INT_MAX, 0);
@@ -4434,7 +4434,7 @@ static void vc1_decode_i_blocks(VC1Context *v)
if (v->s.loop_filter) vc1_loop_filter_iblk(v, v->pq);
if (get_bits_count(&s->gb) > v->bits) {
ff_er_add_slice(s, 0, 0, s->mb_x, s->mb_y, ER_MB_ERROR);
ff_er_add_slice(&s->er, 0, 0, s->mb_x, s->mb_y, ER_MB_ERROR);
av_log(s->avctx, AV_LOG_ERROR, "Bits overconsumption: %i > %i\n",
get_bits_count(&s->gb), v->bits);
return;
@@ -4452,7 +4452,7 @@ static void vc1_decode_i_blocks(VC1Context *v)
/* This is intentionally mb_height and not end_mb_y - unlike in advanced
* profile, these only differ when decoding MSS2 rectangles. */
ff_er_add_slice(s, 0, 0, s->mb_width - 1, s->mb_height - 1, ER_MB_END);
ff_er_add_slice(&s->er, 0, 0, s->mb_width - 1, s->mb_height - 1, ER_MB_END);
}
/** Decode blocks of I-frame for advanced profile
@@ -4562,7 +4562,7 @@ static void vc1_decode_i_blocks_adv(VC1Context *v)
if (get_bits_count(&s->gb) > v->bits) {
// TODO: may need modification to handle slice coding
ff_er_add_slice(s, 0, s->start_mb_y, s->mb_x, s->mb_y, ER_MB_ERROR);
ff_er_add_slice(&s->er, 0, s->start_mb_y, s->mb_x, s->mb_y, ER_MB_ERROR);
av_log(s->avctx, AV_LOG_ERROR, "Bits overconsumption: %i > %i\n",
get_bits_count(&s->gb), v->bits);
return;
@@ -4586,7 +4586,7 @@ static void vc1_decode_i_blocks_adv(VC1Context *v)
}
if (v->s.loop_filter)
ff_draw_horiz_band(s, (s->end_mb_y-1)*16, 16);
ff_er_add_slice(s, 0, s->start_mb_y << v->field_mode, s->mb_width - 1,
ff_er_add_slice(&s->er, 0, s->start_mb_y << v->field_mode, s->mb_width - 1,
(s->end_mb_y << v->field_mode) - 1, ER_MB_END);
}
@@ -4638,7 +4638,7 @@ static void vc1_decode_p_blocks(VC1Context *v)
vc1_apply_p_loop_filter(v);
if (get_bits_count(&s->gb) > v->bits || get_bits_count(&s->gb) < 0) {
// TODO: may need modification to handle slice coding
ff_er_add_slice(s, 0, s->start_mb_y, s->mb_x, s->mb_y, ER_MB_ERROR);
ff_er_add_slice(&s->er, 0, s->start_mb_y, s->mb_x, s->mb_y, ER_MB_ERROR);
av_log(s->avctx, AV_LOG_ERROR, "Bits overconsumption: %i > %i at %ix%i\n",
get_bits_count(&s->gb), v->bits, s->mb_x, s->mb_y);
return;
@@ -4661,7 +4661,7 @@ static void vc1_decode_p_blocks(VC1Context *v)
}
if (s->end_mb_y >= s->start_mb_y)
ff_draw_horiz_band(s, (s->end_mb_y - 1) * 16, 16);
ff_er_add_slice(s, 0, s->start_mb_y << v->field_mode, s->mb_width - 1,
ff_er_add_slice(&s->er, 0, s->start_mb_y << v->field_mode, s->mb_width - 1,
(s->end_mb_y << v->field_mode) - 1, ER_MB_END);
}
@@ -4707,7 +4707,7 @@ static void vc1_decode_b_blocks(VC1Context *v)
vc1_decode_b_mb(v);
if (get_bits_count(&s->gb) > v->bits || get_bits_count(&s->gb) < 0) {
// TODO: may need modification to handle slice coding
ff_er_add_slice(s, 0, s->start_mb_y, s->mb_x, s->mb_y, ER_MB_ERROR);
ff_er_add_slice(&s->er, 0, s->start_mb_y, s->mb_x, s->mb_y, ER_MB_ERROR);
av_log(s->avctx, AV_LOG_ERROR, "Bits overconsumption: %i > %i at %ix%i\n",
get_bits_count(&s->gb), v->bits, s->mb_x, s->mb_y);
return;
@@ -4722,7 +4722,7 @@ static void vc1_decode_b_blocks(VC1Context *v)
}
if (v->s.loop_filter)
ff_draw_horiz_band(s, (s->end_mb_y - 1) * 16, 16);
ff_er_add_slice(s, 0, s->start_mb_y << v->field_mode, s->mb_width - 1,
ff_er_add_slice(&s->er, 0, s->start_mb_y << v->field_mode, s->mb_width - 1,
(s->end_mb_y << v->field_mode) - 1, ER_MB_END);
}
@@ -4730,7 +4730,7 @@ static void vc1_decode_skip_blocks(VC1Context *v)
{
MpegEncContext *s = &v->s;
ff_er_add_slice(s, 0, s->start_mb_y, s->mb_width - 1, s->end_mb_y - 1, ER_MB_END);
ff_er_add_slice(&s->er, 0, s->start_mb_y, s->mb_width - 1, s->end_mb_y - 1, ER_MB_END);
s->first_slice_line = 1;
for (s->mb_y = s->start_mb_y; s->mb_y < s->end_mb_y; s->mb_y++) {
s->mb_x = 0;
@@ -5558,7 +5558,7 @@ static int vc1_decode_frame(AVCodecContext *avctx, void *data,
if (avctx->hwaccel->end_frame(avctx) < 0)
goto err;
} else {
ff_er_frame_start(s);
ff_mpeg_er_frame_start(s);
v->bits = buf_size * 8;
v->end_mb_x = s->mb_width;
@@ -5635,7 +5635,7 @@ static int vc1_decode_frame(AVCodecContext *avctx, void *data,
get_bits_count(&s->gb), s->gb.size_in_bits);
// if (get_bits_count(&s->gb) > buf_size * 8)
// return -1;
ff_er_frame_end(s);
ff_er_frame_end(&s->er);
}
ff_MPV_frame_end(s);