/*
 * H263/MPEG4 backend for encoder and decoder
 * Copyright (c) 2000,2001 Fabrice Bellard
 * H263+ support.
 * Copyright (c) 2001 Juan J. Sierralta P
 * Copyright (c) 2002-2004 Michael Niedermayer <michaelni@gmx.at>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * H.263/MPEG-4 codec.
 */

#include <limits.h>

#include "avcodec.h"
#include "mpegvideo.h"
#include "h263.h"
#include "h263data.h"
#include "mathops.h"
#include "unary.h"
#include "flv.h"
#include "mpeg4video.h"


uint8_t ff_h263_static_rl_table_store[2][2][2*MAX_RUN + MAX_LEVEL + 3];

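/**
 * Store the motion vectors, macroblock type and skip flag of the current
 * macroblock in the corresponding current_picture tables so that later
 * stages can read them back. 8x8 vectors are left untouched here because
 * they have already been written during parsing.
 */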
void ff_h263_update_motion_val(MpegEncContext * s){
    const int mb_xy = s->mb_y * s->mb_stride + s->mb_x;
    //FIXME a lot of that is only needed for !low_delay
    const int wrap = s->b8_stride;
    const int xy = s->block_index[0];

    s->current_picture.mbskip_table[mb_xy] = s->mb_skipped;

    if(s->mv_type != MV_TYPE_8X8){
        int motion_x, motion_y;
        if (s->mb_intra) {
            motion_x = 0;
            motion_y = 0;
        } else if (s->mv_type == MV_TYPE_16X16) {
            motion_x = s->mv[0][0][0];
            motion_y = s->mv[0][0][1];
        } else /*if (s->mv_type == MV_TYPE_FIELD)*/ {
            int i;
            motion_x = s->mv[0][0][0] + s->mv[0][1][0];
            motion_y = s->mv[0][0][1] + s->mv[0][1][1];
            motion_x = (motion_x>>1) | (motion_x&1);
            for(i=0; i<2; i++){
                s->p_field_mv_table[i][0][mb_xy][0]= s->mv[0][i][0];
                s->p_field_mv_table[i][0][mb_xy][1]= s->mv[0][i][1];
            }
            s->current_picture.ref_index[0][4*mb_xy    ] =
            s->current_picture.ref_index[0][4*mb_xy + 1] = s->field_select[0][0];
            s->current_picture.ref_index[0][4*mb_xy + 2] =
            s->current_picture.ref_index[0][4*mb_xy + 3] = s->field_select[0][1];
        }

        /* no update if 8X8 because it has been done during parsing */
        s->current_picture.motion_val[0][xy][0]            = motion_x;
        s->current_picture.motion_val[0][xy][1]            = motion_y;
        s->current_picture.motion_val[0][xy + 1][0]        = motion_x;
        s->current_picture.motion_val[0][xy + 1][1]        = motion_y;
        s->current_picture.motion_val[0][xy + wrap][0]     = motion_x;
        s->current_picture.motion_val[0][xy + wrap][1]     = motion_y;
        s->current_picture.motion_val[0][xy + 1 + wrap][0] = motion_x;
        s->current_picture.motion_val[0][xy + 1 + wrap][1] = motion_y;
    }

    if(s->encoding){ //FIXME encoding MUST be cleaned up
        if (s->mv_type == MV_TYPE_8X8)
            s->current_picture.mb_type[mb_xy] = MB_TYPE_L0 | MB_TYPE_8x8;
        else if(s->mb_intra)
            s->current_picture.mb_type[mb_xy] = MB_TYPE_INTRA;
        else
            s->current_picture.mb_type[mb_xy] = MB_TYPE_L0 | MB_TYPE_16x16;
    }
}

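/**
 * Predict the DC coefficient of block n from the left (A) and top (C)
 * neighbouring blocks; neighbours outside the current GOB/slice are
 * treated as the reset value 1024.
 *
 * @param n          block index (0-3 are luma, 4 is Cb, 5 is Cr)
 * @param dc_val_ptr returns a pointer to the slot where the reconstructed
 *                   DC of this block has to be stored
 * @return the predicted DC value
 */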
int ff_h263_pred_dc(MpegEncContext * s, int n, int16_t **dc_val_ptr)
{
    int x, y, wrap, a, c, pred_dc;
    int16_t *dc_val;

    /* find prediction */
    if (n < 4) {
        x = 2 * s->mb_x + (n & 1);
        y = 2 * s->mb_y + ((n & 2) >> 1);
        wrap = s->b8_stride;
        dc_val = s->dc_val[0];
    } else {
        x = s->mb_x;
        y = s->mb_y;
        wrap = s->mb_stride;
        dc_val = s->dc_val[n - 4 + 1];
    }
    /* B C
     * A X
     */
    a = dc_val[(x - 1) + (y) * wrap];
    c = dc_val[(x) + (y - 1) * wrap];

    /* No prediction outside GOB boundary */
    if(s->first_slice_line && n!=3){
        if(n!=2) c= 1024;
        if(n!=1 && s->mb_x == s->resync_mb_x) a= 1024;
    }
    /* just DC prediction */
    if (a != 1024 && c != 1024)
        pred_dc = (a + c) >> 1;
    else if (a != 1024)
        pred_dc = a;
    else
        pred_dc = c;

    /* we assume pred is positive */
    *dc_val_ptr = &dc_val[x + y * wrap];
    return pred_dc;
}

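/**
 * Apply the H.263 deblocking (loop) filter to the edges of the current
 * macroblock: its internal edges are filtered with the current qscale,
 * while the edges shared with the top and left neighbours use a qscale
 * derived from the macroblocks involved; skipped macroblocks contribute
 * no qscale of their own. Chroma edges use the chroma qscale table.
 */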
void ff_h263_loop_filter(MpegEncContext * s){
    int qp_c;
    const int linesize  = s->linesize;
    const int uvlinesize= s->uvlinesize;
    const int xy = s->mb_y * s->mb_stride + s->mb_x;
    uint8_t *dest_y = s->dest[0];
    uint8_t *dest_cb= s->dest[1];
    uint8_t *dest_cr= s->dest[2];

//    if(s->pict_type==AV_PICTURE_TYPE_B && !s->readable) return;

    /*
       Diag Top
       Left Center
    */
    if (!IS_SKIP(s->current_picture.mb_type[xy])) {
        qp_c= s->qscale;
        s->dsp.h263_v_loop_filter(dest_y+8*linesize  , linesize, qp_c);
        s->dsp.h263_v_loop_filter(dest_y+8*linesize+8, linesize, qp_c);
    }else
        qp_c= 0;

    if(s->mb_y){
        int qp_dt, qp_tt, qp_tc;

        if (IS_SKIP(s->current_picture.mb_type[xy - s->mb_stride]))
            qp_tt=0;
        else
            qp_tt = s->current_picture.qscale_table[xy - s->mb_stride];

        if(qp_c)
            qp_tc= qp_c;
        else
            qp_tc= qp_tt;

        if(qp_tc){
            const int chroma_qp= s->chroma_qscale_table[qp_tc];
            s->dsp.h263_v_loop_filter(dest_y  ,   linesize, qp_tc);
            s->dsp.h263_v_loop_filter(dest_y+8,   linesize, qp_tc);

            s->dsp.h263_v_loop_filter(dest_cb , uvlinesize, chroma_qp);
            s->dsp.h263_v_loop_filter(dest_cr , uvlinesize, chroma_qp);
        }

        if(qp_tt)
            s->dsp.h263_h_loop_filter(dest_y-8*linesize+8  ,   linesize, qp_tt);

        if(s->mb_x){
            if (qp_tt || IS_SKIP(s->current_picture.mb_type[xy - 1 - s->mb_stride]))
                qp_dt= qp_tt;
            else
                qp_dt = s->current_picture.qscale_table[xy - 1 - s->mb_stride];

            if(qp_dt){
                const int chroma_qp= s->chroma_qscale_table[qp_dt];
                s->dsp.h263_h_loop_filter(dest_y -8*linesize  ,   linesize, qp_dt);
                s->dsp.h263_h_loop_filter(dest_cb-8*uvlinesize, uvlinesize, chroma_qp);
                s->dsp.h263_h_loop_filter(dest_cr-8*uvlinesize, uvlinesize, chroma_qp);
            }
        }
    }

    if(qp_c){
        s->dsp.h263_h_loop_filter(dest_y +8,   linesize, qp_c);
        if(s->mb_y + 1 == s->mb_height)
            s->dsp.h263_h_loop_filter(dest_y+8*linesize+8,   linesize, qp_c);
    }

    if(s->mb_x){
        int qp_lc;
        if (qp_c || IS_SKIP(s->current_picture.mb_type[xy - 1]))
            qp_lc= qp_c;
        else
            qp_lc = s->current_picture.qscale_table[xy - 1];

        if(qp_lc){
            s->dsp.h263_h_loop_filter(dest_y,   linesize, qp_lc);
            if(s->mb_y + 1 == s->mb_height){
                const int chroma_qp= s->chroma_qscale_table[qp_lc];
                s->dsp.h263_h_loop_filter(dest_y +8*  linesize,   linesize, qp_lc);
                s->dsp.h263_h_loop_filter(dest_cb             , uvlinesize, chroma_qp);
                s->dsp.h263_h_loop_filter(dest_cr             , uvlinesize, chroma_qp);
            }
        }
    }
}

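/**
 * Perform DC and, if ac_pred is set, AC prediction for an intra block as
 * used by H.263 advanced intra coding: the DC is predicted from the left
 * or top neighbour (or their average), and the first column or row of AC
 * coefficients of the selected neighbour is added to the block, depending
 * on h263_aic_dir. The reconstructed DC and the first AC row/column are
 * then stored back into the prediction tables.
 *
 * @param block coefficients of the current block, modified in place
 * @param n     block index (0-3 are luma, 4 is Cb, 5 is Cr)
 */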
void ff_h263_pred_acdc(MpegEncContext * s, int16_t *block, int n)
{
    int x, y, wrap, a, c, pred_dc, scale, i;
    int16_t *dc_val, *ac_val, *ac_val1;

    /* find prediction */
    if (n < 4) {
        x = 2 * s->mb_x + (n & 1);
        y = 2 * s->mb_y + (n>> 1);
        wrap = s->b8_stride;
        dc_val = s->dc_val[0];
        ac_val = s->ac_val[0][0];
        scale = s->y_dc_scale;
    } else {
        x = s->mb_x;
        y = s->mb_y;
        wrap = s->mb_stride;
        dc_val = s->dc_val[n - 4 + 1];
        ac_val = s->ac_val[n - 4 + 1][0];
        scale = s->c_dc_scale;
    }

    ac_val += ((y) * wrap + (x)) * 16;
    ac_val1 = ac_val;

    /* B C
     * A X
     */
    a = dc_val[(x - 1) + (y) * wrap];
    c = dc_val[(x) + (y - 1) * wrap];

    /* No prediction outside GOB boundary */
    if(s->first_slice_line && n!=3){
        if(n!=2) c= 1024;
        if(n!=1 && s->mb_x == s->resync_mb_x) a= 1024;
    }

    if (s->ac_pred) {
        pred_dc = 1024;
        if (s->h263_aic_dir) {
            /* left prediction */
            if (a != 1024) {
                ac_val -= 16;
                for(i=1;i<8;i++) {
                    block[s->dsp.idct_permutation[i<<3]] += ac_val[i];
                }
                pred_dc = a;
            }
        } else {
            /* top prediction */
            if (c != 1024) {
                ac_val -= 16 * wrap;
                for(i=1;i<8;i++) {
                    block[s->dsp.idct_permutation[i   ]] += ac_val[i + 8];
                }
                pred_dc = c;
            }
        }
    } else {
        /* just DC prediction */
        if (a != 1024 && c != 1024)
            pred_dc = (a + c) >> 1;
        else if (a != 1024)
            pred_dc = a;
        else
            pred_dc = c;
    }

    /* we assume pred is positive */
    block[0]=block[0]*scale + pred_dc;

    if (block[0] < 0)
        block[0] = 0;
    else
        block[0] |= 1;

    /* Update AC/DC tables */
    dc_val[(x) + (y) * wrap] = block[0];

    /* left copy */
    for(i=1;i<8;i++)
        ac_val1[i    ] = block[s->dsp.idct_permutation[i<<3]];
    /* top copy */
    for(i=1;i<8;i++)
        ac_val1[8 + i] = block[s->dsp.idct_permutation[i   ]];
}

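/**
 * Compute the motion vector predictor for an 8x8 block as the
 * component-wise median of three previously decoded neighbouring vectors
 * (left, top and a diagonal candidate that depends on the block position),
 * with the usual special cases on the first line of a slice/GOB.
 *
 * @param block index of the 8x8 block within the macroblock (0-3)
 * @param dir   motion vector list to use (0 = forward, 1 = backward)
 * @param px    returns the predicted horizontal MV component
 * @param py    returns the predicted vertical MV component
 * @return pointer to the motion vector of the current block in
 *         current_picture.motion_val[dir]
 */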
int16_t *ff_h263_pred_motion(MpegEncContext * s, int block, int dir,
                             int *px, int *py)
{
    int wrap;
    int16_t *A, *B, *C, (*mot_val)[2];
    static const int off[4]= {2, 1, 1, -1};

    wrap = s->b8_stride;
    mot_val = s->current_picture.motion_val[dir] + s->block_index[block];

    A = mot_val[ - 1];
    /* special case for first (slice) line */
    if (s->first_slice_line && block<3) {
        // we can't just change some MVs to simulate that as we need them for the B frames (and ME)
        // and if we ever support non-rectangular objects then we need to do a few ifs here anyway :(
        if(block==0){ //most common case
            if(s->mb_x  == s->resync_mb_x){ //rare
                *px= *py = 0;
            }else if(s->mb_x + 1 == s->resync_mb_x && s->h263_pred){ //rare
                C = mot_val[off[block] - wrap];
                if(s->mb_x==0){
                    *px = C[0];
                    *py = C[1];
                }else{
                    *px = mid_pred(A[0], 0, C[0]);
                    *py = mid_pred(A[1], 0, C[1]);
                }
            }else{
                *px = A[0];
                *py = A[1];
            }
        }else if(block==1){
            if(s->mb_x + 1 == s->resync_mb_x && s->h263_pred){ //rare
                C = mot_val[off[block] - wrap];
                *px = mid_pred(A[0], 0, C[0]);
                *py = mid_pred(A[1], 0, C[1]);
            }else{
                *px = A[0];
                *py = A[1];
            }
        }else{ /* block==2 */
            B = mot_val[ - wrap];
            C = mot_val[off[block] - wrap];
            if(s->mb_x == s->resync_mb_x) //rare
                A[0]=A[1]=0;

            *px = mid_pred(A[0], B[0], C[0]);
            *py = mid_pred(A[1], B[1], C[1]);
        }
    } else {
        B = mot_val[ - wrap];
        C = mot_val[off[block] - wrap];
        *px = mid_pred(A[0], B[0], C[0]);
        *py = mid_pred(A[1], B[1], C[1]);
    }
    return *mot_val;
}


/**
 * Get the GOB height based on picture height.
 */
int ff_h263_get_gob_height(MpegEncContext *s){
    if (s->height <= 400)
        return 1;
    else if (s->height <= 800)
        return 2;
    else
        return 4;
}