/*
 * XVideo Motion Compensation
 * Copyright (c) 2003 Ivan Kalvachev
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <limits.h>
#include <X11/extensions/XvMC.h>

#include "avcodec.h"
#include "mpegvideo.h"

#undef NDEBUG
#include <assert.h>

#include "xvmc.h"
#include "xvmc_internal.h"

/**
 * Initialize the block field of the MpegEncContext pointer passed as
 * parameter after making sure that the data is not corrupted.
 * In order to implement something like direct rendering instead of decoding
 * coefficients in s->blocks and then copying them, copy them directly
 * into the data_blocks array provided by xvmc.
 */
void ff_xvmc_init_block(MpegEncContext *s)
{
    struct xvmc_pix_fmt *render = (struct xvmc_pix_fmt*)s->current_picture.f.data[2];
    assert(render && render->xvmc_id == AV_XVMC_ID);

    s->block = (int16_t (*)[64])(render->data_blocks + render->next_free_data_block_num * 64);
}

/**
 * Fill individual block pointers, so there are no gaps in the data_block array
 * in case not all blocks in the macroblock are coded.
 */
void ff_xvmc_pack_pblocks(MpegEncContext *s, int cbp)
{
    int i, j = 0;
    const int mb_block_count = 4 + (1 << s->chroma_format);

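    /* mb_block_count is 6, 8, or 12 depending on the chroma format; shift the
     * coded block pattern so that the bit for the first block of this
     * macroblock lands in bit 11, and "cbp += cbp" advances it each pass. */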
    cbp <<= 12 - mb_block_count;
    for (i = 0; i < mb_block_count; i++) {
        if (cbp & (1 << 11))
            s->pblocks[i] = &s->block[j++];
        else
            s->pblocks[i] = NULL;
        cbp += cbp;
    }
}

/**
 * Find and store the surfaces that are used as reference frames.
 * This function should be called for every new field and/or frame.
 * It should be safe to call the function a few times for the same field.
 */
int ff_xvmc_field_start(MpegEncContext *s, AVCodecContext *avctx)
{
    struct xvmc_pix_fmt *last, *next, *render = (struct xvmc_pix_fmt*)s->current_picture.f.data[2];
    const int mb_block_count = 4 + (1 << s->chroma_format);

    assert(avctx);
    if (!render || render->xvmc_id != AV_XVMC_ID ||
        !render->data_blocks || !render->mv_blocks ||
        (unsigned int)render->allocated_mv_blocks   > INT_MAX/(64*6) ||
        (unsigned int)render->allocated_data_blocks > INT_MAX/64     ||
        !render->p_surface) {
        av_log(avctx, AV_LOG_ERROR,
               "Render token doesn't look as expected.\n");
        return -1; // make sure that this is a render packet
    }

    if (render->filled_mv_blocks_num) {
        av_log(avctx, AV_LOG_ERROR,
               "Rendering surface contains %i unprocessed blocks.\n",
               render->filled_mv_blocks_num);
        return -1;
    }
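    /* Require room for the worst case: all remaining mv blocks fully coded,
     * i.e. mb_block_count data blocks for each of them. */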
    if (render->allocated_mv_blocks   < 1 ||
        render->allocated_data_blocks <  render->allocated_mv_blocks*mb_block_count ||
        render->start_mv_blocks_num   >= render->allocated_mv_blocks                ||
        render->next_free_data_block_num >
                        render->allocated_data_blocks -
                        mb_block_count*(render->allocated_mv_blocks-render->start_mv_blocks_num)) {
        av_log(avctx, AV_LOG_ERROR,
               "Rendering surface doesn't provide enough block structures to work with.\n");
        return -1;
    }

    render->picture_structure = s->picture_structure;
    render->flags             = s->first_field ? 0 : XVMC_SECOND_FIELD;
    render->p_future_surface  = NULL;
    render->p_past_surface    = NULL;

    switch(s->pict_type) {
        case  AV_PICTURE_TYPE_I:
            return 0; // no prediction from other frames
        case  AV_PICTURE_TYPE_B:
            next = (struct xvmc_pix_fmt*)s->next_picture.f.data[2];
            if (!next)
                return -1;
            if (next->xvmc_id != AV_XVMC_ID)
                return -1;
            render->p_future_surface = next->p_surface;
            // no return here, going to set forward prediction
        case  AV_PICTURE_TYPE_P:
            last = (struct xvmc_pix_fmt*)s->last_picture.f.data[2];
            if (!last)
                last = render; // predict second field from the first
            if (last->xvmc_id != AV_XVMC_ID)
                return -1;
            render->p_past_surface = last->p_surface;
            return 0;
    }

    return -1;
}

/**
 * Complete frame/field rendering by passing any remaining blocks.
 * Normally ff_draw_horiz_band() is called for each slice, however,
 * some leftover blocks, for example from error_resilience(), may remain.
 * It should be safe to call the function a few times for the same field.
 */
void ff_xvmc_field_end(MpegEncContext *s)
{
    struct xvmc_pix_fmt *render = (struct xvmc_pix_fmt*)s->current_picture.f.data[2];
    assert(render);

    if (render->filled_mv_blocks_num > 0)
        ff_mpeg_draw_horiz_band(s, 0, 0);
}

/**
 * Synthesize the data needed by XvMC to render one macroblock of data.
 * Fill all relevant fields, if necessary do IDCT.
 */
void ff_xvmc_decode_mb(MpegEncContext *s)
{
    XvMCMacroBlock *mv_block;
    struct xvmc_pix_fmt *render;
    int i, cbp, blocks_per_mb;

    const int mb_xy = s->mb_y * s->mb_stride + s->mb_x;

    if (s->encoding) {
        av_log(s->avctx, AV_LOG_ERROR, "XVMC doesn't support encoding!!!\n");
        return;
    }

    // from MPV_decode_mb(), update DC predictors for P macroblocks
    if (!s->mb_intra) {
        s->last_dc[0] =
        s->last_dc[1] =
        s->last_dc[2] =  128 << s->intra_dc_precision;
    }

    // MC doesn't skip blocks
    s->mb_skipped = 0;

    // Do I need to export quant when I could not perform postprocessing?
    // Anyway, it doesn't hurt.
    s->current_picture.qscale_table[mb_xy] = s->qscale;

    // start of XVMC-specific code
    render = (struct xvmc_pix_fmt*)s->current_picture.f.data[2];
    assert(render);
    assert(render->xvmc_id == AV_XVMC_ID);
    assert(render->mv_blocks);

    // take the next free macroblock
    mv_block = &render->mv_blocks[render->start_mv_blocks_num +
                                  render->filled_mv_blocks_num];

    mv_block->x        = s->mb_x;
    mv_block->y        = s->mb_y;
    mv_block->dct_type = s->interlaced_dct; // XVMC_DCT_TYPE_FRAME/FIELD;
    if (s->mb_intra) {
        mv_block->macroblock_type = XVMC_MB_TYPE_INTRA; // no MC, all done
    } else {
        mv_block->macroblock_type = XVMC_MB_TYPE_PATTERN;

        if (s->mv_dir & MV_DIR_FORWARD) {
            mv_block->macroblock_type |= XVMC_MB_TYPE_MOTION_FORWARD;
            // PMV[n][dir][xy] = mv[dir][n][xy]
            mv_block->PMV[0][0][0] = s->mv[0][0][0];
            mv_block->PMV[0][0][1] = s->mv[0][0][1];
            mv_block->PMV[1][0][0] = s->mv[0][1][0];
            mv_block->PMV[1][0][1] = s->mv[0][1][1];
        }
        if (s->mv_dir & MV_DIR_BACKWARD) {
            mv_block->macroblock_type |= XVMC_MB_TYPE_MOTION_BACKWARD;
            mv_block->PMV[0][1][0] = s->mv[1][0][0];
            mv_block->PMV[0][1][1] = s->mv[1][0][1];
            mv_block->PMV[1][1][0] = s->mv[1][1][0];
            mv_block->PMV[1][1][1] = s->mv[1][1][1];
        }

        switch(s->mv_type) {
            case  MV_TYPE_16X16:
                mv_block->motion_type = XVMC_PREDICTION_FRAME;
                break;
            case  MV_TYPE_16X8:
                mv_block->motion_type = XVMC_PREDICTION_16x8;
                break;
            case  MV_TYPE_FIELD:
                mv_block->motion_type = XVMC_PREDICTION_FIELD;
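                // field prediction in a frame picture: the vertical components
                // are doubled, converting field-line units to frame-line units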
                if (s->picture_structure == PICT_FRAME) {
                    mv_block->PMV[0][0][1] <<= 1;
                    mv_block->PMV[1][0][1] <<= 1;
                    mv_block->PMV[0][1][1] <<= 1;
                    mv_block->PMV[1][1][1] <<= 1;
                }
                break;
            case  MV_TYPE_DMV:
                mv_block->motion_type = XVMC_PREDICTION_DUAL_PRIME;
                if (s->picture_structure == PICT_FRAME) {

                    mv_block->PMV[0][0][0] = s->mv[0][0][0];      // top from top
                    mv_block->PMV[0][0][1] = s->mv[0][0][1] << 1;

                    mv_block->PMV[0][1][0] = s->mv[0][0][0];      // bottom from bottom
                    mv_block->PMV[0][1][1] = s->mv[0][0][1] << 1;

                    mv_block->PMV[1][0][0] = s->mv[0][2][0];      // dmv00, top from bottom
                    mv_block->PMV[1][0][1] = s->mv[0][2][1] << 1; // dmv01

                    mv_block->PMV[1][1][0] = s->mv[0][3][0];      // dmv10, bottom from top
                    mv_block->PMV[1][1][1] = s->mv[0][3][1] << 1; // dmv11

                } else {
                    mv_block->PMV[0][1][0] = s->mv[0][2][0];      // dmv00
                    mv_block->PMV[0][1][1] = s->mv[0][2][1];      // dmv01
                }
                break;
            default:
                assert(0);
        }

        mv_block->motion_vertical_field_select = 0;

        // set correct field references
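        // bit layout: 0 = first forward, 1 = first backward,
        //             2 = second forward, 3 = second backward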
        if (s->mv_type == MV_TYPE_FIELD || s->mv_type == MV_TYPE_16X8) {
            mv_block->motion_vertical_field_select |= s->field_select[0][0];
            mv_block->motion_vertical_field_select |= s->field_select[1][0] << 1;
            mv_block->motion_vertical_field_select |= s->field_select[0][1] << 2;
            mv_block->motion_vertical_field_select |= s->field_select[1][1] << 3;
        }
    } // !intra
    // time to handle data blocks
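    // this macroblock's DCT blocks start at the current free slot in data_blocks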
    mv_block->index = render->next_free_data_block_num;

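    // 6 blocks per macroblock for 4:2:0, 8 for 4:2:2, 12 for 4:4:4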
    blocks_per_mb = 6;
    if (s->chroma_format >= 2) {
        blocks_per_mb = 4 + (1 << s->chroma_format);
    }

    // calculate cbp
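    // the first block ends up in the most significant of the blocks_per_mb bits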
    cbp = 0;
    for (i = 0; i < blocks_per_mb; i++) {
        cbp += cbp;
        if (s->block_last_index[i] >= 0)
            cbp++;
    }

    if (s->flags & CODEC_FLAG_GRAY) {
        if (s->mb_intra) {                                   // intra frames are always full chroma blocks
            for (i = 4; i < blocks_per_mb; i++) {
                memset(s->pblocks[i], 0, sizeof(*s->pblocks[i]));  // so we need to clear them
                if (!render->unsigned_intra)
                    *s->pblocks[i][0] = 1 << 10;
            }
        } else {
            cbp &= 0xf << (blocks_per_mb - 4);
            blocks_per_mb = 4;                               // luminance blocks only
        }
    }
    mv_block->coded_block_pattern = cbp;
    if (cbp == 0)
        mv_block->macroblock_type &= ~XVMC_MB_TYPE_PATTERN;

    for (i = 0; i < blocks_per_mb; i++) {
        if (s->block_last_index[i] >= 0) {
            // I do not have unsigned_intra MOCO to test, hope it is OK.
            if (s->mb_intra && (render->idct || !render->unsigned_intra))
                *s->pblocks[i][0] -= 1 << 10;
            if (!render->idct) {
                s->dsp.idct(*s->pblocks[i]);
                /* It is unclear if MC hardware requires pixel diff values to be
                 * in the range [-255;255]. TODO: Clipping if such hardware is
                 * ever found. As of now it would only be an unnecessary
                 * slowdown. */
            }
            // copy blocks only if the codec doesn't support pblocks reordering
            if (s->avctx->xvmc_acceleration == 1) {
                memcpy(&render->data_blocks[render->next_free_data_block_num*64],
                       s->pblocks[i], sizeof(*s->pblocks[i]));
            }
            render->next_free_data_block_num++;
        }
    }
    render->filled_mv_blocks_num++;

    assert(render->filled_mv_blocks_num     <= render->allocated_mv_blocks);
    assert(render->next_free_data_block_num <= render->allocated_data_blocks);
    /* The above conditions should not be able to fail as long as this function
     * is used and the following 'if ()' automatically calls a callback to free
     * blocks. */

    if (render->filled_mv_blocks_num == render->allocated_mv_blocks)
        ff_mpeg_draw_horiz_band(s, 0, 0);
}