Commit 92cbd775 authored by Michael Niedermayer

Merge commit 'd48c2063'

* commit 'd48c2063':
  qtrleenc: use the AVFrame API properly.
  ulti: use the AVFrame API properly.
  vc1: use the AVFrame API properly.
  flashsv: use the AVFrame API properly.

Conflicts:
	libavcodec/flashsv.c
	libavcodec/qtrleenc.c
	libavcodec/ulti.c
	libavcodec/vc1dec.c
Merged-by: Michael Niedermayer <michaelni@gmx.at>
parents 9ad477d9 d48c2063
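All four codecs get the same treatment: the context stops embedding an AVFrame by value and instead owns a heap-allocated AVFrame pointer that is created with av_frame_alloc() in init, dropped with av_frame_unref() on decode errors, handed to the caller as a fresh reference with av_frame_ref(), and released with av_frame_free() in the close callback. The following is a minimal sketch of that ownership pattern using only public libavutil/libavcodec calls; the SketchContext type, the callback names and the "damaged" flag are hypothetical stand-ins, and the real decoders additionally go through the internal ff_reget_buffer()/ff_get_buffer() helpers visible in the hunks below.

#include <libavcodec/avcodec.h>
#include <libavutil/frame.h>

/* Hypothetical decoder context: it owns its picture through a pointer,
 * as the patched decoders now do, instead of embedding an AVFrame by value. */
typedef struct SketchContext {
    AVFrame *frame;
} SketchContext;

static int sketch_init(AVCodecContext *avctx)
{
    SketchContext *s = avctx->priv_data;

    /* replaces avcodec_get_frame_defaults() on an embedded frame */
    s->frame = av_frame_alloc();
    if (!s->frame)
        return AVERROR(ENOMEM);
    return 0;
}

static int sketch_decode(AVCodecContext *avctx, void *data,
                         int *got_frame, AVPacket *avpkt)
{
    SketchContext *s = avctx->priv_data;
    int damaged = 0; /* stand-in for a real bitstream consistency check */
    int ret;

    /* ... decode the packet into s->frame here; the real decoders first
     * call the internal ff_reget_buffer()/ff_get_buffer() helpers ... */

    if (damaged) {
        /* drop the half-written picture instead of returning it */
        av_frame_unref(s->frame);
        return AVERROR_INVALIDDATA;
    }

    /* hand the caller its own reference; s->frame stays valid so the
     * next call can still read the previous picture */
    if ((ret = av_frame_ref(data, s->frame)) < 0)
        return ret;

    *got_frame = 1;
    return avpkt->size;
}

static int sketch_close(AVCodecContext *avctx)
{
    SketchContext *s = avctx->priv_data;

    /* frees the frame and clears the pointer; replaces av_frame_unref()
     * on the old embedded AVFrame */
    av_frame_free(&s->frame);
    return 0;
}

The point of the pointer-based variant is that AVFrame is reference counted: av_frame_ref() gives the caller an independent reference, av_frame_free() is safe on an already-NULL pointer, and the AVFrame struct can grow without breaking codecs that would otherwise embed it by value.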
libavcodec/flashsv.c
@@ -50,7 +50,7 @@ typedef struct BlockInfo {
 typedef struct FlashSVContext {
     AVCodecContext *avctx;
-    AVFrame         frame;
+    AVFrame        *frame;
     int             image_width, image_height;
     int             block_width, block_height;
     uint8_t        *tmpblock;
@@ -99,6 +99,19 @@ static int decode_hybrid(const uint8_t *sptr, uint8_t *dptr, int dx, int dy,
     return sptr - orig_src;
 }
 
+static av_cold int flashsv_decode_end(AVCodecContext *avctx)
+{
+    FlashSVContext *s = avctx->priv_data;
+    inflateEnd(&s->zstream);
+    /* release the frame if needed */
+    av_frame_free(&s->frame);
+
+    /* free the tmpblock */
+    av_freep(&s->tmpblock);
+
+    return 0;
+}
+
 static av_cold int flashsv_decode_init(AVCodecContext *avctx)
 {
     FlashSVContext *s = avctx->priv_data;
@@ -114,7 +127,12 @@ static av_cold int flashsv_decode_init(AVCodecContext *avctx)
         return 1;
     }
     avctx->pix_fmt = AV_PIX_FMT_BGR24;
-    avcodec_get_frame_defaults(&s->frame);
+    s->frame = av_frame_alloc();
+    if (!s->frame) {
+        flashsv_decode_end(avctx);
+        return AVERROR(ENOMEM);
+    }
 
     return 0;
 }
@@ -206,18 +224,18 @@ static int flashsv_decode_block(AVCodecContext *avctx, AVPacket *avpkt,
         /* Flash Screen Video stores the image upside down, so copy
          * lines to destination in reverse order. */
         for (k = 1; k <= s->diff_height; k++) {
-            memcpy(s->frame.data[0] + x_pos * 3 +
-                   (s->image_height - y_pos - k) * s->frame.linesize[0],
+            memcpy(s->frame->data[0] + x_pos * 3 +
+                   (s->image_height - y_pos - k) * s->frame->linesize[0],
                    line, width * 3);
             /* advance source pointer to next line */
             line += width * 3;
         }
     } else {
         /* hybrid 15-bit/palette mode */
-        decode_hybrid(s->tmpblock, s->frame.data[0],
+        decode_hybrid(s->tmpblock, s->frame->data[0],
                       s->image_height - (y_pos + 1 + s->diff_height),
                       x_pos, s->diff_height, width,
-                      s->frame.linesize[0], s->pal);
+                      s->frame->linesize[0], s->pal);
     }
     skip_bits_long(gb, 8 * block_size); /* skip the consumed bits */
     return 0;
@@ -344,7 +362,7 @@ static int flashsv_decode_frame(AVCodecContext *avctx, void *data,
             s->image_width, s->image_height, s->block_width, s->block_height,
             h_blocks, v_blocks, h_part, v_part);
 
-    if ((ret = ff_reget_buffer(avctx, &s->frame)) < 0)
+    if ((ret = ff_reget_buffer(avctx, s->frame)) < 0)
         return ret;
 
     /* loop over all block columns */
@@ -369,7 +387,7 @@ static int flashsv_decode_frame(AVCodecContext *avctx, void *data,
                 s->diff_height = cur_blk_height;
 
                 if (8 * size > get_bits_left(&gb)) {
-                    av_frame_unref(&s->frame);
+                    av_frame_unref(s->frame);
                     return AVERROR_INVALIDDATA;
                 }
@@ -429,11 +447,11 @@ static int flashsv_decode_frame(AVCodecContext *avctx, void *data,
                 if (has_diff) {
                     int k;
-                    int off = (s->image_height - y_pos - 1) * s->frame.linesize[0];
+                    int off = (s->image_height - y_pos - 1) * s->frame->linesize[0];
 
                     for (k = 0; k < cur_blk_height; k++) {
-                        int x = off - k * s->frame.linesize[0] + x_pos * 3;
-                        memcpy(s->frame.data[0] + x, s->keyframe + x,
+                        int x = off - k * s->frame->linesize[0] + x_pos * 3;
+                        memcpy(s->frame->data[0] + x, s->keyframe + x,
                                cur_blk_width * 3);
                     }
                 }
@@ -451,17 +469,17 @@ static int flashsv_decode_frame(AVCodecContext *avctx, void *data,
     }
 
     if (s->is_keyframe && s->ver == 2) {
         if (!s->keyframe) {
-            s->keyframe = av_malloc(s->frame.linesize[0] * avctx->height);
+            s->keyframe = av_malloc(s->frame->linesize[0] * avctx->height);
             if (!s->keyframe) {
                 av_log(avctx, AV_LOG_ERROR, "Cannot allocate image data\n");
                 return AVERROR(ENOMEM);
             }
         }
-        memcpy(s->keyframe, s->frame.data[0],
-               s->frame.linesize[0] * avctx->height);
+        memcpy(s->keyframe, s->frame->data[0],
+               s->frame->linesize[0] * avctx->height);
     }
 
-    if ((ret = av_frame_ref(data, &s->frame)) < 0)
+    if ((ret = av_frame_ref(data, s->frame)) < 0)
         return ret;
 
     *got_frame = 1;
@@ -474,19 +492,6 @@ static int flashsv_decode_frame(AVCodecContext *avctx, void *data,
     return buf_size;
 }
 
-static av_cold int flashsv_decode_end(AVCodecContext *avctx)
-{
-    FlashSVContext *s = avctx->priv_data;
-    inflateEnd(&s->zstream);
-    /* release the frame if needed */
-    av_frame_unref(&s->frame);
-
-    /* free the tmpblock */
-    av_freep(&s->tmpblock);
-
-    return 0;
-}
-
 #if CONFIG_FLASHSV_DECODER
 AVCodec ff_flashsv_decoder = {
     .name = "flashsv",
...
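The qtrleenc.c hunks below apply the same idea on the encoder side: the private AVFrame that was only kept around for its key_frame flag goes away, avctx->coded_frame is allocated with av_frame_alloc() at init (with the now-earlier encode_end callback doubling as the error-path cleanup), and the key-frame test reads avctx->coded_frame->key_frame. A rough sketch of that shape, with hypothetical names and the actual RLE work elided:

#include <libavcodec/avcodec.h>

static int sketch_encode_end(AVCodecContext *avctx)
{
    /* also used as the init error path; av_frame_free() on a pointer that
     * is still NULL is a harmless no-op */
    av_frame_free(&avctx->coded_frame);
    return 0;
}

static int sketch_encode_init(AVCodecContext *avctx)
{
    avctx->coded_frame = av_frame_alloc();
    if (!avctx->coded_frame) {
        sketch_encode_end(avctx);
        return AVERROR(ENOMEM);
    }
    return 0;
}

static int sketch_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
                               const AVFrame *pict, int *got_packet)
{
    /* ... allocate pkt and emit the bitstream for pict here (elided) ... */

    /* the key-frame decision is read from the encoder-owned coded_frame
     * rather than from a per-context copy of the input frame */
    if (avctx->coded_frame->key_frame)
        pkt->flags |= AV_PKT_FLAG_KEY;

    *got_packet = 1;
    return 0;
}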
libavcodec/qtrleenc.c
@@ -36,7 +36,6 @@
 typedef struct QtrleEncContext {
     AVCodecContext *avctx;
-    AVFrame frame;
     int pixel_size;
     AVPicture previous_frame;
     unsigned int max_buf_size;
@@ -61,6 +60,19 @@ typedef struct QtrleEncContext {
     uint8_t* skip_table;
 } QtrleEncContext;
 
+static av_cold int qtrle_encode_end(AVCodecContext *avctx)
+{
+    QtrleEncContext *s = avctx->priv_data;
+
+    av_frame_free(&avctx->coded_frame);
+
+    avpicture_free(&s->previous_frame);
+    av_free(s->rlecode_table);
+    av_free(s->length_table);
+    av_free(s->skip_table);
+    return 0;
+}
+
 static av_cold int qtrle_encode_init(AVCodecContext *avctx)
 {
     QtrleEncContext *s = avctx->priv_data;
@@ -108,7 +120,13 @@ static av_cold int qtrle_encode_init(AVCodecContext *avctx)
                       + 15 /* header + footer */
                       + s->avctx->height*2 /* skip code+rle end */
                       + s->logical_width/MAX_RLE_BULK + 1 /* rle codes */;
-    avctx->coded_frame = &s->frame;
+
+    avctx->coded_frame = av_frame_alloc();
+    if (!avctx->coded_frame) {
+        qtrle_encode_end(avctx);
+        return AVERROR(ENOMEM);
+    }
+
     return 0;
 }
@@ -197,7 +215,7 @@ static void qtrle_encode_line(QtrleEncContext *s, const AVFrame *p, int line, ui
             }
         }
 
-        if (!s->frame.key_frame && !memcmp(this_line, prev_line, s->pixel_size))
+        if (!s->avctx->coded_frame->key_frame && !memcmp(this_line, prev_line, s->pixel_size))
             skipcount = FFMIN(skipcount + 1, MAX_RLE_SKIP);
         else
             skipcount = 0;
@@ -308,7 +326,7 @@ static int encode_frame(QtrleEncContext *s, const AVFrame *p, uint8_t *buf)
     int end_line = s->avctx->height;
     uint8_t *orig_buf = buf;
 
-    if (!s->frame.key_frame) {
+    if (!s->avctx->coded_frame->key_frame) {
         unsigned line_size = s->logical_width * s->pixel_size;
         for (start_line = 0; start_line < s->avctx->height; start_line++)
             if (memcmp(p->data[0] + start_line*p->linesize[0],
@@ -346,11 +364,9 @@ static int qtrle_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
                               const AVFrame *pict, int *got_packet)
 {
     QtrleEncContext * const s = avctx->priv_data;
-    AVFrame * const p = &s->frame;
+    AVFrame * const p = avctx->coded_frame;
     int ret;
 
-    *p = *pict;
-
     if ((ret = ff_alloc_packet2(avctx, pkt, s->max_buf_size)) < 0)
         return ret;
@@ -367,7 +383,8 @@ static int qtrle_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
     pkt->size = encode_frame(s, pict, pkt->data);
 
     /* save the current frame */
-    av_picture_copy(&s->previous_frame, (AVPicture *)p, avctx->pix_fmt, avctx->width, avctx->height);
+    av_picture_copy(&s->previous_frame, (const AVPicture *)pict,
+                    avctx->pix_fmt, avctx->width, avctx->height);
 
     if (p->key_frame)
         pkt->flags |= AV_PKT_FLAG_KEY;
@@ -376,17 +393,6 @@ static int qtrle_encode_frame(AVCodecContext *avctx, AVPacket *pkt,
     return 0;
 }
 
-static av_cold int qtrle_encode_end(AVCodecContext *avctx)
-{
-    QtrleEncContext *s = avctx->priv_data;
-
-    avpicture_free(&s->previous_frame);
-    av_free(s->rlecode_table);
-    av_free(s->length_table);
-    av_free(s->skip_table);
-    return 0;
-}
-
 AVCodec ff_qtrle_encoder = {
     .name = "qtrle",
     .long_name = NULL_IF_CONFIG_SMALL("QuickTime Animation (RLE) video"),
...
libavcodec/ulti.c
@@ -37,7 +37,7 @@
 typedef struct UltimotionDecodeContext {
     AVCodecContext *avctx;
     int width, height, blocks;
-    AVFrame frame;
+    AVFrame *frame;
     const uint8_t *ulti_codebook;
     GetByteContext gb;
 } UltimotionDecodeContext;
@@ -51,19 +51,19 @@ static av_cold int ulti_decode_init(AVCodecContext *avctx)
     s->height = avctx->height;
     s->blocks = (s->width / 8) * (s->height / 8);
     avctx->pix_fmt = AV_PIX_FMT_YUV410P;
-    avctx->coded_frame = &s->frame;
+    avctx->coded_frame = (AVFrame*) &s->frame;
     s->ulti_codebook = ulti_codebook;
-    avcodec_get_frame_defaults(&s->frame);
+
+    s->frame = av_frame_alloc();
+    if (!s->frame)
+        return AVERROR(ENOMEM);
 
     return 0;
 }
 
 static av_cold int ulti_decode_end(AVCodecContext *avctx){
     UltimotionDecodeContext *s = avctx->priv_data;
-    AVFrame *pic = &s->frame;
-
-    av_frame_unref(pic);
+
+    av_frame_free(&s->frame);
 
     return 0;
 }
@@ -227,7 +227,7 @@ static int ulti_decode_frame(AVCodecContext *avctx,
     int skip;
     int tmp;
 
-    if ((ret = ff_reget_buffer(avctx, &s->frame)) < 0)
+    if ((ret = ff_reget_buffer(avctx, s->frame)) < 0)
         return ret;
 
     bytestream2_init(&s->gb, buf, buf_size);
@@ -368,7 +368,7 @@ static int ulti_decode_frame(AVCodecContext *avctx,
                     Luma[14] = (tmp >> 6) & 0x3F;
                     Luma[15] = tmp & 0x3F;
 
-                    ulti_convert_yuv(&s->frame, tx, ty, Luma, chroma);
+                    ulti_convert_yuv(s->frame, tx, ty, Luma, chroma);
                 } else {
                     if (bytestream2_get_bytes_left(&s->gb) < 4)
                         goto err;
@@ -380,20 +380,20 @@ static int ulti_decode_frame(AVCodecContext *avctx,
                         Y[1] = tmp & 0x3F;
                         Y[2] = bytestream2_get_byteu(&s->gb) & 0x3F;
                         Y[3] = bytestream2_get_byteu(&s->gb) & 0x3F;
-                        ulti_grad(&s->frame, tx, ty, Y, chroma, angle); //draw block
+                        ulti_grad(s->frame, tx, ty, Y, chroma, angle); //draw block
                     } else { // some patterns
                         int f0, f1;
                         f0 = bytestream2_get_byteu(&s->gb);
                         f1 = tmp;
                         Y[0] = bytestream2_get_byteu(&s->gb) & 0x3F;
                         Y[1] = bytestream2_get_byteu(&s->gb) & 0x3F;
-                        ulti_pattern(&s->frame, tx, ty, f1, f0, Y[0], Y[1], chroma);
+                        ulti_pattern(s->frame, tx, ty, f1, f0, Y[0], Y[1], chroma);
                     }
                 }
                 break;
             }
             if(code != 3)
-                ulti_grad(&s->frame, tx, ty, Y, chroma, angle); // draw block
+                ulti_grad(s->frame, tx, ty, Y, chroma, angle); // draw block
         }
         blocks++;
         x += 8;
@@ -405,7 +405,7 @@ static int ulti_decode_frame(AVCodecContext *avctx,
     }
 
     *got_frame = 1;
-    if ((ret = av_frame_ref(data, &s->frame)) < 0)
+    if ((ret = av_frame_ref(data, s->frame)) < 0)
         return ret;
 
     return buf_size;
...
libavcodec/vc1.h
@@ -378,7 +378,7 @@ typedef struct VC1Context{
     //@{
     int new_sprite;
     int two_sprites;
-    AVFrame sprite_output_frame;
+    AVFrame *sprite_output_frame;
     int output_width, output_height, sprite_width, sprite_height;
     uint8_t* sr_rows[2][2]; ///< Sprite resizer line cache
     //@}
...
libavcodec/vc1dec.c
@@ -5379,8 +5379,8 @@ static void vc1_draw_sprites(VC1Context *v, SpriteData* sd)
         int width = v->output_width>>!!plane;
 
         for (row = 0; row < v->output_height>>!!plane; row++) {
-            uint8_t *dst = v->sprite_output_frame.data[plane] +
-                           v->sprite_output_frame.linesize[plane] * row;
+            uint8_t *dst = v->sprite_output_frame->data[plane] +
+                           v->sprite_output_frame->linesize[plane] * row;
 
             for (sprite = 0; sprite <= v->two_sprites; sprite++) {
                 uint8_t *iplane = s->current_picture.f.data[plane];
@@ -5471,8 +5471,8 @@ static int vc1_decode_sprites(VC1Context *v, GetBitContext* gb)
         v->two_sprites = 0;
     }
 
-    av_frame_unref(&v->sprite_output_frame);
-    if ((ret = ff_get_buffer(avctx, &v->sprite_output_frame, 0)) < 0)
+    av_frame_unref(v->sprite_output_frame);
+    if ((ret = ff_get_buffer(avctx, v->sprite_output_frame, 0)) < 0)
         return ret;
 
     vc1_draw_sprites(v, &sd);
@@ -5699,6 +5699,10 @@ static av_cold int vc1_decode_init(AVCodecContext *avctx)
         v->res_sprite = (avctx->codec_id == AV_CODEC_ID_VC1IMAGE);
     }
 
+    v->sprite_output_frame = av_frame_alloc();
+    if (!v->sprite_output_frame)
+        return AVERROR(ENOMEM);
+
     avctx->profile = v->profile;
     if (v->profile == PROFILE_ADVANCED)
         avctx->level = v->level;
@@ -5745,7 +5749,7 @@ av_cold int ff_vc1_decode_end(AVCodecContext *avctx)
     VC1Context *v = avctx->priv_data;
     int i;
 
-    av_frame_unref(&v->sprite_output_frame);
+    av_frame_free(&v->sprite_output_frame);
 
     for (i = 0; i < 4; i++)
         av_freep(&v->sr_rows[i >> 1][i & 1]);
@@ -6200,7 +6204,7 @@ image:
         if (vc1_decode_sprites(v, &s->gb))
             goto err;
 #endif
-        if ((ret = av_frame_ref(pict, &v->sprite_output_frame)) < 0)
+        if ((ret = av_frame_ref(pict, v->sprite_output_frame)) < 0)
             goto err;
         *got_frame = 1;
     } else {
...