Commit b4c915f4 authored by Mark Thompson

lavc: Add coded bitstream read/write support for H.264

(cherry picked from commit acf06f45)
(cherry picked from commit 768eb918)
(cherry picked from commit e7f64191)
parent 6734eef6
......@@ -2151,6 +2151,7 @@ CONFIG_EXTRA="
bswapdsp
cabac
cbs
cbs_h264
dirac_parse
dvprofile
exif
......@@ -2404,6 +2405,7 @@ w32threads_deps="atomics_native"
threads_if_any="$THREADS_LIST"
# subsystems
cbs_h264_select="cbs golomb"
dct_select="rdft"
dirac_parse_select="golomb"
error_resilience_select="me_cmp"
......
......@@ -60,6 +60,7 @@ OBJS-$(CONFIG_BLOCKDSP) += blockdsp.o
OBJS-$(CONFIG_BSWAPDSP) += bswapdsp.o
OBJS-$(CONFIG_CABAC) += cabac.o
OBJS-$(CONFIG_CBS) += cbs.o
OBJS-$(CONFIG_CBS_H264) += cbs_h2645.o h2645_parse.o
OBJS-$(CONFIG_CRYSTALHD) += crystalhd.o
OBJS-$(CONFIG_DCT) += dct.o dct32_fixed.o dct32_float.o
OBJS-$(CONFIG_ERROR_RESILIENCE) += error_resilience.o
......
......@@ -28,6 +28,9 @@
static const CodedBitstreamType *cbs_type_table[] = {
#if CONFIG_CBS_H264
&ff_cbs_type_h264,
#endif
};
int ff_cbs_init(CodedBitstreamContext **ctx_ptr,
......
......@@ -42,6 +42,8 @@ struct CodedBitstreamType;
/**
* The codec-specific type of a bitstream unit.
*
* H.264 / AVC: nal_unit_type
*/
typedef uint32_t CodedBitstreamUnitType;
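/* For example (illustrative note, not in the original header): for H.264 the
 * unit type is the five-bit nal_unit_type from the NAL unit header, so an SPS
 * unit has type 7 (H264_NAL_SPS) and a PPS unit has type 8 (H264_NAL_PPS). */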
......
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_CBS_H264_H
#define AVCODEC_CBS_H264_H
#include <stddef.h>
#include <stdint.h>
#include "cbs_h2645.h"
#include "h264.h"
enum {
// This limit is arbitrary - it is sufficient for one message of each
// type plus some repeats, and will therefore easily cover all sane
// streams. However, it is possible to make technically-valid streams
// for which it will fail (for example, by including a large number of
// user-data-unregistered messages).
H264_MAX_SEI_PAYLOADS = 64,
};
typedef struct H264RawNALUnitHeader {
uint8_t forbidden_zero_bit;
uint8_t nal_ref_idc;
uint8_t nal_unit_type;
uint8_t svc_extension_flag;
uint8_t avc_3d_extension_flag;
} H264RawNALUnitHeader;
typedef struct H264RawScalingList {
int8_t delta_scale[64];
} H264RawScalingList;
typedef struct H264RawHRD {
uint8_t cpb_cnt_minus1;
uint8_t bit_rate_scale;
uint8_t cpb_size_scale;
uint32_t bit_rate_value_minus1[H264_MAX_CPB_CNT];
uint32_t cpb_size_value_minus1[H264_MAX_CPB_CNT];
uint8_t cbr_flag[H264_MAX_CPB_CNT];
uint8_t initial_cpb_removal_delay_length_minus1;
uint8_t cpb_removal_delay_length_minus1;
uint8_t dpb_output_delay_length_minus1;
uint8_t time_offset_length;
} H264RawHRD;
typedef struct H264RawVUI {
uint8_t aspect_ratio_info_present_flag;
uint8_t aspect_ratio_idc;
uint16_t sar_width;
uint16_t sar_height;
uint8_t overscan_info_present_flag;
uint8_t overscan_appropriate_flag;
uint8_t video_signal_type_present_flag;
uint8_t video_format;
uint8_t video_full_range_flag;
uint8_t colour_description_present_flag;
uint8_t colour_primaries;
uint8_t transfer_characteristics;
uint8_t matrix_coefficients;
uint8_t chroma_loc_info_present_flag;
uint8_t chroma_sample_loc_type_top_field;
uint8_t chroma_sample_loc_type_bottom_field;
uint8_t timing_info_present_flag;
uint32_t num_units_in_tick;
uint32_t time_scale;
uint8_t fixed_frame_rate_flag;
uint8_t nal_hrd_parameters_present_flag;
H264RawHRD nal_hrd_parameters;
uint8_t vcl_hrd_parameters_present_flag;
H264RawHRD vcl_hrd_parameters;
uint8_t low_delay_hrd_flag;
uint8_t pic_struct_present_flag;
uint8_t bitstream_restriction_flag;
uint8_t motion_vectors_over_pic_boundaries_flag;
uint8_t max_bytes_per_pic_denom;
uint8_t max_bits_per_mb_denom;
uint8_t log2_max_mv_length_horizontal;
uint8_t log2_max_mv_length_vertical;
uint8_t max_num_reorder_frames;
uint8_t max_dec_frame_buffering;
} H264RawVUI;
typedef struct H264RawSPS {
H264RawNALUnitHeader nal_unit_header;
uint8_t profile_idc;
uint8_t constraint_set0_flag;
uint8_t constraint_set1_flag;
uint8_t constraint_set2_flag;
uint8_t constraint_set3_flag;
uint8_t constraint_set4_flag;
uint8_t constraint_set5_flag;
uint8_t reserved_zero_2bits;
uint8_t level_idc;
uint8_t seq_parameter_set_id;
uint8_t chroma_format_idc;
uint8_t separate_colour_plane_flag;
uint8_t bit_depth_luma_minus8;
uint8_t bit_depth_chroma_minus8;
uint8_t qpprime_y_zero_transform_bypass_flag;
uint8_t seq_scaling_matrix_present_flag;
uint8_t seq_scaling_list_present_flag[12];
H264RawScalingList scaling_list_4x4[6];
H264RawScalingList scaling_list_8x8[6];
uint8_t log2_max_frame_num_minus4;
uint8_t pic_order_cnt_type;
uint8_t log2_max_pic_order_cnt_lsb_minus4;
uint8_t delta_pic_order_always_zero_flag;
int32_t offset_for_non_ref_pic;
int32_t offset_for_top_to_bottom_field;
uint8_t num_ref_frames_in_pic_order_cnt_cycle;
int32_t offset_for_ref_frame[256];
uint8_t max_num_ref_frames;
uint8_t gaps_in_frame_num_allowed_flag;
uint16_t pic_width_in_mbs_minus1;
uint16_t pic_height_in_map_units_minus1;
uint8_t frame_mbs_only_flag;
uint8_t mb_adaptive_frame_field_flag;
uint8_t direct_8x8_inference_flag;
uint8_t frame_cropping_flag;
uint16_t frame_crop_left_offset;
uint16_t frame_crop_right_offset;
uint16_t frame_crop_top_offset;
uint16_t frame_crop_bottom_offset;
uint8_t vui_parameters_present_flag;
H264RawVUI vui;
} H264RawSPS;
typedef struct H264RawSPSExtension {
H264RawNALUnitHeader nal_unit_header;
uint8_t seq_parameter_set_id;
uint8_t aux_format_idc;
uint8_t bit_depth_aux_minus8;
uint8_t alpha_incr_flag;
uint16_t alpha_opaque_value;
uint16_t alpha_transparent_value;
uint8_t additional_extension_flag;
} H264RawSPSExtension;
typedef struct H264RawPPS {
H264RawNALUnitHeader nal_unit_header;
uint8_t pic_parameter_set_id;
uint8_t seq_parameter_set_id;
uint8_t entropy_coding_mode_flag;
uint8_t bottom_field_pic_order_in_frame_present_flag;
uint8_t num_slice_groups_minus1;
uint8_t slice_group_map_type;
uint16_t run_length_minus1[H264_MAX_SLICE_GROUPS];
uint16_t top_left[H264_MAX_SLICE_GROUPS];
uint16_t bottom_right[H264_MAX_SLICE_GROUPS];
uint8_t slice_group_change_direction_flag;
uint16_t slice_group_change_rate_minus1;
uint16_t pic_size_in_map_units_minus1;
uint8_t slice_group_id[H264_MAX_MB_PIC_SIZE];
uint8_t num_ref_idx_l0_default_active_minus1;
uint8_t num_ref_idx_l1_default_active_minus1;
uint8_t weighted_pred_flag;
uint8_t weighted_bipred_idc;
int8_t pic_init_qp_minus26;
int8_t pic_init_qs_minus26;
int8_t chroma_qp_index_offset;
uint8_t deblocking_filter_control_present_flag;
uint8_t constrained_intra_pred_flag;
uint8_t more_rbsp_data;
uint8_t redundant_pic_cnt_present_flag;
uint8_t transform_8x8_mode_flag;
uint8_t pic_scaling_matrix_present_flag;
uint8_t pic_scaling_list_present_flag[12];
H264RawScalingList scaling_list_4x4[6];
H264RawScalingList scaling_list_8x8[6];
int8_t second_chroma_qp_index_offset;
} H264RawPPS;
typedef struct H264RawAUD {
H264RawNALUnitHeader nal_unit_header;
uint8_t primary_pic_type;
} H264RawAUD;
typedef struct H264RawSEIBufferingPeriod {
uint8_t seq_parameter_set_id;
struct {
uint32_t initial_cpb_removal_delay[H264_MAX_CPB_CNT];
uint32_t initial_cpb_removal_delay_offset[H264_MAX_CPB_CNT];
} nal, vcl;
} H264RawSEIBufferingPeriod;
typedef struct H264RawSEIPicTimestamp {
uint8_t ct_type;
uint8_t nuit_field_based_flag;
uint8_t counting_type;
uint8_t full_timestamp_flag;
uint8_t discontinuity_flag;
uint8_t cnt_dropped_flag;
uint8_t n_frames;
uint8_t seconds_flag;
uint8_t seconds_value;
uint8_t minutes_flag;
uint8_t minutes_value;
uint8_t hours_flag;
uint8_t hours_value;
uint32_t time_offset;
} H264RawSEIPicTimestamp;
typedef struct H264RawSEIPicTiming {
uint32_t cpb_removal_delay;
uint32_t dpb_output_delay;
uint8_t pic_struct;
uint8_t clock_timestamp_flag[3];
H264RawSEIPicTimestamp timestamp[3];
} H264RawSEIPicTiming;
typedef struct H264RawSEIUserDataRegistered {
uint8_t itu_t_t35_country_code;
uint8_t itu_t_t35_country_code_extension_byte;
uint8_t *data;
size_t data_length;
} H264RawSEIUserDataRegistered;
typedef struct H264RawSEIUserDataUnregistered {
uint8_t uuid_iso_iec_11578[16];
uint8_t *data;
size_t data_length;
} H264RawSEIUserDataUnregistered;
typedef struct H264RawSEIRecoveryPoint {
uint16_t recovery_frame_cnt;
uint8_t exact_match_flag;
uint8_t broken_link_flag;
uint8_t changing_slice_group_idc;
} H264RawSEIRecoveryPoint;
typedef struct H264RawSEIDisplayOrientation {
uint8_t display_orientation_cancel_flag;
uint8_t hor_flip;
uint8_t ver_flip;
uint16_t anticlockwise_rotation;
uint16_t display_orientation_repetition_period;
uint8_t display_orientation_extension_flag;
} H264RawSEIDisplayOrientation;
typedef struct H264RawSEIPayload {
uint32_t payload_type;
uint32_t payload_size;
union {
H264RawSEIBufferingPeriod buffering_period;
H264RawSEIPicTiming pic_timing;
// H264RawSEIFiller filler -> no fields.
H264RawSEIUserDataRegistered user_data_registered;
H264RawSEIUserDataUnregistered user_data_unregistered;
H264RawSEIRecoveryPoint recovery_point;
H264RawSEIDisplayOrientation display_orientation;
struct {
uint8_t *data;
size_t data_length;
} other;
} payload;
} H264RawSEIPayload;
typedef struct H264RawSEI {
H264RawNALUnitHeader nal_unit_header;
H264RawSEIPayload payload[H264_MAX_SEI_PAYLOADS];
uint8_t payload_count;
} H264RawSEI;
typedef struct H264RawSliceHeader {
H264RawNALUnitHeader nal_unit_header;
uint32_t first_mb_in_slice;
uint8_t slice_type;
uint8_t pic_parameter_set_id;
uint8_t colour_plane_id;
uint16_t frame_num;
uint8_t field_pic_flag;
uint8_t bottom_field_flag;
uint16_t idr_pic_id;
uint16_t pic_order_cnt_lsb;
int32_t delta_pic_order_cnt_bottom;
int32_t delta_pic_order_cnt[2];
uint8_t redundant_pic_cnt;
uint8_t direct_spatial_mv_pred_flag;
uint8_t num_ref_idx_active_override_flag;
uint8_t num_ref_idx_l0_active_minus1;
uint8_t num_ref_idx_l1_active_minus1;
uint8_t ref_pic_list_modification_flag_l0;
uint8_t ref_pic_list_modification_flag_l1;
struct {
uint8_t modification_of_pic_nums_idc;
int32_t abs_diff_pic_num_minus1;
uint8_t long_term_pic_num;
} rplm_l0[H264_MAX_RPLM_COUNT], rplm_l1[H264_MAX_RPLM_COUNT];
uint8_t luma_log2_weight_denom;
uint8_t chroma_log2_weight_denom;
uint8_t luma_weight_l0_flag[H264_MAX_REFS];
int8_t luma_weight_l0[H264_MAX_REFS];
int8_t luma_offset_l0[H264_MAX_REFS];
uint8_t chroma_weight_l0_flag[H264_MAX_REFS];
int8_t chroma_weight_l0[H264_MAX_REFS][2];
int8_t chroma_offset_l0[H264_MAX_REFS][2];
uint8_t luma_weight_l1_flag[H264_MAX_REFS];
int8_t luma_weight_l1[H264_MAX_REFS];
int8_t luma_offset_l1[H264_MAX_REFS];
uint8_t chroma_weight_l1_flag[H264_MAX_REFS];
int8_t chroma_weight_l1[H264_MAX_REFS][2];
int8_t chroma_offset_l1[H264_MAX_REFS][2];
uint8_t no_output_of_prior_pics_flag;
uint8_t long_term_reference_flag;
uint8_t adaptive_ref_pic_marking_mode_flag;
struct {
uint8_t memory_management_control_operation;
int32_t difference_of_pic_nums_minus1;
uint8_t long_term_pic_num;
uint8_t long_term_frame_idx;
uint8_t max_long_term_frame_idx_plus1;
} mmco[H264_MAX_MMCO_COUNT];
uint8_t cabac_init_idc;
int8_t slice_qp_delta;
uint8_t sp_for_switch_flag;
int8_t slice_qs_delta;
uint8_t disable_deblocking_filter_idc;
int8_t slice_alpha_c0_offset_div2;
int8_t slice_beta_offset_div2;
uint16_t slice_group_change_cycle;
} H264RawSliceHeader;
typedef struct H264RawSlice {
H264RawSliceHeader header;
uint8_t *data;
size_t data_size;
int data_bit_start;
} H264RawSlice;
typedef struct CodedBitstreamH264Context {
// Reader/writer context in common with the H.265 implementation.
CodedBitstreamH2645Context common;
// All currently available parameter sets. These are updated when
// any parameter set NAL unit is read/written with this context.
H264RawSPS *sps[H264_MAX_SPS_COUNT];
H264RawPPS *pps[H264_MAX_PPS_COUNT];
// The currently active parameter sets. These are updated when any
// NAL unit refers to the relevant parameter set. These pointers
// must also be present in the arrays above.
const H264RawSPS *active_sps;
const H264RawPPS *active_pps;
// The NAL unit type of the most recent normal slice. This is required
// to be able to read/write auxiliary slices, because IdrPicFlag is
// otherwise unknown.
uint8_t last_slice_nal_unit_type;
} CodedBitstreamH264Context;
#endif /* AVCODEC_CBS_H264_H */
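/*
 * Illustrative usage sketch, not part of the original commit: how a caller
 * might read one H.264 packet through the CBS API and log the dimensions
 * coded in any SPS it contains. It assumes avcodec.h, cbs.h and cbs_h264.h
 * are included, and that ff_cbs_read_packet(), ff_cbs_fragment_uninit() and
 * ff_cbs_close() have the signatures declared in cbs.h alongside
 * ff_cbs_init(); treat this as a sketch of the intended usage, not as part
 * of the API itself.
 */
static int sketch_log_sps(void *log_ctx, const AVPacket *pkt)
{
    CodedBitstreamContext *cbc = NULL;
    CodedBitstreamFragment au  = { 0 };
    int err, i;

    err = ff_cbs_init(&cbc, AV_CODEC_ID_H264, log_ctx);
    if (err < 0)
        return err;

    err = ff_cbs_read_packet(cbc, &au, pkt);
    if (err < 0)
        goto end;

    for (i = 0; i < au.nb_units; i++) {
        if (au.units[i].type == H264_NAL_SPS) {
            // For an SPS unit, unit->content points at an H264RawSPS.
            const H264RawSPS *sps = au.units[i].content;
            av_log(log_ctx, AV_LOG_INFO, "SPS %d: %d x %d macroblocks.\n",
                   sps->seq_parameter_set_id,
                   sps->pic_width_in_mbs_minus1 + 1,
                   sps->pic_height_in_map_units_minus1 + 1);
        }
    }

end:
    ff_cbs_fragment_uninit(cbc, &au);
    ff_cbs_close(&cbc);
    return err;
}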
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include "libavutil/attributes.h"
#include "libavutil/avassert.h"
#include "bytestream.h"
#include "cbs.h"
#include "cbs_internal.h"
#include "cbs_h264.h"
#include "golomb.h"
#include "h264.h"
#include "h264_sei.h"
#include "h2645_parse.h"
static int cbs_read_ue_golomb(CodedBitstreamContext *ctx, GetBitContext *gbc,
const char *name, uint32_t *write_to,
uint32_t range_min, uint32_t range_max)
{
uint32_t value;
int position, i, j;
unsigned int k;
char bits[65];
position = get_bits_count(gbc);
for (i = 0; i < 32; i++) {
if (get_bits_left(gbc) < i + 1) {
av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid ue-golomb code at "
"%s: bitstream ended.\n", name);
return AVERROR_INVALIDDATA;
}
k = get_bits1(gbc);
bits[i] = k ? '1' : '0';
if (k)
break;
}
if (i >= 32) {
av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid ue-golomb code at "
"%s: more than 31 zeroes.\n", name);
return AVERROR_INVALIDDATA;
}
value = 1;
for (j = 0; j < i; j++) {
k = get_bits1(gbc);
bits[i + j + 1] = k ? '1' : '0';
value = value << 1 | k;
}
bits[i + j + 1] = 0;
--value;
if (ctx->trace_enable)
ff_cbs_trace_syntax_element(ctx, position, name, bits, value);
if (value < range_min || value > range_max) {
av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: "
"%"PRIu32", but must be in [%"PRIu32",%"PRIu32"].\n",
name, value, range_min, range_max);
return AVERROR_INVALIDDATA;
}
*write_to = value;
return 0;
}
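/*
 * Worked example (illustrative): the ue(v) bit string "00100" has two leading
 * zero bits, so the loop above stops with i == 2; the two suffix bits "00"
 * give value = ((1 << 2) | 0) - 1 = 3, i.e. Exp-Golomb codeNum 3. Likewise
 * "1" decodes to 0, "010" to 1 and "011" to 2.
 */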
static int cbs_read_se_golomb(CodedBitstreamContext *ctx, GetBitContext *gbc,
const char *name, int32_t *write_to,
int32_t range_min, int32_t range_max)
{
int32_t value;
int position, i, j;
unsigned int k;
uint32_t v;
char bits[65];
position = get_bits_count(gbc);
for (i = 0; i < 32; i++) {
if (get_bits_left(gbc) < i + 1) {
av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid se-golomb code at "
"%s: bitstream ended.\n", name);
return AVERROR_INVALIDDATA;
}
k = get_bits1(gbc);
bits[i] = k ? '1' : '0';
if (k)
break;
}
if (i >= 32) {
av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid se-golomb code at "
"%s: more than 31 zeroes.\n", name);
return AVERROR_INVALIDDATA;
}
v = 1;
for (j = 0; j < i; j++) {
k = get_bits1(gbc);
bits[i + j + 1] = k ? '1' : '0';
v = v << 1 | k;
}
bits[i + j + 1] = 0;
if (v & 1)
value = -(int32_t)(v / 2);
else
value = v / 2;
if (ctx->trace_enable)
ff_cbs_trace_syntax_element(ctx, position, name, bits, value);
if (value < range_min || value > range_max) {
av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: "
"%"PRId32", but must be in [%"PRId32",%"PRId32"].\n",
name, value, range_min, range_max);
return AVERROR_INVALIDDATA;
}
*write_to = value;
return 0;
}
static int cbs_write_ue_golomb(CodedBitstreamContext *ctx, PutBitContext *pbc,
const char *name, uint32_t value,
uint32_t range_min, uint32_t range_max)
{
int len;
if (value < range_min || value > range_max) {
av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: "
"%"PRIu32", but must be in [%"PRIu32",%"PRIu32"].\n",
name, value, range_min, range_max);
return AVERROR_INVALIDDATA;
}
av_assert0(value != UINT32_MAX);
len = av_log2(value + 1);
if (put_bits_left(pbc) < 2 * len + 1)
return AVERROR(ENOSPC);
if (ctx->trace_enable) {
char bits[65];
int i;
for (i = 0; i < len; i++)
bits[i] = '0';
bits[len] = '1';
for (i = 0; i < len; i++)
bits[len + i + 1] = (value + 1) >> (len - i - 1) & 1 ? '1' : '0';
bits[len + len + 1] = 0;
ff_cbs_trace_syntax_element(ctx, put_bits_count(pbc), name, bits, value);
}
put_bits(pbc, len, 0);
if (len + 1 < 32)
put_bits(pbc, len + 1, value + 1);
else
put_bits32(pbc, value + 1);
return 0;
}
static int cbs_write_se_golomb(CodedBitstreamContext *ctx, PutBitContext *pbc,
const char *name, int32_t value,
int32_t range_min, int32_t range_max)
{
int len;
uint32_t uvalue;
if (value < range_min || value > range_max) {
av_log(ctx->log_ctx, AV_LOG_ERROR, "%s out of range: "
"%"PRId32", but must be in [%"PRId32",%"PRId32"].\n",
name, value, range_min, range_max);
return AVERROR_INVALIDDATA;
}
av_assert0(value != INT32_MIN);
if (value == 0)
uvalue = 0;
else if (value > 0)
uvalue = 2 * (uint32_t)value - 1;
else
uvalue = 2 * (uint32_t)-value;
len = av_log2(uvalue + 1);
if (put_bits_left(pbc) < 2 * len + 1)
return AVERROR(ENOSPC);
if (ctx->trace_enable) {
char bits[65];
int i;
for (i = 0; i < len; i++)
bits[i] = '0';
bits[len] = '1';
for (i = 0; i < len; i++)
bits[len + i + 1] = (uvalue + 1) >> (len - i - 1) & 1 ? '1' : '0';
bits[len + len + 1] = 0;
ff_cbs_trace_syntax_element(ctx, put_bits_count(pbc), name, bits, value);
}
put_bits(pbc, len, 0);
if (len + 1 < 32)
put_bits(pbc, len + 1, uvalue + 1);
else
put_bits32(pbc, uvalue + 1);
return 0;
}
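/*
 * Worked example (illustrative): cbs_write_se_golomb() first maps the signed
 * value onto the unsigned codeNum written by the ue(v) logic, so
 * 0 -> "1", +1 -> "010", -1 -> "011", +2 -> "00100" and -2 -> "00101",
 * matching the se(v) mapping in the H.264 specification.
 */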
#define HEADER(name) do { \
ff_cbs_trace_header(ctx, name); \
} while (0)
#define CHECK(call) do { \
err = (call); \
if (err < 0) \
return err; \
} while (0)
#define FUNC_NAME(rw, codec, name) cbs_ ## codec ## _ ## rw ## _ ## name
#define FUNC_H264(rw, name) FUNC_NAME(rw, h264, name)
#define READ
#define READWRITE read
#define RWContext GetBitContext
#define xu(width, name, var, range_min, range_max) do { \
uint32_t value = range_min; \
CHECK(ff_cbs_read_unsigned(ctx, rw, width, #name, \
&value, range_min, range_max)); \
var = value; \
} while (0)
#define xue(name, var, range_min, range_max) do { \
uint32_t value = range_min; \
CHECK(cbs_read_ue_golomb(ctx, rw, #name, \
&value, range_min, range_max)); \
var = value; \
} while (0)
#define xse(name, var, range_min, range_max) do { \
int32_t value = range_min; \
CHECK(cbs_read_se_golomb(ctx, rw, #name, \
&value, range_min, range_max)); \
var = value; \
} while (0)
#define u(width, name, range_min, range_max) \
xu(width, name, current->name, range_min, range_max)
#define flag(name) u(1, name, 0, 1)
#define ue(name, range_min, range_max) \
xue(name, current->name, range_min, range_max)
#define se(name, range_min, range_max) \
xse(name, current->name, range_min, range_max)
#define infer(name, value) do { \
current->name = value; \
} while (0)
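/*
 * Illustrative expansion, not part of the original commit: with the READ
 * definitions above, a syntax-template line such as
 *     ue(seq_parameter_set_id, 0, 31);
 * expands (roughly) to
 *     do {
 *         uint32_t value = 0;
 *         CHECK(cbs_read_ue_golomb(ctx, rw, "seq_parameter_set_id",
 *                                  &value, 0, 31));
 *         current->seq_parameter_set_id = value;
 *     } while (0);
 * which is what lets the same template file be compiled once for reading
 * here and once for writing below.
 */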
static int cbs_h2645_read_more_rbsp_data(GetBitContext *gbc)
{
int bits_left = get_bits_left(gbc);
if (bits_left > 8)
return 1;
if (show_bits(gbc, bits_left) == 1 << (bits_left - 1))
return 0;
return 1;
}
#define more_rbsp_data(var) ((var) = cbs_h2645_read_more_rbsp_data(rw))
#define byte_alignment(rw) (get_bits_count(rw) % 8)
#define allocate(name, size) do { \
name = av_mallocz(size); \
if (!name) \
return AVERROR(ENOMEM); \
} while (0)
#define FUNC(name) FUNC_H264(READWRITE, name)
#include "cbs_h264_syntax_template.c"
#undef FUNC
#undef READ
#undef READWRITE
#undef RWContext
#undef xu
#undef xue
#undef xse
#undef u
#undef flag
#undef ue
#undef se
#undef infer
#undef more_rbsp_data
#undef byte_alignment
#undef allocate
#define WRITE
#define READWRITE write
#define RWContext PutBitContext
#define xu(width, name, var, range_min, range_max) do { \
uint32_t value = var; \
CHECK(ff_cbs_write_unsigned(ctx, rw, width, #name, \
value, range_min, range_max)); \
} while (0)
#define xue(name, var, range_min, range_max) do { \
uint32_t value = var; \
CHECK(cbs_write_ue_golomb(ctx, rw, #name, \
value, range_min, range_max)); \
} while (0)
#define xse(name, var, range_min, range_max) do { \
int32_t value = var; \
CHECK(cbs_write_se_golomb(ctx, rw, #name, \
value, range_min, range_max)); \
} while (0)
#define u(width, name, range_min, range_max) \
xu(width, name, current->name, range_min, range_max)
#define flag(name) u(1, name, 0, 1)
#define ue(name, range_min, range_max) \
xue(name, current->name, range_min, range_max)
#define se(name, range_min, range_max) \
xse(name, current->name, range_min, range_max)
#define infer(name, value) do { \
if (current->name != (value)) { \
av_log(ctx->log_ctx, AV_LOG_WARNING, "Warning: " \
"%s does not match inferred value: " \
"%"PRId64", but should be %"PRId64".\n", \
#name, (int64_t)current->name, (int64_t)(value)); \
} \
} while (0)
#define more_rbsp_data(var) (var)
#define byte_alignment(rw) (put_bits_count(rw) % 8)
#define allocate(name, size) do { \
if (!name) { \
av_log(ctx->log_ctx, AV_LOG_ERROR, "%s must be set " \
"for writing.\n", #name); \
return AVERROR_INVALIDDATA; \
} \
} while (0)
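/*
 * Illustrative expansion, not part of the original commit: under the WRITE
 * definitions above, the same template line instead calls
 *     cbs_write_ue_golomb(ctx, rw, "seq_parameter_set_id",
 *                         current->seq_parameter_set_id, 0, 31);
 * and infer() becomes a consistency check on the existing value rather than
 * an assignment.
 */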
#define FUNC(name) FUNC_H264(READWRITE, name)
#include "cbs_h264_syntax_template.c"
#undef FUNC
#undef WRITE
#undef READWRITE
#undef RWContext
#undef xu
#undef xue
#undef xse
#undef u
#undef flag
#undef ue
#undef se
#undef infer
#undef more_rbsp_data
#undef byte_alignment
#undef allocate
static void cbs_h264_free_sei(H264RawSEI *sei)
{
int i;
for (i = 0; i < sei->payload_count; i++) {
H264RawSEIPayload *payload = &sei->payload[i];
switch (payload->payload_type) {
case H264_SEI_TYPE_BUFFERING_PERIOD:
case H264_SEI_TYPE_PIC_TIMING:
case H264_SEI_TYPE_RECOVERY_POINT:
case H264_SEI_TYPE_DISPLAY_ORIENTATION:
break;
case H264_SEI_TYPE_USER_DATA_REGISTERED:
av_freep(&payload->payload.user_data_registered.data);
break;
case H264_SEI_TYPE_USER_DATA_UNREGISTERED:
av_freep(&payload->payload.user_data_unregistered.data);
break;
default:
av_freep(&payload->payload.other.data);
break;
}
}
}
static void cbs_h264_free_slice(H264RawSlice *slice)
{
av_freep(&slice->data);
}
static void cbs_h264_free_nal_unit(CodedBitstreamUnit *unit)
{
switch (unit->type) {
case H264_NAL_SEI:
cbs_h264_free_sei(unit->content);
break;
case H264_NAL_IDR_SLICE:
case H264_NAL_SLICE:
cbs_h264_free_slice(unit->content);
break;
}
av_freep(&unit->content);
}
static int cbs_h2645_fragment_add_nals(CodedBitstreamContext *ctx,
CodedBitstreamFragment *frag,
const H2645Packet *packet)
{
int err, i;
for (i = 0; i < packet->nb_nals; i++) {
const H2645NAL *nal = &packet->nals[i];
size_t size = nal->size;
uint8_t *data;
// Remove trailing zeroes.
while (size > 0 && nal->data[size - 1] == 0)
--size;
av_assert0(size > 0);
data = av_malloc(size + AV_INPUT_BUFFER_PADDING_SIZE);
if (!data)
return AVERROR(ENOMEM);
memcpy(data, nal->data, size);
memset(data + size, 0, AV_INPUT_BUFFER_PADDING_SIZE);
err = ff_cbs_insert_unit_data(ctx, frag, -1, nal->type,
data, size);
if (err < 0) {
av_freep(&data);
return err;
}
}
return 0;
}
static int cbs_h2645_split_fragment(CodedBitstreamContext *ctx,
CodedBitstreamFragment *frag,
int header)
{
enum AVCodecID codec_id = ctx->codec->codec_id;
CodedBitstreamH2645Context *priv = ctx->priv_data;
GetByteContext gbc;
int err;
av_assert0(frag->data && frag->nb_units == 0);
if (frag->data_size == 0)
return 0;
if (header && frag->data[0] && codec_id == AV_CODEC_ID_H264) {
// AVCC header.
size_t size, start, end;
int i, count, version;
priv->mp4 = 1;
bytestream2_init(&gbc, frag->data, frag->data_size);
if (bytestream2_get_bytes_left(&gbc) < 6)
return AVERROR_INVALIDDATA;
version = bytestream2_get_byte(&gbc);
if (version != 1) {
av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid AVCC header: "
"first byte %u.", version);
return AVERROR_INVALIDDATA;
}
bytestream2_skip(&gbc, 3);
priv->nal_length_size = (bytestream2_get_byte(&gbc) & 3) + 1;
// SPS array.
count = bytestream2_get_byte(&gbc) & 0x1f;
start = bytestream2_tell(&gbc);
for (i = 0; i < count; i++) {
if (bytestream2_get_bytes_left(&gbc) < 2 * (count - i))
return AVERROR_INVALIDDATA;
size = bytestream2_get_be16(&gbc);
if (bytestream2_get_bytes_left(&gbc) < size)
return AVERROR_INVALIDDATA;
bytestream2_skip(&gbc, size);
}
end = bytestream2_tell(&gbc);
err = ff_h2645_packet_split(&priv->read_packet,
frag->data + start, end - start,
ctx->log_ctx, 1, 2, AV_CODEC_ID_H264, 1);
if (err < 0) {
av_log(ctx->log_ctx, AV_LOG_ERROR, "Failed to split AVCC SPS array.\n");
return err;
}
err = cbs_h2645_fragment_add_nals(ctx, frag, &priv->read_packet);
if (err < 0)
return err;
// PPS array.
count = bytestream2_get_byte(&gbc);
start = bytestream2_tell(&gbc);
for (i = 0; i < count; i++) {
if (bytestream2_get_bytes_left(&gbc) < 2 * (count - i))
return AVERROR_INVALIDDATA;
size = bytestream2_get_be16(&gbc);
if (bytestream2_get_bytes_left(&gbc) < size)
return AVERROR_INVALIDDATA;
bytestream2_skip(&gbc, size);
}
end = bytestream2_tell(&gbc);
err = ff_h2645_packet_split(&priv->read_packet,
frag->data + start, end - start,
ctx->log_ctx, 1, 2, AV_CODEC_ID_H264, 1);
if (err < 0) {
av_log(ctx->log_ctx, AV_LOG_ERROR, "Failed to split AVCC PPS array.\n");
return err;
}
err = cbs_h2645_fragment_add_nals(ctx, frag, &priv->read_packet);
if (err < 0)
return err;
if (bytestream2_get_bytes_left(&gbc) > 0) {
av_log(ctx->log_ctx, AV_LOG_WARNING, "%u bytes left at end of AVCC "
"header.\n", bytestream2_get_bytes_left(&gbc));
}
} else {
// Annex B, or later MP4 with already-known parameters.
err = ff_h2645_packet_split(&priv->read_packet,
frag->data, frag->data_size,
ctx->log_ctx,
priv->mp4, priv->nal_length_size,
codec_id, 1);
if (err < 0)
return err;
err = cbs_h2645_fragment_add_nals(ctx, frag, &priv->read_packet);
if (err < 0)
return err;
}
return 0;
}
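/*
 * For reference (summarising the AVCDecoderConfigurationRecord layout from
 * ISO/IEC 14496-15 that the code above walks):
 *   byte 0     configurationVersion (must be 1)
 *   bytes 1-3  AVCProfileIndication, profile_compatibility, AVCLevelIndication
 *   byte 4     low two bits: lengthSizeMinusOne (NAL length field size - 1)
 *   byte 5     low five bits: numOfSequenceParameterSets
 *   then       that many (16-bit big-endian length + SPS NAL unit) pairs
 *   next byte  numOfPictureParameterSets
 *   then       that many (16-bit big-endian length + PPS NAL unit) pairs
 */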
#define cbs_h2645_replace_ps(h26n, ps_name, ps_var, id_element) \
static int cbs_h26 ## h26n ## _replace_ ## ps_var(CodedBitstreamContext *ctx, \
const H26 ## h26n ## Raw ## ps_name *ps_var) \
{ \
CodedBitstreamH26 ## h26n ## Context *priv = ctx->priv_data; \
unsigned int id = ps_var->id_element; \
if (id >= FF_ARRAY_ELEMS(priv->ps_var)) { \
av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid " #ps_name \
" id : %d.\n", id); \
return AVERROR_INVALIDDATA; \
} \
av_freep(&priv->ps_var[id]); \
priv->ps_var[id] = av_malloc(sizeof(*ps_var)); \
if (!priv->ps_var[id]) \
return AVERROR(ENOMEM); \
memcpy(priv->ps_var[id], ps_var, sizeof(*ps_var)); \
return 0; \
}
cbs_h2645_replace_ps(4, SPS, sps, seq_parameter_set_id)
cbs_h2645_replace_ps(4, PPS, pps, pic_parameter_set_id)
static int cbs_h264_read_nal_unit(CodedBitstreamContext *ctx,
CodedBitstreamUnit *unit)
{
GetBitContext gbc;
int err;
err = init_get_bits(&gbc, unit->data, 8 * unit->data_size);
if (err < 0)
return err;
switch (unit->type) {
case H264_NAL_SPS:
{
H264RawSPS *sps;
sps = av_mallocz(sizeof(*sps));
if (!sps)
return AVERROR(ENOMEM);
err = cbs_h264_read_sps(ctx, &gbc, sps);
if (err >= 0)
err = cbs_h264_replace_sps(ctx, sps);
if (err < 0) {
av_free(sps);
return err;
}
unit->content = sps;
}
break;
case H264_NAL_SPS_EXT:
{
H264RawSPSExtension *sps_ext;
sps_ext = av_mallocz(sizeof(*sps_ext));
if (!sps_ext)
return AVERROR(ENOMEM);
err = cbs_h264_read_sps_extension(ctx, &gbc, sps_ext);
if (err < 0) {
av_free(sps_ext);
return err;
}
unit->content = sps_ext;
}
break;
case H264_NAL_PPS:
{
H264RawPPS *pps;
pps = av_mallocz(sizeof(*pps));
if (!pps)
return AVERROR(ENOMEM);
err = cbs_h264_read_pps(ctx, &gbc, pps);
if (err >= 0)
err = cbs_h264_replace_pps(ctx, pps);
if (err < 0) {
av_free(pps);
return err;
}
unit->content = pps;
}
break;
case H264_NAL_SLICE:
case H264_NAL_IDR_SLICE:
case H264_NAL_AUXILIARY_SLICE:
{
H264RawSlice *slice;
int pos, len;
slice = av_mallocz(sizeof(*slice));
if (!slice)
return AVERROR(ENOMEM);
err = cbs_h264_read_slice_header(ctx, &gbc, &slice->header);
if (err < 0) {
av_free(slice);
return err;
}
pos = get_bits_count(&gbc);
len = unit->data_size;
if (!unit->data[len - 1]) {
int z;
for (z = 0; z < len && !unit->data[len - z - 1]; z++);
av_log(ctx->log_ctx, AV_LOG_DEBUG, "Deleted %d trailing zeroes "
"from slice data.\n", z);
len -= z;
}
slice->data_size = len - pos / 8;
slice->data = av_malloc(slice->data_size +
AV_INPUT_BUFFER_PADDING_SIZE);
if (!slice->data) {
av_free(slice);
return AVERROR(ENOMEM);
}
memcpy(slice->data,
unit->data + pos / 8, slice->data_size);
memset(slice->data + slice->data_size, 0,
AV_INPUT_BUFFER_PADDING_SIZE);
slice->data_bit_start = pos % 8;
unit->content = slice;
}
break;
case H264_NAL_AUD:
{
H264RawAUD *aud;
aud = av_mallocz(sizeof(*aud));
if (!aud)
return AVERROR(ENOMEM);
err = cbs_h264_read_aud(ctx, &gbc, aud);
if (err < 0) {
av_free(aud);
return err;
}
unit->content = aud;
}
break;
case H264_NAL_SEI:
{
H264RawSEI *sei;
sei = av_mallocz(sizeof(*sei));
if (!sei)
return AVERROR(ENOMEM);
err = cbs_h264_read_sei(ctx, &gbc, sei);
if (err < 0) {
cbs_h264_free_sei(sei);
return err;
}
unit->content = sei;
}
break;
default:
return AVERROR(ENOSYS);
}
return 0;
}
static int cbs_h264_write_nal_unit(CodedBitstreamContext *ctx,
CodedBitstreamUnit *unit,
PutBitContext *pbc)
{
int err;
switch (unit->type) {
case H264_NAL_SPS:
{
H264RawSPS *sps = unit->content;
err = cbs_h264_write_sps(ctx, pbc, sps);
if (err < 0)
return err;
err = cbs_h264_replace_sps(ctx, sps);
if (err < 0)
return err;
}
break;
case H264_NAL_SPS_EXT:
{
H264RawSPSExtension *sps_ext = unit->content;
err = cbs_h264_write_sps_extension(ctx, pbc, sps_ext);
if (err < 0)
return err;
}
break;
case H264_NAL_PPS:
{
H264RawPPS *pps = unit->content;
err = cbs_h264_write_pps(ctx, pbc, pps);
if (err < 0)
return err;
err = cbs_h264_replace_pps(ctx, pps);
if (err < 0)
return err;
}
break;
case H264_NAL_SLICE:
case H264_NAL_IDR_SLICE:
{
H264RawSlice *slice = unit->content;
GetBitContext gbc;
int bits_left, end, zeroes;
err = cbs_h264_write_slice_header(ctx, pbc, &slice->header);
if (err < 0)
return err;
if (slice->data) {
if (slice->data_size * 8 + 8 > put_bits_left(pbc))
return AVERROR(ENOSPC);
init_get_bits(&gbc, slice->data, slice->data_size * 8);
skip_bits_long(&gbc, slice->data_bit_start);
// Copy in two-byte blocks, but stop before copying the
// rbsp_stop_one_bit in the final byte.
while (get_bits_left(&gbc) > 23)
put_bits(pbc, 16, get_bits(&gbc, 16));
bits_left = get_bits_left(&gbc);
end = get_bits(&gbc, bits_left);
// rbsp_stop_one_bit must be present here.
av_assert0(end);
zeroes = ff_ctz(end);
if (bits_left > zeroes + 1)
put_bits(pbc, bits_left - zeroes - 1,
end >> (zeroes + 1));
put_bits(pbc, 1, 1);
while (put_bits_count(pbc) % 8 != 0)
put_bits(pbc, 1, 0);
} else {
// No slice data - that was just the header.
// (Bitstream may be unaligned!)
}
}
break;
case H264_NAL_AUD:
{
err = cbs_h264_write_aud(ctx, pbc, unit->content);
if (err < 0)
return err;
}
break;
case H264_NAL_SEI:
{
err = cbs_h264_write_sei(ctx, pbc, unit->content);
if (err < 0)
return err;
}
break;
default:
av_log(ctx->log_ctx, AV_LOG_ERROR, "Write unimplemented for "
"NAL unit type %"PRIu32".\n", unit->type);
return AVERROR_PATCHWELCOME;
}
return 0;
}
static int cbs_h2645_write_nal_unit(CodedBitstreamContext *ctx,
CodedBitstreamUnit *unit)
{
CodedBitstreamH2645Context *priv = ctx->priv_data;
enum AVCodecID codec_id = ctx->codec->codec_id;
PutBitContext pbc;
int err;
if (!priv->write_buffer) {
// Initial write buffer size is 1MB.
priv->write_buffer_size = 1024 * 1024;
reallocate_and_try_again:
err = av_reallocp(&priv->write_buffer, priv->write_buffer_size);
if (err < 0) {
av_log(ctx->log_ctx, AV_LOG_ERROR, "Unable to allocate a "
"sufficiently large write buffer (last attempt "
"%zu bytes).\n", priv->write_buffer_size);
return err;
}
}
init_put_bits(&pbc, priv->write_buffer, priv->write_buffer_size);
err = cbs_h264_write_nal_unit(ctx, unit, &pbc);
if (err == AVERROR(ENOSPC)) {
// Overflow.
priv->write_buffer_size *= 2;
goto reallocate_and_try_again;
}
// Overflow but we didn't notice.
av_assert0(put_bits_count(&pbc) <= 8 * priv->write_buffer_size);
if (err < 0) {
// Write failed for some other reason.
return err;
}
if (put_bits_count(&pbc) % 8)
unit->data_bit_padding = 8 - put_bits_count(&pbc) % 8;
else
unit->data_bit_padding = 0;
unit->data_size = (put_bits_count(&pbc) + 7) / 8;
flush_put_bits(&pbc);
err = av_reallocp(&unit->data, unit->data_size);
if (err < 0)
return err;
memcpy(unit->data, priv->write_buffer, unit->data_size);
return 0;
}
static int cbs_h2645_assemble_fragment(CodedBitstreamContext *ctx,
CodedBitstreamFragment *frag)
{
uint8_t *data;
size_t max_size, dp, sp;
int err, i, zero_run;
for (i = 0; i < frag->nb_units; i++) {
// Data should already all have been written when we get here.
av_assert0(frag->units[i].data);
}
max_size = 0;
for (i = 0; i < frag->nb_units; i++) {
// Start code + content with worst-case emulation prevention.
max_size += 3 + frag->units[i].data_size * 3 / 2;
}
data = av_malloc(max_size);
if (!data)
return AVERROR(ENOMEM);
dp = 0;
for (i = 0; i < frag->nb_units; i++) {
CodedBitstreamUnit *unit = &frag->units[i];
if (unit->data_bit_padding > 0) {
if (i < frag->nb_units - 1)
av_log(ctx->log_ctx, AV_LOG_WARNING, "Probably invalid "
"unaligned padding on non-final NAL unit.\n");
else
frag->data_bit_padding = unit->data_bit_padding;
}
if (unit->type == H264_NAL_SPS ||
unit->type == H264_NAL_PPS ||
i == 0 /* (Assume this is the start of an access unit.) */) {
// zero_byte
data[dp++] = 0;
}
// start_code_prefix_one_3bytes
data[dp++] = 0;
data[dp++] = 0;
data[dp++] = 1;
zero_run = 0;
for (sp = 0; sp < unit->data_size; sp++) {
if (zero_run < 2) {
if (unit->data[sp] == 0)
++zero_run;
else
zero_run = 0;
} else {
if ((unit->data[sp] & ~3) == 0) {
// emulation_prevention_three_byte
data[dp++] = 3;
}
zero_run = unit->data[sp] == 0;
}
data[dp++] = unit->data[sp];
}
}
av_assert0(dp <= max_size);
err = av_reallocp(&data, dp);
if (err)
return err;
frag->data = data;
frag->data_size = dp;
return 0;
}
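/*
 * Worked example (illustrative): with the escaping loop above, RBSP bytes
 *   ... 00 00 01 ...  are emitted as  ... 00 00 03 01 ...
 * and
 *   ... 00 00 00 ...  are emitted as  ... 00 00 03 00 ...
 * because any byte in the range 0-3 that follows two zero bytes has an
 * emulation_prevention_three_byte inserted before it. A zero_byte is also
 * written ahead of the 00 00 01 start code for SPS and PPS NAL units and for
 * the first NAL unit of the fragment.
 */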
static void cbs_h264_close(CodedBitstreamContext *ctx)
{
CodedBitstreamH264Context *h264 = ctx->priv_data;
int i;
ff_h2645_packet_uninit(&h264->common.read_packet);
av_freep(&h264->common.write_buffer);
for (i = 0; i < FF_ARRAY_ELEMS(h264->sps); i++)
av_freep(&h264->sps[i]);
for (i = 0; i < FF_ARRAY_ELEMS(h264->pps); i++)
av_freep(&h264->pps[i]);
}
const CodedBitstreamType ff_cbs_type_h264 = {
.codec_id = AV_CODEC_ID_H264,
.priv_data_size = sizeof(CodedBitstreamH264Context),
.split_fragment = &cbs_h2645_split_fragment,
.read_unit = &cbs_h264_read_nal_unit,
.write_unit = &cbs_h2645_write_nal_unit,
.assemble_fragment = &cbs_h2645_assemble_fragment,
.free_unit = &cbs_h264_free_nal_unit,
.close = &cbs_h264_close,
};
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#ifndef AVCODEC_CBS_H2645_H
#define AVCODEC_CBS_H2645_H
#include <stddef.h>
#include <stdint.h>
#include "h2645_parse.h"
typedef struct CodedBitstreamH2645Context {
// If set, the stream being read is in MP4 (AVCC/HVCC) format. If not
// set, the stream is assumed to be in annex B format.
int mp4;
// Size in bytes of the NAL length field for MP4 format.
int nal_length_size;
// Packet reader.
H2645Packet read_packet;
// Write buffer
uint8_t *write_buffer;
size_t write_buffer_size;
} CodedBitstreamH2645Context;
#endif /* AVCODEC_CBS_H2645_H */
/*
* This file is part of FFmpeg.
*
* FFmpeg is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* FFmpeg is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with FFmpeg; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
static int FUNC(rbsp_trailing_bits)(CodedBitstreamContext *ctx, RWContext *rw)
{
int err;
av_unused int one = 1, zero = 0;
xu(1, rbsp_stop_one_bit, one, 1, 1);
while (byte_alignment(rw) != 0)
xu(1, rbsp_alignment_zero_bit, zero, 0, 0);
return 0;
}
static int FUNC(nal_unit_header)(CodedBitstreamContext *ctx, RWContext *rw,
H264RawNALUnitHeader *current,
uint32_t valid_type_mask)
{
int err;
u(1, forbidden_zero_bit, 0, 0);
u(2, nal_ref_idc, 0, 3);
u(5, nal_unit_type, 0, 31);
if (!(1 << current->nal_unit_type & valid_type_mask)) {
av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid NAL unit type %d.\n",
current->nal_unit_type);
return AVERROR_INVALIDDATA;
}
if (current->nal_unit_type == 14 ||
current->nal_unit_type == 20 ||
current->nal_unit_type == 21) {
if (current->nal_unit_type != 21)
flag(svc_extension_flag);
else
flag(avc_3d_extension_flag);
if (current->svc_extension_flag) {
av_log(ctx->log_ctx, AV_LOG_ERROR, "SVC not supported.\n");
return AVERROR_PATCHWELCOME;
} else if (current->avc_3d_extension_flag) {
av_log(ctx->log_ctx, AV_LOG_ERROR, "3DAVC not supported.\n");
return AVERROR_PATCHWELCOME;
} else {
av_log(ctx->log_ctx, AV_LOG_ERROR, "MVC not supported.\n");
return AVERROR_PATCHWELCOME;
}
}
return 0;
}
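/*
 * Worked example (illustrative): an SPS NAL unit typically starts with the
 * byte 0x67 = 0110 0111b, which parses above as forbidden_zero_bit = 0,
 * nal_ref_idc = 3 and nal_unit_type = 7 (H264_NAL_SPS). Types 14, 20 and 21
 * carry the SVC / MVC / 3D-AVC extension headers, which are rejected here.
 */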
static int FUNC(scaling_list)(CodedBitstreamContext *ctx, RWContext *rw,
H264RawScalingList *current,
int size_of_scaling_list)
{
int err, i, scale;
scale = 8;
for (i = 0; i < size_of_scaling_list; i++) {
xse(delta_scale, current->delta_scale[i], -128, +127);
scale = (scale + current->delta_scale[i] + 256) % 256;
if (scale == 0)
break;
}
return 0;
}
static int FUNC(hrd_parameters)(CodedBitstreamContext *ctx, RWContext *rw,
H264RawHRD *current)
{
int err, i;
ue(cpb_cnt_minus1, 0, 31);
u(4, bit_rate_scale, 0, 15);
u(4, cpb_size_scale, 0, 15);
for (i = 0; i <= current->cpb_cnt_minus1; i++) {
ue(bit_rate_value_minus1[i], 0, UINT32_MAX - 1);
ue(cpb_size_value_minus1[i], 0, UINT32_MAX - 1);
flag(cbr_flag[i]);
}
u(5, initial_cpb_removal_delay_length_minus1, 0, 31);
u(5, cpb_removal_delay_length_minus1, 0, 31);
u(5, dpb_output_delay_length_minus1, 0, 31);
u(5, time_offset_length, 0, 31);
return 0;
}
static int FUNC(vui_parameters)(CodedBitstreamContext *ctx, RWContext *rw,
H264RawVUI *current, H264RawSPS *sps)
{
int err;
flag(aspect_ratio_info_present_flag);
if (current->aspect_ratio_info_present_flag) {
u(8, aspect_ratio_idc, 0, 255);
if (current->aspect_ratio_idc == 255) {
u(16, sar_width, 0, 65535);
u(16, sar_height, 0, 65535);
}
} else {
infer(aspect_ratio_idc, 0);
}
flag(overscan_info_present_flag);
if (current->overscan_info_present_flag)
flag(overscan_appropriate_flag);
flag(video_signal_type_present_flag);
if (current->video_signal_type_present_flag) {
u(3, video_format, 0, 7);
flag(video_full_range_flag);
flag(colour_description_present_flag);
if (current->colour_description_present_flag) {
u(8, colour_primaries, 0, 255);
u(8, transfer_characteristics, 0, 255);
u(8, matrix_coefficients, 0, 255);
}
} else {
infer(video_format, 5);
infer(video_full_range_flag, 0);
infer(colour_primaries, 2);
infer(transfer_characteristics, 2);
infer(matrix_coefficients, 2);
}
flag(chroma_loc_info_present_flag);
if (current->chroma_loc_info_present_flag) {
ue(chroma_sample_loc_type_top_field, 0, 5);
ue(chroma_sample_loc_type_bottom_field, 0, 5);
} else {
infer(chroma_sample_loc_type_top_field, 0);
infer(chroma_sample_loc_type_bottom_field, 0);
}
flag(timing_info_present_flag);
if (current->timing_info_present_flag) {
u(32, num_units_in_tick, 1, UINT32_MAX);
u(32, time_scale, 1, UINT32_MAX);
flag(fixed_frame_rate_flag);
} else {
infer(fixed_frame_rate_flag, 0);
}
flag(nal_hrd_parameters_present_flag);
if (current->nal_hrd_parameters_present_flag)
CHECK(FUNC(hrd_parameters)(ctx, rw, &current->nal_hrd_parameters));
flag(vcl_hrd_parameters_present_flag);
if (current->vcl_hrd_parameters_present_flag)
CHECK(FUNC(hrd_parameters)(ctx, rw, &current->vcl_hrd_parameters));
if (current->nal_hrd_parameters_present_flag ||
current->vcl_hrd_parameters_present_flag)
flag(low_delay_hrd_flag);
else
infer(low_delay_hrd_flag, 1 - current->fixed_frame_rate_flag);
flag(pic_struct_present_flag);
flag(bitstream_restriction_flag);
if (current->bitstream_restriction_flag) {
flag(motion_vectors_over_pic_boundaries_flag);
ue(max_bytes_per_pic_denom, 0, 16);
ue(max_bits_per_mb_denom, 0, 16);
ue(log2_max_mv_length_horizontal, 0, 16);
ue(log2_max_mv_length_vertical, 0, 16);
ue(max_num_reorder_frames, 0, H264_MAX_DPB_FRAMES);
ue(max_dec_frame_buffering, 0, H264_MAX_DPB_FRAMES);
} else {
infer(motion_vectors_over_pic_boundaries_flag, 1);
infer(max_bytes_per_pic_denom, 2);
infer(max_bits_per_mb_denom, 1);
infer(log2_max_mv_length_horizontal, 16);
infer(log2_max_mv_length_vertical, 16);
if ((sps->profile_idc == 44 || sps->profile_idc == 86 ||
sps->profile_idc == 100 || sps->profile_idc == 110 ||
sps->profile_idc == 122 || sps->profile_idc == 244) &&
sps->constraint_set3_flag) {
infer(max_num_reorder_frames, 0);
infer(max_dec_frame_buffering, 0);
} else {
infer(max_num_reorder_frames, H264_MAX_DPB_FRAMES);
infer(max_dec_frame_buffering, H264_MAX_DPB_FRAMES);
}
}
return 0;
}
static int FUNC(sps)(CodedBitstreamContext *ctx, RWContext *rw,
H264RawSPS *current)
{
int err, i;
HEADER("Sequence Parameter Set");
CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
1 << H264_NAL_SPS));
u(8, profile_idc, 0, 255);
flag(constraint_set0_flag);
flag(constraint_set1_flag);
flag(constraint_set2_flag);
flag(constraint_set3_flag);
flag(constraint_set4_flag);
flag(constraint_set5_flag);
u(2, reserved_zero_2bits, 0, 0);
u(8, level_idc, 0, 255);
ue(seq_parameter_set_id, 0, 31);
if (current->profile_idc == 100 || current->profile_idc == 110 ||
current->profile_idc == 122 || current->profile_idc == 244 ||
current->profile_idc == 44 || current->profile_idc == 83 ||
current->profile_idc == 86 || current->profile_idc == 118 ||
current->profile_idc == 128 || current->profile_idc == 138) {
ue(chroma_format_idc, 0, 3);
if (current->chroma_format_idc == 3)
flag(separate_colour_plane_flag);
else
infer(separate_colour_plane_flag, 0);
ue(bit_depth_luma_minus8, 0, 6);
ue(bit_depth_chroma_minus8, 0, 6);
flag(qpprime_y_zero_transform_bypass_flag);
flag(seq_scaling_matrix_present_flag);
if (current->seq_scaling_matrix_present_flag) {
for (i = 0; i < ((current->chroma_format_idc != 3) ? 8 : 12); i++) {
flag(seq_scaling_list_present_flag[i]);
if (current->seq_scaling_list_present_flag[i]) {
if (i < 6)
CHECK(FUNC(scaling_list)(ctx, rw,
&current->scaling_list_4x4[i],
16));
else
CHECK(FUNC(scaling_list)(ctx, rw,
&current->scaling_list_8x8[i - 6],
64));
}
}
}
} else {
infer(chroma_format_idc, current->profile_idc == 183 ? 0 : 1);
infer(separate_colour_plane_flag, 0);
infer(bit_depth_luma_minus8, 0);
infer(bit_depth_chroma_minus8, 0);
}
ue(log2_max_frame_num_minus4, 0, 12);
ue(pic_order_cnt_type, 0, 2);
if (current->pic_order_cnt_type == 0) {
ue(log2_max_pic_order_cnt_lsb_minus4, 0, 12);
} else if (current->pic_order_cnt_type == 1) {
flag(delta_pic_order_always_zero_flag);
se(offset_for_non_ref_pic, INT32_MIN + 1, INT32_MAX);
se(offset_for_top_to_bottom_field, INT32_MIN + 1, INT32_MAX);
ue(num_ref_frames_in_pic_order_cnt_cycle, 0, 255);
for (i = 0; i < current->num_ref_frames_in_pic_order_cnt_cycle; i++)
se(offset_for_ref_frame[i], INT32_MIN + 1, INT32_MAX);
}
ue(max_num_ref_frames, 0, H264_MAX_DPB_FRAMES);
flag(gaps_in_frame_num_allowed_flag);
ue(pic_width_in_mbs_minus1, 0, H264_MAX_MB_WIDTH);
ue(pic_height_in_map_units_minus1, 0, H264_MAX_MB_HEIGHT);
flag(frame_mbs_only_flag);
if (!current->frame_mbs_only_flag)
flag(mb_adaptive_frame_field_flag);
flag(direct_8x8_inference_flag);
flag(frame_cropping_flag);
if (current->frame_cropping_flag) {
ue(frame_crop_left_offset, 0, H264_MAX_WIDTH);
ue(frame_crop_right_offset, 0, H264_MAX_WIDTH);
ue(frame_crop_top_offset, 0, H264_MAX_HEIGHT);
ue(frame_crop_bottom_offset, 0, H264_MAX_HEIGHT);
}
flag(vui_parameters_present_flag);
if (current->vui_parameters_present_flag)
CHECK(FUNC(vui_parameters)(ctx, rw, &current->vui, current));
CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));
return 0;
}
static int FUNC(sps_extension)(CodedBitstreamContext *ctx, RWContext *rw,
H264RawSPSExtension *current)
{
int err;
HEADER("Sequence Parameter Set Extension");
CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
1 << H264_NAL_SPS_EXT));
ue(seq_parameter_set_id, 0, 31);
ue(aux_format_idc, 0, 3);
if (current->aux_format_idc != 0) {
int bits;
ue(bit_depth_aux_minus8, 0, 4);
flag(alpha_incr_flag);
bits = current->bit_depth_aux_minus8 + 9;
u(bits, alpha_opaque_value, 0, (1 << bits) - 1);
u(bits, alpha_transparent_value, 0, (1 << bits) - 1);
}
flag(additional_extension_flag);
CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));
return 0;
}
static int FUNC(pps)(CodedBitstreamContext *ctx, RWContext *rw,
H264RawPPS *current)
{
CodedBitstreamH264Context *h264 = ctx->priv_data;
const H264RawSPS *sps;
int err, i;
HEADER("Picture Parameter Set");
CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
1 << H264_NAL_PPS));
ue(pic_parameter_set_id, 0, 255);
ue(seq_parameter_set_id, 0, 31);
sps = h264->sps[current->seq_parameter_set_id];
if (!sps) {
av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n",
current->seq_parameter_set_id);
return AVERROR_INVALIDDATA;
}
flag(entropy_coding_mode_flag);
flag(bottom_field_pic_order_in_frame_present_flag);
ue(num_slice_groups_minus1, 0, 7);
if (current->num_slice_groups_minus1 > 0) {
unsigned int pic_size;
int iGroup;
pic_size = (sps->pic_width_in_mbs_minus1 + 1) *
(sps->pic_height_in_map_units_minus1 + 1);
ue(slice_group_map_type, 0, 6);
if (current->slice_group_map_type == 0) {
for (iGroup = 0; iGroup <= current->num_slice_groups_minus1; iGroup++)
ue(run_length_minus1[iGroup], 0, pic_size - 1);
} else if (current->slice_group_map_type == 2) {
for (iGroup = 0; iGroup < current->num_slice_groups_minus1; iGroup++) {
ue(top_left[iGroup], 0, pic_size - 1);
ue(bottom_right[iGroup], current->top_left[iGroup], pic_size - 1);
}
} else if (current->slice_group_map_type == 3 ||
current->slice_group_map_type == 4 ||
current->slice_group_map_type == 5) {
flag(slice_group_change_direction_flag);
ue(slice_group_change_rate_minus1, 0, pic_size - 1);
} else if (current->slice_group_map_type == 6) {
ue(pic_size_in_map_units_minus1, pic_size - 1, pic_size - 1);
for (i = 0; i <= current->pic_size_in_map_units_minus1; i++)
u(av_log2(2 * current->num_slice_groups_minus1 + 1),
slice_group_id[i], 0, current->num_slice_groups_minus1);
}
}
ue(num_ref_idx_l0_default_active_minus1, 0, 31);
ue(num_ref_idx_l1_default_active_minus1, 0, 31);
flag(weighted_pred_flag);
u(2, weighted_bipred_idc, 0, 2);
se(pic_init_qp_minus26, -26 - 6 * sps->bit_depth_luma_minus8, +25);
se(pic_init_qs_minus26, -26, +25);
se(chroma_qp_index_offset, -12, +12);
flag(deblocking_filter_control_present_flag);
flag(constrained_intra_pred_flag);
flag(redundant_pic_cnt_present_flag);
if (more_rbsp_data(current->more_rbsp_data))
{
flag(transform_8x8_mode_flag);
flag(pic_scaling_matrix_present_flag);
if (current->pic_scaling_matrix_present_flag) {
for (i = 0; i < 6 + (((sps->chroma_format_idc != 3) ? 2 : 6) *
current->transform_8x8_mode_flag); i++) {
flag(pic_scaling_list_present_flag[i]);
if (current->pic_scaling_list_present_flag[i]) {
if (i < 6)
CHECK(FUNC(scaling_list)(ctx, rw,
&current->scaling_list_4x4[i],
16));
else
CHECK(FUNC(scaling_list)(ctx, rw,
&current->scaling_list_8x8[i - 6],
64));
}
}
}
se(second_chroma_qp_index_offset, -12, +12);
} else {
infer(transform_8x8_mode_flag, 0);
infer(pic_scaling_matrix_present_flag, 0);
infer(second_chroma_qp_index_offset, current->chroma_qp_index_offset);
}
CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));
return 0;
}
static int FUNC(sei_buffering_period)(CodedBitstreamContext *ctx, RWContext *rw,
H264RawSEIBufferingPeriod *current)
{
CodedBitstreamH264Context *h264 = ctx->priv_data;
const H264RawSPS *sps;
int err, i, length;
ue(seq_parameter_set_id, 0, 31);
sps = h264->sps[current->seq_parameter_set_id];
if (!sps) {
av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n",
current->seq_parameter_set_id);
return AVERROR_INVALIDDATA;
}
h264->active_sps = sps;
if (sps->vui.nal_hrd_parameters_present_flag) {
for (i = 0; i <= sps->vui.nal_hrd_parameters.cpb_cnt_minus1; i++) {
length = sps->vui.nal_hrd_parameters.initial_cpb_removal_delay_length_minus1 + 1;
xu(length, initial_cpb_removal_delay[SchedSelIdx],
current->nal.initial_cpb_removal_delay[i],
0, (1 << (uint64_t)length) - 1);
xu(length, initial_cpb_removal_delay_offset[SchedSelIdx],
current->nal.initial_cpb_removal_delay_offset[i],
0, (1 << (uint64_t)length) - 1);
}
}
if (sps->vui.vcl_hrd_parameters_present_flag) {
for (i = 0; i <= sps->vui.vcl_hrd_parameters.cpb_cnt_minus1; i++) {
length = sps->vui.vcl_hrd_parameters.initial_cpb_removal_delay_length_minus1 + 1;
xu(length, initial_cpb_removal_delay[SchedSelIdx],
current->vcl.initial_cpb_removal_delay[i],
0, (1 << (uint64_t)length) - 1);
xu(length, initial_cpb_removal_delay_offset[SchedSelIdx],
current->vcl.initial_cpb_removal_delay_offset[i],
0, (1 << (uint64_t)length) - 1);
}
}
return 0;
}
static int FUNC(sei_pic_timestamp)(CodedBitstreamContext *ctx, RWContext *rw,
H264RawSEIPicTimestamp *current)
{
CodedBitstreamH264Context *h264 = ctx->priv_data;
const H264RawSPS *sps;
uint8_t time_offset_length;
int err;
u(2, ct_type, 0, 2);
flag(nuit_field_based_flag);
u(5, counting_type, 0, 6);
flag(full_timestamp_flag);
flag(discontinuity_flag);
flag(cnt_dropped_flag);
u(8, n_frames, 0, 255);
if (current->full_timestamp_flag) {
u(6, seconds_value, 0, 59);
u(6, minutes_value, 0, 59);
u(5, hours_value, 0, 23);
} else {
flag(seconds_flag);
if (current->seconds_flag) {
u(6, seconds_value, 0, 59);
flag(minutes_flag);
if (current->minutes_flag) {
u(6, minutes_value, 0, 59);
flag(hours_flag);
if (current->hours_flag)
u(5, hours_value, 0, 23);
}
}
}
sps = h264->active_sps;
if (sps->vui.nal_hrd_parameters_present_flag)
time_offset_length = sps->vui.nal_hrd_parameters.time_offset_length;
else if (sps->vui.vcl_hrd_parameters_present_flag)
time_offset_length = sps->vui.vcl_hrd_parameters.time_offset_length;
else
time_offset_length = 24;
if (time_offset_length > 0)
u(time_offset_length, time_offset,
0, (1 << (uint64_t)time_offset_length) - 1);
else
infer(time_offset, 0);
return 0;
}
static int FUNC(sei_pic_timing)(CodedBitstreamContext *ctx, RWContext *rw,
H264RawSEIPicTiming *current)
{
CodedBitstreamH264Context *h264 = ctx->priv_data;
const H264RawSPS *sps;
int err;
sps = h264->active_sps;
if (!sps) {
av_log(ctx->log_ctx, AV_LOG_ERROR,
"No active SPS for pic_timing.\n");
return AVERROR_INVALIDDATA;
}
if (sps->vui.nal_hrd_parameters_present_flag ||
sps->vui.vcl_hrd_parameters_present_flag) {
const H264RawHRD *hrd;
if (sps->vui.nal_hrd_parameters_present_flag)
hrd = &sps->vui.nal_hrd_parameters;
else if (sps->vui.vcl_hrd_parameters_present_flag)
hrd = &sps->vui.vcl_hrd_parameters;
else {
av_log(ctx->log_ctx, AV_LOG_ERROR,
"No HRD parameters for pic_timing.\n");
return AVERROR_INVALIDDATA;
}
u(hrd->cpb_removal_delay_length_minus1 + 1, cpb_removal_delay,
0, (1 << (uint64_t)(hrd->cpb_removal_delay_length_minus1 + 1)) - 1);
u(hrd->dpb_output_delay_length_minus1 + 1, dpb_output_delay,
0, (1 << (uint64_t)(hrd->dpb_output_delay_length_minus1 + 1)) - 1);
}
if (sps->vui.pic_struct_present_flag) {
static const int num_clock_ts[9] = {
1, 1, 1, 2, 2, 3, 3, 2, 3
};
int i;
u(4, pic_struct, 0, 8);
if (current->pic_struct > 8)
return AVERROR_INVALIDDATA;
for (i = 0; i < num_clock_ts[current->pic_struct]; i++) {
flag(clock_timestamp_flag[i]);
if (current->clock_timestamp_flag[i])
CHECK(FUNC(sei_pic_timestamp)(ctx, rw, &current->timestamp[i]));
}
}
return 0;
}
static int FUNC(sei_user_data_registered)(CodedBitstreamContext *ctx, RWContext *rw,
H264RawSEIUserDataRegistered *current,
uint32_t *payload_size)
{
int err, i, j;
u(8, itu_t_t35_country_code, 0x00, 0xff);
if (current->itu_t_t35_country_code != 0xff)
i = 1;
else {
u(8, itu_t_t35_country_code_extension_byte, 0x00, 0xff);
i = 2;
}
#ifdef READ
if (*payload_size < i) {
av_log(ctx->log_ctx, AV_LOG_ERROR,
"Invalid SEI user data registered payload.\n");
return AVERROR_INVALIDDATA;
}
current->data_length = *payload_size - i;
#else
*payload_size = i + current->data_length;
#endif
allocate(current->data, current->data_length);
for (j = 0; j < current->data_length; j++)
xu(8, itu_t_t35_payload_byte, current->data[j], 0x00, 0xff);
return 0;
}
static int FUNC(sei_user_data_unregistered)(CodedBitstreamContext *ctx, RWContext *rw,
H264RawSEIUserDataUnregistered *current,
uint32_t *payload_size)
{
int err, i;
#ifdef READ
if (*payload_size < 16) {
av_log(ctx->log_ctx, AV_LOG_ERROR,
"Invalid SEI user data unregistered payload.\n");
return AVERROR_INVALIDDATA;
}
current->data_length = *payload_size - 16;
#else
*payload_size = 16 + current->data_length;
#endif
for (i = 0; i < 16; i++) {
xu(8, uuid_iso_iec_11578,
current->uuid_iso_iec_11578[i], 0x00, 0xff);
}
allocate(current->data, current->data_length);
for (i = 0; i < current->data_length; i++)
xu(8, user_data_payload_byte, current->data[i], 0x00, 0xff);
return 0;
}
static int FUNC(sei_recovery_point)(CodedBitstreamContext *ctx, RWContext *rw,
H264RawSEIRecoveryPoint *current)
{
int err;
ue(recovery_frame_cnt, 0, 65535);
flag(exact_match_flag);
flag(broken_link_flag);
u(2, changing_slice_group_idc, 0, 2);
return 0;
}
static int FUNC(sei_display_orientation)(CodedBitstreamContext *ctx, RWContext *rw,
H264RawSEIDisplayOrientation *current)
{
int err;
flag(display_orientation_cancel_flag);
if (!current->display_orientation_cancel_flag) {
flag(hor_flip);
flag(ver_flip);
u(16, anticlockwise_rotation, 0, 65535);
ue(display_orientation_repetition_period, 0, 16384);
flag(display_orientation_extension_flag);
}
return 0;
}
static int FUNC(sei_payload)(CodedBitstreamContext *ctx, RWContext *rw,
H264RawSEIPayload *current)
{
int err, i;
int start_position, end_position;
#ifdef READ
start_position = get_bits_count(rw);
#else
start_position = put_bits_count(rw);
#endif
switch (current->payload_type) {
case H264_SEI_TYPE_BUFFERING_PERIOD:
CHECK(FUNC(sei_buffering_period)
(ctx, rw, &current->payload.buffering_period));
break;
case H264_SEI_TYPE_PIC_TIMING:
CHECK(FUNC(sei_pic_timing)
(ctx, rw, &current->payload.pic_timing));
break;
case H264_SEI_TYPE_FILLER_PAYLOAD:
{
av_unused int ff_byte = 0xff;
for (i = 0; i < current->payload_size; i++)
xu(8, ff_byte, ff_byte, 0xff, 0xff);
}
break;
case H264_SEI_TYPE_USER_DATA_REGISTERED:
CHECK(FUNC(sei_user_data_registered)
(ctx, rw, &current->payload.user_data_registered, &current->payload_size));
break;
case H264_SEI_TYPE_USER_DATA_UNREGISTERED:
CHECK(FUNC(sei_user_data_unregistered)
(ctx, rw, &current->payload.user_data_unregistered, &current->payload_size));
break;
case H264_SEI_TYPE_RECOVERY_POINT:
CHECK(FUNC(sei_recovery_point)
(ctx, rw, &current->payload.recovery_point));
break;
case H264_SEI_TYPE_DISPLAY_ORIENTATION:
CHECK(FUNC(sei_display_orientation)
(ctx, rw, &current->payload.display_orientation));
break;
default:
{
allocate(current->payload.other.data, current->payload_size);
for (i = 0; i < current->payload_size; i++)
xu(8, payload_byte, current->payload.other.data[i], 0, 255);
}
}
if (byte_alignment(rw)) {
av_unused int one = 1, zero = 0;
xu(1, bit_equal_to_one, one, 1, 1);
while (byte_alignment(rw))
xu(1, bit_equal_to_zero, zero, 0, 0);
}
#ifdef READ
end_position = get_bits_count(rw);
if (end_position < start_position + 8 * current->payload_size) {
av_log(ctx->log_ctx, AV_LOG_ERROR, "Incorrect SEI payload length: "
"header %d bits, actually %d bits.\n",
8 * current->payload_size,
end_position - start_position);
return AVERROR_INVALIDDATA;
}
#else
end_position = put_bits_count(rw);
current->payload_size = (end_position - start_position) / 8;
#endif
return 0;
}
static int FUNC(sei)(CodedBitstreamContext *ctx, RWContext *rw,
H264RawSEI *current)
{
int err, k;
HEADER("Supplemental Enhancement Information");
CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
1 << H264_NAL_SEI));
#ifdef READ
for (k = 0; k < H264_MAX_SEI_PAYLOADS; k++) {
uint32_t payload_type = 0;
uint32_t payload_size = 0;
uint32_t tmp;
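// payload_type and payload_size are each coded as a run of 0xff bytes
// (each adding 255) terminated by a final byte in the range 0-254.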
while (show_bits(rw, 8) == 0xff) {
xu(8, ff_byte, tmp, 0xff, 0xff);
payload_type += 255;
}
xu(8, last_payload_type_byte, tmp, 0, 254);
payload_type += tmp;
while (show_bits(rw, 8) == 0xff) {
xu(8, ff_byte, tmp, 0xff, 0xff);
payload_size += 255;
}
xu(8, last_payload_size_byte, tmp, 0, 254);
payload_size += tmp;
current->payload[k].payload_type = payload_type;
current->payload[k].payload_size = payload_size;
CHECK(FUNC(sei_payload)(ctx, rw, &current->payload[k]));
if (!cbs_h2645_read_more_rbsp_data(rw))
break;
}
if (k >= H264_MAX_SEI_PAYLOADS) {
av_log(ctx->log_ctx, AV_LOG_ERROR, "Too many payloads in "
"SEI message: found %d.\n", k);
return AVERROR_INVALIDDATA;
}
current->payload_count = k + 1;
#else
for (k = 0; k < current->payload_count; k++) {
PutBitContext start_state;
uint32_t tmp;
int need_size, i;
// Somewhat clumsy: we write the payload twice when
// we don't know the size in advance. This will mess
// with trace output, but is otherwise harmless.
start_state = *rw;
need_size = !current->payload[k].payload_size;
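// First pass: sei_payload() fills in payload_size as a side effect of
// writing; second pass: rewind and rewrite the header with that size.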
for (i = 0; i < 1 + need_size; i++) {
*rw = start_state;
tmp = current->payload[k].payload_type;
while (tmp >= 255) {
xu(8, ff_byte, 0xff, 0xff, 0xff);
tmp -= 255;
}
xu(8, last_payload_type_byte, tmp, 0, 254);
tmp = current->payload[k].payload_size;
while (tmp >= 255) {
xu(8, ff_byte, 0xff, 0xff, 0xff);
tmp -= 255;
}
xu(8, last_payload_size_byte, tmp, 0, 254);
CHECK(FUNC(sei_payload)(ctx, rw, &current->payload[k]));
}
}
#endif
CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));
return 0;
}
static int FUNC(aud)(CodedBitstreamContext *ctx, RWContext *rw,
H264RawAUD *current)
{
int err;
HEADER("Access Unit Delimiter");
CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
1 << H264_NAL_AUD));
u(3, primary_pic_type, 0, 7);
CHECK(FUNC(rbsp_trailing_bits)(ctx, rw));
return 0;
}
static int FUNC(ref_pic_list_modification)(CodedBitstreamContext *ctx, RWContext *rw,
H264RawSliceHeader *current)
{
CodedBitstreamH264Context *h264 = ctx->priv_data;
const H264RawSPS *sps = h264->active_sps;
int err, i, mopn;
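// modification_of_pic_nums_idc selects the reordering operation:
// 0 and 1 use abs_diff_pic_num_minus1 (short-term pictures),
// 2 uses long_term_pic_num, and 3 terminates the list.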
if (current->slice_type % 5 != 2 &&
current->slice_type % 5 != 4) {
flag(ref_pic_list_modification_flag_l0);
if (current->ref_pic_list_modification_flag_l0) {
for (i = 0; i < H264_MAX_RPLM_COUNT; i++) {
xue(modification_of_pic_nums_idc,
current->rplm_l0[i].modification_of_pic_nums_idc, 0, 3);
mopn = current->rplm_l0[i].modification_of_pic_nums_idc;
if (mopn == 3)
break;
if (mopn == 0 || mopn == 1)
xue(abs_diff_pic_num_minus1,
current->rplm_l0[i].abs_diff_pic_num_minus1,
0, (1 + current->field_pic_flag) *
(1 << (sps->log2_max_frame_num_minus4 + 4)));
else if (mopn == 2)
xue(long_term_pic_num,
current->rplm_l0[i].long_term_pic_num,
0, sps->max_num_ref_frames - 1);
}
}
}
if (current->slice_type % 5 == 1) {
flag(ref_pic_list_modification_flag_l1);
if (current->ref_pic_list_modification_flag_l1) {
for (i = 0; i < H264_MAX_RPLM_COUNT; i++) {
xue(modification_of_pic_nums_idc,
current->rplm_l1[i].modification_of_pic_nums_idc, 0, 3);
mopn = current->rplm_l1[i].modification_of_pic_nums_idc;
if (mopn == 3)
break;
if (mopn == 0 || mopn == 1)
xue(abs_diff_pic_num_minus1,
current->rplm_l1[i].abs_diff_pic_num_minus1,
0, (1 + current->field_pic_flag) *
(1 << (sps->log2_max_frame_num_minus4 + 4)));
else if (mopn == 2)
xue(long_term_pic_num,
current->rplm_l1[i].long_term_pic_num,
0, sps->max_num_ref_frames - 1);
}
}
}
return 0;
}
static int FUNC(pred_weight_table)(CodedBitstreamContext *ctx, RWContext *rw,
H264RawSliceHeader *current)
{
CodedBitstreamH264Context *h264 = ctx->priv_data;
const H264RawSPS *sps = h264->active_sps;
int chroma;
int err, i, j;
ue(luma_log2_weight_denom, 0, 7);
chroma = !sps->separate_colour_plane_flag && sps->chroma_format_idc != 0;
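// Chroma weights are only present when chroma exists and is not coded
// as separate colour planes (i.e. ChromaArrayType is nonzero).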
if (chroma)
ue(chroma_log2_weight_denom, 0, 7);
for (i = 0; i <= current->num_ref_idx_l0_active_minus1; i++) {
flag(luma_weight_l0_flag[i]);
if (current->luma_weight_l0_flag[i]) {
se(luma_weight_l0[i], -128, +127);
se(luma_offset_l0[i], -128, +127);
}
if (chroma) {
flag(chroma_weight_l0_flag[i]);
if (current->chroma_weight_l0_flag[i]) {
for (j = 0; j < 2; j++) {
se(chroma_weight_l0[i][j], -128, +127);
se(chroma_offset_l0[i][j], -128, +127);
}
}
}
}
if (current->slice_type % 5 == 1) {
for (i = 0; i <= current->num_ref_idx_l1_active_minus1; i++) {
flag(luma_weight_l1_flag[i]);
if (current->luma_weight_l1_flag[i]) {
se(luma_weight_l1[i], -128, +127);
se(luma_offset_l1[i], -128, +127);
}
if (chroma) {
flag(chroma_weight_l1_flag[i]);
if (current->chroma_weight_l1_flag[i]) {
for (j = 0; j < 2; j++) {
se(chroma_weight_l1[i][j], -128, +127);
se(chroma_offset_l1[i][j], -128, +127);
}
}
}
}
}
return 0;
}
static int FUNC(dec_ref_pic_marking)(CodedBitstreamContext *ctx, RWContext *rw,
H264RawSliceHeader *current, int idr_pic_flag)
{
CodedBitstreamH264Context *h264 = ctx->priv_data;
const H264RawSPS *sps = h264->active_sps;
int err, i;
uint32_t mmco;
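// memory_management_control_operation: 0 ends the list; 1 and 3 take
// difference_of_pic_nums_minus1; 2 takes long_term_pic_num; 3 and 6
// take long_term_frame_idx; 4 takes max_long_term_frame_idx_plus1.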
if (idr_pic_flag) {
flag(no_output_of_prior_pics_flag);
flag(long_term_reference_flag);
} else {
flag(adaptive_ref_pic_marking_mode_flag);
if (current->adaptive_ref_pic_marking_mode_flag) {
for (i = 0; i < H264_MAX_MMCO_COUNT; i++) {
xue(memory_management_control_operation,
current->mmco[i].memory_management_control_operation,
0, 6);
mmco = current->mmco[i].memory_management_control_operation;
if (mmco == 0)
break;
if (mmco == 1 || mmco == 3)
xue(difference_of_pic_nums_minus1,
current->mmco[i].difference_of_pic_nums_minus1,
0, INT32_MAX);
if (mmco == 2)
xue(long_term_pic_num,
current->mmco[i].long_term_pic_num,
0, sps->max_num_ref_frames - 1);
if (mmco == 3 || mmco == 6)
xue(long_term_frame_idx,
current->mmco[i].long_term_frame_idx,
0, sps->max_num_ref_frames - 1);
if (mmco == 4)
xue(max_long_term_frame_idx_plus1,
current->mmco[i].max_long_term_frame_idx_plus1,
0, sps->max_num_ref_frames);
}
if (i == H264_MAX_MMCO_COUNT) {
av_log(ctx->log_ctx, AV_LOG_ERROR, "Too many "
"memory management control operations.\n");
return AVERROR_INVALIDDATA;
}
}
}
return 0;
}
static int FUNC(slice_header)(CodedBitstreamContext *ctx, RWContext *rw,
H264RawSliceHeader *current)
{
CodedBitstreamH264Context *h264 = ctx->priv_data;
const H264RawSPS *sps;
const H264RawPPS *pps;
int err;
int idr_pic_flag;
int slice_type_i, slice_type_p, slice_type_b;
int slice_type_si, slice_type_sp;
HEADER("Slice Header");
CHECK(FUNC(nal_unit_header)(ctx, rw, &current->nal_unit_header,
1 << H264_NAL_SLICE |
1 << H264_NAL_IDR_SLICE |
1 << H264_NAL_AUXILIARY_SLICE));
if (current->nal_unit_header.nal_unit_type == H264_NAL_AUXILIARY_SLICE) {
if (!h264->last_slice_nal_unit_type) {
av_log(ctx->log_ctx, AV_LOG_ERROR, "Auxiliary slice "
"is not decodable without the main picture "
"in the same access unit.\n");
return AVERROR_INVALIDDATA;
}
} else {
h264->last_slice_nal_unit_type =
current->nal_unit_header.nal_unit_type;
}
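// Auxiliary slices inherit the IDR / non-IDR status of the primary
// coded picture in the same access unit.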
idr_pic_flag = h264->last_slice_nal_unit_type == H264_NAL_IDR_SLICE;
ue(first_mb_in_slice, 0, H264_MAX_MB_PIC_SIZE - 1);
ue(slice_type, 0, 9);
slice_type_i = current->slice_type % 5 == 2;
slice_type_p = current->slice_type % 5 == 0;
slice_type_b = current->slice_type % 5 == 1;
slice_type_si = current->slice_type % 5 == 4;
slice_type_sp = current->slice_type % 5 == 3;
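// slice_type values 5-9 additionally assert that all slices of the
// picture have the same type; modulo 5 the mapping is
// 0 = P, 1 = B, 2 = I, 3 = SP, 4 = SI.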
if (idr_pic_flag && !(slice_type_i || slice_type_si)) {
av_log(ctx->log_ctx, AV_LOG_ERROR, "Invalid slice type %d "
"for IDR picture.\n", current->slice_type);
return AVERROR_INVALIDDATA;
}
ue(pic_parameter_set_id, 0, 255);
pps = h264->pps[current->pic_parameter_set_id];
if (!pps) {
av_log(ctx->log_ctx, AV_LOG_ERROR, "PPS id %d not available.\n",
current->pic_parameter_set_id);
return AVERROR_INVALIDDATA;
}
h264->active_pps = pps;
sps = h264->sps[pps->seq_parameter_set_id];
if (!sps) {
av_log(ctx->log_ctx, AV_LOG_ERROR, "SPS id %d not available.\n",
pps->seq_parameter_set_id);
return AVERROR_INVALIDDATA;
}
h264->active_sps = sps;
if (sps->separate_colour_plane_flag)
u(2, colour_plane_id, 0, 2);
u(sps->log2_max_frame_num_minus4 + 4, frame_num,
0, (1 << (sps->log2_max_frame_num_minus4 + 4)) - 1);
if (!sps->frame_mbs_only_flag) {
flag(field_pic_flag);
if (current->field_pic_flag)
flag(bottom_field_flag);
else
infer(bottom_field_flag, 0);
} else {
infer(field_pic_flag, 0);
infer(bottom_field_flag, 0);
}
if (idr_pic_flag)
ue(idr_pic_id, 0, 65535);
if (sps->pic_order_cnt_type == 0) {
u(sps->log2_max_pic_order_cnt_lsb_minus4 + 4, pic_order_cnt_lsb,
0, (1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4)) - 1);
if (pps->bottom_field_pic_order_in_frame_present_flag &&
!current->field_pic_flag)
se(delta_pic_order_cnt_bottom, INT32_MIN + 1, INT32_MAX);
} else if (sps->pic_order_cnt_type == 1) {
if (!sps->delta_pic_order_always_zero_flag) {
se(delta_pic_order_cnt[0], INT32_MIN + 1, INT32_MAX);
if (pps->bottom_field_pic_order_in_frame_present_flag &&
!current->field_pic_flag)
se(delta_pic_order_cnt[1], INT32_MIN + 1, INT32_MAX);
else
infer(delta_pic_order_cnt[1], 0);
} else {
infer(delta_pic_order_cnt[0], 0);
infer(delta_pic_order_cnt[1], 0);
}
}
if (pps->redundant_pic_cnt_present_flag)
ue(redundant_pic_cnt, 0, 127);
if (slice_type_b)
flag(direct_spatial_mv_pred_flag);
if (slice_type_p || slice_type_sp || slice_type_b) {
flag(num_ref_idx_active_override_flag);
if (current->num_ref_idx_active_override_flag) {
ue(num_ref_idx_l0_active_minus1, 0, 31);
if (slice_type_b)
ue(num_ref_idx_l1_active_minus1, 0, 31);
} else {
infer(num_ref_idx_l0_active_minus1,
pps->num_ref_idx_l0_default_active_minus1);
infer(num_ref_idx_l1_active_minus1,
pps->num_ref_idx_l1_default_active_minus1);
}
}
if (current->nal_unit_header.nal_unit_type == 20 ||
current->nal_unit_header.nal_unit_type == 21) {
av_log(ctx->log_ctx, AV_LOG_ERROR, "MVC / 3DAVC not supported.\n");
return AVERROR_PATCHWELCOME;
} else {
CHECK(FUNC(ref_pic_list_modification)(ctx, rw, current));
}
if ((pps->weighted_pred_flag && (slice_type_p || slice_type_sp)) ||
(pps->weighted_bipred_idc == 1 && slice_type_b)) {
CHECK(FUNC(pred_weight_table)(ctx, rw, current));
}
if (current->nal_unit_header.nal_ref_idc != 0) {
CHECK(FUNC(dec_ref_pic_marking)(ctx, rw, current, idr_pic_flag));
}
if (pps->entropy_coding_mode_flag &&
!slice_type_i && !slice_type_si) {
ue(cabac_init_idc, 0, 2);
}
se(slice_qp_delta, - 51 - 6 * sps->bit_depth_luma_minus8,
+ 51 + 6 * sps->bit_depth_luma_minus8);
if (slice_type_sp || slice_type_si) {
if (slice_type_sp)
flag(sp_for_switch_flag);
se(slice_qs_delta, -51, +51);
}
if (pps->deblocking_filter_control_present_flag) {
ue(disable_deblocking_filter_idc, 0, 2);
if (current->disable_deblocking_filter_idc != 1) {
se(slice_alpha_c0_offset_div2, -6, +6);
se(slice_beta_offset_div2, -6, +6);
} else {
infer(slice_alpha_c0_offset_div2, 0);
infer(slice_beta_offset_div2, 0);
}
} else {
infer(disable_deblocking_filter_idc, 0);
infer(slice_alpha_c0_offset_div2, 0);
infer(slice_beta_offset_div2, 0);
}
if (pps->num_slice_groups_minus1 > 0 &&
pps->slice_group_map_type >= 3 &&
pps->slice_group_map_type <= 5) {
unsigned int pic_size, max, bits;
pic_size = (sps->pic_width_in_mbs_minus1 + 1) *
(sps->pic_height_in_map_units_minus1 + 1);
max = (pic_size + pps->slice_group_change_rate_minus1) /
(pps->slice_group_change_rate_minus1 + 1);
bits = av_log2(2 * max - 1);
u(bits, slice_group_change_cycle, 0, max);
}
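// CABAC-coded slice data must start on a byte boundary, so the header
// is padded with cabac_alignment_one_bit up to the next byte.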
if (pps->entropy_coding_mode_flag) {
av_unused int one = 1;
while (byte_alignment(rw))
xu(1, cabac_alignment_one_bit, one, 1, 1);
}
return 0;
}
......@@ -83,4 +83,7 @@ int ff_cbs_write_unsigned(CodedBitstreamContext *ctx, PutBitContext *pbc,
uint32_t range_min, uint32_t range_max);
extern const CodedBitstreamType ff_cbs_type_h264;
#endif /* AVCODEC_CBS_INTERNAL_H */