Commit 289a6bb8 authored by Philip Langdale

cuvid: Pass bit depth information to decoder

Although cuvid can only output 8bit, it can consume HEVC Main10 if
the bit depth is set properly. In cases where >8bit is not supported,
this change is still beneficial: the decoder fails to be created
instead of plowing through and decoding as 8bit.
parent 843aff3c
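
For illustration, a minimal sketch (not code from this commit) of how a caller
might forward the parser's reported bit depth when creating the decoder. The
helper name create_decoder_for_format and the surface count of 25 are
assumptions for the example; the relevant line is the bitDepthMinus8 assignment:

/* Hypothetical helper: create a cuvid decoder sized for the stream
 * described by the parser's sequence callback. */
#include <string.h>
#include <nvcuvid.h>

static CUresult create_decoder_for_format(const CUVIDEOFORMAT *format,
                                          CUvideodecoder *decoder)
{
    CUVIDDECODECREATEINFO cuinfo;
    memset(&cuinfo, 0, sizeof(cuinfo));

    cuinfo.CodecType       = format->codec;
    cuinfo.ChromaFormat    = format->chroma_format;
    cuinfo.ulWidth         = format->coded_width;
    cuinfo.ulHeight        = format->coded_height;
    cuinfo.ulTargetWidth   = cuinfo.ulWidth;
    cuinfo.ulTargetHeight  = cuinfo.ulHeight;
    cuinfo.OutputFormat    = cudaVideoSurfaceFormat_NV12; /* output stays 8bit */
    cuinfo.DeinterlaceMode = cudaVideoDeinterlaceMode_Weave;
    cuinfo.ulNumDecodeSurfaces = 25; /* assumed surface count */
    cuinfo.ulNumOutputSurfaces = 1;
    cuinfo.ulCreationFlags = cudaVideoCreate_PreferCUVID;

    /* Forward the stream's bit depth: 2 for HEVC Main10. If the hardware
     * cannot decode >8bit, cuvidCreateDecoder() fails here rather than
     * silently treating the stream as 8bit. */
    cuinfo.bitDepthMinus8 = format->bit_depth_luma_minus8;

    return cuvidCreateDecoder(decoder, &cuinfo);
}
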
@@ -181,6 +181,7 @@ static int CUDAAPI cuvid_handle_video_sequence(void *opaque, CUVIDEOFORMAT* format
     cuinfo.ulNumDecodeSurfaces = MAX_FRAME_COUNT;
     cuinfo.ulNumOutputSurfaces = 1;
     cuinfo.ulCreationFlags = cudaVideoCreate_PreferCUVID;
+    cuinfo.bitDepthMinus8 = format->bit_depth_luma_minus8;
 
     if (format->progressive_sequence) {
         ctx->deint_mode = cuinfo.DeinterlaceMode = cudaVideoDeinterlaceMode_Weave;
@@ -573,6 +574,7 @@ static int cuvid_test_dummy_decoder(AVCodecContext *avctx, CUVIDPARSERPARAMS *cu
     cuinfo.ulNumDecodeSurfaces = MAX_FRAME_COUNT;
     cuinfo.ulNumOutputSurfaces = 1;
     cuinfo.ulCreationFlags = cudaVideoCreate_PreferCUVID;
+    cuinfo.bitDepthMinus8 = 0;
     cuinfo.DeinterlaceMode = cudaVideoDeinterlaceMode_Weave;