Commit a4e21baa authored by Chris Berov, committed by Justin Ruggles

yuv4mpeg: cosmetics: pretty-printing

Signed-off-by: Justin Ruggles <justin.ruggles@gmail.com>
parent eda87312
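
Context note (not part of the commit): the strings this file assembles and parses follow the YUV4MPEG2 layout. Below is a minimal, illustrative C sketch of the stream header that yuv4_generate_header() builds, assuming Y4M_MAGIC expands to "YUV4MPEG2" and Y4M_FRAME_MAGIC to "FRAME" as defined in yuv4mpeg.h; all sample values are made up for illustration.

/* Illustrative sketch only -- not part of this commit. Assumes Y4M_MAGIC is
 * "YUV4MPEG2" and Y4M_FRAME_MAGIC is "FRAME", as defined in yuv4mpeg.h. */
#include <stdio.h>

int main(void)
{
    const char *magic      = "YUV4MPEG2";                 /* Y4M_MAGIC (assumed)  */
    const char *colorspace = " C420mpeg2 XYSCSS=420MPEG2";
    int  width = 720, height = 576;                       /* sample geometry      */
    int  raten = 25, rated = 1;                           /* frame rate fraction  */
    int  aspectn = 0, aspectd = 0;                        /* 0:0 = unknown aspect */
    char inter = 'p';                                     /* progressive          */

    /* Same format string as the snprintf() in yuv4_generate_header(). */
    printf("%s W%d H%d F%d:%d I%c A%d:%d%s\n",
           magic, width, height, raten, rated, inter,
           aspectn, aspectd, colorspace);
    /* -> YUV4MPEG2 W720 H576 F25:1 Ip A0:0 C420mpeg2 XYSCSS=420MPEG2 */
    return 0;
}

Each encoded picture then follows a "FRAME\n" marker line, which is what yuv4_write_packet() emits and yuv4_read_packet() checks for.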
@@ -39,23 +39,24 @@ static int yuv4_generate_header(AVFormatContext *s, char* buf)
     char inter;
     const char *colorspace = "";
 
     st = s->streams[0];
     width = st->codec->width;
     height = st->codec->height;
 
-    av_reduce(&raten, &rated, st->codec->time_base.den, st->codec->time_base.num, (1UL<<31)-1);
+    av_reduce(&raten, &rated, st->codec->time_base.den,
+              st->codec->time_base.num, (1UL << 31) - 1);
 
     aspectn = st->sample_aspect_ratio.num;
     aspectd = st->sample_aspect_ratio.den;
 
-    if ( aspectn == 0 && aspectd == 1 ) aspectd = 0; // 0:0 means unknown
+    if (aspectn == 0 && aspectd == 1)
+        aspectd = 0; // 0:0 means unknown
 
     inter = 'p'; /* progressive is the default */
-    if (st->codec->coded_frame && st->codec->coded_frame->interlaced_frame) {
+    if (st->codec->coded_frame && st->codec->coded_frame->interlaced_frame)
         inter = st->codec->coded_frame->top_field_first ? 't' : 'b';
-    }
 
-    switch(st->codec->pix_fmt) {
+    switch (st->codec->pix_fmt) {
     case PIX_FMT_GRAY8:
         colorspace = " Cmono";
         break;
@@ -63,9 +64,11 @@ static int yuv4_generate_header(AVFormatContext *s, char* buf)
         colorspace = " C411 XYSCSS=411";
         break;
     case PIX_FMT_YUV420P:
-        colorspace = (st->codec->chroma_sample_location == AVCHROMA_LOC_TOPLEFT)?" C420paldv XYSCSS=420PALDV":
-                     (st->codec->chroma_sample_location == AVCHROMA_LOC_LEFT) ?" C420mpeg2 XYSCSS=420MPEG2":
-                     " C420jpeg XYSCSS=420JPEG";
+        switch (st->codec->chroma_sample_location) {
+        case AVCHROMA_LOC_TOPLEFT: colorspace = " C420paldv XYSCSS=420PALDV"; break;
+        case AVCHROMA_LOC_LEFT:    colorspace = " C420mpeg2 XYSCSS=420MPEG2"; break;
+        default:                   colorspace = " C420jpeg XYSCSS=420JPEG";   break;
+        }
         break;
     case PIX_FMT_YUV422P:
         colorspace = " C422 XYSCSS=422";
@@ -77,13 +80,8 @@ static int yuv4_generate_header(AVFormatContext *s, char* buf)
 
     /* construct stream header, if this is the first frame */
     n = snprintf(buf, Y4M_LINE_MAX, "%s W%d H%d F%d:%d I%c A%d:%d%s\n",
-                 Y4M_MAGIC,
-                 width,
-                 height,
-                 raten, rated,
-                 inter,
-                 aspectn, aspectd,
-                 colorspace);
+                 Y4M_MAGIC, width, height, raten, rated, inter,
+                 aspectn, aspectd, colorspace);
 
     return n;
 }
@@ -96,7 +94,7 @@ static int yuv4_write_packet(AVFormatContext *s, AVPacket *pkt)
     int* first_pkt = s->priv_data;
     int width, height, h_chroma_shift, v_chroma_shift;
     int i;
-    char buf2[Y4M_LINE_MAX+1];
+    char buf2[Y4M_LINE_MAX + 1];
     char buf1[20];
     uint8_t *ptr, *ptr1, *ptr2;
 
@@ -106,7 +104,8 @@ static int yuv4_write_packet(AVFormatContext *s, AVPacket *pkt)
     if (*first_pkt) {
         *first_pkt = 0;
         if (yuv4_generate_header(s, buf2) < 0) {
-            av_log(s, AV_LOG_ERROR, "Error. YUV4MPEG stream header write failed.\n");
+            av_log(s, AV_LOG_ERROR,
+                   "Error. YUV4MPEG stream header write failed.\n");
             return AVERROR(EIO);
         } else {
             avio_write(pb, buf2, strlen(buf2));
@@ -118,31 +117,32 @@ static int yuv4_write_packet(AVFormatContext *s, AVPacket *pkt)
     snprintf(buf1, sizeof(buf1), "%s\n", Y4M_FRAME_MAGIC);
     avio_write(pb, buf1, strlen(buf1));
 
     width = st->codec->width;
     height = st->codec->height;
 
     ptr = picture->data[0];
-    for(i=0;i<height;i++) {
+    for (i = 0; i < height; i++) {
         avio_write(pb, ptr, width);
         ptr += picture->linesize[0];
     }
 
-    if (st->codec->pix_fmt != PIX_FMT_GRAY8){
+    if (st->codec->pix_fmt != PIX_FMT_GRAY8) {
         // Adjust for smaller Cb and Cr planes
-        avcodec_get_chroma_sub_sample(st->codec->pix_fmt, &h_chroma_shift, &v_chroma_shift);
+        avcodec_get_chroma_sub_sample(st->codec->pix_fmt, &h_chroma_shift,
+                                      &v_chroma_shift);
         width >>= h_chroma_shift;
         height >>= v_chroma_shift;
 
         ptr1 = picture->data[1];
         ptr2 = picture->data[2];
-        for(i=0;i<height;i++) {     /* Cb */
+        for (i = 0; i < height; i++) {     /* Cb */
             avio_write(pb, ptr1, width);
             ptr1 += picture->linesize[1];
         }
-        for(i=0;i<height;i++) {     /* Cr */
+        for (i = 0; i < height; i++) {     /* Cr */
             avio_write(pb, ptr2, width);
             ptr2 += picture->linesize[2];
         }
     }
     avio_flush(pb);
     return 0;
@@ -150,19 +150,21 @@ static int yuv4_write_packet(AVFormatContext *s, AVPacket *pkt)
 
 static int yuv4_write_header(AVFormatContext *s)
 {
-    int* first_pkt = s->priv_data;
+    int *first_pkt = s->priv_data;
 
     if (s->nb_streams != 1)
         return AVERROR(EIO);
 
     if (s->streams[0]->codec->pix_fmt == PIX_FMT_YUV411P) {
-        av_log(s, AV_LOG_ERROR, "Warning: generating rarely used 4:1:1 YUV stream, some mjpegtools might not work.\n");
-    }
-    else if ((s->streams[0]->codec->pix_fmt != PIX_FMT_YUV420P) &&
+        av_log(s, AV_LOG_ERROR, "Warning: generating rarely used 4:1:1 YUV "
+               "stream, some mjpegtools might not work.\n");
+    } else if ((s->streams[0]->codec->pix_fmt != PIX_FMT_YUV420P) &&
              (s->streams[0]->codec->pix_fmt != PIX_FMT_YUV422P) &&
              (s->streams[0]->codec->pix_fmt != PIX_FMT_GRAY8) &&
             (s->streams[0]->codec->pix_fmt != PIX_FMT_YUV444P)) {
-        av_log(s, AV_LOG_ERROR, "ERROR: yuv4mpeg only handles yuv444p, yuv422p, yuv420p, yuv411p and gray pixel formats. Use -pix_fmt to select one.\n");
+        av_log(s, AV_LOG_ERROR, "ERROR: yuv4mpeg only handles yuv444p, "
+               "yuv422p, yuv420p, yuv411p and gray pixel formats. "
+               "Use -pix_fmt to select one.\n");
         return AVERROR(EIO);
     }
 
@@ -180,7 +182,7 @@ AVOutputFormat ff_yuv4mpegpipe_muxer = {
     .video_codec  = CODEC_ID_RAWVIDEO,
     .write_header = yuv4_write_header,
     .write_packet = yuv4_write_packet,
-    .flags = AVFMT_RAWPICTURE,
+    .flags        = AVFMT_RAWPICTURE,
 };
 #endif
 
@@ -190,85 +192,96 @@ AVOutputFormat ff_yuv4mpegpipe_muxer = {
 static int yuv4_read_header(AVFormatContext *s, AVFormatParameters *ap)
 {
-    char header[MAX_YUV4_HEADER+10]; // Include headroom for the longest option
-    char *tokstart,*tokend,*header_end;
+    char header[MAX_YUV4_HEADER + 10]; // Include headroom for
+                                       // the longest option
+    char *tokstart, *tokend, *header_end;
     int i;
     AVIOContext *pb = s->pb;
-    int width=-1, height=-1, raten=0, rated=0, aspectn=0, aspectd=0;
-    enum PixelFormat pix_fmt=PIX_FMT_NONE,alt_pix_fmt=PIX_FMT_NONE;
+    int width = -1, height = -1, raten = 0,
+        rated = 0, aspectn = 0, aspectd = 0;
+    enum PixelFormat pix_fmt = PIX_FMT_NONE, alt_pix_fmt = PIX_FMT_NONE;
     enum AVChromaLocation chroma_sample_location = AVCHROMA_LOC_UNSPECIFIED;
     AVStream *st;
     struct frame_attributes *s1 = s->priv_data;
 
-    for (i=0; i<MAX_YUV4_HEADER; i++) {
+    for (i = 0; i < MAX_YUV4_HEADER; i++) {
         header[i] = avio_r8(pb);
         if (header[i] == '\n') {
-            header[i+1] = 0x20; // Add a space after last option. Makes parsing "444" vs "444alpha" easier.
-            header[i+2] = 0;
+            header[i + 1] = 0x20; // Add a space after last option.
+                                  // Makes parsing "444" vs "444alpha" easier.
+            header[i + 2] = 0;
             break;
         }
     }
 
-    if (i == MAX_YUV4_HEADER) return -1;
-    if (strncmp(header, Y4M_MAGIC, strlen(Y4M_MAGIC))) return -1;
+    if (i == MAX_YUV4_HEADER)
+        return -1;
+    if (strncmp(header, Y4M_MAGIC, strlen(Y4M_MAGIC)))
+        return -1;
 
     s1->interlaced_frame = 0;
     s1->top_field_first = 0;
-    header_end = &header[i+1]; // Include space
-    for(tokstart = &header[strlen(Y4M_MAGIC) + 1]; tokstart < header_end; tokstart++) {
-        if (*tokstart==0x20) continue;
+    header_end = &header[i + 1]; // Include space
+    for (tokstart = &header[strlen(Y4M_MAGIC) + 1];
+         tokstart < header_end; tokstart++) {
+        if (*tokstart == 0x20)
+            continue;
         switch (*tokstart++) {
        case 'W': // Width. Required.
            width = strtol(tokstart, &tokend, 10);
-            tokstart=tokend;
+            tokstart = tokend;
            break;
        case 'H': // Height. Required.
            height = strtol(tokstart, &tokend, 10);
-            tokstart=tokend;
+            tokstart = tokend;
            break;
        case 'C': // Color space
-            if (strncmp("420jpeg",tokstart,7)==0) {
+            if (strncmp("420jpeg", tokstart, 7) == 0) {
                pix_fmt = PIX_FMT_YUV420P;
                chroma_sample_location = AVCHROMA_LOC_CENTER;
-            } else if (strncmp("420mpeg2",tokstart,8)==0) {
+            } else if (strncmp("420mpeg2", tokstart, 8) == 0) {
                pix_fmt = PIX_FMT_YUV420P;
                chroma_sample_location = AVCHROMA_LOC_LEFT;
-            } else if (strncmp("420paldv", tokstart, 8)==0) {
+            } else if (strncmp("420paldv", tokstart, 8) == 0) {
                pix_fmt = PIX_FMT_YUV420P;
                chroma_sample_location = AVCHROMA_LOC_TOPLEFT;
-            } else if (strncmp("411", tokstart, 3)==0)
+            } else if (strncmp("411", tokstart, 3) == 0)
                pix_fmt = PIX_FMT_YUV411P;
-            else if (strncmp("422", tokstart, 3)==0)
+            else if (strncmp("422", tokstart, 3) == 0)
                pix_fmt = PIX_FMT_YUV422P;
-            else if (strncmp("444alpha", tokstart, 8)==0) {
-                av_log(s, AV_LOG_ERROR, "Cannot handle 4:4:4:4 YUV4MPEG stream.\n");
+            else if (strncmp("444alpha", tokstart, 8) == 0 ) {
+                av_log(s, AV_LOG_ERROR, "Cannot handle 4:4:4:4 "
+                       "YUV4MPEG stream.\n");
                return -1;
-            } else if (strncmp("444", tokstart, 3)==0)
+            } else if (strncmp("444", tokstart, 3) == 0)
                pix_fmt = PIX_FMT_YUV444P;
-            else if (strncmp("mono",tokstart, 4)==0) {
+            else if (strncmp("mono", tokstart, 4) == 0) {
                pix_fmt = PIX_FMT_GRAY8;
            } else {
-                av_log(s, AV_LOG_ERROR, "YUV4MPEG stream contains an unknown pixel format.\n");
+                av_log(s, AV_LOG_ERROR, "YUV4MPEG stream contains an unknown "
+                       "pixel format.\n");
                return -1;
            }
-            while(tokstart<header_end&&*tokstart!=0x20) tokstart++;
+            while (tokstart < header_end && *tokstart != 0x20)
+                tokstart++;
            break;
        case 'I': // Interlace type
            switch (*tokstart++){
            case '?':
                break;
            case 'p':
-                s1->interlaced_frame=0;
+                s1->interlaced_frame = 0;
                break;
            case 't':
-                s1->interlaced_frame=1;
-                s1->top_field_first=1;
+                s1->interlaced_frame = 1;
+                s1->top_field_first = 1;
                break;
            case 'b':
-                s1->interlaced_frame=1;
-                s1->top_field_first=0;
+                s1->interlaced_frame = 1;
+                s1->top_field_first = 0;
                break;
            case 'm':
-                av_log(s, AV_LOG_ERROR, "YUV4MPEG stream contains mixed interlaced and non-interlaced frames.\n");
+                av_log(s, AV_LOG_ERROR, "YUV4MPEG stream contains mixed "
+                       "interlaced and non-interlaced frames.\n");
                return -1;
            default:
                av_log(s, AV_LOG_ERROR, "YUV4MPEG has invalid header.\n");
@@ -276,36 +289,39 @@ static int yuv4_read_header(AVFormatContext *s, AVFormatParameters *ap)
            }
            break;
        case 'F': // Frame rate
-            sscanf(tokstart,"%d:%d",&raten,&rated); // 0:0 if unknown
-            while(tokstart<header_end&&*tokstart!=0x20) tokstart++;
+            sscanf(tokstart, "%d:%d", &raten, &rated); // 0:0 if unknown
+            while (tokstart < header_end && *tokstart != 0x20)
+                tokstart++;
            break;
        case 'A': // Pixel aspect
-            sscanf(tokstart,"%d:%d",&aspectn,&aspectd); // 0:0 if unknown
-            while(tokstart<header_end&&*tokstart!=0x20) tokstart++;
+            sscanf(tokstart, "%d:%d", &aspectn, &aspectd); // 0:0 if unknown
+            while (tokstart < header_end && *tokstart != 0x20)
+                tokstart++;
            break;
        case 'X': // Vendor extensions
-            if (strncmp("YSCSS=",tokstart,6)==0) {
+            if (strncmp("YSCSS=", tokstart, 6) == 0) {
                // Older nonstandard pixel format representation
-                tokstart+=6;
-                if (strncmp("420JPEG",tokstart,7)==0)
-                    alt_pix_fmt=PIX_FMT_YUV420P;
-                else if (strncmp("420MPEG2",tokstart,8)==0)
-                    alt_pix_fmt=PIX_FMT_YUV420P;
-                else if (strncmp("420PALDV",tokstart,8)==0)
-                    alt_pix_fmt=PIX_FMT_YUV420P;
-                else if (strncmp("411",tokstart,3)==0)
-                    alt_pix_fmt=PIX_FMT_YUV411P;
-                else if (strncmp("422",tokstart,3)==0)
-                    alt_pix_fmt=PIX_FMT_YUV422P;
-                else if (strncmp("444",tokstart,3)==0)
-                    alt_pix_fmt=PIX_FMT_YUV444P;
+                tokstart += 6;
+                if (strncmp("420JPEG", tokstart, 7) == 0)
+                    alt_pix_fmt = PIX_FMT_YUV420P;
+                else if (strncmp("420MPEG2", tokstart, 8) == 0)
+                    alt_pix_fmt = PIX_FMT_YUV420P;
+                else if (strncmp("420PALDV", tokstart, 8) == 0)
+                    alt_pix_fmt = PIX_FMT_YUV420P;
+                else if (strncmp("411", tokstart, 3) == 0)
+                    alt_pix_fmt = PIX_FMT_YUV411P;
+                else if (strncmp("422", tokstart, 3) == 0)
+                    alt_pix_fmt = PIX_FMT_YUV422P;
+                else if (strncmp("444", tokstart, 3) == 0)
+                    alt_pix_fmt = PIX_FMT_YUV444P;
            }
-            while(tokstart<header_end&&*tokstart!=0x20) tokstart++;
+            while (tokstart < header_end && *tokstart != 0x20)
+                tokstart++;
            break;
        }
    }
 
-    if ((width == -1) || (height == -1)) {
+    if (width == -1 || height == -1) {
        av_log(s, AV_LOG_ERROR, "YUV4MPEG has invalid header.\n");
        return -1;
    }
@@ -329,16 +345,16 @@ static int yuv4_read_header(AVFormatContext *s, AVFormatParameters *ap)
     }
 
     st = avformat_new_stream(s, NULL);
-    if(!st)
+    if (!st)
         return AVERROR(ENOMEM);
     st->codec->width = width;
     st->codec->height = height;
-    av_reduce(&raten, &rated, raten, rated, (1UL<<31)-1);
+    av_reduce(&raten, &rated, raten, rated, (1UL << 31) - 1);
     avpriv_set_pts_info(st, 64, rated, raten);
     st->codec->pix_fmt = pix_fmt;
     st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
     st->codec->codec_id = CODEC_ID_RAWVIDEO;
-    st->sample_aspect_ratio= (AVRational){aspectn, aspectd};
+    st->sample_aspect_ratio = (AVRational){ aspectn, aspectd };
     st->codec->chroma_sample_location = chroma_sample_location;
 
     return 0;
@@ -352,17 +368,19 @@ static int yuv4_read_packet(AVFormatContext *s, AVPacket *pkt)
     AVStream *st = s->streams[0];
     struct frame_attributes *s1 = s->priv_data;
 
-    for (i=0; i<MAX_FRAME_HEADER; i++) {
+    for (i = 0; i < MAX_FRAME_HEADER; i++) {
         header[i] = avio_r8(s->pb);
         if (header[i] == '\n') {
-            header[i+1] = 0;
+            header[i + 1] = 0;
             break;
         }
     }
-    if (i == MAX_FRAME_HEADER) return -1;
-    if (strncmp(header, Y4M_FRAME_MAGIC, strlen(Y4M_FRAME_MAGIC))) return -1;
+    if (i == MAX_FRAME_HEADER)
+        return -1;
+    if (strncmp(header, Y4M_FRAME_MAGIC, strlen(Y4M_FRAME_MAGIC)))
+        return -1;
 
     width = st->codec->width;
     height = st->codec->height;
 
     packet_size = avpicture_get_size(st->codec->pix_fmt, width, height);
@@ -372,9 +390,9 @@ static int yuv4_read_packet(AVFormatContext *s, AVPacket *pkt)
     if (av_get_packet(s->pb, pkt, packet_size) != packet_size)
         return AVERROR(EIO);
 
-    if (s->streams[0]->codec->coded_frame) {
-        s->streams[0]->codec->coded_frame->interlaced_frame = s1->interlaced_frame;
-        s->streams[0]->codec->coded_frame->top_field_first = s1->top_field_first;
+    if (st->codec->coded_frame) {
+        st->codec->coded_frame->interlaced_frame = s1->interlaced_frame;
+        st->codec->coded_frame->top_field_first = s1->top_field_first;
     }
 
     pkt->stream_index = 0;
@@ -384,7 +402,7 @@ static int yuv4_read_packet(AVFormatContext *s, AVPacket *pkt)
 static int yuv4_probe(AVProbeData *pd)
 {
     /* check file header */
-    if (strncmp(pd->buf, Y4M_MAGIC, sizeof(Y4M_MAGIC)-1)==0)
+    if (strncmp(pd->buf, Y4M_MAGIC, sizeof(Y4M_MAGIC) - 1) == 0)
         return AVPROBE_SCORE_MAX;
     else
         return 0;
@@ -398,6 +416,6 @@ AVInputFormat ff_yuv4mpegpipe_demuxer = {
     .read_probe  = yuv4_probe,
     .read_header = yuv4_read_header,
     .read_packet = yuv4_read_packet,
-    .extensions = "y4m"
+    .extensions  = "y4m"
 };
 #endif