Commit 80bc648e authored by Paul B Mahol

avfilter: add tlut2 filter

parent f2d23ec0
Changelog
@@ -30,6 +30,7 @@ version <next>:
- libvmaf video filter
- Dolby E decoder and SMPTE 337M demuxer
- unpremultiply video filter
- tlut2 video filter
version 3.3:
- CrystalHD decoder moved to new decode API
doc/filters.texi
@@ -9946,9 +9946,13 @@ lutyuv=u='(val-maxval/2)*2+maxval/2':v='(val-maxval/2)*2+maxval/2'
@end example
@end itemize

-@section lut2
+@section lut2, tlut2

-Compute and apply a lookup table from two video inputs.
+The @code{lut2} filter takes two input streams and outputs one
+stream.
+
+The @code{tlut2} (time lut2) filter takes two consecutive frames
+from one single stream.

This filter accepts the following parameters:

@table @option
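To make the new documentation entry concrete, here is a minimal standalone sketch (not part of this commit) of the pairing semantics it describes: lut2 evaluates its per-component expressions on sample pairs taken from two input streams, while tlut2 evaluates them on each frame paired with the previous frame of a single stream. The absolute-difference "expression" and the helper names below are illustrative stand-ins for the user-supplied c0..c3 expressions.

#include <stdio.h>

/* toy stand-in for a user expression: absolute difference of the pair */
static unsigned eval_expr(unsigned x, unsigned y)
{
    return x > y ? x - y : y - x;
}

/* tlut2-style pairing over one stream: frame N is paired with frame N-1,
 * and the very first frame produces no output */
static void tlut2_like(const unsigned *frames, int nb_frames)
{
    for (int n = 1; n < nb_frames; n++)
        printf("out[%d] = f(cur=%u, prev=%u) = %u\n",
               n - 1, frames[n], frames[n - 1],
               eval_expr(frames[n], frames[n - 1]));
}

int main(void)
{
    /* one "pixel" per frame, just to show how frames are paired */
    unsigned frames[] = { 10, 12, 9, 9, 20 };
    tlut2_like(frames, 5);
    return 0;
}

With lut2, by contrast, x and y at each output position come from the two input streams at the same timestamp, which is why the real filter synchronizes its inputs through framesync2.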
libavfilter/Makefile
@@ -312,6 +312,7 @@ OBJS-$(CONFIG_THRESHOLD_FILTER) += vf_threshold.o framesync2.o
OBJS-$(CONFIG_THUMBNAIL_FILTER) += vf_thumbnail.o
OBJS-$(CONFIG_TILE_FILTER) += vf_tile.o
OBJS-$(CONFIG_TINTERLACE_FILTER) += vf_tinterlace.o
OBJS-$(CONFIG_TLUT2_FILTER) += vf_lut2.o framesync2.o
OBJS-$(CONFIG_TRANSPOSE_FILTER) += vf_transpose.o
OBJS-$(CONFIG_TRIM_FILTER) += trim.o
OBJS-$(CONFIG_UNPREMULTIPLY_FILTER) += vf_premultiply.o framesync2.o
libavfilter/allfilters.c
@@ -323,6 +323,7 @@ static void register_all(void)
REGISTER_FILTER(THUMBNAIL, thumbnail, vf);
REGISTER_FILTER(TILE, tile, vf);
REGISTER_FILTER(TINTERLACE, tinterlace, vf);
REGISTER_FILTER(TLUT2, tlut2, vf);
REGISTER_FILTER(TRANSPOSE, transpose, vf);
REGISTER_FILTER(TRIM, trim, vf);
REGISTER_FILTER(UNPREMULTIPLY, unpremultiply, vf);
libavfilter/version.h
@@ -30,8 +30,8 @@
#include "libavutil/version.h"

#define LIBAVFILTER_VERSION_MAJOR 6
-#define LIBAVFILTER_VERSION_MINOR 96
+#define LIBAVFILTER_VERSION_MINOR 97
-#define LIBAVFILTER_VERSION_MICRO 101
+#define LIBAVFILTER_VERSION_MICRO 100
#define LIBAVFILTER_VERSION_INT AV_VERSION_INT(LIBAVFILTER_VERSION_MAJOR, \
                                               LIBAVFILTER_VERSION_MINOR, \
libavfilter/vf_lut2.c
@@ -61,6 +61,8 @@ typedef struct LUT2Context {
int width[4], height[4];
int nb_planes;
int depth, depthx, depthy;
int tlut2;
AVFrame *prev_frame; /* only used with tlut2 */
void (*lut2)(struct LUT2Context *s, AVFrame *dst, AVFrame *srcx, AVFrame *srcy);
@@ -70,7 +72,7 @@ typedef struct LUT2Context {
#define OFFSET(x) offsetof(LUT2Context, x)
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
-static const AVOption lut2_options[] = {
+static const AVOption options[] = {
{ "c0", "set component #0 expression", OFFSET(comp_expr_str[0]), AV_OPT_TYPE_STRING, { .str = "x" }, .flags = FLAGS },
{ "c1", "set component #1 expression", OFFSET(comp_expr_str[1]), AV_OPT_TYPE_STRING, { .str = "x" }, .flags = FLAGS },
{ "c2", "set component #2 expression", OFFSET(comp_expr_str[2]), AV_OPT_TYPE_STRING, { .str = "x" }, .flags = FLAGS },
@@ -83,6 +85,8 @@ static av_cold void uninit(AVFilterContext *ctx)
LUT2Context *s = ctx->priv;
int i;
av_frame_free(&s->prev_frame);
for (i = 0; i < 4; i++) {
av_expr_free(s->comp_expr[i]);
s->comp_expr[i] = NULL;
@@ -133,6 +137,11 @@ static int config_inputx(AVFilterLink *inlink)
s->depthx = desc->comp[0].depth;
s->var_values[VAR_BITDEPTHX] = s->depthx;
if (s->tlut2) {
s->depthy = desc->comp[0].depth;
s->var_values[VAR_BITDEPTHY] = s->depthy;
}
return 0;
}
@@ -232,13 +241,64 @@ static int config_output(AVFilterLink *outlink)
{
AVFilterContext *ctx = outlink->src;
LUT2Context *s = ctx->priv;
AVFilterLink *srcx = ctx->inputs[0];
AVFilterLink *srcy = ctx->inputs[1];
FFFrameSyncIn *in;
int p, ret;
s->depth = s->depthx + s->depthy;
s->lut2 = s->depth > 16 ? lut2_16bit : lut2_8bit;
for (p = 0; p < s->nb_planes; p++) {
s->lut[p] = av_malloc_array(1 << s->depth, sizeof(uint16_t));
if (!s->lut[p])
return AVERROR(ENOMEM);
}
for (p = 0; p < s->nb_planes; p++) {
double res;
int x, y;
/* create the parsed expression */
av_expr_free(s->comp_expr[p]);
s->comp_expr[p] = NULL;
ret = av_expr_parse(&s->comp_expr[p], s->comp_expr_str[p],
var_names, NULL, NULL, NULL, NULL, 0, ctx);
if (ret < 0) {
av_log(ctx, AV_LOG_ERROR,
"Error when parsing the expression '%s' for the component %d.\n",
s->comp_expr_str[p], p);
return AVERROR(EINVAL);
}
/* compute the lut */
for (y = 0; y < (1 << s->depthx); y++) {
s->var_values[VAR_Y] = y;
for (x = 0; x < (1 << s->depthx); x++) {
s->var_values[VAR_X] = x;
res = av_expr_eval(s->comp_expr[p], s->var_values, s);
if (isnan(res)) {
av_log(ctx, AV_LOG_ERROR,
"Error when evaluating the expression '%s' for the values %d and %d for the component %d.\n",
s->comp_expr_str[p], x, y, p);
return AVERROR(EINVAL);
}
s->lut[p][(y << s->depthx) + x] = res;
}
}
}
return 0;
}
static int lut2_config_output(AVFilterLink *outlink)
{
AVFilterContext *ctx = outlink->src;
LUT2Context *s = ctx->priv;
AVFilterLink *srcx = ctx->inputs[0];
AVFilterLink *srcy = ctx->inputs[1];
FFFrameSyncIn *in;
int ret;
if (srcx->format != srcy->format) {
av_log(ctx, AV_LOG_ERROR, "inputs must be of same pixel format\n");
return AVERROR(EINVAL);
@@ -281,47 +341,8 @@ static int config_output(AVFilterLink *outlink)
s->fs.opaque = s;
s->fs.on_event = process_frame;
-s->lut2 = s->depth > 16 ? lut2_16bit : lut2_8bit;
+if ((ret = config_output(outlink)) < 0)
+    return ret;
for (p = 0; p < s->nb_planes; p++) {
s->lut[p] = av_malloc_array(1 << s->depth, sizeof(uint16_t));
if (!s->lut[p])
return AVERROR(ENOMEM);
}
for (p = 0; p < s->nb_planes; p++) {
double res;
int x, y;
/* create the parsed expression */
av_expr_free(s->comp_expr[p]);
s->comp_expr[p] = NULL;
ret = av_expr_parse(&s->comp_expr[p], s->comp_expr_str[p],
var_names, NULL, NULL, NULL, NULL, 0, ctx);
if (ret < 0) {
av_log(ctx, AV_LOG_ERROR,
"Error when parsing the expression '%s' for the component %d.\n",
s->comp_expr_str[p], p);
return AVERROR(EINVAL);
}
/* compute the lut */
for (y = 0; y < (1 << s->depthx); y++) {
s->var_values[VAR_Y] = y;
for (x = 0; x < (1 << s->depthx); x++) {
s->var_values[VAR_X] = x;
res = av_expr_eval(s->comp_expr[p], s->var_values, s);
if (isnan(res)) {
av_log(ctx, AV_LOG_ERROR,
"Error when evaluating the expression '%s' for the values %d and %d for the component %d.\n",
s->comp_expr_str[p], x, y, p);
return AVERROR(EINVAL);
}
s->lut[p][(y << s->depthx) + x] = res;
}
}
}
return ff_framesync2_configure(&s->fs);
}
@@ -350,11 +371,13 @@ static const AVFilterPad outputs[] = {
{
.name = "default",
.type = AVMEDIA_TYPE_VIDEO,
-.config_props = config_output,
+.config_props = lut2_config_output,
},
{ NULL }
};
#define lut2_options options
AVFILTER_DEFINE_CLASS(lut2);
AVFilter ff_vf_lut2 = {
@@ -369,3 +392,73 @@ AVFilter ff_vf_lut2 = {
.outputs = outputs,
.flags = AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL,
};
#if CONFIG_TLUT2_FILTER
static av_cold int init(AVFilterContext *ctx)
{
LUT2Context *s = ctx->priv;
s->tlut2 = !strcmp(ctx->filter->name, "tlut2");
return 0;
}
static int tlut2_filter_frame(AVFilterLink *inlink, AVFrame *frame)
{
LUT2Context *s = inlink->dst->priv;
AVFilterLink *outlink = inlink->dst->outputs[0];
if (s->prev_frame) {
AVFrame *out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
if (!out) {
av_frame_free(&s->prev_frame);
s->prev_frame = frame;
return AVERROR(ENOMEM);
}
av_frame_copy_props(out, frame);
s->lut2(s, out, frame, s->prev_frame);
av_frame_free(&s->prev_frame);
s->prev_frame = frame;
return ff_filter_frame(outlink, out);
}
s->prev_frame = frame;
return 0;
}
#define tlut2_options options
AVFILTER_DEFINE_CLASS(tlut2);
static const AVFilterPad tlut2_inputs[] = {
{
.name = "default",
.type = AVMEDIA_TYPE_VIDEO,
.filter_frame = tlut2_filter_frame,
.config_props = config_inputx,
},
{ NULL }
};
static const AVFilterPad tlut2_outputs[] = {
{
.name = "default",
.type = AVMEDIA_TYPE_VIDEO,
.config_props = config_output,
},
{ NULL }
};
AVFilter ff_vf_tlut2 = {
.name = "tlut2",
.description = NULL_IF_CONFIG_SMALL("Compute and apply a lookup table from two successive frames."),
.priv_size = sizeof(LUT2Context),
.priv_class = &tlut2_class,
.query_formats = query_formats,
.init = init,
.uninit = uninit,
.inputs = tlut2_inputs,
.outputs = tlut2_outputs,
};
#endif
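For context, a self-contained sketch (not FFmpeg code) of the lookup-table scheme the filter relies on: config_output precomputes one table entry per (x, y) pair of input samples, indexed as (y << depthx) + x, so each output pixel costs a single lookup in lut2_8bit()/lut2_16bit(). The equal 8-bit depths and the averaging expression below are assumptions made only for illustration.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define DEPTHX 8
#define DEPTHY 8

int main(void)
{
    /* combined depth: one entry per possible (x, y) pair */
    size_t size = (size_t)1 << (DEPTHX + DEPTHY);
    uint16_t *lut = malloc(size * sizeof(*lut));
    if (!lut)
        return 1;

    /* toy "expression": average of the two source samples */
    for (unsigned y = 0; y < (1u << DEPTHY); y++)
        for (unsigned x = 0; x < (1u << DEPTHX); x++)
            lut[(y << DEPTHX) + x] = (uint16_t)((x + y) / 2);

    /* applying the filter is then one lookup per pixel pair */
    unsigned srcx = 200, srcy = 100;
    printf("out = %u\n", (unsigned)lut[(srcy << DEPTHX) + srcx]);

    free(lut);
    return 0;
}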