/*
 * Copyright (c) 2011 Stefano Sabatini
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Compute a look-up table for binding the input value to the output
 * value, and apply it to input video.
 */
#include "libavutil/attributes.h"
#include "libavutil/common.h"
#include "libavutil/eval.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "avfilter.h"
#include "drawutils.h"
#include "formats.h"
#include "internal.h"
#include "video.h"
38 static const char *const var_names
[] = {
39 "w", ///< width of the input video
40 "h", ///< height of the input video
41 "val", ///< input value for the pixel
42 "maxval", ///< max value for the pixel
43 "minval", ///< min value for the pixel
44 "negval", ///< negated value
60 typedef struct LutContext
{
62 uint8_t lut
[4][256]; ///< lookup table for each component
63 char *comp_expr_str
[4];
66 double var_values
[VAR_VARS_NB
];
69 int negate_alpha
; /* only used by negate */
/* shorthand for declaring AVOption offsets into LutContext */
#define OFFSET(x) offsetof(LutContext, x)
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
83 static const AVOption options
[] = {
84 { "c0", "set component #0 expression", OFFSET(comp_expr_str
[0]), AV_OPT_TYPE_STRING
, { .str
= "val" }, .flags
= FLAGS
},
85 { "c1", "set component #1 expression", OFFSET(comp_expr_str
[1]), AV_OPT_TYPE_STRING
, { .str
= "val" }, .flags
= FLAGS
},
86 { "c2", "set component #2 expression", OFFSET(comp_expr_str
[2]), AV_OPT_TYPE_STRING
, { .str
= "val" }, .flags
= FLAGS
},
87 { "c3", "set component #3 expression", OFFSET(comp_expr_str
[3]), AV_OPT_TYPE_STRING
, { .str
= "val" }, .flags
= FLAGS
},
88 { "y", "set Y expression", OFFSET(comp_expr_str
[Y
]), AV_OPT_TYPE_STRING
, { .str
= "val" }, .flags
= FLAGS
},
89 { "u", "set U expression", OFFSET(comp_expr_str
[U
]), AV_OPT_TYPE_STRING
, { .str
= "val" }, .flags
= FLAGS
},
90 { "v", "set V expression", OFFSET(comp_expr_str
[V
]), AV_OPT_TYPE_STRING
, { .str
= "val" }, .flags
= FLAGS
},
91 { "r", "set R expression", OFFSET(comp_expr_str
[R
]), AV_OPT_TYPE_STRING
, { .str
= "val" }, .flags
= FLAGS
},
92 { "g", "set G expression", OFFSET(comp_expr_str
[G
]), AV_OPT_TYPE_STRING
, { .str
= "val" }, .flags
= FLAGS
},
93 { "b", "set B expression", OFFSET(comp_expr_str
[B
]), AV_OPT_TYPE_STRING
, { .str
= "val" }, .flags
= FLAGS
},
94 { "a", "set A expression", OFFSET(comp_expr_str
[A
]), AV_OPT_TYPE_STRING
, { .str
= "val" }, .flags
= FLAGS
},
98 static av_cold
void uninit(AVFilterContext
*ctx
)
100 LutContext
*s
= ctx
->priv
;
103 for (i
= 0; i
< 4; i
++) {
104 av_expr_free(s
->comp_expr
[i
]);
105 s
->comp_expr
[i
] = NULL
;
106 av_freep(&s
->comp_expr_str
[i
]);
/* planar YUV formats handled by the per-plane LUT path */
#define YUV_FORMATS                                                 \
    AV_PIX_FMT_YUV444P,  AV_PIX_FMT_YUV422P,  AV_PIX_FMT_YUV420P,   \
    AV_PIX_FMT_YUV411P,  AV_PIX_FMT_YUV410P,  AV_PIX_FMT_YUV440P,   \
    AV_PIX_FMT_YUVA420P, AV_PIX_FMT_YUVA422P, AV_PIX_FMT_YUVA444P,  \
    AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ420P,  \
    AV_PIX_FMT_YUVJ440P
/* packed RGB formats handled by the per-pixel (step-based) LUT path */
#define RGB_FORMATS \
    AV_PIX_FMT_ARGB, AV_PIX_FMT_RGBA, \
    AV_PIX_FMT_ABGR, AV_PIX_FMT_BGRA, \
    AV_PIX_FMT_RGB24, AV_PIX_FMT_BGR24
/* format lists used by query_formats(), each terminated by AV_PIX_FMT_NONE */
static const enum AVPixelFormat yuv_pix_fmts[] = { YUV_FORMATS, AV_PIX_FMT_NONE };
static const enum AVPixelFormat rgb_pix_fmts[] = { RGB_FORMATS, AV_PIX_FMT_NONE };
static const enum AVPixelFormat all_pix_fmts[] = { RGB_FORMATS, YUV_FORMATS, AV_PIX_FMT_NONE };
126 static int query_formats(AVFilterContext
*ctx
)
128 LutContext
*s
= ctx
->priv
;
130 const enum AVPixelFormat
*pix_fmts
= s
->is_rgb
? rgb_pix_fmts
:
131 s
->is_yuv
? yuv_pix_fmts
:
134 ff_set_common_formats(ctx
, ff_make_format_list(pix_fmts
));
139 * Clip value val in the minval - maxval range.
141 static double clip(void *opaque
, double val
)
143 LutContext
*s
= opaque
;
144 double minval
= s
->var_values
[VAR_MINVAL
];
145 double maxval
= s
->var_values
[VAR_MAXVAL
];
147 return av_clip(val
, minval
, maxval
);
151 * Compute gamma correction for value val, assuming the minval-maxval
152 * range, val is clipped to a value contained in the same interval.
154 static double compute_gammaval(void *opaque
, double gamma
)
156 LutContext
*s
= opaque
;
157 double val
= s
->var_values
[VAR_CLIPVAL
];
158 double minval
= s
->var_values
[VAR_MINVAL
];
159 double maxval
= s
->var_values
[VAR_MAXVAL
];
161 return pow((val
-minval
)/(maxval
-minval
), gamma
) * (maxval
-minval
)+minval
;
164 static double (* const funcs1
[])(void *, double) = {
166 (void *)compute_gammaval
,
170 static const char * const funcs1_names
[] = {
176 static int config_props(AVFilterLink
*inlink
)
178 AVFilterContext
*ctx
= inlink
->dst
;
179 LutContext
*s
= ctx
->priv
;
180 const AVPixFmtDescriptor
*desc
= av_pix_fmt_desc_get(inlink
->format
);
181 uint8_t rgba_map
[4]; /* component index -> RGBA color index map */
185 s
->hsub
= desc
->log2_chroma_w
;
186 s
->vsub
= desc
->log2_chroma_h
;
188 s
->var_values
[VAR_W
] = inlink
->w
;
189 s
->var_values
[VAR_H
] = inlink
->h
;
191 switch (inlink
->format
) {
192 case AV_PIX_FMT_YUV410P
:
193 case AV_PIX_FMT_YUV411P
:
194 case AV_PIX_FMT_YUV420P
:
195 case AV_PIX_FMT_YUV422P
:
196 case AV_PIX_FMT_YUV440P
:
197 case AV_PIX_FMT_YUV444P
:
198 case AV_PIX_FMT_YUVA420P
:
199 case AV_PIX_FMT_YUVA422P
:
200 case AV_PIX_FMT_YUVA444P
:
201 min
[Y
] = min
[U
] = min
[V
] = 16;
203 max
[U
] = max
[V
] = 240;
204 min
[A
] = 0; max
[A
] = 255;
207 min
[0] = min
[1] = min
[2] = min
[3] = 0;
208 max
[0] = max
[1] = max
[2] = max
[3] = 255;
211 s
->is_yuv
= s
->is_rgb
= 0;
212 if (ff_fmt_is_in(inlink
->format
, yuv_pix_fmts
)) s
->is_yuv
= 1;
213 else if (ff_fmt_is_in(inlink
->format
, rgb_pix_fmts
)) s
->is_rgb
= 1;
216 ff_fill_rgba_map(rgba_map
, inlink
->format
);
217 s
->step
= av_get_bits_per_pixel(desc
) >> 3;
220 for (color
= 0; color
< desc
->nb_components
; color
++) {
222 int comp
= s
->is_rgb
? rgba_map
[color
] : color
;
224 /* create the parsed expression */
225 av_expr_free(s
->comp_expr
[color
]);
226 s
->comp_expr
[color
] = NULL
;
227 ret
= av_expr_parse(&s
->comp_expr
[color
], s
->comp_expr_str
[color
],
228 var_names
, funcs1_names
, funcs1
, NULL
, NULL
, 0, ctx
);
230 av_log(ctx
, AV_LOG_ERROR
,
231 "Error when parsing the expression '%s' for the component %d and color %d.\n",
232 s
->comp_expr_str
[comp
], comp
, color
);
233 return AVERROR(EINVAL
);
236 /* compute the lut */
237 s
->var_values
[VAR_MAXVAL
] = max
[color
];
238 s
->var_values
[VAR_MINVAL
] = min
[color
];
240 for (val
= 0; val
< 256; val
++) {
241 s
->var_values
[VAR_VAL
] = val
;
242 s
->var_values
[VAR_CLIPVAL
] = av_clip(val
, min
[color
], max
[color
]);
243 s
->var_values
[VAR_NEGVAL
] =
244 av_clip(min
[color
] + max
[color
] - s
->var_values
[VAR_VAL
],
245 min
[color
], max
[color
]);
247 res
= av_expr_eval(s
->comp_expr
[color
], s
->var_values
, s
);
249 av_log(ctx
, AV_LOG_ERROR
,
250 "Error when evaluating the expression '%s' for the value %d for the component %d.\n",
251 s
->comp_expr_str
[color
], val
, comp
);
252 return AVERROR(EINVAL
);
254 s
->lut
[comp
][val
] = av_clip((int)res
, min
[color
], max
[color
]);
255 av_log(ctx
, AV_LOG_DEBUG
, "val[%d][%d] = %d\n", comp
, val
, s
->lut
[comp
][val
]);
262 static int filter_frame(AVFilterLink
*inlink
, AVFrame
*in
)
264 AVFilterContext
*ctx
= inlink
->dst
;
265 LutContext
*s
= ctx
->priv
;
266 AVFilterLink
*outlink
= ctx
->outputs
[0];
268 uint8_t *inrow
, *outrow
, *inrow0
, *outrow0
;
269 int i
, j
, plane
, direct
= 0;
271 if (av_frame_is_writable(in
)) {
275 out
= ff_get_video_buffer(outlink
, outlink
->w
, outlink
->h
);
278 return AVERROR(ENOMEM
);
280 av_frame_copy_props(out
, in
);
285 inrow0
= in
->data
[0];
286 outrow0
= out
->data
[0];
288 for (i
= 0; i
< in
->height
; i
++) {
290 const uint8_t (*tab
)[256] = (const uint8_t (*)[256])s
->lut
;
293 for (j
= 0; j
< w
; j
++) {
295 case 4: outrow
[3] = tab
[3][inrow
[3]]; // Fall-through
296 case 3: outrow
[2] = tab
[2][inrow
[2]]; // Fall-through
297 case 2: outrow
[1] = tab
[1][inrow
[1]]; // Fall-through
298 default: outrow
[0] = tab
[0][inrow
[0]];
303 inrow0
+= in
->linesize
[0];
304 outrow0
+= out
->linesize
[0];
308 for (plane
= 0; plane
< 4 && in
->data
[plane
] && in
->linesize
[plane
]; plane
++) {
309 int vsub
= plane
== 1 || plane
== 2 ? s
->vsub
: 0;
310 int hsub
= plane
== 1 || plane
== 2 ? s
->hsub
: 0;
311 int h
= FF_CEIL_RSHIFT(inlink
->h
, vsub
);
312 int w
= FF_CEIL_RSHIFT(inlink
->w
, hsub
);
314 inrow
= in
->data
[plane
];
315 outrow
= out
->data
[plane
];
317 for (i
= 0; i
< h
; i
++) {
318 const uint8_t *tab
= s
->lut
[plane
];
319 for (j
= 0; j
< w
; j
++)
320 outrow
[j
] = tab
[inrow
[j
]];
321 inrow
+= in
->linesize
[plane
];
322 outrow
+= out
->linesize
[plane
];
330 return ff_filter_frame(outlink
, out
);
333 static const AVFilterPad inputs
[] = {
335 .type
= AVMEDIA_TYPE_VIDEO
,
336 .filter_frame
= filter_frame
,
337 .config_props
= config_props
,
341 static const AVFilterPad outputs
[] = {
343 .type
= AVMEDIA_TYPE_VIDEO
,
/* Declare one AVFilter instance sharing the common pads, callbacks and
 * LutContext; each variant supplies its own name_##_init, name_##_class
 * and description. */
#define DEFINE_LUT_FILTER(name_, description_)                          \
    AVFilter ff_vf_##name_ = {                                          \
        .name          = #name_,                                        \
        .description   = NULL_IF_CONFIG_SMALL(description_),            \
        .priv_size     = sizeof(LutContext),                            \
        .priv_class    = &name_ ## _class,                              \
        .init          = name_##_init,                                  \
        .uninit        = uninit,                                        \
        .query_formats = query_formats,                                 \
        .inputs        = inputs,                                        \
        .outputs       = outputs,                                       \
        .flags         = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC,        \
    }
#if CONFIG_LUT_FILTER

#define lut_options options
AVFILTER_DEFINE_CLASS(lut);

/* generic variant: accepts both RGB and YUV, nothing to initialize */
static int lut_init(AVFilterContext *ctx)
{
    return 0;
}

DEFINE_LUT_FILTER(lut, "Compute and apply a lookup table to the RGB/YUV input video.");
#endif
#if CONFIG_LUTYUV_FILTER

#define lutyuv_options options
AVFILTER_DEFINE_CLASS(lutyuv);

/* YUV-only variant: restrict query_formats() to the YUV list */
static av_cold int lutyuv_init(AVFilterContext *ctx)
{
    LutContext *s = ctx->priv;

    s->is_yuv = 1;

    return 0;
}

DEFINE_LUT_FILTER(lutyuv, "Compute and apply a lookup table to the YUV input video.");
#endif
#if CONFIG_LUTRGB_FILTER

#define lutrgb_options options
AVFILTER_DEFINE_CLASS(lutrgb);

/* RGB-only variant: restrict query_formats() to the RGB list */
static av_cold int lutrgb_init(AVFilterContext *ctx)
{
    LutContext *s = ctx->priv;

    s->is_rgb = 1;

    return 0;
}

DEFINE_LUT_FILTER(lutrgb, "Compute and apply a lookup table to the RGB input video.");
#endif
#if CONFIG_NEGATE_FILTER

static const AVOption negate_options[] = {
    { "negate_alpha", NULL, OFFSET(negate_alpha), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, FLAGS },
    { NULL } /* AVOption arrays must be NULL-terminated */
};

AVFILTER_DEFINE_CLASS(negate);

/**
 * Build "255-val" expressions for every component; the alpha component
 * is left as the identity unless negate_alpha was requested.
 */
static av_cold int negate_init(AVFilterContext *ctx)
{
    LutContext *s = ctx->priv;
    int i;

    av_log(ctx, AV_LOG_DEBUG, "negate_alpha:%d\n", s->negate_alpha);

    for (i = 0; i < 4; i++) {
        s->comp_expr_str[i] = av_strdup((i == 3 && !s->negate_alpha) ?
                                        "val" : "255-val");
        if (!s->comp_expr_str[i]) {
            uninit(ctx);
            return AVERROR(ENOMEM);
        }
    }

    return 0;
}

DEFINE_LUT_FILTER(negate, "Negate input video.");

#endif