2 * Copyright (c) 2013 Paul B Mahol
4 * This file is part of FFmpeg.
6 * FFmpeg is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Lesser General Public
8 * License as published by the Free Software Foundation; either
9 * version 2.1 of the License, or (at your option) any later version.
11 * FFmpeg is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Lesser General Public License for more details.
16 * You should have received a copy of the GNU Lesser General Public
17 * License along with FFmpeg; if not, write to the Free Software
18 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21 #include "libavutil/imgutils.h"
22 #include "libavutil/eval.h"
23 #include "libavutil/opt.h"
24 #include "libavutil/pixfmt.h"
26 #include "bufferqueue.h"
29 #include "dualinput.h"
/* Names of the variables available inside user-supplied blend expressions
 * (the c?_expr / all_expr options); parsed by av_expr_parse() in init(). */
static const char *const var_names[] = { "X", "Y", "W", "H", "SW", "SH", "T", "N", "A", "B", "TOP", "BOTTOM", NULL };
/* Indices into the values[] array handed to av_expr_eval();
 * MUST stay in the same order as var_names[] above. */
enum {
    VAR_X,       /* current column */
    VAR_Y,       /* current row */
    VAR_W,       /* plane width */
    VAR_H,       /* plane height */
    VAR_SW,      /* horizontal plane scale vs. frame width */
    VAR_SH,      /* vertical plane scale vs. frame height */
    VAR_T,       /* frame time in seconds (NAN if no pts) */
    VAR_N,       /* frame number */
    VAR_A,       /* top input sample (alias of TOP) */
    VAR_B,       /* bottom input sample (alias of BOTTOM) */
    VAR_TOP,
    VAR_BOTTOM,
    VAR_VARS_NB  /* number of variables; size of values[] */
};
/* Per-component (per-plane) blend configuration.
 * NOTE(review): the remaining members of this struct (and its closing
 * brace) fall outside this chunk; only the callback is visible here. */
typedef struct FilterParams
{
    /* Worker that blends rows [start, end) of one plane: reads `width`
     * samples per row from top/bottom and writes the result to dst.
     * `values` carries the expression-variable array used by the
     * expression-based worker (blend_expr); fixed-mode workers ignore it. */
    void (*blend)(const uint8_t *top, int top_linesize,
                  const uint8_t *bottom, int bottom_linesize,
                  uint8_t *dst, int dst_linesize,
                  int width, int start, int end,
                  struct FilterParams *param, double *values);
/* Per-frame job context passed to filter_slice() via ctx->internal->execute.
 * NOTE(review): the remaining members (dst, inlink, w, h, plane, param —
 * as referenced by filter_slice() below) are outside this chunk. */
typedef struct ThreadData
{
    const AVFrame *top, *bottom;  /* the two input frames being blended */
    /* NOTE(review): these lines are members of the filter's private context
     * (BlendContext, per the OFFSET macro below); the struct's opening and
     * other members are outside this chunk. */
    FFDualInputContext dinput;  /* two-input frame synchronization state */
    int hsub, vsub;             ///< chroma subsampling values
    enum BlendMode all_mode;    /* mode forced on every component when >= 0 */
    FilterParams params[4];     /* per-plane blend settings */
/* AVOption plumbing: field offsets into the private context, and the
 * option flags shared by every entry in blend_options[]. */
#define OFFSET(x) offsetof(BlendContext, x)
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
/* User-visible options: a blend mode per component (c0..c3), a global
 * override (all_mode / all_expr / all_opacity), per-component expressions
 * and opacities, plus dual-input stream-sync knobs.
 * NOTE(review): the table's terminating { NULL } entry and closing brace
 * fall outside this chunk. */
static const AVOption blend_options[] = {
    { "c0_mode", "set component #0 blend mode", OFFSET(params[0].mode), AV_OPT_TYPE_INT, {.i64=0}, 0, BLEND_NB-1, FLAGS, "mode"},
    { "c1_mode", "set component #1 blend mode", OFFSET(params[1].mode), AV_OPT_TYPE_INT, {.i64=0}, 0, BLEND_NB-1, FLAGS, "mode"},
    { "c2_mode", "set component #2 blend mode", OFFSET(params[2].mode), AV_OPT_TYPE_INT, {.i64=0}, 0, BLEND_NB-1, FLAGS, "mode"},
    { "c3_mode", "set component #3 blend mode", OFFSET(params[3].mode), AV_OPT_TYPE_INT, {.i64=0}, 0, BLEND_NB-1, FLAGS, "mode"},
    /* -1 (the default) means "don't override the per-component modes". */
    { "all_mode", "set blend mode for all components", OFFSET(all_mode), AV_OPT_TYPE_INT, {.i64=-1},-1, BLEND_NB-1, FLAGS, "mode"},
    /* Named constants selectable for any of the *_mode options above. */
    { "addition",   "", 0, AV_OPT_TYPE_CONST, {.i64=BLEND_ADDITION},   0, 0, FLAGS, "mode" },
    { "and",        "", 0, AV_OPT_TYPE_CONST, {.i64=BLEND_AND},        0, 0, FLAGS, "mode" },
    { "average",    "", 0, AV_OPT_TYPE_CONST, {.i64=BLEND_AVERAGE},    0, 0, FLAGS, "mode" },
    { "burn",       "", 0, AV_OPT_TYPE_CONST, {.i64=BLEND_BURN},       0, 0, FLAGS, "mode" },
    { "darken",     "", 0, AV_OPT_TYPE_CONST, {.i64=BLEND_DARKEN},     0, 0, FLAGS, "mode" },
    { "difference", "", 0, AV_OPT_TYPE_CONST, {.i64=BLEND_DIFFERENCE}, 0, 0, FLAGS, "mode" },
    { "divide",     "", 0, AV_OPT_TYPE_CONST, {.i64=BLEND_DIVIDE},     0, 0, FLAGS, "mode" },
    { "dodge",      "", 0, AV_OPT_TYPE_CONST, {.i64=BLEND_DODGE},      0, 0, FLAGS, "mode" },
    { "exclusion",  "", 0, AV_OPT_TYPE_CONST, {.i64=BLEND_EXCLUSION},  0, 0, FLAGS, "mode" },
    { "hardlight",  "", 0, AV_OPT_TYPE_CONST, {.i64=BLEND_HARDLIGHT},  0, 0, FLAGS, "mode" },
    { "lighten",    "", 0, AV_OPT_TYPE_CONST, {.i64=BLEND_LIGHTEN},    0, 0, FLAGS, "mode" },
    { "multiply",   "", 0, AV_OPT_TYPE_CONST, {.i64=BLEND_MULTIPLY},   0, 0, FLAGS, "mode" },
    { "negation",   "", 0, AV_OPT_TYPE_CONST, {.i64=BLEND_NEGATION},   0, 0, FLAGS, "mode" },
    { "normal",     "", 0, AV_OPT_TYPE_CONST, {.i64=BLEND_NORMAL},     0, 0, FLAGS, "mode" },
    { "or",         "", 0, AV_OPT_TYPE_CONST, {.i64=BLEND_OR},         0, 0, FLAGS, "mode" },
    { "overlay",    "", 0, AV_OPT_TYPE_CONST, {.i64=BLEND_OVERLAY},    0, 0, FLAGS, "mode" },
    { "phoenix",    "", 0, AV_OPT_TYPE_CONST, {.i64=BLEND_PHOENIX},    0, 0, FLAGS, "mode" },
    { "pinlight",   "", 0, AV_OPT_TYPE_CONST, {.i64=BLEND_PINLIGHT},   0, 0, FLAGS, "mode" },
    { "reflect",    "", 0, AV_OPT_TYPE_CONST, {.i64=BLEND_REFLECT},    0, 0, FLAGS, "mode" },
    { "screen",     "", 0, AV_OPT_TYPE_CONST, {.i64=BLEND_SCREEN},     0, 0, FLAGS, "mode" },
    { "softlight",  "", 0, AV_OPT_TYPE_CONST, {.i64=BLEND_SOFTLIGHT},  0, 0, FLAGS, "mode" },
    { "subtract",   "", 0, AV_OPT_TYPE_CONST, {.i64=BLEND_SUBTRACT},   0, 0, FLAGS, "mode" },
    { "vividlight", "", 0, AV_OPT_TYPE_CONST, {.i64=BLEND_VIVIDLIGHT}, 0, 0, FLAGS, "mode" },
    { "xor",        "", 0, AV_OPT_TYPE_CONST, {.i64=BLEND_XOR},        0, 0, FLAGS, "mode" },
    /* Free-form per-pixel expressions; when set they override the mode. */
    { "c0_expr", "set color component #0 expression", OFFSET(params[0].expr_str), AV_OPT_TYPE_STRING, {.str=NULL}, CHAR_MIN, CHAR_MAX, FLAGS },
    { "c1_expr", "set color component #1 expression", OFFSET(params[1].expr_str), AV_OPT_TYPE_STRING, {.str=NULL}, CHAR_MIN, CHAR_MAX, FLAGS },
    { "c2_expr", "set color component #2 expression", OFFSET(params[2].expr_str), AV_OPT_TYPE_STRING, {.str=NULL}, CHAR_MIN, CHAR_MAX, FLAGS },
    { "c3_expr", "set color component #3 expression", OFFSET(params[3].expr_str), AV_OPT_TYPE_STRING, {.str=NULL}, CHAR_MIN, CHAR_MAX, FLAGS },
    { "all_expr", "set expression for all color components", OFFSET(all_expr), AV_OPT_TYPE_STRING, {.str=NULL}, CHAR_MIN, CHAR_MAX, FLAGS },
    /* 1.0 = full effect of the blended value; < 1 mixes back toward top. */
    { "c0_opacity", "set color component #0 opacity", OFFSET(params[0].opacity), AV_OPT_TYPE_DOUBLE, {.dbl=1}, 0, 1, FLAGS },
    { "c1_opacity", "set color component #1 opacity", OFFSET(params[1].opacity), AV_OPT_TYPE_DOUBLE, {.dbl=1}, 0, 1, FLAGS },
    { "c2_opacity", "set color component #2 opacity", OFFSET(params[2].opacity), AV_OPT_TYPE_DOUBLE, {.dbl=1}, 0, 1, FLAGS },
    { "c3_opacity", "set color component #3 opacity", OFFSET(params[3].opacity), AV_OPT_TYPE_DOUBLE, {.dbl=1}, 0, 1, FLAGS },
    { "all_opacity", "set opacity for all color components", OFFSET(all_opacity), AV_OPT_TYPE_DOUBLE, {.dbl=1}, 0, 1, FLAGS },
    /* Dual-input synchronization behavior. */
    { "shortest", "force termination when the shortest input terminates", OFFSET(dinput.shortest), AV_OPT_TYPE_INT, {.i64=0}, 0, 1, FLAGS },
    { "repeatlast", "repeat last bottom frame", OFFSET(dinput.repeatlast), AV_OPT_TYPE_INT, {.i64=1}, 0, 1, FLAGS },
/* Generates the blend_class AVClass referencing blend_options[]. */
AVFILTER_DEFINE_CLASS(blend);
/* "normal" mode fast path: copies the top plane's rows [start, end)
 * unchanged (bottom, param and values are unused by design — the
 * signature must match the FilterParams blend callback).
 * NOTE(review): the function's opening/closing braces fall outside
 * this chunk. */
static void blend_normal(const uint8_t *top, int top_linesize,
                         const uint8_t *bottom, int bottom_linesize,
                         uint8_t *dst, int dst_linesize,
                         int width, int start, int end,
                         FilterParams *param, double *values)
    /* width bytes per row, (end - start) rows */
    av_image_copy_plane(dst, dst_linesize, top, top_linesize, width, end - start);
/* Expands to a blend_<name>() row-range worker: for each pixel it
 * evaluates `expr` (in terms of the A/B sample macros defined elsewhere
 * in this file) and mixes the result with the top sample according to the
 * per-component opacity: dst = top + (expr - top) * opacity.
 * NOTE(review): the function's braces and the i/j declarations fall
 * outside this chunk. */
#define DEFINE_BLEND(name, expr) \
static void blend_## name(const uint8_t *top, int top_linesize, \
                          const uint8_t *bottom, int bottom_linesize, \
                          uint8_t *dst, int dst_linesize, \
                          int width, int start, int end, \
                          FilterParams *param, double *values) \
    double opacity = param->opacity; \
    for (i = start; i < end; i++) { \
        for (j = 0; j < width; j++) { \
            dst[j] = top[j] + ((expr) - top[j]) * opacity; \
        dst += dst_linesize; \
        top += top_linesize; \
        bottom += bottom_linesize; \
/* 8-bit integer helpers shared by several blend expressions.
 * `x` scales the product term: 1 for plain multiply/screen, 2 when used
 * inside the overlay/hardlight halves. BURN/DODGE guard their division
 * against a zero denominator via the leading ternary. */
#define MULTIPLY(x, a, b) ((x) * (((a) * (b)) / 255))
#define SCREEN(x, a, b) (255 - (x) * ((255 - (a)) * (255 - (b)) / 255))
#define BURN(a, b) (((a) == 0) ? (a) : FFMAX(0, 255 - ((255 - (b)) << 8) / (a)))
#define DODGE(a, b) (((a) == 255) ? (a) : FFMIN(255, (((b) << 8) / (255 - (a)))))
/* One worker function per blend mode; A is the top sample, B the bottom
 * sample (macros defined elsewhere in this file). Results outside 0..255
 * are clipped inside the expressions where needed.
 * NOTE(review): `divide` performs (float)A / (float)B with no guard —
 * B == 0 yields inf before the narrowing store; verify intended behavior. */
DEFINE_BLEND(addition,   FFMIN(255, A + B))
DEFINE_BLEND(average,    (A + B) / 2)
DEFINE_BLEND(subtract,   FFMAX(0, A - B))
DEFINE_BLEND(multiply,   MULTIPLY(1, A, B))
DEFINE_BLEND(negation,   255 - FFABS(255 - A - B))
DEFINE_BLEND(difference, FFABS(A - B))
DEFINE_BLEND(screen,     SCREEN(1, A, B))
/* overlay/hardlight are the same formula with the role of A and B swapped */
DEFINE_BLEND(overlay,    (A < 128) ? MULTIPLY(2, A, B) : SCREEN(2, A, B))
DEFINE_BLEND(hardlight,  (B < 128) ? MULTIPLY(2, B, A) : SCREEN(2, B, A))
DEFINE_BLEND(darken,     FFMIN(A, B))
DEFINE_BLEND(lighten,    FFMAX(A, B))
DEFINE_BLEND(divide,     ((float)A / ((float)B) * 255))
DEFINE_BLEND(dodge,      DODGE(A, B))
DEFINE_BLEND(burn,       BURN(A, B))
DEFINE_BLEND(softlight,  (A > 127) ? B + (255 - B) * (A - 127.5) / 127.5 * (0.5 - FFABS(B - 127.5) / 255): B - B * ((127.5 - A) / 127.5) * (0.5 - FFABS(B - 127.5)/255))
DEFINE_BLEND(exclusion,  A + B - 2 * A * B / 255)
DEFINE_BLEND(pinlight,   (B < 128) ? FFMIN(A, 2 * B) : FFMAX(A, 2 * (B - 128)))
DEFINE_BLEND(phoenix,    FFMIN(A, B) - FFMAX(A, B) + 255)
DEFINE_BLEND(reflect,    (B == 255) ? B : FFMIN(255, (A * A / (255 - B))))
/* bitwise modes */
DEFINE_BLEND(and, A & B)
DEFINE_BLEND(or, A | B)
DEFINE_BLEND(xor, A ^ B)
DEFINE_BLEND(vividlight, (B < 128) ? BURN(A, 2 * B) : DODGE(A, 2 * (B - 128)))
/* Generic worker used when a user expression is configured: evaluates the
 * pre-parsed AVExpr once per pixel with the current samples exposed as
 * A/TOP and B/BOTTOM.
 * NOTE(review): the function braces, the x/y declarations, the
 * values[VAR_X]/values[VAR_Y] updates and the top/dst pointer advances
 * fall outside this chunk. */
static void blend_expr(const uint8_t *top, int top_linesize,
                       const uint8_t *bottom, int bottom_linesize,
                       uint8_t *dst, int dst_linesize,
                       int width, int start, int end,
                       FilterParams *param, double *values)
    AVExpr *e = param->e;
    for (y = start; y < end; y++) {
        for (x = 0; x < width; x++) {
            /* expose the two source samples under both alias names */
            values[VAR_TOP] = values[VAR_A] = top[x];
            values[VAR_BOTTOM] = values[VAR_B] = bottom[x];
            dst[x] = av_expr_eval(e, values, NULL);
        bottom += bottom_linesize;
/* Slice-threading worker: blends its share of rows of one plane.
 * Called via ctx->internal->execute() from blend_frame(); `arg` is the
 * ThreadData built there.
 * NOTE(review): the function braces and the trailing return fall outside
 * this chunk. */
static int filter_slice(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
    ThreadData *td = arg;
    /* divide the plane's rows evenly among nb_jobs workers */
    int slice_start = (td->h * jobnr) / nb_jobs;
    int slice_end = (td->h * (jobnr+1)) / nb_jobs;
    const uint8_t *top = td->top->data[td->plane];
    const uint8_t *bottom = td->bottom->data[td->plane];
    uint8_t *dst = td->dst->data[td->plane];
    double values[VAR_VARS_NB];

    /* frame-global expression variables (per-pixel X/Y/A/B are filled in
     * by blend_expr itself) */
    values[VAR_N] = td->inlink->frame_count;
    values[VAR_T] = td->dst->pts == AV_NOPTS_VALUE ? NAN : td->dst->pts * av_q2d(td->inlink->time_base);
    values[VAR_W] = td->w;
    values[VAR_H] = td->h;
    values[VAR_SW] = td->w / (double)td->dst->width;
    values[VAR_SH] = td->h / (double)td->dst->height;

    /* offset every pointer to this job's first row, then delegate */
    td->param->blend(top + slice_start * td->top->linesize[td->plane],
                     td->top->linesize[td->plane],
                     bottom + slice_start * td->bottom->linesize[td->plane],
                     td->bottom->linesize[td->plane],
                     dst + slice_start * td->dst->linesize[td->plane],
                     td->dst->linesize[td->plane],
                     td->w, slice_start, slice_end, td->param, &values[0]);
/* Produces one blended output frame from a synchronized top/bottom pair
 * (registered as the FFDualInputContext process callback in init()).
 * Consumes top_buf; dst_buf is presumably returned to the caller.
 * NOTE(review): the dst_buf/plane declarations, the allocation-failure
 * check, the ThreadData initializer's remaining fields and closing brace,
 * and the return statement fall outside this chunk. */
static AVFrame *blend_frame(AVFilterContext *ctx, AVFrame *top_buf,
                            const AVFrame *bottom_buf)
    BlendContext *b = ctx->priv;
    AVFilterLink *inlink = ctx->inputs[0];
    AVFilterLink *outlink = ctx->outputs[0];

    dst_buf = ff_get_video_buffer(outlink, outlink->w, outlink->h);
    av_frame_copy_props(dst_buf, top_buf);

    for (plane = 0; plane < b->nb_planes; plane++) {
        /* planes 1 and 2 are chroma and may be subsampled */
        int hsub = plane == 1 || plane == 2 ? b->hsub : 0;
        int vsub = plane == 1 || plane == 2 ? b->vsub : 0;
        int outw = FF_CEIL_RSHIFT(dst_buf->width, hsub);
        int outh = FF_CEIL_RSHIFT(dst_buf->height, vsub);
        FilterParams *param = &b->params[plane];
        ThreadData td = { .top = top_buf, .bottom = bottom_buf, .dst = dst_buf,
                          .w = outw, .h = outh, .param = param, .plane = plane,
        /* one job per slice, capped by the graph's thread count */
        ctx->internal->execute(ctx, filter_slice, &td, NULL, FFMIN(outh, ctx->graph->nb_threads));

    av_frame_free(&top_buf);
/* Filter init: resolves each component's configured mode / opacity /
 * expression into a concrete blend callback, and registers blend_frame
 * as the dual-input process hook.
 * NOTE(review): the ret/plane declarations, the switch's closing brace,
 * the error check after av_expr_parse() and the return statements fall
 * outside this chunk. `&param` below repairs the source's mojibake
 * "¶m" (HTML-entity-corrupted ampersand). */
static av_cold int init(AVFilterContext *ctx)
    BlendContext *b = ctx->priv;

    for (plane = 0; plane < FF_ARRAY_ELEMS(b->params); plane++) {
        FilterParams *param = &b->params[plane];

        /* the all_* options override the per-component settings */
        if (b->all_mode >= 0)
            param->mode = b->all_mode;
        if (b->all_opacity < 1)
            param->opacity = b->all_opacity;

        /* map the selected mode to its worker function */
        switch (param->mode) {
        case BLEND_ADDITION:   param->blend = blend_addition;   break;
        case BLEND_AND:        param->blend = blend_and;        break;
        case BLEND_AVERAGE:    param->blend = blend_average;    break;
        case BLEND_BURN:       param->blend = blend_burn;       break;
        case BLEND_DARKEN:     param->blend = blend_darken;     break;
        case BLEND_DIFFERENCE: param->blend = blend_difference; break;
        case BLEND_DIVIDE:     param->blend = blend_divide;     break;
        case BLEND_DODGE:      param->blend = blend_dodge;      break;
        case BLEND_EXCLUSION:  param->blend = blend_exclusion;  break;
        case BLEND_HARDLIGHT:  param->blend = blend_hardlight;  break;
        case BLEND_LIGHTEN:    param->blend = blend_lighten;    break;
        case BLEND_MULTIPLY:   param->blend = blend_multiply;   break;
        case BLEND_NEGATION:   param->blend = blend_negation;   break;
        case BLEND_NORMAL:     param->blend = blend_normal;     break;
        case BLEND_OR:         param->blend = blend_or;         break;
        case BLEND_OVERLAY:    param->blend = blend_overlay;    break;
        case BLEND_PHOENIX:    param->blend = blend_phoenix;    break;
        case BLEND_PINLIGHT:   param->blend = blend_pinlight;   break;
        case BLEND_REFLECT:    param->blend = blend_reflect;    break;
        case BLEND_SCREEN:     param->blend = blend_screen;     break;
        case BLEND_SOFTLIGHT:  param->blend = blend_softlight;  break;
        case BLEND_SUBTRACT:   param->blend = blend_subtract;   break;
        case BLEND_VIVIDLIGHT: param->blend = blend_vividlight; break;
        case BLEND_XOR:        param->blend = blend_xor;        break;

        /* a global expression fills any component without its own */
        if (b->all_expr && !param->expr_str) {
            param->expr_str = av_strdup(b->all_expr);
            if (!param->expr_str)
                return AVERROR(ENOMEM);
        /* a configured expression takes precedence over the mode worker */
        if (param->expr_str) {
            ret = av_expr_parse(&param->e, param->expr_str, var_names,
                                NULL, NULL, NULL, NULL, 0, ctx);
            param->blend = blend_expr;

    b->dinput.process = blend_frame;
/* Advertises the supported pixel formats: 8-bit planar YUV(A)/JPEG-range
 * YUV, planar GBR(A) and gray — both inputs and the output share one.
 * NOTE(review): the function braces, the array's closing brace and the
 * return statement fall outside this chunk. */
static int query_formats(AVFilterContext *ctx)
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_YUVA444P, AV_PIX_FMT_YUVA422P, AV_PIX_FMT_YUVA420P,
        AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVJ440P, AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ411P,
        AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUV440P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV411P, AV_PIX_FMT_YUV410P,
        AV_PIX_FMT_GBRP, AV_PIX_FMT_GBRAP, AV_PIX_FMT_GRAY8, AV_PIX_FMT_NONE
    ff_set_common_formats(ctx, ff_make_format_list(pix_fmts));
/* Output link configuration: validates that both inputs agree on pixel
 * format, dimensions and SAR, then copies the top link's properties to
 * the output and caches subsampling / plane count in the context.
 * NOTE(review): the ret declaration, several closing braces and the
 * return statements (including after ff_dualinput_init) fall outside
 * this chunk. */
static int config_output(AVFilterLink *outlink)
    AVFilterContext *ctx = outlink->src;
    AVFilterLink *toplink = ctx->inputs[TOP];
    AVFilterLink *bottomlink = ctx->inputs[BOTTOM];
    BlendContext *b = ctx->priv;
    const AVPixFmtDescriptor *pix_desc = av_pix_fmt_desc_get(toplink->format);

    if (toplink->format != bottomlink->format) {
        av_log(ctx, AV_LOG_ERROR, "inputs must be of same pixel format\n");
        return AVERROR(EINVAL);
    if (toplink->w != bottomlink->w ||
        toplink->h != bottomlink->h ||
        toplink->sample_aspect_ratio.num != bottomlink->sample_aspect_ratio.num ||
        toplink->sample_aspect_ratio.den != bottomlink->sample_aspect_ratio.den) {
        av_log(ctx, AV_LOG_ERROR, "First input link %s parameters "
               "(size %dx%d, SAR %d:%d) do not match the corresponding "
               "second input link %s parameters (%dx%d, SAR %d:%d)\n",
               ctx->input_pads[TOP].name, toplink->w, toplink->h,
               toplink->sample_aspect_ratio.num,
               toplink->sample_aspect_ratio.den,
               ctx->input_pads[BOTTOM].name, bottomlink->w, bottomlink->h,
               bottomlink->sample_aspect_ratio.num,
               bottomlink->sample_aspect_ratio.den);
        return AVERROR(EINVAL);

    /* the output inherits the top input's geometry and timing */
    outlink->w = toplink->w;
    outlink->h = toplink->h;
    outlink->time_base = toplink->time_base;
    outlink->sample_aspect_ratio = toplink->sample_aspect_ratio;
    outlink->frame_rate = toplink->frame_rate;

    b->hsub = pix_desc->log2_chroma_w;
    b->vsub = pix_desc->log2_chroma_h;
    b->nb_planes = av_pix_fmt_count_planes(toplink->format);

    if ((ret = ff_dualinput_init(ctx, &b->dinput)) < 0)
/* Teardown: releases dual-input state and the parsed per-component
 * expressions (av_expr_free(NULL) is a safe no-op).
 * NOTE(review): the function braces and the `i` declaration fall outside
 * this chunk. */
static av_cold void uninit(AVFilterContext *ctx)
    BlendContext *b = ctx->priv;

    ff_dualinput_uninit(&b->dinput);
    for (i = 0; i < FF_ARRAY_ELEMS(b->params); i++)
        av_expr_free(b->params[i].e);
/* Output pad callback: forwards the request to the dual-input helper,
 * which pulls from whichever input needs a frame.
 * NOTE(review): the function braces fall outside this chunk. */
static int request_frame(AVFilterLink *outlink)
    BlendContext *b = outlink->src->priv;
    return ff_dualinput_request_frame(&b->dinput, outlink);
/* Input pad callback (shared by both inputs): queues the frame in the
 * dual-input helper, which invokes blend_frame() once a pair is ready.
 * NOTE(review): the function braces fall outside this chunk. */
static int filter_frame(AVFilterLink *inlink, AVFrame *buf)
    BlendContext *b = inlink->dst->priv;
    return ff_dualinput_filter_frame(&b->dinput, inlink, buf);
/* Two video inputs (top and bottom), both routed through filter_frame().
 * NOTE(review): the pads' .name initializers, the per-pad braces and the
 * array terminator fall outside this chunk. */
static const AVFilterPad blend_inputs[] = {
        .type          = AVMEDIA_TYPE_VIDEO,
        .filter_frame  = filter_frame,
        .type          = AVMEDIA_TYPE_VIDEO,
        .filter_frame  = filter_frame,
/* Single video output; geometry/timing set in config_output(), frame
 * scheduling driven by request_frame().
 * NOTE(review): the pad's .name initializer, braces and the array
 * terminator fall outside this chunk. */
static const AVFilterPad blend_outputs[] = {
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = config_output,
        .request_frame = request_frame,
/* Filter registration. Supports slice threading (filter_slice) and
 * internal timeline handling.
 * NOTE(review): other initializers (.name, .init, .uninit, ...) and the
 * closing brace fall outside this chunk. */
AVFilter ff_vf_blend = {
    .description   = NULL_IF_CONFIG_SMALL("Blend two video frames into each other."),
    .priv_size     = sizeof(BlendContext),
    .query_formats = query_formats,
    .inputs        = blend_inputs,
    .outputs       = blend_outputs,
    .priv_class    = &blend_class,
    .flags         = AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL | AVFILTER_FLAG_SLICE_THREADS,