Browse code

avfilter: add lut2 filter

Paul B Mahol authored on 2016/08/24 17:36:31
Showing 6 changed files
... ...
@@ -20,6 +20,7 @@ version <next>:
20 20
 - fifo muxer
21 21
 - maskedclamp filter
22 22
 - hysteresis filter
23
+- lut2 filter
23 24
 
24 25
 
25 26
 version 3.1:
... ...
@@ -9253,6 +9253,44 @@ lutyuv=u='(val-maxval/2)*2+maxval/2':v='(val-maxval/2)*2+maxval/2'
9253 9253
 @end example
9254 9254
 @end itemize
9255 9255
 
9256
+@section lut2
9257
+
9258
+Compute and apply a lookup table from two video inputs.
9259
+
9260
+This filter accepts the following parameters:
9261
+@table @option
9262
+@item c0
9263
+set first pixel component expression
9264
+@item c1
9265
+set second pixel component expression
9266
+@item c2
9267
+set third pixel component expression
9268
+@item c3
9269
+set fourth pixel component expression, corresponds to the alpha component
9270
+@end table
9271
+
9272
+Each of them specifies the expression to use for computing the lookup table for
9273
+the corresponding pixel component values.
9274
+
9275
+The exact component associated to each of the @var{c*} options depends on the
+format of the inputs.
9277
+
9278
+The expressions can contain the following constants:
9279
+
9280
+@table @option
9281
+@item w
9282
+@item h
9283
+The input width and height.
9284
+
9285
+@item x
9286
+The first input value for the pixel component.
9287
+
9288
+@item y
9289
+The second input value for the pixel component.
9290
+@end table
9291
+
9292
+All expressions default to "x".
9293
+
9256 9294
 @section maskedclamp
9257 9295
 
9258 9296
 Clamp the first input stream with the second input and third input stream.
... ...
@@ -205,6 +205,7 @@ OBJS-$(CONFIG_KERNDEINT_FILTER)              += vf_kerndeint.o
205 205
 OBJS-$(CONFIG_LENSCORRECTION_FILTER)         += vf_lenscorrection.o
206 206
 OBJS-$(CONFIG_LOOP_FILTER)                   += f_loop.o
207 207
 OBJS-$(CONFIG_LUT_FILTER)                    += vf_lut.o
208
+OBJS-$(CONFIG_LUT2_FILTER)                   += vf_lut2.o framesync.o
208 209
 OBJS-$(CONFIG_LUT3D_FILTER)                  += vf_lut3d.o
209 210
 OBJS-$(CONFIG_LUTRGB_FILTER)                 += vf_lut.o
210 211
 OBJS-$(CONFIG_LUTYUV_FILTER)                 += vf_lut.o
... ...
@@ -222,6 +222,7 @@ void avfilter_register_all(void)
222 222
     REGISTER_FILTER(LENSCORRECTION, lenscorrection, vf);
223 223
     REGISTER_FILTER(LOOP,           loop,           vf);
224 224
     REGISTER_FILTER(LUT,            lut,            vf);
225
+    REGISTER_FILTER(LUT2,           lut2,           vf);
225 226
     REGISTER_FILTER(LUT3D,          lut3d,          vf);
226 227
     REGISTER_FILTER(LUTRGB,         lutrgb,         vf);
227 228
     REGISTER_FILTER(LUTYUV,         lutyuv,         vf);
... ...
@@ -30,7 +30,7 @@
30 30
 #include "libavutil/version.h"
31 31
 
32 32
 #define LIBAVFILTER_VERSION_MAJOR   6
33
-#define LIBAVFILTER_VERSION_MINOR  54
33
+#define LIBAVFILTER_VERSION_MINOR  55
34 34
 #define LIBAVFILTER_VERSION_MICRO 100
35 35
 
36 36
 #define LIBAVFILTER_VERSION_INT AV_VERSION_INT(LIBAVFILTER_VERSION_MAJOR, \
37 37
new file mode 100644
... ...
@@ -0,0 +1,373 @@
0
+/*
1
+ * Copyright (c) 2016 Paul B Mahol
2
+ *
3
+ * This file is part of FFmpeg.
4
+ *
5
+ * FFmpeg is free software; you can redistribute it and/or
6
+ * modify it under the terms of the GNU Lesser General Public
7
+ * License as published by the Free Software Foundation; either
8
+ * version 2.1 of the License, or (at your option) any later version.
9
+ *
10
+ * FFmpeg is distributed in the hope that it will be useful,
11
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
12
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13
+ * Lesser General Public License for more details.
14
+ *
15
+ * You should have received a copy of the GNU Lesser General Public
16
+ * License along with FFmpeg; if not, write to the Free Software
17
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
18
+ */
19
+
20
+#include "libavutil/attributes.h"
21
+#include "libavutil/common.h"
22
+#include "libavutil/eval.h"
23
+#include "libavutil/opt.h"
24
+#include "libavutil/pixdesc.h"
25
+#include "avfilter.h"
26
+#include "drawutils.h"
27
+#include "formats.h"
28
+#include "internal.h"
29
+#include "video.h"
30
+#include "framesync.h"
31
+
32
+static const char *const var_names[] = {
33
+    "w",        ///< width of the input video
34
+    "h",        ///< height of the input video
35
+    "x",        ///< input value for the pixel from input #1
36
+    "y",        ///< input value for the pixel from input #2
37
+    NULL
38
+};
39
+
40
/* Indices into LUT2Context.var_values; must stay in sync with var_names. */
enum var_name {
    VAR_W,          ///< input width
    VAR_H,          ///< input height
    VAR_X,          ///< value from the first input
    VAR_Y,          ///< value from the second input
    VAR_VARS_NB     ///< number of variables
};
47
+
48
+typedef struct LUT2Context {
49
+    const AVClass *class;
50
+
51
+    char   *comp_expr_str[4];
52
+
53
+    AVExpr *comp_expr[4];
54
+    double var_values[VAR_VARS_NB];
55
+    uint16_t *lut[4];  ///< lookup table for each component
56
+    int width[4], height[4];
57
+    int nb_planes;
58
+    int depth, depthx, depthy;
59
+
60
+    void (*lut2)(struct LUT2Context *s, AVFrame *dst, AVFrame *srcx, AVFrame *srcy);
61
+
62
+    FFFrameSync fs;
63
+} LUT2Context;
64
+
65
+#define OFFSET(x) offsetof(LUT2Context, x)
66
+#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
67
+
68
+static const AVOption lut2_options[] = {
69
+    { "c0", "set component #0 expression", OFFSET(comp_expr_str[0]),  AV_OPT_TYPE_STRING, { .str = "x" }, .flags = FLAGS },
70
+    { "c1", "set component #1 expression", OFFSET(comp_expr_str[1]),  AV_OPT_TYPE_STRING, { .str = "x" }, .flags = FLAGS },
71
+    { "c2", "set component #2 expression", OFFSET(comp_expr_str[2]),  AV_OPT_TYPE_STRING, { .str = "x" }, .flags = FLAGS },
72
+    { "c3", "set component #3 expression", OFFSET(comp_expr_str[3]),  AV_OPT_TYPE_STRING, { .str = "x" }, .flags = FLAGS },
73
+    { NULL }
74
+};
75
+
76
+static av_cold void uninit(AVFilterContext *ctx)
77
+{
78
+    LUT2Context *s = ctx->priv;
79
+    int i;
80
+
81
+    for (i = 0; i < 4; i++) {
82
+        av_expr_free(s->comp_expr[i]);
83
+        s->comp_expr[i] = NULL;
84
+        av_freep(&s->comp_expr_str[i]);
85
+        av_freep(&s->lut[i]);
86
+    }
87
+}
88
+
89
+static int query_formats(AVFilterContext *ctx)
90
+{
91
+    static const enum AVPixelFormat pix_fmts[] = {
92
+        AV_PIX_FMT_YUVA444P, AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUV440P,
93
+        AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVJ440P,
94
+        AV_PIX_FMT_YUVA422P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUVA420P, AV_PIX_FMT_YUV420P,
95
+        AV_PIX_FMT_YUVJ422P, AV_PIX_FMT_YUVJ420P,
96
+        AV_PIX_FMT_YUVJ411P, AV_PIX_FMT_YUV411P, AV_PIX_FMT_YUV410P,
97
+        AV_PIX_FMT_YUV420P9, AV_PIX_FMT_YUV422P9, AV_PIX_FMT_YUV444P9,
98
+        AV_PIX_FMT_YUV420P10, AV_PIX_FMT_YUV422P10, AV_PIX_FMT_YUV444P10,
99
+        AV_PIX_FMT_YUV420P12, AV_PIX_FMT_YUV422P12, AV_PIX_FMT_YUV444P12, AV_PIX_FMT_YUV440P12,
100
+        AV_PIX_FMT_YUVA420P9, AV_PIX_FMT_YUVA422P9, AV_PIX_FMT_YUVA444P9,
101
+        AV_PIX_FMT_YUVA420P10, AV_PIX_FMT_YUVA422P10, AV_PIX_FMT_YUVA444P10,
102
+        AV_PIX_FMT_GBRP, AV_PIX_FMT_GBRP9, AV_PIX_FMT_GBRP10,
103
+        AV_PIX_FMT_GBRP12,
104
+        AV_PIX_FMT_GBRAP, AV_PIX_FMT_GBRAP12,
105
+        AV_PIX_FMT_GRAY8,
106
+        AV_PIX_FMT_NONE
107
+    };
108
+
109
+    return ff_set_common_formats(ctx, ff_make_format_list(pix_fmts));
110
+}
111
+
112
+static int config_inputx(AVFilterLink *inlink)
113
+{
114
+    AVFilterContext *ctx = inlink->dst;
115
+    LUT2Context *s = ctx->priv;
116
+    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
117
+    int hsub = desc->log2_chroma_w;
118
+    int vsub = desc->log2_chroma_h;
119
+
120
+    s->nb_planes = av_pix_fmt_count_planes(inlink->format);
121
+    s->height[1] = s->height[2] = AV_CEIL_RSHIFT(inlink->h, vsub);
122
+    s->height[0] = s->height[3] = inlink->h;
123
+    s->width[1]  = s->width[2]  = AV_CEIL_RSHIFT(inlink->w, hsub);
124
+    s->width[0]  = s->width[3]  = inlink->w;
125
+
126
+    s->var_values[VAR_W] = inlink->w;
127
+    s->var_values[VAR_H] = inlink->h;
128
+    s->depthx = desc->comp[0].depth;
129
+
130
+    return 0;
131
+}
132
+
133
+static int config_inputy(AVFilterLink *inlink)
134
+{
135
+    AVFilterContext *ctx = inlink->dst;
136
+    LUT2Context *s = ctx->priv;
137
+    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
138
+
139
+    s->depthy = desc->comp[0].depth;
140
+
141
+    return 0;
142
+}
143
+
144
+static void lut2_8bit(struct LUT2Context *s, AVFrame *out, AVFrame *srcx, AVFrame *srcy)
145
+{
146
+    int p, y, x;
147
+
148
+    for (p = 0; p < s->nb_planes; p++) {
149
+        const uint16_t *lut = s->lut[p];
150
+        const uint8_t *srcxx, *srcyy;
151
+        uint8_t *dst;
152
+
153
+        dst   = out->data[p];
154
+        srcxx = srcx->data[p];
155
+        srcyy = srcy->data[p];
156
+
157
+        for (y = 0; y < s->height[p]; y++) {
158
+            for (x = 0; x < s->width[p]; x++) {
159
+                dst[x] = lut[(srcyy[x] << s->depthx) | srcxx[x]];
160
+            }
161
+
162
+            dst   += out->linesize[p];
163
+            srcxx += srcx->linesize[p];
164
+            srcyy += srcy->linesize[p];
165
+        }
166
+    }
167
+}
168
+
169
+static void lut2_16bit(struct LUT2Context *s, AVFrame *out, AVFrame *srcx, AVFrame *srcy)
170
+{
171
+    int p, y, x;
172
+
173
+    for (p = 0; p < s->nb_planes; p++) {
174
+        const uint16_t *lut = s->lut[p];
175
+        const uint16_t *srcxx, *srcyy;
176
+        uint16_t *dst;
177
+
178
+        dst   = (uint16_t *)out->data[p];
179
+        srcxx = (uint16_t *)srcx->data[p];
180
+        srcyy = (uint16_t *)srcy->data[p];
181
+
182
+        for (y = 0; y < s->height[p]; y++) {
183
+            for (x = 0; x < s->width[p]; x++) {
184
+                dst[x] = lut[(srcyy[x] << s->depthx) | srcxx[x]];
185
+            }
186
+
187
+            dst   += out->linesize[p]  / 2;
188
+            srcxx += srcx->linesize[p] / 2;
189
+            srcyy += srcy->linesize[p] / 2;
190
+        }
191
+    }
192
+}
193
+
194
+static int process_frame(FFFrameSync *fs)
195
+{
196
+    AVFilterContext *ctx = fs->parent;
197
+    LUT2Context *s = fs->opaque;
198
+    AVFilterLink *outlink = ctx->outputs[0];
199
+    AVFrame *out, *srcx, *srcy;
200
+    int ret;
201
+
202
+    if ((ret = ff_framesync_get_frame(&s->fs, 0, &srcx, 0)) < 0 ||
203
+        (ret = ff_framesync_get_frame(&s->fs, 1, &srcy, 0)) < 0)
204
+        return ret;
205
+
206
+    if (ctx->is_disabled) {
207
+        out = av_frame_clone(srcx);
208
+        if (!out)
209
+            return AVERROR(ENOMEM);
210
+    } else {
211
+        out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
212
+        if (!out)
213
+            return AVERROR(ENOMEM);
214
+        av_frame_copy_props(out, srcx);
215
+
216
+        s->lut2(s, out, srcx, srcy);
217
+    }
218
+
219
+    out->pts = av_rescale_q(s->fs.pts, s->fs.time_base, outlink->time_base);
220
+
221
+    return ff_filter_frame(outlink, out);
222
+}
223
+
224
+static int config_output(AVFilterLink *outlink)
225
+{
226
+    AVFilterContext *ctx = outlink->src;
227
+    LUT2Context *s = ctx->priv;
228
+    AVFilterLink *srcx = ctx->inputs[0];
229
+    AVFilterLink *srcy = ctx->inputs[1];
230
+    FFFrameSyncIn *in;
231
+    int p, ret;
232
+
233
+    s->depth = s->depthx + s->depthy;
234
+
235
+    if (srcx->format != srcy->format) {
236
+        av_log(ctx, AV_LOG_ERROR, "inputs must be of same pixel format\n");
237
+        return AVERROR(EINVAL);
238
+    }
239
+    if (srcx->w                       != srcy->w ||
240
+        srcx->h                       != srcy->h ||
241
+        srcx->sample_aspect_ratio.num != srcy->sample_aspect_ratio.num ||
242
+        srcx->sample_aspect_ratio.den != srcy->sample_aspect_ratio.den) {
243
+        av_log(ctx, AV_LOG_ERROR, "First input link %s parameters "
244
+               "(size %dx%d, SAR %d:%d) do not match the corresponding "
245
+               "second input link %s parameters (%dx%d, SAR %d:%d)\n",
246
+               ctx->input_pads[0].name, srcx->w, srcx->h,
247
+               srcx->sample_aspect_ratio.num,
248
+               srcx->sample_aspect_ratio.den,
249
+               ctx->input_pads[1].name,
250
+               srcy->w, srcy->h,
251
+               srcy->sample_aspect_ratio.num,
252
+               srcy->sample_aspect_ratio.den);
253
+        return AVERROR(EINVAL);
254
+    }
255
+
256
+    outlink->w = srcx->w;
257
+    outlink->h = srcx->h;
258
+    outlink->time_base = srcx->time_base;
259
+    outlink->sample_aspect_ratio = srcx->sample_aspect_ratio;
260
+    outlink->frame_rate = srcx->frame_rate;
261
+
262
+    if ((ret = ff_framesync_init(&s->fs, ctx, 2)) < 0)
263
+        return ret;
264
+
265
+    in = s->fs.in;
266
+    in[0].time_base = srcx->time_base;
267
+    in[1].time_base = srcy->time_base;
268
+    in[0].sync   = 1;
269
+    in[0].before = EXT_STOP;
270
+    in[0].after  = EXT_INFINITY;
271
+    in[1].sync   = 1;
272
+    in[1].before = EXT_STOP;
273
+    in[1].after  = EXT_INFINITY;
274
+    s->fs.opaque   = s;
275
+    s->fs.on_event = process_frame;
276
+
277
+    s->lut2 = s->depth > 16 ? lut2_16bit : lut2_8bit;
278
+
279
+    for (p = 0; p < s->nb_planes; p++) {
280
+        s->lut[p] = av_malloc_array(1 << s->depth, sizeof(uint16_t));
281
+        if (!s->lut[p])
282
+            return AVERROR(ENOMEM);
283
+    }
284
+
285
+    for (p = 0; p < s->nb_planes; p++) {
286
+        double res;
287
+        int x, y;
288
+
289
+        /* create the parsed expression */
290
+        av_expr_free(s->comp_expr[p]);
291
+        s->comp_expr[p] = NULL;
292
+        ret = av_expr_parse(&s->comp_expr[p], s->comp_expr_str[p],
293
+                            var_names, NULL, NULL, NULL, NULL, 0, ctx);
294
+        if (ret < 0) {
295
+            av_log(ctx, AV_LOG_ERROR,
296
+                   "Error when parsing the expression '%s' for the component %d.\n",
297
+                   s->comp_expr_str[p], p);
298
+            return AVERROR(EINVAL);
299
+        }
300
+
301
+        /* compute the lut */
302
+        for (y = 0; y < (1 << s->depthx); y++) {
303
+            s->var_values[VAR_Y] = y;
304
+            for (x = 0; x < (1 << s->depthx); x++) {
305
+                s->var_values[VAR_X] = x;
306
+                res = av_expr_eval(s->comp_expr[p], s->var_values, s);
307
+                if (isnan(res)) {
308
+                    av_log(ctx, AV_LOG_ERROR,
309
+                           "Error when evaluating the expression '%s' for the values %d and %d for the component %d.\n",
310
+                           s->comp_expr_str[p], x, y, p);
311
+                    return AVERROR(EINVAL);
312
+                }
313
+
314
+                s->lut[p][(y << s->depthx) + x] = res;
315
+            }
316
+        }
317
+    }
318
+
319
+    return ff_framesync_configure(&s->fs);
320
+}
321
+
322
+static int filter_frame(AVFilterLink *inlink, AVFrame *buf)
323
+{
324
+    LUT2Context *s = inlink->dst->priv;
325
+    return ff_framesync_filter_frame(&s->fs, inlink, buf);
326
+}
327
+
328
+static int request_frame(AVFilterLink *outlink)
329
+{
330
+    LUT2Context *s = outlink->src->priv;
331
+    return ff_framesync_request_frame(&s->fs, outlink);
332
+}
333
+
334
+static const AVFilterPad inputs[] = {
335
+    {
336
+        .name         = "srcx",
337
+        .type         = AVMEDIA_TYPE_VIDEO,
338
+        .filter_frame = filter_frame,
339
+        .config_props = config_inputx,
340
+    },
341
+    {
342
+        .name         = "srcy",
343
+        .type         = AVMEDIA_TYPE_VIDEO,
344
+        .filter_frame = filter_frame,
345
+        .config_props = config_inputy,
346
+    },
347
+    { NULL }
348
+};
349
+
350
+static const AVFilterPad outputs[] = {
351
+    {
352
+        .name          = "default",
353
+        .type          = AVMEDIA_TYPE_VIDEO,
354
+        .config_props  = config_output,
355
+        .request_frame = request_frame,
356
+    },
357
+    { NULL }
358
+};
359
+
360
+AVFILTER_DEFINE_CLASS(lut2);
361
+
362
+AVFilter ff_vf_lut2 = {
363
+    .name          = "lut2",
364
+    .description   = NULL_IF_CONFIG_SMALL("Compute and apply a lookup table from two video inputs."),
365
+    .priv_size     = sizeof(LUT2Context),
366
+    .priv_class    = &lut2_class,
367
+    .uninit        = uninit,
368
+    .query_formats = query_formats,
369
+    .inputs        = inputs,
370
+    .outputs       = outputs,
371
+    .flags         = AVFILTER_FLAG_SUPPORT_TIMELINE_INTERNAL,
372
+};